diff --git a/.github/.codecov.yml b/.github/.codecov.yml new file mode 100644 index 00000000000000..1faf5a6bab4644 --- /dev/null +++ b/.github/.codecov.yml @@ -0,0 +1,65 @@ +comment: + layout: "header, files, footer" # remove "new" from "header" and "footer" + hide_project_coverage: true # set to false + require_changes: false # if true: only post the comment if coverage changes + +codecov: + #due to ci-optimization, reports for modules that have not changed may be quite old + max_report_age: off + +flag_management: + default_rules: # the rules that will be followed for any flag added, generally + carryforward: true + statuses: + - type: project + target: auto + threshold: 0% #Not enforcing project coverage yet. + - type: patch + target: 90% + individual_flags: # exceptions to the default rules above, stated flag by flag + - name: frontend + paths: + - "datahub-frontend/**" + - "datahub-web-react/**" + - name: backend + paths: + - "metadata-models/**" + - "datahub-upgrade/**" + - "entity-registry/**" + - "li-utils/**" + - "metadata-auth/**" + - "metadata-dao-impl/**" + - "metadata-events/**" + - "metadata-jobs/**" + - "metadata-service/**" + - "metadata-utils/**" + - "metadata-operation-context/**" + - "datahub-graphql-core/**" + - name: metadata-io + paths: + - "metadata-io/**" + - name: ingestion + paths: + - "metadata-ingestion/**" + - name: ingestion-airflow + paths: + - "metadata-ingestion-modules/airflow-plugin/**" + - name: ingestion-dagster + paths: + - "metadata-ingestion-modules/dagster-plugin/**" + - name: ingestion-gx-plugin + paths: + - "metadata-ingestion-modules/gx-plugin/**" + - name: ingestion-prefect + paths: + - "metadata-ingestion-modules/prefect-plugin/**" +coverage: + status: + project: + default: + target: 0% # no threshold enforcement yet + only_pulls: true + patch: + default: + target: 90% # for new code added in the patch + only_pulls: true diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml 
index 0d435963382675..8a81859ae903a8 100644 --- a/.github/actions/ci-optimization/action.yml +++ b/.github/actions/ci-optimization/action.yml @@ -13,16 +13,16 @@ outputs: value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'true' }} backend-change: description: "Backend code has changed" - value: ${{ steps.filter.outputs.backend == 'true' }} + value: ${{ steps.filter.outputs.backend == 'true' || steps.trigger.outputs.trigger == 'manual' }} ingestion-change: description: "Ingestion code has changed" - value: ${{ steps.filter.outputs.ingestion == 'true' }} + value: ${{ steps.filter.outputs.ingestion == 'true' || steps.trigger.outputs.trigger == 'manual' }} ingestion-base-change: description: "Ingestion base image docker image has changed" value: ${{ steps.filter.outputs.ingestion-base == 'true' }} frontend-change: description: "Frontend code has changed" - value: ${{ steps.filter.outputs.frontend == 'true' }} + value: ${{ steps.filter.outputs.frontend == 'true' || steps.trigger.outputs.trigger == 'manual' }} docker-change: description: "Docker code has changed" value: ${{ steps.filter.outputs.docker == 'true' }} @@ -44,6 +44,15 @@ outputs: runs: using: "composite" steps: + - name: Check trigger type + id: trigger # Add an ID to reference this step + shell: bash + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo "trigger=manual" >> $GITHUB_OUTPUT + else + echo "trigger=pr" >> $GITHUB_OUTPUT + fi - uses: dorny/paths-filter@v3 id: filter with: diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index ccaff510c120aa..cc2c2bd86416d7 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -97,10 +97,11 @@ runs: cache-to: | type=inline - name: Upload image locally for testing (if not publishing) - uses: 
ishworkh/docker-image-artifact-upload@v1 + uses: ishworkh/container-image-artifact-upload@v2.0.0 if: ${{ inputs.publish != 'true' }} with: image: ${{ steps.single_tag.outputs.SINGLE_TAG }} + retention_days: "2" # Code for building multi-platform images and pushing to Docker Hub. - name: Set up QEMU diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000000..0d08e261a2ae9d --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + - package-ecosystem: "github-actions" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" diff --git a/.github/pr-labeler-config.yml b/.github/pr-labeler-config.yml index 2625cb1cfdff68..e3fcc15b637e22 100644 --- a/.github/pr-labeler-config.yml +++ b/.github/pr-labeler-config.yml @@ -1,22 +1,32 @@ ingestion: - - "metadata-ingestion/**/*" - - "metadata-ingestion-modules/**/*" - - "metadata-integration/**/*" +- changed-files: + - any-glob-to-any-file: + - "metadata-ingestion/**/*" + - "metadata-ingestion-modules/**/*" + - "metadata-integration/**/*" devops: - - "docker/**/*" - - ".github/**/*" - - "perf-test/**/*" - - "metadata-service/**/*" +- changed-files: + - any-glob-to-any-file: + - "docker/**/*" + - ".github/**/*" + - "perf-test/**/*" + - "metadata-service/**/*" product: - - "datahub-web-react/**/*" - - "datahub-frontend/**/*" - - "datahub-graphql-core/**/*" - - "metadata-io/**/*" +- changed-files: + - any-glob-to-any-file: + - "datahub-web-react/**/*" + - "datahub-frontend/**/*" + - "datahub-graphql-core/**/*" + - "metadata-io/**/*" docs: - - 
"docs/**/*" +- changed-files: + - any-glob-to-any-file: + - "docs/**/*" smoke_test: - - "smoke-test/**/*" +- changed-files: + - any-glob-to-any-file: + - "smoke-test/**/*" diff --git a/.github/scripts/generate_pre_commit.py b/.github/scripts/generate_pre_commit.py new file mode 100755 index 00000000000000..2db73fd357ff5f --- /dev/null +++ b/.github/scripts/generate_pre_commit.py @@ -0,0 +1,279 @@ +"""Generate pre-commit hooks for Java and Python projects. + +This script scans a repository for Java and Python projects and generates appropriate +pre-commit hooks for linting and formatting. It also merges in additional hooks from +an override file. +""" + +import os +from dataclasses import dataclass +from enum import Enum, auto +from pathlib import Path +import datetime + +import yaml + + +class ProjectType(Enum): + """Types of projects supported for hook generation.""" + + JAVA = auto() + PYTHON = auto() + + +@dataclass +class Project: + """Represents a project found in the repository.""" + + path: str + type: ProjectType + + @property + def gradle_path(self) -> str: + """Convert path to Gradle task format.""" + return ":" + self.path.replace("/", ":") + + @property + def project_id(self) -> str: + """Generate a unique identifier for the project.""" + return self.path.replace("/", "-").replace(".", "-") + + +class ProjectFinder: + """Find Java and Python projects in a repository.""" + + JAVA_PATTERNS = [ + "plugins.hasPlugin('java')", + "apply plugin: 'java'", + "id 'java'", + "id 'java-library'", + "plugins.hasPlugin('java-library')", + "apply plugin: 'java-library'", + "plugins.hasPlugin('pegasus')", + "org.springframework.boot", + ] + + EXCLUDED_DIRS = {".git", "build", "node_modules", ".tox", "venv"} + SOURCE_EXTENSIONS = {".java", ".kt", ".groovy"} + + def __init__(self, root_dir: str): + self.root_path = Path(root_dir) + + def find_all_projects(self) -> list[Project]: + """Find all Java and Python projects in the repository.""" + java_projects = 
self._find_java_projects() + python_projects = self._find_python_projects() + + all_projects = [] + all_projects.extend( + Project(path=p, type=ProjectType.JAVA) for p in java_projects + ) + all_projects.extend( + Project(path=p, type=ProjectType.PYTHON) for p in python_projects + ) + + return sorted(all_projects, key=lambda p: p.path) + + def _find_java_projects(self) -> set[str]: + """Find all Java projects by checking build.gradle files.""" + java_projects = set() + + # Search both build.gradle and build.gradle.kts + for pattern in ["build.gradle", "build.gradle.kts"]: + for gradle_file in self.root_path.rglob(pattern): + if self._should_skip_directory(gradle_file.parent): + continue + + if self._is_java_project(gradle_file): + java_projects.add(self._get_relative_path(gradle_file.parent)) + + return { + p + for p in java_projects + if "buildSrc" not in p and "spark-smoke-test" not in p and p != "." + } + + def _find_python_projects(self) -> set[str]: + """Find all Python projects by checking for setup.py or pyproject.toml.""" + python_projects = set() + + for file_name in ["setup.py", "pyproject.toml"]: + for path in self.root_path.rglob(file_name): + if self._should_skip_directory(path.parent): + continue + + rel_path = self._get_relative_path(path.parent) + if "examples" not in rel_path: + python_projects.add(rel_path) + + return python_projects + + def _should_skip_directory(self, path: Path) -> bool: + """Check if directory should be skipped.""" + return any( + part in self.EXCLUDED_DIRS or part.startswith(".") for part in path.parts + ) + + def _is_java_project(self, gradle_file: Path) -> bool: + """Check if a Gradle file represents a Java project.""" + try: + content = gradle_file.read_text() + has_java_plugin = any(pattern in content for pattern in self.JAVA_PATTERNS) + + if has_java_plugin: + # Verify presence of source files + return any( + list(gradle_file.parent.rglob(f"*{ext}")) + for ext in self.SOURCE_EXTENSIONS + ) + return False + + except 
Exception as e: + print(f"Warning: Error reading {gradle_file}: {e}") + return False + + def _get_relative_path(self, path: Path) -> str: + """Get relative path from root, normalized with forward slashes.""" + return str(path.relative_to(self.root_path)).replace("\\", "/") + + +class HookGenerator: + """Generate pre-commit hooks for projects.""" + + def __init__(self, projects: list[Project], override_file: str = None): + self.projects = projects + self.override_file = override_file + + def generate_config(self) -> dict: + """Generate the complete pre-commit config.""" + hooks = [] + + for project in self.projects: + if project.type == ProjectType.PYTHON: + hooks.append(self._generate_lint_fix_hook(project)) + else: # ProjectType.JAVA + hooks.append(self._generate_spotless_hook(project)) + + config = {"repos": [{"repo": "local", "hooks": hooks}]} + + # Merge override hooks if they exist + if self.override_file and os.path.exists(self.override_file): + try: + with open(self.override_file, 'r') as f: + override_config = yaml.safe_load(f) + + if override_config and 'repos' in override_config: + for override_repo in override_config['repos']: + matching_repo = next( + (repo for repo in config['repos'] + if repo['repo'] == override_repo['repo']), + None + ) + + if matching_repo: + matching_repo['hooks'].extend(override_repo.get('hooks', [])) + else: + config['repos'].append(override_repo) + + print(f"Merged additional hooks from {self.override_file}") + except Exception as e: + print(f"Warning: Error reading override file {self.override_file}: {e}") + + return config + + def _generate_lint_fix_hook(self, project: Project) -> dict: + """Generate a lint-fix hook for Python projects.""" + return { + "id": f"{project.project_id}-lint-fix", + "name": f"{project.path} Lint Fix", + "entry": f"./gradlew {project.gradle_path}:lintFix", + "language": "system", + "files": f"^{project.path}/.*\\.py$", + "pass_filenames": False, + } + + def _generate_spotless_hook(self, project: 
Project) -> dict: + """Generate a spotless hook for Java projects.""" + return { + "id": f"{project.project_id}-spotless", + "name": f"{project.path} Spotless Apply", + "entry": f"./gradlew {project.gradle_path}:spotlessApply", + "language": "system", + "files": f"^{project.path}/.*\\.java$", + "pass_filenames": False, + } + + +class PrecommitDumper(yaml.Dumper): + """Custom YAML dumper that maintains proper indentation.""" + + def increase_indent(self, flow=False, *args, **kwargs): + return super().increase_indent(flow=flow, indentless=False) + + +def write_yaml_with_spaces(file_path: str, data: dict): + """Write YAML file with extra spacing between hooks and a timestamp header.""" + with open(file_path, "w") as f: + # Add timestamp header + current_time = datetime.datetime.now(datetime.timezone.utc) + formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S %Z") + header = f"# Auto-generated by .github/scripts/generate_pre_commit.py at {formatted_time}\n" + f.write(header) + header = f"# Do not edit this file directly. 
Run the script to regenerate.\n" + f.write(header) + header = f"# Add additional hooks in .github/scripts/pre-commit-override.yaml\n" + f.write(header) + + # Write the YAML content + yaml_str = yaml.dump( + data, Dumper=PrecommitDumper, sort_keys=False, default_flow_style=False + ) + + # Add extra newline between hooks + lines = yaml_str.split("\n") + result = [] + in_hook = False + + for line in lines: + if line.strip().startswith("- id:"): + if in_hook: # If we were already in a hook, add extra newline + result.append("") + in_hook = True + elif not line.strip() and in_hook: + in_hook = False + + result.append(line) + + f.write("\n".join(result)) + + +def main(): + root_dir = os.path.abspath(os.curdir) + override_file = ".github/scripts/pre-commit-override.yaml" + + # Find projects + finder = ProjectFinder(root_dir) + projects = finder.find_all_projects() + + # Print summary + print("Found projects:") + print("\nJava projects:") + for project in projects: + if project.type == ProjectType.JAVA: + print(f" - {project.path}") + + print("\nPython projects:") + for project in projects: + if project.type == ProjectType.PYTHON: + print(f" - {project.path}") + + # Generate and write config + generator = HookGenerator(projects, override_file) + config = generator.generate_config() + write_yaml_with_spaces(".pre-commit-config.yaml", config) + + print("\nGenerated .pre-commit-config.yaml") + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/.github/scripts/pre-commit-override.yaml b/.github/scripts/pre-commit-override.yaml new file mode 100644 index 00000000000000..ecd3c97ad61efa --- /dev/null +++ b/.github/scripts/pre-commit-override.yaml @@ -0,0 +1,9 @@ +repos: + - repo: local + hooks: + - id: smoke-test-cypress-lint-fix + name: smoke-test cypress Lint Fix + entry: ./gradlew :smoke-test:cypressLintFix + language: system + files: ^smoke-test/tests/cypress/.*\.tsx$ + pass_filenames: false \ No newline at end of file diff --git 
a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 66a08dc63aa0de..31c77d754dc69d 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -18,6 +18,7 @@ on: - "metadata-models/**" release: types: [published] + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -27,36 +28,27 @@ jobs: airflow-plugin: runs-on: ubuntu-latest env: - SPARK_VERSION: 3.0.3 DATAHUB_TELEMETRY_ENABLED: false strategy: matrix: include: # Note: this should be kept in sync with tox.ini. - python-version: "3.8" - extra_pip_requirements: "apache-airflow~=2.1.4" - extra_pip_extras: plugin-v1 - - python-version: "3.8" - extra_pip_requirements: "apache-airflow~=2.2.4" - extra_pip_extras: plugin-v1 + extra_pip_requirements: "apache-airflow~=2.3.4" + extra_pip_extras: test-airflow23 - python-version: "3.10" extra_pip_requirements: "apache-airflow~=2.4.3" - extra_pip_extras: plugin-v2,test-airflow24 + extra_pip_extras: test-airflow24 - python-version: "3.10" extra_pip_requirements: "apache-airflow~=2.6.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt" - extra_pip_extras: plugin-v2 - python-version: "3.10" extra_pip_requirements: "apache-airflow~=2.7.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt" - extra_pip_extras: plugin-v2 - python-version: "3.10" extra_pip_requirements: "apache-airflow~=2.8.1 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt" - extra_pip_extras: plugin-v2 - python-version: "3.11" extra_pip_requirements: "apache-airflow~=2.9.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.9.3/constraints-3.11.txt" - extra_pip_extras: plugin-v2 - python-version: "3.11" - extra_pip_requirements: "apache-airflow~=2.10.2 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.11.txt" - 
extra_pip_extras: plugin-v2 + extra_pip_requirements: "apache-airflow~=2.10.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.10.3/constraints-3.11.txt" fail-fast: false steps: - name: Set up JDK 17 @@ -64,7 +56,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v5 with: @@ -77,7 +69,7 @@ jobs: - name: pip freeze show list installed if: always() run: source metadata-ingestion-modules/airflow-plugin/venv/bin/activate && uv pip freeze - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ always() && matrix.python-version == '3.10' && matrix.extra_pip_requirements == 'apache-airflow>=2.7.0' }} with: name: Test Results (Airflow Plugin ${{ matrix.python-version}}) @@ -86,22 +78,27 @@ jobs: **/build/test-results/test/** **/junit.*.xml !**/binary/** - - name: Upload coverage to Codecov + - name: Upload coverage to Codecov with ingestion flag if: always() - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} - directory: . 
+ directory: ./build/coverage-reports/metadata-ingestion-modules/airflow-plugin/ fail_ci_if_error: false - flags: airflow,airflow-${{ matrix.extra_pip_extras }} + flags: ingestion-airflow name: pytest-airflow-${{ matrix.python-version }}-${{ matrix.extra_pip_requirements }} verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} event-file: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 412c962cb6e36f..7f7dcb9c1f7548 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -12,6 +12,7 @@ on: paths-ignore: - "docs/**" - "**.md" + workflow_dispatch: release: types: [published] @@ -62,7 +63,7 @@ jobs: sudo apt-get remove 'dotnet-*' azure-cli || true sudo rm -rf /usr/local/lib/android/ || true sudo docker image prune -a -f || true - - uses: szenius/set-timezone@v1.2 + - uses: szenius/set-timezone@v2.0 with: timezoneLinux: ${{ matrix.timezone }} - name: Check out the repo @@ -75,12 +76,14 @@ jobs: path: | ~/.cache/uv key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh - name: Set up JDK 17 uses: actions/setup-java@v4 with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Gradle build (and test) for NOT metadata ingestion if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} # datahub-schematron:cli excluded due to dependency on metadata-ingestion @@ -107,16 +110,20 @@ jobs: if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }} run: | ./gradlew 
:datahub-frontend:build :datahub-web-react:build --parallel - env: - NODE_OPTIONS: "--max-old-space-size=3072" - name: Gradle compile (jdk8) for legacy Spark if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} run: | ./gradlew -PjavaClassVersionDefault=8 :metadata-integration:java:spark-lineage:compileJava - - uses: actions/upload-artifact@v3 + - name: Gather coverage files + run: | + echo "BACKEND_FILES=`find ./build/coverage-reports/ -type f | grep -E '(metadata-models|entity-registry|datahub-graphql-core|metadata-io|metadata-jobs|metadata-utils|metadata-service|metadata-dao-impl|metadata-operation|li-utils|metadata-integration|metadata-events|metadata-auth|ingestion-scheduler|notifications|datahub-upgrade)' | xargs | sed 's/ /,/g'`" >> $GITHUB_ENV + echo "FRONTEND_FILES=`find ./build/coverage-reports/ -type f | grep -E '(datahub-frontend|datahub-web-react).*\.(xml|json)$' | xargs | sed 's/ /,/g'`" >> $GITHUB_ENV + - name: Generate tz artifact name + run: echo "NAME_TZ=$(echo ${{ matrix.timezone }} | tr '/' '-')" >> $GITHUB_ENV + - uses: actions/upload-artifact@v4 if: always() with: - name: Test Results (build) + name: Test Results (build) - ${{ matrix.command}}-${{ env.NAME_TZ }} path: | **/build/reports/tests/test/** **/build/test-results/test/** @@ -124,6 +131,35 @@ jobs: !**/binary/** - name: Ensure codegen is updated uses: ./.github/actions/ensure-codegen-updated + - name: Upload backend coverage to Codecov + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ${{ env.BACKEND_FILES }} + disable_search: true + #handle_no_reports_found: true + fail_ci_if_error: false + flags: backend + name: ${{ matrix.command }} + verbose: true + - name: Upload frontend coverage to Codecov + if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }} + 
uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ${{ env.FRONTEND_FILES }} + disable_search: true + #handle_no_reports_found: true + fail_ci_if_error: false + flags: frontend + name: ${{ matrix.command }} + verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} quickstart-compose-validation: runs-on: ubuntu-latest @@ -142,7 +178,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Event File - path: ${{ github.event_path }} \ No newline at end of file + path: ${{ github.event_path }} diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 7a49f32729ec1f..1b284578483f30 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -5,12 +5,12 @@ on: branches: - master paths: - - "metadata-integration" + - "metadata-integration/**" pull_request: branches: - "**" paths: - - "metadata-integration" + - "metadata-integration/**" release: types: [published] @@ -28,15 +28,22 @@ jobs: runs-on: ubuntu-latest steps: - uses: acryldata/sane-checkout-action@v3 + - uses: actions/setup-python@v5 + with: + python-version: "3.10" + - uses: actions/cache@v4 + with: + path: | + ~/.cache/uv + key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh - name: Set up JDK 17 uses: actions/setup-java@v4 with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 - - uses: actions/setup-python@v5 - with: - python-version: "3.10" + - uses: gradle/actions/setup-gradle@v4 - name: check ${{ matrix.command }} jar run: | ./gradlew :metadata-integration:java:${{ matrix.command }}:build --info diff --git a/.github/workflows/close-stale-issues.yml 
b/.github/workflows/close-stale-issues.yml index 98e3041f288040..005f41b767ea6d 100644 --- a/.github/workflows/close-stale-issues.yml +++ b/.github/workflows/close-stale-issues.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v6 + - uses: actions/stale@v9 with: ascending: true operations-per-run: 100 diff --git a/.github/workflows/contributor-open-pr-comment.yml b/.github/workflows/contributor-open-pr-comment.yml index 2f700290ee0f28..fe60601b0159bd 100644 --- a/.github/workflows/contributor-open-pr-comment.yml +++ b/.github/workflows/contributor-open-pr-comment.yml @@ -12,17 +12,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Get and Format Username (PR only) if: github.event_name == 'pull_request' run: | - formatted_username=$(echo "${{ github.event.pull_request.user.login }}" | tr '[:upper:]' '[:lower:]' | sed 's/ /-/g') - echo "FORMATTED_USERNAME=$formatted_username" >> $GITHUB_ENV + formatted_username="$(echo "${{ github.event.pull_request.user.login }}" | tr '[:upper:]' '[:lower:]' | sed 's/ /-/g')" + echo "FORMATTED_USERNAME=${formatted_username}" >> "$GITHUB_ENV" - name: Create Comment (PR only) if: github.event_name == 'pull_request' - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: script: | if (context.payload.pull_request) { diff --git a/.github/workflows/dagster-plugin.yml b/.github/workflows/dagster-plugin.yml index 37b6c93ec841ab..61fddaeb7770bb 100644 --- a/.github/workflows/dagster-plugin.yml +++ b/.github/workflows/dagster-plugin.yml @@ -18,6 +18,7 @@ on: - "metadata-models/**" release: types: [published] + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -27,15 +28,14 @@ jobs: dagster-plugin: runs-on: ubuntu-latest env: - SPARK_VERSION: 3.0.3 DATAHUB_TELEMETRY_ENABLED: false strategy: matrix: - python-version: ["3.8", "3.10"] + 
python-version: ["3.9", "3.11"] include: - - python-version: "3.8" + - python-version: "3.9" extraPythonRequirement: "dagster>=1.3.3" - - python-version: "3.10" + - python-version: "3.11" extraPythonRequirement: "dagster>=1.3.3" fail-fast: false steps: @@ -44,7 +44,8 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: actions/checkout@v4 + - uses: gradle/actions/setup-gradle@v4 + - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -56,30 +57,35 @@ jobs: - name: pip freeze show list installed if: always() run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && uv pip freeze - - uses: actions/upload-artifact@v3 - if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }} + - uses: actions/upload-artifact@v4 + if: ${{ always() && matrix.python-version == '3.11' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }} with: name: Test Results (dagster Plugin ${{ matrix.python-version}}) path: | **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml - - name: Upload coverage to Codecov + - name: Upload coverage to Codecov with ingestion flag if: always() - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} - directory: . 
+ directory: ./build/coverage-reports/metadata-ingestion-modules/dagster-plugin/ fail_ci_if_error: false - flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} + flags: ingestion-dagster-plugin name: pytest-dagster verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} event-file: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/docker-postgres-setup.yml b/.github/workflows/docker-postgres-setup.yml index 956f3f7b1c3903..c028bfb55d48d5 100644 --- a/.github/workflows/docker-postgres-setup.yml +++ b/.github/workflows/docker-postgres-setup.yml @@ -52,7 +52,7 @@ jobs: with: images: | acryldata/datahub-postgres-setup - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' }} diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 49dd26e1cd27e3..5f944c8e28769f 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -130,7 +130,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Run lint on smoke test if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} run: | @@ -154,7 +154,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image @@ -186,12 +186,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: 
acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -205,7 +205,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -225,7 +225,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image @@ -257,12 +257,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -276,7 +276,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: 
github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -296,7 +296,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image @@ -328,12 +328,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -347,7 +347,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -367,7 +367,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image @@ -399,12 +399,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ 
needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -418,7 +418,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -438,15 +438,13 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel mv ./datahub-frontend/build/distributions/datahub-frontend-*.zip datahub-frontend.zip - env: - NODE_OPTIONS: "--max-old-space-size=3072" - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: @@ -472,12 +470,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: actions/checkout@v4 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -491,7 +489,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: 
github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -533,12 +531,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -552,7 +550,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -594,12 +592,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -613,7 +611,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -655,12 
+653,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -674,7 +672,7 @@ jobs: ignore-unfixed: true vuln-type: "os,library" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -727,7 +725,7 @@ jobs: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Download Base Image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} @@ -775,7 +773,7 @@ jobs: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Download Base Image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && 
needs.setup.outputs.unique_tag || 'head' }} @@ -831,12 +829,12 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Build codegen if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish =='true' }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} @@ -883,12 +881,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image Slim Image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }} with: image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }} - name: Run Trivy vulnerability scanner Slim Image - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -903,7 +901,7 @@ jobs: vuln-type: "os,library" timeout: 15m - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -932,12 +930,12 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: 
Build codegen if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} @@ -982,12 +980,12 @@ jobs: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 - name: Download image Full Image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.datahub_ingestion_full_build.outputs.needs_artifact_download == 'true' }} with: image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_full_build.outputs.tag }} - name: Run Trivy vulnerability scanner Full Image - uses: aquasecurity/trivy-action@0.26.0 + uses: aquasecurity/trivy-action@0.29.0 env: TRIVY_OFFLINE_SCAN: true TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 @@ -1002,7 +1000,7 @@ jobs: vuln-type: "os,library" timeout: 15m - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: "trivy-results.sarif" @@ -1011,18 +1009,39 @@ jobs: needs: setup outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} + cypress_batch_count: ${{ steps.set-batch-count.outputs.cypress_batch_count }} + python_batch_count: ${{ steps.set-batch-count.outputs.python_batch_count }} steps: + - id: set-batch-count + # Tests are split simply to ensure the configured number of batches for parallelization. 
This may need some + # increase as a new tests added increase the duration where an additional parallel batch helps. + # python_batch_count is used to split pytests in the smoke-test (batches of actual test functions) + # cypress_batch_count is used to split the collection of cypress test specs into batches. + run: | + echo "cypress_batch_count=5" >> "$GITHUB_OUTPUT" + echo "python_batch_count=3" >> "$GITHUB_OUTPUT" + - id: set-matrix + # For m batches for python and n batches for cypress, we need a test matrix of python x m + cypress x n. + # while the github action matrix generation can handle these two parts individually, there isnt a way to use the + # two generated matrices for the same job. So, produce that matrix with scripting and use the include directive + # to add it to the test matrix. run: | - if [ '${{ needs.setup.outputs.frontend_only }}' == 'true' ]; then - echo 'matrix=["cypress_suite1","cypress_rest"]' >> "$GITHUB_OUTPUT" - elif [ '${{ needs.setup.outputs.ingestion_only }}' == 'true' ]; then - echo 'matrix=["no_cypress_suite0","no_cypress_suite1"]' >> "$GITHUB_OUTPUT" - elif [[ '${{ needs.setup.outputs.backend_change }}' == 'true' || '${{ needs.setup.outputs.smoke_test_change }}' == 'true' ]]; then - echo 'matrix=["no_cypress_suite0","no_cypress_suite1","cypress_suite1","cypress_rest"]' >> "$GITHUB_OUTPUT" - else - echo 'matrix=[]' >> "$GITHUB_OUTPUT" + python_batch_count=${{ steps.set-batch-count.outputs.python_batch_count }} + python_matrix=$(printf "{\"test_strategy\":\"pytests\",\"batch\":\"0\",\"batch_count\":\"$python_batch_count\"}"; for ((i=1;i> "$GITHUB_OUTPUT" smoke_test: name: Run Smoke Tests @@ -1043,8 +1062,7 @@ jobs: ] strategy: fail-fast: false - matrix: - test_strategy: ${{ fromJson(needs.smoke_test_matrix.outputs.matrix) }} + matrix: ${{ fromJson(needs.smoke_test_matrix.outputs.matrix) }} if: ${{ always() && !failure() && !cancelled() && needs.smoke_test_matrix.outputs.matrix != '[]' }} steps: - name: Free up disk space @@ -1065,7 
+1083,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Login to DockerHub uses: docker/login-action@v3 if: ${{ needs.setup.outputs.docker-login == 'true' }} @@ -1079,47 +1097,47 @@ jobs: - name: Disk Check run: df -h . && docker images - name: Download GMS image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.gms_build.result == 'success' }} with: image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Frontend image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.frontend_build.result == 'success' }} with: image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Kafka Setup image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.kafka_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Mysql Setup image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mysql_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Elastic Setup image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && 
needs.setup.outputs.pr-publish != 'true' ) && needs.elasticsearch_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MCE Consumer image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mce_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MAE Consumer image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mae_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download upgrade image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.datahub_upgrade_build.result == 'success' }} with: image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download datahub-ingestion-slim image - uses: ishworkh/docker-image-artifact-download@v1 + uses: ishworkh/container-image-artifact-download@v2.0.0 if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' && needs.datahub_ingestion_slim_build.result == 'success' }} with: image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }} @@ -1220,6 +1238,8 @@ jobs: CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} CLEANUP_DATA: "false" TEST_STRATEGY: ${{ matrix.test_strategy }} + BATCH_COUNT: ${{ matrix.batch_count }} + BATCH_NUMBER: ${{ matrix.batch }} run: | echo "$DATAHUB_VERSION" ./gradlew --stop @@ 
-1230,25 +1250,25 @@ jobs: if: failure() run: | docker ps -a - TEST_STRATEGY="-${{ matrix.test_strategy }}" + TEST_STRATEGY="-${{ matrix.test_strategy }}-${{ matrix.batch }}" source .github/scripts/docker_logs.sh - name: Upload logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: docker-logs-${{ matrix.test_strategy }} + name: docker-logs-${{ matrix.test_strategy }}-${{ matrix.batch }} path: "docker_logs/*.log" retention-days: 5 - name: Upload screenshots - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: cypress-snapshots-${{ matrix.test_strategy }} + name: cypress-snapshots-${{ matrix.test_strategy }}-${{ matrix.batch }} path: smoke-test/tests/cypress/cypress/screenshots/ - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: - name: Test Results (smoke tests) ${{ matrix.test_strategy }} + name: Test Results (smoke tests) ${{ matrix.test_strategy }} ${{ matrix.batch }} path: | **/build/reports/tests/test/** **/build/test-results/test/** diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index e6044badb1b41c..f9d459652086fb 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -5,6 +5,7 @@ on: branches: - "**" paths: + - ".github/workflows/documentation.yml" - "metadata-ingestion/**" - "metadata-models/**" - "docs/**" @@ -13,6 +14,7 @@ on: branches: - master paths: + - ".github/workflows/documentation.yml" - "metadata-ingestion/**" - "metadata-models/**" - "docs/**" @@ -40,7 +42,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - uses: actions/setup-python@v5 with: python-version: "3.10" @@ -57,8 +59,12 @@ jobs: - name: Deploy if: github.event_name == 'push' && github.repository == 'datahub-project/datahub' - uses: peaceiris/actions-gh-pages@v3 + uses: 
peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./docs-website/build cname: datahubproject.io + # The gh-pages branch stores the built docs site. We don't need to preserve + # the full history of the .html files, since they're generated from our + # source files. Doing so significantly reduces the size of the repo's .git dir. + force_orphan: true diff --git a/.github/workflows/gx-plugin.yml b/.github/workflows/gx-plugin.yml index aa7c3f069c7654..68d7934d1d6e6f 100644 --- a/.github/workflows/gx-plugin.yml +++ b/.github/workflows/gx-plugin.yml @@ -18,6 +18,7 @@ on: - "metadata-models/**" release: types: [published] + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -27,7 +28,6 @@ jobs: gx-plugin: runs-on: ubuntu-latest env: - SPARK_VERSION: 3.0.3 DATAHUB_TELEMETRY_ENABLED: false strategy: matrix: @@ -48,7 +48,8 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: actions/checkout@v4 + - uses: gradle/actions/setup-gradle@v4 + - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -60,7 +61,7 @@ jobs: - name: pip freeze show list installed if: always() run: source metadata-ingestion-modules/gx-plugin/venv/bin/activate && uv pip freeze - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ always() && matrix.python-version == '3.11' && matrix.extraPythonRequirement == 'great-expectations~=0.17.0' }} with: name: Test Results (GX Plugin ${{ matrix.python-version}}) @@ -68,22 +69,27 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml - - name: Upload coverage to Codecov + - name: Upload coverage to Codecov with ingestion flag if: always() - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} - directory: . 
+ directory: ./build/coverage-reports/metadata-ingestion-modules/gx-plugin/ fail_ci_if_error: false - flags: gx-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} + flags: ingestion-gx-plugin name: pytest-gx verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} event-file: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index c0eafe891fb0aa..ff4531ab0b25c1 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -18,6 +18,7 @@ on: - "metadata-models/**" release: types: [published] + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -28,13 +29,12 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 40 env: - SPARK_VERSION: 3.3.2 DATAHUB_TELEMETRY_ENABLED: false # TODO: Enable this once the test is fixed. 
# DATAHUB_LOOKML_GIT_TEST_SSH_KEY: ${{ secrets.DATAHUB_LOOKML_GIT_TEST_SSH_KEY }} strategy: matrix: - python-version: ["3.8", "3.10"] + python-version: ["3.8", "3.11"] command: [ "testQuick", @@ -42,9 +42,6 @@ jobs: "testIntegrationBatch1", "testIntegrationBatch2", ] - include: - - python-version: "3.8" - - python-version: "3.10" fail-fast: false steps: - name: Free up disk space @@ -57,7 +54,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v5 with: @@ -84,30 +81,35 @@ jobs: df -hl docker image ls docker system df - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: - name: Test Results (metadata ingestion ${{ matrix.python-version }}) + name: Test Results (metadata ingestion ${{ matrix.python-version }} ${{ matrix.command }}) path: | **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml !**/binary/** - - name: Upload coverage to Codecov - if: ${{ always() && matrix.python-version == '3.10' }} - uses: codecov/codecov-action@v3 + - name: Upload coverage to Codecov with ingestion flag + if: ${{ always() && matrix.python-version == '3.11' }} + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} - directory: . 
+ directory: ./build/coverage-reports/metadata-ingestion/ fail_ci_if_error: false - flags: pytest-${{ matrix.command }} - name: pytest-${{ matrix.command }} + flags: ingestion + name: pytest-${{ matrix.python-version }}-${{ matrix.command }} verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} event-file: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 5ee2223d71b039..6514b9e7226b60 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -20,6 +20,7 @@ on: - ".github/workflows/metadata-io.yml" release: types: [published] + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -66,11 +67,11 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - name: Gradle build (and test) run: | ./gradlew :metadata-io:test - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: Test Results (metadata-io) @@ -81,12 +82,27 @@ jobs: !**/binary/** - name: Ensure codegen is updated uses: ./.github/actions/ensure-codegen-updated + - name: Upload coverage to Codecov + if: ${{ always()}} + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + directory: ./build/coverage-reports/metadata-io/ + fail_ci_if_error: false + flags: metadata-io + name: metadata-io-test + verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} event-file: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: 
actions/upload-artifact@v4 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index 632e6ac35d673e..f9262d89f9ef00 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -20,11 +20,9 @@ jobs: steps: - name: Check whether upload to datahub is enabled id: publish - env: - ENABLE_PUBLISH: ${{ secrets.DataHubToken }} run: | - echo "Enable publish: ${{ env.ENABLE_PUBLISH != '' }}" - echo "publish=${{ env.ENABLE_PUBLISH != '' }}" >> $GITHUB_OUTPUT + echo "Enable publish: ${{ github.repository == 'datahub-project/datahub' }}" + echo "publish=${{ github.repository == 'datahub-project/datahub' }}" >> $GITHUB_OUTPUT metadata-ingestion-docgen: runs-on: ubuntu-latest needs: setup @@ -34,7 +32,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v5 with: diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml index 7da20ece44f6d6..55f7fea875388a 100644 --- a/.github/workflows/pr-labeler.yml +++ b/.github/workflows/pr-labeler.yml @@ -3,6 +3,10 @@ on: pull_request_target: types: [opened, reopened] +permissions: + contents: read + pull-requests: write + jobs: triage: permissions: @@ -10,11 +14,11 @@ jobs: pull-requests: write runs-on: ubuntu-latest steps: - - uses: actions/labeler@v4 + - uses: actions/labeler@v5 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" configuration-path: ".github/pr-labeler-config.yml" - - uses: actions-ecosystem/action-add-labels@v1.1.0 + - uses: actions-ecosystem/action-add-labels@v1.1.3 # only add names of Acryl Data team members here if: ${{ @@ -25,11 +29,9 @@ jobs: "chriscollins3456", "david-leifker", "shirshanka", - "sid-acryl", "swaroopjagadish", "treff7es", "yoonhyejin", - "eboneil", "gabe-lyons", "hsheth2", "jjoyce0510", @@ -37,16 +39,19 @@ 
jobs: "pedro93", "RyanHolstien", "sakethvarma397", - "Kunal-kankriya", "purnimagarg1", - "dushayntAW", "sagar-salvi-apptware", "kushagra-apptware", "Salman-Apptware", "mayurinehate", "noggi", "skrydal", - "kevinkarchacryl" + "kevinkarchacryl", + "sgomezvillamor", + "acrylJonny", + "chakru-r", + "brock-acryl", + "mminichino" ]'), github.actor ) @@ -55,7 +60,7 @@ jobs: github_token: ${{ github.token }} labels: | community-contribution - - uses: actions-ecosystem/action-add-labels@v1.1.0 + - uses: actions-ecosystem/action-add-labels@v1.1.3 # only add names of champions here. Confirm with DevRel Team if: ${{ diff --git a/.github/workflows/prefect-plugin.yml b/.github/workflows/prefect-plugin.yml index b0af00f92b7727..f1b06f31a05224 100644 --- a/.github/workflows/prefect-plugin.yml +++ b/.github/workflows/prefect-plugin.yml @@ -18,6 +18,7 @@ on: - "metadata-models/**" release: types: [published] + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -27,25 +28,20 @@ jobs: prefect-plugin: runs-on: ubuntu-latest env: - SPARK_VERSION: 3.0.3 DATAHUB_TELEMETRY_ENABLED: false strategy: matrix: - python-version: ["3.8", "3.9", "3.10"] - include: - - python-version: "3.8" - - python-version: "3.9" - - python-version: "3.10" + python-version: ["3.8", "3.9", "3.10", "3.11"] fail-fast: false steps: - name: Set up JDK 17 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: gradle/actions/setup-gradle@v4 + - uses: acryldata/sane-checkout-action@v3 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: "pip" @@ -56,8 +52,8 @@ jobs: - name: pip freeze show list installed if: always() run: source metadata-ingestion-modules/prefect-plugin/venv/bin/activate && uv pip freeze - - uses: actions/upload-artifact@v3 - if: ${{ 
always() && matrix.python-version == '3.10'}} + - uses: actions/upload-artifact@v4 + if: ${{ always() && matrix.python-version == '3.11'}} with: name: Test Results (Prefect Plugin ${{ matrix.python-version}}) path: | @@ -65,22 +61,27 @@ jobs: **/build/test-results/test/** **/junit.*.xml !**/binary/** - - name: Upload coverage to Codecov + - name: Upload coverage to Codecov with ingestion flag if: always() - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} - directory: . + directory: ./build/coverage-reports/metadata-ingestion-modules/prefect-plugin/ fail_ci_if_error: false - flags: prefect,prefect-${{ matrix.extra_pip_extras }} + flags: ingestion-prefect-plugin name: pytest-prefect-${{ matrix.python-version }} verbose: true + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} event-file: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index eb57c29e151ae6..fa3554c05d374c 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -57,7 +57,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - uses: actions/setup-python@v5 with: python-version: "3.10" @@ -196,3 +196,52 @@ jobs: echo signingKey=$SIGNING_KEY >> gradle.properties ./gradlew -PreleaseVersion=${{ needs.setup.outputs.tag }} :metadata-integration:java:custom-plugin-lib:publish ./gradlew :metadata-integration:java:custom-plugin-lib:closeAndReleaseRepository --info + publish-java8: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + needs: ["check-secret", "setup", "publish"] + if: 
${{ needs.check-secret.outputs.publish-enabled == 'true' }} + steps: + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.10" + cache: "pip" + - name: checkout upstream repo + run: | + git remote add upstream https://github.com/datahub-project/datahub.git + git fetch upstream --tags --force --filter=tree:0 + - name: publish datahub-client jar snapshot + if: ${{ github.event_name != 'release' }} + env: + RELEASE_USERNAME: ${{ secrets.RELEASE_USERNAME }} + RELEASE_PASSWORD: ${{ secrets.RELEASE_PASSWORD }} + SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }} + SIGNING_KEY: ${{ secrets.SIGNING_KEY }} + NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }} + NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }} + run: | + echo signingKey=$SIGNING_KEY >> gradle.properties + ./gradlew :metadata-integration:java:datahub-client:printVersion -PjavaClassVersionDefault=8 -ParchiveAppendix=java8 + ./gradlew :metadata-integration:java:datahub-client:publish -PjavaClassVersionDefault=8 -ParchiveAppendix=java8 + - name: release datahub-client jar + if: ${{ github.event_name == 'release' }} + env: + RELEASE_USERNAME: ${{ secrets.RELEASE_USERNAME }} + RELEASE_PASSWORD: ${{ secrets.RELEASE_PASSWORD }} + SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }} + SIGNING_KEY: ${{ secrets.SIGNING_KEY }} + NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }} + NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }} + run: | + echo signingKey=$SIGNING_KEY >> gradle.properties + ./gradlew -PreleaseVersion=${{ needs.setup.outputs.tag }} :metadata-integration:java:datahub-client:publish -PjavaClassVersionDefault=8 -ParchiveAppendix=java8 + ./gradlew :metadata-integration:java:datahub-client:closeAndReleaseRepository --info -PjavaClassVersionDefault=8 -ParchiveAppendix=java8 \ No newline at end of file diff --git 
a/.github/workflows/python-build-pages.yml b/.github/workflows/python-build-pages.yml new file mode 100644 index 00000000000000..9e1a2563478039 --- /dev/null +++ b/.github/workflows/python-build-pages.yml @@ -0,0 +1,64 @@ +name: Python Build +on: + push: + branches: + - master + paths: + - ".github/workflows/python-build-pages.yml" + - "metadata-ingestion/**" + - "metadata-ingestion-modules/**" + - "metadata-models/**" + pull_request: + branches: + - "**" + paths: + - ".github/workflows/python-build-pages.yml" + - "metadata-ingestion/**" + - "metadata-ingestion-modules/**" + - "metadata-models/**" + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + deploy-pages: + runs-on: ubuntu-latest + if: ${{ vars.CLOUDFLARE_WHEELS_PROJECT_NAME != '' }} + + name: Python Wheels + permissions: + contents: read + pull-requests: read + deployments: write + steps: + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v4 + - uses: acryldata/sane-checkout-action@v3 + - uses: actions/setup-python@v5 + with: + python-version: "3.10" + cache: "pip" + - uses: actions/cache@v4 + with: + path: | + ~/.cache/uv + key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} + - name: Build Python wheel site + run: | + ./gradlew :python-build:buildSite + env: + GITHUB_TOKEN: ${{ github.token }} + - name: Publish + uses: cloudflare/pages-action@v1 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + projectName: ${{ vars.CLOUDFLARE_WHEELS_PROJECT_NAME }} + workingDirectory: python-build + directory: site + gitHubToken: ${{ github.token }} diff --git a/.github/workflows/qodana-scan.yml b/.github/workflows/qodana-scan.yml deleted file mode 100644 index 750cf24ad38e57..00000000000000 --- a/.github/workflows/qodana-scan.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Qodana 
-on: - workflow_dispatch: - pull_request: - push: - branches: - - master - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - qodana: - runs-on: ubuntu-latest - steps: - - uses: acryldata/sane-checkout-action@v3 - - name: "Qodana Scan" - uses: JetBrains/qodana-action@v2022.3.4 - - uses: github/codeql-action/upload-sarif@v2 - with: - sarif_file: ${{ runner.temp }}/qodana/results/qodana.sarif.json - cache-default-branch-only: true diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 23413336404f2b..7a2080b7a9ddf3 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -35,7 +35,7 @@ jobs: with: distribution: "zulu" java-version: 17 - - uses: gradle/actions/setup-gradle@v3 + - uses: gradle/actions/setup-gradle@v4 - uses: actions/setup-python@v5 with: python-version: "3.10" @@ -72,14 +72,14 @@ jobs: docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true - name: Upload logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: docker logs path: | "**/build/container-logs/*.log" "*.log" - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: Test Results (smoke tests) diff --git a/.gitignore b/.gitignore index 43c627f9ed244f..19909b25fefe7f 100644 --- a/.gitignore +++ b/.gitignore @@ -85,6 +85,7 @@ metadata-service/plugin/src/test/resources/sample-plugins/** smoke-test/rollback-reports coverage*.xml .vercel +.envrc # A long series of binary directories we should ignore datahub-frontend/bin/main/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 898e3d262b3941..103e1680b8d105 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,26 +1,446 @@ -exclude: ^$ -files: 
^(docs/|docs-website/|metadata-ingestion/) +# Auto-generated by .github/scripts/generate_pre_commit.py at 2025-01-17 16:43:31 UTC +# Do not edit this file directly. Run the script to regenerate. +# Add additional hooks in .github/scripts/pre-commit-override.yaml repos: - - repo: https://github.com/pre-commit/mirrors-isort - rev: v5.10.1 + - repo: local hooks: - - id: isort - - repo: https://github.com/ambv/black - rev: 23.1.0 - hooks: - - id: black - - repo: https://github.com/myint/autoflake - rev: v1.4 - hooks: - - id: autoflake - args: - - --in-place - - --remove-unused-variables - - --remove-all-unused-imports - - --expand-star-imports - - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v3.0.0-alpha.6" # Use the sha or tag you want to point at - hooks: - - id: prettier - args: - - --write \ No newline at end of file + - id: datahub-graphql-core-spotless + name: datahub-graphql-core Spotless Apply + entry: ./gradlew :datahub-graphql-core:spotlessApply + language: system + files: ^datahub-graphql-core/.*\.java$ + pass_filenames: false + + - id: datahub-upgrade-spotless + name: datahub-upgrade Spotless Apply + entry: ./gradlew :datahub-upgrade:spotlessApply + language: system + files: ^datahub-upgrade/.*\.java$ + pass_filenames: false + + - id: entity-registry-spotless + name: entity-registry Spotless Apply + entry: ./gradlew :entity-registry:spotlessApply + language: system + files: ^entity-registry/.*\.java$ + pass_filenames: false + + - id: ingestion-scheduler-spotless + name: ingestion-scheduler Spotless Apply + entry: ./gradlew :ingestion-scheduler:spotlessApply + language: system + files: ^ingestion-scheduler/.*\.java$ + pass_filenames: false + + - id: li-utils-spotless + name: li-utils Spotless Apply + entry: ./gradlew :li-utils:spotlessApply + language: system + files: ^li-utils/.*\.java$ + pass_filenames: false + + - id: metadata-auth-auth-api-spotless + name: metadata-auth/auth-api Spotless Apply + entry: ./gradlew 
:metadata-auth:auth-api:spotlessApply + language: system + files: ^metadata-auth/auth-api/.*\.java$ + pass_filenames: false + + - id: metadata-dao-impl-kafka-producer-spotless + name: metadata-dao-impl/kafka-producer Spotless Apply + entry: ./gradlew :metadata-dao-impl:kafka-producer:spotlessApply + language: system + files: ^metadata-dao-impl/kafka-producer/.*\.java$ + pass_filenames: false + + - id: metadata-events-mxe-avro-spotless + name: metadata-events/mxe-avro Spotless Apply + entry: ./gradlew :metadata-events:mxe-avro:spotlessApply + language: system + files: ^metadata-events/mxe-avro/.*\.java$ + pass_filenames: false + + - id: metadata-events-mxe-registration-spotless + name: metadata-events/mxe-registration Spotless Apply + entry: ./gradlew :metadata-events:mxe-registration:spotlessApply + language: system + files: ^metadata-events/mxe-registration/.*\.java$ + pass_filenames: false + + - id: metadata-events-mxe-schemas-spotless + name: metadata-events/mxe-schemas Spotless Apply + entry: ./gradlew :metadata-events:mxe-schemas:spotlessApply + language: system + files: ^metadata-events/mxe-schemas/.*\.java$ + pass_filenames: false + + - id: metadata-events-mxe-utils-avro-spotless + name: metadata-events/mxe-utils-avro Spotless Apply + entry: ./gradlew :metadata-events:mxe-utils-avro:spotlessApply + language: system + files: ^metadata-events/mxe-utils-avro/.*\.java$ + pass_filenames: false + + - id: metadata-ingestion-lint-fix + name: metadata-ingestion Lint Fix + entry: ./gradlew :metadata-ingestion:lintFix + language: system + files: ^metadata-ingestion/.*\.py$ + pass_filenames: false + + - id: metadata-ingestion-modules-airflow-plugin-lint-fix + name: metadata-ingestion-modules/airflow-plugin Lint Fix + entry: ./gradlew :metadata-ingestion-modules:airflow-plugin:lintFix + language: system + files: ^metadata-ingestion-modules/airflow-plugin/.*\.py$ + pass_filenames: false + + - id: metadata-ingestion-modules-dagster-plugin-lint-fix + name: 
metadata-ingestion-modules/dagster-plugin Lint Fix + entry: ./gradlew :metadata-ingestion-modules:dagster-plugin:lintFix + language: system + files: ^metadata-ingestion-modules/dagster-plugin/.*\.py$ + pass_filenames: false + + - id: metadata-ingestion-modules-gx-plugin-lint-fix + name: metadata-ingestion-modules/gx-plugin Lint Fix + entry: ./gradlew :metadata-ingestion-modules:gx-plugin:lintFix + language: system + files: ^metadata-ingestion-modules/gx-plugin/.*\.py$ + pass_filenames: false + + - id: metadata-ingestion-modules-prefect-plugin-lint-fix + name: metadata-ingestion-modules/prefect-plugin Lint Fix + entry: ./gradlew :metadata-ingestion-modules:prefect-plugin:lintFix + language: system + files: ^metadata-ingestion-modules/prefect-plugin/.*\.py$ + pass_filenames: false + + - id: metadata-integration-java-acryl-spark-lineage-spotless + name: metadata-integration/java/acryl-spark-lineage Spotless Apply + entry: ./gradlew :metadata-integration:java:acryl-spark-lineage:spotlessApply + language: system + files: ^metadata-integration/java/acryl-spark-lineage/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-datahub-client-spotless + name: metadata-integration/java/datahub-client Spotless Apply + entry: ./gradlew :metadata-integration:java:datahub-client:spotlessApply + language: system + files: ^metadata-integration/java/datahub-client/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-datahub-event-spotless + name: metadata-integration/java/datahub-event Spotless Apply + entry: ./gradlew :metadata-integration:java:datahub-event:spotlessApply + language: system + files: ^metadata-integration/java/datahub-event/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-datahub-protobuf-spotless + name: metadata-integration/java/datahub-protobuf Spotless Apply + entry: ./gradlew :metadata-integration:java:datahub-protobuf:spotlessApply + language: system + files: 
^metadata-integration/java/datahub-protobuf/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-datahub-schematron-cli-spotless + name: metadata-integration/java/datahub-schematron/cli Spotless Apply + entry: ./gradlew :metadata-integration:java:datahub-schematron:cli:spotlessApply + language: system + files: ^metadata-integration/java/datahub-schematron/cli/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-datahub-schematron-lib-spotless + name: metadata-integration/java/datahub-schematron/lib Spotless Apply + entry: ./gradlew :metadata-integration:java:datahub-schematron:lib:spotlessApply + language: system + files: ^metadata-integration/java/datahub-schematron/lib/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-examples-spotless + name: metadata-integration/java/examples Spotless Apply + entry: ./gradlew :metadata-integration:java:examples:spotlessApply + language: system + files: ^metadata-integration/java/examples/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-openlineage-converter-spotless + name: metadata-integration/java/openlineage-converter Spotless Apply + entry: ./gradlew :metadata-integration:java:openlineage-converter:spotlessApply + language: system + files: ^metadata-integration/java/openlineage-converter/.*\.java$ + pass_filenames: false + + - id: metadata-integration-java-spark-lineage-legacy-spotless + name: metadata-integration/java/spark-lineage-legacy Spotless Apply + entry: ./gradlew :metadata-integration:java:spark-lineage-legacy:spotlessApply + language: system + files: ^metadata-integration/java/spark-lineage-legacy/.*\.java$ + pass_filenames: false + + - id: metadata-io-spotless + name: metadata-io Spotless Apply + entry: ./gradlew :metadata-io:spotlessApply + language: system + files: ^metadata-io/.*\.java$ + pass_filenames: false + + - id: metadata-io-metadata-io-api-spotless + name: metadata-io/metadata-io-api Spotless Apply + entry: ./gradlew 
:metadata-io:metadata-io-api:spotlessApply + language: system + files: ^metadata-io/metadata-io-api/.*\.java$ + pass_filenames: false + + - id: metadata-jobs-common-spotless + name: metadata-jobs/common Spotless Apply + entry: ./gradlew :metadata-jobs:common:spotlessApply + language: system + files: ^metadata-jobs/common/.*\.java$ + pass_filenames: false + + - id: metadata-jobs-mae-consumer-spotless + name: metadata-jobs/mae-consumer Spotless Apply + entry: ./gradlew :metadata-jobs:mae-consumer:spotlessApply + language: system + files: ^metadata-jobs/mae-consumer/.*\.java$ + pass_filenames: false + + - id: metadata-jobs-mae-consumer-job-spotless + name: metadata-jobs/mae-consumer-job Spotless Apply + entry: ./gradlew :metadata-jobs:mae-consumer-job:spotlessApply + language: system + files: ^metadata-jobs/mae-consumer-job/.*\.java$ + pass_filenames: false + + - id: metadata-jobs-mce-consumer-spotless + name: metadata-jobs/mce-consumer Spotless Apply + entry: ./gradlew :metadata-jobs:mce-consumer:spotlessApply + language: system + files: ^metadata-jobs/mce-consumer/.*\.java$ + pass_filenames: false + + - id: metadata-jobs-mce-consumer-job-spotless + name: metadata-jobs/mce-consumer-job Spotless Apply + entry: ./gradlew :metadata-jobs:mce-consumer-job:spotlessApply + language: system + files: ^metadata-jobs/mce-consumer-job/.*\.java$ + pass_filenames: false + + - id: metadata-jobs-pe-consumer-spotless + name: metadata-jobs/pe-consumer Spotless Apply + entry: ./gradlew :metadata-jobs:pe-consumer:spotlessApply + language: system + files: ^metadata-jobs/pe-consumer/.*\.java$ + pass_filenames: false + + - id: metadata-models-spotless + name: metadata-models Spotless Apply + entry: ./gradlew :metadata-models:spotlessApply + language: system + files: ^metadata-models/.*\.java$ + pass_filenames: false + + - id: metadata-models-custom-spotless + name: metadata-models-custom Spotless Apply + entry: ./gradlew :metadata-models-custom:spotlessApply + language: system + files: 
^metadata-models-custom/.*\.java$ + pass_filenames: false + + - id: metadata-models-validator-spotless + name: metadata-models-validator Spotless Apply + entry: ./gradlew :metadata-models-validator:spotlessApply + language: system + files: ^metadata-models-validator/.*\.java$ + pass_filenames: false + + - id: metadata-operation-context-spotless + name: metadata-operation-context Spotless Apply + entry: ./gradlew :metadata-operation-context:spotlessApply + language: system + files: ^metadata-operation-context/.*\.java$ + pass_filenames: false + + - id: metadata-service-auth-config-spotless + name: metadata-service/auth-config Spotless Apply + entry: ./gradlew :metadata-service:auth-config:spotlessApply + language: system + files: ^metadata-service/auth-config/.*\.java$ + pass_filenames: false + + - id: metadata-service-auth-filter-spotless + name: metadata-service/auth-filter Spotless Apply + entry: ./gradlew :metadata-service:auth-filter:spotlessApply + language: system + files: ^metadata-service/auth-filter/.*\.java$ + pass_filenames: false + + - id: metadata-service-auth-impl-spotless + name: metadata-service/auth-impl Spotless Apply + entry: ./gradlew :metadata-service:auth-impl:spotlessApply + language: system + files: ^metadata-service/auth-impl/.*\.java$ + pass_filenames: false + + - id: metadata-service-auth-servlet-impl-spotless + name: metadata-service/auth-servlet-impl Spotless Apply + entry: ./gradlew :metadata-service:auth-servlet-impl:spotlessApply + language: system + files: ^metadata-service/auth-servlet-impl/.*\.java$ + pass_filenames: false + + - id: metadata-service-configuration-spotless + name: metadata-service/configuration Spotless Apply + entry: ./gradlew :metadata-service:configuration:spotlessApply + language: system + files: ^metadata-service/configuration/.*\.java$ + pass_filenames: false + + - id: metadata-service-factories-spotless + name: metadata-service/factories Spotless Apply + entry: ./gradlew 
:metadata-service:factories:spotlessApply + language: system + files: ^metadata-service/factories/.*\.java$ + pass_filenames: false + + - id: metadata-service-graphql-servlet-impl-spotless + name: metadata-service/graphql-servlet-impl Spotless Apply + entry: ./gradlew :metadata-service:graphql-servlet-impl:spotlessApply + language: system + files: ^metadata-service/graphql-servlet-impl/.*\.java$ + pass_filenames: false + + - id: metadata-service-openapi-analytics-servlet-spotless + name: metadata-service/openapi-analytics-servlet Spotless Apply + entry: ./gradlew :metadata-service:openapi-analytics-servlet:spotlessApply + language: system + files: ^metadata-service/openapi-analytics-servlet/.*\.java$ + pass_filenames: false + + - id: metadata-service-openapi-entity-servlet-spotless + name: metadata-service/openapi-entity-servlet Spotless Apply + entry: ./gradlew :metadata-service:openapi-entity-servlet:spotlessApply + language: system + files: ^metadata-service/openapi-entity-servlet/.*\.java$ + pass_filenames: false + + - id: metadata-service-openapi-entity-servlet-generators-spotless + name: metadata-service/openapi-entity-servlet/generators Spotless Apply + entry: ./gradlew :metadata-service:openapi-entity-servlet:generators:spotlessApply + language: system + files: ^metadata-service/openapi-entity-servlet/generators/.*\.java$ + pass_filenames: false + + - id: metadata-service-openapi-servlet-spotless + name: metadata-service/openapi-servlet Spotless Apply + entry: ./gradlew :metadata-service:openapi-servlet:spotlessApply + language: system + files: ^metadata-service/openapi-servlet/.*\.java$ + pass_filenames: false + + - id: metadata-service-openapi-servlet-models-spotless + name: metadata-service/openapi-servlet/models Spotless Apply + entry: ./gradlew :metadata-service:openapi-servlet:models:spotlessApply + language: system + files: ^metadata-service/openapi-servlet/models/.*\.java$ + pass_filenames: false + + - id: metadata-service-plugin-spotless + name: 
metadata-service/plugin Spotless Apply + entry: ./gradlew :metadata-service:plugin:spotlessApply + language: system + files: ^metadata-service/plugin/.*\.java$ + pass_filenames: false + + - id: metadata-service-plugin-src-test-sample-test-plugins-spotless + name: metadata-service/plugin/src/test/sample-test-plugins Spotless Apply + entry: ./gradlew :metadata-service:plugin:src:test:sample-test-plugins:spotlessApply + language: system + files: ^metadata-service/plugin/src/test/sample-test-plugins/.*\.java$ + pass_filenames: false + + - id: metadata-service-restli-client-spotless + name: metadata-service/restli-client Spotless Apply + entry: ./gradlew :metadata-service:restli-client:spotlessApply + language: system + files: ^metadata-service/restli-client/.*\.java$ + pass_filenames: false + + - id: metadata-service-restli-client-api-spotless + name: metadata-service/restli-client-api Spotless Apply + entry: ./gradlew :metadata-service:restli-client-api:spotlessApply + language: system + files: ^metadata-service/restli-client-api/.*\.java$ + pass_filenames: false + + - id: metadata-service-restli-servlet-impl-spotless + name: metadata-service/restli-servlet-impl Spotless Apply + entry: ./gradlew :metadata-service:restli-servlet-impl:spotlessApply + language: system + files: ^metadata-service/restli-servlet-impl/.*\.java$ + pass_filenames: false + + - id: metadata-service-schema-registry-api-spotless + name: metadata-service/schema-registry-api Spotless Apply + entry: ./gradlew :metadata-service:schema-registry-api:spotlessApply + language: system + files: ^metadata-service/schema-registry-api/.*\.java$ + pass_filenames: false + + - id: metadata-service-schema-registry-servlet-spotless + name: metadata-service/schema-registry-servlet Spotless Apply + entry: ./gradlew :metadata-service:schema-registry-servlet:spotlessApply + language: system + files: ^metadata-service/schema-registry-servlet/.*\.java$ + pass_filenames: false + + - id: metadata-service-services-spotless 
+ name: metadata-service/services Spotless Apply + entry: ./gradlew :metadata-service:services:spotlessApply + language: system + files: ^metadata-service/services/.*\.java$ + pass_filenames: false + + - id: metadata-service-servlet-spotless + name: metadata-service/servlet Spotless Apply + entry: ./gradlew :metadata-service:servlet:spotlessApply + language: system + files: ^metadata-service/servlet/.*\.java$ + pass_filenames: false + + - id: metadata-utils-spotless + name: metadata-utils Spotless Apply + entry: ./gradlew :metadata-utils:spotlessApply + language: system + files: ^metadata-utils/.*\.java$ + pass_filenames: false + + - id: mock-entity-registry-spotless + name: mock-entity-registry Spotless Apply + entry: ./gradlew :mock-entity-registry:spotlessApply + language: system + files: ^mock-entity-registry/.*\.java$ + pass_filenames: false + + - id: smoke-test-lint-fix + name: smoke-test Lint Fix + entry: ./gradlew :smoke-test:lintFix + language: system + files: ^smoke-test/.*\.py$ + pass_filenames: false + + - id: test-models-spotless + name: test-models Spotless Apply + entry: ./gradlew :test-models:spotlessApply + language: system + files: ^test-models/.*\.java$ + pass_filenames: false + + - id: smoke-test-cypress-lint-fix + name: smoke-test cypress Lint Fix + entry: ./gradlew :smoke-test:cypressLintFix + language: system + files: ^smoke-test/tests/cypress/.*\.tsx$ + pass_filenames: false diff --git a/build.gradle b/build.gradle index be4d7ee8a562b9..2984812bda13b8 100644 --- a/build.gradle +++ b/build.gradle @@ -34,17 +34,18 @@ buildscript { // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md ext.pegasusVersion = '29.57.0' ext.mavenVersion = '3.6.3' - ext.springVersion = '6.1.13' + ext.versionGradle = '8.11.1' + ext.springVersion = '6.1.14' ext.springBootVersion = '3.2.9' ext.springKafkaVersion = '3.1.6' ext.openTelemetryVersion = '1.18.0' - ext.neo4jVersion = '5.14.0' - ext.neo4jTestVersion = '5.14.0' - ext.neo4jApocVersion = 
'5.14.0' + ext.neo4jVersion = '5.20.0' + ext.neo4jTestVersion = '5.20.0' + ext.neo4jApocVersion = '5.20.0' ext.testContainersVersion = '1.17.4' ext.elasticsearchVersion = '2.11.1' // ES 7.10, Opensearch 1.x, 2.x ext.jacksonVersion = '2.15.3' - ext.jettyVersion = '11.0.21' + ext.jettyVersion = '12.0.16' // see also datahub-frontend/play.gradle ext.playVersion = '2.8.22' ext.playScalaVersion = '2.13' @@ -57,7 +58,7 @@ buildscript { ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '15.5.2' ext.googleJavaFormatVersion = '1.18.1' - ext.openLineageVersion = '1.24.2' + ext.openLineageVersion = '1.25.0' ext.logbackClassicJava8 = '1.2.12' ext.docker_registry = 'acryldata' @@ -78,7 +79,7 @@ buildscript { plugins { id 'com.gorylenko.gradle-git-properties' version '2.4.1' - id 'com.github.johnrengelman.shadow' version '8.1.1' apply false + id 'com.gradleup.shadow' version '8.3.5' apply false id 'com.palantir.docker' version '0.35.0' apply false id 'com.avast.gradle.docker-compose' version '0.17.6' id "com.diffplug.spotless" version "6.23.3" @@ -135,7 +136,8 @@ project.ext.externalDependency = [ 'datastaxOssNativeProtocol': 'com.datastax.oss:native-protocol:1.5.1', 'datastaxOssCore': 'com.datastax.oss:java-driver-core:4.14.1', 'datastaxOssQueryBuilder': 'com.datastax.oss:java-driver-query-builder:4.14.1', - 'dgraph4j' : 'io.dgraph:dgraph4j:21.12.0', + 'dgraph4j' : 'io.dgraph:dgraph4j:24.1.1', + 'dgraphNetty': 'io.grpc:grpc-netty-shaded:1.69.0', 'dropwizardMetricsCore': 'io.dropwizard.metrics:metrics-core:4.2.3', 'dropwizardMetricsJmx': 'io.dropwizard.metrics:metrics-jmx:4.2.3', 'ebean': 'io.ebean:ebean:' + ebeanVersion, @@ -175,8 +177,9 @@ project.ext.externalDependency = [ 'jakartaValidation': 'jakarta.validation:jakarta.validation-api:3.1.0-M2', 'jerseyCore': 'org.glassfish.jersey.core:jersey-client:2.41', 'jerseyGuava': 'org.glassfish.jersey.bundles.repackaged:jersey-guava:2.25.1', - 'jettyJaas': "org.eclipse.jetty:jetty-jaas:$jettyVersion", + 'jettySecurity': 
"org.eclipse.jetty:jetty-security:$jettyVersion", 'jettyClient': "org.eclipse.jetty:jetty-client:$jettyVersion", + 'jettyJmx': "org.eclipse.jetty:jetty-jmx:$jettyVersion", 'jettison': 'org.codehaus.jettison:jettison:1.5.4', 'jgrapht': 'org.jgrapht:jgrapht-core:1.5.1', 'jna': 'net.java.dev.jna:jna:5.12.1', @@ -192,9 +195,9 @@ project.ext.externalDependency = [ 'junitJupiterEngine': "org.junit.jupiter:junit-jupiter-engine:$junitJupiterVersion", // avro-serde includes dependencies for `kafka-avro-serializer` `kafka-schema-registry-client` and `avro` 'kafkaAvroSerde': "io.confluent:kafka-streams-avro-serde:$kafkaVersion", - 'kafkaAvroSerializer': 'io.confluent:kafka-avro-serializer:5.1.4', + 'kafkaAvroSerializer': "io.confluent:kafka-avro-serializer:$kafkaVersion", 'kafkaClients': "org.apache.kafka:kafka-clients:$kafkaVersion-ccs", - 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.4', + 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.5', 'logbackClassic': "ch.qos.logback:logback-classic:$logbackClassic", 'logbackClassicJava8' : "ch.qos.logback:logback-classic:$logbackClassicJava8", 'slf4jApi': "org.slf4j:slf4j-api:$slf4jVersion", @@ -210,7 +213,7 @@ project.ext.externalDependency = [ 'mockitoInline': 'org.mockito:mockito-inline:4.11.0', 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', - 'mysqlConnector': 'mysql:mysql-connector-java:8.0.28', + 'mysqlConnector': 'com.mysql:mysql-connector-j:8.4.0', 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, @@ -234,7 +237,7 @@ project.ext.externalDependency = [ 'playFilters': "com.typesafe.play:filters-helpers_$playScalaVersion:$playVersion", 'pac4j': 'org.pac4j:pac4j-oidc:6.0.6', 'playPac4j': "org.pac4j:play-pac4j_$playScalaVersion:12.0.0-PLAY2.8", - 'postgresql': 
'org.postgresql:postgresql:42.3.9', + 'postgresql': 'org.postgresql:postgresql:42.7.4', 'protobuf': 'com.google.protobuf:protobuf-java:3.25.5', 'grpcProtobuf': 'io.grpc:grpc-protobuf:1.53.0', 'rangerCommons': 'org.apache.ranger:ranger-plugins-common:2.3.0', @@ -285,7 +288,8 @@ project.ext.externalDependency = [ 'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2', 'jakartaAnnotationApi': 'jakarta.annotation:jakarta.annotation-api:3.0.0', 'classGraph': 'io.github.classgraph:classgraph:4.8.172', - 'mustache': 'com.github.spullara.mustache.java:compiler:0.9.14' + 'mustache': 'com.github.spullara.mustache.java:compiler:0.9.14', + 'javaxMail': 'com.sun.mail:jakarta.mail:1.6.7' ] allprojects { @@ -372,9 +376,19 @@ configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) { exclude group: "org.slf4j", module: "slf4j-log4j12" exclude group: "org.slf4j", module: "slf4j-nop" exclude group: "org.slf4j", module: "slf4j-ext" + exclude group: "org.codehaus.jackson", module: "jackson-mapper-asl" + exclude group: "javax.mail", module: "mail" resolutionStrategy.force externalDependency.antlr4Runtime resolutionStrategy.force externalDependency.antlr4 + resolutionStrategy.force 'org.apache.mina:mina-core:2.2.4' + resolutionStrategy { + force "org.eclipse.jetty:jetty-security:${jettyVersion}" + force "org.eclipse.jetty:jetty-server:${jettyVersion}" + force "org.eclipse.jetty:jetty-ee10-servlet:${jettyVersion}" + force "org.eclipse.jetty:jetty-ee10-webapp:${jettyVersion}" + force "org.eclipse.jetty:jetty-xml:${jettyVersion}" + } } } @@ -402,7 +416,7 @@ subprojects { implementation externalDependency.annotationApi constraints { implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") - implementation('io.netty:netty-all:4.1.115.Final') + implementation('io.netty:netty-all:4.1.116.Final') implementation('org.apache.commons:commons-compress:1.27.1') implementation('org.apache.velocity:velocity-engine-core:2.4') 
implementation('org.hibernate:hibernate-validator:6.0.20.Final') @@ -472,10 +486,6 @@ subprojects { if (compileJavaTask != null) { spotlessJavaTask.dependsOn compileJavaTask } - // TODO - Do not run this in CI. How? - // tasks.withType(JavaCompile) { - // finalizedBy(tasks.findByName('spotlessApply')) - // } } } @@ -499,3 +509,8 @@ subprojects { } } } + +wrapper { + gradleVersion = project.versionGradle + distributionType = Wrapper.DistributionType.ALL +} diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index 7fa99ab3cb2621..3de0170fc70389 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -27,6 +27,7 @@ import io.datahubproject.metadata.context.EntityRegistryContext; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.RetrieverContext; import io.datahubproject.metadata.context.SearchContext; import io.datahubproject.metadata.context.ValidationContext; import java.nio.charset.StandardCharsets; @@ -180,7 +181,12 @@ protected OperationContext provideOperationContext( final Authentication systemAuthentication, final ConfigurationProvider configurationProvider) { ActorContext systemActorContext = - ActorContext.builder().systemAuth(true).authentication(systemAuthentication).build(); + ActorContext.builder() + .systemAuth(true) + .authentication(systemAuthentication) + .enforceExistenceEnabled( + configurationProvider.getAuthentication().isEnforceExistenceEnabled()) + .build(); OperationContextConfig systemConfig = OperationContextConfig.builder() .viewAuthorizationConfiguration(configurationProvider.getAuthorization().getView()) @@ -195,7 +201,10 @@ protected OperationContext provideOperationContext( .searchContext(SearchContext.EMPTY) .entityRegistryContext(EntityRegistryContext.builder().build(EmptyEntityRegistry.EMPTY)) 
.validationContext(ValidationContext.builder().alternateValidation(false).build()) - .build(systemAuthentication); + .retrieverContext(RetrieverContext.EMPTY) + .build( + systemAuthentication, + configurationProvider.getAuthentication().isEnforceExistenceEnabled()); } @Provides diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 97e916769a6c45..9bc28be1bfc89f 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -1,5 +1,6 @@ package config; +import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authorization.AuthorizationConfiguration; import com.linkedin.metadata.config.VisualConfiguration; import com.linkedin.metadata.config.cache.CacheConfiguration; @@ -30,4 +31,7 @@ public class ConfigurationProvider { /** Configuration for authorization */ private AuthorizationConfiguration authorization; + + /** Configuration for authentication */ + private AuthenticationConfiguration authentication; } diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index f46dc57c232bd2..8e7d51a0776c23 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ b/datahub-frontend/app/security/AuthenticationManager.java @@ -1,68 +1,33 @@ package security; import com.google.common.base.Preconditions; -import java.util.Collections; import javax.annotation.Nonnull; import javax.naming.AuthenticationException; -import javax.security.auth.callback.Callback; -import javax.security.auth.callback.CallbackHandler; -import javax.security.auth.callback.NameCallback; -import javax.security.auth.callback.PasswordCallback; -import javax.security.auth.login.LoginContext; -import javax.security.auth.login.LoginException; import org.apache.commons.lang3.StringUtils; -import org.eclipse.jetty.jaas.JAASLoginService; -import 
org.eclipse.jetty.jaas.PropertyUserStoreManager; -import play.Logger; +import org.eclipse.jetty.security.UserPrincipal; +import org.eclipse.jetty.util.security.Credential; public class AuthenticationManager { - - private AuthenticationManager(boolean verbose) {} + private AuthenticationManager() {} // Prevent instantiation public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); - JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication"); - PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager(); - propertyUserStoreManager.start(); - jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager)); - JAASLoginService.INSTANCE.set(jaasLoginService); - try { - LoginContext lc = - new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); - lc.login(); - } catch (LoginException le) { - AuthenticationException authenticationException = - new AuthenticationException(le.getMessage()); - authenticationException.setRootCause(le); - throw authenticationException; - } - } - private static class WHZCallbackHandler implements CallbackHandler { - private String password; - private String username; - - private WHZCallbackHandler(@Nonnull String username, @Nonnull String password) { - this.username = username; - this.password = password; - } + try { + // Create and configure credentials for authentication + UserPrincipal userPrincipal = new UserPrincipal(userName, Credential.getCredential(password)); - @Override - public void handle(@Nonnull Callback[] callbacks) { - NameCallback nc = null; - PasswordCallback pc = null; - for (Callback callback : callbacks) { - Logger.debug( - "The submitted callback is of type: " + callback.getClass() + " : " + callback); - if (callback instanceof NameCallback) { - nc = (NameCallback) callback; - 
nc.setName(this.username); - } else if (callback instanceof PasswordCallback) { - pc = (PasswordCallback) callback; - pc.setPassword(this.password.toCharArray()); - } + // Verify credentials + if (!userPrincipal.authenticate(password)) { + throw new AuthenticationException("Invalid credentials for user: " + userName); } + + } catch (Exception e) { + AuthenticationException authenticationException = + new AuthenticationException("Authentication failed"); + authenticationException.setRootCause(e); + throw authenticationException; } } } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 7750e169b11fbe..5cc5af50d217ba 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -4,8 +4,9 @@ plugins { id 'org.gradle.playframework' } -apply from: "../gradle/versioning/versioning.gradle" +apply from: '../gradle/versioning/versioning.gradle' apply from: './play.gradle' +apply from: '../gradle/coverage/java-coverage.gradle' ext { docker_repo = 'datahub-frontend-react' @@ -18,6 +19,13 @@ java { } } +test { + jacoco { + // jacoco instrumentation is failing when dealing with code of this dependency, excluding it. + excludes = ["com/gargoylesoftware/**"] + } +} + model { // Must specify the dependency here as "stage" is added by rule based model. 
tasks.myTar { diff --git a/datahub-frontend/conf/logback.xml b/datahub-frontend/conf/logback.xml index 78da231b4a71c5..de37c56cba38a7 100644 --- a/datahub-frontend/conf/logback.xml +++ b/datahub-frontend/conf/logback.xml @@ -61,7 +61,7 @@ - + diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index d513c3c232d9a0..1a9ffeede56251 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -50,7 +50,7 @@ dependencies { implementation externalDependency.springBeans implementation externalDependency.springContext implementation externalDependency.springBootAutoconfigure - implementation externalDependency.jettyJaas + implementation externalDependency.jettySecurity implementation externalDependency.graphqlJava implementation externalDependency.antlr4Runtime implementation externalDependency.antlr4 diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index 49a7fa7fbfbc2f..47ada8e9929dd3 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -3,6 +3,7 @@ plugins { id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" } +apply from: '../gradle/coverage/java-coverage.gradle' dependencies { implementation project(':metadata-service:restli-client-api') diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 69306862a46ef7..aec5352dec1a64 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -28,6 +28,7 @@ private Constants() {} public static final String INCIDENTS_SCHEMA_FILE = "incident.graphql"; public static final String CONTRACTS_SCHEMA_FILE = "contract.graphql"; public static final String CONNECTIONS_SCHEMA_FILE = "connection.graphql"; + public static final String VERSION_SCHEMA_FILE = "versioning.graphql"; public 
static final String BROWSE_PATH_DELIMITER = "/"; public static final String BROWSE_PATH_V2_DELIMITER = "␟"; public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index d1da55268a50d5..2c5e841322f45f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -56,6 +56,7 @@ import com.linkedin.datahub.graphql.generated.DataJobInputOutput; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; +import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.DataQualityContract; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; @@ -67,6 +68,7 @@ import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.EntityRelationship; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; +import com.linkedin.datahub.graphql.generated.FacetMetadata; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.FormActorAssignment; import com.linkedin.datahub.graphql.generated.FreshnessContract; @@ -122,6 +124,8 @@ import com.linkedin.datahub.graphql.generated.TestResult; import com.linkedin.datahub.graphql.generated.TypeQualifier; import com.linkedin.datahub.graphql.generated.UserUsageCounts; +import com.linkedin.datahub.graphql.generated.VersionProperties; +import com.linkedin.datahub.graphql.generated.VersionSet; import com.linkedin.datahub.graphql.resolvers.MeResolver; import 
com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver; import com.linkedin.datahub.graphql.resolvers.assertion.DeleteAssertionResolver; @@ -172,6 +176,8 @@ import com.linkedin.datahub.graphql.resolvers.embed.UpdateEmbedResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityPrivilegesResolver; +import com.linkedin.datahub.graphql.resolvers.entity.versioning.LinkAssetVersionResolver; +import com.linkedin.datahub.graphql.resolvers.entity.versioning.UnlinkAssetVersionResolver; import com.linkedin.datahub.graphql.resolvers.form.BatchAssignFormResolver; import com.linkedin.datahub.graphql.resolvers.form.BatchRemoveFormResolver; import com.linkedin.datahub.graphql.resolvers.form.CreateDynamicFormAssignmentResolver; @@ -320,6 +326,7 @@ import com.linkedin.datahub.graphql.resolvers.user.ListUsersResolver; import com.linkedin.datahub.graphql.resolvers.user.RemoveUserResolver; import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver; +import com.linkedin.datahub.graphql.resolvers.versioning.VersionsSearchResolver; import com.linkedin.datahub.graphql.resolvers.view.CreateViewResolver; import com.linkedin.datahub.graphql.resolvers.view.DeleteViewResolver; import com.linkedin.datahub.graphql.resolvers.view.ListGlobalViewsResolver; @@ -345,6 +352,7 @@ import com.linkedin.datahub.graphql.types.datajob.DataJobType; import com.linkedin.datahub.graphql.types.dataplatform.DataPlatformType; import com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType; +import com.linkedin.datahub.graphql.types.dataprocessinst.DataProcessInstanceType; import com.linkedin.datahub.graphql.types.dataprocessinst.mappers.DataProcessInstanceRunEventMapper; import com.linkedin.datahub.graphql.types.dataproduct.DataProductType; import com.linkedin.datahub.graphql.types.dataset.DatasetType; @@ -376,6 +384,7 @@ import 
com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyType; import com.linkedin.datahub.graphql.types.tag.TagType; import com.linkedin.datahub.graphql.types.test.TestType; +import com.linkedin.datahub.graphql.types.versioning.VersionSetType; import com.linkedin.datahub.graphql.types.view.DataHubViewType; import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.SystemEntityClient; @@ -388,6 +397,7 @@ import com.linkedin.metadata.config.telemetry.TelemetryConfiguration; import com.linkedin.metadata.connection.ConnectionService; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.versioning.EntityVersioningService; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.graph.SiblingGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -473,6 +483,7 @@ public class GmsGraphQLEngine { private final RestrictedService restrictedService; private ConnectionService connectionService; private AssertionService assertionService; + private final EntityVersioningService entityVersioningService; private final BusinessAttributeService businessAttributeService; private final FeatureFlags featureFlags; @@ -529,6 +540,8 @@ public class GmsGraphQLEngine { private final FormType formType; private final IncidentType incidentType; private final RestrictedType restrictedType; + private final DataProcessInstanceType dataProcessInstanceType; + private final VersionSetType versionSetType; private final int graphQLQueryComplexityLimit; private final int graphQLQueryDepthLimit; @@ -595,6 +608,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.restrictedService = args.restrictedService; this.connectionService = args.connectionService; this.assertionService = args.assertionService; + this.entityVersioningService = args.entityVersioningService; this.businessAttributeService = args.businessAttributeService; this.ingestionConfiguration = 
Objects.requireNonNull(args.ingestionConfiguration); @@ -648,6 +662,8 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.formType = new FormType(entityClient); this.incidentType = new IncidentType(entityClient); this.restrictedType = new RestrictedType(entityClient, restrictedService); + this.dataProcessInstanceType = new DataProcessInstanceType(entityClient, featureFlags); + this.versionSetType = new VersionSetType(entityClient); this.graphQLQueryComplexityLimit = args.graphQLQueryComplexityLimit; this.graphQLQueryDepthLimit = args.graphQLQueryDepthLimit; @@ -697,8 +713,10 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { entityTypeType, formType, incidentType, + versionSetType, restrictedType, - businessAttributeType)); + businessAttributeType, + dataProcessInstanceType)); this.loadableTypes = new ArrayList<>(entityTypes); // Extend loadable types with types from the plugins // This allows us to offer search and browse capabilities out of the box for @@ -798,6 +816,8 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configureConnectionResolvers(builder); configureDeprecationResolvers(builder); configureMetadataAttributionResolver(builder); + configureVersionPropertiesResolvers(builder); + configureVersionSetResolvers(builder); } private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { @@ -852,7 +872,8 @@ public GraphQLEngine.Builder builder() { .addSchema(fileBasedSchema(ASSERTIONS_SCHEMA_FILE)) .addSchema(fileBasedSchema(INCIDENTS_SCHEMA_FILE)) .addSchema(fileBasedSchema(CONTRACTS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(COMMON_SCHEMA_FILE)); + .addSchema(fileBasedSchema(COMMON_SCHEMA_FILE)) + .addSchema(fileBasedSchema(VERSION_SCHEMA_FILE)); for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { List pluginSchemaFiles = plugin.getSchemaFiles(); @@ -1023,6 +1044,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("tag", getResolver(tagType)) 
.dataFetcher("dataFlow", getResolver(dataFlowType)) .dataFetcher("dataJob", getResolver(dataJobType)) + .dataFetcher("dataProcessInstance", getResolver(dataProcessInstanceType)) .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) .dataFetcher("domain", getResolver((domainType))) @@ -1038,6 +1060,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("form", getResolver(formType)) .dataFetcher("view", getResolver(dataHubViewType)) .dataFetcher("structuredProperty", getResolver(structuredPropertyType)) + .dataFetcher("versionSet", getResolver(versionSetType)) .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) @@ -1317,7 +1340,8 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) .dataFetcher( - "createDataProduct", new CreateDataProductResolver(this.dataProductService)) + "createDataProduct", + new CreateDataProductResolver(this.dataProductService, this.entityService)) .dataFetcher( "updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) .dataFetcher( @@ -1384,6 +1408,16 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { "removeBusinessAttribute", new RemoveBusinessAttributeResolver(this.entityService)); } + if (featureFlags.isEntityVersioning()) { + typeWiring + .dataFetcher( + "linkAssetVersion", + new LinkAssetVersionResolver(this.entityVersioningService, this.featureFlags)) + .dataFetcher( + "unlinkAssetVersion", + new UnlinkAssetVersionResolver( + this.entityVersioningService, this.featureFlags)); + } return typeWiring; }); } @@ -1474,6 +1508,19 @@ private void 
configureGenericEntityResolvers(final RuntimeWiring.Builder builder "entity", new EntityTypeResolver( entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))) + .type( + "FacetMetadata", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> { + FacetMetadata facetMetadata = env.getSource(); + return facetMetadata.getEntity() != null + ? facetMetadata.getEntity() + : null; + }))) .type( "LineageRelationship", typeWiring -> @@ -2259,7 +2306,15 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) { .type( "TimeSeriesAspect", typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) - .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())); + .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())) + .type( + "SupportsVersions", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))); } /** Configures custom type extensions leveraged within our GraphQL schema. */ @@ -2362,6 +2417,17 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { ? dataJob.getDataPlatformInstance().getUrn() : null; })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getContainer() != null + ? dataJob.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) .dataFetcher("runs", new DataJobRunsResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) @@ -2439,6 +2505,17 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { ? 
dataFlow.getDataPlatformInstance().getUrn() : null; })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final DataFlow dataFlow = env.getSource(); + return dataFlow.getContainer() != null + ? dataFlow.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) .dataFetcher( "health", new EntityHealthResolver( @@ -3021,6 +3098,25 @@ private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder b "DataProcessInstance", typeWiring -> typeWiring + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final DataProcessInstance dataProcessInstance = env.getSource(); + return dataProcessInstance.getDataPlatformInstance() != null + ? dataProcessInstance.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final DataProcessInstance dpi = env.getSource(); + return dpi.getContainer() != null ? dpi.getContainer().getUrn() : null; + })) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "lineage", @@ -3235,4 +3331,34 @@ private void configureMetadataAttributionResolver(final RuntimeWiring.Builder bu entityTypes, (env) -> ((MetadataAttribution) env.getSource()).getSource()))); } + + private void configureVersionPropertiesResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionProperties", + typeWiring -> + typeWiring.dataFetcher( + "versionSet", + new LoadableTypeResolver<>( + versionSetType, + (env) -> { + final VersionProperties versionProperties = env.getSource(); + return versionProperties != null + ? 
versionProperties.getVersionSet().getUrn() + : null; + }))); + } + + private void configureVersionSetResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionSet", + typeWiring -> + typeWiring + .dataFetcher( + "latestVersion", + new EntityTypeResolver( + entityTypes, (env) -> ((VersionSet) env.getSource()).getLatestVersion())) + .dataFetcher( + "versionsSearch", + new VersionsSearchResolver(this.entityClient, this.viewService))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index f6ab3a603dbb7b..131f4e87637807 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -21,6 +21,7 @@ import com.linkedin.metadata.config.telemetry.TelemetryConfiguration; import com.linkedin.metadata.connection.ConnectionService; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.versioning.EntityVersioningService; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.graph.SiblingGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -88,6 +89,7 @@ public class GmsGraphQLEngineArgs { BusinessAttributeService businessAttributeService; ConnectionService connectionService; AssertionService assertionService; + EntityVersioningService entityVersioningService; // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index 197ac87c1e22d8..d9b8008d46286a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -125,7 +125,7 @@ private AnalyticsChart getTopUsersChart(OperationContext opContext) { final DateRange trailingMonthDateRange = dateUtil.getTrailingMonthDateRange(); final List columns = ImmutableList.of("Name", "Title", "Email"); - final String topUsersTitle = "Top Users"; + final String topUsersTitle = "Top Users (Last 30 Days)"; final List topUserRows = _analyticsService.getTopNTableChart( _analyticsService.getUsageIndexName(), @@ -198,7 +198,7 @@ private Row buildNewUsersRow(@Nonnull final SearchEntity entity) { private AnalyticsChart getNewUsersChart(OperationContext opContext) { try { final List columns = ImmutableList.of("Name", "Title", "Email"); - final String newUsersTitle = "New Users"; + final String newUsersTitle = "Active Users (Last 30 Days)"; final SearchResult result = searchForNewUsers(opContext); final List newUserRows = new ArrayList<>(); for (SearchEntity entity : result.getEntities()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index ca60acaa805387..29d1c02dacb416 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -232,6 +232,10 @@ public static T restrictEntity(@Nonnull Object entity, Class clazz) { try { Object[] args = allFields.stream() + // New versions of graphql.codegen generate serialVersionUID + // We need to filter serialVersionUID out because serialVersionUID is + // never part of the entity type constructor + .filter(field -> !field.getName().contains("serialVersionUID")) .map( field -> { // properties are often not required but only because @@ -339,6 +343,11 @@ public static 
boolean canManageStructuredProperties(@Nonnull QueryContext contex context.getOperationContext(), PoliciesConfig.MANAGE_STRUCTURED_PROPERTIES_PRIVILEGE); } + public static boolean canViewStructuredPropertiesPage(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( + context.getOperationContext(), PoliciesConfig.VIEW_STRUCTURED_PROPERTIES_PAGE_PRIVILEGE); + } + public static boolean canManageForms(@Nonnull QueryContext context) { return AuthUtil.isAuthorized( context.getOperationContext(), PoliciesConfig.MANAGE_DOCUMENTATION_FORMS_PRIVILEGE); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index b1101ae3ee8657..8297392e642d51 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -93,6 +93,10 @@ public CompletableFuture get(DataFetchingEnvironment environm BusinessAttributeAuthorizationUtils.canCreateBusinessAttribute(context)); platformPrivileges.setManageBusinessAttributes( BusinessAttributeAuthorizationUtils.canManageBusinessAttribute(context)); + platformPrivileges.setManageStructuredProperties( + AuthorizationUtils.canManageStructuredProperties(context)); + platformPrivileges.setViewStructuredPropertiesPage( + AuthorizationUtils.canViewStructuredPropertiesPage(context)); // Construct and return authenticated user object. 
final AuthenticatedUser authUser = new AuthenticatedUser(); authUser.setCorpUser(corpUser); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index 259d05c631557d..8cdc13a14be87c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -188,6 +188,8 @@ public CompletableFuture get(final DataFetchingEnvironment environmen .setDataContractsEnabled(_featureFlags.isDataContractsEnabled()) .setEditableDatasetNameEnabled(_featureFlags.isEditableDatasetNameEnabled()) .setShowSeparateSiblings(_featureFlags.isShowSeparateSiblings()) + .setShowManageStructuredProperties(_featureFlags.isShowManageStructuredProperties()) + .setEntityVersioningEnabled(_featureFlags.isEntityVersioning()) .build(); appConfig.setFeatureFlags(featureFlagsConfig); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index 470267264f12f2..8bee544ca55c33 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -10,8 +10,11 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateDataProductInput; import com.linkedin.datahub.graphql.generated.DataProduct; +import com.linkedin.datahub.graphql.generated.OwnerEntityType; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import 
com.linkedin.datahub.graphql.types.dataproduct.mappers.DataProductMapper; import com.linkedin.entity.EntityResponse; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -24,6 +27,7 @@ public class CreateDataProductResolver implements DataFetcher> { private final DataProductService _dataProductService; + private final EntityService _entityService; @Override public CompletableFuture get(final DataFetchingEnvironment environment) @@ -56,6 +60,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm context.getOperationContext(), dataProductUrn, UrnUtils.getUrn(input.getDomainUrn())); + OwnerUtils.addCreatorAsOwner( + context, dataProductUrn.toString(), OwnerEntityType.CORP_USER, _entityService); EntityResponse response = _dataProductService.getDataProductEntityResponse( context.getOperationContext(), dataProductUrn); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/LinkAssetVersionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/LinkAssetVersionResolver.java new file mode 100644 index 00000000000000..f32fd03a384005 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/LinkAssetVersionResolver.java @@ -0,0 +1,98 @@ +package com.linkedin.datahub.graphql.resolvers.entity.versioning; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.Constants.VERSION_SET_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; + +import com.datahub.authorization.AuthUtil; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.LinkVersionInput; +import com.linkedin.datahub.graphql.generated.VersionSet; +import com.linkedin.metadata.entity.IngestResult; +import com.linkedin.metadata.entity.versioning.EntityVersioningService; +import com.linkedin.metadata.entity.versioning.VersionPropertiesInput; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang.StringUtils; + +/** + * Currently only supports linking the latest version, but may be modified later to support inserts + */ +@Slf4j +@RequiredArgsConstructor +public class LinkAssetVersionResolver implements DataFetcher> { + + private final EntityVersioningService entityVersioningService; + private final FeatureFlags featureFlags; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final LinkVersionInput input = + bindArgument(environment.getArgument("input"), LinkVersionInput.class); + if (!featureFlags.isEntityVersioning()) { + throw new IllegalAccessError( + "Entity Versioning is not configured, please enable before attempting to use this feature."); + } + Urn versionSetUrn = UrnUtils.getUrn(input.getVersionSet()); + if (!VERSION_SET_ENTITY_NAME.equals(versionSetUrn.getEntityType())) { + throw new IllegalArgumentException( + String.format("Version Set urn %s must be of type Version Set.", input.getVersionSet())); + } + Urn entityUrn = 
UrnUtils.getUrn(input.getLinkedEntity()); + OperationContext opContext = context.getOperationContext(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + opContext, UPDATE, ImmutableSet.of(versionSetUrn, entityUrn))) { + throw new AuthorizationException( + String.format( + "%s is unauthorized to %s entities %s and %s", + opContext.getAuthentication().getActor().toUrnStr(), + UPDATE, + input.getVersionSet(), + input.getLinkedEntity())); + } + VersionPropertiesInput versionPropertiesInput = + new VersionPropertiesInput( + input.getComment(), + input.getVersion(), + input.getSourceTimestamp(), + input.getSourceCreator()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + List linkResults = + entityVersioningService.linkLatestVersion( + opContext, versionSetUrn, entityUrn, versionPropertiesInput); + + String successVersionSetUrn = + linkResults.stream() + .filter( + ingestResult -> + input.getLinkedEntity().equals(ingestResult.getUrn().toString())) + .map(ingestResult -> ingestResult.getUrn().toString()) + .findAny() + .orElse(StringUtils.EMPTY); + + if (StringUtils.isEmpty(successVersionSetUrn)) { + return null; + } + VersionSet versionSet = new VersionSet(); + versionSet.setUrn(versionSetUrn.toString()); + versionSet.setType(EntityType.VERSION_SET); + return versionSet; + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/UnlinkAssetVersionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/UnlinkAssetVersionResolver.java new file mode 100644 index 00000000000000..33ab83a59c6771 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/UnlinkAssetVersionResolver.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.resolvers.entity.versioning; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static 
com.linkedin.metadata.Constants.VERSION_SET_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; + +import com.datahub.authorization.AuthUtil; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.UnlinkVersionInput; +import com.linkedin.datahub.graphql.generated.VersionSet; +import com.linkedin.metadata.entity.RollbackResult; +import com.linkedin.metadata.entity.versioning.EntityVersioningService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +public class UnlinkAssetVersionResolver implements DataFetcher> { + + private final EntityVersioningService entityVersioningService; + private final FeatureFlags featureFlags; + + public UnlinkAssetVersionResolver( + EntityVersioningService entityVersioningService, FeatureFlags featureFlags) { + this.entityVersioningService = entityVersioningService; + this.featureFlags = featureFlags; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + if (!featureFlags.isEntityVersioning()) { + throw new IllegalAccessError( + "Entity Versioning is not configured, please enable before attempting to use this feature."); + } + final QueryContext context = environment.getContext(); + final UnlinkVersionInput input = + bindArgument(environment.getArgument("input"), UnlinkVersionInput.class); + Urn versionSetUrn = UrnUtils.getUrn(input.getVersionSet()); + if 
(!VERSION_SET_ENTITY_NAME.equals(versionSetUrn.getEntityType())) { + throw new IllegalArgumentException( + String.format("Version Set urn %s must be of type Version Set.", input.getVersionSet())); + } + Urn entityUrn = UrnUtils.getUrn(input.getUnlinkedEntity()); + OperationContext opContext = context.getOperationContext(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + opContext, UPDATE, ImmutableSet.of(versionSetUrn, entityUrn))) { + throw new AuthorizationException( + String.format( + "%s is unauthorized to %s entities %s and %s", + opContext.getAuthentication().getActor(), + UPDATE, + input.getVersionSet(), + input.getUnlinkedEntity())); + } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + List results = + entityVersioningService.unlinkVersion(opContext, versionSetUrn, entityUrn); + if (results.isEmpty() || results.stream().allMatch(RollbackResult::isNoOp)) { + return null; + } + VersionSet versionSet = new VersionSet(); + versionSet.setUrn(versionSetUrn.toString()); + versionSet.setType(EntityType.VERSION_SET); + return versionSet; + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index 29b71d95ad9749..31ed2de7a6d513 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -66,11 +66,17 @@ public CompletableFuture get(DataFetchingEnvironment environme final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); + final SearchFlags searchFlags = + input.getSearchFlags() != null + ? 
mapInputFlags(context, input.getSearchFlags()) + : new SearchFlags(); final List facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; + // do not include default facets if we're requesting any facets specifically + searchFlags.setIncludeDefaultFacets(facets == null || facets.size() <= 0); + List finalEntities = maybeResolvedView != null ? SearchUtils.intersectEntityTypes( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index d103704146d399..29bc3a82a16498 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -2,19 +2,28 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import 
com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.service.ViewService; +import com.linkedin.metadata.utils.CriterionUtils; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -64,24 +73,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); - List sortCriteria; - if (input.getSortInput() != null) { - if (input.getSortInput().getSortCriteria() != null) { - sortCriteria = - input.getSortInput().getSortCriteria().stream() - .map(SearchUtils::mapSortCriterion) - .collect(Collectors.toList()); - } else { - sortCriteria = - input.getSortInput().getSortCriterion() != null - ? Collections.singletonList( - mapSortCriterion(input.getSortInput().getSortCriterion())) - : Collections.emptyList(); - } - - } else { - sortCriteria = Collections.emptyList(); - } + List sortCriteria = SearchUtils.getSortCriteria(input.getSortInput()); try { log.debug( @@ -101,6 +93,14 @@ public CompletableFuture get(DataFetchingEnvironment environment) return SearchUtils.createEmptySearchResults(start, count); } + boolean shouldIncludeStructuredPropertyFacets = + input.getSearchFlags() != null + && input.getSearchFlags().getIncludeStructuredPropertyFacets() != null + ? input.getSearchFlags().getIncludeStructuredPropertyFacets() + : false; + List structuredPropertyFacets = + shouldIncludeStructuredPropertyFacets ? 
getStructuredPropertyFacets(context) : null; + return UrnSearchResultsMapper.map( context, _entityClient.searchAcrossEntities( @@ -113,7 +113,8 @@ public CompletableFuture get(DataFetchingEnvironment environment) : baseFilter, start, count, - sortCriteria)); + sortCriteria, + structuredPropertyFacets)); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", @@ -133,4 +134,45 @@ public CompletableFuture get(DataFetchingEnvironment environment) this.getClass().getSimpleName(), "get"); } + + private List getStructuredPropertyFacets(final QueryContext context) { + try { + SearchFlags searchFlags = new SearchFlags().setSkipCache(true); + SearchResult result = + _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), + getEntityNames(ImmutableList.of(EntityType.STRUCTURED_PROPERTY)), + "*", + createStructuredPropertyFilter(), + 0, + 100, + Collections.emptyList(), + null); + return result.getEntities().stream() + .map(entity -> String.format("structuredProperties.%s", entity.getEntity().getId())) + .collect(Collectors.toList()); + } catch (Exception e) { + log.error("Failed to get structured property facets to filter on", e); + return Collections.emptyList(); + } + } + + private Filter createStructuredPropertyFilter() { + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + CriterionUtils.buildCriterion( + "filterStatus", Condition.EQUAL, "ENABLED")))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + CriterionUtils.buildCriterion( + "showInSearchFilters", Condition.EQUAL, "true"))))))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index 04777c3fcdb4e2..f105a72a1273ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -18,12 +18,18 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.generated.SearchSortInput; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -32,24 +38,32 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.Set; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import 
java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; import org.codehaus.plexus.util.CollectionUtils; @Slf4j public class SearchUtils { private SearchUtils() {} + private static final int DEFAULT_SEARCH_COUNT = 10; + private static final int DEFAULT_SCROLL_COUNT = 10; + private static final String DEFAULT_SCROLL_KEEP_ALIVE = "5m"; + /** Entities that are searched by default in Search Across Entities */ public static final List SEARCHABLE_ENTITY_TYPES = ImmutableList.of( @@ -326,4 +340,119 @@ public static SearchResults createEmptySearchResults(final int start, final int result.setFacets(new ArrayList<>()); return result; } + + public static List getSortCriteria(@Nullable final SearchSortInput sortInput) { + List sortCriteria; + if (sortInput != null) { + if (sortInput.getSortCriteria() != null) { + sortCriteria = + sortInput.getSortCriteria().stream() + .map(SearchUtils::mapSortCriterion) + .collect(Collectors.toList()); + } else { + sortCriteria = + sortInput.getSortCriterion() != null + ? Collections.singletonList(mapSortCriterion(sortInput.getSortCriterion())) + : new ArrayList<>(); + } + } else { + sortCriteria = new ArrayList<>(); + } + + return sortCriteria; + } + + public static CompletableFuture searchAcrossEntities( + QueryContext inputContext, + final EntityClient _entityClient, + final ViewService _viewService, + List inputEntityTypes, + String inputQuery, + Filter baseFilter, + String viewUrn, + List sortCriteria, + com.linkedin.datahub.graphql.generated.SearchFlags inputSearchFlags, + Integer inputCount, + Integer inputStart, + String className) { + + final List entityTypes = + (inputEntityTypes == null || inputEntityTypes.isEmpty()) + ? 
SEARCHABLE_ENTITY_TYPES + : inputEntityTypes; + final List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + + // escape forward slash since it is a reserved character in Elasticsearch, default to * if + // blank/empty + final String query = + StringUtils.isNotBlank(inputQuery) ? ResolverUtils.escapeForwardSlash(inputQuery) : "*"; + + final Optional searchFlags = + Optional.ofNullable(inputSearchFlags) + .map((flags) -> SearchFlagsInputMapper.map(inputContext, flags)); + final OperationContext context = + inputContext.getOperationContext().withSearchFlags(searchFlags::orElse); + + final int count = Optional.ofNullable(inputCount).orElse(DEFAULT_SEARCH_COUNT); + final int start = Optional.ofNullable(inputStart).orElse(0); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final OperationContext baseContext = inputContext.getOperationContext(); + final Optional maybeResolvedView = + Optional.ofNullable(viewUrn) + .map((urn) -> resolveView(baseContext, _viewService, UrnUtils.getUrn(urn))); + + final List finalEntityNames = + maybeResolvedView + .map( + (view) -> + intersectEntityTypes(entityNames, view.getDefinition().getEntityTypes())) + .orElse(entityNames); + + final Filter finalFilters = + maybeResolvedView + .map((view) -> combineFilters(baseFilter, view.getDefinition().getFilter())) + .orElse(baseFilter); + + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + finalEntityNames, + query, + finalFilters, + start, + count); + + try { + final SearchResult searchResult = + _entityClient.searchAcrossEntities( + context, + finalEntityNames, + query, + finalFilters, + start, + count, + sortCriteria, + null); + return UrnSearchResultsMapper.map(inputContext, searchResult); + } catch (Exception e) { + log.warn( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + finalEntityNames, + 
query, + finalFilters, + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + finalEntityNames, query, finalFilters, start, count), + e); + } + }, + className, + "searchAcrossEntities"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java index 328f63b893d06f..7d232748f0d93c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.structuredproperties; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithUrn; +import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; @@ -12,20 +13,24 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateStructuredPropertyInput; import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertySettingsInput; import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.aspect.patch.builder.StructuredPropertyDefinitionPatchBuilder; +import 
com.linkedin.metadata.models.StructuredPropertyUtils; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.structured.PrimitivePropertyValue; import com.linkedin.structured.PropertyCardinality; import com.linkedin.structured.PropertyValue; import com.linkedin.structured.StructuredPropertyKey; +import com.linkedin.structured.StructuredPropertySettings; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; -import java.util.UUID; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; @@ -54,40 +59,28 @@ public CompletableFuture get(final DataFetchingEnviron "Unable to create structured property. Please contact your admin."); } final StructuredPropertyKey key = new StructuredPropertyKey(); - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + final String id = + StructuredPropertyUtils.getPropertyId(input.getId(), input.getQualifiedName()); key.setId(id); final Urn propertyUrn = EntityKeyUtils.convertEntityKeyToUrn(key, STRUCTURED_PROPERTY_ENTITY_NAME); - StructuredPropertyDefinitionPatchBuilder builder = - new StructuredPropertyDefinitionPatchBuilder().urn(propertyUrn); - - builder.setQualifiedName(input.getQualifiedName()); - builder.setValueType(input.getValueType()); - input.getEntityTypes().forEach(builder::addEntityType); - if (input.getDisplayName() != null) { - builder.setDisplayName(input.getDisplayName()); - } - if (input.getDescription() != null) { - builder.setDescription(input.getDescription()); - } - if (input.getImmutable() != null) { - builder.setImmutable(input.getImmutable()); - } - if (input.getTypeQualifier() != null) { - buildTypeQualifier(input, builder); - } - if (input.getAllowedValues() != null) { - buildAllowedValues(input, builder); + + if (_entityClient.exists(context.getOperationContext(), propertyUrn)) { + 
throw new IllegalArgumentException( + "A structured property already exists with this urn"); } - if (input.getCardinality() != null) { - builder.setCardinality( - PropertyCardinality.valueOf(input.getCardinality().toString())); + + List mcps = new ArrayList<>(); + + // first, create the property definition itself + mcps.add(createPropertyDefinition(context, propertyUrn, id, input)); + + // then add the settings aspect if we're adding any settings inputs + if (input.getSettings() != null) { + mcps.add(createPropertySettings(context, propertyUrn, input.getSettings())); } - builder.setCreated(context.getOperationContext().getAuditStamp()); - builder.setLastModified(context.getOperationContext().getAuditStamp()); - MetadataChangeProposal mcp = builder.build(); - _entityClient.ingestProposal(context.getOperationContext(), mcp, false); + _entityClient.batchIngestProposals(context.getOperationContext(), mcps, false); EntityResponse response = _entityClient.getV2( @@ -103,6 +96,72 @@ public CompletableFuture get(final DataFetchingEnviron }); } + private MetadataChangeProposal createPropertySettings( + @Nonnull final QueryContext context, + @Nonnull final Urn propertyUrn, + final StructuredPropertySettingsInput settingsInput) + throws Exception { + StructuredPropertySettings settings = new StructuredPropertySettings(); + + if (settingsInput.getIsHidden() != null) { + settings.setIsHidden(settingsInput.getIsHidden()); + } + if (settingsInput.getShowInSearchFilters() != null) { + settings.setShowInSearchFilters(settingsInput.getShowInSearchFilters()); + } + if (settingsInput.getShowInAssetSummary() != null) { + settings.setShowInAssetSummary(settingsInput.getShowInAssetSummary()); + } + if (settingsInput.getShowAsAssetBadge() != null) { + settings.setShowAsAssetBadge(settingsInput.getShowAsAssetBadge()); + } + if (settingsInput.getShowInColumnsTable() != null) { + settings.setShowInColumnsTable(settingsInput.getShowInColumnsTable()); + } + 
settings.setLastModified(context.getOperationContext().getAuditStamp()); + + StructuredPropertyUtils.validatePropertySettings(settings, true); + + return buildMetadataChangeProposalWithUrn( + propertyUrn, STRUCTURED_PROPERTY_SETTINGS_ASPECT_NAME, settings); + } + + private MetadataChangeProposal createPropertyDefinition( + @Nonnull final QueryContext context, + @Nonnull final Urn propertyUrn, + @Nonnull final String id, + final CreateStructuredPropertyInput input) + throws Exception { + StructuredPropertyDefinitionPatchBuilder builder = + new StructuredPropertyDefinitionPatchBuilder().urn(propertyUrn); + + builder.setQualifiedName(id); + builder.setValueType(input.getValueType()); + input.getEntityTypes().forEach(builder::addEntityType); + if (input.getDisplayName() != null) { + builder.setDisplayName(input.getDisplayName()); + } + if (input.getDescription() != null) { + builder.setDescription(input.getDescription()); + } + if (input.getImmutable() != null) { + builder.setImmutable(input.getImmutable()); + } + if (input.getTypeQualifier() != null) { + buildTypeQualifier(input, builder); + } + if (input.getAllowedValues() != null) { + buildAllowedValues(input, builder); + } + if (input.getCardinality() != null) { + builder.setCardinality(PropertyCardinality.valueOf(input.getCardinality().toString())); + } + builder.setCreated(context.getOperationContext().getAuditStamp()); + builder.setLastModified(context.getOperationContext().getAuditStamp()); + + return builder.build(); + } + private void buildTypeQualifier( @Nonnull final CreateStructuredPropertyInput input, @Nonnull final StructuredPropertyDefinitionPatchBuilder builder) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolver.java index e7d59494654fdd..58f8d340fcc074 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolver.java @@ -6,6 +6,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.DeleteStructuredPropertyInput; import com.linkedin.entity.client.EntityClient; @@ -42,6 +43,23 @@ public CompletableFuture get(final DataFetchingEnvironment environment) "Unable to delete structured property. Please contact your admin."); } _entityClient.deleteEntity(context.getOperationContext(), propertyUrn); + // Asynchronously Delete all references to the entity (to return quickly) + GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + context.getOperationContext(), propertyUrn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Structured Property with urn %s", + propertyUrn), + e); + } + return null; + }, + this.getClass().getSimpleName(), + "get"); return true; } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java index ea8c6dac36a4af..313e0a16d8916d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java @@ -93,7 +93,7 @@ public CompletableFuture get(final DataFetchingEnviron "Unable to update structured property. Please contact your admin."); } final Urn propertyUrn = UrnUtils.getUrn(input.getUrn()); - StructuredPropertyDefinition existingDefinition = + final EntityResponse entityResponse = getExistingStructuredProperty(context, propertyUrn); - StructuredPropertyDefinitionPatchBuilder builder = - new StructuredPropertyDefinitionPatchBuilder().urn(propertyUrn); - if (input.getDisplayName() != null) { - builder.setDisplayName(input.getDisplayName()); - } - if (input.getDescription() != null) { - builder.setDescription(input.getDescription()); - } - if (input.getImmutable() != null) { - builder.setImmutable(input.getImmutable()); - } - if (input.getTypeQualifier() != null) { - buildTypeQualifier(input, builder, existingDefinition); - } - if (input.getNewAllowedValues() != null) { - buildAllowedValues(input, builder); - } - if (input.getSetCardinalityAsMultiple() != null) { - builder.setCardinality(PropertyCardinality.MULTIPLE); + List mcps = new ArrayList<>(); + + // first update the definition aspect if we need to + MetadataChangeProposal definitionMcp = + updateDefinition(input, context, propertyUrn, entityResponse); + if (definitionMcp != null) { + mcps.add(definitionMcp); } - if (input.getNewEntityTypes() != null) { - input.getNewEntityTypes().forEach(builder::addEntityType); + + // then update the settings aspect if we need to + if (input.getSettings() != null) { + mcps.add(updateSettings(context, input.getSettings(), propertyUrn, entityResponse)); } - builder.setLastModified(context.getOperationContext().getAuditStamp()); - MetadataChangeProposal mcp = builder.build(); - _entityClient.ingestProposal(context.getOperationContext(), mcp, false); + _entityClient.batchIngestProposals(context.getOperationContext(), mcps, false); 
EntityResponse response = _entityClient.getV2( @@ -102,6 +95,120 @@ public CompletableFuture get(final DataFetchingEnviron }); } + private boolean hasSettingsChanged( + StructuredPropertySettings existingSettings, StructuredPropertySettingsInput settingsInput) { + if (settingsInput.getIsHidden() != null + && !existingSettings.isIsHidden().equals(settingsInput.getIsHidden())) { + return true; + } + if (settingsInput.getShowInSearchFilters() != null + && !existingSettings + .isShowInSearchFilters() + .equals(settingsInput.getShowInSearchFilters())) { + return true; + } + if (settingsInput.getShowInAssetSummary() != null + && !existingSettings.isShowInAssetSummary().equals(settingsInput.getShowInAssetSummary())) { + return true; + } + if (settingsInput.getShowAsAssetBadge() != null + && !existingSettings.isShowAsAssetBadge().equals(settingsInput.getShowAsAssetBadge())) { + return true; + } + if (settingsInput.getShowInColumnsTable() != null + && !existingSettings.isShowInColumnsTable().equals(settingsInput.getShowInColumnsTable())) { + return true; + } + return false; + } + + private MetadataChangeProposal updateSettings( + @Nonnull final QueryContext context, + @Nonnull final StructuredPropertySettingsInput settingsInput, + @Nonnull final Urn propertyUrn, + @Nonnull final EntityResponse entityResponse) + throws Exception { + StructuredPropertySettings existingSettings = + getExistingStructuredPropertySettings(entityResponse); + // check if settings has changed to determine if we should update the timestamp + boolean hasChanged = hasSettingsChanged(existingSettings, settingsInput); + if (hasChanged) { + existingSettings.setLastModified(context.getOperationContext().getAuditStamp()); + } + + if (settingsInput.getIsHidden() != null) { + existingSettings.setIsHidden(settingsInput.getIsHidden()); + } + if (settingsInput.getShowInSearchFilters() != null) { + existingSettings.setShowInSearchFilters(settingsInput.getShowInSearchFilters()); + } + if 
(settingsInput.getShowInAssetSummary() != null) { + existingSettings.setShowInAssetSummary(settingsInput.getShowInAssetSummary()); + } + if (settingsInput.getShowAsAssetBadge() != null) { + existingSettings.setShowAsAssetBadge(settingsInput.getShowAsAssetBadge()); + } + if (settingsInput.getShowInColumnsTable() != null) { + existingSettings.setShowInColumnsTable(settingsInput.getShowInColumnsTable()); + } + + StructuredPropertyUtils.validatePropertySettings(existingSettings, true); + + return buildMetadataChangeProposalWithUrn( + propertyUrn, STRUCTURED_PROPERTY_SETTINGS_ASPECT_NAME, existingSettings); + } + + private MetadataChangeProposal updateDefinition( + @Nonnull final UpdateStructuredPropertyInput input, + @Nonnull final QueryContext context, + @Nonnull final Urn propertyUrn, + @Nonnull final EntityResponse entityResponse) + throws Exception { + StructuredPropertyDefinition existingDefinition = + getExistingStructuredPropertyDefinition(entityResponse); + StructuredPropertyDefinitionPatchBuilder builder = + new StructuredPropertyDefinitionPatchBuilder().urn(propertyUrn); + + boolean hasUpdatedDefinition = false; + + if (input.getDisplayName() != null) { + builder.setDisplayName(input.getDisplayName()); + hasUpdatedDefinition = true; + } + if (input.getDescription() != null) { + builder.setDescription(input.getDescription()); + hasUpdatedDefinition = true; + } + if (input.getImmutable() != null) { + builder.setImmutable(input.getImmutable()); + hasUpdatedDefinition = true; + } + if (input.getTypeQualifier() != null) { + buildTypeQualifier(input, builder, existingDefinition); + hasUpdatedDefinition = true; + } + if (input.getNewAllowedValues() != null) { + buildAllowedValues(input, builder); + hasUpdatedDefinition = true; + } + if (input.getSetCardinalityAsMultiple() != null + && input.getSetCardinalityAsMultiple().equals(true)) { + builder.setCardinality(PropertyCardinality.MULTIPLE); + hasUpdatedDefinition = true; + } + if (input.getNewEntityTypes() != null) 
{ + input.getNewEntityTypes().forEach(builder::addEntityType); + hasUpdatedDefinition = true; + } + + if (hasUpdatedDefinition) { + builder.setLastModified(context.getOperationContext().getAuditStamp()); + + return builder.build(); + } + return null; + } + private void buildTypeQualifier( @Nonnull final UpdateStructuredPropertyInput input, @Nonnull final StructuredPropertyDefinitionPatchBuilder builder, @@ -141,17 +248,40 @@ private void buildAllowedValues( }); } - private StructuredPropertyDefinition getExistingStructuredProperty( + private EntityResponse getExistingStructuredProperty( @Nonnull final QueryContext context, @Nonnull final Urn propertyUrn) throws Exception { - EntityResponse response = - _entityClient.getV2( - context.getOperationContext(), STRUCTURED_PROPERTY_ENTITY_NAME, propertyUrn, null); + return _entityClient.getV2( + context.getOperationContext(), STRUCTURED_PROPERTY_ENTITY_NAME, propertyUrn, null); + } + private StructuredPropertyDefinition getExistingStructuredPropertyDefinition( + EntityResponse response) throws Exception { if (response != null && response.getAspects().containsKey(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME)) { return new StructuredPropertyDefinition( - response.getAspects().get(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME).getValue().data()); + response + .getAspects() + .get(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .getValue() + .data() + .copy()); } - return null; + throw new IllegalArgumentException( + "Attempting to update a structured property with no definition aspect."); + } + + private StructuredPropertySettings getExistingStructuredPropertySettings(EntityResponse response) + throws Exception { + if (response != null + && response.getAspects().containsKey(STRUCTURED_PROPERTY_SETTINGS_ASPECT_NAME)) { + return new StructuredPropertySettings( + response + .getAspects() + .get(STRUCTURED_PROPERTY_SETTINGS_ASPECT_NAME) + .getValue() + .data() + .copy()); + } + return new StructuredPropertySettings(); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java index 770c8a0d749c38..6c1d7949332fbe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java @@ -103,7 +103,7 @@ public CompletableFuture> { + + private static final String VERSION_SET_FIELD_NAME = "versionSet"; + + private final EntityClient _entityClient; + private final ViewService _viewService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final Entity entity = environment.getSource(); + final QueryContext context = environment.getContext(); + final SearchAcrossEntitiesInput input = + bindArgument(environment.getArgument("input"), SearchAcrossEntitiesInput.class); + + final Criterion versionSetFilter = + CriterionUtils.buildCriterion(VERSION_SET_FIELD_NAME, Condition.EQUAL, entity.getUrn()); + final Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(versionSetFilter)))); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + + final List initialSortCriteria = + SearchUtils.getSortCriteria(input.getSortInput()); + final List sortCriteria = + Stream.concat( + initialSortCriteria.stream(), + Stream.of( + new SortCriterion() + .setField(VERSION_SORT_ID_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))) + .toList(); + + SearchFlags searchFlags = Optional.ofNullable(input.getSearchFlags()).orElse(new SearchFlags()); + searchFlags.setFilterNonLatestVersions(false); + + return SearchUtils.searchAcrossEntities( + context, + 
_entityClient, + _viewService, + input.getTypes(), + input.getQuery(), + SearchUtils.combineFilters(inputFilter, baseFilter), + input.getViewUrn(), + sortCriteria, + searchFlags, + input.getCount(), + input.getStart(), + this.getClass().getSimpleName()); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 561c3b9bec1e03..e23eaa9e5c1ec1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -142,7 +142,8 @@ public Chart apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((chart, dataMap) -> chart.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index 4345819867617b..ab3127a3ae232b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -28,6 +29,11 @@ public DataPlatformInstance apply( result.setType(EntityType.DATA_PLATFORM_INSTANCE); result.setUrn(input.getInstance().toString()); } + result.setPlatform( + DataPlatform.builder() + .setUrn(input.getPlatform().toString()) + .setType(EntityType.DATA_PLATFORM) + .build()); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataTransformLogicMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataTransformLogicMapper.java new file mode 100644 index 00000000000000..04602e7ff6dde9 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataTransformLogicMapper.java @@ -0,0 +1,73 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTransform; +import com.linkedin.datahub.graphql.generated.DataTransformLogic; +import com.linkedin.datahub.graphql.generated.QueryLanguage; +import com.linkedin.datahub.graphql.generated.QueryStatement; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class DataTransformLogicMapper + implements ModelMapper< + com.linkedin.common.DataTransformLogic, + com.linkedin.datahub.graphql.generated.DataTransformLogic> { + + public static final DataTransformLogicMapper INSTANCE = new DataTransformLogicMapper(); + + public static DataTransformLogic map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.DataTransformLogic input) { + return INSTANCE.apply(context, input); + } + + @Override + public DataTransformLogic apply( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.DataTransformLogic input) { + + final DataTransformLogic 
result = new DataTransformLogic(); + + // Map transforms array using DataTransformMapper + result.setTransforms( + input.getTransforms().stream() + .map(transform -> DataTransformMapper.map(context, transform)) + .collect(Collectors.toList())); + + return result; + } +} + +class DataTransformMapper + implements ModelMapper< + com.linkedin.common.DataTransform, com.linkedin.datahub.graphql.generated.DataTransform> { + + public static final DataTransformMapper INSTANCE = new DataTransformMapper(); + + public static DataTransform map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.DataTransform input) { + return INSTANCE.apply(context, input); + } + + @Override + public DataTransform apply( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.DataTransform input) { + + final DataTransform result = new DataTransform(); + + // Map query statement if present + if (input.hasQueryStatement()) { + QueryStatement statement = + new QueryStatement( + input.getQueryStatement().getValue(), + QueryLanguage.valueOf(input.getQueryStatement().getLanguage().toString())); + result.setQueryStatement(statement); + } + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/QueryPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/QueryPropertiesMapper.java new file mode 100644 index 00000000000000..e29bea5b3943ce --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/QueryPropertiesMapper.java @@ -0,0 +1,61 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.*; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.query.QueryProperties; +import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; + +public class QueryPropertiesMapper + implements ModelMapper< + QueryProperties, com.linkedin.datahub.graphql.generated.QueryProperties> { + + public static final QueryPropertiesMapper INSTANCE = new QueryPropertiesMapper(); + + public static com.linkedin.datahub.graphql.generated.QueryProperties map( + @Nullable final QueryContext context, @Nonnull final QueryProperties input) { + return INSTANCE.apply(context, input); + } + + @Override + public com.linkedin.datahub.graphql.generated.QueryProperties apply( + @Nullable final QueryContext context, @Nonnull final QueryProperties input) { + + final com.linkedin.datahub.graphql.generated.QueryProperties result = + new com.linkedin.datahub.graphql.generated.QueryProperties(); + + // Map Query Source + result.setSource(QuerySource.valueOf(input.getSource().toString())); + + // Map Query Statement + result.setStatement( + new QueryStatement( + input.getStatement().getValue(), + QueryLanguage.valueOf(input.getStatement().getLanguage().toString()))); + + // Map optional fields + result.setName(input.getName(GetMode.NULL)); + result.setDescription(input.getDescription(GetMode.NULL)); + + // Map origin if present + if (input.hasOrigin() && input.getOrigin() != null) { + result.setOrigin(UrnToEntityMapper.map(context, input.getOrigin())); + } + + // Map created audit stamp + AuditStamp created = new AuditStamp(); + created.setTime(input.getCreated().getTime()); + created.setActor(input.getCreated().getActor(GetMode.NULL).toString()); + result.setCreated(created); + + // Map last modified audit stamp + AuditStamp lastModified = new AuditStamp(); + lastModified.setTime(input.getLastModified().getTime()); + lastModified.setActor(input.getLastModified().getActor(GetMode.NULL).toString()); + result.setLastModified(lastModified); + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index 9f5025ccf303a2..0b3a445175c4c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -69,6 +69,9 @@ public com.linkedin.metadata.query.SearchFlags apply( result.setCustomHighlightingFields( new StringArray(searchFlags.getCustomHighlightingFields())); } + if (searchFlags.getFilterNonLatestVersions() != null) { + result.setFilterNonLatestVersions(searchFlags.getFilterNonLatestVersions()); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/TimeStampToAuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/TimeStampToAuditStampMapper.java new file mode 100644 index 00000000000000..58f78b146b406c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/TimeStampToAuditStampMapper.java @@ -0,0 +1,24 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.TimeStamp; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AuditStamp; +import javax.annotation.Nullable; + +public class TimeStampToAuditStampMapper { + + public static final TimeStampToAuditStampMapper INSTANCE = new TimeStampToAuditStampMapper(); + + public static AuditStamp map( + @Nullable final QueryContext context, @Nullable final TimeStamp input) { + if (input == null) { + return null; + } + final AuditStamp result = new AuditStamp(); + result.setTime(input.getTime()); + if (input.hasActor()) { + result.setActor(input.getActor().toString()); + } + return result; + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 1988cafc486c18..b815c1b1c1dd9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -18,6 +18,7 @@ import com.linkedin.datahub.graphql.generated.DataJob; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; +import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.Domain; @@ -40,6 +41,7 @@ import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.generated.Test; +import com.linkedin.datahub.graphql.generated.VersionSet; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -225,6 +227,16 @@ public Entity apply(@Nullable QueryContext context, Urn input) { ((BusinessAttribute) partialEntity).setUrn(input.toString()); ((BusinessAttribute) partialEntity).setType(EntityType.BUSINESS_ATTRIBUTE); } + if (input.getEntityType().equals(DATA_PROCESS_INSTANCE_ENTITY_NAME)) { + partialEntity = new DataProcessInstance(); + ((DataProcessInstance) partialEntity).setUrn(input.toString()); + ((DataProcessInstance) partialEntity).setType(EntityType.DATA_PROCESS_INSTANCE); + } + if (input.getEntityType().equals(VERSION_SET_ENTITY_NAME)) { + partialEntity = new VersionSet(); + ((VersionSet) partialEntity).setUrn(input.toString()); + ((VersionSet) 
partialEntity).setType(EntityType.VERSION_SET); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index 02357b3ddc349e..7ac00c46475bce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -161,7 +161,9 @@ public static Container map( if (envelopedStructuredProps != null) { result.setStructuredProperties( StructuredPropertiesMapper.map( - context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); + context, + new StructuredProperties(envelopedStructuredProps.getValue().data()), + entityUrn)); } final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 6246cf64bbf7f8..010816431f54de 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -59,7 +59,8 @@ public CorpGroup apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index 4fa278983399b1..a94b555daebdfb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -88,7 +88,8 @@ public CorpUser apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 4fa52b11365641..fd1c7a5db2a79d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -142,7 +142,8 @@ public Dashboard apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((dashboard, dataMap) -> dashboard.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 3a697517bdecee..f2d38aadf49656 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java 
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -74,6 +74,7 @@ public class DataFlowType DOMAINS_ASPECT_NAME, DEPRECATION_ASPECT_NAME, DATA_PLATFORM_INSTANCE_ASPECT_NAME, + CONTAINER_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, STRUCTURED_PROPERTIES_ASPECT_NAME, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 9e2612f60abda1..0902d6f2080b8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -16,6 +16,7 @@ import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataFlowEditableProperties; import com.linkedin.datahub.graphql.generated.DataFlowInfo; @@ -106,6 +107,7 @@ public DataFlow apply( (dataset, dataMap) -> dataset.setDataPlatformInstance( DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DataFlowMapper::mapContainers); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> @@ -114,7 +116,8 @@ public DataFlow apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ 
-205,6 +208,17 @@ private static void mapGlobalTags( dataFlow.setTags(globalTags); } + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataFlow.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); + } + private static void mapDomains( @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index b32832a28d5d57..317ee39ea565e5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -75,11 +75,13 @@ public class DataJobType DOMAINS_ASPECT_NAME, DEPRECATION_ASPECT_NAME, DATA_PLATFORM_INSTANCE_ASPECT_NAME, + CONTAINER_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, SUB_TYPES_ASPECT_NAME, STRUCTURED_PROPERTIES_ASPECT_NAME, - FORMS_ASPECT_NAME); + FORMS_ASPECT_NAME, + DATA_TRANSFORM_LOGIC_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("flow"); private final EntityClient _entityClient; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index d7da875bc2a29f..3403d1f8f7b7f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -4,20 +4,12 @@ import static com.linkedin.metadata.Constants.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.BrowsePathsV2; -import com.linkedin.common.DataPlatformInstance; -import com.linkedin.common.Deprecation; -import com.linkedin.common.Forms; -import com.linkedin.common.GlobalTags; -import com.linkedin.common.GlossaryTerms; -import com.linkedin.common.InstitutionalMemory; -import com.linkedin.common.Ownership; -import com.linkedin.common.Status; -import com.linkedin.common.SubTypes; +import com.linkedin.common.*; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataJob; import com.linkedin.datahub.graphql.generated.DataJobEditableProperties; @@ -26,15 +18,7 @@ import com.linkedin.datahub.graphql.generated.DataJobProperties; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; -import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; -import com.linkedin.datahub.graphql.types.common.mappers.FineGrainedLineagesMapper; -import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; -import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import 
com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.*; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.form.FormsMapper; @@ -129,15 +113,27 @@ public DataJob apply( } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { result.setDataPlatformInstance( DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); + } else if (CONTAINER_ASPECT_NAME.equals(name)) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(data); + result.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(data))); } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { result.setSubTypes(SubTypesMapper.map(context, new SubTypes(data))); } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(name)) { result.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(data))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(data), entityUrn)); } else if (FORMS_ASPECT_NAME.equals(name)) { result.setForms(FormsMapper.map(new Forms(data), entityUrn.toString())); + } else if (DATA_TRANSFORM_LOGIC_ASPECT_NAME.equals(name)) { + result.setDataTransformLogic( + DataTransformLogicMapper.map(context, new DataTransformLogic(data))); } }); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/DataProcessInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/DataProcessInstanceType.java new file mode 100644 index 00000000000000..eeaaaa96f51704 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/DataProcessInstanceType.java @@ -0,0 +1,102 @@ +package com.linkedin.datahub.graphql.types.dataprocessinst; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.DataProcessInstance; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.dataprocessinst.mappers.DataProcessInstanceMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class DataProcessInstanceType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + DATA_PROCESS_INSTANCE_KEY_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, + DATA_PROCESS_INSTANCE_INPUT_ASPECT_NAME, + DATA_PROCESS_INSTANCE_OUTPUT_ASPECT_NAME, + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + TEST_RESULTS_ASPECT_NAME, + DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, + ML_TRAINING_RUN_PROPERTIES_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + CONTAINER_ASPECT_NAME); + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; + + @Override + public EntityType type() { + return 
EntityType.DATA_PROCESS_INSTANCE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataProcessInstance.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataProcessInstanceUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + Map entities = new HashMap<>(); + if (_featureFlags.isDataProcessInstanceEntityEnabled()) { + entities = + _entityClient.batchGetV2( + context.getOperationContext(), + DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + ASPECTS_TO_FETCH); + } + + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + if (_featureFlags.isDataProcessInstanceEntityEnabled()) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + } + + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataProcessInstanceMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + + } catch (Exception e) { + throw new RuntimeException("Failed to load Data Process Instance entity", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index 7a4d342281fe54..d721f5a5fb522d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -2,25 +2,37 @@ import static com.linkedin.metadata.Constants.*; +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.SubTypes; +import 
com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.mlmodel.mappers.MLHyperParamMapper; +import com.linkedin.datahub.graphql.types.mlmodel.mappers.MLMetricMapper; import com.linkedin.dataprocess.DataProcessInstanceProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.ml.metadata.MLTrainingRunProperties; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * *

To be replaced by auto-generated mappers implementations */ +@Slf4j public class DataProcessInstanceMapper implements ModelMapper { public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); @@ -30,6 +42,19 @@ public static DataProcessInstance map( return INSTANCE.apply(context, entityResponse); } + private void mapContainers( + @Nullable final QueryContext context, + @Nonnull DataProcessInstance dataProcessInstance, + @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataProcessInstance.setContainer( + com.linkedin.datahub.graphql.generated.Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); + } + @Override public DataProcessInstance apply( @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { @@ -37,24 +62,93 @@ public DataProcessInstance apply( result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATA_PROCESS_INSTANCE); + Urn entityUrn = entityResponse.getUrn(); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( - context, DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, + (dataProcessInstance, dataMap) -> + mapDataProcessProperties(context, dataProcessInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + ML_TRAINING_RUN_PROPERTIES_ASPECT_NAME, + (dataProcessInstance, dataMap) -> + mapTrainingRunProperties(context, dataProcessInstance, dataMap)); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataProcessInstance, dataMap) -> { + DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); + dataProcessInstance.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, dataPlatformInstance)); + 
}); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dataProcessInstance, dataMap) -> + dataProcessInstance.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + mappingHelper.mapToResult( + CONTAINER_ASPECT_NAME, + (dataProcessInstance, dataMap) -> mapContainers(context, dataProcessInstance, dataMap)); return mappingHelper.getResult(); } - private void mapDataProcessProperties( + private void mapTrainingRunProperties( @Nonnull QueryContext context, @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + MLTrainingRunProperties trainingProperties = new MLTrainingRunProperties(dataMap); + + com.linkedin.datahub.graphql.generated.MLTrainingRunProperties properties = + new com.linkedin.datahub.graphql.generated.MLTrainingRunProperties(); + if (trainingProperties.hasId()) { + properties.setId(trainingProperties.getId()); + } + if (trainingProperties.hasOutputUrls()) { + properties.setOutputUrls( + trainingProperties.getOutputUrls().stream() + .map(url -> url.toString()) + .collect(Collectors.toList())); + } + if (trainingProperties.getHyperParams() != null) { + properties.setHyperParams( + trainingProperties.getHyperParams().stream() + .map(param -> MLHyperParamMapper.map(context, param)) + .collect(Collectors.toList())); + } + if (trainingProperties.getTrainingMetrics() != null) { + properties.setTrainingMetrics( + trainingProperties.getTrainingMetrics().stream() + .map(metric -> MLMetricMapper.map(context, metric)) + .collect(Collectors.toList())); + } + if (trainingProperties.hasId()) { + properties.setId(trainingProperties.getId()); + } + dpi.setMlTrainingRunProperties(properties); + } + + private void mapDataProcessProperties( + @Nonnull QueryContext context, + @Nonnull DataProcessInstance dpi, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); + + com.linkedin.datahub.graphql.generated.DataProcessInstanceProperties 
properties = + new com.linkedin.datahub.graphql.generated.DataProcessInstanceProperties(); + dpi.setName(dataProcessInstanceProperties.getName()); - if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(context, dataProcessInstanceProperties.getCreated())); - } + properties.setName(dataProcessInstanceProperties.getName()); if (dataProcessInstanceProperties.hasExternalUrl()) { dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); + properties.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); + } + if (dataProcessInstanceProperties.hasCustomProperties()) { + properties.setCustomProperties( + CustomPropertiesMapper.map( + dataProcessInstanceProperties.getCustomProperties(), entityUrn)); + } + if (dataProcessInstanceProperties.hasCreated()) { + dpi.setCreated(AuditStampMapper.map(context, dataProcessInstanceProperties.getCreated())); } + dpi.setProperties(properties); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 08637dbfd01edc..8693ec97f1a2ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -92,7 +92,8 @@ public DataProduct apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 6a3f9cb9b21f38..74ef4cf125cd24 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -89,7 +89,8 @@ public class DatasetType ACCESS_ASPECT_NAME, STRUCTURED_PROPERTIES_ASPECT_NAME, FORMS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME); + SUB_TYPES_ASPECT_NAME, + VERSION_PROPERTIES_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 0869463ba73ac2..aa7033b180e80e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -17,6 +17,7 @@ import com.linkedin.common.Status; import com.linkedin.common.SubTypes; import com.linkedin.common.TimeStamp; +import com.linkedin.common.VersionProperties; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -48,6 +49,7 @@ import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.datahub.graphql.types.versioning.VersionPropertiesMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; import com.linkedin.dataset.EditableDatasetProperties; @@ -173,7 +175,8 @@ public Dataset apply( 
STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((dataset, dataMap) -> @@ -182,6 +185,11 @@ public Dataset apply( SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + mappingHelper.mapToResult( + VERSION_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> + entity.setVersionProperties( + VersionPropertiesMapper.map(context, new VersionProperties(dataMap)))); if (context != null && !canView(context.getOperationContext(), entityUrn)) { return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dataset.class); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index 7d05e0862a96da..ffcb94a0b7e29e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -71,7 +71,9 @@ public static Domain map(@Nullable QueryContext context, final EntityResponse en if (envelopedStructuredProps != null) { result.setStructuredProperties( StructuredPropertiesMapper.map( - context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); + context, + new StructuredProperties(envelopedStructuredProps.getValue().data()), + entityUrn)); } final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java 
index 4912d18614f415..a694b62999080e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -59,7 +59,8 @@ public GlossaryNode apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index 1274646f45ec49..e309ffad84df58 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -90,7 +90,8 @@ public GlossaryTerm apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/EmbeddedModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/EmbeddedModelMapper.java new file mode 100644 index 00000000000000..62e7c90ab9b0e0 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/EmbeddedModelMapper.java @@ -0,0 +1,12 @@ +package 
com.linkedin.datahub.graphql.types.mappers; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** Made for models that are embedded in other models and thus do not encode their own URN. */ +public interface EmbeddedModelMapper { + O apply( + @Nullable final QueryContext context, @Nonnull final I input, @Nonnull final Urn entityUrn); +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 0d69e62c621a60..8fe58df2d2edec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -70,6 +70,9 @@ public static FacetMetadata mapFacet( aggregationFacets.stream() .map(facet -> facet.equals("entity") || facet.contains("_entityType")) .collect(Collectors.toList()); + if (aggregationMetadata.getEntity() != null) { + facetMetadata.setEntity(UrnToEntityMapper.map(context, aggregationMetadata.getEntity())); + } facetMetadata.setField(aggregationMetadata.getName()); facetMetadata.setDisplayName( Optional.ofNullable(aggregationMetadata.getDisplayName()) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java index a4f3aa7a0e2261..74076fd2f4ee9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java @@ -75,7 +75,8 @@ public MLFeature apply( mlFeature.setOwnership( OwnershipMapper.map(context, new Ownership(dataMap), 
entityUrn))); mappingHelper.mapToResult( - context, ML_FEATURE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapMLFeatureProperties); + ML_FEATURE_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> mapMLFeatureProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> @@ -115,7 +116,8 @@ public MLFeature apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((mlFeature, dataMap) -> mlFeature.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -137,10 +139,13 @@ private static void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataM private static void mapMLFeatureProperties( @Nullable final QueryContext context, @Nonnull MLFeature mlFeature, - @Nonnull DataMap dataMap) { + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); + com.linkedin.datahub.graphql.generated.MLFeatureProperties graphqlProperties = + MLFeaturePropertiesMapper.map(context, featureProperties, entityUrn); + mlFeature.setFeatureProperties(graphqlProperties); + mlFeature.setProperties(graphqlProperties); mlFeature.setDescription(featureProperties.getDescription()); if (featureProperties.getDataType() != null) { mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 
92d090275867da..08ac3a1b5f138f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,29 +1,34 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; -import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.mappers.EmbeddedModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; -import lombok.NonNull; public class MLFeaturePropertiesMapper - implements ModelMapper { + implements EmbeddedModelMapper< + com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> { public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); public static MLFeatureProperties map( @Nullable QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(context, mlFeatureProperties); + @Nonnull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties, + @Nonnull Urn entityUrn) { + return INSTANCE.apply(context, mlFeatureProperties, entityUrn); } @Override public MLFeatureProperties apply( @Nullable QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + @Nonnull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties, + @Nonnull Urn entityUrn) { final MLFeatureProperties result = new MLFeatureProperties(); 
result.setDescription(mlFeatureProperties.getDescription()); @@ -45,6 +50,9 @@ public MLFeatureProperties apply( .collect(Collectors.toList())); } + result.setCustomProperties( + CustomPropertiesMapper.map(mlFeatureProperties.getCustomProperties(), entityUrn)); + return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index 30bf4dda1cf4fd..65bc8e84f7bbb5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -76,7 +76,7 @@ public MLFeatureTable apply( mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); mappingHelper.mapToResult( ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, - (entity, dataMap) -> this.mapMLFeatureTableProperties(context, entity, dataMap, entityUrn)); + (entity, dataMap) -> mapMLFeatureTableProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> @@ -117,7 +117,8 @@ public MLFeatureTable apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((mlFeatureTable, dataMap) -> mlFeatureTable.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -145,10 +146,10 @@ private static void mapMLFeatureTableProperties( @Nonnull DataMap dataMap, Urn entityUrn) { MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); - mlFeatureTable.setFeatureTableProperties( - MLFeatureTablePropertiesMapper.map(context, featureTableProperties, 
entityUrn)); - mlFeatureTable.setProperties( - MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); + com.linkedin.datahub.graphql.generated.MLFeatureTableProperties graphqlProperties = + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn); + mlFeatureTable.setFeatureTableProperties(graphqlProperties); + mlFeatureTable.setProperties(graphqlProperties); mlFeatureTable.setDescription(featureTableProperties.getDescription()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index d9fed13ed0d0be..3c054cb6a9a5b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -8,26 +8,30 @@ import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.mappers.EmbeddedModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; -import lombok.NonNull; -public class MLFeatureTablePropertiesMapper { +public class MLFeatureTablePropertiesMapper + implements EmbeddedModelMapper< + com.linkedin.ml.metadata.MLFeatureTableProperties, MLFeatureTableProperties> { public static final MLFeatureTablePropertiesMapper INSTANCE = new MLFeatureTablePropertiesMapper(); public static MLFeatureTableProperties map( @Nullable final QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, - Urn entityUrn) { + @Nonnull final 
com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + @Nonnull Urn entityUrn) { return INSTANCE.apply(context, mlFeatureTableProperties, entityUrn); } - public static MLFeatureTableProperties apply( + @Override + public MLFeatureTableProperties apply( @Nullable final QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, - Urn entityUrn) { + @Nonnull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + @Nonnull Urn entityUrn) { final MLFeatureTableProperties result = new MLFeatureTableProperties(); result.setDescription(mlFeatureTableProperties.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index 7e99040e44c82e..9009972a47616d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -75,9 +75,8 @@ public MLModelGroup apply( mappingHelper.mapToResult( ML_MODEL_GROUP_KEY_ASPECT_NAME, MLModelGroupMapper::mapToMLModelGroupKey); mappingHelper.mapToResult( - context, ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, - MLModelGroupMapper::mapToMLModelGroupProperties); + (entity, dataMap) -> mapToMLModelGroupProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( STATUS_ASPECT_NAME, (mlModelGroup, dataMap) -> @@ -112,7 +111,8 @@ public MLModelGroup apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((mlModelGroup, dataMap) -> mlModelGroup.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( 
FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -135,9 +135,13 @@ private static void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap data } private static void mapToMLModelGroupProperties( - @Nullable final QueryContext context, MLModelGroup mlModelGroup, DataMap dataMap) { + @Nullable final QueryContext context, + MLModelGroup mlModelGroup, + DataMap dataMap, + @Nonnull Urn entityUrn) { MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(context, modelGroupProperties)); + mlModelGroup.setProperties( + MLModelGroupPropertiesMapper.map(context, modelGroupProperties, entityUrn)); if (modelGroupProperties.getDescription() != null) { mlModelGroup.setDescription(modelGroupProperties.getDescription()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index 9f1918f9ec4893..a6cfded9865d90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,27 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; -import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.mappers.EmbeddedModelMapper; +import javax.annotation.Nonnull; import javax.annotation.Nullable; -import lombok.NonNull; public class MLModelGroupPropertiesMapper - implements ModelMapper< + implements 
EmbeddedModelMapper< com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); public static MLModelGroupProperties map( @Nullable QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(context, mlModelGroupProperties); + @Nonnull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties, + @Nonnull Urn entityUrn) { + return INSTANCE.apply(context, mlModelGroupProperties, entityUrn); } @Override public MLModelGroupProperties apply( @Nullable QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + @Nonnull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties, + @Nonnull Urn entityUrn) { final MLModelGroupProperties result = new MLModelGroupProperties(); result.setDescription(mlModelGroupProperties.getDescription()); @@ -30,6 +34,9 @@ public MLModelGroupProperties apply( } result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + result.setCustomProperties( + CustomPropertiesMapper.map(mlModelGroupProperties.getCustomProperties(), entityUrn)); + return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index a3bc5c663c89ae..11e6b5180f8c1c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -13,6 +13,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.VersionProperties; import com.linkedin.common.urn.Urn; import 
com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; @@ -38,6 +39,7 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.datahub.graphql.types.versioning.VersionPropertiesMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; @@ -174,11 +176,17 @@ public MLModel apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((dataset, dataMap) -> dataset.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + mappingHelper.mapToResult( + VERSION_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> + entity.setVersionProperties( + VersionPropertiesMapper.map(context, new VersionProperties(dataMap)))); if (context != null && !canView(context.getOperationContext(), entityUrn)) { return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModel.class); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index a89904b3ab915c..7b00fe88f2d683 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -7,28 +7,40 @@ import com.linkedin.datahub.graphql.generated.MLModelGroup; import 
com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.TimeStampToAuditStampMapper; +import com.linkedin.datahub.graphql.types.mappers.EmbeddedModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; -import lombok.NonNull; -public class MLModelPropertiesMapper { +public class MLModelPropertiesMapper + implements EmbeddedModelMapper { public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); public static MLModelProperties map( @Nullable final QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + @Nonnull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { return INSTANCE.apply(context, mlModelProperties, entityUrn); } public MLModelProperties apply( @Nullable final QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, - Urn entityUrn) { + @Nonnull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + @Nonnull Urn entityUrn) { final MLModelProperties result = new MLModelProperties(); result.setDate(mlModelProperties.getDate()); + if (mlModelProperties.getName() != null) { + result.setName(mlModelProperties.getName()); + } else { + // backfill name from URN for backwards compatibility + result.setName(entityUrn.getEntityKey().get(1)); // indexed access is safe here + } + result.setCreated(TimeStampToAuditStampMapper.map(context, mlModelProperties.getCreated())); + result.setLastModified( + TimeStampToAuditStampMapper.map(context, mlModelProperties.getLastModified())); result.setDescription(mlModelProperties.getDescription()); if (mlModelProperties.getExternalUrl() != null) { result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index 36784f96ea30ea..d48d93ede9c1ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -74,9 +74,8 @@ public MLPrimaryKey apply( mappingHelper.mapToResult( ML_PRIMARY_KEY_KEY_ASPECT_NAME, MLPrimaryKeyMapper::mapMLPrimaryKeyKey); mappingHelper.mapToResult( - context, ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, - MLPrimaryKeyMapper::mapMLPrimaryKeyProperties); + (entity, dataMap) -> mapMLPrimaryKeyProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> @@ -112,7 +111,8 @@ public MLPrimaryKey apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -131,11 +131,15 @@ private static void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMa } private static void mapMLPrimaryKeyProperties( - @Nullable final QueryContext context, MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + @Nullable final QueryContext context, + MLPrimaryKey mlPrimaryKey, + DataMap dataMap, + @Nonnull Urn entityUrn) { MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties( - MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); + 
com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties graphqlProperties = + MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties, entityUrn); + mlPrimaryKey.setPrimaryKeyProperties(graphqlProperties); + mlPrimaryKey.setProperties(graphqlProperties); mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); if (primaryKeyProperties.getDataType() != null) { mlPrimaryKey.setDataType( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 09e41fe7ee4e8e..0bbe8f53f32713 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -1,30 +1,34 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; -import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.mappers.EmbeddedModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; -import lombok.NonNull; public class MLPrimaryKeyPropertiesMapper - implements ModelMapper< + implements EmbeddedModelMapper< com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); public static MLPrimaryKeyProperties map( @Nullable 
QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(context, mlPrimaryKeyProperties); + @Nonnull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties, + @Nonnull Urn entityUrn) { + return INSTANCE.apply(context, mlPrimaryKeyProperties, entityUrn); } @Override public MLPrimaryKeyProperties apply( @Nullable QueryContext context, - @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + @Nonnull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties, + @Nonnull Urn entityUrn) { final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); result.setDescription(mlPrimaryKeyProperties.getDescription()); @@ -45,6 +49,9 @@ public MLPrimaryKeyProperties apply( }) .collect(Collectors.toList())); + result.setCustomProperties( + CustomPropertiesMapper.map(mlPrimaryKeyProperties.getCustomProperties(), entityUrn)); + return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index e71b569e9ae238..916ebc772f545f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -5,18 +5,13 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; -import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.QueryEntity; -import 
com.linkedin.datahub.graphql.generated.QueryLanguage; -import com.linkedin.datahub.graphql.generated.QuerySource; -import com.linkedin.datahub.graphql.generated.QueryStatement; import com.linkedin.datahub.graphql.generated.QuerySubject; -import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.common.mappers.QueryPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -48,7 +43,10 @@ public QueryEntity apply( result.setType(EntityType.QUERY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(context, QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); + mappingHelper.mapToResult( + QUERY_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> + entity.setProperties(QueryPropertiesMapper.map(context, new QueryProperties(dataMap)))); mappingHelper.mapToResult(QUERY_SUBJECTS_ASPECT_NAME, this::mapQuerySubjects); mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapPlatform); return mappingHelper.getResult(); @@ -64,37 +62,6 @@ private void mapPlatform(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { } } - private void mapQueryProperties( - @Nullable final QueryContext context, @Nonnull QueryEntity query, @Nonnull DataMap dataMap) { - QueryProperties queryProperties = new QueryProperties(dataMap); - com.linkedin.datahub.graphql.generated.QueryProperties res = - new com.linkedin.datahub.graphql.generated.QueryProperties(); - - // Query Source must be kept in sync. 
- res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())); - res.setStatement( - new QueryStatement( - queryProperties.getStatement().getValue(), - QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); - res.setName(queryProperties.getName(GetMode.NULL)); - res.setDescription(queryProperties.getDescription(GetMode.NULL)); - if (queryProperties.hasOrigin() && queryProperties.getOrigin() != null) { - res.setOrigin(UrnToEntityMapper.map(context, queryProperties.getOrigin())); - } - - AuditStamp created = new AuditStamp(); - created.setTime(queryProperties.getCreated().getTime()); - created.setActor(queryProperties.getCreated().getActor(GetMode.NULL).toString()); - res.setCreated(created); - - AuditStamp lastModified = new AuditStamp(); - lastModified.setTime(queryProperties.getLastModified().getTime()); - lastModified.setActor(queryProperties.getLastModified().getActor(GetMode.NULL).toString()); - res.setLastModified(lastModified); - - query.setProperties(res); - } - @Nonnull private void mapQuerySubjects(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QuerySubjects querySubjects = new QuerySubjects(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java index b1f27357d45504..30eac54aede9bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java @@ -41,7 +41,8 @@ public SchemaFieldEntity apply( STRUCTURED_PROPERTIES_ASPECT_NAME, ((schemaField, dataMap) -> schemaField.setStructuredProperties( - StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map( + context, new StructuredProperties(dataMap), entityUrn)))); 
mappingHelper.mapToResult( BUSINESS_ATTRIBUTE_ASPECT, (((schemaField, dataMap) -> diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java index dc1ff7ca329714..4f155903c055b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java @@ -25,23 +25,29 @@ public class StructuredPropertiesMapper { public static final StructuredPropertiesMapper INSTANCE = new StructuredPropertiesMapper(); public static com.linkedin.datahub.graphql.generated.StructuredProperties map( - @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { - return INSTANCE.apply(context, structuredProperties); + @Nullable QueryContext context, + @Nonnull final StructuredProperties structuredProperties, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, structuredProperties, entityUrn); } public com.linkedin.datahub.graphql.generated.StructuredProperties apply( - @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { + @Nullable QueryContext context, + @Nonnull final StructuredProperties structuredProperties, + @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.StructuredProperties result = new com.linkedin.datahub.graphql.generated.StructuredProperties(); result.setProperties( structuredProperties.getProperties().stream() - .map(p -> mapStructuredProperty(context, p)) + .map(p -> mapStructuredProperty(context, p, entityUrn)) .collect(Collectors.toList())); return result; } private StructuredPropertiesEntry mapStructuredProperty( - @Nullable QueryContext context, StructuredPropertyValueAssignment 
valueAssignment) { + @Nullable QueryContext context, + StructuredPropertyValueAssignment valueAssignment, + @Nonnull final Urn entityUrn) { StructuredPropertiesEntry entry = new StructuredPropertiesEntry(); entry.setStructuredProperty(createStructuredPropertyEntity(valueAssignment)); final List values = new ArrayList<>(); @@ -58,6 +64,7 @@ private StructuredPropertiesEntry mapStructuredProperty( }); entry.setValues(values); entry.setValueEntities(entities); + entry.setAssociatedUrn(entityUrn.toString()); return entry; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java index c539c65118ac6d..5dc73d9ad09388 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java @@ -17,6 +17,7 @@ import com.linkedin.datahub.graphql.generated.StringValue; import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertySettings; import com.linkedin.datahub.graphql.generated.TypeQualifier; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.MapperUtils; @@ -55,6 +56,8 @@ public StructuredPropertyEntity apply( MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, (this::mapStructuredPropertyDefinition)); + mappingHelper.mapToResult( + STRUCTURED_PROPERTY_SETTINGS_ASPECT_NAME, (this::mapStructuredPropertySettings)); return mappingHelper.getResult(); } @@ -112,6 +115,21 @@ private List 
mapAllowedValues(@Nonnull PropertyValueArray gmsValue return allowedValues; } + private void mapStructuredPropertySettings( + @Nonnull StructuredPropertyEntity extendedProperty, @Nonnull DataMap dataMap) { + com.linkedin.structured.StructuredPropertySettings gmsSettings = + new com.linkedin.structured.StructuredPropertySettings(dataMap); + StructuredPropertySettings settings = new StructuredPropertySettings(); + + settings.setIsHidden(gmsSettings.isIsHidden()); + settings.setShowInSearchFilters(gmsSettings.isShowInSearchFilters()); + settings.setShowInAssetSummary(gmsSettings.isShowInAssetSummary()); + settings.setShowAsAssetBadge(gmsSettings.isShowAsAssetBadge()); + settings.setShowInColumnsTable(gmsSettings.isShowInColumnsTable()); + + extendedProperty.setSettings(settings); + } + private DataTypeEntity createDataTypeEntity(final Urn dataTypeUrn) { final DataTypeEntity dataType = new DataTypeEntity(); dataType.setUrn(dataTypeUrn.toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java index 22e161d320f215..e451e96a3e84d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java @@ -27,7 +27,8 @@ public class StructuredPropertyType implements com.linkedin.datahub.graphql.types.EntityType { public static final Set ASPECTS_TO_FETCH = - ImmutableSet.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME); + ImmutableSet.of( + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, STRUCTURED_PROPERTY_SETTINGS_ASPECT_NAME); private final EntityClient _entityClient; @Override diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionPropertiesMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionPropertiesMapper.java new file mode 100644 index 00000000000000..f89ebdc9f2b043 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionPropertiesMapper.java @@ -0,0 +1,53 @@ +package com.linkedin.datahub.graphql.types.versioning; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.VersionProperties; +import com.linkedin.datahub.graphql.generated.VersionSet; +import com.linkedin.datahub.graphql.types.mappers.MapperUtils; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.mlmodel.mappers.VersionTagMapper; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class VersionPropertiesMapper + implements ModelMapper { + public static final VersionPropertiesMapper INSTANCE = new VersionPropertiesMapper(); + + public static VersionProperties map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.VersionProperties versionProperties) { + return INSTANCE.apply(context, versionProperties); + } + + @Override + public VersionProperties apply( + @Nullable QueryContext context, @Nonnull com.linkedin.common.VersionProperties input) { + final VersionProperties result = new VersionProperties(); + + result.setVersionSet( + VersionSet.builder() + .setUrn(input.getVersionSet().toString()) + .setType(EntityType.VERSION_SET) + .build()); + + result.setVersion(VersionTagMapper.map(context, input.getVersion())); + result.setAliases( + input.getAliases().stream() + .map(alias -> VersionTagMapper.map(context, alias)) + .collect(Collectors.toList())); + result.setComment(input.getComment()); + result.setIsLatest(Boolean.TRUE.equals(input.isIsLatest())); + + if (input.getMetadataCreatedTimestamp() != null) { + 
result.setCreated(MapperUtils.createResolvedAuditStamp(input.getMetadataCreatedTimestamp())); + } + if (input.getSourceCreatedTimestamp() != null) { + result.setCreatedInSource( + MapperUtils.createResolvedAuditStamp(input.getSourceCreatedTimestamp())); + } + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionSetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionSetMapper.java new file mode 100644 index 00000000000000..3a07115ece5f6e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionSetMapper.java @@ -0,0 +1,47 @@ +package com.linkedin.datahub.graphql.types.versioning; + +import static com.linkedin.metadata.Constants.VERSION_SET_PROPERTIES_ASPECT_NAME; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.VersionSet; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class VersionSetMapper implements ModelMapper { + + public static final VersionSetMapper INSTANCE = new VersionSetMapper(); + + public static VersionSet map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public VersionSet apply(@Nullable QueryContext context, @Nonnull EntityResponse entityResponse) { + final VersionSet result = new VersionSet(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.VERSION_SET); + + 
EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + VERSION_SET_PROPERTIES_ASPECT_NAME, + (versionSet, dataMap) -> mapVersionSetProperties(context, versionSet, dataMap)); + + return result; + } + + private void mapVersionSetProperties( + @Nullable QueryContext context, @Nonnull VersionSet versionSet, @Nonnull DataMap dataMap) { + com.linkedin.versionset.VersionSetProperties versionProperties = + new com.linkedin.versionset.VersionSetProperties(dataMap); + versionSet.setLatestVersion(UrnToEntityMapper.map(context, versionProperties.getLatest())); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionSetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionSetType.java new file mode 100644 index 00000000000000..ed2beff4530949 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/versioning/VersionSetType.java @@ -0,0 +1,79 @@ +package com.linkedin.datahub.graphql.types.versioning; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.VersionSet; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class 
VersionSetType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(VERSION_SET_PROPERTIES_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.VERSION_SET; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return VersionSet.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List versionSetUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + VERSION_SET_ENTITY_NAME, + new HashSet<>(versionSetUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : versionSetUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(VersionSetMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } +} diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 262d2384d84ada..ca7f89415f6b87 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -156,6 +156,15 @@ type PlatformPrivileges { """ manageBusinessAttributes: Boolean! + """ + Whether the user can create, edit, and delete structured properties. + """ + manageStructuredProperties: Boolean! + + """ + Whether the user can view the manage structured properties page. + """ + viewStructuredPropertiesPage: Boolean! 
} """ @@ -517,6 +526,16 @@ type FeatureFlagsConfig { If turned on, all siblings will be separated with no way to get to a "combined" sibling view """ showSeparateSiblings: Boolean! + + """ + If turned on, show the manage structured properties tab in the govern dropdown + """ + showManageStructuredProperties: Boolean! + + """ + If turned on, exposes the versioning feature by allowing users to link entities in the UI. + """ + entityVersioningEnabled: Boolean! } """ @@ -559,4 +578,4 @@ type DocPropagationSettings { The default doc propagation setting for the platform. """ docColumnPropagation: Boolean -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 049527e5d77e3b..64c79b05745ded 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -262,8 +262,16 @@ type Query { Fetch all Business Attributes """ listBusinessAttributes(input: ListBusinessAttributesInput!): ListBusinessAttributesResult + + """ + Fetch a Data Process Instance by primary key (urn) + """ + dataProcessInstance(urn: String!): DataProcessInstance + + } + """ An ERModelRelationship is a high-level abstraction that dictates what datasets fields are erModelRelationshiped. 
""" @@ -1213,6 +1221,11 @@ enum EntityType { A Business Attribute """ BUSINESS_ATTRIBUTE + + """ + A set of versioned entities, representing a single source / logical entity over time + """ + VERSION_SET } """ @@ -3838,6 +3851,11 @@ enum CorpUserStatus { A User that has been provisioned and logged in """ ACTIVE + + """ + A user that has been suspended + """ + SUSPENDED } union ResolvedActor = CorpUser | CorpGroup @@ -6262,6 +6280,16 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity { """ dataPlatformInstance: DataPlatformInstance + """ + The parent container in which the entity resides + """ + container: Container + + """ + Recursively get the lineage of containers for this entity + """ + parentContainers: ParentContainersResult + """ Granular API for querying edges extending from this entity """ @@ -6444,6 +6472,16 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { """ dataPlatformInstance: DataPlatformInstance + """ + The parent container in which the entity resides + """ + container: Container + + """ + Recursively get the lineage of containers for this entity + """ + parentContainers: ParentContainersResult + """ Additional read write properties associated with the Data Job """ @@ -6556,6 +6594,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { The forms associated with the Dataset """ forms: Forms + + """ + Data Transform Logic associated with the Data Job + """ + dataTransformLogic: DataTransformLogic } """ @@ -6773,6 +6816,26 @@ type DataJobInputOutput { fineGrainedLineages: [FineGrainedLineage!] } +""" +Information about a transformation applied to data assets +""" +type DataTransform { + """ + The transformation may be defined by a query statement + """ + queryStatement: QueryStatement +} + +""" +Information about transformations applied to data assets +""" +type DataTransformLogic { + """ + List of transformations applied + """ + transforms: [DataTransform!]! 
+} + """ Information about individual user usage of a Dataset """ @@ -9827,13 +9890,45 @@ type MLModelGroup implements EntityWithRelationships & Entity & BrowsableEntity privileges: EntityPrivileges } +""" +Properties describing a group of related ML models +""" type MLModelGroupProperties { + """ + Display name of the model group + """ + name: String -description: String + """ + Detailed description of the model group's purpose and contents + """ + description: String - createdAt: Long + """ + When this model group was created + """ + created: AuditStamp + """ + When this model group was last modified + """ + lastModified: AuditStamp + + """ + Version identifier for this model group + """ version: VersionTag + + """ + Custom key-value properties for the model group + """ + customProperties: [CustomPropertiesEntry!] + + """ + Deprecated creation timestamp + @deprecated Use the 'created' field instead + """ + createdAt: Long @deprecated(reason: "Use `created` instead") } """ @@ -9983,40 +10078,103 @@ description: String } type MLMetric { + """ + Name of the metric (e.g. accuracy, precision, recall) + """ name: String + """ + Description of what this metric measures + """ description: String + """ + The computed value of the metric + """ value: String + """ + Timestamp when this metric was recorded + """ createdAt: Long } type MLModelProperties { + """ + The display name of the model used in the UI + """ + name: String + """ + Detailed description of the model's purpose and characteristics + """ description: String - date: Long + """ + When the model was last modified + """ + lastModified: AuditStamp + """ + Version identifier for this model + """ version: String + """ + The type/category of ML model (e.g. 
classification, regression) + """ type: String + """ + Mapping of hyperparameter configurations + """ hyperParameters: HyperParameterMap - hyperParams: [MLHyperParam] + """ + List of hyperparameter settings used to train this model + """ + hyperParams: [MLHyperParam] + """ + Performance metrics from model training + """ trainingMetrics: [MLMetric] + """ + Names of ML features used by this model + """ mlFeatures: [String!] + """ + Tags for categorizing and searching models + """ tags: [String!] + """ + Model groups this model belongs to + """ groups: [MLModelGroup] + """ + Additional custom properties specific to this model + """ customProperties: [CustomPropertiesEntry!] + """ + URL to view this model in external system + """ externalUrl: String + + """ + When this model was created + """ + created: AuditStamp + + """ + Deprecated timestamp for model creation + @deprecated Use 'created' field instead + """ + date: Long @deprecated(reason: "Use `created` instead") } type MLFeatureProperties { @@ -10028,6 +10186,8 @@ type MLFeatureProperties { version: VersionTag sources: [Dataset] + + customProperties: [CustomPropertiesEntry!] } """ @@ -10164,13 +10324,15 @@ type MLPrimaryKey implements EntityWithRelationships & Entity { type MLPrimaryKeyProperties { -description: String + description: String dataType: MLFeatureDataType version: VersionTag sources: [Dataset] + + customProperties: [CustomPropertiesEntry!] } """ @@ -10347,7 +10509,7 @@ type MLModelGroupEditableProperties { type MLFeatureTableProperties { -description: String + description: String mlFeatures: [MLFeature] @@ -12793,3 +12955,87 @@ type CronSchedule { """ timezone: String! } + + +""" +Properties describing a data process instance's execution metadata +""" +type DataProcessInstanceProperties { + """ + The display name of this process instance + """ + name: String! 
+ + """ + URL to view this process instance in the external system + """ + externalUrl: String + + """ + When this process instance was created + """ + created: AuditStamp + + """ + Additional custom properties specific to this process instance + """ + customProperties: [CustomPropertiesEntry!] +} + +""" +Properties specific to an ML model training run instance +""" +type MLTrainingRunProperties { + """ + Unique identifier for this training run + """ + id: String + + """ + List of URLs to access training run outputs (e.g. model artifacts, logs) + """ + outputUrls: [String] + + """ + Hyperparameters used in this training run + """ + hyperParams: [MLHyperParam] + + """ + Performance metrics recorded during this training run + """ + trainingMetrics: [MLMetric] +} + +extend type DataProcessInstance { + + """ + Additional read only properties associated with the Data Job + """ + properties: DataProcessInstanceProperties + + """ + The specific instance of the data platform that this entity belongs to + """ + dataPlatformInstance: DataPlatformInstance + + """ + Sub Types that this entity implements + """ + subTypes: SubTypes + + """ + The parent container in which the entity resides + """ + container: Container + + """ + Recursively get the lineage of containers for this entity + """ + parentContainers: ParentContainersResult + + """ + Additional properties when subtype is Training Run + """ + mlTrainingRunProperties: MLTrainingRunProperties +} diff --git a/datahub-graphql-core/src/main/resources/properties.graphql b/datahub-graphql-core/src/main/resources/properties.graphql index 292381d064f362..ff20caa50bf036 100644 --- a/datahub-graphql-core/src/main/resources/properties.graphql +++ b/datahub-graphql-core/src/main/resources/properties.graphql @@ -49,6 +49,11 @@ type StructuredPropertyEntity implements Entity { """ definition: StructuredPropertyDefinition! 
+ """ + Definition of this structured property including its name + """ + settings: StructuredPropertySettings + """ Granular API for querying edges extending from this entity """ @@ -117,6 +122,36 @@ type StructuredPropertyDefinition { lastModified: ResolvedAuditStamp } +""" +Settings specific to a structured property entity +""" +type StructuredPropertySettings { + """ + Whether or not this asset should be hidden in the main application + """ + isHidden: Boolean! + + """ + Whether or not this asset should be displayed as a search filter + """ + showInSearchFilters: Boolean! + + """ + Whether or not this asset should be displayed in the asset sidebar + """ + showInAssetSummary: Boolean! + + """ + Whether or not this asset should be displayed as an asset badge on other asset's headers + """ + showAsAssetBadge: Boolean! + + """ + Whether or not this asset should be displayed as a column in the schema field table in a Dataset's "Columns" tab. + """ + showInColumnsTable: Boolean! +} + """ An entry for an allowed value for a structured property """ @@ -202,6 +237,11 @@ type StructuredPropertiesEntry { The optional entities associated with the values if the values are entity urns """ valueEntities: [Entity] + + """ + The urn of the entity this property came from for tracking purposes e.g. when sibling nodes are merged together + """ + associatedUrn: String! } """ @@ -330,8 +370,9 @@ input CreateStructuredPropertyInput { """ The unique fully qualified name of this structured property, dot delimited. + This will be required to match the ID of this structured property. """ - qualifiedName: String! + qualifiedName: String """ The optional display name for this property @@ -375,6 +416,11 @@ input CreateStructuredPropertyInput { For example: ["urn:li:entityType:datahub.dataset"] """ entityTypes: [String!]! 
+ + """ + Settings for this structured property + """ + settings: StructuredPropertySettingsInput } """ @@ -455,6 +501,11 @@ input UpdateStructuredPropertyInput { For backwards compatibility, this is append only. """ newEntityTypes: [String!] + + """ + Settings for this structured property + """ + settings: StructuredPropertySettingsInput } """ @@ -477,3 +528,34 @@ input DeleteStructuredPropertyInput { """ urn: String! } + +""" +Settings for a structured property +""" +input StructuredPropertySettingsInput { + """ + Whether or not this asset should be hidden in the main application + """ + isHidden: Boolean + + """ + Whether or not this asset should be displayed as a search filter + """ + showInSearchFilters: Boolean + + """ + Whether or not this asset should be displayed in the asset sidebar + """ + showInAssetSummary: Boolean + + """ + Whether or not this asset should be displayed as an asset badge on other asset's headers + """ + showAsAssetBadge: Boolean + + """ + Whether or not this asset should be displayed as a column in the schema field table in a Dataset's "Columns" tab. + """ + showInColumnsTable: Boolean +} + diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index d0f669f05f9598..d8f17faa3d11c2 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -167,6 +167,16 @@ input SearchFlags { fields to include for custom Highlighting """ customHighlightingFields: [String!] 
+ + """ + Whether or not to fetch and request for structured property facets when doing a search + """ + includeStructuredPropertyFacets: Boolean + + """ + Determines whether to filter out any non-latest entity version if entity is part of a Version Set, default true + """ + filterNonLatestVersions: Boolean } """ @@ -872,6 +882,11 @@ type FacetMetadata { """ displayName: String + """ + Entity corresponding to the facet + """ + entity: Entity + """ Aggregated search result counts by value of the field """ @@ -1487,4 +1502,4 @@ input GroupingCriterion { """ groupingEntityType: EntityType! -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/resources/versioning.graphql b/datahub-graphql-core/src/main/resources/versioning.graphql new file mode 100644 index 00000000000000..4a63463509c84d --- /dev/null +++ b/datahub-graphql-core/src/main/resources/versioning.graphql @@ -0,0 +1,148 @@ +type VersionSet implements Entity { + """ + The primary key of the VersionSet + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + The latest versioned entity linked to in this version set + """ + latestVersion: Entity + + """ + Executes a search on all versioned entities linked to this version set + By default sorts by sortId in descending order + """ + versionsSearch(input: SearchAcrossEntitiesInput!): SearchResults +} + +type VersionProperties { + """ + The linked Version Set entity that ties multiple versioned assets together + """ + versionSet: VersionSet! + + """ + Label for this versioned asset, should be unique within a version set (not enforced) + """ + version: VersionTag! + + """ + Additional version identifiers for this versioned asset. + """ + aliases: [VersionTag!]! 
+ + """ + Comment documenting what this version was created for, changes, or represents + """ + comment: String + + """ + Whether this version is currently the latest in its verison set + """ + isLatest: Boolean! + + """ + Timestamp reflecting when the metadata for this version was created in DataHub + """ + created: ResolvedAuditStamp + + """ + Timestamp reflecting when the metadata for this version was created in DataHub + """ + createdInSource: ResolvedAuditStamp +} + +interface SupportsVersions { + """ + Indicates that this entity is versioned and provides information about the version. + """ + versionProperties: VersionProperties +} + +extend type Dataset implements SupportsVersions { + versionProperties: VersionProperties +} + +extend type MLModel implements SupportsVersions { + versionProperties: VersionProperties +} + +extend type Query { + """ + Fetch a Version Set by its URN + """ + versionSet(urn: String!): VersionSet +} + +""" +Input for linking a versioned entity to a Version Set +""" +input LinkVersionInput { + """ + The target version set + """ + versionSet: String! + + """ + The target versioned entity to link + """ + linkedEntity: String! + + """ + Version Tag label for the version, should be unique within a version set (not enforced) + """ + version: String! 
+ + """ + Optional timestamp from the source system + """ + sourceTimestamp: Long + + """ + Optional creator from the source system, will be converted to an Urn + """ + sourceCreator: String + + """ + Optional comment about the version + """ + comment: String +} + +""" +Input for unlinking a versioned entity from a Version Set +""" +input UnlinkVersionInput { + """ + The target version set + """ + versionSet: String + + """ + The target versioned entity to unlink + """ + unlinkedEntity: String +} + +extend type Mutation { + """ + Link the latest versioned entity to a Version Set + """ + linkAssetVersion(input: LinkVersionInput!): VersionSet + + """ + Unlink a versioned entity from a Version Set + """ + unlinkAssetVersion(input: UnlinkVersionInput!): VersionSet +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/LinkAssetVersionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/LinkAssetVersionResolverTest.java new file mode 100644 index 00000000000000..c2eb92f4d1cd4c --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/LinkAssetVersionResolverTest.java @@ -0,0 +1,101 @@ +package com.linkedin.datahub.graphql.resolvers.entity.versioning; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.LinkVersionInput; +import com.linkedin.metadata.entity.IngestResult; +import com.linkedin.metadata.entity.versioning.EntityVersioningService; +import 
com.linkedin.metadata.entity.versioning.VersionPropertiesInput; +import graphql.schema.DataFetchingEnvironment; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class LinkAssetVersionResolverTest { + + private static final String TEST_VERSION_SET_URN = "urn:li:versionSet:test-version-set"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + + @Test + public void testGetSuccessful() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(true); + + IngestResult mockResult = + IngestResult.builder().urn(Urn.createFromString(TEST_ENTITY_URN)).build(); + + Mockito.when( + mockService.linkLatestVersion( + any(), + eq(UrnUtils.getUrn(TEST_VERSION_SET_URN)), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + any(VersionPropertiesInput.class))) + .thenReturn(ImmutableList.of(mockResult)); + + LinkAssetVersionResolver resolver = new LinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + LinkVersionInput input = new LinkVersionInput(); + input.setVersionSet(TEST_VERSION_SET_URN); + input.setLinkedEntity(TEST_ENTITY_URN); + input.setComment("Test comment"); + input.setVersion("v1"); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertEquals(resolver.get(mockEnv).get().getUrn(), TEST_VERSION_SET_URN); + } + + @Test + public void testGetFeatureFlagDisabled() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(false); + 
+ LinkAssetVersionResolver resolver = new LinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + LinkVersionInput input = new LinkVersionInput(); + input.setVersionSet(TEST_VERSION_SET_URN); + input.setLinkedEntity(TEST_ENTITY_URN); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + + assertThrows(IllegalAccessError.class, () -> resolver.get(mockEnv)); + } + + @Test + public void testGetInvalidVersionSetUrn() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(true); + + LinkAssetVersionResolver resolver = new LinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + LinkVersionInput input = new LinkVersionInput(); + input.setVersionSet("urn:li:dataset:invalid-version-set"); // Invalid URN type + input.setLinkedEntity(TEST_ENTITY_URN); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + + assertThrows(IllegalArgumentException.class, () -> resolver.get(mockEnv)); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/UnlinkAssetVersionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/UnlinkAssetVersionResolverTest.java new file mode 100644 index 00000000000000..e162ce96e627c6 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/versioning/UnlinkAssetVersionResolverTest.java @@ -0,0 +1,123 @@ +package com.linkedin.datahub.graphql.resolvers.entity.versioning; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.UnlinkVersionInput; +import com.linkedin.metadata.entity.versioning.EntityVersioningService; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UnlinkAssetVersionResolverTest { + + private static final String TEST_VERSION_SET_URN = "urn:li:versionSet:test-version-set"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + + @Test + public void testGetSuccessful() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(true); + + Mockito.when( + mockService.unlinkVersion( + any(), + eq(UrnUtils.getUrn(TEST_VERSION_SET_URN)), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)))) + .thenReturn(Collections.emptyList()); + + UnlinkAssetVersionResolver resolver = new UnlinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + UnlinkVersionInput input = new UnlinkVersionInput(); + input.setVersionSet(TEST_VERSION_SET_URN); + input.setUnlinkedEntity(TEST_ENTITY_URN); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertEquals(resolver.get(mockEnv).get(), null); + + Mockito.verify(mockService) + .unlinkVersion( + any(), eq(UrnUtils.getUrn(TEST_VERSION_SET_URN)), 
eq(UrnUtils.getUrn(TEST_ENTITY_URN))); + } + + @Test + public void testGetFeatureFlagDisabled() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(false); + + UnlinkAssetVersionResolver resolver = new UnlinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + UnlinkVersionInput input = new UnlinkVersionInput(); + input.setVersionSet(TEST_VERSION_SET_URN); + input.setUnlinkedEntity(TEST_ENTITY_URN); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + + assertThrows(IllegalAccessError.class, () -> resolver.get(mockEnv)); + } + + @Test + public void testGetInvalidVersionSetUrn() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(true); + + UnlinkAssetVersionResolver resolver = new UnlinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + UnlinkVersionInput input = new UnlinkVersionInput(); + input.setVersionSet("urn:li:dataset:invalid-version-set"); // Invalid URN type + input.setUnlinkedEntity(TEST_ENTITY_URN); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + + assertThrows(IllegalArgumentException.class, () -> resolver.get(mockEnv)); + } + + @Test + public void testGetServiceException() throws Exception { + EntityVersioningService mockService = Mockito.mock(EntityVersioningService.class); + FeatureFlags mockFlags = Mockito.mock(FeatureFlags.class); + + Mockito.when(mockFlags.isEntityVersioning()).thenReturn(true); + + Mockito.doThrow(new RuntimeException("Service 
error")) + .when(mockService) + .unlinkVersion(any(), any(), any()); + + UnlinkAssetVersionResolver resolver = new UnlinkAssetVersionResolver(mockService, mockFlags); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + UnlinkVersionInput input = new UnlinkVersionInput(); + input.setVersionSet(TEST_VERSION_SET_URN); + input.setUnlinkedEntity(TEST_ENTITY_URN); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index 42768b8a2de21b..89d218683e33ec 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -471,7 +471,8 @@ private static EntityClient initMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(Collections.emptyList()))) + Mockito.eq(Collections.emptyList()), + Mockito.eq(null))) .thenReturn(result); return client; } @@ -496,7 +497,8 @@ private static void verifyMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(Collections.emptyList())); + Mockito.eq(Collections.emptyList()), + Mockito.eq(null)); } private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java index 72cdb78542e414..fec2251f92b63f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java @@ -10,11 +10,11 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateStructuredPropertyInput; import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertySettingsInput; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; @@ -36,7 +36,8 @@ public class CreateStructuredPropertyResolverTest { null, null, null, - new ArrayList<>()); + new ArrayList<>(), + null); @Test public void testGetSuccess() throws Exception { @@ -56,7 +57,40 @@ public void testGetSuccess() throws Exception { // Validate that we called ingest Mockito.verify(mockEntityClient, Mockito.times(1)) - .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); + } + + @Test + public void testGetMismatchIdAndQualifiedName() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + CreateStructuredPropertyResolver resolver = + new CreateStructuredPropertyResolver(mockEntityClient); + + CreateStructuredPropertyInput testInput = + new CreateStructuredPropertyInput( + "mismatched", + "io.acryl.test", + "Display 
Name", + "description", + true, + null, + null, + null, + null, + new ArrayList<>(), + null); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate ingest is not called + Mockito.verify(mockEntityClient, Mockito.times(0)) + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); } @Test @@ -75,7 +109,7 @@ public void testGetUnauthorized() throws Exception { // Validate that we did NOT call ingest Mockito.verify(mockEntityClient, Mockito.times(0)) - .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); } @Test @@ -94,7 +128,83 @@ public void testGetFailure() throws Exception { // Validate that ingest was called, but that caused a failure Mockito.verify(mockEntityClient, Mockito.times(1)) - .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); + } + + @Test + public void testGetInvalidSettingsInput() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + CreateStructuredPropertyResolver resolver = + new CreateStructuredPropertyResolver(mockEntityClient); + + // if isHidden is true, other fields should not be true + StructuredPropertySettingsInput settingsInput = new StructuredPropertySettingsInput(); + settingsInput.setIsHidden(true); + settingsInput.setShowAsAssetBadge(true); + + CreateStructuredPropertyInput testInput = + new CreateStructuredPropertyInput( + null, + "io.acryl.test", + "Display Name", + "description", + true, + null, + null, + null, + null, + new ArrayList<>(), + settingsInput); + + // 
Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate ingest is not called + Mockito.verify(mockEntityClient, Mockito.times(0)) + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); + } + + @Test + public void testGetSuccessWithSettings() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + CreateStructuredPropertyResolver resolver = + new CreateStructuredPropertyResolver(mockEntityClient); + + StructuredPropertySettingsInput settingsInput = new StructuredPropertySettingsInput(); + settingsInput.setShowAsAssetBadge(true); + + CreateStructuredPropertyInput testInput = + new CreateStructuredPropertyInput( + null, + "io.acryl.test", + "Display Name", + "description", + true, + null, + null, + null, + null, + new ArrayList<>(), + settingsInput); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + StructuredPropertyEntity prop = resolver.get(mockEnv).get(); + + assertEquals(prop.getUrn(), TEST_STRUCTURED_PROPERTY_URN); + + // Validate that we called ingest + Mockito.verify(mockEntityClient, Mockito.times(1)) + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); } private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolverTest.java new file mode 100644 index 00000000000000..7ecec25708f2d5 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/DeleteStructuredPropertyResolverTest.java @@ -0,0 +1,91 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DeleteStructuredPropertyInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class DeleteStructuredPropertyResolverTest { + private static final String TEST_PROP_URN = "urn:li:structuredProperty:test"; + + private static final DeleteStructuredPropertyInput TEST_INPUT = + new DeleteStructuredPropertyInput(TEST_PROP_URN); + + @Test + public void testGetSuccess() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + DeleteStructuredPropertyResolver resolver = + new DeleteStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Boolean success = 
resolver.get(mockEnv).get(); + assertTrue(success); + + // Validate that we called delete + Mockito.verify(mockEntityClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(UrnUtils.getUrn(TEST_PROP_URN))); + } + + @Test + public void testGetUnauthorized() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + DeleteStructuredPropertyResolver resolver = + new DeleteStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call delete + Mockito.verify(mockEntityClient, Mockito.times(0)) + .deleteEntity(any(), Mockito.eq(UrnUtils.getUrn(TEST_PROP_URN))); + } + + @Test + public void testGetFailure() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false); + DeleteStructuredPropertyResolver resolver = + new DeleteStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that deleteEntity was called, but since it's the thing that failed it was called + // once still + Mockito.verify(mockEntityClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(UrnUtils.getUrn(TEST_PROP_URN))); + } + + private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + if 
(!shouldSucceed) { + Mockito.doThrow(new RemoteInvocationException()).when(client).deleteEntity(any(), any()); + } + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/StructuredPropertyUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/StructuredPropertyUtilsTest.java new file mode 100644 index 00000000000000..0e9d064b3c7af7 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/StructuredPropertyUtilsTest.java @@ -0,0 +1,42 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static org.testng.Assert.*; + +import com.linkedin.metadata.models.StructuredPropertyUtils; +import java.util.UUID; +import org.testng.annotations.Test; + +public class StructuredPropertyUtilsTest { + + @Test + public void testGetIdMismatchedInput() throws Exception { + assertThrows( + IllegalArgumentException.class, + () -> StructuredPropertyUtils.getPropertyId("test1", "test2")); + } + + @Test + public void testGetIdConsistentInput() throws Exception { + assertEquals(StructuredPropertyUtils.getPropertyId("test1", "test1"), "test1"); + } + + @Test + public void testGetIdNullQualifiedName() throws Exception { + assertEquals(StructuredPropertyUtils.getPropertyId("test1", null), "test1"); + } + + @Test + public void testGetIdNullId() throws Exception { + assertEquals(StructuredPropertyUtils.getPropertyId(null, "test1"), "test1"); + } + + @Test + public void testGetIdNullForBoth() throws Exception { + try { + String id = StructuredPropertyUtils.getPropertyId(null, null); + UUID.fromString(id); + } catch (Exception e) { + fail("ID produced is not a UUID"); + } + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java index b818bcfb7d7f4f..2b0e7fd83b7cee 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java @@ -2,20 +2,25 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME; import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertThrows; +import com.linkedin.common.UrnArray; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertySettingsInput; import com.linkedin.datahub.graphql.generated.UpdateStructuredPropertyInput; +import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.structured.StructuredPropertyDefinition; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -33,6 +38,7 @@ public class UpdateStructuredPropertyResolverTest { null, null, null, + null, null); @Test @@ -53,7 +59,7 @@ public void testGetSuccess() throws Exception { // Validate that we called ingest 
Mockito.verify(mockEntityClient, Mockito.times(1)) - .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); } @Test @@ -72,7 +78,7 @@ public void testGetUnauthorized() throws Exception { // Validate that we did NOT call ingest Mockito.verify(mockEntityClient, Mockito.times(0)) - .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); } @Test @@ -91,7 +97,80 @@ public void testGetFailure() throws Exception { // Validate that ingest was not called since there was a get failure before ingesting Mockito.verify(mockEntityClient, Mockito.times(0)) - .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); + } + + @Test + public void testGetInvalidSettingsInput() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + UpdateStructuredPropertyResolver resolver = + new UpdateStructuredPropertyResolver(mockEntityClient); + + // if isHidden is true, other fields should not be true + StructuredPropertySettingsInput settingsInput = new StructuredPropertySettingsInput(); + settingsInput.setIsHidden(true); + settingsInput.setShowInSearchFilters(true); + + final UpdateStructuredPropertyInput testInput = + new UpdateStructuredPropertyInput( + TEST_STRUCTURED_PROPERTY_URN, + "New Display Name", + "new description", + true, + null, + null, + null, + null, + settingsInput); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that ingest was not 
called since there was a get failure before ingesting + Mockito.verify(mockEntityClient, Mockito.times(0)) + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); + } + + @Test + public void testGetValidSettingsInput() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + UpdateStructuredPropertyResolver resolver = + new UpdateStructuredPropertyResolver(mockEntityClient); + + // if isHidden is true, other fields should not be true + StructuredPropertySettingsInput settingsInput = new StructuredPropertySettingsInput(); + settingsInput.setIsHidden(true); + + final UpdateStructuredPropertyInput testInput = + new UpdateStructuredPropertyInput( + TEST_STRUCTURED_PROPERTY_URN, + "New Display Name", + "new description", + true, + null, + null, + null, + null, + settingsInput); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + StructuredPropertyEntity prop = resolver.get(mockEnv).get(); + + assertEquals(prop.getUrn(), TEST_STRUCTURED_PROPERTY_URN); + + // Validate that we called ingest + Mockito.verify(mockEntityClient, Mockito.times(1)) + .batchIngestProposals(any(), Mockito.anyList(), Mockito.eq(false)); } private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { @@ -99,7 +178,11 @@ private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exceptio EntityResponse response = new EntityResponse(); response.setEntityName(Constants.STRUCTURED_PROPERTY_ENTITY_NAME); response.setUrn(UrnUtils.getUrn(TEST_STRUCTURED_PROPERTY_URN)); - response.setAspects(new EnvelopedAspectMap()); + final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + aspectMap.put( + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, + new EnvelopedAspect().setValue(new 
Aspect(createDefinition().data()))); + response.setAspects(aspectMap); if (shouldSucceed) { Mockito.when( client.getV2( @@ -120,4 +203,13 @@ private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exceptio return client; } + + private StructuredPropertyDefinition createDefinition() { + StructuredPropertyDefinition definition = new StructuredPropertyDefinition(); + definition.setDisplayName("test"); + definition.setQualifiedName("test"); + definition.setValueType(UrnUtils.getUrn("urn:li:dataType:datahub.string")); + definition.setEntityTypes(new UrnArray()); + return definition; + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/versioning/VersionsSearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/versioning/VersionsSearchResolverTest.java new file mode 100644 index 00000000000000..3554df074df698 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/versioning/VersionsSearchResolverTest.java @@ -0,0 +1,294 @@ +package com.linkedin.datahub.graphql.resolvers.versioning; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.CriterionUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AndFilterInput; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; +import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; +import com.linkedin.datahub.graphql.generated.SearchFlags; 
+import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.generated.SearchSortInput; +import com.linkedin.datahub.graphql.generated.SortCriterion; +import com.linkedin.datahub.graphql.generated.SortOrder; +import com.linkedin.datahub.graphql.generated.VersionSet; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.metadata.service.ViewService; +import com.linkedin.metadata.utils.CriterionUtils; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.view.DataHubViewDefinition; +import com.linkedin.view.DataHubViewInfo; +import com.linkedin.view.DataHubViewType; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class VersionsSearchResolverTest { + + private static final String VERSION_SET_URN = "urn:li:versionSet:(my_version_set,dataset)"; + private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); + private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); + + private static final SearchAcrossEntitiesInput BASIC_INPUT = + new SearchAcrossEntitiesInput( + List.of(EntityType.DATASET), "", 0, 10, null, null, null, null, null); + + private static final SearchAcrossEntitiesInput COMPLEX_INPUT = + new SearchAcrossEntitiesInput( + List.of(EntityType.CHART, EntityType.DATASET), + "query", + 2, + 5, + null, + 
List.of( + AndFilterInput.builder() + .setAnd( + List.of( + FacetFilterInput.builder() + .setField("field1") + .setValues(List.of("1", "2")) + .build(), + FacetFilterInput.builder() + .setField("field2") + .setValues(List.of("a")) + .build())) + .build(), + AndFilterInput.builder() + .setAnd( + List.of( + FacetFilterInput.builder() + .setField("field3") + .setValues(List.of("3", "4")) + .build(), + FacetFilterInput.builder() + .setField("field4") + .setValues(List.of("b")) + .build())) + .build()), + TEST_VIEW_URN.toString(), + SearchFlags.builder().setSkipCache(true).build(), + SearchSortInput.builder() + .setSortCriteria( + List.of( + SortCriterion.builder() + .setField("sortField1") + .setSortOrder(SortOrder.DESCENDING) + .build(), + SortCriterion.builder() + .setField("sortField2") + .setSortOrder(SortOrder.ASCENDING) + .build())) + .build()); + + @Test + public void testGetSuccessBasic() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(); + ViewService mockViewService = Mockito.mock(ViewService.class); + VersionsSearchResolver resolver = new VersionsSearchResolver(mockEntityClient, mockViewService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(BASIC_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + VersionSet versionSet = new VersionSet(); + versionSet.setUrn(VERSION_SET_URN); + Mockito.when(mockEnv.getSource()).thenReturn(versionSet); + + SearchResults result = resolver.get(mockEnv).get(); + + // Validate the result + assertEquals(result.getSearchResults().size(), 0); + + // Validate that we called the search service correctly + Mockito.verify(mockEntityClient, Mockito.times(1)) + .searchAcrossEntities( + Mockito.argThat( + context -> + !context.getSearchContext().getSearchFlags().isFilterNonLatestVersions()), + 
Mockito.eq(List.of(Constants.DATASET_ENTITY_NAME)), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + CriterionUtils.buildCriterion( + "versionSet", Condition.EQUAL, VERSION_SET_URN)))))), + Mockito.eq(0), + Mockito.eq(10), + Mockito.eq( + List.of( + new com.linkedin.metadata.query.filter.SortCriterion() + .setField(VERSION_SORT_ID_FIELD_NAME) + .setOrder(com.linkedin.metadata.query.filter.SortOrder.DESCENDING))), + any()); + } + + @Test + public void testGetSuccessComplex() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(); + + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + List.of(buildCriterion("viewField", Condition.EQUAL, "test")))))); + DataHubViewInfo viewInfo = + new DataHubViewInfo() + .setName("test") + .setType(DataHubViewType.GLOBAL) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + List.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + ViewService mockViewService = Mockito.mock(ViewService.class); + Mockito.when(mockViewService.getViewInfo(any(), Mockito.eq(TEST_VIEW_URN))) + .thenReturn(viewInfo); + + VersionsSearchResolver resolver = new VersionsSearchResolver(mockEntityClient, mockViewService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(COMPLEX_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + VersionSet versionSet = new VersionSet(); + versionSet.setUrn(VERSION_SET_URN); + 
Mockito.when(mockEnv.getSource()).thenReturn(versionSet); + + SearchResults result = resolver.get(mockEnv).get(); + + // Validate the result + assertEquals(result.getSearchResults().size(), 0); + + // Validate that we called the search service correctly + Mockito.verify(mockEntityClient, Mockito.times(1)) + .searchAcrossEntities( + Mockito.argThat( + context -> + !context.getSearchContext().getSearchFlags().isFilterNonLatestVersions() + && context.getSearchContext().getSearchFlags().isSkipCache()), + Mockito.eq(List.of(Constants.DATASET_ENTITY_NAME)), + Mockito.eq("query"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + CriterionUtils.buildCriterion( + "field1", Condition.EQUAL, "1", "2"), + CriterionUtils.buildCriterion( + "field2", Condition.EQUAL, "a"), + CriterionUtils.buildCriterion( + "versionSet", Condition.EQUAL, VERSION_SET_URN), + CriterionUtils.buildCriterion( + "viewField", Condition.EQUAL, "test"))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + CriterionUtils.buildCriterion( + "field3", Condition.EQUAL, "3", "4"), + CriterionUtils.buildCriterion( + "field4", Condition.EQUAL, "b"), + CriterionUtils.buildCriterion( + "versionSet", Condition.EQUAL, VERSION_SET_URN), + CriterionUtils.buildCriterion( + "viewField", Condition.EQUAL, "test")))))), + Mockito.eq(2), + Mockito.eq(5), + Mockito.eq( + List.of( + new com.linkedin.metadata.query.filter.SortCriterion() + .setField("sortField1") + .setOrder(com.linkedin.metadata.query.filter.SortOrder.DESCENDING), + new com.linkedin.metadata.query.filter.SortCriterion() + .setField("sortField2") + .setOrder(com.linkedin.metadata.query.filter.SortOrder.ASCENDING), + new com.linkedin.metadata.query.filter.SortCriterion() + .setField(VERSION_SORT_ID_FIELD_NAME) + .setOrder(com.linkedin.metadata.query.filter.SortOrder.DESCENDING))), + any()); + } + + @Test + public void testThrowsError() throws Exception { + 
EntityClient mockEntityClient = initMockEntityClient(); + ViewService mockViewService = Mockito.mock(ViewService.class); + + Mockito.when( + mockEntityClient.searchAcrossEntities( + any(), any(), any(), any(), Mockito.anyInt(), Mockito.anyInt(), any(), any())) + .thenThrow(new RemoteInvocationException()); + + VersionsSearchResolver resolver = new VersionsSearchResolver(mockEntityClient, mockViewService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(BASIC_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + VersionSet versionSet = new VersionSet(); + versionSet.setUrn(VERSION_SET_URN); + Mockito.when(mockEnv.getSource()).thenReturn(versionSet); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } + + private EntityClient initMockEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + + Mockito.when( + client.searchAcrossEntities( + any(), + any(), + Mockito.anyString(), + any(), + Mockito.anyInt(), + Mockito.anyInt(), + any(), + Mockito.eq(null))) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/DataTransformLogicMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/DataTransformLogicMapperTest.java new file mode 100644 index 00000000000000..f94738ff049efb --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/DataTransformLogicMapperTest.java @@ -0,0 +1,103 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import static 
org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; + +import com.linkedin.common.DataTransform; +import com.linkedin.common.DataTransformArray; +import com.linkedin.common.DataTransformLogic; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryStatement; +import java.util.Arrays; +import org.testng.annotations.Test; + +public class DataTransformLogicMapperTest { + + @Test + public void testMapWithQueryStatement() throws Exception { + // Create test data + DataTransformLogic input = new DataTransformLogic(); + + // Create a transform with query statement + DataTransform transform1 = new DataTransform(); + QueryStatement statement = new QueryStatement(); + statement.setValue("SELECT * FROM source_table"); + statement.setLanguage(QueryLanguage.SQL); + transform1.setQueryStatement(statement); + + // Create another transform + DataTransform transform2 = new DataTransform(); + QueryStatement statement2 = new QueryStatement(); + statement2.setValue("INSERT INTO target_table SELECT * FROM temp_table"); + statement2.setLanguage(QueryLanguage.SQL); + transform2.setQueryStatement(statement2); + + // Set transforms + input.setTransforms(new DataTransformArray(Arrays.asList(transform1, transform2))); + + // Map the object + com.linkedin.datahub.graphql.generated.DataTransformLogic result = + DataTransformLogicMapper.map(null, input); + + // Verify result + assertNotNull(result); + assertEquals(result.getTransforms().size(), 2); + + // Verify first transform + com.linkedin.datahub.graphql.generated.DataTransform resultTransform1 = + result.getTransforms().get(0); + assertNotNull(resultTransform1.getQueryStatement()); + assertEquals(resultTransform1.getQueryStatement().getValue(), "SELECT * FROM source_table"); + assertEquals(resultTransform1.getQueryStatement().getLanguage().toString(), "SQL"); + + // Verify second transform + com.linkedin.datahub.graphql.generated.DataTransform 
resultTransform2 = + result.getTransforms().get(1); + assertNotNull(resultTransform2.getQueryStatement()); + assertEquals( + resultTransform2.getQueryStatement().getValue(), + "INSERT INTO target_table SELECT * FROM temp_table"); + assertEquals(resultTransform2.getQueryStatement().getLanguage().toString(), "SQL"); + } + + @Test + public void testMapWithoutQueryStatement() throws Exception { + // Create test data + DataTransformLogic input = new DataTransformLogic(); + + // Create a transform without query statement + DataTransform transform = new DataTransform(); + + // Set transforms + input.setTransforms(new DataTransformArray(Arrays.asList(transform))); + + // Map the object + com.linkedin.datahub.graphql.generated.DataTransformLogic result = + DataTransformLogicMapper.map(null, input); + + // Verify result + assertNotNull(result); + assertEquals(result.getTransforms().size(), 1); + + // Verify transform + com.linkedin.datahub.graphql.generated.DataTransform resultTransform = + result.getTransforms().get(0); + assertNull(resultTransform.getQueryStatement()); + } + + @Test + public void testMapWithEmptyTransforms() throws Exception { + // Create test data + DataTransformLogic input = new DataTransformLogic(); + input.setTransforms(new DataTransformArray(Arrays.asList())); + + // Map the object + com.linkedin.datahub.graphql.generated.DataTransformLogic result = + DataTransformLogicMapper.map(null, input); + + // Verify result + assertNotNull(result); + assertEquals(result.getTransforms().size(), 0); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/QueryPropertiesMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/QueryPropertiesMapperTest.java new file mode 100644 index 00000000000000..a0251adca78f9d --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/QueryPropertiesMapperTest.java @@ -0,0 +1,117 @@ 
+package com.linkedin.datahub.graphql.types.common.mappers; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryProperties; +import com.linkedin.query.QuerySource; +import com.linkedin.query.QueryStatement; +import org.testng.annotations.Test; + +public class QueryPropertiesMapperTest { + + @Test + public void testMapWithRequiredFields() throws Exception { + // Create test data + QueryProperties input = new QueryProperties(); + + // Set required fields + QueryStatement statement = new QueryStatement(); + statement.setValue("SELECT * FROM table"); + statement.setLanguage(QueryLanguage.SQL); + input.setStatement(statement); + + input.setSource(QuerySource.MANUAL); + + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + + AuditStamp created = new AuditStamp(); + created.setTime(1000L); + created.setActor(userUrn); + input.setCreated(created); + + AuditStamp lastModified = new AuditStamp(); + lastModified.setTime(2000L); + lastModified.setActor(userUrn); + input.setLastModified(lastModified); + + // Map the object + com.linkedin.datahub.graphql.generated.QueryProperties result = + QueryPropertiesMapper.map(null, input); + + // Verify required fields + assertNotNull(result); + assertEquals(result.getSource().toString(), "MANUAL"); + assertEquals(result.getStatement().getValue(), "SELECT * FROM table"); + assertEquals(result.getStatement().getLanguage().toString(), "SQL"); + + // Verify audit stamps + assertEquals(result.getCreated().getTime().longValue(), 1000L); + assertEquals(result.getCreated().getActor(), userUrn.toString()); + assertEquals(result.getLastModified().getTime().longValue(), 2000L); + assertEquals(result.getLastModified().getActor(), userUrn.toString()); + + // Verify optional fields are null + 
assertNull(result.getName()); + assertNull(result.getDescription()); + assertNull(result.getOrigin()); + } + + @Test + public void testMapWithOptionalFields() throws Exception { + // Create test data + QueryProperties input = new QueryProperties(); + + // Set required fields + QueryStatement statement = new QueryStatement(); + statement.setValue("SELECT * FROM table"); + statement.setLanguage(QueryLanguage.SQL); + input.setStatement(statement); + + input.setSource(QuerySource.SYSTEM); + + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn originUrn = Urn.createFromString("urn:li:dataset:test"); + + AuditStamp created = new AuditStamp(); + created.setTime(1000L); + created.setActor(userUrn); + input.setCreated(created); + + AuditStamp lastModified = new AuditStamp(); + lastModified.setTime(2000L); + lastModified.setActor(userUrn); + input.setLastModified(lastModified); + + // Set optional fields + input.setName("Test Query"); + input.setDescription("Test Description"); + input.setOrigin(originUrn); + + // Map the object + com.linkedin.datahub.graphql.generated.QueryProperties result = + QueryPropertiesMapper.map(null, input); + + // Verify required fields + assertNotNull(result); + assertEquals(result.getSource().toString(), "SYSTEM"); + assertEquals(result.getStatement().getValue(), "SELECT * FROM table"); + assertEquals(result.getStatement().getLanguage().toString(), "SQL"); + + // Verify audit stamps + assertEquals(result.getCreated().getTime().longValue(), 1000L); + assertEquals(result.getCreated().getActor(), userUrn.toString()); + assertEquals(result.getLastModified().getTime().longValue(), 2000L); + assertEquals(result.getLastModified().getActor(), userUrn.toString()); + + // Verify optional fields + assertEquals(result.getName(), "Test Query"); + assertEquals(result.getDescription(), "Test Description"); + assertNotNull(result.getOrigin()); + assertEquals(result.getOrigin().getUrn(), originUrn.toString()); + } +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/TimeStampToAuditStampMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/TimeStampToAuditStampMapperTest.java new file mode 100644 index 00000000000000..4e0dbd7b1733b4 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/TimeStampToAuditStampMapperTest.java @@ -0,0 +1,46 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import static org.testng.Assert.*; + +import com.linkedin.common.TimeStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.AuditStamp; +import org.testng.annotations.Test; + +public class TimeStampToAuditStampMapperTest { + + private static final String TEST_ACTOR_URN = "urn:li:corpuser:testUser"; + private static final long TEST_TIME = 1234567890L; + + @Test + public void testMapWithActor() throws Exception { + TimeStamp input = new TimeStamp(); + input.setTime(TEST_TIME); + input.setActor(Urn.createFromString(TEST_ACTOR_URN)); + + AuditStamp result = TimeStampToAuditStampMapper.map(null, input); + + assertNotNull(result); + assertEquals(result.getTime().longValue(), TEST_TIME); + assertEquals(result.getActor(), TEST_ACTOR_URN); + } + + @Test + public void testMapWithoutActor() { + TimeStamp input = new TimeStamp(); + input.setTime(TEST_TIME); + + AuditStamp result = TimeStampToAuditStampMapper.map(null, input); + + assertNotNull(result); + assertEquals(result.getTime().longValue(), TEST_TIME); + assertNull(result.getActor()); + } + + @Test + public void testMapNull() { + AuditStamp result = TimeStampToAuditStampMapper.map(null, null); + + assertNull(result); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapperTest.java new file 
mode 100644 index 00000000000000..a49f063f94d336 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapperTest.java @@ -0,0 +1,42 @@ +package com.linkedin.datahub.graphql.types.dataflow.mappers; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.DataFlow; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class DataFlowMapperTest { + private static final Urn TEST_DATA_FLOW_URN = + Urn.createFromTuple(Constants.DATA_FLOW_ENTITY_NAME, "dataflow1"); + private static final Urn TEST_CONTAINER_URN = + Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "container1"); + + @Test + public void testMapDataFlowContainer() throws URISyntaxException { + com.linkedin.container.Container input = new com.linkedin.container.Container(); + input.setContainer(TEST_CONTAINER_URN); + + final Map<String, EnvelopedAspect> containerAspect = new HashMap<>(); + containerAspect.put( + Constants.CONTAINER_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATA_FLOW_ENTITY_NAME) + .setUrn(TEST_DATA_FLOW_URN) + .setAspects(new EnvelopedAspectMap(containerAspect)); + + final DataFlow actual = DataFlowMapper.map(null, response); + + Assert.assertEquals(actual.getUrn(), TEST_DATA_FLOW_URN.toString()); + Assert.assertEquals(actual.getContainer().getUrn(), TEST_CONTAINER_URN.toString()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapperTest.java
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapperTest.java new file mode 100644 index 00000000000000..d7fc0f198977eb --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapperTest.java @@ -0,0 +1,42 @@ +package com.linkedin.datahub.graphql.types.datajob.mappers; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.DataJob; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class DataJobMapperTest { + private static final Urn TEST_DATA_JOB_URN = + Urn.createFromTuple(Constants.DATA_JOB_ENTITY_NAME, "datajob1"); + private static final Urn TEST_CONTAINER_URN = + Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "container1"); + + @Test + public void testMapDataJobContainer() throws URISyntaxException { + com.linkedin.container.Container input = new com.linkedin.container.Container(); + input.setContainer(TEST_CONTAINER_URN); + + final Map<String, EnvelopedAspect> containerAspect = new HashMap<>(); + containerAspect.put( + Constants.CONTAINER_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATA_JOB_ENTITY_NAME) + .setUrn(TEST_DATA_JOB_URN) + .setAspects(new EnvelopedAspectMap(containerAspect)); + + final DataJob actual = DataJobMapper.map(null, response); + + Assert.assertEquals(actual.getUrn(), TEST_DATA_JOB_URN.toString()); + Assert.assertEquals(actual.getContainer().getUrn(), TEST_CONTAINER_URN.toString()); + } +} diff --git
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mapper/DataPlatformInstanceAspectMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mapper/DataPlatformInstanceAspectMapperTest.java new file mode 100644 index 00000000000000..479d7340fef945 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mapper/DataPlatformInstanceAspectMapperTest.java @@ -0,0 +1,75 @@ +package com.linkedin.datahub.graphql.types.dataplatforminstance.mapper; + +import static org.testng.Assert.*; + +import com.linkedin.common.urn.DataPlatformUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.DataPlatformInstance; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; +import org.testng.annotations.Test; + +public class DataPlatformInstanceAspectMapperTest { + + private static final String TEST_PLATFORM = "hive"; + private static final String TEST_INSTANCE = "prod"; + private static final String TEST_PLATFORM_URN = "urn:li:dataPlatform:" + TEST_PLATFORM; + private static final String TEST_INSTANCE_URN = + String.format( + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:%s,%s)", TEST_PLATFORM, TEST_INSTANCE); + + @Test + public void testMapWithInstance() throws Exception { + // Create test input + com.linkedin.common.DataPlatformInstance input = new com.linkedin.common.DataPlatformInstance(); + DataPlatformUrn platformUrn = new DataPlatformUrn(TEST_PLATFORM); + Urn instanceUrn = Urn.createFromString(TEST_INSTANCE_URN); + + input.setPlatform(platformUrn); + input.setInstance(instanceUrn); + + // Map and verify + DataPlatformInstance result = DataPlatformInstanceAspectMapper.map(null, input); + + assertNotNull(result); + assertEquals(result.getType(), EntityType.DATA_PLATFORM_INSTANCE); + 
assertEquals(result.getUrn(), TEST_INSTANCE_URN); + + // Verify platform mapping + assertNotNull(result.getPlatform()); + assertEquals(result.getPlatform().getType(), EntityType.DATA_PLATFORM); + assertEquals(result.getPlatform().getUrn(), TEST_PLATFORM_URN); + } + + @Test + public void testMapWithoutInstance() throws Exception { + // Create test input with only platform + com.linkedin.common.DataPlatformInstance input = new com.linkedin.common.DataPlatformInstance(); + DataPlatformUrn platformUrn = new DataPlatformUrn(TEST_PLATFORM); + input.setPlatform(platformUrn); + + // Map and verify + DataPlatformInstance result = DataPlatformInstanceAspectMapper.map(null, input); + + assertNotNull(result); + assertNull(result.getType()); // Type should be null when no instance + assertNull(result.getUrn()); // URN should be null when no instance + + // Verify platform is still mapped correctly + assertNotNull(result.getPlatform()); + assertEquals(result.getPlatform().getType(), EntityType.DATA_PLATFORM); + assertEquals(result.getPlatform().getUrn(), TEST_PLATFORM_URN); + } + + @Test(expectedExceptions = NullPointerException.class) + public void testMapNull() { + DataPlatformInstanceAspectMapper.map(null, null); + } + + @Test + public void testSingleton() { + assertNotNull(DataPlatformInstanceAspectMapper.INSTANCE); + assertSame( + DataPlatformInstanceAspectMapper.INSTANCE, DataPlatformInstanceAspectMapper.INSTANCE); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataprocessinst/DataProcessInstanceTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataprocessinst/DataProcessInstanceTypeTest.java new file mode 100644 index 00000000000000..437c74ab669146 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataprocessinst/DataProcessInstanceTypeTest.java @@ -0,0 +1,246 @@ +package com.linkedin.datahub.graphql.types.dataprocessinst; + +import static 
com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.FabricType; +import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.DataPlatformUrn; +import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.container.Container; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.DataProcessInstance; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.dataprocess.DataProcessInstanceInput; +import com.linkedin.dataprocess.DataProcessInstanceOutput; +import com.linkedin.dataprocess.DataProcessInstanceProperties; +import com.linkedin.dataprocess.DataProcessInstanceRelationships; +import com.linkedin.dataprocess.DataProcessInstanceRunEvent; +import com.linkedin.dataprocess.DataProcessRunStatus; +import com.linkedin.dataprocess.DataProcessType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.DataProcessInstanceKey; +import com.linkedin.ml.metadata.MLTrainingRunProperties; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.test.TestResult; +import com.linkedin.test.TestResultArray; +import com.linkedin.test.TestResultType; +import 
com.linkedin.test.TestResults; +import graphql.execution.DataFetcherResult; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class DataProcessInstanceTypeTest { + + private static final String TEST_INSTANCE_URN = + "urn:li:dataProcessInstance:(test-workflow,test-instance-1)"; + private static final String TEST_DPI_1_URN = "urn:li:dataProcessInstance:id-1"; + private static final DatasetUrn DATASET_URN = + new DatasetUrn(new DataPlatformUrn("kafka"), "dataset1", FabricType.TEST); + private static final Urn DPI_URN_REL = UrnUtils.getUrn("urn:li:dataProcessInstance:id-2"); + private static final DataProcessInstanceKey TEST_DPI_1_KEY = + new DataProcessInstanceKey().setId("id-1"); + private static final DataProcessInstanceProperties TEST_DPI_1_PROPERTIES = + new DataProcessInstanceProperties().setName("Test DPI").setType(DataProcessType.STREAMING); + private static final DataProcessInstanceInput TEST_DPI_1_DPI_INPUT = + new DataProcessInstanceInput().setInputs(new UrnArray(ImmutableList.of(DATASET_URN))); + private static final DataProcessInstanceOutput TEST_DPI_1_DPI_OUTPUT = + new DataProcessInstanceOutput().setOutputs(new UrnArray(ImmutableList.of(DATASET_URN))); + private static final DataProcessInstanceRelationships TEST_DPI_1_DPI_RELATIONSHIPS = + new DataProcessInstanceRelationships() + .setParentInstance(DPI_URN_REL) + .setUpstreamInstances(new UrnArray(ImmutableList.of(DPI_URN_REL))) + .setParentTemplate(DPI_URN_REL); + private static final DataProcessInstanceRunEvent TEST_DPI_1_DPI_RUN_EVENT = + new DataProcessInstanceRunEvent().setStatus(DataProcessRunStatus.COMPLETE); + private static final DataPlatformInstance TEST_DPI_1_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(new DataPlatformUrn("kafka")); + private static final Status TEST_DPI_1_STATUS = new Status().setRemoved(false); + private static final TestResults 
TEST_DPI_1_TEST_RESULTS = + new TestResults() + .setPassing( + new TestResultArray( + ImmutableList.of( + new TestResult() + .setTest(UrnUtils.getUrn("urn:li:test:123")) + .setType(TestResultType.SUCCESS)))) + .setFailing(new TestResultArray()); + private static final SubTypes TEST_DPI_1_SUB_TYPES = + new SubTypes().setTypeNames(new StringArray("subtype1")); + private static final Container TEST_DPI_1_CONTAINER = + new Container().setContainer(UrnUtils.getUrn("urn:li:container:123")); + private static final MLTrainingRunProperties ML_TRAINING_RUN_PROPERTIES = + new MLTrainingRunProperties().setId("mytrainingrun"); + + private static final String TEST_DPI_2_URN = "urn:li:dataProcessInstance:id-2"; + + @Test + public void testBatchLoadFull() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + + Urn dpiUrn1 = Urn.createFromString(TEST_DPI_1_URN); + Urn dpiUrn2 = Urn.createFromString(TEST_DPI_2_URN); + + Map<String, EnvelopedAspect> aspectMap = new HashMap<>(); + aspectMap.put( + Constants.DATA_PROCESS_INSTANCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_KEY.data()))); + aspectMap.put( + Constants.DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_PROPERTIES.data()))); + aspectMap.put( + Constants.DATA_PROCESS_INSTANCE_INPUT_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_DPI_INPUT.data()))); + aspectMap.put( + Constants.DATA_PROCESS_INSTANCE_OUTPUT_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_DPI_OUTPUT.data()))); + aspectMap.put( + Constants.DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_DPI_RELATIONSHIPS.data()))); + aspectMap.put( + Constants.DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_DPI_RUN_EVENT.data()))); + aspectMap.put( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect().setValue(new
Aspect(TEST_DPI_1_DATA_PLATFORM_INSTANCE.data()))); + aspectMap.put( + Constants.STATUS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_STATUS.data()))); + aspectMap.put( + Constants.TEST_RESULTS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_TEST_RESULTS.data()))); + aspectMap.put( + Constants.SUB_TYPES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_SUB_TYPES.data()))); + aspectMap.put( + Constants.CONTAINER_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DPI_1_CONTAINER.data()))); + aspectMap.put( + Constants.ML_TRAINING_RUN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(ML_TRAINING_RUN_PROPERTIES.data()))); + + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(dpiUrn1, dpiUrn2))), + Mockito.eq(DataProcessInstanceType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + dpiUrn1, + new EntityResponse() + .setEntityName(Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME) + .setUrn(dpiUrn1) + .setAspects(new EnvelopedAspectMap(aspectMap)))); + + FeatureFlags mockFeatureFlags = Mockito.mock(FeatureFlags.class); + Mockito.when(mockFeatureFlags.isDataProcessInstanceEntityEnabled()).thenReturn(true); + + DataProcessInstanceType type = new DataProcessInstanceType(client, mockFeatureFlags); + + QueryContext mockContext = getMockAllowContext(); + List<DataFetcherResult<DataProcessInstance>> result = + type.batchLoad(ImmutableList.of(TEST_DPI_1_URN, TEST_DPI_2_URN), mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(dpiUrn1, dpiUrn2)), + Mockito.eq(DataProcessInstanceType.ASPECTS_TO_FETCH)); + + assertEquals(result.size(), 2); + + DataProcessInstance dpi1 = result.get(0).getData(); + assertEquals(dpi1.getUrn(), TEST_DPI_1_URN); + assertEquals(dpi1.getName(), "Test DPI"); +
assertEquals(dpi1.getType(), EntityType.DATA_PROCESS_INSTANCE); + + // Assert second element is null + assertNull(result.get(1)); + } + + @Test + public void testBatchLoad() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + FeatureFlags mockFeatureFlags = Mockito.mock(FeatureFlags.class); + Mockito.when(mockFeatureFlags.isDataProcessInstanceEntityEnabled()).thenReturn(true); + + DataProcessInstanceType type = new DataProcessInstanceType(mockClient, mockFeatureFlags); + + List> result = + type.batchLoad(ImmutableList.of(TEST_INSTANCE_URN), getMockAllowContext()); + + assertEquals(result.size(), 1); + } + + @Test + public void testBatchLoadFeatureFlagDisabled() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + FeatureFlags mockFeatureFlags = Mockito.mock(FeatureFlags.class); + Mockito.when(mockFeatureFlags.isDataProcessInstanceEntityEnabled()).thenReturn(false); + + DataProcessInstanceType type = new DataProcessInstanceType(mockClient, mockFeatureFlags); + + List> result = + type.batchLoad(ImmutableList.of(TEST_INSTANCE_URN), getMockAllowContext()); + + assertEquals(result.size(), 0); + + Mockito.verify(mockClient, Mockito.never()) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); + } + + @Test(expectedExceptions = RuntimeException.class) + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + FeatureFlags mockFeatureFlags = Mockito.mock(FeatureFlags.class); + Mockito.when(mockFeatureFlags.isDataProcessInstanceEntityEnabled()).thenReturn(true); + + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); + + DataProcessInstanceType type = new DataProcessInstanceType(mockClient, mockFeatureFlags); + type.batchLoad(ImmutableList.of(TEST_INSTANCE_URN), getMockAllowContext()); + } + + @Test + public void testGetType() 
{ + EntityClient mockClient = Mockito.mock(EntityClient.class); + FeatureFlags mockFeatureFlags = Mockito.mock(FeatureFlags.class); + DataProcessInstanceType type = new DataProcessInstanceType(mockClient, mockFeatureFlags); + + assertEquals(type.type(), EntityType.DATA_PROCESS_INSTANCE); + } + + @Test + public void testObjectClass() { + EntityClient mockClient = Mockito.mock(EntityClient.class); + FeatureFlags mockFeatureFlags = Mockito.mock(FeatureFlags.class); + DataProcessInstanceType type = new DataProcessInstanceType(mockClient, mockFeatureFlags); + + assertEquals(type.objectClass(), DataProcessInstance.class); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapperTest.java new file mode 100644 index 00000000000000..cd9d58b54e6b3a --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapperTest.java @@ -0,0 +1,128 @@ +package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.url.Url; +import com.linkedin.common.urn.Urn; +import com.linkedin.container.Container; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.generated.DataProcessInstance; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.dataprocess.DataProcessInstanceProperties; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import 
com.linkedin.ml.metadata.MLTrainingRunProperties; +import java.util.HashMap; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class DataProcessInstanceMapperTest { + + private static final String TEST_PLATFORM_URN = "urn:li:dataPlatform:kafka"; + private static final String TEST_INSTANCE_URN = + "urn:li:dataProcessInstance:(test-workflow,test-instance)"; + private static final String TEST_CONTAINER_URN = "urn:li:container:testContainer"; + private static final String TEST_EXTERNAL_URL = "https://example.com/process"; + private static final String TEST_NAME = "Test Process Instance"; + + private EntityResponse entityResponse; + private Urn urn; + + @BeforeMethod + public void setup() throws Exception { + urn = Urn.createFromString(TEST_INSTANCE_URN); + entityResponse = new EntityResponse(); + entityResponse.setUrn(urn); + entityResponse.setAspects(new EnvelopedAspectMap(new HashMap<>())); + } + + @Test + public void testMapBasicFields() throws Exception { + DataProcessInstance instance = DataProcessInstanceMapper.map(null, entityResponse); + + assertNotNull(instance); + assertEquals(instance.getUrn(), urn.toString()); + assertEquals(instance.getType(), EntityType.DATA_PROCESS_INSTANCE); + } + + @Test + public void testMapDataProcessProperties() throws Exception { + // Create DataProcessInstanceProperties + DataProcessInstanceProperties properties = new DataProcessInstanceProperties(); + properties.setName(TEST_NAME); + properties.setExternalUrl(new Url(TEST_EXTERNAL_URL)); + + // Add properties aspect + addAspect(Constants.DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, properties); + + DataProcessInstance instance = DataProcessInstanceMapper.map(null, entityResponse); + + assertNotNull(instance.getProperties()); + assertEquals(instance.getName(), TEST_NAME); + assertEquals(instance.getExternalUrl(), TEST_EXTERNAL_URL); + } + + @Test + public void testMapPlatformInstance() throws Exception { + // Create DataPlatformInstance + 
DataPlatformInstance platformInstance = new DataPlatformInstance(); + platformInstance.setPlatform(Urn.createFromString(TEST_PLATFORM_URN)); + + // Add platform instance aspect + addAspect(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, platformInstance); + + DataProcessInstance instance = DataProcessInstanceMapper.map(null, entityResponse); + + assertNotNull(instance.getDataPlatformInstance()); + assertNotNull(instance.getDataPlatformInstance().getPlatform()); + assertEquals(instance.getDataPlatformInstance().getPlatform().getUrn(), TEST_PLATFORM_URN); + assertEquals( + instance.getDataPlatformInstance().getPlatform().getType(), EntityType.DATA_PLATFORM); + } + + @Test + public void testMapContainer() throws Exception { + // Create Container aspect + Container container = new Container(); + container.setContainer(Urn.createFromString(TEST_CONTAINER_URN)); + + // Add container aspect + addAspect(Constants.CONTAINER_ASPECT_NAME, container); + + DataProcessInstance instance = DataProcessInstanceMapper.map(null, entityResponse); + + assertNotNull(instance.getContainer()); + assertEquals(instance.getContainer().getUrn(), TEST_CONTAINER_URN); + assertEquals(instance.getContainer().getType(), EntityType.CONTAINER); + } + + @Test + public void testMapMLTrainingProperties() throws Exception { + // Create MLTrainingRunProperties + MLTrainingRunProperties trainingProperties = new MLTrainingRunProperties(); + trainingProperties.setId("test-run-id"); + trainingProperties.setOutputUrls(new StringArray("s3://test-bucket/model")); + + // Add ML training properties aspect + addAspect(Constants.ML_TRAINING_RUN_PROPERTIES_ASPECT_NAME, trainingProperties); + + DataProcessInstance instance = DataProcessInstanceMapper.map(null, entityResponse); + + assertNotNull(instance); + assertEquals(instance.getMlTrainingRunProperties().getId(), "test-run-id"); + assertEquals( + instance.getMlTrainingRunProperties().getOutputUrls().get(0), "s3://test-bucket/model"); + } + + private void 
addAspect(String aspectName, RecordTemplate aspect) { + EnvelopedAspect envelopedAspect = new EnvelopedAspect(); + envelopedAspect.setValue(new Aspect(aspect.data())); + entityResponse.getAspects().put(aspectName, envelopedAspect); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapperTest.java new file mode 100644 index 00000000000000..79cc7725b1fc7f --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapperTest.java @@ -0,0 +1,20 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static org.testng.Assert.*; + +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import org.testng.annotations.Test; + +public class EntityTypeMapperTest { + + @Test + public void testGetType() throws Exception { + assertEquals(EntityTypeMapper.getType(Constants.DATASET_ENTITY_NAME), EntityType.DATASET); + } + + @Test + public void testGetName() throws Exception { + assertEquals(EntityTypeMapper.getName(EntityType.DATASET), Constants.DATASET_ENTITY_NAME); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapperTest.java new file mode 100644 index 00000000000000..ed16226d0685ee --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapperTest.java @@ -0,0 +1,30 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static org.testng.Assert.*; + +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import org.testng.annotations.Test; + +public class EntityTypeUrnMapperTest { + + @Test + public void 
testGetName() throws Exception { + assertEquals( + EntityTypeUrnMapper.getName("urn:li:entityType:datahub.dataset"), + Constants.DATASET_ENTITY_NAME); + } + + @Test + public void testGetEntityType() throws Exception { + assertEquals( + EntityTypeUrnMapper.getEntityType("urn:li:entityType:datahub.dataset"), EntityType.DATASET); + } + + @Test + public void testGetEntityTypeUrn() throws Exception { + assertEquals( + EntityTypeUrnMapper.getEntityTypeUrn(Constants.DATASET_ENTITY_NAME), + "urn:li:entityType:datahub.dataset"); + } +} diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index b783efa09713d1..a3b2e9ad6b3e22 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -5,6 +5,7 @@ plugins { } apply from: "../gradle/versioning/versioning.gradle" +apply from: "../gradle/coverage/java-coverage.gradle" ext { docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry @@ -59,7 +60,7 @@ dependencies { // mock internal schema registry implementation externalDependency.kafkaAvroSerde implementation externalDependency.kafkaAvroSerializer - implementation "org.apache.kafka:kafka_2.12:3.7.1" + implementation "org.apache.kafka:kafka_2.13:3.7.2" implementation externalDependency.slf4jApi compileOnly externalDependency.lombok diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 661717c6309cfc..d0493019a40af2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -13,6 +13,7 @@ import com.linkedin.gms.factory.kafka.common.TopicConventionFactory; import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import 
com.linkedin.metadata.aspect.CachingAspectRetriever; import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; @@ -186,13 +187,15 @@ protected OperationContext javaSystemOperationContext( components.getIndexConvention(), RetrieverContext.builder() .aspectRetriever(entityServiceAspectRetriever) + .cachingAspectRetriever(CachingAspectRetriever.EMPTY) .graphRetriever(systemGraphRetriever) .searchRetriever(searchServiceSearchRetriever) .build(), ValidationContext.builder() .alternateValidation( configurationProvider.getFeatureFlags().isAlternateMCPValidation()) - .build()); + .build(), + true); entityServiceAspectRetriever.setSystemOperationContext(systemOperationContext); systemGraphRetriever.setSystemOperationContext(systemOperationContext); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 4d53b603c1eaff..1e5cd6cdb24174 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -180,7 +180,7 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { aspectRecord = EntityUtils.toSystemAspect( - context.opContext().getRetrieverContext().get(), aspect.toEntityAspect()) + context.opContext().getRetrieverContext(), aspect.toEntityAspect()) .get() .getRecordTemplate(); } catch (Exception e) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java index cd7947ce3c11aa..56feffd211bcd7 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java @@ -113,8 +113,7 @@ public Function executable() { List, SystemAspect>> futures; futures = EntityUtils.toSystemAspectFromEbeanAspects( - opContext.getRetrieverContext().get(), - batch.collect(Collectors.toList())) + opContext.getRetrieverContext(), batch.collect(Collectors.toList())) .stream() .map( systemAspect -> { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/bootstrapmcps/BootstrapMCPUtil.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/bootstrapmcps/BootstrapMCPUtil.java index 4cc3edff3eb52d..5b807c6c450afb 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/bootstrapmcps/BootstrapMCPUtil.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/bootstrapmcps/BootstrapMCPUtil.java @@ -100,8 +100,8 @@ static AspectsBatch generateAspectBatch( .collect(Collectors.toList()); return AspectsBatchImpl.builder() - .mcps(mcps, auditStamp, opContext.getRetrieverContext().get()) - .retrieverContext(opContext.getRetrieverContext().get()) + .mcps(mcps, auditStamp, opContext.getRetrieverContext()) + .retrieverContext(opContext.getRetrieverContext()) .build(); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/dataprocessinstances/BackfillDataProcessInstancesHasRunEventsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/dataprocessinstances/BackfillDataProcessInstancesHasRunEventsStep.java index 55cdcae931ab5b..1bdea10123999a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/dataprocessinstances/BackfillDataProcessInstancesHasRunEventsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/dataprocessinstances/BackfillDataProcessInstancesHasRunEventsStep.java @@ -2,6 +2,8 @@ import 
static com.linkedin.metadata.Constants.*; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Throwables; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.upgrade.UpgradeContext; @@ -23,8 +25,6 @@ import java.util.Set; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; -import org.codehaus.jackson.node.JsonNodeFactory; -import org.codehaus.jackson.node.ObjectNode; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.RequestOptions; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/schemafield/GenerateSchemaFieldsFromSchemaMetadataStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/schemafield/GenerateSchemaFieldsFromSchemaMetadataStep.java index 55bc8edbf6a768..de03538907432f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/schemafield/GenerateSchemaFieldsFromSchemaMetadataStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/schemafield/GenerateSchemaFieldsFromSchemaMetadataStep.java @@ -168,13 +168,13 @@ public Function executable() { AspectsBatch aspectsBatch = AspectsBatchImpl.builder() - .retrieverContext(opContext.getRetrieverContext().get()) + .retrieverContext(opContext.getRetrieverContext()) .items( batch .flatMap( ebeanAspectV2 -> EntityUtils.toSystemAspectFromEbeanAspects( - opContext.getRetrieverContext().get(), + opContext.getRetrieverContext(), Set.of(ebeanAspectV2)) .stream()) .map( @@ -189,11 +189,7 @@ public Function executable() { .auditStamp(systemAspect.getAuditStamp()) .systemMetadata( withAppSource(systemAspect.getSystemMetadata())) - .build( - opContext - .getRetrieverContext() - .get() - .getAspectRetriever())) + .build(opContext.getAspectRetriever())) .collect(Collectors.toList())) .build(); diff --git 
a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/schemafield/GenerateSchemaFieldsFromSchemaMetadataStepTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/schemafield/GenerateSchemaFieldsFromSchemaMetadataStepTest.java index 3a2728b4e1d3d6..04b1095e770e0e 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/schemafield/GenerateSchemaFieldsFromSchemaMetadataStepTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/schemafield/GenerateSchemaFieldsFromSchemaMetadataStepTest.java @@ -22,7 +22,6 @@ import com.linkedin.upgrade.DataHubUpgradeState; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.RetrieverContext; -import java.util.Optional; import java.util.stream.Stream; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -48,7 +47,7 @@ public void setup() { step = new GenerateSchemaFieldsFromSchemaMetadataStep( mockOpContext, mockEntityService, mockAspectDao, 10, 100, 1000); - when(mockOpContext.getRetrieverContext()).thenReturn(Optional.of(mockRetrieverContext)); + when(mockOpContext.getRetrieverContext()).thenReturn(mockRetrieverContext); } /** Test to verify the correct step ID is returned. 
*/ diff --git a/datahub-web-react/.storybook/DocTemplate.mdx b/datahub-web-react/.storybook/DocTemplate.mdx new file mode 100644 index 00000000000000..9ea1250075e11f --- /dev/null +++ b/datahub-web-react/.storybook/DocTemplate.mdx @@ -0,0 +1,42 @@ +import React from 'react'; + +import { ThemeProvider } from 'styled-components'; +import { GlobalStyle } from './styledComponents'; + +import { Meta, Title, Subtitle, Description, Primary, Controls, Stories } from '@storybook/blocks'; +import { CodeBlock } from '../src/alchemy-components/.docs/mdx-components'; + +{/* + * 👇 The isTemplate property is required to tell Storybook that this is a template + * See https://storybook.js.org/docs/api/doc-block-meta + * to learn how to use +*/} + + + + + + + + + <Subtitle /> + + <div className="docsDescription"> + <Description /> + </div> + + <br /> + + ### Import + + <CodeBlock /> + + <br/> + + ### Customize + + <Primary /> + <Controls /> + + <Stories /> +</ThemeProvider> \ No newline at end of file diff --git a/datahub-web-react/.storybook/main.js b/datahub-web-react/.storybook/main.js new file mode 100644 index 00000000000000..2b92dffd88eb3a --- /dev/null +++ b/datahub-web-react/.storybook/main.js @@ -0,0 +1,25 @@ +// Docs for badges: https://storybook.js.org/addons/@geometricpanda/storybook-addon-badges + +export default { + framework: '@storybook/react-vite', + features: { + buildStoriesJson: true, + }, + core: { + disableTelemetry: true, + }, + stories: [ + '../src/alchemy-components/.docs/*.mdx', + '../src/alchemy-components/components/**/*.stories.@(js|jsx|mjs|ts|tsx)' + ], + addons: [ + '@storybook/addon-onboarding', + '@storybook/addon-essentials', + '@storybook/addon-interactions', + '@storybook/addon-links', + '@geometricpanda/storybook-addon-badges', + ], + typescript: { + reactDocgen: 'react-docgen-typescript', + }, +} \ No newline at end of file diff --git a/datahub-web-react/.storybook/manager-head.html b/datahub-web-react/.storybook/manager-head.html new file mode 
100644 index 00000000000000..98e6a2895f45c7 --- /dev/null +++ b/datahub-web-react/.storybook/manager-head.html @@ -0,0 +1,33 @@ +<style type="text/css"> + /* Regular */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 400; + src: url('../src/fonts/Mulish-Regular.ttf') format('truetype'); + } + + /* Medium */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 500; + src: url('../src/fonts/Mulish-Medium.ttf') format('truetype'); + } + + /* SemiBold */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 600; + src: url('../src/fonts/Mulish-SemiBold.ttf') format('truetype'); + } + + /* Bold */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 700; + src: url('../src/fonts/Mulish-Bold.ttf') format('truetype'); + } +</style> \ No newline at end of file diff --git a/datahub-web-react/.storybook/manager.js b/datahub-web-react/.storybook/manager.js new file mode 100644 index 00000000000000..6e9c62dd96c23f --- /dev/null +++ b/datahub-web-react/.storybook/manager.js @@ -0,0 +1,15 @@ +import './storybook-theme.css'; + +import { addons } from '@storybook/manager-api'; +import acrylTheme from './storybook-theme.js'; + +// Theme setup +addons.setConfig({ + theme: acrylTheme, +}); + +// Favicon +const link = document.createElement('link'); +link.setAttribute('rel', 'shortcut icon'); +link.setAttribute('href', 'https://www.acryldata.io/icons/favicon.ico'); +document.head.appendChild(link); \ No newline at end of file diff --git a/datahub-web-react/.storybook/preview-head.html b/datahub-web-react/.storybook/preview-head.html new file mode 100644 index 00000000000000..98e6a2895f45c7 --- /dev/null +++ b/datahub-web-react/.storybook/preview-head.html @@ -0,0 +1,33 @@ +<style type="text/css"> + /* Regular */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 400; + src: url('../src/fonts/Mulish-Regular.ttf') format('truetype'); + } + + /* Medium */ + 
@font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 500; + src: url('../src/fonts/Mulish-Medium.ttf') format('truetype'); + } + + /* SemiBold */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 600; + src: url('../src/fonts/Mulish-SemiBold.ttf') format('truetype'); + } + + /* Bold */ + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 700; + src: url('../src/fonts/Mulish-Bold.ttf') format('truetype'); + } +</style> \ No newline at end of file diff --git a/datahub-web-react/.storybook/preview.js b/datahub-web-react/.storybook/preview.js new file mode 100644 index 00000000000000..a497ce7bccf3c8 --- /dev/null +++ b/datahub-web-react/.storybook/preview.js @@ -0,0 +1,84 @@ +import './storybook-theme.css'; +// FYI: import of antd styles required to show components based on it correctly +import 'antd/dist/antd.css'; + +import { BADGE, defaultBadgesConfig } from '@geometricpanda/storybook-addon-badges'; +import DocTemplate from './DocTemplate.mdx'; + +const preview = { + tags: ['!dev', 'autodocs'], + parameters: { + previewTabs: { + 'storybook/docs/panel': { index: -1 }, + }, + controls: { + matchers: { + color: /(background|color)$/i, + date: /Date$/i, + }, + }, + options: { + storySort: { + method: 'alphabetical', + order: [ + // Order of Docs Pages + 'Introduction', + 'Style Guide', + 'Design Tokens', + 'Style Utilities', + 'Icons', + + // Order of Components + 'Layout', + 'Forms', + 'Data Display', + 'Feedback', + 'Typography', + 'Overlay', + 'Disclosure', + 'Navigation', + 'Media', + 'Other', + ], + locales: '', + }, + }, + docs: { + page: DocTemplate, + toc: { + disable: false, + }, + docs: { + source: { + format: true, + }, + }, + }, + + // Reconfig the premade badges with better titles + badgesConfig: { + stable: { + ...defaultBadgesConfig[BADGE.STABLE], + title: 'Stable', + tooltip: 'This component is stable but may have frequent changes. 
Use at own discretion.', + }, + productionReady: { + ...defaultBadgesConfig[BADGE.STABLE], + title: 'Production Ready', + tooltip: 'This component is production ready and has been tested in a production environment.', + }, + WIP: { + ...defaultBadgesConfig[BADGE.BETA], + title: 'WIP', + tooltip: 'This component is a work in progress and may not be fully functional or tested.', + }, + readyForDesignReview: { + ...defaultBadgesConfig[BADGE.NEEDS_REVISION], + title: 'Ready for Design Review', + tooltip: 'This component is ready for design review and feedback.', + }, + }, + }, +}; + +export default preview; diff --git a/datahub-web-react/.storybook/storybook-logo.svg b/datahub-web-react/.storybook/storybook-logo.svg new file mode 100644 index 00000000000000..5cc86813b59336 --- /dev/null +++ b/datahub-web-react/.storybook/storybook-logo.svg @@ -0,0 +1 @@ +<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 116.71 125.19"><defs><style>.cls-1{fill:#08303a;}.cls-2{fill:#11696b;}.cls-3{fill:#20d3bd;}</style></defs><g id="artwork"><path class="cls-1" d="M96.39,34.23,79.87,11.08a26.43,26.43,0,0,0-43,0L20.32,34.23A26.42,26.42,0,0,0,41.83,76h33A26.42,26.42,0,0,0,96.39,34.23ZM74.87,68h-33a18.42,18.42,0,0,1-15-29.12L43.35,15.72a18.43,18.43,0,0,1,30,0L89.87,38.88A18.42,18.42,0,0,1,74.87,68Z"/><path class="cls-2" d="M105.89,72.32,73,26.24a18,18,0,0,0-29.31,0L10.82,72.32a18,18,0,0,0,14.65,28.46H91.24a18,18,0,0,0,14.65-28.46ZM91.24,92.78H25.47A10,10,0,0,1,17.33,77L50.21,30.88a10,10,0,0,1,16.28,0L99.38,77A10,10,0,0,1,91.24,92.78Z"/><path class="cls-3" d="M114.83,109.26,66.56,41.61a10.07,10.07,0,0,0-16.41,0L1.88,109.26a10.08,10.08,0,0,0,8.2,15.93h96.55a10.08,10.08,0,0,0,8.2-15.93Zm-8.2,7.93H10.08a2.08,2.08,0,0,1-1.69-3.29L56.66,46.25a2.08,2.08,0,0,1,1.69-.87,2.05,2.05,0,0,1,1.69.87l48.28,67.65A2.08,2.08,0,0,1,106.63,117.19Z"/></g></svg> \ No newline at end of file diff --git a/datahub-web-react/.storybook/storybook-theme.css b/datahub-web-react/.storybook/storybook-theme.css new file 
mode 100644 index 00000000000000..edf93c57cf2086 --- /dev/null +++ b/datahub-web-react/.storybook/storybook-theme.css @@ -0,0 +1,263 @@ +/* Storybook Theme CSS Overrides */ + +/* Regular */ +@font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 400; + src: url('../src/fonts/Mulish-Regular.ttf') format('truetype'); +} + +/* Medium */ +@font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 500; + src: url('../src/fonts/Mulish-Medium.ttf') format('truetype'); +} + +/* SemiBold */ +@font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 600; + src: url('../src/fonts/Mulish-SemiBold.ttf') format('truetype'); +} + +/* Bold */ +@font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 700; + src: url('../src/fonts/Mulish-Bold.ttf') format('truetype'); +} + +body { + font-family: 'Mulish', sans-serif !important; +} + +::-webkit-scrollbar { + height: 8px; + width: 8px; +} + +*::-webkit-scrollbar-track { + background: rgba(193, 196, 208, 0); + border-radius: 10px; +} + +*::-webkit-scrollbar-thumb { + background: rgba(193, 196, 208, 0); + border-radius: 10px; + transition: 0.3s; +} + +*:hover::-webkit-scrollbar-track { + background: rgba(193, 196, 208, 0.3); +} + +*:hover::-webkit-scrollbar-thumb { + background: rgba(193, 196, 208, 0.8); +} + +.sbdocs-wrapper { + max-width: 95% !important; +} + +.sidebar-header img { + max-height: 25px !important; +} + +.sb-bar { + box-shadow: none !important; + border-bottom: 1px solid hsla(203, 50%, 30%, 0.15) !important; +} + +.sbdocs-preview, +.docblock-argstable-body, +.docblock-source { + box-shadow: none !important; + filter: none !important; +} + +.docblock-source { + max-width: 100% !important; + overflow: auto !important; + margin: 1rem 0 !important; +} + +.sidebar-item, +.sidebar-item[data-selected="true"] { + height: 32px !important; + display: flex !important; + align-items: center !important; + padding-right: 0 !important; + padding: 6px 12px 
!important; + font-size: 15px !important; + margin-bottom: 4px !important; + color: #000 !important; +} + +.sidebar-item:hover { + background-color: #eff8fc !important; +} + +.sidebar-item>a { + align-items: center !important; + gap: 8px !important; + padding: 0 !important; +} + +.sidebar-item[data-nodetype="group"] { + margin-top: 8px !important; +} + +.sidebar-item[data-nodetype="component"] { + padding-left: 8px !important; +} + +[data-nodetype="root"]>[data-action="collapse-root"]>div:first-child, +[data-nodetype="component"] div { + display: none !important; +} + +[data-nodetype="document"][data-parent-id], +[data-nodetype="story"][data-parent-id] { + padding: 0 !important; + margin-left: 16px !important; + height: 18px !important; + min-height: auto !important; + font-weight: 400 !important; +} + +[data-nodetype="document"][data-parent-id] svg, +[data-nodetype="story"][data-parent-id] svg { + display: none !important; +} + +[data-nodetype="document"][data-parent-id]::before, +[data-nodetype="story"][data-parent-id]::before { + content: '→' !important; +} + +[data-nodetype="document"][data-parent-id]:hover, +[data-nodetype="story"][data-parent-id]:hover, +[data-nodetype="document"][data-parent-id][data-selected="true"]:hover, +[data-nodetype="story"][data-parent-id][data-selected="true"]:hover { + background-color: #fff !important; + color: #4da1bf !important; +} + +[data-nodetype="document"][data-parent-id][data-selected="true"], +[data-nodetype="story"][data-parent-id][data-selected="true"] { + background-color: #fff !important; + height: 18px !important; + min-height: auto !important; + font-weight: 400 !important; +} + +.sbdocs-content div[id*=--sandbox]~div[id*=--sandbox]~div[id*=--sandbox], +li:has(a[href="#sandbox"]) { + display: none !important; +} + +[data-nodetype="document"]:not([data-parent-id]) { + padding-left: 0 !important; +} + +[data-nodetype="document"]:not([data-parent-id]) svg { + display: none !important; +} + 
+[data-nodetype="document"]:not([data-parent-id])>a { + font-size: 18px !important; + font-weight: 300 !important; +} + +[data-nodetype="component"][aria-expanded="true"], +[data-nodetype="document"][data-selected="true"] { + color: #000 !important; + background-color: transparent !important; + font-weight: 700 !important; +} + +[data-nodetype="root"][data-selected="true"] { + background-color: transparent !important; +} + +[data-nodetype="document"][data-selected="true"], +[data-nodetype="document"][data-parent-id][data-selected="true"] { + color: #4da1bf !important; +} + +.sidebar-subheading { + font-size: 12px !important; + font-weight: 600 !important; + letter-spacing: 1px !important; + color: #a9adbd !important; +} + +.sbdocs-wrapper { + padding: 2rem !important; +} + +table, +tr, +tbody>tr>* { + border-color: hsla(203, 50%, 30%, 0.15) !important; + background-color: transparent; +} + +:where(table:not(.sb-anchor, .sb-unstyled, .sb-unstyled table)) tr:nth-of-type(2n) { + background-color: transparent !important; +} + +tr { + border-top: 0 !important; +} + +th { + border: 0 !important; +} + +h2#stories { + display: none; +} + +.tabbutton { + border-bottom: none !important +} + +.tabbutton.tabbutton-active { + color: rgb(120, 201, 230) !important; +} + +.toc-wrapper { + margin-top: -2.5rem !important; + font-family: 'Mulish', sans-serif !important; +} + +/* Custom Doc Styles */ + +.custom-docs { + position: relative; +} + +.acrylBg { + position: fixed; + bottom: 0; + left: -20px; + background-repeat: repeat; + z-index: 0; +} + +.acrylBg img { + filter: invert(8); +} + +.custom-docs p, +.docsDescription p, +.custom-docs li { + font-size: 16px; + line-height: 1.75; +} \ No newline at end of file diff --git a/datahub-web-react/.storybook/storybook-theme.js b/datahub-web-react/.storybook/storybook-theme.js new file mode 100644 index 00000000000000..462bf2f03da944 --- /dev/null +++ b/datahub-web-react/.storybook/storybook-theme.js @@ -0,0 +1,47 @@ +import { create } 
from '@storybook/theming'; +import brandImage from './storybook-logo.svg'; + +import theme, { typography } from '../src/alchemy-components/theme'; + +export default create({ + // config + base: 'light', + brandTitle: 'Acryl Design System', + brandUrl: '/?path=/docs/', + brandImage: brandImage, + brandTarget: '_self', + + // styles + fontBase: typography.fontFamily, + fontCode: 'monospace', + + colorPrimary: theme.semanticTokens.colors.primary, + colorSecondary: theme.semanticTokens.colors.secondary, + + // UI + appBg: theme.semanticTokens.colors['body-bg'], + appContentBg: theme.semanticTokens.colors['body-bg'], + appPreviewBg: theme.semanticTokens.colors['body-bg'], + appBorderColor: theme.semanticTokens.colors['border-color'], + appBorderRadius: 4, + + // Text colors + textColor: theme.semanticTokens.colors['body-text'], + textInverseColor: theme.semanticTokens.colors['inverse-text'], + textMutedColor: theme.semanticTokens.colors['subtle-text'], + + // Toolbar default and active colors + barTextColor: theme.semanticTokens.colors['body-text'], + barSelectedColor: theme.semanticTokens.colors['subtle-bg'], + barHoverColor: theme.semanticTokens.colors['subtle-bg'], + barBg: theme.semanticTokens.colors['body-bg'], + + // Form colors + inputBg: theme.semanticTokens.colors['body-bg'], + inputBorder: theme.semanticTokens.colors['border-color'], + inputTextColor: theme.semanticTokens.colors['body-text'], + inputBorderRadius: 4, + + // Grid + gridCellSize: 6, +}); \ No newline at end of file diff --git a/datahub-web-react/.storybook/styledComponents.ts b/datahub-web-react/.storybook/styledComponents.ts new file mode 100644 index 00000000000000..5951c810d89985 --- /dev/null +++ b/datahub-web-react/.storybook/styledComponents.ts @@ -0,0 +1,36 @@ +import { createGlobalStyle } from 'styled-components'; + +import '../src/fonts/Mulish-Regular.ttf'; +import '../src/fonts/Mulish-Medium.ttf'; +import '../src/fonts/Mulish-SemiBold.ttf'; +import '../src/fonts/Mulish-Bold.ttf'; + 
+export const GlobalStyle = createGlobalStyle` + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 400; + src: url('../src/fonts/Mulish-Regular.ttf') format('truetype'); + } + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 500; + src: url('../src/fonts/Mulish-Medium.ttf') format('truetype'); + } + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 600; + src: url('../src/fonts/Mulish-SemiBold.ttf') format('truetype'); + } + @font-face { + font-family: 'Mulish'; + font-style: normal; + font-weight: 700; + src: url('../src/fonts/Mulish-Bold.ttf') format('truetype'); + } + body { + font-family: 'Mulish', sans-serif; + } +`; \ No newline at end of file diff --git a/datahub-web-react/.storybook/webpack.config.js b/datahub-web-react/.storybook/webpack.config.js new file mode 100644 index 00000000000000..22e4ec1de63050 --- /dev/null +++ b/datahub-web-react/.storybook/webpack.config.js @@ -0,0 +1,13 @@ +const path = require('path'); + +module.exports = { + module: { + loaders: [ + { + test: /\.(png|woff|woff2|eot|ttf|svg)$/, + loaders: ['file-loader'], + include: path.resolve(__dirname, '../'), + }, + ], + }, +}; \ No newline at end of file diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index b9fffce173c5c4..3dad778a2b3038 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -16,7 +16,7 @@ node { } // Version of node to use. - version = '21.2.0' + version = '22.12.0' // Version of Yarn to use. yarnVersion = '1.22.22' @@ -79,7 +79,7 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { // Explicitly runs in non-watch mode. - args = ['run', 'test', 'run'] + args = ['run', project.hasProperty('withCoverage') ?
'test-coverage' : 'test', 'run'] } task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -93,7 +93,6 @@ task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { } task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] args = ['run', 'build'] outputs.cacheIf { true } diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index dcaef6004d7022..0ff68de2481ed0 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -9,8 +9,12 @@ "@ant-design/colors": "^5.0.0", "@ant-design/icons": "^4.3.0", "@apollo/client": "^3.3.19", + "@fontsource/mulish": "^5.0.16", + "@geometricpanda/storybook-addon-badges": "^2.0.2", "@graphql-codegen/fragment-matcher": "^5.0.0", "@monaco-editor/react": "^4.3.1", + "@mui/icons-material": "^5.15.21", + "@mui/material": "^5.15.21", "@react-hook/window-size": "^3.0.7", "@react-spring/web": "^9.7.3", "@remirror/pm": "^2.0.3", @@ -30,6 +34,7 @@ "@uiw/react-md-editor": "^3.3.4", "@visx/axis": "^3.1.0", "@visx/curve": "^3.0.0", + "@visx/gradient": "^3.3.0", "@visx/group": "^3.0.0", "@visx/hierarchy": "^3.0.0", "@visx/legend": "^3.2.0", @@ -84,16 +89,19 @@ "scripts": { "analyze": "source-map-explorer 'dist/assets/*.js'", "start": "yarn run generate && vite", - "ec2-dev": "yarn run generate && CI=true;export CI;vite", - "build": "yarn run generate && NODE_OPTIONS='--max-old-space-size=3072 --openssl-legacy-provider' CI=false vite build", - "test": "vitest", - "generate": "graphql-codegen --config codegen.yml", + "ec2-dev": "yarn run generate && CI=true vite", + "build": "yarn run generate && CI=false NODE_OPTIONS='--max-old-space-size=5120 --openssl-legacy-provider' vite build", + "test": "NODE_OPTIONS='--max-old-space-size=5120 --openssl-legacy-provider' vitest", + "test-coverage": "yarn test run --coverage", + "generate": "NODE_OPTIONS='--max-old-space-size=5120 
--openssl-legacy-provider' graphql-codegen --config codegen.yml", "lint": "eslint . --ext .ts,.tsx --quiet && yarn format-check && yarn type-check", "lint-fix": "eslint '*/**/*.{ts,tsx}' --quiet --fix && yarn format", "format-check": "prettier --check src", "format": "prettier --write src", "type-check": "tsc --noEmit", - "type-watch": "tsc -w --noEmit" + "type-watch": "tsc -w --noEmit", + "storybook": "storybook dev -p 6006", + "build-storybook": "NODE_OPTIONS='--max-old-space-size=5120 --openssl-legacy-provider' storybook build" }, "browserslist": { "production": [ @@ -112,12 +120,23 @@ "@graphql-codegen/near-operation-file-preset": "^1.17.13", "@graphql-codegen/typescript-operations": "1.17.13", "@graphql-codegen/typescript-react-apollo": "2.2.1", + "@storybook/addon-essentials": "^8.1.11", + "@storybook/addon-interactions": "^8.1.11", + "@storybook/addon-links": "^8.1.11", + "@storybook/addon-onboarding": "^8.1.11", + "@storybook/blocks": "^8.1.11", + "@storybook/builder-vite": "^8.1.11", + "@storybook/manager-api": "^8.1.11", + "@storybook/react-vite": "^8.1.11", + "@storybook/test": "^8.1.11", + "@storybook/theming": "^8.1.11", "@types/graphql": "^14.5.0", "@types/query-string": "^6.3.0", "@types/styled-components": "^5.1.7", "@typescript-eslint/eslint-plugin": "^5.38.1", "@typescript-eslint/parser": "^5.38.1", "@vitejs/plugin-react": "^4.1.1", + "@vitest/coverage-v8": "^0.34.6", "eslint": "^8.2.0", "eslint-config-airbnb": "19.0.4", "eslint-config-airbnb-typescript": "^17.0.0", @@ -132,7 +151,8 @@ "less": "^4.2.0", "prettier": "^2.8.8", "source-map-explorer": "^2.5.2", - "vite": "^4.5.5", + "storybook": "^8.1.11", + "vite": "^4.5.6", "vite-plugin-babel-macros": "^1.0.6", "vite-plugin-static-copy": "^0.17.0", "vite-plugin-svgr": "^4.1.0", diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 2fdd7c8ed68004..81f137417f1f8a 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -79,7 +79,7 @@ export const 
InnerApp: React.VFC = () => { <HelmetProvider> <CustomThemeProvider> <Helmet> - <title>{useCustomTheme().theme?.content.title} + {useCustomTheme().theme?.content?.title} diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index 329d6250e576ab..063b784920e234 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -645,6 +645,7 @@ export const dataset3 = { structuredProperties: null, forms: null, activeIncidents: null, + versionProperties: null, } as Dataset; export const dataset3WithSchema = { @@ -1714,6 +1715,7 @@ export const mlModel = { }, tags: [], properties: { + name: 'trust model', description: 'a ml trust model', date: null, version: '1', @@ -2204,7 +2206,7 @@ export const mocks = [ count: 10, filters: [], orFilters: [], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -2244,6 +2246,7 @@ export const mocks = [ field: 'origin', displayName: 'origin', aggregations: [{ value: 'PROD', count: 3, entity: null }], + entity: null, }, { field: '_entityType', @@ -2252,6 +2255,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -2261,6 +2265,7 @@ export const mocks = [ { value: 'MySQL', count: 1, entity: null }, { value: 'Kafka', count: 1, entity: null }, ], + entity: null, }, ], suggestions: [], @@ -2290,7 +2295,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -2325,6 +2330,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -2333,6 +2339,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', 
__typename: 'AggregationMetadata' }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -2343,6 +2350,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], suggestions: [], @@ -2393,6 +2401,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -2401,6 +2410,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -2410,6 +2420,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -2464,7 +2475,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -2501,6 +2512,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -2510,6 +2522,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -2520,6 +2533,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], + entity: null, }, ], }, @@ -2669,6 +2683,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -2677,6 +2692,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -2686,6 +2702,7 @@ export const mocks = [ { value: 'mysql', count: 
1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -2743,6 +2760,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -2751,6 +2769,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -2760,6 +2779,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -2809,6 +2829,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: 'platform', @@ -2822,6 +2843,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -2953,6 +2975,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: 'platform', @@ -2966,6 +2989,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -3013,7 +3037,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3050,6 +3074,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, // { // displayName: 'Domain', @@ -3071,6 +3096,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -3096,6 +3122,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, ], }, @@ -3181,7 +3208,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3215,6 +3242,7 @@ 
export const mocks = [ entity: null, }, ], + entity: null, }, { field: 'platform', @@ -3228,6 +3256,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -3256,7 +3285,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3290,6 +3319,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -3298,6 +3328,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -3307,6 +3338,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -3335,7 +3367,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3377,6 +3409,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -3385,6 +3418,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -3394,6 +3428,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -3428,7 +3463,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3465,6 +3500,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -3474,6 +3510,7 @@ export const mocks = [ { 
count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -3484,6 +3521,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], + entity: null, }, ], }, @@ -3518,7 +3556,7 @@ export const mocks = [ ], }, ], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3555,6 +3593,7 @@ export const mocks = [ __typename: 'AggregationMetadata', }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -3564,6 +3603,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 'AggregationMetadata' }, ], + entity: null, }, { __typename: 'FacetMetadata', @@ -3574,6 +3614,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], + entity: null, }, ], }, @@ -3635,6 +3676,8 @@ export const mocks = [ manageGlobalAnnouncements: true, createBusinessAttributes: true, manageBusinessAttributes: true, + manageStructuredProperties: true, + viewStructuredPropertiesPage: true, }, }, }, @@ -3722,7 +3765,7 @@ export const mocks = [ count: 10, filters: [], orFilters: [], - searchFlags: { getSuggestions: true }, + searchFlags: { getSuggestions: true, includeStructuredPropertyFacets: true }, }, }, }, @@ -3821,6 +3864,7 @@ export const mocks = [ entity: null, }, ], + entity: null, }, { field: '_entityType', @@ -3829,6 +3873,7 @@ export const mocks = [ { count: 37, entity: null, value: 'DATASET', __typename: 'AggregationMetadata' }, { count: 7, entity: null, value: 'CHART', __typename: 
'AggregationMetadata' }, ], + entity: null, }, { field: 'platform', @@ -3838,6 +3883,7 @@ export const mocks = [ { value: 'mysql', count: 1, entity: null }, { value: 'kafka', count: 1, entity: null }, ], + entity: null, }, ], }, @@ -3912,4 +3958,6 @@ export const platformPrivileges: PlatformPrivileges = { manageGlobalAnnouncements: true, createBusinessAttributes: true, manageBusinessAttributes: true, + manageStructuredProperties: true, + viewStructuredPropertiesPage: true, }; diff --git a/datahub-web-react/src/alchemy-components/.docs/Contributing.mdx b/datahub-web-react/src/alchemy-components/.docs/Contributing.mdx new file mode 100644 index 00000000000000..75a31d011903f8 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/Contributing.mdx @@ -0,0 +1,43 @@ +import { Meta } from '@storybook/blocks'; + + + +

+ ## Contributing + + Building and maintaining a design system is a collaborative effort. We welcome contributions from all team members, regardless of their role or experience level. This document outlines the process for contributing to the Acryl Component Library. + + ### Development + + To run Storybook locally, use the following command: + + ``` + yarn storybook + ``` + + Storybook will start a local development server and open a new browser window with the Storybook interface on port `6006`. When developing new components or updating existing ones, you can use Storybook to preview your changes in real-time. This will ensure that the component looks and behaves as expected before merging your changes. + + ### Crafting New Components + + When creating new components, make sure to follow the established design patterns and coding standards. This will help maintain consistency across all Acryl products and make it easier for other team members to understand and use your components. + + Design new components with reusability in mind. Components should be flexible, extensible, and easy to customize. Avoid hardcoding values and use props to pass data and styles to your components. This will make it easier to reuse the component in different contexts and scenarios. + + Our design team works exclusively in Figma, so if questions arise about the design or implementation of a component, please refer to the Figma files for more information. If you have any questions or need clarification, feel free to reach out to the design team for assistance. + + ### Pull Requests + + When submitting a pull request, please follow these guidelines: + + 1. Create a new branch for your changes. + 2. Make sure your code is well-documented and follows the established coding standards. + 3. Write clear and concise commit messages. + 4. Include a detailed description of the changes in your pull request. + + If applicable, include screenshots or GIFs to demonstrate the changes visually.
This will help reviewers understand the context of your changes and provide more accurate feedback. If a Figma file exists, include a link to the file in the pull request description. + + ### Review Process + + All pull requests will be reviewed by the UI and design team to ensure that the changes align with the design system guidelines and best practices. The team will provide feedback and suggestions for improvement, and you may be asked to make additional changes before your pull request is merged. + +
diff --git a/datahub-web-react/src/alchemy-components/.docs/DesignTokens.mdx b/datahub-web-react/src/alchemy-components/.docs/DesignTokens.mdx new file mode 100644 index 00000000000000..0ebdebbf9db4cb --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/DesignTokens.mdx @@ -0,0 +1,63 @@ +import { Meta, Source } from '@storybook/blocks'; + +import theme from '@components/theme'; + +import { ColorCard, CopyButton } from './mdx-components'; + + + +
+ ## Design Tokens + + To streamline the design process and ensure consistency across all Acryl products, we use a set of design tokens that define the visual properties of our design system. These tokens include colors, typography, spacing, and other visual elements that can be used to create a cohesive user experience. + + ### Colors + + ```tsx + import theme from '@components/theme'; + + // Accessing a color via object path +
Hello, World!
+ + // Using CSS variables +
Hello, World!
+ ``` + + + + + + + + + + + {Object.keys(theme.semanticTokens.colors).map((color) => { + const objectKey = `colors['${color}']`; + const hexValue = theme.semanticTokens.colors[color]; + const cssVar = `--alch-color-${color}`; + + return ( + + + + + + ); + })} + +
Token ValueSelectorCSS Variable (coming soon)
+ + +
+ {color} + {hexValue} +
+
+
+ + {objectKey} + + {cssVar}
+ +
diff --git a/datahub-web-react/src/alchemy-components/.docs/Icons.mdx b/datahub-web-react/src/alchemy-components/.docs/Icons.mdx new file mode 100644 index 00000000000000..e3f6ab68461196 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/Icons.mdx @@ -0,0 +1,34 @@ +import { Meta, Source } from '@storybook/blocks'; + +import { AVAILABLE_ICONS } from '@components'; +import { IconGalleryWithSearch } from './mdx-components'; + + + +
+ ## Icons + + Under the hood, we're utilizing the Material Design Icon Library. However, we've crafted our own reusable component to make it easier to use these icons in our application. + + + View the component documentation to learn more + + + In addition to using Material Design Icons, we've also added a few custom icons to the library. You can access them through the same `` component and are represented in the list of available options below. + + ```tsx + import { Icon } from '@components'; + + + ``` + +
+ + ### Gallery + + There are {AVAILABLE_ICONS.length} icons available.
+ Name values populate the `icon` prop on the `` component. + + + +
diff --git a/datahub-web-react/src/alchemy-components/.docs/Intro.mdx b/datahub-web-react/src/alchemy-components/.docs/Intro.mdx new file mode 100644 index 00000000000000..f81d08059c7b44 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/Intro.mdx @@ -0,0 +1,14 @@ +import { Meta, Description } from '@storybook/blocks'; +import ReadMe from '../README.mdx'; + + + +
+
+ Acryl Logo +
+ + {/* To simplify, we're rendering the root readme here */} + + +
diff --git a/datahub-web-react/src/alchemy-components/.docs/StyleGuide.mdx b/datahub-web-react/src/alchemy-components/.docs/StyleGuide.mdx new file mode 100644 index 00000000000000..43199cbbca62d1 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/StyleGuide.mdx @@ -0,0 +1,209 @@ +import { Meta, Source } from '@storybook/blocks'; + +import { Heading } from '@components'; +import { colors } from '@components/theme'; + +import { Grid, FlexGrid, ColorCard, CopyButton, Seperator } from './mdx-components'; + +import borderSource from '@components/theme/foundations/borders?raw'; +import colorsSource from '@components/theme/foundations/colors?raw'; +import typographySource from '@components/theme/foundations/typography?raw'; +import radiusSource from '@components/theme/foundations/radius?raw'; +import shadowsSource from '@components/theme/foundations/shadows?raw'; +import sizesSource from '@components/theme/foundations/sizes?raw'; +import spacingSource from '@components/theme/foundations/spacing?raw'; +import transitionSource from '@components/theme/foundations/transition?raw'; +import zIndexSource from '@components/theme/foundations/zIndex?raw'; + + + +
+ ## Style Guide + + The purpose of this Style Guide is to establish a unified and cohesive design language that ensures a consistent user experience across all Acryl products. By adhering to these guidelines, we can maintain a high standard of design quality and improve the usability of our applications. + + ### Theme + + You can import the theme object into any component or file in your application and use it to style your components. The theme object is a single source of truth for your application's design system. + + ```tsx + import { typography, colors, spacing } from '@components/theme'; + ``` + + ### Colors + + Colors are managed via the `colors.ts` file in the `theme/foundations` directory. The colors are defined as a nested object with the following structure: + + + + By default, all `500` values are considered the "default" value of that color range. For example, `gray.500` is the default gray color. The other values are used for shading and highlighting. Color values are defined in hex format and their values range between 25 and 1000. With 25 being the lightest and 1000 being the darkest. + + #### Black & White + + 
+ Black + {colors['black']} +
+
+ + +
+ White + {colors['white']} +
+
+
+ + + + #### Gray + + {Object.keys(colors.gray).map((color) => ( + + +
+ + Gray {color} + + {colors['gray'][color]} +
+
+ ))} +
+ + + + #### Violet (Primary) + + {Object.keys(colors.violet).map((color) => ( + + +
+ + Violet {color} + + {colors['violet'][color]} +
+
+ ))} +
+ + + + #### Blue + + {Object.keys(colors.blue).map((color) => ( + + +
+ + Blue {color} + + {colors['blue'][color]} +
+
+ ))} +
+ + + + #### Green + + {Object.keys(colors.green).map((color) => ( + + +
+ + Green {color} + + {colors['green'][color]} +
+
+ ))} +
+ + + + #### Yellow + + {Object.keys(colors.yellow).map((color) => ( + + +
+ + Yellow {color} + + {colors['yellow'][color]} +
+
+ ))} +
+ + + + #### Red + + {Object.keys(colors.red).map((color) => ( + + +
+ + Red {color} + + {colors['red'][color]} +
+
+ ))} +
+ + ### Typography + + Font styles are managed via the `typography.ts` file in the `theme/foundations` directory. The primary font family in use is `Mulish`. The font styles are defined as a nested object with the following structure: + + + + ### Borders + + A set of border values defined by the border key. + + + + ### Border Radius + + A set smooth corner radius values defined by the radii key. + + + + ### Shadows + + A set of shadow values defined by the shadows key. + + + + ## Sizes + + A set of size values defined by the sizes key. + + + + ### Spacing + + A set of spacing values defined by the spacing key. + + + + ### Transitions + + A set of transition values defined by the transition key. + + + + ### Z-Index + + A set of z-index values defined by the zindex key. + + + +
diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/CodeBlock.tsx b/datahub-web-react/src/alchemy-components/.docs/mdx-components/CodeBlock.tsx new file mode 100644 index 00000000000000..43b9ebfae64149 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/CodeBlock.tsx @@ -0,0 +1,24 @@ +import React from 'react'; + +import { Source, DocsContext } from '@storybook/blocks'; + +export const CodeBlock = () => { + const context = React.useContext(DocsContext); + + const { primaryStory } = context as any; + const component = context ? primaryStory.component.__docgenInfo.displayName : ''; + + if (!context || !primaryStory) return null; + + return ( +
+ +
+ ); +}; diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/CopyButton.tsx b/datahub-web-react/src/alchemy-components/.docs/mdx-components/CopyButton.tsx new file mode 100644 index 00000000000000..c81aa6ed442892 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/CopyButton.tsx @@ -0,0 +1,16 @@ +import React from 'react'; + +import { Button, Icon } from '@components'; +import { copyToClipboard } from './utils'; + +interface Props { + text: string; +} + +export const CopyButton = ({ text }: Props) => ( +
+ +
+); diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/GridList.tsx b/datahub-web-react/src/alchemy-components/.docs/mdx-components/GridList.tsx new file mode 100644 index 00000000000000..5cb4bd27e521a4 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/GridList.tsx @@ -0,0 +1,32 @@ +/* + Docs Only Component that helps to display a list of components in a grid layout. +*/ + +import React, { ReactNode } from 'react'; + +const styles = { + display: 'flex', + alignItems: 'center', + justifyContent: 'center', + gap: '8px', +}; + +interface Props { + isVertical?: boolean; + width?: number | string; + children: ReactNode; +} + +export const GridList = ({ isVertical = false, width = '100%', children }: Props) => { + return ( +
+ {children} +
+ ); +}; diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/IconGalleryWithSearch.tsx b/datahub-web-react/src/alchemy-components/.docs/mdx-components/IconGalleryWithSearch.tsx new file mode 100644 index 00000000000000..d8751509bd6a72 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/IconGalleryWithSearch.tsx @@ -0,0 +1,291 @@ +import React, { useState } from 'react'; + +import { Icon, Button, ButtonProps } from '@components'; +import { IconGrid, IconGridItem, IconDisplayBlock } from './components'; + +interface Props { + icons: string[]; +} + +export const IconGalleryWithSearch = ({ icons }: Props) => { + const [iconSet, setIconSet] = useState(icons); + const [search, setSearch] = useState(''); + const [variant, setVariant] = useState('outline'); + + const filteredIcons = iconSet.filter((icon) => icon.toLowerCase().includes(search.toLowerCase())); + + const arrows = [ + 'ArrowBack', + 'ArrowCircleDown', + 'ArrowCircleLeft', + 'ArrowCircleRight', + 'ArrowCircleUp', + 'ArrowDownward', + 'ArrowForward', + 'ArrowOutward', + 'ArrowUpward', + 'CloseFullscreen', + 'Cached', + 'Code', + 'CodeOff', + 'CompareArrows', + 'Compress', + 'ChevronLeft', + 'ChevronRight', + 'DoubleArrow', + 'FastForward', + 'FastRewind', + 'FileDownload', + 'FileUpload', + 'ForkLeft', + 'ForkRight', + 'GetApp', + 'LastPage', + 'Launch', + 'Login', + 'Logout', + 'LowPriority', + 'ManageHistory', + 'Merge', + 'MergeType', + 'MoveUp', + 'MultipleStop', + 'OpenInFull', + 'Outbound', + 'Outbox', + 'Output', + 'PlayArrow', + 'PlayCircle', + 'Publish', + 'ReadMore', + 'ExitToApp', + 'Redo', + 'Refresh', + 'Replay', + 'ReplyAll', + 'Reply', + 'Restore', + 'SaveAlt', + 'Shortcut', + 'SkipNext', + 'SkipPrevious', + 'Start', + 'Straight', + 'SubdirectoryArrowLeft', + 'SubdirectoryArrowRight', + 'SwapHoriz', + 'SwapVert', + 'SwitchLeft', + 'SwitchRight', + 'SyncAlt', + 'SyncDisabled', + 'SyncLock', + 'Sync', + 'Shuffle', + 'SyncProblem', + 
'TrendingDown', + 'TrendingFlat', + 'TrendingUp', + 'TurnLeft', + 'TurnRight', + 'TurnSlightLeft', + 'TurnSlightRight', + 'Undo', + 'UnfoldLessDouble', + 'UnfoldLess', + 'UnfoldMoreDouble', + 'UnfoldMore', + 'UpdateDisabled', + 'Update', + 'Upgrade', + 'Upload', + 'ZoomInMap', + 'ZoomOutMap', + ]; + + const dataViz = [ + 'AccountTree', + 'Analytics', + 'ArtTrack', + 'Article', + 'BackupTable', + 'BarChart', + 'BubbleChart', + 'Calculate', + 'Equalizer', + 'List', + 'FormatListBulleted', + 'FormatListNumbered', + 'Grading', + 'InsertChart', + 'Hub', + 'Insights', + 'Lan', + 'Leaderboard', + 'LegendToggle', + 'Map', + 'MultilineChart', + 'Nat', + 'PivotTableChart', + 'Poll', + 'Polyline', + 'QueryStats', + 'Radar', + 'Route', + 'Rule', + 'Schema', + 'Sort', + 'SortByAlpha', + 'ShowChart', + 'Source', + 'SsidChart', + 'StackedBarChart', + 'StackedLineChart', + 'Storage', + 'TableChart', + 'TableRows', + 'TableView', + 'Timeline', + 'ViewAgenda', + 'ViewArray', + 'ViewCarousel', + 'ViewColumn', + 'ViewComfy', + 'ViewCompact', + 'ViewCozy', + 'ViewDay', + 'ViewHeadline', + 'ViewKanban', + 'ViewList', + 'ViewModule', + 'ViewQuilt', + 'ViewSidebar', + 'ViewStream', + 'ViewTimeline', + 'ViewWeek', + 'Visibility', + 'VisibilityOff', + 'Webhook', + 'Window', + ]; + + const social = [ + 'AccountCircle', + 'Badge', + 'Campaign', + 'Celebration', + 'Chat', + 'ChatBubble', + 'CommentBank', + 'Comment', + 'CommentsDisabled', + 'Message', + 'ContactPage', + 'Contacts', + 'GroupAdd', + 'Group', + 'GroupRemove', + 'Groups', + 'Handshake', + 'ManageAccounts', + 'MoodBad', + 'SentimentDissatisfied', + 'SentimentNeutral', + 'SentimentSatisfied', + 'Mood', + 'NoAccounts', + 'People', + 'PersonAddAlt1', + 'PersonOff', + 'Person', + 'PersonRemoveAlt1', + 'PersonSearch', + 'SwitchAccount', + 'StarBorder', + 'StarHalf', + 'Star', + 'ThumbDown', + 'ThumbUp', + 'ThumbsUpDown', + 'Verified', + 'VerifiedUser', + ]; + + const notifs = [ + 'Mail', + 'Drafts', + 'MarkAsUnread', + 'Inbox', + 
'Outbox', + 'MoveToInbox', + 'Unsubscribe', + 'Upcoming', + 'NotificationAdd', + 'NotificationImportant', + 'NotificationsActive', + 'NotificationsOff', + 'Notifications', + 'NotificationsPaused', + ]; + + const handleChangeSet = (set) => { + setIconSet(set); + setSearch(''); + }; + + const handleResetSet = () => { + setIconSet(icons); + setSearch(''); + }; + + const smButtonProps: ButtonProps = { + size: 'sm', + color: 'gray', + }; + + return ( + <> + setSearch(e.target.value)} + placeholder="Search for an icon…" + style={{ width: '100%', padding: '0.5rem', marginBottom: '0.5rem' }} + /> +
+
+ + + + + +
+
+ +
+
+ + {filteredIcons.map((icon) => ( + + + + + {icon} + + ))} + + + ); +}; diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/components.ts b/datahub-web-react/src/alchemy-components/.docs/mdx-components/components.ts new file mode 100644 index 00000000000000..28d428493b17b2 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/components.ts @@ -0,0 +1,110 @@ +/* + Docs Only Components that helps to display information in info guides. +*/ + +import styled from 'styled-components'; + +import theme from '@components/theme'; + +export const Grid = styled.div` + display: grid; + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + gap: 16px; +`; + +export const FlexGrid = styled.div` + display: flex; + gap: 16px; +`; + +export const VerticalFlexGrid = styled.div` + display: flex; + flex-direction: column; + gap: 16px; +`; + +export const Seperator = styled.div` + height: 16px; +`; + +export const ColorCard = styled.div<{ color: string; size?: string }>` + display: flex; + gap: 16px; + align-items: center; + + ${({ size }) => + size === 'sm' && + ` + gap: 8px; + `} + + & span { + display: block; + line-height: 1.3; + } + + & .colorChip { + background: ${({ color }) => color}; + width: 3rem; + height: 3rem; + + ${({ size }) => + size === 'sm' && + ` + width: 2rem; + height: 2rem; + border-radius: 4px; + `} + + border-radius: 8px; + box-shadow: rgba(0, 0, 0, 0.06) 0px 2px 4px 0px inset; + } + + & .colorValue { + display: flex; + align-items: center; + gap: 0; + font-weight: bold; + font-size: 14px; + } + + & .hex { + font-size: 11px; + opacity: 0.5; + text-transform: uppercase; + } +`; + +export const IconGrid = styled.div` + display: grid; + grid-template-columns: repeat(auto-fill, minmax(100px, 1fr)); + gap: 16px; + margin-top: 20px; +`; + +export const IconGridItem = styled.div` + display: flex; + flex-direction: column; + align-items: center; + justify-content: space-between; + + border: 1px solid 
${theme.semanticTokens.colors['border-color']}; + border-radius: 8px; + overflow: hidden; + + & span { + width: 100%; + border-top: 1px solid ${theme.semanticTokens.colors['border-color']}; + background-color: ${theme.semanticTokens.colors['subtle-bg']}; + text-align: center; + padding: 4px 8px; + font-size: 10px; + } +`; + +export const IconDisplayBlock = styled.div` + display: flex; + align-items: center; + justify-content: center; + height: 50px; +`; diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/index.ts b/datahub-web-react/src/alchemy-components/.docs/mdx-components/index.ts new file mode 100644 index 00000000000000..d1c1848d1eb378 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/index.ts @@ -0,0 +1,6 @@ +export * from './CodeBlock'; +export * from './CopyButton'; +export * from './GridList'; +export * from './IconGalleryWithSearch'; +export * from './components'; +export * from './utils'; diff --git a/datahub-web-react/src/alchemy-components/.docs/mdx-components/utils.ts b/datahub-web-react/src/alchemy-components/.docs/mdx-components/utils.ts new file mode 100644 index 00000000000000..d4fa47dc9e9674 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/.docs/mdx-components/utils.ts @@ -0,0 +1,15 @@ +/* + Docs related utils +*/ + +/** + * Copies the given text to the clipboard. + * @param {string} text - The text to be copied to the clipboard. + * @returns {Promise} A promise that resolves when the text is copied. 
+ */ +export const copyToClipboard = (text: string) => { + return navigator.clipboard + .writeText(text) + .then(() => console.log(`${text} copied to clipboard`)) + .catch(); +}; diff --git a/datahub-web-react/src/alchemy-components/README.mdx b/datahub-web-react/src/alchemy-components/README.mdx new file mode 100644 index 00000000000000..5373432c0ede03 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/README.mdx @@ -0,0 +1,73 @@ +# Alchemy Component Library + +This is a comprehensive library of accessible and reusable React components that streamlines the development of Acryl's applications and websites. The library offers a diverse range of components that can be easily combined to build complex user interfaces while adhering to accessibility best practices. + +### Component Usage + +It's easy to use the components availble in the library. Simply import the component and use it anywhere you're rendering React components. + +```tsx +import { Button } from '@components'; + +function YourComponent() { + return ; +} +``` + +In addition to the components themselves, you can also import their types: + +```tsx +import type { ButtonProps } from '@components'; +``` + +### Theme Usage + +This component library comes with a complete theme utility that pre-defines all of our styling atoms and makes them accessible at `@components/theme`. + +```tsx +import { colors } from '@components/theme'; + +function YourComponent() { + return ( +
+ This div has a green background! +
+ ) +} +``` + +You can access the theme types at `@components/theme/types` and the theme config at `@components/theme/config`. + +### Writing Docs + +Our docs are generated using [Storybook](https://storybook.js.org/) and deployed to [Cloudflare](https://www.cloudflare.com/). + +- Storybook config is located at `.storybook` +- Static doc files are located at `alchemy-components/.docs` +- Component stories are located in each component directory:
`alchemy-components/components/Component/Component.stories.tsx` +Storybook serves as our playground for developing components. You can start it locally: + +```bash +yarn storybook +``` + +This launches the docs app at `localhost:6006` and enables everything you need to quickly develop and document components. + +### Contributing + +Building a component library is a collaborative effort! We're aiming to provide a first-class experience, so here's a list of the standards we'll be looking for: + +- Consistent prop and variant naming conventions:
+ -- `variant` is used to define style types, such as `outline` or `filled`.
+ -- `color` is used to define the component's color, such as `violet` or `blue`.
+ -- `size` is used to define the component's size, such as `xs` or `4xl`.
+ -- Booleans are prefixed with `is`: `isLoading` or `isDisabled`. +- All style props have a corresponding theme type, i.e. `FontSizeOptions`. +- All components have an export of default props. +- Styles are defined using `style objects` instead of `tagged template literals`. +- Stories are organized into the correct directory. + +### FAQs + +- **How are components being styled?**
Our components are built using [Styled Components](https://styled-components.com/) that dynamically generate styles based on variant selection. diff --git a/datahub-web-react/src/alchemy-components/components/Avatar/Avatar.stories.tsx b/datahub-web-react/src/alchemy-components/components/Avatar/Avatar.stories.tsx new file mode 100644 index 00000000000000..09d0d37f15421a --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/Avatar.stories.tsx @@ -0,0 +1,133 @@ +import { BADGE } from '@geometricpanda/storybook-addon-badges'; +import { GridList } from '@src/alchemy-components/.docs/mdx-components'; +import { Meta, StoryObj } from '@storybook/react'; +import React from 'react'; +import { Avatar, avatarDefaults } from './Avatar'; + +const IMAGE_URL = + 'https://is1-ssl.mzstatic.com/image/thumb/Purple211/v4/78/cb/e1/78cbe16d-28d9-057e-9f73-524c32eb5fe5/AppIcon-0-0-1x_U007emarketing-0-7-0-85-220.png/512x512bb.jpg'; + +// Auto Docs +const meta = { + title: 'Components / Avatar', + component: Avatar, + + // Display Properties + parameters: { + layout: 'centered', + badges: [BADGE.STABLE, 'readyForDesignReview'], + docs: { + subtitle: 'This component allows users to render a user pill with picture and name', + }, + }, + + // Component-level argTypes + argTypes: { + name: { + description: 'Name of the user.', + table: { + defaultValue: { summary: `${avatarDefaults.name}` }, + }, + control: 'text', + }, + imageUrl: { + description: 'URL of the user image.', + control: 'text', + }, + onClick: { + description: 'On click function for the Avatar.', + }, + size: { + description: 'Size of the Avatar.', + table: { + defaultValue: { summary: `${avatarDefaults.size}` }, + }, + control: 'select', + }, + showInPill: { + description: 'Whether Avatar is shown in pill format with name.', + table: { + defaultValue: { summary: `${avatarDefaults.showInPill}` }, + }, + control: 'boolean', + }, + + isOutlined: { + description: 'Whether Avatar is outlined.', + table: { + 
defaultValue: { summary: `${avatarDefaults.isOutlined}` }, + }, + control: 'boolean', + }, + }, + + // Define defaults + args: { + name: 'John Doe', + size: 'default', + showInPill: false, + isOutlined: false, + }, +} satisfies Meta; + +export default meta; + +// Stories + +type Story = StoryObj; + +// Basic story is what is displayed 1st in storybook & is used as the code sandbox +// Pass props to this so that it can be customized via the UI props panel +export const sandbox: Story = { + tags: ['dev'], + render: (props) => , +}; + +export const sizes = () => ( + + + + + + +); + +export const withImage = () => ( + + + + + + +); + +export const pills = () => ( + + + + + + + + + + + + + + +); + +export const outlined = () => ( + + + + +); + +export const withOnClick = () => ( + + window.alert('Avatar clicked')} /> + window.alert('Avatar clicked')} showInPill /> + +); diff --git a/datahub-web-react/src/alchemy-components/components/Avatar/Avatar.tsx b/datahub-web-react/src/alchemy-components/components/Avatar/Avatar.tsx new file mode 100644 index 00000000000000..9e5ec025e08e3d --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/Avatar.tsx @@ -0,0 +1,40 @@ +import React, { useState } from 'react'; +import { AvatarImage, AvatarImageWrapper, AvatarText, Container } from './components'; +import { AvatarProps } from './types'; +import getAvatarColor, { getNameInitials } from './utils'; + +export const avatarDefaults: AvatarProps = { + name: 'User name', + size: 'default', + showInPill: false, + isOutlined: false, +}; + +export const Avatar = ({ + name = avatarDefaults.name, + imageUrl, + size = avatarDefaults.size, + onClick, + showInPill = avatarDefaults.showInPill, + isOutlined = avatarDefaults.isOutlined, +}: AvatarProps) => { + const [hasError, setHasError] = useState(false); + + return ( + + + {!hasError && imageUrl ? 
( + setHasError(true)} /> + ) : ( + <>{getNameInitials(name)} + )} + + {showInPill && {name}} + + ); +}; diff --git a/datahub-web-react/src/alchemy-components/components/Avatar/_tests_/getNameInitials.test.ts b/datahub-web-react/src/alchemy-components/components/Avatar/_tests_/getNameInitials.test.ts new file mode 100644 index 00000000000000..54bb258acb0d81 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/_tests_/getNameInitials.test.ts @@ -0,0 +1,34 @@ +import { getNameInitials } from '../utils'; + +describe('get initials of the name', () => { + it('get initials of name with first name and last name', () => { + expect(getNameInitials('John Doe ')).toEqual('JD'); + }); + it('get initials of name with first name and last name in lower case', () => { + expect(getNameInitials('john doe')).toEqual('JD'); + }); + it('get initials of name with only first name', () => { + expect(getNameInitials('Robert')).toEqual('RO'); + }); + it('get initials of name with only first name in lower case', () => { + expect(getNameInitials('robert')).toEqual('RO'); + }); + it('get initials of name with three names', () => { + expect(getNameInitials('James Edward Brown')).toEqual('JB'); + }); + it('get initials of name with four names', () => { + expect(getNameInitials('Michael James Alexander Scott')).toEqual('MS'); + }); + it('get initials of name with a hyphen', () => { + expect(getNameInitials('Mary-Jane Watson')).toEqual('MW'); + }); + it('get initials of name with an apostrophe', () => { + expect(getNameInitials("O'Connor")).toEqual('OC'); + }); + it('get initials of name with a single letter', () => { + expect(getNameInitials('J')).toEqual('J'); + }); + it('get initials of name with an empty string', () => { + expect(getNameInitials('')).toEqual(''); + }); +}); diff --git a/datahub-web-react/src/alchemy-components/components/Avatar/components.ts b/datahub-web-react/src/alchemy-components/components/Avatar/components.ts new file mode 100644 index 
00000000000000..bcd23a8ab086c9 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/components.ts @@ -0,0 +1,51 @@ +import { colors } from '@src/alchemy-components/theme'; +import { AvatarSizeOptions } from '@src/alchemy-components/theme/config'; +import styled from 'styled-components'; +import { getAvatarColorStyles, getAvatarNameSizes, getAvatarSizes } from './utils'; + +export const Container = styled.div<{ $hasOnClick: boolean; $showInPill?: boolean }>` + display: inline-flex; + align-items: center; + gap: 4px; + border-radius: 20px; + border: ${(props) => props.$showInPill && `1px solid ${colors.gray[100]}`}; + padding: ${(props) => props.$showInPill && '3px 6px 3px 4px'}; + + ${(props) => + props.$hasOnClick && + ` + :hover { + cursor: pointer; + } + `} +`; + +export const AvatarImageWrapper = styled.div<{ + $color: string; + $size?: AvatarSizeOptions; + $isOutlined?: boolean; + $hasImage?: boolean; +}>` + ${(props) => getAvatarSizes(props.$size)} + + border-radius: 50%; + color: ${(props) => props.$color}; + border: ${(props) => props.$isOutlined && `1px solid ${colors.gray[1800]}`}; + display: flex; + align-items: center; + justify-content: center; + ${(props) => !props.$hasImage && getAvatarColorStyles(props.$color)} +`; + +export const AvatarImage = styled.img` + width: 100%; + height: 100%; + object-fit: cover; + border-radius: 50%; +`; + +export const AvatarText = styled.span<{ $size?: AvatarSizeOptions }>` + color: ${colors.gray[1700]}; + font-weight: 600; + font-size: ${(props) => getAvatarNameSizes(props.$size)}; +`; diff --git a/datahub-web-react/src/alchemy-components/components/Avatar/index.ts b/datahub-web-react/src/alchemy-components/components/Avatar/index.ts new file mode 100644 index 00000000000000..d3fb6dfa7c09e1 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/index.ts @@ -0,0 +1 @@ +export { Avatar } from './Avatar'; diff --git 
a/datahub-web-react/src/alchemy-components/components/Avatar/types.ts b/datahub-web-react/src/alchemy-components/components/Avatar/types.ts new file mode 100644 index 00000000000000..98c554b620dcbd --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/types.ts @@ -0,0 +1,10 @@ +import { AvatarSizeOptions } from '@src/alchemy-components/theme/config'; + +export interface AvatarProps { + name: string; + imageUrl?: string; + onClick?: () => void; + size?: AvatarSizeOptions; + showInPill?: boolean; + isOutlined?: boolean; +} diff --git a/datahub-web-react/src/alchemy-components/components/Avatar/utils.ts b/datahub-web-react/src/alchemy-components/components/Avatar/utils.ts new file mode 100644 index 00000000000000..46b2ee25488b89 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Avatar/utils.ts @@ -0,0 +1,64 @@ +import { colors } from '@src/alchemy-components/theme'; + +export const getNameInitials = (userName: string) => { + if (!userName) return ''; + const names = userName.trim().split(/[\s']+/); // Split by spaces or apostrophes + if (names.length === 1) { + const firstName = names[0]; + return firstName.length > 1 ? 
firstName[0]?.toUpperCase() + firstName[1]?.toUpperCase() : firstName[0]; + } + return names[0][0]?.toUpperCase() + names[names.length - 1][0]?.toUpperCase() || ''; +}; + +export function hashString(str: string) { + let hash = 0; + if (str.length === 0) { + return hash; + } + for (let i = 0; i < str.length; i++) { + const char = str.charCodeAt(i); + // eslint-disable-next-line + hash = (hash << 5) - hash + char; + // eslint-disable-next-line + hash = hash & hash; // Convert to 32bit integer + } + return Math.abs(hash); +} + +const colorMap = { + [colors.violet[500]]: { backgroundColor: colors.gray[1000], border: `1px solid ${colors.violet[1000]}` }, + [colors.blue[1000]]: { backgroundColor: colors.gray[1100], border: `1px solid ${colors.blue[200]}` }, + [colors.gray[600]]: { backgroundColor: colors.gray[1500], border: `1px solid ${colors.gray[100]}` }, +}; + +const avatarColors = Object.keys(colorMap); + +export const getAvatarColorStyles = (color) => { + return { + ...colorMap[color], + }; +}; + +export default function getAvatarColor(name: string) { + return avatarColors[hashString(name) % avatarColors.length]; +} + +export const getAvatarSizes = (size) => { + const sizeMap = { + sm: { width: '18px', height: '18px', fontSize: '8px' }, + md: { width: '24px', height: '24px', fontSize: '12px' }, + lg: { width: '28px', height: '28px', fontSize: '14px' }, + default: { width: '20px', height: '20px', fontSize: '10px' }, + }; + + return { + ...sizeMap[size], + }; +}; + +export const getAvatarNameSizes = (size) => { + if (size === 'lg') return '16px'; + if (size === 'sm') return '10px'; + if (size === 'md') return '14px'; + return '12px'; +}; diff --git a/datahub-web-react/src/alchemy-components/components/Badge/Badge.stories.tsx b/datahub-web-react/src/alchemy-components/components/Badge/Badge.stories.tsx new file mode 100644 index 00000000000000..88d499226feafd --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Badge/Badge.stories.tsx @@ -0,0 +1,102 
@@ +import React from 'react'; + +import { BADGE } from '@geometricpanda/storybook-addon-badges'; +import type { Meta, StoryObj } from '@storybook/react'; + +import { GridList } from '@components/.docs/mdx-components'; +import { Badge, badgeDefault } from './Badge'; +import pillMeta from '../Pills/Pill.stories'; +import { omitKeys } from './utils'; + +const pillMetaArgTypes = omitKeys(pillMeta.argTypes, ['label']); +const pillMetaArgs = omitKeys(pillMeta.args, ['label']); + +const meta = { + title: 'Components / Badge', + component: Badge, + + // Display Properties + parameters: { + layout: 'centered', + badges: [BADGE.EXPERIMENTAL], + docs: { + subtitle: 'A component that is used to get badge', + }, + }, + + // Component-level argTypes + argTypes: { + count: { + description: 'Count to show.', + table: { + defaultValue: { summary: `${badgeDefault.count}` }, + }, + control: { + type: 'number', + }, + }, + overflowCount: { + description: 'Max count to show.', + table: { + defaultValue: { summary: `${badgeDefault.overflowCount}` }, + }, + control: { + type: 'number', + }, + }, + showZero: { + description: 'Whether to show badge when `count` is zero.', + table: { + defaultValue: { summary: `${badgeDefault.showZero}` }, + }, + control: { + type: 'boolean', + }, + }, + ...pillMetaArgTypes, + }, + + // Define defaults + args: { + count: 100, + overflowCount: badgeDefault.overflowCount, + showZero: badgeDefault.showZero, + ...pillMetaArgs, + }, +} satisfies Meta; + +export default meta; + +type Story = StoryObj; + +export const sandbox: Story = { + tags: ['dev'], + render: (props) => , +}; + +export const sizes = () => ( + + + + + +); + +export const colors = () => ( + + + + + + + + +); + +export const withIcon = () => ( + + + + + +); diff --git a/datahub-web-react/src/alchemy-components/components/Badge/Badge.tsx b/datahub-web-react/src/alchemy-components/components/Badge/Badge.tsx new file mode 100644 index 00000000000000..1c934ef120eee8 --- /dev/null +++ 
b/datahub-web-react/src/alchemy-components/components/Badge/Badge.tsx @@ -0,0 +1,29 @@ +import { Pill } from '@components'; +import React, { useMemo } from 'react'; + +import { BadgeProps } from './types'; +import { formatBadgeValue } from './utils'; +import { BadgeContainer } from './components'; + +export const badgeDefault: BadgeProps = { + count: 0, + overflowCount: 99, + showZero: false, +}; + +export function Badge({ + count = badgeDefault.count, + overflowCount = badgeDefault.overflowCount, + showZero = badgeDefault.showZero, + ...props +}: BadgeProps) { + const label = useMemo(() => formatBadgeValue(count, overflowCount), [count, overflowCount]); + + if (!showZero && count === 0) return null; + + return ( + + + + ); +} diff --git a/datahub-web-react/src/alchemy-components/components/Badge/components.ts b/datahub-web-react/src/alchemy-components/components/Badge/components.ts new file mode 100644 index 00000000000000..a7791cd4f5ff88 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Badge/components.ts @@ -0,0 +1,6 @@ +import styled from 'styled-components'; + +export const BadgeContainer = styled.div({ + // Base root styles + display: 'inline-flex', +}); diff --git a/datahub-web-react/src/alchemy-components/components/Badge/index.ts b/datahub-web-react/src/alchemy-components/components/Badge/index.ts new file mode 100644 index 00000000000000..26a9e305c7ffd5 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Badge/index.ts @@ -0,0 +1 @@ +export { Badge } from './Badge'; diff --git a/datahub-web-react/src/alchemy-components/components/Badge/types.ts b/datahub-web-react/src/alchemy-components/components/Badge/types.ts new file mode 100644 index 00000000000000..21348f2a083419 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Badge/types.ts @@ -0,0 +1,8 @@ +import { HTMLAttributes } from 'react'; +import { PillProps } from '../Pills/types'; + +export interface BadgeProps extends HTMLAttributes, Omit 
{ + count: number; + overflowCount?: number; + showZero?: boolean; +} diff --git a/datahub-web-react/src/alchemy-components/components/Badge/utils.ts b/datahub-web-react/src/alchemy-components/components/Badge/utils.ts new file mode 100644 index 00000000000000..e59ec2af998e74 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Badge/utils.ts @@ -0,0 +1,15 @@ +export const formatBadgeValue = (value: number, overflowCount?: number): string => { + if (overflowCount === undefined || value < overflowCount) return String(value); + + return `${overflowCount}+`; +}; + +export function omitKeys(obj: T, keys: K[]): Omit { + const { ...rest } = obj; + + keys.forEach((key) => { + delete rest[key]; + }); + + return rest; +} diff --git a/datahub-web-react/src/alchemy-components/components/BarChart/BarChart.stories.tsx b/datahub-web-react/src/alchemy-components/components/BarChart/BarChart.stories.tsx new file mode 100644 index 00000000000000..1258ff398c0a7e --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/BarChart/BarChart.stories.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import { BADGE } from '@geometricpanda/storybook-addon-badges'; +import type { Meta, StoryObj } from '@storybook/react'; +import { BarChart } from './BarChart'; +import { getMockedProps } from './utils'; + +const meta = { + title: 'Charts / BarChart', + component: BarChart, + + // Display Properties + parameters: { + layout: 'centered', + badges: [BADGE.EXPERIMENTAL], + docs: { + subtitle: 'A component that is used to show BarChart', + }, + }, + + // Component-level argTypes + argTypes: { + data: { + description: 'Array of datum to show', + }, + xAccessor: { + description: 'A function to convert datum to value of X', + }, + yAccessor: { + description: 'A function to convert datum to value of Y', + }, + renderTooltipContent: { + description: 'A function to replace default rendering of toolbar', + }, + margin: { + description: 'Add margins to chart', + }, + 
leftAxisTickFormat: { + description: 'A function to format labels of left axis', + }, + leftAxisTickLabelProps: { + description: 'Props for label of left axis', + }, + bottomAxisTickFormat: { + description: 'A function to format labels of bottom axis', + }, + bottomAxisTickLabelProps: { + description: 'Props for label of bottom axis', + }, + barColor: { + description: 'Color of bar', + control: { + type: 'color', + }, + }, + barSelectedColor: { + description: 'Color of selected bar', + control: { + type: 'color', + }, + }, + gridColor: { + description: "Color of grid's lines", + control: { + type: 'color', + }, + }, + renderGradients: { + description: 'A function to render different gradients that can be used as colors', + }, + }, + + // Define defaults + args: { + ...getMockedProps(), + renderTooltipContent: (datum) => <>DATUM: {JSON.stringify(datum)}, + }, +} satisfies Meta; + +export default meta; + +type Story = StoryObj; + +export const sandbox: Story = { + tags: ['dev'], + render: (props) => ( +
+ +
+ ), +}; diff --git a/datahub-web-react/src/alchemy-components/components/BarChart/BarChart.tsx b/datahub-web-react/src/alchemy-components/components/BarChart/BarChart.tsx new file mode 100644 index 00000000000000..eb5465a1d1217b --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/BarChart/BarChart.tsx @@ -0,0 +1,152 @@ +import React, { useState } from 'react'; +import { colors } from '@src/alchemy-components/theme'; +import { TickLabelProps } from '@visx/axis'; +import { LinearGradient } from '@visx/gradient'; +import { ParentSize } from '@visx/responsive'; +import { Axis, AxisScale, BarSeries, Grid, Tooltip, XYChart } from '@visx/xychart'; +import dayjs from 'dayjs'; +import { Popover } from '../Popover'; +import { ChartWrapper, StyledBarSeries } from './components'; +import { BarChartProps } from './types'; +import { abbreviateNumber } from '../dataviz/utils'; + +const commonTickLabelProps: TickLabelProps = { + fontSize: 10, + fontFamily: 'Mulish', + fill: colors.gray[1700], +}; + +export const barChartDefault: BarChartProps = { + data: [], + xAccessor: (datum) => datum?.x, + yAccessor: (datum) => datum?.y, + leftAxisTickFormat: abbreviateNumber, + leftAxisTickLabelProps: { + ...commonTickLabelProps, + textAnchor: 'end', + }, + bottomAxisTickFormat: (value) => dayjs(value).format('DD MMM'), + bottomAxisTickLabelProps: { + ...commonTickLabelProps, + textAnchor: 'middle', + verticalAnchor: 'start', + width: 20, + }, + barColor: 'url(#bar-gradient)', + barSelectedColor: colors.violet[500], + gridColor: '#e0e0e0', + renderGradients: () => , +}; + +export function BarChart({ + data, + xAccessor = barChartDefault.xAccessor, + yAccessor = barChartDefault.yAccessor, + renderTooltipContent, + margin, + leftAxisTickFormat = barChartDefault.leftAxisTickFormat, + leftAxisTickLabelProps = barChartDefault.leftAxisTickLabelProps, + bottomAxisTickFormat = barChartDefault.bottomAxisTickFormat, + bottomAxisTickLabelProps = 
barChartDefault.bottomAxisTickLabelProps, + barColor = barChartDefault.barColor, + barSelectedColor = barChartDefault.barSelectedColor, + gridColor = barChartDefault.gridColor, + renderGradients = barChartDefault.renderGradients, +}: BarChartProps) { + const [hasSelectedBar, setHasSelectedBar] = useState(false); + + // FYI: additional margins to show left and bottom axises + const internalMargin = { + top: (margin?.top ?? 0) + 30, + right: margin?.right ?? 0, + bottom: (margin?.bottom ?? 0) + 35, + left: (margin?.left ?? 0) + 40, + }; + + const accessors = { xAccessor, yAccessor }; + + return ( + + + {({ width, height }) => { + return ( + + {renderGradients?.()} + + + + + + + + + + } + $hasSelectedItem={hasSelectedBar} + $color={barColor} + $selectedColor={barSelectedColor} + dataKey="bar-seria-0" + data={data} + radius={4} + radiusTop + onBlur={() => setHasSelectedBar(false)} + onFocus={() => setHasSelectedBar(true)} + // Internally the library doesn't emmit these events if handlers are empty + // They are requred to show/hide/move tooltip + onPointerMove={() => null} + onPointerUp={() => null} + onPointerOut={() => null} + {...accessors} + /> + + + snapTooltipToDatumX + snapTooltipToDatumY + unstyled + applyPositionStyle + renderTooltip={({ tooltipData }) => { + return ( + tooltipData?.nearestDatum && ( + + ) + ); + }} + /> + + ); + }} + + + ); +} diff --git a/datahub-web-react/src/alchemy-components/components/BarChart/components.tsx b/datahub-web-react/src/alchemy-components/components/BarChart/components.tsx new file mode 100644 index 00000000000000..aa8f1320ef21dd --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/BarChart/components.tsx @@ -0,0 +1,34 @@ +import { colors } from '@src/alchemy-components/theme'; +import { BarSeries } from '@visx/xychart'; +import styled from 'styled-components'; + +export const ChartWrapper = styled.div` + width: 100%; + height: 100%; + position: relative; +`; + +export const StyledBarSeries = 
styled(BarSeries)<{ + $hasSelectedItem?: boolean; + $color?: string; + $selectedColor?: string; +}>` + & { + cursor: pointer; + + fill: ${(props) => (props.$hasSelectedItem ? props.$selectedColor : props.$color) || colors.violet[500]}; + ${(props) => props.$hasSelectedItem && 'opacity: 0.3;'} + + :hover { + fill: ${(props) => props.$selectedColor || colors.violet[500]}; + filter: drop-shadow(0px -2px 5px rgba(33, 23, 95, 0.3)); + opacity: 1; + } + + :focus { + fill: ${(props) => props.$selectedColor || colors.violet[500]}; + outline: none; + opacity: 1; + } + } +`; diff --git a/datahub-web-react/src/alchemy-components/components/BarChart/index.ts b/datahub-web-react/src/alchemy-components/components/BarChart/index.ts new file mode 100644 index 00000000000000..fdfc3f3ab44a89 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/BarChart/index.ts @@ -0,0 +1 @@ +export { BarChart } from './BarChart'; diff --git a/datahub-web-react/src/alchemy-components/components/BarChart/types.ts b/datahub-web-react/src/alchemy-components/components/BarChart/types.ts new file mode 100644 index 00000000000000..5fd7e2e63e2411 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/BarChart/types.ts @@ -0,0 +1,18 @@ +import { TickFormatter, TickLabelProps } from '@visx/axis'; +import { Margin } from '@visx/xychart'; + +export type BarChartProps = { + data: DatumType[]; + xAccessor: (datum: DatumType) => string | number; + yAccessor: (datum: DatumType) => number; + renderTooltipContent?: (datum: DatumType) => React.ReactNode; + margin?: Margin; + leftAxisTickFormat?: TickFormatter; + leftAxisTickLabelProps?: TickLabelProps; + bottomAxisTickFormat?: TickFormatter; + bottomAxisTickLabelProps?: TickLabelProps; + barColor?: string; + barSelectedColor?: string; + gridColor?: string; + renderGradients?: () => React.ReactNode; +}; diff --git a/datahub-web-react/src/alchemy-components/components/BarChart/utils.ts 
b/datahub-web-react/src/alchemy-components/components/BarChart/utils.ts new file mode 100644 index 00000000000000..0b592da7f59b08 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/BarChart/utils.ts @@ -0,0 +1,26 @@ +import dayjs from 'dayjs'; + +export function generateMockData(length = 30, maxValue = 50_000, minValue = 0) { + return Array(length) + .fill(0) + .map((_, index) => { + const date = dayjs() + .startOf('day') + .add(index - length, 'days') + .toDate(); + const value = Math.max(Math.random() * maxValue, minValue); + + return { + x: date, + y: value, + }; + }); +} + +export function getMockedProps() { + return { + data: generateMockData(), + xAccessor: (datum) => datum.x, + yAccessor: (datum) => Math.max(datum.y, 1000), + }; +} diff --git a/datahub-web-react/src/alchemy-components/components/Button/Button.stories.tsx b/datahub-web-react/src/alchemy-components/components/Button/Button.stories.tsx new file mode 100644 index 00000000000000..e2d7c2852da519 --- /dev/null +++ b/datahub-web-react/src/alchemy-components/components/Button/Button.stories.tsx @@ -0,0 +1,203 @@ +import React from 'react'; + +import type { Meta, StoryObj } from '@storybook/react'; +import { BADGE } from '@geometricpanda/storybook-addon-badges'; + +import { GridList } from '@components/.docs/mdx-components'; +import { AVAILABLE_ICONS } from '@components'; + +import { Button, buttonDefaults } from '.'; + +// Auto Docs +const meta = { + title: 'Forms / Button', + component: Button, + + // Display Properties + parameters: { + layout: 'centered', + badges: [BADGE.STABLE, 'readyForDesignReview'], + docs: { + subtitle: + 'Buttons are used to trigger an action or event, such as submitting a form, opening a dialog, canceling an action, or performing a delete operation.', + }, + }, + + // Component-level argTypes + argTypes: { + children: { + description: 'The content of the Button.', + control: { + type: 'text', + }, + }, + variant: { + description: 'The variant of the 
Button.', + options: ['filled', 'outline', 'text'], + table: { + defaultValue: { summary: buttonDefaults.variant }, + }, + control: { + type: 'radio', + }, + }, + color: { + description: 'The color of the Button.', + options: ['violet', 'green', 'red', 'blue', 'gray'], + table: { + defaultValue: { summary: buttonDefaults.color }, + }, + control: { + type: 'select', + }, + }, + size: { + description: 'The size of the Button.', + options: ['sm', 'md', 'lg', 'xl'], + table: { + defaultValue: { summary: buttonDefaults.size }, + }, + control: { + type: 'select', + }, + }, + icon: { + description: 'The icon to display in the Button.', + type: 'string', + options: AVAILABLE_ICONS, + table: { + defaultValue: { summary: 'undefined' }, + }, + control: { + type: 'select', + }, + }, + iconPosition: { + description: 'The position of the icon in the Button.', + options: ['left', 'right'], + table: { + defaultValue: { summary: buttonDefaults.iconPosition }, + }, + control: { + type: 'radio', + }, + }, + isCircle: { + description: + 'Whether the Button should be a circle. 
If this is selected, the Button will ignore children content, so add an Icon to the Button.', + table: { + defaultValue: { summary: buttonDefaults?.isCircle?.toString() }, + }, + control: { + type: 'boolean', + }, + }, + isLoading: { + description: 'Whether the Button is in a loading state.', + table: { + defaultValue: { summary: buttonDefaults?.isLoading?.toString() }, + }, + control: { + type: 'boolean', + }, + }, + isDisabled: { + description: 'Whether the Button is disabled.', + table: { + defaultValue: { summary: buttonDefaults?.isDisabled?.toString() }, + }, + control: { + type: 'boolean', + }, + }, + isActive: { + description: 'Whether the Button is active.', + table: { + defaultValue: { summary: buttonDefaults?.isActive?.toString() }, + }, + control: { + type: 'boolean', + }, + }, + onClick: { + description: 'Function to call when the button is clicked', + table: { + defaultValue: { summary: 'undefined' }, + }, + action: 'clicked', + }, + }, + + // Define defaults + args: { + children: 'Button Content', + variant: buttonDefaults.variant, + color: buttonDefaults.color, + size: buttonDefaults.size, + icon: undefined, + iconPosition: buttonDefaults.iconPosition, + isCircle: buttonDefaults.isCircle, + isLoading: buttonDefaults.isLoading, + isDisabled: buttonDefaults.isDisabled, + isActive: buttonDefaults.isActive, + onClick: () => console.log('Button clicked'), + }, +} satisfies Meta; + +export default meta; + +// Stories + +type Story = StoryObj; + +// Basic story is what is displayed 1st in storybook & is used as the code sandbox +// Pass props to this so that it can be customized via the UI props panel +export const sandbox: Story = { + tags: ['dev'], + render: (props) => , +}; + +export const states = () => ( + + + + + + +); + +export const colors = () => ( + + + + + + + +); + +export const sizes = () => ( + + + + + + +); + +export const withIcon = () => ( + + + + +); + +export const circleShape = () => ( + +