Commit

Merge branch 'master' into tests-datahub-client-avroschemaconverter
sgomezvillamor authored Dec 16, 2024
2 parents fe60852 + ecf6c8c commit dacef70
Showing 643 changed files with 38,709 additions and 3,068 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/airflow-plugin.yml
@@ -80,10 +80,10 @@ jobs:
!**/binary/**
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: airflow,airflow-${{ matrix.extra_pip_extras }}
name: pytest-airflow-${{ matrix.python-version }}-${{ matrix.extra_pip_requirements }}
10 changes: 10 additions & 0 deletions .github/workflows/build-and-test.yml
@@ -126,6 +126,16 @@ jobs:
!**/binary/**
- name: Ensure codegen is updated
uses: ./.github/actions/ensure-codegen-updated
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: ${{ matrix.timezone }}
name: ${{ matrix.command }}
verbose: true

quickstart-compose-validation:
runs-on: ubuntu-latest
8 changes: 4 additions & 4 deletions .github/workflows/dagster-plugin.yml
@@ -31,9 +31,9 @@ jobs:
DATAHUB_TELEMETRY_ENABLED: false
strategy:
matrix:
python-version: ["3.8", "3.10"]
python-version: ["3.9", "3.10"]
include:
- python-version: "3.8"
- python-version: "3.9"
extraPythonRequirement: "dagster>=1.3.3"
- python-version: "3.10"
extraPythonRequirement: "dagster>=1.3.3"
@@ -66,10 +66,10 @@ jobs:
**/junit.*.xml
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }}
name: pytest-dagster
4 changes: 2 additions & 2 deletions .github/workflows/gx-plugin.yml
@@ -70,10 +70,10 @@ jobs:
**/junit.*.xml
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: gx-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }}
name: pytest-gx
4 changes: 2 additions & 2 deletions .github/workflows/metadata-ingestion.yml
@@ -94,10 +94,10 @@ jobs:
!**/binary/**
- name: Upload coverage to Codecov
if: ${{ always() && matrix.python-version == '3.10' }}
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: pytest-${{ matrix.command }}
name: pytest-${{ matrix.command }}
9 changes: 9 additions & 0 deletions .github/workflows/metadata-io.yml
@@ -81,6 +81,15 @@ jobs:
!**/binary/**
- name: Ensure codegen is updated
uses: ./.github/actions/ensure-codegen-updated
- name: Upload coverage to Codecov
if: ${{ always()}}
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./build/coverage-reports/
fail_ci_if_error: false
name: metadata-io-test
verbose: true

event-file:
runs-on: ubuntu-latest
4 changes: 2 additions & 2 deletions .github/workflows/prefect-plugin.yml
@@ -67,10 +67,10 @@ jobs:
!**/binary/**
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: prefect,prefect-${{ matrix.extra_pip_extras }}
name: pytest-prefect-${{ matrix.python-version }}
49 changes: 49 additions & 0 deletions .github/workflows/publish-datahub-jars.yml
@@ -196,3 +196,52 @@ jobs:
echo signingKey=$SIGNING_KEY >> gradle.properties
./gradlew -PreleaseVersion=${{ needs.setup.outputs.tag }} :metadata-integration:java:custom-plugin-lib:publish
./gradlew :metadata-integration:java:custom-plugin-lib:closeAndReleaseRepository --info
publish-java8:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
needs: ["check-secret", "setup", "publish"]
if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }}
steps:
- uses: acryldata/sane-checkout-action@v3
- name: Set up JDK 17
uses: actions/setup-java@v4
with:
distribution: "zulu"
java-version: 17
- uses: gradle/actions/setup-gradle@v3
- uses: actions/setup-python@v5
with:
python-version: "3.10"
cache: "pip"
- name: checkout upstream repo
run: |
git remote add upstream https://github.com/datahub-project/datahub.git
git fetch upstream --tags --force --filter=tree:0
- name: publish datahub-client jar snapshot
if: ${{ github.event_name != 'release' }}
env:
RELEASE_USERNAME: ${{ secrets.RELEASE_USERNAME }}
RELEASE_PASSWORD: ${{ secrets.RELEASE_PASSWORD }}
SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }}
SIGNING_KEY: ${{ secrets.SIGNING_KEY }}
NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }}
NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }}
run: |
echo signingKey=$SIGNING_KEY >> gradle.properties
./gradlew :metadata-integration:java:datahub-client:printVersion -PjavaClassVersionDefault=8 -ParchiveAppendix=java8
./gradlew :metadata-integration:java:datahub-client:publish -PjavaClassVersionDefault=8 -ParchiveAppendix=java8
- name: release datahub-client jar
if: ${{ github.event_name == 'release' }}
env:
RELEASE_USERNAME: ${{ secrets.RELEASE_USERNAME }}
RELEASE_PASSWORD: ${{ secrets.RELEASE_PASSWORD }}
SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }}
SIGNING_KEY: ${{ secrets.SIGNING_KEY }}
NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }}
NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }}
run: |
echo signingKey=$SIGNING_KEY >> gradle.properties
./gradlew -PreleaseVersion=${{ needs.setup.outputs.tag }} :metadata-integration:java:datahub-client:publish -PjavaClassVersionDefault=8 -ParchiveAppendix=java8
./gradlew :metadata-integration:java:datahub-client:closeAndReleaseRepository --info -PjavaClassVersionDefault=8 -ParchiveAppendix=java8
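The new publish-java8 job republishes the datahub-client artifact with -PjavaClassVersionDefault=8 and -ParchiveAppendix=java8, i.e. a variant that, going by the property names, targets Java 8 bytecode and is published under a java8 appendix. As a quick sanity check on such an artifact, the self-contained sketch below reads a class file's major version (52 corresponds to Java 8); the file path is a placeholder, not something this workflow produces or verifies.

    import java.io.DataInputStream;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    /** Prints the class-file version of a compiled class; Java 8 bytecode has major version 52. */
    public class ClassVersionCheck {
      public static void main(String[] args) throws IOException {
        // e.g. a class extracted from the published java8 jar (hypothetical path passed on the command line)
        Path classFile = Path.of(args[0]);
        try (DataInputStream in = new DataInputStream(Files.newInputStream(classFile))) {
          if (in.readInt() != 0xCAFEBABE) {
            throw new IOException("Not a class file: " + classFile);
          }
          int minor = in.readUnsignedShort();
          int major = in.readUnsignedShort();
          System.out.printf("%s -> major=%d, minor=%d (Java 8 = major 52)%n", classFile, major, minor);
        }
      }
    }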
10 changes: 8 additions & 2 deletions build.gradle
@@ -34,6 +34,7 @@ buildscript {
// Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md
ext.pegasusVersion = '29.57.0'
ext.mavenVersion = '3.6.3'
ext.versionGradle = '8.11.1'
ext.springVersion = '6.1.13'
ext.springBootVersion = '3.2.9'
ext.springKafkaVersion = '3.1.6'
@@ -57,7 +58,7 @@ buildscript {
ext.hazelcastVersion = '5.3.6'
ext.ebeanVersion = '15.5.2'
ext.googleJavaFormatVersion = '1.18.1'
ext.openLineageVersion = '1.24.2'
ext.openLineageVersion = '1.25.0'
ext.logbackClassicJava8 = '1.2.12'

ext.docker_registry = 'acryldata'
@@ -78,7 +79,7 @@

plugins {
id 'com.gorylenko.gradle-git-properties' version '2.4.1'
id 'com.github.johnrengelman.shadow' version '8.1.1' apply false
id 'com.gradleup.shadow' version '8.3.5' apply false
id 'com.palantir.docker' version '0.35.0' apply false
id 'com.avast.gradle.docker-compose' version '0.17.6'
id "com.diffplug.spotless" version "6.23.3"
@@ -499,3 +500,8 @@ subprojects {
}
}
}

wrapper {
gradleVersion = project.versionGradle
distributionType = Wrapper.DistributionType.ALL
}
@@ -67,6 +67,7 @@
import com.linkedin.datahub.graphql.generated.EntityPath;
import com.linkedin.datahub.graphql.generated.EntityRelationship;
import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy;
import com.linkedin.datahub.graphql.generated.FacetMetadata;
import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint;
import com.linkedin.datahub.graphql.generated.FormActorAssignment;
import com.linkedin.datahub.graphql.generated.FreshnessContract;
@@ -1317,7 +1318,8 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService))
.dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService))
.dataFetcher(
"createDataProduct", new CreateDataProductResolver(this.dataProductService))
"createDataProduct",
new CreateDataProductResolver(this.dataProductService, this.entityService))
.dataFetcher(
"updateDataProduct", new UpdateDataProductResolver(this.dataProductService))
.dataFetcher(
@@ -1474,6 +1476,19 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder
"entity",
new EntityTypeResolver(
entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity())))
.type(
"FacetMetadata",
typeWiring ->
typeWiring.dataFetcher(
"entity",
new EntityTypeResolver(
entityTypes,
(env) -> {
FacetMetadata facetMetadata = env.getSource();
return facetMetadata.getEntity() != null
? facetMetadata.getEntity()
: null;
})))
.type(
"LineageRelationship",
typeWiring ->
@@ -339,6 +339,11 @@ public static boolean canManageStructuredProperties(@Nonnull QueryContext contex
context.getOperationContext(), PoliciesConfig.MANAGE_STRUCTURED_PROPERTIES_PRIVILEGE);
}

public static boolean canViewStructuredPropertiesPage(@Nonnull QueryContext context) {
return AuthUtil.isAuthorized(
context.getOperationContext(), PoliciesConfig.VIEW_STRUCTURED_PROPERTIES_PAGE_PRIVILEGE);
}

public static boolean canManageForms(@Nonnull QueryContext context) {
return AuthUtil.isAuthorized(
context.getOperationContext(), PoliciesConfig.MANAGE_DOCUMENTATION_FORMS_PRIVILEGE);
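The canViewStructuredPropertiesPage helper added above mirrors the existing canManageStructuredProperties check, this time gating on the VIEW_STRUCTURED_PROPERTIES_PAGE privilege. Below is a minimal sketch of how such a helper is typically consumed as a request guard; the QueryContext and AuthorizationException types are simplified stand-ins rather than the project's real classes, so this illustrates the pattern, not DataHub's actual wiring.

    /** Stand-in types so the sketch compiles on its own; the real classes live elsewhere in the codebase. */
    interface QueryContext {}

    class AuthorizationException extends RuntimeException {
      AuthorizationException(String message) {
        super(message);
      }
    }

    final class StructuredPropertiesPageGuard {
      // Stubbed privilege check; the real helper delegates to AuthUtil.isAuthorized(...).
      static boolean canViewStructuredPropertiesPage(QueryContext context) {
        return false;
      }

      /** Guard pattern: reject the request up front when the viewer lacks the page privilege. */
      static void requireStructuredPropertiesPageAccess(QueryContext context) {
        if (!canViewStructuredPropertiesPage(context)) {
          throw new AuthorizationException("Unauthorized to view the structured properties page.");
        }
      }

      public static void main(String[] args) {
        try {
          requireStructuredPropertiesPageAccess(new QueryContext() {});
        } catch (AuthorizationException e) {
          System.out.println("Denied: " + e.getMessage());
        }
      }
    }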
@@ -93,6 +93,10 @@ public CompletableFuture<AuthenticatedUser> get(DataFetchingEnvironment environm
BusinessAttributeAuthorizationUtils.canCreateBusinessAttribute(context));
platformPrivileges.setManageBusinessAttributes(
BusinessAttributeAuthorizationUtils.canManageBusinessAttribute(context));
platformPrivileges.setManageStructuredProperties(
AuthorizationUtils.canManageStructuredProperties(context));
platformPrivileges.setViewStructuredPropertiesPage(
AuthorizationUtils.canViewStructuredPropertiesPage(context));
// Construct and return authenticated user object.
final AuthenticatedUser authUser = new AuthenticatedUser();
authUser.setCorpUser(corpUser);
@@ -188,6 +188,7 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen
.setDataContractsEnabled(_featureFlags.isDataContractsEnabled())
.setEditableDatasetNameEnabled(_featureFlags.isEditableDatasetNameEnabled())
.setShowSeparateSiblings(_featureFlags.isShowSeparateSiblings())
.setShowManageStructuredProperties(_featureFlags.isShowManageStructuredProperties())
.build();

appConfig.setFeatureFlags(featureFlagsConfig);
@@ -10,8 +10,11 @@
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.CreateDataProductInput;
import com.linkedin.datahub.graphql.generated.DataProduct;
import com.linkedin.datahub.graphql.generated.OwnerEntityType;
import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils;
import com.linkedin.datahub.graphql.types.dataproduct.mappers.DataProductMapper;
import com.linkedin.entity.EntityResponse;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.service.DataProductService;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
@@ -24,6 +27,7 @@
public class CreateDataProductResolver implements DataFetcher<CompletableFuture<DataProduct>> {

private final DataProductService _dataProductService;
private final EntityService _entityService;

@Override
public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment)
@@ -56,6 +60,8 @@ public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environm
context.getOperationContext(),
dataProductUrn,
UrnUtils.getUrn(input.getDomainUrn()));
OwnerUtils.addCreatorAsOwner(
context, dataProductUrn.toString(), OwnerEntityType.CORP_USER, _entityService);
EntityResponse response =
_dataProductService.getDataProductEntityResponse(
context.getOperationContext(), dataProductUrn);
@@ -66,11 +66,17 @@ public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environme

final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters());

final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags());
final SearchFlags searchFlags =
input.getSearchFlags() != null
? mapInputFlags(context, input.getSearchFlags())
: new SearchFlags();

final List<String> facets =
input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null;

// do not include default facets if we're requesting any facets specifically
searchFlags.setIncludeDefaultFacets(facets == null || facets.size() <= 0);

List<String> finalEntities =
maybeResolvedView != null
? SearchUtils.intersectEntityTypes(
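The last hunk shown makes two adjustments to the aggregate resolver: it falls back to a fresh SearchFlags when the input carries none, and it includes the default facets only when the caller did not request specific ones. A self-contained sketch of that facet rule follows; the method and facet names are illustrative, not the resolver's actual API.

    import java.util.List;

    /** Facet-selection rule: include default facets only when no explicit facets were requested. */
    final class FacetSelectionSketch {
      static boolean includeDefaultFacets(List<String> requestedFacets) {
        return requestedFacets == null || requestedFacets.isEmpty();
      }

      public static void main(String[] args) {
        System.out.println(includeDefaultFacets(null));                 // true: no facets requested
        System.out.println(includeDefaultFacets(List.of()));            // true: empty list treated like none
        System.out.println(includeDefaultFacets(List.of("platform")));  // false: explicit facets suppress defaults
      }
    }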
