Skip to content

Commit

Permalink
Merge pull request #1 from linkedin/master
Browse files Browse the repository at this point in the history
Sync with upstream
  • Loading branch information
leifker authored Mar 2, 2022
2 parents d17f2bf + 2a5cf3d commit cc6b2c6
Show file tree
Hide file tree
Showing 447 changed files with 23,869 additions and 3,993 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/docker-ingestion.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,10 @@ jobs:
# add git short SHA as Docker tag
tag-custom: ${{ needs.setup.outputs.tag }}
tag-custom-only: true
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
if: ${{ needs.setup.outputs.publish == 'true' }}
uses: docker/login-action@v1
Expand All @@ -81,6 +85,7 @@ jobs:
with:
context: .
file: ./docker/datahub-ingestion/Dockerfile
platforms: linux/amd64,linux/arm64
tags: ${{ steps.docker_meta.outputs.tags }}
push: ${{ needs.setup.outputs.publish == 'true' }}
build-args: |
Expand Down
5 changes: 5 additions & 0 deletions .github/workflows/documentation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,11 @@ jobs:
uses: actions/setup-java@v1
with:
java-version: 1.8
- uses: actions/setup-python@v2
with:
python-version: 3.9.9
- name: Install Python dependencies
run: ./metadata-ingestion/scripts/install_deps.sh
- name: Build Docs
run: |
./gradlew --info docs-website:build
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/metadata-ingestion.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ jobs:

metadata-ingestion-general:
runs-on: ubuntu-latest
env:
SPARK_VERSION: 3.0.3
strategy:
matrix:
python-version: ["3.6", "3.9.9"]
Expand Down Expand Up @@ -46,6 +48,8 @@ jobs:
metadata-ingestion-by-version:
runs-on: ubuntu-latest
env:
SPARK_VERSION: 3.0.3
strategy:
matrix:
python-version: ["3.6", "3.9.9"]
Expand Down
55 changes: 55 additions & 0 deletions .github/workflows/spark-smoke-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
name: spark smoke test
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
push:
branches:
- master
paths:
- "metadata_models/**"
- "metadata-integration/java/datahub-client/**"
- "metadata-integration/java/spark-lineage"
pull_request:
branches:
- master
paths:
- "metadata_models/**"
- "metadata-integration/java/datahub-client/**"
- "metadata-integration/java/spark-lineage"
release:
types: [published, edited]

jobs:
spark-smoke-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- uses: actions/setup-python@v2
with:
python-version: "3.6"
- name: Install dependencies
run: ./metadata-ingestion/scripts/install_deps.sh
- name: Gradle build
run: ./gradlew build -x check -x docs-website:build -x test
- name: Smoke test
run: ./gradlew metadata-integration:java:spark-lineage:integrationTest
- uses: actions/upload-artifact@v2
if: always()
with:
name: Test Results (smoke tests)
path: |
**/build/reports/tests/test/**
**/build/test-results/test/**
**/junit.*.xml
- name: Slack failure notification
if: failure() && github.event_name == 'push'
uses: kpritam/slack-job-status-action@v1
with:
job-status: ${{ job.status }}
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
channel: github-activities

7 changes: 6 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,10 @@ MANIFEST
**/spark-lineage/**/out.csv/
.vscode

#spark smoke test
smoke-test/spark-smoke-test/docker/workspace/
smoke-test/spark-smoke-test/__pycache__/

# cypress integration test generated files
**/cypress/videos
**/cypress/screenshots
Expand All @@ -54,4 +58,5 @@ MANIFEST
# Metadata Ingestion Generated
metadata-ingestion/generated/**

.remote*
# docs
docs/generated/
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,10 @@ HOSTED_DOCS_ONLY-->
[Town Hall](https://datahubproject.io/docs/townhalls)

---
> 📣 Next DataHub town hall meeting on Dec 17th, 9am-10am PDT ([convert to your local time](https://greenwichmeantime.com/time/to/pacific-local/))
> 📣 Next DataHub town hall meeting on Feb 25th, 9am-10am PST ([convert to your local time](https://greenwichmeantime.com/time/to/pacific-local/))
>
> - Topic Proposals: [submit here](https://docs.google.com/forms/d/1v2ynbAXjJlqY97xE_X1DAntNrXDznOFiNfryUkMPtkI/)
> - Signup to get a calendar invite: [here](https://www.addevent.com/event/WE11214120)
> - Signup to get a calendar invite: [here](https://www.addevent.com/event/lD12111844)
> - Town-hall Zoom link: [zoom.datahubproject.io](https://zoom.datahubproject.io)
> - [Meeting details](docs/townhalls.md) & [past recordings](docs/townhall-history.md)
Expand Down Expand Up @@ -107,6 +107,7 @@ Join our [slack workspace](https://slack.datahubproject.io) for discussions and

Here are the companies that have officially adopted DataHub. Please feel free to add yours to the list if we missed it.

- [Adevinta](https://www.adevinta.com/)
- [Banksalad](https://www.banksalad.com)
- [DefinedCrowd](http://www.definedcrowd.com)
- [DFDS](https://www.dfds.com/)
Expand Down
3 changes: 3 additions & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -120,15 +120,18 @@ project.ext.externalDependency = [
'springBeans': 'org.springframework:spring-beans:5.2.3.RELEASE',
'springContext': 'org.springframework:spring-context:5.2.3.RELEASE',
'springCore': 'org.springframework:spring-core:5.2.3.RELEASE',
'springDocUI': 'org.springdoc:springdoc-openapi-ui:1.6.6',
'springJdbc': 'org.springframework:spring-jdbc:5.2.3.RELEASE',
'springWeb': 'org.springframework:spring-web:5.2.3.RELEASE',
'springWebMVC': 'org.springframework:spring-webmvc:5.2.3.RELEASE',
'springBoot': 'org.springframework.boot:spring-boot:2.1.14.RELEASE',
'springBootAutoconfigure': 'org.springframework.boot:spring-boot-autoconfigure:2.1.4.RELEASE',
'springBootStarterWeb': 'org.springframework.boot:spring-boot-starter-web:2.1.4.RELEASE',
'springBootStarterJetty': 'org.springframework.boot:spring-boot-starter-jetty:2.1.4.RELEASE',
'springBootStarterCache': 'org.springframework.boot:spring-boot-starter-cache:2.1.4.RELEASE',
'springKafka': 'org.springframework.kafka:spring-kafka:2.2.14.RELEASE',
'springActuator': 'org.springframework.boot:spring-boot-starter-actuator:2.1.4.RELEASE',
'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.1.12',
'testng': 'org.testng:testng:7.3.0',
'testContainers': 'org.testcontainers:testcontainers:1.15.3',
'testContainersJunit': 'org.testcontainers:junit-jupiter:1.15.3',
Expand Down
4 changes: 2 additions & 2 deletions datahub-frontend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ cd datahub-frontend/run && ./run-local-frontend
After starting your application in one of the two ways mentioned above, you can connect to it by typing below
into your favorite web browser:
```
http://localhost:9001
http://localhost:9002
```

To be able to sign in, you need to provide your user name. The default account is `datahub`, password `datahub`.
Expand Down Expand Up @@ -69,7 +69,7 @@ see the [OIDC in React](../docs/how/auth/sso/configure-oidc-react.md) document.
Most DataHub frontend API endpoints are protected using [Play Authentication](https://www.playframework.com/documentation/2.1.0/JavaGuide4), which means it requires authentication information stored in the cookie for the request to go through. This makes debugging using curl difficult. One option is to first make a curl call against the `/authenticate` endpoint and store the authentication info in a cookie file like this

```
curl -c cookie.txt -d '{"username":"datahub", "password":"datahub"}' -H 'Content-Type: application/json' http://localhost:9001/authenticate
curl -c cookie.txt -d '{"username":"datahub", "password":"datahub"}' -H 'Content-Type: application/json' http://localhost:9002/authenticate
```

You can then make all subsequent calls using the same cookie file to pass the authentication check.
Expand Down
6 changes: 6 additions & 0 deletions datahub-frontend/conf/routes
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,12 @@ POST /api/*path co
DELETE /api/*path controllers.Application.proxy(path)
PUT /api/*path controllers.Application.proxy(path)

# Proxies API requests to the metadata service api
GET /openapi/*path controllers.Application.proxy(path)
POST /openapi/*path controllers.Application.proxy(path)
DELETE /openapi/*path controllers.Application.proxy(path)
PUT /openapi/*path controllers.Application.proxy(path)

# Map static resources from the /public folder to the /assets URL path
GET /assets/*file controllers.Assets.at(path="/public", file)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,10 @@
import com.linkedin.datahub.graphql.analytics.resolver.AnalyticsChartTypeResolver;
import com.linkedin.datahub.graphql.analytics.resolver.GetChartsResolver;
import com.linkedin.datahub.graphql.analytics.resolver.GetHighlightsResolver;
import com.linkedin.datahub.graphql.analytics.resolver.GetMetadataAnalyticsResolver;
import com.linkedin.datahub.graphql.analytics.resolver.IsAnalyticsEnabledResolver;
import com.linkedin.datahub.graphql.analytics.service.AnalyticsService;
import com.linkedin.datahub.graphql.generated.ActorFilter;
import com.linkedin.datahub.graphql.generated.AggregationMetadata;
import com.linkedin.datahub.graphql.generated.Aspect;
import com.linkedin.datahub.graphql.generated.BrowseResults;
Expand All @@ -26,8 +28,8 @@
import com.linkedin.datahub.graphql.generated.EntityRelationship;
import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy;
import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint;
import com.linkedin.datahub.graphql.generated.ListDomainsResult;
import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata;
import com.linkedin.datahub.graphql.generated.ListDomainsResult;
import com.linkedin.datahub.graphql.generated.MLFeature;
import com.linkedin.datahub.graphql.generated.MLFeatureProperties;
import com.linkedin.datahub.graphql.generated.MLFeatureTable;
Expand All @@ -45,10 +47,10 @@
import com.linkedin.datahub.graphql.resolvers.AuthenticatedResolver;
import com.linkedin.datahub.graphql.resolvers.MeResolver;
import com.linkedin.datahub.graphql.resolvers.auth.GetAccessTokenResolver;
import com.linkedin.datahub.graphql.resolvers.container.ContainerEntitiesResolver;
import com.linkedin.datahub.graphql.resolvers.browse.BrowsePathsResolver;
import com.linkedin.datahub.graphql.resolvers.browse.BrowseResolver;
import com.linkedin.datahub.graphql.resolvers.config.AppConfigResolver;
import com.linkedin.datahub.graphql.resolvers.container.ContainerEntitiesResolver;
import com.linkedin.datahub.graphql.resolvers.deprecation.UpdateDeprecationResolver;
import com.linkedin.datahub.graphql.resolvers.domain.CreateDomainResolver;
import com.linkedin.datahub.graphql.resolvers.domain.DomainEntitiesResolver;
Expand All @@ -61,18 +63,18 @@
import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver;
import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver;
import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver;
import com.linkedin.datahub.graphql.resolvers.group.UpdateUserStatusResolver;
import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.GetIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.IngestionSourceExecutionRequestsResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.secret.CreateSecretResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.DeleteIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.secret.DeleteSecretResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.GetIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.secret.GetSecretValuesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.IngestionSourceExecutionRequestsResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.secret.ListSecretsResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.DeleteIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.UpsertIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.load.AspectResolver;
import com.linkedin.datahub.graphql.resolvers.load.EntityRelationshipsResultResolver;
Expand Down Expand Up @@ -116,10 +118,10 @@
import com.linkedin.datahub.graphql.types.SearchableEntityType;
import com.linkedin.datahub.graphql.types.aspect.AspectType;
import com.linkedin.datahub.graphql.types.chart.ChartType;
import com.linkedin.datahub.graphql.types.container.ContainerType;
import com.linkedin.datahub.graphql.types.corpuser.CorpUserType;
import com.linkedin.datahub.graphql.types.common.mappers.OperationMapper;
import com.linkedin.datahub.graphql.types.container.ContainerType;
import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupType;
import com.linkedin.datahub.graphql.types.corpuser.CorpUserType;
import com.linkedin.datahub.graphql.types.dashboard.DashboardType;
import com.linkedin.datahub.graphql.types.dataflow.DataFlowType;
import com.linkedin.datahub.graphql.types.datajob.DataJobType;
Expand Down Expand Up @@ -164,8 +166,15 @@
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderOptions;

import static com.linkedin.datahub.graphql.Constants.*;
import static graphql.Scalars.*;
import static com.linkedin.datahub.graphql.Constants.ANALYTICS_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.APP_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.AUTH_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.GMS_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.INGESTION_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.RECOMMENDATIONS_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.SEARCH_SCHEMA_FILE;
import static com.linkedin.datahub.graphql.Constants.URN_FIELD_NAME;
import static graphql.Scalars.GraphQLLong;

/**
 * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the GMS graph.
Expand Down Expand Up @@ -447,6 +456,7 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) {
configureContainerResolvers(builder);
configureGlossaryTermResolvers(builder);
configureDomainResolvers(builder);
configurePolicyResolvers(builder);
}

public GraphQLEngine.Builder builder() {
Expand All @@ -469,9 +479,10 @@ private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) {
builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled)))
.type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver()));
if (isAnalyticsEnabled) {
builder.type("Query",
typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts", new GetChartsResolver(analyticsService))
.dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)));
builder.type("Query", typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts",
new GetChartsResolver(analyticsService, entityClient))
.dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService))
.dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient)));
}
}

Expand Down Expand Up @@ -595,7 +606,8 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("updateDashboard", new AuthenticatedResolver<>(new MutableTypeResolver<>(dashboardType)))
.dataFetcher("updateDataJob", new AuthenticatedResolver<>(new MutableTypeResolver<>(dataJobType)))
.dataFetcher("updateDataFlow", new AuthenticatedResolver<>(new MutableTypeResolver<>(dataFlowType)))
.dataFetcher("updateCorpUserProperties", new AuthenticatedResolver<>(new MutableTypeResolver<>(corpUserType)))
.dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType))
.dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType))
.dataFetcher("addTag", new AuthenticatedResolver<>(new AddTagResolver(entityService)))
.dataFetcher("removeTag", new AuthenticatedResolver<>(new RemoveTagResolver(entityService)))
.dataFetcher("addTerm", new AuthenticatedResolver<>(new AddTermResolver(entityService)))
Expand Down Expand Up @@ -1102,6 +1114,24 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) {
);
}

private void configurePolicyResolvers(final RuntimeWiring.Builder builder) {
// Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type.
builder.type("ActorFilter", typeWiring -> typeWiring
.dataFetcher("resolvedUsers", new LoadableTypeBatchResolver<>(corpUserType,
(env) -> {
final ActorFilter filter = env.getSource();
return filter.getUsers();
}
))
.dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType,
(env) -> {
final ActorFilter filter = env.getSource();
return filter.getGroups();
}
))
);
}

private <T> DataLoader<String, DataFetcherResult<T>> createDataLoader(final LoadableType<T> graphType, final QueryContext queryContext) {
BatchLoaderContextProvider contextProvider = () -> queryContext;
DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider);
Expand Down
Loading

0 comments on commit cc6b2c6

Please sign in to comment.