bland-orange-13353
04/20/2023, 10:44 AM quiet-rain-16785
04/20/2023, 10:44 AM quiet-rain-16785
04/20/2023, 10:45 AM powerful-cat-68806
04/20/2023, 1:47 PM bland-orange-13353
04/21/2023, 4:20 AM lively-raincoat-33818
04/21/2023, 5:43 AM little-france-8891
04/21/2023, 8:54 AM
• gms partial log
2023-04-21 08:39:35,811 [main] INFO c.l.r.s.c.ResourceMethodConfigProviderImpl:88 - RestLi MethodLevel Configuration for property alwaysProjectedFields sorted by priority - first match gets applied:
2023-04-21 08:39:35,813 [main] INFO c.l.r.s.c.ResourceMethodConfigProviderImpl:88 - RestLi MethodLevel Configuration for property timeoutMs sorted by priority - first match gets applied:
*.* = 0
2023-04-21 08:39:35,814 [main] INFO c.l.r.s.c.ResourceMethodConfigProviderImpl:88 - RestLi MethodLevel Configuration for property alwaysProjectedFields sorted by priority - first match gets applied:
2023-04-21 08:39:35,907 [main] INFO c.l.g.f.k.KafkaEventConsumerFactory:104 - Event-based KafkaListenerContainerFactory built successfully. Consumers = 1
2023-04-21 08:39:35,979 [main] INFO c.l.g.f.k.SimpleKafkaConsumerFactory:48 - Simple KafkaListenerContainerFactory built successfully
2023-04-21 08:39:36,079 [main] INFO c.d.event.PlatformEventProcessor:37 - Creating Platform Event Processor
2023-04-21 08:39:37,572 [main] INFO c.l.g.f.k.s.AwsGlueSchemaRegistryFactory:40 - Creating AWS Glue registry
2023-04-21 08:39:38,437 [main] WARN c.l.metadata.entity.EntityService:807 - Unable to produce legacy MAE, entity may not have legacy Snapshot schema.
java.lang.UnsupportedOperationException: Failed to find Typeref schema associated with Config-based Entity
at com.linkedin.metadata.models.ConfigEntitySpec.getAspectTyperefSchema(ConfigEntitySpec.java:80)
at com.linkedin.metadata.entity.EntityService.toAspectUnion(EntityService.java:1511)
at com.linkedin.metadata.entity.EntityService.buildSnapshot(EntityService.java:1460)
at com.linkedin.metadata.entity.EntityService.produceMetadataAuditEvent(EntityService.java:1270)
at com.linkedin.metadata.entity.EntityService.sendEventForUpdateAspectResult(EntityService.java:803)
at com.linkedin.metadata.entity.EntityService.ingestAspectIfNotPresent(EntityService.java:767)
at com.linkedin.gms.factory.telemetry.TelemetryUtils.createClientIdIfNotPresent(TelemetryUtils.java:39)
at com.linkedin.gms.factory.telemetry.TelemetryUtils.getClientId(TelemetryUtils.java:25)
at com.linkedin.gms.factory.telemetry.DailyReport.<init>(DailyReport.java:40)
at com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory.dailyReport(ScheduledAnalyticsFactory.java:26)
at com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory$$EnhancerBySpringCGLIB$$ef3ca200.CGLIB$dailyReport$0(<generated>)
at com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory$$EnhancerBySpringCGLIB$$ef3ca200$$FastClassBySpringCGLIB$$b9d1fa7.invoke(<generated>)
at org.springframework.cglib.proxy.MethodProxy.invokeSuper(MethodProxy.java:244)
at org.springframework.context.annotation.ConfigurationClassEnhancer$BeanMethodInterceptor.intercept(ConfigurationClassEnhancer.java:331)
at com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory$$EnhancerBySpringCGLIB$$ef3ca200.dailyReport(<generated>)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:154)
at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:653)
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:638)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:955)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:920)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:583)
at org.springframework.web.context.ContextLoader.configureAndRefreshWebApplicationContext(ContextLoader.java:401)
at org.springframework.web.context.ContextLoader.initWebApplicationContext(ContextLoader.java:292)
at org.springframework.web.context.ContextLoaderListener.contextInitialized(ContextLoaderListener.java:103)
at org.eclipse.jetty.server.handler.ContextHandler.callContextInitialized(ContextHandler.java:1073)
at org.eclipse.jetty.servlet.ServletContextHandler.callContextInitialized(ServletContextHandler.java:572)
at org.eclipse.jetty.server.handler.ContextHandler.contextInitialized(ContextHandler.java:1002)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:746)
at org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:379)
at org.eclipse.jetty.webapp.WebAppContext.startWebapp(WebAppContext.java:1449)
at org.eclipse.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1414)
at org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:916)
at org.eclipse.jetty.servlet.ServletContextHandler.doStart(ServletContextHandler.java:288)
at org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:524)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:117)
at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:117)
at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
at org.eclipse.jetty.server.Server.start(Server.java:423)
at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:110)
at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
at org.eclipse.jetty.server.Server.doStart(Server.java:387)
at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
at org.eclipse.jetty.runner.Runner.run(Runner.java:519)
at org.eclipse.jetty.runner.Runner.main(Runner.java:564)
• cleanup partial log
2023-04-21 08:31:58.127 WARN 1 --- [ main] c.l.r.t.http.client.HttpClientFactory : No callback executor is provided to HttpClientFactory, using it's own call back executor.
2023-04-21 08:31:58.127 WARN 1 --- [ main] c.l.r.t.http.client.HttpClientFactory : No Compression executor is provided to HttpClientFactory, using it's own compression executor.
2023-04-21 08:31:58.127 INFO 1 --- [ main] c.l.r.t.http.client.HttpClientFactory : The service 'null' has been assigned to the ChannelPoolManager with key 'noSpecifiedNamePrefix 1138266797 ', http.protocolVersion=HTTP_1_1, usePipelineV2=false, requestTimeout=10000ms, streamingTimeout=-1ms
2023-04-21 08:31:58.937 WARN 1 --- [ main] c.d.p.configuration.ConfigProvider : Configuration config.yml file not found at location /etc/datahub/plugins/auth
2023-04-21 08:31:58.938 INFO 1 --- [ main] c.l.g.f.auth.AuthorizerChainFactory : Default DataHubAuthorizer is enabled. Appending it to the authorization chain.
2023-04-21 08:32:04.528 INFO 1 --- [ main] c.l.g.f.kafka.KafkaEventConsumerFactory : Event-based KafkaListenerContainerFactory built successfully. Consumers = 1
2023-04-21 08:32:04.624 INFO 1 --- [ main] c.l.g.f.k.s.AwsGlueSchemaRegistryFactory : Creating AWS Glue registry
2023-04-21 08:32:04.631 INFO 1 --- [ main] c.l.g.f.k.SimpleKafkaConsumerFactory : Simple KafkaListenerContainerFactory built successfully
Starting upgrade with id NoCodeDataMigrationCleanup...
Executing Step 1/4: UpgradeQualificationStep...
Found qualified upgrade candidate. Proceeding with upgrade...
Completed Step 1/4: UpgradeQualificationStep successfully.
Executing Step 2/4: DeleteLegacyAspectRowsStep...
Completed Step 2/4: DeleteLegacyAspectRowsStep successfully.
Executing Step 3/4: DeleteLegacyGraphRelationshipStep...
Failed to delete legacy data from graph: java.lang.ClassCastException: class com.linkedin.metadata.graph.elastic.ElasticSearchGraphService cannot be cast to class com.linkedin.metadata.graph.neo4j.Neo4jGraphService (com.linkedin.metadata.graph.elastic.ElasticSearchGraphService and com.linkedin.metadata.graph.neo4j.Neo4jGraphService are in unnamed module of loader org.springframework.boot.loader.LaunchedURLClassLoader @61a485d2)
Failed to delete legacy data from graph: java.lang.ClassCastException: class com.linkedin.metadata.graph.elastic.ElasticSearchGraphService cannot be cast to class com.linkedin.metadata.graph.neo4j.Neo4jGraphService (com.linkedin.metadata.graph.elastic.ElasticSearchGraphService and com.linkedin.metadata.graph.neo4j.Neo4jGraphService are in unnamed module of loader org.springframework.boot.loader.LaunchedURLClassLoader @61a485d2)
Failed Step 3/4: DeleteLegacyGraphRelationshipStep. Failed after 1 retries.
Exiting upgrade NoCodeDataMigrationCleanup with failure.
Upgrade NoCodeDataMigrationCleanup completed with result FAILED. Exiting...
2023-04-21 08:32:08.725 INFO 1 --- [ionShutdownHook] c.l.r.t.h.c.common.AbstractNettyClient : Shutdown requested
2023-04-21 08:32:08.725 INFO 1 --- [ionShutdownHook] c.l.r.t.h.c.common.AbstractNettyClient : Shutting down
better-actor-45043
04/21/2023, 9:43 AM
/entities/v1/latest endpoint? Because that only allows searching by URN, and I have some issues when I call that (see 🧵 )
elegant-salesmen-99143
04/21/2023, 5:59 PM
{
searchAcrossEntities(
input:{query: "browsePaths: *data_mart*"}
) {
start
count
total
searchResults {
entity {
type
... on Dataset {
urn
properties {name}
}
}
}
}
}
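A query like the one above can also be sent to DataHub's GraphQL API over plain HTTP. A minimal Python sketch, assuming GMS is reachable at localhost:8080/api/graphql and a personal access token is used; both are assumptions to adjust for your deployment:
import requests

# Minimal sketch: POST the searchAcrossEntities query above to DataHub's GraphQL API.
# GRAPHQL_URL and TOKEN are assumptions; adjust them for your deployment.
GRAPHQL_URL = "http://localhost:8080/api/graphql"
TOKEN = "<personal-access-token>"

query = """
{
  searchAcrossEntities(input: {query: "browsePaths: *data_mart*"}) {
    start
    count
    total
    searchResults {
      entity {
        type
        ... on Dataset { urn properties { name } }
      }
    }
  }
}
"""

resp = requests.post(
    GRAPHQL_URL,
    json={"query": query},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
print(resp.json()["data"]["searchAcrossEntities"]["total"])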
fancy-ambulance-73900
04/22/2023, 1:45 PM
--constraint https://raw.githubusercontent.com/apache/airflow/constraints-2.4.3/constraints-3.10.txt
acryl-datahub[glue, sagemaker, datahub-rest, datahub-kafka, datahub-business-glossary]==0.9.3
acryl-datahub-airflow-plugin==0.9.3
jq==1.4.1
I am unsure whether these plugins are supposed to be installed via the DAGs themselves, etc., or if this method isn't supported. Any help would be appreciated.
rich-dusk-60426
04/23/2023, 11:50 AM enough-football-92033
04/24/2023, 10:59 AM
datahub docker quickstart
on v0.10.2 using this guide https://datahubproject.io/docs/quickstart/
Any ideas? Or when will it be fixed?
enough-football-92033
04/24/2023, 11:58 AM
DOCKER_BUILDKIT=1 docker build --file ./docker/datahub-ingestion/Dockerfile --platform=linux/amd64 --rm --tag datahub-ingestion:latest .
but the build is stuck in the same step each time:
(venv) pavelborshchenko@MacBook-Pro-8 scribd-datahub (v0.10.2) $ DOCKER_BUILDKIT=1 docker build --file ./docker/datahub-ingestion/Dockerfile --platform=linux/amd64 --rm --tag datahub-ingestion:latest .
[+] Building 591.7s (9/16)
...
=> [prod-build 4/4] RUN (for attempt in 1 2 3 4 5; do ./gradlew --version && break ; echo "Failed to download gradle wrapper (attempt $attempt)" && sleep $((2<<$attempt)) ; done ) 563.6s
=> => # at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1520)
=> => # at java.base/sun.net.www.protocol.https.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:250)
=> => # at org.gradle.wrapper.Download.downloadInternal(Download.java:87)
=> => # ... 7 more
=> => # Failed to download gradle wrapper (attempt 5)
=> => # Downloading https://services.gradle.org/distributions/gradle-6.9.2-bin.zip
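The loop above keeps retrying because the Gradle wrapper cannot download its distribution, which usually points to a network or proxy problem in the build environment rather than the Dockerfile itself. A small connectivity probe (just a sketch; the URL is the one shown in the build output above):
import urllib.request

# Check whether the Gradle distribution the wrapper needs is reachable from
# this environment. The URL is taken from the build output above.
URL = "https://services.gradle.org/distributions/gradle-6.9.2-bin.zip"

req = urllib.request.Request(URL, method="HEAD")
with urllib.request.urlopen(req, timeout=30) as resp:
    print(resp.status, resp.headers.get("Content-Length"))
If this fails inside the container but works on the host, the proxy/DNS settings likely need to be passed through to the docker build.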
I'm running v0.10.2 on a local Mac M1 environment.
How can this be fixed?
proud-lamp-13920
04/24/2023, 1:22 PM
is_keyword in the sqlparse package.
In Datahub version 0.8.43, sqlparse 0.4.2 is specified in requirements, and the is_keyword function exists in that version.
However, the log shows that sqlparse 0.4.4 is being used, and the error is occurring because there is no function called is_keyword in that version.
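A quick way to confirm which sqlparse the ingestion environment actually resolves, and whether the function the error complains about is present (the sqlparse.keywords location of is_keyword is an assumption; the exact import path may differ):
import importlib.metadata

from sqlparse import keywords

# Report the installed sqlparse version and whether is_keyword is exposed.
# The keywords module is an assumed location for is_keyword in 0.4.2.
print("sqlparse version:", importlib.metadata.version("sqlparse"))
print("has is_keyword:", hasattr(keywords, "is_keyword"))
If this reports 0.4.4, pinning sqlparse==0.4.2 in the same environment should restore the function the 0.8.43 requirement expects.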
Please guide me on how to solve it.
creamy-machine-95935
04/24/2023, 9:33 PM hallowed-airline-89779
04/25/2023, 7:51 AM some-car-2349
04/25/2023, 10:11 AM
python3 -m datahub docker quickstart --arch m1 --quickstart-compose-file docker-compose-without-neo4j.quickstart.yaml
and my versions are
DataHub CLI version: 0.10.2.1
Python version: 3.9.6
Appreciate it if someone can help out.
colossal-waitress-83487
04/25/2023, 10:48 AM
Task :entity-registry:compileJava FAILED
Task :datahub-web-react:nodeSetup FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
* What went wrong: Execution failed for task ':entity-registry:compileJava'.
  error: release version 11 not supported
* Try: Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
2: Task failed with an exception.
* What went wrong: Execution failed for task ':datahub-web-react:nodeSetup'.
  java.nio.file.FileSystemException: /mnt/hgfs/tmp/datahub/datahub-master/datahub-web-react/.gradle/nodejs/node-v16.8.0-linux-x64/bin/npm: Operation not supported
My environment is CentOS 7. java --version reports openjdk 11.0.18 2023-01-17 LTS, gradle -v reports Gradle 5.0, python3 -V reports Python 3.10.10, and python -V reports Python 2.7.5. Is there anyone who can help?
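The "release version 11 not supported" error usually means the compiler Gradle invokes is older than Java 11, and a standalone Gradle 5.0 may be running on a different JVM than the one java --version reports. A small probe, assuming it is run from the datahub repo root so the bundled ./gradlew wrapper is available:
import subprocess

# Compare the JDK on PATH with the JVM/Gradle the build actually uses.
# DataHub ships its own Gradle wrapper (./gradlew); run from the repo root.
for cmd in (["java", "-version"], ["./gradlew", "--version"]):
    result = subprocess.run(cmd, capture_output=True, text=True)
    print("$", " ".join(cmd))
    # `java -version` prints to stderr, `gradlew --version` to stdout.
    print(result.stdout or result.stderr)
Building with the repo's own ./gradlew rather than a system Gradle 5.0 is generally the safer path.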
bland-orange-13353
04/25/2023, 1:33 PM wonderful-jordan-36532
04/25/2023, 1:40 PM
One or more of the attributes used in your filter contain sensitive data so your results have been automatically filtered to contain only the results you have permissions to see
Seems like there is an obfuscation mode enabled during ingestion?
handsome-football-66174
04/25/2023, 2:41 PM adamant-car-44878
04/25/2023, 6:55 PM
mutation {
updateDescription(
input: {
description:"testing description for mssql"
resourceUrn: "urn:li:dataset:(urn:li:dataset:(urn:li:dataPlatform:glue,sam.testpipelinedata,PROD)",
subResourceType:DATASET_FIELD,
subResource:"serialnumber"
})
}
but the post request is throwing an error of
{
"errors": [
{
"message": "An unknown error occurred.",
"locations": [
{
"line": 2,
"column": 5
}
],
"path": [
"updateDescription"
],
"extensions": {
"code": 500,
"type": "SERVER_ERROR",
"classification": "DataFetchingException"
}
}
],
"data": {
"updateDescription": null
},
"extensions": {}
}
I get the same error when adding tags at the column level, but when I add tags to the table it works fine.
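One thing worth ruling out is request formatting: resourceUrn must be a single well-formed dataset URN, and posting the mutation over HTTP makes quoting mistakes easier to spot. A minimal sketch (the endpoint and token are assumptions; the URN is the same dataset URN used above):
import requests

GRAPHQL_URL = "http://localhost:8080/api/graphql"  # assumption: GMS on localhost
TOKEN = "<personal-access-token>"                   # assumption: PAT-based auth

# Column-level description update; note the single, well-formed dataset URN.
mutation = """
mutation {
  updateDescription(
    input: {
      description: "testing description for mssql"
      resourceUrn: "urn:li:dataset:(urn:li:dataPlatform:glue,sam.testpipelinedata,PROD)"
      subResourceType: DATASET_FIELD
      subResource: "serialnumber"
    }
  )
}
"""

resp = requests.post(
    GRAPHQL_URL,
    json={"query": mutation},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
print(resp.status_code, resp.json())
If this still returns a 500, the GMS logs around the request time usually contain the underlying stack trace.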
flat-painter-78331
04/26/2023, 6:03 AM
datahub-system-update-job is saying Error: secret "datahub-auth-secrets" not found.
I had deployed DataHub previously and it was working fine. I deleted the deployment and am trying to re-deploy now.
Can someone tell me if there's anything I need to do or something I'm not looking at, please?
Thanks in advance!
wonderful-jordan-36532
04/26/2023, 10:15 AM flat-painter-78331
04/26/2023, 10:50 AM
airflow connections add --conn-type 'datahub_rest' 'datahub_rest_default' --conn-host 'https://scx-datahub.cxos.tech' --conn-password '<token>'
Now I'm getting this error when trying to log in to DataHub: "Failed to log in! An unexpected error occurred"
And the following errors are shown in the logs:
AuthenticatorChain:80 - Authentication chain failed to resolve a valid authentication. Errors: [(com.datahub.authentication.authenticator.DataHubSystemAuthenticator,Failed to authenticate inbound request: Authorization header is missing Authorization header.), (com.datahub.authentication.authenticator.DataHubTokenAuthenticator,Failed to authenticate inbound request: Request is missing 'Authorization' header.)]
AuthenticatorChain:80 - Authentication chain failed to resolve a valid authentication. Errors: [(com.datahub.authentication.authenticator.DataHubSystemAuthenticator,Failed to authenticate inbound request: Provided credentials do not match known system client id & client secret. Check your configuration values...), (com.datahub.authentication.authenticator.DataHubTokenAuthenticator,Failed to authenticate inbound request: Authorization header missing 'Bearer' prefix.)]
The above log is from the datahub-gms pod
! @81cce9ebh - Internal server error, for (POST) [/logIn] ->
play.api.UnexpectedException: Unexpected exception[RuntimeException: Failed to generate session token for user]
at play.api.http.HttpErrorHandlerExceptions$.throwableToUsefulException(HttpErrorHandler.scala:358)
at play.api.http.DefaultHttpErrorHandler.onServerError(HttpErrorHandler.scala:264)
at play.core.server.AkkaHttpServer$$anonfun$2.applyOrElse(AkkaHttpServer.scala:436)
at play.core.server.AkkaHttpServer$$anonfun$2.applyOrElse(AkkaHttpServer.scala:428)
at scala.concurrent.Future.$anonfun$recoverWith$1(Future.scala:417)
at scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63)
at akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)
at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48)
at java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
at java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
at java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
at java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
at java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183)
Caused by: java.lang.RuntimeException: Failed to generate session token for user
at client.AuthServiceClient.generateSessionTokenForUser(AuthServiceClient.java:101)
at controllers.AuthenticationController.logIn(AuthenticationController.java:182)
at router.Routes$$anonfun$routes$1.$anonfun$applyOrElse$17(Routes.scala:581)
at play.core.routing.HandlerInvokerFactory$$anon$8.resultCall(HandlerInvoker.scala:150)
at play.core.routing.HandlerInvokerFactory$$anon$8.resultCall(HandlerInvoker.scala:149)
at play.core.routing.HandlerInvokerFactory$JavaActionInvokerFactory$$anon$3$$anon$4$$anon$5.invocation(HandlerInvoker.scala:115)
at play.core.j.JavaAction$$anon$1.call(JavaAction.scala:119)
at play.http.DefaultActionCreator$1.call(DefaultActionCreator.java:33)
at play.core.j.JavaAction.$anonfun$apply$8(JavaAction.scala:175)
at scala.concurrent.Future$.$anonfun$apply$1(Future.scala:659)
at scala.util.Success.$anonfun$map$1(Try.scala:255)
at scala.util.Success.map(Try.scala:213)
at scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
at play.core.j.HttpExecutionContext.$anonfun$execute$1(HttpExecutionContext.scala:64)
at play.api.libs.streams.Execution$trampoline$.execute(Execution.scala:70)
at play.core.j.HttpExecutionContext.execute(HttpExecutionContext.scala:59)
at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)
at scala.concurrent.impl.Promise$KeptPromise$Kept.onComplete(Promise.scala:372)
at scala.concurrent.impl.Promise$KeptPromise$Kept.onComplete$(Promise.scala:371)
at scala.concurrent.impl.Promise$KeptPromise$Successful.onComplete(Promise.scala:379)
at scala.concurrent.impl.Promise.transform(Promise.scala:33)
at scala.concurrent.impl.Promise.transform$(Promise.scala:31)
at scala.concurrent.impl.Promise$KeptPromise$Successful.transform(Promise.scala:379)
at scala.concurrent.Future.map(Future.scala:292)
at scala.concurrent.Future.map$(Future.scala:292)
at scala.concurrent.impl.Promise$KeptPromise$Successful.map(Promise.scala:379)
at scala.concurrent.Future$.apply(Future.scala:659)
at play.core.j.JavaAction.apply(JavaAction.scala:176)
at play.api.mvc.Action.$anonfun$apply$4(Action.scala:82)
at scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)
... 14 common frames omitted
Caused by: java.lang.RuntimeException: Bad response from the Metadata Service: HTTP/1.1 401 Unauthorized ResponseEntityProxy{[Content-Type: text/html;charset=iso-8859-1,Content-Length: 567,Chunked: false]}
at client.AuthServiceClient.generateSessionTokenForUser(AuthServiceClient.java:97)
... 46 common frames omitted
The above logs are from the datahub-frontend pod
I'm running DataHub version 0.10.0.
Requesting your kind support on this...
wonderful-air-18168
04/26/2023, 11:19 AM square-ocean-28447
04/26/2023, 12:27 PM
delete_cli.delete_with_filters(dry_run=False,
force=True,
include_removed=True,
soft=False,
entity_type=self.entity_type,
platform=self.platform,
env=self.env,
search_query="*")
However, I encountered a server error: "The server encountered a temporary error and could not complete your request. Please try again in 30 seconds."
I'm guessing the query filter returned a lot of entities, but even when I narrowed down the search filter, it still hit the same issue.
Moreover, I also tried to delete the entities using the delete CLI through bash and encountered these messages:
Discrepancy in entities yielded 0 and num entities 2255. This means all entities may not have been deleted.
Can you please advise on how we can address these issues? We're currently in the middle of an upgrade and wanted to have a clean slate of entities. Thank you in advance.
brief-petabyte-56179
04/26/2023, 3:06 PM
urn:li:glossaryTerm:6ac81097-494c-4e56-ac95-54543455545 to just users within the test-group.
While this does not appear to be possible in the UI, I was able to do the following through a GraphQL mutation. From this I would expect that anyone not in the specified group (any "no role" users) would NOT be able to drill down and edit these entities. Unfortunately, they can. I have tried this exact approach with other fields as well (tags, origin, etc.) with no success. Any ideas?
Note: when I inspect the policy in Chrome's Developer Tools (Network tab), I can see the glossaryTerm filter (from the mutation below) in the policy, so it does appear to be attached somehow... it's just not filtering the data.
mutation updatePolicy
{
updatePolicy(
urn: "urn:li:dataHubPolicy:69efb392-7f49-4bfd-be9d-48845854"
input:{
type: METADATA
name: "My-Policy-Name"
state: ACTIVE
description: "restricts access to specific glossary term for users not in the data-team-testing group"
resources:{
# I also tried allResources: true and adding a second criteria field for ResourceType
allResources: false
filter:{
criteria:[
{
field:"glossaryTerm"
values: ["urn:li:glossaryTerm:6ac81097-494c-4e56-ac95-54543455545"]
condition: EQUALS
}
]
}
}
privileges: ["VIEW_ENTITY_PAGE","EDIT_ENTITY"]
actors:{
groups: "urn:li:corpGroup:test-group"
resourceOwners: false
allUsers: false
allGroups: false
}
}
)
}
adorable-river-99503
04/26/2023, 3:53 PM
File "/tmp/datahub/ingest/venv-bigquery-0.10.2/lib/python3.10/site-packages/datahub/ingestion/run/pipeline.py", line 121, in _add_init_error_context
raise PipelineInitError(f"Failed to {step}: {e}") from e
datahub.ingestion.run.pipeline.PipelineInitError: Failed to find a registered source for type bigquery: 'str' object is not callable
bland-orange-13353
04/27/2023, 5:51 AM