Is anybody else having issues with the `scrollAcrossEntities` GraphQL query?
# troubleshoot
q
Is anybody else having issues with the
scrollAcrossEntities
GraphQL query? I have tried this from the docs here: https://datahubproject.io/docs/how/search/#searching-at-scale
Copy code
{
  scrollAcrossEntities(input: { types: [DATASET], query: "*", count: 10}) {
    nextScrollId
    count
    searchResults {
      entity {
        type
        ... on Dataset {
          urn
          name
        }
      }
    }
  }
}
and the following error is returned:
Copy code
{
  "errors": [
    {
      "message": "An unknown error occurred.",
      "locations": [
        {
          "line": 2,
          "column": 3
        }
      ],
      "path": [
        "scrollAcrossEntities"
      ],
      "extensions": {
        "code": 500,
        "type": "SERVER_ERROR",
        "classification": "DataFetchingException"
      }
    }
  ],
  "data": {
    "scrollAcrossEntities": null
  },
  "extensions": {}
}
Is this something wrong on our end? Or is this an issue other teams are having? We’re currently on DataHub
0.10.2
.
plus1 1
Copy code
2023-05-16 14:01:03,881 [ForkJoinPool.commonPool-worker-43] ERROR c.l.d.g.r.s.ScrollAcrossEntitiesResolver:93 - Failed to execute search for multiple entities: entity types [DATASET], query *, filters: null, searchAfter: null, count: 10
2023-05-16 14:01:03,881 [ForkJoinPool.commonPool-worker-43] ERROR c.l.d.g.e.DataHubDataFetcherExceptionHandler:21 - Failed to execute DataFetcher
java.util.concurrent.CompletionException: java.lang.RuntimeException: Failed to execute search: entity types [DATASET], query *, filters: null, start: null, count: 10
	at java.base/java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:314)
	at java.base/java.util.concurrent.CompletableFuture.completeThrowable(CompletableFuture.java:319)
	at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1702)
	at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.exec(CompletableFuture.java:1692)
	at java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
	at java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
	at java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
	at java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
	at java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183)
Caused by: java.lang.RuntimeException: Failed to execute search: entity types [DATASET], query *, filters: null, start: null, count: 10
	at com.linkedin.datahub.graphql.resolvers.search.ScrollAcrossEntitiesResolver.lambda$get$0(ScrollAcrossEntitiesResolver.java:97)
	at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1700)
	... 6 common frames omitted
Caused by: com.hazelcast.nio.serialization.HazelcastSerializationException: Failed to serialize 'com.linkedin.metadata.search.ScrollResult'
	at com.hazelcast.internal.serialization.impl.SerializationUtil.handleSerializeException(SerializationUtil.java:129)
	at com.hazelcast.internal.serialization.impl.AbstractSerializationService.toBytes(AbstractSerializationService.java:241)
	at com.hazelcast.internal.serialization.impl.AbstractSerializationService.toBytes(AbstractSerializationService.java:217)
	at com.hazelcast.internal.serialization.impl.AbstractSerializationService.toData(AbstractSerializationService.java:202)
	at com.hazelcast.internal.serialization.impl.AbstractSerializationService.toData(AbstractSerializationService.java:157)
	at com.hazelcast.spi.impl.NodeEngineImpl.toData(NodeEngineImpl.java:390)
	at com.hazelcast.spi.impl.AbstractDistributedObject.toData(AbstractDistributedObject.java:78)
	at com.hazelcast.map.impl.proxy.MapProxyImpl.set(MapProxyImpl.java:251)
	at com.hazelcast.map.impl.proxy.MapProxyImpl.set(MapProxyImpl.java:242)
	at com.hazelcast.spring.cache.HazelcastCache.put(HazelcastCache.java:114)
	at com.linkedin.metadata.search.client.CachingEntitySearchService.getCachedScrollResults(CachingEntitySearchService.java:253)
	at com.linkedin.metadata.search.client.CachingEntitySearchService.scroll(CachingEntitySearchService.java:131)
	at com.linkedin.metadata.search.SearchService.scrollAcrossEntities(SearchService.java:113)
	at com.linkedin.metadata.client.JavaEntityClient.scrollAcrossEntities(JavaEntityClient.java:341)
	at com.linkedin.datahub.graphql.resolvers.search.ScrollAcrossEntitiesResolver.lambda$get$0(ScrollAcrossEntitiesResolver.java:79)
	... 7 common frames omitted
Caused by: com.hazelcast.nio.serialization.HazelcastSerializationException: com.hazelcast.nio.serialization.HazelcastSerializationException: The 'class java.lang.reflect.Constructor' cannot be serialized with zero configuration Compact serialization because this type is not supported yet. If you want to serialize 'class com.linkedin.metadata.search.SearchEntityArray' which uses this class in its fields, consider writing a CompactSerializer for it.
	at com.hazelcast.internal.serialization.impl.compact.ReflectiveCompactSerializer.writeFast(ReflectiveCompactSerializer.java:107)
	at com.hazelcast.internal.serialization.impl.compact.ReflectiveCompactSerializer.write(ReflectiveCompactSerializer.java:79)
	at com.hazelcast.internal.serialization.impl.compact.CompactStreamSerializer.writeObject(CompactStreamSerializer.java:153)
	at com.hazelcast.internal.serialization.impl.compact.CompactStreamSerializer.write(CompactStreamSerializer.java:116)
	at com.hazelcast.internal.serialization.impl.compact.CompactStreamSerializer.write(CompactStreamSerializer.java:109)
	at com.hazelcast.internal.serialization.impl.StreamSerializerAdapter.write(StreamSerializerAdapter.java:39)
	at com.hazelcast.internal.serialization.impl.AbstractSerializationService.toBytes(AbstractSerializationService.java:238)
	... 20 common frames omitted
Caused by: com.hazelcast.nio.serialization.HazelcastSerializationException: The 'class java.lang.reflect.Constructor' cannot be serialized with zero configuration Compact serialization because this type is not supported yet. If you want to serialize 'class com.linkedin.metadata.search.SearchEntityArray' which uses this class in its fields, consider writing a CompactSerializer for it.
	at com.hazelcast.internal.serialization.impl.compact.CompactUtil.verifyFieldClassIsCompactSerializable(CompactUtil.java:194)
	at com.hazelcast.internal.serialization.impl.compact.zeroconfig.ValueReaderWriters.readerWriterFor(ValueReaderWriters.java:214)
	at com.hazelcast.internal.serialization.impl.compact.ReflectiveCompactSerializer.createFastReadWriteCaches(ReflectiveCompactSerializer.java:299)
	at com.hazelcast.internal.serialization.impl.compact.ReflectiveCompactSerializer.write(ReflectiveCompactSerializer.java:82)
	at com.hazelcast.internal.serialization.impl.compact.CompactStreamSerializer.buildSchema(CompactStreamSerializer.java:379)
	at com.hazelcast.internal.serialization.impl.compact.CompactStreamSerializer.writeObject(CompactStreamSerializer.java:147)
	at com.hazelcast.internal.serialization.impl.compact.DefaultCompactWriter.lambda$writeCompact$0(DefaultCompactWriter.java:262)
	at com.hazelcast.internal.serialization.impl.compact.DefaultCompactWriter.writeVariableSizeField(DefaultCompactWriter.java:247)
	at com.hazelcast.internal.serialization.impl.compact.DefaultCompactWriter.writeCompact(DefaultCompactWriter.java:261)
	at com.hazelcast.internal.serialization.impl.compact.zeroconfig.ValueReaderWriters$CompactReaderWriter.write(ValueReaderWriters.java:1247)
	at com.hazelcast.internal.serialization.impl.compact.ReflectiveCompactSerializer$ReaderWriterAdapter.write(ReflectiveCompactSerializer.java:329)
	at com.hazelcast.internal.serialization.impl.compact.ReflectiveCompactSerializer.writeFast(ReflectiveCompactSerializer.java:105)
	... 26 common frames omitted
2023-05-16 14:01:03,881 [ForkJoinPool.commonPool-worker-47] ERROR c.datahub.graphql.GraphQLController:101 - Errors while executing graphQL query: "{\n  scrollAcrossEntities(input: { types: [DATASET], query: \"*\", count: 10}) {\n    nextScrollId\n    count\n    searchResults {\n      entity {\n        type\n        ... on Dataset {\n          urn\n          name\n        }\n      }\n    }\n  }\n}", result: {errors=[{message=An unknown error occurred., locations=[{line=2, column=3}], path=[scrollAcrossEntities], extensions={code=500, type=SERVER_ERROR, classification=DataFetchingException}}], data={scrollAcrossEntities=null}, extensions={tracing={version=1, startTime=2023-05-16T14:01:03.846516Z, endTime=2023-05-16T14:01:03.881777Z, duration=35262458, parsing={startOffset=222877, duration=189033}, validation={startOffset=443642, duration=209405}, execution={resolvers=[{path=[scrollAcrossEntities], parentType=Query, returnType=ScrollResults, fieldName=scrollAcrossEntities, startOffset=481495, duration=34254650}]}}}}, errors: [DataHubGraphQLError{path=[scrollAcrossEntities], code=SERVER_ERROR, locations=[SourceLocation{line=2, column=3}]}]
^ the logs from the GMS pod.
g
@echoing-airport-49548 will help you
s
Hi, was this issue fixed?