alert-electrician-67912
01/30/2023, 4:12 PM
from typing import List
import datahub.emitter.mce_builder as builder
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.com.linkedin.pegasus2avro.datajob import DataJobInputOutputClass
# Construct the DataJobInputOutput aspect.
input_datasets: List[str] = [
builder.make_dataset_urn(platform="mysql", name="librarydb.member", env="PROD"),
builder.make_dataset_urn(platform="mysql", name="librarydb.checkout", env="PROD"),
]
output_datasets: List[str] = [
builder.make_dataset_urn(
platform="kafka", name="debezium.topics.librarydb.member_checkout", env="PROD"
)
]
input_data_jobs: List[str] = [
builder.make_data_job_urn(
orchestrator="airflow", flow_id="flow1", job_id="job0", cluster="PROD"
)
]
datajob_input_output = DataJobInputOutputClass(
inputDatasets=input_datasets,
outputDatasets=output_datasets,
inputDatajobs=input_data_jobs,
)
# Construct a MetadataChangeProposalWrapper object.
# NOTE: This will overwrite all of the existing lineage information associated with this job.
datajob_input_output_mcp = MetadataChangeProposalWrapper(
entityUrn=builder.make_data_job_urn(
orchestrator="airflow", flow_id="flow1", job_id="job1", cluster="PROD"
),
aspect=datajob_input_output,
)
# Create an emitter to the GMS REST API.
emitter = DatahubRestEmitter("http://localhost:8080")
# Emit metadata!
emitter.emit_mcp(datajob_input_output_mcp)
but I encounter this error:
datahub.configuration.common.OperationalError: ('Unable to emit metadata to DataHub GMS', {'exceptionClass': 'com.linkedin.restli.server.RestLiServiceException', 'message': 'INTERNAL SERVER ERROR', 'status': 500})
Server stack trace (Rest.li / Jetty request-handling frames elided):
com.linkedin.restli.server.RestLiServiceException [HTTP Status:500]: INTERNAL SERVER ERROR
	at com.linkedin.restli.internal.server.RestLiMethodInvoker.doInvoke(RestLiMethodInvoker.java:210)
	...
Caused by: java.lang.RuntimeException: Failed to validate entity URN urn:li:dataPlatform:TestPlatform
	at com.linkedin.metadata.utils.EntityKeyUtils.getUrnFromProposal(EntityKeyUtils.java:37)
	at com.linkedin.metadata.entity.AspectUtils.getAdditionalChanges(AspectUtils.java:39)
	at com.linkedin.metadata.resources.entity.AspectResource.ingestProposal(AspectResource.java:145)
	... 81 more
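For context on the trace: the root cause names urn:li:dataPlatform:TestPlatform, a platform that never appears in the snippet as posted, so the failing run was presumably built with a placeholder platform string. A minimal, hypothetical pre-flight check is to print each URN before emitting and confirm the platform segment is one GMS will accept:
import datahub.emitter.mce_builder as builder

# Hypothetical sanity check: print the URNs before emitting so the platform
# segment can be verified (e.g. "mysql" or "kafka", not "TestPlatform").
for urn in [
    builder.make_dataset_urn(platform="mysql", name="librarydb.member", env="PROD"),
    builder.make_dataset_urn(
        platform="kafka", name="debezium.topics.librarydb.member_checkout", env="PROD"
    ),
]:
    print(urn)  # e.g. urn:li:dataset:(urn:li:dataPlatform:mysql,librarydb.member,PROD)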
bright-beard-86474
01/30/2023, 10:50 PM
.tab
for tab-separated files, not tsv. Is there a workaround? The other question is related to parquet file ingestion: in my case the files do not have an extension, so how can I process them? The
Header=True
is also hardcoded; what if my files do not have headers? Thanks!
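On the extension-less parquet files: newer builds of the S3 source expose a default_extension field on each path_spec that is meant to cover exactly this case; the field name and the exact include-pattern rules should be verified against the docs for the version in use. A sketch with placeholder paths:
source:
  type: s3
  config:
    path_specs:
      - include: 's3://my-bucket/data/{table}/*'
        # Assumed field: treat files without an extension as parquet.
        default_extension: parquet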
01/31/2023, 2:55 AMfuture-analyst-98466
01/31/2023, 2:55 AMplain-france-42647
01/31/2023, 4:42 AMlimited-forest-73733
01/31/2023, 6:52 AMmicroscopic-machine-90437
01/31/2023, 9:00 AMhigh-nail-23255
01/31/2023, 9:17 AMbest-umbrella-88325
01/31/2023, 9:43 AMconnection to server at '
'"<http://XXXX.rds.amazonaws.com|XXXX.rds.amazonaws.com>" (<http://XX.XXX.XXX.XXX|XX.XXX.XXX.XXX>), port 5432 failed: FATAL: too many connections for '
'role "XXX"\n
shy-dog-84302
01/31/2023, 12:13 PM
/tmp
folder due to insufficient permissions. How can I fix this situation?
adventurous-angle-99988
01/31/2023, 3:05 PM
v0.9.6.1
using docker quick start. I'm facing issues while triggering ingestion for Looker. Getting ERROR: No matching distribution found for acryl-datahub[datahub-kafka,datahub-rest,looker]==0.9.6-1
(Full log attached)
Deployment details.
OS: Ubuntu 18.04
DATAHUB_VERSION=v0.9.6.1
ACTIONS_VERSION=v0.0.11
Would anyone be able to point me towards the right solution?
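One hedged observation on that error: the requested pin 0.9.6-1 is normalized by pip to 0.9.6.post1, which was never published, whereas the CLI release matching server v0.9.6.1 is presumably 0.9.6.1. A quick check from a shell (commands are a sketch):
# List the versions that actually exist on PyPI, then pin one of them.
pip index versions acryl-datahub
pip install 'acryl-datahub[datahub-kafka,datahub-rest,looker]==0.9.6.1'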
best-umbrella-88325
01/31/2023, 3:59 PM
table=test_tbl1
- year=2022
  - month=01
    - day=15
      - file1.parquet
table=test_tbl2
- year=2022
  - month=01
    - day=01
      - test1.parquet
    - day=02
      - test2.parquet
  - month=02
    - day=02
      - test3.parquet
But here's the question: we have 2 files in the month=01 directory for test_tbl2 and 1 file in the month=02 directory for test_tbl2, but only the contents of test1.parquet are loaded; the ingestion seems to ignore the other files in the bucket (test2.parquet and test3.parquet).
We get metadata for test1.parquet and file1.parquet. Does anyone have any idea what we are missing here? The S3 recipe being used is mentioned in 🧵
hallowed-farmer-50988
01/31/2023, 5:15 PM
path_spec.include
would be something like this: s3://bucket-name/{table}/*, but that is not a valid path for the S3 source. Has anyone come across the same thing before? Any suggestions for ingesting this?
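For what it's worth, the S3 source requires path_spec.include to end with a file pattern (*.* or *.<extension>) rather than a bare *, so if the files under each table directory are parquet, a sketch like this may get past the validation (bucket name is a placeholder):
source:
  type: s3
  config:
    path_specs:
      - include: 's3://bucket-name/{table}/*.parquet'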
adventurous-angle-99988
02/01/2023, 10:12 AM
debug logs
in the datahub-actions container? I'm trying to get more information on an ingestion that I have configured from the UI. I'm running DataHub using docker. I can only see info-level logs and above in /tmp/datahub/logs
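A possible workaround while digging, assuming shell access to the datahub-actions container: the datahub CLI accepts a global --debug flag, so the same recipe can be re-run by hand with verbose logging (the recipe path here is hypothetical):
datahub --debug ingest -c /tmp/datahub/ingest/my-recipe.yaml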
lemon-scooter-69730
02/01/2023, 12:09 PM
No JVM shared library file (libjvm.so)
'http://<redacted>:8083 is ok\n'
'[2023-02-01 11:59:38,228] ERROR {datahub.entrypoints:213} - Command failed: Failed to configure the source (kafka-connect): No JVM '
'shared library file (libjvm.so) found. Try setting up the JAVA_HOME environment variable properly.\n'
'Traceback (most recent call last):\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/run/pipeline.py", line 114, in '
'_add_init_error_context\n'
' yield\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/run/pipeline.py", line 192, in '
'__init__\n'
' self.source = source_class.create(\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/source/kafka_connect.py", line 944, '
'in create\n'
' return cls(config, ctx)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/source/kafka_connect.py", line 939, '
'in __init__\n'
' jpype.startJVM()\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/jpype/_core.py", line 184, in startJVM\n'
' jvmpath = getDefaultJVMPath()\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/jpype/_jvmfinder.py", line 74, in getDefaultJVMPath\n'
' return finder.get_jvm_path()\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/jpype/_jvmfinder.py", line 212, in get_jvm_path\n'
' raise JVMNotFoundException("No JVM shared library file ({0}) "\n'
'jpype._jvmfinder.JVMNotFoundException: No JVM shared library file (libjvm.so) found. Try setting up the JAVA_HOME environment variable '
'properly.\n'
'\n'
'The above exception was the direct cause of the following exception:\n'
'\n'
'Traceback (most recent call last):\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/entrypoints.py", line 171, in main\n'
' sys.exit(datahub(standalone_mode=False, **kwargs))\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/core.py", line 1130, in __call__\n'
' return self.main(*args, **kwargs)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/core.py", line 1055, in main\n'
' rv = self.invoke(ctx)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/core.py", line 1657, in invoke\n'
' return _process_result(sub_ctx.command.invoke(sub_ctx))\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/core.py", line 1657, in invoke\n'
' return _process_result(sub_ctx.command.invoke(sub_ctx))\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/core.py", line 1404, in invoke\n'
' return ctx.invoke(self.callback, **ctx.params)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/core.py", line 760, in invoke\n'
' return __callback(*args, **kwargs)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/click/decorators.py", line 26, in new_func\n'
' return f(get_current_context(), *args, **kwargs)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/telemetry/telemetry.py", line 350, in wrapper\n'
' raise e\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/telemetry/telemetry.py", line 302, in wrapper\n'
' res = func(*args, **kwargs)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/utilities/memory_leak_detector.py", line 95, '
'in wrapper\n'
' return func(ctx, *args, **kwargs)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/cli/ingest_cli.py", line 179, in run\n'
' pipeline = Pipeline.create(\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/run/pipeline.py", line 303, in '
'create\n'
' return cls(\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/run/pipeline.py", line 191, in '
'__init__\n'
' with _add_init_error_context(f"configure the source ({source_type})"):\n'
' File "/usr/local/lib/python3.10/contextlib.py", line 153, in __exit__\n'
' self.gen.throw(typ, value, traceback)\n'
' File "/tmp/datahub/ingest/venv-kafka-connect-0.9.6/lib/python3.10/site-packages/datahub/ingestion/run/pipeline.py", line 116, in '
'_add_init_error_context\n'
' raise PipelineInitError(f"Failed to {step}: {e}") from e\n'
'datahub.ingestion.run.pipeline.PipelineInitError: Failed to configure the source (kafka-connect): No JVM shared library file (libjvm.so) '
'found. Try setting up the JAVA_HOME environment variable properly.\n',
"2023-02-01 11:59:38.618950 [exec_id=8e4beb30-8808-40f3-aae5-3122b48f689c] INFO: Failed to execute 'datahub ingest'",
'2023-02-01 11:59:38.626712 [exec_id=8e4beb30-8808-40f3-aae5-3122b48f689c] INFO: Caught exception EXECUTING '
'task_id=8e4beb30-8808-40f3-aae5-3122b48f689c, name=RUN_INGEST, stacktrace=Traceback (most recent call last):\n'
' File "/usr/local/lib/python3.10/site-packages/acryl/executor/execution/default_executor.py", line 123, in execute_task\n'
' task_event_loop.run_until_complete(task_future)\n'
' File "/usr/local/lib/python3.10/asyncio/base_events.py", line 646, in run_until_complete\n'
' return future.result()\n'
' File "/usr/local/lib/python3.10/site-packages/acryl/executor/execution/sub_process_ingestion_task.py", line 168, in execute\n'
' raise TaskError("Failed to execute \'datahub ingest\'")\n'
"acryl.executor.execution.task.TaskError: Failed to execute 'datahub ingest'\n"]}
Execution finished with errors.
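On the root cause above: the kafka-connect source starts a JVM via jpype, so the datahub-actions container itself needs a JRE installed and JAVA_HOME pointing at it. A sketch for a Debian-based image (package name and path are assumptions; adjust to the actual base image):
apt-get update && apt-get install -y openjdk-11-jre-headless
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64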
crooked-dinner-59545
02/01/2023, 2:37 PM
Command failed: Failed to load checkpoint: Failed to read checkpoint content: Generic S3 error: Error performing get request application2/delta/_delta_log/_last_checkpoint: response error "<?xml version="1.0" encoding="UTF-8"?>
<Error><Code>InvalidAccessKeyId</Code><Message>The AWS Access Key Id you provided does not exist in our records.</Message>
We are using IAM roles for authentication, and S3 ingestion (from these same exact folders) works without a hitch, so I'm having trouble comprehending how this could be an authentication issue. The complete recipe looks like this:
source:
  type: delta-lake
  config:
    env: dev
    base_path: 's3://dataengineersandbox-public-bucket/application2/delta/'
    s3:
      aws_config:
        aws_region: eu-west-1
Has anyone experienced similar issues? Any suggestions where to look to resolve this?
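One hedged thing to try: unlike the plain S3 source (which goes through boto3), the delta-lake source reads _delta_log via the deltalake/delta-rs library, which resolves AWS credentials on its own, so an instance-profile role that works for S3 ingestion is not guaranteed to be picked up here. Supplying explicit credentials in aws_config (fields per DataHub's AwsConnectionConfig; values are placeholders) can at least isolate the problem:
source:
  type: delta-lake
  config:
    env: dev
    base_path: 's3://dataengineersandbox-public-bucket/application2/delta/'
    s3:
      aws_config:
        aws_region: eu-west-1
        aws_access_key_id: '<key-id>'          # placeholder
        aws_secret_access_key: '<secret-key>'  # placeholder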
rich-state-73859
02/01/2023, 7:12 PM
../gradlew :metadata-ingestion:installDev
, it seems feast couldn't be installed:
Collecting feast~=0.26.0
Using cached feast-0.26.0.tar.gz (3.6 MB)
Installing build dependencies: started
Installing build dependencies: finished with status 'done'
Getting requirements to build wheel: started
Getting requirements to build wheel: finished with status 'done'
Installing backend dependencies: started
Installing backend dependencies: finished with status 'done'
Preparing metadata (pyproject.toml): started
Preparing metadata (pyproject.toml): finished with status 'error'
error: subprocess-exited-with-error
× Preparing metadata (pyproject.toml) did not run successfully.
│ exit code: 1
╰─> [1 lines of output]
error in feast setup command: 'extras_require' must be a dictionary whose values are strings or lists of strings containing valid project/version requirement specifiers.
[end of output]
note: This error originates from a subprocess, and is likely not a problem with pip.
error: metadata-generation-failed
× Encountered error while generating package metadata.
╰─> See above for output.
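That extras_require complaint is raised while feast's metadata is being generated, which usually points at the setuptools version in the build environment rather than at DataHub itself. A hedged first step is refreshing the build tooling in the virtualenv and retrying:
# Sketch: whether this resolves the feast 0.26.0 metadata failure depends
# on which setuptools version feast's setup.py actually tolerates.
python -m pip install --upgrade pip setuptools wheel
../gradlew :metadata-ingestion:installDev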