# getting-started
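The snippet below starts a local SparkSession with the DataHub Spark lineage agent (`io.acryl:datahub-spark-lineage:0.8.43`) registered as an extra listener, pointing it at a DataHub GMS REST endpoint and authenticating with a personal access token.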
```python
from pyspark.sql import SparkSession
from pyspark import SparkConf

# Earlier attempt, kept for reference: the listener was pointed at the
# DataHub frontend (SSO) URL instead of the GMS REST endpoint.
# spark = SparkSession.builder \
#     .master("local") \
#     .appName("spark-lineage") \
#     .config("spark.jars.packages", "io.acryl:datahub-spark-lineage:0.8.43") \
#     .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") \
#     .config("spark.datahub.rest.server", "https://datahub-frontend-sso.meeshotest.in:9002") \
#     .enableHiveSupport() \
#     .getOrCreate()
#
# df = spark.createDataFrame([(1, "value1"), (2, "value2")], ["id", "value"])
# df.write.mode("overwrite").saveAsTable("spark_model")
# spark.stop()
#
# conf = SparkConf().set("spark.jars", "/Users/nishchayagarwal/documents/datahub-spark-lineage-0.8.43.jar")

# Local session with the DataHub lineage listener registered.
# spark.datahub.rest.server must point at the GMS REST endpoint (port 8080),
# and spark.datahub.rest.token is a DataHub personal access token.
spark = SparkSession.builder \
    .master("local[1]") \
    .appName("Main") \
    .config("spark.sql.warehouse.dir", "/tmp/data") \
    .config("spark.jars.packages", "io.acryl:datahub-spark-lineage:0.8.43") \
    .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") \
    .config("spark.datahub.rest.server", "http://172.31.18.133:8080") \
    .config("spark.datahub.metadata.dataset.platformInstance", "dataset") \
    .config("spark.datahub.rest.token", "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6Im1vaGl0LmdhcmciLCJ0eXBlIjoiUEVSU09OQUwiLCJ2ZXJzaW9uIjoiMiIsImV4cCI6MTY2MzkxOTkzOSwianRpIjoiMjk2Y2E3MGUtMjA2My00ODM0LTkwNmYtMGIzZjRjMTVlY2RhIiwic3ViIjoibW9oaXQuZ2FyZyIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9.tr2mu_FueVfHKz9Ze2BWmN4dqhOrTwR1t_WrfxspOmY") \
    .enableHiveSupport() \
    .getOrCreate()

# Note: addPyFile only distributes the file to Python workers; it does not put
# a JAR on the JVM classpath. The listener JAR is already resolved through
# spark.jars.packages above (a local JAR could instead be passed via spark.jars,
# as in the commented-out SparkConf line).
spark.sparkContext.addPyFile("/Users/nishchayagarwal/documents/datahub-spark-lineage-0.8.43.jar")
```
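
To check that lineage actually flows, a minimal smoke test is to perform a table write and then stop the session; this mirrors the commented-out example above (the table name `spark_model` is only illustrative):

```python
# Any table write is the kind of action the DatahubSparkListener reports as
# lineage; this mirrors the commented-out example above.
df = spark.createDataFrame([(1, "value1"), (2, "value2")], ["id", "value"])
df.write.mode("overwrite").saveAsTable("spark_model")

# Stop the session so the listener can emit any pending lineage events.
spark.stop()
```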