# Build (or reuse) a Spark session wired up for the MongoDB Spark connector.
# The connector JAR is resolved at startup via spark.jars.packages, and the
# same MongoDB connection URI is registered for both reads and writes.
_session_conf = {
    "spark.jars.packages": "org.mongodb.spark:mongo-spark-connector_2.12:10.4.0",
    "spark.mongodb.read.connection.uri": mongodb_uri,
    "spark.mongodb.write.connection.uri": mongodb_uri,
}
_builder = SparkSession.builder.appName("MongoDBToDatabricks")
for _key, _value in _session_conf.items():
    _builder = _builder.config(_key, _value)
spark = _builder.getOrCreate()
# Load the target MongoDB collection into a Spark DataFrame using the
# connector's table-provider class. readPreference=primary routes reads to
# the primary replica-set member.
_reader = (
    spark.read.format("com.mongodb.spark.sql.connector.MongoTableProvider")
    .option("database", database_name)
    .option("collection", collection_name)
    .option("readPreference", "primary")
)
df = _reader.load()

# Print the inferred schema so the structure can be inspected at run time.
df.printSchema()