# Import only the name actually used; `from pyspark.sql import *` pulls the
# whole namespace into scope and hides where SparkSession comes from.
from pyspark.sql import SparkSession

if __name__ == "__main__":
    # Build (or reuse) a local SparkSession running 2 worker threads.
    #
    # NOTE(review): the pasted traceback ("TypeError: 'JavaPackage' object is
    # not callable" inside session.py) is raised before any of this user code
    # matters — it indicates the PySpark Python package cannot reach a
    # matching Spark JVM (version mismatch between pip-installed pyspark and
    # SPARK_HOME, or an incompatible/missing Java install). Fix the
    # environment (align pyspark/Spark versions, set JAVA_HOME to a supported
    # JDK, unset a stale SPARK_HOME); no script change can resolve it.
    spark = SparkSession.builder \
        .appName("hello Spark") \
        .master("local[2]") \
        .getOrCreate()

    # Small in-memory sample: (name, age) tuples.
    data_list = [
        ("Ravi", 28),
        ("David", 45),
        ("Abdul", 37),
    ]

    # createDataFrame infers the schema from the tuples; toDF assigns the
    # column names ("Name"/"age" capitalization kept as in the original).
    df = spark.createDataFrame(data_list).toDF("Name", "age")
    df.show()

    # Shut the session down explicitly so the backing JVM exits cleanly.
    spark.stop()
Running the script fails with the following error output:
D:\SparkPractice\venv\Scripts\python.exe D:\SparkPractice\practice.py
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
Traceback (most recent call last):
File "D:\SparkPractice\practice.py", line 3, in <module>
spark = SparkSession.builder.master("local[1]").appName("Test").getOrCreate()
File "D:\SparkPractice\venv\Lib\site-packages\pyspark\sql\session.py", line 559, in getOrCreate
session = SparkSession(sc, options=self._options)
File "D:\SparkPractice\venv\Lib\site-packages\pyspark\sql\session.py", line 635, in __init__
jSparkSessionClass.getDefaultSession().isDefined()
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^
TypeError: 'JavaPackage' object is not callable
Process finished with exit code 1