# Set up Spark in a Jupyter Notebook using the low-level RDD API.
from pyspark import SparkContext, SparkConf

# Run locally on a single JVM (no cluster manager needed for the examples).
conf = SparkConf().setMaster('local')
sc = SparkContext(conf=conf)
Launch the notebook server from a shell with `jupyter notebook`. Inside a
PySpark-enabled notebook, evaluating the name `spark` displays the active
session. The examples below first use the low-level RDD API via the
`SparkContext` (`sc`) created above.
# Release the SparkContext before switching to the DataFrame API:
# only one active context is allowed per JVM.
sc.stop()

# --- Using high-level DSL operators and the DataFrame API ---
# Create (or reuse) a SparkSession — the unified entry point for the
# high-level DataFrame API.
from pyspark.sql import SparkSession

spark = SparkSession.builder.appName('chapter2').getOrCreate()

# ... DataFrame work goes here ...

# Stop the session when finished to free cluster/JVM resources.
spark.stop()