3.1 Getting the top N rows of each group using a Window function

To pick the top N rows per group, rank the rows within each partition with a window function, keep only the rows whose rank is at most N, and drop the helper column. For example, to get the three highest-paid employees in each department:
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{rank, desc}
import spark.implicits._ // enables the $"..." column syntax

df.withColumn("rank", rank().over(Window.partitionBy("Dept_id").orderBy($"salary".desc)))
  .filter($"rank" <= 3)
  .drop("rank")
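To see the snippet end to end, here is a minimal, self-contained sketch. The DataFrame, the employee names, and the salary figures are invented for illustration; only the Dept_id and salary column names come from the snippet above.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{rank, desc}

object TopThreePerDept extends App {
  val spark = SparkSession.builder()
    .appName("TopThreePerDept")
    .master("local[*]")
    .getOrCreate()
  import spark.implicits._

  // Hypothetical sample data for illustration only
  val df = Seq(
    ("Sales", "Alice", 5000),
    ("Sales", "Bob",   4800),
    ("Sales", "Carol", 4500),
    ("Sales", "Dan",   4200),
    ("IT",    "Eve",   6000),
    ("IT",    "Frank", 5500)
  ).toDF("Dept_id", "name", "salary")

  df.withColumn("rank", rank().over(Window.partitionBy("Dept_id").orderBy($"salary".desc)))
    .filter($"rank" <= 3)
    .drop("rank")
    .show() // Sales keeps Alice, Bob, Carol; Dan (rank 4) is dropped

  spark.stop()
}

The same pattern generalizes to any grouping column and an arbitrary N: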
val n: Int = ??? // number of rows to keep per group

// Window definition: one partition per user, highest rating first
val w = Window.partitionBy($"user").orderBy(desc("rating"))

// Rank each row within its window and keep the top n
df.withColumn("rank", rank().over(w)).where($"rank" <= n)