Below is an example of reading data from Hive and storing it in HDFS:
import org.apache.spark.sql.SparkSession

object SparkSQL {
  def main(args: Array[String]): Unit = {
    // Build a SparkSession with Hive support so spark.sql() can query Hive tables.
    val spark = SparkSession
      .builder()
      .master("local")
      .appName("Spark SQL basic example")
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    // Read the Hive table into a DataFrame.
    val emp1 = spark.sql("SELECT * FROM <table_name>")

    // Convert to an RDD, merge into a single partition, and write to HDFS as plain text.
    emp1.rdd.coalesce(1).saveAsTextFile("<hdfs_path>")
  }
}
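
As an alternative, the DataFrame writer API can store the result directly without dropping down to an RDD. This is a minimal sketch using the same <hdfs_path> placeholder; the output format (CSV here) is just an illustrative choice:

    // Write the DataFrame directly with the DataFrameWriter API.
    // coalesce(1) forces a single output file, as in the RDD version above.
    emp1.coalesce(1)
      .write
      .mode("overwrite")          // replace any existing output at the path
      .option("header", "true")   // include column names in the output
      .csv("<hdfs_path>")

Writing through the DataFrameWriter keeps the column structure (and lets you pick formats such as CSV, Parquet, or ORC), whereas saveAsTextFile on the RDD just calls toString on each Row.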