我想在spark sql中创建表我正在使用scala ide我已经添加了来自maven的依赖项我尝试了这个它它给了我下面的错误

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql._

object HiveFromSpark {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("HiveFromSpark").setMaster("local")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)

    sqlContext.sql("SET hive.metastore.warehouse.dir=hdfs://localhost:9000/user/hive/warehouse")
    sqlContext.sql("CREATE TABLE Test (code string,description string,code string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TextFile")
    sqlContext.sql("LOAD DATA INPATH 'C:\\Users\\mhattabi\\Desktop\\cars.csv' OVERWRITE INTO TABLE Test")
    val df = sqlContext.sql("SELECT * from Test")
    df.show()
  }
}

我收到了这个错误:

线程“main”中的异常java.lang.RuntimeException:[1.1]失败:``insert''期望但标识符CREATE found CREATE TABLE Test(代码字符串,描述字符串,代码字符串)ROW FORMAT DELIMITED FIELDS TEREINATED BY','存储为TextFile