When I convert a pandas.DataFrame to a Spark SQL DataFrame, it raises a FileNotFoundError. The code ran fine before, but now it does not. Here are the code and the traceback:

import pyspark.sql.types as typ
from pyspark.sql import SparkSession  # needed for SparkSession.builder

spark = SparkSession.builder.master("local").appName('yiguan').getOrCreate()
sc = spark.sparkContext

# convert the pandas DataFrame to a Spark DataFrame
device_train_spark = spark.createDataFrame(deviceid_train)
device_train_spark.show(4)

deviceid_train is a pandas.core.frame.DataFrame.
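For reference, a self-contained sketch of the same call, with a hypothetical toy frame standing in for the real deviceid_train (the column names here are made up; "age_level" is taken from the commented-out schema visible in the traceback below):

import pandas as pd
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local").appName('yiguan').getOrCreate()

# hypothetical stand-in for deviceid_train (real data differs)
deviceid_train = pd.DataFrame({
    "device_id": ["a1", "b2", "c3", "d4"],   # made-up ids
    "age_level": [1, 2, 2, 3],
})

device_train_spark = spark.createDataFrame(deviceid_train)
device_train_spark.show(4)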

FileNotFoundError                         Traceback (most recent call last)
in ()
     11 #     ("age_level", typ.IntegerType(), True)
     12 # ])
---> 13 device_train_spark = spark.createDataFrame(deviceid_train)
     14 device_train_spark.show(4)

e:\software\python36\lib\site-packages\pyspark\sql\session.py in createDataFrame(self, data, schema, samplingRatio, verifySchema)
    689             rdd, schema = self._createFromRDD(data.map(prepare), schema, samplingRatio)
    690         else:
--> 691             rdd, schema = self._createFromLocal(map(prepare, data), schema)
    692         jrdd = self._jvm.SerDeUtil.toJavaArray(rdd._to_java_object_rdd())
    693         jdf = self._jsparkSession.applySchemaToPythonRDD(jrdd.rdd(), schema.json())

e:\software\python36\lib\site-packages\pyspark\sql\session.py in _createFromLocal(self, data, schema)
    422         # convert python objects to sql data
    423         data = [schema.toInternal(row) for row in data]
--> 424         return self._sc.parallelize(data), schema
    425
    426     def _get_numpy_record_dtype(self, rec):

e:\software\python36\lib\site-packages\pyspark\context.py in parallelize(self, c, numSlices)
    494         batchSize = max(1, min(len(c) // numSlices, self._batchSize or 1024))
    495         serializer = BatchedSerializer(self._unbatched_serializer, batchSize)
--> 496         jrdd = self._serialize_to_jvm(c, numSlices, serializer)
    497         return RDD(jrdd, self, serializer)
    498

e:\software\python36\lib\site-packages\pyspark\context.py in _serialize_to_jvm(self, data, parallelism, serializer)
    503         object is written to a file and loaded through textFile().
    504         """
--> 505         tempFile = NamedTemporaryFile(delete=False, dir=self._temp_dir)
    506         try:
    507             serializer.dump_stream(data, tempFile)

e:\software\python36\lib\tempfile.py in NamedTemporaryFile(mode, buffering, encoding, newline, suffix, prefix, dir, delete)
    547         flags |= _os.O_TEMPORARY
    548
--> 549     (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
    550     try:
    551         file = _io.open(fd, mode, buffering=buffering,

e:\software\python36\lib\tempfile.py in _mkstemp_inner(dir, pre, suf, flags, output_type)
    258         file = _os.path.join(dir, pre + name + suf)
    259         try:
--> 260             fd = _os.open(file, flags, 0o600)
    261         except FileExistsError:
    262             continue    # try again

FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\CT\\AppData\\Local\\Temp\\spark-b55a0865-0f1f-415e-96d8-a826df1c43ec\\pyspark-1e1331d8-98d6-424a-ab5e-44c0bf2e7dd7\\tmpb_l2gn63'
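From the traceback, createDataFrame never reads any of my data files: it serializes the local rows through SparkContext.parallelize, which writes them to a temporary file under Spark's scratch directory (self._temp_dir), and it is that spark-*/pyspark-* folder under C:\Users\CT\AppData\Local\Temp that no longer exists. Would pointing the scratch space at a directory that is guaranteed to exist help? A sketch of what I mean, assuming spark.local.dir is the relevant setting (it is only read when the SparkContext is created, so any existing session must be stopped first; D:/spark_tmp is an arbitrary example path):

from pyspark.sql import SparkSession

# spark.local.dir controls where Spark keeps scratch files, including the
# temp file that parallelize() writes on the driver
spark = (SparkSession.builder
         .master("local")
         .appName('yiguan')
         .config("spark.local.dir", "D:/spark_tmp")   # arbitrary example path
         .getOrCreate())

device_train_spark = spark.createDataFrame(deviceid_train)
device_train_spark.show(4)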

Why has this stopped working?