When I run this code it throws a NullPointerException. Digging deeper, I found that it fails to create the object in the kie session.


Code:

package com.rsrit.cob.drools;

import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.io.Serializable;

import org.kie.api.runtime.StatelessKieSession;
import org.kie.internal.command.CommandFactory;

import com.rsrit.cob.Variables.ClaimInfo;
import com.rsrit.cob.drools.KieSessionFactory;

@SuppressWarnings("serial")
public class RulesExecutor implements Serializable{
    public static BufferedWriter log = new BufferedWriter(new OutputStreamWriter(System.out)); 
    @SuppressWarnings("unchecked")
    public ClaimInfo evalRules(ClaimInfo claimObj, String ruleFileLoc){
        if (ruleFileLoc != null){
            StatelessKieSession ksession = KieSessionFactory.getKieSession(ruleFileLoc);
            ksession.execute(CommandFactory.newInsert(claimObj));
        } else {
            try{
                log.write("Rules File Location is Invalid or Null\n");
                log.flush();
            } catch(Exception e){
                e.printStackTrace();
            }
        }
        return claimObj;
    }
    /*public static String ruleFileConnection(String _ruleFileLoc){

        try{
            String rulesPath = _ruleFileLoc;
            ClassLoader loader =Thread.currentThread().getContextClassLoader();
            Properties props = new Properties();
            try(InputStream rulesLocStream = loader.getResourceAsStream(rulesPath)){
                props.load(rulesLocStream);
            }
            return props.getProperty("ruleFileLoc");

        } catch (FileNotFoundException ex) {
            return null;
        } catch (IOException ex) {
            return null;
        }
    }*/


}
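
For context, KieSessionFactory itself is not shown above. A minimal sketch of what such a factory could look like (an assumption on my part, using the public KieHelper API and reading the DRL from the local filesystem, not the actual class from the jar):

package com.rsrit.cob.drools;

import org.kie.api.io.ResourceType;
import org.kie.api.runtime.StatelessKieSession;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.utils.KieHelper;

// Hypothetical sketch -- the real KieSessionFactory is not part of this post.
public class KieSessionFactory {
    public static StatelessKieSession getKieSession(String ruleFileLoc) {
        // Read the DRL from the local filesystem; on YARN this runs on the
        // executor, so the file must be reachable from every worker node.
        KieHelper helper = new KieHelper();
        helper.addResource(ResourceFactory.newFileResource(ruleFileLoc), ResourceType.DRL);
        // build() compiles the rules into a KieBase and throws on compile
        // errors; a fresh stateless session is created from it per call.
        return helper.build().newStatelessKieSession();
    }
}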


Command: spark-submit --class com.rsrit.cob.application.RecoverableClaimsMain molinaHealthcare-yarn.jar ClaimsCompleteInfo.txt CompleteMembersInfo.txt rules.drl
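
Note that rules.drl is passed here as a plain program argument and resolved as a local path inside the tasks. If the job runs on YARN executors, one way to make the file visible on every node (an assumption about this setup, not something shown above) would be to ship it with --files and resolve the shipped copy through SparkFiles:

spark-submit --class com.rsrit.cob.application.RecoverableClaimsMain \
  --files rules.drl \
  molinaHealthcare-yarn.jar ClaimsCompleteInfo.txt CompleteMembersInfo.txt rules.drl

// inside the job, resolve the shipped copy on the executor:
String ruleFileLoc = org.apache.spark.SparkFiles.get("rules.drl");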


Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1753)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1741)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1740)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1740)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:871)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:871)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:871)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1974)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1923)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1912)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:682)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2055)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2074)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2099)
    at org.apache.spark.rdd.RDD.count(RDD.scala:1162)
    at org.apache.spark.api.java.JavaRDDLike$class.count(JavaRDDLike.scala:455)
    at org.apache.spark.api.java.AbstractJavaRDDLike.count(JavaRDDLike.scala:45)
    at com.rsrit.cob.application.RecoverableClaimsMain.main(RecoverableClaimsMain.java:169)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.NullPointerException
    at com.rsrit.cob.drools.RulesExecutor.evalRules(RulesExecutor.java:20)
    at com.rsrit.cob.application.RecoverableClaimsMain.lambda$main$bc755edb$1(RecoverableClaimsMain.java:165)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:462)
    at org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1836)
    at org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:1162)
    at org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:1162)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2074)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2074)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
18/08/08 21:04:06 INFO SparkContext: Invoking stop() from shutdown hook
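
If the source posted above matches the jar that was submitted, RulesExecutor.java:20 is the ksession.execute(...) line, which suggests KieSessionFactory.getKieSession(ruleFileLoc) returned null. A defensive check (a sketch, not the original code) would surface that failure explicitly instead of as a bare NPE inside the Spark task:

StatelessKieSession ksession = KieSessionFactory.getKieSession(ruleFileLoc);
if (ksession == null) {
    // Fail fast with the offending path instead of a NullPointerException.
    throw new IllegalStateException("getKieSession returned null for: " + ruleFileLoc);
}
ksession.execute(CommandFactory.newInsert(claimObj));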