Has anyone dug into the Kettle source code?
I'm running a Kettle job file from Java code, and the generated log file comes out wrong:
Run it once → the generated log file is empty.
Run it twice in a row → the log file contains the full log of the first run plus about half of the second run's log; it looks like the log gets cut off before the run finishes.
I can't figure out what's going on.
Here is the code that calls the job:
package atestkitchen;

import org.pentaho.di.core.Result;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.Log4jStringAppender;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobEntryLoader;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.kitchen.Messages;
import org.pentaho.di.trans.StepLoader;

public class TestSystemVariableJob {
    public static void main(String[] args) throws KettleException {
        System.out.println("Starting job execution");
        String jobFile = "F:/工作/bs/src/kettle/1_main/bs_import_raw_data01.kjb";
        String logFile = "F:/工作/bs/src/kettle/1_main/log/import.log";
        String logLevel = "BASIC";
        String[] arguments = { "F:/工作/bs/src/kettle/1_main/input", "09181", "" };

        TestSystemVariableJob.JobExecute(jobFile, logFile, logLevel, arguments);
        System.out.println("Starting job execution");
        TestSystemVariableJob.JobExecute(jobFile, logFile, logLevel, arguments);
        //System.out.println("Starting job execution");
        //TestSystemVariableJob.JobExecute(jobFile, logFile, logLevel, arguments);
        //System.out.println("Job finished!");

        System.out.println("Done!");
    }

    public static void JobExecute(String jobfile, String logfile, String loglevel, String[] argument) throws KettleException {

        String jobFileName = jobfile;
        String logfilename = logfile;
        String logLevel = loglevel;
        String[] arguments = argument;

        EnvUtil.environmentInit();                                     // initialize the environment and load kettle.properties
        LogWriter log = LogWriter.getInstance(logfilename, true, 3);   // open the log file in append mode
        try {
            StepLoader.init();
        } catch (KettleException e) {
            log.logError("LoadingStepsError", Messages.getString("Kitchen.Error.LoadingSteps"), e);
            exitJVM(8);
        }
        StepLoader stepLoader = StepLoader.getInstance();

        try {
            JobEntryLoader.init();
        } catch (KettleException e) {
            log.logError("LoadingJobEntriesError", Messages.getString("Kitchen.Error.LoadingJobEntries"), e);
            return;
        }

        log.setLogLevel(logLevel);
        Log4jStringAppender stringAppender = LogWriter.createStringAppender();
        log.addAppender(stringAppender);
        JobMeta jobMeta = new JobMeta(log, jobFileName, null, null);
        jobMeta.setArguments(arguments);                 // pass the command-line arguments to the job
        Job job = new Job(log, stepLoader, null, jobMeta);
        Result result = null;
        job.getJobMeta().setInternalKettleVariables(job);
        //job.start();
        //job.execute();
        result = job.execute();                          // execute the selected job
        job.waitUntilFinished();
        job.endProcessing("end", result);
    }
    private static final void exitJVM(int status) {
        // Close the open appenders...
        LogWriter.getInstance().close();
        System.exit(status);
    }
}
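
My only guess so far: log.close() is only called inside exitJVM(), which only runs on the error path, so on a normal run JobExecute() returns without ever closing the LogWriter, and whatever is still sitting in the file appender's buffer never reaches disk. That would roughly match "empty after one run, one and a half logs after two". Would closing the LogWriter in a finally block be the right fix? Below is a minimal sketch of what I mean (the class and method names are just placeholders I made up, and I'm only assuming that close() is what flushes the appender; I'm not sure this is the actual cause):

package atestkitchen;

import org.pentaho.di.core.Result;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobEntryLoader;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.trans.StepLoader;

// Sketch only: same flow as JobExecute() above, but the LogWriter is closed in a
// finally block so the file appender is flushed even when the run succeeds.
public class JobExecuteWithClose {

    public static Result run(String jobFile, String logFile, String logLevel, String[] args)
            throws KettleException {
        EnvUtil.environmentInit();          // load kettle.properties
        StepLoader.init();                  // load the step plugins
        JobEntryLoader.init();              // load the job entry plugins

        LogWriter log = LogWriter.getInstance(logFile, true, 3);   // append to the log file
        log.setLogLevel(logLevel);
        try {
            JobMeta jobMeta = new JobMeta(log, jobFile, null, null);
            jobMeta.setArguments(args);
            Job job = new Job(log, StepLoader.getInstance(), null, jobMeta);
            job.getJobMeta().setInternalKettleVariables(job);

            Result result = job.execute();  // runs the job synchronously
            job.waitUntilFinished();
            job.endProcessing("end", result);
            return result;
        } finally {
            log.close();  // same call exitJVM() makes on the error path; guessing this is what flushes the buffered log lines
        }
    }
}

If that guess is wrong, is there some other call I'm supposed to make after job.execute() so the log file gets written out completely?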