我把所有要插入的数据放在Vector中,然后用prepareStatement来设置各个参数,相关代码如下:
db = new Database(dbURL, false);
conn = db.getConn();
conn.setAutoCommit(false);
pstmt = conn.prepareStatement(preSql);
// Commit every COMMIT_INTERVAL rows so the transaction never grows without
// bound (this is what triggers the "long transaction" error on large loads),
// while keeping the original semantics: a row that fails is logged and
// skipped, and the remaining rows are still inserted.
final int COMMIT_INTERVAL = 50000;
for (int j = 0; j < count; ++j)
{
    try {
        pstmt.clearBatch();
        pstmt.clearParameters();
        String[] tmp = ((String[]) setConf.elementAt(j));
        // JDBC parameter indexes are 1-based, hence i runs 1..tmp.length.
        for (int i = 1; i <= tmp.length; ++i) {
            pstmt.setString(i, tmp[i - 1]);
        }
        // NOTE(review): addBatch + executeBatch per single row gains nothing
        // over executeUpdate(); it is kept here only to preserve the per-row
        // error isolation the caller relies on.
        pstmt.addBatch();
        tmpRets = pstmt.executeBatch();
    } catch (SQLException e) {
        // Log and skip this row; continue with the rest of the data.
        Loger.log.error("some operation about pstmt throw SQLException!\n" + e);
        continue;
    }
    if (tmpRets != null)
        rets[j] = tmpRets[0];
    else
        Loger.log.debug("executeBatch[" + j + "] return null!");
    // Periodic commit keeps each transaction short.
    if ((j + 1) % COMMIT_INTERVAL == 0)
        conn.commit();
}
// Commit whatever remains after the last full interval.
conn.commit();
我需要在插入一条数据的时候出现异常会继续向下做。但发现这么多记录的时候会出现长事务的异常。应该分段来插入数据吗?比如50000条插一次。有没有其它更好的办法呢?
db = new Database(dbURL, false);
conn = db.getConn();
conn.setAutoCommit(false);
pstmt = conn.prepareStatement(preSql);
// Commit every COMMIT_INTERVAL rows so the transaction never grows without
// bound (this is what triggers the "long transaction" error on large loads),
// while keeping the original semantics: a row that fails is logged and
// skipped, and the remaining rows are still inserted.
final int COMMIT_INTERVAL = 50000;
for (int j = 0; j < count; ++j)
{
    try {
        pstmt.clearBatch();
        pstmt.clearParameters();
        String[] tmp = ((String[]) setConf.elementAt(j));
        // JDBC parameter indexes are 1-based, hence i runs 1..tmp.length.
        for (int i = 1; i <= tmp.length; ++i) {
            pstmt.setString(i, tmp[i - 1]);
        }
        // NOTE(review): addBatch + executeBatch per single row gains nothing
        // over executeUpdate(); it is kept here only to preserve the per-row
        // error isolation the caller relies on.
        pstmt.addBatch();
        tmpRets = pstmt.executeBatch();
    } catch (SQLException e) {
        // Log and skip this row; continue with the rest of the data.
        Loger.log.error("some operation about pstmt throw SQLException!\n" + e);
        continue;
    }
    if (tmpRets != null)
        rets[j] = tmpRets[0];
    else
        Loger.log.debug("executeBatch[" + j + "] return null!");
    // Periodic commit keeps each transaction short.
    if ((j + 1) % COMMIT_INTERVAL == 0)
        conn.commit();
}
// Commit whatever remains after the last full interval.
conn.commit();
我需要在插入一条数据的时候出现异常会继续向下做。但发现这么多记录的时候会出现长事务的异常。应该分段来插入数据吗?比如50000条插一次。有没有其它更好的办法呢?
* Execute the given SQL statements as JDBC batches.
*/
/**
 * Executes the given SQL statements against the JNDI-configured data source,
 * in batches, inside a single transaction.
 *
 * <p>Progress is reported through the {@code progress} / {@code recordCount}
 * fields while running; on failure the error text is stored in
 * {@code stmtStr} and the transaction is rolled back.
 *
 * @param sqlStrs statements to run; null/blank entries are skipped
 * @return true if every batch executed and the commit succeeded
 */
public boolean doStatement(String[] sqlStrs)
{
    // Statements per executeBatch() call; bounds driver-side batch memory.
    final int BATCH_SIZE = 7000;
    boolean flag = false;
    if (sqlStrs.length < 1)
        return false;
    try
    {
        this.recordCount = sqlStrs.length;
        InitialContext initial = new InitialContext();
        ds = (DataSource) initial.lookup(dbjndi);
        con = ds.getConnection();
        con.setAutoCommit(false);
        stmt = con.createStatement();
        // Number of chunks; the last chunk may be partial.
        // (The original paste had this declaration swallowed by a trailing
        // line comment, which made `count` undeclared.)
        int count = sqlStrs.length / BATCH_SIZE + 1;
        System.out.println("count:" + count);
        int end = 0;
        for (int ii = 0; ii < count; ii++)
        {
            stmt.clearBatch();
            end = (ii + 1) * BATCH_SIZE;
            if (end > sqlStrs.length)
                end = sqlStrs.length;
            for (int i = ii * BATCH_SIZE; i < end; i++)
            {
                // Skip null/blank statements (guard added by lcl); the debug
                // print now sits inside the guard so skipped entries are not
                // logged as "sql : null".
                if (sqlStrs[i] != null && sqlStrs[i].trim().length() > 0)
                {
                    stmt.addBatch(sqlStrs[i]);
                    System.out.println("in DbOperate.doStatement sql : " + sqlStrs[i]);
                }
                this.progress = i;
            }
            stmt.executeBatch();
        }
        con.commit();
        System.out.println("excute sql finished!");
        flag = true;
    }
    catch (NamingException ee)
    {
        // JNDI lookup failed; con was never obtained.
        stmtStr = "find data source error";
        flag = false;
    }
    catch (Exception ex)
    {
        stmtStr = StringTool.removeNewline(ex.toString());
        ex.printStackTrace();
        // Extract the text after the "ORA-nnnnn:" prefix for Oracle errors
        // (the prefix itself is 10 characters).
        if (ex.toString().indexOf("ORA") > 0)
            stmtStr = StringTool.removeNewline(ex.toString().substring(ex.toString().indexOf("ORA") + 10));
        flag = false;
        try
        {
            // con may be null if the failure happened before getConnection().
            if (con != null)
                con.rollback();
        }
        catch (Exception ee)
        {
            ee.printStackTrace();
            stmtStr = StringTool.removeNewline(ee.toString());
        }
    }
    finally
    {
        this.progress = 0;
        this.recordCount = 0;
        // Null-guard con: after a NamingException it is null, and an
        // unguarded setAutoCommit here threw an unhandled NPE out of the
        // finally block (only SQLException was being caught).
        if (con != null)
        {
            try
            {
                con.setAutoCommit(true);
            }
            catch (SQLException e)
            {
                e.printStackTrace();
            }
        }
        if (stmt != null)
        {
            try
            {
                stmt.close();
            }
            catch (SQLException e1)
            {
                e1.printStackTrace();
            }
        }
        if (con != null)
        {
            try
            {
                con.close();
            }
            catch (SQLException e1)
            {
                e1.printStackTrace();
            }
        }
    }
    return flag;
}