Update of /cvsroot/csvtosql/csvtosql_jdk50/src/net/sf/csv2sql/writers
In directory sc8-pr-cvs4.sourceforge.net:/tmp/cvs-serv20341/src/net/sf/csv2sql/writers
Added Files:
JdbcExtendedBatchWriter.java
Log Message:
--- NEW FILE: JdbcExtendedBatchWriter.java ---
/*
Copyright (C) 2006 Ivan Ryndin <jj...@us...>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package net.sf.csv2sql.writers;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;

import net.sf.csv2sql.storage.exceptions.StorageException;
import net.sf.csv2sql.writers.exceptions.InvalidParameterValueException;
import net.sf.csv2sql.writers.exceptions.WriterException;
/**
 * Loads generated statements into a JDBC resource using the batch method.
 * Uses the given connection and does not close it after the final commit.
 * This is convenient when importing data from several files one after another
 * into the same database, because the connection does not have to be opened
 * and closed for each processed file.
 *
 * @author <a href="mailto:jj...@us...">Ivan Ryndin</a>
 * @version $Revision: 1.1 $, $Date: 2006/10/13, Friday :-)
 * @since 3.0
 */
public class JdbcExtendedBatchWriter extends AbstractWriter {

    /** Number of statements collected before each executeBatch() call. */
    int batchCount;

    /** Number of executed batches between commits; 0 means a single commit at the end, 1 leaves the connection's auto-commit mode untouched. */
    int commitBatchCount;

    private void init() throws InvalidParameterValueException {
        batchCount = Integer.parseInt(getWriterProperties().getProperty("batchcount", "10"));
        //optional, defaults to 0 (single commit at the end)
        commitBatchCount = Integer.parseInt(getWriterProperties().getProperty("commitbatchcount", "0"));
    }

    /**
     * @see AbstractWriter#write
     */
    public void write() throws WriterException {
        //get configuration options
        init();
        String currentSQL = null;
        int sqlIndex = 0;
        try {
            HashMap<String, Object> params = getWriterParameters();
            if (params == null) {
                throw new WriterException("Cannot get writer parameters: returned HashMap is null.");
            }
            Connection connection = (Connection) params.get("connection");
            if (connection == null) {
                throw new WriterException("No database connection supplied. Please set the \"connection\" writer parameter.");
            }
            Statement stmt = connection.createStatement();
            try {
                //start transaction (unless commitbatchcount is 1, in which case auto-commit is left as-is)
                if (commitBatchCount != 1) {
                    connection.setAutoCommit(false);
                }
                int batchSize = 0;
                int commitIdx = 0;
                for (int i = 0; i < getStorage().size(); i++) {
                    String sql = getStorage().get(i);
                    stmt.addBatch(sql);
                    sqlIndex++;
                    currentSQL = sql;
                    if (++batchSize == batchCount) {
                        batchSize = 0;
                        stmt.executeBatch();
                        //commit every commitbatchcount executed batches
                        if (commitBatchCount > 1 && commitBatchCount == ++commitIdx) {
                            connection.commit();
                            commitIdx = 0;
                        }
                        stmt.clearBatch();
                    }
                }
                //take care of the remaining statements
                stmt.executeBatch();
                stmt.clearBatch();
                //final commit
                if (commitBatchCount != 1) {
                    connection.commit();
                }
            } finally {
                stmt.close();
            }
        } catch (StorageException e) {
            throw new WriterException("cannot read data from temporary storage", e);
        } catch (SQLException e) {
            throw new WriterException("cannot write statement " + sqlIndex + " to database:\n" + e.getMessage() + "\n" + currentSQL, e);
        }
    }

    protected HashMap<String, String> requiredParameterList() {
        HashMap<String, String> hm = new HashMap<String, String>();
        hm.put("commitbatchcount", "number of executed batches between commits (0 = single commit at the end).");
        return hm;
    }

    protected HashMap<String, String> optionalParameterList() {
        HashMap<String, String> hm = new HashMap<String, String>();
        hm.put("batchcount", "number of statements per batch (default: 10).");
        return hm;
    }
}
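
For context, a rough usage sketch of the connection-reuse scenario described in the class comment: one JDBC connection is placed in the writer parameters and the same connection is reused for several files. The setter names (setWriterProperties, setWriterParameters, setStorage), the loadStatements helper, and the url/user/password/csvFiles variables are assumptions made for illustration, not API taken from this commit.

    // Hypothetical usage sketch; setters and loadStatements() are assumed, not verified against the csv2sql API.
    Connection connection = DriverManager.getConnection(url, user, password);
    try {
        for (File csv : csvFiles) {
            JdbcExtendedBatchWriter writer = new JdbcExtendedBatchWriter();

            Properties props = new Properties();
            props.setProperty("batchcount", "50");       // statements per executeBatch()
            props.setProperty("commitbatchcount", "0");  // single commit per file
            writer.setWriterProperties(props);           // assumed setter

            HashMap<String, Object> params = new HashMap<String, Object>();
            params.put("connection", connection);        // the writer does not close it
            writer.setWriterParameters(params);          // assumed setter

            writer.setStorage(loadStatements(csv));      // assumed helper that fills the temporary storage
            writer.write();
        }
    } finally {
        connection.close();                              // the caller owns the connection
    }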