diff --git a/samples/codes/entity/User.java b/samples/codes/entity/User.java index 1509c8bb..da2510dc 100644 --- a/samples/codes/entity/User.java +++ b/samples/codes/entity/User.java @@ -1,28 +1,29 @@ package codes.entity; -import jakarta.persistence.Column; -import javax.persistence.Id; - -import com.landawn.abacus.annotation.JsonXmlConfig; -import com.landawn.abacus.annotation.NonUpdatable; -import com.landawn.abacus.annotation.ReadOnly; -import com.landawn.abacus.annotation.Table; -import com.landawn.abacus.annotation.Type; -import com.landawn.abacus.annotation.Type.EnumBy; -import com.landawn.abacus.util.NamingPolicy; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.experimental.Accessors; - -@Builder -@Data -@NoArgsConstructor -@AllArgsConstructor -@Accessors(chain = true) -@JsonXmlConfig(namingPolicy = NamingPolicy.UPPER_CASE_WITH_UNDERSCORE, ignoredFields = { "id", "create_time" }, dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'", timeZone = "PDT", numberFormat = "#.###", enumerated = EnumBy.ORDINAL) +import jakarta.persistence.Column; +import javax.persistence.Id; + +import com.landawn.abacus.annotation.JsonXmlConfig; +import com.landawn.abacus.annotation.NonUpdatable; +import com.landawn.abacus.annotation.ReadOnly; +import com.landawn.abacus.annotation.Table; +import com.landawn.abacus.annotation.Type; +import com.landawn.abacus.annotation.Type.EnumBy; +import com.landawn.abacus.util.NamingPolicy; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.experimental.Accessors; + +@Builder +@Data +@NoArgsConstructor +@AllArgsConstructor +@Accessors(chain = true) +@JsonXmlConfig(namingPolicy = NamingPolicy.UPPER_CASE_WITH_UNDERSCORE, ignoredFields = { "id", + "create_time" }, dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'", timeZone = "PDT", numberFormat = "#.###", enumerated = EnumBy.ORDINAL) @Table("user1") public class User { diff --git a/samples/codes/entity/UserQueryAllResult.java b/samples/codes/entity/UserQueryAllResult.java index 3fe62009..d79b2072 100644 --- a/samples/codes/entity/UserQueryAllResult.java +++ b/samples/codes/entity/UserQueryAllResult.java @@ -3,29 +3,30 @@ import java.util.List; import java.util.Set; -import jakarta.persistence.Column; -import javax.persistence.Id; - -import com.landawn.abacus.annotation.JsonXmlConfig; -import com.landawn.abacus.annotation.NonUpdatable; -import com.landawn.abacus.annotation.ReadOnly; -import com.landawn.abacus.annotation.Table; -import com.landawn.abacus.annotation.Type; -import com.landawn.abacus.annotation.Type.EnumBy; -import com.landawn.abacus.util.NamingPolicy; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.experimental.Accessors; - -@Builder -@Data -@NoArgsConstructor -@AllArgsConstructor -@Accessors(chain = true) -@JsonXmlConfig(namingPolicy = NamingPolicy.UPPER_CASE_WITH_UNDERSCORE, ignoredFields = { "id", "create_time" }, dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'", timeZone = "PDT", numberFormat = "#.###", enumerated = EnumBy.ORDINAL) +import jakarta.persistence.Column; +import javax.persistence.Id; + +import com.landawn.abacus.annotation.JsonXmlConfig; +import com.landawn.abacus.annotation.NonUpdatable; +import com.landawn.abacus.annotation.ReadOnly; +import com.landawn.abacus.annotation.Table; +import com.landawn.abacus.annotation.Type; +import com.landawn.abacus.annotation.Type.EnumBy; +import 
com.landawn.abacus.util.NamingPolicy; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.experimental.Accessors; + +@Builder +@Data +@NoArgsConstructor +@AllArgsConstructor +@Accessors(chain = true) +@JsonXmlConfig(namingPolicy = NamingPolicy.UPPER_CASE_WITH_UNDERSCORE, ignoredFields = { "id", + "create_time" }, dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'", timeZone = "PDT", numberFormat = "#.###", enumerated = EnumBy.ORDINAL) @Table("UserQueryAllResult") public class UserQueryAllResult { @@ -51,10 +52,10 @@ public class UserQueryAllResult { @Type(name = "List") private java.util.Date create_time; - // test - private List users; - - private Set userSet; // test + // test + private List users; + + private Set userSet; // test public UserQueryAllResult copy() { final UserQueryAllResult copy = new UserQueryAllResult(); diff --git a/samples/com/landawn/abacus/samples/DaoTest.java b/samples/com/landawn/abacus/samples/DaoTest.java index 109d3b0b..f5114786 100644 --- a/samples/com/landawn/abacus/samples/DaoTest.java +++ b/samples/com/landawn/abacus/samples/DaoTest.java @@ -367,7 +367,7 @@ public void test_exportCSV() throws Exception { try (Connection conn = JdbcTest.dataSource.getConnection(); PreparedStatement stmt = conn.prepareStatement("select * from user1"); ResultSet rs = stmt.executeQuery()) { - JdbcUtils.exportCSV(System.out, rs); + JdbcUtils.exportCSV(IOUtil.newOutputStreamWriter(System.out), rs); } N.println(IOUtil.LINE_SEPARATOR); @@ -376,7 +376,7 @@ public void test_exportCSV() throws Exception { try (Connection conn = JdbcTest.dataSource.getConnection(); PreparedStatement stmt = conn.prepareStatement("select * from user1"); ResultSet rs = stmt.executeQuery()) { - JdbcUtils.exportCSV(System.out, rs, 0, 10, true, false); + JdbcUtils.exportCSV(IOUtil.newOutputStreamWriter(System.out), rs, JdbcUtil.getColumnLabelList(rs), true, false); } userDao.batchDelete(users); @@ -1335,9 +1335,6 @@ public void test_joinedBy_2() throws SQLException { userDao.batchDelete(users); } - /** - * - */ @Test public void test_SQLParser() { final String sql = "SELECT employee_id AS \"employeeId\", first_name AS \"firstName\", last_name AS \"lastName\" FROM employee WHERE 1 < 2"; diff --git a/samples/com/landawn/abacus/samples/DataSetTest.java b/samples/com/landawn/abacus/samples/DataSetTest.java index bcac3062..94fedf0b 100644 --- a/samples/com/landawn/abacus/samples/DataSetTest.java +++ b/samples/com/landawn/abacus/samples/DataSetTest.java @@ -25,9 +25,6 @@ public class DataSetTest { static final Random RAND = new Random(); - /** - * - */ @Test public void test_join() { final User user = User.builder().id(1001).firstName("Tom").build(); diff --git a/samples/com/landawn/abacus/samples/UncheckedDaoTest.java b/samples/com/landawn/abacus/samples/UncheckedDaoTest.java index 4ae7ef74..87b8e854 100644 --- a/samples/com/landawn/abacus/samples/UncheckedDaoTest.java +++ b/samples/com/landawn/abacus/samples/UncheckedDaoTest.java @@ -103,9 +103,6 @@ public void test_define() throws SQLException { // assertEquals(N.asList(sql), uncheckedUserDao.getCachedSqls("selectById")); // } - /** - * - */ @Test public void test_orderBy() { JdbcUtil.enableSqlLog(); @@ -129,9 +126,6 @@ public void test_orderBy() { assertFalse(uncheckedUserDao.exists(id)); } - /** - * - */ @Test public void test_cache() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -153,9 +147,6 @@ public void test_cache() { 
uncheckedUserDao.delete(userFromDB); } - /** - * - */ @Test public void test_handler() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -178,9 +169,6 @@ public void test_handler() { assertFalse(uncheckedUserDao.exists(userFromDB.getId())); } - /** - * - */ @Test public void test_sql_log() { @@ -210,9 +198,6 @@ public void test_sql_log() { }); } - /** - * - */ @Test public void test_operation_log() { @@ -222,9 +207,6 @@ public void test_operation_log() { uncheckedUserDao.delete_propagation_SUPPORTS(100); } - /** - * - */ @Test public void test_propagation() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -298,9 +280,6 @@ public void test_batch() throws SQLException { assertEquals(0, N.sum(uncheckedUserDao.batchDeleteByIds_2(ids))); } - /** - * - */ @Test public void test_save_insert() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -321,9 +300,6 @@ public void test_save_insert() { assertFalse(uncheckedUserDao.exists(id)); } - /** - * - */ @Test public void test_save_insert_2() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -404,9 +380,6 @@ public void test_readOnlyDao() throws SQLException { uncheckedUserDao.delete(user); } - /** - * - */ @Test public void test_batchGet() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -438,9 +411,6 @@ public void test_batchGet() { uncheckedUserDao.deleteById(100L); } - /** - * - */ @Test public void test_batchDelete() { User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -474,9 +444,6 @@ public void test_batchDelete() { uncheckedUserDao.deleteById(100L); } - /** - * - */ @Test public void test_findFirst() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -506,9 +473,6 @@ public void test_findFirst() { uncheckedUserDao.deleteById(100L); } - /** - * - */ @Test public void test_list() { final User user = User.builder().id(100).firstName("Forrest").lastName("Gump").email("123@email.com").build(); @@ -717,9 +681,6 @@ public void crud_joinedBy_2() throws SQLException { uncheckedUserDao.batchDelete(users); } - /** - * - */ @Test public void test_SQLParser() { final String sql = "SELECT employee_id AS \"employeeId\", first_name AS \"firstName\", last_name AS \"lastName\" FROM employee WHERE 1 < 2"; diff --git a/src/main/java/com/landawn/abacus/jdbc/JdbcUtils.java b/src/main/java/com/landawn/abacus/jdbc/JdbcUtils.java index cdabadfa..c91435ad 100644 --- a/src/main/java/com/landawn/abacus/jdbc/JdbcUtils.java +++ b/src/main/java/com/landawn/abacus/jdbc/JdbcUtils.java @@ -17,10 +17,7 @@ package com.landawn.abacus.jdbc; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.sql.Connection; @@ -90,16 +87,23 @@ private JdbcUtils() { } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by the code below:
+ *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* * @param dataset * @param sourceDataSource - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
+ * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. * @return * @throws SQLException */ @@ -114,401 +118,402 @@ public static int importData(final DataSet dataset, final javax.sql.DataSource s } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static int importData(final DataSet dataset, final Connection conn, final String insertSQL) throws SQLException { return importData(dataset, dataset.columnNameList(), conn, insertSQL); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param selectColumnNames - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(selectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param selectColumnNames the collection of column names to be selected + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static int importData(final DataSet dataset, final Collection selectColumnNames, final Connection conn, final String insertSQL) throws SQLException { - return importData(dataset, selectColumnNames, 0, dataset.size(), conn, insertSQL); + return importData(dataset, selectColumnNames, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param selectColumnNames - * @param offset - * @param count - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @return - * @throws SQLException - */ - public static int importData(final DataSet dataset, final Collection selectColumnNames, final int offset, final int count, final Connection conn, - final String insertSQL) throws SQLException { - return importData(dataset, selectColumnNames, offset, count, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0); - } - - /** - * Imports the data from {@code DataSet} to database. + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(selectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* - * @param dataset - * @param selectColumnNames - * @param offset - * @param count - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param batchSize - * @param batchIntervalInMillis - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param selectColumnNames the collection of column names to be selected + * @param offset the starting point in the DataSet + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ - public static int importData(final DataSet dataset, final Collection selectColumnNames, final int offset, final int count, final Connection conn, - final String insertSQL, final int batchSize, final long batchIntervalInMillis) throws SQLException { - return importData(dataset, selectColumnNames, offset, count, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis); + public static int importData(final DataSet dataset, final Collection selectColumnNames, final Connection conn, final String insertSQL, + final int batchSize, final long batchIntervalInMillis) throws SQLException { + return importData(dataset, selectColumnNames, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - *
- * The {@code insertSQL} can be generated by {@code RE.insert(columnNameList).into(tableName).sql()}. *
      * 
-     * List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     * columnNameList.retainAll(yourSelectColumnNames);
-     * String sql = RE.insert(columnNameList).into(tableName).sql();
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(selectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
      * 
      * 
* * @param the type of exception that might be thrown - * @param dataset the dataset containing the data to be imported + * @param dataset the DataSet containing the data to be imported * @param selectColumnNames the collection of column names to be selected - * @param offset the starting point in the dataset from where data should be imported - * @param count the number of records to be imported from the dataset * @param filter a predicate to filter the data - * @param conn the database connection - * @param insertSQL the SQL insert statement, the column order in the SQL must be consistent with the column order in the DataSet - * @param batchSize the number of records to be inserted in each batch - * @param batchIntervalInMillis the interval in milliseconds between each batch insertion - * @return the number of records successfully imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @return the number of rows affected * @throws SQLException if a database access error occurs * @throws E if the filter throws an exception */ - public static int importData(final DataSet dataset, final Collection selectColumnNames, final int offset, final int count, + public static int importData(final DataSet dataset, final Collection selectColumnNames, final Throwables.Predicate filter, final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis) throws SQLException, E { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(dataset, selectColumnNames, offset, count, filter, stmt, batchSize, batchIntervalInMillis); + return importData(dataset, selectColumnNames, filter, stmt, batchSize, batchIntervalInMillis); } } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param columnTypeMap - * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param offset the starting point in the DataSet + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param columnTypeMap a map specifying the types of the columns + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ @SuppressWarnings("rawtypes") public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final Map columnTypeMap) throws SQLException { - return importData(dataset, 0, dataset.size(), conn, insertSQL, columnTypeMap); + return importData(dataset, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, columnTypeMap); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param offset - * @param count - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param columnTypeMap - * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param offset the starting point in the DataSet + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param columnTypeMap a map specifying the types of the columns + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ @SuppressWarnings("rawtypes") - public static int importData(final DataSet dataset, final int offset, final int count, final Connection conn, final String insertSQL, + public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) throws SQLException { - return importData(dataset, offset, count, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, columnTypeMap); + return importData(dataset, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeMap); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param offset - * @param count - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param batchSize - * @param batchIntervalInMillis - * @param columnTypeMap - * @return - * @throws SQLException - */ - @SuppressWarnings("rawtypes") - public static int importData(final DataSet dataset, final int offset, final int count, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Map columnTypeMap) throws SQLException { - return importData(dataset, offset, count, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeMap); - } - - /** - * Imports the data from {@code DataSet} to database. + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* - * @param - * @param dataset - * @param offset - * @param count - * @param filter - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param batchSize - * @param batchIntervalInMillis - * @param columnTypeMap - * @return - * @throws SQLException - * @throws E + * @param the type of exception that might be thrown + * @param dataset the DataSet containing the data to be imported + * @param filter a predicate to filter the data + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param columnTypeMap a map specifying the types of the columns + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws E if the filter throws an exception */ @SuppressWarnings("rawtypes") - public static int importData(final DataSet dataset, final int offset, final int count, - final Throwables.Predicate filter, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Map columnTypeMap) throws SQLException, E { - + public static int importData(final DataSet dataset, final Throwables.Predicate filter, final Connection conn, + final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) + throws SQLException, E { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(dataset, offset, count, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); + return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); } } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param stmtSetter - * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(dataset, 0, dataset.size(), conn, insertSQL, stmtSetter); + return importData(dataset, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param offset - * @param count - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param stmtSetter - * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ - public static int importData(final DataSet dataset, final int offset, final int count, final Connection conn, final String insertSQL, + public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(dataset, offset, count, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); + return importData(dataset, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, stmtSetter); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param offset - * @param count - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - */ - public static int importData(final DataSet dataset, final int offset, final int count, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException { - return importData(dataset, offset, count, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * Imports the data from {@code DataSet} to database. + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* - * @param - * @param dataset - * @param offset - * @param count - * @param filter - * @param conn - * @param insertSQL the column order in the sql must be consistent with the column order in the DataSet. Here is sample about how to create the sql: - *

-     *         List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
-     *         columnNameList.retainAll(yourSelectColumnNames);
-     *         String sql = RE.insert(columnNameList).into(tableName).sql();
-     * 
- * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws E + * @param the type of exception that might be thrown + * @param dataset the DataSet containing the data to be imported + * @param filter a predicate to filter the data + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws E if the filter throws an exception */ - public static int importData(final DataSet dataset, final int offset, final int count, - final Throwables.Predicate filter, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException, E { - + public static int importData(final DataSet dataset, final Throwables.Predicate filter, final Connection conn, + final String insertSQL, final int batchSize, final long batchIntervalInMillis, + final Throwables.BiConsumer stmtSetter) throws SQLException, E { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(dataset, offset, count, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); + return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); } } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

* - * @param dataset - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static int importData(final DataSet dataset, final PreparedStatement stmt) throws SQLException { return importData(dataset, dataset.columnNameList(), stmt); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param selectColumnNames - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @return - * @throws SQLException + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(selectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param dataset the DataSet containing the data to be imported + * @param selectColumnNames the collection of column names to be selected + * @param stmt the PreparedStatement to be used for the import + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static int importData(final DataSet dataset, final Collection selectColumnNames, final PreparedStatement stmt) throws SQLException { - return importData(dataset, selectColumnNames, 0, dataset.size(), stmt); + return importData(dataset, selectColumnNames, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param selectColumnNames - * @param offset - * @param count - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @return - * @throws SQLException - */ - public static int importData(final DataSet dataset, final Collection selectColumnNames, final int offset, final int count, - final PreparedStatement stmt) throws SQLException { - return importData(dataset, selectColumnNames, offset, count, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0); - } - - /** - * Imports the data from {@code DataSet} to database. + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(selectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* - * @param dataset - * @param selectColumnNames - * @param offset - * @param count - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param batchSize - * @param batchIntervalInMillis - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param selectColumnNames the collection of column names to be selected + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ - public static int importData(final DataSet dataset, final Collection selectColumnNames, final int offset, final int count, - final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis) throws SQLException { - return importData(dataset, selectColumnNames, offset, count, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis); + public static int importData(final DataSet dataset, final Collection selectColumnNames, final PreparedStatement stmt, final int batchSize, + final long batchIntervalInMillis) throws SQLException { + return importData(dataset, selectColumnNames, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param - * @param dataset - * @param selectColumnNames - * @param offset - * @param count - * @param filter - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param batchSize - * @param batchIntervalInMillis - * @return - * @throws SQLException - * @throws E + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(selectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
+ * + * @param the type of exception that might be thrown + * @param dataset the DataSet containing the data to be imported + * @param selectColumnNames the collection of column names to be selected + * @param filter a predicate to filter the data + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws E if the filter throws an exception */ - public static int importData(final DataSet dataset, final Collection selectColumnNames, final int offset, final int count, + public static int importData(final DataSet dataset, final Collection selectColumnNames, final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis) throws SQLException, E { final Type objType = N.typeOf(Object.class); @@ -518,81 +523,78 @@ public static int importData(final DataSet dataset, final columnTypeMap.put(propName, objType); } - return importData(dataset, offset, count, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); + return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

* - * @param dataset - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param columnTypeMap - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param columnTypeMap a map specifying the types of the columns + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ @SuppressWarnings("rawtypes") public static int importData(final DataSet dataset, final PreparedStatement stmt, final Map columnTypeMap) throws SQLException { - return importData(dataset, 0, dataset.size(), stmt, columnTypeMap); + return importData(dataset, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, columnTypeMap); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

* - * @param dataset - * @param offset - * @param count - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param columnTypeMap - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param columnTypeMap a map specifying the types of the columns + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ @SuppressWarnings("rawtypes") - public static int importData(final DataSet dataset, final int offset, final int count, final PreparedStatement stmt, + public static int importData(final DataSet dataset, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) throws SQLException { - return importData(dataset, offset, count, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, columnTypeMap); + return importData(dataset, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param offset - * @param count - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param batchSize - * @param batchIntervalInMillis - * @param columnTypeMap - * @return - * @throws SQLException - */ - @SuppressWarnings("rawtypes") - public static int importData(final DataSet dataset, final int offset, final int count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Map columnTypeMap) throws SQLException { - return importData(dataset, offset, count, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); - } - - /** - * Imports the data from {@code DataSet} to database. + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* - * @param - * @param dataset - * @param offset - * @param count - * @param filter - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param batchSize - * @param batchIntervalInMillis - * @param columnTypeMap - * @return - * @throws IllegalArgumentException - * @throws SQLException - * @throws E + * @param the type of exception that might be thrown + * @param dataset the DataSet containing the data to be imported + * @param filter a predicate to filter the data + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param columnTypeMap a map specifying the types of the columns + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws E if the filter throws an exception */ @SuppressWarnings({ "rawtypes", "null" }) - public static int importData(final DataSet dataset, final int offset, final int count, - final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, - final Map columnTypeMap) throws IllegalArgumentException, SQLException, E { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); //NOSONAR + public static int importData(final DataSet dataset, final Throwables.Predicate filter, + final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) + throws IllegalArgumentException, SQLException, E { + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); //NOSONAR N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", //NOSONAR batchSize, batchIntervalInMillis); @@ -635,78 +637,76 @@ public void accept(final PreparedQuery t, final Object[] u) throws SQLException } }; - return importData(dataset, offset, count, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); + return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); } /** + * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

* - * @param dataset - * @param stmt - * @param stmtSetter - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static int importData(final DataSet dataset, final PreparedStatement stmt, final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(dataset, 0, dataset.size(), stmt, stmtSetter); + return importData(dataset, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

* - * @param dataset - * @param offset - * @param count - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param stmtSetter - * @return - * @throws SQLException + * @param dataset the DataSet containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ - public static int importData(final DataSet dataset, final int offset, final int count, final PreparedStatement stmt, + public static int importData(final DataSet dataset, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(dataset, offset, count, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); + return importData(dataset, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); } /** - * Imports the data from {@code DataSet} to database. + * Imports the data from {@code DataSet} to the database. + *

+ * The column order in the SQL must be consistent with the column order in the DataSet. + *

+ * The specified {@code insertSQL} can be generated by below code: * - * @param dataset - * @param offset - * @param count - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - */ - public static int importData(final DataSet dataset, final int offset, final int count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException { - return importData(dataset, offset, count, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * Imports the data from {@code DataSet} to database. + *
+     * <pre>{@code
+     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
+     *   columnNameList.retainAll(yourSelectColumnNames);
+     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
+     * }</pre>
* - * @param - * @param dataset - * @param offset - * @param count - * @param filter - * @param stmt the column order in the sql must be consistent with the column order in the DataSet. - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws IllegalArgumentException - * @throws SQLException - * @throws E + * @param the type of exception that might be thrown + * @param dataset the DataSet containing the data to be imported + * @param filter a predicate to filter the data + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws IllegalArgumentException if the offset or count is negative, or if batchSize is not greater than 0, or if batchIntervalInMillis is negative + * @throws SQLException if a database access error occurs + * @throws E if the filter throws an exception */ - public static int importData(final DataSet dataset, final int offset, final int count, - final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, + public static int importData(final DataSet dataset, final Throwables.Predicate filter, + final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws IllegalArgumentException, SQLException, E { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis); @@ -716,7 +716,7 @@ public static int importData(final DataSet dataset, final final Object[] row = new Object[columnCount]; int result = 0; - for (int i = offset, size = dataset.size(); result < count && i < size; i++) { + for (int i = 0, size = dataset.size(); result < size && i < size; i++) { dataset.absolute(i); for (int j = 0; j < columnCount; j++) { @@ -748,315 +748,136 @@ public static int importData(final DataSet dataset, final } /** + * Imports the data from a file to the database. 
* - * @param - * @param file - * @param sourceDataSource - * @param insertSQL - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final File file, final javax.sql.DataSource sourceDataSource, final String insertSQL, - final Throwables.Function func) throws SQLException, IOException, E { - final Connection conn = sourceDataSource.getConnection(); - - try { - return importData(file, conn, insertSQL, func); - } finally { - JdbcUtil.releaseConnection(conn, sourceDataSource); - } - } - - /** - * - * @param - * @param file - * @param conn - * @param insertSQL - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final File file, final Connection conn, final String insertSQL, - final Throwables.Function func) throws SQLException, IOException, E { - return importData(file, 0, Long.MAX_VALUE, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); - } - - /** - * - * @param - * @param file - * @param offset - * @param count - * @param conn - * @param insertSQL - * @param batchSize - * @param batchIntervalInMillis - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final File file, final long offset, final long count, final Connection conn, final String insertSQL, - final int batchSize, final long batchIntervalInMillis, final Throwables.Function func) - throws SQLException, IOException, E { - - try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(file, offset, count, stmt, batchSize, batchIntervalInMillis, func); - } - } - - /** - * - * @param - * @param file - * @param stmt - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final File file, final PreparedStatement stmt, - final Throwables.Function func) throws SQLException, IOException, E { - return importData(file, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); - } - - /** - * Imports the data from file to database. - * - * @param - * @param file - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param func convert line to the parameters for record insert. Returns a {@code null} array to skip the line. - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final File file, final long offset, final long count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.Function func) throws SQLException, IOException, E { - - try (Reader reader = IOUtil.newFileReader(file)) { - return importData(reader, offset, count, stmt, batchSize, batchIntervalInMillis, func); - } - } - - /** - * - * @param - * @param is - * @param sourceDataSource - * @param insertSQL - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that might be thrown + * @param file the file containing the data to be imported + * @param sourceDataSource the DataSource to get the database connection + * @param insertSQL the SQL insert statement; + * @param func a function to process each line of the file and convert it to an array of objects for record insert. Returns a {@code null} array to skip the line. 
+ * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the function throws an exception */ - public static long importData(final InputStream is, final javax.sql.DataSource sourceDataSource, final String insertSQL, + public static long importData(final File file, final javax.sql.DataSource sourceDataSource, final String insertSQL, final Throwables.Function func) throws SQLException, IOException, E { final Connection conn = sourceDataSource.getConnection(); try { - return importData(is, conn, insertSQL, func); + return importData(file, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** + * Imports the data from a file to the database using the provided {@code Connection}. * - * @param - * @param is - * @param conn - * @param insertSQL - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final InputStream is, final Connection conn, final String insertSQL, - final Throwables.Function func) throws SQLException, IOException, E { - return importData(is, 0, Long.MAX_VALUE, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); - } - - /** - * - * @param - * @param is - * @param offset - * @param count - * @param conn - * @param insertSQL - * @param batchSize - * @param batchIntervalInMillis - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that might be thrown + * @param file the file containing the data to be imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement. + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param func a function to process each line of the file and convert it to an array of objects for record insert. Returns a {@code null} array to skip the line. + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the function throws an exception */ - public static long importData(final InputStream is, final long offset, final long count, final Connection conn, - final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.Function func) - throws SQLException, IOException, E { + public static long importData(final File file, final Connection conn, final String insertSQL, final int batchSize, + final long batchIntervalInMillis, final Throwables.Function func) throws SQLException, IOException, E { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(is, offset, count, stmt, batchSize, batchIntervalInMillis, func); + return importData(file, stmt, batchSize, batchIntervalInMillis, func); } } /** + * Imports the data from a file to the database using the provided {@code PreparedStatement}. 
* - * @param - * @param is - * @param stmt - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that might be thrown + * @param file the file containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param func a function to process each line of the file and convert it to an array of objects for record insert. Returns a {@code null} array to skip the line. + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the function throws an exception */ - public static long importData(final InputStream is, final PreparedStatement stmt, + public static long importData(final File file, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.Function func) throws SQLException, IOException, E { - return importData(is, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); - } - - /** - * Imports the data from file to database. - * - * @param - * @param is - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param func convert line to the parameters for record insert. Returns a {@code null} array to skip the line. - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final InputStream is, final long offset, final long count, final PreparedStatement stmt, - final int batchSize, final long batchIntervalInMillis, final Throwables.Function func) - throws SQLException, IOException, E { - final Reader reader = IOUtil.newInputStreamReader(is); - - return importData(reader, offset, count, stmt, batchSize, batchIntervalInMillis, func); + try (Reader reader = IOUtil.newFileReader(file)) { + return importData(reader, stmt, batchSize, batchIntervalInMillis, func); + } } /** + * Imports the data from a {@code Reader} to the database. * - * @param - * @param reader - * @param sourceDataSource - * @param insertSQL - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that might be thrown + * @param reader the Reader containing the data to be imported + * @param sourceDataSource the DataSource to get the database connection + * @param insertSQL the SQL insert statement. + * @param func a function to process each line of the file and convert it to an array of objects for record insert. Returns a {@code null} array to skip the line. + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the function throws an exception */ public static long importData(final Reader reader, final javax.sql.DataSource sourceDataSource, final String insertSQL, final Throwables.Function func) throws SQLException, IOException, E { final Connection conn = sourceDataSource.getConnection(); try { - return importData(reader, conn, insertSQL, func); + return importData(reader, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** + * Imports the data from a {@code Reader} to the database using the provided {@code Connection}. 
* - * @param - * @param reader - * @param conn - * @param insertSQL - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final Reader reader, final Connection conn, final String insertSQL, - final Throwables.Function func) throws SQLException, IOException, E { - return importData(reader, 0, Long.MAX_VALUE, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); - } - - /** - * - * @param - * @param reader - * @param offset - * @param count - * @param conn - * @param insertSQL - * @param batchSize - * @param batchIntervalInMillis - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that might be thrown + * @param reader the Reader containing the data to be imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement. + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param func a function to process each line of the file and convert it to an array of objects for record insert. Returns a {@code null} array to skip the line. + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the function throws an exception */ - public static long importData(final Reader reader, final long offset, final long count, final Connection conn, final String insertSQL, - final int batchSize, final long batchIntervalInMillis, final Throwables.Function func) - throws SQLException, IOException, E { + public static long importData(final Reader reader, final Connection conn, final String insertSQL, final int batchSize, + final long batchIntervalInMillis, final Throwables.Function func) throws SQLException, IOException, E { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(reader, offset, count, stmt, batchSize, batchIntervalInMillis, func); + return importData(reader, stmt, batchSize, batchIntervalInMillis, func); } } /** + * Imports the data from a {@code Reader} to the database using the provided {@code PreparedStatement}. * - * @param - * @param reader - * @param stmt - * @param func - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importData(final Reader reader, final PreparedStatement stmt, - final Throwables.Function func) throws SQLException, IOException, E { - return importData(reader, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func); - } - - /** - * Imports the data from file to database. - * - * @param - * @param reader - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param func convert line to the parameters for record insert. Returns a {@code null} array to skip the line. - * @return - * @throws IllegalArgumentException - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that might be thrown + * @param reader the Reader containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param func a function to process each line of the file and convert it to an array of objects for record insert. 
Returns a {@code null} array to skip the line. + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the function throws an exception */ - public static long importData(final Reader reader, long offset, final long count, final PreparedStatement stmt, final int batchSize, + public static long importData(final Reader reader, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.Function func) throws IllegalArgumentException, SQLException, IOException, E { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis); @@ -1064,14 +885,10 @@ public static long importData(final Reader reader, long of final BufferedReader br = Objectory.createBufferedReader(reader); try { - while (offset-- > 0 && br.readLine() != null) { - // skip. - } - String line = null; Object[] row = null; - while (result < count && (line = br.readLine()) != null) { + while ((line = br.readLine()) != null) { row = func.apply(line); if (row == null) { @@ -1104,158 +921,72 @@ public static long importData(final Reader reader, long of } /** + * Imports the data from an {@code Iterator} to the database. * - * @param - * @param iter - * @param sourceDataSource - * @param insertSQL - * @param stmtSetter - * @return - * @throws SQLException + * @param the type of elements in the iterator + * @param iter the Iterator containing the data to be imported + * @param sourceDataSource the DataSource to get the database connection + * @param insertSQL the SQL insert statement. + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ public static long importData(final Iterator iter, final javax.sql.DataSource sourceDataSource, final String insertSQL, final Throwables.BiConsumer stmtSetter) throws SQLException { final Connection conn = sourceDataSource.getConnection(); try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(iter, stmt, stmtSetter); + return importData(iter, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** + * Imports the data from an {@code Iterator} to the database using the provided {@code Connection}. 
* - * @param - * @param iter - * @param conn - * @param insertSQL - * @param stmtSetter - * @return - * @throws SQLException - */ - public static long importData(final Iterator iter, final Connection conn, final String insertSQL, - final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(iter, 0, Long.MAX_VALUE, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); - } - - /** - * - * @param - * @param iter - * @param offset - * @param count - * @param conn - * @param insertSQL - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - */ - public static long importData(final Iterator iter, final long offset, final long count, final Connection conn, final String insertSQL, - final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException { - return importData(iter, offset, count, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * - * @param - * @param - * @param iter - * @param offset - * @param count - * @param filter - * @param conn - * @param insertSQL - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws E + * @param the type of elements in the iterator + * @param iter the Iterator containing the data to be imported + * @param conn the Connection to the database + * @param insertSQL the SQL insert statement. + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ - public static long importData(final Iterator iter, final long offset, final long count, - final Throwables.Predicate filter, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException, E { + public static long importData(final Iterator iter, final Connection conn, final String insertSQL, final int batchSize, + final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importData(iter, offset, count, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); + return importData(iter, stmt, batchSize, batchIntervalInMillis, stmtSetter); } } /** + * Imports the data from an {@code Iterator} to the database using the provided {@code PreparedStatement}. 
* - * @param - * @param iter - * @param stmt - * @param stmtSetter - * @return - * @throws SQLException + * @param the type of elements in the iterator + * @param iter the Iterator containing the data to be imported + * @param stmt the PreparedStatement to be used for the import + * @param batchSize the number of rows to be inserted in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs */ - public static long importData(final Iterator iter, final PreparedStatement stmt, + public static long importData(final Iterator iter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(iter, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); - } - - /** - * - * @param - * @param iter - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - */ - public static long importData(final Iterator iter, final long offset, final long count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { - return importData(iter, offset, count, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * Imports the data from Iterator to database. - * - * @param - * @param - * @param iter - * @param offset - * @param count - * @param filter - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws IllegalArgumentException - * @throws SQLException - * @throws E - */ - public static long importData(final Iterator iter, long offset, final long count, - final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, - final Throwables.BiConsumer stmtSetter) throws IllegalArgumentException, SQLException, E { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis); final PreparedQuery stmtForSetter = new PreparedQuery(stmt); long result = 0; - while (offset-- > 0 && iter.hasNext()) { - iter.next(); - } - T next = null; - while (result < count && iter.hasNext()) { + while (iter.hasNext()) { next = iter.next(); - if (filter != null && !filter.test(next)) { - continue; - } - stmtSetter.accept(stmtForSetter, next); stmtForSetter.addBatch(); @@ -1307,9 +1038,9 @@ public static long importData(final Iterator columnTypeList) throws SQLException, IOException { - // return importCSV(file, offset, count, skipTitle, Fn. alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(file, skipTitle, Fn. 
alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeList); // } // // /** @@ -1332,12 +1063,12 @@ public static long importData(final Iterator long importCSV(final File file, final long offset, final long count, final boolean skipTitle, + // public static long importCSV(final File file, final boolean skipTitle, // final Throwables.Predicate filter, final Connection conn, final String insertSQL, final int batchSize, // final long batchIntervalInMillis, final List columnTypeList) throws SQLException, IOException, E { // // try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - // return importCSV(file, offset, count, skipTitle, filter, stmt, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(file, skipTitle, filter, stmt, batchSize, batchIntervalInMillis, columnTypeList); // } // } // @@ -1370,9 +1101,9 @@ public static long importData(final Iterator columnTypeList) throws SQLException, IOException { - // return importCSV(file, offset, count, skipTitle, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(file, skipTitle, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeList); // } // // /** @@ -1394,12 +1125,12 @@ public static long importData(final Iterator long importCSV(final File file, final long offset, final long count, final boolean skipTitle, + // public static long importCSV(final File file, final boolean skipTitle, // final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, // final List columnTypeList) throws SQLException, IOException, E { // // try (Reader reader = new FileReader(file)) { - // return importCSV(reader, offset, count, skipTitle, filter, stmt, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(reader, skipTitle, filter, stmt, batchSize, batchIntervalInMillis, columnTypeList); // } // } // @@ -1433,9 +1164,9 @@ public static long importData(final Iterator columnTypeList) throws SQLException, IOException { - // return importCSV(is, offset, count, skipTitle, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(is, skipTitle, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeList); // } // // /** @@ -1457,12 +1188,12 @@ public static long importData(final Iterator long importCSV(final InputStream is, final long offset, final long count, final boolean skipTitle, + // public static long importCSV(final InputStream is, final boolean skipTitle, // final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, // final List columnTypeList) throws SQLException, IOException, E { // final Reader reader = new InputStreamReader(is); // - // return importCSV(reader, offset, count, skipTitle, filter, stmt, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(reader, skipTitle, filter, stmt, batchSize, batchIntervalInMillis, columnTypeList); // } // // /** @@ -1495,9 +1226,9 @@ public static long importData(final Iterator columnTypeList) throws SQLException, IOException { - // return importCSV(reader, offset, count, skipTitle, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeList); + // return importCSV(reader, skipTitle, Fn. 
alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeList); // } // // /** @@ -1519,10 +1250,10 @@ public static long importData(final Iterator long importCSV(final Reader reader, long offset, final long count, final boolean skipTitle, + // public static long importCSV(final Reader reader, final boolean skipTitle, // final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, // final List columnTypeList) throws SQLException, IOException, E { - // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); // N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", // batchSize, batchIntervalInMillis); // @@ -1618,9 +1349,9 @@ public static long importData(final Iterator columnTypeMap) throws SQLException, IOException { - // return importCSV(file, offset, count, Fn. alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(file, Fn. alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeMap); // } // // /** @@ -1641,12 +1372,12 @@ public static long importData(final Iterator long importCSV(final File file, final long offset, final long count, final Throwables.Predicate filter, + // public static long importCSV(final File file, final Throwables.Predicate filter, // final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, // final Map columnTypeMap) throws SQLException, IOException, E { // // try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - // return importCSV(file, offset, count, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(file, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); // } // } // @@ -1679,9 +1410,9 @@ public static long importData(final Iterator columnTypeMap) throws SQLException, IOException { - // return importCSV(file, offset, count, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(file, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); // } // // /** @@ -1702,12 +1433,12 @@ public static long importData(final Iterator long importCSV(final File file, final long offset, final long count, final Throwables.Predicate filter, + // public static long importCSV(final File file, final Throwables.Predicate filter, // final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) // throws SQLException, IOException, E { // // try (Reader reader = new FileReader(file)) { - // return importCSV(reader, offset, count, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(reader, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); // } // } // @@ -1740,9 +1471,9 @@ public static long importData(final Iterator columnTypeMap) throws SQLException, IOException { - // return importCSV(is, offset, count, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(is, Fn. 
alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); // } // // /** @@ -1763,11 +1494,11 @@ public static long importData(final Iterator long importCSV(final InputStream is, long offset, final long count, final Throwables.Predicate filter, + // public static long importCSV(final InputStream is, final Throwables.Predicate filter, // final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) // throws SQLException, IOException, E { // final Reader reader = new InputStreamReader(is); - // return importCSV(reader, offset, count, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(reader, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap); // } // // /** @@ -1799,9 +1530,9 @@ public static long importData(final Iterator columnTypeMap) throws SQLException, IOException { - // return importCSV(reader, offset, count, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); + // return importCSV(reader, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap); // } // // /** @@ -1822,10 +1553,10 @@ public static long importData(final Iterator long importCSV(final Reader reader, long offset, final long count, final Throwables.Predicate filter, + // public static long importCSV(final Reader reader, final Throwables.Predicate filter, // final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Map columnTypeMap) // throws SQLException, IOException, E { - // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); // N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", // batchSize, batchIntervalInMillis); // @@ -1933,215 +1664,115 @@ public static long importCSV(final File file, final javax.sql.DataSource sourceD } /** - * Imports the data from CSV to database. + * Imports the data from a CSV file to the database using the provided connection. + *

+ * This method reads data from the specified CSV file and inserts it into the database + * using the provided connection and SQL insert statement. The column order in the SQL + * statement should be consistent with the column order in the CSV file. + *
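+ * Example (a minimal sketch; {@code ds}, the {@code user1(id, first_name)} table and a {@code users.csv} file
+ * with a header row are assumed, not defined here):
+ * <pre>{@code
+ * try (Connection conn = ds.getConnection()) {
+ *     long inserted = JdbcUtils.importCSV(new File("users.csv"), conn,
+ *             "INSERT INTO user1(id, first_name) VALUES (?, ?)", 200, 0,
+ *             (q, row) -> {
+ *                 q.setLong(1, Long.parseLong(row[0]));   // row holds the parsed CSV columns of one line
+ *                 q.setString(2, row[1]);
+ *             });
+ * }
+ * }</pre>
+ *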

* - * @param file - * @param conn - * @param insertSQL the column order in the sql should be consistent with the column order in the CSV file. - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException + * @param file the CSV file containing the data to be imported + * @param conn the database connection to be used + * @param insertSQL the SQL insert statement + * @param batchSize the number of rows to be batched together for insertion + * @param batchIntervalInMillis the interval in milliseconds between batch insertions + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long importCSV(final File file, final Connection conn, final String insertSQL, + public static long importCSV(final File file, final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { - return importCSV(file, 0, Long.MAX_VALUE, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); - } - - /** - * - * @param file - * @param offset - * @param count - * @param conn - * @param insertSQL - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - */ - public static long importCSV(final File file, final long offset, final long count, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException, IOException { - return importCSV(file, offset, count, Fn. alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * - * @param - * @param file - * @param offset - * @param count - * @param filter - * @param conn - * @param insertSQL the column order in the sql should be consistent with the column order in the CSV file. - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importCSV(final File file, final long offset, final long count, - final Throwables.Predicate filter, final Connection conn, final String insertSQL, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException, IOException, E { - try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importCSV(file, offset, count, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); + return importCSV(file, stmt, batchSize, batchIntervalInMillis, stmtSetter); } } /** + * Imports the data from a CSV file to the database using the provided PreparedStatement. + *

+ * This method reads data from the specified CSV file and inserts it into the database + * using the provided PreparedStatement. The column order in the SQL statement should + * be consistent with the column order in the CSV file. + *

* - * @param file - * @param stmt - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException + * @param file the CSV file containing the data to be imported + * @param stmt the PreparedStatement to be used for the insertion + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long importCSV(final File file, final PreparedStatement stmt, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { - return importCSV(file, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); + return importCSV(file, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** + * Imports the data from a CSV file to the database using the provided PreparedStatement. + *

+ * This method reads data from the specified CSV file and inserts it into the database + * using the provided PreparedStatement. The column order in the SQL statement should + * be consistent with the column order in the CSV file. + *

* - * @param file - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException + * @param file the CSV file containing the data to be imported + * @param stmt the PreparedStatement to be used for the insertion + * @param batchSize the number of rows to be batched together for insertion + * @param batchIntervalInMillis the interval in milliseconds between batch insertions + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long importCSV(final File file, final long offset, final long count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException, IOException { - return importCSV(file, offset, count, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); + public static long importCSV(final File file, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, + final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { + return importCSV(file, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); } /** - * Imports the data from CSV to database. + * Imports the data from a CSV file to the database using the provided PreparedStatement. + *

+ * This method reads data from the specified CSV file and inserts it into the database + * using the provided PreparedStatement. The column order in the SQL statement should + * be consistent with the column order in the CSV file. + *
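+ * Example (a minimal sketch with the same {@code ds}/{@code user1}/{@code users.csv} assumptions as above;
+ * rows whose id column is blank are skipped by the filter):
+ * <pre>{@code
+ * Throwables.Predicate<String[], SQLException> hasId = row -> row[0] != null && !row[0].isEmpty();
+ *
+ * try (Connection conn = ds.getConnection();
+ *      PreparedStatement stmt = conn.prepareStatement("INSERT INTO user1(id, first_name) VALUES (?, ?)")) {
+ *     JdbcUtils.importCSV(new File("users.csv"), hasId, stmt, 200, 0,
+ *             (q, row) -> {
+ *                 q.setLong(1, Long.parseLong(row[0]));
+ *                 q.setString(2, row[1]);
+ *             });
+ * }
+ * }</pre>
+ *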

* - * @param - * @param file - * @param offset - * @param count - * @param filter - * @param stmt the column order in the sql should be consistent with the column order in the CSV file. - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - * @throws E + * @param the type of exception that may be thrown by the filter + * @param file the CSV file containing the data to be imported + * @param filter a predicate to filter the rows to be imported + * @param stmt the PreparedStatement to be used for the insertion + * @param batchSize the number of rows to be batched together for insertion + * @param batchIntervalInMillis the interval in milliseconds between batch insertions + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the filter throws an exception */ - public static long importCSV(final File file, final long offset, final long count, - final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, + public static long importCSV(final File file, final Throwables.Predicate filter, final PreparedStatement stmt, + final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException, E { try (Reader reader = IOUtil.newFileReader(file)) { - return importCSV(reader, offset, count, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); - } - } - - /** - * Imports the data from CSV to database. - * - * @param is - * @param sourceDataSource - * @param insertSQL the column order in the sql should be consistent with the column order in the CSV file. - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - */ - public static long importCSV(final InputStream is, final javax.sql.DataSource sourceDataSource, final String insertSQL, - final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { - final Connection conn = sourceDataSource.getConnection(); - - try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { - return importCSV(is, stmt, stmtSetter); - } finally { - JdbcUtil.releaseConnection(conn, sourceDataSource); + return importCSV(reader, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); } } /** + * Imports the data from a CSV file to the database using the provided DataSource and SQL insert statement. + *

+ * This method reads CSV data from the provided Reader and inserts it into the database + * using the provided DataSource and SQL insert statement. The column order in the SQL statement should + * be consistent with the column order in the CSV data. + *
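+ * Example (a minimal sketch; {@code ds} and the {@code user1} table are assumed as above, with the CSV
+ * supplied through a Reader):
+ * <pre>{@code
+ * try (Reader reader = IOUtil.newFileReader(new File("users.csv"))) {
+ *     JdbcUtils.importCSV(reader, ds, "INSERT INTO user1(id, first_name) VALUES (?, ?)",
+ *             (q, row) -> {
+ *                 q.setLong(1, Long.parseLong(row[0]));
+ *                 q.setString(2, row[1]);
+ *             });
+ * }
+ * }</pre>
+ *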

* - * @param is - * @param stmt - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - */ - public static long importCSV(final InputStream is, final PreparedStatement stmt, - final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { - return importCSV(is, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); - } - - /** - * - * @param is - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - */ - public static long importCSV(final InputStream is, final long offset, final long count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException, IOException { - return importCSV(is, offset, count, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * Imports the data from CSV to database. - * - * @param - * @param is - * @param offset - * @param count - * @param filter - * @param stmt the column order in the sql should be consistent with the column order in the CSV file. - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException - * @throws E - */ - public static long importCSV(final InputStream is, final long offset, final long count, - final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, - final Throwables.BiConsumer stmtSetter) throws SQLException, IOException, E { - final Reader reader = IOUtil.newInputStreamReader(is); - return importCSV(reader, offset, count, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * Imports the data from CSV to database. - * - * @param reader - * @param sourceDataSource - * @param insertSQL the column order in the sql should be consistent with the column order in the CSV file. - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException + * @param reader the Reader to read the CSV file + * @param sourceDataSource the DataSource to obtain the database connection + * @param insertSQL the SQL insert statement to be used for the insertion + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long importCSV(final Reader reader, final javax.sql.DataSource sourceDataSource, final String insertSQL, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { @@ -2155,63 +1786,75 @@ public static long importCSV(final Reader reader, final javax.sql.DataSource sou } /** + * Imports the data from a CSV file to the database using the provided PreparedStatement. + *

+ * This method reads CSV data from the provided Reader and inserts it into the database + * using the provided PreparedStatement. The column order in the SQL statement should + * be consistent with the column order in the CSV data. + *

* - * @param reader - * @param stmt - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException + * @param reader the Reader to read the CSV file + * @param stmt the PreparedStatement to be used for the insertion + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long importCSV(final Reader reader, final PreparedStatement stmt, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { - return importCSV(reader, 0, Long.MAX_VALUE, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); + return importCSV(reader, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** + * Imports the data from a CSV file to the database using the provided PreparedStatement. + *

+ * This method reads CSV data from the provided Reader and inserts it into the database + * using the provided PreparedStatement. The column order in the SQL statement should + * be consistent with the column order in the CSV data. + *
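+ * Example (a minimal sketch; a live {@code Connection conn} and the {@code user1} table are assumed, with a
+ * small in-memory CSV whose first line is the header):
+ * <pre>{@code
+ * String csv = "id,first_name\n1001,Tom\n1002,Jerry";
+ *
+ * try (PreparedStatement stmt = conn.prepareStatement("INSERT INTO user1(id, first_name) VALUES (?, ?)")) {
+ *     JdbcUtils.importCSV(new StringReader(csv), stmt, 100, 0,
+ *             (q, row) -> {
+ *                 q.setLong(1, Long.parseLong(row[0]));
+ *                 q.setString(2, row[1]);
+ *             });
+ * }
+ * }</pre>
+ *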

* - * @param reader - * @param offset - * @param count - * @param stmt - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - * @throws IOException + * @param reader the Reader to read the CSV file + * @param stmt the PreparedStatement to be used for the insertion + * @param batchSize the number of rows to be batched together for insertion + * @param batchIntervalInMillis the interval in milliseconds between batch insertions + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ @SuppressWarnings({ "unchecked" }) - public static long importCSV(final Reader reader, final long offset, final long count, final PreparedStatement stmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException, IOException { - return importCSV(reader, offset, count, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); - } - - /** - * Imports the data from CSV to database. - * - * @param - * @param reader - * @param offset - * @param count - * @param filter - * @param stmt the column order in the sql should be consistent with the column order in the CSV file. - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws IllegalArgumentException - * @throws SQLException - * @throws IOException - * @throws E + public static long importCSV(final Reader reader, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, + final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { + return importCSV(reader, Fn. alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter); + } + + /** + * Imports the data from a CSV file to the database using the provided PreparedStatement. + *

+ * This method reads CSV data from the provided Reader and inserts it into the database + * using the provided PreparedStatement. The column order in the SQL statement should + * be consistent with the column order in the CSV data. + *

+ * + * @param the type of exception that may be thrown by the filter + * @param reader the Reader to read the CSV file + * @param filter a Predicate to filter the rows to be inserted + * @param stmt the PreparedStatement to be used for the insertion + * @param batchSize the number of rows to be batched together for insertion + * @param batchIntervalInMillis the interval in milliseconds between batch insertions + * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement + * @return the number of rows affected + * @throws IllegalArgumentException if the batch size or batch interval is invalid + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs + * @throws E if the filter throws an exception */ @SuppressWarnings({ "unchecked", "resource" }) - public static long importCSV(final Reader reader, long offset, final long count, - final Throwables.Predicate filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, + public static long importCSV(final Reader reader, final Throwables.Predicate filter, + final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws IllegalArgumentException, SQLException, IOException, E { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis); @@ -2225,13 +1868,9 @@ public static long importCSV(final Reader reader, long off String line = br.readLine(); final String[] titles = headerParser.apply(line); - while (offset-- > 0 && br.readLine() != null) { - // continue - } - final String[] output = new String[titles.length]; - while (result < count && (line = br.readLine()) != null) { + while ((line = br.readLine()) != null) { lineParser.accept(line, output); if (filter != null && !filter.test(output)) { @@ -2263,16 +1902,18 @@ public static long importCSV(final Reader reader, long off } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. + * The first line of the CSV file will contain the column names as the title. + *
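+ * Example (a minimal sketch; a {@code javax.sql.DataSource ds} and the {@code user1} table are assumed):
+ * <pre>{@code
+ * long exported = JdbcUtils.exportCSV(new File("user1.csv"), ds, "SELECT * FROM user1");
+ * }</pre>
+ *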

* - * @param out - * @param sourceDataSource - * @param querySQL - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param sourceDataSource the DataSource to get the database connection from + * @param querySQL the SQL query to execute to retrieve the data + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final javax.sql.DataSource sourceDataSource, final String querySQL) throws SQLException, IOException { final Connection conn = sourceDataSource.getConnection(); @@ -2285,321 +1926,225 @@ public static long exportCSV(final File out, final javax.sql.DataSource sourceDa } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. + * The first line of the CSV file will contain the column names as the title. + *

* - * @param out - * @param conn - * @param querySQL - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param conn the Connection to the database + * @param querySQL the SQL query to execute to retrieve the data + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final Connection conn, final String querySQL) throws SQLException, IOException { - return exportCSV(out, conn, querySQL, 0, Long.MAX_VALUE, true, true); + return exportCSV(out, conn, querySQL, true, true); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param conn - * @param querySQL - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException + * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}. + * Each value will be quoted if {@code quoted} is {@code true}. + *

+ * + * @param out the File to write the CSV data to + * @param conn the Connection to the database + * @param querySQL the SQL query to execute to retrieve the data + * @param writeTitle whether to write the column names as the first line + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final File out, final Connection conn, final String querySQL, final long offset, final long count, final boolean writeTitle, - final boolean quoted) throws SQLException, IOException { - return exportCSV(out, conn, querySQL, null, offset, count, writeTitle, quoted); + public static long exportCSV(final File out, final Connection conn, final String querySQL, final boolean writeTitle, final boolean quoted) + throws SQLException, IOException { + return exportCSV(out, conn, querySQL, null, writeTitle, quoted); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. + * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}. + * Each value will be quoted if {@code quoted} is {@code true}. + *
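+ * Example (a minimal sketch; {@code ds} and the {@code user1} table are assumed; only the listed columns are
+ * written and values are left unquoted):
+ * <pre>{@code
+ * try (Connection conn = ds.getConnection()) {
+ *     JdbcUtils.exportCSV(new File("user_names.csv"), conn, "SELECT * FROM user1",
+ *             N.asList("first_name", "last_name"), true, false);
+ * }
+ * }</pre>
+ *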

* - * @param out - * @param conn - * @param querySQL - * @param selectColumnNames - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param conn the Connection to the database + * @param querySQL the SQL query to execute to retrieve the data + * @param selectColumnNames the collection of column names to be selected + * @param offset the starting point of the data to be exported + * @param count the number of rows to be exported + * @param writeTitle whether to write the column names as the first line + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final File out, final Connection conn, final String querySQL, final Collection selectColumnNames, final long offset, - final long count, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { + public static long exportCSV(final File out, final Connection conn, final String querySQL, final Collection selectColumnNames, + final boolean writeTitle, final boolean quoted) throws SQLException, IOException { final ParsedSql sql = ParsedSql.parse(querySQL); try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, sql.getParameterizedSql(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) { setFetchForBigResult(conn, stmt); - return exportCSV(out, stmt, selectColumnNames, offset, count, writeTitle, quoted); + return exportCSV(out, stmt, selectColumnNames, writeTitle, quoted); } } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. + * The first line of the CSV file will contain the column names as the title, and each value will be quoted. + *

* - * @param out - * @param stmt - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param stmt the PreparedStatement to execute to retrieve the data + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ @SuppressWarnings("unchecked") public static long exportCSV(final File out, final PreparedStatement stmt) throws SQLException, IOException { - return exportCSV(out, stmt, 0, Long.MAX_VALUE, true, true); + return exportCSV(out, stmt, true, true); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param stmt - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException + * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}. + * Each value will be quoted if {@code quoted} is {@code true}. + *
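+ * Example (a minimal sketch; a live {@code Connection conn} and the {@code user1} table are assumed; the
+ * title line is skipped):
+ * <pre>{@code
+ * try (PreparedStatement stmt = conn.prepareStatement("SELECT * FROM user1 WHERE id >= ?")) {
+ *     stmt.setLong(1, 1000);
+ *     JdbcUtils.exportCSV(new File("user1.csv"), stmt, false, true);
+ * }
+ * }</pre>
+ *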

+ * + * @param out the File to write the CSV data to + * @param stmt the PreparedStatement to execute to retrieve the data + * @param writeTitle whether to write the column names as the first line + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final File out, final PreparedStatement stmt, final long offset, final long count, final boolean writeTitle, - final boolean quoted) throws SQLException, IOException { - return exportCSV(out, stmt, null, offset, count, writeTitle, quoted); + public static long exportCSV(final File out, final PreparedStatement stmt, final boolean writeTitle, final boolean quoted) + throws SQLException, IOException { + return exportCSV(out, stmt, null, writeTitle, quoted); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the database to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. + * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}. + * Each value will be quoted if {@code quoted} is {@code true}. + *

* - * @param out - * @param stmt - * @param selectColumnNames - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param stmt the PreparedStatement to execute to retrieve the data + * @param selectColumnNames the collection of column names to be selected + * @param writeTitle whether to write the column names as the first line + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final File out, final PreparedStatement stmt, final Collection selectColumnNames, final long offset, final long count, - final boolean writeTitle, final boolean quoted) throws SQLException, IOException { + public static long exportCSV(final File out, final PreparedStatement stmt, final Collection selectColumnNames, final boolean writeTitle, + final boolean quoted) throws SQLException, IOException { ResultSet rs = null; try { rs = JdbcUtil.executeQuery(stmt); // rs.setFetchSize(DEFAULT_FETCH_SIZE); - return exportCSV(out, rs, selectColumnNames, offset, count, writeTitle, quoted); + return exportCSV(out, rs, selectColumnNames, writeTitle, quoted); } finally { JdbcUtil.closeQuietly(rs); } } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the ResultSet to a CSV file. + *

* Each line in the output file/Writer is an array of JSON String without root bracket. + * The first line of the CSV file will contain the column names as the title, and each value will be quoted. + *
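+ * Example (a minimal sketch; a live {@code Connection conn} and the {@code user1} table are assumed):
+ * <pre>{@code
+ * try (PreparedStatement stmt = conn.prepareStatement("SELECT * FROM user1");
+ *      ResultSet rs = stmt.executeQuery()) {
+ *     JdbcUtils.exportCSV(new File("user1.csv"), rs);
+ * }
+ * }</pre>
+ *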

* - * @param out - * @param rs - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param rs the ResultSet containing the data to be exported + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final ResultSet rs) throws SQLException, IOException { - return exportCSV(out, rs, 0, Long.MAX_VALUE, true, true); + return exportCSV(out, rs, true, true); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the ResultSet to a CSV file.
+ *
 * Each line in the output file/Writer is an array of JSON String without root bracket.
- *
- * @param out
- * @param rs
- * @param offset
- * @param count
- * @param writeTitle
- * @param quoted
- * @return
- * @throws SQLException
- * @throws IOException
+ * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
+ * Each value will be quoted if {@code quoted} is {@code true}.
+ *
+ * + * @param out the File to write the CSV data to + * @param rs the ResultSet containing the data to be exported + * @param writeTitle whether to write the column names as the first line + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final File out, final ResultSet rs, final long offset, final long count, final boolean writeTitle, final boolean quoted) - throws SQLException, IOException { - return exportCSV(out, rs, null, offset, count, writeTitle, quoted); + public static long exportCSV(final File out, final ResultSet rs, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { + return exportCSV(out, rs, null, writeTitle, quoted); } /** + * Exports the data from the ResultSet to a CSV file. + *

 * Each line in the output file/Writer is an array of JSON String without root bracket.
+ * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
+ * Each value will be quoted if {@code quoted} is {@code true}.
+ *
* - * @param out - * @param rs - * @param selectColumnNames - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException + * @param out the File to write the CSV data to + * @param rs the ResultSet containing the data to be exported + * @param selectColumnNames the collection of column names to be selected + * @param writeTitle whether to write the column names as the first line + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final File out, final ResultSet rs, final Collection selectColumnNames, final long offset, final long count, - final boolean writeTitle, final boolean quoted) throws SQLException, IOException { + public static long exportCSV(final File out, final ResultSet rs, final Collection selectColumnNames, final boolean writeTitle, final boolean quoted) + throws SQLException, IOException { if (!out.exists()) { out.createNewFile(); //NOSONAR } - try (OutputStream os = new FileOutputStream(out)) { - final long result = exportCSV(os, rs, selectColumnNames, offset, count, writeTitle, quoted); - - os.flush(); - - return result; - } - } - - /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
- * Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param sourceDataSource - * @param querySQL - * @return - * @throws SQLException - * @throws IOException - */ - public static long exportCSV(final OutputStream out, final javax.sql.DataSource sourceDataSource, final String querySQL) throws SQLException, IOException { - final Connection conn = sourceDataSource.getConnection(); - - try { - return exportCSV(out, conn, querySQL); - } finally { - JdbcUtil.releaseConnection(conn, sourceDataSource); - } - } - - /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
- * Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param conn - * @param querySQL - * @return - * @throws SQLException - * @throws IOException - */ - public static long exportCSV(final OutputStream out, final Connection conn, final String querySQL) throws SQLException, IOException { - final ParsedSql sql = ParsedSql.parse(querySQL); - - try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, sql.getParameterizedSql(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - ResultSet rs = JdbcUtil.executeQuery(stmt)) { - - setFetchForBigResult(conn, stmt); - - return exportCSV(out, rs); + try (Writer writer = IOUtil.newFileWriter(out)) { + return exportCSV(writer, rs, selectColumnNames, writeTitle, quoted); } } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
- * Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param rs - * @return - * @throws SQLException - * @throws IOException - */ - public static long exportCSV(final OutputStream out, final ResultSet rs) throws SQLException, IOException { - return exportCSV(out, rs, 0, Long.MAX_VALUE, true, true); - } - - /** - * Exports the data from database to CVS. - *
- * Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param rs - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException - */ - public static long exportCSV(final OutputStream out, final ResultSet rs, final long offset, final long count, final boolean writeTitle, - final boolean quoted) throws SQLException, IOException { - return exportCSV(out, rs, null, offset, count, writeTitle, quoted); - } - - /** - * Exports the data from database to CVS. - *
- * Each line in the output file/Writer is an array of JSON String without root bracket. - * - * @param out - * @param rs - * @param selectColumnNames - * @param offset - * @param count - * @param writeTitle - * @param quoted - * @return - * @throws SQLException - * @throws IOException - */ - public static long exportCSV(final OutputStream out, final ResultSet rs, final Collection selectColumnNames, final long offset, final long count, - final boolean writeTitle, final boolean quoted) throws SQLException, IOException { - - final Writer writer = IOUtil.newOutputStreamWriter(out); - - final long result = exportCSV(writer, rs, selectColumnNames, offset, count, writeTitle, quoted); - - writer.flush(); - - return result; - } - - /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the database to a CSV file.
+ *
 * Each line in the output file/Writer is an array of JSON String without root bracket.
+ * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
+ *
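// Illustrative sketch (assumed names, not part of this change): streaming a query result as CSV to a
// Writer, letting the method obtain and release a connection from the given DataSource itself.
static long exportAccountNames(final javax.sql.DataSource ds) throws java.sql.SQLException, java.io.IOException {
    try (java.io.Writer out = new java.io.FileWriter("account_names.csv")) {
        return JdbcUtils.exportCSV(out, ds, "SELECT id, first_name, last_name FROM account");
    }
}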

* - * @param out - * @param sourceDataSource - * @param querySQL - * @return - * @throws SQLException - * @throws IOException + * @param out the Writer to write the CSV data to + * @param sourceDataSource the DataSource to get the database connection from + * @param querySQL the SQL query to execute to retrieve the data + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long exportCSV(final Writer out, final javax.sql.DataSource sourceDataSource, final String querySQL) throws SQLException, IOException { final Connection conn = sourceDataSource.getConnection(); @@ -2612,16 +2157,18 @@ public static long exportCSV(final Writer out, final javax.sql.DataSource source } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the database to a CSV file.
+ *
 * Each line in the output file/Writer is an array of JSON String without root bracket.
+ * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
+ *
* - * @param out - * @param conn - * @param querySQL - * @return - * @throws SQLException - * @throws IOException + * @param out the Writer to write the CSV data to + * @param conn the Connection to the database + * @param querySQL the SQL query to execute to retrieve the data + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long exportCSV(final Writer out, final Connection conn, final String querySQL) throws SQLException, IOException { final ParsedSql sql = ParsedSql.parse(querySQL); @@ -2636,61 +2183,64 @@ public static long exportCSV(final Writer out, final Connection conn, final Stri } /** - * Exports the data from database to CVS. Title will be added at the first line and columns will be quoted. - *
+ * Exports the data from the ResultSet to a CSV file.
+ *
 * Each line in the output file/Writer is an array of JSON String without root bracket.
+ * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
+ *
* - * @param out - * @param rs - * @return - * @throws SQLException - * @throws IOException + * @param out the Writer to write the CSV data to + * @param rs the ResultSet containing the data to be exported + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ public static long exportCSV(final Writer out, final ResultSet rs) throws SQLException, IOException { - return exportCSV(out, rs, 0, Long.MAX_VALUE, true, true); + return exportCSV(out, rs, true, true); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the ResultSet to a CSV file.
+ *
 * Each line in the output file/Writer is an array of JSON String without root bracket.
- *
- * @param out
- * @param rs
- * @param offset
- * @param count
- * @param writeTitle
- * @param quoted
- * @return
- * @throws SQLException
- * @throws IOException
+ * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true},
+ * and each value will be quoted if {@code quoted} is {@code true}.
+ *
+ * + * @param out the Writer to write the CSV data to + * @param rs the ResultSet containing the data to be exported + * @param writeTitle whether to write the column names as the first line of the CSV file + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ - public static long exportCSV(final Writer out, final ResultSet rs, final long offset, final long count, final boolean writeTitle, final boolean quoted) - throws SQLException, IOException { - return exportCSV(out, rs, null, offset, count, writeTitle, quoted); + public static long exportCSV(final Writer out, final ResultSet rs, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { + return exportCSV(out, rs, null, writeTitle, quoted); } /** - * Exports the data from database to CVS. - *
+ * Exports the data from the ResultSet to a CSV file.
+ *
 * Each line in the output file/Writer is an array of JSON String without root bracket.
- *
- * @param out
- * @param rs
- * @param selectColumnNames
- * @param offset
- * @param count
- * @param writeTitle
- * @param quoted
- * @return
- * @throws IllegalArgumentException
- * @throws SQLException
- * @throws IOException
+ * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true},
+ * and each value will be quoted if {@code quoted} is {@code true}.
+ *
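// Illustrative sketch (assumed names, not part of this change): exporting only selected columns of an
// already-open ResultSet, without a title line and without quoting the values.
static String toCsv(final java.sql.ResultSet rs) throws java.sql.SQLException, java.io.IOException {
    final java.io.StringWriter out = new java.io.StringWriter();
    JdbcUtils.exportCSV(out, rs, java.util.Arrays.asList("id", "first_name"), false, false);
    return out.toString();
}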

+ * + * @param out the Writer to write the CSV data to + * @param rs the ResultSet containing the data to be exported + * @param selectColumnNames the collection of column names to be selected for export + * @param writeTitle whether to write the column names as the first line of the CSV file + * @param quoted whether to quote each value in the CSV file + * @return the number of rows exported + * @throws IllegalArgumentException if an argument is invalid + * @throws SQLException if a database access error occurs + * @throws IOException if an I/O error occurs */ @SuppressWarnings("deprecation") - public static long exportCSV(final Writer out, final ResultSet rs, final Collection selectColumnNames, final long offset, final long count, - final boolean writeTitle, final boolean quoted) throws IllegalArgumentException, SQLException, IOException { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + public static long exportCSV(final Writer out, final ResultSet rs, final Collection selectColumnNames, final boolean writeTitle, + final boolean quoted) throws IllegalArgumentException, SQLException, IOException { + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); final JSONSerializationConfig config = JSC.create(); config.setDateTimeFormat(DateTimeFormat.ISO_8601_TIMESTAMP); @@ -2754,11 +2304,7 @@ public static long exportCSV(final Writer out, final ResultSet rs, final Collect Type type = null; Object value = null; - if (offset > 0) { - JdbcUtil.skip(rs, offset); - } - - while (result < count && rs.next()) { + while (rs.next()) { if (result++ > 0) { bw.write(IOUtil.LINE_SEPARATOR); } @@ -2825,10 +2371,14 @@ public void accept(final PreparedQuery stmt, final ResultSet rs) throws SQLExcep }; /** + * Creates a parameter setter for a PreparedQuery using the provided ColumnGetter. + *

+ * This method returns a BiConsumer that sets parameters on a PreparedQuery based on the values
+ * from a ResultSet using the specified ColumnGetter.
+ *
* - * - * @param columnGetterForAll - * @return + * @param columnGetterForAll the ColumnGetter to use for setting parameters + * @return a BiConsumer that sets parameters on a PreparedQuery using the ResultSet */ @Beta @SequentialOnly @@ -2851,12 +2401,16 @@ public void accept(final PreparedQuery stmt, final ResultSet rs) throws SQLExcep } /** + * Copies data from a source data source to a target data source for the specified table. + *

+ * This method copies all data from the table in the source data source to the table in the target data source.
+ *
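// Illustrative sketch (assumed names, not part of this change; it assumes these copy overloads live in
// the same JdbcUtils class as the export methods): copying every row of one table between two databases
// with the default fetch and batch sizes.
static long migrateAccountTable(final javax.sql.DataSource sourceDs, final javax.sql.DataSource targetDs) throws java.sql.SQLException {
    return JdbcUtils.copy(sourceDs, targetDs, "account");
}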

* - * @param sourceDataSource - * @param targetDataSource - * @param tableName - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param targetDataSource the data source to which to copy data + * @param tableName the name of the table to copy + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String tableName) throws SQLException { @@ -2864,13 +2418,17 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final javax } /** + * Copies data from a source data source to a target data source for the specified table. + *

+ * This method copies all data from the table in the source data source to the table in the target data source.
+ *
* - * @param sourceDataSource - * @param targetDataSource - * @param sourceTableName - * @param targetTableName - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param targetDataSource the data source to which to copy data + * @param sourceTableName the name of the table in the source data source + * @param targetTableName the name of the table in the target data source + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName) throws SQLException { @@ -2878,14 +2436,18 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final javax } /** + * Copies data from a source data source to a target data source for the specified table. + *

+ * This method copies all data from the table in the source data source to the table in the target data source.
+ *
* - * @param sourceDataSource - * @param targetDataSource - * @param sourceTableName - * @param targetTableName - * @param batchSize - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param targetDataSource the data source to which to copy data + * @param sourceTableName the name of the table in the source data source + * @param targetTableName the name of the table in the target data source + * @param batchSize the number of rows to copy in each batch + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName, final int batchSize) throws SQLException { @@ -2910,14 +2472,18 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final javax } /** + * Copies data from a source data source to a target data source for the specified table and columns. + *

+ * This method copies data from the specified columns of the table in the source data source to the table in the target data source.
+ *
* - * @param sourceDataSource - * @param targetDataSource - * @param sourceTableName - * @param targetTableName - * @param selectColumnNames - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param targetDataSource the data source to which to copy data + * @param sourceTableName the name of the table in the source data source + * @param targetTableName the name of the table in the target data source + * @param selectColumnNames the collection of column names to copy + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName, final Collection selectColumnNames) throws SQLException { @@ -2925,15 +2491,19 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final javax } /** + * Copies data from a source data source to a target data source for the specified table and columns. + *

+ * This method copies data from the specified columns of the table in the source data source to the table in the target data source.
+ *
* - * @param sourceDataSource - * @param targetDataSource - * @param sourceTableName - * @param targetTableName - * @param selectColumnNames - * @param batchSize - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param targetDataSource the data source to which to copy data + * @param sourceTableName the name of the table in the source data source + * @param targetTableName the name of the table in the target data source + * @param selectColumnNames the collection of column names to copy + * @param batchSize the number of rows to copy in each batch + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName, final Collection selectColumnNames, final int batchSize) throws SQLException { @@ -2958,13 +2528,17 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final javax } /** + * Copies data from a source data source to a target data source using the specified SQL queries. + *

+ * This method copies data from the result of the select SQL query in the source data source to the target data source using the insert SQL query.
+ *
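// Illustrative sketch (assumed names, not part of this change): copying with explicit select/insert SQL,
// e.g. to archive a subset of columns into a differently named target table.
static long archiveAccounts(final javax.sql.DataSource sourceDs, final javax.sql.DataSource targetDs) throws java.sql.SQLException {
    return JdbcUtils.copy(sourceDs,
            "SELECT id, first_name FROM account",
            targetDs,
            "INSERT INTO account_archive (id, first_name) VALUES (?, ?)");
}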

* - * @param sourceDataSource - * @param selectSql - * @param targetDataSource - * @param insertSql - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param selectSql the SQL query to select data from the source data source + * @param targetDataSource the data source to which to copy data + * @param insertSql the SQL query to insert data into the target data source + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final javax.sql.DataSource targetDataSource, final String insertSql) throws SQLException { @@ -2972,51 +2546,66 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final Strin } /** + * Copies data from a source data source to a target data source using the specified SQL queries and fetch size. + *

+ * This method copies data from the result of the select SQL query in the source data source to the target data source using the insert SQL query.
+ *
* - * @param sourceDataSource - * @param selectSql + * @param sourceDataSource the data source from which to copy data + * @param selectSql the SQL query to select data from the source data source * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depends on how big one record is and how much memory is available. - * @param targetDataSource - * @param insertSql - * @param batchSize - * @return - * @throws SQLException + * @param targetDataSource the data source to which to copy data + * @param insertSql the SQL query to insert data into the target data source + * @param batchSize the number of rows to copy in each batch + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final int fetchSize, final javax.sql.DataSource targetDataSource, final String insertSql, final int batchSize) throws SQLException { - return copy(sourceDataSource, selectSql, fetchSize, targetDataSource, insertSql, batchSize, supplierOfStmtSetterByRS.get()); + return copy(sourceDataSource, selectSql, fetchSize, targetDataSource, insertSql, batchSize, 0, supplierOfStmtSetterByRS.get()); } /** + * Copies data from a source data source to a target data source using the specified SQL queries and statement setter. + *

+ * This method executes a select SQL query on the source data source and inserts the result into the target data source using the provided insert SQL query.
+ * The statement setter is used to set parameters on the prepared statement.
+ *
* - * @param sourceDataSource - * @param selectSql - * @param targetDataSource - * @param insertSql - * @param stmtSetter - * @return - * @throws SQLException + * @param sourceDataSource the data source from which to copy data + * @param selectSql the SQL query to select data from the source data source + * @param targetDataSource the data source to which to copy data + * @param insertSql the SQL query to insert data into the target data source + * @param stmtSetter a bi-consumer to set parameters on the prepared statement + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final javax.sql.DataSource targetDataSource, final String insertSql, final Throwables.BiConsumer stmtSetter) throws SQLException { - return copy(sourceDataSource, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetDataSource, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, + return copy(sourceDataSource, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetDataSource, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** + * Copies data from a source data source to a target data source using the specified SQL queries and statement setter. + *

+ * This method executes a select SQL query on the source data source and inserts the result into the target data source using the provided insert SQL query.
+ * The statement setter is used to set parameters on the prepared statement.
+ *
* - * @param sourceDataSource - * @param selectSql + * @param sourceDataSource the data source from which to copy data + * @param selectSql the SQL query to select data from the source data source * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depends on how big one record is and how much memory is available. - * @param targetDataSource - * @param insertSql - * @param batchSize - * @param stmtSetter - * @return - * @throws SQLException + * @param targetDataSource the data source to which to copy data + * @param insertSql the SQL query to insert data into the target data source + * @param batchSize the number of rows to copy in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter a bi-consumer to set parameters on the prepared statement + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final int fetchSize, - final javax.sql.DataSource targetDataSource, final String insertSql, final int batchSize, + final javax.sql.DataSource targetDataSource, final String insertSql, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { Connection sourceConn = null; Connection targetConn = null; @@ -3025,7 +2614,7 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final Strin sourceConn = JdbcUtil.getConnection(sourceDataSource); targetConn = JdbcUtil.getConnection(targetDataSource); - return copy(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, stmtSetter); + return copy(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, batchIntervalInMillis, stmtSetter); } finally { if (sourceConn != null) { JdbcUtil.releaseConnection(sourceConn, sourceDataSource); @@ -3038,25 +2627,33 @@ public static long copy(final javax.sql.DataSource sourceDataSource, final Strin } /** + * Copies data from a source data source to a target data source using the specified table name. + *

+ * This method copies data from the source table to the target table within the same database or across different databases.
+ *
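// Illustrative sketch (assumed names, not part of this change): a connection-level copy of a whole table,
// useful when the caller already manages the two connections (and their transactions) itself.
static long copyAccountTable(final java.sql.Connection sourceConn, final java.sql.Connection targetConn) throws java.sql.SQLException {
    return JdbcUtils.copy(sourceConn, targetConn, "account");
}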

* - * @param sourceConn - * @param targetConn - * @param tableName - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param targetConn the connection to the target database + * @param tableName the name of the table to copy data from and to + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String tableName) throws SQLException { return copy(sourceConn, targetConn, tableName, tableName); } /** + * Copies data from a source table to a target table using the specified connections. + *

+ * This method copies data from the source table to the target table within the same database or across different databases.
+ *
* - * @param sourceConn - * @param targetConn - * @param sourceTableName - * @param targetTableName - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param targetConn the connection to the target database + * @param sourceTableName the name of the source table to copy data from + * @param targetTableName the name of the target table to copy data to + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName) throws SQLException { @@ -3064,14 +2661,18 @@ public static long copy(final Connection sourceConn, final Connection targetConn } /** + * Copies data from a source table to a target table using the specified connections. + *

+ * This method copies data from the source table to the target table within the same database or across different databases.
+ *
* - * @param sourceConn - * @param targetConn - * @param sourceTableName - * @param targetTableName - * @param batchSize - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param targetConn the connection to the target database + * @param sourceTableName the name of the source table to copy data from + * @param targetTableName the name of the target table to copy data to + * @param batchSize the number of rows to copy in each batch + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName, final int batchSize) throws SQLException { @@ -3086,14 +2687,18 @@ public static long copy(final Connection sourceConn, final Connection targetConn } /** + * Copies data from a source table to a target table using the specified connections and column names. + *

+ * This method copies data from the source table to the target table within the same database or across different databases.
+ *
* - * @param sourceConn - * @param targetConn - * @param sourceTableName - * @param targetTableName - * @param selectColumnNames - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param targetConn the connection to the target database + * @param sourceTableName the name of the source table to copy data from + * @param targetTableName the name of the target table to copy data to + * @param selectColumnNames the collection of column names to be copied + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName, final Collection selectColumnNames) throws SQLException { @@ -3101,15 +2706,19 @@ public static long copy(final Connection sourceConn, final Connection targetConn } /** + * Copies data from a source table to a target table using the specified connections, column names, and batch size. + *

+ * This method copies data from the source table to the target table within the same database or across different databases.
+ *
* - * @param sourceConn - * @param targetConn - * @param sourceTableName - * @param targetTableName - * @param selectColumnNames - * @param batchSize - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param targetConn the connection to the target database + * @param sourceTableName the name of the source table to copy data from + * @param targetTableName the name of the target table to copy data to + * @param selectColumnNames the collection of column names to be copied + * @param batchSize the number of rows to be copied in each batch + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName, final Collection selectColumnNames, final int batchSize) throws SQLException { @@ -3173,84 +2782,82 @@ private static String generateInsertSql(final Connection conn, final String tabl } /** + * Copies data from a source database to a target database using the specified SQL queries. + *

+ * This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL.
+ *
* - * @param sourceConn - * @param selectSql - * @param targetConn - * @param insertSql - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param selectSql the SQL query to select data from the source database + * @param targetConn the connection to the target database + * @param insertSql the SQL query to insert data into the target database + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final Connection targetConn, final String insertSql) throws SQLException { return copy(sourceConn, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetConn, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE); } /** + * Copies data from a source database to a target database using the specified SQL queries. + *

+ * This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL.
+ *
* - * @param sourceConn - * @param selectSql - * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depends on how big one record is and how much memory is available. - * @param targetConn - * @param insertSql - * @param batchSize - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param selectSql the SQL query to select data from the source database + * @param fetchSize the number of rows to fetch at a time from the source database; should be larger than batchSize + * @param targetConn the connection to the target database + * @param insertSql the SQL query to insert data into the target database + * @param batchSize the number of rows to be copied in each batch + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final int fetchSize, final Connection targetConn, final String insertSql, final int batchSize) throws SQLException { - return copy(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, supplierOfStmtSetterByRS.get()); + return copy(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, 0, supplierOfStmtSetterByRS.get()); } /** + * Copies data from a source database to a target database using the specified SQL queries and a custom statement setter. + *

+ * This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL.
+ * The custom statement setter is used to set the parameters of the prepared statement.
+ *
* - * - * @param sourceConn - * @param selectSql - * @param targetConn - * @param insertSql - * @param stmtSetter - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param selectSql the SQL query to select data from the source database + * @param targetConn the connection to the target database + * @param insertSql the SQL query to insert data into the target database + * @param stmtSetter the custom statement setter to set the parameters of the prepared statement + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final Connection targetConn, final String insertSql, final Throwables.BiConsumer stmtSetter) throws SQLException { - return copy(sourceConn, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetConn, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, stmtSetter); + return copy(sourceConn, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetConn, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** + * Copies data from a source database to a target database using the specified SQL queries. + *

+ * This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL.
+ * The custom statement setter is used to set the parameters of the prepared statement.
+ *
* - * @param sourceConn - * @param selectSql - * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depends on how big one record is and how much memory is available. - * @param targetConn - * @param insertSql - * @param batchSize - * @param stmtSetter - * @return - * @throws SQLException + * @param sourceConn the connection to the source database + * @param selectSql the SQL query to select data from the source database + * @param fetchSize the number of rows to fetch at a time from the source database; should be larger than batchSize + * @param targetConn the connection to the target database + * @param insertSql the SQL query to insert data into the target database + * @param batchSize the number of rows to be copied in each batch + * @param batchIntervalInMillis the interval in milliseconds between each batch + * @param stmtSetter the custom statement setter to set the parameters of the prepared statement + * @return the number of rows copied + * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final int fetchSize, final Connection targetConn, final String insertSql, - final int batchSize, final Throwables.BiConsumer stmtSetter) throws SQLException { - return copy(sourceConn, selectSql, fetchSize, 0, Long.MAX_VALUE, targetConn, insertSql, batchSize, 0, stmtSetter); - } - - /** - * - * @param sourceConn - * @param selectSql - * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depends on how big one record is and how much memory is available. - * @param offset - * @param count - * @param targetConn - * @param insertSql - * @param batchSize - * @param batchIntervalInMillis - * @param stmtSetter - * @return - * @throws SQLException - */ - public static long copy(final Connection sourceConn, final String selectSql, final int fetchSize, final long offset, final long count, - final Connection targetConn, final String insertSql, final int batchSize, final long batchIntervalInMillis, + final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { PreparedStatement selectStmt = null; PreparedStatement insertStmt = null; @@ -3263,7 +2870,7 @@ public static long copy(final Connection sourceConn, final String selectSql, fin insertStmt = JdbcUtil.prepareStatement(targetConn, insertSql); - copy(selectStmt, offset, count, insertStmt, batchSize, batchIntervalInMillis, stmtSetter); + copy(selectStmt, insertStmt, batchSize, batchIntervalInMillis, stmtSetter); } finally { JdbcUtil.closeQuietly(selectStmt); JdbcUtil.closeQuietly(insertStmt); @@ -3273,24 +2880,9 @@ public static long copy(final Connection sourceConn, final String selectSql, fin } /** - * - * @param selectStmt - * @param insertStmt - * @param stmtSetter - * @return - * @throws SQLException - */ - public static long copy(final PreparedStatement selectStmt, final PreparedStatement insertStmt, - final Throwables.BiConsumer stmtSetter) throws SQLException { - return copy(selectStmt, 0, Long.MAX_VALUE, insertStmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); - } - - /** - * Copies data selected from by {@code selectStmt} to a target data source inserted by {@code insertStmt}. + * Copies data selected from by {@code selectStmt} to a target data source inserted by {@code insertStmt}. * * @param selectStmt the PreparedStatement used to select data from the source. 
- * @param offset the starting point from which to begin copying data. - * @param count the number of rows to copy. * @param insertStmt the PreparedStatement used to insert data into the target. * @param batchSize the number of rows to process in each batch. * @param batchIntervalInMillis the interval in milliseconds between each batch. @@ -3298,10 +2890,9 @@ public static long copy(final PreparedStatement selectStmt, final PreparedStatem * @return the number of rows copied. * @throws SQLException if a database access error occurs. */ - public static long copy(final PreparedStatement selectStmt, final long offset, final long count, final PreparedStatement insertStmt, final int batchSize, - final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) - throws SQLException { - N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count); + public static long copy(final PreparedStatement selectStmt, final PreparedStatement insertStmt, final int batchSize, final long batchIntervalInMillis, + final Throwables.BiConsumer stmtSetter) throws SQLException { + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis); @@ -3314,13 +2905,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f try { rs = JdbcUtil.executeQuery(selectStmt); - if (offset > 0) { - JdbcUtil.skip(rs, offset); - } - long cnt = 0; - while (cnt < count && rs.next()) { + while (rs.next()) { cnt++; stmtSetterForInsert.accept(preparedQueryForInsert, rs); @@ -3472,7 +3059,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @return // * @throws SQLException // */ - // public static long copyInParallel(final Connection sourceConn, final String selectSql, final int fetchSize, final long offset, final long count, + // public static long copyInParallel(final Connection sourceConn, final String selectSql, final int fetchSize, // final Connection targetConn, final String insertSql, final int batchSize, final long batchIntervalInMillis, // final Throwables.BiConsumer stmtSetter) throws SQLException { // PreparedStatement selectStmt = null; @@ -3486,7 +3073,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // // insertStmt = JdbcUtil.prepareStatement(targetConn, insertSql); // - // copyInParallel(selectStmt, offset, count, insertStmt, batchSize, batchIntervalInMillis, Jdbc.BiRowMapper.TO_ARRAY, stmtSetter); + // copyInParallel(selectStmt, insertStmt, batchSize, batchIntervalInMillis, Jdbc.BiRowMapper.TO_ARRAY, stmtSetter); // } finally { // JdbcUtil.closeQuietly(selectStmt); // JdbcUtil.closeQuietly(insertStmt); @@ -3535,10 +3122,10 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @return // * @throws SQLException // */ - // public static long copyInParallel(final PreparedStatement selectStmt, final long offset, final long count, final PreparedStatement insertStmt, + // public static long copyInParallel(final PreparedStatement selectStmt, final PreparedStatement insertStmt, // final int batchSize, final long batchIntervalInMillis, final Jdbc.BiRowMapper rowMapper, // final Throwables.BiConsumer stmtSetter) throws SQLException { - // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", 
offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); // N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", // batchSize, batchIntervalInMillis); // @@ -3624,9 +3211,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws SQLException // * @throws E // */ - // public static void parse(final Connection conn, final String sql, final long offset, final long count, + // public static void parse(final Connection conn, final String sql, // final Throwables.Consumer rowParser) throws SQLException, E { - // parse(conn, sql, offset, count, rowParser, Fn.emptyAction()); + // parse(conn, sql, rowParser, Fn.emptyAction()); // } // // /** @@ -3643,9 +3230,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws E // * @throws E2 // */ - // public static void parse(final Connection conn, final String sql, final long offset, final long count, + // public static void parse(final Connection conn, final String sql, // final Throwables.Consumer rowParser, final Throwables.Runnable onComplete) throws SQLException, E, E2 { - // parse(conn, sql, offset, count, 0, 0, rowParser, onComplete); + // parse(conn, sql, 0, 0, rowParser, onComplete); // } // // /** @@ -3661,9 +3248,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws SQLException // * @throws E // */ - // public static void parse(final Connection conn, final String sql, final long offset, final long count, final int processThreadNum, + // public static void parse(final Connection conn, final String sql, final int processThreadNum, // final int queueSize, final Throwables.Consumer rowParser) throws SQLException, E { - // parse(conn, sql, offset, count, processThreadNum, queueSize, rowParser, Fn.emptyAction()); + // parse(conn, sql, processThreadNum, queueSize, rowParser, Fn.emptyAction()); // } // // /** @@ -3683,14 +3270,14 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws E // * @throws E2 // */ - // public static void parse(final Connection conn, final String sql, final long offset, final long count, + // public static void parse(final Connection conn, final String sql, // final int processThreadNum, final int queueSize, final Throwables.Consumer rowParser, final Throwables.Runnable onComplete) // throws SQLException, E, E2 { // try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, sql)) { // // setFetchForBigResult(conn, stmt); // - // parse(stmt, offset, count, processThreadNum, queueSize, rowParser, onComplete); + // parse(stmt, processThreadNum, queueSize, rowParser, onComplete); // } // } // @@ -3732,9 +3319,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws SQLException // * @throws E // */ - // public static void parse(final PreparedStatement stmt, final long offset, final long count, + // public static void parse(final PreparedStatement stmt, // final Throwables.Consumer rowParser) throws SQLException, E { - // parse(stmt, offset, count, rowParser, Fn.emptyAction()); + // parse(stmt, rowParser, Fn.emptyAction()); // } // // /** @@ -3750,9 +3337,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws E // * @throws E2 // */ - // public static void parse(final PreparedStatement stmt, final long offset, final long 
count, + // public static void parse(final PreparedStatement stmt, // final Throwables.Consumer rowParser, final Throwables.Runnable onComplete) throws SQLException, E, E2 { - // parse(stmt, offset, count, 0, 0, rowParser, onComplete); + // parse(stmt, 0, 0, rowParser, onComplete); // } // // /** @@ -3767,9 +3354,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws SQLException // * @throws E // */ - // public static void parse(final PreparedStatement stmt, final long offset, final long count, final int processThreadNum, + // public static void parse(final PreparedStatement stmt, final int processThreadNum, // final int queueSize, final Throwables.Consumer rowParser) throws SQLException, E { - // parse(stmt, offset, count, processThreadNum, queueSize, rowParser, Fn.emptyAction()); + // parse(stmt, processThreadNum, queueSize, rowParser, Fn.emptyAction()); // } // // /** @@ -3788,7 +3375,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // * @throws E // * @throws E2 // */ - // public static void parse(final PreparedStatement stmt, final long offset, final long count, + // public static void parse(final PreparedStatement stmt, // final int processThreadNum, final int queueSize, final Throwables.Consumer rowParser, final Throwables.Runnable onComplete) // throws SQLException, E, E2 { // ResultSet rs = null; @@ -3796,7 +3383,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // try { // rs = JdbcUtil.executeQuery(stmt); // - // parse(rs, offset, count, processThreadNum, queueSize, rowParser, onComplete); + // parse(rs, processThreadNum, queueSize, rowParser, onComplete); // } finally { // JdbcUtil.closeQuietly(rs); // } @@ -3842,7 +3429,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // */ // public static void parse(final ResultSet rs, long offset, long count, final Throwables.Consumer rowParser) // throws SQLException, E { - // parse(rs, offset, count, rowParser, Fn.emptyAction()); + // parse(rs, rowParser, Fn.emptyAction()); // } // // /** @@ -3860,7 +3447,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // */ // public static void parse(final ResultSet rs, long offset, long count, // final Throwables.Consumer rowParser, final Throwables.Runnable onComplete) throws SQLException, E, E2 { - // parse(rs, offset, count, 0, 0, rowParser, onComplete); + // parse(rs, 0, 0, rowParser, onComplete); // } // // /** @@ -3877,7 +3464,7 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // */ // public static void parse(final ResultSet rs, long offset, long count, final int processThreadNum, final int queueSize, // final Throwables.Consumer rowParser) throws SQLException, E { - // parse(rs, offset, count, processThreadNum, queueSize, rowParser, Fn.emptyAction()); + // parse(rs, processThreadNum, queueSize, rowParser, Fn.emptyAction()); // } // // /** @@ -3897,9 +3484,9 @@ public static long copy(final PreparedStatement selectStmt, final long offset, f // */ // public static void parse(final ResultSet rs, long offset, long count, final int processThreadNum, // final int queueSize, final Throwables.Consumer rowParser, final Throwables.Runnable onComplete) throws E, E2 { - // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can not be negative", offset, count); + // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can not be negative"); // - 
// Iterators.forEach(iter, offset, count, processThreadNum, queueSize, elementParser); + // Iterators.forEach(iter, processThreadNum, queueSize, elementParser); // } private static void setFetchForBigResult(final Connection conn, final PreparedStatement stmt) throws SQLException { diff --git a/src/main/java/com/landawn/abacus/jdbc/OP.java b/src/main/java/com/landawn/abacus/jdbc/OP.java index c71f5f5b..9f97ff46 100644 --- a/src/main/java/com/landawn/abacus/jdbc/OP.java +++ b/src/main/java/com/landawn/abacus/jdbc/OP.java @@ -37,14 +37,8 @@ public enum OP { */ stream, - /** - * - */ queryForSingle, - /** - * - */ queryForUnique, /** @@ -67,21 +61,12 @@ public enum OP { */ executeAndGetOutParameters, - /** - * - */ update, - /** - * - */ largeUpdate, /* batchUpdate,*/ - /** - * - */ DEFAULT; }
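// Illustrative usage sketch for the simplified copy API above (table, column and size values are
// assumptions, not part of this change): a connection-level copy with an explicit fetch size and batch
// size, where the fetch size is kept larger than the batch size as the Javadoc recommends.
static long copyAccounts(final java.sql.Connection sourceConn, final java.sql.Connection targetConn) throws java.sql.SQLException {
    return JdbcUtils.copy(sourceConn,
            "SELECT id, first_name FROM account",
            2000,  // fetchSize: should be larger than batchSize
            targetConn,
            "INSERT INTO account (id, first_name) VALUES (?, ?)",
            200);  // batchSize
}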