This article covers connecting to MySQL with JDBC, focusing on Blob data and batch processing; hopefully it serves as a useful reference for developers tackling similar problems.
Working with Blob-type data
package com.atguigu5.blob;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.junit.Test;

import com.atguigu3.bean.Customer;
import com.atguigu3.util.JDBCUtils;

/**
 * Using PreparedStatement to work with Blob-type data
 */
public class BlobTest {

    // Insert a Blob field into the customers table
    @Test
    public void testInsert1() throws Exception {
        Connection conn = JDBCUtils.getConnection();
        String sql = "insert into customers(name,email,birth,photo) values(?,?,?,?)";
        PreparedStatement ps = conn.prepareStatement(sql);
        ps.setObject(1, "李阿豪");
        ps.setObject(2, "li@qq.com");
        ps.setObject(3, "1992-09-08");
        // Bind the Blob parameter from an input stream
        FileInputStream is = new FileInputStream(new File("壁纸3.jpg"));
        ps.setBlob(4, is);
        ps.execute();
        is.close();
        JDBCUtils.closeResource(conn, ps);
    }

    // Insert a Blob field larger than the default packet size into the customers table
    @Test
    public void testInsert2() throws Exception {
        Connection conn = JDBCUtils.getConnection();
        String sql = "insert into customers(name,email,birth,photo) values(?,?,?,?)";
        PreparedStatement ps = conn.prepareStatement(sql);
        ps.setObject(1, "元昊");
        ps.setObject(2, "yuanhao@qq.com");
        ps.setObject(3, "1992-09-08");
        // Bind the Blob parameter from an input stream
        FileInputStream is = new FileInputStream(new File("壁纸5.jpg"));
        ps.setBlob(4, is);
        ps.execute();
        is.close();
        JDBCUtils.closeResource(conn, ps);
        // Append max_allowed_packet=16M to the end of MySQL's my.ini, then restart the service:
        // "This PC" -> "Manage" -> "Services and Applications" -> "Services" -> "MySQL" -> "Restart".
        // After that, images larger than 1 MB can be uploaded to MySQL.
    }

    // Update a Blob field in the customers table
    @Test
    public void testUpdate() throws Exception {
        Connection conn = JDBCUtils.getConnection();
        String sql = "update customers set photo = ? where id = ?";
        PreparedStatement ps = conn.prepareStatement(sql);
        // Fill the placeholders: first the Blob from an input stream, then the target id
        FileInputStream is = new FileInputStream(new File("壁纸1.jpeg"));
        ps.setBlob(1, is);
        // 23: the id of the row to modify
        ps.setInt(2, 23);
        ps.execute();
        is.close();
        JDBCUtils.closeResource(conn, ps);
    }

    // Query a Blob field from the customers table
    @Test
    public void testQuery() {
        Connection conn = null;
        PreparedStatement ps = null;
        InputStream is = null;
        FileOutputStream fos = null;
        ResultSet rs = null;
        try {
            conn = JDBCUtils.getConnection();
            String sql = "select id,name,email,birth,photo from customers where id = ?";
            ps = conn.prepareStatement(sql);
            ps.setInt(1, 23);
            rs = ps.executeQuery();
            if (rs.next()) {
                // Option 1: read columns by position, similar to an ArrayList
                // int id = rs.getInt(1);
                // String name = rs.getString(2);
                // String email = rs.getString(3);
                // Date birth = rs.getDate(4);

                // Option 2: read columns by label
                int id = rs.getInt("id");
                String name = rs.getString("name");
                String email = rs.getString("email");
                Date birth = rs.getDate("birth");
                Customer cust = new Customer(id, name, email, birth);
                System.out.println(cust);

                // Download the Blob field and save it as a local file.
                // The photo can be large, so read it through a stream.
                Blob photo = rs.getBlob("photo");
                is = photo.getBinaryStream();
                fos = new FileOutputStream("zhangyuhao.jpg");
                byte[] buffer = new byte[1024];
                int len;
                while ((len = is.read(buffer)) != -1) {
                    fos.write(buffer, 0, len);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (is != null)
                    is.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                if (fos != null)
                    fos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            JDBCUtils.closeResource(conn, ps, rs);
        }
    }
}
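The comment in testInsert2 says to raise max_allowed_packet to 16M and restart MySQL. As a quick sanity check that the new value is actually in effect, you can query the system variable over the same connection. This is only a sketch, not part of the original post; it assumes the same JDBCUtils helper, and the class name PacketSizeCheck is purely illustrative.

// Sketch: confirm that max_allowed_packet was raised after editing my.ini and restarting MySQL.
package com.atguigu5.blob;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

import com.atguigu3.util.JDBCUtils;

public class PacketSizeCheck {
    public static void main(String[] args) throws Exception {
        Connection conn = JDBCUtils.getConnection();
        PreparedStatement ps = conn.prepareStatement("SHOW VARIABLES LIKE 'max_allowed_packet'");
        ResultSet rs = ps.executeQuery();
        if (rs.next()) {
            // The value is reported in bytes; 16M corresponds to 16777216
            System.out.println(rs.getString("Variable_name") + " = " + rs.getString("Value"));
        }
        JDBCUtils.closeResource(conn, ps, rs);
    }
}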
To let MySQL rewrite batched statements, append ?rewriteBatchedStatements=true to the url entry in the configuration file:
user=root
password=123456
url=jdbc:mysql://localhost:3306/test?rewriteBatchedStatements=true
driverClass=com.mysql.cj.jdbc.Driver
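The JDBCUtils helper used throughout is not shown in the original post. A minimal sketch that matches the property keys above (user, password, url, driverClass) could look like the following; the file name jdbc.properties and the exact closeResource signatures are assumptions, not the author's actual implementation.

package com.atguigu3.util;

import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class JDBCUtils {

    // Open a connection using the keys in jdbc.properties (file name assumed)
    public static Connection getConnection() throws Exception {
        InputStream is = JDBCUtils.class.getClassLoader().getResourceAsStream("jdbc.properties");
        Properties props = new Properties();
        props.load(is);

        String user = props.getProperty("user");
        String password = props.getProperty("password");
        String url = props.getProperty("url");
        String driverClass = props.getProperty("driverClass");

        Class.forName(driverClass);
        return DriverManager.getConnection(url, user, password);
    }

    // Close statement and connection, logging any close-time exceptions
    public static void closeResource(Connection conn, Statement ps) {
        closeResource(conn, ps, null);
    }

    public static void closeResource(Connection conn, Statement ps, ResultSet rs) {
        try { if (rs != null) rs.close(); } catch (Exception e) { e.printStackTrace(); }
        try { if (ps != null) ps.close(); } catch (Exception e) { e.printStackTrace(); }
        try { if (conn != null) conn.close(); } catch (Exception e) { e.printStackTrace(); }
    }
}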
package com.atguigu5.blob;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.junit.Test;

import com.atguigu3.util.JDBCUtils;

/**
 * Using PreparedStatement for batch operations.
 *
 * update and delete statements already affect many rows at once, so "batch operations"
 * here mainly means batch inserts. How can PreparedStatement make batch inserts faster?
 *
 * Task: insert 20000 rows into the goods table
 * CREATE TABLE goods(
 *     id INT PRIMARY KEY AUTO_INCREMENT,
 *     NAME VARCHAR(25)
 * );
 *
 * Approach 1: use Statement
 * Connection conn = JDBCUtils.getConnection();
 * Statement st = conn.createStatement();
 * for (int i = 1; i <= 20000; i++) {
 *     String sql = "insert into goods(name) values('name_" + i + "')";
 *     st.execute(sql);
 * }
 */
public class InsertTest {

    // Batch insert, approach 2: use PreparedStatement
    @Test
    public void testInsert1() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            long start = System.currentTimeMillis();
            conn = JDBCUtils.getConnection();
            String sql = "insert into goods(name) values(?)";
            ps = conn.prepareStatement(sql);
            for (int i = 1; i <= 20000; i++) {
                ps.setObject(1, "name_" + i);
                ps.execute();
            }
            long end = System.currentTimeMillis();
            System.out.println("Time taken: " + (end - start));
            // 20000 rows: 76229 ms
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCUtils.closeResource(conn, ps);
        }
    }

    /*
     * Batch insert, approach 3:
     * 1. addBatch(), executeBatch(), clearBatch()
     * 2. MySQL does not rewrite batched statements by default; enable it by appending
     *    ?rewriteBatchedStatements=true to the url in the configuration file.
     * 3. Use a newer MySQL driver: mysql-connector-java-5.1.37-bin.jar (or later)
     */
    @Test
    public void testInsert2() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            long start = System.currentTimeMillis();
            conn = JDBCUtils.getConnection();
            String sql = "insert into goods(name) values(?)";
            ps = conn.prepareStatement(sql);
            for (int i = 1; i <= 1000000; i++) {
                ps.setObject(1, "name_" + i);
                // 1. accumulate the statement
                ps.addBatch();
                if (i % 500 == 0) {
                    // 2. execute the batch
                    ps.executeBatch();
                    // 3. clear the batch
                    ps.clearBatch();
                }
            }
            long end = System.currentTimeMillis();
            System.out.println("Time taken: " + (end - start));
            // 20000 rows: 76229 ms -> 1525 ms
            // 1000000 rows: 18780 ms
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCUtils.closeResource(conn, ps);
        }
    }

    /*
     * Batch insert, approach 4: disable auto-commit on the connection
     */
    @Test
    public void testInsert3() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            long start = System.currentTimeMillis();
            conn = JDBCUtils.getConnection();
            // Disable auto-commit to speed up the insert
            conn.setAutoCommit(false);
            String sql = "insert into goods(name) values(?)";
            ps = conn.prepareStatement(sql);
            for (int i = 1; i <= 1000000; i++) {
                ps.setObject(1, "name_" + i);
                // 1. accumulate the statement
                ps.addBatch();
                if (i % 500 == 0) {
                    // 2. execute the batch
                    ps.executeBatch();
                    // 3. clear the batch
                    ps.clearBatch();
                }
            }
            // Commit once at the end
            conn.commit();
            long end = System.currentTimeMillis();
            System.out.println("Time taken: " + (end - start));
            // 20000 rows: 76229 ms -> 1525 ms
            // 1000000 rows: 18780 ms -> 9939 ms
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCUtils.closeResource(conn, ps);
        }
    }
}
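One caveat with approaches 3 and 4: the batch is only executed when i is a multiple of 500, which happens to work for 20000 and 1000000 rows but would leave the final rows unsent for other row counts. Below is a hedged sketch of the same loop with a final flush, written as an extra test method to drop into the InsertTest class above; the method name testInsert4 and the row count 20750 are purely illustrative.

    // Sketch: batch insert that also flushes the final partial batch
    // (assumes the same goods table and JDBCUtils helper; row count is arbitrary)
    @Test
    public void testInsert4() throws Exception {
        Connection conn = JDBCUtils.getConnection();
        conn.setAutoCommit(false);
        String sql = "insert into goods(name) values(?)";
        PreparedStatement ps = conn.prepareStatement(sql);
        int total = 20750;               // deliberately not a multiple of 500
        for (int i = 1; i <= total; i++) {
            ps.setObject(1, "name_" + i);
            ps.addBatch();
            if (i % 500 == 0) {
                ps.executeBatch();       // send a full batch of 500 rows
                ps.clearBatch();
            }
        }
        ps.executeBatch();               // send the remaining 250 rows
        ps.clearBatch();
        conn.commit();
        JDBCUtils.closeResource(conn, ps);
    }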
That wraps up this article on JDBC with MySQL, Blob data, and batch processing; hopefully it is helpful to fellow programmers!