Alibaba's open-source database connection pool, Druid, is usually used together with Spring or Spring Boot in real projects.
But what if you are not using Spring or Spring Boot and want to work with the database directly through Druid's native API? How do you use it correctly?
Below is a simple database utility class built on the Druid connection pool. Pay particular attention to how the Druid data source is created and then reused:
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import lombok.extern.slf4j.Slf4j;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Database utility class
 *
 * @author shiyanjun
 * @since 2020/01/05
 */
@Slf4j
public class DbUtil {

    private static final String DB_URL = "jdbc:mysql://localhost:3306/authapi"; // database connection URL
    private static final String DB_USERNAME = "root";   // database username
    private static final String DB_PASSWORD = "123456"; // database password

    // Druid data source, globally unique (created only once); volatile is required for double-checked locking
    private static volatile DruidDataSource druidDataSource;

    /**
     * Execute an SQL update
     *
     * @param updateSql the INSERT/UPDATE/DELETE statement to execute
     * @throws SQLException
     */
    public static void insert(String updateSql) throws SQLException {
        Connection connection = null;
        Statement statement = null;
        ResultSet resultSet = null;
        try {
            connection = getDruidConnection();
            statement = connection.createStatement();
            int count = statement.executeUpdate(updateSql);
            log.info(">>>>>>>>>>>>> rows inserted: {}", count);
        } finally {
            // Remember: always release resources
            closeResource(connection, statement, resultSet);
        }
    }

    /**
     * Execute an SQL query
     *
     * @param querySql the SELECT statement to execute
     * @return one Map per row, keyed by column name
     * @throws Exception
     */
    public static List<Map<String, Object>> executeQuery(String querySql) throws Exception {
        List<Map<String, Object>> resultList = new ArrayList<>();
        Connection connection = null;
        Statement statement = null;
        ResultSet resultSet = null;
        try {
            connection = getDruidConnection();
            statement = connection.createStatement();
            resultSet = statement.executeQuery(querySql);
            ResultSetMetaData metaData = resultSet.getMetaData();
            while (resultSet.next()) {
                int columnCount = metaData.getColumnCount();
                Map<String, Object> resultMap = new LinkedHashMap<>();
                for (int i = 1; i <= columnCount; i++) {
                    String columnName = metaData.getColumnName(i);        // column name
                    Object columnValue = resultSet.getObject(columnName); // column value
                    resultMap.put(columnName, columnValue);
                }
                resultList.add(resultMap);
            }
            log.info(">>>>>>>>>>>> query result: {}", resultList);
        } finally {
            // Remember: always release resources
            closeResource(connection, statement, resultSet);
        }
        return resultList;
    }

    /**
     * Get the Druid data source
     *
     * @return the shared DruidDataSource instance
     * @throws SQLException
     */
    private static DruidDataSource getDruidDataSource() throws SQLException {
        // Double-checked locking: ensure the data source is created only once under concurrency
        if (druidDataSource == null) {
            synchronized (DbUtil.class) {
                if (druidDataSource == null) {
                    druidDataSource = createDruidDataSource();
                    return druidDataSource;
                }
            }
        }
        log.info(">>>>>>>>>>> reusing Druid data source: url={}, username={}, password={}",
                druidDataSource.getUrl(), druidDataSource.getUsername(), druidDataSource.getPassword());
        return druidDataSource;
    }

    /**
     * Create the Druid data source
     *
     * @return a newly initialized DruidDataSource
     * @throws SQLException
     */
    private static DruidDataSource createDruidDataSource() throws SQLException {
        DruidDataSource druidDataSource = new DruidDataSource();
        druidDataSource.setUrl(DB_URL);
        druidDataSource.setUsername(DB_USERNAME);
        druidDataSource.setPassword(DB_PASSWORD);
        /*---- Tune the parameters below according to your own project ----*/
        druidDataSource.setMaxActive(20);
        druidDataSource.setInitialSize(1);
        druidDataSource.setMinIdle(1);
        druidDataSource.setMaxWait(60000);
        druidDataSource.setValidationQuery("select 1 from dual");
        druidDataSource.setTimeBetweenEvictionRunsMillis(60000);
        druidDataSource.setMinEvictableIdleTimeMillis(300000);
        druidDataSource.setTestWhileIdle(true);
        druidDataSource.setTestOnBorrow(false);
        druidDataSource.setTestOnReturn(false);
        druidDataSource.setPoolPreparedStatements(true);
        druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(20);
        druidDataSource.init();
        log.info(">>>>>>>>>>> created Druid data source: url={}, username={}, password={}",
                druidDataSource.getUrl(), druidDataSource.getUsername(), druidDataSource.getPassword());
        return druidDataSource;
    }

    /**
     * Get a pooled connection from the Druid data source
     *
     * @return a pooled connection
     * @throws SQLException
     */
    private static DruidPooledConnection getDruidConnection() throws SQLException {
        DruidDataSource druidDataSource = getDruidDataSource();
        return druidDataSource.getConnection();
    }

    /**
     * Release resources
     *
     * @param connection
     * @param statement
     * @param resultSet
     * @throws SQLException
     */
    private static void closeResource(Connection connection, Statement statement, ResultSet resultSet) throws SQLException {
        // Note the release order: ResultSet -> Statement -> Connection
        if (resultSet != null) {
            resultSet.close();
        }
        if (statement != null) {
            statement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}
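The insert method above runs a fully assembled SQL string through a plain Statement, so every caller has to build the statement text itself (as the test class below does with String.format), which is awkward and open to SQL injection. If you prefer to bind parameters, a PreparedStatement-based variant can be added to the same utility class. The sketch below is only an illustration, not part of the original DbUtil: the method name insertWithParams is made up here, it reuses getDruidConnection() and closeResource() from the class above, and it needs an extra import of java.sql.PreparedStatement.

    /**
     * Parameterized insert (hypothetical addition to DbUtil, shown as a sketch).
     * Values are bound through a PreparedStatement instead of being concatenated into the SQL text.
     *
     * @param parameterizedSql an INSERT statement containing ? placeholders
     * @param params           the values to bind, in placeholder order
     * @throws SQLException
     */
    public static void insertWithParams(String parameterizedSql, Object... params) throws SQLException {
        Connection connection = null;
        PreparedStatement preparedStatement = null;
        try {
            connection = getDruidConnection();
            preparedStatement = connection.prepareStatement(parameterizedSql);
            // JDBC parameter indexes start at 1
            for (int i = 0; i < params.length; i++) {
                preparedStatement.setObject(i + 1, params[i]);
            }
            int count = preparedStatement.executeUpdate();
            log.info(">>>>>>>>>>>>> rows inserted: {}", count);
        } finally {
            // PreparedStatement extends Statement, so the existing closeResource method can be reused
            closeResource(connection, preparedStatement, null);
        }
    }

A caller would then write something like DbUtil.insertWithParams("INSERT INTO `auth_code` (`auth_code`, `status`) VALUES (?, ?)", code, status) instead of formatting the SQL string itself; this also gives the prepared-statement cache enabled above via setPoolPreparedStatements(true) something to work with.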
And here is a simple test class that simulates multiple threads inserting 10,000 rows into the table:
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * CREATE TABLE `auth_code` (
 *   `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'auto-increment primary key',
 *   `auth_code` varchar(255) CHARACTER SET utf8mb4 NOT NULL COMMENT 'authorization code',
 *   `status` varchar(255) CHARACTER SET utf8mb4 NOT NULL DEFAULT '1' COMMENT 'status',
 *   `remark` varchar(255) CHARACTER SET utf8mb4 DEFAULT NULL COMMENT 'remark',
 *   `create_date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
 *   `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
 *   PRIMARY KEY (`id`)
 * ) ENGINE=InnoDB AUTO_INCREMENT=18 DEFAULT CHARSET=latin1;
 */
public class AppTest {

    private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    private static ExecutorService executorService = Executors.newCachedThreadPool(); // thread pool

    public static void main(String[] args) throws Exception {
        String INSERT_SQL = "INSERT INTO `authapi`.`auth_code` "
                + "(`auth_code`, `status`, `remark`, `create_date`, `update_time`) VALUES ('%s',%s,'%s','%s','%s')";
        // Simulate multiple threads inserting 10,000 rows into the database
        for (int i = 0; i < 10000; i++) {
            Thread.sleep(5);
            String code = UUID.randomUUID().toString().replaceAll("-", "");
            String dateStr = dateFormat.format(new Date());
            String formatSql = String.format(INSERT_SQL, code, i, code, dateStr, dateStr);
            executorService.execute(() -> {
                try {
                    DbUtil.insert(formatSql);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
        }
        // Stop accepting new tasks; already submitted inserts still run to completion
        executorService.shutdown();
    }
}
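One note on shutdown: the test only calls executorService.shutdown(), so the pending inserts finish, but the Druid pool itself is never closed explicitly. DruidDataSource does provide a close() method for that. The sketch below is an optional addition under two assumptions of mine: a helper named closeDruidDataSource() is added to DbUtil (it is not part of the original class), and main waits for the executor (which needs java.util.concurrent.TimeUnit) before calling it.

    // Hypothetical addition to DbUtil: close the pool once no more work will be submitted
    public static synchronized void closeDruidDataSource() {
        if (druidDataSource != null) {
            // DruidDataSource.close() releases the pooled connections and shuts the pool down
            druidDataSource.close();
            druidDataSource = null;
        }
    }

    // At the end of AppTest.main, after executorService.shutdown():
    if (executorService.awaitTermination(5, TimeUnit.MINUTES)) {
        DbUtil.closeDruidDataSource();
    }

For a short-lived test program this hardly matters, but in a long-running application you would typically hook such a call into your shutdown path so the MySQL server is not left holding connections.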