解决问题:
场景>>>某大型业务系统对接多个业务信息,配置了N个数据集信息,客户根据数据集查询相应的数据信息。
实际数据查询逻辑>>>根据数据集配置的数据库信息,动态创建连接并获取数据信息。
// application.yml:默认数据源与 Druid 连接池配置(url 中的两个 %s 为 IP 与库名占位符,运行时格式化填充)
spring:
  datasource:
    default:
      driverClassName: com.mysql.jdbc.Driver
      # The two %s placeholders (host, schema) are filled in at runtime via String.format
      url: jdbc:mysql://%s:3306/%s?useUnicode=true&characterEncoding=utf8&useSSL=false
      username: bigdataApi
      password: bigdataApi@253.com
      druid:
        # Pool sizing: initial, minimum idle, maximum active
        initialSize: 10
        minIdle: 5
        maxActive: 100
        # Max time (ms) to wait when borrowing a connection
        maxWait: 60000
        maxOpenPreparedStatements: -1
        # Validation SQL; without it the three test* flags below have no effect
        validationQuery: SELECT 1
        # Test idle connections so they can be reclaimed
        testWhileIdle: true
        # Test on borrow/return: disabled because it costs a round trip per call
        testOnBorrow: false
        testOnReturn: false
        # Interval (ms) between idle-connection eviction runs
        timeBetweenEvictionRunsMillis: 60000
        # Minimum / maximum lifetime (ms) of a connection in the pool
        minEvictableIdleTimeMillis: 300000
        maxEvictableIdleTimeMillis: 400000
        # Enable PSCache and cap its size per connection
        poolPreparedStatements: true
        maxPoolPreparedStatementPerConnectionSize: 20
        # Monitoring filters; removing 'stat' disables SQL stats ('wall' adds a firewall)
        filters: stat
        useGlobalDataSourceStat: true
        # Enable mergeSql and slow-SQL logging via connect properties
        connectProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
        # Reclaim connections held longer than removeAbandonedTimeout seconds
        removeAbandoned: true
        removeAbandonedTimeout: 180
        # Log the stack trace when a connection is forcibly reclaimed
        logAbandoned: true
        breakAfterAcquireFailure: true
        connectionErrorRetryAttempts: 0
package com.general.datasource.config; import com.general.datasource.helper.DynamicDataSource; import lombok.extern.slf4j.Slf4j; import org.apache.ibatis.session.SqlSessionFactory; import org.mybatis.spring.SqlSessionFactoryBean; import org.mybatis.spring.annotation.MapperScan; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.jdbc.DataSourceBuilder; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import javax.sql.DataSource; /** * 数据源配置管理 */ @Slf4j @Configuration @MapperScan(basePackages = "com.general.mapper.dynamic", sqlSessionFactoryRef = "dynamicSqlSession") public class DataSourceConfig { /** * 根据配置参数创建数据源。使用派生的子类。 * * @return 数据源 */ @Bean(name = "dynamicDataSource") @ConfigurationProperties(prefix = "spring.datasource.default") public DataSource getDataSource() { DataSourceBuilder builder = DataSourceBuilder.create(); builder.type(DynamicDataSource.class); return builder.build(); } /** * 创建会话工厂。 * * @param dataSource 数据源 * @return 会话工厂 */ @Bean(name = "dynamicSqlSession") public SqlSessionFactory getSqlSessionFactory(@Qualifier("dynamicDataSource") DataSource dataSource) { SqlSessionFactoryBean bean = new SqlSessionFactoryBean(); bean.setDataSource(dataSource); try { bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath:mapper/dynamic/*.xml")); return bean.getObject(); } catch (Exception e) { log.error("DynamicSqlSession 创建会话工厂失败"); return null; } } }
首先增加一个数据库标识类,用于区分不同的数据库(DBIdentifier.java),为每个数据库创建单独的连接池,并使用唯一编码作为数据库连接的索引。由于微服务需要支持多线程并发,这里采用线程变量(ThreadLocal)保存当前请求的数据库标识。
package com.general.datasource.helper; /** * 数据库标识管理类。用于区分数据源连接的不同数据库 */ public class DBIdentifier { /** * 用不同的工程编码来区分数据库 */ private static ThreadLocal<String> projectCode = new ThreadLocal<>(); public static String getProjectCode() { return projectCode.get(); } public static void setProjectCode(String code) { projectCode.set(code); } }
通过DDSHolder来管理不同的数据源,提供数据源的添加、查询功能(DDSHolder.java)
package com.general.datasource.helper;

import com.alibaba.druid.pool.DruidDataSource;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Timer;

/**
 * Dynamic data source manager (singleton). Keeps one pool per project code and
 * periodically closes pools that have gone unused.
 */
public class DDSHolder {

    /**
     * Managed dynamic data sources: project code -> timed data source.
     */
    private final Map<String, DDSTimer> ddsMap = new HashMap<>();

    /**
     * Periodic cleanup of unused data sources. Daemon timer so the background
     * thread never blocks JVM shutdown.
     */
    private static final Timer clearIdleTask = new Timer(true);

    static {
        // First run after 5s, then every minute.
        clearIdleTask.schedule(new ClearIdleTimerTask(), 5000, 60 * 1000);
    }

    private DDSHolder() {
    }

    /**
     * @return the singleton instance
     */
    public static DDSHolder instance() {
        return DDSHolderBuilder.instance;
    }

    /**
     * Registers a dynamic data source. If an entry for the same project code is
     * replaced, the previous pool is closed so its connections are not leaked.
     */
    public synchronized void addDDS(String projectCode, DruidDataSource dds) {
        DDSTimer previous = ddsMap.put(projectCode, new DDSTimer(dds));
        if (previous != null && previous.getDds() != dds) {
            previous.getDds().close();
        }
    }

    /**
     * Looks up a dynamic data source and refreshes its last-use timestamp.
     *
     * @param projectCode project code
     * @return the pool, or null if none is registered
     */
    public synchronized DruidDataSource getDDS(String projectCode) {
        DDSTimer ddst = ddsMap.get(projectCode);
        if (ddst != null) {
            ddst.refreshTime();
            return ddst.getDds();
        }
        return null;
    }

    /**
     * Closes and removes data sources that have been idle past their timeout.
     */
    public synchronized void clearIdleDDS() {
        for (Iterator<Map.Entry<String, DDSTimer>> iter = ddsMap.entrySet().iterator(); iter.hasNext(); ) {
            if (iter.next().getValue().checkAndClose()) {
                iter.remove();
            }
        }
    }

    /**
     * Lazy-initialization holder for the singleton.
     */
    private static class DDSHolderBuilder {
        private static final DDSHolder instance = new DDSHolder();
    }
}
从DataSource派生了一个DynamicDataSource,在其中实现数据库连接的动态切换(DynamicDataSource.java)
package com.general.datasource.helper; import com.alibaba.druid.pool.DruidDataSource; import com.alibaba.druid.pool.DruidPooledConnection; import lombok.extern.slf4j.Slf4j; import java.sql.SQLException; /** * 定义动态数据源派生类。从基础的DataSource派生,动态性自己实现 */ @Slf4j public class DynamicDataSource extends DruidDataSource { /** * 改写本方法是为了在请求不同工程的数据时去连接不同的数据库 */ @Override public DruidPooledConnection getConnection() { String projectCode = DBIdentifier.getProjectCode(); //1、获取数据源 DruidDataSource dds = DDSHolder.instance().getDDS(projectCode); //2、如果数据源不存在则创建 if (dds == null) { try { DruidDataSource newDDS = initDDS(projectCode); DDSHolder.instance().addDDS(projectCode, newDDS); } catch (IllegalArgumentException e) { log.error("Init data source fail. projectCode:" + projectCode); return null; } } dds = DDSHolder.instance().getDDS(projectCode); try { return dds.getConnection(); } catch (SQLException e) { log.error(e.getMessage()); return null; } } @Override public DruidPooledConnection getConnection(String username, String password) { return null; } /** * 以当前数据对象作为模板复制一份 */ private DruidDataSource initDDS(String projectCode) throws IllegalArgumentException { DruidDataSource dds = new DruidDataSource(); // 2、复制PoolConfiguration的属性 dds.setInitialSize(10); dds.setMaxActive(50); dds.setMinIdle(5); dds.setMaxWait(6000); dds.setValidationQuery("SELECT 1"); // 设置链接超时回收 dds.setRemoveAbandoned(true); // 从获取到链接开始,超过这么长时间,链接将被连接池强制回收 dds.setRemoveAbandonedTimeout(180); // 强制回收链接是,将堆栈追踪信息打印到日志中 dds.setLogAbandoned(true); if ("project_default".equals(projectCode)) { // 设置数据库名称和IP(一般来说,端口和用户名、密码都是统一固定的) String urlFormat = this.getUrl(); String url = String.format(urlFormat, ProjectDBMgr.instance().getDBIP(projectCode), ProjectDBMgr.instance().getDBSchema(projectCode)); dds.setUrl(url); dds.setDriverClassName(this.getDriverClassName()); dds.setUsername(this.getUsername()); dds.setPassword(this.getPassword()); return dds; } dds.setUrl(ProjectDBMgr.instance().getDBUrl(projectCode)); 
dds.setDriverClassName(ProjectDBMgr.instance().getDBDriverClass(projectCode)); dds.setUsername(ProjectDBMgr.instance().getDBUserName(projectCode)); dds.setPassword(ProjectDBMgr.instance().getDBPassword(projectCode)); return dds; } }
通过DDSTimer控制数据连接释放(DDSTimer.java)
package com.general.datasource.helper;

import com.alibaba.druid.pool.DruidDataSource;

/**
 * Pairs a dynamic data source with a last-access timestamp so that pools left
 * untouched for too long can be closed and released.
 */
public class DDSTimer {

    /**
     * Idle window in milliseconds; a pool unused for longer than this is
     * released. Defaults to 10 minutes.
     */
    private static long idlePeriodTime = 10 * 60 * 1000;

    /**
     * The managed pool.
     */
    private final DruidDataSource dds;

    /**
     * Timestamp (ms) of the most recent access.
     */
    private long lastUseTime;

    public DDSTimer(DruidDataSource dds) {
        this.dds = dds;
        this.lastUseTime = System.currentTimeMillis();
    }

    /**
     * Records that the pool was just used.
     */
    public void refreshTime() {
        this.lastUseTime = System.currentTimeMillis();
    }

    /**
     * Closes the pool once it has been idle past the window.
     *
     * @return true if the pool timed out and was closed; false otherwise
     */
    public boolean checkAndClose() {
        long idleFor = System.currentTimeMillis() - lastUseTime;
        if (idleFor <= idlePeriodTime) {
            return false;
        }
        dds.close();
        return true;
    }

    public DruidDataSource getDds() {
        return dds;
    }
}
使用定时器任务ClearIdleTimerTask定时清除空闲的数据源(ClearIdleTimerTask.java)
package com.general.datasource.helper; import java.util.TimerTask; /** * 清除空闲连接任务 */ public class ClearIdleTimerTask extends TimerTask { @Override public void run() { DDSHolder.instance().clearIdleDDS(); } }
package com.general.datasource.helper; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * 项目数据库管理。提供连接唯一编码查询数据库连接配置信息。 */ public class ProjectDBMgr { /** * 保存项目编码与数据库driverClass的映射关系。 */ private Map<String, String> driverClassNameMap = new HashMap<>(); /** * 保存项目编码与数据库URL的映射关系。 */ private Map<String, String> dbUrlMap = new HashMap<>(); /** * 保存项目编码与数据名称的映射关系。这里是硬编码,实际开发中这个关系数据可以保存到redis缓存中; * 新增一个项目或者删除一个项目只需要更新缓存。到时这个类的接口只需要修改为从缓存拿数据。 */ private Map<String, String> dbUserNameMap = new HashMap<>(); /** * 保存项目编码与数据名称的映射关系。这里是硬编码,实际开发中这个关系数据可以保存到redis缓存中; * 新增一个项目或者删除一个项目只需要更新缓存。到时这个类的接口只需要修改为从缓存拿数据。 */ private Map<String, String> dbPasswordMap = new HashMap<>(); private Map<String, String> dbIPMap = new HashMap<>(); private Map<String, String> dbSchemaMap = new HashMap<>(); private ProjectDBMgr() { dbIPMap.put("project_default", "172.16.41.156"); dbSchemaMap.put("project_default", "bigdata_api_stable"); driverClassNameMap.put("project_default", "com.mysql.jdbc.Driver"); dbUrlMap.put("project_default", "jdbc:mysql://172.16.41.156:3306/bigdata_api_stable?useUnicode=true&characterEncoding=utf8&useSSL=false"); dbUserNameMap.put("project_default", "bigdataApi"); dbPasswordMap.put("project_default", "bigdataApi@253.com"); } public static ProjectDBMgr instance() { return ProjectDBMgrBuilder.instance; } // 实际开发中改为从缓存获取 public void set(String dbType, String driverClassName, String dbUrl, String userName, String password) { driverClassNameMap.put(dbType + "_" + getIp(dbUrl), driverClassName); dbUrlMap.put(dbType + "_" + getIp(dbUrl), dbUrl); dbUserNameMap.put(dbType + "_" + getIp(dbUrl), userName); dbPasswordMap.put(dbType + "_" + getIp(dbUrl), password); } // 实际开发中改为从缓存获取 public String getDBIP(String projectCode) { if (dbIPMap.containsKey(projectCode)) { return dbIPMap.get(projectCode); } return ""; } // 实际开发中改为从缓存获取 public String getDBSchema(String projectCode) { if (dbSchemaMap.containsKey(projectCode)) { 
return dbSchemaMap.get(projectCode); } return ""; } // 实际开发中改为从缓存获取 public String getDBDriverClass(String projectCode) { if (driverClassNameMap.containsKey(projectCode)) { return driverClassNameMap.get(projectCode); } return ""; } // 实际开发中改为从缓存获取 public String getDBUrl(String projectCode) { if (dbUrlMap.containsKey(projectCode)) { return dbUrlMap.get(projectCode); } return ""; } // 实际开发中改为从缓存获取 public String getDBUserName(String projectCode) { if (dbUserNameMap.containsKey(projectCode)) { return dbUserNameMap.get(projectCode); } return ""; } // 实际开发中改为从缓存获取 public String getDBPassword(String projectCode) { if (dbPasswordMap.containsKey(projectCode)) { return dbPasswordMap.get(projectCode); } return ""; } private static class ProjectDBMgrBuilder { private static ProjectDBMgr instance = new ProjectDBMgr(); } public static String getIp(String url) { // ipv4使用正则表达式截取 String regEx = "((2[0-4]\\d|25[0-5]|[01]?\\d\\d?)\\.){3}(2[0-4]\\d|25[0-5]|[01]?\\d\\d?)"; Pattern p = Pattern.compile(regEx); Matcher m = p.matcher(url); if (m.find()) { return m.group(); } // ipv6截取 if (url.contains(":")) { int start = url.indexOf(":") + 3; int end = url.lastIndexOf(":"); return url.substring(start, end); } return null; } }
...... // 首先初始化数据库连接信息和唯一编码映射信息 ProjectDBMgr.instance().set(datasetDto.getDbType(), "org.apache.hive.jdbc.HiveDriver", datasetDto.getDbUrl(), "cldboozie", "oozie"); ...... // 设置需要访问的数据库连接 DBIdentifier.setProjectCode(datasetDto.getDbType() + "_" + ProjectDBMgr.getIp(datasetDto.getDbUrl())); ...... dynamicMapper.selectByCondition(qo);
Mapper信息
package com.general.mapper.dynamic;

import com.general.common.domain.GIAggregateInfo;
import com.general.common.dto.TableStructure;
import com.general.common.qo.GeneralInterfaceQo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * DynamicExportMapper — generic query mapper routed through the dynamic data
 * source (see DataSourceConfig: this package is bound to dynamicSqlSession).
 *
 * @author Eason
 */
@Mapper
public interface DynamicMapper {

    /**
     * Queries rows matching the dimensions/quotas/filters described by the
     * request object; each row is returned as a generic map.
     */
    List<Object> selectByCondition(GeneralInterfaceQo qo);

    /**
     * Returns the aggregate information (total row count) for the same
     * conditions as {@link #selectByCondition}.
     */
    GIAggregateInfo getAggregateInfo(GeneralInterfaceQo qo);

    /**
     * Describes the structure (columns) of the given table via a "desc" query.
     *
     * @param databaseAndTableName fully qualified "database.table" name
     */
    List<TableStructure> getTableStructure(@Param("databaseAndTableName") String databaseAndTableName);
}
Mapper.xml信息
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.general.mapper.dynamic.DynamicMapper">
    <!-- NOTE(review): column/table identifiers are interpolated with ${} (raw
         string substitution, not bound parameters), which is SQL-injection
         prone. Callers must validate these identifiers against the table
         schema before querying; values use #{} and are safe. -->

    <!-- Concatenation of all requested dimension columns for the SELECT list -->
    <sql id="Dimension_Column_List">
        <if test="dimensions != null and dimensions.size() > 0">
            <foreach item="dimension" index="index" collection="dimensions" open="," separator="," close="">
                <choose>
                    <when test='dimension.aliasName == null or "" == dimension.aliasName'>
                        ${dimension.columnName}
                    </when>
                    <when test='dimension.aliasName != null and "" != dimension.aliasName'>
                        ${dimension.columnName} AS ${dimension.aliasName}
                    </when>
                </choose>
            </foreach>
        </if>
    </sql>
    <!-- Concatenation of all requested aggregate (quota) columns -->
    <sql id="Aggregation_Column_List">
        <if test='quotas != null and quotas.size() > 0'>
            <foreach item="quota" index="index" collection="quotas" open="," separator="," close="">
                <choose>
                    <when test='quota.columnName != null and "COUNT" == quota.aggreType'>
                        count(1) AS total_count
                    </when>
                    <when test='quota.columnName != null and "AVG" == quota.aggreType'>
                        avg(${quota.columnName}) AS avg_${quota.columnName}
                    </when>
                    <when test='quota.columnName != null and "MAX" == quota.aggreType'>
                        max(${quota.columnName}) AS max_${quota.columnName}
                    </when>
                    <when test='quota.columnName != null and "MIN" == quota.aggreType'>
                        min(${quota.columnName}) AS min_${quota.columnName}
                    </when>
                    <when test='quota.columnName != null and "SUM" == quota.aggreType'>
                        sum(${quota.columnName}) AS sum_${quota.columnName}
                    </when>
                </choose>
            </foreach>
        </if>
    </sql>
    <!-- WHERE clause built from the partition time range plus the query filters -->
    <sql id="whereSql">
        <where>
            <!-- Partition time range filter -->
            <if test="partitionTimeRange != null">
                <choose>
                    <when test='partitionTimeRange.columnName != null'>
                        <if test="partitionTimeRange.startTime != null">
                            AND ${partitionTimeRange.columnName} >= #{partitionTimeRange.startTime}
                        </if>
                        <if test="partitionTimeRange.endTime != null">
                            AND ${partitionTimeRange.columnName} <= #{partitionTimeRange.endTime}
                        </if>
                    </when>
                </choose>
            </if>
            <!-- Per-column filters; values are typed via jdbcType by dataType -->
            <if test="querys != null and querys.size() > 0">
                <foreach item="query" index="index" collection="querys">
                    <choose>
                        <when test='query.columnName != null and "IN" == query.queryType'>
                            AND ${query.columnName} IN
                            <foreach collection="query.values" item="value" index="index" open="(" separator="," close=")">
                                <if test='"STRING" == query.dataType'> #{value , jdbcType = VARCHAR} </if>
                                <if test='"LONG" == query.dataType'> #{value , jdbcType = BIGINT} </if>
                                <if test='"DOUBLE" == query.dataType'> #{value , jdbcType = DOUBLE} </if>
                            </foreach>
                        </when>
                        <when test='query.columnName != null and "NOT IN" == query.queryType'>
                            AND ${query.columnName} NOT IN
                            <foreach collection="query.values" item="value" index="index" open="(" separator="," close=")">
                                <if test='"STRING" == query.dataType'> #{value , jdbcType = VARCHAR} </if>
                                <if test='"LONG" == query.dataType'> #{value , jdbcType = BIGINT} </if>
                                <if test='"DOUBLE" == query.dataType'> #{value , jdbcType = DOUBLE} </if>
                            </foreach>
                        </when>
                        <when test='query.columnName != null and "LIKE" == query.queryType'>
                            AND ${query.columnName} LIKE concat('%', #{query.values[0], jdbcType = VARCHAR}, '%')
                        </when>
                        <when test='query.columnName != null and "EQ" == query.queryType'>
                            AND ${query.columnName} =
                            <if test='"STRING" == query.dataType'> #{query.values[0], jdbcType = VARCHAR} </if>
                            <if test='"LONG" == query.dataType'> #{query.values[0], jdbcType = BIGINT} </if>
                            <if test='"DOUBLE" == query.dataType'> #{query.values[0], jdbcType = DOUBLE} </if>
                        </when>
                        <when test='query.columnName != null and "GE" == query.queryType'>
                            <if test='"LONG" == query.dataType'> AND ${query.columnName} >= #{query.values[0], jdbcType = BIGINT} </if>
                            <if test='"STRING" == query.dataType'> AND ${query.columnName} >= #{query.values[0], jdbcType = VARCHAR} </if>
                            <if test='"DOUBLE" == query.dataType'> AND ${query.columnName} >= #{query.values[0], jdbcType = DOUBLE} </if>
                        </when>
                        <when test='query.columnName != null and "LE" == query.queryType'>
                            <if test='"LONG" == query.dataType'> AND ${query.columnName} <= #{query.values[0], jdbcType = BIGINT} </if>
                            <if test='"STRING" == query.dataType'> AND ${query.columnName} <= #{query.values[0], jdbcType = VARCHAR} </if>
                            <if test='"DOUBLE" == query.dataType'> AND ${query.columnName} <= #{query.values[0], jdbcType = DOUBLE} </if>
                        </when>
                        <when test='query.columnName != null and "LT" == query.queryType'>
                            <if test='"LONG" == query.dataType'> AND ${query.columnName} < #{query.values[0], jdbcType = BIGINT} </if>
                            <if test='"STRING" == query.dataType'> AND ${query.columnName} < #{query.values[0], jdbcType = VARCHAR} </if>
                            <if test='"DOUBLE" == query.dataType'> AND ${query.columnName} < #{query.values[0], jdbcType = DOUBLE} </if>
                        </when>
                        <when test='query.columnName != null and "GT" == query.queryType'>
                            <if test='"LONG" == query.dataType'> AND ${query.columnName} > #{query.values[0], jdbcType = BIGINT} </if>
                            <if test='"STRING" == query.dataType'> AND ${query.columnName} > #{query.values[0], jdbcType = VARCHAR} </if>
                            <if test='"DOUBLE" == query.dataType'> AND ${query.columnName} > #{query.values[0], jdbcType = DOUBLE} </if>
                        </when>
                        <when test='query.columnName != null and "NE" == query.queryType'>
                            <if test='"LONG" == query.dataType'> AND ${query.columnName} <> #{query.values[0], jdbcType = BIGINT} </if>
                            <if test='"STRING" == query.dataType'> AND ${query.columnName} <> #{query.values[0], jdbcType = VARCHAR} </if>
                            <if test='"DOUBLE" == query.dataType'> AND ${query.columnName} <> #{query.values[0], jdbcType = DOUBLE} </if>
                        </when>
                    </choose>
                </foreach>
            </if>
        </where>
    </sql>
    <!-- GROUP BY: emitted only when aggregations AND dimensions are requested -->
    <sql id="groupBySql">
        <if test='quotas != null and quotas.size() > 0'>
            <if test="dimensions != null and dimensions.size() > 0">
                GROUP BY
                <trim suffixOverrides=",">
                    <foreach item="dimension" index="index" collection="dimensions" open="" separator="," close="">
                        ${dimension}
                    </foreach>
                </trim>
            </if>
        </if>
    </sql>
    <!-- ORDER BY: defaults to DESC when no sort type is supplied -->
    <sql id="orderBySql">
        ORDER BY
        <trim suffixOverrides=",">
            <foreach item="sort" index="index" collection="sorts" open="" separator="," close="">
                ${sort}
            </foreach>
        </trim>
        <choose>
            <when test="sortType != null and sortType !=''">
                ${sortType}
            </when>
            <otherwise>
                DESC
            </otherwise>
        </choose>
    </sql>
    <!-- Data query; returns up to 20 sample rows when downloading -->
    <select id="selectByCondition" resultType="java.util.HashMap" parameterType="com.general.common.qo.GeneralInterfaceQo">
        SELECT
        <if test='quotas != null and quotas.size() > 0'>
            <trim prefixOverrides=",">
                <if test="dimensions != null and dimensions.size() > 0">
                    <include refid="Dimension_Column_List"/>
                </if>
                <include refid="Aggregation_Column_List"/>
            </trim>
        </if>
        <if test='!(quotas != null and quotas.size() > 0)'>
            <if test="dimensions != null and dimensions.size() > 0">
                <trim prefixOverrides=",">
                    <include refid="Dimension_Column_List"/>
                </trim>
            </if>
            <if test="!(dimensions != null and dimensions.size() > 0)">
                *
            </if>
        </if>
        FROM
        <if test="dbName != null and dbName != ''">
            ${dbName}.${dbTable}
        </if>
        <if test="!(dbName != null and dbName != '')">
            ${dbTable}
        </if>
        <include refid="whereSql"/>
        <if test='quotas != null and quotas.size() > 0'>
            <include refid="groupBySql"/>
        </if>
        <!-- Sorting -->
        <if test='sorts != null and sorts.size() > 0'>
            <include refid="orderBySql"/>
        </if>
        <!-- need_download "0": not persisted as SQL, caller pages the result -->
        <if test='needDownload != null and needDownload == "0"'>
            LIMIT ${(currentPage - 1) * pageSize}, ${pageSize}
        </if>
        <!-- need_download "1": download path, only a 20-row preview is returned -->
        <if test='needDownload != null and needDownload == "1"'>
            LIMIT 20
        </if>
    </select>
    <!-- Aggregate (total count) query -->
    <select id="getAggregateInfo" resultType="com.general.common.domain.GIAggregateInfo" parameterType="com.general.common.qo.GeneralInterfaceQo">
        <if test='quotas != null and quotas.size() > 0'>
            SELECT count(1) AS total FROM (
            SELECT
            <trim prefixOverrides=",">
                <if test="dimensions != null and dimensions.size() > 0">
                    <include refid="Dimension_Column_List"/>
                </if>
                <include refid="Aggregation_Column_List"/>
            </trim>
            FROM
            <if test="dbName != null and dbName != ''">
                ${dbName}.${dbTable}
            </if>
            <if test="!(dbName != null and dbName != '')">
                ${dbTable}
            </if>
            <include refid="whereSql"/>
            <include refid="groupBySql"/>) AS temp
        </if>
        <if test='!(quotas != null and quotas.size() > 0)'>
            SELECT count(1) AS total
            <include refid="Aggregation_Column_List"/>
            FROM
            <if test="dbName != null and dbName != ''">
                ${dbName}.${dbTable}
            </if>
            <if test="!(dbName != null and dbName != '')">
                ${dbTable}
            </if>
            <include refid="whereSql"/>
        </if>
    </select>
    <!-- Table structure via "desc" -->
    <select id="getTableStructure" resultType="com.general.common.dto.TableStructure">
        <if test="(databaseAndTableName != null and databaseAndTableName != '')">
            desc ${databaseAndTableName}
        </if>
    </select>
</mapper>
实体类信息
package com.general.common.qo;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

import javax.validation.Valid;
import javax.validation.constraints.NotBlank;
import java.util.List;

/**
 * Request object for the generic query interface.
 */
@Data
public class GeneralInterfaceQo {

    /**
     * Unique dataset id: used to look up the database type, database name and
     * table for this data source in the MySQL metadata store (assigned by the
     * big-data team during integration).
     */
    @NotBlank(message = "数据来源(dataset_id)不能为空")
    @JsonProperty("dataset_id")
    private String datasetId;

    /**
     * Unique caller id, used to validate the token (assigned by the big-data
     * team during integration).
     */
    @NotBlank(message = "调用者唯一标识(client_id)不能为空")
    @JsonProperty("client_id")
    private String clientId;

    /**
     * Token value. Generation rule: api + v1 + general + generateSql + current
     * date formatted as yyyy-MM-dd + clientSecret (assigned by the big-data
     * team during integration).
     */
    @NotBlank(message = "调用者传入 API TOKEN(api_token_client)不能为空")
    @JsonProperty("api_token_client")
    private String apiTokenClient;

    /**
     * Task name.
     */
    @JsonProperty("task_name")
    private String taskName;

    /**
     * Unique id of the download task; required only when need_download is "1".
     */
    @JsonProperty("uuid")
    private String uuid;

    /**
     * Business-system id, an extension field used to generate the uuid;
     * usually kept identical to uuid.
     */
    @JsonProperty("bu_sys_id")
    private String buSysId;

    /**
     * Whether a download is required: "0" no, "1" yes. When "1", the SQL is
     * written to MySQL, executed, and the result exported to Excel;
     * call_back_url is mandatory and the associated paging defaults to 20 rows.
     * When "0", no SQL is persisted and no Excel is exported; call_back_url is
     * not needed, and paging parameters should be supplied (defaults: page 1,
     * 20 rows per page).
     */
    @NotBlank(message = "是否需要下载(need_download)不能为空")
    @JsonProperty("need_download")
    private String needDownload;

    /**
     * Big-data callback URL: not required when need_download is "0"; required
     * when it is "1".
     */
    @JsonProperty("call_back_url")
    private String callBackUrl;

    /**
     * Dimension columns of the queried table: the columns the caller wants in
     * the result set. Values must exactly match the column names in the target
     * database, otherwise the query fails.
     */
    @JsonProperty("dimensions")
    @Valid
    private List<GIDimensionQo> dimensions;

    /**
     * Quota (aggregate) columns of the queried table: aggregated values the
     * caller wants in the result, such as total counts.
     */
    @JsonProperty("quotas")
    private List<GIQuotaQo> quotas;

    /**
     * Query filters, excluding the partition field.
     */
    @JsonProperty("querys")
    @Valid
    private List<GIQueryQo> querys;

    /**
     * Sort columns: values must exactly match the column names in the target
     * database, otherwise the query fails.
     */
    @JsonProperty("sorts")
    private List<String> sorts;

    /**
     * Sort direction: DESC (descending) or ASC (ascending).
     */
    @JsonProperty("sort_type")
    private String sortType;

    /**
     * Partition time range filter; a single query is recommended to span at
     * most 31 days.
     */
    @JsonProperty("partition_time_range")
    @Valid
    private PartitionTimeRangeQo partitionTimeRange;

    /**
     * Request source: Report (daily report data), DataApply (data request),
     * OTHER or empty (anything else).
     */
    @JsonProperty("request_source")
    private String requestSource = "OTHER";

    /**
     * Paging: current page number, defaults to the first page.
     */
    @JsonProperty("current_page")
    private Long currentPage = 1L;

    /**
     * Paging: page size, defaults to 20 rows.
     */
    @JsonProperty("page_size")
    private Long pageSize = 20L;

    /**
     * Actual table name to query (ignored on API calls, filled in server-side).
     */
    @JsonIgnore
    private String dbTable;

    /**
     * Actual database name to connect to (ignored on API calls, filled in
     * server-side).
     */
    @JsonIgnore
    private String dbName;

    /**
     * Database type (ignored on API calls, filled in server-side).
     */
    @JsonIgnore
    private String dbType;
}
package com.general.common.domain;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

/**
 * Aggregate query result: the total row count returned by
 * DynamicMapper#getAggregateInfo.
 */
@Data
public class GIAggregateInfo {

    // Total number of rows matching the query conditions.
    @JsonProperty("total")
    private Long total;
}
package com.general.common.dto;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;

/**
 * Unified view over the column metadata returned by different engines
 * (ClickHouse, Hive, MySQL). Each engine populates a different subset of the
 * fields; the custom getters below resolve them into one logical value.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class TableStructure {

    // ch: column name
    private String name;

    // ch|mysql: column type
    private String type;

    // ch|hive|mysql: column comment
    private String comment;

    // hive: column name
    private String colName;

    // hive: column type
    private String dataType;

    // mysql: column name
    private String field;

    /**
     * Resolves the column name regardless of source engine: the first
     * non-blank of name (ch), colName (hive), field (mysql); null if none.
     */
    public String getName() {
        for (String candidate : new String[] {name, colName, field}) {
            if (StringUtils.isNotBlank(candidate)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Returns the column comment, normalising blank values to null.
     */
    public String getComment() {
        return StringUtils.isNotBlank(comment) ? comment : null;
    }
}