由于某几个业务表数据量太大,数据由业务写,数据部门读。
写压力不大,读却很容易出现长时间等待问题(读操作由单独的系统进行),导致连接被长期占用,因此并发稍有增长就容易导致全库卡死!
于是,就拆库呗。
业务系统拆分就不要做了(微服务化),没那工夫。
直接原系统拆两个数据源出来,对某几个高压力表的写就单独用这个数据源,从而减轻压力。
所以,分库工作就变为了两个步骤:1. 两个数据源读写业务;
2. 将新数据库写动作同步回读库;
另外,出于方便性考虑,数据库也是使用阿里的 RDS 数据库,由一个实例变为两个!
代码上做两个数据源很简单,尤其是在原有代码就写得比较清晰的情况下;
如下是使用springboot和mybatis做的多数据源配置:1. 配置多个数据源类;
2. 启用mybatis多数据源,加载不同数据源;
3. 根据扫描路径区别使用的数据源;
4. 根据扫描路径将需要拆分的表与原表区别;
5. 测试时可使用同一机器上多库的形式运行,上线后为多实例同库运行;
6. 验证功能可用性;如有问题,及时修改;
具体配置如下:
// 原数据源配置 @Configuration @MapperScan(basePackages = MainDataSourceConfig.SCAN_BASE_PACKAGE, sqlSessionFactoryRef = "sqlSessionFactory") public class MainDataSourceConfig { public static final String SCAN_BASE_PACKAGE = "com.xxx.dao.mapper.main"; /** * xml 配置文件扫描路径 */ public static final String SCAN_XML_MAPPER_LOCATION = "classpath:mybatis/mappers/mysql/main/**/*Mapper.xml"; //jdbcConfig @Value("${jdbc.main.url}") private String jdbcUrl; @Value("${jdbc.main.driver}") private String driverName; @Value("${pool.main.maxPoolSize}") private int maxPoolSize; @Value("${jdbc.main.username}") private String jdbcUserName; @Value("${jdbc.main.password}") private String jdbcPwd; @Value("${pool.main.maxWait}") private int jdbcMaxWait; @Value("${pool.main.validationQuery}") private String validationQuery; @Bean(name = "druidDataSource") @Primary public DruidDataSource druidDataSource(){ DruidDataSource ds = new DruidDataSource(); ds.setUrl(jdbcUrl); ds.setDriverClassName(driverName); ds.setMaxActive(maxPoolSize); ds.setUsername(jdbcUserName); ds.setPassword(jdbcPwd); ds.setRemoveAbandoned(true); ds.setMaxWait(jdbcMaxWait); ds.setValidationQuery(validationQuery); return ds; } @Bean(name = "dataSourceTransactionManager") @Primary public DataSourceTransactionManager dataSourceTransactionManager(){ DataSourceTransactionManager dm = new DataSourceTransactionManager(); dm.setDataSource(druidDataSource()); return dm; } @Bean(name="sqlSessionFactory") @Primary public SqlSessionFactory sqlSessionFactory() throws Exception { SqlSessionFactoryBean sqlSessionFactory = new SqlSessionFactoryBean(); ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); Resource[] mapperXmlResource = resolver.getResources(SCAN_XML_MAPPER_LOCATION); sqlSessionFactory.setDataSource(druidDataSource()); sqlSessionFactory.setMapperLocations(mapperXmlResource); return sqlSessionFactory.getObject(); } } // 新数据源配置,仅仅改了下配置名,但是还不得不另一个配置类 @Configuration @MapperScan(basePackages = 
ExtraDataSourceConfig.SCAN_BASE_PACKAGE, sqlSessionFactoryRef = "sqlSessionFactoryExt") public class ExtraDataSourceConfig { public static final String SCAN_BASE_PACKAGE = "com.xxx.dao.mapper.ext"; /** * xml 配置文件扫描路径 */ public static final String SCAN_XML_MAPPER_LOCATION = "classpath:mybatis/mappers/mysql/ext/**/*Mapper.xml"; //jdbcConfig @Value("${jdbc.ext.url}") private String jdbcUrl; @Value("${jdbc.ext.driver}") private String driverName; @Value("${pool.ext.maxPoolSize}") private int maxPoolSize; @Value("${jdbc.ext.username}") private String jdbcUserName; @Value("${jdbc.ext.password}") private String jdbcPwd; @Value("${pool.ext.maxWait}") private int jdbcMaxWait; @Value("${pool.ext.validationQuery}") private String validationQuery; @Bean(name = "druidDataSourceExt") public DruidDataSource druidDataSource(){ DruidDataSource ds = new DruidDataSource(); ds.setUrl(jdbcUrl); ds.setDriverClassName(driverName); ds.setMaxActive(maxPoolSize); ds.setUsername(jdbcUserName); ds.setPassword(jdbcPwd); ds.setRemoveAbandoned(true); ds.setMaxWait(jdbcMaxWait); ds.setValidationQuery(validationQuery); return ds; } @Bean(name = "dataSourceTransactionManagerExt") public DataSourceTransactionManager dataSourceTransactionManager(){ DataSourceTransactionManager dm = new DataSourceTransactionManager(); dm.setDataSource(druidDataSource()); return dm; } @Bean(name="sqlSessionFactoryExt") public SqlSessionFactory sqlSessionFactory() throws Exception { SqlSessionFactoryBean sqlSessionFactory = new SqlSessionFactoryBean(); ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); Resource[] mapperXmlResource = resolver.getResources(SCAN_XML_MAPPER_LOCATION); sqlSessionFactory.setDataSource(druidDataSource()); sqlSessionFactory.setMapperLocations(mapperXmlResource); return sqlSessionFactory.getObject(); } }