
Implementing multiple data sources with MyBatis

The task this time is to read data from one database and then insert that data into another one. Doing this inside a single codebase inevitably involves multiple data sources. This post walks through how to configure and switch between multiple data sources with MyBatis.
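
The package names and mapper-XML paths used by the two configuration classes below assume a project layout roughly like this; only the package and classpath locations come from the code, the src/main layout itself is just an assumption for illustration:

    src/main/java/com/datareach/kafka/dao/master/      <- mapper interfaces for data source one
    src/main/java/com/datareach/kafka/dao/secondary/   <- mapper interfaces for data source two
    src/main/resources/mapper/master/                  <- mapper XML files for data source one
    src/main/resources/mapper/secondary/               <- mapper XML files for data source two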

  1. Define data source one

    import javax.sql.DataSource;

    import com.alibaba.druid.pool.DruidDataSource;
    import org.apache.ibatis.session.SqlSessionFactory;
    import org.mybatis.spring.SqlSessionFactoryBean;
    import org.mybatis.spring.annotation.MapperScan;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.Primary;
    import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
    import org.springframework.jdbc.datasource.DataSourceTransactionManager;

    @Configuration
    // Scan the mapper interfaces and register them with the Spring container
    @MapperScan(basePackages = MasterDataSourceConfig.PACKAGE, sqlSessionFactoryRef = "masterSqlSessionFactory")
    public class MasterDataSourceConfig {

        // Restrict scanning to the master package so it stays isolated from the other data source
        static final String PACKAGE = "com.datareach.kafka.dao.master";
        static final String MAPPER_LOCATION = "classpath:mapper/master/*.xml";

        // Values from application.yml are injected through the @Value annotation
        @Value("${master.datasource.url}")
        private String url;
        @Value("${master.datasource.username}")
        private String user;
        @Value("${master.datasource.password}")
        private String password;
        @Value("${master.datasource.driver-class-name}")
        private String driverClass;
    
        @Bean(name = "masterDataSource")
        @Primary
        public DataSource masterDataSource() {
            DruidDataSource dataSource = new DruidDataSource();
            dataSource.setDriverClassName(driverClass);
            dataSource.setUrl(url);
            dataSource.setUsername(user);
            dataSource.setPassword(password);
            return dataSource;
        }
    
        @Bean(name = "masterTransactionManager")
        @Primary
        public DataSourceTransactionManager masterTransactionManager() {
            return new DataSourceTransactionManager(masterDataSource());
        }
    
        @Bean(name = "masterSqlSessionFactory")
        @Primary
        public SqlSessionFactory masterSqlSessionFactory(@Qualifier("masterDataSource") DataSource masterDataSource)
                throws Exception {
            final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
            sessionFactory.setDataSource(masterDataSource);
            sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
                    .getResources(MasterDataSourceConfig.MAPPER_LOCATION));
            return sessionFactory.getObject();
        }
    }
    

    The application.yml configuration for data source one

    # master data source configuration
    master:
      datasource:
        url: jdbc:postgresql://localhost:5432/postgres
        driver-class-name: org.postgresql.Driver
        username: product
        password: 
        initial-size: 1
        min-idle: 1
        max-active: 20
        test-on-borrow: true
        max-wait: 60000
        time-between-eviction-runs-millis: 60000
        min-evictable-idle-time-millis: 300000
        validation-query: SELECT 1
        test-while-idle: true
        test-on-return: false
        pool-prepared-statements: false
        max-pool-prepared-statement-per-connection-size: 20
        filters: stat,wall,log4j,config
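
    Note that the @Value fields in MasterDataSourceConfig only read the URL, credentials and driver class, so the Druid pool settings above (initial-size, max-active, validation-query and so on) are never actually applied to the pool. One possible fix, shown here only as a sketch and not part of the original configuration, is to bind the whole master.datasource prefix with @ConfigurationProperties (this additionally requires importing org.springframework.boot.context.properties.ConfigurationProperties):

        // Hypothetical variant of masterDataSource(): bind every property under the
        // master.datasource prefix instead of reading only four fields with @Value.
        @Bean(name = "masterDataSource")
        @Primary
        @ConfigurationProperties(prefix = "master.datasource")
        public DataSource masterDataSource() {
            // Spring Boot's relaxed binding calls the matching Druid setters
            // (initial-size -> setInitialSize, test-while-idle -> setTestWhileIdle, ...)
            // after this method returns.
            return new DruidDataSource();
        }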
    
  2. Define data source two

    import javax.sql.DataSource;

    import com.alibaba.druid.pool.DruidDataSource;
    import org.apache.ibatis.session.SqlSessionFactory;
    import org.mybatis.spring.SqlSessionFactoryBean;
    import org.mybatis.spring.annotation.MapperScan;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
    import org.springframework.jdbc.datasource.DataSourceTransactionManager;

    @Configuration
    // Scan the mapper interfaces and register them with the Spring container
    @MapperScan(basePackages = SecondDataSourceConfig.PACKAGE, sqlSessionFactoryRef = "secondSqlSessionFactory")
    public class SecondDataSourceConfig {

        // Restrict scanning to the secondary package so it stays isolated from the other data source
        static final String PACKAGE = "com.datareach.kafka.dao.secondary";
        static final String MAPPER_LOCATION = "classpath:mapper/secondary/*.xml";
        @Value("${second.datasource.url}")
        private String url;
        @Value("${second.datasource.username}")
        private String user;
        @Value("${second.datasource.password}")
        private String password;
        @Value("${second.datasource.driver-class-name}")
        private String driverClass;
    
        @Bean(name = "secondDataSource")
        public DataSource clusterDataSource() {
            DruidDataSource dataSource = new DruidDataSource();
            dataSource.setDriverClassName(driverClass);
            dataSource.setUrl(url);
            dataSource.setUsername(user);
            dataSource.setPassword(password);
            return dataSource;
        }
        @Bean(name = "secondTransactionManager")
        public DataSourceTransactionManager clusterTransactionManager() {
            return new DataSourceTransactionManager(clusterDataSource());
        }
        @Bean(name = "secondSqlSessionFactory")
        public SqlSessionFactory clusterSqlSessionFactory(@Qualifier("secondDataSource") DataSource clusterDataSource)
                throws Exception {
            final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
            sessionFactory.setDataSource(clusterDataSource);
            sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
                    .getResources(SecondDataSourceConfig.MAPPER_LOCATION));
            return sessionFactory.getObject();
        }
    }
    

    The application.yml configuration for data source two

    second:
      datasource:
        url: jdbc:mysql://localhost:100/data?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=false&serverTimezone=GMT%2B8
        username: root
        password: 
        driver-class-name: com.mysql.jdbc.Driver
        max-idle: 10
        max-wait: 10000
        min-idle: 5
        initial-size: 5
    

In essence, two SqlSessionFactory instances are created, masterSqlSessionFactory and secondSqlSessionFactory, and @MapperScan binds each scanned package of mapper interfaces to its own SqlSessionFactory, so mappers in different packages talk to different databases. That is the whole multi-data-source mechanism.
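
To make the read-from-one-database, write-into-the-other flow concrete, here is a hypothetical usage sketch. The table, mapper and service names are made up for illustration; only the two package names and the secondTransactionManager bean name come from the configuration above:

    // --- com/datareach/kafka/dao/master/UserReadMapper.java (scanned by MasterDataSourceConfig) ---
    package com.datareach.kafka.dao.master;

    import java.util.List;
    import java.util.Map;

    import org.apache.ibatis.annotations.Select;

    public interface UserReadMapper {
        // Hypothetical table; this statement runs against the PostgreSQL master data source
        @Select("SELECT id, name FROM t_user")
        List<Map<String, Object>> selectAll();
    }

    // --- com/datareach/kafka/dao/secondary/UserWriteMapper.java (scanned by SecondDataSourceConfig) ---
    package com.datareach.kafka.dao.secondary;

    import java.util.Map;

    import org.apache.ibatis.annotations.Insert;

    public interface UserWriteMapper {
        // This statement runs against the MySQL second data source
        @Insert("INSERT INTO t_user (id, name) VALUES (#{id}, #{name})")
        int insert(Map<String, Object> row);
    }

    // --- UserSyncService.java (hypothetical service that copies rows across the two databases) ---
    package com.datareach.kafka.service;

    import org.springframework.stereotype.Service;
    import org.springframework.transaction.annotation.Transactional;

    import com.datareach.kafka.dao.master.UserReadMapper;
    import com.datareach.kafka.dao.secondary.UserWriteMapper;

    @Service
    public class UserSyncService {

        private final UserReadMapper readMapper;
        private final UserWriteMapper writeMapper;

        public UserSyncService(UserReadMapper readMapper, UserWriteMapper writeMapper) {
            this.readMapper = readMapper;
            this.writeMapper = writeMapper;
        }

        // The write side is managed by the second data source's transaction manager
        @Transactional("secondTransactionManager")
        public void copyAll() {
            readMapper.selectAll().forEach(writeMapper::insert);
        }
    }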
