
Writing static multi-data-source code and a scheduled task to synchronize data between databases

2022-06-21 15:22:00 starcpdk

First, configure two data sources in the Spring Boot configuration file (application.yml):

server:
  port: 8083

spring:
  datasource:
    # DataSourceBuilder binds these properties straight onto HikariDataSource,
    # so the key must be jdbc-url rather than url
    remote:
      driver-class-name: com.mysql.cj.jdbc.Driver
      jdbc-url: jdbc:mysql://21.33.322.22/greer?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=UTC
      username: 333
      password: [email protected]
    mine:
      driver-class-name: com.mysql.cj.jdbc.Driver
      jdbc-url: jdbc:mysql://23.33.212.22/ferer?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=UTC
      username: w22
      password: 222
  profiles:
    active: dev

mybatis-plus:
  configuration:
    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl # print SQL statements to the console

logging:
  level:
    com.starcpdk.mapper: debug

Then we can start from the mapper layer. Here is my mapper layer structure:

(Screenshot omitted: the mapper package is split into com.starcpdk.mapper.remote and com.starcpdk.mapper.mine, each with an xml subdirectory holding the mapper XML files.)

Now let's write the configuration classes for the data sources.
First, a configuration class that reads the database settings from the configuration file and creates the two data sources:

package com.starcpdk.config;

import com.zaxxer.hikari.HikariDataSource;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import javax.sql.DataSource;

@Configuration
public class DataSourceConfig {

    // Mark this data source as the default one
    @Primary
    // Register the object in the Spring container (managed by Spring)
    @Bean(name = "remoteDataSource")
    // Map the spring.datasource.remote.* parameters from application.yml onto this bean
    @ConfigurationProperties(prefix = "spring.datasource.remote")
    public DataSource getDataSource1() {
        // Create the data source
        return DataSourceBuilder.create().type(HikariDataSource.class).build();
    }

    // Not marked @Primary: the remote data source above is the default
    @Bean(name = "mineDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.mine")
    public DataSource getDataSource2() {
        return DataSourceBuilder.create().type(HikariDataSource.class).build();
    }
}

Next, we bind each data source to its own mapper package and mapper XML files:

package com.starcpdk.config;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

import javax.sql.DataSource;

/**
 * Data source config 2: MyBatis setup for the mine data source
 */
@Configuration
@MapperScan(basePackages = {"com.starcpdk.mapper.mine"}, sqlSessionFactoryRef = "mineSqlSessionFactory")
public class MybatisMineConfig {

    @Autowired
    @Qualifier("mineDataSource")
    private DataSource dataSource;

    /**
     * Create the SqlSessionFactory for the mine data source
     */
    @Bean(name = "mineSqlSessionFactory")
    // not @Primary: the remote configuration below is the default
    public SqlSessionFactory mineSqlSessionFactory() throws Exception {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        // Location of the MyBatis mapper XML files for this data source
        bean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath*:com/starcpdk/mapper/mine/xml/*.xml"));
        return bean.getObject();
    }

    /**
     * Create the SqlSessionTemplate from the SqlSessionFactory
     */
    @Bean(name = "mineSqlSessionTemplate")
    public SqlSessionTemplate mineSqlSessionTemplate(@Qualifier("mineSqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        // SqlSessionTemplate is thread safe and can be shared by multiple DAOs
        return new SqlSessionTemplate(sqlSessionFactory);
    }
}

The remote data source gets the same treatment, except that its beans are marked @Primary:

package com.starcpdk.config;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

import javax.sql.DataSource;

/**
 * Data source config 1: MyBatis setup for the remote data source (the default)
 */
@Configuration
@MapperScan(basePackages = {"com.starcpdk.mapper.remote"}, sqlSessionFactoryRef = "remoteSqlSessionFactory")
public class MybatisRemoteConfig {

    // @Qualifier picks the bean named remoteDataSource out of the Spring container
    @Autowired
    @Qualifier("remoteDataSource")
    private DataSource dataSource;

    /**
     * Create the SqlSessionFactory for the remote data source
     */
    @Bean(name = "remoteSqlSessionFactory")
    @Primary
    public SqlSessionFactory remoteSqlSessionFactory() throws Exception {
        // Equivalent to the classic XML configuration:
        // <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
        //     <property name="dataSource" ref="dataSource" />
        //     <property name="mapperLocations" value="classpath:mybatis-mapper/*.xml"/>
        // </bean>
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        // Location of the MyBatis mapper XML files for this data source
        bean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath*:com/starcpdk/mapper/remote/xml/*.xml"));
        return bean.getObject();
    }

    /**
     * Create the SqlSessionTemplate from the SqlSessionFactory
     */
    @Bean(name = "remoteSqlSessionTemplate")
    @Primary
    public SqlSessionTemplate remoteSqlSessionTemplate(@Qualifier("remoteSqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        // SqlSessionTemplate is thread safe and can be shared by multiple DAOs
        return new SqlSessionTemplate(sqlSessionFactory);
    }
}
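
The post does not show the mapper layer itself. As a point of reference, here is a minimal sketch of what the remote-side mapper interface might look like, using the method names that the scheduled task below relies on; the interface name and signatures are assumptions, not the author's actual code, and the SQL would live in classpath:com/starcpdk/mapper/remote/xml/.

package com.starcpdk.mapper.remote;

import com.starcpdk.pojo.Maindata;

import java.util.List;
import java.util.Map;

// Hypothetical sketch: this interface is picked up by the @MapperScan in
// MybatisRemoteConfig, so all of its statements run against the remote data source.
public interface MaindataRemoteMapper {

    // latest timestamp present in the remote table
    String maxRemoteDatetime();

    // earliest timestamp present in the remote table
    String minRemoteDatetime();

    // rows between map.get("mineDatetime") and map.get("remoteDatetime")
    List<Maindata> getAllData(Map<String, String> map);

    // rows up to map.get("remoteDatetime"), used when the local table is still empty
    List<Maindata> getAllDataWithMineIsNull(Map<String, String> map);
}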

Finally, we write our own config class. This is where I put the scheduled task, implemented with Spring Boot's built-in scheduling (@EnableScheduling plus @Scheduled):

package com.starcpdk.config;

import com.baomidou.mybatisplus.extension.plugins.PerformanceInterceptor;
import com.starcpdk.pojo.Maindata;
import com.starcpdk.service.mine.MaindataMineService;
import com.starcpdk.service.remote.MaindataRemoteService;
import com.starcpdk.util.DateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;

import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;

@Configuration
@EnableScheduling   // 2. enable scheduled tasks
@Slf4j
public class MyConfig {

    @Resource
    MaindataMineService maindataMineService;

    @Resource
    MaindataRemoteService maindataRemoteService;

    @Bean
    @Profile({"dev", "test"})   // only active in the dev and test environments
    public PerformanceInterceptor performanceInterceptor() {
        PerformanceInterceptor performanceInterceptor = new PerformanceInterceptor();
        performanceInterceptor.setMaxTime(100);   // in ms; SQL running longer than this is aborted
        performanceInterceptor.setFormat(true);
        return performanceInterceptor;
    }

    // 3. register the scheduled task (runs every day at 09:27)
    @Scheduled(cron = "0 27 9 * * ?")
    private void insertData() {
        log.info("Scheduled task start time: " + LocalDateTime.now());
        log.info("mineDateTimeMax" + maindataMineService.maxMineDatetime());
        log.info("remoteDateTimeMax" + maindataRemoteService.maxRemoteDatetime());

        String mineDatetime = maindataMineService.maxMineDatetime();
        String remoteDatetime = maindataRemoteService.maxRemoteDatetime();
        HashMap<String, String> map = new HashMap<>();
        map.put("mineDatetime", mineDatetime);
        map.put("remoteDatetime", remoteDatetime);

        if (map.get("mineDatetime") != null) {
            List<Maindata> list = maindataRemoteService.getAllData(map);
            log.info("list: {}", list);
            for (Maindata maindata : list) {
                HashMap<String, Object> insertMap = new HashMap<>();
                insertMap.put("mdId", maindata.getMdId());
                insertMap.put("sid", maindata.getSid());
                insertMap.put("sId", "");
                insertMap.put("stationId", maindata.getStationId());
                insertMap.put("mdValue", maindata.getMdValue());
                insertMap.put("mdDatetime", maindata.getMdDatetime());
                insertMap.put("mdSn", maindata.getMdSn());
                maindataMineService.insertData(insertMap);
            }
        } else {
            List<Maindata> list = maindataRemoteService.getAllDataWithMineIsNull(map);
            log.info("list: {}", list);
            for (Maindata maindata : list) {
                HashMap<String, Object> insertMap = new HashMap<>();
                insertMap.put("mdId", maindata.getMdId());
                insertMap.put("sid", maindata.getSid());
                insertMap.put("sId", "");
                insertMap.put("stationId", maindata.getStationId());
                insertMap.put("mdValue", maindata.getMdValue());
                insertMap.put("mdDatetime", maindata.getMdDatetime());
                insertMap.put("mdSn", maindata.getMdSn());
                maindataMineService.insertData(insertMap);
            }
        }

        log.info("Scheduled task end time: " + LocalDateTime.now());
    }

}

The code I originally wrote in this scheduled task caused an out-of-memory error.
When I first looked at the heap overflow, my thinking was that the `new HashMap` sits inside the loop, and a huge number of iterations creates a huge number of map objects. Since the keys are the same on every iteration and the values are simply overwritten, the `new HashMap` can be moved outside the loop so the whole task only ever holds a single map object.

However, it still ran out of memory, and this time we tracked the problem down to the list collection.
The query against the remote database returned more than three million rows, which means a single list holding more than three million objects, and that alone is enough to exhaust the heap.

So the only way out was to restructure the code.
I use time as the constraint: each pass queries and synchronizes at most two days of data, and once that window has been synchronized it moves on to the following days.

package com.starcpdk.config;

import com.baomidou.mybatisplus.extension.plugins.PerformanceInterceptor;
import com.starcpdk.pojo.Maindata;
import com.starcpdk.service.mine.MaindataMineService;
import com.starcpdk.service.remote.MaindataRemoteService;
import com.starcpdk.util.DateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;

import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;

@Configuration
@EnableScheduling   // 2. enable scheduled tasks
@Slf4j
public class MyConfig {

    @Resource
    MaindataMineService maindataMineService;

    @Resource
    MaindataRemoteService maindataRemoteService;

    @Bean
    @Profile({"dev", "test"})   // only active in the dev and test environments
    public PerformanceInterceptor performanceInterceptor() {
        PerformanceInterceptor performanceInterceptor = new PerformanceInterceptor();
        performanceInterceptor.setMaxTime(100);   // in ms; SQL running longer than this is aborted
        performanceInterceptor.setFormat(true);
        return performanceInterceptor;
    }

    // 3. register the scheduled task (runs every day at 09:27)
    @Scheduled(cron = "0 27 9 * * ?")
    private void insertData() {
        log.info("Scheduled task start time: " + LocalDateTime.now());
        log.info("mineDateTimeMax" + maindataMineService.maxMineDatetime());
        log.info("remoteDateTimeMax" + maindataRemoteService.maxRemoteDatetime());

        String mineDatetime = maindataMineService.maxMineDatetime();
        String remoteDatetime = maindataRemoteService.maxRemoteDatetime();
        HashMap<String, String> map = new HashMap<>();
        map.put("mineDatetime", mineDatetime);
        map.put("remoteDatetime", remoteDatetime);

        if (map.get("mineDatetime") != null) {
            List<Maindata> list = maindataRemoteService.getAllData(map);
            // log.info("list: {}", list);
            // the insert map is now created once, outside the loop
            HashMap<String, Object> insertMap = new HashMap<>();
            for (Maindata maindata : list) {
                insertMap.put("mdId", maindata.getMdId());
                insertMap.put("sid", maindata.getSid());
                insertMap.put("sId", "");
                insertMap.put("stationId", maindata.getStationId());
                insertMap.put("mdValue", maindata.getMdValue());
                insertMap.put("mdDatetime", maindata.getMdDatetime());
                insertMap.put("mdSn", maindata.getMdSn());
                maindataMineService.insertData(insertMap);
            }
        } else {

            String maxMineDateTime = maindataMineService.maxMineDatetime();
            String maxRemoteDatetime = maindataRemoteService.maxRemoteDatetime();

            // keep syncing two-day windows until the local maximum catches up with the remote maximum
            // (Objects.equals: compare string contents, not references)
            while (!Objects.equals(maxMineDateTime, maxRemoteDatetime)) {

                if (maxMineDateTime == null || "".equals(maxMineDateTime)) {
                    // the local table is still empty: bootstrap it with the earliest remote data
                    map.put("remoteDatetime", maindataRemoteService.minRemoteDatetime());

                    List<Maindata> list = maindataRemoteService.getAllDataWithMineIsNull(map);
                    // log.info("list: {}", list);
                    HashMap<String, Object> insertMap = new HashMap<>();

                    for (Maindata maindata : list) {
                        insertMap.put("mdId", maindata.getMdId());
                        insertMap.put("sid", maindata.getSid());
                        insertMap.put("sId", "");
                        insertMap.put("stationId", maindata.getStationId());
                        insertMap.put("mdValue", maindata.getMdValue());
                        insertMap.put("mdDatetime", maindata.getMdDatetime());
                        insertMap.put("mdSn", maindata.getMdSn());
                        maindataMineService.insertData(insertMap);
                    }

                    // re-read the local maximum so the window query below has a starting point
                    maxMineDateTime = maindataMineService.maxMineDatetime();
                }

                // sync a two-day window starting from the current local maximum
                map.put("mineDatetime", maxMineDateTime);
                map.put("remoteDatetime", DateUtils.getNextDay(maxMineDateTime, "2"));
                List<Maindata> list = maindataRemoteService.getAllData(map);
                HashMap<String, Object> insertMap = new HashMap<>();

                for (Maindata maindata : list) {
                    insertMap.put("mdId", maindata.getMdId());
                    insertMap.put("sid", maindata.getSid());
                    insertMap.put("sId", "");
                    insertMap.put("stationId", maindata.getStationId());
                    insertMap.put("mdValue", maindata.getMdValue());
                    insertMap.put("mdDatetime", maindata.getMdDatetime());
                    insertMap.put("mdSn", maindata.getMdSn());
                    maindataMineService.insertData(insertMap);
                }

                // advance the loop: refresh the local maximum before the next check
                maxMineDateTime = maindataMineService.maxMineDatetime();
            }
        }

        log.info("Scheduled task end time: " + LocalDateTime.now());
    }

}
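
The task relies on a DateUtils.getNextDay helper that the post does not show. Here is a minimal sketch of what it might look like, under the assumption that the timestamps are strings in yyyy-MM-dd HH:mm:ss format and that the second argument is the number of days to add; the author's real implementation may differ.

package com.starcpdk.util;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// Hypothetical sketch of the DateUtils helper used by the scheduled task above.
public class DateUtils {

    private static final DateTimeFormatter FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    // Returns the given datetime shifted forward by `days` days,
    // e.g. getNextDay("2022-06-01 00:00:00", "2") -> "2022-06-03 00:00:00"
    public static String getNextDay(String datetime, String days) {
        LocalDateTime parsed = LocalDateTime.parse(datetime, FORMATTER);
        return parsed.plusDays(Long.parseLong(days)).format(FORMATTER);
    }
}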

The code above can still be optimized: instead of inserting row by row, the data can be imported in batches, which is considerably more efficient for large volumes of data.
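
For example, here is a minimal sketch of chunked batch inserts that could replace the per-row insertData calls inside MyConfig. It assumes a hypothetical insertBatch method on the mine-side service (for instance backed by a MyBatis <foreach> multi-row INSERT); that method is not part of the original post, and java.util.ArrayList would need to be imported.

    // Hypothetical sketch: buffer rows and hand each chunk to a single batch insert
    // instead of issuing one INSERT per row.
    private static final int BATCH_SIZE = 1000;

    private void insertInBatches(List<Maindata> list) {
        List<Maindata> buffer = new ArrayList<>(BATCH_SIZE);
        for (Maindata maindata : list) {
            buffer.add(maindata);
            if (buffer.size() == BATCH_SIZE) {
                // assumed service method: one multi-row INSERT per chunk of 1000 rows
                maindataMineService.insertBatch(buffer);
                buffer.clear();
            }
        }
        if (!buffer.isEmpty()) {
            maindataMineService.insertBatch(buffer);
        }
    }

Batching by 1000 rows keeps each statement at a reasonable size while cutting the number of round trips to the mine database by three orders of magnitude.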

Copyright notice: this article was written by [starcpdk]. Please include a link to the original when reposting.
https://yzsam.com/2022/02/202202221201283462.html