Elastic管理平台开发记录二:数据同步
回顾
上一篇已经完成了代码生成的功能,但还存在一个问题:生成之后无法直接使用,因为 Java 没有对新生成的 model、mapper 类进行编译,查询出的数据也就无法映射到对应的 JavaBean 中
解决思路
先保证重启后生成的代码是可用的,再来解决免重启生效的问题
1.解决多类型数据库获取数据库连接问题
因为要根据不同类型的数据库获取连接,所以需要两个连接池配置类(MySQL 与 SQL Server 各一个)
MySQLDataScourceConfig
/**
 * MySQL connection-pool configuration.
 *
 * <p>Builds a Druid {@link DataSource} and a MyBatis {@link SqlSessionFactory} for a
 * dynamically chosen MySQL database (the database name is supplied at runtime, not
 * from a static Spring property file).
 *
 * <p>NOTE(review): the class name keeps the historical misspelling "Scource" because
 * external callers reference it by this name.
 *
 * @author wangql
 */
public class MySQLDataScourceConfig {

    static final String PACKAGE = "com.sci99.generator.dao.generator";
    static final String MAPPER_LOCATION = "classpath:/mapping/generator/*.xml";

    private String url;
    private String database; // FIX: was misspelled "databse"
    private String user;
    private String password;
    private String driverClass;

    public MySQLDataScourceConfig() { }

    /**
     * @param datasource saved connection info (url, user, password, driver class)
     * @param database   target database name; may be {@code null} when the url already
     *                   points at a concrete database
     */
    public MySQLDataScourceConfig(Datasource datasource, String database) {
        this.url = datasource.getUrl();
        this.database = database;
        this.user = datasource.getUsername();
        this.password = datasource.getPassword();
        this.driverClass = datasource.getDriver();
    }

    /**
     * Creates the Druid pooled data source for the configured database.
     *
     * @return a configured {@link DruidDataSource}
     */
    public DataSource masterDataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName(driverClass);
        if (this.database != null) {
            dataSource.setUrl(this.url + "/" + this.database
                    + "?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&useSSL=false");
        } else {
            dataSource.setUrl(this.url);
        }
        dataSource.setUsername(user);
        dataSource.setPassword(password);
        // FIX: keep idle-connection validation consistent with SQLServerDataSourceConfig,
        // otherwise pooled connections silently die after MySQL's wait_timeout.
        dataSource.setTimeBetweenEvictionRunsMillis(60000);
        dataSource.setMinEvictableIdleTimeMillis(300000);
        dataSource.setValidationQuery("SELECT 1");
        dataSource.setTestWhileIdle(true);
        return dataSource;
    }

    /** Transaction manager bound to {@link #masterDataSource()}. */
    public DataSourceTransactionManager masterTransactionManager() {
        return new DataSourceTransactionManager(masterDataSource());
    }

    /**
     * Builds a MyBatis session factory over the given data source, loading the
     * generated mapper XML files from {@link #MAPPER_LOCATION}.
     *
     * @param mysqlDataSource the data source to bind
     * @return the session factory
     * @throws Exception if the factory cannot be built or mapper resources are missing
     */
    public SqlSessionFactory mysqlSqlSessionFactory(@Qualifier("mysqlDataSource") DataSource mysqlDataSource)
            throws Exception {
        final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(mysqlDataSource);
        sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources(MySQLDataScourceConfig.MAPPER_LOCATION));
        return sessionFactory.getObject();
    }
}
SQLServerDataSourceConfig
/**
 * SQL Server connection-pool configuration.
 *
 * <p>Builds a Druid {@link DataSource} and a MyBatis {@link SqlSessionFactory} for a
 * dynamically chosen SQL Server database.
 *
 * @author wangql
 */
public class SQLServerDataSourceConfig {

    static final String PACKAGE = "com.sci99.generator.dao.generator";
    static final String MAPPER_LOCATION = "classpath:/mapping/generator/*.xml";

    private String url;
    private String database; // FIX: was misspelled "databse"
    private String user;
    private String password;
    private String driverClass;

    public SQLServerDataSourceConfig() { }

    /**
     * @param datasource saved connection info (url, user, password, driver class)
     * @param database   target database name; may be {@code null} when the url already
     *                   points at a concrete database
     */
    public SQLServerDataSourceConfig(Datasource datasource, String database) {
        this.url = datasource.getUrl();
        this.database = database;
        this.user = datasource.getUsername();
        this.password = datasource.getPassword();
        this.driverClass = datasource.getDriver();
    }

    /**
     * Creates the Druid pooled data source for the configured database, with
     * idle-connection validation enabled.
     *
     * @return a configured {@link DruidDataSource}
     */
    public DataSource masterDataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName(driverClass);
        if (this.database != null) {
            // SQL Server selects the database via a url property, not a path segment.
            dataSource.setUrl(this.url + "; DatabaseName=" + this.database);
        } else {
            dataSource.setUrl(this.url);
        }
        dataSource.setUsername(user);
        dataSource.setPassword(password);
        dataSource.setMaxWait(3600000);
        // Evict and re-validate idle connections so stale ones are never handed out.
        dataSource.setTimeBetweenEvictionRunsMillis(60000);
        dataSource.setMinEvictableIdleTimeMillis(300000);
        dataSource.setValidationQuery("SELECT 1");
        dataSource.setTestOnBorrow(true);
        dataSource.setTestWhileIdle(true);
        dataSource.setTestOnReturn(true);
        return dataSource;
    }

    /** Transaction manager bound to {@link #masterDataSource()}. */
    public DataSourceTransactionManager masterTransactionManager() {
        return new DataSourceTransactionManager(masterDataSource());
    }

    /**
     * Builds a MyBatis session factory over the given data source, loading the
     * generated mapper XML files from {@link #MAPPER_LOCATION}.
     *
     * @param sqlServerDataSource the data source to bind
     * @return the session factory
     * @throws Exception if the factory cannot be built or mapper resources are missing
     */
    public SqlSessionFactory sqlServerSqlSessionFactory(@Qualifier("sqlServerDataSource") DataSource sqlServerDataSource)
            throws Exception {
        final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(sqlServerDataSource);
        sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources(SQLServerDataSourceConfig.MAPPER_LOCATION));
        return sessionFactory.getObject();
    }
}
获取方式
if ("mysql".equals(datasource.getType())) {
    MySQLDataScourceConfig config = new MySQLDataScourceConfig(datasource, params.getDatabases());
    // Build the SqlSession by hand — dynamic data sources are not registered as Spring beans.
    SqlSessionFactory sqlSessionFactory = config.mysqlSqlSessionFactory(config.masterDataSource());
    SqlSession sqlSession = sqlSessionFactory.openSession();
} else {
    SQLServerDataSourceConfig config = new SQLServerDataSourceConfig(datasource, params.getDatabases());
    SqlSessionFactory sqlSessionFactory = config.sqlServerSqlSessionFactory(config.masterDataSource());
    // FIX: original read "SQlSession" (typo) — the MyBatis type is SqlSession; would not compile.
    SqlSession sqlSession = sqlSessionFactory.openSession();
}
2.配置 ES 集群 Client
@Configuration
public class ESConfig {
@Autowired
ClusterMapper clusterMapper;
// Registry of ES transport clients keyed by clusterId.
// NOTE(review): public static mutable map — any code can read/modify it directly;
// consider encapsulating behind an accessor.
public static Map<String, Client> map = new ConcurrentHashMap<String, Client>();
/**
 * Initializes one Elasticsearch transport client per cluster row returned by
 * {@code clusterMapper.findClusterList()} and exposes the whole map as a bean.
 *
 * <p>Clusters whose client construction throws are skipped: the exception is only
 * printed and the clusterId is simply absent from the returned map.
 *
 * @return map of clusterId -&gt; Client for every cluster that initialized successfully
 */
@Bean
@Qualifier("esClients")
public Map<String, Client> initEsClients() {
List<ClusterInfo> clusterInfoList = clusterMapper.findClusterList();
if (clusterInfoList != null) {
for (ClusterInfo clusterInfo : clusterInfoList) {
try {
Client client = EsUtils.getClient(clusterInfo.getIp(), clusterInfo.getClusterName(), clusterInfo.getTcpPort());
//TODO check cluster connectivity before registering the client
map.put(clusterInfo.getClusterId(), client);
} catch (Exception e) {
// NOTE(review): failure is swallowed after printing — downstream lookups for this
// clusterId will get null from the map.
e.printStackTrace();
}
}
}
return map;
}
}
/**
 * Creates an Elasticsearch transport client for the given cluster.
 *
 * @param ip          node IP address or host name
 * @param clusterName cluster name; must match {@code cluster.name} on the server side
 * @param port        TCP transport port
 * @return a connected {@link Client}; the caller is responsible for closing it
 * @throws Exception if the host cannot be resolved
 */
public static Client getClient(String ip, String clusterName, int port) throws Exception {
    // FIX: removed the dead "if (client == null)" check — the variable had just been
    // assigned null, so the branch was always taken.
    Settings settings = Settings.builder()
            .put("cluster.name", clusterName)
            // sniff=true lets the client discover the remaining nodes of the cluster.
            .put("client.transport.sniff", true)
            .put("client.transport.ping_timeout", "10s")
            .build();
    TransportClient client = new PreBuiltTransportClient(settings);
    client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(ip), port));
    return client;
}
3.解决大数据查询 OOM 问题
使用 mybatis 流式查询
GxidResultHandler(其中 Data 类是与表结构对应的 JavaBean)
/**
 * MyBatis streaming {@link ResultHandler} that buffers rows and bulk-indexes them
 * into Elasticsearch in fixed-size batches, avoiding OOM on large result sets.
 *
 * <p>Not thread-safe: index/type/clusterId and the batch buffer are mutable instance
 * state, so one handler instance must serve one streamed query at a time.
 */
@Component
public class GxidResultHandler implements ResultHandler<Data> {

    @Autowired
    @Qualifier("esClients")
    Map<String, Client> map;

    private String index;
    private String type;
    private String clusterId;

    // Shared JSON serializer; ObjectMapper is thread-safe, one static instance suffices.
    private static final ObjectMapper MAPPER = new ObjectMapper();
    // Rows buffered before each bulk request is flushed.
    private static final int BATCH_SIZE = 10000;

    private int size;      // rows in the current (unflushed) batch
    private int total = 0; // rows handled since the last end()

    /**
     * Temporary container for the current batch.
     */
    private List<Data> list = new ArrayList<>();

    public GxidResultHandler() throws UnknownHostException {
    }

    @SneakyThrows
    @Override
    public void handleResult(ResultContext<? extends Data> resultContext) {
        // The streamed query delivers exactly one row per call.
        list.add(resultContext.getResultObject());
        size++;
        total++;
        if (size == BATCH_SIZE) {
            handle();
        }
    }

    /**
     * Flushes the buffered rows to Elasticsearch as a single bulk request, then
     * resets the batch buffer. No-op when the buffer is empty.
     *
     * @throws Exception on serialization failure, missing client, or bulk failures
     */
    private void handle() throws Exception {
        if (list.isEmpty()) {
            // FIX: an empty bulk request fails ES validation; end() may call us with no rows.
            return;
        }
        Client client = map.get(this.clusterId);
        if (client == null) {
            // FIX: the original retried map.get with the same key (a no-op) and then NPE'd
            // on prepareBulk; fail with a diagnosable message instead.
            throw new IllegalStateException("No ES client registered for clusterId=" + this.clusterId);
        }
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        for (Data data : list) {
            IndexRequest index = client
                    .prepareIndex(this.index, this.type, data.getPrimaryKey() + "")
                    .setSource(MAPPER.writeValueAsString(data))
                    .request();
            bulkRequest.add(index);
        }
        BulkResponse bulkResponse = bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).execute().actionGet();
        if (bulkResponse.hasFailures()) {
            // FIX: bulk failures were previously ignored, silently dropping documents.
            throw new IllegalStateException("Bulk index failed: " + bulkResponse.buildFailureMessage());
        }
        size = 0;
        list.clear();
    }

    /**
     * Flushes the final (possibly partial) batch. Callers must invoke this once the
     * streamed query has finished.
     *
     * @return the number of rows handled since the previous end()
     * @throws Exception if the final flush fails
     */
    public Integer end() throws Exception {
        this.handle();
        int num = total;
        total = 0;
        return num;
    }

    public String getIndex() {
        return index;
    }

    public void setIndex(String index) {
        this.index = index;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getClusterId() {
        return clusterId;
    }

    public void setClusterId(String clusterId) {
        this.clusterId = clusterId;
    }
}
使用
@Autowired
private GxidResultHandler handler;
// Load the generated mapper interface by its fully-qualified name
// (the class must already be compiled — see compilerClasses below).
Class interfaceImpl = Class.forName("com.xx.xx.xx.generator."+name+"Mapper");
// Target index/type for the bulk writes; ES index names must be lowercase.
handler.setIndex(params.getTables().toLowerCase());
handler.setType(params.getTables());
handler.setClusterId(params.getClusterId());
// Streamed select: each row is pushed into the handler instead of being
// collected into one huge in-memory list.
sqlSession.select(interfaceImpl.getName() + ".list", handler);
// Flush the final partial batch and get the row count (Integer unboxes to long).
long num = handler.end();
// Release the manually opened session.
sqlSession.close();
return num;
4.解决免重启生效问题
将生成的 Java 源文件手动编译到 classpath 下,其中 FileUtil 是 hutool 提供的文件工具类
/**
 * Compiles the generated model and mapper source files into {@code target/classes}
 * and copies the generated MyBatis XML next to them, so newly generated code is
 * usable without restarting the application.
 *
 * @param table source table name (underscored); converted to a CamelCase class name
 * @throws Exception if no system compiler is available or compilation fails
 */
public static void compilerClasses(String table) throws Exception {
    String name = CodeGenerateUtils.replaceUnderLineAndUpperCase(table);
    String path = System.getProperty("user.dir");
    String classPath = path + "/target/classes";
    String modelPath = path + "/src/main/java/com/xx/xx/model/generator/" + name + ".java";
    String daoPath = path + "/src/main/java/com/xx/xx/dao/generator/" + name + "Mapper.java";
    String mapperPath = path + "/src/main/resources/mapping/generator/" + name + "Mapper.xml";
    JavaCompiler javaCompiler = ToolProvider.getSystemJavaCompiler();
    if (javaCompiler == null) {
        // FIX: getSystemJavaCompiler() returns null on a plain JRE — fail fast, not NPE.
        throw new IllegalStateException("No system Java compiler available; run on a JDK, not a JRE");
    }
    // FIX: the original ignored run()'s exit codes, so compile errors passed silently.
    int modelResult = javaCompiler.run(null, null, null, "-d", classPath, modelPath);
    if (modelResult != 0) {
        throw new IllegalStateException("Compilation failed (exit " + modelResult + ") for " + modelPath);
    }
    int daoResult = javaCompiler.run(null, null, null, "-d", classPath, daoPath);
    if (daoResult != 0) {
        throw new IllegalStateException("Compilation failed (exit " + daoResult + ") for " + daoPath);
    }
    // FileUtil is hutool's file helper; place the mapper XML beside the compiled classes.
    FileUtil.copy(mapperPath, classPath + "/mapping/generator", true);
}

浙公网安备 33010602011771号