基于binlog监听实现MySQL数据的异步同步

1.首先mysql要开启binlog,修改mysql的配置文件:

server_id=2023  #主节点的编号,保证唯一即可
log_bin=mysql-bin
binlog_format=row

2.使用mysql-binlog-connector-java来监听MySQL的二进制日志(binlog)事件,引入依赖。

<dependency>
    <!-- 注意:com.github.shyiko 的最后版本是 0.21.0;0.29.2 由持续维护的
         com.zendesk fork 发布(包名仍为 com.github.shyiko.mysql.binlog,代码无需改动) -->
    <groupId>com.zendesk</groupId>
    <artifactId>mysql-binlog-connector-java</artifactId>
    <version>0.29.2</version>
</dependency>

定义配置:

mysql:
  binlog:
    host: localhost
    port: 3306
    schema: heg_hotel
    username: root
    password: 123456
    tables: t_user,t_order

3.代码中配置:

import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.stephen.listener.CustomerBinlogListener;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Registers and starts a MySQL binlog listener on application startup.
 *
 * @Description 开启binlog监听工具
 * @Author jack
 * @Date 2024/8/22 14:35
 */
@Configuration
public class BinlogConfig {
    /** Max time (ms) to wait for the binlog connection to be established. */
    private static final long CONNECT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(10);

    @Value("${mysql.binlog.host}")
    private String host;

    @Value("${mysql.binlog.port}")
    private Integer port;

    @Value("${mysql.binlog.username}")
    private String userName;

    @Value("${mysql.binlog.password}")
    private String password;

    @Value("${mysql.binlog.schema:#{null}}")
    private String schema;

    @Value("${mysql.binlog.tables}")
    private String tables;

    // Must be unique across the replication topology (distinct from the MySQL
    // server_id and from every other replica/client). Configurable so two
    // instances of this app don't clash; defaults to the previous value.
    @Value("${mysql.binlog.server-id:1}")
    private long serverId;

    /**
     * Builds the listener with the list of fully-qualified "db.table" names to watch.
     * When a schema is configured, each configured table is prefixed with it;
     * otherwise the configured entries are expected to already be "db.table".
     */
    @Bean
    public CustomerBinlogListener customerBinlogListener() {
        //考虑到多个数据库的情况
        if (StringUtils.isNotBlank(schema)) {
            //只监听指定库 例如:demo.user
            List<String> schemaTables = Lists.newArrayList();
            Arrays.stream(tables.split(","))
                    .forEach(table -> schemaTables.add(Joiner.on('.').join(schema, table)));
            return new CustomerBinlogListener(schemaTables);
        }
        //监听多个库(条目需形如 db.table)
        return new CustomerBinlogListener(Arrays.asList(tables.split(",")));
    }

    /**
     * Connects the binlog client. destroyMethod closes the socket and stops the
     * keep-alive thread on context shutdown (otherwise they leak).
     */
    @Bean(destroyMethod = "disconnect")
    public BinaryLogClient binaryLogClient() {
        //不指定schema监听所有的数据库
        BinaryLogClient client = new BinaryLogClient(host, port, userName, password);
        client.registerEventListener(customerBinlogListener());
        client.setServerId(serverId);
        client.setKeepAlive(true); // 保持连接
        client.setKeepAliveInterval(10 * 1000); // 心跳包发送频率
        client.setKeepAliveConnectTimeout(5 * 1000); // 心跳发送超时设置
        try {
            // connect(timeout) runs the client on its own thread and waits at most
            // CONNECT_TIMEOUT_MS for the connection; the no-arg connect() blocks the
            // caller forever and would hang Spring startup here.
            client.connect(CONNECT_TIMEOUT_MS);
        } catch (IOException | TimeoutException e) {
            throw new IllegalStateException(
                    "Failed to connect binlog client to " + host + ":" + port, e);
        }
        return client;
    }
}

自己实现监听事件:

import com.alibaba.fastjson.JSON;
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.event.*;
import com.google.common.collect.Maps;
import com.stephen.model.BinlogDto;
import lombok.extern.slf4j.Slf4j;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;


/**
 * Binlog event listener that filters row events down to the configured
 * "database.table" names and logs each affected row as JSON.
 *
 * @Description 自定义监听数据
 * @Author jack
 * @Date 2024/8/22 15:05
 */
@Slf4j
public class CustomerBinlogListener implements BinaryLogClient.EventListener {
    // binlog table-id -> "database.table"; a TABLE_MAP event always precedes the
    // row events that reference the same table id.
    private final Map<Long, String> tableMap = Maps.newHashMap();
    // Fully-qualified "database.table" names this listener watches.
    private final List<String> databaseTables;

    public CustomerBinlogListener(List<String> databaseTables) {
        this.databaseTables = databaseTables;
    }

    @Override
    public void onEvent(Event event) {
        // binlog事件
        EventData data = event.getData();
        if (data == null) {
            return;
        }
        if (data instanceof TableMapEventData) {
            // Remember the id -> name mapping used by subsequent row events.
            TableMapEventData tableMapEventData = (TableMapEventData) data;
            tableMap.put(tableMapEventData.getTableId(),
                    tableMapEventData.getDatabase() + "." + tableMapEventData.getTable());
        }
        // update数据
        else if (data instanceof UpdateRowsEventData) {
            UpdateRowsEventData updateRowsEventData = (UpdateRowsEventData) data;
            String tableName = watchedTable(updateRowsEventData.getTableId());
            if (tableName != null) {
                String eventKey = tableName + ".update";
                for (Map.Entry<Serializable[], Serializable[]> row : updateRowsEventData.getRows()) {
                    // getValue() is the after-image of the updated row.
                    String msg = JSON.toJSONString(new BinlogDto(eventKey, row.getValue()));
                    log.info("binlog修改日志:{}", msg);
                }
            }
        }
        // insert数据
        else if (data instanceof WriteRowsEventData) {
            WriteRowsEventData writeRowsEventData = (WriteRowsEventData) data;
            String tableName = watchedTable(writeRowsEventData.getTableId());
            if (tableName != null) {
                String eventKey = tableName + ".insert";
                for (Serializable[] row : writeRowsEventData.getRows()) {
                    String msg = JSON.toJSONString(new BinlogDto(eventKey, row));
                    log.info("binlog插入日志:{}", msg);
                }
            }
        }
        // delete数据
        else if (data instanceof DeleteRowsEventData) {
            DeleteRowsEventData deleteRowsEventData = (DeleteRowsEventData) data;
            String tableName = watchedTable(deleteRowsEventData.getTableId());
            if (tableName != null) {
                String eventKey = tableName + ".delete";
                for (Serializable[] row : deleteRowsEventData.getRows()) {
                    String msg = JSON.toJSONString(new BinlogDto(eventKey, row));
                    log.info("binlog删除日志:{}", msg);
                }
            }
        }
    }

    /**
     * Returns the "database.table" name for the given binlog table id when it is
     * one of the watched tables, otherwise {@code null}.
     */
    private String watchedTable(long tableId) {
        String tableName = tableMap.get(tableId);
        return (tableName != null && databaseTables.contains(tableName)) ? tableName : null;
    }
}

可以结合kafka等消息队列,推送数据到es等数据仓库

最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
平台声明:文章内容(如有图片或视频亦包括在内)由作者上传并发布,文章内容仅代表作者本人观点,简书系信息发布平台,仅提供信息存储服务。

推荐阅读更多精彩内容