Database synchronization software: download, installation, and configuration
- Server: canal.deployer
- Client: canal.adapter
- Admin console: canal.admin
Method / Steps
1. Configure the MySQL data source
1.1 Enable binlog
Locate the MySQL installation directory (mysql --help | grep my.cnf) and edit my.cnf:
[mysqld]
# enable binlog
log-bin = mysql-bin
# use ROW format
binlog-format = ROW
# required for MySQL replication; must not collide with canal's slaveId
server_id = 1
- Verify that binlog is enabled
Run the following SQL (OFF means it is disabled; before MySQL 8.0 binlog is off by default and has to be enabled manually):
show variables like 'log_bin';
Alternatively, query the session_variables table.
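For instance, on MySQL 5.7 or later, where this table lives in performance_schema, the check would look like this:
SELECT VARIABLE_NAME, VARIABLE_VALUE FROM performance_schema.session_variables WHERE VARIABLE_NAME = 'log_bin';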
1.2 Create the canal account
CREATE USER canal IDENTIFIED BY 'canal';
GRANT SELECT, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO 'canal'@'%';
FLUSH PRIVILEGES;
2. Configure the server: canal.deployer
2.1 Unpack and configure (TCP mode)
2.1.1 Edit the canal configuration (conf\canal.properties)
# tcp, kafka, rocketMQ, rabbitMQ, pulsarMQ (the default server mode is tcp)
canal.serverMode = tcp
# target canal instances (the default instance name is example); adding another instance creates a folder and config for it under \conf
canal.destinations = example
2.1.2 Edit the default example instance configuration
Unpack the canal.deployer-1.1.6 package and adjust conf\example\instance.properties with your own settings:
# position info
# replace with your own source (master) database address, username, and password
canal.instance.master.address=192.168.11.24:3306
canal.instance.dbUsername=root
canal.instance.dbPassword=useradmin
# default database name
canal.instance.defaultDatabaseName=ldd_new_test
# watch only the saas_member_course table under the ldd_new_test database
# canal.instance.filter.regex=ldd_new_test\\.saas_member_course
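The filter is a Perl-style regular expression: dots are escaped with a double backslash and multiple patterns are separated by commas. A few common patterns, following canal's documented syntax:
# watch every table in every schema (the default)
canal.instance.filter.regex=.*\\..*
# watch every table in the ldd_new_test schema
canal.instance.filter.regex=ldd_new_test\\..*
# watch several explicit tables
canal.instance.filter.regex=ldd_new_test.saas_member_course,ldd_new_test.test_user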
2.2 Start the server
- On Windows, double-click startup.bat in the \bin directory
- Check the instance log file logs\example\example.log
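On Linux or macOS the deployer ships shell equivalents of these scripts; a minimal sketch, run from the unpacked package root:
sh bin/startup.sh
tail -f logs/example/example.log
# sh bin/stop.sh stops the server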
3. Configure the adapter middleware (canal.adapter)
3.1 Service configuration
- Open \conf\application.yml
# connection mode to the canal server
canal.conf:
  mode: tcp # tcp kafka rocketMQ rabbitMQ
  # source data source configuration
  srcDataSources:
    defaultDS:
      url: jdbc:mysql://192.168.11.24:3306/ldd_new_test?useUnicode=true
      username: root
      password: useradmin
  canalAdapters:
  # canal instance name, or the MQ topic name
  - instance: example
    groups:
    - groupId: g1
      outerAdapters:
      - name: logger
      # for ES 7 use the outer adapter configs under \conf\es7; 9200 is the REST port (prefix the host with http), 9300 is the transport port
      - name: es7
        # key identifying this outer adapter; referenced by outerAdapterKey in the es7 mapping files
        key: adapter-es-key124
        hosts: http://10.10.10.124:9200 # 127.0.0.1:9200 for rest mode
        properties:
          mode: rest # transport or rest
          # security.auth: test:123456 # only used for rest mode
          # the cluster name can be seen by opening the ES REST endpoint in a browser
          cluster.name: elasticsearch
3.2 Data synchronization configuration
- Database initialization SQL
CREATE TABLE `test_user` (
`test_user_id` bigint(35) NOT NULL AUTO_INCREMENT,
`user_name` varchar(55) DEFAULT NULL,
`age` int(5) DEFAULT NULL,
`address` varchar(155) DEFAULT NULL,
`gmt_create` datetime DEFAULT NULL,
PRIMARY KEY (`test_user_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
- Create the ES index
PUT /test_user
{
"mappings": {
"properties": {
"test_user_id": {
"type": "long"
},
"user_name": {
"type": "text"
},
"age": {
"type": "integer"
},
"address": {
"type": "text"
},
"role_id": {
"type": "long"
},
"gmt_create": {
"type": "date"
}
}
}
}
- Sync mapping config conf\es7\test-user.yml
# must match the key set under canalAdapters -> groups -> outerAdapters in application.yml
outerAdapterKey: adapter-es-key124
dataSourceKey: defaultDS
# target canal instance name
destination: example
groupId: g1
esMapping:
  _index: test_user
  _id: _id
  upsert: true
  pk: test_user_id
  sql: "select test_user_id AS _id, test_user_id, user_name, age, address, gmt_create FROM test_user"
  # objFields:
  #   _labels: array:;
  etlCondition: "where test_user_id = {}"
  commitBatch: 3000
3.3 Start the adapter
- Double-click startup.bat in the \bin directory
- Seeing "Subscribe destination: example succeed" in the log means the subscription succeeded
3.4 Full / incremental updates
- A full (ETL) update is triggered by sending a POST request to the adapter
- Send the request with Postman:
http://127.0.0.1:8081/etl/es7/test-user.yml
or with curl:
curl -X POST http://127.0.0.1:8081/etl/es7/test-user.yml
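The etlCondition defined in test-user.yml ("where test_user_id = {}") is filled from a params form field on the same endpoint, so a partial ETL for a single row would look roughly like this (the id value 1 is only an illustration); incremental updates, by contrast, need no manual trigger once the adapter has subscribed to the binlog:
curl -X POST http://127.0.0.1:8081/etl/es7/test-user.yml -d "params=1"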
4. Java client configuration
- pom dependencies
<!-- data synchronization middleware; keep the version consistent with the installed canal server where possible -->
<dependency>
<groupId>com.alibaba.otter</groupId>
<artifactId>canal.client</artifactId>
<version>1.1.4</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>commons-dbutils</groupId>
<artifactId>commons-dbutils</artifactId>
<version>1.2</version>
</dependency>
- Spring Boot entry point
@SpringBootApplication
@EnableDiscoveryClient
@EnableFeignClients
public class SyncApplication implements CommandLineRunner {
public static void main(String[] args) {
SpringApplication.run(SyncApplication.class);
}
@Resource
CanalClient canalClient;
@Override
public void run(String... args) throws Exception {
canalClient.run();
}
}
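Note that canalClient.run() below loops forever, so calling it straight from CommandLineRunner keeps the startup thread busy. A minimal alternative sketch (the thread name is illustrative) hands the loop to a dedicated daemon thread inside the same run method:
@Override
public void run(String... args) {
    // run the canal pull loop on its own thread so Spring Boot startup is not blocked
    Thread worker = new Thread(canalClient::run, "canal-client-worker");
    worker.setDaemon(true);
    worker.start();
}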
- CanalClient: connects to the canal server and replays row changes as SQL
// imports required by this class
import com.alibaba.otter.canal.client.CanalConnector;
import com.alibaba.otter.canal.client.CanalConnectors;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.Message;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.commons.dbutils.DbUtils;
import org.apache.commons.dbutils.QueryRunner;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.sql.DataSource;
import java.net.InetSocketAddress;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

@Component
public class CanalClient {
// queue of SQL statements waiting to be executed
private final Queue<String> SQL_QUEUE = new ConcurrentLinkedQueue<>();
@Resource
private DataSource dataSource;
/**
* Pull binlog entries from the canal server and replay them into the target data source.
*/
public void run() {
// the canal server's default port is 11111
CanalConnector connector = CanalConnectors.newSingleConnector(
new InetSocketAddress("127.0.0.1"/* canal server IP */, 11111),
"example", "", "");
int batchSize = 1000;
try {
connector.connect();
connector.subscribe(".*\\..*");
connector.rollback();
try {
while (true) {
//fetch up to batchSize entries from the master without acknowledging; takes whatever is available
Message message = connector.getWithoutAck(batchSize);
long batchId = message.getId();
int size = message.getEntries().size();
if (batchId == -1 || size == 0) {
Thread.sleep(1000);
} else {
dataHandle(message.getEntries());
}
connector.ack(batchId);
//once the queued SQL count reaches the threshold, flush and execute the statements
if (SQL_QUEUE.size() >= 1) {
executeQueueSql();
}
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (InvalidProtocolBufferException e) {
e.printStackTrace();
}
} finally {
connector.disconnect();
}
}
/**
* Drain the queue and execute each accumulated SQL statement.
*/
public void executeQueueSql() {
int size = SQL_QUEUE.size();
for (int i = 0; i < size; i++) {
String sql = SQL_QUEUE.poll();
System.out.println("[sql]----> " + sql);
this.execute(sql);
}
}
/**
* Dispatch each row-change entry to the matching SQL builder by event type.
*
* @param entrys
*/
private void dataHandle(List<CanalEntry.Entry> entrys) throws InvalidProtocolBufferException {
for (CanalEntry.Entry entry : entrys) {
if (CanalEntry.EntryType.ROWDATA == entry.getEntryType()) {
CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
CanalEntry.EventType eventType = rowChange.getEventType();
if (eventType == CanalEntry.EventType.DELETE) {
saveDeleteSql(entry);
} else if (eventType == CanalEntry.EventType.UPDATE) {
saveUpdateSql(entry);
} else if (eventType == CanalEntry.EventType.INSERT) {
saveInsertSql(entry);
}
}
}
}
/**
* Build an UPDATE statement from the row change and add it to the queue.
*
* @param entry
*/
private void saveUpdateSql(CanalEntry.Entry entry) {
try {
CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
List<CanalEntry.RowData> rowDatasList = rowChange.getRowDatasList();
for (CanalEntry.RowData rowData : rowDatasList) {
List<CanalEntry.Column> newColumnList = rowData.getAfterColumnsList();
StringBuffer sql = new StringBuffer("update " + entry.getHeader().getTableName() + " set ");
for (int i = 0; i < newColumnList.size(); i++) {
sql.append(" " + newColumnList.get(i).getName()
+ " = '" + newColumnList.get(i).getValue() + "'");
if (i != newColumnList.size() - 1) {
sql.append(",");
}
}
sql.append(" where ");
List<CanalEntry.Column> oldColumnList = rowData.getBeforeColumnsList();
for (CanalEntry.Column column : oldColumnList) {
if (column.getIsKey()) {
//only single-column primary keys are supported for now
sql.append(column.getName() + "=" + column.getValue());
break;
}
}
SQL_QUEUE.add(sql.toString());
}
} catch (InvalidProtocolBufferException e) {
e.printStackTrace();
}
}
/**
* Build a DELETE statement from the row change and add it to the queue.
*
* @param entry
*/
private void saveDeleteSql(CanalEntry.Entry entry) {
try {
CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
List<CanalEntry.RowData> rowDatasList = rowChange.getRowDatasList();
for (CanalEntry.RowData rowData : rowDatasList) {
List<CanalEntry.Column> columnList = rowData.getBeforeColumnsList();
StringBuffer sql = new StringBuffer("delete from " + entry.getHeader().getTableName() + " where ");
for (CanalEntry.Column column : columnList) {
if (column.getIsKey()) {
//only single-column primary keys are supported for now
sql.append(column.getName() + "=" + column.getValue());
break;
}
}
SQL_QUEUE.add(sql.toString());
}
} catch (InvalidProtocolBufferException e) {
e.printStackTrace();
}
}
/**
* Build an INSERT statement from the row change and add it to the queue.
*
* @param entry
*/
private void saveInsertSql(CanalEntry.Entry entry) {
try {
CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
List<CanalEntry.RowData> rowDatasList = rowChange.getRowDatasList();
for (CanalEntry.RowData rowData : rowDatasList) {
List<CanalEntry.Column> columnList = rowData.getAfterColumnsList();
StringBuffer sql = new StringBuffer("insert into " + entry.getHeader().getTableName() + " (");
for (int i = 0; i < columnList.size(); i++) {
sql.append(columnList.get(i).getName());
if (i != columnList.size() - 1) {
sql.append(",");
}
}
sql.append(") VALUES (");
for (int i = 0; i < columnList.size(); i++) {
sql.append("'" + columnList.get(i).getValue() + "'");
if (i != columnList.size() - 1) {
sql.append(",");
}
}
sql.append(")");
SQL_QUEUE.add(sql.toString());
}
} catch (InvalidProtocolBufferException e) {
e.printStackTrace();
}
}
/**
* Execute a single SQL statement against the target data source.
* @param sql
*/
public void execute(String sql) {
Connection con = null;
try {
if(null == sql) return;
con = dataSource.getConnection();
QueryRunner qr = new QueryRunner();
// int row = qr.execute(con, sql);
int row = qr.update(con, sql);
System.out.println("update: "+ row);
} catch (SQLException e) {
e.printStackTrace();
} finally {
DbUtils.closeQuietly(con);
}
}
}