Syncing MySQL data to ES
Two common approaches:
1. Logstash
Good for syncing existing/historical data. The fastest sync interval is once per minute, so use it with caution if you need near-real-time updates.
2. Canal
Real-time sync; not demonstrated in this article.
Syncing with Logstash
Logstash characteristics:
- No coding required; installing and configuring Logstash is enough.
- Anything expressible in SQL can be synced, since the data is pulled with SQL queries.
- Supports full sync on every run, or incremental sync driven by a specific column (for example an auto-increment ID or a last-modified time).
- The sync frequency is configurable, but the fastest schedule is once per minute; be careful if you need low latency.
- Rows physically deleted in MySQL are not deleted from ES; add a logical-delete column such as IsDelete to the table instead (see the sketch after this list).
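Deletes can still be propagated if the query returns the logical-delete flag and the output routes flagged rows to a delete action. A minimal sketch of this idea (the isDelete field, index name, and id field here are assumptions, not part of the configuration shown later):

output {
  # rows flagged as logically deleted are removed from ES instead of indexed
  if [isDelete] == 1 {
    elasticsearch {
      hosts       => ["localhost:9200"]
      index       => "product"
      document_id => "%{id}"
      action      => "delete"
    }
  } else {
    elasticsearch {
      hosts       => ["localhost:9200"]
      index       => "product"
      document_id => "%{id}"
    }
  }
}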
How it works
On a schedule, Logstash queries the database and writes the results into ES.
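The schedule option of the jdbc input takes a cron-style expression, so the polling interval is easy to adjust. A few hedged examples (all other jdbc settings omitted; see the full configuration below):

jdbc {
  # every minute -- the fastest interval
  schedule => "* * * * *"
  # every 5 minutes
  # schedule => "*/5 * * * *"
  # every day at 02:00
  # schedule => "0 2 * * *"
  # ... other jdbc settings as in the configuration below
}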
Logstash setup steps
1. Download and install
Make sure the Logstash version matches your ES version.
Download: https://www.elastic.co/cn/downloads/past-releases#logstash
2. Configure
- Create a "mysql" folder alongside the bin directory
- Inside the new "mysql" folder, create jdbc.conf and last_time.txt, and drop in the MySQL JDBC driver jar (see the layout sketch after this list)
- Edit the jdbc.conf file
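After this step the Logstash directory should look roughly like this (the version number and driver jar name will differ on your machine):

logstash-7.14.2/
├── bin/
│   └── logstash.bat
├── mysql/
│   ├── jdbc.conf
│   ├── last_time.txt
│   └── mysql-connector-java-5.1.35.jar
└── ...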
Single-table sync
input {
stdin {}
jdbc {
type => "jdbc"
# Database connection URL
jdbc_connection_string => "jdbc:mysql://192.168.1.1:3306/TestDB?characterEncoding=UTF-8&autoReconnect=true"
# Database credentials
jdbc_user => "username"
jdbc_password => "pwd"
# Path to the MySQL JDBC driver jar
jdbc_driver_library => "mysql/mysql-connector-java-5.1.34.jar"
# the name of the driver class for mysql
jdbc_driver_class => "com.mysql.jdbc.Driver"
# Number of connection retry attempts
connection_retry_attempts => "3"
# Validate the database connection before use (default false)
jdbc_validate_connection => "true"
# Connection validation timeout in seconds (default 3600)
jdbc_validation_timeout => "3600"
# Enable paged queries (default false)
jdbc_paging_enabled => "true"
# Rows per page (default 100000; lower it if rows are wide and updates are frequent)
jdbc_page_size => "500"
# statement holds the query SQL; for complex SQL, point statement_filepath at a .sql file instead
# sql_last_value is a built-in variable holding the tracking_column value of the last row from the previous run (lastTime here); on the first run it defaults to 1970-01-01 for timestamp columns
# statement_filepath => "mysql/jdbc.sql"
# Note: datetime values read from MySQL may not match the date format in the ES mapping, which makes indexing fail, so convert them with DATE_FORMAT
statement => "SELECT t.id as id,t.`name` as name,t.num as num,t.create_by as createBy,DATE_FORMAT(t.create_time,'%Y-%m-%d %H:%i:%s') as createTime,t.update_by as updateBy,DATE_FORMAT(t.update_time,'%Y-%m-%d %H:%i:%s') as updateTime ,DATE_FORMAT(t.last_time,'%Y-%m-%d %H:%i:%s') as lastTime FROM product as t WHERE DATE_FORMAT(t.last_time,'%Y-%m-%d %H:%i:%s') >= DATE_FORMAT(:sql_last_value,'%Y-%m-%d %H:%i:%s') order by t.last_time asc"
# Lowercase column names (default true); set to false if you rely on camelCase aliases for (de)serialization
lowercase_column_names => false
# Value can be any of: fatal, error, warn, info, debug; default info
sql_log_level => warn
#
# Record the last run: when true, the tracking_column value of the last row is saved to the file given by last_run_metadata_path
record_last_run => true
# Set to true to track a column from the query result; if false, the timestamp of the last run is tracked instead
use_column_value => true
# Column to track for incremental sync; must match a column (or alias) returned by the statement
tracking_column => "lastTime"
# Value can be any of: numeric, timestamp; default is "numeric"
tracking_column_type => timestamp
# File where the last tracked value is stored
last_run_metadata_path => "mysql/last_time.txt"
# Whether to wipe the last_run_metadata_path record; must be false for incremental sync
clean_run => false
#
# Sync schedule in cron format (minute hour day-of-month month day-of-week); this runs every minute, the fastest interval
schedule => "* * * * *"
}
}
filter {
json {
source => "message"
remove_field => ["message"]
}
# convert changes a field's type; here TotalMoney is cast to float
mutate {
convert => {
"TotalMoney" => "float"
}
}
}
output {
elasticsearch {
# host => "192.168.1.1"
# port => "9200"
# ES cluster addresses
hosts => ["192.168.1.1:9200", "192.168.1.2:9200", "192.168.1.3:9200"]
# Index name (must be lowercase)
index => "consumption"
# Unique document id (use the table's primary key; here the id column returned by the query)
document_id => "%{id}"
}
stdout {
codec => json_lines
}
}
Multi-table sync
input {
stdin {}
jdbc {
# For multi-table sync, type distinguishes the tables; naming it "dbname_tablename" is recommended, and each jdbc block needs its own type
type => "TestDB_DetailTab"
# Other settings omitted here; see the single-table configuration
# ...
# ...
# File where the last tracked value is stored
last_run_metadata_path => "mysql/last_id.txt"
# Whether to wipe the last_run_metadata_path record; must be false for incremental sync
clean_run => false
#
# Sync schedule in cron format (minute hour day-of-month month day-of-week); runs every minute
schedule => "* * * * *"
}
jdbc {
# For multi-table sync, type distinguishes the tables; naming it "dbname_tablename" is recommended, and each jdbc block needs its own type
type => "TestDB_Tab2"
# Each jdbc block should use a different last_run_metadata_path so the runs do not interfere with each other
# Other settings omitted here
# ...
# ...
}
}
filter {
json {
source => "message"
remove_field => ["message"]
}
}
output {
# The type here must match the type set in the corresponding jdbc input block
if [type] == "TestDB_DetailTab" {
elasticsearch {
# host => "192.168.1.1"
# port => "9200"
# ES cluster addresses
hosts => ["192.168.1.1:9200", "192.168.1.2:9200", "192.168.1.3:9200"]
# Index name (must be lowercase)
index => "detailtab1"
# Unique document id (use the table's primary key, e.g. KeyId)
document_id => "%{KeyId}"
}
}
if [type] == "TestDB_Tab2" {
elasticsearch {
# host => "192.168.1.1"
# port => "9200"
# ES cluster addresses
hosts => ["192.168.1.1:9200", "192.168.1.2:9200", "192.168.1.3:9200"]
# Index name (must be lowercase)
index => "detailtab2"
# Unique document id (use the table's primary key, e.g. KeyId)
document_id => "%{KeyId}"
}
}
stdout {
codec => json_lines
}
}
- Start Logstash
Run the command below from the directory that contains bin:
[Windows] bin\logstash.bat -f mysql\jdbc.conf
[Linux] nohup ./bin/logstash -f mysql/jdbc.conf &
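Before the first start it can be worth checking the config file for syntax errors; the --config.test_and_exit flag parses the pipeline and exits without running it:

[Windows] bin\logstash.bat -f mysql\jdbc.conf --config.test_and_exit
[Linux] ./bin/logstash -f mysql/jdbc.conf --config.test_and_exit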
Example
Create the ES index in advance. I use easy-es, which creates the index automatically when the application starts.
- Database table and initial data
CREATE TABLE `product` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'primary key',
`name` varchar(255) DEFAULT NULL COMMENT 'name',
`num` int(10) DEFAULT NULL COMMENT 'quantity',
`last_time` datetime DEFAULT NULL COMMENT 'last modified time',
`create_time` datetime DEFAULT NULL COMMENT 'creation time',
`update_time` datetime DEFAULT NULL COMMENT 'update time',
`create_by` varchar(255) DEFAULT NULL COMMENT 'created by',
`update_by` varchar(255) DEFAULT NULL COMMENT 'updated by',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4;
INSERT INTO `es`.`product`(`id`, `name`, `num`, `last_time`, `create_time`, `update_time`, `create_by`, `update_by`) VALUES (1, '香甜水蜜桃', 50, '2022-08-12 14:42:17', '2022-08-12 10:47:56', NULL, 'qts', NULL);
INSERT INTO `es`.`product`(`id`, `name`, `num`, `last_time`, `create_time`, `update_time`, `create_by`, `update_by`) VALUES (2, '紅紅的火龍果', 90, '2022-08-12 15:17:36', '2022-08-12 14:12:41', NULL, 'qts', NULL);
- ES index
{
"product": {
"aliases": {
"ee_default_alias": {}
},
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
},
"@version": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"createBy": {
"type": "keyword"
},
"createTime": {
"type": "date",
"format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"
},
"id": {
"type": "long"
},
"lastTime": {
"type": "date",
"format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"
},
"name": {
"type": "text",
"analyzer": "ik_smart",
"search_analyzer": "ik_max_word"
},
"num": {
"type": "integer"
},
"type": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
}
}
},
"settings": {
"index": {
"routing": {
"allocation": {
"include": {
"_tier_preference": "data_content"
}
}
},
"number_of_shards": "1",
"provided_name": "product",
"creation_date": "1660287566474",
"number_of_replicas": "1",
"uuid": "ey2A7AYKQB2OBvBUN-fN3Q",
"version": {
"created": "7140299"
}
}
}
}
}
- SQL
SELECT
t.id AS id,
t.`name` AS NAME,
t.num AS num,
t.create_by AS createBy,
DATE_FORMAT( t.create_time, '%Y-%m-%d %H:%i:%s' ) AS createTime,
t.update_by AS updateBy,
DATE_FORMAT( t.update_time, '%Y-%m-%d %H:%i:%s' ) AS updateTime,
DATE_FORMAT( t.last_time, '%Y-%m-%d %H:%i:%s' ) AS lastTime
FROM
product AS t
WHERE
DATE_FORMAT( t.last_time, '%Y-%m-%d %H:%i:%s' ) >= DATE_FORMAT(:sql_last_value, '%Y-%m-%d %H:%i:%s' )
ORDER BY
t.last_time ASC
- jdbc.conf
input {
stdin {}
jdbc {
type => "jdbc"
# Database connection URL
jdbc_connection_string => "jdbc:mysql://localhost:3306/es?characterEncoding=UTF-8&autoReconnect=true"
# Database credentials
jdbc_user => "root"
jdbc_password => "root"
# Path to the MySQL JDBC driver jar
jdbc_driver_library => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\mysql-connector-java-5.1.35.jar"
# the name of the driver class for mysql
jdbc_driver_class => "com.mysql.jdbc.Driver"
# Number of connection retry attempts
connection_retry_attempts => "3"
# Validate the database connection before use (default false)
jdbc_validate_connection => "true"
# Connection validation timeout in seconds (default 3600)
jdbc_validation_timeout => "3600"
# Enable paged queries (default false)
jdbc_paging_enabled => "true"
# Rows per page (default 100000; lower it if rows are wide and updates are frequent)
jdbc_page_size => "500"
# statement holds the query SQL; for complex SQL, point statement_filepath at a .sql file instead
# sql_last_value is a built-in variable holding the tracking_column value of the last row from the previous run (lastTime here); on the first run it defaults to 1970-01-01 for timestamp columns
# statement_filepath => "mysql/jdbc.sql"
statement => "SELECT t.id as id,t.`name` as name,t.num as num,t.create_by as createBy,DATE_FORMAT(t.create_time,'%Y-%m-%d %H:%i:%s') as createTime,t.update_by as updateBy,DATE_FORMAT(t.update_time,'%Y-%m-%d %H:%i:%s') as updateTime ,DATE_FORMAT(t.last_time,'%Y-%m-%d %H:%i:%s') as lastTime FROM product as t WHERE DATE_FORMAT(t.last_time,'%Y-%m-%d %H:%i:%s') >= DATE_FORMAT(:sql_last_value,'%Y-%m-%d %H:%i:%s') order by t.last_time asc"
# Lowercase column names (default true); set to false if you rely on camelCase aliases for (de)serialization
lowercase_column_names => false
# Value can be any of: fatal, error, warn, info, debug; default info
sql_log_level => warn
#
# Record the last run: when true, the tracking_column value of the last row is saved to the file given by last_run_metadata_path
record_last_run => true
# Set to true to track a column from the query result; if false, the timestamp of the last run is tracked instead
use_column_value => true
# Column to track for incremental sync; must match a column (or alias) returned by the statement
tracking_column => "lastTime"
# Value can be any of: numeric, timestamp; default is "numeric"
tracking_column_type => timestamp
# File where the last tracked value is stored
last_run_metadata_path => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\last_time.txt"
# Whether to wipe the last_run_metadata_path record; must be false for incremental sync
clean_run => false
#
# Sync schedule in cron format (minute hour day-of-month month day-of-week); this runs every minute
schedule => "* * * * *"
}
}
filter {
json {
source => "message"
remove_field => ["message"]
}
# convert changes a field's type; here TotalMoney is cast to float
mutate {
convert => {
"TotalMoney" => "float"
}
}
}
output {
elasticsearch {
# host => "localhost"
# port => "9200"
# ES cluster addresses
hosts => ["localhost:9200"]
# Index name (must be lowercase)
index => "product"
# Unique document id (use the table's primary key)
document_id => "%{id}"
}
stdout {
codec => json_lines
}
}
- start.bat, placed in the same directory as bin
bin\logstash.bat -f mysql\jdbc.conf
- Double-click start.bat to launch Logstash
- Test
http://localhost:9200/product/_search
- Result
{
"took": 177,
"timed_out": false,
"_shards": {
"total": 1,
"successful": 1,
"skipped": 0,
"failed": 0
},
"hits": {
"total": {
"value": 2,
"relation": "eq"
},
"max_score": 1.0,
"hits": [
{
"_index": "product",
"_type": "_doc",
"_id": "2",
"_score": 1.0,
"_source": {
"id": 2,
"lastTime": "2022-08-12 15:17:36",
"name": "紅紅的火龍果",
"updateBy": null,
"@version": "1",
"@timestamp": "2022-08-12T07:56:00.108Z",
"type": "jdbc",
"createBy": "qts",
"createTime": "2022-08-12 14:12:41",
"updateTime": null,
"num": 90
}
},
{
"_index": "product",
"_type": "_doc",
"_id": "1",
"_score": 1.0,
"_source": {
"id": 1,
"lastTime": "2022-08-12 15:54:58",
"name": "香甜水蜜桃",
"updateBy": null,
"@version": "1",
"@timestamp": "2022-08-12T07:57:00.212Z",
"type": "jdbc",
"createBy": "qts",
"createTime": "2022-08-12 10:47:56",
"updateTime": null,
"num": 50
}
}
]
}
}
Supplement
1. Building nested objects in the filter
- Index
{
"document": {
"aliases": {
"ee_default_alias": {}
},
"mappings": {
"properties": {
"commentList": { <= 對此字段進行封裝
"type": "nested",
"properties": {
"commentTitle": {
"type": "text",
"analyzer": "ik_smart",
"search_analyzer": "ik_max_word"
}
}
},
"content": {
"type": "text",
"analyzer": "ik_smart",
"search_analyzer": "ik_max_word"
},
"id": {
"type": "long"
},
"title": {
"type": "keyword"
},
"type": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"update_time": {
"type": "date"
}
}
},
"settings": {
"index": {
"routing": {
"allocation": {
"include": {
"_tier_preference": "data_content"
}
}
},
"number_of_shards": "1",
"provided_name": "document",
"creation_date": "1660543629625",
"number_of_replicas": "1",
"uuid": "4j4JB89zSiaUFkl-G0UG2g",
"version": {
"created": "7140299"
}
}
}
}
}
- Corresponding filter configuration
# Only apply this enrichment to events whose input type is "document"
# Note: without the if guard, when several inputs are configured, every event that carries an id field would be enriched (and the extra lookup query executed)
filter {
if [type] == "document"
{
# The target below maps the sub-query result onto the commentList field of the document index
jdbc_streaming {
jdbc_driver_library => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\mysql-connector-java-5.1.35.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://localhost:3306/es?characterEncoding=UTF-8&autoReconnect=true"
jdbc_user => "root"
jdbc_password => "root"
# sensor_identifier is the parameter name used in the statement; "id" refers to the id column returned by the input SQL
parameters => { "sensor_identifier" => "id"}
# statement_filepath cannot be used here to load the SQL from a file; it causes an error
#statement_filepath => "/etc/logstash/pipeline/sql/filter_sku.sql"
# Query the child (comment) table by the parent row's id
statement => "SELECT commentTitle FROM comment as t WHERE doc_id = :sensor_identifier"
# target commentList matches the commentList field in the ES index; if the field does not exist it is created with a default mapping
target => "commentList"
}
}
}
2. Removing auto-generated fields in the filter
filter {
# Drop the unneeded @timestamp and @version fields
mutate {
remove_field => ["@timestamp","@version"]
}
}
3. Converting field types in the filter
filter {
# convert changes a field's type; here TotalMoney is cast to float
mutate {
convert => {
"TotalMoney" => "float"
}
}
}
Example conf file
Multi-table sync with nested-object enrichment
input {
stdin {}
jdbc {
type => "product"
# Database connection URL
jdbc_connection_string => "jdbc:mysql://localhost:3306/es?characterEncoding=UTF-8&autoReconnect=true"
# Database credentials
jdbc_user => "root"
jdbc_password => "root"
# Path to the MySQL JDBC driver jar
jdbc_driver_library => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\mysql-connector-java-5.1.35.jar"
# the name of the driver class for mysql
jdbc_driver_class => "com.mysql.jdbc.Driver"
# Number of connection retry attempts
connection_retry_attempts => "3"
# Validate the database connection before use (default false)
jdbc_validate_connection => "true"
# Connection validation timeout in seconds (default 3600)
jdbc_validation_timeout => "3600"
# Enable paged queries (default false)
jdbc_paging_enabled => "true"
# Rows per page (default 100000; lower it if rows are wide and updates are frequent)
jdbc_page_size => "500"
# statement holds the query SQL; for complex SQL, point statement_filepath at a .sql file instead
# sql_last_value is a built-in variable holding the tracking_column value of the last row from the previous run (lastTime here); on the first run it defaults to 1970-01-01 for timestamp columns
# statement_filepath => "mysql/jdbc.sql"
# statement => "SELECT t.id as id,t.`name` as name,t.num as num,t.create_by as createBy,DATE_FORMAT(t.create_time,'%Y-%m-%d %H:%i:%s') as createTime,t.update_by as updateBy,DATE_FORMAT(t.update_time,'%Y-%m-%d %H:%i:%s') as updateTime ,DATE_FORMAT(t.last_time,'%Y-%m-%d %H:%i:%s') as lastTime FROM product as t WHERE DATE_FORMAT(t.last_time,'%Y-%m-%d %H:%i:%s') >= DATE_FORMAT(:sql_last_value,'%Y-%m-%d %H:%i:%s') order by t.last_time asc"
statement_filepath => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\jdbc.sql"
# Lowercase column names (default true); set to false if you rely on camelCase aliases for (de)serialization
lowercase_column_names => false
# Value can be any of: fatal, error, warn, info, debug; default info
sql_log_level => warn
#
# Record the last run: when true, the tracking_column value of the last row is saved to the file given by last_run_metadata_path
record_last_run => true
# Set to true to track a column from the query result; if false, the timestamp of the last run is tracked instead
use_column_value => true
# Column to track for incremental sync; must match a column (or alias) returned by the statement
tracking_column => "lastTime"
# Value can be any of: numeric, timestamp; default is "numeric"
tracking_column_type => timestamp
# File where the last tracked value is stored
last_run_metadata_path => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\last_time.txt"
# Whether to wipe the last_run_metadata_path record; must be false for incremental sync
clean_run => false
#
# Sync schedule in cron format (minute hour day-of-month month day-of-week); this runs every minute
schedule => "* * * * *"
}
jdbc {
type => "document"
# Database connection URL
jdbc_connection_string => "jdbc:mysql://localhost:3306/es?characterEncoding=UTF-8&autoReconnect=true"
# Database credentials
jdbc_user => "root"
jdbc_password => "root"
# Path to the MySQL JDBC driver jar
jdbc_driver_library => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\mysql-connector-java-5.1.35.jar"
# the name of the driver class for mysql
jdbc_driver_class => "com.mysql.jdbc.Driver"
# Number of connection retry attempts
connection_retry_attempts => "3"
# Validate the database connection before use (default false)
jdbc_validate_connection => "true"
# Connection validation timeout in seconds (default 3600)
jdbc_validation_timeout => "3600"
# Enable paged queries (default false)
jdbc_paging_enabled => "true"
# Rows per page (default 100000; lower it if rows are wide and updates are frequent)
jdbc_page_size => "500"
# statement holds the query SQL; for complex SQL, point statement_filepath at a .sql file instead
# sql_last_value is a built-in variable holding the tracking_column value of the last row from the previous run (update_time here); on the first run it defaults to 1970-01-01 for timestamp columns
# statement_filepath => "mysql/jdbc.sql"
statement => "SELECT t.id,t.title,t.content,DATE_FORMAT(t.update_time,'%Y-%m-%d %H:%i:%s') as update_time FROM document as t WHERE DATE_FORMAT(t.update_time,'%Y-%m-%d %H:%i:%s') >= DATE_FORMAT(:sql_last_value,'%Y-%m-%d %H:%i:%s')"
# statement_filepath => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\jdbc.sql"
# Lowercase column names (default true); set to false if you rely on camelCase aliases for (de)serialization
lowercase_column_names => false
# Value can be any of: fatal, error, warn, info, debug; default info
sql_log_level => warn
#
# Record the last run: when true, the tracking_column value of the last row is saved to the file given by last_run_metadata_path
record_last_run => true
# Set to true to track a column from the query result; if false, the timestamp of the last run is tracked instead
use_column_value => true
# Column to track for incremental sync; must match a column (or alias) returned by the statement
tracking_column => "update_time"
# Value can be any of: numeric, timestamp; default is "numeric"
tracking_column_type => timestamp
# File where the last tracked value is stored -- note: use a new, separate file for this table
last_run_metadata_path => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\doc_last_time.txt"
# Whether to wipe the last_run_metadata_path record; must be false for incremental sync
clean_run => false
#
# Sync schedule in cron format (minute hour day-of-month month day-of-week); this runs every minute
schedule => "* * * * *"
}
}
filter {
# Special handling for "document" events: enrich each one with its nested comments
if [type] == "document"
{
# The target below maps the sub-query result onto the commentList field of the document index
jdbc_streaming {
jdbc_driver_library => "D:\java_dev_tool\logstash\logstash-7.14.2\mysql\mysql-connector-java-5.1.35.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://localhost:3306/es?characterEncoding=UTF-8&autoReconnect=true"
jdbc_user => "root"
jdbc_password => "root"
parameters => { "sensor_identifier" => "id"}
# statement_filepath cannot be used here to load the SQL from a file; it causes an error
#statement_filepath => "/etc/logstash/pipeline/sql/filter_sku.sql"
statement => "SELECT commentTitle FROM comment as t WHERE doc_id = :sensor_identifier"
# commentList below matches the commentList field in the ES index
target => "commentList"
}
}
json {
source => "message"
remove_field => ["message"]
}
# Drop the unneeded @timestamp and @version fields added by Logstash
mutate {
remove_field => ["@timestamp","@version"]
}
}
output {
if [type] == "product"
{
elasticsearch {
# host => "localhost"
# port => "9200"
# ES cluster addresses
hosts => ["localhost:9200"]
# Index name (must be lowercase)
index => "product"
# Unique document id (use the table's primary key)
document_id => "%{id}"
}
}
if [type] == "document"
{
elasticsearch {
# host => "localhost"
# port => "9200"
# ES cluster addresses
hosts => ["localhost:9200"]
# Index name (must be lowercase)
index => "document"
# Unique document id (use the table's primary key)
document_id => "%{id}"
}
}
stdout {
codec => json_lines
}
}
Supplement 2
Official configuration reference
input plugins
https://www.elastic.co/guide/en/logstash/current/input-plugins.html
filter plugins
https://www.elastic.co/guide/en/logstash/current/filter-plugins.html
output plugins
https://www.elastic.co/guide/en/logstash/current/output-plugins.html
Reference
https://zxiaofan.blog.csdn.net/article/details/86708490