Commit
Merge commit '72938fbe6aabbbca39975a5cd4a6e40a920eb9c9' into v2.3

* commit '72938fbe6aabbbca39975a5cd4a6e40a920eb9c9':
  v2.2.0
  input mongo: fix a bug where the connection might not be closed after a batch full sync
  input kafka: fix a bug where, with multiple goroutines configured for data conversion, the worker goroutine IDs came out wrong because of a pointer issue
  Fix a bug where editing a data source did not update its connection example, description, and related info #248
  Fix a bug where adding table sync tasks in batch failed #249
  After adding a full-sync task, open the full-task list in a new window #250
  input kafka: with multiple partitions, support configuring the number of goroutines that handle data conversion to improve performance; data from one partition is only ever processed by one goroutine, so with a single partition setting more than one has no effect, and the default is a single goroutine
  input mongo: support full, incremental, and full-then-incremental data synchronization
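Of the items above, the Kafka goroutine setting is the one that surfaces most directly in this commit's code: judging from the new Test_getKafkaConnectConfig further down, it is configured through a consumer.count parameter in the input DSN. The minimal sketch below only restates that DSN shape; the parameter meanings are inferred from the test's assertions, not from separate documentation.

// Illustrative only: this DSN mirrors the one used in the new
// Test_getKafkaConnectConfig in this commit; the parameter meanings are
// inferred from that test's assertions, not from project documentation.
package main

import "fmt"

func main() {
	// Two brokers, two topics, three conversion goroutines (consumer.count=3),
	// consume from the oldest offset (from.beginning=true), Kafka protocol
	// version 2.7.1, and skip messages that fail to deserialize.
	dsn := "127.0.0.1:9092,192.168.1.10/topic1,topic2" +
		"?from.beginning=true&version=2.7.1&consumer.count=3&skip.serialize.err=true"
	fmt.Println(dsn)
}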
Showing 24 changed files with 918 additions and 36 deletions.
@@ -16,4 +16,4 @@ limitations under the License.
 
 package config
 
-const VERSION = "v2.1.1-beta"
+const VERSION = "v2.2.0-beta"
@@ -0,0 +1,38 @@
package kafka

import (
	"github.com/Shopify/sarama"
	"github.com/smartystreets/goconvey/convey"
	"testing"
)

func Test_getKafkaConnectConfig(t *testing.T) {
	convey.Convey("normal", t, func() {
		url := "127.0.0.1:9092,192.168.1.10/topic1,topic2?from.beginning=true&version=2.7.1&consumer.count=3&skip.serialize.err=true"
		configMap := ParseDSN(url)
		c, err := getKafkaConnectConfig(configMap)
		version, _ := sarama.ParseKafkaVersion("2.7.1")
		convey.So(err, convey.ShouldBeNil)
		convey.So(c.CosumerCount, convey.ShouldEqual, 3)
		convey.So(c.SkipSerializeErr, convey.ShouldEqual, true)
		convey.So(len(c.Topics), convey.ShouldEqual, 2)
		convey.So(c.Topics[0], convey.ShouldEqual, "topic1")
		convey.So(c.Topics[1], convey.ShouldEqual, "topic2")
		convey.So(c.ParamConfig.Version.String(), convey.ShouldEqual, version.String())
		convey.So(c.ParamConfig.Consumer.Offsets.Initial, convey.ShouldEqual, sarama.OffsetOldest)
	})

	convey.Convey("normal default", t, func() {
		url := "127.0.0.1:9092,192.168.1.10/topic1,topic2"
		configMap := ParseDSN(url)
		c, err := getKafkaConnectConfig(configMap)
		convey.So(err, convey.ShouldBeNil)
		convey.So(c.CosumerCount, convey.ShouldEqual, 1)
		convey.So(c.SkipSerializeErr, convey.ShouldEqual, false)
		convey.So(len(c.Topics), convey.ShouldEqual, 2)
		convey.So(c.Topics[0], convey.ShouldEqual, "topic1")
		convey.So(c.Topics[1], convey.ShouldEqual, "topic2")
		convey.So(c.ParamConfig.Version.String(), convey.ShouldEqual, defaultKafkaVersion)
		convey.So(c.ParamConfig.Consumer.Offsets.Initial, convey.ShouldEqual, sarama.OffsetNewest)
	})
}
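Read together, the two cases document the defaults this test asserts: with no query parameters, CosumerCount is 1, SkipSerializeErr is false, the Kafka version falls back to defaultKafkaVersion, and the initial offset is sarama.OffsetNewest; setting from.beginning=true switches the initial offset to sarama.OffsetOldest.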
@@ -0,0 +1,87 @@
/*
Copyright [2018] [jc3wish]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package kafka

import (
	"context"
	"fmt"
	"github.com/Shopify/sarama"
	"hash/crc32"
	"log"
	"sync"
)

// An input can be configured with multiple goroutines that process the deserialized data,
// while guaranteeing that data from a given partition is only ever handled by one goroutine.

func (c *InputKafka) InitInputCosume(workerCount int) *sync.WaitGroup {
	ws := &sync.WaitGroup{}
	ws.Add(workerCount)
	for i := 0; i < workerCount; i++ {
		ch := make(chan *sarama.ConsumerMessage, 5000)
		c.inputCosumeList = append(c.inputCosumeList, ch)
		go func(workerId int) {
			defer ws.Done()
			c.InputCosume(c.kafkaGroupCtx, workerId, ch)
		}(i)
	}
	return ws
}

// This method is mainly used by unit tests; there is currently no scenario in the
// production code that calls it.
// If the channels are closed midway, asynchronous goroutines may fail to write to them.
func (c *InputKafka) CloseInputCosume() {
	if len(c.inputCosumeList) == 0 {
		return
	}
	for i := range c.inputCosumeList {
		close(c.inputCosumeList[i])
	}
}

func (c *InputKafka) SendToInputConsume(kafkaMsg *sarama.ConsumerMessage) {
	crc32Int := c.CRC32KafkaMsgTopicAndPartition(kafkaMsg)
	i := crc32Int % len(c.inputCosumeList)
	select {
	case c.inputCosumeList[i] <- kafkaMsg:
		break
	case <-c.kafkaGroupCtx.Done():
		break
	}
}

func (c *InputKafka) CRC32KafkaMsgTopicAndPartition(kafkaMsg *sarama.ConsumerMessage) int {
	key := fmt.Sprintf("%s_%d", kafkaMsg.Topic, kafkaMsg.Partition)
	crc32Int := int(crc32.ChecksumIEEE([]byte(key)))
	return crc32Int
}

func (c *InputKafka) InputCosume(ctx context.Context, workerId int, ch chan *sarama.ConsumerMessage) {
	log.Printf("[INFO] output[%s] InputCosume workeId:%d starting\n", "kafka", workerId)
	defer log.Printf("[INFO] output[%s] InputCosume workeId:%d end\n", "kafka", workerId)
	for {
		select {
		case <-ctx.Done():
			return
		case kafkaMsg := <-ch:
			if kafkaMsg == nil {
				return
			}
			c.ToChildCallback(kafkaMsg)
			break
		}
	}
}
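A note on the design above: SendToInputConsume picks a worker channel by hashing "topic_partition" with CRC32 and taking it modulo the number of workers, which is what guarantees that one partition's messages always go to the same goroutine and therefore keep their order. The standalone sketch below reproduces just that arithmetic for illustration; it is not part of the commit.

// Standalone illustration of the partition-to-worker routing used by
// SendToInputConsume above: crc32("topic_partition") % workerCount.
// Messages from the same topic/partition always map to the same worker index,
// which preserves per-partition ordering.
package main

import (
	"fmt"
	"hash/crc32"
)

func workerIndex(topic string, partition int32, workerCount int) int {
	key := fmt.Sprintf("%s_%d", topic, partition)
	return int(crc32.ChecksumIEEE([]byte(key))) % workerCount
}

func main() {
	const workers = 3
	for msg := 0; msg < 4; msg++ {
		// Every message from topic1/partition 0 lands on the same worker index.
		fmt.Println("topic1/0 ->", workerIndex("topic1", 0, workers))
	}
	fmt.Println("topic1/1 ->", workerIndex("topic1", 1, workers))
	fmt.Println("topic2/0 ->", workerIndex("topic2", 0, workers))
}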
@@ -0,0 +1,78 @@
package kafka

import (
	"context"
	"fmt"
	"github.com/Shopify/sarama"
	"github.com/smartystreets/goconvey/convey"
	"testing"
	"time"
)

func TestInputCosume(t *testing.T) {
	convey.Convey("normal", t, func() {
		var dataList []*sarama.ConsumerMessage
		var callbackFunc = func(message *sarama.ConsumerMessage) error {
			dataList = append(dataList, message)
			return nil
		}
		c := NewInputKafka()
		c.kafkaGroupCtx, c.kafkaGroupCancel = context.WithCancel(context.Background())
		c.childCallBack = callbackFunc
		ws := c.InitInputCosume(5)
		for i := 0; i < 20; i++ {
			kafkaMsg := &sarama.ConsumerMessage{
				Topic:     fmt.Sprintf("topic_%d", i%5),
				Partition: 0,
			}
			c.SendToInputConsume(kafkaMsg)
		}
		c.CloseInputCosume()
		ws.Wait()
		convey.So(len(dataList), convey.ShouldEqual, 20)
	})
}

func TestInputKafka_CloseInputCosume(t *testing.T) {
	c := NewInputKafka()
	ws := c.InitInputCosume(0)
	c.CloseInputCosume()
	ws.Wait()
}

func TestInputKafka_CRC32KafkaMsgTopicAndPartition(t *testing.T) {
	convey.Convey("normal", t, func() {
		c := NewInputKafka()
		kafkaMsg1 := &sarama.ConsumerMessage{
			Topic:     fmt.Sprintf("topic_%d", 1),
			Partition: 0,
		}
		kafkaMsg1CRC32 := c.CRC32KafkaMsgTopicAndPartition(kafkaMsg1)
		kafkaMsg2 := &sarama.ConsumerMessage{
			Topic:     fmt.Sprintf("topic_%d", 2),
			Partition: 0,
		}
		kafkaMsg2CRC32 := c.CRC32KafkaMsgTopicAndPartition(kafkaMsg2)
		convey.So(kafkaMsg1CRC32, convey.ShouldNotEqual, kafkaMsg2CRC32)
	})
}

func TestInputKafka_SendToInputConsume(t *testing.T) {
	convey.Convey("send chan lock,and consume cancle", t, func() {
		c := NewInputKafka()
		c.kafkaGroupCtx, c.kafkaGroupCancel = context.WithCancel(context.Background())
		c.inputCosumeList = make([]chan *sarama.ConsumerMessage, 0)
		c.inputCosumeList = append(c.inputCosumeList, make(chan *sarama.ConsumerMessage, 1))
		go func() {
			<-time.After(1 * time.Second)
			c.kafkaGroupCancel()
		}()
		for i := 0; i < 3; i++ {
			kafkaMsg := &sarama.ConsumerMessage{
				Topic:     fmt.Sprintf("topic_%d", i%5),
				Partition: 0,
			}
			c.SendToInputConsume(kafkaMsg)
		}
	})
}