Przeglądaj źródła

feat(sink): kafka sink plugin (#1753)

* feat(sink): kafka sink plugin

Signed-off-by: carlclone <906561974@qq.com>

* feat(sink): remove delivery guarantee from kafka sink plugin

Signed-off-by: carlclone <906561974@qq.com>

---------

Signed-off-by: carlclone <906561974@qq.com>
carlclone 2 lat temu
rodzic
commit
ca40a4c54f

+ 1 - 0
.github/workflows/build_packages.yaml

@@ -182,6 +182,7 @@ jobs:
           - sinks/influx2
           - sinks/tdengine
           - sinks/zmq
+          - sinks/kafka
           - sinks/sql
           - sources/random
           - sources/zmq

+ 1 - 0
Makefile

@@ -93,6 +93,7 @@ docker:
 PLUGINS := sinks/influx \
 	sinks/influx2 \
 	sinks/zmq \
+	sinks/kafka \
 	sinks/image \
 	sinks/sql   \
 	sources/random \

+ 8 - 0
docs/directory.json

@@ -238,6 +238,10 @@
 								{
 									"title": "ZeroMQ Sink",
 									"path": "guide/sinks/plugin/zmq"
+								},
+								{
+									"title": "Kafka Sink",
+									"path": "guide/sinks/plugin/kafka"
 								}
 							]
 						}
@@ -912,6 +916,10 @@
 								{
 									"title": "ZeroMQ Sink",
 									"path": "guide/sinks/plugin/zmq"
+								},
+								{
+									"title": "Kafka Sink",
+									"path": "guide/sinks/plugin/kafka"
 								}
 							]
 						}

+ 1 - 0
docs/en_US/guide/sinks/overview.md

@@ -29,6 +29,7 @@ The list of predefined sink plugins:
 - [Tdengine sink](./plugin/tdengine.md): sink to tdengine.
 - [Image sink](./plugin/image.md): sink to an image file. Only used to handle binary result.
 - [Zero MQ sink](./plugin/zmq.md): sink to zero mq.
+- [Kafka sink](./plugin/kafka.md): sink to kafka.
 
 ## Updatable Sink
 

+ 88 - 0
docs/en_US/guide/sinks/plugin/kafka.md

@@ -0,0 +1,88 @@
+# Kafka Sink
+
+The sink will publish the result into a Kafka topic.
+
+## Compile & deploy plugin
+
+### build in shell
+```shell
+# cd $eKuiper_src
+# go build -trimpath --buildmode=plugin -o plugins/sinks/kafka.so extensions/sinks/kafka/kafka.go
+# zip kafka.zip plugins/sinks/kafka.so
+# cp kafka.zip /root/tomcat_path/webapps/ROOT/
+# bin/kuiper create plugin sink kafka -f /tmp/kafkaPlugin.txt
+# bin/kuiper create rule kafka -f /tmp/kafkaRule.txt
+```
+
+### build with image
+```
+docker build -t demo/plugins:v1 -f build/plugins/Dockerfile .
+docker run demo/plugins:v1
+docker cp  90eae15a7245:/workspace/_plugins/debian/sinks /tmp
+```
+Dockerfile like this:
+```
+## please check the Go version that kuiper uses
+ARG GO_VERSION=1.18.5
+FROM ghcr.io/lf-edge/ekuiper/base:$GO_VERSION-debian AS builder
+WORKDIR /workspace
+ADD . /workspace/
+RUN go env -w GOPROXY=https://goproxy.cn,direct
+RUN make plugins_c
+CMD ["sleep","3600"]
+```
+add this in Makefile:
+```
+PLUGINS_CUSTOM := sinks/kafka
+
+.PHONY: plugins_c $(PLUGINS_CUSTOM)
+plugins_c: $(PLUGINS_CUSTOM)
+
+$(PLUGINS_CUSTOM): PLUGIN_TYPE = $(word 1, $(subst /, , $@))
+$(PLUGINS_CUSTOM): PLUGIN_NAME = $(word 2, $(subst /, , $@))
+$(PLUGINS_CUSTOM):
+	@$(CURDIR)/build-plugins.sh $(PLUGIN_TYPE) $(PLUGIN_NAME)
+```
+
+Restart the eKuiper server to activate the plugin.
+
+## Properties
+
+| Property name     | Optional | Description                             |
+|-------------------|----------|-----------------------------------------|
+| brokers           | false    | The broker address list, separated by "," |
+| topic             | false    | The topic of the Kafka                  |
+| saslAuthType      | false    | The Kafka sasl authType                 |
+| saslUserName      | true     | The sasl user name                      |
+| saslPassword      | true     | The sasl password                       |
+
+
+Other common sink properties are supported. Please refer to the [sink common properties](../overview.md#common-properties) for more information.
+
+## Sample usage
+
+Below is a sample rule for selecting temperatures greater than 50 degrees, and some profiles only for your reference.
+
+### /tmp/kafkaRule.txt
+```json
+{
+  "id": "kafka",
+  "sql": "SELECT * from  demo_stream where temperature > 50",
+  "actions": [
+    {
+      "log": {},
+      "kafka":{
+        "brokers": "127.0.0.1:9092,127.0.0.2:9092",
+        "topic": "test_topic",
+        "saslAuthType": "none"
+      }
+    }
+  ]
+}
+```
+### /tmp/kafkaPlugin.txt
+```json
+{
+   "file":"http://localhost:8080/kafka.zip"
+ }
+```

+ 1 - 0
docs/zh_CN/guide/sinks/overview.md

@@ -29,6 +29,7 @@
 - [Tdengine sink](./plugin/tdengine.md): 写入 Tdengine 。
 - [Image sink](./plugin/image.md): 写入一个图像文件。仅用于处理二进制结果。
 - [Zero MQ sink](./plugin/zmq.md):输出到 Zero MQ 。
+- [Kafka sink](./plugin/kafka.md):输出到 Kafka 。
 
 ## 更新
 

+ 87 - 0
docs/zh_CN/guide/sinks/plugin/kafka.md

@@ -0,0 +1,87 @@
+# Kafka 目标(Sink)
+
+该插件将分析结果发送到 Kafka 中。
+## 编译插件&创建插件
+
+### 本地构建
+```shell
+# cd $eKuiper_src
+# go build -trimpath --buildmode=plugin -o plugins/sinks/kafka.so extensions/sinks/kafka/kafka.go
+# zip kafka.zip plugins/sinks/kafka.so
+# cp kafka.zip /root/tomcat_path/webapps/ROOT/
+# bin/kuiper create plugin sink kafka -f /tmp/kafkaPlugin.txt
+# bin/kuiper create rule kafka -f /tmp/kafkaRule.txt
+```
+
+### 镜像构建
+```
+docker build -t demo/plugins:v1 -f build/plugins/Dockerfile .
+docker run demo/plugins:v1
+docker cp  90eae15a7245:/workspace/_plugins/debian/sinks /tmp
+```
+Dockerfile 如下所示:
+```
+## please check the Go version that kuiper uses
+ARG GO_VERSION=1.18.5
+FROM ghcr.io/lf-edge/ekuiper/base:$GO_VERSION-debian AS builder
+WORKDIR /workspace
+ADD . /workspace/
+RUN go env -w GOPROXY=https://goproxy.cn,direct
+RUN make plugins_c
+CMD ["sleep","3600"]
+```
+在Makefile中添加:
+```
+PLUGINS_CUSTOM := sinks/kafka
+
+.PHONY: plugins_c $(PLUGINS_CUSTOM)
+plugins_c: $(PLUGINS_CUSTOM)
+
+$(PLUGINS_CUSTOM): PLUGIN_TYPE = $(word 1, $(subst /, , $@))
+$(PLUGINS_CUSTOM): PLUGIN_NAME = $(word 2, $(subst /, , $@))
+$(PLUGINS_CUSTOM):
+	@$(CURDIR)/build-plugins.sh $(PLUGIN_TYPE) $(PLUGIN_NAME)
+```
+
+重新启动 eKuiper 服务器以激活插件。
+
+## 属性
+
+| 属性名称              | 是否可选 | 说明                   |
+|-------------------|------|----------------------|
+| brokers           | 否    | broker地址列表 ,用 "," 分割 |
+| topic             | 否    | kafka 主题             |
+| saslAuthType      | 否    | sasl 认证类型            |
+| saslUserName      | 是    | sasl 用户名             |
+| saslPassword      | 是    | sasl 密码              |
+
+
+其他通用的 sink 属性也支持,请参阅[公共属性](../overview.md#公共属性)。
+
+## 示例用法
+
+下面是选择温度大于50度的样本规则,和一些配置文件仅供参考。
+
+### /tmp/kafkaRule.txt
+```json
+{
+  "id": "kafka",
+  "sql": "SELECT * from  demo_stream where temperature > 50",
+  "actions": [
+    {
+      "log": {},
+      "kafka":{
+        "brokers": "127.0.0.1:9092,127.0.0.2:9092",
+        "topic": "test_topic",
+        "saslAuthType": "none"
+      }
+    }
+  ]
+}
+```
+### /tmp/kafkaPlugin.txt
+```json
+{
+  "file":"http://localhost:8080/kafka.zip"
+}
+```

+ 3 - 0
extensions/go.mod

@@ -30,6 +30,7 @@ require (
 	github.com/pebbe/zmq4 v1.2.9
 	github.com/posener/order v0.0.1
 	github.com/prestodb/presto-go-client v0.0.0-20220921130148-c3f935ff1cf9
+	github.com/segmentio/kafka-go v0.4.39
 	github.com/sijms/go-ora/v2 v2.5.33
 	github.com/snowflakedb/gosnowflake v1.6.18
 	github.com/taosdata/driver-go/v2 v2.0.4
@@ -176,6 +177,8 @@ require (
 	github.com/twmb/murmur3 v1.1.6 // indirect
 	github.com/u2takey/go-utils v0.3.1 // indirect
 	github.com/uber-go/tally v3.5.3+incompatible // indirect
+	github.com/xdg/scram v1.0.5 // indirect
+	github.com/xdg/stringprep v1.0.3 // indirect
 	github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2 // indirect
 	gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b // indirect
 	go.opencensus.io v0.24.0 // indirect

+ 8 - 0
extensions/go.sum

@@ -487,6 +487,7 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL
 github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
+github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
 github.com/klauspost/compress v1.16.0 h1:iULayQNOReoYUe+1qtKOqw9CwJv3aNQu8ivo7lw1HU4=
 github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -593,6 +594,7 @@ github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk
 github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
 github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
 github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
+github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
 github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc=
 github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
 github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA=
@@ -631,6 +633,8 @@ github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfF
 github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
 github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg=
 github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg=
+github.com/segmentio/kafka-go v0.4.39 h1:75smaomhvkYRwtuOwqLsdhgCG30B82NsbdkdDfFbvrw=
+github.com/segmentio/kafka-go v0.4.39/go.mod h1:T0MLgygYvmqmBvC+s8aCcbVNfJN4znVne5j0Pzowp/Q=
 github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
 github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
 github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
@@ -693,6 +697,10 @@ github.com/vertica/vertica-sql-go v1.3.1 h1:qjkJzkFmLG+z2koRC6inT+yFr23TyBkNXUP4
 github.com/vertica/vertica-sql-go v1.3.1/go.mod h1:jnn2GFuv+O2Jcjktb7zyc4Utlbu9YVqpHH/lx63+1M4=
 github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
 github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU=
+github.com/xdg/scram v1.0.5 h1:TuS0RFmt5Is5qm9Tm2SoD89OPqe4IRiFtyFY4iwWXsw=
+github.com/xdg/scram v1.0.5/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
+github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4=
+github.com/xdg/stringprep v1.0.3/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
 github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
 github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
 github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=

+ 216 - 0
extensions/sinks/kafka/kafka.go

@@ -0,0 +1,216 @@
+// Copyright 2023 carlclone@gmail.com
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"context"
+	"fmt"
+	"github.com/lf-edge/ekuiper/pkg/api"
+	"github.com/lf-edge/ekuiper/pkg/cast"
+	kafkago "github.com/segmentio/kafka-go"
+	"github.com/segmentio/kafka-go/sasl"
+	"github.com/segmentio/kafka-go/sasl/plain"
+	"github.com/segmentio/kafka-go/sasl/scram"
+	"strings"
+	"time"
+)
+
+type kafkaSink struct {
+	writer *kafkago.Writer
+	c      *sinkConf
+}
+
+const (
+	SASL_NONE  = "none"
+	SASL_PLAIN = "plain"
+	SASL_SCRAM = "scram"
+)
+
+type sinkConf struct {
+	Brokers      string `json:"brokers"`
+	Topic        string `json:"topic"`
+	SaslAuthType string `json:"saslAuthType"`
+	SaslUserName string `json:"saslUserName"`
+	SaslPassword string `json:"saslPassword"`
+}
+
+func (m *kafkaSink) Configure(props map[string]interface{}) error {
+	c := &sinkConf{
+		Brokers:      "localhost:9092",
+		Topic:        "",
+		SaslAuthType: SASL_NONE,
+	}
+	if err := cast.MapToStruct(props, c); err != nil {
+		return err
+	}
+	if len(strings.Split(c.Brokers, ",")) == 0 {
+		return fmt.Errorf("brokers can not be empty")
+	}
+	if c.Topic == "" {
+		return fmt.Errorf("topic can not be empty")
+	}
+	if !(c.SaslAuthType == SASL_NONE || c.SaslAuthType == SASL_SCRAM || c.SaslAuthType == SASL_PLAIN) {
+		return fmt.Errorf("saslAuthType incorrect")
+	}
+	if (c.SaslAuthType == SASL_SCRAM || c.SaslAuthType == SASL_PLAIN) && (c.SaslUserName == "" || c.SaslPassword == "") {
+		return fmt.Errorf("username and password can not be empty")
+	}
+
+	m.c = c
+	return nil
+}
+
+func (m *kafkaSink) Open(ctx api.StreamContext) error {
+	ctx.GetLogger().Debug("Opening kafka sink")
+
+	var err error
+	var mechanism sasl.Mechanism
+
+	//sasl authentication type
+	switch m.c.SaslAuthType {
+	case SASL_PLAIN:
+		mechanism = plain.Mechanism{
+			Username: m.c.SaslUserName,
+			Password: m.c.SaslPassword,
+		}
+	case SASL_SCRAM:
+		mechanism, err = scram.Mechanism(scram.SHA512, m.c.SaslUserName, m.c.SaslPassword)
+		if err != nil {
+			return err
+		}
+	default:
+		mechanism = nil
+	}
+	brokers := strings.Split(m.c.Brokers, ",")
+	w := &kafkago.Writer{
+		Addr:                   kafkago.TCP(brokers...),
+		Topic:                  m.c.Topic,
+		Balancer:               &kafkago.LeastBytes{},
+		Async:                  false,
+		AllowAutoTopicCreation: true,
+		MaxAttempts:            10,
+		RequiredAcks:           -1,
+		Transport: &kafkago.Transport{
+			SASL: mechanism,
+		},
+	}
+	m.writer = w
+	return nil
+}
+
+func (m *kafkaSink) Collect(ctx api.StreamContext, item interface{}) error {
+	logger := ctx.GetLogger()
+	logger.Debugf("kafka sink receive %s", item)
+	var messages []kafkago.Message
+	switch d := item.(type) {
+	case []map[string]interface{}:
+		for _, el := range d {
+			decodedBytes, _, err := ctx.TransformOutput(el)
+			if err != nil {
+				return fmt.Errorf("kafka sink transform data error: %v", err)
+			}
+			messages = append(messages, kafkago.Message{Value: decodedBytes})
+		}
+	case map[string]interface{}:
+		decodedBytes, _, err := ctx.TransformOutput(d)
+		if err != nil {
+			return fmt.Errorf("kafka sink transform data error: %v", err)
+		}
+		messages = append(messages, kafkago.Message{Value: decodedBytes})
+	default:
+		return fmt.Errorf("unrecognized format of %s", item)
+	}
+
+	vctx, cancel := context.WithTimeout(ctx, 10*time.Second)
+	defer cancel()
+	err := m.kafkaWriteWithBackoff(vctx, ctx.GetLogger().Errorf, 100*time.Millisecond, time.Second, messages...)
+	if err != nil {
+		return err
+	}
+	logger.Debug("insert data into kafka success")
+	return nil
+}
+
+func (m *kafkaSink) kafkaWriteWithBackoff(ctx context.Context, log func(string, ...interface{}), interval, maxInterval time.Duration, messages ...kafkago.Message) error {
+	var berr error
+	tries := 0
+
+retry:
+	for {
+		tries++
+
+		err := m.writer.WriteMessages(ctx, messages...)
+		switch err := err.(type) {
+		case nil:
+			return nil
+
+		case kafkago.Error:
+			berr = err
+			if !err.Temporary() {
+				break retry
+			}
+
+		case kafkago.WriteErrors:
+			var remaining []kafkago.Message
+			for i, m := range messages {
+				switch err := err[i].(type) {
+				case nil:
+					continue
+
+				case kafkago.Error:
+					if err.Temporary() {
+						remaining = append(remaining, m)
+						continue
+					}
+				}
+
+				return fmt.Errorf("failed to deliver messages: %v", err)
+			}
+
+			messages = remaining
+			berr = err
+
+		default:
+			if berr == nil || err != context.DeadlineExceeded {
+				berr = err
+			}
+			break retry
+		}
+
+		log("temporary write error: %v", err)
+
+		interval *= 2
+		if interval > maxInterval {
+			interval = maxInterval
+		}
+		timer := time.NewTimer(interval)
+		select {
+		case <-timer.C:
+		case <-ctx.Done():
+			timer.Stop()
+			break retry
+		}
+	}
+
+	return fmt.Errorf("failed to deliver messages after %d tries: %v", tries, berr)
+}
+
+func (m *kafkaSink) Close(ctx api.StreamContext) error {
+	return m.writer.Close()
+}
+
+func Kafka() api.Sink {
+	return &kafkaSink{}
+}

+ 112 - 0
extensions/sinks/kafka/kafka.json

@@ -0,0 +1,112 @@
+{
+  "about": {
+    "trial": true,
+    "author": {
+      "name": "carlclone",
+      "email": "carlclone@gmail.com",
+      "company": "personal",
+      "website": "https://www.github.com/carlclone"
+    },
+    "helpUrl": {
+      "en_US": "https://ekuiper.org/docs/en/latest/guide/sinks/plugin/kafka.html",
+      "zh_CN": "https://ekuiper.org/docs/zh/latest/guide/sinks/plugin/kafka.html"
+    },
+    "description": {
+      "en_US": "This is a sink for Kafka, it can be used for saving the analysis data into Kafka.",
+      "zh_CN": "为 Kafka 的持久化插件,可以用于将分析数据存入 Kafka 中"
+    }
+  },
+  "libs": [
+    "github.com/segmentio/kafka-go@v0.4.39"
+  ],
+  "properties": [
+    {
+      "name": "brokers",
+      "default": "127.0.0.1:9092,127.0.0.2:9092",
+      "optional": false,
+      "control": "text",
+      "type": "string",
+      "hint": {
+        "en_US": "The URL list of the Kafka brokers",
+        "zh_CN": "Kafka brokers的 URL 列表"
+      },
+      "label": {
+        "en_US": "broker list",
+        "zh_CN": "Broker url 列表"
+      }
+    },
+    {
+      "name": "topic",
+      "default": "",
+      "optional": false,
+      "control": "text",
+      "type": "string",
+      "hint": {
+        "en_US": "The topic to publish to.",
+        "zh_CN": "发布主题"
+      },
+      "label": {
+        "en_US": "Topic",
+        "zh_CN": "主题"
+      }
+    },
+    {
+      "name": "saslAuthType",
+      "default": "none",
+      "optional": false,
+      "control": "select",
+      "values": [
+        "none",
+        "plain",
+        "scram"
+      ],
+      "type": "string",
+      "hint": {
+        "en_US": "Sasl auth type of Kafka",
+        "zh_CN": "Kafka 的 Sasl 认证类型"
+      },
+      "label": {
+        "en_US": "Sasl auth type",
+        "zh_CN": "Sasl 认证类型"
+      }
+    },
+    {
+      "name": "saslUserName",
+      "default": "",
+      "optional": true,
+      "control": "text",
+      "type": "string",
+      "hint": {
+        "en_US": "Sasl username for authentication",
+        "zh_CN": "Sasl 认证使用的用户名"
+      },
+      "label": {
+        "en_US": "Sasl username",
+        "zh_CN": "Sasl 用户名"
+      }
+    },
+    {
+      "name": "saslPassword",
+      "default": "",
+      "optional": true,
+      "control": "text",
+      "type": "string",
+      "hint": {
+        "en_US": "Sasl password for authentication",
+        "zh_CN": "Sasl 认证使用的密码"
+      },
+      "label": {
+        "en_US": "Sasl password",
+        "zh_CN": "Sasl 密码"
+      }
+    }
+  ],
+  "node": {
+    "category": "sink",
+    "icon": "iconPath",
+    "label": {
+      "en": "Kafka",
+      "zh": "Kafka"
+    }
+  }
+}

+ 1 - 1
internal/server/plugin_init.go

@@ -235,7 +235,7 @@ func prebuildPluginsHandler(w http.ResponseWriter, _ *http.Request, t plugin.Plu
 }
 
 var NativeSourcePlugin = []string{"random", "zmq", "sql", "video"}
-var NativeSinkPlugin = []string{"image", "influx", "influx2", "tdengine", "zmq", "sql"}
+var NativeSinkPlugin = []string{"image", "influx", "influx2", "tdengine", "zmq", "kafka", "sql"}
 var NativeFunctionPlugin = []string{"accumulateWordCount", "countPlusOne", "echo", "geohash", "image", "labelImage", "tfLite"}
 
 func fetchPluginList(t plugin.PluginType, hosts, os, arch string) (result map[string]string, err error) {