
Merge pull request #735 from emqx/dev/1.1.2

Dev/1.1.2
ngjaying 4 years ago
parent commit af7e6cf670
100 changed files with 3543 additions and 355 deletions
  1. .ci/Dockerfile-plugins (+7 -3)
  2. .github/workflows/build_packages.yaml (+14 -12)
  3. .github/workflows/run_fvt_tests.yaml (+2 -1)
  4. Makefile (+22 -7)
  5. common/kv/kv.go (+19 -0)
  6. common/kv.go (+6 -19)
  7. common/kv/sqliteKV_test.go (+101 -0)
  8. common/util.go (+1 -0)
  9. common/util_test.go (+0 -95)
  10. deploy/chart/kuiper/Chart.yaml (+2 -2)
  11. deploy/docker/Dockerfile-alpine (+21 -6)
  12. deploy/packages/deb/Makefile (+3 -0)
  13. deploy/packages/deb/debian/rules (+1 -1)
  14. deploy/packages/rpm/kuiper.spec (+1 -1)
  15. docs/en_US/cli/plugins.md (+50 -0)
  16. docs/en_US/extension/function.md (+24 -2)
  17. docs/en_US/extension/overview.md (+13 -2)
  18. docs/en_US/extension/sink.md (+1 -1)
  19. docs/en_US/extension/source.md (+1 -1)
  20. docs/en_US/plugins/functions/functions.md (+114 -1)
  21. docs/en_US/plugins/functions/tensorflow_lite_tutorial.md (+233 -0)
  22. docs/en_US/plugins/plugins_tutorial.md (+3 -3)
  23. docs/en_US/plugins/sinks/file.md (+1 -1)
  24. docs/en_US/plugins/sinks/image.md (+1 -1)
  25. docs/en_US/plugins/sinks/influx.md (+1 -1)
  26. docs/en_US/plugins/sinks/tdengine.md (+1 -1)
  27. docs/en_US/plugins/sinks/zmq.md (+1 -1)
  28. docs/en_US/plugins/sources/random.md (+1 -1)
  29. docs/en_US/plugins/sources/zmq.md (+1 -1)
  30. docs/en_US/restapi/plugins.md (+48 -1)
  31. docs/en_US/rules/overview.md (+25 -21)
  32. docs/en_US/sqls/built-in_functions.md (+12 -11)
  33. docs/en_US/sqls/json_expr.md (+34 -4)
  34. docs/zh_CN/cli/plugins.md (+51 -0)
  35. docs/zh_CN/edgex/edgex_rule_engine_tutorial.md (+1 -1)
  36. docs/zh_CN/extension/function.md (+22 -1)
  37. docs/zh_CN/extension/overview.md (+10 -0)
  38. docs/zh_CN/extension/sink.md (+1 -1)
  39. docs/zh_CN/extension/source.md (+1 -1)
  40. docs/zh_CN/getting_started.md (+1 -1)
  41. docs/zh_CN/manager-ui/overview.md (+2 -2)
  42. docs/zh_CN/plugins/functions/functions.md (+114 -1)
  43. docs/zh_CN/plugins/functions/tensorflow_lite_tutorial.md (+234 -0)
  44. docs/zh_CN/plugins/overview.md (+1 -1)
  45. docs/zh_CN/plugins/plugins_tutorial.md (+3 -3)
  46. docs/zh_CN/plugins/sinks/file.md (+1 -1)
  47. docs/zh_CN/plugins/sinks/image.md (+1 -1)
  48. docs/zh_CN/plugins/sinks/influx.md (+1 -1)
  49. docs/zh_CN/plugins/sinks/tdengine.md (+1 -1)
  50. docs/zh_CN/plugins/sinks/zmq.md (+1 -1)
  51. docs/zh_CN/plugins/sources/random.md (+1 -1)
  52. docs/zh_CN/plugins/sources/zmq.md (+1 -1)
  53. docs/zh_CN/quick_start_docker.md (+2 -2)
  54. docs/zh_CN/restapi/plugins.md (+48 -1)
  55. docs/zh_CN/rules/data_template.md (+1 -1)
  56. docs/zh_CN/rules/overview.md (+20 -16)
  57. docs/zh_CN/sqls/built-in_functions.md (+10 -9)
  58. docs/zh_CN/sqls/json_expr.md (+40 -4)
  59. etc/functions/geohash.json (+92 -0)
  60. etc/functions/resize.json (+11 -6)
  61. etc/functions/thumbnail.json (+4 -8)
  62. etc/kuiper.yaml (+2 -0)
  63. etc/sources/httppull.yaml (+1 -1)
  64. etc/sources/random.json (+2 -2)
  65. fvt_scripts/binary_image_process.jmx (+74 -0)
  66. fvt_scripts/http_pull_rule.jmx (+1 -1)
  67. fvt_scripts/prepare_plugins.sh (+16 -1)
  68. plugins/funcMeta.go (+4 -1)
  69. plugins/functions/geohash/geohash.go (+263 -0)
  70. plugins/functions/image/exports.go (+6 -0)
  71. plugins/functions/resize/resize.go (+0 -2)
  72. plugins/functions/thumbnail/thumbnail.go (+0 -2)
  73. plugins/functions/labelImage/etc/labels.txt (+1001 -0)
  74. plugins/functions/labelImage/etc/mobilenet_quant_v1_224.tflite (BIN)
  75. plugins/functions/labelImage/install.sh (+24 -0)
  76. plugins/functions/labelImage/labelImage.go (+170 -0)
  77. plugins/functions/labelImage/lib/Readme.md (+25 -0)
  78. plugins/functions/labelImage/lib/libtensorflowlite.so (BIN)
  79. plugins/functions/labelImage/lib/libtensorflowlite_c.so (BIN)
  80. plugins/functions/labelImage/tflite.conf (+2 -0)
  81. plugins/manager.go (+288 -42)
  82. plugins/manager_test.go (+66 -14)
  83. plugins/sinkMeta_test.go (+1 -0)
  84. plugins/testzips/functions/comp.zip (BIN)
  85. plugins/testzips/functions/misc.zip (BIN)
  86. tools/migration/util/migration.go (+2 -2)
  87. tools/migration/util/migration_test.go (+2 -2)
  88. xsql/ast.go (+23 -9)
  89. xsql/funcs_ast_validator.go (+4 -0)
  90. xsql/funcs_misc.go (+6 -0)
  91. xsql/functions.go (+1 -1)
  92. xsql/lexical.go (+1 -0)
  93. xsql/parser.go (+3 -2)
  94. xsql/parser_test.go (+7 -6)
  95. xsql/processors/checkpoint_test.go (+2 -0)
  96. xsql/processors/extension_test.go (+2 -0)
  97. xsql/processors/rule_test.go (+94 -0)
  98. xsql/processors/simple_processor_test.go (+2 -0)
  99. xsql/processors/window_rule_test.go (+7 -0)
  100. xsql/processors/xsql_processor.go (+0 -0)

+ 7 - 3
.ci/Dockerfile-plugins

@@ -17,7 +17,7 @@ RUN set -e -u -x \
     && for lib in $(cat etc/$PLUGIN_TYPE/$PLUGIN_NAME.json | jq -r ".libs[]"); do go get $lib; done \
     && case $PLUGIN_NAME in \
          influxdb ) \
-           go build --buildmode=plugin -tags plugins -o plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME@$VERSION.so plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME.go \
+           go build -trimpath --buildmode=plugin -tags plugins -o plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME@$VERSION.so plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME.go \
            ;; \
          tdengine ) \
            if [ "$(uname -m)" = "x86_64" ]; then \
@@ -28,10 +28,14 @@ RUN set -e -u -x \
            fi; \
            tar -zxvf /tmp/TDengine-client-2.0.6.0.tar.gz \
            && cd TDengine-client-2.0.6.0 && ./install_client.sh && cd - \
-           && go build --buildmode=plugin -tags plugins -o plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME@$VERSION.so plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME.go \
+           && go build -trimpath --buildmode=plugin -tags plugins -o plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME@$VERSION.so plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME.go \
+           ;; \
+         labelImage ) \
+           git clone -b v2.2.0-rc3 --depth 1 https://github.com/tensorflow/tensorflow.git /tensorflow; \
+           CGO_CFLAGS=-I/tensorflow CGO_LDFLAGS=-L/go/kuiper/plugins/functions/labelImage/lib go build -trimpath --buildmode=plugin -o plugins/functions/labelImage/labelImage.so plugins/functions/labelImage/*.go \
            ;; \
          * ) \
-           go build --buildmode=plugin -o plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME@$VERSION.so plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME.go \
+           go build -trimpath --buildmode=plugin -o plugins/$PLUGIN_TYPE/$PLUGIN_NAME/$PLUGIN_NAME@$VERSION.so plugins/$PLUGIN_TYPE/$PLUGIN_NAME/*.go \
            ;; \
        esac \
     && if [ -f "etc/$PLUGIN_TYPE/$PLUGIN_NAME.yaml" ]; then cp etc/$PLUGIN_TYPE/$PLUGIN_NAME.yaml plugins/$PLUGIN_TYPE/$PLUGIN_NAME; fi \

+ 14 - 12
.github/workflows/build_packages.yaml

@@ -68,20 +68,12 @@ jobs:
                 - functions/accumulateWordCount
                 - functions/countPlusOne
                 - functions/echo
-                - functions/thumbnail
-                - functions/resize
+                - functions/image
+                - functions/geohash
+                - functions/labelImage
 
         steps:
         - uses: actions/checkout@v1
-        - name: install docker
-          run: |
-            sudo apt-get remove docker docker-engine docker.io containerd runc
-            sudo apt-get update
-            sudo apt-get install apt-transport-https ca-certificates curl gnupg-agent software-properties-common
-            curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
-            sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
-            sudo apt-get update
-            sudo apt-get install docker-ce docker-ce-cli containerd.io
         - name: prepare docker
           run: |
             mkdir -p $HOME/.docker
@@ -93,7 +85,7 @@ jobs:
             docker run --rm --privileged tonistiigi/binfmt --install all
         - name: build docker image
           run: make docker -j4
-        - name: buiild debian plugins
+        - name: build debian plugins
           env:
             PLUGIN: ${{ matrix.plugin }}
           run: make ${PLUGIN}
@@ -117,6 +109,16 @@ jobs:
                     ${ip_address}:9081/plugins/${plugin_type} \
                     -X POST \
                     -d "{\"name\":\"${plugin_name}\", \"file\":\"file:///var/plugins/${os}/${plugin_type}/${plugin_name}_amd64.zip\", \"shellParas\": [\"2.0.3.1\"]}"
+                elif [ "${plugin_name}" = "image" ]; then
+                     curl \
+                     ${ip_address}:9081/plugins/${plugin_type} \
+                     -X POST \
+                     -d "{\"name\":\"${plugin_name}\", \"file\":\"file:///var/plugins/${os}/${plugin_type}/${plugin_name}_amd64.zip\", \"functions\": [\"resize\",\"thumbnail\"]}"
+                elif [ "${plugin_name}" = "geohash" ]; then
+                     curl \
+                     ${ip_address}:9081/plugins/${plugin_type} \
+                     -X POST \
+                     -d "{\"name\":\"${plugin_name}\", \"file\":\"file:///var/plugins/${os}/${plugin_type}/${plugin_name}_amd64.zip\", \"functions\": [\"geohashEncode\", \"geohashEncodeInt\", \"geohashDecode\", \"geohashDecodeInt\", \"geohashBoundingBox\", \"geohashBoundingBoxInt\", \"geohashNeighbor\", \"geohashNeighborInt\", \"geohashNeighbors\", \"geohashNeighborsInt\"]}"
                 else
                     curl \
                     ${ip_address}:9081/plugins/${plugin_type} \
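
For reference, the plugin registration that the workflow performs with curl can also be issued from Go. This is a hedged sketch of the same REST call; the host address and the debian package path are illustrative, while the endpoint and the payload fields (name, file, functions) come from the step above.

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Register the "image" plugin and declare the functions it exports,
	// mirroring the payload used by the workflow step above.
	payload := []byte(`{
		"name": "image",
		"file": "file:///var/plugins/debian/functions/image_amd64.zip",
		"functions": ["resize", "thumbnail"]
	}`)

	resp, err := http.Post("http://127.0.0.1:9081/plugins/functions", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("registration status:", resp.Status)
}
```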

+ 2 - 1
.github/workflows/run_fvt_tests.yaml

@@ -42,7 +42,8 @@ jobs:
           run: |
             sudo apt update && sudo apt install pkg-config libczmq-dev -y
             make build_with_edgex
-            go build --buildmode=plugin -o plugins/sources/Zmq.so plugins/sources/zmq/zmq.go
+            go build -trimpath --buildmode=plugin -o plugins/sources/Zmq.so plugins/sources/zmq/zmq.go
+            go build -trimpath --buildmode=plugin -o plugins/functions/Image.so plugins/functions/image/*.go
         - name: run edgex && emqx && kuiper
           run: |
             sudo ./fvt_scripts/setup_env.sh

+ 22 - 7
Makefile

@@ -49,10 +49,10 @@ build_prepare:
 build_without_edgex: build_prepare
 	@if [ ! -z $(GOOS) ] && [ ! -z $(GOARCH) ] && [ $(CGO_ENABLED) == 0 ];then \
 		GO111MODULE=on GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=0 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -o kuiper xstream/cli/main.go; \
-		GO111MODULE=on GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=0 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -o kuiperd xstream/server/main.go; \
+		GO111MODULE=on GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=0 go build -trimpath -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -o kuiperd xstream/server/main.go; \
 	else \
 		GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -o kuiper xstream/cli/main.go; \
-		GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -o kuiperd xstream/server/main.go; \
+		GO111MODULE=on CGO_ENABLED=1 go build -trimpath -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -o kuiperd xstream/server/main.go; \
 	fi
 	@if [ ! -z $$(which upx) ] && [ "$$(uname -m)" != "aarch64" ]; then upx ./kuiper; upx ./kuiperd; fi
 	@mv ./kuiper ./kuiperd $(BUILD_PATH)/$(PACKAGE_NAME)/bin
@@ -66,10 +66,10 @@ pkg_without_edgex: build_without_edgex
 build_with_edgex: build_prepare
 	@if [ ! -z $(GOOS) ] && [ ! -z $(GOARCH) ] && [ $(CGO_ENABLED) == 0 ];then \
 		GO111MODULE=on GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=0 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -tags edgex -o kuiper xstream/cli/main.go; \
-		GO111MODULE=on GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=0 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -tags edgex -o kuiperd xstream/server/main.go; \
+		GO111MODULE=on GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=0 go build -trimpath -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -tags edgex -o kuiperd xstream/server/main.go; \
 	else \
 		GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -tags edgex -o kuiper xstream/cli/main.go; \
-		GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -tags edgex -o kuiperd xstream/server/main.go; \
+		GO111MODULE=on CGO_ENABLED=1 go build -trimpath -ldflags="-s -w -X main.Version=$(VERSION) -X main.LoadFileType=relative" -tags edgex -o kuiperd xstream/server/main.go; \
 	fi
 	@if [ ! -z $$(which upx) ] && [ "$$(uname -m)" != "aarch64" ]; then upx ./kuiper; upx ./kuiperd; fi
 	@mv ./kuiper ./kuiperd $(BUILD_PATH)/$(PACKAGE_NAME)/bin
@@ -177,12 +177,12 @@ PLUGINS := sinks/file \
 	sources/zmq \
 	functions/accumulateWordCount \
 	functions/countPlusOne \
-	functions/thumbnail\
-	functions/resize\
+	functions/image\
+	functions/geohash\
 	functions/echo
 
 .PHONY: plugins sinks/tdengine $(PLUGINS)
-plugins: cross_prepare sinks/tdengine $(PLUGINS)
+plugins: cross_prepare sinks/tdengine functions/labelImage $(PLUGINS)
 sinks/tdengine:
 	@docker buildx build --no-cache \
     --platform=linux/amd64,linux/arm64 \
@@ -200,6 +200,21 @@ sinks/tdengine:
 	done
 	@rm -f /tmp/cross_build_plugins_sinks_tdengine.tar
 
+functions/labelImage:
+	@docker buildx build --no-cache \
+    --platform=linux/amd64 \
+    -t cross_build \
+    --build-arg VERSION=$(VERSION) \
+    --build-arg PLUGIN_TYPE=functions \
+    --build-arg PLUGIN_NAME=labelImage \
+    --output type=tar,dest=/tmp/cross_build_plugins_functions_labelImage.tar \
+    -f .ci/Dockerfile-plugins .
+
+	@mkdir -p _plugins/debian/functions
+	@tar -xvf /tmp/cross_build_plugins_functions_labelImage.tar --wildcards "go/kuiper/plugins/functions/labelImage/labelImage_amd64.zip"
+	@mv $$(ls go/kuiper/plugins/functions/labelImage/labelImage_amd64.zip) _plugins/debian/functions
+	@rm -f /tmp/cross_build_plugins_functions_labelImage.tar
+
 $(PLUGINS): PLUGIN_TYPE = $(word 1, $(subst /, , $@))
 $(PLUGINS): PLUGIN_NAME = $(word 2, $(subst /, , $@))
 $(PLUGINS):

+ 19 - 0
common/kv/kv.go

@@ -0,0 +1,19 @@
+package kv
+
+type KeyValue interface {
+	Open() error
+	Close() error
+	// Set key to hold string value if key does not exist otherwise return an error
+	Setnx(key string, value interface{}) error
+	// Set key to hold the string value. If key already holds a value, it is overwritten
+	Set(key string, value interface{}) error
+	Get(key string, val interface{}) (bool, error)
+	//Must return *common.Error with NOT_FOUND error
+	Delete(key string) error
+	Keys() (keys []string, err error)
+	Clean() error
+}
+
+func GetDefaultKVStore(fpath string) (ret KeyValue) {
+	return GetSqliteKVStore(fpath)
+}
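
A minimal usage sketch of the new kv package, assuming the import path github.com/emqx/kuiper/common/kv implied by the file location; the store path and keys below are illustrative.

```go
package main

import (
	"fmt"

	"github.com/emqx/kuiper/common/kv"
)

func main() {
	// GetDefaultKVStore returns the sqlite-backed store: the sqliteKV.db file
	// is created in the parent directory of the given path, and the last path
	// element ("example") is used as the table name.
	store := kv.GetDefaultKVStore("data/example")
	if err := store.Open(); err != nil {
		panic(err)
	}
	defer store.Close()

	// Setnx only writes the key when it does not exist yet.
	if err := store.Setnx("rule1", "running"); err != nil {
		fmt.Println("rule1 already present:", err)
	}

	var status string
	if found, err := store.Get("rule1", &status); err == nil && found {
		fmt.Println("rule1 status:", status)
	}
}
```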

+ 6 - 19
common/kv.go

@@ -1,10 +1,11 @@
-package common
+package kv
 
 import (
 	"bytes"
 	"database/sql"
 	"encoding/gob"
 	"fmt"
+	"github.com/emqx/kuiper/common"
 	_ "github.com/mattn/go-sqlite3"
 	"os"
 	"path"
@@ -12,20 +13,6 @@ import (
 	"strings"
 )
 
-type KeyValue interface {
-	Open() error
-	Close() error
-	// Set key to hold string value if key does not exist otherwise return an error
-	Setnx(key string, value interface{}) error
-	// Set key to hold the string value. If key already holds a value, it is overwritten
-	Set(key string, value interface{}) error
-	Get(key string, val interface{}) (bool, error)
-	//Must return *common.Error with NOT_FOUND error
-	Delete(key string) error
-	Keys() (keys []string, err error)
-	Clean() error
-}
-
 type SqliteKVStore struct {
 	db    *sql.DB
 	table string
@@ -33,10 +20,10 @@ type SqliteKVStore struct {
 }
 
 func GetSqliteKVStore(fpath string) (ret *SqliteKVStore) {
-	if _, err := os.Stat(fpath); os.IsNotExist(err) {
-		os.MkdirAll(fpath, os.ModePerm)
-	}
 	dir, file := filepath.Split(fpath)
+	if _, err := os.Stat(dir); os.IsNotExist(err) {
+		os.MkdirAll(dir, os.ModePerm)
+	}
 	ret = new(SqliteKVStore)
 	ret.path = path.Join(dir, "sqliteKV.db")
 	ret.table = file
@@ -120,7 +107,7 @@ func (m *SqliteKVStore) Delete(key string) error {
 	var tmp []byte
 	err := row.Scan(&tmp)
 	if nil != err || 0 == len(tmp) {
-		return NewErrorWithCode(NOT_FOUND, fmt.Sprintf("%s is not found", key))
+		return common.NewErrorWithCode(common.NOT_FOUND, fmt.Sprintf("%s is not found", key))
 	}
 	sql = fmt.Sprintf("DELETE FROM %s WHERE key='%s';", m.table, key)
 	_, err = m.db.Exec(sql)

+ 101 - 0
common/kv/sqliteKV_test.go

@@ -0,0 +1,101 @@
+package kv
+
+import (
+	"os"
+	"path"
+	"path/filepath"
+	"reflect"
+	"testing"
+)
+
+func TestSqliteKVStore_Funcs(t *testing.T) {
+	abs, _ := filepath.Abs("test")
+	if f, _ := os.Stat(abs); f != nil {
+		os.Remove(abs)
+	}
+
+	ks := GetSqliteKVStore(abs)
+	if e := ks.Open(); e != nil {
+		t.Errorf("Failed to open data %s.", e)
+	}
+
+	if err := ks.Setnx("foo", "bar"); nil != err {
+		t.Error(err)
+	}
+
+	var v string
+	if ok, _ := ks.Get("foo", &v); ok {
+		if !reflect.DeepEqual("bar", v) {
+			t.Error("expect:bar", "get:", v)
+		}
+	} else {
+		t.Errorf("Should not find the foo key.")
+	}
+
+	if err := ks.Setnx("foo1", "bar1"); nil != err {
+		t.Error(err)
+	}
+
+	if err := ks.Set("foo1", "bar2"); nil != err {
+		t.Error(err)
+	}
+
+	var v1 string
+	if ok, _ := ks.Get("foo1", &v1); ok {
+		if !reflect.DeepEqual("bar2", v1) {
+			t.Error("expect:bar2", "get:", v1)
+		}
+	} else {
+		t.Errorf("Should not find the foo1 key.")
+	}
+
+	if keys, e1 := ks.Keys(); e1 != nil {
+		t.Errorf("Failed to get value: %s.", e1)
+	} else {
+		if !reflect.DeepEqual(2, len(keys)) {
+			t.Error("expect:2", "get:", len(keys))
+		}
+	}
+
+	if e2 := ks.Close(); e2 != nil {
+		t.Errorf("Failed to close data: %s.", e2)
+	}
+
+	if err := ks.Open(); nil != err {
+		t.Error(err)
+	}
+
+	var v2 string
+	if ok, _ := ks.Get("foo", &v2); ok {
+		if !reflect.DeepEqual("bar", v2) {
+			t.Error("expect:bar", "get:", v)
+		}
+	} else {
+		t.Errorf("Should not find the foo key.")
+	}
+
+	if err := ks.Delete("foo1"); nil != err {
+		t.Error(err)
+	}
+
+	if keys, e1 := ks.Keys(); e1 != nil {
+		t.Errorf("Failed to get value: %s.", e1)
+	} else {
+		reflect.DeepEqual(1, len(keys))
+	}
+
+	if err := ks.Clean(); nil != err {
+		t.Error(err)
+	}
+
+	if keys, e1 := ks.Keys(); e1 != nil {
+		t.Errorf("Failed to get value: %s.", e1)
+	} else {
+		reflect.DeepEqual(0, len(keys))
+	}
+
+	dir, _ := filepath.Split(abs)
+	abs = path.Join(dir, "sqliteKV.db")
+	os.Remove(abs)
+
+}

+ 1 - 0
common/util.go

@@ -124,6 +124,7 @@ func InitConf() {
 			Concurrency:        1,
 			BufferLength:       1024,
 			CheckpointInterval: 300000, //5 minutes
+			SendError:          true,
 		},
 	}
 	if err := yaml.Unmarshal(b, &kc); err != nil {

+ 0 - 95
common/util_test.go

@@ -1,106 +1,11 @@
 package common
 
 import (
-	"os"
-	"path"
-	"path/filepath"
 	"reflect"
 	"strings"
 	"testing"
 )
 
-func TestSqliteKVStore_Funcs(t *testing.T) {
-	abs, _ := filepath.Abs("test")
-	if f, _ := os.Stat(abs); f != nil {
-		os.Remove(abs)
-	}
-
-	ks := GetSqliteKVStore(abs)
-	if e := ks.Open(); e != nil {
-		t.Errorf("Failed to open data %s.", e)
-	}
-
-	if err := ks.Setnx("foo", "bar"); nil != err {
-		t.Error(err)
-	}
-
-	var v string
-	if ok, _ := ks.Get("foo", &v); ok {
-		if !reflect.DeepEqual("bar", v) {
-			t.Error("expect:bar", "get:", v)
-		}
-	} else {
-		t.Errorf("Should not find the foo key.")
-	}
-
-	if err := ks.Setnx("foo1", "bar1"); nil != err {
-		t.Error(err)
-	}
-
-	if err := ks.Set("foo1", "bar2"); nil != err {
-		t.Error(err)
-	}
-
-	var v1 string
-	if ok, _ := ks.Get("foo1", &v1); ok {
-		if !reflect.DeepEqual("bar2", v1) {
-			t.Error("expect:bar2", "get:", v1)
-		}
-	} else {
-		t.Errorf("Should not find the foo1 key.")
-	}
-
-	if keys, e1 := ks.Keys(); e1 != nil {
-		t.Errorf("Failed to get value: %s.", e1)
-	} else {
-		if !reflect.DeepEqual(2, len(keys)) {
-			t.Error("expect:2", "get:", len(keys))
-		}
-	}
-
-	if e2 := ks.Close(); e2 != nil {
-		t.Errorf("Failed to close data: %s.", e2)
-	}
-
-	if err := ks.Open(); nil != err {
-		t.Error(err)
-	}
-
-	var v2 string
-	if ok, _ := ks.Get("foo", &v2); ok {
-		if !reflect.DeepEqual("bar", v2) {
-			t.Error("expect:bar", "get:", v)
-		}
-	} else {
-		t.Errorf("Should not find the foo key.")
-	}
-
-	if err := ks.Delete("foo1"); nil != err {
-		t.Error(err)
-	}
-
-	if keys, e1 := ks.Keys(); e1 != nil {
-		t.Errorf("Failed to get value: %s.", e1)
-	} else {
-		reflect.DeepEqual(1, len(keys))
-	}
-
-	if err := ks.Clean(); nil != err {
-		t.Error(err)
-	}
-
-	if keys, e1 := ks.Keys(); e1 != nil {
-		t.Errorf("Failed to get value: %s.", e1)
-	} else {
-		reflect.DeepEqual(0, len(keys))
-	}
-
-	dir, _ := filepath.Split(abs)
-	abs = path.Join(dir, "sqliteKV.db")
-	os.Remove(abs)
-
-}
-
 func TestMapConvert_Funcs(t *testing.T) {
 	source := map[interface{}]interface{}{
 		"QUERY_TABLE": "VBAP",

+ 2 - 2
deploy/chart/kuiper/Chart.yaml

@@ -14,8 +14,8 @@ type: application
 
 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
-version: 1.1.1
+version: 1.1.2
 
 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application.
-appVersion: 1.1.1
+appVersion: 1.1.2

+ 21 - 6
deploy/docker/Dockerfile-alpine

@@ -11,19 +11,34 @@ RUN make build_with_edgex
 
 FROM alpine:3.12
 
+# Set environment vars
+ENV MAINTAINER="emqx.io" \
+    KUIPER_HOME="/kuiper" \
+    KUIPER__BASIC__CONSOLELOG=true
+
+# These vars are not persisted in the final image layer
+ARG KUIPER_USER="kuiper"
+ARG KUIPER_USER_ID="1001"
+
+# (root) Add packages and "kuiper" user
+RUN apk add sed libzmq
+
 COPY ./deploy/docker/docker-entrypoint.sh /usr/bin/docker-entrypoint.sh
 COPY --from=builder /go/kuiper/kuiper_conf_util /usr/bin/kuiper_conf_util
 COPY --from=builder /go/kuiper/_build/kuiper-* /kuiper/
 
-RUN apk add sed libzmq
+WORKDIR ${KUIPER_HOME}
 
-WORKDIR /kuiper
+# Set appropriate ownership to allow binary full access to KUIPER_HOME dir
+RUN adduser -DH -s /sbin/nologin -u ${KUIPER_USER_ID} ${KUIPER_USER} && \
+    chown -Rh ${KUIPER_USER}:${KUIPER_USER} ${KUIPER_HOME} && \
+    mkdir -p /usr/local/taos && \
+    chown -Rh ${KUIPER_USER}:${KUIPER_USER} /usr/local/taos
 
-ENV MAINTAINER="emqx.io"
-ENV KUIPER_HOME /kuiper
-ENV KUIPER__BASIC__CONSOLELOG true
+# Run the kuiper process under the kuiper user
+USER ${KUIPER_USER}
 
-VOLUME ["/kuiper/etc", "/kuiper/data", "/kuiper/plugins", "/kuiper/log"]
+VOLUME ["${KUIPER_HOME}/etc", "${KUIPER_HOME}/data", "${KUIPER_HOME}/plugins", "${KUIPER_HOME}/log"]
 EXPOSE 9081 20498
 
 ENTRYPOINT ["/usr/bin/docker-entrypoint.sh"]

+ 3 - 0
deploy/packages/deb/Makefile

@@ -16,6 +16,9 @@ all: | $(BUILT)
 $(BUILT):
 	mkdir -p $(TOPDIR) $(SRCDIR)
 	cp -r $(KUIPER_SOURCE)/. $(SRCDIR) 
+	rm -rf $(SRCDIR)/plugins/source/*
+	rm -rf $(SRCDIR)/plugins/sinks/*
+	rm -rf $(SRCDIR)/plugins/functions/*
 
 clean:
 	rm -rf $(TOPDIR)

+ 1 - 1
deploy/packages/deb/debian/rules

@@ -17,7 +17,7 @@ PKG_VSN ?= develop
 ## the generate command EXECUTES in rel/
 build:
 	GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=$(PKG_VSN) -X main.LoadFileType=absolute" -o kuiper xstream/cli/main.go
-	GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=$(PKG_VSN) -X main.LoadFileType=absolute" -o kuiperd xstream/server/main.go
+	GO111MODULE=on CGO_ENABLED=1 go build -trimpath -ldflags="-s -w -X main.Version=$(PKG_VSN) -X main.LoadFileType=absolute" -o kuiperd xstream/server/main.go
 
 clean:
 	dh_clean

+ 1 - 1
deploy/packages/rpm/kuiper.spec

@@ -26,7 +26,7 @@ A lightweight IoT edge analytics software
 %build
 cd %{_code_source}
 GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=%{_version}-%{_release} -X main.LoadFileType=absolute" -o %{_code_source}/kuiper %{_code_source}/xstream/cli/main.go
-GO111MODULE=on CGO_ENABLED=1 go build -ldflags="-s -w -X main.Version=%{_version}-%{_release} -X main.LoadFileType=absolute" -o %{_code_source}/kuiperd %{_code_source}/xstream/server/main.go
+GO111MODULE=on CGO_ENABLED=1 go build -trimpath -ldflags="-s -w -X main.Version=%{_version}-%{_release} -X main.LoadFileType=absolute" -o %{_code_source}/kuiperd %{_code_source}/xstream/server/main.go
 cd -
 
 %install

File diff suppressed because it is too large
+ 50 - 0
docs/en_US/cli/plugins.md


+ 24 - 2
docs/en_US/extension/function.md

@@ -38,14 +38,36 @@ var MyFunction myFunction
 
 The [Echo Function](../../../plugins/functions/echo.go) is a good example.
 
+### Export multiple functions
+
+In one plugin, developers can export multiple functions. Each function must implement [api.Function](../../../xstream/api/stream.go) as described in the [Develop a customized function](#develop-a-customized-function) section. Make sure all functions are exported like:
+
+```go
+var(
+    Function1 function1
+    Function2 function2
+    Functionn functionn
+)
+```
+
+It is a best practice to combine all related functions in a plugin to simplify the build and deployment of functions.
+
 ### Package the source
+
 Build the implemented function as a go plugin and make sure the output so file resides in the plugins/functions folder.
 
 ```bash
-go build --buildmode=plugin -o plugins/functions/MyFunction.so plugins/functions/my_function.go
+go build -trimpath --buildmode=plugin -o plugins/functions/MyFunction.so plugins/functions/my_function.go
 ```
 
-### Usage
+### Register multiple functions
+
+Kuiper loads plugins from the plugin folders automatically. An auto-loaded function plugin is assumed to export a single function with the same name as the plugin. If multiple functions are exported, users need to register them explicitly to make them available. There are two ways to register the functions.
+
+1. In a development environment, we recommend building the plugin .so file directly into the plugin folder so that Kuiper can auto-load it. Then call the [CLI register functions command](../cli/plugins.md#register-functions) or the [REST register functions API](../restapi/plugins.md#register-functions).
+2. In a production environment, [package the plugin into a zip file](../plugins/plugins_tutorial.md#plugin-deployment-1), then call the [CLI function plugin create command](../cli/plugins.md#create-a-plugin) or the [REST function plugin create API](../restapi/plugins.md#create-a-plugin) with the functions list specified.
+
+## Usage
 
 The customized function can be directly used in the SQL of a rule if it follows the below convention.
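
To make the new "Export multiple functions" section above concrete, here is a hedged sketch of a plugin that exports two functions. It assumes the api.Function interface (Validate/Exec/IsAggregate) implemented by the project's existing function plugins such as echo.go; the function names are illustrative and not part of this PR.

```go
// Fragment of a function plugin; built with --buildmode=plugin, so no func main is needed.
package main

import (
	"fmt"
	"strings"

	"github.com/emqx/kuiper/xstream/api"
)

type upperFunc struct{}

func (f *upperFunc) Validate(args []interface{}) error {
	if len(args) != 1 {
		return fmt.Errorf("upper expects one argument but got %d", len(args))
	}
	return nil
}

func (f *upperFunc) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
	s, ok := args[0].(string)
	if !ok {
		return fmt.Errorf("argument is not a string"), false
	}
	return strings.ToUpper(s), true
}

func (f *upperFunc) IsAggregate() bool { return false }

type lowerFunc struct{}

func (f *lowerFunc) Validate(args []interface{}) error {
	if len(args) != 1 {
		return fmt.Errorf("lower expects one argument but got %d", len(args))
	}
	return nil
}

func (f *lowerFunc) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
	s, ok := args[0].(string)
	if !ok {
		return fmt.Errorf("argument is not a string"), false
	}
	return strings.ToLower(s), true
}

func (f *lowerFunc) IsAggregate() bool { return false }

// Export one symbol per function so both can be registered from this plugin.
var (
	Upper upperFunc
	Lower lowerFunc
)
```

After building the .so into plugins/functions, both Upper and Lower would then be registered as described in the "Register multiple functions" section above.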
 

+ 13 - 2
docs/en_US/extension/overview.md

@@ -35,9 +35,10 @@ A typical environment for developing plugins is to put the plugin and Kuiper in
 2. Create the plugin implementation file inside plugins/sources or plugin/sinks or plugin/functions according to what extension type is developing.
 3. Build the file as plugin into the same folder. The build command is typically like:
 ```bash
-go build --buildmode=plugin -o plugins/sources/MySource.so plugins/sources/my_source.go
+go build -trimpath --buildmode=plugin -o plugins/sources/MySource.so plugins/sources/my_source.go
 ```
 
+Note that the `-trimpath` build flag is required when using the prebuilt kuiper binaries or the kuiper docker image, because kuiperd is also built with this flag to improve build reproducibility.
 
 ### Plugin development
 The development of plugins is to implement a specific interface according to the plugin type and export the implementation with a specific name. There are two types of exported symbol supported:
@@ -121,4 +122,14 @@ DeleteState(key string) error
 
 #### State data type
 
-The state can be any type. If the rule [checkpoint mechanism](../rules/state_and_fault_tolerance.md) is enabled, the state will be serialized by [golang gob](https://golang.org/pkg/encoding/gob/). So it is required to be gob compatibile. For custom data type, register the type by ``gob.Register(value interface{})`` .
+The state can be any type. If the rule [checkpoint mechanism](../rules/state_and_fault_tolerance.md) is enabled, the state will be serialized by [golang gob](https://golang.org/pkg/encoding/gob/), so it is required to be gob compatible. For custom data types, register the type by ``gob.Register(value interface{})``.
+
+### Runtime dependencies
+
+Some plugins may need to access dependency files in the file system. It is recommended to put those files under the {{kuiperPath}}/etc/{{pluginType}}/{{pluginName}} directory. When packaging the plugin, put those files in the [etc directory](../restapi/plugins.md#plugin-file-format). After installation, they will be moved to the recommended place.
+
+In the plugin source code, developers can access the file system by getting the Kuiper root path from the context:
+
+```go
+ctx.GetRootPath()
+```
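
As a hedged illustration of the runtime dependency convention above, a function plugin could resolve its installed files like this; the plugin name and file name are placeholders, and it assumes GetRootPath is available on the function context as the section states.

```go
// Fragment of a function plugin; built with --buildmode=plugin, so no func main is needed.
package main

import (
	"io/ioutil"
	"path/filepath"

	"github.com/emqx/kuiper/xstream/api"
)

// loadDependency reads a file that the plugin installer placed under
// etc/functions/<pluginName>; "myPlugin" and "model.conf" are placeholders.
func loadDependency(ctx api.FunctionContext) ([]byte, error) {
	p := filepath.Join(ctx.GetRootPath(), "etc", "functions", "myPlugin", "model.conf")
	return ioutil.ReadFile(p)
}
```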

+ 1 - 1
docs/en_US/extension/sink.md

@@ -49,7 +49,7 @@ The [Memory Sink](../../../plugins/sinks/memory.go) is a good example.
 Build the implemented sink as a go plugin and make sure the output so file resides in the plugins/sinks folder.
 
 ```bash
-go build --buildmode=plugin -o plugins/sinks/MySink.so plugins/sinks/my_sink.go
+go build -trimpath --buildmode=plugin -o plugins/sinks/MySink.so plugins/sinks/my_sink.go
 ```
 
 ### Usage

+ 1 - 1
docs/en_US/extension/source.md

@@ -69,7 +69,7 @@ There are 2 common configuration fields.
 Build the implemented source as a go plugin and make sure the output so file resides in the plugins/sources folder.
 
 ```bash
-go build --buildmode=plugin -o plugins/sources/MySource.so plugins/sources/my_source.go
+go build -trimpath --buildmode=plugin -o plugins/sources/MySource.so plugins/sources/my_source.go
 ```
 
 ### Usage

+ 114 - 1
docs/en_US/plugins/functions/functions.md

@@ -69,4 +69,117 @@ thumbnail(avg,maxWidth, maxHeight) example
   SELECT countPlusOne(avg,maxWidth, maxHeight) as r1 FROM test;
   ```
 
- 
+### Geohash plugin
+
+| Function              | Example                                                  | Description                                                  |
+| --------------------- | -------------------------------------------------------- | ------------------------------------------------------------ |
+| geohashEncode         | geohashEncode(la,lo float64)(string)                     | Encode latitude and longitude as a string                    |
+| geohashEncodeInt      | geohashEncodeInt(la,lo float64)(uint64)                  | Encode latitude and longitude as an unsigned integer         |
+| geohashDecode         | geohashDecode(hash string)(la,lo float64)                | Decode a string into latitude and longitude                  |
+| geohashDecodeInt      | geohashDecodeInt(hash uint64)(la,lo float64)             | Decode an unsigned integer into latitude and longitude       |
+| geohashBoundingBox    | geohashBoundingBox(hash string)(string)                  | Returns the area encoded by a string                         |
+| geohashBoundingBoxInt | geohashBoundingBoxInt(hash uint64)(string)               | Returns the area encoded by an unsigned integer              |
+| geohashNeighbor       | geohashNeighbor(hash string,direction string)(string)    | Returns the neighbor in the corresponding direction of a string (Direction list: North NorthEast East SouthEast South SouthWest West NorthWest) |
+| geohashNeighborInt    | geohashNeighborInt(hash uint64,direction string)(uint64) | Returns the neighbor in the corresponding direction of an unsigned integer (Direction list: North NorthEast East SouthEast South SouthWest West NorthWest) |
+| geohashNeighbors      | geohashNeighbors(hash string)([]string)                  | Return all neighbors of a string                             |
+| geohashNeighborsInt   | geohashNeighborsInt(hash uint64)([]uint64)               | Return all neighbors of an unsigned integer                  |
+
+ geohashEncode example
+
+- Input: `{"lo" :131.036192,"la":-25.345457}` 
+- Output: `{"geohashEncode":"qgmpvf18h86e"}`
+
+```sql
+SELECT geohashEncode(la,lo) FROM test
+```
+
+ geohashEncodeInt example
+
+- Input: `{"lo" :131.036192,"la":-25.345457}` 
+- Output: `{"geohashEncodeInt":12963433097944239317}`
+
+```sql
+SELECT geohashEncodeInt(la,lo) FROM test
+```
+
+ geohashDecode example
+
+- Input: `{"hash" :"qgmpvf18h86e"} ` 
+- Output: `{"geohashDecode":{"Longitude":131.036192,"Latitude":-25.345457099999997}}`
+
+```sql
+SELECT geohashDecode(hash) FROM test
+```
+
+geohashDecodeInt example
+
+- Input: `{"hash" :12963433097944239317}`
+- Output: `{"geohashDecodeInt":{"Longitude":131.03618861,"Latitude":-25.345456300000002}}`
+
+```sql
+SELECT geohashDecodeInt(hash) FROM test
+```
+
+ geohashBoundingBox  example
+
+- Input: `{"hash" :"qgmpvf18h86e"} `
+- Output: `{"geohashBoundingBox":{"MinLat":-25.345457140356302,"MaxLat":-25.34545697271824,"MinLng":131.03619195520878,"MaxLng":131.0361922904849}}`
+
+```sql
+SELECT geohashBoundingBox(hash) FROM test
+```
+
+ geohashBoundingBoxInt  example
+
+- Input: `{"hash" :12963433097944239317}`
+- Output: `{"geohashBoundingBoxInt":{"MinLat":-25.345456302165985,"MaxLat":-25.34545626025647,"MinLng":131.0361886024475,"MaxLng":131.03618868626654}}`
+
+```sql
+SELECT geohashBoundingBoxInt(hash) FROM test
+```
+
+geohashNeighbor example
+
+- Input: `{"hash" :"qgmpvf18h86e","direction":"North"} `
+- Output: `{"geohashNeighbor":"qgmpvf18h86s"}`
+
+```sql
+SELECT geohashNeighbor(hash,direction) FROM test
+```
+
+geohashNeighborInt example
+
+- Input:`{"hash" :12963433097944239317,"direction":"North"}`
+- Output:`{"geohashNeighborInt":12963433097944240129}`
+
+```sql
+SELECT geohashNeighborInt(hash,direction) FROM test
+```
+
+geohashNeighbors example
+
+- Input: `{"hash" :"qgmpvf18h86e"}`
+- Output: `{"geohashNeighbors":["qgmpvf18h86s","qgmpvf18h86u","qgmpvf18h86g","qgmpvf18h86f","qgmpvf18h86d","qgmpvf18h866","qgmpvf18h867","qgmpvf18h86k"]}`
+
+```sql
+SELECT geohashNeighbors(hash) FROM test
+```
+
+geohashNeighborsInt example
+
+- Input: `{"hash" :12963433097944239317}`
+- Output: `{"geohashNeighborsInt":[12963433097944240129,12963433097944240131,12963433097944240130,12963433097944237399,12963433097944237397,12963433097944150015,12963433097944152746,12963433097944152747]}`
+
+```sql
+SELECT geohashNeighborsInt(hash) FROM test
+```
+
+### LabelImage plugin
+
+This is a sample plugin to demonstrate the usage of the TensorFlow Lite (tflite) model interpreter. The function receives a byte array input representing an image and produces the AI label of the image by running the tflite model.
+
+Assuming the input is the byte array of peacock.jpg, the output will be "peacock".
+
+```sql
+SELECT labelImage(self) FROM tfdemo
+```

File diff suppressed because it is too large
+ 233 - 0
docs/en_US/plugins/functions/tensorflow_lite_tutorial.md


+ 3 - 3
docs/en_US/plugins/plugins_tutorial.md

@@ -167,7 +167,7 @@ Developers can locally compile Kuiper and the plugin for debugging, which steps
     1. Run `go mod edit -replace github.com/emqx/kuiper=$kuiperPath` under the plugin project, make the Kuiper dependence point to the local Kuiper, and then please replace the download directory of step 1 by $kuiperPath, the same below.
    2. Compile the plugin so to the directory of Kuiper plugin
    ```go
-   go build --buildmode=plugin -o $kuiperPath/_build/$build/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
+   go build -trimpath --buildmode=plugin -o $kuiperPath/_build/$build/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
    ```
 
 ### Docker compile
@@ -185,7 +185,7 @@ Kuiper provides different docker images for different purpose. The development d
     -- In docker instance
     # cd /home/samplePlugin
     # go mod edit -replace github.com/emqx/kuiper=/go/kuiper
-    # go build --buildmode=plugin -o /home/samplePlugin/target/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
+    # go build -trimpath --buildmode=plugin -o /home/samplePlugin/target/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
     ```
 You can use below sample shell script in your plugin project to automatically build and package the plugins. Please modify the variables at the beginning of the script to meet the requirements of different environments.
 
@@ -199,7 +199,7 @@ export VERSION=0.0.1
 
 go mod edit -replace github.com/emqx/kuiper=$KUIPER_SOURCE
 
-go build --buildmode=plugin -o $PLUGIN_TARGET/sinks/Mysql@v$VERSION.so sinks/mysql.go
+go build -trimpath --buildmode=plugin -o $PLUGIN_TARGET/sinks/Mysql@v$VERSION.so sinks/mysql.go
 
 ## zip the output
 mkdir $ZIP_TARGET/sinks

+ 1 - 1
docs/en_US/plugins/sinks/file.md

@@ -6,7 +6,7 @@ The sink is used for saving analysis result into a specified file.
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/File.so plugins/sinks/file/file.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/File.so plugins/sinks/file/file.go
 # cp plugins/sinks/File.so $kuiper_install/plugins/sinks
 ```
 

+ 1 - 1
docs/en_US/plugins/sinks/image.md

@@ -6,7 +6,7 @@ Sink is used to save the picture to the specified folder.
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/Image.so plugins/sinks/image/image.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/Image.so plugins/sinks/image/image.go
 # cp plugins/sinks/Image.so $kuiper_install/plugins/sinks
 ```
 

+ 1 - 1
docs/en_US/plugins/sinks/influx.md

@@ -11,7 +11,7 @@ Please make following update before compile the plugin,
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/Influx.so plugins/sinks/influx/influx.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/Influx.so plugins/sinks/influx/influx.go
 # zip influx.zip plugins/sinks/Influx.so
 # cp influx.zip /root/tomcat_path/webapps/ROOT/
 # bin/kuiper create plugin sink influx -f /tmp/influxPlugin.txt

+ 1 - 1
docs/en_US/plugins/sinks/tdengine.md

@@ -17,7 +17,7 @@ require (
 
 ```shell
 go mod edit -replace github.com/emqx/kuiper=/$kuiper
-go build --buildmode=plugin -o /$kuiper/plugins/sinks/Tdengine@v1.0.0.so /$kuiper/plugins/sinks/tdengine/tdengine.go
+go build -trimpath --buildmode=plugin -o /$kuiper/plugins/sinks/Tdengine@v1.0.0.so /$kuiper/plugins/sinks/tdengine/tdengine.go
 ```
 ### Install plugin
 Since the operation of the tdengine plug-in depends on the tdengine client, for the convenience of users, the tdengine client will be downloaded when the plug-in is installed. However, the tdengine client version corresponds to the server version one-to-one, which is not compatible with each other, so the user must inform the tdengine server version used.

+ 1 - 1
docs/en_US/plugins/sinks/zmq.md

@@ -6,7 +6,7 @@ The sink will publish the result into a Zero Mq topic.
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/Zmq.so plugins/sinks/zmq/zmq.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/Zmq.so plugins/sinks/zmq/zmq.go
 # cp plugins/sinks/Zmq.so $kuiper_install/plugins/sinks
 ```
 

+ 1 - 1
docs/en_US/plugins/sources/random.md

@@ -6,7 +6,7 @@ he source will generate random inputs with a specified pattern.
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sources/Random.so plugins/sources/random/random.go
+# go build -trimpath --buildmode=plugin -o plugins/sources/Random.so plugins/sources/random/random.go
 # cp plugins/sources/Random.so $kuiper_install/plugins/sources
 ```
 

+ 1 - 1
docs/en_US/plugins/sources/zmq.md

@@ -6,7 +6,7 @@ The source will subscribe to a Zero Mq topic to import the messages into kuiper
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sources/Zmq.so plugins/sources/zmq/zmq.go
+# go build -trimpath --buildmode=plugin -o plugins/sources/Zmq.so plugins/sources/zmq/zmq.go
 # cp plugins/sources/Zmq.so $kuiper_install/plugins/sources
 ```
 

File diff suppressed because it is too large
+ 48 - 1
docs/en_US/restapi/plugins.md


+ 25 - 21
docs/en_US/rules/overview.md

@@ -1,4 +1,4 @@
-# Rules 
+# Rules
 
 Rules are defined by JSON, below is an example.
 
@@ -31,51 +31,51 @@ The following 3 parameters are required for creating a rule.
 | actions           | false    | An array of sink actions        |
 | options           | true    | A map of options        |
 
-## id
+### id
 
 The identification of the rule. The rule name cannot be duplicated in the same Kuiper instance.
 
-## sql
+### sql
 
-The sql query to run for the rule. 
+The sql query to run for the rule.
+
+## Options
 
-### options
 The current options includes:
 
 | Option name | Type & Default Value | Description                                                  |
 | ------------- | -------- | ------------------------------------------------------------ |
-| isEventTime | boolean: false   | Whether to use event time or processing time as the timestamp for an event. If event time is used, the timestamp will be extracted from the payload. The timestamp filed must be specified by the [stream]([extension](../sqls/streams.md)) definition. |
+| isEventTime | boolean: false   | Whether to use event time or processing time as the timestamp for an event. If event time is used, the timestamp will be extracted from the payload. The timestamp field must be specified by the [stream](../sqls/streams.md) definition. |
 | lateTolerance        | int64:0   | When working with event-time windowing, it can happen that elements arrive late. LateTolerance can specify by how much time(unit is millisecond) elements can be late before they are dropped. By default, the value is 0 which means late elements are dropped.  |
 | concurrency | int: 1   | A rule is processed by several phases of plans according to the sql statement. This option will specify how many instances will be run for each plan. If the value is bigger than 1, the order of the messages may not be retained. |
 | bufferLength | int: 1024   | Specify how many messages can be buffered in memory for each plan. If the buffered messages exceed the limit, the plan will block message receiving until the buffered messages have been sent out so that the buffered size is less than the limit. A bigger value will accommodate more throughput but will also take up more memory footprint.  |
 | sendMetaToSink | bool:false   | Specify whether the meta data of an event will be sent to the sink. If true, the sink can get te meta data information.  |
+| sendError  | bool: true | Whether to send the error to sink. If true, any runtime error will be sent through the whole rule into sinks. Otherwise, the error will only be printed out in the log. |
 | qos | int:0   | Specify the qos of the stream. The options are 0: At most once; 1: At least once and 2: Exactly once. If qos is bigger than 0, the checkpoint mechanism will be activated to save states periodically so that the rule can be resumed from errors.  |
 | checkpointInterval | int:300000   | Specify the time interval in milliseconds to trigger a checkpoint. This is only effective when qos is bigger than 0.  |
 
-For detail about `qos` and `checkpointInterval`, please check [state and fault tolerance](state_and_fault_tolerance.md).
+For detail about `qos` and `checkpointInterval`, please check [state and fault tolerance](./state_and_fault_tolerance.md).
 
-The rule options can be defined globally in ``etc/kuiper.yaml`` under the ``rules`` section. The options defined in the rule json will override the global setting. 
+The rule options can be defined globally in ``etc/kuiper.yaml`` under the ``rules`` section. The options defined in the rule json will override the global setting.
 
 ## Sources
 
 - Kuiper provides embeded following 3 sources,
-  - MQTT source, see  [MQTT source stream](sources/mqtt.md) for more detailed info.
-  - EdgeX source by default is shipped in [docker images](https://hub.docker.com/r/emqx/kuiper), but NOT included in single download binary files, you use ``make pkg_with_edgex`` command to build a binary package that supports EdgeX source. Please see [EdgeX source stream](sources/edgex.md) for more detailed info.
-  - HTTP pull source, regularly pull the contents at user's specified interval time, see [here](sources/http_pull.md) for more detailed info. 
+  - MQTT source, see  [MQTT source stream](./sources/mqtt.md) for more detailed info.
+  - EdgeX source by default is shipped in [docker images](https://hub.docker.com/r/emqx/kuiper), but NOT included in the single download binary files; you can use the ``make pkg_with_edgex`` command to build a binary package that supports EdgeX source. Please see [EdgeX source stream](./sources/edgex.md) for more detailed info.
+  - HTTP pull source, regularly pull the contents at user's specified interval time, see [here](./sources/http_pull.md) for more detailed info.
 - See [SQL](../sqls/overview.md) for more info of Kuiper SQL.
 - Sources can be customized, see [extension](../extension/overview.md) for more detailed info.
 
-
-
-### sinks/actions
+## Sinks/Actions
 
 Currently, below kinds of sinks/actions are supported:
 
-- [log](sinks/logs.md): Send the result to log file.
-- [mqtt](sinks/mqtt.md): Send the result to an MQTT broker. 
-- [edgex](sinks/edgex.md): Send the result to EdgeX message bus.
-- [rest](sinks/rest.md): Send the result to a Rest HTTP server.
-- [nop](sinks/nop.md): Send the result to a nop operation.
+- [log](./sinks/logs.md): Send the result to log file.
+- [mqtt](./sinks/mqtt.md): Send the result to an MQTT broker.
+- [edgex](./sinks/edgex.md): Send the result to EdgeX message bus.
+- [rest](./sinks/rest.md): Send the result to a Rest HTTP server.
+- [nop](./sinks/nop.md): Send the result to a nop operation.
 
 Each action can define its own properties. There are several common properties:
 
@@ -93,10 +93,11 @@ Each action can define its own properties. There are several common properties:
 | dataTemplate      | true     | The [golang template](https://golang.org/pkg/html/template) format string to specify the output data format. The input of the template is the sink message which is always an array of map. If no data template is specified, the raw input will be the data. |
 
 ### Data Template
-User can refer to [Use Golang template to customize analaysis result in Kuiper](data_template.md) for more detailed scenarios. 
+
+Users can refer to [Use Golang template to customize analysis result in Kuiper](./data_template.md) for more detailed scenarios.
 If sendSingle is true, the data template will execute against a record; Otherwise, it will execute against the whole array of records. Typical data templates are:
 
-For example, we have the sink input as 
+For example, we have the sink input as
 
 ```
 []map[string]interface{}{{
@@ -107,11 +108,13 @@ For example, we have the sink input as
 ```
 
 In sendSingle=true mode:
+
 - Print out the whole record
 
 ```
 "dataTemplate": "{\"content\":{{json .}}}",
 ```
+
 - Print out the ab field
 
 ```
@@ -124,6 +127,7 @@ if the ab field is a string, add the quotes
 ```
 
 In sendSingle=false mode:
+
 - Print out the whole record array
 
 ```
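
To illustrate how such a dataTemplate evaluates, here is a hedged standalone sketch using Go's text/template with the sample sink input shown above; the `json` helper used in these templates is provided by Kuiper at runtime, so an equivalent function is registered locally to keep the example self-contained.

```go
package main

import (
	"encoding/json"
	"os"
	"text/template"
)

func main() {
	// The sink input shown above.
	input := []map[string]interface{}{
		{"ab": "hello1"},
		{"ab": "hello2"},
	}

	// Local stand-in for the "json" template function used by the dataTemplate.
	funcs := template.FuncMap{
		"json": func(v interface{}) (string, error) {
			b, err := json.Marshal(v)
			return string(b), err
		},
	}

	// sendSingle=true: the template runs once per record.
	tpl := template.Must(template.New("t").Funcs(funcs).Parse(`{"content":{{json .}}}` + "\n"))
	for _, record := range input {
		if err := tpl.Execute(os.Stdout, record); err != nil {
			panic(err)
		}
	}
	// Output:
	// {"content":{"ab":"hello1"}}
	// {"content":{"ab":"hello2"}}
}
```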

+ 12 - 11
docs/en_US/sqls/built-in_functions.md

@@ -28,13 +28,13 @@ Aggregate functions perform a calculation on a set of values and return a single
     SELECT collect(*) as r1 FROM test GROUP BY TumblingWindow(ss, 10)
     ```
   
-- Get the first element's column 'a' value within the current window. The result will be like: `[{"r1":32}]`
+- Get the second element's column 'a' value within the current window. The result will be like: `[{"r1":32}]`
     ```sql
     SELECT collect(*)[1]->a as r1 FROM test GROUP BY TumblingWindow(ss, 10)
     ```
- 
+
 ### Deduplicate() Examples
- 
+
  - Get the whole array of the current window which is deduplicated by column `a`. The result will be like: `[{"r1":{"a":32, "b":"hello"}, {"a":45, "b":"world"}}]`
      ```sql
      SELECT deduplicate(a, true) as r1 FROM test GROUP BY TumblingWindow(ss, 10)
@@ -43,7 +43,7 @@ Aggregate functions perform a calculation on a set of values and return a single
       ```sql
       SELECT deduplicate(a, false)->a as r1 FROM demo GROUP BY SlidingWindow(hh, 1)
       ```
- 
+
 
 ## Mathematical Functions
 | Function | Example     | Description                                    |
@@ -125,10 +125,11 @@ Aggregate functions perform a calculation on a set of values and return a single
 **Please refer to [json path functions](../json_expr.md#json-path-functions) for how to compose a json path.**  
 
 ## Other Functions
-| Function | Example      | Description                                                  |
-| -------- | ------------ | ------------------------------------------------------------ |
-| isNull   | isNull(col1) | Returns true if the argument is the Null value.              |
-| newuuid  | newuuid()    | Returns a random 16-byte UUID.                               |
-| tstamp   | tstamp()     | Returns the current timestamp in milliseconds from 00:00:00 Coordinated Universal Time (UTC), Thursday, 1 January 1970 |
-| mqtt     | mqtt(topic)  | Returns the MQTT meta-data of specified key. The current supported keys<br />- topic: return the topic of message.  If there are multiple stream source, then specify the source name in parameter. Such as ``mqtt(src1.topic)``<br />- messageid: return the message id of message. If there are multiple stream source, then specify the source name in parameter. Such as ``mqtt(src2.messageid)`` |
-| meta     | meta(topic)  | Returns the meta-data of specified key. The key could be:<br/> - a standalone key if there is only one source in the from clause, such as ``meta(device)``<br />- A qualified key to specify the stream, such as ``meta(src1.device)`` <br />- A key with arrow for multi level meta data, such as ``meta(src1.reading->device->name)`` This assumes reading is a map structure meta data. |
+| Function    | Example           | Description                                                  |
+| ----------- | ----------------- | ------------------------------------------------------------ |
+| isNull      | isNull(col1)      | Returns true if the argument is the Null value.              |
+| cardinality | cardinality(col1) | The number of members in the group. Returns 0 if the argument is the Null value. |
+| newuuid     | newuuid()         | Returns a random 16-byte UUID.                               |
+| tstamp      | tstamp()          | Returns the current timestamp in milliseconds from 00:00:00 Coordinated Universal Time (UTC), Thursday, 1 January 1970 |
+| mqtt        | mqtt(topic)       | Returns the MQTT meta-data of specified key. The current supported keys<br />- topic: return the topic of message.  If there are multiple stream source, then specify the source name in parameter. Such as ``mqtt(src1.topic)``<br />- messageid: return the message id of message. If there are multiple stream source, then specify the source name in parameter. Such as ``mqtt(src2.messageid)`` |
+| meta        | meta(topic)       | Returns the meta-data of specified key. The key could be:<br/> - a standalone key if there is only one source in the from clause, such as ``meta(device)``<br />- A qualified key to specify the stream, such as ``meta(src1.device)`` <br />- A key with arrow for multi level meta data, such as ``meta(src1.reading->device->name)`` This assumes reading is a map structure meta data. |

+ 34 - 4
docs/en_US/sqls/json_expr.md

@@ -58,7 +58,7 @@ SELECT name->first AS fname FROM demo
 
 ### Index expression
 
-Index Expressions allow you to select a specific element in a list. It should look similar to array access in common programming languages. Indexing is 0 based.
+Index Expressions allow you to select a specific element in a list. It should look similar to array access in common programming languages. Indexing is 0 based; a negative index counts from the end of the array, so -1 refers to the last element, -2 to the second to last, and so on.
 
 ```
 SELECT children FROM demo
@@ -77,6 +77,24 @@ SELECT children[0] FROM demo
     "children": "Sara"
 }
 
+SELECT children[1] FROM demo
+
+{
+    "children": "Alex"
+}
+
+SELECT children[-1] FROM demo
+
+{
+    "children": "Jack"
+}
+
+SELECT children[-2] FROM demo
+
+{
+    "children": "Alex"
+}
+
 SELECT d.friends[0]->last FROM demo AS d
 
 {
@@ -88,12 +106,24 @@ SELECT d.friends[0]->last FROM demo AS d
 
 Slices allow you to select a contiguous subset of an array. 
 
-``field[from:to]`` If from is not specified, then it means start from the 1st element of array; If to is not specified, then it means end with the last element of array.
+``field[from:to)`` is a half-open interval: the element at `from` is included and the element at `to` is excluded. If from is not specified, the slice starts from the 1st element of the array; if to is not specified, it ends with the last element of the array.
 
 ```
 SELECT children[0:1] FROM demo
 
 {
+    "children": ["Sara"]
+}
+
+SELECT children[1:-1] FROM demo
+
+{
+    "children": ["Alex"]
+}
+
+SELECT children[0:-1] FROM demo
+
+{
     "children": ["Sara","Alex"]
 }
 ```
@@ -111,7 +141,7 @@ SELECT children[:] FROM demo == SELECT children FROM demo
 
 
 ```
-SELECT children[:1] FROM demo
+SELECT children[:2] FROM demo
 
 {
     "children": ["Sara","Alex"]
@@ -124,7 +154,7 @@ SELECT children[:1] FROM demo
 SELECT followers->Group1[:1]->first FROM demo
 
 {
-    "first": ["John","Alice"]
+    "first": ["John"]
 }
 ```
 

File diff suppressed because it is too large
+ 51 - 0
docs/zh_CN/cli/plugins.md


+ 1 - 1
docs/zh_CN/edgex/edgex_rule_engine_tutorial.md

@@ -207,7 +207,7 @@ time="2020-04-17T06:32:31Z" level=info msg="sink result for rule rule1: [{\"bool
 
 ## 监控分析结果
 
-因为所有的分析结果都被发布到``tcp://broker.emqx.io:1883``,你可以直接使用以下的  ``mosquitto_sub`` 命令来监听结果,你也可以参考别的 [MQTT 客户端工具](https://www.emqx.io/blog/mqtt-client-tools).
+因为所有的分析结果都被发布到``tcp://broker.emqx.io:1883``,你可以直接使用以下的  ``mosquitto_sub`` 命令来监听结果,你也可以参考别的 [MQTT 客户端工具](https://www.emqx.cn/blog/mqtt-client-tools).
 
 ```shell
 # mosquitto_sub -h broker.emqx.io -t result

+ 22 - 1
docs/zh_CN/extension/function.md

@@ -38,13 +38,34 @@ var MyFunction myFunction
 
 [Echo Function](../../../plugins/functions/echo.go) 是一个很好的示例。
 
+### 导出多个函数
+
+开发者可在一个函数插件中导出多个函数。每个函数均需实现 [api.Function](../../../xstream/api/stream.go) 接口,正如 [开发一个定制函数](#develop-a-customized-function) 所描述的那样。需要确保所有函数都导出了,如下所示:
+
+```go
+var(
+    Function1 function1
+    Function2 function2
+    Functionn functionn
+)
+```
+
+同一类的函数可以在一个插件里开发和导出以减少构建和部署开销。
+
 ### 源文件打包
 将实现的函数构建为 go 插件,并确保输出 so 文件位于 plugins/functions 文件夹中。
 
 ```bash
-go build --buildmode=plugin -o plugins/functions/MyFunction.so plugins/functions/my_function.go
+go build -trimpath --buildmode=plugin -o plugins/functions/MyFunction.so plugins/functions/my_function.go
 ```
 
+### 注册多个函数
+
+Kuiper 启动时会自动载入插件目录里已编译好的插件。自动载入的函数插件假设插件里仅导出一个同名的函数。如果插件导出多个函数,则需要显式运行一次注册操作。有两种方法可以注册函数:
+
+1. 在开发环境中,建议直接构建插件 .so 文件到插件目录中以便 kuiper 自动载入。构建完成后,运行 [CLI 注册函数命令](../cli/plugins.md#register-functions) 或 [REST 注册函数 API](../restapi/plugins.md#register-functions) 进行注册。
+2. 在生产环境中,[打包插件到 zip 压缩包](../plugins/plugins_tutorial.md#plugin-deployment-1),然后运行 [CLI 创建函数插件命令](../cli/plugins.md#create-a-plugin) 或者 [REST 创建函数 API](../restapi/plugins.md#create-a-plugin) 并设置 functions 参数以指定导出函数名。
+
 ### 使用
 
 如果自定义函数遵循以下约定,则可以直接在规则的 SQL 中使用。

+ 10 - 0
docs/zh_CN/extension/overview.md

@@ -42,3 +42,13 @@ func (f *accumulateWordCountFunc) Exec(args []interface{}, ctx api.FunctionConte
 	}
 }
 ```
+
+### 运行时依赖
+
+有些插件可能需要访问文件系统中的依赖文件。依赖文件建议放置于 {{kuiperPath}}/etc/{{pluginType}}/{{pluginName}} 目录。打包插件时,依赖文件应放置于 [etc 目录](../restapi/plugins.md#plugin-file-format)。安装后,这些文件会自动移动到推荐的位置。
+
+在插件源代码中,开发者可通过 context 获取 Kuiper 根目录,以访问文件系统中的依赖:
+
+```go
+ctx.GetRootPath()
+```

+ 1 - 1
docs/zh_CN/extension/sink.md

@@ -50,7 +50,7 @@ func MySink() api.Sink {
 将实现的 Sink (目标)构建为 go 插件,并确保输出的 so 文件位于 plugins/sinks 文件夹中。
 
 ```bash
-go build --buildmode=plugin -o plugins/sinks/MySink.so plugins/sinks/my_sink.go
+go build -trimpath --buildmode=plugin -o plugins/sinks/MySink.so plugins/sinks/my_sink.go
 ```
 
 ### 使用

+ 1 - 1
docs/zh_CN/extension/source.md

@@ -62,7 +62,7 @@ Kuiper 扩展支持配置系统自动读取 yaml 文件中的配置,并将其
 将已实现的源构建为 go 插件,并确保输出的 so 文件位于 plugins/sources 文件夹中。
 
 ```bash
-go build --buildmode=plugin -o plugins/sources/MySource.so plugins/sources/my_source.go
+go build -trimpath --buildmode=plugin -o plugins/sources/MySource.so plugins/sources/my_source.go
 ```
 
 ### 使用

+ 1 - 1
docs/zh_CN/getting_started.md

@@ -2,7 +2,7 @@
 
 ## 下载和安装
 
-通过 <https://github.com/emqx/kuiper/releases> 或 <https://www.emqx.io/downloads#kuiper> 获取安装包.
+通过 <https://github.com/emqx/kuiper/releases> 或 <https://www.emqx.cn/downloads#kuiper> 获取安装包.
 
 ### zip、tar.gz 压缩包
 

File diff suppressed because it is too large
+ 2 - 2
docs/zh_CN/manager-ui/overview.md


+ 114 - 1
docs/zh_CN/plugins/functions/functions.md

@@ -69,4 +69,117 @@ thumbnail(avg,maxWidth, maxHeight)示例
   SELECT countPlusOne(avg,maxWidth, maxHeight) as r1 FROM test;
   ```
 
- 
+### Geohash 插件
+
+| 函数                  | 示例                                                     | 说明                                                         |
+| --------------------- | -------------------------------------------------------- | ------------------------------------------------------------ |
+| geohashEncode         | geohashEncode(la,lo float64)(string)                     | 将经纬度编码为字符串                                         |
+| geohashEncodeInt      | geohashEncodeInt(la,lo float64)(uint64)                  | 将经纬度编码为无符号整数                                     |
+| geohashDecode         | geohashDecode(hash string)(la,lo float64)                | 将字符串解码为经纬度                                         |
+| geohashDecodeInt      | geohashDecodeInt(hash uint64)(la,lo float64)             | 将无符号整数解码为经纬度                                     |
+| geohashBoundingBox    | geohashBoundingBox(hash string)(string)                  | 返回字符串编码的区域                                         |
+| geohashBoundingBoxInt | geohashBoundingBoxInt(hash uint64)(string)               | 返回无符号整数编码的区域                                     |
+| geohashNeighbor       | geohashNeighbor(hash string,direction string)(string)    | 返回一个字符串对应方向上的邻居(方向列表:North NorthEast East SouthEast South SouthWest West NorthWest) |
+| geohashNeighborInt    | geohashNeighborInt(hash uint64,direction string)(uint64) | 返回一个无符号整数对应方向上的邻居(方向列表:North NorthEast East SouthEast South SouthWest West NorthWest) |
+| geohashNeighbors      | geohashNeighbors(hash string)([]string)                  | 返回一个字符串的所有邻居                                     |
+| geohashNeighborsInt   | geohashNeighborsInt(hash uint64)([]uint64)               | 返回一个无符号整数的所有邻居                                 |
+
+ geohashEncode 示例
+
+- 输入:`{"lo" :131.036192,"la":-25.345457}` 
+- 输出:`{"geohashEncode":"qgmpvf18h86e"}`
+
+```sql
+SELECT geohashEncode(la,lo) FROM test
+```
+
+ geohashEncodeInt 示例
+
+- 输入:`{"lo" :131.036192,"la":-25.345457}` 
+- 输出:`{"geohashEncodeInt":12963433097944239317}`
+
+```sql
+SELECT geohashEncodeInt(la,lo) FROM test
+```
+
+ geohashDecode 示例
+
+- 输入:`{"hash" :"qgmpvf18h86e"} ` 
+- 输出:`{"geohashDecode":{"Longitude":131.036192,"Latitude":-25.345457099999997}}`
+
+```sql
+SELECT geohashDecode(hash) FROM test
+```
+
+geohashDecodeInt 示例
+
+- 输入:`{"hash" :12963433097944239317}`
+- 输出:`{"geohashDecodeInt":{"Longitude":131.03618861,"Latitude":-25.345456300000002}}`
+
+```sql
+SELECT geohashDecodeInt(hash) FROM test
+```
+
+ geohashBoundingBox  示例
+
+- 输入:`{"hash" :"qgmpvf18h86e"} `
+- 输出:`{"geohashBoundingBox":{"MinLat":-25.345457140356302,"MaxLat":-25.34545697271824,"MinLng":131.03619195520878,"MaxLng":131.0361922904849}}`
+
+```sql
+SELECT geohashBoundingBox(hash) FROM test
+```
+
+ geohashBoundingBoxInt  示例
+
+- 输入:`{"hash" :12963433097944239317}`
+- 输出:`{"geohashBoundingBoxInt":{"MinLat":-25.345456302165985,"MaxLat":-25.34545626025647,"MinLng":131.0361886024475,"MaxLng":131.03618868626654}}`
+
+```sql
+SELECT geohashBoundingBoxInt(hash) FROM test
+```
+
+geohashNeighbor 示例
+
+- 输入:`{"hash" :"qgmpvf18h86e","direction":"North"} `
+- 输出:`{"geohashNeighbor":"qgmpvf18h86s"}`
+
+```sql
+SELECT geohashNeighbor(hash,direction) FROM test
+```
+
+geohashNeighborInt 示例
+
+- 输入:`{"hash" :12963433097944239317,"direction":"North"}`
+- 输出:`{"geohashNeighborInt":12963433097944240129}`
+
+```sql
+SELECT geohashNeighborInt(hash,direction) FROM test
+```
+
+geohashNeighbors 示例
+
+- 输入:`{"hash" :"qgmpvf18h86e"}`
+- 输出:`{"geohashNeighbors":["qgmpvf18h86s","qgmpvf18h86u","qgmpvf18h86g","qgmpvf18h86f","qgmpvf18h86d","qgmpvf18h866","qgmpvf18h867","qgmpvf18h86k"]}`
+
+```sql
+SELECT geohashNeighbors(hash) FROM test
+```
+
+geohashNeighborsInt 示例
+
+- 输入:`{"hash" :12963433097944239317}`
+- 输出:`{"geohashNeighborsInt":[12963433097944240129,12963433097944240131,12963433097944240130,12963433097944237399,12963433097944237397,12963433097944150015,12963433097944152746,12963433097944152747]}`
+
+```sql
+SELECT geohashNeighborsInt(hash) FROM test
+```
+
+### LabelImage plugin
+
+该插件为展示如何使用 TensorFlowLite 模型的示例插件。labelImage 函数接收以 bytea 类型表示的图像作为输入,输出根据 tflite 模型计算得到的该图像的标签。
+
+如下 SQL 中,假设输入为 peacock.jpg 文件的二进制流,则输出为字符串 “peacock”。
+
+```sql
+SELECT labelImage(self) FROM tfdemo
+```

File diff suppressed because it is too large
+ 234 - 0
docs/zh_CN/plugins/functions/tensorflow_lite_tutorial.md


+ 1 - 1
docs/zh_CN/plugins/overview.md

@@ -2,7 +2,7 @@ Kuiper 实现了下面的插件,目前这些插件有的是用于描述插件
 
 Kuiper 插件开发者在开发过程中,可以指定元数据文件,这些元数据主要应用于以下方面:
 
-- 插件编译:对于在目录 `plugins/sinks` 和 `plugins/sources` 中的插件,如果开发者提供了相关的元数据文件,那么 Kuiper 在版本发布的时候会自动编译该插件,然后自动上传这些插件到 EMQ 的插件下载网站上: www.emqx.io/downloads/kuiper/vx.x.x/plugins,其中 `x.x.x` 为版本号。
+- 插件编译:对于在目录 `plugins/sinks` 和 `plugins/sources` 中的插件,如果开发者提供了相关的元数据文件,那么 Kuiper 在版本发布的时候会自动编译该插件,然后自动上传这些插件到 EMQ 的插件下载网站上: www.emqx.cn/downloads/kuiper/vx.x.x/plugins,其中 `x.x.x` 为版本号。
 
   **<u>请注意:由于 Golang 插件的局限性,这些自动编译出来的插件能运行在 Kuiper 官方发布的对应版本的容器中;但是对于直接下载的二进制安装包,或者用户自己编译出来的二进制包,这些下载的插件不保证可以正常运行。</u>**
 

+ 3 - 3
docs/zh_CN/plugins/plugins_tutorial.md

@@ -167,7 +167,7 @@ require (
     1. 在插件项目下,运行 `go mod edit -replace github.com/emqx/kuiper=$kuiperPath`,使得 Kuiper 依赖指向本地 Kuiper,请替换 $kuiperPath 到步骤1下载目录,下同。
    2. 编译插件 so 到 Kuiper 插件目录下
    ```go
-    go build --buildmode=plugin -o $kuiperPath/_build/$build/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
+    go build -trimpath --buildmode=plugin -o $kuiperPath/_build/$build/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
    ```
 
 ### Docker 编译
@@ -185,7 +185,7 @@ require (
     -- In docker instance
     # cd /home/samplePlugin
     # go mod edit -replace github.com/emqx/kuiper=/go/kuiper
-    # go build --buildmode=plugin -o /home/samplePlugin/target/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
+    # go build -trimpath --buildmode=plugin -o /home/samplePlugin/target/plugins/sinks/Mysql@v1.0.0.so sinks/mysql.go
     ```
 
 在插件项目中可以使用如下 shell 脚本自动编译及打包插件。修改脚本开头的参数以满足不同环境下的开发调试需求。
@@ -200,7 +200,7 @@ export VERSION=0.0.1
 
 go mod edit -replace github.com/emqx/kuiper=$KUIPER_SOURCE
 
-go build --buildmode=plugin -o $PLUGIN_TARGET/sinks/Mysql@v$VERSION.so sinks/mysql.go
+go build -trimpath --buildmode=plugin -o $PLUGIN_TARGET/sinks/Mysql@v$VERSION.so sinks/mysql.go
 
 ## zip the output
 mkdir $ZIP_TARGET/sinks

+ 1 - 1
docs/zh_CN/plugins/sinks/file.md

@@ -6,7 +6,7 @@
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/File.so plugins/sinks/file/file.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/File.so plugins/sinks/file/file.go
 # cp plugins/sinks/File.so $kuiper_install/plugins/sinks
 ```
 

+ 1 - 1
docs/zh_CN/plugins/sinks/image.md

@@ -6,7 +6,7 @@
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/Image.so plugins/sinks/image/image.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/Image.so plugins/sinks/image/image.go
 # cp plugins/sinks/Image.so $kuiper_install/plugins/sinks
 ```
 

+ 1 - 1
docs/zh_CN/plugins/sinks/influx.md

@@ -10,7 +10,7 @@
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/InfluxDB.so plugins/sinks/influxdb/influxdb.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/InfluxDB.so plugins/sinks/influxdb/influxdb.go
 # zip influx.zip plugins/sinks/InfluxDB.so
 # cp influx.zip /root/tomcat_path/webapps/ROOT/
 # bin/kuiper create plugin sink influx -f /tmp/influxPlugin.txt

+ 1 - 1
docs/zh_CN/plugins/sinks/tdengine.md

@@ -17,7 +17,7 @@ require (
 
 ```shell
 go mod edit -replace github.com/emqx/kuiper=/$kuiper
-go build --buildmode=plugin -o /$kuiper/plugins/sinks/Tdengine@v1.0.0.so /$kuiper/plugins/sinks/tdengine/tdengine.go
+go build -trimpath --buildmode=plugin -o /$kuiper/plugins/sinks/Tdengine@v1.0.0.so /$kuiper/plugins/sinks/tdengine/tdengine.go
 ```
 ### 安装插件
 由于 tdengine 插件的运行依赖于 tdengine 客户端,为了便于用户使用,安装插件时将下载 tdengine 客户端。但是 tdengine 客户端版本与其服务器版本一一对应,互不兼容,所以用户必须告知所用 tdengine 服务器版本。

+ 1 - 1
docs/zh_CN/plugins/sinks/zmq.md

@@ -6,7 +6,7 @@
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sinks/Zmq.so plugins/sinks/zmq/zmq.go
+# go build -trimpath --buildmode=plugin -o plugins/sinks/Zmq.so plugins/sinks/zmq/zmq.go
 # cp plugins/sinks/Zmq.so $kuiper_install/plugins/sinks
 ```
 

+ 1 - 1
docs/zh_CN/plugins/sources/random.md

@@ -6,7 +6,7 @@
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sources/Random.so plugins/sources/random/random.go
+# go build -trimpath --buildmode=plugin -o plugins/sources/Random.so plugins/sources/random/random.go
 # cp plugins/sources/Random.so $kuiper_install/plugins/sources
 ```
 

+ 1 - 1
docs/zh_CN/plugins/sources/zmq.md

@@ -6,7 +6,7 @@
 
 ```shell
 # cd $kuiper_src
-# go build --buildmode=plugin -o plugins/sources/Zmq.so plugins/sources/zmq/zmq.go
+# go build -trimpath --buildmode=plugin -o plugins/sources/Zmq.so plugins/sources/zmq/zmq.go
 # cp plugins/sources/Zmq.so $kuiper_install/plugins/sources
 ```
 

+ 2 - 2
docs/zh_CN/quick_start_docker.md

@@ -2,7 +2,7 @@
 
 1. 从 `https://hub.docker.com/r/emqx/kuiper/tags` 拉取 Kuiper 的 Docker 镜像。在本教程中推荐使用 `alpine` 镜像(参考 [Kuiper Docker](https://hub.docker.com/r/emqx/kuiper) 的内容了解不同 Kuiper Docker 镜像的区别)。 
 
-2. 设置 Kuiper 源为一个 MQTT 服务器。本例使用位于 `tcp://broker.emqx.io:1883` 的 MQTT 服务器, `broker.emqx.io` 是一个由 [EMQ](https://www.emqx.io) 提供的公有 MQTT 服务器。
+2. 设置 Kuiper 源为一个 MQTT 服务器。本例使用位于 `tcp://broker.emqx.io:1883` 的 MQTT 服务器, `broker.emqx.io` 是一个由 [EMQ](https://www.emqx.cn) 提供的公有 MQTT 服务器。
 
    ```shell
    docker run -p 9081:9081 -d --name kuiper -e MQTT_SOURCE__DEFAULT__SERVERS=[tcp://broker.emqx.io:1883] emqx/kuiper:$tag
@@ -26,7 +26,7 @@
    
    ```
 
-4. 您可以使用任何[ MQTT 客户端工具](https://www.emqx.io/cn/blog/mqtt-client-tools)来发布传感器数据到服务器 `tcp://broker.emqx.io:1883`的主题 `devices/device_001/messages` 。以下例子使用 `mosquitto_pub`。
+4. 您可以使用任何[ MQTT 客户端工具](https://www.emqx.cn/blog/mqtt-client-tools)来发布传感器数据到服务器 `tcp://broker.emqx.io:1883`的主题 `devices/device_001/messages` 。以下例子使用 `mosquitto_pub`。
 
    ```shell
    # mosquitto_pub -h broker.emqx.io -m '{"temperature": 40, "humidity" : 20}' -t devices/device_001/messages

File diff suppressed because it is too large
+ 48 - 1
docs/zh_CN/restapi/plugins.md


+ 1 - 1
docs/zh_CN/rules/data_template.md

@@ -49,7 +49,7 @@ Golang 的模版可以作用于各种数据结构,比如 map、切片 (slice)
 
 ### 切片 (slice) 数据按条发送
 
-流入 sink 的数据是一个 `map[string]interface{}` 切片的数据结构,但是用户往目标 sink 发送数据的时候,可能是需要单条的数据,而不是所有的数据。比如在这篇 [Kuiper 与 AWS IoT Hub 集成的文章](https://www.emqx.io/blog/lightweight-edge-computing-emqx-kuiper-and-aws-iot-hub-integration-solution)中所介绍的,规则产生的样例数据如下所示。
+流入 sink 的数据是一个 `map[string]interface{}` 切片的数据结构,但是用户往目标 sink 发送数据的时候,可能是需要单条的数据,而不是所有的数据。比如在这篇 [Kuiper 与 AWS IoT Hub 集成的文章](https://www.emqx.cn/blog/lightweight-edge-computing-emqx-kuiper-and-aws-iot-hub-integration-solution)中所介绍的,规则产生的样例数据如下所示。
 
 ```json
 [

+ 20 - 16
docs/zh_CN/rules/overview.md

@@ -1,4 +1,4 @@
-## 规则
+# 规则
 
 规则由 JSON 定义,下面是一个示例。
 
@@ -22,7 +22,7 @@
 
 创建规则需要以下3个参数。
 
-### 参数
+## 参数
 
 | 参数名 | 是否可选 | 说明                |
 | ------------- | -------- | ------------------------------------------------------------ |
@@ -39,25 +39,26 @@
 
 为规则运行的 sql 查询。
 
-#### 选项
+## 选项
 
 当前的选项包括:
 
 | 选项名             | 类型和默认值 | 说明                                                         |
 | ------------------ | ------------ | ------------------------------------------------------------ |
-| isEventTime        | bool:false   | 使用事件时间还是将时间用作事件的时间戳。 如果使用事件时间,则将从有效负载中提取时间戳。 必须通过 [stream]([extension](../sqls/streams.md)) 定义指定时间戳记。 |
+| isEventTime        | bool:false   | 使用事件时间还是将时间用作事件的时间戳。 如果使用事件时间,则将从有效负载中提取时间戳。 必须通过 [stream](../sqls/streams.md) 定义指定时间戳记。 |
 | lateTolerance      | int64:0      | 在使用事件时间窗口时,可能会出现元素延迟到达的情况。 LateTolerance 可以指定在删除元素之前可以延迟多少时间(单位为 ms)。 默认情况下,该值为0,表示后期元素将被删除。 |
 | concurrency        | int: 1       | 一条规则运行时会根据 sql 语句分解成多个 plan 运行。该参数设置每个 plan 运行的线程数。该参数值大于1时,消息处理顺序可能无法保证。 |
 | bufferLength       | int: 1024    | 指定每个 plan 可缓存消息数。若缓存消息数超过此限制,plan 将阻塞消息接收,直到缓存消息被消费使得缓存消息数目小于限制为止。此选项值越大,则消息吞吐能力越强,但是内存占用也会越多。 |
 | sendMetaToSink     | bool:false   | 指定是否将事件的元数据发送到目标。 如果为 true,则目标可以获取元数据信息。 |
+| sendError  | bool: true | 指定是否将运行时错误发送到目标。如果为 true,则错误会在整个流中传递直到目标。否则,错误会被忽略,仅打印到日志中。 |
 | qos                | int:0        | 指定流的 qos。 值为0对应最多一次; 1对应至少一次,2对应恰好一次。 如果 qos 大于0,将激活检查点机制以定期保存状态,以便可以从错误中恢复规则。 |
 | checkpointInterval | int:300000   | 指定触发检查点的时间间隔(单位为 ms)。 仅当 qos 大于0时才有效。 |
 
-有关 `qos` 和 `checkpointInterval` 的详细信息,请查看[状态和容错](state_and_fault_tolerance.md)。
+有关 `qos` 和 `checkpointInterval` 的详细信息,请查看[状态和容错](./state_and_fault_tolerance.md)。
 
 可以在 `rules` 下属的 `etc/kuiper.yaml` 中全局定义规则选项。 规则 json 中定义的选项将覆盖全局设置。
 
-###
+## 源
 
 - Kuiper 支持以下 3 种内置源:
   - MQTT 源,有关更多详细信息,请参阅 [MQTT source stream](https://github.com/emqx/kuiper/blob/master/docs/zh_CN/rules/sources/mqtt.md)。
@@ -66,15 +67,15 @@
 - 有关Kuiper SQL 的更多信息,请参阅 [SQL](https://github.com/emqx/kuiper/blob/master/docs/zh_CN/sqls/overview.md)。
 - 可以自定义来源,请参阅 [extension](https://github.com/emqx/kuiper/blob/master/docs/zh_CN/extension/overview.md)了解更多详细信息。
 
-#### 目标/动作
+## 目标/动作
 
 当前,支持以下目标/动作:
 
-- [log](sinks/logs.md): 将结果发送到日志文件。
-- [mqtt](sinks/mqtt.md): 将结果发送到 MQTT 消息服务器。 
-- [edgex](sinks/edgex.md): 将结果发送到 EdgeX 消息总线。
-- [rest](sinks/rest.md): 将结果发送到 Rest HTTP 服务器。
-- [nop](sinks/nop.md): 将结果发送到 nop 操作。
+- [log](./sinks/logs.md): 将结果发送到日志文件。
+- [mqtt](./sinks/mqtt.md): 将结果发送到 MQTT 消息服务器。 
+- [edgex](./sinks/edgex.md): 将结果发送到 EdgeX 消息总线。
+- [rest](./sinks/rest.md): 将结果发送到 Rest HTTP 服务器。
+- [nop](./sinks/nop.md): 将结果发送到 nop 操作。
 
 每个动作可以定义自己的属性。当前有以下的公共属性:
 
@@ -88,11 +89,12 @@
 | cacheLength     | int:1024   | 设置最大消息缓存数量。缓存的消息会一直保留直到消息发送成功。缓存消息将按顺序发送,除非运行在异步或者并发模式下。缓存消息会定期存储到磁盘中。  |
 | cacheSaveInterval  | int:1000   | 设置缓存存储间隔时间。需要注意的是,当规则关闭时,缓存会自动存储。该值越大,则缓存保存开销越小,但系统意外退出时缓存丢失的风险变大。 |
 | omitIfEmpty | bool: false | 如果配置项设置为 true,则当 SELECT 结果为空时,该结果将不提供给目标运算符。 |
-| sendSingle        | true     | 输出消息以数组形式接收,该属性意味着是否将结果一一发送。 如果为false,则输出消息将为`{"result":"${the string of received message}"}`。 例如,`{"result":"[{\"count\":30},"\"count\":20}]"}`。否则,结果消息将与实际字段名称一一对应发送。 对于与上述相同的示例,它将发送 `{"count":30}`,然后发送`{"count":20} `到 RESTful 端点。默认为 false。 |
+| sendSingle        | bool: false     | 输出消息以数组形式接收,该属性意味着是否将结果一一发送。 如果为false,则输出消息将为`{"result":"${the string of received message}"}`。 例如,`{"result":"[{\"count\":30},{\"count\":20}]"}`。否则,结果消息将与实际字段名称一一对应发送。 对于与上述相同的示例,它将发送 `{"count":30}`,然后发送`{"count":20}`到 RESTful 端点。默认为 false。 |
 | dataTemplate      | true     | [golang 模板](https://golang.org/pkg/html/template)格式字符串,用于指定输出数据格式。 模板的输入是目标消息,该消息始终是映射数组。 如果未指定数据模板,则将数据作为原始输入。 |
 
-##### 数据模板
-用户可以参考 [Kuiper 中使用 Golang 模版 (template) 定制分析结果](data_template.md) 来获取更多的关于数据模版的使用场景。
+### 数据模板
+
+用户可以参考 [Kuiper 中使用 Golang 模版 (template) 定制分析结果](./data_template.md) 来获取更多的关于数据模版的使用场景。
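
作为补充,下面是一段纯 Go 的示意代码(并非 Kuiper 的内部实现),演示这类 Golang 模板如何基于 map 形式的消息渲染出输出字符串;模板内容与数据均为假设:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// 假设的数据模板与单条消息(sendSingle=true 时模板作用于单条记录)
	tpl := template.Must(template.New("sink").Parse(
		`{"device":"{{.device}}","temperature":{{.temperature}}}`))
	data := map[string]interface{}{"device": "device_001", "temperature": 40}
	// 输出: {"device":"device_001","temperature":40}
	_ = tpl.Execute(os.Stdout, data)
}
```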
 
 如果 sendSingle 为 true,则数据模板将针对某一条记录执行操作; 否则,它将对整个记录数组执行操作。 典型的数据模板是:
 
@@ -107,6 +109,7 @@
 ```
 
 在 sendSingle=true 模式下:
+
 - 打印整个记录
 
 ```
@@ -125,6 +128,7 @@
 ```
 
 在 sendSingle=false 模式下:
+
 - 打印出整个记录数组
 
 ```
@@ -152,7 +156,7 @@
 
 可以自定义动作以支持不同种类的输出,有关更多详细信息,请参见 [extension](../extension/overview.md) 。
 
-##### 模版中支持的函数
+### 模版中支持的函数
 
 Kuiper 扩展了几个可以在模版中使用的函数。
 

+ 10 - 9
docs/zh_CN/sqls/built-in_functions.md

@@ -32,9 +32,9 @@ Kuiper 具有许多内置函数,可以对数据执行计算。
     ```sql
     SELECT collect(*)[1]->a as r1 FROM test GROUP BY TumblingWindow(ss, 10)
     ```
- 
+
 ### Deduplicate() 示例
- 
+
  - 获取当前窗口中,列 `a` 值不重复的所有消息组成的数组。结果为: `[{"r1":{"a":32, "b":"hello"}, {"a":45, "b":"world"}}]`
      ```sql
      SELECT deduplicate(a, true) as r1 FROM test GROUP BY TumblingWindow(ss, 10)
@@ -124,10 +124,11 @@ Kuiper 具有许多内置函数,可以对数据执行计算。
 **请参阅 [json 路径函数](../json_expr.md#json-path-functions) 了解如何编写json路径。**
 
 ## 其它函数
-| 函数    | 示例         | 说明                                                         |
-| ------- | ------------ | ------------------------------------------------------------ |
-| isNull  | isNull(col1) | 如果参数为空值,则返回 true。                                |
-| newuuid | newuuid()    | 返回一个随机的16字节 UUID。                                  |
-| tstamp  | tstamp()     | 返回当前时间戳,以1970年1月1日星期四00:00:00协调世界时(UTC)为单位。 |
-| mqtt    | mqtt(topic)  | 返回指定键的 MQTT 元数据。 当前支持的键包括<br />-topic:返回消息的主题。 如果有多个流源,则在参数中指定源名称。 如 `mqtt(src1.topic)`<br />- messageid:返回消息的消息ID。 如果有多个流源,则在参数中指定源名称。 如 `mqtt(src2.messageid)` |
-| meta    | meta(topic)  | 返回指定键的元数据。 键可能是:<br/>-如果 from 子句中只有一个来源,则为独立键,例如`meta(device)`<br />-用于指定流的合格键,例如 `meta(src1.device)` <br />-用于多级元数据的带有箭头的键,例如 `meta(src1.reading->device->name)`。这里假定读取是地图结构元数据。 |
+| 函数        | 示例              | 说明                                                         |
+| ----------- | ----------------- | ------------------------------------------------------------ |
+| isNull      | isNull(col1)      | 如果参数为空值,则返回 true。                                |
+| cardinality | cardinality(col1) | 组中成员的数量。空值为0。                                    |
+| newuuid     | newuuid()         | 返回一个随机的16字节 UUID。                                  |
+| tstamp      | tstamp()          | 返回当前时间戳,以1970年1月1日星期四00:00:00协调世界时(UTC)为单位。 |
+| mqtt        | mqtt(topic)       | 返回指定键的 MQTT 元数据。 当前支持的键包括<br />-topic:返回消息的主题。 如果有多个流源,则在参数中指定源名称。 如 `mqtt(src1.topic)`<br />- messageid:返回消息的消息ID。 如果有多个流源,则在参数中指定源名称。 如 `mqtt(src2.messageid)` |
+| meta        | meta(topic)       | 返回指定键的元数据。 键可能是:<br/>-如果 from 子句中只有一个来源,则为独立键,例如`meta(device)`<br />-用于指定流的合格键,例如 `meta(src1.device)` <br />-用于多级元数据的带有箭头的键,例如 `meta(src1.reading->device->name)`。这里假定读取是地图结构元数据。 |

+ 40 - 4
docs/zh_CN/sqls/json_expr.md

@@ -60,7 +60,7 @@ SELECT name->first AS fname FROM demo
 
 ### 索引表达式
 
-索引表达式使您可以选择列表中的特定元素。 它看起来应该类似于普通编程语言中的数组访问。 索引从0开始
+索引表达式使您可以选择列表中的特定元素。 它看起来应该类似于普通编程语言中的数组访问。 索引从 0 开始;-1 表示末尾的第一个元素,-2 表示末尾的第二个元素,以此类推。
 
 ```
 SELECT children FROM demo
@@ -79,6 +79,24 @@ SELECT children[0] FROM demo
     "children": "Sara"
 }
 
+SELECT children[1] FROM demo
+
+{
+    "children": "Alex"
+}
+
+SELECT children[-1] FROM demo
+
+{
+    "children": "Jack"
+}
+
+SELECT children[-2] FROM demo
+
+{
+    "children": "Alex"
+}
+
 SELECT d.friends[0]->last FROM demo AS d
 
 {
@@ -90,12 +108,24 @@ SELECT d.friends[0]->last FROM demo AS d
 
 切片允许您选择数组的连续子集。
 
-`field[from:to]` 如果未指定 from,则表示从数组的第一个元素开始; 如果未指定 to,则表示以数组的最后一个元素结尾。
+`field[from:to]` 为前闭后开区间,不包含下标为 to 的元素。如果未指定 from,则表示从数组的第一个元素开始; 如果未指定 to,则表示以数组的最后一个元素结尾。
 
 ```
 SELECT children[0:1] FROM demo
 
 {
+    "children": ["Sara"]
+}
+
+SELECT children[1:-1] FROM demo
+
+{
+    "children": ["Alex"]
+}
+
+SELECT children[0:-1] FROM demo
+
+{
     "children": ["Sara","Alex"]
 }
 ```
@@ -113,7 +143,13 @@ SELECT children[:] FROM demo == SELECT children FROM demo
 
 
 ```
-SELECT children[:1] FROM demo
+SELECT children[:2] FROM demo
+
+{
+    "children": ["Sara","Alex"]
+}
+
+SELECT children[:-1] FROM demo
 
 {
     "children": ["Sara","Alex"]
@@ -126,7 +162,7 @@ SELECT children[:1] FROM demo
 SELECT followers->Group1[:1]->first FROM demo
 
 {
-    "first": ["John","Alice"]
+    "first": ["John"]
 }
 ```
 

+ 92 - 0
etc/functions/geohash.json

@@ -0,0 +1,92 @@
+{
+	"about": {
+		"trial": false,
+		"author": {
+			"name": "EMQ",
+			"email": "contact@emqx.io",
+			"company": "EMQ Technologies Co., Ltd",
+			"website": "https://www.emqx.io"
+		},
+		"helpUrl": {
+			"en_US": "https://github.com/emqx/kuiper/blob/master/docs/en_US/plugins/functions/functions.md",
+			"zh_CN": "https://github.com/emqx/kuiper/blob/master/docs/zh_CN/plugins/functions/functions.md"
+		},
+		"description": {
+			"en_US": "",
+			"zh_CN": ""
+		}
+	},
+	"libs": ["github.com/mmcloughlin/geohash@master"],
+  "name":"geohash",
+	"functions": [{
+		"name": "geohashEncode",
+		"example": "geohashEncode(la,lo )",
+		"hint": {
+			"en_US": "Encode latitude and longitude as characters",
+			"zh_CN": "将经纬度编码为字符"
+		}
+	}, {
+		"name": "geohashEncodeInt",
+		"example": "geohashEncodeInt(la,lo )",
+		"hint": {
+			"en_US": "Encode latitude and longitude as numbers",
+			"zh_CN": "将经纬度编码为数字"
+		}
+	}, {
+		"name": "geohashDecode",
+		"example": "geohashDecode(hash )",
+		"hint": {
+			"en_US": "Decode characters into latitude and longitude",
+			"zh_CN": "将字符解码为经纬度"
+		}
+	}, {
+		"name": "geohashDecodeInt",
+		"example": "geohashDecodeInt(hash)",
+		"hint": {
+			"en_US": "Decode numbers into latitude and longitude",
+			"zh_CN": "将数字解码为经纬度"
+		}
+	}, {
+		"name": "geohashBoundingBox",
+		"example": "geohashBoundingBox(hash )",
+		"hint": {
+			"en_US": "Area for calculating character codes",
+			"zh_CN": "计算字符编码的区域"
+		}
+	}, {
+		"name": "geohashBoundingBoxInt",
+		"example": "geohashBoundingBoxInt(hash)",
+		"hint": {
+			"en_US": "Calculate the area of digital coding",
+			"zh_CN": "计算数字编码的区域"
+		}
+	}, {
+		"name": "geohashNeighbor",
+		"example": "geohashNeighbor(hash,direction )",
+		"hint": {
+			"en_US": "Calculate the neighbor of the corresponding direction of the character encoding",
+			"zh_CN": "计算字符编码对应方向的邻居"
+		}
+	}, {
+		"name": "geohashNeighborInt",
+		"example": "geohashNeighborInt(hash,direction )",
+		"hint": {
+			"en_US": "Calculate the neighbors in the corresponding direction of the digital code",
+			"zh_CN": "计算数字编码对应方向的邻居"
+		}
+	}, {
+		"name": "geohashNeighbors",
+		"example": "geohashNeighbors(hash)",
+		"hint": {
+			"en_US": "Calculate all neighbors of character encoding",
+			"zh_CN": "计算字符编码的所有邻居"
+		}
+	}, {
+		"name": "geohashNeighborsInt",
+		"example": "geohashNeighborsInt(hash)",
+		"hint": {
+			"en_US": "Calculate all neighbors of digital encoding",
+			"zh_CN": "计算数字编码的所有邻居"
+		}
+	}]
+}

+ 11 - 6
etc/functions/resize.json

@@ -8,17 +8,16 @@
 			"website": "https://www.emqx.io"
 		},
 		"helpUrl": {
-      "en_US": "https://github.com/emqx/kuiper/blob/master/docs/en_US/plugins/functions/functions.md",
-      "zh_CN": "https://github.com/emqx/kuiper/blob/master/docs/zh_CN/plugins/functions/functions.md"
+			"en_US": "https://github.com/emqx/kuiper/blob/master/docs/en_US/plugins/functions/functions.md",
+			"zh_CN": "https://github.com/emqx/kuiper/blob/master/docs/zh_CN/plugins/functions/functions.md"
 		},
 		"description": {
 			"en_US": "",
 			"zh_CN": ""
 		}
 	},
-	"libs": [
-	  "github.com/nfnt/resize@master"
-	],
+	"libs": ["github.com/nfnt/resize@master"],
+	"name": "image",
 	"functions": [{
 		"name": "resize",
 		"example": "resize(image,width, height)",
@@ -26,6 +25,12 @@
 			"en_US": "Creates a scaled image with new dimensions (width, height) .If either width or height is set to 0, it will be set to an aspect ratio preserving value.",
 			"zh_CN": "创建具有新尺寸(宽度,高度)的缩放图像。如果width或height设置为0,则将其设置为长宽比保留值。"
 		}
+	}, {
+		"name": "thumbnail",
+		"example": "thumbnail(image,maxWidth, maxHeight)",
+		"hint": {
+			"en_US": "Downscales an image preserving its aspect ratio to the maximum dimensions (maxWidth, maxHeight).",
+			"zh_CN": "将保留宽高比的图像缩小到最大尺寸(maxWidth,maxHeight)。"
+		}
 	}]
 }
-

+ 4 - 8
etc/functions/thumbnail.json

@@ -16,16 +16,12 @@
 			"zh_CN": ""
 		}
 	},
-	"libs": [
-	  "github.com/nfnt/resize@master"
-	],
 	"functions": [{
-		"name": "thumbnail",
-		"example": "thumbnail(image,maxWidth, maxHeight)",
+		"name": "labelImage",
+		"example": "labelImage(col1)",
 		"hint": {
-			"en_US": "Downscales an image preserving its aspect ratio to the maximum dimensions (maxWidth, maxHeight).",
-			"zh_CN": "将保留宽高比的图像缩小到最大尺寸(maxWidth,maxHeight)。"
+			"en_US": "Label an image by tensorflow lite model.",
+			"zh_CN": "采用 tensorflow lite 模型标记图片。"
 		}
 	}]
 }
-

+ 2 - 0
etc/kuiper.yaml

@@ -37,6 +37,8 @@ rule:
   qos: 0
   # The interval in millisecond to run the checkpoint mechanism.
   checkpointInterval: 300000
+  # Whether to send errors to sinks
+  sendError: true
 
 sink:
   # The cache persistence threshold size. If the message in sink cache is larger than 10, then it triggers persistence. If you find

+ 1 - 1
etc/sources/httppull.yaml

@@ -22,4 +22,4 @@ default:
 #Override the global configurations
 application_conf: #Conf_key
   incremental: true
-  url: http://localhost:9090/pull
+  url: http://localhost:9090/

+ 2 - 2
etc/sources/random.json

@@ -87,8 +87,8 @@
         "control": "list",
         "type": "list_object",
         "hint": {
-          "en_US": "The pattern to be generated by the source",
-          "zh_CN": "源生成的样式"
+          "en_US": "The pattern generated by the source. Multiple fields can be defined. The pattern is a JSON object, for example {\"count\":50}.",
+          "zh_CN": "源生成的样式,可定义多个字段。样式为json,例如{\"count\":50}"
         },
         "label": {
           "en_US": "Pattern",

+ 74 - 0
fvt_scripts/binary_image_process.jmx

@@ -108,6 +108,80 @@
             <stringProp name="mqtt.reconn_attampt_max">0</stringProp>
           </net.xmeter.samplers.ConnectSampler>
           <hashTree/>
+          <HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="API_AddPlugin" enabled="true">
+            <boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
+            <elementProp name="HTTPsampler.Arguments" elementType="Arguments">
+              <collectionProp name="Arguments.arguments">
+                <elementProp name="" elementType="HTTPArgument">
+                  <boolProp name="HTTPArgument.always_encode">false</boolProp>
+                  <stringProp name="Argument.value">{&#xd;
+                    &quot;name&quot;:&quot;image&quot;,&#xd;
+                    &quot;file&quot;:&quot;http://127.0.0.1:9090/plugins/image.zip&quot;,&#xd;
+                    &quot;functions&quot;:[&quot;resize&quot;,&quot;thumbnail&quot;]&#xd;
+                    }</stringProp>
+                  <stringProp name="Argument.metadata">=</stringProp>
+                </elementProp>
+              </collectionProp>
+            </elementProp>
+            <stringProp name="HTTPSampler.domain">${srv}</stringProp>
+            <stringProp name="HTTPSampler.port">${rest_port}</stringProp>
+            <stringProp name="HTTPSampler.protocol"></stringProp>
+            <stringProp name="HTTPSampler.contentEncoding"></stringProp>
+            <stringProp name="HTTPSampler.path">/plugins/functions</stringProp>
+            <stringProp name="HTTPSampler.method">POST</stringProp>
+            <boolProp name="HTTPSampler.follow_redirects">true</boolProp>
+            <boolProp name="HTTPSampler.auto_redirects">false</boolProp>
+            <boolProp name="HTTPSampler.use_keepalive">true</boolProp>
+            <boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
+            <stringProp name="HTTPSampler.embedded_url_re"></stringProp>
+            <stringProp name="HTTPSampler.connect_timeout"></stringProp>
+            <stringProp name="HTTPSampler.response_timeout"></stringProp>
+          </HTTPSamplerProxy>
+          <hashTree>
+            <ResponseAssertion guiclass="AssertionGui" testclass="ResponseAssertion" testname="Response Assertion" enabled="true">
+              <collectionProp name="Asserion.test_strings">
+                <stringProp name="49587">201</stringProp>
+              </collectionProp>
+              <stringProp name="Assertion.custom_message"></stringProp>
+              <stringProp name="Assertion.test_field">Assertion.response_code</stringProp>
+              <boolProp name="Assertion.assume_success">true</boolProp>
+              <intProp name="Assertion.test_type">16</intProp>
+            </ResponseAssertion>
+            <hashTree/>
+            <ConstantTimer guiclass="ConstantTimerGui" testclass="ConstantTimer" testname="Constant Timer" enabled="true">
+              <stringProp name="ConstantTimer.delay">500</stringProp>
+            </ConstantTimer>
+            <hashTree/>
+          </hashTree>
+          <HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="API_GetFunc" enabled="true">
+            <elementProp name="HTTPsampler.Arguments" elementType="Arguments" guiclass="HTTPArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
+              <collectionProp name="Arguments.arguments"/>
+            </elementProp>
+            <stringProp name="HTTPSampler.domain">${srv}</stringProp>
+            <stringProp name="HTTPSampler.port">${rest_port}</stringProp>
+            <stringProp name="HTTPSampler.protocol"></stringProp>
+            <stringProp name="HTTPSampler.contentEncoding"></stringProp>
+            <stringProp name="HTTPSampler.path">/plugins/udfs/resize</stringProp>
+            <stringProp name="HTTPSampler.method">GET</stringProp>
+            <boolProp name="HTTPSampler.follow_redirects">true</boolProp>
+            <boolProp name="HTTPSampler.auto_redirects">false</boolProp>
+            <boolProp name="HTTPSampler.use_keepalive">true</boolProp>
+            <boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
+            <stringProp name="HTTPSampler.embedded_url_re"></stringProp>
+            <stringProp name="HTTPSampler.connect_timeout"></stringProp>
+            <stringProp name="HTTPSampler.response_timeout"></stringProp>
+          </HTTPSamplerProxy>
+          <hashTree>
+            <JSONPathAssertion guiclass="JSONPathAssertionGui" testclass="JSONPathAssertion" testname="JSON Assertion" enabled="true">
+              <stringProp name="JSON_PATH">$.plugin</stringProp>
+              <stringProp name="EXPECTED_VALUE">image</stringProp>
+              <boolProp name="JSONVALIDATION">true</boolProp>
+              <boolProp name="EXPECT_NULL">false</boolProp>
+              <boolProp name="INVERT">false</boolProp>
+              <boolProp name="ISREGEX">false</boolProp>
+            </JSONPathAssertion>
+            <hashTree/>
+          </hashTree>
           <HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="API_CreateStream" enabled="true">
             <boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
             <elementProp name="HTTPsampler.Arguments" elementType="Arguments">

+ 1 - 1
fvt_scripts/http_pull_rule.jmx

@@ -93,7 +93,7 @@
                 <elementProp name="" elementType="HTTPArgument">
                   <boolProp name="HTTPArgument.always_encode">false</boolProp>
                   <stringProp name="Argument.value">{&#xd;
-&quot;sql&quot; : &quot;create stream demo (Temperature float, humidity bigint) WITH (FORMAT=\&quot;JSON\&quot;, TYPE=\&quot;httppull\&quot; DATASOURCE=\&quot;devices/+/messages\&quot; Conf_key=\&quot;application_conf\&quot;)&quot;&#xd;
+&quot;sql&quot; : &quot;create stream demo (Temperature float, humidity bigint) WITH (FORMAT=\&quot;JSON\&quot;, TYPE=\&quot;httppull\&quot; DATASOURCE=\&quot;pull\&quot; Conf_key=\&quot;application_conf\&quot;)&quot;&#xd;
 }</stringProp>
                   <stringProp name="Argument.metadata">=</stringProp>
                 </elementProp>

+ 16 - 1
fvt_scripts/prepare_plugins.sh

@@ -16,7 +16,7 @@ if [ -f "$FILE" ]; then
     echo "$FILE exists, not required to build plugin."
 else
     echo "$FILE does not exist, will build the plugin."
-    go build --buildmode=plugin -o ../plugins/sources/Zmq.so ../plugins/sources/zmq/zmq.go
+    go build -trimpath --buildmode=plugin -o ../plugins/sources/Zmq.so ../plugins/sources/zmq/zmq.go
 fi
 
 mv ../plugins/sources/Zmq.so .
@@ -24,9 +24,24 @@ cp plugins/zmq.yaml .
 zip zmq.zip Zmq.so zmq.yaml
 rm -rf zmq.yaml Zmq.so
 
+rm -rf image.* Image.so
+
+FILE=../plugins/functions/Image.so
+if [ -f "$FILE" ]; then
+    echo "$FILE exists, not required to build plugin."
+else
+    echo "$FILE does not exist, will build the plugin."
+    go build -trimpath --buildmode=plugin -o ../plugins/functions/Image.so ../plugins/functions/image/*.go
+fi
+
+mv ../plugins/functions/Image.so .
+zip image.zip Image.so
+rm -rf Image.so
+
 rm -rf plugins/service/web/plugins/
 mkdir -p plugins/service/web/plugins/
 mv zmq.zip plugins/service/web/plugins/
+mv image.zip plugins/service/web/plugins/
 
 cd plugins/service/
 export BUILD_ID=dontKillMe

+ 4 - 1
plugins/funcMeta.go

@@ -16,6 +16,7 @@ type (
 	}
 	fileFuncs struct {
 		About   *fileAbout  `json:"about"`
+		Name    string      `json:"name"`
 		FiFuncs []*fileFunc `json:"functions"`
 	}
 	uiFunc struct {
@@ -25,12 +26,13 @@ type (
 	}
 	uiFuncs struct {
 		About   *about    `json:"about"`
+		Name    string    `json:"name"`
 		UiFuncs []*uiFunc `json:"functions"`
 	}
 )
 
 func isInternalFunc(fiName string) bool {
-	internal := []string{`accumulateWordCount.json`, `countPlusOne.json`, `echo.json`, `internal.json`, "windows.json", "thumbnail.json", "resize.json"}
+	internal := []string{`accumulateWordCount.json`, `countPlusOne.json`, `echo.json`, `internal.json`, "windows.json", "image.json", "geohash.json"}
 	for _, v := range internal {
 		if v == fiName {
 			return true
@@ -44,6 +46,7 @@ func newUiFuncs(fi *fileFuncs) *uiFuncs {
 	}
 	uis := new(uiFuncs)
 	uis.About = newAbout(fi.About)
+	uis.Name = fi.Name
 	for _, v := range fi.FiFuncs {
 		ui := new(uiFunc)
 		ui.Name = v.Name

+ 263 - 0
plugins/functions/geohash/geohash.go

@@ -0,0 +1,263 @@
+package main
+
+import (
+	"fmt"
+	"github.com/emqx/kuiper/xstream/api"
+	"github.com/mmcloughlin/geohash"
+)
+
+type geohashEncode struct {
+}
+type geohashEncodeInt struct {
+}
+type geohashDecode struct {
+}
+type geohashDecodeInt struct {
+}
+type geohashBoundingBox struct {
+}
+type geohashBoundingBoxInt struct {
+}
+type geohashNeighbor struct {
+}
+type geohashNeighborInt struct {
+}
+type geohashNeighbors struct {
+}
+type geohashNeighborsInt struct {
+}
+type position struct {
+	Longitude float64
+	Latitude  float64
+}
+
+var (
+	GeohashEncode         geohashEncode
+	GeohashEncodeInt      geohashEncodeInt
+	GeohashDecode         geohashDecode
+	GeohashDecodeInt      geohashDecodeInt
+	GeohashBoundingBox    geohashBoundingBox
+	GeohashBoundingBoxInt geohashBoundingBoxInt
+	GeohashNeighbor       geohashNeighbor
+	GeohashNeighborInt    geohashNeighborInt
+	GeohashNeighbors      geohashNeighbors
+	GeohashNeighborsInt   geohashNeighborsInt
+	g_direction           = map[string]geohash.Direction{
+		"North":     geohash.North,
+		"NorthEast": geohash.NorthEast,
+		"East":      geohash.East,
+		"SouthEast": geohash.SouthEast,
+		"South":     geohash.South,
+		"SouthWest": geohash.SouthWest,
+		"West":      geohash.West,
+		"NorthWest": geohash.NorthWest}
+)
+
+func (r *geohashEncode) IsAggregate() bool {
+	return false
+}
+func (r *geohashEncodeInt) IsAggregate() bool {
+	return false
+}
+func (r *geohashDecode) IsAggregate() bool {
+	return false
+}
+func (r *geohashDecodeInt) IsAggregate() bool {
+	return false
+}
+func (r *geohashBoundingBox) IsAggregate() bool {
+	return false
+}
+func (r *geohashBoundingBoxInt) IsAggregate() bool {
+	return false
+}
+func (r *geohashNeighbor) IsAggregate() bool {
+	return false
+}
+func (r *geohashNeighborInt) IsAggregate() bool {
+	return false
+}
+func (r *geohashNeighbors) IsAggregate() bool {
+	return false
+}
+func (r *geohashNeighborsInt) IsAggregate() bool {
+	return false
+}
+
+func (r *geohashEncode) Validate(args []interface{}) error {
+	if len(args) != 2 {
+		return fmt.Errorf("The geohashEncode function supports 2 parameters, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashEncodeInt) Validate(args []interface{}) error {
+	if len(args) != 2 {
+		return fmt.Errorf("The geohashEncodeInt function supports 2 parameters, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashDecode) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("The geohashDecode function supports 1 parameter, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashDecodeInt) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("The geohashDecodeInt function supports 1 parameter, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashBoundingBox) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("The geohashBoundingBox function supports 1 parameter, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashBoundingBoxInt) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("The geohashBoundingBoxInt function supports 1 parameter, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashNeighbor) Validate(args []interface{}) error {
+	if len(args) != 2 {
+		return fmt.Errorf("The geohashNeighbor function supports 2 parameters, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashNeighborInt) Validate(args []interface{}) error {
+	if len(args) != 2 {
+		return fmt.Errorf("The geohashNeighborInt function supports 2 parameters, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashNeighbors) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("The geohashNeighbors function supports 1 parameter, but got %d", len(args))
+	}
+	return nil
+}
+func (r *geohashNeighborsInt) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("The geohashNeighborsInt function supports 1 parameter, but got %d", len(args))
+	}
+	return nil
+}
+
+func (r *geohashEncode) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	la, ok := args[0].(float64)
+	if !ok {
+		return fmt.Errorf("arg[0] is not a float, got %v", args[0]), false
+	}
+	lo, ok := args[1].(float64)
+	if !ok {
+		return fmt.Errorf("arg[1] is not a float, got %v", args[1]), false
+	}
+	return geohash.Encode(la, lo), true
+}
+func (r *geohashEncodeInt) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	la, ok := args[0].(float64)
+	if !ok {
+		return fmt.Errorf("arg[0] is not a float, got %v", args[0]), false
+	}
+	lo, ok := args[1].(float64)
+	if !ok {
+		return fmt.Errorf("arg[1] is not a float, got %v", args[1]), false
+	}
+	return geohash.EncodeInt(la, lo), true
+}
+
+func (r *geohashDecode) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(string)
+	if !ok || 0 == len(hash) {
+		return fmt.Errorf("arg[0] is not a string, got %v", args[0]), false
+	}
+	if err := geohash.Validate(hash); nil != err {
+		return err, false
+	}
+	la, lo := geohash.Decode(hash)
+	return position{Longitude: lo, Latitude: la}, true
+}
+func (r *geohashDecodeInt) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(uint64)
+	if !ok || 0 > hash {
+		return fmt.Errorf("arg[0] is not a bigint, got %v", args[0]), false
+	}
+	la, lo := geohash.DecodeInt(hash)
+	return position{Longitude: lo, Latitude: la}, true
+}
+func (r *geohashBoundingBox) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(string)
+	if !ok || 0 == len(hash) {
+		return fmt.Errorf("arg[0] is not a string, got %v", args[0]), false
+	}
+	if err := geohash.Validate(hash); nil != err {
+		return err, false
+	}
+	return geohash.BoundingBox(hash), true
+}
+func (r *geohashBoundingBoxInt) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(uint64)
+	if !ok || 0 > hash {
+		return fmt.Errorf("arg[0] is not a bigint, got %v", args[0]), false
+	}
+	return geohash.BoundingBoxInt(hash), true
+}
+func (r *geohashNeighbor) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(string)
+	if !ok || 0 == len(hash) {
+		return fmt.Errorf("arg[0] is not a string, got %v", args[0]), false
+	}
+	if err := geohash.Validate(hash); nil != err {
+		return err, false
+	}
+	var directionCode geohash.Direction
+	direction, ok := args[1].(string)
+	if !ok || 0 == len(direction) {
+		return fmt.Errorf("arg[1] is not a string, got %v", args[1]), false
+	} else {
+		directionCode, ok = g_direction[direction]
+		if !ok {
+			return fmt.Errorf("arg[1] is not a valid direction, got %v", args[1]), false
+		}
+
+	}
+	return geohash.Neighbor(hash, directionCode), true
+}
+func (r *geohashNeighborInt) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(uint64)
+	if !ok || 0 > hash {
+		return fmt.Errorf("arg[0] is not a bigint, got %v", args[0]), false
+	}
+	var directionCode geohash.Direction
+	direction, ok := args[1].(string)
+	if !ok || 0 == len(direction) {
+		return fmt.Errorf("arg[1] is not a string, got %v", args[1]), false
+	} else {
+		directionCode, ok = g_direction[direction]
+		if !ok {
+			return fmt.Errorf("arg[1] is not a valid direction, got %v", args[1]), false
+		}
+	}
+	return geohash.NeighborInt(hash, directionCode), true
+}
+
+func (r *geohashNeighbors) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(string)
+	if !ok || 0 == len(hash) {
+		return fmt.Errorf("arg[0] is not a string, got %v", args[0]), false
+	}
+	if err := geohash.Validate(hash); nil != err {
+		return err, false
+	}
+	return geohash.Neighbors(hash), true
+}
+
+func (r *geohashNeighborsInt) Exec(args []interface{}, _ api.FunctionContext) (interface{}, bool) {
+	hash, ok := args[0].(uint64)
+	if !ok || 0 > hash {
+		return fmt.Errorf("arg[0] is not a bigint, got %v", args[0]), false
+	}
+	return geohash.NeighborsInt(hash), true
+}

+ 6 - 0
plugins/functions/image/exports.go

@@ -0,0 +1,6 @@
+package main
+
+var (
+	Thumbnail thumbnail
+	Resize    imageResize
+)

+ 0 - 2
plugins/functions/resize/resize.go

@@ -58,5 +58,3 @@ func (f *imageResize) Exec(args []interface{}, _ api.FunctionContext) (interface
 func (f *imageResize) IsAggregate() bool {
 	return false
 }
-
-var Resize imageResize

+ 0 - 2
plugins/functions/thumbnail/thumbnail.go

@@ -58,5 +58,3 @@ func (f *thumbnail) Exec(args []interface{}, _ api.FunctionContext) (interface{}
 func (f *thumbnail) IsAggregate() bool {
 	return false
 }
-
-var Thumbnail thumbnail

File diff suppressed because it is too large
+ 1001 - 0
plugins/functions/labelImage/etc/labels.txt


BIN
plugins/functions/labelImage/etc/mobilenet_quant_v1_224.tflite


+ 24 - 0
plugins/functions/labelImage/install.sh

@@ -0,0 +1,24 @@
+#!/bin/sh
+dir=/usr/local/tflite
+cur=$(dirname "$0")
+echo "Base path $cur" 
+if [ -d "$dir" ]; then
+    echo "SDK path $dir exists." 
+else
+    echo "Creating SDK path $dir"
+    mkdir -p $dir
+    echo "Created SDK path $dir"
+    echo "Moving libs"
+    cp -R $cur/lib $dir
+    echo "Moved libs"
+fi
+
+if [ -f "/etc/ld.so.conf.d/tflite.conf" ]; then
+    echo "/etc/ld.so.conf.d/tflite.conf exists"
+else
+    echo "Copy conf file"
+    cp $cur/tflite.conf /etc/ld.so.conf.d/
+    echo "Copied conf file"
+fi
+ldconfig
+echo "Done"

+ 170 - 0
plugins/functions/labelImage/labelImage.go

@@ -0,0 +1,170 @@
+// +build tflite
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"github.com/emqx/kuiper/xstream/api"
+	tflite "github.com/mattn/go-tflite"
+	"github.com/nfnt/resize"
+	"image"
+	_ "image/jpeg"
+	_ "image/png"
+	"os"
+	"path"
+	"sort"
+	"sync"
+)
+
+type labelImage struct {
+	modelPath   string
+	labelPath   string
+	once        sync.Once
+	interpreter *tflite.Interpreter
+	labels      []string
+}
+
+func (f *labelImage) Validate(args []interface{}) error {
+	if len(args) != 1 {
+		return fmt.Errorf("labelImage function only supports 1 parameter but got %d", len(args))
+	}
+	return nil
+}
+
+func (f *labelImage) Exec(args []interface{}, ctx api.FunctionContext) (interface{}, bool) {
+	arg0, ok := args[0].([]byte)
+	if !ok {
+		return fmt.Errorf("labelImage function parameter must be a bytea, but got %[1]T(%[1]v)", args[0]), false
+	}
+	img, _, err := image.Decode(bytes.NewReader(arg0))
+	if err != nil {
+		return err, false
+	}
+	var outerErr error
+	f.once.Do(func() {
+		ploc := path.Join(ctx.GetRootPath(), "etc", "functions")
+		f.labels, err = loadLabels(path.Join(ploc, f.labelPath))
+		if err != nil {
+			outerErr = fmt.Errorf("fail to load labels: %s", err)
+			return
+		}
+
+		model := tflite.NewModelFromFile(path.Join(ploc, f.modelPath))
+		if model == nil {
+			outerErr = fmt.Errorf("fail to load model: %s", err)
+			return
+		}
+		defer model.Delete()
+
+		options := tflite.NewInterpreterOptions()
+		options.SetNumThread(4)
+		options.SetErrorReporter(func(msg string, user_data interface{}) {
+			fmt.Println(msg)
+		}, nil)
+		defer options.Delete()
+
+		interpreter := tflite.NewInterpreter(model, options)
+		if interpreter == nil {
+			outerErr = fmt.Errorf("cannot create interpreter")
+			return
+		}
+		status := interpreter.AllocateTensors()
+		if status != tflite.OK {
+			outerErr = fmt.Errorf("allocate failed")
+			interpreter.Delete()
+			return
+		}
+
+		f.interpreter = interpreter
+		// TODO If created, the interpreter will be kept through the whole life of kuiper. Refactor this later.
+		//defer interpreter.Delete()
+	})
+
+	if f.interpreter == nil {
+		return fmt.Errorf("fail to load model %s %s", f.modelPath, outerErr), false
+	}
+	input := f.interpreter.GetInputTensor(0)
+	wantedHeight := input.Dim(1)
+	wantedWidth := input.Dim(2)
+	wantedChannels := input.Dim(3)
+	wantedType := input.Type()
+
+	resized := resize.Resize(uint(wantedWidth), uint(wantedHeight), img, resize.NearestNeighbor)
+	bounds := resized.Bounds()
+	dx, dy := bounds.Dx(), bounds.Dy()
+
+	if wantedType == tflite.UInt8 {
+		bb := make([]byte, dx*dy*wantedChannels)
+		for y := 0; y < dy; y++ {
+			for x := 0; x < dx; x++ {
+				col := resized.At(x, y)
+				r, g, b, _ := col.RGBA()
+				bb[(y*dx+x)*3+0] = byte(float64(r) / 255.0)
+				bb[(y*dx+x)*3+1] = byte(float64(g) / 255.0)
+				bb[(y*dx+x)*3+2] = byte(float64(b) / 255.0)
+			}
+		}
+		input.CopyFromBuffer(bb)
+	} else {
+		return fmt.Errorf("is not wanted type"), false
+	}
+
+	status := f.interpreter.Invoke()
+	if status != tflite.OK {
+		return fmt.Errorf("invoke failed"), false
+	}
+
+	output := f.interpreter.GetOutputTensor(0)
+	outputSize := output.Dim(output.NumDims() - 1)
+	b := make([]byte, outputSize)
+	type result struct {
+		score float64
+		index int
+	}
+	status = output.CopyToBuffer(&b[0])
+	if status != tflite.OK {
+		return fmt.Errorf("output failed"), false
+	}
+	var results []result
+	for i := 0; i < outputSize; i++ {
+		score := float64(b[i]) / 255.0
+		if score < 0.2 {
+			continue
+		}
+		results = append(results, result{score: score, index: i})
+	}
+	sort.Slice(results, func(i, j int) bool {
+		return results[i].score > results[j].score
+	})
+	// output is the biggest score labelImage
+	if len(results) > 0 {
+		return f.labels[results[0].index], true
+	} else {
+		return "", true
+	}
+}
+
+func (f *labelImage) IsAggregate() bool {
+	return false
+}
+
+func loadLabels(filename string) ([]string, error) {
+	labels := []string{}
+	f, err := os.Open(filename)
+	if err != nil {
+		return nil, err
+	}
+	defer f.Close()
+	scanner := bufio.NewScanner(f)
+	for scanner.Scan() {
+		labels = append(labels, scanner.Text())
+	}
+	return labels, nil
+}
+
+var LabelImage = labelImage{
+	modelPath: "labelImage/mobilenet_quant_v1_224.tflite",
+	labelPath: "labelImage/labels.txt",
+}

+ 25 - 0
plugins/functions/labelImage/lib/Readme.md

@@ -0,0 +1,25 @@
+# Tensorflow Lite C API library
+
+This is the prebuilt TensorFlow Lite C library for Debian 10. It can be used directly in the Kuiper docker images with tags x.x.x or x.x.x-slim.
+
+To use it in other environments, you need to build the library from source.
+
+## Build from source
+
+Here are the steps to build it from source on Debian.
+
+1. Install [Python](https://www.tensorflow.org/install/pip#1.-install-the-python-development-environment-on-your-system)
+
+2. Install required python lib: `pip3 install -r requirements.txt`. The requirements are from `tensorflow/tensorflow/tools/pip_package/setup.py` of the corresponding tensorflow version.
+
+3. Install [Bazel](https://docs.bazel.build/versions/4.0.0/install-ubuntu.html)
+
+4. Clone [tensorflow](https://github.com/tensorflow/tensorflow) and check out the release branch: `git checkout v2.2.0-rc3 -b mybranch`
+
+5. Build the so files; the outputs are in ./bazel-bin
+
+   ```bash
+   $ cd $tensorflowSrc
+   $ bazel build --config monolithic -c opt //tensorflow/lite:libtensorflowlite.so
+   $ bazel build --config monolithic -c opt //tensorflow/lite/c:libtensorflowlite_c.so
+   ```

BIN
plugins/functions/labelImage/lib/libtensorflowlite.so


BIN
plugins/functions/labelImage/lib/libtensorflowlite_c.so


+ 2 - 0
plugins/functions/labelImage/tflite.conf

@@ -0,0 +1,2 @@
+# include tflite c api
+/usr/local/tflite/lib

+ 288 - 42
plugins/manager.go

@@ -7,6 +7,7 @@ import (
 	"errors"
 	"fmt"
 	"github.com/emqx/kuiper/common"
+	"github.com/emqx/kuiper/common/kv"
 	"github.com/emqx/kuiper/xstream/api"
 	"io"
 	"io/ioutil"
@@ -24,14 +25,61 @@ import (
 	"unicode"
 )
 
-type Plugin struct {
+type Plugin interface {
+	GetName() string
+	GetFile() string
+	GetShellParas() []string
+	GetSymbols() []string
+	SetName(n string)
+}
+
+type IOPlugin struct {
 	Name       string   `json:"name"`
 	File       string   `json:"file"`
 	ShellParas []string `json:"shellParas"`
 }
 
+func (p *IOPlugin) GetName() string {
+	return p.Name
+}
+
+func (p *IOPlugin) GetFile() string {
+	return p.File
+}
+
+func (p *IOPlugin) GetShellParas() []string {
+	return p.ShellParas
+}
+
+func (p *IOPlugin) GetSymbols() []string {
+	return nil
+}
+
+func (p *IOPlugin) SetName(n string) {
+	p.Name = n
+}
+
+type FuncPlugin struct {
+	IOPlugin
+	// Optional, if not specified, a default element with the same name of the file will be registered
+	Functions []string `json:"functions"`
+}
+
+func (fp *FuncPlugin) GetSymbols() []string {
+	return fp.Functions
+}
+
 type PluginType int
 
+func NewPluginByType(t PluginType) Plugin {
+	switch t {
+	case FUNCTION:
+		return &FuncPlugin{}
+	default:
+		return &IOPlugin{}
+	}
+}
+
 const (
 	SOURCE PluginType = iota
 	SINK
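
As a side note (not part of this change): a minimal sketch of how a function-plugin registration payload could be decoded into the new FuncPlugin type above. The payload values mirror the REST body used in fvt_scripts/binary_image_process.jmx of this commit; the helper name is hypothetical and it is assumed to live in this package.

```go
// Sketch only: decode a registration request body into the plugin types above.
// Requires "encoding/json"; decodeFuncPlugin is a hypothetical helper.
func decodeFuncPlugin(body []byte) (Plugin, error) {
	p := NewPluginByType(FUNCTION) // returns *FuncPlugin for the FUNCTION type
	if err := json.Unmarshal(body, p); err != nil {
		return nil, err
	}
	// e.g. body: {"name":"image","file":"http://127.0.0.1:9090/plugins/image.zip","functions":["resize","thumbnail"]}
	return p, nil
}
```
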
@@ -49,18 +97,54 @@ var (
 //Registry is append only because plugin cannot delete or reload. To delete a plugin, restart the server to reindex
 type Registry struct {
 	sync.RWMutex
-	internal []map[string]string
+	// 3 maps for source/sink/function. In each map, key is the plugin name, value is the version
+	plugins []map[string]string
+	// A map from function name to its plugin file name. It is constructed during initialization by reading kv info. All functions must have an entry, even a function that resides in a single-function plugin.
+	symbols map[string]string
 }
 
 func (rr *Registry) Store(t PluginType, name string, version string) {
 	rr.Lock()
-	rr.internal[t][name] = version
+	rr.plugins[t][name] = version
+	rr.Unlock()
+}
+
+func (rr *Registry) StoreSymbols(name string, symbols []string) error {
+	rr.Lock()
+	defer rr.Unlock()
+	for _, s := range symbols {
+		if _, ok := rr.symbols[s]; ok {
+			return fmt.Errorf("function name %s already exists", s)
+		} else {
+			rr.symbols[s] = name
+		}
+	}
+
+	return nil
+}
+
+func (rr *Registry) RemoveSymbols(symbols []string) {
+	rr.Lock()
+	for _, s := range symbols {
+		delete(rr.symbols, s)
+	}
 	rr.Unlock()
 }
 
 func (rr *Registry) List(t PluginType) []string {
 	rr.RLock()
-	result := rr.internal[t]
+	result := rr.plugins[t]
+	rr.RUnlock()
+	keys := make([]string, 0, len(result))
+	for k := range result {
+		keys = append(keys, k)
+	}
+	return keys
+}
+
+func (rr *Registry) ListSymbols() []string {
+	rr.RLock()
+	result := rr.symbols
 	rr.RUnlock()
 	keys := make([]string, 0, len(result))
 	for k := range result {
@@ -71,24 +155,41 @@ func (rr *Registry) List(t PluginType) []string {
 
 func (rr *Registry) Get(t PluginType, name string) (string, bool) {
 	rr.RLock()
-	result := rr.internal[t]
+	result := rr.plugins[t]
 	rr.RUnlock()
 	r, ok := result[name]
 	return r, ok
 }
 
-//func (rr *Registry) Delete(t PluginType, value string) {
-//	rr.Lock()
-//	s := rr.internal[t]
-//	for i, f := range s{
-//		if f == value{
-//			s[len(s)-1], s[i] = s[i], s[len(s)-1]
-//			rr.internal[t] = s
-//			break
-//		}
-//	}
-//	rr.Unlock()
-//}
+func (rr *Registry) GetPluginVersionBySymbol(t PluginType, symbolName string) (string, bool) {
+	switch t {
+	case FUNCTION:
+		rr.RLock()
+		result := rr.plugins[t]
+		name, ok := rr.symbols[symbolName]
+		rr.RUnlock()
+		if ok {
+			r, nok := result[name]
+			return r, nok
+		} else {
+			return "", false
+		}
+	default:
+		return rr.Get(t, symbolName)
+	}
+}
+
+func (rr *Registry) GetPluginBySymbol(t PluginType, symbolName string) (string, bool) {
+	switch t {
+	case FUNCTION:
+		rr.RLock()
+		defer rr.RUnlock()
+		name, ok := rr.symbols[symbolName]
+		return name, ok
+	default:
+		return symbolName, true
+	}
+}
 
 var symbolRegistry = make(map[string]plugin.Symbol)
 var mu sync.RWMutex
@@ -106,7 +207,7 @@ func getPlugin(t string, pt PluginType) (plugin.Symbol, error) {
 		if err != nil {
 			return nil, fmt.Errorf("fail to initialize the plugin manager")
 		}
-		mod, err := getSoFilePath(m, pt, t)
+		mod, err := getSoFilePath(m, pt, t, false)
 		if err != nil {
 			return nil, fmt.Errorf("cannot get the plugin file path: %v", err)
 		}
@@ -181,6 +282,7 @@ type Manager struct {
 	pluginDir string
 	etcDir    string
 	registry  *Registry
+	db        kv.KeyValue
 }
 
 func NewPluginManager() (*Manager, error) {
@@ -196,7 +298,17 @@ func NewPluginManager() (*Manager, error) {
 			outerErr = fmt.Errorf("cannot find etc folder: %s", err)
 			return
 		}
-
+		dbDir, err := common.GetDataLoc()
+		if err != nil {
+			outerErr = fmt.Errorf("cannot find db folder: %s", err)
+			return
+		}
+		db := kv.GetDefaultKVStore(path.Join(dbDir, "pluginFuncs"))
+		err = db.Open()
+		if err != nil {
+			outerErr = fmt.Errorf("error when opening db: %v.", err)
+		}
+		defer db.Close()
 		plugins := make([]map[string]string, 3)
 		for i := 0; i < 3; i++ {
 			names, err := findAll(PluginType(i), dir)
@@ -206,12 +318,24 @@ func NewPluginManager() (*Manager, error) {
 			}
 			plugins[i] = names
 		}
-		registry := &Registry{internal: plugins}
+		registry := &Registry{plugins: plugins, symbols: make(map[string]string)}
+		for pf, _ := range plugins[FUNCTION] {
+			l := make([]string, 0)
+			if ok, err := db.Get(pf, &l); ok {
+				registry.StoreSymbols(pf, l)
+			} else if err != nil {
+				outerErr = fmt.Errorf("error when querying kv: %s", err)
+				return
+			} else {
+				registry.StoreSymbols(pf, []string{pf})
+			}
+		}
 
 		singleton = &Manager{
 			pluginDir: dir,
 			etcDir:    etcDir,
 			registry:  registry,
+			db:        db,
 		}
 		if err := singleton.readSourceMetaDir(); nil != err {
 			common.Log.Errorf("readSourceMetaDir:%v", err)
@@ -251,8 +375,16 @@ func (m *Manager) List(t PluginType) (result []string, err error) {
 	return m.registry.List(t), nil
 }
 
-func (m *Manager) Register(t PluginType, j *Plugin) error {
-	name, uri, shellParas := j.Name, j.File, j.ShellParas
+func (m *Manager) ListSymbols() (result []string, err error) {
+	return m.registry.ListSymbols(), nil
+}
+
+func (m *Manager) GetSymbol(s string) (result string, ok bool) {
+	return m.registry.GetPluginBySymbol(FUNCTION, s)
+}
+
+func (m *Manager) Register(t PluginType, j Plugin) error {
+	name, uri, shellParas := j.GetName(), j.GetFile(), j.GetShellParas()
 	//Validation
 	name = strings.Trim(name, " ")
 	if name == "" {
@@ -269,23 +401,54 @@ func (m *Manager) Register(t PluginType, j *Plugin) error {
 			return fmt.Errorf("invalid name %s: duplicate", name)
 		}
 	}
+	var err error
+	if t == FUNCTION {
+		if len(j.GetSymbols()) > 0 {
+			err = m.db.Open()
+			if err != nil {
+				return err
+			}
+			err = m.db.Set(name, j.GetSymbols())
+			if err != nil {
+				return err
+			}
+			m.db.Close()
+			err = m.registry.StoreSymbols(name, j.GetSymbols())
+		} else {
+			err = m.registry.StoreSymbols(name, []string{name})
+		}
+	}
+	if err != nil {
+		return err
+	}
 
 	zipPath := path.Join(m.pluginDir, name+".zip")
 	var unzipFiles []string
 	//clean up: delete zip file and unzip files in error
 	defer os.Remove(zipPath)
 	//download
-	err := downloadFile(zipPath, uri)
+	err = downloadFile(zipPath, uri)
 	if err != nil {
 		return fmt.Errorf("fail to download file %s: %s", uri, err)
 	}
 	//unzip and copy to destination
 	unzipFiles, version, err := m.install(t, name, zipPath, shellParas)
-	if err != nil {
+	if err == nil && len(j.GetSymbols()) > 0 {
+		if err = m.db.Open(); err == nil {
+			err = m.db.Set(name, j.GetSymbols())
+		}
+	}
+	if err != nil { //Revert for any errors
 		if t == SOURCE && len(unzipFiles) == 1 { //source that only copy so file
-			os.Remove(unzipFiles[0])
+			os.RemoveAll(unzipFiles[0])
+		}
+		if len(j.GetSymbols()) > 0 {
+			m.db.Close()
+			m.registry.RemoveSymbols(j.GetSymbols())
+		} else {
+			m.registry.RemoveSymbols([]string{name})
 		}
-		return fmt.Errorf("fail to unzip file %s: %s", uri, err)
+		return fmt.Errorf("fail to install plugin: %s", err)
 	}
 	m.registry.Store(t, name, version)
 
@@ -306,12 +469,37 @@ func (m *Manager) Register(t PluginType, j *Plugin) error {
 	return nil
 }
 
+// Prerequisite: the function plugin with the given name must already exist.
+func (m *Manager) RegisterFuncs(name string, functions []string) error {
+	if len(functions) == 0 {
+		return fmt.Errorf("property 'functions' must not be empty")
+	}
+	err := m.db.Open()
+	if err != nil {
+		return err
+	}
+	defer m.db.Close()
+	old := make([]string, 0)
+	if ok, err := m.db.Get(name, &old); err != nil {
+		return err
+	} else if ok {
+		m.registry.RemoveSymbols(old)
+	} else if !ok {
+		m.registry.RemoveSymbols([]string{name})
+	}
+	err = m.db.Set(name, functions)
+	if err != nil {
+		return err
+	}
+	return m.registry.StoreSymbols(name, functions)
+}
+
 func (m *Manager) Delete(t PluginType, name string, stop bool) error {
 	name = strings.Trim(name, " ")
 	if name == "" {
 		return fmt.Errorf("invalid name %s: should not be empty", name)
 	}
-	soPath, err := getSoFilePath(m, t, name)
+	soPath, err := getSoFilePath(m, t, name, true)
 	if err != nil {
 		return err
 	}
@@ -319,6 +507,13 @@ func (m *Manager) Delete(t PluginType, name string, stop bool) error {
 	paths := []string{
 		soPath,
 	}
+	// Find etc folder
+	etcPath := path.Join(m.etcDir, PluginTypes[t], name)
+	if fi, err := os.Stat(etcPath); err == nil {
+		if fi.Mode().IsDir() {
+			paths = append(paths, etcPath)
+		}
+	}
 	switch t {
 	case SOURCE:
 		paths = append(paths, path.Join(m.etcDir, PluginTypes[t], name+".yaml"))
@@ -326,13 +521,30 @@ func (m *Manager) Delete(t PluginType, name string, stop bool) error {
 	case SINK:
 		m.uninstalSink(name)
 	case FUNCTION:
+		old := make([]string, 0)
+		err = m.db.Open()
+		if err != nil {
+			return err
+		}
+		if ok, err := m.db.Get(name, &old); err != nil {
+			return err
+		} else if ok {
+			m.registry.RemoveSymbols(old)
+			err := m.db.Delete(name)
+			if err != nil {
+				return err
+			}
+		} else if !ok {
+			m.registry.RemoveSymbols([]string{name})
+		}
+		m.db.Close()
 		m.uninstalFunc(name)
 	}
 
 	for _, p := range paths {
 		_, err := os.Stat(p)
 		if err == nil {
-			err = os.Remove(p)
+			err = os.RemoveAll(p)
 			if err != nil {
 				results = append(results, err.Error())
 			}
@@ -354,38 +566,62 @@ func (m *Manager) Delete(t PluginType, name string, stop bool) error {
 		return nil
 	}
 }
-func (m *Manager) Get(t PluginType, name string) (map[string]string, bool) {
+func (m *Manager) Get(t PluginType, name string) (map[string]interface{}, bool) {
 	v, ok := m.registry.Get(t, name)
 	if strings.HasPrefix(v, "v") {
 		v = v[1:]
 	}
 	if ok {
-		m := map[string]string{
+		r := map[string]interface{}{
 			"name":    name,
 			"version": v,
 		}
-		return m, ok
+		if t == FUNCTION {
+			if err := m.db.Open(); err == nil {
+				l := make([]string, 0)
+				if ok, _ := m.db.Get(name, &l); ok {
+					r["functions"] = l
+				}
+				m.db.Close()
+			}
+			// ignore the error
+		}
+		return r, ok
 	}
 	return nil, false
 }
 
 // Return the lowercase version of so name. It may be upper case in path.
-func getSoFilePath(m *Manager, t PluginType, name string) (string, error) {
-	v, ok := m.registry.Get(t, name)
+func getSoFilePath(m *Manager, t PluginType, name string, isSoName bool) (string, error) {
+	var (
+		v      string
+		soname string
+		ok     bool
+	)
+	// The name is the plugin (.so) name when deleting a function plugin; otherwise it is a symbol that must first be resolved to its plugin.
+	if isSoName {
+		soname = name
+	} else {
+		soname, ok = m.registry.GetPluginBySymbol(t, name)
+		if !ok {
+			return "", common.NewErrorWithCode(common.NOT_FOUND, fmt.Sprintf("invalid symbol name %s: not exist", name))
+		}
+	}
+	v, ok = m.registry.Get(t, soname)
 	if !ok {
-		return "", common.NewErrorWithCode(common.NOT_FOUND, fmt.Sprintf("invalid name %s: not exist", name))
+		return "", common.NewErrorWithCode(common.NOT_FOUND, fmt.Sprintf("invalid name %s: not exist", soname))
 	}
 
-	soFile := name + ".so"
+	soFile := soname + ".so"
 	if v != "" {
-		soFile = fmt.Sprintf("%s@%s.so", name, v)
+		soFile = fmt.Sprintf("%s@%s.so", soname, v)
 	}
 	p := path.Join(m.pluginDir, PluginTypes[t], soFile)
 	if _, err := os.Stat(p); err != nil {
 		p = path.Join(m.pluginDir, PluginTypes[t], ucFirst(soFile))
 	}
 	if _, err := os.Stat(p); err != nil {
-		return "", common.NewErrorWithCode(common.NOT_FOUND, fmt.Sprintf("cannot find .so file for plugin %s", name))
+		return "", common.NewErrorWithCode(common.NOT_FOUND, fmt.Sprintf("cannot find .so file for plugin %s", soname))
 	}
 	return p, nil
 }
@@ -435,6 +671,11 @@ func (m *Manager) install(t PluginType, name, src string, shellParas []string) (
 			filenames = append(filenames, soPath)
 			revokeFiles = append(revokeFiles, soPath)
 			_, version = parseName(fileName)
+		} else if strings.HasPrefix(fileName, "etc/") {
+			err = unzipTo(file, path.Join(m.etcDir, PluginTypes[t], strings.Replace(fileName, "etc", name, 1)))
+			if err != nil {
+				return filenames, "", err
+			}
 		} else { //unzip other files
 			err = unzipTo(file, path.Join(tempPath, fileName))
 			if err != nil {
@@ -463,11 +704,12 @@ func (m *Manager) install(t PluginType, name, src string, shellParas []string) (
 
 		if err != nil {
 			for _, f := range revokeFiles {
-				os.Remove(f)
+				os.RemoveAll(f)
 			}
 			common.Log.Infof(`err:%v stdout:%s stderr:%s`, err, outb.String(), errb.String())
 			return filenames, "", err
 		} else {
+			common.Log.Infof(`run install script:%s`, outb.String())
 			common.Log.Infof("install %s plugin %s", PluginTypes[t], name)
 		}
 	}
@@ -487,17 +729,21 @@ func parseName(n string) (string, string) {
 func unzipTo(f *zip.File, fpath string) error {
 	_, err := os.Stat(fpath)
 	if err == nil || !os.IsNotExist(err) {
-		if err = os.Remove(fpath); err != nil {
+		if err = os.RemoveAll(fpath); err != nil {
 			return fmt.Errorf("failed to delete file %s", fpath)
 		}
 	}
 
 	if f.FileInfo().IsDir() {
-		return fmt.Errorf("%s: not a file, but a directory", fpath)
+		// Make Folder
+		os.MkdirAll(fpath, os.ModePerm)
+		return nil
 	}
 
-	if err := os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil {
-		return err
+	if _, err := os.Stat(filepath.Dir(fpath)); os.IsNotExist(err) {
+		if err := os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil {
+			return err
+		}
 	}
 
 	outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
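
The manager changes above let one function plugin export several SQL functions: the exported names are persisted in the pluginFuncs KV store, mirrored in the registry's symbol map, and resolved back to the hosting .so file at call time. A minimal sketch of the registration path, assuming a *Manager named manager from NewPluginManager(); the plugin name, download URL and exported function names are hypothetical:

p := &FuncPlugin{
	IOPlugin: IOPlugin{
		Name: "geo",                                      // hypothetical plugin name
		File: "http://127.0.0.1:10081/functions/geo.zip", // hypothetical download URL
	},
	Functions: []string{"geohashEncode", "geohashDecode"}, // symbols exported by the .so
}
if err := manager.Register(FUNCTION, p); err != nil {
	common.Log.Errorf("register function plugin: %v", err)
}
// Each exported symbol can now be resolved back to the plugin that hosts it.
if plugin, ok := manager.GetSymbol("geohashDecode"); ok {
	fmt.Println(plugin) // prints "geo"
}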

+ 66 - 14
plugins/manager_test.go

@@ -24,6 +24,7 @@ func TestManager_Register(t *testing.T) {
 		n       string
 		u       string
 		v       string
+		f       []string
 		lowerSo bool
 		err     error
 	}{
@@ -36,7 +37,7 @@ func TestManager_Register(t *testing.T) {
 			t:   SOURCE,
 			n:   "zipMissConf",
 			u:   endpoint + "/sources/zipMissConf.zip",
-			err: errors.New("fail to unzip file " + endpoint + "/sources/zipMissConf.zip: invalid zip file: so file or conf file is missing"),
+			err: errors.New("fail to install plugin: invalid zip file: so file or conf file is missing"),
 		}, {
 			t:   SINK,
 			n:   "urlerror",
@@ -46,12 +47,12 @@ func TestManager_Register(t *testing.T) {
 			t:   SINK,
 			n:   "zipWrongname",
 			u:   endpoint + "/sinks/zipWrongName.zip",
-			err: errors.New("fail to unzip file " + endpoint + "/sinks/zipWrongName.zip: invalid zip file: so file or conf file is missing"),
+			err: errors.New("fail to install plugin: invalid zip file: so file or conf file is missing"),
 		}, {
 			t:   FUNCTION,
 			n:   "zipMissSo",
 			u:   endpoint + "/functions/zipMissSo.zip",
-			err: errors.New("fail to unzip file " + endpoint + "/functions/zipMissSo.zip: invalid zip file: so file or conf file is missing"),
+			err: errors.New("fail to install plugin: invalid zip file: so file or conf file is missing"),
 		}, {
 			t: SOURCE,
 			n: "random2",
@@ -70,11 +71,22 @@ func TestManager_Register(t *testing.T) {
 			t: FUNCTION,
 			n: "echo2",
 			u: endpoint + "/functions/echo2.zip",
+			f: []string{"echo2", "echo3"},
 		}, {
 			t:   FUNCTION,
 			n:   "echo2",
 			u:   endpoint + "/functions/echo2.zip",
 			err: errors.New("invalid name echo2: duplicate"),
+		}, {
+			t:   FUNCTION,
+			n:   "misc",
+			u:   endpoint + "/functions/echo2.zip",
+			f:   []string{"misc", "echo3"},
+			err: errors.New("function name echo3 already exists"),
+		}, {
+			t: FUNCTION,
+			n: "comp",
+			u: endpoint + "/functions/comp.zip",
 		},
 	}
 	manager, err := NewPluginManager()
@@ -84,10 +96,22 @@ func TestManager_Register(t *testing.T) {
 
 	fmt.Printf("The test bucket size is %d.\n\n", len(data))
 	for i, tt := range data {
-		err = manager.Register(tt.t, &Plugin{
-			Name: tt.n,
-			File: tt.u,
-		})
+		var p Plugin
+		if tt.t == FUNCTION {
+			p = &FuncPlugin{
+				IOPlugin: IOPlugin{
+					Name: tt.n,
+					File: tt.u,
+				},
+				Functions: tt.f,
+			}
+		} else {
+			p = &IOPlugin{
+				Name: tt.n,
+				File: tt.u,
+			}
+		}
+		err = manager.Register(tt.t, p)
 		if !reflect.DeepEqual(tt.err, err) {
 			t.Errorf("%d: error mismatch:\n  exp=%s\n  got=%s\n\n", i, tt.err, err)
 		} else if tt.err == nil {
@@ -113,7 +137,7 @@ func TestManager_List(t *testing.T) {
 			r: []string{"file", "file2"},
 		}, {
 			t: FUNCTION,
-			r: []string{"accumulateWordCount", "countPlusOne", "echo", "echo2"},
+			r: []string{"accumulateWordCount", "comp", "countPlusOne", "echo", "echo2"},
 		},
 	}
 	manager, err := NewPluginManager()
@@ -135,32 +159,57 @@ func TestManager_List(t *testing.T) {
 	}
 }
 
+func TestManager_Symbols(t *testing.T) {
+	manager, err := NewPluginManager()
+	if err != nil {
+		t.Error(err)
+	}
+	r := []string{"accumulateWordCount", "comp", "countPlusOne", "echo", "echo2", "echo3", "misc"}
+	result, err := manager.ListSymbols()
+	if err != nil {
+		t.Errorf("list symbols error : %s\n\n", err)
+		return
+	}
+	sort.Strings(result)
+	if !reflect.DeepEqual(r, result) {
+		t.Errorf("result mismatch:\n  exp=%v\n  got=%v\n\n", r, result)
+	}
+	p, ok := manager.GetSymbol("echo3")
+	if !ok {
+		t.Errorf("cannot find echo3 symbol")
+	}
+	if p != "echo2" {
+		t.Errorf("wrong plugin %s for echo3 symbol", p)
+	}
+}
+
 func TestManager_Desc(t *testing.T) {
 	data := []struct {
 		t PluginType
 		n string
-		r map[string]string
+		r map[string]interface{}
 	}{
 		{
 			t: SOURCE,
 			n: "random2",
-			r: map[string]string{
+			r: map[string]interface{}{
 				"name":    "random2",
 				"version": "",
 			},
 		}, {
 			t: SOURCE,
 			n: "random3",
-			r: map[string]string{
+			r: map[string]interface{}{
 				"name":    "random3",
 				"version": "1.0.0",
 			},
 		}, {
 			t: FUNCTION,
 			n: "echo2",
-			r: map[string]string{
-				"name":    "echo2",
-				"version": "",
+			r: map[string]interface{}{
+				"name":      "echo2",
+				"version":   "",
+				"functions": []string{"echo2", "echo3"},
 			},
 		},
 	}
@@ -200,6 +249,9 @@ func TestManager_Delete(t *testing.T) {
 		}, {
 			t: SOURCE,
 			n: "random3",
+		}, {
+			t: FUNCTION,
+			n: "comp",
 		},
 	}
 	manager, err := NewPluginManager()
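
For plugins that are already installed, the new Manager.RegisterFuncs (see the manager.go hunk above) re-declares the full list of exported functions, dropping whatever symbols were previously recorded for that plugin; it presumably backs the new functions endpoints documented elsewhere in this PR. A hedged sketch using the names from the tests above:

// The list passed in must be complete: old symbols recorded for "echo2" are
// removed before the new ones are stored in both the KV store and the registry.
if err := manager.RegisterFuncs("echo2", []string{"echo2", "echo3"}); err != nil {
	common.Log.Errorf("register functions: %v", err)
}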

+ 1 - 0
plugins/sinkMeta_test.go

@@ -78,6 +78,7 @@ func TestHintWhenModifySink(t *testing.T) {
 		},
 		Options: &api.RuleOption{
 			IsEventTime: true,
+			SendError:   true,
 		},
 	}
 

BIN
plugins/testzips/functions/comp.zip


BIN
plugins/testzips/functions/misc.zip


+ 2 - 2
tools/migration/util/migration.go

@@ -2,7 +2,7 @@ package util
 
 import (
 	"fmt"
-	"github.com/emqx/kuiper/common"
+	"github.com/emqx/kuiper/common/kv"
 	"github.com/patrickmn/go-cache"
 	"io/ioutil"
 	"os"
@@ -22,7 +22,7 @@ func migration(dir string) error {
 		return err
 	}
 
-	store := common.GetSqliteKVStore(dir)
+	store := kv.GetDefaultKVStore(dir)
 	if err := store.Open(); nil != err {
 		return err
 	}
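
The migration tool now uses the relocated common/kv package, the same store the plugin manager opens for pluginFuncs above. A small sketch of the lifecycle this API expects; the directory and key are illustrative:

// kvRoundTrip is an illustrative helper, not part of this changeset.
// Get fills the supplied pointer and reports whether the key exists.
func kvRoundTrip(dir string) error {
	store := kv.GetDefaultKVStore(dir)
	if err := store.Open(); err != nil {
		return err
	}
	defer store.Close()

	if err := store.Set("echo2", []string{"echo2", "echo3"}); err != nil {
		return err
	}
	var funcs []string
	if found, err := store.Get("echo2", &funcs); err != nil {
		return err
	} else if found {
		fmt.Printf("%v\n", funcs) // [echo2 echo3]
	}
	return store.Delete("echo2")
}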

+ 2 - 2
tools/migration/util/migration_test.go

@@ -1,7 +1,7 @@
 package util
 
 import (
-	"github.com/emqx/kuiper/common"
+	"github.com/emqx/kuiper/common/kv"
 	"github.com/patrickmn/go-cache"
 	"os"
 	"path"
@@ -47,7 +47,7 @@ func TestDataMigration(t *testing.T) {
 		return
 	}
 
-	store := common.GetSqliteKVStore(dir)
+	store := kv.GetDefaultKVStore(dir)
 	if err := store.Open(); nil != err {
 		t.Error(err)
 		return

+ 23 - 9
xsql/ast.go

@@ -1199,23 +1199,37 @@ func (v *ValuerEval) subset(result interface{}, expr Expr) interface{} {
 	ber := v.Eval(expr)
 	if berVal, ok1 := ber.(*BracketEvalResult); ok1 {
 		if berVal.isIndex() {
-			if berVal.Start >= val.Len() {
+			if 0 > berVal.Start {
+				if 0 > berVal.Start+val.Len() {
+					return fmt.Errorf("out of index: %d of %d", berVal.Start, val.Len())
+				}
+				berVal.Start += val.Len()
+			} else if berVal.Start >= val.Len() {
 				return fmt.Errorf("out of index: %d of %d", berVal.Start, val.Len())
 			}
 			return val.Index(berVal.Start).Interface()
 		} else {
-			if berVal.Start >= val.Len() {
+			if 0 > berVal.Start {
+				if 0 > berVal.Start+val.Len() {
+					return fmt.Errorf("out of index: %d of %d", berVal.Start, val.Len())
+				}
+				berVal.Start += val.Len()
+			} else if berVal.Start >= val.Len() {
 				return fmt.Errorf("start value is out of index: %d of %d", berVal.Start, val.Len())
 			}
-
-			if berVal.End >= val.Len() {
+			if math.MinInt32 == berVal.End {
+				berVal.End = val.Len()
+			} else if 0 > berVal.End {
+				if 0 > berVal.End+val.Len() {
+					return fmt.Errorf("out of index: %d of %d", berVal.End, val.Len())
+				}
+				berVal.End += val.Len()
+			} else if berVal.End > val.Len() {
 				return fmt.Errorf("end value is out of index: %d of %d", berVal.End, val.Len())
+			} else if berVal.Start >= berVal.End {
+				return fmt.Errorf("start cannot be greater than end. start:%d  end:%d", berVal.Start, berVal.End)
 			}
-			end := berVal.End
-			if end == -1 {
-				end = val.Len()
-			}
-			return val.Slice(berVal.Start, end).Interface()
+			return val.Slice(berVal.Start, berVal.End).Interface()
 		}
 	} else {
 		return fmt.Errorf("invalid evaluation result - %v", berVal)

+ 4 - 0
xsql/funcs_ast_validator.go

@@ -275,6 +275,10 @@ func validateOtherFunc(name string, args []Expr) error {
 		if err := validateLen(name, 1, len); err != nil {
 			return err
 		}
+	case "cardinality":
+		if err := validateLen(name, 1, len); err != nil {
+			return err
+		}
 	case "nanvl":
 		if err := validateLen(name, 2, len); err != nil {
 			return err

+ 6 - 0
xsql/funcs_misc.go

@@ -222,6 +222,12 @@ func otherCall(name string, args []interface{}) (interface{}, bool) {
 		return nil, false
 	case "meta":
 		return args[0], true
+	case "cardinality":
+		val := reflect.ValueOf(args[0])
+		if val.Kind() == reflect.Slice {
+			return val.Len(), true
+		}
+		return 0, true
 	default:
 		return fmt.Errorf("unknown function name %s", name), false
 	}
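
cardinality (validated as a one-argument call in funcs_ast_validator.go above and registered in functions.go below) returns the length of an array argument and 0 for anything else. A standalone illustration of the same reflect-based check:

// Illustration only: the reflect check used by cardinality above.
length := func(arg interface{}) int {
	if v := reflect.ValueOf(arg); v.Kind() == reflect.Slice {
		return v.Len()
	}
	return 0
}
fmt.Println(length([]interface{}{1, 2, 3})) // 3
fmt.Println(length("abc"))                  // 0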

+ 1 - 1
xsql/functions.go

@@ -77,7 +77,7 @@ var jsonFuncMap = map[string]string{
 }
 
 var otherFuncMap = map[string]string{"isnull": "",
-	"newuuid": "", "tstamp": "", "mqtt": "", "meta": "",
+	"newuuid": "", "tstamp": "", "mqtt": "", "meta": "", "cardinality": "",
 }
 
 func (fv *FunctionValuer) Call(name string, args []interface{}) (interface{}, bool) {

+ 1 - 0
xsql/lexical.go

@@ -674,6 +674,7 @@ var dataTypes = []string{
 	BIGINT:   "bigint",
 	FLOAT:    "float",
 	STRINGS:  "string",
+	BYTEA:    "bytea",
 	DATETIME: "datetime",
 	BOOLEAN:  "boolean",
 	ARRAY:    "array",

+ 3 - 2
xsql/parser.go

@@ -5,6 +5,7 @@ import (
 	"github.com/emqx/kuiper/common"
 	"github.com/golang-collections/collections/stack"
 	"io"
+	"math"
 	"strconv"
 	"strings"
 )
@@ -547,7 +548,7 @@ func (p *Parser) parseBracketExpr() (Expr, error) {
 	tok2, lit2 := p.scanIgnoreWhitespace()
 	if tok2 == RBRACKET {
 		//field[]
-		return &ColonExpr{Start: 0, End: -1}, nil
+		return &ColonExpr{Start: 0, End: math.MinInt32}, nil
 	} else if tok2 == INTEGER {
 		start, err := strconv.Atoi(lit2)
 		if err != nil {
@@ -581,7 +582,7 @@ func (p *Parser) parseColonExpr(start int) (Expr, error) {
 			return nil, fmt.Errorf("Found %q, expected right bracket.", lit1)
 		}
 	} else if tok == RBRACKET {
-		return &ColonExpr{Start: start, End: -1}, nil
+		return &ColonExpr{Start: start, End: math.MinInt32}, nil
 	}
 	return nil, fmt.Errorf("Found %q, expected right bracket.", lit)
 }
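
With -1 now a meaningful bound (counting from the end), the parser can no longer use it to mark an omitted slice end, so math.MinInt32 becomes the sentinel and the ast.go evaluation above swaps in the array length. For illustration:

// Open-ended bracket expressions now carry math.MinInt32 instead of -1,
// freeing -1 to mean "count from the end" at evaluation time.
openAll := &ColonExpr{Start: 0, End: math.MinInt32}  // children[]
openTail := &ColonExpr{Start: 2, End: math.MinInt32} // children[2:]
_, _ = openAll, openTail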

+ 7 - 6
xsql/parser_test.go

@@ -2,6 +2,7 @@ package xsql
 
 import (
 	"fmt"
+	"math"
 	"reflect"
 	"strings"
 	"testing"
@@ -1230,7 +1231,7 @@ func TestParser_ParseStatement(t *testing.T) {
 		{
 			s:    `SELECT sample(-.3,) FROM tbl`,
 			stmt: nil,
-			err:  "cannot get the plugin file path: invalid name sample: not exist",
+			err:  "cannot get the plugin file path: invalid symbol name sample: not exist",
 		},
 
 		{
@@ -1672,7 +1673,7 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 						Expr: &BinaryExpr{
 							LHS: &FieldRef{Name: "children"},
 							OP:  SUBSET,
-							RHS: &ColonExpr{Start: 0, End: -1},
+							RHS: &ColonExpr{Start: 0, End: math.MinInt32},
 						},
 						Name:  "",
 						AName: ""},
@@ -1689,7 +1690,7 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 						Expr: &BinaryExpr{
 							LHS: &FieldRef{Name: "children"},
 							OP:  SUBSET,
-							RHS: &ColonExpr{Start: 2, End: -1},
+							RHS: &ColonExpr{Start: 2, End: math.MinInt32},
 						},
 						Name:  "",
 						AName: "c"},
@@ -1704,7 +1705,7 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 				Fields: []Field{
 					{
 						Expr: &BinaryExpr{
-							LHS: &BinaryExpr{LHS: &FieldRef{Name: "children"}, OP: SUBSET, RHS: &ColonExpr{Start: 2, End: -1}},
+							LHS: &BinaryExpr{LHS: &FieldRef{Name: "children"}, OP: SUBSET, RHS: &ColonExpr{Start: 2, End: math.MinInt32}},
 							OP:  ARROW,
 							RHS: &FieldRef{Name: "first"},
 						},
@@ -1734,7 +1735,7 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 				Fields: []Field{
 					{
 						Expr: &BinaryExpr{
-							LHS: &BinaryExpr{LHS: &FieldRef{StreamName: StreamName("demo"), Name: "children"}, OP: SUBSET, RHS: &ColonExpr{Start: 2, End: -1}},
+							LHS: &BinaryExpr{LHS: &FieldRef{StreamName: StreamName("demo"), Name: "children"}, OP: SUBSET, RHS: &ColonExpr{Start: 2, End: math.MinInt32}},
 							OP:  ARROW,
 							RHS: &FieldRef{Name: "first"},
 						},
@@ -1754,7 +1755,7 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 							Name: "lower",
 							Args: []Expr{
 								&BinaryExpr{
-									LHS: &BinaryExpr{LHS: &FieldRef{StreamName: StreamName("demo"), Name: "children"}, OP: SUBSET, RHS: &ColonExpr{Start: 2, End: -1}},
+									LHS: &BinaryExpr{LHS: &FieldRef{StreamName: StreamName("demo"), Name: "children"}, OP: SUBSET, RHS: &ColonExpr{Start: 2, End: math.MinInt32}},
 									OP:  ARROW,
 									RHS: &FieldRef{Name: "first"},
 								},

+ 2 - 0
xsql/processors/checkpoint_test.go

@@ -108,10 +108,12 @@ func TestCheckpoint(t *testing.T) {
 			BufferLength:       100,
 			Qos:                api.AtLeastOnce,
 			CheckpointInterval: 600,
+			SendError:          true,
 		}, {
 			BufferLength:       100,
 			Qos:                api.ExactlyOnce,
 			CheckpointInterval: 600,
+			SendError:          true,
 		},
 	}
 	for j, opt := range options {

+ 2 - 0
xsql/processors/extension_test.go

@@ -203,6 +203,7 @@ func TestFuncState(t *testing.T) {
 	handleStream(true, streamList, t)
 	doRuleTest(t, tests, 0, &api.RuleOption{
 		BufferLength: 100,
+		SendError:    true,
 	})
 }
 
@@ -292,5 +293,6 @@ func TestFuncStateCheckpoint(t *testing.T) {
 		BufferLength:       100,
 		Qos:                api.AtLeastOnce,
 		CheckpointInterval: 2000,
+		SendError:          true,
 	})
 }

+ 94 - 0
xsql/processors/rule_test.go

@@ -354,12 +354,15 @@ func TestSingleSQL(t *testing.T) {
 	options := []*api.RuleOption{
 		{
 			BufferLength: 100,
+			SendError:    true,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.AtLeastOnce,
 			CheckpointInterval: 5000,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.ExactlyOnce,
 			CheckpointInterval: 5000,
 		},
@@ -456,6 +459,92 @@ func TestSingleSQLError(t *testing.T) {
 	handleStream(true, streamList, t)
 	doRuleTest(t, tests, 0, &api.RuleOption{
 		BufferLength: 100,
+		SendError:    true,
+	})
+}
+
+func TestSingleSQLOmitError(t *testing.T) {
+	//Reset
+	streamList := []string{"ldemo"}
+	handleStream(false, streamList, t)
+	//Data setup
+	var tests = []ruleTest{
+		{
+			name: `TestSingleSQLErrorRule1`,
+			sql:  `SELECT color, ts FROM ldemo where size >= 3`,
+			r: [][]map[string]interface{}{
+				{{
+					"color": "red",
+					"ts":    float64(1541152486013),
+				}},
+				{{
+					"ts": float64(1541152487632),
+				}},
+			},
+			m: map[string]interface{}{
+				"op_1_preprocessor_ldemo_0_exceptions_total":   int64(0),
+				"op_1_preprocessor_ldemo_0_process_latency_us": int64(0),
+				"op_1_preprocessor_ldemo_0_records_in_total":   int64(5),
+				"op_1_preprocessor_ldemo_0_records_out_total":  int64(5),
+
+				"op_3_project_0_exceptions_total":   int64(0),
+				"op_3_project_0_process_latency_us": int64(0),
+				"op_3_project_0_records_in_total":   int64(2),
+				"op_3_project_0_records_out_total":  int64(2),
+
+				"sink_mockSink_0_exceptions_total":  int64(0),
+				"sink_mockSink_0_records_in_total":  int64(2),
+				"sink_mockSink_0_records_out_total": int64(2),
+
+				"source_ldemo_0_exceptions_total":  int64(0),
+				"source_ldemo_0_records_in_total":  int64(5),
+				"source_ldemo_0_records_out_total": int64(5),
+
+				"op_2_filter_0_exceptions_total":   int64(1),
+				"op_2_filter_0_process_latency_us": int64(0),
+				"op_2_filter_0_records_in_total":   int64(5),
+				"op_2_filter_0_records_out_total":  int64(2),
+			},
+		}, {
+			name: `TestSingleSQLErrorRule2`,
+			sql:  `SELECT size * 5 FROM ldemo`,
+			r: [][]map[string]interface{}{
+				{{
+					"rengine_field_0": float64(15),
+				}},
+				{{
+					"rengine_field_0": float64(15),
+				}},
+				{{
+					"rengine_field_0": float64(10),
+				}},
+				{{}},
+			},
+			m: map[string]interface{}{
+				"op_1_preprocessor_ldemo_0_exceptions_total":   int64(0),
+				"op_1_preprocessor_ldemo_0_process_latency_us": int64(0),
+				"op_1_preprocessor_ldemo_0_records_in_total":   int64(5),
+				"op_1_preprocessor_ldemo_0_records_out_total":  int64(5),
+
+				"op_2_project_0_exceptions_total":   int64(1),
+				"op_2_project_0_process_latency_us": int64(0),
+				"op_2_project_0_records_in_total":   int64(5),
+				"op_2_project_0_records_out_total":  int64(4),
+
+				"sink_mockSink_0_exceptions_total":  int64(0),
+				"sink_mockSink_0_records_in_total":  int64(4),
+				"sink_mockSink_0_records_out_total": int64(4),
+
+				"source_ldemo_0_exceptions_total":  int64(0),
+				"source_ldemo_0_records_in_total":  int64(5),
+				"source_ldemo_0_records_out_total": int64(5),
+			},
+		},
+	}
+	handleStream(true, streamList, t)
+	doRuleTest(t, tests, 0, &api.RuleOption{
+		BufferLength: 100,
+		SendError:    false,
 	})
 }
 
@@ -514,6 +603,7 @@ func TestSingleSQLTemplate(t *testing.T) {
 	handleStream(true, streamList, t)
 	doRuleTestBySinkProps(t, tests, 0, &api.RuleOption{
 		BufferLength: 100,
+		SendError:    true,
 	}, map[string]interface{}{
 		"dataTemplate": `{"wrapper":"w1", "c":"{{.color}}"}`,
 		"sendSingle":   true,
@@ -572,6 +662,7 @@ func TestNoneSingleSQLTemplate(t *testing.T) {
 	handleStream(true, streamList, t)
 	doRuleTestBySinkProps(t, tests, 0, &api.RuleOption{
 		BufferLength: 100,
+		SendError:    true,
 	}, map[string]interface{}{
 		"dataTemplate": `<div>results</div><ul>{{range .}}<li>{{.color}} - {{.size}}</li>{{end}}</ul>`,
 	}, func(result [][]byte) interface{} {
@@ -618,12 +709,15 @@ func TestSingleSQLForBinary(t *testing.T) {
 	options := []*api.RuleOption{
 		{
 			BufferLength: 100,
+			SendError:    true,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.AtLeastOnce,
 			CheckpointInterval: 5000,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.ExactlyOnce,
 			CheckpointInterval: 5000,
 		},
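
These rule tests now set the new SendError option explicitly. With SendError true the previous behaviour is kept and runtime evaluation errors are forwarded to the sink as error records; with SendError false, as in TestSingleSQLOmitError above, the failing tuple is dropped and only the operator's exceptions_total metric records it. A minimal sketch of a rule option with errors suppressed:

// Sketch: suppress evaluation errors instead of sending them to the sink,
// matching the option used by TestSingleSQLOmitError above.
opt := &api.RuleOption{
	BufferLength: 100,
	SendError:    false,
}
_ = opt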

+ 2 - 0
xsql/processors/simple_processor_test.go

@@ -66,6 +66,7 @@ func TestRuleActionParse_Apply(t *testing.T) {
 					SendMetaToSink:     false,
 					Qos:                api.AtMostOnce,
 					CheckpointInterval: 300000,
+					SendError:          true,
 				},
 			},
 		}, {
@@ -121,6 +122,7 @@ func TestRuleActionParse_Apply(t *testing.T) {
 					SendMetaToSink:     false,
 					Qos:                api.ExactlyOnce,
 					CheckpointInterval: 60000,
+					SendError:          true,
 				},
 			},
 		},

+ 7 - 0
xsql/processors/window_rule_test.go

@@ -638,12 +638,15 @@ func TestWindow(t *testing.T) {
 	options := []*api.RuleOption{
 		{
 			BufferLength: 100,
+			SendError:    true,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.AtLeastOnce,
 			CheckpointInterval: 5000,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.ExactlyOnce,
 			CheckpointInterval: 5000,
 		},
@@ -1065,16 +1068,19 @@ func TestEventWindow(t *testing.T) {
 	options := []*api.RuleOption{
 		{
 			BufferLength: 100,
+			SendError:    true,
 			IsEventTime:  true,
 			LateTol:      1000,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.AtLeastOnce,
 			CheckpointInterval: 5000,
 			IsEventTime:        true,
 			LateTol:            1000,
 		}, {
 			BufferLength:       100,
+			SendError:          true,
 			Qos:                api.ExactlyOnce,
 			CheckpointInterval: 5000,
 			IsEventTime:        true,
@@ -1334,5 +1340,6 @@ func TestWindowError(t *testing.T) {
 	handleStream(true, streamList, t)
 	doRuleTest(t, tests, 0, &api.RuleOption{
 		BufferLength: 100,
+		SendError:    true,
 	})
 }

+ 0 - 0
xsql/processors/xsql_processor.go


Some files were not shown because too many files changed in this diff