瀏覽代碼

feat(sql): add . for object field (#1877)

* add . for object field

Signed-off-by: Rui-Gan <1171530954@qq.com>

* add sourceNames property

Signed-off-by: Rui-Gan <1171530954@qq.com>

* fix ut

Signed-off-by: Rui-Gan <1171530954@qq.com>

* fix valisateFields

Signed-off-by: Rui-Gan <1171530954@qq.com>

* fix(parser): Fix missing expression types to visit

Signed-off-by: Jiyong Huang <huangjy@emqx.io>

* feat(graph): support source node refer to created stream

Signed-off-by: Jiyong Huang <huangjy@emqx.io>

---------

Signed-off-by: Rui-Gan <1171530954@qq.com>
Signed-off-by: Jiyong Huang <huangjy@emqx.io>
Co-authored-by: Jiyong Huang <huangjy@emqx.io>
Regina 1 年之前
父節點
當前提交
46ae3e0ae7

+ 22 - 1
docs/en_US/sqls/json_expr.md

@@ -37,7 +37,7 @@
 
 
 ### Identifier 
 ### Identifier 
 
 
-Source Dereference (`.`) The source dereference operator can be used to specify columns by dereferencing the source stream or table. The `->` dereference selects a key in a nested JSON object.
+The source dereference operator `.` can be used to specify columns by dereferencing the source stream or table, or to select a key in a nested JSON object. The `->` dereference selects a key in a nested JSON object.
 
 
 ```
 ```
 SELECT demo.age FROM demo
 SELECT demo.age FROM demo
@@ -54,10 +54,31 @@ SELECT demo.name->first FROM demo
 
 
 
 
 ```
 ```
+SELECT demo.name.first FROM demo
+{"first" : "Tom"}
+```
+
+
+
+```
+SELECT name.first AS fname FROM demo
+{"fname": "Tom"}
+```
+
+
+
+```
 SELECT name->first AS fname FROM demo
 SELECT name->first AS fname FROM demo
 {"fname": "Tom"}
 {"fname": "Tom"}
 ```
 ```
 
 
+
+
+```
+SELECT ops->functionA.numArgs AS num FROM demo
+{"num": 2}
+```
+
 ### Index expression
 ### Index expression
 
 
 Index Expressions allow you to select a specific element in a list. It should look similar to array access in common programming languages.The index value starts with 0, -1 is the starting position from the end, and so on.
 Index Expressions allow you to select a specific element in a list. It should look similar to array access in common programming languages.The index value starts with 0, -1 is the starting position from the end, and so on.

+ 22 - 1
docs/zh_CN/sqls/json_expr.md

@@ -37,7 +37,7 @@
 
 
 源引用(`.`)
 源引用(`.`)
 
 
-源引用运算符可用于通过引用源流或表来指定列。 `->` 引用选择嵌套 JSON 对象中的键。
+源引用运算符可用于通过引用源流或表来指定列。 `->` 引用选择嵌套 JSON 对象中的键。 源引用`.`也可以选择嵌套 JSON 对象中的键。
 
 
 ```
 ```
 SELECT demo.age FROM demo
 SELECT demo.age FROM demo
@@ -54,10 +54,31 @@ SELECT demo.name->first FROM demo
 
 
 
 
 ```
 ```
+SELECT demo.name.first FROM demo
+{"first" : "Tom"}
+```
+
+
+
+```
+SELECT name.first AS fname FROM demo
+{"fname": "Tom"}
+```
+
+
+
+```
 SELECT name->first AS fname FROM demo
 SELECT name->first AS fname FROM demo
 {"fname": "Tom"}
 {"fname": "Tom"}
 ```
 ```
 
 
+
+
+```
+SELECT ops->functionA.numArgs AS num FROM demo
+{"num": 2}
+```
+
 ### 索引表达式
 ### 索引表达式
 
 
 索引表达式使您可以选择列表中的特定元素。 它看起来应该类似于普通编程语言中的数组访问。 索引值以0为开始值,-1 为从末尾的开始位置,以此类推。
 索引表达式使您可以选择列表中的特定元素。 它看起来应该类似于普通编程语言中的数组访问。 索引值以0为开始值,-1 为从末尾的开始位置,以此类推。

+ 1 - 1
internal/topo/planner/analyzer_test.go

@@ -126,7 +126,7 @@ var tests = []struct {
 	},
 	},
 	{ // 16
 	{ // 16
 		sql: `SELECT collect(*)[0] as last FROM src1 GROUP BY SlidingWindow(ss,5) HAVING last.temp > 30`,
 		sql: `SELECT collect(*)[0] as last FROM src1 GROUP BY SlidingWindow(ss,5) HAVING last.temp > 30`,
-		r:   newErrorStruct("stream last not found"),
+		r:   newErrorStruct(""),
 	},
 	},
 }
 }
 
 

+ 65 - 50
internal/topo/planner/planner_graph.go

@@ -37,6 +37,15 @@ type genNodeFunc func(name string, props map[string]interface{}, options *api.Ru
 
 
 var extNodes = map[string]genNodeFunc{}
 var extNodes = map[string]genNodeFunc{}
 
 
+type sourceType int
+
+const (
+	ILLEGAL sourceType = iota
+	STREAM
+	SCANTABLE
+	LOOKUPTABLE
+)
+
 // PlanByGraph returns a topo.Topo object by a graph
 // PlanByGraph returns a topo.Topo object by a graph
 func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 	ruleGraph := rule.Graph
 	ruleGraph := rule.Graph
@@ -54,6 +63,7 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 		store               kv.KeyValue
 		store               kv.KeyValue
 		lookupTableChildren = make(map[string]*ast.Options)
 		lookupTableChildren = make(map[string]*ast.Options)
 		scanTableEmitters   []string
 		scanTableEmitters   []string
+		sourceNames         []string
 		streamEmitters      = make(map[string]struct{})
 		streamEmitters      = make(map[string]struct{})
 	)
 	)
 	for _, srcName := range ruleGraph.Topo.Sources {
 	for _, srcName := range ruleGraph.Topo.Sources {
@@ -64,11 +74,20 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 		if _, ok := ruleGraph.Topo.Edges[srcName]; !ok {
 		if _, ok := ruleGraph.Topo.Edges[srcName]; !ok {
 			return nil, fmt.Errorf("no edge defined for source node %s", srcName)
 			return nil, fmt.Errorf("no edge defined for source node %s", srcName)
 		}
 		}
-		var srcNode *node.SourceNode
-		srcNode, scanTableEmitters, err = parseSource(srcName, gn, rule, store, lookupTableChildren, streamEmitters)
+		srcNode, srcType, name, err := parseSource(srcName, gn, rule, store, lookupTableChildren)
 		if err != nil {
 		if err != nil {
 			return nil, fmt.Errorf("parse source %s with %v error: %w", srcName, gn.Props, err)
 			return nil, fmt.Errorf("parse source %s with %v error: %w", srcName, gn.Props, err)
 		}
 		}
+		switch srcType {
+		case STREAM:
+			streamEmitters[name] = struct{}{}
+			sourceNames = append(sourceNames, name)
+		case SCANTABLE:
+			scanTableEmitters = append(scanTableEmitters, name)
+			sourceNames = append(sourceNames, name)
+		case LOOKUPTABLE:
+			sourceNames = append(sourceNames, name)
+		}
 		if srcNode != nil {
 		if srcNode != nil {
 			nodeMap[srcName] = srcNode
 			nodeMap[srcName] = srcNode
 			tp.AddSrc(srcNode)
 			tp.AddSrc(srcNode)
@@ -92,14 +111,14 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 			nt := strings.ToLower(gn.NodeType)
 			nt := strings.ToLower(gn.NodeType)
 			switch nt {
 			switch nt {
 			case "function":
 			case "function":
-				fop, err := parseFunc(gn.Props)
+				fop, err := parseFunc(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse function %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse function %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
 				op := Transform(fop, nodeName, rule.Options)
 				op := Transform(fop, nodeName, rule.Options)
 				nodeMap[nodeName] = op
 				nodeMap[nodeName] = op
 			case "aggfunc":
 			case "aggfunc":
-				fop, err := parseFunc(gn.Props)
+				fop, err := parseFunc(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse aggfunc %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse aggfunc %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
@@ -107,14 +126,14 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 				op := Transform(fop, nodeName, rule.Options)
 				op := Transform(fop, nodeName, rule.Options)
 				nodeMap[nodeName] = op
 				nodeMap[nodeName] = op
 			case "filter":
 			case "filter":
-				fop, err := parseFilter(gn.Props)
+				fop, err := parseFilter(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse filter %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse filter %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
 				op := Transform(fop, nodeName, rule.Options)
 				op := Transform(fop, nodeName, rule.Options)
 				nodeMap[nodeName] = op
 				nodeMap[nodeName] = op
 			case "pick":
 			case "pick":
-				pop, err := parsePick(gn.Props)
+				pop, err := parsePick(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse pick %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse pick %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
@@ -131,7 +150,7 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 				}
 				}
 				nodeMap[nodeName] = op
 				nodeMap[nodeName] = op
 			case "join":
 			case "join":
-				stmt, err := parseJoinAst(gn.Props)
+				stmt, err := parseJoinAst(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse join %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse join %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
@@ -178,21 +197,21 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 					}
 					}
 				}
 				}
 			case "groupby":
 			case "groupby":
-				gop, err := parseGroupBy(gn.Props)
+				gop, err := parseGroupBy(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse groupby %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse groupby %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
 				op := Transform(gop, nodeName, rule.Options)
 				op := Transform(gop, nodeName, rule.Options)
 				nodeMap[nodeName] = op
 				nodeMap[nodeName] = op
 			case "orderby":
 			case "orderby":
-				oop, err := parseOrderBy(gn.Props)
+				oop, err := parseOrderBy(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse orderby %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse orderby %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
 				op := Transform(oop, nodeName, rule.Options)
 				op := Transform(oop, nodeName, rule.Options)
 				nodeMap[nodeName] = op
 				nodeMap[nodeName] = op
 			case "switch":
 			case "switch":
-				sconf, err := parseSwitch(gn.Props)
+				sconf, err := parseSwitch(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, fmt.Errorf("parse switch %s with %v error: %w", nodeName, gn.Props, err)
 					return nil, fmt.Errorf("parse switch %s with %v error: %w", nodeName, gn.Props, err)
 				}
 				}
@@ -329,7 +348,7 @@ func PlanByGraph(rule *api.Rule) (*topo.Topo, error) {
 			dataFlow[n] = graph.MapOut(in, out)
 			dataFlow[n] = graph.MapOut(in, out)
 			// convert filter to having if the input is aggregated
 			// convert filter to having if the input is aggregated
 			if gn.NodeType == "filter" && in.Type == graph.IOINPUT_TYPE_COLLECTION && in.CollectionType == graph.IOCOLLECTION_TYPE_GROUPED {
 			if gn.NodeType == "filter" && in.Type == graph.IOINPUT_TYPE_COLLECTION && in.CollectionType == graph.IOCOLLECTION_TYPE_GROUPED {
-				fop, err := parseHaving(gn.Props)
+				fop, err := parseHaving(gn.Props, sourceNames)
 				if err != nil {
 				if err != nil {
 					return nil, err
 					return nil, err
 				}
 				}
@@ -398,49 +417,48 @@ func genNodesInOrder(toNodes []string, edges map[string][]interface{}, flatRever
 	return i
 	return i
 }
 }
 
 
-func parseSource(nodeName string, gn *api.GraphNode, rule *api.Rule, store kv.KeyValue, lookupTableChildren map[string]*ast.Options, streamEmitters map[string]struct{}) (*node.SourceNode, []string, error) {
-	scanTableEmitters := make([]string, 0)
+func parseSource(nodeName string, gn *api.GraphNode, rule *api.Rule, store kv.KeyValue, lookupTableChildren map[string]*ast.Options) (*node.SourceNode, sourceType, string, error) {
 	sourceMeta := &api.SourceMeta{
 	sourceMeta := &api.SourceMeta{
 		SourceType: "stream",
 		SourceType: "stream",
 	}
 	}
 	err := cast.MapToStruct(gn.Props, sourceMeta)
 	err := cast.MapToStruct(gn.Props, sourceMeta)
 	if err != nil {
 	if err != nil {
-		return nil, scanTableEmitters, err
+		return nil, ILLEGAL, "", err
 	}
 	}
 	if sourceMeta.SourceType != "stream" && sourceMeta.SourceType != "table" {
 	if sourceMeta.SourceType != "stream" && sourceMeta.SourceType != "table" {
-		return nil, scanTableEmitters, fmt.Errorf("source type %s not supported", sourceMeta.SourceType)
+		return nil, ILLEGAL, "", fmt.Errorf("source type %s not supported", sourceMeta.SourceType)
 	}
 	}
 	// If source name is specified, find the created stream/table from store
 	// If source name is specified, find the created stream/table from store
 	if sourceMeta.SourceName != "" {
 	if sourceMeta.SourceName != "" {
 		if store == nil {
 		if store == nil {
 			store, err = store2.GetKV("stream")
 			store, err = store2.GetKV("stream")
 			if err != nil {
 			if err != nil {
-				return nil, scanTableEmitters, err
+				return nil, ILLEGAL, "", err
 			}
 			}
 		}
 		}
 		streamStmt, e := xsql.GetDataSource(store, sourceMeta.SourceName)
 		streamStmt, e := xsql.GetDataSource(store, sourceMeta.SourceName)
 		if e != nil {
 		if e != nil {
-			return nil, scanTableEmitters, fmt.Errorf("fail to get stream %s, please check if stream is created", sourceMeta.SourceName)
+			return nil, ILLEGAL, "", fmt.Errorf("fail to get stream %s, please check if stream is created", sourceMeta.SourceName)
 		}
 		}
 		if streamStmt.StreamType == ast.TypeStream && sourceMeta.SourceType == "table" {
 		if streamStmt.StreamType == ast.TypeStream && sourceMeta.SourceType == "table" {
-			return nil, scanTableEmitters, fmt.Errorf("stream %s is not a table", sourceMeta.SourceName)
+			return nil, ILLEGAL, "", fmt.Errorf("stream %s is not a table", sourceMeta.SourceName)
 		} else if streamStmt.StreamType == ast.TypeTable && sourceMeta.SourceType == "stream" {
 		} else if streamStmt.StreamType == ast.TypeTable && sourceMeta.SourceType == "stream" {
-			return nil, scanTableEmitters, fmt.Errorf("table %s is not a stream", sourceMeta.SourceName)
+			return nil, ILLEGAL, "", fmt.Errorf("table %s is not a stream", sourceMeta.SourceName)
 		}
 		}
 		st := streamStmt.Options.TYPE
 		st := streamStmt.Options.TYPE
 		if st == "" {
 		if st == "" {
 			st = "mqtt"
 			st = "mqtt"
 		}
 		}
 		if st != gn.NodeType {
 		if st != gn.NodeType {
-			return nil, scanTableEmitters, fmt.Errorf("source type %s does not match the stream type %s", gn.NodeType, st)
+			return nil, ILLEGAL, "", fmt.Errorf("source type %s does not match the stream type %s", gn.NodeType, st)
 		}
 		}
 		sInfo, err := convertStreamInfo(streamStmt)
 		sInfo, err := convertStreamInfo(streamStmt)
 		if err != nil {
 		if err != nil {
-			return nil, scanTableEmitters, err
+			return nil, ILLEGAL, "", err
 		}
 		}
 		if sInfo.stmt.StreamType == ast.TypeTable && sInfo.stmt.Options.KIND == ast.StreamKindLookup {
 		if sInfo.stmt.StreamType == ast.TypeTable && sInfo.stmt.Options.KIND == ast.StreamKindLookup {
 			lookupTableChildren[string(sInfo.stmt.Name)] = sInfo.stmt.Options
 			lookupTableChildren[string(sInfo.stmt.Name)] = sInfo.stmt.Options
-			return nil, scanTableEmitters, nil
+			return nil, LOOKUPTABLE, string(sInfo.stmt.Name), nil
 		} else {
 		} else {
 			// Use the plan to calculate the schema and other meta info
 			// Use the plan to calculate the schema and other meta info
 			p := DataSourcePlan{
 			p := DataSourcePlan{
@@ -455,43 +473,40 @@ func parseSource(nodeName string, gn *api.GraphNode, rule *api.Rule, store kv.Ke
 			if sInfo.stmt.StreamType == ast.TypeStream {
 			if sInfo.stmt.StreamType == ast.TypeStream {
 				err = p.PruneColumns(nil)
 				err = p.PruneColumns(nil)
 				if err != nil {
 				if err != nil {
-					return nil, scanTableEmitters, err
+					return nil, ILLEGAL, "", err
 				}
 				}
 				srcNode, e := transformSourceNode(p, nil, rule.Options)
 				srcNode, e := transformSourceNode(p, nil, rule.Options)
 				if e != nil {
 				if e != nil {
-					return nil, scanTableEmitters, e
+					return nil, ILLEGAL, "", e
 				}
 				}
-				streamEmitters[string(sInfo.stmt.Name)] = struct{}{}
-				return srcNode, scanTableEmitters, nil
+				return srcNode, STREAM, string(sInfo.stmt.Name), nil
 			} else {
 			} else {
-				scanTableEmitters = append(scanTableEmitters, string(sInfo.stmt.Name))
-				return nil, scanTableEmitters, nil
+				return nil, SCANTABLE, string(sInfo.stmt.Name), nil
 			}
 			}
 		}
 		}
 	} else {
 	} else {
 		sourceOption := &ast.Options{}
 		sourceOption := &ast.Options{}
 		err = cast.MapToStruct(gn.Props, sourceOption)
 		err = cast.MapToStruct(gn.Props, sourceOption)
 		if err != nil {
 		if err != nil {
-			return nil, scanTableEmitters, err
+			return nil, ILLEGAL, "", err
 		}
 		}
 		sourceOption.TYPE = gn.NodeType
 		sourceOption.TYPE = gn.NodeType
 		switch sourceMeta.SourceType {
 		switch sourceMeta.SourceType {
 		case "stream":
 		case "stream":
 			pp, err := operator.NewPreprocessor(true, nil, true, nil, rule.Options.IsEventTime, sourceOption.TIMESTAMP, sourceOption.TIMESTAMP_FORMAT, strings.EqualFold(sourceOption.FORMAT, message.FormatBinary), sourceOption.STRICT_VALIDATION)
 			pp, err := operator.NewPreprocessor(true, nil, true, nil, rule.Options.IsEventTime, sourceOption.TIMESTAMP, sourceOption.TIMESTAMP_FORMAT, strings.EqualFold(sourceOption.FORMAT, message.FormatBinary), sourceOption.STRICT_VALIDATION)
 			if err != nil {
 			if err != nil {
-				return nil, scanTableEmitters, err
+				return nil, ILLEGAL, "", err
 			}
 			}
 			srcNode := node.NewSourceNode(nodeName, ast.TypeStream, pp, sourceOption, rule.Options.SendError)
 			srcNode := node.NewSourceNode(nodeName, ast.TypeStream, pp, sourceOption, rule.Options.SendError)
-			streamEmitters[nodeName] = struct{}{}
-			return srcNode, scanTableEmitters, nil
+			return srcNode, STREAM, nodeName, nil
 		case "table":
 		case "table":
-			return nil, scanTableEmitters, fmt.Errorf("anonymouse table source is not supported, please create it prior to the rule")
+			return nil, ILLEGAL, "", fmt.Errorf("anonymouse table source is not supported, please create it prior to the rule")
 		}
 		}
 	}
 	}
-	return nil, scanTableEmitters, errors.New("invalid source node")
+	return nil, ILLEGAL, "", errors.New("invalid source node")
 }
 }
 
 
-func parseOrderBy(props map[string]interface{}) (*operator.OrderOp, error) {
+func parseOrderBy(props map[string]interface{}, sourceNames []string) (*operator.OrderOp, error) {
 	n := &graph.Orderby{}
 	n := &graph.Orderby{}
 	err := cast.MapToStruct(props, n)
 	err := cast.MapToStruct(props, n)
 	if err != nil {
 	if err != nil {
@@ -504,7 +519,7 @@ func parseOrderBy(props map[string]interface{}) (*operator.OrderOp, error) {
 			stmt += "DESC"
 			stmt += "DESC"
 		}
 		}
 	}
 	}
-	p, err := xsql.NewParser(strings.NewReader(stmt)).Parse()
+	p, err := xsql.NewParserWithSources(strings.NewReader(stmt), sourceNames).Parse()
 	if err != nil {
 	if err != nil {
 		return nil, fmt.Errorf("invalid order by statement error: %v", err)
 		return nil, fmt.Errorf("invalid order by statement error: %v", err)
 	}
 	}
@@ -516,7 +531,7 @@ func parseOrderBy(props map[string]interface{}) (*operator.OrderOp, error) {
 	}, nil
 	}, nil
 }
 }
 
 
-func parseGroupBy(props map[string]interface{}) (*operator.AggregateOp, error) {
+func parseGroupBy(props map[string]interface{}, sourceNames []string) (*operator.AggregateOp, error) {
 	n := &graph.Groupby{}
 	n := &graph.Groupby{}
 	err := cast.MapToStruct(props, n)
 	err := cast.MapToStruct(props, n)
 	if err != nil {
 	if err != nil {
@@ -526,14 +541,14 @@ func parseGroupBy(props map[string]interface{}) (*operator.AggregateOp, error) {
 		return nil, fmt.Errorf("groupby must have at least one dimension")
 		return nil, fmt.Errorf("groupby must have at least one dimension")
 	}
 	}
 	stmt := "SELECT * FROM unknown Group By " + strings.Join(n.Dimensions, ",")
 	stmt := "SELECT * FROM unknown Group By " + strings.Join(n.Dimensions, ",")
-	p, err := xsql.NewParser(strings.NewReader(stmt)).Parse()
+	p, err := xsql.NewParserWithSources(strings.NewReader(stmt), sourceNames).Parse()
 	if err != nil {
 	if err != nil {
 		return nil, fmt.Errorf("invalid join statement error: %v", err)
 		return nil, fmt.Errorf("invalid join statement error: %v", err)
 	}
 	}
 	return &operator.AggregateOp{Dimensions: p.Dimensions}, nil
 	return &operator.AggregateOp{Dimensions: p.Dimensions}, nil
 }
 }
 
 
-func parseJoinAst(props map[string]interface{}) (*ast.SelectStatement, error) {
+func parseJoinAst(props map[string]interface{}, sourceNames []string) (*ast.SelectStatement, error) {
 	n := &graph.Join{}
 	n := &graph.Join{}
 	err := cast.MapToStruct(props, n)
 	err := cast.MapToStruct(props, n)
 	if err != nil {
 	if err != nil {
@@ -543,7 +558,7 @@ func parseJoinAst(props map[string]interface{}) (*ast.SelectStatement, error) {
 	for _, join := range n.Joins {
 	for _, join := range n.Joins {
 		stmt += " " + join.Type + " JOIN " + join.Name + " ON " + join.On
 		stmt += " " + join.Type + " JOIN " + join.Name + " ON " + join.On
 	}
 	}
-	return xsql.NewParser(strings.NewReader(stmt)).Parse()
+	return xsql.NewParserWithSources(strings.NewReader(stmt), sourceNames).Parse()
 }
 }
 
 
 func parseWindow(props map[string]interface{}) (*node.WindowConfig, error) {
 func parseWindow(props map[string]interface{}) (*node.WindowConfig, error) {
@@ -628,13 +643,13 @@ func parseWindow(props map[string]interface{}) (*node.WindowConfig, error) {
 	}, nil
 	}, nil
 }
 }
 
 
-func parsePick(props map[string]interface{}) (*operator.ProjectOp, error) {
+func parsePick(props map[string]interface{}, sourceNames []string) (*operator.ProjectOp, error) {
 	n := &graph.Select{}
 	n := &graph.Select{}
 	err := cast.MapToStruct(props, n)
 	err := cast.MapToStruct(props, n)
 	if err != nil {
 	if err != nil {
 		return nil, err
 		return nil, err
 	}
 	}
-	stmt, err := xsql.NewParser(strings.NewReader("select " + strings.Join(n.Fields, ",") + " from nonexist")).Parse()
+	stmt, err := xsql.NewParserWithSources(strings.NewReader("select "+strings.Join(n.Fields, ",")+" from nonexist"), sourceNames).Parse()
 	if err != nil {
 	if err != nil {
 		return nil, err
 		return nil, err
 	}
 	}
@@ -645,7 +660,7 @@ func parsePick(props map[string]interface{}) (*operator.ProjectOp, error) {
 	return &operator.ProjectOp{ColNames: t.colNames, AliasNames: t.aliasNames, AliasFields: t.aliasFields, ExprFields: t.exprFields, IsAggregate: t.isAggregate, AllWildcard: t.allWildcard, WildcardEmitters: t.wildcardEmitters, ExprNames: t.exprNames, SendMeta: t.sendMeta}, nil
 	return &operator.ProjectOp{ColNames: t.colNames, AliasNames: t.aliasNames, AliasFields: t.aliasFields, ExprFields: t.exprFields, IsAggregate: t.isAggregate, AllWildcard: t.allWildcard, WildcardEmitters: t.wildcardEmitters, ExprNames: t.exprNames, SendMeta: t.sendMeta}, nil
 }
 }
 
 
-func parseFunc(props map[string]interface{}) (*operator.FuncOp, error) {
+func parseFunc(props map[string]interface{}, sourceNames []string) (*operator.FuncOp, error) {
 	m, ok := props["expr"]
 	m, ok := props["expr"]
 	if !ok {
 	if !ok {
 		return nil, errors.New("no expr")
 		return nil, errors.New("no expr")
@@ -654,7 +669,7 @@ func parseFunc(props map[string]interface{}) (*operator.FuncOp, error) {
 	if !ok {
 	if !ok {
 		return nil, fmt.Errorf("expr %v is not string", m)
 		return nil, fmt.Errorf("expr %v is not string", m)
 	}
 	}
-	stmt, err := xsql.NewParser(strings.NewReader("select " + funcExpr + " from nonexist")).Parse()
+	stmt, err := xsql.NewParserWithSources(strings.NewReader("select "+funcExpr+" from nonexist"), sourceNames).Parse()
 	if err != nil {
 	if err != nil {
 		return nil, err
 		return nil, err
 	}
 	}
@@ -673,7 +688,7 @@ func parseFunc(props map[string]interface{}) (*operator.FuncOp, error) {
 	return &operator.FuncOp{CallExpr: c, Name: name, IsAgg: function.IsAggFunc(name)}, nil
 	return &operator.FuncOp{CallExpr: c, Name: name, IsAgg: function.IsAggFunc(name)}, nil
 }
 }
 
 
-func parseFilter(props map[string]interface{}) (*operator.FilterOp, error) {
+func parseFilter(props map[string]interface{}, sourceNames []string) (*operator.FilterOp, error) {
 	m, ok := props["expr"]
 	m, ok := props["expr"]
 	if !ok {
 	if !ok {
 		return nil, errors.New("no expr")
 		return nil, errors.New("no expr")
@@ -682,7 +697,7 @@ func parseFilter(props map[string]interface{}) (*operator.FilterOp, error) {
 	if !ok {
 	if !ok {
 		return nil, fmt.Errorf("expr %v is not string", m)
 		return nil, fmt.Errorf("expr %v is not string", m)
 	}
 	}
-	p := xsql.NewParser(strings.NewReader("where " + conditionExpr))
+	p := xsql.NewParserWithSources(strings.NewReader("where "+conditionExpr), sourceNames)
 	if exp, err := p.ParseCondition(); err != nil {
 	if exp, err := p.ParseCondition(); err != nil {
 		return nil, err
 		return nil, err
 	} else {
 	} else {
@@ -693,7 +708,7 @@ func parseFilter(props map[string]interface{}) (*operator.FilterOp, error) {
 	return nil, fmt.Errorf("expr %v is not a condition", m)
 	return nil, fmt.Errorf("expr %v is not a condition", m)
 }
 }
 
 
-func parseHaving(props map[string]interface{}) (*operator.HavingOp, error) {
+func parseHaving(props map[string]interface{}, sourceNames []string) (*operator.HavingOp, error) {
 	m, ok := props["expr"]
 	m, ok := props["expr"]
 	if !ok {
 	if !ok {
 		return nil, errors.New("no expr")
 		return nil, errors.New("no expr")
@@ -702,7 +717,7 @@ func parseHaving(props map[string]interface{}) (*operator.HavingOp, error) {
 	if !ok {
 	if !ok {
 		return nil, fmt.Errorf("expr %v is not string", m)
 		return nil, fmt.Errorf("expr %v is not string", m)
 	}
 	}
-	p := xsql.NewParser(strings.NewReader("where " + conditionExpr))
+	p := xsql.NewParserWithSources(strings.NewReader("where "+conditionExpr), sourceNames)
 	if exp, err := p.ParseCondition(); err != nil {
 	if exp, err := p.ParseCondition(); err != nil {
 		return nil, err
 		return nil, err
 	} else {
 	} else {
@@ -713,7 +728,7 @@ func parseHaving(props map[string]interface{}) (*operator.HavingOp, error) {
 	return nil, fmt.Errorf("expr %v is not a condition", m)
 	return nil, fmt.Errorf("expr %v is not a condition", m)
 }
 }
 
 
-func parseSwitch(props map[string]interface{}) (*node.SwitchConfig, error) {
+func parseSwitch(props map[string]interface{}, sourceNames []string) (*node.SwitchConfig, error) {
 	n := &graph.Switch{}
 	n := &graph.Switch{}
 	err := cast.MapToStruct(props, n)
 	err := cast.MapToStruct(props, n)
 	if err != nil {
 	if err != nil {
@@ -724,7 +739,7 @@ func parseSwitch(props map[string]interface{}) (*node.SwitchConfig, error) {
 	}
 	}
 	caseExprs := make([]ast.Expr, len(n.Cases))
 	caseExprs := make([]ast.Expr, len(n.Cases))
 	for i, c := range n.Cases {
 	for i, c := range n.Cases {
-		p := xsql.NewParser(strings.NewReader("where " + c))
+		p := xsql.NewParserWithSources(strings.NewReader("where "+c), sourceNames)
 		if exp, err := p.ParseCondition(); err != nil {
 		if exp, err := p.ParseCondition(); err != nil {
 			return nil, fmt.Errorf("parse case %d error: %v", i, err)
 			return nil, fmt.Errorf("parse case %d error: %v", i, err)
 		} else {
 		} else {

+ 43 - 0
internal/topo/planner/planner_graph_test.go

@@ -540,6 +540,49 @@ func TestPlannerGraphValidate(t *testing.T) {
 }`,
 }`,
 			err: "parse aggfunc aggfunc with map[expr:avg(,temperature) as avg_temperature] error: found \",\", expected expression.",
 			err: "parse aggfunc aggfunc with map[expr:avg(,temperature) as avg_temperature] error: found \",\", expected expression.",
 		},
 		},
+		{
+			graph: `{
+  "nodes": {
+    "abc": {
+      "type": "source",
+      "nodeType": "mqtt",
+      "props": {
+        "datasource": "demo"
+      }
+    },
+    "myfilter": {
+      "type": "operator",
+      "nodeType": "filter",
+      "props": {
+        "expr": "data.nested.temperature > 20"
+      }
+    },   
+    "mqttpv": {
+      "type": "sink",
+      "nodeType": "mqtt",
+      "props": {
+        "server": "tcp://syno.home:1883",
+        "topic": "result",
+        "sendSingle": true
+      }
+    }
+  },
+  "topo": {
+    "sources": [
+      "abc"
+    ],
+    "edges": {
+      "abc": [
+        "myfilter"
+      ],
+      "myfilter": [
+        "mqttpv"
+      ]
+    }
+  }
+}`,
+			err: "",
+		},
 	}
 	}
 
 
 	t.Logf("The test bucket size is %d.\n\n", len(tests))
 	t.Logf("The test bucket size is %d.\n\n", len(tests))

+ 46 - 14
internal/xsql/parser.go

@@ -38,10 +38,11 @@ type Parser struct {
 		tok ast.Token
 		tok ast.Token
 		lit string
 		lit string
 	}
 	}
-	inFunc string // currently parsing function name
-	f      int    // anonymous field index number
-	fn     int    // function index number
-	clause string
+	inFunc      string // currently parsing function name
+	f           int    // anonymous field index number
+	fn          int    // function index number
+	clause      string
+	sourceNames []string // source names in the from/join clause
 }
 }
 
 
 func (p *Parser) ParseCondition() (ast.Expr, error) {
 func (p *Parser) ParseCondition() (ast.Expr, error) {
@@ -98,6 +99,10 @@ func NewParser(r io.Reader) *Parser {
 	return &Parser{s: NewScanner(r)}
 	return &Parser{s: NewScanner(r)}
 }
 }
 
 
+func NewParserWithSources(r io.Reader, sources []string) *Parser {
+	return &Parser{s: NewScanner(r), sourceNames: sources}
+}
+
 func (p *Parser) ParseQueries() ([]ast.SelectStatement, error) {
 func (p *Parser) ParseQueries() ([]ast.SelectStatement, error) {
 	var stmts []ast.SelectStatement
 	var stmts []ast.SelectStatement
 
 
@@ -150,6 +155,10 @@ func (p *Parser) Parse() (*ast.SelectStatement, error) {
 	} else {
 	} else {
 		selects.Joins = joins
 		selects.Joins = joins
 	}
 	}
+	// The source names may be injected from outside to parse part of the sql
+	if p.sourceNames == nil {
+		p.sourceNames = getStreamNames(selects)
+	}
 	p.clause = "where"
 	p.clause = "where"
 	if exp, err := p.ParseCondition(); err != nil {
 	if exp, err := p.ParseCondition(); err != nil {
 		return nil, err
 		return nil, err
@@ -178,6 +187,7 @@ func (p *Parser) Parse() (*ast.SelectStatement, error) {
 	}
 	}
 	p.clause = ""
 	p.clause = ""
 	if tok, lit := p.scanIgnoreWhitespace(); tok == ast.SEMICOLON {
 	if tok, lit := p.scanIgnoreWhitespace(); tok == ast.SEMICOLON {
+		validateFields(selects, p.sourceNames)
 		p.unscan()
 		p.unscan()
 		return selects, nil
 		return selects, nil
 	} else if tok != ast.EOF {
 	} else if tok != ast.EOF {
@@ -187,7 +197,7 @@ func (p *Parser) Parse() (*ast.SelectStatement, error) {
 	if err := Validate(selects); err != nil {
 	if err := Validate(selects); err != nil {
 		return nil, err
 		return nil, err
 	}
 	}
-
+	validateFields(selects, p.sourceNames)
 	return selects, nil
 	return selects, nil
 }
 }
 
 
@@ -234,12 +244,15 @@ func (p *Parser) parseSourceLiteral() (string, string, error) {
 	return strings.Join(sourceSeg, ""), alias, nil
 	return strings.Join(sourceSeg, ""), alias, nil
 }
 }
 
 
-func (p *Parser) parseFieldNameSections() ([]string, error) {
+func (p *Parser) parseFieldNameSections(isSubField bool) ([]string, error) {
 	var fieldNameSects []string
 	var fieldNameSects []string
 	for {
 	for {
 		if tok, lit := p.scanIgnoreWhitespace(); tok == ast.IDENT || tok == ast.ASTERISK {
 		if tok, lit := p.scanIgnoreWhitespace(); tok == ast.IDENT || tok == ast.ASTERISK {
 			fieldNameSects = append(fieldNameSects, lit)
 			fieldNameSects = append(fieldNameSects, lit)
-			if tok1, _ := p.scanIgnoreWhitespace(); !tok1.AllowedSFNToken() {
+			if len(fieldNameSects) > 1 {
+				break
+			}
+			if tok1, _ := p.scanIgnoreWhitespace(); isSubField || !tok1.AllowedSFNToken() {
 				p.unscan()
 				p.unscan()
 				break
 				break
 			}
 			}
@@ -250,9 +263,8 @@ func (p *Parser) parseFieldNameSections() ([]string, error) {
 	}
 	}
 	if len(fieldNameSects) == 0 {
 	if len(fieldNameSects) == 0 {
 		return nil, fmt.Errorf("Cannot find any field name.\n")
 		return nil, fmt.Errorf("Cannot find any field name.\n")
-	} else if len(fieldNameSects) > 2 {
-		return nil, fmt.Errorf("Too many field names. Please use -> to reference keys in struct.\n")
 	}
 	}
+
 	return fieldNameSects, nil
 	return fieldNameSects, nil
 }
 }
 
 
@@ -363,10 +375,12 @@ func (p *Parser) parseSorts() (ast.SortFields, error) {
 					s := ast.SortField{Ascending: true}
 					s := ast.SortField{Ascending: true}
 
 
 					p.unscan()
 					p.unscan()
-					if name, err := p.parseFieldNameSections(); err == nil {
+					if name, err := p.parseFieldNameSections(false); err == nil {
 						if len(name) == 2 {
 						if len(name) == 2 {
 							s.StreamName = ast.StreamName(name[0])
 							s.StreamName = ast.StreamName(name[0])
 							s.Name = name[1]
 							s.Name = name[1]
+							p.unscan()
+							p.unscan()
 						} else {
 						} else {
 							s.Name = name[0]
 							s.Name = name[0]
 						}
 						}
@@ -555,8 +569,10 @@ func (p *Parser) ParseExpr() (ast.Expr, error) {
 		}
 		}
 
 
 		var rhs ast.Expr
 		var rhs ast.Expr
-		if rhs, err = p.parseUnaryExpr(op == ast.ARROW); err != nil {
+		if rhs, err = p.parseUnaryExpr(op == ast.ARROW || op == ast.DOT); err != nil {
 			return nil, err
 			return nil, err
+		} else if op == ast.DOT {
+			op = ast.ARROW
 		}
 		}
 		if op == ast.LIKE || op == ast.NOTLIKE {
 		if op == ast.LIKE || op == ast.NOTLIKE {
 			lp := &ast.LikePattern{
 			lp := &ast.LikePattern{
@@ -633,12 +649,20 @@ func (p *Parser) parseUnaryExpr(isSubField bool) (ast.Expr, error) {
 		}
 		}
 		p.unscan() // Back the Lparen token
 		p.unscan() // Back the Lparen token
 		p.unscan() // Back the ident token
 		p.unscan() // Back the ident token
-		if n, err := p.parseFieldNameSections(); err != nil {
+		if n, err := p.parseFieldNameSections(isSubField); err != nil {
 			return nil, err
 			return nil, err
 		} else {
 		} else {
 			if p.inmeta() {
 			if p.inmeta() {
 				if len(n) == 2 {
 				if len(n) == 2 {
-					return &ast.MetaRef{StreamName: ast.StreamName(n[0]), Name: n[1]}, nil
+					if len(p.sourceNames) > 0 && !contains(p.sourceNames, n[0]) {
+						return &ast.BinaryExpr{
+							LHS: &ast.MetaRef{StreamName: ast.DefaultStream, Name: n[0]},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: n[1]},
+						}, nil
+					} else {
+						return &ast.MetaRef{StreamName: ast.StreamName(n[0]), Name: n[1]}, nil
+					}
 				}
 				}
 				if isSubField {
 				if isSubField {
 					return &ast.JsonFieldRef{Name: n[0]}, nil
 					return &ast.JsonFieldRef{Name: n[0]}, nil
@@ -646,7 +670,15 @@ func (p *Parser) parseUnaryExpr(isSubField bool) (ast.Expr, error) {
 				return &ast.MetaRef{StreamName: ast.DefaultStream, Name: n[0]}, nil
 				return &ast.MetaRef{StreamName: ast.DefaultStream, Name: n[0]}, nil
 			} else {
 			} else {
 				if len(n) == 2 {
 				if len(n) == 2 {
-					return &ast.FieldRef{StreamName: ast.StreamName(n[0]), Name: n[1]}, nil
+					if len(p.sourceNames) > 0 && !contains(p.sourceNames, n[0]) {
+						return &ast.BinaryExpr{
+							LHS: &ast.FieldRef{StreamName: ast.DefaultStream, Name: n[0]},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: n[1]},
+						}, nil
+					} else {
+						return &ast.FieldRef{StreamName: ast.StreamName(n[0]), Name: n[1]}, nil
+					}
 				}
 				}
 				if isSubField {
 				if isSubField {
 					return &ast.JsonFieldRef{Name: n[0]}, nil
 					return &ast.JsonFieldRef{Name: n[0]}, nil

+ 180 - 5
internal/xsql/parser_test.go

@@ -210,7 +210,16 @@ func TestParser_ParseStatement(t *testing.T) {
 			stmt: &ast.SelectStatement{
 			stmt: &ast.SelectStatement{
 				Fields: []ast.Field{
 				Fields: []ast.Field{
 					{
 					{
-						Expr:  &ast.FieldRef{StreamName: ast.StreamName("t1"), Name: "name"},
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.FieldRef{
+								Name:       "t1",
+								StreamName: ast.DefaultStream,
+							},
+							OP: ast.ARROW,
+							RHS: &ast.JsonFieldRef{
+								Name: "name",
+							},
+						},
 						Name:  "name",
 						Name:  "name",
 						AName: "",
 						AName: "",
 					},
 					},
@@ -1411,7 +1420,7 @@ func TestParser_ParseStatement(t *testing.T) {
 						},
 						},
 					},
 					},
 				},
 				},
-				SortFields: []ast.SortField{{Uname: "s1\007name", Name: "name", StreamName: ast.StreamName("s1"), Ascending: true, FieldExpr: &ast.FieldRef{Name: "name", StreamName: ast.DefaultStream}}},
+				SortFields: []ast.SortField{{Uname: "s1\007name", Name: "name", StreamName: ast.StreamName("s1"), Ascending: true, FieldExpr: &ast.FieldRef{Name: "name", StreamName: "s1"}}},
 			},
 			},
 		},
 		},
 
 
@@ -2618,6 +2627,98 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 		},
 		},
 
 
 		{
 		{
+			s: `SELECT demo.children->first->test FROM demo`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.BinaryExpr{
+								LHS: &ast.FieldRef{Name: "children", StreamName: "demo"},
+								OP:  ast.ARROW,
+								RHS: &ast.JsonFieldRef{Name: "first"},
+							},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: "test"},
+						},
+
+						Name:  "kuiper_field_0",
+						AName: "",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+			},
+		},
+
+		{
+			s: `SELECT demo.children.first.test FROM demo`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.BinaryExpr{
+								LHS: &ast.FieldRef{Name: "children", StreamName: "demo"},
+								OP:  ast.ARROW,
+								RHS: &ast.JsonFieldRef{Name: "first"},
+							},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: "test"},
+						},
+
+						Name:  "kuiper_field_0",
+						AName: "",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+			},
+		},
+
+		{
+			s: `SELECT demo.children.first->test FROM demo`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.BinaryExpr{
+								LHS: &ast.FieldRef{Name: "children", StreamName: "demo"},
+								OP:  ast.ARROW,
+								RHS: &ast.JsonFieldRef{Name: "first"},
+							},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: "test"},
+						},
+
+						Name:  "kuiper_field_0",
+						AName: "",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+			},
+		},
+
+		{
+			s: `SELECT demo.children->first.test FROM demo`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.BinaryExpr{
+								LHS: &ast.FieldRef{Name: "children", StreamName: "demo"},
+								OP:  ast.ARROW,
+								RHS: &ast.JsonFieldRef{Name: "first"},
+							},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: "test"},
+						},
+
+						Name:  "kuiper_field_0",
+						AName: "",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+			},
+		},
+
+		{
 			s: `SELECT children[0:1] FROM demo`,
 			s: `SELECT children[0:1] FROM demo`,
 			stmt: &ast.SelectStatement{
 			stmt: &ast.SelectStatement{
 				Fields: []ast.Field{
 				Fields: []ast.Field{
@@ -2790,6 +2891,68 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 		},
 		},
 
 
 		{
 		{
+			s: `SELECT children[:1] FROM demo WHERE abc[0] IN demo.children[2:].first`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.FieldRef{Name: "children", StreamName: ast.DefaultStream},
+							OP:  ast.SUBSET,
+							RHS: &ast.ColonExpr{Start: &ast.IntegerLiteral{Val: 0}, End: &ast.IntegerLiteral{Val: 1}},
+						},
+						Name:  "kuiper_field_0",
+						AName: "",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+				Condition: &ast.BinaryExpr{
+					LHS: &ast.BinaryExpr{
+						LHS: &ast.FieldRef{Name: "abc", StreamName: ast.DefaultStream},
+						OP:  ast.SUBSET,
+						RHS: &ast.IndexExpr{Index: &ast.IntegerLiteral{Val: 0}},
+					},
+					OP: ast.IN,
+					RHS: &ast.BinaryExpr{
+						LHS: &ast.BinaryExpr{LHS: &ast.FieldRef{StreamName: ast.StreamName("demo"), Name: "children"}, OP: ast.SUBSET, RHS: &ast.ColonExpr{Start: &ast.IntegerLiteral{Val: 2}, End: &ast.IntegerLiteral{Val: math.MinInt32}}},
+						OP:  ast.ARROW,
+						RHS: &ast.JsonFieldRef{Name: "first"},
+					},
+				},
+			},
+		},
+
+		{
+			s: `SELECT children[:1] FROM demo WHERE abc[0] IN children[2:].first`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.FieldRef{Name: "children", StreamName: ast.DefaultStream},
+							OP:  ast.SUBSET,
+							RHS: &ast.ColonExpr{Start: &ast.IntegerLiteral{Val: 0}, End: &ast.IntegerLiteral{Val: 1}},
+						},
+						Name:  "kuiper_field_0",
+						AName: "",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+				Condition: &ast.BinaryExpr{
+					LHS: &ast.BinaryExpr{
+						LHS: &ast.FieldRef{Name: "abc", StreamName: ast.DefaultStream},
+						OP:  ast.SUBSET,
+						RHS: &ast.IndexExpr{Index: &ast.IntegerLiteral{Val: 0}},
+					},
+					OP: ast.IN,
+					RHS: &ast.BinaryExpr{
+						LHS: &ast.BinaryExpr{LHS: &ast.FieldRef{StreamName: ast.DefaultStream, Name: "children"}, OP: ast.SUBSET, RHS: &ast.ColonExpr{Start: &ast.IntegerLiteral{Val: 2}, End: &ast.IntegerLiteral{Val: math.MinInt32}}},
+						OP:  ast.ARROW,
+						RHS: &ast.JsonFieldRef{Name: "first"},
+					},
+				},
+			},
+		},
+
+		{
 			s: `SELECT children[:1] FROM demo WHERE abc[0] IN demo.children[2:]->first`,
 			s: `SELECT children[:1] FROM demo WHERE abc[0] IN demo.children[2:]->first`,
 			stmt: &ast.SelectStatement{
 			stmt: &ast.SelectStatement{
 				Fields: []ast.Field{
 				Fields: []ast.Field{
@@ -2821,9 +2984,21 @@ func TestParser_ParseJsonExpr(t *testing.T) {
 		},
 		},
 
 
 		{
 		{
-			s:    `SELECT demo.children.first AS c FROM demo`,
-			stmt: nil,
-			err:  "Too many field names. Please use -> to reference keys in struct.\n",
+			s: `SELECT demo.children.first AS c FROM demo`,
+			stmt: &ast.SelectStatement{
+				Fields: []ast.Field{
+					{
+						Expr: &ast.BinaryExpr{
+							LHS: &ast.FieldRef{Name: "children", StreamName: "demo"},
+							OP:  ast.ARROW,
+							RHS: &ast.JsonFieldRef{Name: "first"},
+						},
+						Name:  "",
+						AName: "c",
+					},
+				},
+				Sources: []ast.Source{&ast.Table{Name: "demo"}},
+			},
 		},
 		},
 		{
 		{
 			s: `SELECT children[index] FROM demo`,
 			s: `SELECT children[index] FROM demo`,

+ 126 - 1
internal/xsql/sqlValidator.go

@@ -20,7 +20,7 @@ import (
 	"github.com/lf-edge/ekuiper/pkg/ast"
 	"github.com/lf-edge/ekuiper/pkg/ast"
 )
 )
 
 
-// Validate validate select statement without context.
+// Validate select statement without context.
 // This is the pre-validation. In planner, there will be a more comprehensive validation after binding
 // This is the pre-validation. In planner, there will be a more comprehensive validation after binding
 func Validate(stmt *ast.SelectStatement) error {
 func Validate(stmt *ast.SelectStatement) error {
 	if HasAggFuncs(stmt.Condition) {
 	if HasAggFuncs(stmt.Condition) {
@@ -115,3 +115,128 @@ func isSRFExists(node ast.Node) bool {
 	})
 	})
 	return exists
 	return exists
 }
 }
+
+func validateFields(stmt *ast.SelectStatement, streamNames []string) {
+	for i, field := range stmt.Fields {
+		stmt.Fields[i].Expr = validateExpr(field.Expr, streamNames)
+	}
+	for i, join := range stmt.Joins {
+		stmt.Joins[i].Expr = validateExpr(join.Expr, streamNames)
+	}
+}
+
+// validateExpr checks whether the streamName of a fieldRef exists and converts the ref to a json field if it does not.
+// The expr is the expression to be validated, and streamName holds the stream names of the current select statement.
+// The expr only covers the expression kinds that may appear in fields and join conditions.
+func validateExpr(expr ast.Expr, streamName []string) ast.Expr {
+	switch e := expr.(type) {
+	case *ast.ParenExpr:
+		e.Expr = validateExpr(e.Expr, streamName)
+		return e
+	case *ast.ArrowExpr:
+		e.Expr = validateExpr(e.Expr, streamName)
+		return e
+	case *ast.BracketExpr:
+		e.Expr = validateExpr(e.Expr, streamName)
+		return e
+	case *ast.ColonExpr:
+		e.Start = validateExpr(e.Start, streamName)
+		e.End = validateExpr(e.End, streamName)
+		return e
+	case *ast.IndexExpr:
+		e.Index = validateExpr(e.Index, streamName)
+		return e
+	case *ast.Call:
+		for i, arg := range e.Args {
+			e.Args[i] = validateExpr(arg, streamName)
+		}
+		if e.Partition != nil {
+			for i, p := range e.Partition.Exprs {
+				e.Partition.Exprs[i] = validateExpr(p, streamName)
+			}
+		}
+		if e.WhenExpr != nil {
+			e.WhenExpr = validateExpr(e.WhenExpr, streamName)
+		}
+		return e
+	case *ast.BinaryExpr:
+		exp := ast.BinaryExpr{}
+		exp.OP = e.OP
+		if e.OP == ast.DOT {
+			exp.OP = ast.ARROW
+		}
+		exp.RHS = validateExpr(e.RHS, streamName)
+		exp.LHS = validateExpr(e.LHS, streamName)
+		return &exp
+	case *ast.CaseExpr:
+		e.Value = validateExpr(e.Value, streamName)
+		e.ElseClause = validateExpr(e.ElseClause, streamName)
+		for i, when := range e.WhenClauses {
+			e.WhenClauses[i].Expr = validateExpr(when.Expr, streamName)
+			e.WhenClauses[i].Result = validateExpr(when.Result, streamName)
+		}
+		return e
+	case *ast.ValueSetExpr:
+		e.ArrayExpr = validateExpr(e.ArrayExpr, streamName)
+		for i, v := range e.LiteralExprs {
+			e.LiteralExprs[i] = validateExpr(v, streamName)
+		}
+		return e
+	case *ast.BetweenExpr:
+		e.Higher = validateExpr(e.Higher, streamName)
+		e.Lower = validateExpr(e.Lower, streamName)
+		return e
+	case *ast.LikePattern:
+		e.Expr = validateExpr(e.Expr, streamName)
+		return e
+	case *ast.FieldRef:
+		sn := string(expr.(*ast.FieldRef).StreamName)
+		if sn != string(ast.DefaultStream) && !contains(streamName, sn) {
+			return &ast.BinaryExpr{OP: ast.ARROW, LHS: &ast.FieldRef{Name: string(expr.(*ast.FieldRef).StreamName), StreamName: ast.DefaultStream}, RHS: &ast.JsonFieldRef{Name: expr.(*ast.FieldRef).Name}}
+		}
+		return expr
+	case *ast.MetaRef:
+		sn := string(expr.(*ast.MetaRef).StreamName)
+		if sn != string(ast.DefaultStream) && !contains(streamName, sn) {
+			return &ast.BinaryExpr{OP: ast.ARROW, LHS: &ast.MetaRef{Name: string(expr.(*ast.MetaRef).StreamName), StreamName: ast.DefaultStream}, RHS: &ast.JsonFieldRef{Name: expr.(*ast.MetaRef).Name}}
+		}
+		return expr
+	case *ast.ColFuncField:
+		e.Expr = validateExpr(e.Expr, streamName)
+		return e
+	default:
+		return expr
+	}
+}
+
+func contains(streamName []string, name string) bool {
+	for _, s := range streamName {
+		if s == name {
+			return true
+		}
+	}
+	return false
+}
+
+func getStreamNames(stmt *ast.SelectStatement) (result []string) {
+	if stmt == nil {
+		return nil
+	}
+
+	for _, source := range stmt.Sources {
+		if s, ok := source.(*ast.Table); ok {
+			result = append(result, s.Name)
+			if s.Alias != "" {
+				result = append(result, s.Alias)
+			}
+		}
+	}
+
+	for _, join := range stmt.Joins {
+		result = append(result, join.Name)
+		if join.Alias != "" {
+			result = append(result, join.Alias)
+		}
+	}
+	return
+}

+ 3 - 3
pkg/ast/token.go

@@ -1,4 +1,4 @@
-// Copyright 2021-2022 EMQ Technologies Co., Ltd.
+// Copyright 2021-2023 EMQ Technologies Co., Ltd.
 //
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // you may not use this file except in compliance with the License.
@@ -278,7 +278,7 @@ func (tok Token) String() string {
 }
 }
 
 
 func (tok Token) IsOperator() bool {
 func (tok Token) IsOperator() bool {
-	return (tok > operatorBeg && tok < operatorEnd) || tok == ASTERISK || tok == LBRACKET
+	return (tok > operatorBeg && tok < operatorEnd) || tok == ASTERISK || tok == LBRACKET || tok == DOT
 }
 }
 
 
 func (tok Token) IsTimeLiteral() bool { return tok >= DD && tok <= MS }
 func (tok Token) IsTimeLiteral() bool { return tok >= DD && tok <= MS }
@@ -300,7 +300,7 @@ func (tok Token) Precedence() int {
 		return 3
 		return 3
 	case ADD, SUB, BITWISE_OR, BITWISE_XOR:
 	case ADD, SUB, BITWISE_OR, BITWISE_XOR:
 		return 4
 		return 4
-	case MUL, DIV, MOD, BITWISE_AND, SUBSET, ARROW:
+	case MUL, DIV, MOD, BITWISE_AND, SUBSET, ARROW, DOT:
 		return 5
 		return 5
 	}
 	}
 	return 0
 	return 0

+ 21 - 8
pkg/ast/visitor.go

@@ -1,4 +1,4 @@
-// Copyright 2021-2022 EMQ Technologies Co., Ltd.
+// Copyright 2021-2023 EMQ Technologies Co., Ltd.
 //
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // you may not use this file except in compliance with the License.
@@ -107,6 +107,19 @@ func Walk(v Visitor, node Node) {
 	case *ParenExpr:
 	case *ParenExpr:
 		Walk(v, n.Expr)
 		Walk(v, n.Expr)
 
 
+	case *ArrowExpr:
+		Walk(v, n.Expr)
+
+	case *BracketExpr:
+		Walk(v, n.Expr)
+
+	case *ColonExpr:
+		Walk(v, n.Start)
+		Walk(v, n.End)
+
+	case *IndexExpr:
+		Walk(v, n.Index)
+
 	case *CaseExpr:
 	case *CaseExpr:
 		Walk(v, n.Value)
 		Walk(v, n.Value)
 		for _, w := range n.WhenClauses {
 		for _, w := range n.WhenClauses {
@@ -114,13 +127,6 @@ func Walk(v Visitor, node Node) {
 		}
 		}
 		Walk(v, n.ElseClause)
 		Walk(v, n.ElseClause)
 
 
-	case *WhenClause:
-		Walk(v, n.Expr)
-		Walk(v, n.Result)
-
-	case *IndexExpr:
-		Walk(v, n.Index)
-
 	case *ColFuncField:
 	case *ColFuncField:
 		Walk(v, n.Expr)
 		Walk(v, n.Expr)
 
 
@@ -129,6 +135,13 @@ func Walk(v Visitor, node Node) {
 			Walk(v, l)
 			Walk(v, l)
 		}
 		}
 		Walk(v, n.ArrayExpr)
 		Walk(v, n.ArrayExpr)
+
+	case *BetweenExpr:
+		Walk(v, n.Lower)
+		Walk(v, n.Higher)
+
+	case *LikePattern:
+		Walk(v, n.Expr)
 	}
 	}
 }
 }