mirror of https://github.com/oarkflow/mq.git (synced 2025-10-07 17:00:57 +08:00)

Commit: update
@@ -141,6 +141,7 @@ func testSplitJoinHandler() {
	splitHandler := handlers.NewSplitHandler("split-test")
	splitConfig := dag.Payload{
		Data: map[string]any{
			"operation": "split",
			"fields": []string{"full_name", "skills"},
			"separator": " ",
		},
@@ -155,6 +156,7 @@ func testSplitJoinHandler() {
	splitHandler2 := handlers.NewSplitHandler("split-test-2")
	splitConfig2 := dag.Payload{
		Data: map[string]any{
			"operation": "split",
			"fields": []string{"tags"},
			"separator": ",",
		},
@@ -176,6 +178,7 @@ func testSplitJoinHandler() {
	joinHandler := handlers.NewJoinHandler("join-test")
	joinConfig := dag.Payload{
		Data: map[string]any{
			"operation": "join",
			"source_fields": []string{"title", "first_name", "middle_name", "last_name"},
			"target_field": "full_name_with_title",
			"separator": " ",
@@ -186,6 +189,18 @@ func testSplitJoinHandler() {
	result = runHandler(joinHandler, joinData, "Join Operation")
	printResult("String joining", result)
	printRequestConfigResult(joinData, joinConfig, result)

	fmt.Printf("Split Test Data: %+v\n", testData)
	fmt.Printf("Split Config: %+v\n", splitConfig.Data)
	fmt.Printf("Split Result: %+v\n", result)

	fmt.Printf("Split Test Data (comma): %+v\n", testData)
	fmt.Printf("Split Config (comma): %+v\n", splitConfig2.Data)
	fmt.Printf("Split Result (comma): %+v\n", result)

	fmt.Printf("Join Test Data: %+v\n", joinData)
	fmt.Printf("Join Config: %+v\n", joinConfig.Data)
	fmt.Printf("Join Result: %+v\n", result)
}

func testFlattenHandler() {
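For orientation, the behaviour this test configures can be sketched without the mq handler machinery. The snippet below is a self-contained illustration of the same split and join semantics (space split of full_name into indexed parts, a parts slice and a count, plus a space join of the name fields into full_name_with_title). The sample field values are invented, the real test data is not shown in these hunks, and the dag.Payload / runHandler plumbing from the test is deliberately left out.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Invented sample record for illustration only.
	data := map[string]any{
		"full_name":   "Jane Q Public",
		"title":       "Dr",
		"first_name":  "Jane",
		"middle_name": "Q",
		"last_name":   "Public",
	}

	// Split: mirrors the "split" config above ("fields" plus "separator").
	parts := strings.Split(data["full_name"].(string), " ")
	for i, p := range parts {
		data[fmt.Sprintf("full_name_%d", i)] = strings.TrimSpace(p)
	}
	data["full_name_parts"] = parts
	data["full_name_count"] = len(parts)

	// Join: mirrors the "join" config above ("source_fields" into "target_field").
	var joined []string
	for _, f := range []string{"title", "first_name", "middle_name", "last_name"} {
		if s, ok := data[f].(string); ok && s != "" { // this sketch skips empty values
			joined = append(joined, s)
		}
	}
	data["full_name_with_title"] = strings.Join(joined, " ")

	fmt.Printf("Result: %+v\n", data)
}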
@@ -49,14 +49,16 @@ func (h *SplitHandler) splitOperation(data map[string]any) map[string]any {
	result := make(map[string]any)
	fields := h.getTargetFields()
	separator := h.getSeparator()
	targetField := h.getTargetField()

	fmt.Printf("Split Operation: Fields=%v, Separator='%s'\n", fields, separator)
	fmt.Printf("Split Operation: Fields=%v, Separator='%s', TargetField='%s'\n", fields, separator, targetField)

	// Copy all original data
	for key, value := range data {
		result[key] = value
	}

	var allParts []string
	for _, field := range fields {
		if val, ok := data[field]; ok {
			if str, ok := val.(string); ok {
@@ -67,15 +69,15 @@ func (h *SplitHandler) splitOperation(data map[string]any) map[string]any {
				// Create individual fields for each part
				for i, part := range parts {
					result[fmt.Sprintf("%s_%d", field, i)] = strings.TrimSpace(part)
					allParts = append(allParts, strings.TrimSpace(part))
				}

				// Also store as array
				result[field+"_parts"] = parts
				result[field+"_count"] = len(parts)
			}
		}
	}

	// Store all parts in the target field
	result[targetField] = allParts

	return result
}
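Taken together, the two hunks above change splitOperation to collect every trimmed part across all configured fields into a single, configurable target field, in addition to the per-field <field>_N, <field>_parts and <field>_count outputs. The split call itself falls between the two hunks and is assumed below to be strings.Split. A condensed, free-standing restatement of that logic follows; the package name and function are made up for the sketch, this is not the handler itself.

package handlers // hypothetical package name for this sketch

import (
	"fmt"
	"strings"
)

// splitFields restates the split logic visible in the diff: copy the input,
// split each configured string field, expose indexed parts plus a parts
// slice and a count per field, and gather every trimmed part under
// targetField (which getTargetField defaults to "split_result").
func splitFields(data map[string]any, fields []string, separator, targetField string) map[string]any {
	result := make(map[string]any, len(data))
	for k, v := range data {
		result[k] = v // copy all original data
	}

	var allParts []string
	for _, field := range fields {
		str, ok := data[field].(string)
		if !ok {
			continue // missing or non-string fields are left untouched
		}
		parts := strings.Split(str, separator)
		for i, part := range parts {
			result[fmt.Sprintf("%s_%d", field, i)] = strings.TrimSpace(part)
			allParts = append(allParts, strings.TrimSpace(part))
		}
		result[field+"_parts"] = parts
		result[field+"_count"] = len(parts)
	}

	result[targetField] = allParts
	return result
}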
@@ -121,6 +123,13 @@ func (h *SplitHandler) getTargetFields() []string {
	return nil
}

func (h *SplitHandler) getTargetField() string {
	if field, ok := h.Payload.Data["target_field"].(string); ok {
		return field
	}
	return "split_result" // Default target field
}

func (h *SplitHandler) getSeparator() string {
	if sep, ok := h.Payload.Data["separator"].(string); ok {
		return sep
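The new getTargetField accessor follows the same shape as getTargetFields and getSeparator: read an optional key from the handler's payload, and fall back to a default when it is missing or not a string. As a generic illustration of that lookup-with-default pattern (the helper name is made up and is not part of the commit):

// stringOrDefault is a hypothetical helper showing the pattern used by
// the accessors above: a type-asserted payload lookup with a fallback value.
func stringOrDefault(data map[string]any, key, def string) string {
	if v, ok := data[key].(string); ok {
		return v
	}
	return def
}

With such a helper, getTargetField would reduce to stringOrDefault(h.Payload.Data, "target_field", "split_result").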
@@ -239,6 +248,9 @@ func (h *JoinHandler) getSourceField() string {
}

func (h *JoinHandler) getSourceFields() []string {
	if fields, ok := h.Payload.Data["source_fields"].([]string); ok {
		return fields
	}
	if fields, ok := h.Payload.Data["source_fields"].([]interface{}); ok {
		var result []string
		for _, field := range fields {
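The getSourceFields hunk is cut off by the page here, midway through the []interface{} branch. Accepting both []string and []interface{} is the usual way to handle payloads that arrive via JSON decoding (which yields []interface{}) as well as ones built directly in Go code. A common way to finish that conversion, shown only as an illustrative sketch and not as the commit's actual code:

// toStringSlice is a hypothetical stand-alone version of the pattern:
// pass []string through unchanged, and pull string elements out of a
// []interface{}, skipping anything that is not a string.
func toStringSlice(v any) []string {
	if s, ok := v.([]string); ok {
		return s
	}
	items, ok := v.([]interface{})
	if !ok {
		return nil
	}
	var out []string
	for _, item := range items {
		if s, ok := item.(string); ok {
			out = append(out, s)
		}
	}
	return out
}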