mirror of https://github.com/oarkflow/mq.git

feat: update
dag/dag.go: 14 changes
@@ -245,12 +245,14 @@ func (d *DAG) updateTaskMetrics(taskID string, result mq.Result, duration time.D
 	case mq.Cancelled:
 		d.metrics.Cancelled++
 	}
-	d.Logger().Info("Updating task metrics",
-		logger.Field{Key: "taskID", Value: taskID},
-		logger.Field{Key: "lastExecuted", Value: time.Now()},
-		logger.Field{Key: "duration", Value: duration},
-		logger.Field{Key: "success", Value: result.Status},
-	)
+	if d.debug {
+		d.Logger().Info("Updating task metrics",
+			logger.Field{Key: "taskID", Value: taskID},
+			logger.Field{Key: "lastExecuted", Value: time.Now()},
+			logger.Field{Key: "duration", Value: duration},
+			logger.Field{Key: "success", Value: result.Status},
+		)
+	}
 }
 
 // Getter for task metrics.
@@ -255,10 +255,12 @@ func (tm *TransactionManager) CommitTransaction(txID string) error {
 	tx.Status = TransactionStatusCommitted
 	tx.EndTime = time.Now()
 
-	tm.logger.Info("Transaction committed",
-		logger.Field{Key: "transaction_id", Value: txID},
-		logger.Field{Key: "operations_count", Value: len(tx.Operations)},
-	)
+	if tm.dag.debug {
+		tm.logger.Info("Transaction committed",
+			logger.Field{Key: "transaction_id", Value: txID},
+			logger.Field{Key: "operations_count", Value: len(tx.Operations)},
+		)
+	}
 
 	// Clean up save points
 	delete(tm.savePoints, txID)
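Note: the two hunks above apply the same pattern: informational logging on the hot path is now gated behind the DAG's debug flag, while the metric and state updates themselves stay unconditional. A minimal sketch of the pattern, with a simplified stand-in for the DAG type (only the debug field and the Logger() accessor mirror the diff):

package sketch

import "log"

// DAG is a simplified stand-in; the real type lives in dag/dag.go.
type DAG struct {
	debug  bool
	logger *log.Logger
}

func (d *DAG) Logger() *log.Logger { return d.logger }

func (d *DAG) updateTaskMetrics(taskID string) {
	// ... counters updated unconditionally here ...

	// Log only when debugging: production runs skip the per-task
	// field construction and log I/O entirely.
	if d.debug {
		d.Logger().Printf("Updating task metrics: taskID=%s", taskID)
	}
}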
@@ -4,13 +4,13 @@ import (
 	"context"
 	"fmt"
 	"log"
+	"math/rand" // ...new import for jitter...
 	"strings"
 	"sync"
 	"time"
 
-	"math/rand" // ...new import for jitter...
-
 	"github.com/oarkflow/json"
 
 	"github.com/oarkflow/mq"
 	"github.com/oarkflow/mq/logger"
 	"github.com/oarkflow/mq/storage"
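Note: this hunk only moves "math/rand" (commented as the jitter import) from its own group into the standard-library block; the call site is not shown in this commit. For context, a typical backoff-with-jitter helper looks roughly like this (the function and its parameters are illustrative, not from the repo):

package sketch

import (
	"math/rand"
	"time"
)

// backoffWithJitter grows the delay exponentially per attempt and adds up
// to 50% random skew so simultaneous retries don't stampede in lockstep.
func backoffWithJitter(attempt int, base time.Duration) time.Duration {
	d := base * time.Duration(1<<attempt)
	jitter := time.Duration(rand.Int63n(int64(d/2) + 1)) // +1 keeps Int63n's argument positive
	return d + jitter
}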
@@ -476,7 +476,9 @@ func (tm *TaskManager) logNodeExecution(exec *task, pureNodeID string, result mq
 		fields = append(fields, logger.Field{Key: "error", Value: result.Error.Error()})
 		tm.dag.Logger().Error("Node execution failed", fields...)
 	} else {
-		tm.dag.Logger().Info("Node execution completed", fields...)
+		if tm.dag.debug {
+			tm.dag.Logger().Info("Node execution completed", fields...)
+		}
 	}
 }
 
@@ -284,8 +284,6 @@ func (po *PerformanceOptimizer) adjustWorkerCount() {
 
 	// Apply scaling
 	if targetWorkers != currentWorkers {
-		po.workerPool.logger.Info().Msg(fmt.Sprintf("Auto-scaling workers from %d to %d (queue: %d)",
-			currentWorkers, targetWorkers, queueDepth))
 		po.workerPool.AdjustWorkerCount(targetWorkers)
 	}
 }
pool.go: 2 changes
@@ -1356,8 +1356,6 @@ func (wp *Pool) adjustWorkersBasedOnLoad() {
 	targetWorkers = max(minWorkers, min(maxWorkers, targetWorkers))
 
 	if targetWorkers != currentWorkers {
-		wp.logger.Info().Msgf("Auto-scaling workers from %d to %d (queue: %d, overflow: %d)",
-			currentWorkers, targetWorkers, queueLen, overflowLen)
 		wp.AdjustWorkerCount(targetWorkers)
 	}
 }
@@ -11,7 +11,7 @@ import (
 type Response struct {
 	Additional any    `json:"additional,omitempty"`
 	Data       any    `json:"data"`
-	Message    string `json:"message,omitempty"`
+	Message    string `json:"message"`
 	StackTrace string `json:"stack_trace,omitempty"`
 	Code       int    `json:"code"`
 	Success    bool   `json:"success"`
@@ -19,8 +19,6 @@ import (
 	"github.com/oarkflow/form"
 	"github.com/oarkflow/json"
 	"github.com/oarkflow/log"
-	"github.com/oarkflow/protocol/utils/str"
-
 	"github.com/oarkflow/mq"
 	"github.com/oarkflow/mq/consts"
 	"github.com/oarkflow/mq/dag"
@@ -28,6 +26,7 @@ import (
 	"github.com/oarkflow/mq/services/middlewares"
 	"github.com/oarkflow/mq/services/renderer"
 	"github.com/oarkflow/mq/services/utils"
+	"github.com/oarkflow/protocol/utils/str"
 )
 
 var ValidationInstance Validation
@@ -142,41 +141,34 @@ func prepareNode(flow *dag.DAG, node Node) error {
 			GeneratedFields: node.Data.GeneratedFields,
 			Providers:       providers,
 		})
-		if s, ok := node.Data.AdditionalData["conditions"]; ok {
-			var fil map[string]*Filter
-			err := Map(&fil, s)
-			if err != nil {
-				return err
-			}
-			condition := make(map[string]string)
-			conditions := make(map[string]dag.Condition)
-			for key, cond := range fil {
-				condition[key] = cond.Node
-				if cond.Filter != nil {
-					conditions[key] = cond.Filter
-				} else if cond.FilterGroup != nil {
-					cond.FilterGroup.Operator = strings.ToUpper(cond.FilterGroup.Operator)
-					if !slices.Contains([]string{"AND", "OR"}, cond.FilterGroup.Operator) {
-						cond.FilterGroup.Operator = "AND"
-					}
-					var fillers []filters.Condition
-					for _, f := range cond.FilterGroup.Filters {
-						if f != nil {
-							fillers = append(fillers, f)
-						}
-					}
-					conditions[key] = &filters.FilterGroup{
-						Operator: filters.Boolean(cond.FilterGroup.Operator),
-						Reverse:  cond.FilterGroup.Reverse,
-						Filters:  fillers,
-					}
-				} else {
-					conditions[key] = nil
-				}
-			}
-			flow.AddCondition(node.ID, condition)
-			nodeHandler.SetConditions(conditions)
-		}
+		condition := make(map[string]string)
+		conditions := make(map[string]dag.Condition)
+		for key, cond := range node.Data.Conditions {
+			condition[key] = cond.Node
+			if cond.Filter != nil {
+				conditions[key] = cond.Filter
+			} else if cond.FilterGroup != nil {
+				cond.FilterGroup.Operator = strings.ToUpper(cond.FilterGroup.Operator)
+				if !slices.Contains([]string{"AND", "OR"}, cond.FilterGroup.Operator) {
+					cond.FilterGroup.Operator = "AND"
+				}
+				var fillers []filters.Condition
+				for _, f := range cond.FilterGroup.Filters {
+					if f != nil {
+						fillers = append(fillers, f)
+					}
+				}
+				conditions[key] = &filters.FilterGroup{
+					Operator: filters.Boolean(cond.FilterGroup.Operator),
+					Reverse:  cond.FilterGroup.Reverse,
+					Filters:  fillers,
+				}
+			} else {
+				conditions[key] = nil
+			}
+		}
+		flow.AddCondition(node.ID, condition)
+		nodeHandler.SetConditions(conditions)
 	case dag.Processor:
 		nodeHandler.SetConfig(dag.Payload{
 			Mapping:         node.Data.Mapping,
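Note: this hunk replaces the untyped lookup (node.Data.AdditionalData["conditions"] decoded via Map into map[string]*Filter, with an error return) with direct iteration over the new typed node.Data.Conditions field (added to the Data struct below), so the intermediate decode and its failure path disappear, and AddCondition/SetConditions now always run (over empty maps when no conditions exist). The field accesses in the loop imply a Filter shape roughly like the sketch below; the real declaration is not part of this diff and may differ:

package sketch

import "github.com/oarkflow/filters" // import path assumed from the filters.* references

// filterShape mirrors the accesses cond.Node, cond.Filter, cond.FilterGroup.
type filterShape struct {
	Node        string            // DAG node this condition key routes to
	Filter      filters.Condition // single-condition form
	FilterGroup *filterGroupShape // grouped form, normalized to AND/OR
}

type filterGroupShape struct {
	Operator string              // upper-cased; defaulted to "AND" if invalid
	Reverse  bool
	Filters  []filters.Condition // nil entries are dropped before use
}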
@@ -459,7 +451,7 @@ func customHandler(flow *dag.DAG) fiber.Handler {
 		if contentType == "" ||
 			contentType == fiber.MIMEApplicationJSON ||
 			contentType == fiber.MIMEApplicationJSONCharsetUTF8 {
-			return ctx.JSON(result)
+			return responses.Success(ctx, 200, result.Payload)
 		}
 
 		var resultData map[string]any
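Note: JSON requests now receive the standard envelope instead of the raw result. Given the Response struct from the earlier hunk, responses.Success plausibly wraps the payload like this (the helper's body is not shown in this commit; fiber v2 signatures assumed):

package responses

import "github.com/gofiber/fiber/v2"

// Success is a plausible shape for the helper called above: it wraps the
// payload in the Response envelope and serializes it with the given status.
func Success(ctx *fiber.Ctx, code int, data any) error {
	return ctx.Status(code).JSON(Response{
		Data:    data,
		Code:    code,
		Success: true,
	})
}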
@@ -126,6 +126,7 @@ type Data struct {
 	Mapping         map[string]string `json:"mapping,omitempty" yaml:"mapping,omitempty"`
 	AdditionalData  map[string]any    `json:"additional_data,omitempty" yaml:"additional_data,omitempty"`
 	GeneratedFields []string          `json:"generated_fields,omitempty" yaml:"generated_fields,omitempty"`
+	Conditions      map[string]Filter `json:"conditions,omitempty" yaml:"conditions,omitempty"`
 	Providers       []Provider        `json:"providers,omitempty" yaml:"providers,omitempty"`
 }
 
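Note: this is the typed field the rewritten prepareNode loop consumes. A minimal decode sketch, assuming Filter's Node field carries a `json:"node"` tag (only the Conditions tag above is from the diff; stdlib encoding/json stands in for the repo's json package):

package sketch

import (
	"encoding/json"
	"fmt"
)

// decodeExample shows conditions landing directly in the typed field,
// with no detour through AdditionalData["conditions"]. Data here refers
// to the struct in the hunk above.
func decodeExample() error {
	raw := []byte(`{"conditions": {"pass": {"node": "approve"}}}`)
	var d Data
	if err := json.Unmarshal(raw, &d); err != nil {
		return err
	}
	fmt.Println(d.Conditions["pass"].Node) // prints: approve
	return nil
}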