commit 33857e32d1
parent 1b3ebcc325
Author: sujit
Date:   2025-09-18 18:26:35 +05:45

55 changed files with 757 additions and 896 deletions
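The diff below is a mechanical modernization: every `interface{}` in the touched files is replaced with `any`, the predeclared alias added in Go 1.18 (`type any = interface{}`). The two spellings are fully interchangeable at compile time, as this minimal, self-contained sketch illustrates (the sketch is illustrative only and is not part of the repository):

```go
package main

import "fmt"

// describe accepts any value; the signature is identical to describe(v interface{}).
func describe(v any) string {
	return fmt.Sprintf("%T: %v", v, v)
}

func main() {
	// map[string]any and map[string]interface{} are the same type, so this assignment compiles.
	var m map[string]any = map[string]interface{}{"name": "John Doe"}
	fmt.Println(describe(m))
}
```

A sweep like this is commonly produced with gofmt's rewrite flag, e.g. `gofmt -r 'interface{} -> any' -w .`; the commit itself does not say how it was generated, so that command is only one plausible route.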


@@ -224,7 +224,7 @@ log.Fatal(app.Listen(":3000"))
 ```go
 // Execute workflow programmatically
 ctx := context.Background()
-input := map[string]interface{}{
+input := map[string]any{
 "name": "John Doe",
 "email": "john@example.com",
 }
@@ -311,7 +311,7 @@ Config: dag.WorkflowNodeConfig{
 // Storage processor config
 StorageType: "memory",
-StorageConfig: map[string]interface{}{...},
+StorageConfig: map[string]any{...},
 }
 ```


@@ -103,7 +103,7 @@ engine.RegisterWorkflow(ctx, workflow)
 ### 4. Execute Workflow
 ```go
-execution, err := engine.ExecuteWorkflow(ctx, "sample-workflow", map[string]interface{}{
+execution, err := engine.ExecuteWorkflow(ctx, "sample-workflow", map[string]any{
 "input_data": "test_value",
 }, &workflow.ExecutionOptions{
 Priority: workflow.PriorityMedium,
@@ -187,7 +187,7 @@ Wait for human intervention
 {
 Type: workflow.NodeTypeHumanTask,
 Config: workflow.NodeConfig{
-Custom: map[string]interface{}{
+Custom: map[string]any{
 "assignee": "manager@company.com",
 "due_date": "3 days",
 "description": "Please review and approve",


@@ -30,9 +30,9 @@ type AdminServer struct {
 // AdminMessage represents a message sent via WebSocket
 type AdminMessage struct {
 Type string `json:"type"`
-Data interface{} `json:"data"`
+Data any `json:"data"`
 Timestamp time.Time `json:"timestamp"`
 }
 // TaskUpdate represents a real-time task update
@@ -97,11 +97,11 @@ type AdminSystemMetrics struct {
 // AdminBrokerInfo contains broker status information
 type AdminBrokerInfo struct {
 Status string `json:"status"`
 Address string `json:"address"`
 Uptime int64 `json:"uptime"` // milliseconds
 Connections int `json:"connections"`
-Config map[string]interface{} `json:"config"`
+Config map[string]any `json:"config"`
 }
 // AdminHealthCheck represents a health check result
@@ -686,7 +686,7 @@ func (a *AdminServer) handleFlushQueues(w http.ResponseWriter, r *http.Request)
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "queues_flushed",
 "flushed_count": flushedCount,
 "message": fmt.Sprintf("Flushed %d tasks from all queues", flushedCount),
@@ -733,7 +733,7 @@ func (a *AdminServer) handlePurgeQueue(w http.ResponseWriter, r *http.Request) {
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "queue_purged",
 "queue_name": queueName,
 "purged_count": purgedCount,
@@ -772,7 +772,7 @@ func (a *AdminServer) handlePauseConsumer(w http.ResponseWriter, r *http.Request
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "paused",
 "consumer_id": consumerID,
 "message": fmt.Sprintf("Consumer %s has been paused", consumerID),
@@ -806,7 +806,7 @@ func (a *AdminServer) handleResumeConsumer(w http.ResponseWriter, r *http.Reques
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "active",
 "consumer_id": consumerID,
 "message": fmt.Sprintf("Consumer %s has been resumed", consumerID),
@@ -840,7 +840,7 @@ func (a *AdminServer) handleStopConsumer(w http.ResponseWriter, r *http.Request)
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "stopped",
 "consumer_id": consumerID,
 "message": fmt.Sprintf("Consumer %s has been stopped", consumerID),
@@ -873,7 +873,7 @@ func (a *AdminServer) handlePausePool(w http.ResponseWriter, r *http.Request) {
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "paused",
 "pool_id": poolID,
 "message": fmt.Sprintf("Pool %s has been paused", poolID),
@@ -905,7 +905,7 @@ func (a *AdminServer) handleResumePool(w http.ResponseWriter, r *http.Request) {
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "running",
 "pool_id": poolID,
 "message": fmt.Sprintf("Pool %s has been resumed", poolID),
@@ -937,7 +937,7 @@ func (a *AdminServer) handleStopPool(w http.ResponseWriter, r *http.Request) {
 w.Header().Set("Content-Type", "application/json")
 w.Header().Set("Access-Control-Allow-Origin", "*")
 w.WriteHeader(http.StatusOK)
-response := map[string]interface{}{
+response := map[string]any{
 "status": "stopped",
 "pool_id": poolID,
 "message": fmt.Sprintf("Pool %s has been stopped", poolID),
@@ -958,7 +958,7 @@ func (a *AdminServer) handleGetTasks(w http.ResponseWriter, r *http.Request) {
 w.Header().Set("Access-Control-Allow-Origin", "*")
 tasks := a.getCurrentTasks()
-json.NewEncoder(w).Encode(map[string]interface{}{
+json.NewEncoder(w).Encode(map[string]any{
 "tasks": tasks,
 "count": len(tasks),
 })
@@ -1045,7 +1045,7 @@ func (a *AdminServer) getBrokerInfo() *AdminBrokerInfo {
 Address: a.broker.opts.brokerAddr,
 Uptime: uptime,
 Connections: 0, // Would need to implement connection tracking
-Config: map[string]interface{}{
+Config: map[string]any{
 "max_connections": 1000,
 "read_timeout": "30s",
 "write_timeout": "30s",
@@ -1127,12 +1127,12 @@ func (a *AdminServer) collectMetrics() {
 }
 // getCurrentTasks returns current tasks across all queues
-func (a *AdminServer) getCurrentTasks() []map[string]interface{} {
+func (a *AdminServer) getCurrentTasks() []map[string]any {
 if a.broker == nil {
-return []map[string]interface{}{}
+return []map[string]any{}
 }
-var tasks []map[string]interface{}
+var tasks []map[string]any
 queueNames := a.broker.queues.Keys()
 for _, queueName := range queueNames {
@@ -1143,7 +1143,7 @@ func (a *AdminServer) getCurrentTasks() []map[string]interface{} {
 for i := 0; i < queueLen && i < 100; i++ { // Limit to 100 tasks for performance
 select {
 case task := <-queue.tasks:
-taskInfo := map[string]interface{}{
+taskInfo := map[string]any{
 "id": fmt.Sprintf("task-%d", i),
 "queue": queueName,
 "retry_count": task.RetryCount,


@@ -894,7 +894,7 @@ func (c *Consumer) handleStats(w http.ResponseWriter, r *http.Request) {
 }
 // Gather consumer and pool stats using formatted metrics.
-stats := map[string]interface{}{
+stats := map[string]any{
 "consumer_id": c.id,
 "queue": c.queue,
 "pool_metrics": c.pool.FormattedMetrics(),


@@ -46,23 +46,23 @@ const (
 // ActivityEntry represents a single activity log entry
 type ActivityEntry struct {
 ID string `json:"id"`
 Timestamp time.Time `json:"timestamp"`
 DAGName string `json:"dag_name"`
 Level ActivityLevel `json:"level"`
 Type ActivityType `json:"type"`
 Message string `json:"message"`
 TaskID string `json:"task_id,omitempty"`
 NodeID string `json:"node_id,omitempty"`
 Duration time.Duration `json:"duration,omitempty"`
 Success *bool `json:"success,omitempty"`
 Error string `json:"error,omitempty"`
-Details map[string]interface{} `json:"details,omitempty"`
+Details map[string]any `json:"details,omitempty"`
-ContextData map[string]interface{} `json:"context_data,omitempty"`
+ContextData map[string]any `json:"context_data,omitempty"`
 UserID string `json:"user_id,omitempty"`
 SessionID string `json:"session_id,omitempty"`
 TraceID string `json:"trace_id,omitempty"`
 SpanID string `json:"span_id,omitempty"`
 }
 // ActivityFilter provides filtering options for activity queries
@@ -242,12 +242,12 @@ func (al *ActivityLogger) flushRoutine() {
 }
 // Log logs an activity entry
-func (al *ActivityLogger) Log(level ActivityLevel, activityType ActivityType, message string, details map[string]interface{}) {
+func (al *ActivityLogger) Log(level ActivityLevel, activityType ActivityType, message string, details map[string]any) {
 al.LogWithContext(context.Background(), level, activityType, message, details)
 }
 // LogWithContext logs an activity entry with context information
-func (al *ActivityLogger) LogWithContext(ctx context.Context, level ActivityLevel, activityType ActivityType, message string, details map[string]interface{}) {
+func (al *ActivityLogger) LogWithContext(ctx context.Context, level ActivityLevel, activityType ActivityType, message string, details map[string]any) {
 entry := ActivityEntry{
 ID: mq.NewID(),
 Timestamp: time.Now(),
@@ -256,7 +256,7 @@ func (al *ActivityLogger) LogWithContext(ctx context.Context, level ActivityLeve
 Type: activityType,
 Message: message,
 Details: details,
-ContextData: make(map[string]interface{}),
+ContextData: make(map[string]any),
 }
 // Extract context information
@@ -288,7 +288,7 @@ func (al *ActivityLogger) LogWithContext(ctx context.Context, level ActivityLeve
 }
 // Extract additional context data
-for key, value := range map[string]interface{}{
+for key, value := range map[string]any{
 "method": ctx.Value("method"),
 "user_agent": ctx.Value("user_agent"),
 "ip_address": ctx.Value("ip_address"),
@@ -306,7 +306,7 @@ func (al *ActivityLogger) LogWithContext(ctx context.Context, level ActivityLeve
 func (al *ActivityLogger) LogTaskStart(ctx context.Context, taskID string, nodeID string) {
 al.LogWithContext(ctx, ActivityLevelInfo, ActivityTypeTaskStart,
 fmt.Sprintf("Task %s started on node %s", taskID, nodeID),
-map[string]interface{}{
+map[string]any{
 "task_id": taskID,
 "node_id": nodeID,
 })
@@ -326,7 +326,7 @@ func (al *ActivityLogger) LogTaskComplete(ctx context.Context, taskID string, no
 NodeID: nodeID,
 Duration: duration,
 Success: &success,
-Details: map[string]interface{}{
+Details: map[string]any{
 "task_id": taskID,
 "node_id": nodeID,
 "duration": duration.String(),
@@ -350,7 +350,7 @@ func (al *ActivityLogger) LogTaskFail(ctx context.Context, taskID string, nodeID
 Duration: duration,
 Success: &success,
 Error: err.Error(),
-Details: map[string]interface{}{
+Details: map[string]any{
 "task_id": taskID,
 "node_id": nodeID,
 "duration": duration.String(),


@@ -77,7 +77,7 @@ type DAGCache struct {
 // CacheEntry represents a cached item
 type CacheEntry struct {
-Value interface{}
+Value any
 ExpiresAt time.Time
 AccessCount int64
 LastAccess time.Time
@@ -100,7 +100,7 @@ func NewDAGCache(ttl time.Duration, maxSize int, logger logger.Logger) *DAGCache
 }
 // GetNodeResult retrieves a cached node result
-func (dc *DAGCache) GetNodeResult(key string) (interface{}, bool) {
+func (dc *DAGCache) GetNodeResult(key string) (any, bool) {
 dc.mu.RLock()
 defer dc.mu.RUnlock()
@@ -116,7 +116,7 @@ func (dc *DAGCache) GetNodeResult(key string) (interface{}, bool) {
 }
 // SetNodeResult caches a node result
-func (dc *DAGCache) SetNodeResult(key string, value interface{}) {
+func (dc *DAGCache) SetNodeResult(key string, value any) {
 dc.mu.Lock()
 defer dc.mu.Unlock()


@@ -105,7 +105,7 @@ func (h *EnhancedAPIHandler) getHealth(w http.ResponseWriter, r *http.Request) {
 return
 }
-health := map[string]interface{}{
+health := map[string]any{
 "status": "healthy",
 "timestamp": time.Now(),
 "uptime": time.Since(h.dag.monitor.metrics.StartTime),
@@ -128,7 +128,7 @@ func (h *EnhancedAPIHandler) getHealth(w http.ResponseWriter, r *http.Request) {
 health["reason"] = fmt.Sprintf("High task load: %d tasks in progress", metrics.TasksInProgress)
 }
-health["metrics"] = map[string]interface{}{
+health["metrics"] = map[string]any{
 "total_tasks": metrics.TasksTotal,
 "completed_tasks": metrics.TasksCompleted,
 "failed_tasks": metrics.TasksFailed,
@@ -147,7 +147,7 @@ func (h *EnhancedAPIHandler) validateDAG(w http.ResponseWriter, r *http.Request)
 }
 err := h.dag.ValidateDAG()
-response := map[string]interface{}{
+response := map[string]any{
 "valid": err == nil,
 "timestamp": time.Now(),
 }
@@ -173,7 +173,7 @@ func (h *EnhancedAPIHandler) getTopology(w http.ResponseWriter, r *http.Request)
 return
 }
-h.respondJSON(w, map[string]interface{}{
+h.respondJSON(w, map[string]any{
 "topology": topology,
 "count": len(topology),
 })
@@ -192,7 +192,7 @@ func (h *EnhancedAPIHandler) getCriticalPath(w http.ResponseWriter, r *http.Requ
 return
 }
-h.respondJSON(w, map[string]interface{}{
+h.respondJSON(w, map[string]any{
 "critical_path": path,
 "length": len(path),
 })
@@ -295,7 +295,7 @@ func (h *EnhancedAPIHandler) handleTransaction(w http.ResponseWriter, r *http.Re
 return
 }
-h.respondJSON(w, map[string]interface{}{
+h.respondJSON(w, map[string]any{
 "transaction_id": tx.ID,
 "task_id": tx.TaskID,
 "status": "started",
@@ -349,7 +349,7 @@ func (h *EnhancedAPIHandler) optimizePerformance(w http.ResponseWriter, r *http.
 return
 }
-h.respondJSON(w, map[string]interface{}{
+h.respondJSON(w, map[string]any{
 "status": "optimization completed",
 "timestamp": time.Now(),
 })
@@ -374,7 +374,7 @@ func (h *EnhancedAPIHandler) getCircuitBreakerStatus(w http.ResponseWriter, r *h
 return
 }
-status := map[string]interface{}{
+status := map[string]any{
 "node_id": nodeID,
 "state": h.getCircuitBreakerStateName(cb.GetState()),
 }
@@ -383,7 +383,7 @@ func (h *EnhancedAPIHandler) getCircuitBreakerStatus(w http.ResponseWriter, r *h
 } else {
 // Return status for all circuit breakers
 h.dag.circuitBreakersMu.RLock()
-allStatus := make(map[string]interface{})
+allStatus := make(map[string]any)
 for nodeID, cb := range h.dag.circuitBreakers {
 allStatus[nodeID] = h.getCircuitBreakerStateName(cb.GetState())
 }
@@ -404,7 +404,7 @@ func (h *EnhancedAPIHandler) clearCache(w http.ResponseWriter, r *http.Request)
 h.dag.nextNodesCache = nil
 h.dag.prevNodesCache = nil
-h.respondJSON(w, map[string]interface{}{
+h.respondJSON(w, map[string]any{
 "status": "cache cleared",
 "timestamp": time.Now(),
 })
@@ -417,7 +417,7 @@ func (h *EnhancedAPIHandler) getCacheStats(w http.ResponseWriter, r *http.Reques
 return
 }
-stats := map[string]interface{}{
+stats := map[string]any{
 "next_nodes_cache_size": len(h.dag.nextNodesCache),
 "prev_nodes_cache_size": len(h.dag.prevNodesCache),
 "timestamp": time.Now(),
@@ -428,7 +428,7 @@ func (h *EnhancedAPIHandler) getCacheStats(w http.ResponseWriter, r *http.Reques
 // Helper methods
-func (h *EnhancedAPIHandler) respondJSON(w http.ResponseWriter, data interface{}) {
+func (h *EnhancedAPIHandler) respondJSON(w http.ResponseWriter, data any) {
 w.Header().Set("Content-Type", "application/json")
 json.NewEncoder(w).Encode(data)
 }


@@ -16,7 +16,7 @@ type WorkflowEngine interface {
 Start(ctx context.Context) error
 Stop(ctx context.Context)
 RegisterWorkflow(ctx context.Context, definition *WorkflowDefinition) error
-ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*ExecutionResult, error)
+ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*ExecutionResult, error)
 GetExecution(ctx context.Context, executionID string) (*ExecutionResult, error)
 }
@@ -74,36 +74,36 @@ const (
 // WorkflowDefinition represents a complete workflow
 type WorkflowDefinition struct {
 ID string `json:"id"`
 Name string `json:"name"`
 Description string `json:"description"`
 Version string `json:"version"`
 Status WorkflowStatus `json:"status"`
 Tags []string `json:"tags"`
 Category string `json:"category"`
 Owner string `json:"owner"`
 Nodes []WorkflowNode `json:"nodes"`
 Edges []WorkflowEdge `json:"edges"`
 Variables map[string]Variable `json:"variables"`
 Config WorkflowConfig `json:"config"`
-Metadata map[string]interface{} `json:"metadata"`
+Metadata map[string]any `json:"metadata"`
 CreatedAt time.Time `json:"created_at"`
 UpdatedAt time.Time `json:"updated_at"`
 CreatedBy string `json:"created_by"`
 UpdatedBy string `json:"updated_by"`
 }
 // WorkflowNode represents a single node in the workflow
 type WorkflowNode struct {
 ID string `json:"id"`
 Name string `json:"name"`
 Type WorkflowNodeType `json:"type"`
 Description string `json:"description"`
 Config WorkflowNodeConfig `json:"config"`
 Position Position `json:"position"`
 Timeout *time.Duration `json:"timeout,omitempty"`
 RetryPolicy *RetryPolicy `json:"retry_policy,omitempty"`
-Metadata map[string]interface{} `json:"metadata,omitempty"`
+Metadata map[string]any `json:"metadata,omitempty"`
 }
 // WorkflowNodeConfig holds configuration for different node types
@@ -185,15 +185,15 @@ type WorkflowNodeConfig struct {
 Channel string `json:"channel,omitempty"`
 // Webhook receiver fields
 ListenPath string `json:"listen_path,omitempty"`
 Secret string `json:"secret,omitempty"`
 WebhookSecret string `json:"webhook_secret,omitempty"`
 WebhookSignature string `json:"webhook_signature,omitempty"`
-WebhookTransforms map[string]interface{} `json:"webhook_transforms,omitempty"`
+WebhookTransforms map[string]any `json:"webhook_transforms,omitempty"`
 Timeout time.Duration `json:"timeout,omitempty"`
 // Custom configuration
-Custom map[string]interface{} `json:"custom,omitempty"`
+Custom map[string]any `json:"custom,omitempty"`
 }
 // WorkflowDecisionRule for decision nodes
@@ -204,16 +204,16 @@ type WorkflowDecisionRule struct {
 // WorkflowValidationRule for validator nodes
 type WorkflowValidationRule struct {
 Field string `json:"field"`
 Type string `json:"type"` // "string", "number", "email", "regex", "required"
 Required bool `json:"required"`
 MinLength int `json:"min_length,omitempty"`
 MaxLength int `json:"max_length,omitempty"`
 Min *float64 `json:"min,omitempty"`
 Max *float64 `json:"max,omitempty"`
 Pattern string `json:"pattern,omitempty"`
-Value interface{} `json:"value,omitempty"`
+Value any `json:"value,omitempty"`
 Message string `json:"message,omitempty"`
 }
 // WorkflowRoutingRule for router nodes
@@ -224,22 +224,22 @@ type WorkflowRoutingRule struct {
 // WorkflowEdge represents a connection between nodes
 type WorkflowEdge struct {
 ID string `json:"id"`
 FromNode string `json:"from_node"`
 ToNode string `json:"to_node"`
 Condition string `json:"condition,omitempty"`
 Priority int `json:"priority"`
 Label string `json:"label,omitempty"`
-Metadata map[string]interface{} `json:"metadata,omitempty"`
+Metadata map[string]any `json:"metadata,omitempty"`
 }
 // Variable definition for workflow
 type Variable struct {
 Name string `json:"name"`
 Type string `json:"type"`
-DefaultValue interface{} `json:"default_value"`
+DefaultValue any `json:"default_value"`
 Required bool `json:"required"`
 Description string `json:"description"`
 }
 // WorkflowConfig holds configuration for the entire workflow
@@ -268,15 +268,15 @@ type RetryPolicy struct {
 // ExecutionResult represents the result of workflow execution
 type ExecutionResult struct {
 ID string `json:"id"`
 WorkflowID string `json:"workflow_id"`
 Status ExecutionStatus `json:"status"`
 StartTime time.Time `json:"start_time"`
 EndTime *time.Time `json:"end_time,omitempty"`
-Input map[string]interface{} `json:"input"`
+Input map[string]any `json:"input"`
-Output map[string]interface{} `json:"output"`
+Output map[string]any `json:"output"`
 Error string `json:"error,omitempty"`
-NodeExecutions map[string]interface{} `json:"node_executions,omitempty"`
+NodeExecutions map[string]any `json:"node_executions,omitempty"`
 }
 // EnhancedDAG represents a DAG that integrates with workflow engine concepts
@@ -338,8 +338,8 @@ type WorkflowExecution struct {
 StartTime time.Time
 EndTime *time.Time
 Context context.Context
-Input map[string]interface{}
+Input map[string]any
-Output map[string]interface{}
+Output map[string]any
 Error error
 // Node execution tracking
@@ -352,8 +352,8 @@ type NodeExecution struct {
 Status ExecutionStatus
 StartTime time.Time
 EndTime *time.Time
-Input map[string]interface{}
+Input map[string]any
-Output map[string]interface{}
+Output map[string]any
 Error error
 RetryCount int
 Duration time.Duration
@@ -361,7 +361,7 @@ type NodeExecution struct {
 // WorkflowStateManager manages workflow state and persistence
 type WorkflowStateManager struct {
-stateStore map[string]interface{}
+stateStore map[string]any
 mu sync.RWMutex
 }
@@ -391,7 +391,7 @@ func NewEnhancedDAG(name, key string, config *EnhancedDAGConfig, opts ...mq.Opti
 executionHistory: make(map[string]*WorkflowExecution),
 },
 stateManager: &WorkflowStateManager{
-stateStore: make(map[string]interface{}),
+stateStore: make(map[string]any),
 },
 }
@@ -586,7 +586,7 @@ func (p *workflowNodeProcessor) processAPINode(ctx context.Context, task *mq.Tas
 func (p *workflowNodeProcessor) processTransformNode(ctx context.Context, task *mq.Task) mq.Result {
 // Data transformation processing (simplified implementation)
-var payload map[string]interface{}
+var payload map[string]any
 if err := json.Unmarshal(task.Payload, &payload); err != nil {
 return mq.Result{
 TaskID: task.ID,
@@ -650,7 +650,7 @@ func (p *workflowNodeProcessor) processTimerNode(ctx context.Context, task *mq.T
 }
 // ExecuteWorkflow executes a registered workflow
-func (e *EnhancedDAG) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*WorkflowExecution, error) {
+func (e *EnhancedDAG) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*WorkflowExecution, error) {
 e.mu.RLock()
 definition, exists := e.workflowRegistry[workflowID]
 e.mu.RUnlock()
@@ -761,10 +761,10 @@ func (e *EnhancedDAG) executeWithDAG(execution *WorkflowExecution, definition *W
 }
 // Convert result back to output
-var output map[string]interface{}
+var output map[string]any
 if err := json.Unmarshal(result.Payload, &output); err != nil {
 // If unmarshal fails, create a simple output
-output = map[string]interface{}{"result": string(result.Payload)}
+output = map[string]any{"result": string(result.Payload)}
 }
 execution.Status = ExecutionStatusCompleted


@@ -118,7 +118,7 @@ type Transaction struct {
 EndTime time.Time `json:"end_time,omitempty"`
 Operations []TransactionOperation `json:"operations"`
 SavePoints []SavePoint `json:"save_points"`
-Metadata map[string]interface{} `json:"metadata,omitempty"`
+Metadata map[string]any `json:"metadata,omitempty"`
 }
 // TransactionStatus represents the status of a transaction
@@ -133,20 +133,20 @@ const (
 // TransactionOperation represents an operation within a transaction
 type TransactionOperation struct {
 ID string `json:"id"`
 Type string `json:"type"`
 NodeID string `json:"node_id"`
-Data map[string]interface{} `json:"data"`
+Data map[string]any `json:"data"`
 Timestamp time.Time `json:"timestamp"`
 RollbackHandler RollbackHandler `json:"-"`
 }
 // SavePoint represents a save point in a transaction
 type SavePoint struct {
 ID string `json:"id"`
 Name string `json:"name"`
 Timestamp time.Time `json:"timestamp"`
-State map[string]interface{} `json:"state"`
+State map[string]any `json:"state"`
 }
 // RollbackHandler defines how to rollback operations
@@ -176,7 +176,7 @@ func (tm *TransactionManager) BeginTransaction(taskID string) *Transaction {
 StartTime: time.Now(),
 Operations: make([]TransactionOperation, 0),
 SavePoints: make([]SavePoint, 0),
-Metadata: make(map[string]interface{}),
+Metadata: make(map[string]any),
 }
 tm.transactions[tx.ID] = tx
@@ -211,7 +211,7 @@ func (tm *TransactionManager) AddOperation(txID string, operation TransactionOpe
 }
 // AddSavePoint adds a save point to the transaction
-func (tm *TransactionManager) AddSavePoint(txID, name string, state map[string]interface{}) error {
+func (tm *TransactionManager) AddSavePoint(txID, name string, state map[string]any) error {
 tm.mu.Lock()
 defer tm.mu.Unlock()
@@ -457,11 +457,11 @@ type HTTPClient interface {
 // WebhookEvent represents an event to send via webhook
 type WebhookEvent struct {
 Type string `json:"type"`
 TaskID string `json:"task_id"`
 NodeID string `json:"node_id,omitempty"`
 Timestamp time.Time `json:"timestamp"`
-Data map[string]interface{} `json:"data"`
+Data map[string]any `json:"data"`
 }
 // NewWebhookManager creates a new webhook manager


@@ -263,7 +263,7 @@ func (tm *DAG) SVGViewerHTML(svgContent string) string {
 body {
 font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
 margin: 0;
-background: linear-gradient(135deg, #667eea 0%%%%, #764ba2 100%%%%);
+background: linear-gradient(135deg, #667eea 0%%, #764ba2 100%%);
 min-height: 100vh;
 display: flex;
 flex-direction: column;
@@ -339,12 +339,12 @@ func (tm *DAG) SVGViewerHTML(svgContent string) string {
 }
 .svg-container {
-width: 100%%%%;
+width: 100%%;
-height: 100%%%%;
+height: 100%%;
 cursor: grab;
 position: relative;
 overflow: hidden;
-display: flex;
+display: block;
 align-items: center;
 justify-content: center;
 }
@@ -357,8 +357,8 @@ func (tm *DAG) SVGViewerHTML(svgContent string) string {
 user-select: none;
 transform-origin: center center;
 transition: transform 0.2s ease-out;
-max-width: 100%%%%;
+max-width: 100%%;
-max-height: 100%%%%;
+max-height: 100%%;
 }
 .svg-wrapper svg {
@@ -523,7 +523,7 @@ func (tm *DAG) SVGViewerHTML(svgContent string) string {
 const scaleX = availableWidth / svgWidth;
 const scaleY = availableHeight / svgHeight;
-initialScale = Math.min(scaleX, scaleY, 1); // Don't scale up beyond 100%%%%
+initialScale = Math.min(scaleX, scaleY, 1); // Don't scale up beyond 100%%
 // Reset position
 currentX = 0;
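The `%%%%` → `%%` fixes in this file matter because the viewer HTML/CSS appears to be emitted through a Go format string: `fmt` collapses `%%` to a single literal percent sign, so the old `%%%%` would render as `%%` in the generated CSS and break values such as `width: 100%`. A minimal, self-contained sketch of the escaping rule (illustrative only, not part of the repository):

```go
package main

import "fmt"

func main() {
	// %% in a format string yields one literal % in the output.
	fmt.Printf("width: 100%%;\n")   // prints: width: 100%;
	fmt.Printf("width: 100%%%%;\n") // prints: width: 100%%;  (invalid CSS — the pre-fix behavior)
}
```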


@@ -41,7 +41,7 @@ func (m *MigrationUtility) ConvertDAGToWorkflow(workflowID, workflowName, versio
 EnableAudit: true,
 EnableMetrics: true,
 },
-Metadata: make(map[string]interface{}),
+Metadata: make(map[string]any),
 CreatedAt: time.Now(),
 UpdatedAt: time.Now(),
 CreatedBy: "migration-utility",
@@ -68,7 +68,7 @@ func (m *MigrationUtility) ConvertDAGToWorkflow(workflowID, workflowName, versio
 ToNode: edge.To.ID,
 Label: edge.Label,
 Priority: 1,
-Metadata: make(map[string]interface{}),
+Metadata: make(map[string]any),
 }
 // Add condition for conditional edges
@@ -102,7 +102,7 @@ func (m *MigrationUtility) convertDAGNodeToWorkflowNode(dagNode *Node) WorkflowN
 X: 0, // Default position - will need to be set by UI
 Y: 0,
 },
-Metadata: make(map[string]interface{}),
+Metadata: make(map[string]any),
 }
 // Convert node type
@@ -116,7 +116,7 @@ func (m *MigrationUtility) convertDAGNodeToWorkflowNode(dagNode *Node) WorkflowN
 // Create basic configuration
 workflowNode.Config = WorkflowNodeConfig{
 Variables: make(map[string]string),
-Custom: make(map[string]interface{}),
+Custom: make(map[string]any),
 }
 // Add original DAG node information to metadata
@@ -394,7 +394,7 @@ func (m *MigrationUtility) GenerateWorkflowTemplate(name, id string) *WorkflowDe
 EnableAudit: true,
 EnableMetrics: true,
 },
-Metadata: make(map[string]interface{}),
+Metadata: make(map[string]any),
 CreatedAt: time.Now(),
 UpdatedAt: time.Now(),
 CreatedBy: "migration-utility",


@@ -262,16 +262,16 @@ type AlertHandler interface {
 // Alert represents a monitoring alert
 type Alert struct {
 ID string `json:"id"`
 Timestamp time.Time `json:"timestamp"`
 Severity AlertSeverity `json:"severity"`
 Type AlertType `json:"type"`
 Message string `json:"message"`
-Details map[string]interface{} `json:"details"`
+Details map[string]any `json:"details"`
 NodeID string `json:"node_id,omitempty"`
 TaskID string `json:"task_id,omitempty"`
-Threshold interface{} `json:"threshold,omitempty"`
+Threshold any `json:"threshold,omitempty"`
-ActualValue interface{} `json:"actual_value,omitempty"`
+ActualValue any `json:"actual_value,omitempty"`
 }
 type AlertSeverity string
@@ -394,7 +394,7 @@ func (m *Monitor) performHealthCheck() {
 Message: "High failure rate detected",
 Threshold: m.thresholds.MaxFailureRate,
 ActualValue: failureRate,
-Details: map[string]interface{}{
+Details: map[string]any{
 "failed_tasks": metrics.TasksFailed,
 "total_tasks": metrics.TasksTotal,
 },
@@ -412,7 +412,7 @@ func (m *Monitor) performHealthCheck() {
 Message: "High task load detected",
 Threshold: m.thresholds.MaxTasksInProgress,
 ActualValue: metrics.TasksInProgress,
-Details: map[string]interface{}{
+Details: map[string]any{
 "tasks_in_progress": metrics.TasksInProgress,
 },
 })
@@ -430,7 +430,7 @@ func (m *Monitor) performHealthCheck() {
 NodeID: nodeID,
 Threshold: m.thresholds.MaxNodeFailures,
 ActualValue: failures,
-Details: map[string]interface{}{
+Details: map[string]any{
 "node_id": nodeID,
 "failures": failures,
 },
@@ -448,7 +448,7 @@ func (m *Monitor) performHealthCheck() {
 Message: "Average execution time is too high",
 Threshold: m.thresholds.MaxExecutionTime,
 ActualValue: metrics.AverageExecutionTime,
-Details: map[string]interface{}{
+Details: map[string]any{
 "average_execution_time": metrics.AverageExecutionTime.String(),
 },
 })


@@ -451,7 +451,7 @@ func getVal(c context.Context, v string, data map[string]any) (key string, val a
 func init() {
 // define custom functions for use in config
-expr.AddFunction("trim", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("trim", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -461,7 +461,7 @@ func init() {
 }
 return strings.TrimSpace(val), nil
 })
-expr.AddFunction("upper", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("upper", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -471,7 +471,7 @@ func init() {
 }
 return strings.ToUpper(val), nil
 })
-expr.AddFunction("lower", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("lower", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -481,7 +481,7 @@ func init() {
 }
 return strings.ToLower(val), nil
 })
-expr.AddFunction("date", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("date", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -495,7 +495,7 @@ func init() {
 }
 return t.Format("2006-01-02"), nil
 })
-expr.AddFunction("datetime", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("datetime", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -509,7 +509,7 @@ func init() {
 }
 return t.Format(time.RFC3339), nil
 })
-expr.AddFunction("addSecondsToNow", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("addSecondsToNow", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -529,7 +529,7 @@ func init() {
 t = t.Add(time.Duration(params[0].(int)) * time.Second)
 return t, nil
 })
-expr.AddFunction("values", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("values", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 2 {
 return nil, errors.New("Invalid number of arguments")
 }
@@ -556,15 +556,15 @@ func init() {
 }
 return values, nil
 })
-expr.AddFunction("uniqueid", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("uniqueid", func(params ...any) (any, error) {
 // create a new xid
 return mq.NewID(), nil
 })
-expr.AddFunction("now", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("now", func(params ...any) (any, error) {
 // get the current time in UTC
 return time.Now().UTC(), nil
 })
-expr.AddFunction("toString", func(params ...interface{}) (interface{}, error) {
+expr.AddFunction("toString", func(params ...any) (any, error) {
 if len(params) == 0 || len(params) > 1 || params[0] == nil {
 return nil, errors.New("Invalid number of arguments")
 }
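Every registered helper now uses the `...any` variadic form. A new helper added in the same style would follow the pattern below; the sketch reuses `expr.AddFunction` and the argument checks exactly as they appear above, but the `reverse` name and its behavior are hypothetical, not part of this commit:

```go
func init() {
	// Hypothetical helper registered in the same style as trim/upper/lower.
	expr.AddFunction("reverse", func(params ...any) (any, error) {
		if len(params) != 1 || params[0] == nil {
			return nil, errors.New("Invalid number of arguments")
		}
		val, ok := params[0].(string)
		if !ok {
			return nil, errors.New("Invalid argument")
		}
		runes := []rune(val)
		for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
			runes[i], runes[j] = runes[j], runes[i]
		}
		return string(runes), nil
	})
}
```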


@@ -7,15 +7,15 @@ import (
 // WALMemoryTaskStorage implements TaskStorage with WAL support using memory storage
 type WALMemoryTaskStorage struct {
 *MemoryTaskStorage
-walManager interface{} // WAL manager interface to avoid import cycle
+walManager any // WAL manager interface to avoid import cycle
-walStorage interface{} // WAL storage interface to avoid import cycle
+walStorage any // WAL storage interface to avoid import cycle
 mu sync.RWMutex
 }
 // WALSQLTaskStorage implements TaskStorage with WAL support using SQL storage
 type WALSQLTaskStorage struct {
 *SQLTaskStorage
-walManager interface{} // WAL manager interface to avoid import cycle
+walManager any // WAL manager interface to avoid import cycle
-walStorage interface{} // WAL storage interface to avoid import cycle
+walStorage any // WAL storage interface to avoid import cycle
 mu sync.RWMutex
 }


@@ -161,7 +161,7 @@ func (tm *DAG) GetActivityLogger() *ActivityLogger {
 }
 // LogActivity logs an activity entry
-func (tm *DAG) LogActivity(ctx context.Context, level ActivityLevel, activityType ActivityType, message string, details map[string]interface{}) {
+func (tm *DAG) LogActivity(ctx context.Context, level ActivityLevel, activityType ActivityType, message string, details map[string]any) {
 if tm.activityLogger != nil {
 tm.activityLogger.LogWithContext(ctx, level, activityType, message, details)
 }


@@ -236,8 +236,8 @@ func (v *DAGValidator) GetTopologicalOrder() ([]string, error) {
 }
 // GetNodeStatistics returns DAG statistics
-func (v *DAGValidator) GetNodeStatistics() map[string]interface{} {
+func (v *DAGValidator) GetNodeStatistics() map[string]any {
-stats := make(map[string]interface{})
+stats := make(map[string]any)
 nodeCount := 0
 edgeCount := 0


@@ -189,7 +189,7 @@ func (ws *WALStorageImpl) SaveWALSegment(ctx context.Context, segment *WALSegmen
 status = EXCLUDED.status,
 flushed_at = EXCLUDED.flushed_at`, ws.walSegmentsTable)
-var flushedAt interface{}
+var flushedAt any
 if segment.FlushedAt != nil {
 flushedAt = *segment.FlushedAt
 } else {
@@ -404,7 +404,7 @@ func (wes *WALEnabledStorage) SaveTask(ctx context.Context, task *storage.Persis
 }
 // Write to WAL first
-if err := wes.walManager.WriteEntry(ctx, WALEntryTypeTaskUpdate, taskData, map[string]interface{}{
+if err := wes.walManager.WriteEntry(ctx, WALEntryTypeTaskUpdate, taskData, map[string]any{
 "task_id": task.ID,
 "dag_id": task.DAGID,
 }); err != nil {
@@ -424,7 +424,7 @@ func (wes *WALEnabledStorage) LogActivity(ctx context.Context, log *storage.Task
 }
 // Write to WAL first
-if err := wes.walManager.WriteEntry(ctx, WALEntryTypeActivityLog, logData, map[string]interface{}{
+if err := wes.walManager.WriteEntry(ctx, WALEntryTypeActivityLog, logData, map[string]any{
 "task_id": log.TaskID,
 "dag_id": log.DAGID,
 "action": log.Action,


@@ -23,13 +23,13 @@ const (
 // WALEntry represents a single entry in the Write-Ahead Log
 type WALEntry struct {
 ID string `json:"id"`
 Type WALEntryType `json:"type"`
 Timestamp time.Time `json:"timestamp"`
 SequenceID uint64 `json:"sequence_id"`
 Data json.RawMessage `json:"data"`
-Metadata map[string]interface{} `json:"metadata,omitempty"`
+Metadata map[string]any `json:"metadata,omitempty"`
 Checksum string `json:"checksum"`
 }
 // WALSegment represents a segment of WAL entries
@@ -188,7 +188,7 @@ func NewWALManager(config *WALConfig, storage WALStorage) *WALManager {
 }
 // WriteEntry writes an entry to the WAL
-func (wm *WALManager) WriteEntry(ctx context.Context, entryType WALEntryType, data json.RawMessage, metadata map[string]interface{}) error {
+func (wm *WALManager) WriteEntry(ctx context.Context, entryType WALEntryType, data json.RawMessage, metadata map[string]any) error {
 entry := WALEntry{
 ID: generateID(),
 Type: entryType,
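Callers of `WriteEntry` now pass the metadata map with the `any` spelling. A hedged, package-internal sketch based on the signature above and the task-update usage visible elsewhere in this commit (the `writeTaskUpdate` helper is hypothetical; it assumes an initialized `*WALManager` and that the `WALEntryTypeTaskUpdate` constant lives in this package, as its unqualified use in the storage implementation suggests):

```go
// Hypothetical helper: writes a task-update WAL entry with map[string]any metadata.
func writeTaskUpdate(ctx context.Context, wm *WALManager, taskData json.RawMessage, taskID, dagID string) error {
	return wm.WriteEntry(ctx, WALEntryTypeTaskUpdate, taskData, map[string]any{
		"task_id": taskID,
		"dag_id":  dagID,
	})
}
```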


@@ -115,7 +115,7 @@ func (w *WALEnabledStorageWrapper) SaveTask(ctx context.Context, task *storage.P
 }
 // Write to WAL first
-if err := w.walManager.WriteEntry(ctx, wal.WALEntryTypeTaskUpdate, taskData, map[string]interface{}{
+if err := w.walManager.WriteEntry(ctx, wal.WALEntryTypeTaskUpdate, taskData, map[string]any{
 "task_id": task.ID,
 "dag_id": task.DAGID,
 }); err != nil {
@@ -135,7 +135,7 @@ func (w *WALEnabledStorageWrapper) LogActivity(ctx context.Context, logEntry *st
 }
 // Write to WAL first
-if err := w.walManager.WriteEntry(ctx, wal.WALEntryTypeActivityLog, logData, map[string]interface{}{
+if err := w.walManager.WriteEntry(ctx, wal.WALEntryTypeActivityLog, logData, map[string]any{
 "task_id": logEntry.TaskID,
 "dag_id": logEntry.DAGID,
 "action": logEntry.Action,


@@ -84,7 +84,7 @@ func NewWorkflowEngineAdapter(config *WorkflowEngineAdapterConfig) *WorkflowEngi
 definitions: make(map[string]*WorkflowDefinition),
 executions: make(map[string]*ExecutionResult),
 stateManager: &WorkflowStateManager{
-stateStore: make(map[string]interface{}),
+stateStore: make(map[string]any),
 },
 }
@@ -184,7 +184,7 @@ func (a *WorkflowEngineAdapter) RegisterWorkflow(ctx context.Context, definition
 return nil
 }
-func (a *WorkflowEngineAdapter) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*ExecutionResult, error) {
+func (a *WorkflowEngineAdapter) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*ExecutionResult, error) {
 a.mu.RLock()
 definition, exists := a.definitions[workflowID]
 a.mu.RUnlock()
@@ -200,7 +200,7 @@ func (a *WorkflowEngineAdapter) ExecuteWorkflow(ctx context.Context, workflowID
 Status: ExecutionStatusRunning,
 StartTime: time.Now(),
 Input: input,
-Output: make(map[string]interface{}),
+Output: make(map[string]any),
 }
 // Store execution
@@ -251,10 +251,10 @@ func (a *WorkflowEngineAdapter) executeWorkflowAsync(ctx context.Context, execut
 // Update execution with node results
 if execution.NodeExecutions == nil {
-execution.NodeExecutions = make(map[string]interface{})
+execution.NodeExecutions = make(map[string]any)
 }
-execution.NodeExecutions[node.ID] = map[string]interface{}{
+execution.NodeExecutions[node.ID] = map[string]any{
 "status": "completed",
 "started_at": time.Now().Add(-time.Millisecond * 100),
 "ended_at": time.Now(),
@@ -274,7 +274,7 @@ func (a *WorkflowEngineAdapter) executeWorkflowAsync(ctx context.Context, execut
 if i == len(definition.Nodes)-1 {
 // Last node - complete execution
 execution.Status = ExecutionStatusCompleted
-execution.Output = map[string]interface{}{
+execution.Output = map[string]any{
 "result": "workflow completed successfully",
 "nodes_executed": len(definition.Nodes),
 }


@@ -191,9 +191,9 @@ func (api *WorkflowAPI) ExecuteWorkflow(c *fiber.Ctx) error {
 })
 }
-var input map[string]interface{}
+var input map[string]any
 if err := c.BodyParser(&input); err != nil {
-input = make(map[string]interface{})
+input = make(map[string]any)
 }
 execution, err := api.enhancedDAG.ExecuteWorkflow(c.Context(), id, input)


@@ -134,7 +134,7 @@ func NewAdvancedWorkflowStateManager() *AdvancedWorkflowStateManager {
 }
 // CreateExecution creates a new workflow execution
-func (sm *AdvancedWorkflowStateManager) CreateExecution(ctx context.Context, workflowID string, input map[string]interface{}) (*WorkflowExecution, error) {
+func (sm *AdvancedWorkflowStateManager) CreateExecution(ctx context.Context, workflowID string, input map[string]any) (*WorkflowExecution, error) {
 execution := &WorkflowExecution{
 ID: generateExecutionID(),
 WorkflowID: workflowID,
@@ -175,7 +175,7 @@ func (sm *AdvancedWorkflowStateManager) UpdateExecution(ctx context.Context, exe
 }
 // ListExecutions returns all executions
-func (sm *AdvancedWorkflowStateManager) ListExecutions(ctx context.Context, filters map[string]interface{}) ([]*WorkflowExecution, error) {
+func (sm *AdvancedWorkflowStateManager) ListExecutions(ctx context.Context, filters map[string]any) ([]*WorkflowExecution, error) {
 sm.mu.RLock()
 defer sm.mu.RUnlock()
@@ -203,7 +203,7 @@ type ScheduledTask struct {
 ID string
 WorkflowID string
 Schedule string
-Input map[string]interface{}
+Input map[string]any
 NextRun time.Time
 LastRun *time.Time
 Enabled bool
@@ -313,7 +313,7 @@ func (e *WorkflowExecutor) Stop(ctx context.Context) {
 }
 // ExecuteWorkflow executes a workflow
-func (e *WorkflowExecutor) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*WorkflowExecution, error) {
+func (e *WorkflowExecutor) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*WorkflowExecution, error) {
 // Create execution
 execution, err := e.stateManager.CreateExecution(ctx, workflowID, input)
 if err != nil {
@@ -345,7 +345,7 @@ func (e *WorkflowExecutor) executeWorkflowAsync(ctx context.Context, execution *
 time.Sleep(100 * time.Millisecond)
 execution.Status = ExecutionStatusCompleted
-execution.Output = map[string]interface{}{
+execution.Output = map[string]any{
 "result": "workflow completed successfully",
 "input": execution.Input,
 }
@@ -537,7 +537,7 @@ func (m *WorkflowEngineManager) RegisterWorkflow(ctx context.Context, definition
 }
 // ExecuteWorkflow executes a workflow
-func (m *WorkflowEngineManager) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*ExecutionResult, error) {
+func (m *WorkflowEngineManager) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*ExecutionResult, error) {
 execution, err := m.executor.ExecuteWorkflow(ctx, workflowID, input)
 if err != nil {
 return nil, err

View File

@@ -149,7 +149,7 @@ func (p *APIWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) m
// In real implementation, make HTTP request // In real implementation, make HTTP request
// For now, simulate API call // For now, simulate API call
result := map[string]interface{}{ result := map[string]any{
"api_called": true, "api_called": true,
"url": config.URL, "url": config.URL,
"method": config.Method, "method": config.Method,
@@ -174,7 +174,7 @@ type TransformWorkflowProcessor struct {
func (p *TransformWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result { func (p *TransformWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
config := p.GetConfig() config := p.GetConfig()
var payload map[string]interface{} var payload map[string]any
if err := json.Unmarshal(task.Payload, &payload); err != nil { if err := json.Unmarshal(task.Payload, &payload); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -205,7 +205,7 @@ type DecisionWorkflowProcessor struct {
func (p *DecisionWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result { func (p *DecisionWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
config := p.GetConfig() config := p.GetConfig()
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -248,13 +248,13 @@ func (p *TimerWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task)
if config.Duration > 0 { if config.Duration > 0 {
// In real implementation, this might use a scheduler // In real implementation, this might use a scheduler
// For demo, we just add the delay info to the result // For demo, we just add the delay info to the result
result := map[string]interface{}{ result := map[string]any{
"timer_delay": config.Duration.String(), "timer_delay": config.Duration.String(),
"schedule": config.Schedule, "schedule": config.Schedule,
"timer_set_at": "simulated", "timer_set_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value
@@ -294,14 +294,14 @@ func (p *DatabaseWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Ta
} }
// Simulate database operation // Simulate database operation
result := map[string]interface{}{ result := map[string]any{
"db_query_executed": true, "db_query_executed": true,
"query": config.Query, "query": config.Query,
"connection": config.Connection, "connection": config.Connection,
"executed_at": "simulated", "executed_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value
@@ -334,7 +334,7 @@ func (p *EmailWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task)
} }
// Simulate email sending // Simulate email sending
result := map[string]interface{}{ result := map[string]any{
"email_sent": true, "email_sent": true,
"to": config.EmailTo, "to": config.EmailTo,
"subject": config.Subject, "subject": config.Subject,
@@ -342,7 +342,7 @@ func (p *EmailWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task)
"sent_at": "simulated", "sent_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value
@@ -375,14 +375,14 @@ func (p *WebhookProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Re
} }
// Simulate webhook sending // Simulate webhook sending
result := map[string]interface{}{ result := map[string]any{
"webhook_sent": true, "webhook_sent": true,
"url": config.URL, "url": config.URL,
"method": config.Method, "method": config.Method,
"sent_at": "simulated", "sent_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value
@@ -415,7 +415,7 @@ func (p *SubDAGWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task
} }
// Simulate sub-DAG execution // Simulate sub-DAG execution
result := map[string]interface{}{ result := map[string]any{
"sub_dag_executed": true, "sub_dag_executed": true,
"sub_workflow_id": config.SubWorkflowID, "sub_workflow_id": config.SubWorkflowID,
"input_mapping": config.InputMapping, "input_mapping": config.InputMapping,
@@ -423,7 +423,7 @@ func (p *SubDAGWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task
"executed_at": "simulated", "executed_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value
@@ -446,12 +446,12 @@ type ParallelWorkflowProcessor struct {
func (p *ParallelWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result { func (p *ParallelWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
// Simulate parallel processing // Simulate parallel processing
result := map[string]interface{}{ result := map[string]any{
"parallel_executed": true, "parallel_executed": true,
"executed_at": "simulated", "executed_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value
@@ -474,12 +474,12 @@ type LoopWorkflowProcessor struct {
func (p *LoopWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result { func (p *LoopWorkflowProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
// Simulate loop processing // Simulate loop processing
result := map[string]interface{}{ result := map[string]any{
"loop_executed": true, "loop_executed": true,
"executed_at": "simulated", "executed_at": "simulated",
} }
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err == nil { if err := json.Unmarshal(task.Payload, &inputData); err == nil {
for key, value := range inputData { for key, value := range inputData {
result[key] = value result[key] = value

View File

@@ -66,7 +66,7 @@ func (p *BaseProcessor) Close() error {
// Helper methods for workflow processors // Helper methods for workflow processors
func (p *BaseProcessor) processTemplate(template string, data map[string]interface{}) string { func (p *BaseProcessor) processTemplate(template string, data map[string]any) string {
result := template result := template
for key, value := range data { for key, value := range data {
placeholder := fmt.Sprintf("{{%s}}", key) placeholder := fmt.Sprintf("{{%s}}", key)
@@ -79,7 +79,7 @@ func (p *BaseProcessor) generateToken() string {
return fmt.Sprintf("token_%d_%s", time.Now().UnixNano(), generateRandomString(16)) return fmt.Sprintf("token_%d_%s", time.Now().UnixNano(), generateRandomString(16))
} }
func (p *BaseProcessor) validateRule(rule WorkflowValidationRule, data map[string]interface{}) error { func (p *BaseProcessor) validateRule(rule WorkflowValidationRule, data map[string]any) error {
value, exists := data[rule.Field] value, exists := data[rule.Field]
if rule.Required && !exists { if rule.Required && !exists {
@@ -146,7 +146,7 @@ func (p *BaseProcessor) validateRule(rule WorkflowValidationRule, data map[strin
return nil return nil
} }
func (p *BaseProcessor) evaluateCondition(condition string, data map[string]interface{}) bool { func (p *BaseProcessor) evaluateCondition(condition string, data map[string]any) bool {
// Simple condition evaluation (in real implementation, use proper expression parser) // Simple condition evaluation (in real implementation, use proper expression parser)
// For now, support basic equality checks like "field == value" // For now, support basic equality checks like "field == value"
parts := strings.Split(condition, "==") parts := strings.Split(condition, "==")
@@ -179,8 +179,8 @@ func (p *BaseProcessor) validateWebhookSignature(payload []byte, secret, signatu
return hmac.Equal([]byte(signature), []byte(expectedSignature)) return hmac.Equal([]byte(signature), []byte(expectedSignature))
} }
func (p *BaseProcessor) applyTransforms(data map[string]interface{}, transforms map[string]interface{}) map[string]interface{} { func (p *BaseProcessor) applyTransforms(data map[string]any, transforms map[string]any) map[string]any {
result := make(map[string]interface{}) result := make(map[string]any)
// Copy original data // Copy original data
for key, value := range data { for key, value := range data {
@@ -189,7 +189,7 @@ func (p *BaseProcessor) applyTransforms(data map[string]interface{}, transforms
// Apply transforms (simplified implementation) // Apply transforms (simplified implementation)
for key, transform := range transforms { for key, transform := range transforms {
if transformMap, ok := transform.(map[string]interface{}); ok { if transformMap, ok := transform.(map[string]any); ok {
if transformType, exists := transformMap["type"]; exists { if transformType, exists := transformMap["type"]; exists {
switch transformType { switch transformType {
case "rename": case "rename":
@@ -245,9 +245,9 @@ func (p *HTMLProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resul
} }
// Prepare template data // Prepare template data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
inputData = make(map[string]interface{}) inputData = make(map[string]any)
} }
// Add template-specific data from config // Add template-specific data from config
@@ -266,7 +266,7 @@ func (p *HTMLProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resul
} }
// Prepare result // Prepare result
result := map[string]interface{}{ result := map[string]any{
"html_content": htmlOutput.String(), "html_content": htmlOutput.String(),
"template": templateStr, "template": templateStr,
"data": inputData, "data": inputData,
@@ -311,16 +311,16 @@ func (p *SMSProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Result
} }
// Parse input data for dynamic content // Parse input data for dynamic content
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
inputData = make(map[string]interface{}) inputData = make(map[string]any)
} }
// Process message template // Process message template
message := p.processTemplate(config.Message, inputData) message := p.processTemplate(config.Message, inputData)
// Simulate SMS sending (in real implementation, integrate with SMS provider) // Simulate SMS sending (in real implementation, integrate with SMS provider)
result := map[string]interface{}{ result := map[string]any{
"sms_sent": true, "sms_sent": true,
"provider": config.Provider, "provider": config.Provider,
"from": config.From, "from": config.From,
@@ -354,7 +354,7 @@ func (p *AuthProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resul
config := p.GetConfig() config := p.GetConfig()
// Parse input data // Parse input data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -364,7 +364,7 @@ func (p *AuthProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resul
} }
// Simulate authentication based on type // Simulate authentication based on type
result := map[string]interface{}{ result := map[string]any{
"auth_type": config.AuthType, "auth_type": config.AuthType,
"authenticated": true, "authenticated": true,
"auth_time": time.Now(), "auth_time": time.Now(),
@@ -413,7 +413,7 @@ func (p *ValidatorProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.
config := p.GetConfig() config := p.GetConfig()
// Parse input data // Parse input data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -432,7 +432,7 @@ func (p *ValidatorProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.
} }
// Prepare result // Prepare result
result := map[string]interface{}{ result := map[string]any{
"validation_passed": len(validationErrors) == 0, "validation_passed": len(validationErrors) == 0,
"validation_type": config.ValidationType, "validation_type": config.ValidationType,
"validated_at": time.Now(), "validated_at": time.Now(),
@@ -474,7 +474,7 @@ func (p *RouterProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Res
config := p.GetConfig() config := p.GetConfig()
// Parse input data // Parse input data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -494,7 +494,7 @@ func (p *RouterProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Res
} }
// Prepare result // Prepare result
result := map[string]interface{}{ result := map[string]any{
"route_selected": selectedRoute, "route_selected": selectedRoute,
"routed_at": time.Now(), "routed_at": time.Now(),
"routing_rules": len(config.RoutingRules), "routing_rules": len(config.RoutingRules),
@@ -523,7 +523,7 @@ func (p *StorageProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Re
config := p.GetConfig() config := p.GetConfig()
// Parse input data // Parse input data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -533,7 +533,7 @@ func (p *StorageProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Re
} }
// Simulate storage operation // Simulate storage operation
result := map[string]interface{}{ result := map[string]any{
"storage_type": config.StorageType, "storage_type": config.StorageType,
"storage_operation": config.StorageOperation, "storage_operation": config.StorageOperation,
"storage_key": config.StorageKey, "storage_key": config.StorageKey,
@@ -577,16 +577,16 @@ func (p *NotifyProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Res
config := p.GetConfig() config := p.GetConfig()
// Parse input data // Parse input data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
inputData = make(map[string]interface{}) inputData = make(map[string]any)
} }
// Process notification message template // Process notification message template
message := p.processTemplate(config.NotificationMessage, inputData) message := p.processTemplate(config.NotificationMessage, inputData)
// Prepare result // Prepare result
result := map[string]interface{}{ result := map[string]any{
"notified": true, "notified": true,
"notify_type": config.NotifyType, "notify_type": config.NotifyType,
"notification_type": config.NotificationType, "notification_type": config.NotificationType,
@@ -620,7 +620,7 @@ func (p *WebhookReceiverProcessor) ProcessTask(ctx context.Context, task *mq.Tas
config := p.GetConfig() config := p.GetConfig()
// Parse input data // Parse input data
var inputData map[string]interface{} var inputData map[string]any
if err := json.Unmarshal(task.Payload, &inputData); err != nil { if err := json.Unmarshal(task.Payload, &inputData); err != nil {
return mq.Result{ return mq.Result{
TaskID: task.ID, TaskID: task.ID,
@@ -647,7 +647,7 @@ func (p *WebhookReceiverProcessor) ProcessTask(ctx context.Context, task *mq.Tas
} }
// Prepare result // Prepare result
result := map[string]interface{}{ result := map[string]any{
"webhook_received": true, "webhook_received": true,
"webhook_path": config.ListenPath, "webhook_path": config.ListenPath,
"webhook_processed_at": time.Now(), "webhook_processed_at": time.Now(),

View File

@@ -1,139 +0,0 @@
package main

import (
"context"
"fmt"
"log"
"github.com/oarkflow/json"
"github.com/oarkflow/mq"
"github.com/oarkflow/mq/dag"
"github.com/oarkflow/mq/examples/tasks"
)

func subDAG() *dag.DAG {
f := dag.NewDAG("Sub DAG", "sub-dag", func(taskID string, result mq.Result) {
fmt.Printf("Sub DAG Final result for task %s: %s\n", taskID, string(result.Payload))
}, mq.WithSyncMode(true))
f.
AddNode(dag.Function, "Store data", "store:data", &tasks.StoreData{Operation: dag.Operation{Type: dag.Function}}, true).
AddNode(dag.Function, "Send SMS", "send:sms", &tasks.SendSms{Operation: dag.Operation{Type: dag.Function}}).
AddNode(dag.Function, "Notification", "notification", &tasks.InAppNotification{Operation: dag.Operation{Type: dag.Function}}).
AddEdge(dag.Simple, "Store Payload to send sms", "store:data", "send:sms").
AddEdge(dag.Simple, "Store Payload to notification", "send:sms", "notification")
return f
}

func main() {
flow := dag.NewDAG("Sample DAG", "sample-dag", func(taskID string, result mq.Result) {
fmt.Printf("DAG Final result for task %s: %s\n", taskID, string(result.Payload))
})
flow.ConfigureMemoryStorage()
flow.AddNode(dag.Function, "GetData", "GetData", &GetData{}, true)
flow.AddNode(dag.Function, "Loop", "Loop", &Loop{})
flow.AddNode(dag.Function, "ValidateAge", "ValidateAge", &ValidateAge{})
flow.AddNode(dag.Function, "ValidateGender", "ValidateGender", &ValidateGender{})
flow.AddNode(dag.Function, "Final", "Final", &Final{})
flow.AddDAGNode(dag.Function, "Check", "persistent", subDAG())
flow.AddEdge(dag.Simple, "GetData", "GetData", "Loop")
flow.AddEdge(dag.Iterator, "Validate age for each item", "Loop", "ValidateAge")
flow.AddCondition("ValidateAge", map[string]string{"pass": "ValidateGender", "default": "persistent"})
flow.AddEdge(dag.Simple, "Mark as Done", "Loop", "Final")
// Test without the Final node to see if it's causing the issue
// Let's also enable hook to see the flow
flow.SetPreProcessHook(func(ctx context.Context, node *dag.Node, taskID string, payload json.RawMessage) context.Context {
log.Printf("PRE-HOOK: Processing node %s, taskID %s, payload size: %d", node.ID, taskID, len(payload))
return ctx
})
flow.SetPostProcessHook(func(ctx context.Context, node *dag.Node, taskID string, result mq.Result) {
log.Printf("POST-HOOK: Completed node %s, taskID %s, status: %v, payload size: %d", node.ID, taskID, result.Status, len(result.Payload))
})

data := []byte(`[{"age": "15", "gender": "female"}, {"age": "18", "gender": "male"}]`)
if flow.Error != nil {
panic(flow.Error)
}
rs := flow.Process(context.Background(), data)
if rs.Error != nil {
panic(rs.Error)
}
fmt.Println(rs.Status, rs.Topic, string(rs.Payload))
}

type GetData struct {
dag.Operation
}

func (p *GetData) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
log.Printf("GetData: Processing payload of size %d", len(task.Payload))
return mq.Result{Ctx: ctx, Payload: task.Payload}
}

type Loop struct {
dag.Operation
}

func (p *Loop) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
log.Printf("Loop: Processing payload of size %d", len(task.Payload))
return mq.Result{Ctx: ctx, Payload: task.Payload}
}

type ValidateAge struct {
dag.Operation
}

func (p *ValidateAge) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
var data map[string]any
if err := json.Unmarshal(task.Payload, &data); err != nil {
return mq.Result{Error: fmt.Errorf("ValidateAge Error: %s", err.Error()), Ctx: ctx}
}
var status string
if data["age"] == "18" {
status = "pass"
} else {
status = "default"
}
log.Printf("ValidateAge: Processing age %s, status %s", data["age"], status)
updatedPayload, _ := json.Marshal(data)
return mq.Result{Payload: updatedPayload, Ctx: ctx, ConditionStatus: status}
}

type ValidateGender struct {
dag.Operation
}

func (p *ValidateGender) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
var data map[string]any
if err := json.Unmarshal(task.Payload, &data); err != nil {
return mq.Result{Error: fmt.Errorf("ValidateGender Error: %s", err.Error()), Ctx: ctx}
}
data["female_voter"] = data["gender"] == "female"
log.Printf("ValidateGender: Processing gender %s", data["gender"])
updatedPayload, _ := json.Marshal(data)
return mq.Result{Payload: updatedPayload, Ctx: ctx}
}

type Final struct {
dag.Operation
}

func (p *Final) ProcessTask(ctx context.Context, task *mq.Task) mq.Result {
var data []map[string]any
if err := json.Unmarshal(task.Payload, &data); err != nil {
return mq.Result{Error: fmt.Errorf("Final Error: %s", err.Error()), Ctx: ctx}
}
log.Printf("Final: Processing array with %d items", len(data))
for i, row := range data {
row["done"] = true
data[i] = row
}
updatedPayload, err := json.Marshal(data)
if err != nil {
panic(err)
}
return mq.Result{Payload: updatedPayload, Ctx: ctx}
}

View File

@@ -108,7 +108,7 @@ func createExampleWorkflows(ctx context.Context, enhancedDAG *dag.EnhancedDAG) e
Description: "Validates incoming data", Description: "Validates incoming data",
Position: dag.Position{X: 100, Y: 100}, Position: dag.Position{X: 100, Y: 100},
Config: dag.WorkflowNodeConfig{ Config: dag.WorkflowNodeConfig{
Custom: map[string]interface{}{ Custom: map[string]any{
"validation_type": "json", "validation_type": "json",
"required_fields": []string{"data"}, "required_fields": []string{"data"},
}, },
@@ -132,7 +132,7 @@ func createExampleWorkflows(ctx context.Context, enhancedDAG *dag.EnhancedDAG) e
Description: "Stores processed data", Description: "Stores processed data",
Position: dag.Position{X: 500, Y: 100}, Position: dag.Position{X: 500, Y: 100},
Config: dag.WorkflowNodeConfig{ Config: dag.WorkflowNodeConfig{
Custom: map[string]interface{}{ Custom: map[string]any{
"storage_type": "memory", "storage_type": "memory",
"storage_operation": "save", "storage_operation": "save",
"storage_key": "processed_data", "storage_key": "processed_data",
@@ -146,7 +146,7 @@ func createExampleWorkflows(ctx context.Context, enhancedDAG *dag.EnhancedDAG) e
Description: "Sends completion notification", Description: "Sends completion notification",
Position: dag.Position{X: 700, Y: 100}, Position: dag.Position{X: 700, Y: 100},
Config: dag.WorkflowNodeConfig{ Config: dag.WorkflowNodeConfig{
Custom: map[string]interface{}{ Custom: map[string]any{
"notify_type": "email", "notify_type": "email",
"notification_recipients": []string{"admin@example.com"}, "notification_recipients": []string{"admin@example.com"},
"notification_message": "Data processing completed", "notification_message": "Data processing completed",
@@ -193,7 +193,7 @@ func createExampleWorkflows(ctx context.Context, enhancedDAG *dag.EnhancedDAG) e
EnableAudit: true, EnableAudit: true,
EnableMetrics: true, EnableMetrics: true,
}, },
Metadata: map[string]interface{}{ Metadata: map[string]any{
"example": true, "example": true,
"type": "data-processing", "type": "data-processing",
}, },
@@ -332,7 +332,7 @@ func createExampleWorkflows(ctx context.Context, enhancedDAG *dag.EnhancedDAG) e
EnableAudit: true, EnableAudit: true,
EnableMetrics: true, EnableMetrics: true,
}, },
Metadata: map[string]interface{}{ Metadata: map[string]any{
"example": true, "example": true,
"type": "api-integration", "type": "api-integration",
}, },
@@ -358,14 +358,14 @@ func demonstrateWorkflowExecution(ctx context.Context, enhancedDAG *dag.Enhanced
log.Println("Starting workflow execution demonstration...") log.Println("Starting workflow execution demonstration...")
// Execute the data processing workflow // Execute the data processing workflow
input1 := map[string]interface{}{ input1 := map[string]any{
"data": map[string]interface{}{ "data": map[string]any{
"id": "12345", "id": "12345",
"name": "Sample Data", "name": "Sample Data",
"value": 100, "value": 100,
"type": "example", "type": "example",
}, },
"metadata": map[string]interface{}{ "metadata": map[string]any{
"source": "demo", "source": "demo",
}, },
} }
@@ -379,7 +379,7 @@ func demonstrateWorkflowExecution(ctx context.Context, enhancedDAG *dag.Enhanced
log.Printf("Started data processing workflow execution: %s", execution1.ID) log.Printf("Started data processing workflow execution: %s", execution1.ID)
// Execute the API integration workflow // Execute the API integration workflow
input2 := map[string]interface{}{ input2 := map[string]any{
"api_endpoint": "https://jsonplaceholder.typicode.com/posts/1", "api_endpoint": "https://jsonplaceholder.typicode.com/posts/1",
"timeout": 30, "timeout": 30,
} }

View File

@@ -82,7 +82,7 @@ func RoleCheckMiddleware(requiredRoles ...string) mq.Handler {
log.Printf("RoleCheckMiddleware: Checking roles %v for node %s", requiredRoles, task.Topic) log.Printf("RoleCheckMiddleware: Checking roles %v for node %s", requiredRoles, task.Topic)
// Extract user from payload // Extract user from payload
var payload map[string]interface{} var payload map[string]any
if err := json.Unmarshal(task.Payload, &payload); err != nil { if err := json.Unmarshal(task.Payload, &payload); err != nil {
return mq.Result{ return mq.Result{
Status: mq.Failed, Status: mq.Failed,
@@ -161,7 +161,7 @@ func (p *ExampleProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Re
time.Sleep(100 * time.Millisecond) time.Sleep(100 * time.Millisecond)
// Parse the payload as JSON // Parse the payload as JSON
var payload map[string]interface{} var payload map[string]any
if err := json.Unmarshal(task.Payload, &payload); err != nil { if err := json.Unmarshal(task.Payload, &payload); err != nil {
return mq.Result{ return mq.Result{
Status: mq.Failed, Status: mq.Failed,
@@ -202,7 +202,7 @@ func (p *AdminProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resu
time.Sleep(200 * time.Millisecond) time.Sleep(200 * time.Millisecond)
// Parse the payload as JSON // Parse the payload as JSON
var payload map[string]interface{} var payload map[string]any
if err := json.Unmarshal(task.Payload, &payload); err != nil { if err := json.Unmarshal(task.Payload, &payload); err != nil {
return mq.Result{ return mq.Result{
Status: mq.Failed, Status: mq.Failed,
@@ -244,7 +244,7 @@ func (p *UserProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resul
time.Sleep(150 * time.Millisecond) time.Sleep(150 * time.Millisecond)
// Parse the payload as JSON // Parse the payload as JSON
var payload map[string]interface{} var payload map[string]any
if err := json.Unmarshal(task.Payload, &payload); err != nil { if err := json.Unmarshal(task.Payload, &payload); err != nil {
return mq.Result{ return mq.Result{
Status: mq.Failed, Status: mq.Failed,
@@ -286,7 +286,7 @@ func (p *GuestProcessor) ProcessTask(ctx context.Context, task *mq.Task) mq.Resu
time.Sleep(100 * time.Millisecond) time.Sleep(100 * time.Millisecond)
// Parse the payload as JSON // Parse the payload as JSON
var payload map[string]interface{} var payload map[string]any
if err := json.Unmarshal(task.Payload, &payload); err != nil { if err := json.Unmarshal(task.Payload, &payload); err != nil {
return mq.Result{ return mq.Result{
Status: mq.Failed, Status: mq.Failed,
@@ -418,7 +418,7 @@ func main() {
log.Printf("\n=== Testing user: %s (Roles: %v) ===", user.Name, user.Roles) log.Printf("\n=== Testing user: %s (Roles: %v) ===", user.Name, user.Roles)
// Create payload with user information // Create payload with user information
payload := map[string]interface{}{ payload := map[string]any{
"user": user, "user": user,
"message": fmt.Sprintf("Request from %s", user.Name), "message": fmt.Sprintf("Request from %s", user.Name),
"data": "test data", "data": "test data",

View File

@@ -60,7 +60,7 @@ type Param struct {
// Pre-allocated param slices to avoid any allocations // Pre-allocated param slices to avoid any allocations
var paramPool = sync.Pool{ var paramPool = sync.Pool{
New: func() interface{} { New: func() any {
return make([]Param, 0, 16) return make([]Param, 0, 16)
}, },
} }
@@ -85,7 +85,7 @@ type Ctx struct {
} }
var ctxPool = sync.Pool{ var ctxPool = sync.Pool{
New: func() interface{} { New: func() any {
return &Ctx{} return &Ctx{}
}, },
} }

View File

@@ -115,14 +115,14 @@ func (h *DataHandler) sortData(data map[string]any) map[string]any {
} }
} }
if dataArray, ok := data["data"].([]interface{}); ok { if dataArray, ok := data["data"].([]any); ok {
sortField := h.getSortField() sortField := h.getSortField()
sortOrder := h.getSortOrder() // "asc" or "desc" sortOrder := h.getSortOrder() // "asc" or "desc"
// Convert to slice of maps for sorting // Convert to slice of maps for sorting
var records []map[string]interface{} var records []map[string]any
for _, item := range dataArray { for _, item := range dataArray {
if record, ok := item.(map[string]interface{}); ok { if record, ok := item.(map[string]any); ok {
records = append(records, record) records = append(records, record)
} }
} }
@@ -139,8 +139,8 @@ func (h *DataHandler) sortData(data map[string]any) map[string]any {
return comparison < 0 return comparison < 0
}) })
// Convert back to []interface{} // Convert back to []any
var sortedData []interface{} var sortedData []any
for _, record := range records { for _, record := range records {
sortedData = append(sortedData, record) sortedData = append(sortedData, record)
} }
@@ -161,13 +161,13 @@ func (h *DataHandler) deduplicateData(data map[string]any) map[string]any {
} }
} }
if dataArray, ok := data["data"].([]interface{}); ok { if dataArray, ok := data["data"].([]any); ok {
dedupeFields := h.getDedupeFields() dedupeFields := h.getDedupeFields()
seen := make(map[string]bool) seen := make(map[string]bool)
var uniqueData []interface{} var uniqueData []any
for _, item := range dataArray { for _, item := range dataArray {
if record, ok := item.(map[string]interface{}); ok { if record, ok := item.(map[string]any); ok {
key := h.createDedupeKey(record, dedupeFields) key := h.createDedupeKey(record, dedupeFields)
if !seen[key] { if !seen[key] {
seen[key] = true seen[key] = true
@@ -281,7 +281,7 @@ func (h *DataHandler) validateFields(data map[string]any) map[string]any {
result[key] = value result[key] = value
} }
validationResults := make(map[string]interface{}) validationResults := make(map[string]any)
allValid := true allValid := true
for field, rules := range validationRules { for field, rules := range validationRules {
@@ -323,14 +323,14 @@ func (h *DataHandler) pivotData(data map[string]any) map[string]any {
// Simplified pivot implementation // Simplified pivot implementation
result := make(map[string]any) result := make(map[string]any)
if dataArray, ok := data["data"].([]interface{}); ok { if dataArray, ok := data["data"].([]any); ok {
pivotField := h.getPivotField() pivotField := h.getPivotField()
valueField := h.getValueField() valueField := h.getValueField()
pivoted := make(map[string]interface{}) pivoted := make(map[string]any)
for _, item := range dataArray { for _, item := range dataArray {
if record, ok := item.(map[string]interface{}); ok { if record, ok := item.(map[string]any); ok {
if pivotVal, ok := record[pivotField]; ok { if pivotVal, ok := record[pivotField]; ok {
if val, ok := record[valueField]; ok { if val, ok := record[valueField]; ok {
key := fmt.Sprintf("%v", pivotVal) key := fmt.Sprintf("%v", pivotVal)
@@ -351,11 +351,11 @@ func (h *DataHandler) unpivotData(data map[string]any) map[string]any {
result := make(map[string]any) result := make(map[string]any)
unpivotFields := h.getUnpivotFields() unpivotFields := h.getUnpivotFields()
var unpivotedData []interface{} var unpivotedData []any
for _, field := range unpivotFields { for _, field := range unpivotFields {
if val, ok := data[field]; ok { if val, ok := data[field]; ok {
record := map[string]interface{}{ record := map[string]any{
"field": field, "field": field,
"value": val, "value": val,
} }
@@ -370,7 +370,7 @@ func (h *DataHandler) unpivotData(data map[string]any) map[string]any {
} }
// Helper functions // Helper functions
func (h *DataHandler) compareValues(a, b interface{}) int { func (h *DataHandler) compareValues(a, b any) int {
if a == nil && b == nil { if a == nil && b == nil {
return 0 return 0
} }
@@ -404,7 +404,7 @@ func (h *DataHandler) compareValues(a, b interface{}) int {
return 0 return 0
} }
func (h *DataHandler) createDedupeKey(record map[string]interface{}, fields []string) string { func (h *DataHandler) createDedupeKey(record map[string]any, fields []string) string {
var keyParts []string var keyParts []string
for _, field := range fields { for _, field := range fields {
keyParts = append(keyParts, fmt.Sprintf("%v", record[field])) keyParts = append(keyParts, fmt.Sprintf("%v", record[field]))
@@ -545,7 +545,7 @@ func (h *DataHandler) evaluateCondition(data map[string]any, condition string) b
return false return false
} }
func (h *DataHandler) castValue(val interface{}, targetType string) interface{} { func (h *DataHandler) castValue(val any, targetType string) any {
switch targetType { switch targetType {
case "string": case "string":
return fmt.Sprintf("%v", val) return fmt.Sprintf("%v", val)
@@ -569,8 +569,8 @@ func (h *DataHandler) castValue(val interface{}, targetType string) interface{}
} }
} }
func (h *DataHandler) validateField(val interface{}, rules map[string]interface{}) map[string]interface{} { func (h *DataHandler) validateField(val any, rules map[string]any) map[string]any {
result := map[string]interface{}{ result := map[string]any{
"valid": true, "valid": true,
"errors": []string{}, "errors": []string{},
} }
@@ -610,7 +610,7 @@ func (h *DataHandler) validateField(val interface{}, rules map[string]interface{
return result return result
} }
func (h *DataHandler) validateType(val interface{}, expectedType string) bool { func (h *DataHandler) validateType(val any, expectedType string) bool {
actualType := reflect.TypeOf(val).String() actualType := reflect.TypeOf(val).String()
switch expectedType { switch expectedType {
case "string": case "string":
@@ -626,7 +626,7 @@ func (h *DataHandler) validateType(val interface{}, expectedType string) bool {
} }
} }
func (h *DataHandler) normalizeValue(val interface{}, normType string) interface{} { func (h *DataHandler) normalizeValue(val any, normType string) any {
switch normType { switch normType {
case "lowercase": case "lowercase":
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
@@ -644,7 +644,7 @@ func (h *DataHandler) normalizeValue(val interface{}, normType string) interface
return val return val
} }
func toFloat64(val interface{}) (float64, bool) { func toFloat64(val any) (float64, bool) {
switch v := val.(type) { switch v := val.(type) {
case float64: case float64:
return v, true return v, true
@@ -676,11 +676,11 @@ func (h *DataHandler) getSortOrder() string {
} }
func (h *DataHandler) getDedupeFields() []string { func (h *DataHandler) getDedupeFields() []string {
// Support both []string and []interface{} for dedupe_fields // Support both []string and []any for dedupe_fields
if fields, ok := h.Payload.Data["dedupe_fields"].([]string); ok { if fields, ok := h.Payload.Data["dedupe_fields"].([]string); ok {
return fields return fields
} }
if fields, ok := h.Payload.Data["dedupe_fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["dedupe_fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -692,11 +692,11 @@ func (h *DataHandler) getDedupeFields() []string {
return nil return nil
} }
func (h *DataHandler) getCalculations() map[string]map[string]interface{} { func (h *DataHandler) getCalculations() map[string]map[string]any {
result := make(map[string]map[string]interface{}) result := make(map[string]map[string]any)
if calc, ok := h.Payload.Data["calculations"].(map[string]interface{}); ok { if calc, ok := h.Payload.Data["calculations"].(map[string]any); ok {
for key, value := range calc { for key, value := range calc {
if calcMap, ok := value.(map[string]interface{}); ok { if calcMap, ok := value.(map[string]any); ok {
result[key] = calcMap result[key] = calcMap
} }
} }
@@ -704,11 +704,11 @@ func (h *DataHandler) getCalculations() map[string]map[string]interface{} {
return result return result
} }
func (h *DataHandler) getConditions() map[string]map[string]interface{} { func (h *DataHandler) getConditions() map[string]map[string]any {
result := make(map[string]map[string]interface{}) result := make(map[string]map[string]any)
if cond, ok := h.Payload.Data["conditions"].(map[string]interface{}); ok { if cond, ok := h.Payload.Data["conditions"].(map[string]any); ok {
for key, value := range cond { for key, value := range cond {
if condMap, ok := value.(map[string]interface{}); ok { if condMap, ok := value.(map[string]any); ok {
result[key] = condMap result[key] = condMap
} }
} }
@@ -718,7 +718,7 @@ func (h *DataHandler) getConditions() map[string]map[string]interface{} {
func (h *DataHandler) getCastConfig() map[string]string { func (h *DataHandler) getCastConfig() map[string]string {
result := make(map[string]string) result := make(map[string]string)
if cast, ok := h.Payload.Data["cast"].(map[string]interface{}); ok { if cast, ok := h.Payload.Data["cast"].(map[string]any); ok {
for key, value := range cast { for key, value := range cast {
if str, ok := value.(string); ok { if str, ok := value.(string); ok {
result[key] = str result[key] = str
@@ -728,11 +728,11 @@ func (h *DataHandler) getCastConfig() map[string]string {
return result return result
} }
func (h *DataHandler) getValidationRules() map[string]map[string]interface{} { func (h *DataHandler) getValidationRules() map[string]map[string]any {
result := make(map[string]map[string]interface{}) result := make(map[string]map[string]any)
if rules, ok := h.Payload.Data["validation_rules"].(map[string]interface{}); ok { if rules, ok := h.Payload.Data["validation_rules"].(map[string]any); ok {
for key, value := range rules { for key, value := range rules {
if ruleMap, ok := value.(map[string]interface{}); ok { if ruleMap, ok := value.(map[string]any); ok {
result[key] = ruleMap result[key] = ruleMap
} }
} }
@@ -741,7 +741,7 @@ func (h *DataHandler) getValidationRules() map[string]map[string]interface{} {
} }
func (h *DataHandler) getTargetFields() []string { func (h *DataHandler) getTargetFields() []string {
if fields, ok := h.Payload.Data["fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -775,7 +775,7 @@ func (h *DataHandler) getValueField() string {
} }
func (h *DataHandler) getUnpivotFields() []string { func (h *DataHandler) getUnpivotFields() []string {
if fields, ok := h.Payload.Data["unpivot_fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["unpivot_fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {

View File

@@ -272,7 +272,7 @@ func (h *FieldHandler) toPascalCase(s string) string {
} }
func (h *FieldHandler) getTargetFields() []string { func (h *FieldHandler) getTargetFields() []string {
if fields, ok := h.Payload.Data["fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -286,7 +286,7 @@ func (h *FieldHandler) getTargetFields() []string {
func (h *FieldHandler) getFieldMapping() map[string]string { func (h *FieldHandler) getFieldMapping() map[string]string {
result := make(map[string]string) result := make(map[string]string)
if mapping, ok := h.Payload.Data["mapping"].(map[string]interface{}); ok { if mapping, ok := h.Payload.Data["mapping"].(map[string]any); ok {
for key, value := range mapping { for key, value := range mapping {
if str, ok := value.(string); ok { if str, ok := value.(string); ok {
result[key] = str result[key] = str
@@ -296,18 +296,18 @@ func (h *FieldHandler) getFieldMapping() map[string]string {
return result return result
} }
func (h *FieldHandler) getNewFields() map[string]interface{} { func (h *FieldHandler) getNewFields() map[string]any {
if fields, ok := h.Payload.Data["new_fields"].(map[string]interface{}); ok { if fields, ok := h.Payload.Data["new_fields"].(map[string]any); ok {
return fields return fields
} }
return make(map[string]interface{}) return make(map[string]any)
} }
func (h *FieldHandler) getMergeConfig() map[string]map[string]interface{} { func (h *FieldHandler) getMergeConfig() map[string]map[string]any {
result := make(map[string]map[string]interface{}) result := make(map[string]map[string]any)
if config, ok := h.Payload.Data["merge_config"].(map[string]interface{}); ok { if config, ok := h.Payload.Data["merge_config"].(map[string]any); ok {
for key, value := range config { for key, value := range config {
if configMap, ok := value.(map[string]interface{}); ok { if configMap, ok := value.(map[string]any); ok {
result[key] = configMap result[key] = configMap
} }
} }

View File

@@ -57,11 +57,11 @@ func (h *FlattenHandler) flattenSettings(data map[string]any) map[string]any {
result[key] = value result[key] = value
} }
if settingsArray, ok := data[sourceField].([]interface{}); ok { if settingsArray, ok := data[sourceField].([]any); ok {
flattened := make(map[string]any) flattened := make(map[string]any)
for _, item := range settingsArray { for _, item := range settingsArray {
if setting, ok := item.(map[string]interface{}); ok { if setting, ok := item.(map[string]any); ok {
key, keyExists := setting["key"].(string) key, keyExists := setting["key"].(string)
value, valueExists := setting["value"] value, valueExists := setting["value"]
valueType, typeExists := setting["value_type"].(string) valueType, typeExists := setting["value_type"].(string)
@@ -96,11 +96,11 @@ func (h *FlattenHandler) flattenKeyValue(data map[string]any) map[string]any {
result[key] = value result[key] = value
} }
if kvArray, ok := data[sourceField].([]interface{}); ok { if kvArray, ok := data[sourceField].([]any); ok {
flattened := make(map[string]any) flattened := make(map[string]any)
for _, item := range kvArray { for _, item := range kvArray {
if kvPair, ok := item.(map[string]interface{}); ok { if kvPair, ok := item.(map[string]any); ok {
if key, keyExists := kvPair[keyField]; keyExists { if key, keyExists := kvPair[keyField]; keyExists {
if value, valueExists := kvPair[valueField]; valueExists { if value, valueExists := kvPair[valueField]; valueExists {
if keyStr, ok := key.(string); ok { if keyStr, ok := key.(string); ok {
@@ -139,9 +139,9 @@ func (h *FlattenHandler) flattenArray(data map[string]any) map[string]any {
} }
} }
if array, ok := data[sourceField].([]interface{}); ok { if array, ok := data[sourceField].([]any); ok {
for i, item := range array { for i, item := range array {
if obj, ok := item.(map[string]interface{}); ok { if obj, ok := item.(map[string]any); ok {
for key, value := range obj { for key, value := range obj {
result[fmt.Sprintf("%s_%d_%s", sourceField, i, key)] = value result[fmt.Sprintf("%s_%d_%s", sourceField, i, key)] = value
} }
@@ -162,17 +162,17 @@ func (h *FlattenHandler) flattenRecursive(obj map[string]any, prefix string, res
} }
switch v := value.(type) { switch v := value.(type) {
case map[string]interface{}: case map[string]any:
nestedMap := make(map[string]any) nestedMap := make(map[string]any)
for k, val := range v { for k, val := range v {
nestedMap[k] = val nestedMap[k] = val
} }
h.flattenRecursive(nestedMap, newKey, result, separator) h.flattenRecursive(nestedMap, newKey, result, separator)
case []interface{}: case []any:
// For arrays, create numbered fields // For arrays, create numbered fields
for i, item := range v { for i, item := range v {
itemKey := fmt.Sprintf("%s%s%d", newKey, separator, i) itemKey := fmt.Sprintf("%s%s%d", newKey, separator, i)
if itemMap, ok := item.(map[string]interface{}); ok { if itemMap, ok := item.(map[string]any); ok {
nestedMap := make(map[string]any) nestedMap := make(map[string]any)
for k, val := range itemMap { for k, val := range itemMap {
nestedMap[k] = val nestedMap[k] = val
@@ -188,7 +188,7 @@ func (h *FlattenHandler) flattenRecursive(obj map[string]any, prefix string, res
} }
} }
func (h *FlattenHandler) convertValue(value interface{}, valueType string) interface{} { func (h *FlattenHandler) convertValue(value any, valueType string) any {
switch valueType { switch valueType {
case "string": case "string":
return fmt.Sprintf("%v", value) return fmt.Sprintf("%v", value)
@@ -213,7 +213,7 @@ func (h *FlattenHandler) convertValue(value interface{}, valueType string) inter
return value return value
case "json": case "json":
if str, ok := value.(string); ok { if str, ok := value.(string); ok {
var jsonVal interface{} var jsonVal any
if err := json.Unmarshal([]byte(str), &jsonVal); err == nil { if err := json.Unmarshal([]byte(str), &jsonVal); err == nil {
return jsonVal return jsonVal
} }

View File

@@ -243,7 +243,7 @@ func (h *FormatHandler) formatTrim(data map[string]any) map[string]any {
} }
func (h *FormatHandler) getTargetFields(data map[string]any) []string { func (h *FormatHandler) getTargetFields(data map[string]any) []string {
if fields, ok := h.Payload.Data["fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {

View File

@@ -22,7 +22,7 @@ func (h *GroupHandler) ProcessTask(ctx context.Context, task *mq.Task) mq.Result
} }
// Extract the data array // Extract the data array
dataArray, ok := data["data"].([]interface{}) dataArray, ok := data["data"].([]any)
if !ok { if !ok {
return mq.Result{Error: fmt.Errorf("expected 'data' field to be an array"), Ctx: ctx} return mq.Result{Error: fmt.Errorf("expected 'data' field to be an array"), Ctx: ctx}
} }
@@ -48,7 +48,7 @@ func (h *GroupHandler) ProcessTask(ctx context.Context, task *mq.Task) mq.Result
return mq.Result{Payload: resultPayload, Ctx: ctx} return mq.Result{Payload: resultPayload, Ctx: ctx}
} }
func (h *GroupHandler) groupData(dataArray []interface{}, groupByFields []string, aggregations map[string]string) []map[string]any { func (h *GroupHandler) groupData(dataArray []any, groupByFields []string, aggregations map[string]string) []map[string]any {
groups := make(map[string][]map[string]any) groups := make(map[string][]map[string]any)
// Group data by specified fields // Group data by specified fields
@@ -152,12 +152,12 @@ func (h *GroupHandler) sumField(records []map[string]any, field string) float64
return sum return sum
} }
func (h *GroupHandler) minField(records []map[string]any, field string) interface{} { func (h *GroupHandler) minField(records []map[string]any, field string) any {
if len(records) == 0 { if len(records) == 0 {
return nil return nil
} }
var min interface{} var min any
for _, record := range records { for _, record := range records {
if val, ok := record[field]; ok { if val, ok := record[field]; ok {
if min == nil { if min == nil {
@@ -172,12 +172,12 @@ func (h *GroupHandler) minField(records []map[string]any, field string) interfac
return min return min
} }
func (h *GroupHandler) maxField(records []map[string]any, field string) interface{} { func (h *GroupHandler) maxField(records []map[string]any, field string) any {
if len(records) == 0 { if len(records) == 0 {
return nil return nil
} }
var max interface{} var max any
for _, record := range records { for _, record := range records {
if val, ok := record[field]; ok { if val, ok := record[field]; ok {
if max == nil { if max == nil {
@@ -212,9 +212,9 @@ func (h *GroupHandler) concatField(records []map[string]any, field string) strin
return result return result
} }
func (h *GroupHandler) uniqueField(records []map[string]any, field string) []interface{} { func (h *GroupHandler) uniqueField(records []map[string]any, field string) []any {
seen := make(map[string]bool) seen := make(map[string]bool)
var unique []interface{} var unique []any
for _, record := range records { for _, record := range records {
if val, ok := record[field]; ok && val != nil { if val, ok := record[field]; ok && val != nil {
@@ -229,7 +229,7 @@ func (h *GroupHandler) uniqueField(records []map[string]any, field string) []int
return unique return unique
} }
func (h *GroupHandler) compareValues(a, b interface{}) int { func (h *GroupHandler) compareValues(a, b any) int {
aStr := fmt.Sprintf("%v", a) aStr := fmt.Sprintf("%v", a)
bStr := fmt.Sprintf("%v", b) bStr := fmt.Sprintf("%v", b)
if aStr < bStr { if aStr < bStr {
@@ -244,7 +244,7 @@ func (h *GroupHandler) getGroupByFields() []string {
if fields, ok := h.Payload.Data["group_by"].([]string); ok { if fields, ok := h.Payload.Data["group_by"].([]string); ok {
return fields return fields
} }
if fields, ok := h.Payload.Data["group_by"].([]interface{}); ok { if fields, ok := h.Payload.Data["group_by"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -258,7 +258,7 @@ func (h *GroupHandler) getGroupByFields() []string {
func (h *GroupHandler) getAggregations() map[string]string { func (h *GroupHandler) getAggregations() map[string]string {
result := make(map[string]string) result := make(map[string]string)
if aggs, ok := h.Payload.Data["aggregations"].(map[string]interface{}); ok { if aggs, ok := h.Payload.Data["aggregations"].(map[string]any); ok {
for field, aggType := range aggs { for field, aggType := range aggs {
if str, ok := aggType.(string); ok { if str, ok := aggType.(string); ok {
result[field] = str result[field] = str

View File

@@ -63,7 +63,7 @@ func (h *JSONHandler) parseJSON(data map[string]any) map[string]any {
for _, field := range fields { for _, field := range fields {
if val, ok := data[field]; ok { if val, ok := data[field]; ok {
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
var parsed interface{} var parsed any
if err := json.Unmarshal([]byte(str), &parsed); err == nil { if err := json.Unmarshal([]byte(str), &parsed); err == nil {
targetField := h.getTargetFieldForSource(field) targetField := h.getTargetFieldForSource(field)
result[targetField] = parsed result[targetField] = parsed
@@ -125,7 +125,7 @@ func (h *JSONHandler) prettyPrintJSON(data map[string]any) map[string]any {
for _, field := range fields { for _, field := range fields {
if val, ok := data[field]; ok { if val, ok := data[field]; ok {
var prettyJSON interface{} var prettyJSON any
// If it's a string, try to parse it first // If it's a string, try to parse it first
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
@@ -157,7 +157,7 @@ func (h *JSONHandler) minifyJSON(data map[string]any) map[string]any {
for _, field := range fields { for _, field := range fields {
if val, ok := data[field]; ok { if val, ok := data[field]; ok {
var minifyJSON interface{} var minifyJSON any
// If it's a string, try to parse it first // If it's a string, try to parse it first
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
@@ -190,7 +190,7 @@ func (h *JSONHandler) validateJSON(data map[string]any) map[string]any {
for _, field := range fields { for _, field := range fields {
if val, ok := data[field]; ok { if val, ok := data[field]; ok {
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
var temp interface{} var temp any
if err := json.Unmarshal([]byte(str), &temp); err == nil { if err := json.Unmarshal([]byte(str), &temp); err == nil {
result[field+"_valid_json"] = true result[field+"_valid_json"] = true
result[field+"_json_type"] = h.getJSONType(temp) result[field+"_json_type"] = h.getJSONType(temp)
@@ -219,7 +219,7 @@ func (h *JSONHandler) extractFields(data map[string]any) map[string]any {
} }
if val, ok := data[sourceField]; ok { if val, ok := data[sourceField]; ok {
var jsonData map[string]interface{} var jsonData map[string]any
// If it's a string, parse it // If it's a string, parse it
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
@@ -227,7 +227,7 @@ func (h *JSONHandler) extractFields(data map[string]any) map[string]any {
result["extract_error"] = err.Error() result["extract_error"] = err.Error()
return result return result
} }
} else if obj, ok := val.(map[string]interface{}); ok { } else if obj, ok := val.(map[string]any); ok {
jsonData = obj jsonData = obj
} else { } else {
result["extract_error"] = "source field is not a JSON object or string" result["extract_error"] = "source field is not a JSON object or string"
@@ -245,7 +245,7 @@ func (h *JSONHandler) extractFields(data map[string]any) map[string]any {
return result return result
} }
func (h *JSONHandler) extractNestedField(data map[string]interface{}, fieldPath string) interface{} { func (h *JSONHandler) extractNestedField(data map[string]any, fieldPath string) any {
// Simple implementation for dot notation // Simple implementation for dot notation
// For more complex path extraction, could use jsonpath library // For more complex path extraction, could use jsonpath library
if val, ok := data[fieldPath]; ok { if val, ok := data[fieldPath]; ok {
@@ -254,11 +254,11 @@ func (h *JSONHandler) extractNestedField(data map[string]interface{}, fieldPath
return nil return nil
} }
func (h *JSONHandler) getJSONType(val interface{}) string { func (h *JSONHandler) getJSONType(val any) string {
switch val.(type) { switch val.(type) {
case map[string]interface{}: case map[string]any:
return "object" return "object"
case []interface{}: case []any:
return "array" return "array"
case string: case string:
return "string" return "string"
@@ -274,7 +274,7 @@ func (h *JSONHandler) getJSONType(val interface{}) string {
} }
func (h *JSONHandler) getTargetFields() []string { func (h *JSONHandler) getTargetFields() []string {
if fields, ok := h.Payload.Data["fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -294,7 +294,7 @@ func (h *JSONHandler) getSourceField() string {
} }
func (h *JSONHandler) getFieldsToExtract() []string { func (h *JSONHandler) getFieldsToExtract() []string {
if fields, ok := h.Payload.Data["extract_fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["extract_fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -308,7 +308,7 @@ func (h *JSONHandler) getFieldsToExtract() []string {
func (h *JSONHandler) getTargetFieldForSource(sourceField string) string { func (h *JSONHandler) getTargetFieldForSource(sourceField string) string {
// Check if there's a specific mapping // Check if there's a specific mapping
if mapping, ok := h.Payload.Data["field_mapping"].(map[string]interface{}); ok { if mapping, ok := h.Payload.Data["field_mapping"].(map[string]any); ok {
if target, ok := mapping[sourceField].(string); ok { if target, ok := mapping[sourceField].(string); ok {
return target return target
} }
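The `getJSONType` switch above relies on the standard mapping `encoding/json` uses when decoding into an `any` target: objects become `map[string]any`, arrays `[]any`, and numbers `float64`. A standalone sketch of that classification; the cases beyond the ones visible in the truncated hunk (number, boolean, null) are illustrative assumptions:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// jsonType mirrors the shape of the getJSONType helper: it names the JSON
// type of a value produced by json.Unmarshal into an `any` target.
func jsonType(val any) string {
	switch val.(type) {
	case map[string]any:
		return "object"
	case []any:
		return "array"
	case string:
		return "string"
	case float64: // encoding/json decodes every JSON number into float64 for `any`
		return "number"
	case bool:
		return "boolean"
	case nil:
		return "null"
	default:
		return "unknown"
	}
}

func main() {
	var parsed any
	_ = json.Unmarshal([]byte(`{"items":[1,2,3],"active":true}`), &parsed)
	fmt.Println(jsonType(parsed))                            // object
	fmt.Println(jsonType(parsed.(map[string]any)["items"])) // array
}
```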

View File

@@ -94,7 +94,7 @@ func (h *SplitHandler) splitToArrayOperation(data map[string]any) map[string]any
if val, ok := data[field]; ok { if val, ok := data[field]; ok {
if str, ok := val.(string); ok { if str, ok := val.(string); ok {
parts := strings.Split(str, separator) parts := strings.Split(str, separator)
var cleanParts []interface{} var cleanParts []any
for _, part := range parts { for _, part := range parts {
cleanParts = append(cleanParts, strings.TrimSpace(part)) cleanParts = append(cleanParts, strings.TrimSpace(part))
} }
@@ -110,7 +110,7 @@ func (h *SplitHandler) getTargetFields() []string {
if fields, ok := h.Payload.Data["fields"].([]string); ok { if fields, ok := h.Payload.Data["fields"].([]string); ok {
return fields return fields
} }
if fields, ok := h.Payload.Data["fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {
@@ -217,7 +217,7 @@ func (h *JoinHandler) joinFromArrayOperation(data map[string]any) map[string]any
} }
if val, ok := data[sourceField]; ok { if val, ok := data[sourceField]; ok {
if arr, ok := val.([]interface{}); ok { if arr, ok := val.([]any); ok {
var parts []string var parts []string
for _, item := range arr { for _, item := range arr {
if item != nil { if item != nil {
@@ -249,7 +249,7 @@ func (h *JoinHandler) getSourceFields() []string {
if fields, ok := h.Payload.Data["source_fields"].([]string); ok { if fields, ok := h.Payload.Data["source_fields"].([]string); ok {
return fields return fields
} }
if fields, ok := h.Payload.Data["source_fields"].([]interface{}); ok { if fields, ok := h.Payload.Data["source_fields"].([]any); ok {
var result []string var result []string
for _, field := range fields { for _, field := range fields {
if str, ok := field.(string); ok { if str, ok := field.(string); ok {

View File

@@ -50,7 +50,7 @@ func (l *DefaultLogger) Error(msg string, fields ...Field) {
l.logger.Error().Map(flattenFields(fields)).Msg(msg) l.logger.Error().Map(flattenFields(fields)).Msg(msg)
} }
// flattenFields converts a slice of Field into a slice of interface{} key/value pairs. // flattenFields converts a slice of Field into a map of key/value pairs.
// flattenFields converts a slice of Field into a slice of interface{} key/value pairs. // flattenFields converts a slice of Field into a map of key/value pairs.
func flattenFields(fields []Field) map[string]any { func flattenFields(fields []Field) map[string]any {
kv := make(map[string]any) kv := make(map[string]any)
for _, field := range fields { for _, field := range fields {

View File

@@ -78,12 +78,12 @@ type HealthCheck interface {
// HealthCheckResult represents the result of a health check // HealthCheckResult represents the result of a health check
type HealthCheckResult struct { type HealthCheckResult struct {
Name string `json:"name"` Name string `json:"name"`
Status HealthStatus `json:"status"` Status HealthStatus `json:"status"`
Message string `json:"message"` Message string `json:"message"`
Duration time.Duration `json:"duration"` Duration time.Duration `json:"duration"`
Timestamp time.Time `json:"timestamp"` Timestamp time.Time `json:"timestamp"`
Metadata map[string]interface{} `json:"metadata,omitempty"` Metadata map[string]any `json:"metadata,omitempty"`
} }
// HealthStatus represents the health status // HealthStatus represents the health status
@@ -373,7 +373,7 @@ func (mhc *MemoryHealthCheck) Check(ctx context.Context) *HealthCheckResult {
Status: status, Status: status,
Message: message, Message: message,
Timestamp: time.Now(), Timestamp: time.Now(),
Metadata: map[string]interface{}{ Metadata: map[string]any{
"alloc_mb": allocMB, "alloc_mb": allocMB,
"sys_mb": sysMB, "sys_mb": sysMB,
"gc_cycles": m.NumGC, "gc_cycles": m.NumGC,
@@ -414,7 +414,7 @@ func (ghc *GoRoutineHealthCheck) Check(ctx context.Context) *HealthCheckResult {
Status: status, Status: status,
Message: message, Message: message,
Timestamp: time.Now(), Timestamp: time.Now(),
Metadata: map[string]interface{}{ Metadata: map[string]any{
"count": count, "count": count,
}, },
} }
@@ -439,7 +439,7 @@ func (dshc *DiskSpaceHealthCheck) Check(ctx context.Context) *HealthCheckResult
Status: HealthStatusHealthy, Status: HealthStatusHealthy,
Message: "Disk space OK", Message: "Disk space OK",
Timestamp: time.Now(), Timestamp: time.Now(),
Metadata: map[string]interface{}{ Metadata: map[string]any{
"available_gb": 100.0, // Placeholder "available_gb": 100.0, // Placeholder
}, },
} }
@@ -757,7 +757,7 @@ func (ms *MetricsServer) handleMetrics(w http.ResponseWriter, r *http.Request) {
metrics := ms.registry.GetAllMetrics() metrics := ms.registry.GetAllMetrics()
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{ json.NewEncoder(w).Encode(map[string]any{
"timestamp": time.Now(), "timestamp": time.Now(),
"metrics": metrics, "metrics": metrics,
}) })
@@ -768,7 +768,7 @@ func (ms *MetricsServer) handleHealth(w http.ResponseWriter, r *http.Request) {
results := ms.healthChecker.RunChecks(r.Context()) results := ms.healthChecker.RunChecks(r.Context())
overallHealth := ms.healthChecker.GetOverallHealth() overallHealth := ms.healthChecker.GetOverallHealth()
response := map[string]interface{}{ response := map[string]any{
"status": overallHealth, "status": overallHealth,
"timestamp": time.Now(), "timestamp": time.Now(),
"checks": results, "checks": results,
@@ -804,7 +804,7 @@ func (ms *MetricsServer) handleAlerts(w http.ResponseWriter, r *http.Request) {
}) })
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{ json.NewEncoder(w).Encode(map[string]any{
"timestamp": time.Now(), "timestamp": time.Now(),
"alerts": alerts, "alerts": alerts,
}) })
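For reference, a trimmed-down sketch of a custom check returning the post-migration `HealthCheckResult` shape with `map[string]any` metadata. The types below are simplified local stand-ins for the ones in this file, and `goroutineCheck` is a hypothetical check in the spirit of `GoRoutineHealthCheck`, not the package's implementation:

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"runtime"
	"time"
)

// Simplified stand-ins for the package types shown in the diff above.
type HealthStatus string

type HealthCheckResult struct {
	Name      string         `json:"name"`
	Status    HealthStatus   `json:"status"`
	Message   string         `json:"message"`
	Duration  time.Duration  `json:"duration"`
	Timestamp time.Time      `json:"timestamp"`
	Metadata  map[string]any `json:"metadata,omitempty"`
}

// goroutineCheck reports how many goroutines are running and attaches the
// count as untyped metadata, mirroring the Metadata map[string]any field.
func goroutineCheck(ctx context.Context) *HealthCheckResult {
	start := time.Now()
	count := runtime.NumGoroutine()
	status := HealthStatus("healthy")
	if count > 10000 {
		status = HealthStatus("degraded")
	}
	return &HealthCheckResult{
		Name:      "goroutines",
		Status:    status,
		Message:   fmt.Sprintf("%d goroutines running", count),
		Duration:  time.Since(start),
		Timestamp: time.Now(),
		Metadata:  map[string]any{"count": count},
	}
}

func main() {
	res := goroutineCheck(context.Background())
	_ = json.NewEncoder(os.Stdout).Encode(res)
}
```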

mq.go (18 changes)
View File

@@ -444,15 +444,15 @@ type MessageStore interface {
// StoredMessage represents a message stored in the message store // StoredMessage represents a message stored in the message store
type StoredMessage struct { type StoredMessage struct {
ID string `json:"id"` ID string `json:"id"`
Queue string `json:"queue"` Queue string `json:"queue"`
Payload []byte `json:"payload"` Payload []byte `json:"payload"`
Headers map[string]string `json:"headers,omitempty"` Headers map[string]string `json:"headers,omitempty"`
Metadata map[string]interface{} `json:"metadata,omitempty"` Metadata map[string]any `json:"metadata,omitempty"`
Priority int `json:"priority"` Priority int `json:"priority"`
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
ExpiresAt *time.Time `json:"expires_at,omitempty"` ExpiresAt *time.Time `json:"expires_at,omitempty"`
Attempts int `json:"attempts"` Attempts int `json:"attempts"`
} }
type Broker struct { type Broker struct {

View File

@@ -16,9 +16,9 @@ type ThresholdConfig struct {
} }
type MetricsRegistry interface { type MetricsRegistry interface {
Register(metricName string, value interface{}) Register(metricName string, value any)
Increment(metricName string) Increment(metricName string)
Get(metricName string) interface{} Get(metricName string) any
} }
type CircuitBreakerConfig struct { type CircuitBreakerConfig struct {

View File

@@ -329,7 +329,7 @@ func NewMemoryPool(size int) *MemoryPool {
return &MemoryPool{ return &MemoryPool{
size: size, size: size,
pool: sync.Pool{ pool: sync.Pool{
New: func() interface{} { New: func() any {
return make([]byte, size) return make([]byte, size)
}, },
}, },
@@ -407,13 +407,13 @@ func (pm *PerformanceMonitor) GetMetricsChannel() <-chan PerformanceMetrics {
// PerformanceAlert represents a performance alert // PerformanceAlert represents a performance alert
type PerformanceAlert struct { type PerformanceAlert struct {
Type string `json:"type"` Type string `json:"type"`
Severity string `json:"severity"` Severity string `json:"severity"`
Message string `json:"message"` Message string `json:"message"`
Metrics PerformanceMetrics `json:"metrics"` Metrics PerformanceMetrics `json:"metrics"`
Threshold interface{} `json:"threshold"` Threshold any `json:"threshold"`
Timestamp time.Time `json:"timestamp"` Timestamp time.Time `json:"timestamp"`
Details map[string]interface{} `json:"details,omitempty"` Details map[string]any `json:"details,omitempty"`
} }
// PerformanceAlerter manages performance alerts // PerformanceAlerter manages performance alerts
@@ -490,11 +490,11 @@ func NewPerformanceDashboard(optimizer *PerformanceOptimizer, alerter *Performan
} }
// GetDashboardData returns data for the performance dashboard // GetDashboardData returns data for the performance dashboard
func (pd *PerformanceDashboard) GetDashboardData() map[string]interface{} { func (pd *PerformanceDashboard) GetDashboardData() map[string]any {
metrics, hasMetrics := pd.monitor.GetMetrics() metrics, hasMetrics := pd.monitor.GetMetrics()
alerts := pd.alerter.GetAlerts("", 10) alerts := pd.alerter.GetAlerts("", 10)
data := map[string]interface{}{ data := map[string]any{
"current_metrics": metrics, "current_metrics": metrics,
"has_metrics": hasMetrics, "has_metrics": hasMetrics,
"recent_alerts": alerts, "recent_alerts": alerts,

pool.go (12 changes)
View File

@@ -153,7 +153,7 @@ type Metrics struct {
// Plugin is used to inject custom behavior before or after task processing. // Plugin is used to inject custom behavior before or after task processing.
type Plugin interface { type Plugin interface {
Initialize(config interface{}) error Initialize(config any) error
BeforeTask(task *QueueTask) BeforeTask(task *QueueTask)
AfterTask(task *QueueTask, result Result) AfterTask(task *QueueTask, result Result)
} }
@@ -161,7 +161,7 @@ type Plugin interface {
// DefaultPlugin is a no-op implementation of Plugin. // DefaultPlugin is a no-op implementation of Plugin.
type DefaultPlugin struct{} type DefaultPlugin struct{}
func (dp *DefaultPlugin) Initialize(config interface{}) error { return nil } func (dp *DefaultPlugin) Initialize(config any) error { return nil }
func (dp *DefaultPlugin) BeforeTask(task *QueueTask) { func (dp *DefaultPlugin) BeforeTask(task *QueueTask) {
Logger.Info().Str("taskID", task.payload.ID).Msg("BeforeTask plugin invoked") Logger.Info().Str("taskID", task.payload.ID).Msg("BeforeTask plugin invoked")
} }
@@ -274,7 +274,7 @@ func (dlq *DeadLetterQueue) Size() int {
} }
// GetStats returns statistics about the DLQ // GetStats returns statistics about the DLQ
func (dlq *DeadLetterQueue) GetStats() map[string]interface{} { func (dlq *DeadLetterQueue) GetStats() map[string]any {
dlq.mu.RLock() dlq.mu.RLock()
defer dlq.mu.RUnlock() defer dlq.mu.RUnlock()
@@ -302,7 +302,7 @@ func (dlq *DeadLetterQueue) GetStats() map[string]interface{} {
} }
} }
return map[string]interface{}{ return map[string]any{
"total_tasks": len(dlq.tasks), "total_tasks": len(dlq.tasks),
"max_size": dlq.maxSize, "max_size": dlq.maxSize,
"error_counts": errorCounts, "error_counts": errorCounts,
@@ -324,7 +324,7 @@ func NewInMemoryMetricsRegistry() *InMemoryMetricsRegistry {
} }
} }
func (m *InMemoryMetricsRegistry) Register(metricName string, value interface{}) { func (m *InMemoryMetricsRegistry) Register(metricName string, value any) {
m.mu.Lock() m.mu.Lock()
defer m.mu.Unlock() defer m.mu.Unlock()
if v, ok := value.(int64); ok { if v, ok := value.(int64); ok {
@@ -338,7 +338,7 @@ func (m *InMemoryMetricsRegistry) Increment(metricName string) {
m.metrics[metricName]++ m.metrics[metricName]++
} }
func (m *InMemoryMetricsRegistry) Get(metricName string) interface{} { func (m *InMemoryMetricsRegistry) Get(metricName string) any {
m.mu.RLock() m.mu.RLock()
defer m.mu.RUnlock() defer m.mu.RUnlock()
return m.metrics[metricName] return m.metrics[metricName]
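A minimal sketch of a custom `Plugin` against the post-migration signature, where `Initialize` receives an opaque `any` config. `QueueTask` and `Result` below are simplified stand-ins for the pool types defined elsewhere in this package, and `timingPlugin` is a hypothetical example:

```go
package main

import (
	"fmt"
	"time"
)

// Simplified stand-ins for the pool types referenced by the Plugin interface.
type QueueTask struct{ ID string }
type Result struct{ Error error }

type Plugin interface {
	Initialize(config any) error
	BeforeTask(task *QueueTask)
	AfterTask(task *QueueTask, result Result)
}

// timingPlugin measures how long each task takes between the hooks.
type timingPlugin struct {
	started map[string]time.Time
}

func (p *timingPlugin) Initialize(config any) error {
	p.started = make(map[string]time.Time)
	// The config is opaque to the pool; a plugin asserts the shape it expects.
	if m, ok := config.(map[string]any); ok {
		fmt.Println("initialized with", len(m), "options")
	}
	return nil
}

func (p *timingPlugin) BeforeTask(task *QueueTask) { p.started[task.ID] = time.Now() }

func (p *timingPlugin) AfterTask(task *QueueTask, result Result) {
	fmt.Printf("task %s finished in %s (err: %v)\n",
		task.ID, time.Since(p.started[task.ID]), result.Error)
}

func main() {
	var plugin Plugin = &timingPlugin{}
	_ = plugin.Initialize(map[string]any{"verbose": true})
	task := &QueueTask{ID: "t-1"}
	plugin.BeforeTask(task)
	plugin.AfterTask(task, Result{})
}
```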

View File

@@ -406,7 +406,7 @@ func (r *JSONSchemaRenderer) parseGroupsFromSchema() []GroupInfo {
return nil return nil
} }
groups, ok := groupsData.([]interface{}) groups, ok := groupsData.([]any)
if !ok { if !ok {
return nil return nil
} }
@@ -415,13 +415,13 @@ func (r *JSONSchemaRenderer) parseGroupsFromSchema() []GroupInfo {
var groupedFields = make(map[string]bool) // Track fields that are already in groups var groupedFields = make(map[string]bool) // Track fields that are already in groups
for _, group := range groups { for _, group := range groups {
groupMap, ok := group.(map[string]interface{}) groupMap, ok := group.(map[string]any)
if !ok { if !ok {
continue continue
} }
var groupTitle GroupTitle var groupTitle GroupTitle
if titleMap, ok := groupMap["title"].(map[string]interface{}); ok { if titleMap, ok := groupMap["title"].(map[string]any); ok {
if text, ok := titleMap["text"].(string); ok { if text, ok := titleMap["text"].(string); ok {
groupTitle.Text = text groupTitle.Text = text
} }
@@ -436,7 +436,7 @@ func (r *JSONSchemaRenderer) parseGroupsFromSchema() []GroupInfo {
} }
var fields []FieldInfo var fields []FieldInfo
if fieldsData, ok := groupMap["fields"].([]interface{}); ok { if fieldsData, ok := groupMap["fields"].([]any); ok {
for _, fieldName := range fieldsData { for _, fieldName := range fieldsData {
if fieldNameStr, ok := fieldName.(string); ok { if fieldNameStr, ok := fieldName.(string); ok {
// Handle nested field paths // Handle nested field paths
@@ -948,9 +948,9 @@ func generateOptionsFromSchema(schema *jsonschema.Schema) string {
// Check UI options first // Check UI options first
if schema.UI != nil { if schema.UI != nil {
if options, ok := schema.UI["options"].([]interface{}); ok { if options, ok := schema.UI["options"].([]any); ok {
for _, option := range options { for _, option := range options {
if optionMap, ok := option.(map[string]interface{}); ok { if optionMap, ok := option.(map[string]any); ok {
value := getMapValue(optionMap, "value", "") value := getMapValue(optionMap, "value", "")
text := getMapValue(optionMap, "text", value) text := getMapValue(optionMap, "text", value)
selected := "" selected := ""
@@ -1044,7 +1044,7 @@ func getFieldContentHTML(field FieldInfo) string {
} }
// Check for children elements // Check for children elements
if children, ok := field.Schema.UI["children"].([]interface{}); ok { if children, ok := field.Schema.UI["children"].([]any); ok {
return renderChildren(children) return renderChildren(children)
} }
} }
@@ -1052,10 +1052,10 @@ func getFieldContentHTML(field FieldInfo) string {
return "" return ""
} }
func renderChildren(children []interface{}) string { func renderChildren(children []any) string {
var result strings.Builder var result strings.Builder
for _, child := range children { for _, child := range children {
if childMap, ok := child.(map[string]interface{}); ok { if childMap, ok := child.(map[string]any); ok {
// Create a temporary field info for the child // Create a temporary field info for the child
childSchema := &jsonschema.Schema{ childSchema := &jsonschema.Schema{
UI: childMap, UI: childMap,
@@ -1104,7 +1104,7 @@ func generateLabel(field FieldInfo) string {
return fmt.Sprintf(`<label for="%s">%s%s</label>`, fieldName, title, requiredSpan) return fmt.Sprintf(`<label for="%s">%s%s</label>`, fieldName, title, requiredSpan)
} }
func getMapValue(m map[string]interface{}, key, defaultValue string) string { func getMapValue(m map[string]any, key, defaultValue string) string {
if value, ok := m[key].(string); ok { if value, ok := m[key].(string); ok {
return value return value
} }
@@ -1128,20 +1128,20 @@ func (r *JSONSchemaRenderer) renderButtons() string {
var buttonsHTML bytes.Buffer var buttonsHTML bytes.Buffer
if submitConfig, ok := r.Schema.Form["submit"].(map[string]interface{}); ok { if submitConfig, ok := r.Schema.Form["submit"].(map[string]any); ok {
buttonHTML := renderButtonFromConfig(submitConfig, "submit") buttonHTML := renderButtonFromConfig(submitConfig, "submit")
buttonsHTML.WriteString(buttonHTML) buttonsHTML.WriteString(buttonHTML)
} }
if resetConfig, ok := r.Schema.Form["reset"].(map[string]interface{}); ok { if resetConfig, ok := r.Schema.Form["reset"].(map[string]any); ok {
buttonHTML := renderButtonFromConfig(resetConfig, "reset") buttonHTML := renderButtonFromConfig(resetConfig, "reset")
buttonsHTML.WriteString(buttonHTML) buttonsHTML.WriteString(buttonHTML)
} }
// Support for additional custom buttons // Support for additional custom buttons
if buttons, ok := r.Schema.Form["buttons"].([]interface{}); ok { if buttons, ok := r.Schema.Form["buttons"].([]any); ok {
for _, button := range buttons { for _, button := range buttons {
if buttonMap, ok := button.(map[string]interface{}); ok { if buttonMap, ok := button.(map[string]any); ok {
buttonType := getMapValue(buttonMap, "type", "button") buttonType := getMapValue(buttonMap, "type", "button")
buttonHTML := renderButtonFromConfig(buttonMap, buttonType) buttonHTML := renderButtonFromConfig(buttonMap, buttonType)
buttonsHTML.WriteString(buttonHTML) buttonsHTML.WriteString(buttonHTML)
@@ -1152,7 +1152,7 @@ func (r *JSONSchemaRenderer) renderButtons() string {
return buttonsHTML.String() return buttonsHTML.String()
} }
func renderButtonFromConfig(config map[string]interface{}, defaultType string) string { func renderButtonFromConfig(config map[string]any, defaultType string) string {
var attributes []string var attributes []string
buttonType := getMapValue(config, "type", defaultType) buttonType := getMapValue(config, "type", defaultType)
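The renderer walks untyped schema metadata, so after decoding, the UI section arrives as nested `[]any` / `map[string]any` values, which is exactly what the assertions above expect. A standalone sketch of the option-extraction pattern, using an illustrative schema fragment that is not taken from the project:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// getValue mirrors the getMapValue helper: read a string key with a fallback.
func getValue(m map[string]any, key, defaultValue string) string {
	if value, ok := m[key].(string); ok {
		return value
	}
	return defaultValue
}

func main() {
	// Hypothetical UI fragment; after json.Unmarshal the options arrive as
	// []any holding map[string]any entries, matching the renderer's assertions.
	raw := []byte(`{"options":[{"value":"admin","text":"Administrator"},{"value":"user"}]}`)

	var ui map[string]any
	_ = json.Unmarshal(raw, &ui)

	if options, ok := ui["options"].([]any); ok {
		for _, option := range options {
			if optionMap, ok := option.(map[string]any); ok {
				value := getValue(optionMap, "value", "")
				text := getValue(optionMap, "text", value) // fall back to the value
				fmt.Printf("<option value=%q>%s</option>\n", value, text)
			}
		}
	}
}
```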

View File

@@ -17,7 +17,7 @@ type ValidationInfo struct {
Maximum *jsonschema.Rat Maximum *jsonschema.Rat
Pattern string Pattern string
Format string Format string
Enum []interface{} Enum []any
MultipleOf *jsonschema.Rat MultipleOf *jsonschema.Rat
ExclusiveMin *jsonschema.Rat ExclusiveMin *jsonschema.Rat
ExclusiveMax *jsonschema.Rat ExclusiveMax *jsonschema.Rat
@@ -26,7 +26,7 @@ type ValidationInfo struct {
UniqueItems bool UniqueItems bool
MinProperties *float64 MinProperties *float64
MaxProperties *float64 MaxProperties *float64
Const interface{} Const any
// Advanced JSON Schema 2020-12 validations // Advanced JSON Schema 2020-12 validations
AllOf []*jsonschema.Schema AllOf []*jsonschema.Schema
@@ -57,8 +57,8 @@ type ValidationInfo struct {
// Metadata // Metadata
Title *string Title *string
Description *string Description *string
Default interface{} Default any
Examples []interface{} Examples []any
Deprecated *bool Deprecated *bool
ReadOnly *bool ReadOnly *bool
WriteOnly *bool WriteOnly *bool

View File

@@ -26,19 +26,19 @@ type SecurityManager struct {
// AuthProvider interface for different authentication methods // AuthProvider interface for different authentication methods
type AuthProvider interface { type AuthProvider interface {
Name() string Name() string
Authenticate(ctx context.Context, credentials map[string]interface{}) (*User, error) Authenticate(ctx context.Context, credentials map[string]any) (*User, error)
ValidateToken(token string) (*User, error) ValidateToken(token string) (*User, error)
} }
// User represents an authenticated user // User represents an authenticated user
type User struct { type User struct {
ID string `json:"id"` ID string `json:"id"`
Username string `json:"username"` Username string `json:"username"`
Roles []string `json:"roles"` Roles []string `json:"roles"`
Permissions []string `json:"permissions"` Permissions []string `json:"permissions"`
Metadata map[string]interface{} `json:"metadata,omitempty"` Metadata map[string]any `json:"metadata,omitempty"`
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
LastLoginAt *time.Time `json:"last_login_at,omitempty"` LastLoginAt *time.Time `json:"last_login_at,omitempty"`
} }
// RoleManager manages user roles and permissions // RoleManager manages user roles and permissions
@@ -88,16 +88,16 @@ type AuditLogger struct {
// AuditEvent represents a security audit event // AuditEvent represents a security audit event
type AuditEvent struct { type AuditEvent struct {
ID string `json:"id"` ID string `json:"id"`
Timestamp time.Time `json:"timestamp"` Timestamp time.Time `json:"timestamp"`
EventType string `json:"event_type"` EventType string `json:"event_type"`
UserID string `json:"user_id,omitempty"` UserID string `json:"user_id,omitempty"`
Resource string `json:"resource"` Resource string `json:"resource"`
Action string `json:"action"` Action string `json:"action"`
IPAddress string `json:"ip_address,omitempty"` IPAddress string `json:"ip_address,omitempty"`
UserAgent string `json:"user_agent,omitempty"` UserAgent string `json:"user_agent,omitempty"`
Success bool `json:"success"` Success bool `json:"success"`
Details map[string]interface{} `json:"details,omitempty"` Details map[string]any `json:"details,omitempty"`
} }
// SessionManager manages user sessions // SessionManager manages user sessions
@@ -109,13 +109,13 @@ type SessionManager struct {
// Session represents a user session // Session represents a user session
type Session struct { type Session struct {
ID string `json:"id"` ID string `json:"id"`
UserID string `json:"user_id"` UserID string `json:"user_id"`
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
ExpiresAt time.Time `json:"expires_at"` ExpiresAt time.Time `json:"expires_at"`
IPAddress string `json:"ip_address"` IPAddress string `json:"ip_address"`
UserAgent string `json:"user_agent"` UserAgent string `json:"user_agent"`
Data map[string]interface{} `json:"data,omitempty"` Data map[string]any `json:"data,omitempty"`
} }
// NewSecurityManager creates a new security manager // NewSecurityManager creates a new security manager
@@ -369,7 +369,7 @@ func (sm *SessionManager) CreateSession(userID, ipAddress, userAgent string) *Se
ExpiresAt: time.Now().Add(sm.maxAge), ExpiresAt: time.Now().Add(sm.maxAge),
IPAddress: ipAddress, IPAddress: ipAddress,
UserAgent: userAgent, UserAgent: userAgent,
Data: make(map[string]interface{}), Data: make(map[string]any),
} }
sm.sessions[session.ID] = session sm.sessions[session.ID] = session
@@ -426,7 +426,7 @@ func (sm *SecurityManager) AddAuthProvider(provider AuthProvider) {
} }
// Authenticate authenticates a user using available providers // Authenticate authenticates a user using available providers
func (sm *SecurityManager) Authenticate(ctx context.Context, credentials map[string]interface{}) (*User, error) { func (sm *SecurityManager) Authenticate(ctx context.Context, credentials map[string]any) (*User, error) {
sm.mu.RLock() sm.mu.RLock()
providers := make(map[string]AuthProvider) providers := make(map[string]AuthProvider)
for name, provider := range sm.authProviders { for name, provider := range sm.authProviders {
@@ -444,7 +444,7 @@ func (sm *SecurityManager) Authenticate(ctx context.Context, credentials map[str
UserID: user.ID, UserID: user.ID,
Action: "login", Action: "login",
Success: true, Success: true,
Details: map[string]interface{}{ Details: map[string]any{
"provider": provider.Name(), "provider": provider.Name(),
}, },
}) })
@@ -461,7 +461,7 @@ func (sm *SecurityManager) Authenticate(ctx context.Context, credentials map[str
EventType: "authentication", EventType: "authentication",
Action: "login", Action: "login",
Success: false, Success: false,
Details: map[string]interface{}{ Details: map[string]any{
"error": lastErr.Error(), "error": lastErr.Error(),
}, },
}) })
@@ -524,7 +524,7 @@ func (sm *SecurityManager) CheckRateLimit(key string) error {
EventType: "rate_limit", EventType: "rate_limit",
Action: "exceeded", Action: "exceeded",
Success: false, Success: false,
Details: map[string]interface{}{ Details: map[string]any{
"key": key, "key": key,
}, },
}) })
@@ -565,7 +565,7 @@ func (bap *BasicAuthProvider) Name() string {
return "basic" return "basic"
} }
func (bap *BasicAuthProvider) Authenticate(ctx context.Context, credentials map[string]interface{}) (*User, error) { func (bap *BasicAuthProvider) Authenticate(ctx context.Context, credentials map[string]any) (*User, error) {
username, ok := credentials["username"].(string) username, ok := credentials["username"].(string)
if !ok { if !ok {
return nil, fmt.Errorf("username required") return nil, fmt.Errorf("username required")
@@ -604,7 +604,7 @@ func (bap *BasicAuthProvider) ValidateToken(token string) (*User, error) {
} }
username := parts[0] username := parts[0]
return bap.Authenticate(context.Background(), map[string]interface{}{ return bap.Authenticate(context.Background(), map[string]any{
"username": username, "username": username,
"password": "token", // Placeholder "password": "token", // Placeholder
}) })
@@ -641,7 +641,7 @@ func NewSecurityMiddleware(sm *SecurityManager) *SecurityMiddleware {
} }
// AuthenticateRequest authenticates a request with credentials // AuthenticateRequest authenticates a request with credentials
func (sm *SecurityMiddleware) AuthenticateRequest(credentials map[string]interface{}, ipAddress string) (*User, error) { func (sm *SecurityMiddleware) AuthenticateRequest(credentials map[string]any, ipAddress string) (*User, error) {
user, err := sm.securityManager.Authenticate(context.Background(), credentials) user, err := sm.securityManager.Authenticate(context.Background(), credentials)
if err != nil { if err != nil {
// Log failed authentication attempt // Log failed authentication attempt
@@ -649,7 +649,7 @@ func (sm *SecurityMiddleware) AuthenticateRequest(credentials map[string]interfa
EventType: "authentication", EventType: "authentication",
Action: "login", Action: "login",
Success: false, Success: false,
Details: map[string]interface{}{ Details: map[string]any{
"ip_address": ipAddress, "ip_address": ipAddress,
"error": err.Error(), "error": err.Error(),
}, },
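As a usage illustration, a toy `AuthProvider` against the post-migration `Authenticate(ctx, map[string]any)` signature. The `User` type and `staticProvider` below are simplified stand-ins for illustration only, not the package's real implementations:

```go
package main

import (
	"context"
	"fmt"
)

// Simplified stand-in for the User type shown in the diff above.
type User struct {
	ID       string
	Username string
	Roles    []string
}

type AuthProvider interface {
	Name() string
	Authenticate(ctx context.Context, credentials map[string]any) (*User, error)
	ValidateToken(token string) (*User, error)
}

// staticProvider is a hypothetical provider with a single hard-coded account.
type staticProvider struct{}

func (staticProvider) Name() string { return "static" }

func (staticProvider) Authenticate(ctx context.Context, credentials map[string]any) (*User, error) {
	username, _ := credentials["username"].(string)
	password, _ := credentials["password"].(string)
	if username == "admin" && password == "secret" {
		return &User{ID: "1", Username: username, Roles: []string{"admin"}}, nil
	}
	return nil, fmt.Errorf("invalid credentials")
}

func (staticProvider) ValidateToken(token string) (*User, error) {
	return nil, fmt.Errorf("tokens not supported")
}

func main() {
	var provider AuthProvider = staticProvider{}
	user, err := provider.Authenticate(context.Background(), map[string]any{
		"username": "admin",
		"password": "secret",
	})
	fmt.Println(user, err)
}
```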

View File

@@ -282,7 +282,7 @@ func Unmarshal(data any, dst any) error {
// Enhanced helper functions // Enhanced helper functions
// getHandlerInfo returns information about the handler (traditional or enhanced) // getHandlerInfo returns information about the handler (traditional or enhanced)
func (receiver *RunHandler) getHandlerInfo(name string) (interface{}, bool) { func (receiver *RunHandler) getHandlerInfo(name string) (any, bool) {
// Check enhanced handlers first // Check enhanced handlers first
if enhancedHandler := receiver.userConfig.GetEnhancedHandler(name); enhancedHandler != nil { if enhancedHandler := receiver.userConfig.GetEnhancedHandler(name); enhancedHandler != nil {
return *enhancedHandler, true return *enhancedHandler, true

View File

@@ -13,16 +13,16 @@ import (
type EnhancedValidation interface { type EnhancedValidation interface {
Validation Validation
// Enhanced methods for workflow integration // Enhanced methods for workflow integration
ValidateWorkflowInput(ctx context.Context, input map[string]interface{}, rules []*dag.WorkflowValidationRule) (ValidationResult, error) ValidateWorkflowInput(ctx context.Context, input map[string]any, rules []*dag.WorkflowValidationRule) (ValidationResult, error)
CreateValidationProcessor(rules []*dag.WorkflowValidationRule) (*dag.ValidatorProcessor, error) CreateValidationProcessor(rules []*dag.WorkflowValidationRule) (*dag.ValidatorProcessor, error)
} }
// Enhanced validation result for workflow integration // Enhanced validation result for workflow integration
type ValidationResult struct { type ValidationResult struct {
Valid bool `json:"valid"` Valid bool `json:"valid"`
Errors map[string]string `json:"errors,omitempty"` Errors map[string]string `json:"errors,omitempty"`
Data map[string]interface{} `json:"data"` Data map[string]any `json:"data"`
Message string `json:"message,omitempty"` Message string `json:"message,omitempty"`
} }
// Enhanced DAG Service for workflow engine integration // Enhanced DAG Service for workflow engine integration
@@ -42,7 +42,7 @@ type EnhancedDAGService interface {
// Workflow engine integration // Workflow engine integration
GetWorkflowEngine(dagKey string) *dag.WorkflowEngineManager GetWorkflowEngine(dagKey string) *dag.WorkflowEngineManager
CreateWorkflowFromHandler(handler EnhancedHandler) (*dag.WorkflowDefinition, error) CreateWorkflowFromHandler(handler EnhancedHandler) (*dag.WorkflowDefinition, error)
ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*dag.ExecutionResult, error) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*dag.ExecutionResult, error)
} }
// Enhanced Handler that supports workflow engine features // Enhanced Handler that supports workflow engine features
@@ -113,8 +113,8 @@ type EnhancedEdge struct {
// Workflow processor configurations // Workflow processor configurations
type WorkflowProcessorConfig struct { type WorkflowProcessorConfig struct {
Type string `json:"type" yaml:"type"` Type string `json:"type" yaml:"type"`
Config map[string]interface{} `json:"config" yaml:"config"` Config map[string]any `json:"config" yaml:"config"`
} }
type HTMLProcessorConfig struct { type HTMLProcessorConfig struct {
@@ -141,7 +141,7 @@ type AuthProcessorConfig struct {
type ValidatorProcessorConfig struct { type ValidatorProcessorConfig struct {
ValidationRules []*dag.WorkflowValidationRule `json:"validation_rules" yaml:"validation_rules"` ValidationRules []*dag.WorkflowValidationRule `json:"validation_rules" yaml:"validation_rules"`
Schema map[string]interface{} `json:"schema" yaml:"schema"` Schema map[string]any `json:"schema" yaml:"schema"`
StrictMode bool `json:"strict_mode" yaml:"strict_mode"` StrictMode bool `json:"strict_mode" yaml:"strict_mode"`
} }
@@ -168,11 +168,11 @@ type NotifyProcessorConfig struct {
} }
type WebhookProcessorConfig struct { type WebhookProcessorConfig struct {
ListenPath string `json:"listen_path" yaml:"listen_path"` ListenPath string `json:"listen_path" yaml:"listen_path"`
Secret string `json:"secret" yaml:"secret"` Secret string `json:"secret" yaml:"secret"`
Signature string `json:"signature" yaml:"signature"` Signature string `json:"signature" yaml:"signature"`
Transforms map[string]interface{} `json:"transforms" yaml:"transforms"` Transforms map[string]any `json:"transforms" yaml:"transforms"`
Timeout string `json:"timeout" yaml:"timeout"` Timeout string `json:"timeout" yaml:"timeout"`
} }
// Enhanced service manager // Enhanced service manager
@@ -181,7 +181,7 @@ type EnhancedServiceManager interface {
Initialize(config *EnhancedServiceConfig) error Initialize(config *EnhancedServiceConfig) error
Start(ctx context.Context) error Start(ctx context.Context) error
Stop(ctx context.Context) error Stop(ctx context.Context) error
Health() map[string]interface{} Health() map[string]any
// Enhanced DAG management // Enhanced DAG management
RegisterEnhancedHandler(handler EnhancedHandler) error RegisterEnhancedHandler(handler EnhancedHandler) error
@@ -190,7 +190,7 @@ type EnhancedServiceManager interface {
// Workflow engine integration // Workflow engine integration
GetWorkflowEngine() *dag.WorkflowEngineManager GetWorkflowEngine() *dag.WorkflowEngineManager
ExecuteEnhancedWorkflow(ctx context.Context, key string, input map[string]interface{}) (*dag.ExecutionResult, error) ExecuteEnhancedWorkflow(ctx context.Context, key string, input map[string]any) (*dag.ExecutionResult, error)
// HTTP integration // HTTP integration
RegisterHTTPRoutes(app *fiber.App) error RegisterHTTPRoutes(app *fiber.App) error

View File

@@ -109,7 +109,7 @@ func (eds *enhancedDAGService) CreateWorkflowFromHandler(handler EnhancedHandler
} }
// ExecuteWorkflow executes a workflow // ExecuteWorkflow executes a workflow
func (eds *enhancedDAGService) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]interface{}) (*dag.ExecutionResult, error) { func (eds *enhancedDAGService) ExecuteWorkflow(ctx context.Context, workflowID string, input map[string]any) (*dag.ExecutionResult, error) {
enhancedDAG := eds.GetEnhancedDAG(workflowID) enhancedDAG := eds.GetEnhancedDAG(workflowID)
if enhancedDAG != nil { if enhancedDAG != nil {
// Execute enhanced DAG workflow // Execute enhanced DAG workflow
@@ -139,7 +139,7 @@ func (eds *enhancedDAGService) StoreDAG(key string, traditionalDAG *dag.DAG) err
// Helper methods // Helper methods
func (eds *enhancedDAGService) executeEnhancedDAGWorkflow(ctx context.Context, enhancedDAG *dag.EnhancedDAG, input map[string]interface{}) (*dag.ExecutionResult, error) { func (eds *enhancedDAGService) executeEnhancedDAGWorkflow(ctx context.Context, enhancedDAG *dag.EnhancedDAG, input map[string]any) (*dag.ExecutionResult, error) {
// This would need to be implemented based on the actual EnhancedDAG API // This would need to be implemented based on the actual EnhancedDAG API
// For now, create a mock result // For now, create a mock result
result := &dag.ExecutionResult{ result := &dag.ExecutionResult{
@@ -151,7 +151,7 @@ func (eds *enhancedDAGService) executeEnhancedDAGWorkflow(ctx context.Context, e
return result, nil return result, nil
} }
func (eds *enhancedDAGService) executeTraditionalDAGWorkflow(ctx context.Context, traditionalDAG *dag.DAG, input map[string]interface{}) (*dag.ExecutionResult, error) { func (eds *enhancedDAGService) executeTraditionalDAGWorkflow(ctx context.Context, traditionalDAG *dag.DAG, input map[string]any) (*dag.ExecutionResult, error) {
// Convert input to bytes // Convert input to bytes
inputBytes, err := json.Marshal(input) inputBytes, err := json.Marshal(input)
if err != nil { if err != nil {
@@ -162,10 +162,10 @@ func (eds *enhancedDAGService) executeTraditionalDAGWorkflow(ctx context.Context
result := traditionalDAG.Process(ctx, inputBytes) result := traditionalDAG.Process(ctx, inputBytes)
// Convert result to ExecutionResult format // Convert result to ExecutionResult format
var output map[string]interface{} var output map[string]any
if err := json.Unmarshal(result.Payload, &output); err != nil { if err := json.Unmarshal(result.Payload, &output); err != nil {
// If unmarshal fails, use the raw payload // If unmarshal fails, use the raw payload
output = map[string]interface{}{ output = map[string]any{
"raw_payload": string(result.Payload), "raw_payload": string(result.Payload),
} }
} }
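The fallback above (decode the payload into `map[string]any`, otherwise wrap it verbatim under `raw_payload`) is a small pattern worth isolating. A self-contained sketch, independent of the DAG types:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// decodeOutput tries to treat a payload as a JSON object and otherwise
// preserves it verbatim under a well-known key, matching the fallback above.
func decodeOutput(payload []byte) map[string]any {
	var output map[string]any
	if err := json.Unmarshal(payload, &output); err != nil {
		return map[string]any{"raw_payload": string(payload)}
	}
	return output
}

func main() {
	fmt.Println(decodeOutput([]byte(`{"status":"ok","count":3}`))) // decoded object
	fmt.Println(decodeOutput([]byte("plain text result")))         // raw fallback
}
```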

View File

@@ -88,8 +88,8 @@ func (sm *enhancedServiceManager) Stop(ctx context.Context) error {
} }
// Health returns the health status of all services // Health returns the health status of all services
func (sm *enhancedServiceManager) Health() map[string]interface{} { func (sm *enhancedServiceManager) Health() map[string]any {
health := make(map[string]interface{}) health := make(map[string]any)
health["running"] = sm.running health["running"] = sm.running
health["workflow_engine"] = sm.workflowEngine != nil health["workflow_engine"] = sm.workflowEngine != nil
@@ -174,7 +174,7 @@ func (sm *enhancedServiceManager) GetWorkflowEngine() *dag.WorkflowEngineManager
} }
// ExecuteEnhancedWorkflow executes a workflow with enhanced features // ExecuteEnhancedWorkflow executes a workflow with enhanced features
func (sm *enhancedServiceManager) ExecuteEnhancedWorkflow(ctx context.Context, key string, input map[string]interface{}) (*dag.ExecutionResult, error) { func (sm *enhancedServiceManager) ExecuteEnhancedWorkflow(ctx context.Context, key string, input map[string]any) (*dag.ExecutionResult, error) {
handler, err := sm.GetEnhancedHandler(key) handler, err := sm.GetEnhancedHandler(key)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -199,9 +199,9 @@ func (sm *enhancedServiceManager) ExecuteEnhancedWorkflow(ctx context.Context, k
result := traditionalDAG.Process(ctx, inputBytes) result := traditionalDAG.Process(ctx, inputBytes)
// Convert output // Convert output
var output map[string]interface{} var output map[string]any
if err := json.Unmarshal(result.Payload, &output); err != nil { if err := json.Unmarshal(result.Payload, &output); err != nil {
output = map[string]interface{}{"raw": string(result.Payload)} output = map[string]any{"raw": string(result.Payload)}
} }
// Convert result to ExecutionResult format // Convert result to ExecutionResult format
@@ -244,7 +244,7 @@ func (sm *enhancedServiceManager) RegisterHTTPRoutes(app *fiber.App) error {
api.Post("/execute/:key", func(c *fiber.Ctx) error { api.Post("/execute/:key", func(c *fiber.Ctx) error {
key := c.Params("key") key := c.Params("key")
var input map[string]interface{} var input map[string]any
if err := c.BodyParser(&input); err != nil { if err := c.BodyParser(&input); err != nil {
return c.Status(400).JSON(fiber.Map{ return c.Status(400).JSON(fiber.Map{
"error": "Invalid input format", "error": "Invalid input format",
@@ -434,7 +434,7 @@ func (sm *enhancedServiceManager) registerWorkflowEngineRoutes(api fiber.Router)
workflows.Post("/:id/execute", func(c *fiber.Ctx) error { workflows.Post("/:id/execute", func(c *fiber.Ctx) error {
id := c.Params("id") id := c.Params("id")
var input map[string]interface{} var input map[string]any
if err := c.BodyParser(&input); err != nil { if err := c.BodyParser(&input); err != nil {
return c.Status(400).JSON(fiber.Map{"error": "Invalid input"}) return c.Status(400).JSON(fiber.Map{"error": "Invalid input"})
} }

View File

@@ -43,7 +43,7 @@ func (ev *enhancedValidation) Rules() []Rule {
} }
// ValidateWorkflowInput validates input using workflow validation rules // ValidateWorkflowInput validates input using workflow validation rules
func (ev *enhancedValidation) ValidateWorkflowInput(ctx context.Context, input map[string]interface{}, rules []*dag.WorkflowValidationRule) (ValidationResult, error) { func (ev *enhancedValidation) ValidateWorkflowInput(ctx context.Context, input map[string]any, rules []*dag.WorkflowValidationRule) (ValidationResult, error) {
result := ValidationResult{ result := ValidationResult{
Valid: true, Valid: true,
Errors: make(map[string]string), Errors: make(map[string]string),
@@ -88,7 +88,7 @@ func (ev *enhancedValidation) CreateValidationProcessor(rules []*dag.WorkflowVal
} }
// Helper method to validate individual fields // Helper method to validate individual fields
func (ev *enhancedValidation) validateField(input map[string]interface{}, rule *dag.WorkflowValidationRule, result *ValidationResult) error { func (ev *enhancedValidation) validateField(input map[string]any, rule *dag.WorkflowValidationRule, result *ValidationResult) error {
value, exists := input[rule.Field] value, exists := input[rule.Field]
// Check required fields // Check required fields
@@ -134,7 +134,7 @@ func (ev *enhancedValidation) validateField(input map[string]interface{}, rule *
return nil return nil
} }
func (ev *enhancedValidation) validateString(value interface{}, rule *dag.WorkflowValidationRule, result *ValidationResult) error { func (ev *enhancedValidation) validateString(value any, rule *dag.WorkflowValidationRule, result *ValidationResult) error {
str, ok := value.(string) str, ok := value.(string)
if !ok { if !ok {
result.Valid = false result.Valid = false
@@ -172,7 +172,7 @@ func (ev *enhancedValidation) validateString(value interface{}, rule *dag.Workfl
return nil return nil
} }
func (ev *enhancedValidation) validateNumber(value interface{}, rule *dag.WorkflowValidationRule, result *ValidationResult) error { func (ev *enhancedValidation) validateNumber(value any, rule *dag.WorkflowValidationRule, result *ValidationResult) error {
var num float64 var num float64
var ok bool var ok bool
@@ -212,7 +212,7 @@ func (ev *enhancedValidation) validateNumber(value interface{}, rule *dag.Workfl
return nil return nil
} }
func (ev *enhancedValidation) validateEmail(value interface{}, rule *dag.WorkflowValidationRule, result *ValidationResult) error { func (ev *enhancedValidation) validateEmail(value any, rule *dag.WorkflowValidationRule, result *ValidationResult) error {
email, ok := value.(string) email, ok := value.(string)
if !ok { if !ok {
result.Valid = false result.Valid = false
@@ -233,7 +233,7 @@ func (ev *enhancedValidation) validateEmail(value interface{}, rule *dag.Workflo
return nil return nil
} }
func (ev *enhancedValidation) validateBool(value interface{}, rule *dag.WorkflowValidationRule, result *ValidationResult) error { func (ev *enhancedValidation) validateBool(value any, rule *dag.WorkflowValidationRule, result *ValidationResult) error {
_, ok := value.(bool) _, ok := value.(bool)
if !ok { if !ok {
result.Valid = false result.Valid = false
@@ -244,7 +244,7 @@ func (ev *enhancedValidation) validateBool(value interface{}, rule *dag.Workflow
return nil return nil
} }
func (ev *enhancedValidation) validateCustom(value interface{}, rule *dag.WorkflowValidationRule, result *ValidationResult) error { func (ev *enhancedValidation) validateCustom(value any, rule *dag.WorkflowValidationRule, result *ValidationResult) error {
// Custom validation logic - implement based on your needs // Custom validation logic - implement based on your needs
// For now, just accept any value for custom types // For now, just accept any value for custom types
return nil return nil
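One detail a validator like `validateNumber` has to cope with: when request bodies are decoded into `map[string]any`, every JSON number arrives as `float64`, while inputs built directly in Go code may carry `int` values. A small sketch of that normalisation, written under that assumption and independent of the validation types above:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// asFloat normalises the numeric types a validator is likely to see in a
// map[string]any input: float64 from encoding/json, int from Go callers.
func asFloat(value any) (float64, bool) {
	switch v := value.(type) {
	case float64:
		return v, true
	case int:
		return float64(v), true
	default:
		return 0, false
	}
}

func main() {
	var input map[string]any
	_ = json.Unmarshal([]byte(`{"age": 42}`), &input)

	if num, ok := asFloat(input["age"]); ok {
		fmt.Println("age within range:", num >= 18 && num <= 120)
	}

	direct := map[string]any{"age": 42} // stored as int, not float64
	_, ok := direct["age"].(float64)
	fmt.Println("direct int asserts as float64:", ok) // false
}
```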

View File

@@ -80,8 +80,8 @@ func NewJSONEngine(config *AppConfiguration) *JSONEngine {
functions: make(map[string]*Function), functions: make(map[string]*Function),
validators: make(map[string]*Validator), validators: make(map[string]*Validator),
middleware: make(map[string]*Middleware), middleware: make(map[string]*Middleware),
data: make(map[string]interface{}), data: make(map[string]any),
genericData: make(map[string]interface{}), genericData: make(map[string]any),
} }
// Store the configuration // Store the configuration
@@ -115,7 +115,7 @@ func (e *JSONEngine) Compile() error {
// Initialize genericData with config data for backward compatibility // Initialize genericData with config data for backward compatibility
if e.genericData == nil { if e.genericData == nil {
e.genericData = make(map[string]interface{}) e.genericData = make(map[string]any)
} }
// Merge config data into genericData // Merge config data into genericData
@@ -153,7 +153,7 @@ func (e *JSONEngine) Compile() error {
// Initialize genericData with config data for backward compatibility // Initialize genericData with config data for backward compatibility
if e.genericData == nil { if e.genericData == nil {
e.genericData = make(map[string]interface{}) e.genericData = make(map[string]any)
} }
// Merge config data into genericData // Merge config data into genericData
@@ -248,7 +248,7 @@ func (e *JSONEngine) compileFunctions() error {
function := &Function{ function := &Function{
ID: id, ID: id,
Config: functionConfig, Config: functionConfig,
Runtime: make(map[string]interface{}), Runtime: make(map[string]any),
} }
// Compile function based on type - completely generic approach // Compile function based on type - completely generic approach
@@ -284,7 +284,7 @@ func (e *JSONEngine) compileValidators() error {
ID: id, ID: id,
Config: validatorConfig, Config: validatorConfig,
Rules: validatorConfig.Rules, // Now using generic map Rules: validatorConfig.Rules, // Now using generic map
Runtime: make(map[string]interface{}), Runtime: make(map[string]any),
} }
} }
return nil return nil
@@ -301,7 +301,7 @@ func (e *JSONEngine) compileWorkflows() error {
Nodes: make(map[string]*Node), Nodes: make(map[string]*Node),
Edges: make([]*Edge, 0), Edges: make([]*Edge, 0),
Runtime: &WorkflowRuntime{ Runtime: &WorkflowRuntime{
Context: make(map[string]interface{}), Context: make(map[string]any),
Variables: workflowConfig.Variables, Variables: workflowConfig.Variables,
Status: "ready", Status: "ready",
}, },
@@ -312,8 +312,8 @@ func (e *JSONEngine) compileWorkflows() error {
node := &Node{ node := &Node{
ID: nodeConfig.ID, ID: nodeConfig.ID,
Config: nodeConfig, Config: nodeConfig,
Inputs: make(map[string]interface{}), Inputs: make(map[string]any),
Outputs: make(map[string]interface{}), Outputs: make(map[string]any),
} }
// Link function if specified // Link function if specified
@@ -436,15 +436,15 @@ func (e *JSONEngine) createRouteHandler(routeConfig RouteConfig) fiber.Handler {
// Create execution context with enhanced generic data // Create execution context with enhanced generic data
ctx := &ExecutionContext{ ctx := &ExecutionContext{
Request: c, Request: c,
Data: make(map[string]interface{}), Data: make(map[string]any),
Variables: make(map[string]interface{}), Variables: make(map[string]any),
Session: make(map[string]interface{}), Session: make(map[string]any),
User: make(map[string]interface{}), User: make(map[string]any),
Functions: e.functions, Functions: e.functions,
Validators: e.validators, Validators: e.validators,
Config: e.config, Config: e.config,
Runtime: make(map[string]interface{}), Runtime: make(map[string]any),
Context: make(map[string]interface{}), Context: make(map[string]any),
} }
// Add global and generic data to context // Add global and generic data to context
@@ -508,7 +508,7 @@ func (e *JSONEngine) handleTemplate(ctx *ExecutionContext, routeConfig RouteConf
} }
// Prepare template data // Prepare template data
data := make(map[string]interface{}) data := make(map[string]any)
// Add global data // Add global data
for k, v := range e.data { for k, v := range e.data {
@@ -529,7 +529,7 @@ func (e *JSONEngine) handleTemplate(ctx *ExecutionContext, routeConfig RouteConf
if templateID == "employee_form" { if templateID == "employee_form" {
if emp, exists := data["employee"]; !exists || emp == nil { if emp, exists := data["employee"]; !exists || emp == nil {
// For add mode: provide empty employee object and set isEditMode to false // For add mode: provide empty employee object and set isEditMode to false
data["employee"] = map[string]interface{}{ data["employee"] = map[string]any{
"id": "", "id": "",
"name": "", "name": "",
"email": "", "email": "",
@@ -542,7 +542,7 @@ func (e *JSONEngine) handleTemplate(ctx *ExecutionContext, routeConfig RouteConf
data["isEditMode"] = false data["isEditMode"] = false
} else { } else {
// For edit mode: ensure employee has all required fields and set isEditMode to true // For edit mode: ensure employee has all required fields and set isEditMode to true
if empMap, ok := emp.(map[string]interface{}); ok { if empMap, ok := emp.(map[string]any); ok {
// Fill in any missing fields with empty values // Fill in any missing fields with empty values
fields := []string{"id", "name", "email", "department", "position", "salary", "hire_date", "status"} fields := []string{"id", "name", "email", "department", "position", "salary", "hire_date", "status"}
for _, field := range fields { for _, field := range fields {
@@ -557,7 +557,7 @@ func (e *JSONEngine) handleTemplate(ctx *ExecutionContext, routeConfig RouteConf
} }
// Add request data // Add request data
data["request"] = map[string]interface{}{ data["request"] = map[string]any{
"method": ctx.Request.Method(), "method": ctx.Request.Method(),
"path": ctx.Request.Path(), "path": ctx.Request.Path(),
"query": ctx.Request.Queries(), "query": ctx.Request.Queries(),
@@ -602,7 +602,7 @@ func (e *JSONEngine) handleTemplate(ctx *ExecutionContext, routeConfig RouteConf
} }
// renderTemplate renders a template with data // renderTemplate renders a template with data
func (e *JSONEngine) renderTemplate(template *Template, data map[string]interface{}) (string, error) { func (e *JSONEngine) renderTemplate(template *Template, data map[string]any) (string, error) {
tmpl := template.Compiled.(*htmlTemplate.Template) tmpl := template.Compiled.(*htmlTemplate.Template)
var buf strings.Builder var buf strings.Builder
if err := tmpl.Execute(&buf, data); err != nil { if err := tmpl.Execute(&buf, data); err != nil {
@@ -669,7 +669,7 @@ func (e *JSONEngine) handleFunction(ctx *ExecutionContext, routeConfig RouteConf
} }
// Prepare input data // Prepare input data
input := make(map[string]interface{}) input := make(map[string]any)
// Add handler input if specified // Add handler input if specified
if routeConfig.Handler.Input != nil { if routeConfig.Handler.Input != nil {
@@ -680,7 +680,7 @@ func (e *JSONEngine) handleFunction(ctx *ExecutionContext, routeConfig RouteConf
// Add request body for POST/PUT requests // Add request body for POST/PUT requests
if ctx.Request.Method() == "POST" || ctx.Request.Method() == "PUT" { if ctx.Request.Method() == "POST" || ctx.Request.Method() == "PUT" {
var body map[string]interface{} var body map[string]any
if err := ctx.Request.BodyParser(&body); err == nil { if err := ctx.Request.BodyParser(&body); err == nil {
for k, v := range body { for k, v := range body {
input[k] = v input[k] = v
@@ -721,7 +721,7 @@ func (e *JSONEngine) handleFunction(ctx *ExecutionContext, routeConfig RouteConf
} }
// Merge function result with context data // Merge function result with context data
templateData := make(map[string]interface{}) templateData := make(map[string]any)
// Add global data first // Add global data first
for k, v := range ctx.Data { for k, v := range ctx.Data {
@@ -765,7 +765,7 @@ func (e *JSONEngine) checkAuthentication(ctx *ExecutionContext, auth *AuthConfig
token = ctx.Request.Query("token") token = ctx.Request.Query("token")
} }
if token == "" && ctx.Request.Method() == "POST" { if token == "" && ctx.Request.Method() == "POST" {
var body map[string]interface{} var body map[string]any
if err := ctx.Request.BodyParser(&body); err == nil { if err := ctx.Request.BodyParser(&body); err == nil {
if t, ok := body["token"].(string); ok { if t, ok := body["token"].(string); ok {
token = t token = t
@@ -778,7 +778,7 @@ func (e *JSONEngine) checkAuthentication(ctx *ExecutionContext, auth *AuthConfig
} }
// Simple token validation (in real app, validate JWT or session) // Simple token validation (in real app, validate JWT or session)
ctx.User = map[string]interface{}{ ctx.User = map[string]any{
"id": "user_" + token, "id": "user_" + token,
"username": "demo_user", "username": "demo_user",
"role": "user", "role": "user",
@@ -789,8 +789,8 @@ func (e *JSONEngine) checkAuthentication(ctx *ExecutionContext, auth *AuthConfig
} }
// Function executors // Function executors
func (e *JSONEngine) createHTTPFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createHTTPFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
client := &http.Client{Timeout: getDefaultDuration(e.workflowEngineConfig.ExecutionTimeout, 30*time.Second)} client := &http.Client{Timeout: getDefaultDuration(e.workflowEngineConfig.ExecutionTimeout, 30*time.Second)}
method := config.Method method := config.Method
@@ -823,10 +823,10 @@ func (e *JSONEngine) createHTTPFunction(config FunctionConfig) interface{} {
return nil, err return nil, err
} }
var result map[string]interface{} var result map[string]any
if err := json.Unmarshal(body, &result); err != nil { if err := json.Unmarshal(body, &result); err != nil {
// If not JSON, return as string // If not JSON, return as string
result = map[string]interface{}{ result = map[string]any{
"status": resp.StatusCode, "status": resp.StatusCode,
"body": string(body), "body": string(body),
} }
@@ -836,8 +836,8 @@ func (e *JSONEngine) createHTTPFunction(config FunctionConfig) interface{} {
} }
} }
func (e *JSONEngine) createExpressionFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createExpressionFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
// Special handling for authentication function // Special handling for authentication function
if config.ID == "authenticate_user" || strings.Contains(config.Code, "validate user credentials") { if config.ID == "authenticate_user" || strings.Contains(config.Code, "validate user credentials") {
return e.handleAuthentication(ctx, input) return e.handleAuthentication(ctx, input)
@@ -845,7 +845,7 @@ func (e *JSONEngine) createExpressionFunction(config FunctionConfig) interface{}
// If there's a response configuration, use it directly // If there's a response configuration, use it directly
if config.Response != nil { if config.Response != nil {
result := make(map[string]interface{}) result := make(map[string]any)
// Process response template with data substitution // Process response template with data substitution
for key, value := range config.Response { for key, value := range config.Response {
@@ -909,7 +909,7 @@ func (e *JSONEngine) createExpressionFunction(config FunctionConfig) interface{}
// Try to parse as JSON first // Try to parse as JSON first
if strings.HasPrefix(strings.TrimSpace(expression), "{") { if strings.HasPrefix(strings.TrimSpace(expression), "{") {
var jsonResult map[string]interface{} var jsonResult map[string]any
if err := json.Unmarshal([]byte(expression), &jsonResult); err == nil { if err := json.Unmarshal([]byte(expression), &jsonResult); err == nil {
return jsonResult, nil return jsonResult, nil
} else { } else {
@@ -918,14 +918,14 @@ func (e *JSONEngine) createExpressionFunction(config FunctionConfig) interface{}
} }
// If not JSON, return as simple result // If not JSON, return as simple result
return map[string]interface{}{ return map[string]any{
"result": expression, "result": expression,
}, nil }, nil
} }
} }
func (e *JSONEngine) createTemplateFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createTemplateFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
tmpl, err := htmlTemplate.New("function").Parse(config.Code) tmpl, err := htmlTemplate.New("function").Parse(config.Code)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -936,16 +936,16 @@ func (e *JSONEngine) createTemplateFunction(config FunctionConfig) interface{} {
return nil, err return nil, err
} }
return map[string]interface{}{ return map[string]any{
"result": buf.String(), "result": buf.String(),
}, nil }, nil
} }
} }
func (e *JSONEngine) createJSFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createJSFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
// Placeholder for JavaScript execution (would use goja or similar in production) // Placeholder for JavaScript execution (would use goja or similar in production)
return map[string]interface{}{ return map[string]any{
"result": "JavaScript execution not implemented in demo", "result": "JavaScript execution not implemented in demo",
"code": config.Code, "code": config.Code,
"input": input, "input": input,
@@ -956,7 +956,7 @@ func (e *JSONEngine) createJSFunction(config FunctionConfig) interface{} {
// Additional generic route handlers for any application type // Additional generic route handlers for any application type
func (e *JSONEngine) handleJSON(ctx *ExecutionContext, routeConfig RouteConfig) error { func (e *JSONEngine) handleJSON(ctx *ExecutionContext, routeConfig RouteConfig) error {
// Handle pure JSON responses // Handle pure JSON responses
response := make(map[string]interface{}) response := make(map[string]any)
// Add handler output if specified // Add handler output if specified
if routeConfig.Handler.Output != nil { if routeConfig.Handler.Output != nil {
@@ -974,7 +974,7 @@ func (e *JSONEngine) handleJSON(ctx *ExecutionContext, routeConfig RouteConfig)
// Add request data if available // Add request data if available
if ctx.Request.Method() == "POST" || ctx.Request.Method() == "PUT" { if ctx.Request.Method() == "POST" || ctx.Request.Method() == "PUT" {
var body map[string]interface{} var body map[string]any
if err := ctx.Request.BodyParser(&body); err == nil { if err := ctx.Request.BodyParser(&body); err == nil {
response["request_data"] = body response["request_data"] = body
} }
@@ -1006,7 +1006,7 @@ func (e *JSONEngine) handleAPI(ctx *ExecutionContext, routeConfig RouteConfig) e
} }
// Return configured output or input as fallback // Return configured output or input as fallback
response := make(map[string]interface{}) response := make(map[string]any)
if routeConfig.Handler.Output != nil { if routeConfig.Handler.Output != nil {
response = routeConfig.Handler.Output response = routeConfig.Handler.Output
} else if routeConfig.Handler.Input != nil { } else if routeConfig.Handler.Input != nil {
@@ -1024,7 +1024,7 @@ func (e *JSONEngine) handleGeneric(ctx *ExecutionContext, routeConfig RouteConfi
// Generic handler for unknown types - maximum flexibility // Generic handler for unknown types - maximum flexibility
log.Printf("Using generic handler for type: %s", routeConfig.Handler.Type) log.Printf("Using generic handler for type: %s", routeConfig.Handler.Type)
response := map[string]interface{}{ response := map[string]any{
"handler_type": routeConfig.Handler.Type, "handler_type": routeConfig.Handler.Type,
"target": routeConfig.Handler.Target, "target": routeConfig.Handler.Target,
"method": ctx.Request.Method(), "method": ctx.Request.Method(),
@@ -1044,7 +1044,7 @@ func (e *JSONEngine) handleGeneric(ctx *ExecutionContext, routeConfig RouteConfi
// Add request body for POST/PUT requests // Add request body for POST/PUT requests
if ctx.Request.Method() == "POST" || ctx.Request.Method() == "PUT" { if ctx.Request.Method() == "POST" || ctx.Request.Method() == "PUT" {
var body map[string]interface{} var body map[string]any
if err := ctx.Request.BodyParser(&body); err == nil { if err := ctx.Request.BodyParser(&body); err == nil {
response["request_body"] = body response["request_body"] = body
} }
@@ -1073,7 +1073,7 @@ func (e *JSONEngine) createAuthMiddleware(config MiddlewareConfig) fiber.Handler
return func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error {
// Simple auth middleware // Simple auth middleware
if config.Config["skip_paths"] != nil { if config.Config["skip_paths"] != nil {
skipPaths := config.Config["skip_paths"].([]interface{}) skipPaths := config.Config["skip_paths"].([]any)
for _, path := range skipPaths { for _, path := range skipPaths {
if c.Path() == path.(string) { if c.Path() == path.(string) {
return c.Next() return c.Next()
@@ -1117,11 +1117,11 @@ func (e *JSONEngine) createCustomMiddleware(config MiddlewareConfig) fiber.Handl
} }
// Workflow execution using real workflow engine // Workflow execution using real workflow engine
func (e *JSONEngine) executeWorkflow(ctx *ExecutionContext, workflow *Workflow, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) executeWorkflow(ctx *ExecutionContext, workflow *Workflow, input map[string]any) (map[string]any, error) {
log.Printf("Executing workflow: %s", workflow.ID) log.Printf("Executing workflow: %s", workflow.ID)
// Initialize workflow context // Initialize workflow context
workflowCtx := make(map[string]interface{}) workflowCtx := make(map[string]any)
for k, v := range input { for k, v := range input {
workflowCtx[k] = v workflowCtx[k] = v
} }
@@ -1130,8 +1130,8 @@ func (e *JSONEngine) executeWorkflow(ctx *ExecutionContext, workflow *Workflow,
} }
// Simple sequential execution // Simple sequential execution
finalResult := make(map[string]interface{}) finalResult := make(map[string]any)
var lastNodeResult map[string]interface{} var lastNodeResult map[string]any
for _, node := range workflow.Nodes { for _, node := range workflow.Nodes {
ctx.Node = node ctx.Node = node
@@ -1167,7 +1167,7 @@ func (e *JSONEngine) executeWorkflow(ctx *ExecutionContext, workflow *Workflow,
// If still no result, return the last meaningful result // If still no result, return the last meaningful result
if len(finalResult) == 0 { if len(finalResult) == 0 {
// Return only safe, non-circular data // Return only safe, non-circular data
finalResult = map[string]interface{}{ finalResult = map[string]any{
"status": "completed", "status": "completed",
"message": workflowCtx["result"], "message": workflowCtx["result"],
} }
@@ -1187,9 +1187,9 @@ func (e *JSONEngine) executeWorkflow(ctx *ExecutionContext, workflow *Workflow,
} }
// sanitizeResult removes circular references and non-serializable data // sanitizeResult removes circular references and non-serializable data
func (e *JSONEngine) sanitizeResult(input map[string]interface{}) map[string]interface{} { func (e *JSONEngine) sanitizeResult(input map[string]any) map[string]any {
// Create a clean result with only the essential workflow output // Create a clean result with only the essential workflow output
result := make(map[string]interface{}) result := make(map[string]any)
// Include all safe fields that don't cause circular references // Include all safe fields that don't cause circular references
for key, value := range input { for key, value := range input {
@@ -1207,18 +1207,18 @@ func (e *JSONEngine) sanitizeResult(input map[string]interface{}) map[string]int
return result return result
} }
// cleanValue safely converts values to JSON-serializable types // cleanValue safely converts values to JSON-serializable types
func (e *JSONEngine) cleanValue(value interface{}) interface{} { func (e *JSONEngine) cleanValue(value any) any {
switch v := value.(type) { switch v := value.(type) {
case string, int, int64, float64, bool, nil: case string, int, int64, float64, bool, nil:
return v return v
case []interface{}: case []any:
cleanArray := make([]interface{}, 0, len(v)) cleanArray := make([]any, 0, len(v))
for _, item := range v { for _, item := range v {
cleanArray = append(cleanArray, e.cleanValue(item)) cleanArray = append(cleanArray, e.cleanValue(item))
} }
return cleanArray return cleanArray
case map[string]interface{}: case map[string]any:
cleanMap := make(map[string]interface{}) cleanMap := make(map[string]any)
for k, val := range v { for k, val := range v {
// Only include simple fields in nested maps // Only include simple fields in nested maps
switch val.(type) { switch val.(type) {
@@ -1236,7 +1236,7 @@ func (e *JSONEngine) cleanValue(value interface{}) interface{} {
} }
// Execute individual nodes - simplified implementation for now // Execute individual nodes - simplified implementation for now
func (e *JSONEngine) executeNode(ctx *ExecutionContext, node *Node, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) executeNode(ctx *ExecutionContext, node *Node, input map[string]any) (map[string]any, error) {
log.Printf("Executing node: %s (type: %s)", node.ID, node.Config.Type) log.Printf("Executing node: %s (type: %s)", node.ID, node.Config.Type)
switch node.Config.Type { switch node.Config.Type {
@@ -1247,7 +1247,7 @@ func (e *JSONEngine) executeNode(ctx *ExecutionContext, node *Node, input map[st
log.Printf("Executing sub-workflow: %s", subWorkflowID) log.Printf("Executing sub-workflow: %s", subWorkflowID)
// Map inputs if specified // Map inputs if specified
subInput := make(map[string]interface{}) subInput := make(map[string]any)
if node.Config.InputMapping != nil { if node.Config.InputMapping != nil {
for sourceKey, targetKey := range node.Config.InputMapping { for sourceKey, targetKey := range node.Config.InputMapping {
if value, exists := input[sourceKey]; exists { if value, exists := input[sourceKey]; exists {
@@ -1268,7 +1268,7 @@ func (e *JSONEngine) executeNode(ctx *ExecutionContext, node *Node, input map[st
// Map outputs if specified // Map outputs if specified
if node.Config.OutputMapping != nil { if node.Config.OutputMapping != nil {
mappedResult := make(map[string]interface{}) mappedResult := make(map[string]any)
for sourceKey, targetKey := range node.Config.OutputMapping { for sourceKey, targetKey := range node.Config.OutputMapping {
if value, exists := result[sourceKey]; exists { if value, exists := result[sourceKey]; exists {
if targetKeyStr, ok := targetKey.(string); ok { if targetKeyStr, ok := targetKey.(string); ok {
@@ -1296,15 +1296,15 @@ func (e *JSONEngine) executeNode(ctx *ExecutionContext, node *Node, input map[st
conditionStr := fmt.Sprintf("%v", condition) conditionStr := fmt.Sprintf("%v", condition)
// Simple evaluation (in production, would use a proper expression evaluator) // Simple evaluation (in production, would use a proper expression evaluator)
if strings.Contains(conditionStr, "true") { if strings.Contains(conditionStr, "true") {
return map[string]interface{}{"result": true}, nil return map[string]any{"result": true}, nil
} }
} }
return map[string]interface{}{"result": false}, nil return map[string]any{"result": false}, nil
case "data": case "data":
// Return configured data // Return configured data
if data, exists := node.Config.Config["data"]; exists { if data, exists := node.Config.Config["data"]; exists {
return map[string]interface{}{"data": data}, nil return map[string]any{"data": data}, nil
} }
return input, nil return input, nil
@@ -1315,13 +1315,13 @@ func (e *JSONEngine) executeNode(ctx *ExecutionContext, node *Node, input map[st
} }
// Function execution using the compiled function handlers // Function execution using the compiled function handlers
func (e *JSONEngine) executeFunction(ctx *ExecutionContext, function *Function, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) executeFunction(ctx *ExecutionContext, function *Function, input map[string]any) (map[string]any, error) {
if function.Handler == nil { if function.Handler == nil {
return nil, fmt.Errorf("function handler not compiled") return nil, fmt.Errorf("function handler not compiled")
} }
switch handler := function.Handler.(type) { switch handler := function.Handler.(type) {
case func(*ExecutionContext, map[string]interface{}) (map[string]interface{}, error): case func(*ExecutionContext, map[string]any) (map[string]any, error):
return handler(ctx, input) return handler(ctx, input)
default: default:
return nil, fmt.Errorf("unknown function handler type") return nil, fmt.Errorf("unknown function handler type")
@@ -1329,8 +1329,8 @@ func (e *JSONEngine) executeFunction(ctx *ExecutionContext, function *Function,
} }
// createBuiltinFunction creates handlers for built-in functions // createBuiltinFunction creates handlers for built-in functions
func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
switch config.Handler { switch config.Handler {
case "authenticate": case "authenticate":
// Handle user authentication // Handle user authentication
@@ -1338,7 +1338,7 @@ func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} {
password, _ := input["password"].(string) password, _ := input["password"].(string)
if username == "" || password == "" { if username == "" || password == "" {
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": "Username and password required", "error": "Username and password required",
}, nil }, nil
@@ -1346,23 +1346,23 @@ func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} {
// Generic authentication using user data from configuration // Generic authentication using user data from configuration
// Look for users in multiple possible data keys for flexibility // Look for users in multiple possible data keys for flexibility
var users []interface{} var users []any
if demoUsers, ok := e.data["demo_users"].([]interface{}); ok { if demoUsers, ok := e.data["demo_users"].([]any); ok {
users = demoUsers users = demoUsers
} else if configUsers, ok := e.data["users"].([]interface{}); ok { } else if configUsers, ok := e.data["users"].([]any); ok {
users = configUsers users = configUsers
} else if authUsers, ok := e.data["auth_users"].([]interface{}); ok { } else if authUsers, ok := e.data["auth_users"].([]any); ok {
users = authUsers users = authUsers
} else { } else {
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": "User authentication data not configured", "error": "User authentication data not configured",
}, nil }, nil
} }
for _, userInterface := range users { for _, userInterface := range users {
user, ok := userInterface.(map[string]interface{}) user, ok := userInterface.(map[string]any)
if !ok { if !ok {
continue continue
} }
@@ -1375,10 +1375,10 @@ func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} {
// Generate simple token (in production, use JWT) // Generate simple token (in production, use JWT)
token := fmt.Sprintf("token_%s_%d", username, time.Now().Unix()) token := fmt.Sprintf("token_%s_%d", username, time.Now().Unix())
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"token": token, "token": token,
"user": map[string]interface{}{ "user": map[string]any{
"username": username, "username": username,
"role": role, "role": role,
}, },
@@ -1386,7 +1386,7 @@ func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} {
} }
} }
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": "Invalid credentials", "error": "Invalid credentials",
}, nil }, nil
@@ -1395,14 +1395,14 @@ func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} {
return input, nil return input, nil
case "log": case "log":
log.Printf("Builtin log: %+v", input) log.Printf("Builtin log: %+v", input)
return map[string]interface{}{"logged": true}, nil return map[string]any{"logged": true}, nil
case "validate": case "validate":
// Simple validation example // Simple validation example
return map[string]interface{}{"valid": true}, nil return map[string]any{"valid": true}, nil
case "transform": case "transform":
// Simple data transformation // Simple data transformation
if data, exists := input["data"]; exists { if data, exists := input["data"]; exists {
return map[string]interface{}{"transformed": data}, nil return map[string]any{"transformed": data}, nil
} }
return input, nil return input, nil
default: default:
@@ -1412,8 +1412,8 @@ func (e *JSONEngine) createBuiltinFunction(config FunctionConfig) interface{} {
} }
// createCustomFunction creates handlers for custom user-defined functions // createCustomFunction creates handlers for custom user-defined functions
func (e *JSONEngine) createCustomFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createCustomFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
// Execute custom code from config.Code // Execute custom code from config.Code
if config.Code != "" { if config.Code != "" {
// For now, just return the configured response or echo input // For now, just return the configured response or echo input
@@ -1446,7 +1446,7 @@ func (e *JSONEngine) createCustomFunction(config FunctionConfig) interface{} {
} }
// Simple key-value transformation based on config // Simple key-value transformation based on config
result := make(map[string]interface{}) result := make(map[string]any)
for k, v := range input { for k, v := range input {
result[k] = v result[k] = v
} }
@@ -1463,11 +1463,11 @@ func (e *JSONEngine) createCustomFunction(config FunctionConfig) interface{} {
} }
// CRUD operation handlers // CRUD operation handlers
func (e *JSONEngine) handleCreateEntity(ctx *ExecutionContext, entity string, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleCreateEntity(ctx *ExecutionContext, entity string, input map[string]any) (map[string]any, error) {
switch entity { switch entity {
case "employee": case "employee":
// Create new employee // Create new employee
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": "Employee created successfully", "message": "Employee created successfully",
"id": time.Now().Unix(), // Simple ID generation "id": time.Now().Unix(), // Simple ID generation
@@ -1475,7 +1475,7 @@ func (e *JSONEngine) handleCreateEntity(ctx *ExecutionContext, entity string, in
}, nil }, nil
case "post": case "post":
// Create new blog post // Create new blog post
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": "Blog post created successfully", "message": "Blog post created successfully",
"id": time.Now().Unix(), "id": time.Now().Unix(),
@@ -1483,14 +1483,14 @@ func (e *JSONEngine) handleCreateEntity(ctx *ExecutionContext, entity string, in
}, nil }, nil
case "email": case "email":
// Create email campaign // Create email campaign
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": "Email campaign created successfully", "message": "Email campaign created successfully",
"id": time.Now().Unix(), "id": time.Now().Unix(),
"data": input, "data": input,
}, nil }, nil
default: default:
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": fmt.Sprintf("%s created successfully", entity), "message": fmt.Sprintf("%s created successfully", entity),
"id": time.Now().Unix(), "id": time.Now().Unix(),
@@ -1499,9 +1499,9 @@ func (e *JSONEngine) handleCreateEntity(ctx *ExecutionContext, entity string, in
} }
} }
func (e *JSONEngine) handleUpdateEntity(ctx *ExecutionContext, entity string, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleUpdateEntity(ctx *ExecutionContext, entity string, input map[string]any) (map[string]any, error) {
id, _ := input["id"].(string) id, _ := input["id"].(string)
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": fmt.Sprintf("%s updated successfully", entity), "message": fmt.Sprintf("%s updated successfully", entity),
"id": id, "id": id,
@@ -1509,16 +1509,16 @@ func (e *JSONEngine) handleUpdateEntity(ctx *ExecutionContext, entity string, in
}, nil }, nil
} }
func (e *JSONEngine) handleDeleteEntity(ctx *ExecutionContext, entity string, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleDeleteEntity(ctx *ExecutionContext, entity string, input map[string]any) (map[string]any, error) {
id, _ := input["id"].(string) id, _ := input["id"].(string)
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": fmt.Sprintf("%s deleted successfully", entity), "message": fmt.Sprintf("%s deleted successfully", entity),
"id": id, "id": id,
}, nil }, nil
} }
func (e *JSONEngine) handleGetEntity(ctx *ExecutionContext, entity string, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleGetEntity(ctx *ExecutionContext, entity string, input map[string]any) (map[string]any, error) {
// Get entity ID from input // Get entity ID from input
var entityID string var entityID string
if idVal, ok := input["id"]; ok { if idVal, ok := input["id"]; ok {
@@ -1526,7 +1526,7 @@ func (e *JSONEngine) handleGetEntity(ctx *ExecutionContext, entity string, input
} }
if entityID == "" { if entityID == "" {
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": entity + " ID is required", "error": entity + " ID is required",
}, nil }, nil
@@ -1535,14 +1535,14 @@ func (e *JSONEngine) handleGetEntity(ctx *ExecutionContext, entity string, input
// Look up entity data from configuration // Look up entity data from configuration
entityDataKey := entity + "s" // Assume plural form (employees, posts, etc.) entityDataKey := entity + "s" // Assume plural form (employees, posts, etc.)
if entityData, ok := e.data[entityDataKey]; ok { if entityData, ok := e.data[entityDataKey]; ok {
if entityList, ok := entityData.([]interface{}); ok { if entityList, ok := entityData.([]any); ok {
for _, item := range entityList { for _, item := range entityList {
if itemMap, ok := item.(map[string]interface{}); ok { if itemMap, ok := item.(map[string]any); ok {
if itemIDVal, ok := itemMap["id"]; ok { if itemIDVal, ok := itemMap["id"]; ok {
itemIDStr := fmt.Sprintf("%v", itemIDVal) itemIDStr := fmt.Sprintf("%v", itemIDVal)
if itemIDStr == entityID { if itemIDStr == entityID {
// Found the entity, return it with all required data // Found the entity, return it with all required data
result := make(map[string]interface{}) result := make(map[string]any)
// Add the entity with singular name // Add the entity with singular name
result[entity] = itemMap result[entity] = itemMap
@@ -1567,17 +1567,17 @@ func (e *JSONEngine) handleGetEntity(ctx *ExecutionContext, entity string, input
} }
} }
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": entity + " not found", "error": entity + " not found",
}, nil }, nil
} }
func (e *JSONEngine) handleListEntity(ctx *ExecutionContext, entity string, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleListEntity(ctx *ExecutionContext, entity string, input map[string]any) (map[string]any, error) {
// Look up entity data from configuration using plural form // Look up entity data from configuration using plural form
entityDataKey := entity + "s" // Assume plural form (employees, posts, etc.) entityDataKey := entity + "s" // Assume plural form (employees, posts, etc.)
if entityData, ok := e.data[entityDataKey]; ok { if entityData, ok := e.data[entityDataKey]; ok {
result := map[string]interface{}{ result := map[string]any{
"success": true, "success": true,
} }
result[entityDataKey] = entityData result[entityDataKey] = entityData
@@ -1593,14 +1593,14 @@ func (e *JSONEngine) handleListEntity(ctx *ExecutionContext, entity string, inpu
} }
// If no data found, return empty result // If no data found, return empty result
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
entity + "s": []interface{}{}, entity + "s": []any{},
}, nil }, nil
} }
func (e *JSONEngine) handleSendCampaign(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleSendCampaign(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"campaign_id": fmt.Sprintf("campaign_%d", time.Now().Unix()), "campaign_id": fmt.Sprintf("campaign_%d", time.Now().Unix()),
"emails_sent": 10, // Mock value "emails_sent": 10, // Mock value
@@ -1608,9 +1608,9 @@ func (e *JSONEngine) handleSendCampaign(ctx *ExecutionContext, input map[string]
}, nil }, nil
} }
func (e *JSONEngine) handlePublishEntity(ctx *ExecutionContext, entity string, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handlePublishEntity(ctx *ExecutionContext, entity string, input map[string]any) (map[string]any, error) {
id, _ := input["id"].(string) id, _ := input["id"].(string)
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"message": fmt.Sprintf("%s published successfully", entity), "message": fmt.Sprintf("%s published successfully", entity),
"id": id, "id": id,
@@ -1619,8 +1619,8 @@ func (e *JSONEngine) handlePublishEntity(ctx *ExecutionContext, entity string, i
} }
// createGenericFunction creates a generic function handler for unknown types // createGenericFunction creates a generic function handler for unknown types
func (e *JSONEngine) createGenericFunction(config FunctionConfig) interface{} { func (e *JSONEngine) createGenericFunction(config FunctionConfig) any {
return func(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { return func(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
log.Printf("Executing generic function: %s with type: %s", config.ID, config.Type) log.Printf("Executing generic function: %s with type: %s", config.ID, config.Type)
// For unknown function types, we create a flexible handler that: // For unknown function types, we create a flexible handler that:
@@ -1630,7 +1630,7 @@ func (e *JSONEngine) createGenericFunction(config FunctionConfig) interface{} {
} }
// 2. Applies any transformations from config // 2. Applies any transformations from config
result := make(map[string]interface{}) result := make(map[string]any)
for k, v := range input { for k, v := range input {
result[k] = v result[k] = v
} }
@@ -1651,12 +1651,12 @@ func (e *JSONEngine) createGenericFunction(config FunctionConfig) interface{} {
} }
// handleAuthentication handles user authentication with actual validation // handleAuthentication handles user authentication with actual validation
func (e *JSONEngine) handleAuthentication(ctx *ExecutionContext, input map[string]interface{}) (map[string]interface{}, error) { func (e *JSONEngine) handleAuthentication(ctx *ExecutionContext, input map[string]any) (map[string]any, error) {
username, _ := input["username"].(string) username, _ := input["username"].(string)
password, _ := input["password"].(string) password, _ := input["password"].(string)
if username == "" || password == "" { if username == "" || password == "" {
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": "Username and password required", "error": "Username and password required",
}, nil }, nil
@@ -1664,23 +1664,23 @@ func (e *JSONEngine) handleAuthentication(ctx *ExecutionContext, input map[strin
// Generic authentication using user data from configuration // Generic authentication using user data from configuration
// Look for users in multiple possible data keys for flexibility // Look for users in multiple possible data keys for flexibility
var users []interface{} var users []any
if demoUsers, ok := e.data["demo_users"].([]interface{}); ok { if demoUsers, ok := e.data["demo_users"].([]any); ok {
users = demoUsers users = demoUsers
} else if configUsers, ok := e.data["users"].([]interface{}); ok { } else if configUsers, ok := e.data["users"].([]any); ok {
users = configUsers users = configUsers
} else if authUsers, ok := e.data["auth_users"].([]interface{}); ok { } else if authUsers, ok := e.data["auth_users"].([]any); ok {
users = authUsers users = authUsers
} else { } else {
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": "User authentication data not configured", "error": "User authentication data not configured",
}, nil }, nil
} }
for _, userInterface := range users { for _, userInterface := range users {
user, ok := userInterface.(map[string]interface{}) user, ok := userInterface.(map[string]any)
if !ok { if !ok {
continue continue
} }
@@ -1693,10 +1693,10 @@ func (e *JSONEngine) handleAuthentication(ctx *ExecutionContext, input map[strin
// Generate simple token (in production, use JWT) // Generate simple token (in production, use JWT)
token := fmt.Sprintf("token_%s_%d", username, time.Now().Unix()) token := fmt.Sprintf("token_%s_%d", username, time.Now().Unix())
return map[string]interface{}{ return map[string]any{
"success": true, "success": true,
"token": token, "token": token,
"user": map[string]interface{}{ "user": map[string]any{
"username": username, "username": username,
"role": role, "role": role,
}, },
@@ -1704,7 +1704,7 @@ func (e *JSONEngine) handleAuthentication(ctx *ExecutionContext, input map[strin
} }
} }
return map[string]interface{}{ return map[string]any{
"success": false, "success": false,
"error": "Invalid credentials", "error": "Invalid credentials",
}, nil }, nil
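
Since Go 1.18, `any` is a predeclared alias for `interface{}`, so the signature changes in this file and throughout the commit are spelling-only: the types are identical, and existing type assertions and type switches keep working unchanged. A minimal, self-contained sketch of that equivalence:

```go
package main

import "fmt"

func main() {
	// any and interface{} name the identical type (Go 1.18+), so values are
	// assignable in both directions and assertions against either spelling succeed.
	var a any = map[string]any{"ok": true}
	var b interface{} = a
	fmt.Println(b.(map[string]interface{})["ok"]) // prints: true
}
```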

Binary file not shown.

View File

@@ -13,7 +13,7 @@ type AppConfiguration struct {
Middleware []MiddlewareConfig `json:"middleware"` Middleware []MiddlewareConfig `json:"middleware"`
Templates map[string]TemplateConfig `json:"templates"` Templates map[string]TemplateConfig `json:"templates"`
Workflows []WorkflowConfig `json:"workflows"` Workflows []WorkflowConfig `json:"workflows"`
Data map[string]interface{} `json:"data"` Data map[string]any `json:"data"`
Functions map[string]FunctionConfig `json:"functions"` Functions map[string]FunctionConfig `json:"functions"`
Validators map[string]ValidatorConfig `json:"validators"` Validators map[string]ValidatorConfig `json:"validators"`
} }
@@ -71,14 +71,14 @@ type ResponseConfig struct {
// HandlerConfig defines how to handle a route // HandlerConfig defines how to handle a route
type HandlerConfig struct { type HandlerConfig struct {
Type string `json:"type"` // "workflow", "template", "function", "redirect" Type string `json:"type"` // "workflow", "template", "function", "redirect"
Target string `json:"target"` Target string `json:"target"`
Template string `json:"template,omitempty"` Template string `json:"template,omitempty"`
Input map[string]interface{} `json:"input,omitempty"` Input map[string]any `json:"input,omitempty"`
Output map[string]interface{} `json:"output,omitempty"` Output map[string]any `json:"output,omitempty"`
ErrorHandling *ErrorHandlingConfig `json:"error_handling,omitempty"` ErrorHandling *ErrorHandlingConfig `json:"error_handling,omitempty"`
Authentication *AuthConfig `json:"authentication,omitempty"` Authentication *AuthConfig `json:"authentication,omitempty"`
Validation []string `json:"validation,omitempty"` Validation []string `json:"validation,omitempty"`
} }
// ErrorHandlingConfig defines error handling behavior // ErrorHandlingConfig defines error handling behavior
@@ -107,59 +107,59 @@ type AuthConfig struct {
// MiddlewareConfig defines middleware // MiddlewareConfig defines middleware
type MiddlewareConfig struct { type MiddlewareConfig struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
Type string `json:"type"` Type string `json:"type"`
Priority int `json:"priority"` Priority int `json:"priority"`
Config map[string]interface{} `json:"config,omitempty"` Config map[string]any `json:"config,omitempty"`
Functions []string `json:"functions,omitempty"` Functions []string `json:"functions,omitempty"`
Enabled bool `json:"enabled"` Enabled bool `json:"enabled"`
} }
// TemplateConfig defines templates // TemplateConfig defines templates
type TemplateConfig struct { type TemplateConfig struct {
Type string `json:"type"` // "html", "text", "json" Type string `json:"type"` // "html", "text", "json"
Content string `json:"content,omitempty"` Content string `json:"content,omitempty"`
Template string `json:"template,omitempty"` // Alternative field name for content Template string `json:"template,omitempty"` // Alternative field name for content
File string `json:"file,omitempty"` File string `json:"file,omitempty"`
Variables map[string]interface{} `json:"variables,omitempty"` Variables map[string]any `json:"variables,omitempty"`
Data map[string]interface{} `json:"data,omitempty"` Data map[string]any `json:"data,omitempty"`
Partials map[string]string `json:"partials,omitempty"` Partials map[string]string `json:"partials,omitempty"`
Helpers []string `json:"helpers,omitempty"` Helpers []string `json:"helpers,omitempty"`
CacheEnabled bool `json:"cache_enabled"` CacheEnabled bool `json:"cache_enabled"`
} }
// WorkflowConfig defines workflows // WorkflowConfig defines workflows
type WorkflowConfig struct { type WorkflowConfig struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
Description string `json:"description,omitempty"` Description string `json:"description,omitempty"`
Version string `json:"version,omitempty"` Version string `json:"version,omitempty"`
Nodes []NodeConfig `json:"nodes"` Nodes []NodeConfig `json:"nodes"`
Edges []EdgeConfig `json:"edges"` Edges []EdgeConfig `json:"edges"`
Variables map[string]interface{} `json:"variables,omitempty"` Variables map[string]any `json:"variables,omitempty"`
Triggers []TriggerConfig `json:"triggers,omitempty"` Triggers []TriggerConfig `json:"triggers,omitempty"`
SubWorkflows []SubWorkflowConfig `json:"sub_workflows,omitempty"` SubWorkflows []SubWorkflowConfig `json:"sub_workflows,omitempty"`
JSONSchema *JSONSchemaConfig `json:"json_schema,omitempty"` JSONSchema *JSONSchemaConfig `json:"json_schema,omitempty"`
} }
// NodeConfig defines workflow nodes // NodeConfig defines workflow nodes
type NodeConfig struct { type NodeConfig struct {
ID string `json:"id"` ID string `json:"id"`
Type string `json:"type"` Type string `json:"type"`
Name string `json:"name"` Name string `json:"name"`
Description string `json:"description,omitempty"` Description string `json:"description,omitempty"`
Function string `json:"function,omitempty"` Function string `json:"function,omitempty"`
SubWorkflow string `json:"sub_workflow,omitempty"` SubWorkflow string `json:"sub_workflow,omitempty"`
Input map[string]interface{} `json:"input,omitempty"` Input map[string]any `json:"input,omitempty"`
Output map[string]interface{} `json:"output,omitempty"` Output map[string]any `json:"output,omitempty"`
InputMapping map[string]interface{} `json:"input_mapping,omitempty"` InputMapping map[string]any `json:"input_mapping,omitempty"`
OutputMapping map[string]interface{} `json:"output_mapping,omitempty"` OutputMapping map[string]any `json:"output_mapping,omitempty"`
Config map[string]interface{} `json:"config,omitempty"` Config map[string]any `json:"config,omitempty"`
Conditions []ConditionConfig `json:"conditions,omitempty"` Conditions []ConditionConfig `json:"conditions,omitempty"`
ErrorHandling *ErrorHandlingConfig `json:"error_handling,omitempty"` ErrorHandling *ErrorHandlingConfig `json:"error_handling,omitempty"`
Timeout string `json:"timeout,omitempty"` Timeout string `json:"timeout,omitempty"`
Retry *RetryConfig `json:"retry,omitempty"` Retry *RetryConfig `json:"retry,omitempty"`
} }
// EdgeConfig defines workflow edges // EdgeConfig defines workflow edges
@@ -175,87 +175,87 @@ type EdgeConfig struct {
// ConditionConfig defines conditional logic // ConditionConfig defines conditional logic
type ConditionConfig struct { type ConditionConfig struct {
Field string `json:"field"` Field string `json:"field"`
Operator string `json:"operator"` Operator string `json:"operator"`
Value interface{} `json:"value"` Value any `json:"value"`
Logic string `json:"logic,omitempty"` // "AND", "OR" Logic string `json:"logic,omitempty"` // "AND", "OR"
} }
// TriggerConfig defines workflow triggers // TriggerConfig defines workflow triggers
type TriggerConfig struct { type TriggerConfig struct {
Type string `json:"type"` // "http", "cron", "event" Type string `json:"type"` // "http", "cron", "event"
Config map[string]interface{} `json:"config"` Config map[string]any `json:"config"`
Enabled bool `json:"enabled"` Enabled bool `json:"enabled"`
Conditions []ConditionConfig `json:"conditions,omitempty"` Conditions []ConditionConfig `json:"conditions,omitempty"`
} }
// SubWorkflowConfig defines sub-workflow mappings // SubWorkflowConfig defines sub-workflow mappings
type SubWorkflowConfig struct { type SubWorkflowConfig struct {
ID string `json:"id"` ID string `json:"id"`
WorkflowID string `json:"workflow_id"` WorkflowID string `json:"workflow_id"`
InputMapping map[string]interface{} `json:"input_mapping,omitempty"` InputMapping map[string]any `json:"input_mapping,omitempty"`
OutputMapping map[string]interface{} `json:"output_mapping,omitempty"` OutputMapping map[string]any `json:"output_mapping,omitempty"`
} }
// JSONSchemaConfig defines JSON schema validation // JSONSchemaConfig defines JSON schema validation
type JSONSchemaConfig struct { type JSONSchemaConfig struct {
Input map[string]interface{} `json:"input,omitempty"` Input map[string]any `json:"input,omitempty"`
Output map[string]interface{} `json:"output,omitempty"` Output map[string]any `json:"output,omitempty"`
} }
// FunctionConfig defines custom functions with complete flexibility // FunctionConfig defines custom functions with complete flexibility
type FunctionConfig struct { type FunctionConfig struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
Description string `json:"description,omitempty"` Description string `json:"description,omitempty"`
Type string `json:"type"` // "http", "expression", "template", "js", "builtin" Type string `json:"type"` // "http", "expression", "template", "js", "builtin"
Handler string `json:"handler,omitempty"` Handler string `json:"handler,omitempty"`
Method string `json:"method,omitempty"` // For HTTP functions Method string `json:"method,omitempty"` // For HTTP functions
URL string `json:"url,omitempty"` // For HTTP functions URL string `json:"url,omitempty"` // For HTTP functions
Headers map[string]interface{} `json:"headers,omitempty"` // For HTTP functions Headers map[string]any `json:"headers,omitempty"` // For HTTP functions
Body string `json:"body,omitempty"` // For HTTP request body template Body string `json:"body,omitempty"` // For HTTP request body template
Code string `json:"code,omitempty"` // For custom code functions Code string `json:"code,omitempty"` // For custom code functions
Template string `json:"template,omitempty"` // For template functions Template string `json:"template,omitempty"` // For template functions
Expression string `json:"expression,omitempty"` // For expression functions Expression string `json:"expression,omitempty"` // For expression functions
Parameters map[string]interface{} `json:"parameters,omitempty"` // Generic parameters Parameters map[string]any `json:"parameters,omitempty"` // Generic parameters
Returns map[string]interface{} `json:"returns,omitempty"` // Generic return definition Returns map[string]any `json:"returns,omitempty"` // Generic return definition
Response map[string]interface{} `json:"response,omitempty"` // Response structure Response map[string]any `json:"response,omitempty"` // Response structure
Config map[string]interface{} `json:"config,omitempty"` Config map[string]any `json:"config,omitempty"`
Async bool `json:"async"` Async bool `json:"async"`
Timeout string `json:"timeout,omitempty"` Timeout string `json:"timeout,omitempty"`
} }
// Note: ParameterConfig removed - using generic map[string]interface{} for parameters // Note: ParameterConfig removed - using generic map[string]any for parameters
// ValidatorConfig defines validation rules with complete flexibility // ValidatorConfig defines validation rules with complete flexibility
type ValidatorConfig struct { type ValidatorConfig struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name,omitempty"` Name string `json:"name,omitempty"`
Type string `json:"type"` // "jsonschema", "custom", "regex", "builtin" Type string `json:"type"` // "jsonschema", "custom", "regex", "builtin"
Field string `json:"field,omitempty"` Field string `json:"field,omitempty"`
Schema interface{} `json:"schema,omitempty"` Schema any `json:"schema,omitempty"`
Rules []ValidationRule `json:"rules,omitempty"` // Array of validation rules Rules []ValidationRule `json:"rules,omitempty"` // Array of validation rules
Messages map[string]string `json:"messages,omitempty"` Messages map[string]string `json:"messages,omitempty"`
Expression string `json:"expression,omitempty"` // For expression-based validation Expression string `json:"expression,omitempty"` // For expression-based validation
Config map[string]interface{} `json:"config,omitempty"` Config map[string]any `json:"config,omitempty"`
StrictMode bool `json:"strict_mode"` StrictMode bool `json:"strict_mode"`
AllowEmpty bool `json:"allow_empty"` AllowEmpty bool `json:"allow_empty"`
} }
// ValidationRule defines individual validation rules with flexibility // ValidationRule defines individual validation rules with flexibility
type ValidationRule struct { type ValidationRule struct {
Field string `json:"field,omitempty"` Field string `json:"field,omitempty"`
Type string `json:"type"` Type string `json:"type"`
Required bool `json:"required,omitempty"` Required bool `json:"required,omitempty"`
Value interface{} `json:"value,omitempty"` // Generic value field for min/max, patterns, etc. Value any `json:"value,omitempty"` // Generic value field for min/max, patterns, etc.
Min interface{} `json:"min,omitempty"` Min any `json:"min,omitempty"`
Max interface{} `json:"max,omitempty"` Max any `json:"max,omitempty"`
Pattern string `json:"pattern,omitempty"` Pattern string `json:"pattern,omitempty"`
Expression string `json:"expression,omitempty"` // For custom expressions Expression string `json:"expression,omitempty"` // For custom expressions
CustomRule string `json:"custom_rule,omitempty"` CustomRule string `json:"custom_rule,omitempty"`
Message string `json:"message,omitempty"` Message string `json:"message,omitempty"`
Config map[string]interface{} `json:"config,omitempty"` Config map[string]any `json:"config,omitempty"`
Conditions []ConditionConfig `json:"conditions,omitempty"` Conditions []ConditionConfig `json:"conditions,omitempty"`
} }
// Generic runtime types for the JSON engine // Generic runtime types for the JSON engine
@@ -269,14 +269,14 @@ type JSONEngine struct {
functions map[string]*Function functions map[string]*Function
validators map[string]*Validator validators map[string]*Validator
middleware map[string]*Middleware middleware map[string]*Middleware
data map[string]interface{} data map[string]any
genericData map[string]interface{} // For any custom data structures genericData map[string]any // For any custom data structures
} }
type Template struct { type Template struct {
ID string ID string
Config TemplateConfig Config TemplateConfig
Compiled interface{} Compiled any
} }
type Workflow struct { type Workflow struct {
@@ -291,8 +291,8 @@ type Node struct {
ID string ID string
Config NodeConfig Config NodeConfig
Function *Function Function *Function
Inputs map[string]interface{} Inputs map[string]any
Outputs map[string]interface{} Outputs map[string]any
} }
type Edge struct { type Edge struct {
@@ -306,16 +306,16 @@ type Edge struct {
type Function struct { type Function struct {
ID string ID string
Config FunctionConfig Config FunctionConfig
Handler interface{} // Can be any type of handler Handler any // Can be any type of handler
Runtime map[string]interface{} // Runtime state/context Runtime map[string]any // Runtime state/context
} }
// Validator represents a compiled validator with generic rules // Validator represents a compiled validator with generic rules
type Validator struct { type Validator struct {
ID string ID string
Config ValidatorConfig Config ValidatorConfig
Rules []ValidationRule // Array of validation rules to match ValidatorConfig Rules []ValidationRule // Array of validation rules to match ValidatorConfig
Runtime map[string]interface{} // Runtime context Runtime map[string]any // Runtime context
} }
type Middleware struct { type Middleware struct {
@@ -325,8 +325,8 @@ type Middleware struct {
} }
type WorkflowRuntime struct { type WorkflowRuntime struct {
Context map[string]interface{} Context map[string]any
Variables map[string]interface{} Variables map[string]any
Status string Status string
Error error Error error
} }
@@ -334,15 +334,15 @@ type WorkflowRuntime struct {
// ExecutionContext for runtime with complete flexibility // ExecutionContext for runtime with complete flexibility
type ExecutionContext struct { type ExecutionContext struct {
Request *fiber.Ctx Request *fiber.Ctx
Data map[string]interface{} Data map[string]any
Variables map[string]interface{} Variables map[string]any
Session map[string]interface{} Session map[string]any
User map[string]interface{} User map[string]any
Workflow *Workflow Workflow *Workflow
Node *Node Node *Node
Functions map[string]*Function Functions map[string]*Function
Validators map[string]*Validator Validators map[string]*Validator
Config *AppConfiguration // Access to full config Config *AppConfiguration // Access to full config
Runtime map[string]interface{} // Runtime state Runtime map[string]any // Runtime state
Context map[string]interface{} // Additional context data Context map[string]any // Additional context data
} }

View File

@@ -328,7 +328,7 @@ func prepareNode(flow *dag.DAG, node Node) error {
return nil return nil
} }
func mapProviders(dataProviders interface{}) []dag.Provider { func mapProviders(dataProviders any) []dag.Provider {
var providers []dag.Provider var providers []dag.Provider
err := Map(&providers, dataProviders) err := Map(&providers, dataProviders)
if err != nil { if err != nil {
@@ -956,7 +956,7 @@ func SetupEnhancedAPI(prefix string, router fiber.Router, brokerAddr string) err
// Helper functions for enhanced features (simplified implementation) // Helper functions for enhanced features (simplified implementation)
// addEnhancedNode is a placeholder for future enhanced node functionality // addEnhancedNode is a placeholder for future enhanced node functionality
func addEnhancedNode(enhancedDAG interface{}, node EnhancedNode) error { func addEnhancedNode(enhancedDAG any, node EnhancedNode) error {
// For now, this is a placeholder implementation // For now, this is a placeholder implementation
// In the future, this would add enhanced nodes with workflow capabilities // In the future, this would add enhanced nodes with workflow capabilities
return nil return nil

View File

@@ -547,8 +547,8 @@ func (c *UserConfig) IsEnhancedHandler(handlerName string) bool {
} }
// GetAllHandlers returns both traditional and enhanced handlers // GetAllHandlers returns both traditional and enhanced handlers
func (c *UserConfig) GetAllHandlers() map[string]interface{} { func (c *UserConfig) GetAllHandlers() map[string]any {
handlers := make(map[string]interface{}) handlers := make(map[string]any)
// Add traditional handlers // Add traditional handlers
for _, handler := range c.Policy.Handlers { for _, handler := range c.Policy.Handlers {
@@ -564,7 +564,7 @@ func (c *UserConfig) GetAllHandlers() map[string]interface{} {
} }
// GetHandlerByKey returns either traditional or enhanced handler by key // GetHandlerByKey returns either traditional or enhanced handler by key
func (c *UserConfig) GetHandlerByKey(key string) interface{} { func (c *UserConfig) GetHandlerByKey(key string) any {
// Check traditional handlers first // Check traditional handlers first
if handler := c.GetHandler(key); handler != nil { if handler := c.GetHandler(key); handler != nil {
return *handler return *handler
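
Because `GetAllHandlers` and `GetHandlerByKey` now return `any` values that may hold either a traditional or an enhanced handler, callers recover the concrete kind with a type assertion or a `%T` inspection. A hedged sketch of one way to do that; `logHandlers` is a hypothetical helper, not part of this commit, and assumes it lives in the same package as `UserConfig` with the standard `log` package imported:

```go
// logHandlers is a hypothetical helper (not defined in this commit). It walks the
// map[string]any returned by GetAllHandlers and reports each entry's concrete type,
// which is how a caller would tell traditional handlers apart from enhanced ones.
func logHandlers(cfg *UserConfig) {
	for key, h := range cfg.GetAllHandlers() {
		log.Printf("handler %q -> %T", key, h)
	}
}
```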

View File

@@ -96,13 +96,13 @@ func (pq PriorityQueue) Swap(i, j int) {
pq[i].index = i pq[i].index = i
pq[j].index = j pq[j].index = j
} }
func (pq *PriorityQueue) Push(x interface{}) { func (pq *PriorityQueue) Push(x any) {
n := len(*pq) n := len(*pq)
task := x.(*QueueTask) task := x.(*QueueTask)
task.index = n task.index = n
*pq = append(*pq, task) *pq = append(*pq, task)
} }
func (pq *PriorityQueue) Pop() interface{} { func (pq *PriorityQueue) Pop() any {
old := *pq old := *pq
n := len(old) n := len(old)
task := old[n-1] task := old[n-1]