diff --git a/.github/workflows/installer-release.yml b/.github/workflows/installer-release.yml index 5454628ac..7af16072f 100644 --- a/.github/workflows/installer-release.yml +++ b/.github/workflows/installer-release.yml @@ -180,13 +180,12 @@ jobs: -X 'github.com/utmstack/UTMStack/installer/config.INSTALLER_VERSION=${{ inputs.version }}' \ -X 'github.com/utmstack/UTMStack/installer/config.REPLACE=${{ secrets.CM_ENCRYPT_SALT }}' \ -X 'github.com/utmstack/UTMStack/installer/config.PUBLIC_KEY=${{ secrets.CM_SIGN_PUBLIC_KEY }}'" . - mv installer /home/utmstack/installer - chmod +x /home/utmstack/installer + mv installer /usr/local/bin/utmstack_installer + chmod +x /usr/local/bin/utmstack_installer - name: Run Installer - working-directory: /home/utmstack run: | - sudo ./installer + sudo /usr/local/bin/utmstack_installer # ============================================ # V11 RC - Upload to prerelease only diff --git a/agent-manager/Dockerfile b/agent-manager/Dockerfile index 41e405726..3d020c1a7 100644 --- a/agent-manager/Dockerfile +++ b/agent-manager/Dockerfile @@ -20,8 +20,8 @@ RUN GRPCURL_VERSION=1.8.1 && \ # Expose the gRPC agent-manager port -EXPOSE 50051 -EXPOSE 8080 +EXPOSE 9000 +EXPOSE 9001 # Set the health check # HEALTHCHECK --interval=60s --timeout=5s --start-period=5s --retries=3 CMD grpcurl -insecure -plaintext -d '{"service": ""}' localhost:50051 grpc.health.v1.Health/Check | jq -e '.status == "SERVING"' || exit 1 diff --git a/agent-manager/agent/collector_imp.go b/agent-manager/agent/collector_imp.go index e0f27de69..a4b9e3ee0 100644 --- a/agent-manager/agent/collector_imp.go +++ b/agent-manager/agent/collector_imp.go @@ -63,7 +63,8 @@ func InitCollectorService() { collectors := []models.Collector{} _, err := CollectorServ.DBConnection.GetAll(&collectors, "") if err != nil { - catcher.Error("failed to fetch collectors", err, map[string]any{"process": "agent-manager"}) + _ = catcher.Error("failed to fetch collectors", err, map[string]any{"process": 
"agent-manager"}) + time.Sleep(5 * time.Second) os.Exit(1) } for _, c := range collectors { diff --git a/agent-manager/agent/utmgrpc.go b/agent-manager/agent/utmgrpc.go index 476de475f..11bb4d389 100644 --- a/agent-manager/agent/utmgrpc.go +++ b/agent-manager/agent/utmgrpc.go @@ -4,6 +4,7 @@ import ( "crypto/tls" "net" "os" + "time" "github.com/threatwinds/go-sdk/catcher" "github.com/utmstack/UTMStack/agent-manager/config" @@ -16,7 +17,8 @@ import ( func InitGrpcServer() { err := InitAgentService() if err != nil { - catcher.Error("failed to init agent service", err, map[string]any{"process": "agent-manager"}) + _ = catcher.Error("failed to init agent service", err, map[string]any{"process": "agent-manager"}) + time.Sleep(5 * time.Second) os.Exit(1) } @@ -27,15 +29,17 @@ func InitGrpcServer() { } func StartGrpcServer() { - listener, err := net.Listen("tcp", "0.0.0.0:50051") + listener, err := net.Listen("tcp", "0.0.0.0:9000") if err != nil { - catcher.Error("failed to listen", err, map[string]any{"process": "agent-manager"}) + _ = catcher.Error("failed to listen", err, map[string]any{"process": "agent-manager"}) + time.Sleep(5 * time.Second) os.Exit(1) } loadedCert, err := tls.LoadX509KeyPair(config.CertPath, config.CertKeyPath) if err != nil { - catcher.Error("failed to load TLS credentials: %v", err, map[string]any{"process": "agent-manager"}) + _ = catcher.Error("failed to load TLS credentials: %v", err, map[string]any{"process": "agent-manager"}) + time.Sleep(5 * time.Second) os.Exit(1) } @@ -59,9 +63,10 @@ func StartGrpcServer() { grpc_health_v1.RegisterHealthServer(grpcServer, healthServer) healthServer.SetServingStatus("", grpc_health_v1.HealthCheckResponse_SERVING) - catcher.Info("Starting gRPC server on 0.0.0.0:50051", map[string]any{"process": "agent-manager"}) + catcher.Info("Starting gRPC server on 0.0.0.0:9000", map[string]any{"process": "agent-manager"}) if err := grpcServer.Serve(listener); err != nil { - catcher.Error("failed to serve", err, 
map[string]any{"process": "agent-manager"}) + _ = catcher.Error("failed to serve", err, map[string]any{"process": "agent-manager"}) + time.Sleep(5 * time.Second) os.Exit(1) } } diff --git a/agent-manager/go.mod b/agent-manager/go.mod index ed325ec07..08a1bc007 100644 --- a/agent-manager/go.mod +++ b/agent-manager/go.mod @@ -4,7 +4,6 @@ go 1.25.5 require ( github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0 - github.com/gin-contrib/gzip v1.2.5 github.com/gin-gonic/gin v1.11.0 github.com/google/uuid v1.6.0 github.com/utmstack/config-client-go v1.2.7 @@ -16,14 +15,8 @@ require ( require ( github.com/bytedance/gopkg v0.1.3 // indirect - github.com/goccy/go-yaml v1.19.2 // indirect - github.com/quic-go/qpack v0.6.0 // indirect - github.com/quic-go/quic-go v0.59.0 // indirect -) - -require ( - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -31,6 +24,7 @@ require ( github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/validator/v10 v10.30.1 // indirect github.com/goccy/go-json v0.10.5 // indirect + github.com/goccy/go-yaml v1.19.2 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect github.com/jackc/pgx/v5 v5.8.0 // indirect @@ -44,7 +38,9 @@ require ( github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect - github.com/threatwinds/go-sdk v1.1.7 + github.com/quic-go/qpack v0.6.0 // indirect + github.com/quic-go/quic-go v0.59.0 // indirect + github.com/threatwinds/go-sdk v1.1.8 github.com/twitchyliquid64/golang-asm v0.15.1 // 
indirect github.com/ugorji/go/codec v1.3.1 // indirect golang.org/x/arch v0.23.0 // indirect @@ -53,5 +49,5 @@ require ( golang.org/x/sync v0.19.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260122232226-8e98ce8d340d // indirect ) diff --git a/agent-manager/go.sum b/agent-manager/go.sum index 478c99bfc..189507d1b 100644 --- a/agent-manager/go.sum +++ b/agent-manager/go.sum @@ -2,10 +2,10 @@ github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0 h1:TBiBl9KCa4i4epY0/q9WSC4ugavL github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0/go.mod h1:cRhQ3TS/VEfu/z+qaciyuDZdtxgaXgaX8+G6Wa5NzBk= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -14,8 +14,6 @@ github.com/davecgh/go-spew 
v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= -github.com/gin-contrib/gzip v1.2.5 h1:fIZs0S+l17pIu1P5XRJOo/YNqfIuPCrZZ3TWB7pjckI= -github.com/gin-contrib/gzip v1.2.5/go.mod h1:aomRgR7ftdZV3uWY0gW/m8rChfxau0n8YVvwlOHONzw= github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk= @@ -89,8 +87,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.8 h1:jDd6HH4GZVRswv9ToaVU+xcyzNlKnA7f1lf/e1Xyt3A= +github.com/threatwinds/go-sdk v1.1.8/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY= @@ -126,8 +124,8 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 
h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260122232226-8e98ce8d340d h1:xXzuihhT3gL/ntduUZwHECzAn57E8dA6l8SOtYWdD8Q= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260122232226-8e98ce8d340d/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/agent-manager/main.go b/agent-manager/main.go index b8c64e77b..c9c3b3c5c 100644 --- a/agent-manager/main.go +++ b/agent-manager/main.go @@ -2,6 +2,7 @@ package main import ( "os" + "time" "github.com/threatwinds/go-sdk/catcher" "github.com/utmstack/UTMStack/agent-manager/agent" @@ -15,6 +16,7 @@ func main() { err := database.MigrateDatabase() if err != nil { _ = catcher.Error("failed to migrate database", err, map[string]any{"process": "agent-manager"}) + time.Sleep(5 * time.Second) os.Exit(1) } diff --git a/agent-manager/updates/updates.go b/agent-manager/updates/updates.go index 813bec7bf..b5fb7890a 100644 --- a/agent-manager/updates/updates.go +++ b/agent-manager/updates/updates.go @@ -60,12 +60,12 @@ func ServeDependencies() { } server := &http.Server{ - Addr: ":8080", + Addr: ":9001", Handler: r, TLSConfig: tlsConfig, } - catcher.Info("Starting HTTP server on port 8080", map[string]any{"process": "agent-manager"}) + catcher.Info("Starting HTTP server on port 9001", map[string]any{"process": "agent-manager"}) if err := server.ListenAndServeTLS("", ""); err != nil { _ = 
catcher.Error("error starting HTTP server", err, map[string]any{"process": "agent-manager"}) return diff --git a/agent/go.mod b/agent/go.mod index 22130d818..c43246ec4 100644 --- a/agent/go.mod +++ b/agent/go.mod @@ -8,6 +8,7 @@ require ( github.com/glebarez/sqlite v1.11.0 github.com/google/uuid v1.6.0 github.com/kardianos/service v1.2.4 + github.com/netsampler/goflow2 v1.3.7 github.com/tehmaze/netflow v0.0.0-20240303214733-8c13bb004068 github.com/threatwinds/go-sdk v1.1.7 github.com/threatwinds/logger v1.2.3 diff --git a/agent/go.sum b/agent/go.sum index 8ff3492f3..99830d697 100644 --- a/agent/go.sum +++ b/agent/go.sum @@ -87,6 +87,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w= github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/netsampler/goflow2 v1.3.7 h1:XZaTy8kkMnGXpJ9hS3KbO1McyrFTpVNhVFEx9rNhMmc= +github.com/netsampler/goflow2 v1.3.7/go.mod h1:4UZsVGVAs//iMCptUHn3WNScztJeUhZH7kDW2+/vDdQ= github.com/opensearch-project/opensearch-go/v4 v4.6.0 h1:Ac8aLtDSmLEyOmv0r1qhQLw3b4vcUhE42NE9k+Z4cRc= github.com/opensearch-project/opensearch-go/v4 v4.6.0/go.mod h1:3iZtb4SNt3IzaxavKq0dURh1AmtVgYW71E4XqmYnIiQ= github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= diff --git a/agent/main.go b/agent/main.go index 60bd7a575..395a831b5 100644 --- a/agent/main.go +++ b/agent/main.go @@ -3,6 +3,7 @@ package main import ( "fmt" "os" + "path/filepath" "time" pb "github.com/utmstack/UTMStack/agent/agent" @@ -214,6 +215,20 @@ func main() { case "uninstall": fmt.Println("Uninstalling UTMStackAgent service ...") + fmt.Print("Stopping UTMStackUpdater service... 
") + updaterPath := filepath.Join(utils.GetMyPath(), fmt.Sprintf(config.UpdaterFile, "")) + if utils.CheckIfPathExist(updaterPath) { + err := utils.Execute(updaterPath, utils.GetMyPath(), "uninstall") + if err != nil { + fmt.Printf("Warning: %v\n", err) + } else { + fmt.Println("[OK]") + } + time.Sleep(2 * time.Second) + } else { + fmt.Println("[SKIPPED - not found]") + } + cnf, err := config.GetCurrentConfig() if err != nil { fmt.Println("Error getting config: ", err) diff --git a/agent/modules/configuration.go b/agent/modules/configuration.go index dd5bf90e6..4088d080f 100644 --- a/agent/modules/configuration.go +++ b/agent/modules/configuration.go @@ -77,7 +77,7 @@ func ChangeIntegrationStatus(logTyp string, proto string, isEnabled bool, tlsOpt mod := GetModule(logTyp) if mod != nil && mod.IsPortListen(proto) { mod.DisablePort(proto) - time.Sleep(100 * time.Millisecond) + time.Sleep(200 * time.Millisecond) err := mod.EnablePort(proto, true) if err != nil { return "", fmt.Errorf("error enabling TLS on running module: %v", err) @@ -89,7 +89,7 @@ func ChangeIntegrationStatus(logTyp string, proto string, isEnabled bool, tlsOpt mod := GetModule(logTyp) if mod != nil && mod.IsPortListen(proto) { mod.DisablePort(proto) - time.Sleep(100 * time.Millisecond) + time.Sleep(200 * time.Millisecond) err := mod.EnablePort(proto, false) if err != nil { return "", fmt.Errorf("error disabling TLS on running module: %v", err) @@ -248,7 +248,7 @@ func EnableTLSForIntegration(logTyp string, proto string) (string, error) { mod := GetModule(logTyp) if mod != nil && mod.IsPortListen(proto) { mod.DisablePort(proto) - time.Sleep(100 * time.Millisecond) + time.Sleep(200 * time.Millisecond) err := mod.EnablePort(proto, true) if err != nil { return port, fmt.Errorf("error enabling TLS on running module: %v", err) @@ -278,7 +278,7 @@ func DisableTLSForIntegration(logTyp string, proto string) error { mod := GetModule(logTyp) if mod != nil && mod.IsPortListen(proto) { mod.DisablePort(proto) - 
time.Sleep(100 * time.Millisecond) + time.Sleep(200 * time.Millisecond) err := mod.EnablePort(proto, false) if err != nil { return fmt.Errorf("error disabling TLS on running module: %v", err) diff --git a/agent/modules/modules.go b/agent/modules/modules.go index aa8424d58..b5e247cba 100644 --- a/agent/modules/modules.go +++ b/agent/modules/modules.go @@ -84,6 +84,9 @@ func StartModules() { } if conf[0] { moCache[index].DisablePort(proto) + if conf[1] { + time.Sleep(200 * time.Millisecond) + } } if changeAllowed { moCache[index].SetNewPort(proto, port) diff --git a/agent/modules/netflow.go b/agent/modules/netflow.go index ed245aaae..e3ff518b2 100644 --- a/agent/modules/netflow.go +++ b/agent/modules/netflow.go @@ -3,14 +3,18 @@ package modules import ( "bytes" "context" + "encoding/binary" "errors" "fmt" "net" "strconv" + "strings" "sync" "time" - "github.com/tehmaze/netflow" + "github.com/netsampler/goflow2/decoders/netflow" + "github.com/netsampler/goflow2/decoders/netflowlegacy" + tehmaze "github.com/tehmaze/netflow" "github.com/tehmaze/netflow/session" "github.com/utmstack/UTMStack/agent/config" "github.com/utmstack/UTMStack/agent/logservice" @@ -23,44 +27,128 @@ var ( netflowOnce sync.Once ) +// templateSystem implements netflow.NetFlowTemplateSystem for goflow2 +type templateSystem struct { + templates map[uint16]map[uint32]map[uint16]interface{} + mu sync.RWMutex +} + +func newTemplateSystem() *templateSystem { + return &templateSystem{ + templates: make(map[uint16]map[uint32]map[uint16]interface{}), + } +} + +func (t *templateSystem) GetTemplate(version uint16, obsDomainId uint32, templateId uint16) (interface{}, error) { + t.mu.RLock() + defer t.mu.RUnlock() + + if versionMap, ok := t.templates[version]; ok { + if domainMap, ok := versionMap[obsDomainId]; ok { + if template, ok := domainMap[templateId]; ok { + return template, nil + } + } + } + return nil, fmt.Errorf("template not found: version=%d, obsDomainId=%d, templateId=%d", version, obsDomainId, 
templateId) +} + +func (t *templateSystem) AddTemplate(version uint16, obsDomainId uint32, template interface{}) { + t.mu.Lock() + defer t.mu.Unlock() + + if _, ok := t.templates[version]; !ok { + t.templates[version] = make(map[uint32]map[uint16]interface{}) + } + if _, ok := t.templates[version][obsDomainId]; !ok { + t.templates[version][obsDomainId] = make(map[uint16]interface{}) + } + + // Extract template ID based on type + var templateId uint16 + switch tmpl := template.(type) { + case netflow.TemplateRecord: + templateId = tmpl.TemplateId + case netflow.IPFIXOptionsTemplateRecord: + templateId = tmpl.TemplateId + case netflow.NFv9OptionsTemplateRecord: + templateId = tmpl.TemplateId + default: + return + } + + t.templates[version][obsDomainId][templateId] = template +} + type NetflowModule struct { - DataType string - Parser parser.Parser - Decoders map[string]*netflow.Decoder - Listener *net.UDPConn - CTX context.Context - Cancel context.CancelFunc - IsEnabled bool + DataType string + Parser parser.Parser + LegacyDecoders map[string]*tehmaze.Decoder // For v1, v6, v7 (tehmaze/netflow) + TemplateSystem map[string]*templateSystem // For v5, v9, IPFIX (goflow2) + Listener *net.UDPConn + CTX context.Context + Cancel context.CancelFunc + IsEnabled bool + mu sync.RWMutex } func GetNetflowModule() *NetflowModule { netflowOnce.Do(func() { netflowModule = &NetflowModule{ - Parser: parser.GetParser("netflow"), - DataType: "netflow", - IsEnabled: false, - Decoders: make(map[string]*netflow.Decoder), + Parser: parser.GetParser("netflow"), + DataType: "netflow", + IsEnabled: false, + LegacyDecoders: make(map[string]*tehmaze.Decoder), + TemplateSystem: make(map[string]*templateSystem), } }) return netflowModule } +func (m *NetflowModule) getOrCreateTemplateSystem(addr string) *templateSystem { + m.mu.Lock() + defer m.mu.Unlock() + + if ts, ok := m.TemplateSystem[addr]; ok { + return ts + } + ts := newTemplateSystem() + m.TemplateSystem[addr] = ts + return ts +} + +func 
(m *NetflowModule) getOrCreateLegacyDecoder(addr string) *tehmaze.Decoder { + m.mu.Lock() + defer m.mu.Unlock() + + if d, ok := m.LegacyDecoders[addr]; ok { + return d + } + s := session.New() + d := tehmaze.NewDecoder(s) + m.LegacyDecoders[addr] = d + return d +} + +func (m *NetflowModule) removeLegacyDecoder(addr string) { + m.mu.Lock() + defer m.mu.Unlock() + delete(m.LegacyDecoders, addr) +} + func (m *NetflowModule) EnablePort(proto string, enableTLS bool) error { if enableTLS { return fmt.Errorf("TLS not supported for NetFlow protocol") } if proto == "udp" && !m.IsEnabled { - utils.Logger.Info("Server %s listening in port: %s protocol: UDP", m.DataType, config.ProtoPorts[config.DataTypeNetflow].UDP) - m.IsEnabled = true - port, err := strconv.Atoi(config.ProtoPorts[config.DataTypeNetflow].UDP) if err != nil { utils.Logger.ErrorF("error converting port to int: %v", err) return err } - m.Listener, err = net.ListenUDP("udp", &net.UDPAddr{ + listener, err := net.ListenUDP("udp", &net.UDPAddr{ Port: port, IP: net.ParseIP("0.0.0.0"), }) @@ -69,9 +157,13 @@ func (m *NetflowModule) EnablePort(proto string, enableTLS bool) error { return err } + m.IsEnabled = true + m.Listener = listener m.CTX, m.Cancel = context.WithCancel(context.Background()) - buffer := make([]byte, 2048) + utils.Logger.Info("Server %s listening in port: %s protocol: UDP", m.DataType, config.ProtoPorts[config.DataTypeNetflow].UDP) + + buffer := make([]byte, 65535) go func() { for { @@ -97,16 +189,54 @@ func (m *NetflowModule) EnablePort(proto string, enableTLS bool) error { continue } - d, found := m.Decoders[addr.String()] - if !found { - s := session.New() - d = netflow.NewDecoder(s) - m.Decoders[addr.String()] = d + // Validate packet structure before attempting to decode + packetData := buffer[:length] + packetInfo, validationErr := validateNetflowPacket(packetData) + if validationErr != nil { + utils.Logger.ErrorF("invalid NetFlow packet from %s (length: %d bytes): %v", addr.String(), length, 
validationErr) + continue } - message, err := d.Read(bytes.NewBuffer(buffer[:length])) - if err != nil { - utils.Logger.ErrorF("error decoding NetFlow message: %v", err) + var message interface{} + + // Use hybrid approach: goflow2 for v5/v9/IPFIX, tehmaze for v1/v6/v7 + switch packetInfo.version { + case 5: + // Use goflow2 for NetFlow v5 + msg, err := netflowlegacy.DecodeMessage(bytes.NewBuffer(packetData)) + if err != nil { + utils.Logger.ErrorF("error decoding %s message from %s: %v", packetInfo.versionName, addr.String(), err) + continue + } + message = msg + + case 9, 10: + // Use goflow2 for NetFlow v9 and IPFIX + ts := m.getOrCreateTemplateSystem(addr.String()) + msg, err := netflow.DecodeMessage(bytes.NewBuffer(packetData), ts) + if err != nil { + // Template not found is expected when data arrives before template + // This is normal NetFlow v9/IPFIX behavior, don't log as error + if !strings.Contains(err.Error(), "template not found") { + utils.Logger.ErrorF("error decoding %s message from %s: %v", packetInfo.versionName, addr.String(), err) + } + continue + } + message = msg + + case 1, 6, 7: + // Use tehmaze/netflow for legacy versions (v1, v6, v7) + d := m.getOrCreateLegacyDecoder(addr.String()) + msg, err := d.Read(bytes.NewBuffer(packetData)) + if err != nil { + utils.Logger.ErrorF("error decoding %s message from %s: %v", packetInfo.versionName, addr.String(), err) + m.removeLegacyDecoder(addr.String()) + continue + } + message = msg + + default: + utils.Logger.ErrorF("unsupported NetFlow version %d from %s", packetInfo.version, addr.String()) continue } @@ -159,3 +289,64 @@ func (m *NetflowModule) GetPort(proto string) string { return "" } } + +// netflowPacketInfo contains basic information about a NetFlow packet for validation +type netflowPacketInfo struct { + version uint16 + count uint16 + minSize int + versionName string +} + +// validateNetflowPacket checks if a NetFlow packet has valid structure before decoding +// Returns packet info if 
valid, error otherwise +func validateNetflowPacket(data []byte) (*netflowPacketInfo, error) { + if len(data) < 4 { + return nil, fmt.Errorf("packet too small: %d bytes (minimum 4 bytes for version and count)", len(data)) + } + + version := binary.BigEndian.Uint16(data[0:2]) + count := binary.BigEndian.Uint16(data[2:4]) + + info := &netflowPacketInfo{ + version: version, + count: count, + } + + switch version { + case 1: + info.versionName = "NetFlow v1" + info.minSize = 24 + int(count)*48 // header (24) + records (48 each) + case 5: + info.versionName = "NetFlow v5" + info.minSize = 24 + int(count)*48 // header (24) + records (48 each) + case 6: + info.versionName = "NetFlow v6" + info.minSize = 24 + int(count)*52 // header (24) + records (52 each) + case 7: + info.versionName = "NetFlow v7" + info.minSize = 24 + int(count)*52 // header (24) + records (52 each) + case 9: + info.versionName = "NetFlow v9" + // NetFlow v9 header is 20 bytes, minimum packet size is just the header + info.minSize = 20 + case 10: + info.versionName = "IPFIX" + // IPFIX header is 16 bytes, field at offset 2-4 is the total message length + info.minSize = 16 + ipfixLength := binary.BigEndian.Uint16(data[2:4]) + if int(ipfixLength) != len(data) { + return nil, fmt.Errorf("IPFIX length mismatch: header says %d bytes, received %d bytes", ipfixLength, len(data)) + } + return info, nil + default: + return nil, fmt.Errorf("unsupported NetFlow version: %d", version) + } + + if len(data) < info.minSize { + return nil, fmt.Errorf("%s packet too small: received %d bytes, minimum expected %d bytes (count=%d)", + info.versionName, len(data), info.minSize, count) + } + + return info, nil +} diff --git a/agent/modules/syslog.go b/agent/modules/syslog.go index 7915d95c3..5d0987a11 100644 --- a/agent/modules/syslog.go +++ b/agent/modules/syslog.go @@ -11,6 +11,7 @@ import ( "os" "strconv" "strings" + "sync" "time" "github.com/threatwinds/go-sdk/entities" @@ -21,6 +22,11 @@ import ( 
"github.com/utmstack/UTMStack/agent/utils" ) +var ( + syslogModules = make(map[string]*SyslogModule) + syslogMutex sync.RWMutex +) + const ( MinBufferSize = 480 RecommendedBufferSize = 2048 @@ -40,6 +46,7 @@ type SyslogModule struct { TCPListener listenerTCP UDPListener listenerUDP Parser parser.Parser + mu sync.RWMutex } type listenerTCP struct { @@ -60,7 +67,14 @@ type listenerUDP struct { } func GetSyslogModule(dataType string, protoPorts config.ProtoPort) *SyslogModule { - return &SyslogModule{ + syslogMutex.Lock() + defer syslogMutex.Unlock() + + if mod, exists := syslogModules[dataType]; exists { + return mod + } + + newModule := &SyslogModule{ DataType: dataType, TCPListener: listenerTCP{ IsEnabled: false, @@ -72,6 +86,9 @@ func GetSyslogModule(dataType string, protoPorts config.ProtoPort) *SyslogModule }, Parser: parser.GetParser(dataType), } + + syslogModules[dataType] = newModule + return newModule } func (m *SyslogModule) GetDataType() string { @@ -79,6 +96,9 @@ func (m *SyslogModule) GetDataType() string { } func (m *SyslogModule) IsPortListen(proto string) bool { + m.mu.RLock() + defer m.mu.RUnlock() + switch proto { case "tcp": return m.TCPListener.IsEnabled @@ -90,7 +110,9 @@ func (m *SyslogModule) IsPortListen(proto string) bool { } func (m *SyslogModule) SetNewPort(proto string, port string) { - // validate port by dataType, ranges allowed and ports in use + m.mu.Lock() + defer m.mu.Unlock() + switch proto { case "tcp": m.TCPListener.Port = port @@ -100,6 +122,9 @@ func (m *SyslogModule) SetNewPort(proto string, port string) { } func (m *SyslogModule) GetPort(proto string) string { + m.mu.RLock() + defer m.mu.RUnlock() + switch proto { case "tcp": return m.TCPListener.Port @@ -143,140 +168,159 @@ func (m *SyslogModule) DisablePort(proto string) { } func (m *SyslogModule) enableTCP() { - if !m.TCPListener.IsEnabled && m.TCPListener.Port != "" { - utils.Logger.Info("Server %s listening in port: %s protocol: TCP", m.DataType, m.TCPListener.Port) - if 
m.TCPListener.TLSEnabled { - utils.Logger.Info("Server %s TLS enabled in port: %s protocol: TCP", m.DataType, m.TCPListener.Port) - } - m.TCPListener.IsEnabled = true + m.mu.Lock() + if m.TCPListener.IsEnabled || m.TCPListener.Port == "" { + m.mu.Unlock() + return + } - listener, err := net.Listen("tcp", "0.0.0.0:"+m.TCPListener.Port) - if err != nil { - utils.Logger.ErrorF("error listening TCP in port %s: %v", m.TCPListener.Port, err) - return - } + listener, err := net.Listen("tcp", "0.0.0.0:"+m.TCPListener.Port) + if err != nil { + m.mu.Unlock() + utils.Logger.ErrorF("error listening TCP in port %s: %v", m.TCPListener.Port, err) + return + } - m.TCPListener.Listener = listener - m.TCPListener.CTX, m.TCPListener.Cancel = context.WithCancel(context.Background()) + // Solo setear IsEnabled DESPUÉS de confirmar que el listener está activo + m.TCPListener.IsEnabled = true + m.TCPListener.Listener = listener + m.TCPListener.CTX, m.TCPListener.Cancel = context.WithCancel(context.Background()) + m.mu.Unlock() - go func() { - defer func() { - err = m.TCPListener.Listener.Close() - if err != nil { - utils.Logger.ErrorF("error closing tcp listener: %v", err) - } - }() - for { - select { - case <-m.TCPListener.CTX.Done(): - return - default: - conn, err := m.TCPListener.Listener.Accept() - if err != nil { - if errors.Is(err, net.ErrClosed) { - return - } + utils.Logger.Info("Server %s listening in port: %s protocol: TCP", m.DataType, m.TCPListener.Port) + if m.TCPListener.TLSEnabled { + utils.Logger.Info("Server %s TLS enabled in port: %s protocol: TCP", m.DataType, m.TCPListener.Port) + } - var netOpErr *net.OpError - ok := errors.As(err, &netOpErr) - if ok && netOpErr.Timeout() { - continue - } + go func() { + defer func() { + err = m.TCPListener.Listener.Close() + if err != nil { + utils.Logger.ErrorF("error closing tcp listener: %v", err) + } + }() + for { + select { + case <-m.TCPListener.CTX.Done(): + return + default: + conn, err := m.TCPListener.Listener.Accept() + 
if err != nil { + if errors.Is(err, net.ErrClosed) { + return + } - utils.Logger.ErrorF("error connecting with tcp listener: %v", err) + var netOpErr *net.OpError + ok := errors.As(err, &netOpErr) + if ok && netOpErr.Timeout() { continue } - // Connection handling based on TLS configuration - if m.TCPListener.TLSEnabled { - go m.handleTLSConnection(conn) - } else { - go m.handleConnectionTCP(conn) - } + utils.Logger.ErrorF("error connecting with tcp listener: %v", err) + continue + } + + // Connection handling based on TLS configuration + if m.TCPListener.TLSEnabled { + go m.handleTLSConnection(conn) + } else { + go m.handleConnectionTCP(conn) } } - }() - } + } + }() } func (m *SyslogModule) enableUDP() { - if !m.UDPListener.IsEnabled && m.UDPListener.Port != "" { - utils.Logger.Info("Server %s listening in port: %s protocol: UDP\n", m.DataType, m.UDPListener.Port) - m.UDPListener.IsEnabled = true + m.mu.Lock() + if m.UDPListener.IsEnabled || m.UDPListener.Port == "" { + m.mu.Unlock() + return + } - listener, err := net.ListenPacket("udp", "0.0.0.0"+":"+m.UDPListener.Port) - if err != nil { - utils.Logger.ErrorF("error listening UDP in port %s: %v", m.UDPListener.Port, err) - return - } + listener, err := net.ListenPacket("udp", "0.0.0.0:"+m.UDPListener.Port) + if err != nil { + m.mu.Unlock() + utils.Logger.ErrorF("error listening UDP in port %s: %v", m.UDPListener.Port, err) + return + } - udpListener, ok := listener.(*net.UDPConn) - if !ok { - utils.Logger.ErrorF("could not assert to *net.UDPConn") - return - } + udpListener, ok := listener.(*net.UDPConn) + if !ok { + m.mu.Unlock() + utils.Logger.ErrorF("could not assert to *net.UDPConn") + listener.Close() + return + } - m.UDPListener.Listener = listener - m.UDPListener.CTX, m.UDPListener.Cancel = context.WithCancel(context.Background()) + // Solo setear IsEnabled DESPUÉS de confirmar que el listener está activo + m.UDPListener.IsEnabled = true + m.UDPListener.Listener = listener + m.UDPListener.CTX, 
m.UDPListener.Cancel = context.WithCancel(context.Background()) + m.mu.Unlock() - buffer := make([]byte, UDPBufferSize) - msgChannel := make(chan config.MSGDS) + utils.Logger.Info("Server %s listening in port: %s protocol: UDP", m.DataType, m.UDPListener.Port) - go m.handleConnectionUDP(msgChannel) + buffer := make([]byte, UDPBufferSize) + msgChannel := make(chan config.MSGDS) - go func() { - defer func() { - err = m.UDPListener.Listener.Close() - if err != nil { - utils.Logger.ErrorF("error closing udp listener: %v", err) - } - }() - for { - select { - case <-m.UDPListener.CTX.Done(): - return - default: - udpListener.SetDeadline(time.Now().Add(time.Second * 1)) + go m.handleConnectionUDP(msgChannel) - n, add, err := listener.ReadFrom(buffer) - if err != nil { - if errors.Is(err, net.ErrClosed) { - return - } + go func() { + defer func() { + err = m.UDPListener.Listener.Close() + if err != nil { + utils.Logger.ErrorF("error closing udp listener: %v", err) + } + }() + for { + select { + case <-m.UDPListener.CTX.Done(): + return + default: + udpListener.SetDeadline(time.Now().Add(time.Second * 1)) - var netOpErr *net.OpError - ok := errors.As(err, &netOpErr) - if ok && netOpErr.Timeout() { - continue - } + n, add, err := listener.ReadFrom(buffer) + if err != nil { + if errors.Is(err, net.ErrClosed) { + return + } - utils.Logger.ErrorF("error connecting with udp listener: %v", err) + var netOpErr *net.OpError + ok := errors.As(err, &netOpErr) + if ok && netOpErr.Timeout() { continue } - remoteAddr := add.String() - remoteAddr, _, err = net.SplitHostPort(remoteAddr) + + utils.Logger.ErrorF("error connecting with udp listener: %v", err) + continue + } + remoteAddr := add.String() + remoteAddr, _, err = net.SplitHostPort(remoteAddr) + if err != nil { + utils.Logger.ErrorF("error getting remote addr: %v", err) + continue + } + if remoteAddr == "127.0.0.1" { + remoteAddr, err = os.Hostname() if err != nil { - utils.Logger.ErrorF("error getting remote addr: %v", err) + 
utils.Logger.ErrorF("error getting hostname: %v\n", err) continue } - if remoteAddr == "127.0.0.1" { - remoteAddr, err = os.Hostname() - if err != nil { - utils.Logger.ErrorF("error getting hostname: %v\n", err) - continue - } - } - msgChannel <- config.MSGDS{ - DataSource: remoteAddr, - Message: string(buffer[:n]), - } + } + msgChannel <- config.MSGDS{ + DataSource: remoteAddr, + Message: string(buffer[:n]), } } - }() - } + } + }() } func (m *SyslogModule) disableTCP() { + m.mu.Lock() + defer m.mu.Unlock() + if m.TCPListener.IsEnabled && m.TCPListener.Port != "" { utils.Logger.Info("Server %s closed in port: %s protocol: TCP", m.DataType, m.TCPListener.Port) @@ -292,6 +336,9 @@ func (m *SyslogModule) disableTCP() { } func (m *SyslogModule) disableUDP() { + m.mu.Lock() + defer m.mu.Unlock() + if m.UDPListener.IsEnabled && m.UDPListener.Port != "" { utils.Logger.Info("Server %s closed in port: %s protocol: UDP", m.DataType, m.UDPListener.Port) diff --git a/agent/parser/netflow.go b/agent/parser/netflow.go index 3effaa5d8..712935660 100644 --- a/agent/parser/netflow.go +++ b/agent/parser/netflow.go @@ -4,13 +4,11 @@ import ( "fmt" "sync" - "github.com/tehmaze/netflow" - "github.com/tehmaze/netflow/ipfix" + goflownetflow "github.com/netsampler/goflow2/decoders/netflow" + "github.com/netsampler/goflow2/decoders/netflowlegacy" "github.com/tehmaze/netflow/netflow1" - "github.com/tehmaze/netflow/netflow5" "github.com/tehmaze/netflow/netflow6" "github.com/tehmaze/netflow/netflow7" - "github.com/tehmaze/netflow/netflow9" "github.com/threatwinds/go-sdk/entities" "github.com/threatwinds/go-sdk/plugins" "github.com/utmstack/UTMStack/agent/config" @@ -35,7 +33,7 @@ func GetNetflowParser() *NetflowParser { type NetflowObject struct { Remote string - Message netflow.Message + Message interface{} } func (p *NetflowParser) ProcessData(logMessage interface{}, _ string, queue chan *plugins.Log) error { @@ -46,18 +44,30 @@ func (p *NetflowParser) ProcessData(logMessage interface{}, _ 
string, queue chan case NetflowObject: remote = l.Remote switch m := l.Message.(type) { + // goflow2 types (primary for v5, v9, IPFIX) + case netflowlegacy.PacketNetFlowV5: + metrics = pnf.PrepareGoflowV5(remote, &m) + case *netflowlegacy.PacketNetFlowV5: + metrics = pnf.PrepareGoflowV5(remote, m) + case goflownetflow.NFv9Packet: + metrics = pnf.PrepareGoflowV9(remote, &m) + case *goflownetflow.NFv9Packet: + metrics = pnf.PrepareGoflowV9(remote, m) + case goflownetflow.IPFIXPacket: + metrics = pnf.PrepareGoflowIPFIX(remote, &m) + case *goflownetflow.IPFIXPacket: + metrics = pnf.PrepareGoflowIPFIX(remote, m) + + // tehmaze types (fallback for v1, v6, v7) case *netflow1.Packet: metrics = pnf.PrepareV1(remote, m) - case *netflow5.Packet: - metrics = pnf.PrepareV5(remote, m) case *netflow6.Packet: metrics = pnf.PrepareV6(remote, m) case *netflow7.Packet: metrics = pnf.PrepareV7(remote, m) - case *netflow9.Packet: - metrics = pnf.PrepareV9(remote, m) - case *ipfix.Message: - metrics = pnf.PrepareIPFIX(remote, m) + + default: + return fmt.Errorf("unknown netflow message type: %T", m) } default: return fmt.Errorf("unknown log batch type: %T", l) diff --git a/agent/parser/netflow/goflow.go b/agent/parser/netflow/goflow.go new file mode 100644 index 000000000..b105079d3 --- /dev/null +++ b/agent/parser/netflow/goflow.go @@ -0,0 +1,263 @@ +package netflow + +import ( + "encoding/binary" + "fmt" + "net" + "time" + + goflownetflow "github.com/netsampler/goflow2/decoders/netflow" + "github.com/netsampler/goflow2/decoders/netflowlegacy" +) + +// PrepareGoflowV5 converts goflow2 NetFlow v5 packet to metrics +func PrepareGoflowV5(addr string, p *netflowlegacy.PacketNetFlowV5) []Metric { + nfExporter, _, _ := net.SplitHostPort(addr) + var metrics []Metric + + for _, r := range p.Records { + met := Metric{OutBytes: "0", InBytes: "0", OutPacket: "0", InPacket: "0", NFSender: nfExporter} + met.FlowVersion = "Netflow-V5" + + // Convert timestamps (First and Last are relative to 
SysUptime in milliseconds) + met.First = time.Unix(int64(p.UnixSecs), int64(p.UnixNSecs)).Add(-time.Duration(p.SysUptime-r.First) * time.Millisecond).Format(time.RFC3339Nano) + met.Last = time.Unix(int64(p.UnixSecs), int64(p.UnixNSecs)).Add(-time.Duration(p.SysUptime-r.Last) * time.Millisecond).Format(time.RFC3339Nano) + + met.Protocol = ProtoToName(fmt.Sprintf("%v", r.Proto)) + met.Bytes = fmt.Sprintf("%v", r.DOctets) + met.Packets = fmt.Sprintf("%v", r.DPkts) + met.TCPFlags = fmt.Sprintf("%v", r.TCPFlags) + met.SrcAs = fmt.Sprintf("%v", r.SrcAS) + met.DstAs = fmt.Sprintf("%v", r.DstAS) + met.SrcMask = fmt.Sprintf("%v", r.SrcMask) + met.DstMask = fmt.Sprintf("%v", r.DstMask) + + // Convert uint32 IPs to string + srcIP := make(net.IP, 4) + binary.BigEndian.PutUint32(srcIP, r.SrcAddr) + met.SrcIP = srcIP.String() + + dstIP := make(net.IP, 4) + binary.BigEndian.PutUint32(dstIP, r.DstAddr) + met.DstIP = dstIP.String() + + nextHop := make(net.IP, 4) + binary.BigEndian.PutUint32(nextHop, r.NextHop) + met.NextHop = nextHop.String() + + met.SrcPort = fmt.Sprintf("%v", r.SrcPort) + met.DstPort = fmt.Sprintf("%v", r.DstPort) + + met.InEthernet = fmt.Sprintf("%v", r.Input) + met.OutEthernet = fmt.Sprintf("%v", r.Output) + + metrics = append(metrics, met) + } + + return metrics +} + +// PrepareGoflowV9 converts goflow2 NetFlow v9 packet to metrics +func PrepareGoflowV9(addr string, p *goflownetflow.NFv9Packet) []Metric { + nfExporter, _, _ := net.SplitHostPort(addr) + var metrics []Metric + + for _, fs := range p.FlowSets { + // FlowSets can be TemplateFlowSet, DataFlowSet, or OptionsDataFlowSet + switch ds := fs.(type) { + case goflownetflow.DataFlowSet: + for _, record := range ds.Records { + met := Metric{OutBytes: "0", InBytes: "0", OutPacket: "0", InPacket: "0", NFSender: nfExporter} + met.FlowVersion = "Netflow-V9" + + for _, field := range record.Values { + extractFieldValue(&met, field.Type, field.Value, p.UnixSeconds) + } + + metrics = append(metrics, met) + } + } + 
} + + return metrics +} + +// PrepareGoflowIPFIX converts goflow2 IPFIX packet to metrics +func PrepareGoflowIPFIX(addr string, p *goflownetflow.IPFIXPacket) []Metric { + nfExporter, _, _ := net.SplitHostPort(addr) + var metrics []Metric + + for _, fs := range p.FlowSets { + // FlowSets can be TemplateFlowSet, DataFlowSet, or OptionsDataFlowSet + switch ds := fs.(type) { + case goflownetflow.DataFlowSet: + for _, record := range ds.Records { + met := Metric{OutBytes: "0", InBytes: "0", OutPacket: "0", InPacket: "0", NFSender: nfExporter} + met.FlowVersion = "IPFIX" + + for _, field := range record.Values { + extractFieldValue(&met, field.Type, field.Value, p.ExportTime) + } + + metrics = append(metrics, met) + } + } + } + + return metrics +} + +// extractFieldValue extracts field values based on IPFIX/NetFlow v9 field type IDs +// Field type IDs are defined in RFC 5102 and RFC 3954 +func extractFieldValue(met *Metric, fieldType uint16, value interface{}, exportTime uint32) { + switch fieldType { + // Timestamps + case 21: // flowEndSysUpTime + if v, ok := toUint32(value); ok { + met.Last = time.Unix(int64(exportTime), 0).Add(-time.Duration(v) * time.Millisecond).Format(time.RFC3339Nano) + } + case 22: // flowStartSysUpTime + if v, ok := toUint32(value); ok { + met.First = time.Unix(int64(exportTime), 0).Add(-time.Duration(v) * time.Millisecond).Format(time.RFC3339Nano) + } + case 150: // flowStartSeconds + if v, ok := toUint32(value); ok { + met.First = time.Unix(int64(v), 0).Format(time.RFC3339Nano) + } + case 151: // flowEndSeconds + if v, ok := toUint32(value); ok { + met.Last = time.Unix(int64(v), 0).Format(time.RFC3339Nano) + } + + // Byte and packet counts + case 1: // octetDeltaCount + met.Bytes = fmt.Sprintf("%v", value) + case 2: // packetDeltaCount + met.Packets = fmt.Sprintf("%v", value) + case 85: // octetTotalCount + met.Bytes = fmt.Sprintf("%v", value) + case 86: // packetTotalCount + met.Packets = fmt.Sprintf("%v", value) + + // Interfaces + case 10: 
// ingressInterface + met.InEthernet = fmt.Sprintf("%v", value) + case 14: // egressInterface + met.OutEthernet = fmt.Sprintf("%v", value) + + // IPv4 addresses + case 8: // sourceIPv4Address + if ip, ok := toIP(value); ok { + met.SrcIP = ip.String() + } else { + met.SrcIP = fmt.Sprintf("%v", value) + } + case 12: // destinationIPv4Address + if ip, ok := toIP(value); ok { + met.DstIP = ip.String() + } else { + met.DstIP = fmt.Sprintf("%v", value) + } + case 15: // ipNextHopIPv4Address + if ip, ok := toIP(value); ok { + met.NextHop = ip.String() + } else { + met.NextHop = fmt.Sprintf("%v", value) + } + + // IPv6 addresses + case 27: // sourceIPv6Address + if ip, ok := toIP(value); ok { + met.SrcIP = ip.String() + } else { + met.SrcIP = fmt.Sprintf("%v", value) + } + case 28: // destinationIPv6Address + if ip, ok := toIP(value); ok { + met.DstIP = ip.String() + } else { + met.DstIP = fmt.Sprintf("%v", value) + } + case 62: // ipNextHopIPv6Address + if ip, ok := toIP(value); ok { + met.NextHop = ip.String() + } else { + met.NextHop = fmt.Sprintf("%v", value) + } + + // Protocol and ports + case 4: // protocolIdentifier + met.Protocol = ProtoToName(fmt.Sprintf("%v", value)) + case 7: // sourceTransportPort + met.SrcPort = fmt.Sprintf("%v", value) + case 11: // destinationTransportPort + met.DstPort = fmt.Sprintf("%v", value) + + // Masks + case 9: // sourceIPv4PrefixLength + met.SrcMask = fmt.Sprintf("%v", value) + case 13: // destinationIPv4PrefixLength + met.DstMask = fmt.Sprintf("%v", value) + case 29: // sourceIPv6PrefixLength + met.SrcMask = fmt.Sprintf("%v", value) + case 30: // destinationIPv6PrefixLength + met.DstMask = fmt.Sprintf("%v", value) + + // AS numbers + case 16: // bgpSourceAsNumber + met.SrcAs = fmt.Sprintf("%v", value) + case 17: // bgpDestinationAsNumber + met.DstAs = fmt.Sprintf("%v", value) + + // TCP flags + case 6: // tcpControlBits + met.TCPFlags = fmt.Sprintf("%v", value) + + // Flow direction + case 61: // flowDirection + switch 
fmt.Sprintf("%v", value) { + case "0": + met.Direction = "Ingress" + case "1": + met.Direction = "Egress" + default: + met.Direction = fmt.Sprintf("%v", value) + } + } +} + +// toUint32 attempts to convert value to uint32 +func toUint32(value interface{}) (uint32, bool) { + switch v := value.(type) { + case uint32: + return v, true + case uint64: + return uint32(v), true + case int64: + return uint32(v), true + case int: + return uint32(v), true + case []byte: + if len(v) == 4 { + return binary.BigEndian.Uint32(v), true + } + } + return 0, false +} + +// toIP attempts to convert value to net.IP +func toIP(value interface{}) (net.IP, bool) { + switch v := value.(type) { + case net.IP: + return v, true + case []byte: + if len(v) == 4 || len(v) == 16 { + return net.IP(v), true + } + case uint32: + ip := make(net.IP, 4) + binary.BigEndian.PutUint32(ip, v) + return ip, true + } + return nil, false +} diff --git a/agent/version.json b/agent/version.json index 9937935fb..6bbff213a 100644 --- a/agent/version.json +++ b/agent/version.json @@ -1,4 +1,4 @@ { - "version": "11.1.0", + "version": "11.1.1", "updater_version": "1.0.0" } diff --git a/backend/src/main/java/com/park/utmstack/domain/chart_builder/UtmVisualization.java b/backend/src/main/java/com/park/utmstack/domain/chart_builder/UtmVisualization.java index fd0702b50..d3f0b0875 100644 --- a/backend/src/main/java/com/park/utmstack/domain/chart_builder/UtmVisualization.java +++ b/backend/src/main/java/com/park/utmstack/domain/chart_builder/UtmVisualization.java @@ -69,7 +69,6 @@ public class UtmVisualization implements Serializable { @Column(name = "system_owner") private Boolean systemOwner; - @NotNull @Column(name = "id_pattern") private Long idPattern; @@ -104,7 +103,10 @@ public class UtmVisualization implements Serializable { @JsonDeserialize private AggregationType aggregationType; - @ManyToOne(fetch = FetchType.EAGER) + @Column(name = "sql_query", nullable = true) + private String sqlQuery; + + @ManyToOne(fetch = 
FetchType.EAGER, optional = true) @JoinColumn(name = "id_pattern", referencedColumnName = "id", insertable = false, updatable = false) private UtmIndexPattern pattern; @@ -196,6 +198,10 @@ public void setQuery(String query) { } public List getFilterType() throws UtmSerializationException { + if (_filters == null) { + return null; + } + filterType = UtilSerializer.jsonDeserializeList(FilterType.class, _filters); return filterType; } @@ -225,6 +231,14 @@ public void setAggregationType(AggregationType aggregationType) throws UtmSerial this.aggregationType = aggregationType; } + public String getSqlQuery() { + return sqlQuery; + } + + public void setSqlQuery(String sqlQuery) { + this.sqlQuery = sqlQuery; + } + public String getChartAction() { return chartAction; } diff --git a/backend/src/main/java/com/park/utmstack/service/dto/elastic/SqlSearchDto.java b/backend/src/main/java/com/park/utmstack/service/dto/elastic/SqlSearchDto.java index a7eb5089f..a4088f3a0 100644 --- a/backend/src/main/java/com/park/utmstack/service/dto/elastic/SqlSearchDto.java +++ b/backend/src/main/java/com/park/utmstack/service/dto/elastic/SqlSearchDto.java @@ -5,8 +5,6 @@ import lombok.Data; import lombok.NoArgsConstructor; -import javax.validation.constraints.NotNull; - @Data @AllArgsConstructor @NoArgsConstructor diff --git a/backend/src/main/java/com/park/utmstack/service/dto/visualization/UtmVisualizationDto.java b/backend/src/main/java/com/park/utmstack/service/dto/visualization/UtmVisualizationDto.java new file mode 100644 index 000000000..b01ea0805 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/dto/visualization/UtmVisualizationDto.java @@ -0,0 +1,41 @@ +package com.park.utmstack.service.dto.visualization; + +import com.park.utmstack.domain.chart_builder.types.ChartType; +import com.park.utmstack.domain.chart_builder.types.aggregation.AggregationType; +import com.park.utmstack.domain.chart_builder.types.query.FilterType; +import 
com.park.utmstack.domain.index_pattern.UtmIndexPattern; +import com.park.utmstack.service.dto.visualization.enums.QueryLanguageEnum; +import com.park.utmstack.validation.elasticsearch.SqlSelectOnly; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.Instant; +import java.util.List; + +@Data +@NoArgsConstructor +@AllArgsConstructor +public class UtmVisualizationDto { + private Long id; + private String name; + private String description; + private String eventType; + private Instant createdDate; + private Instant modifiedDate; + private String userCreated; + private String userModified; + private String chartConfig; + private String chartAction; + private Boolean systemOwner; + private Long idPattern; + private UtmIndexPattern pattern; + private ChartType chartType; + private String query; + @SqlSelectOnly + private String sqlQuery; + private List filterType; + private AggregationType aggregationType; + private QueryLanguageEnum queryLanguage; +} + diff --git a/backend/src/main/java/com/park/utmstack/service/dto/visualization/enums/QueryLanguageEnum.java b/backend/src/main/java/com/park/utmstack/service/dto/visualization/enums/QueryLanguageEnum.java new file mode 100644 index 000000000..7d588fd18 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/dto/visualization/enums/QueryLanguageEnum.java @@ -0,0 +1,6 @@ +package com.park.utmstack.service.dto.visualization.enums; + +public enum QueryLanguageEnum { + DSL, + SQL +} \ No newline at end of file diff --git a/backend/src/main/java/com/park/utmstack/service/dto/visualization/mapper/UtmVisualizationMapper.java b/backend/src/main/java/com/park/utmstack/service/dto/visualization/mapper/UtmVisualizationMapper.java new file mode 100644 index 000000000..5acc7a542 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/dto/visualization/mapper/UtmVisualizationMapper.java @@ -0,0 +1,16 @@ +package 
com.park.utmstack.service.dto.visualization.mapper; + +import com.park.utmstack.domain.chart_builder.UtmVisualization; +import com.park.utmstack.service.dto.visualization.UtmVisualizationDto; +import com.park.utmstack.util.exceptions.UtmSerializationException; +import org.mapstruct.Mapper; +import org.mapstruct.factory.Mappers; + +@Mapper(componentModel = "spring") +public interface UtmVisualizationMapper { + UtmVisualizationMapper INSTANCE = Mappers.getMapper(UtmVisualizationMapper.class); + + UtmVisualizationDto toDto(UtmVisualization entity) throws UtmSerializationException; + + UtmVisualization toEntity(UtmVisualizationDto dto) throws UtmSerializationException; +} diff --git a/backend/src/main/java/com/park/utmstack/service/elasticsearch/sql/SqlQueryFilterService.java b/backend/src/main/java/com/park/utmstack/service/elasticsearch/sql/SqlQueryFilterService.java new file mode 100644 index 000000000..b6b6f45a4 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/elasticsearch/sql/SqlQueryFilterService.java @@ -0,0 +1,377 @@ +package com.park.utmstack.service.elasticsearch.sql; + +import com.park.utmstack.domain.chart_builder.types.query.FilterType; +import com.park.utmstack.domain.chart_builder.types.query.OperatorType; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +@Service +public class SqlQueryFilterService { + + /** + * Applies the given filters to the base SQL query by generating a dynamic WHERE clause. + * - Handles all FilterType operators. + * - Treats @timestamp specially (relative and absolute ranges). + * - Merges the generated WHERE with an existing WHERE if present. 
+ */ + public String applyFilters(String baseSql, List filters) { + if (filters == null || filters.isEmpty()) { + return baseSql; + } + + List andConditions = new ArrayList<>(); + List orConditions = new ArrayList<>(); + + for (FilterType filter : filters) { + + // Special handling for @timestamp: relative/absolute time logic + if ("@timestamp".equals(filter.getField())) { + andConditions.add(buildTimestampCondition(filter)); + continue; + } + + String sqlCondition = toSqlCondition(filter); + + // IS_ONE_OF_TERMS_OR is explicitly an OR-group operator + if (filter.getOperator() == OperatorType.IS_ONE_OF_TERMS_OR) { + orConditions.add(sqlCondition); + } else { + andConditions.add(sqlCondition); + } + } + + String whereClause = combineConditions(andConditions, orConditions); + return mergeSql(baseSql, whereClause); + } + + // ------------------------------------------------------------------------- + // TIMESTAMP HANDLING + // ------------------------------------------------------------------------- + + /** + * Builds the SQL condition for @timestamp. 
+ * The value may be: + * - A List of two elements (for IS_BETWEEN) + * - A single String (for > or <=) + */ + private String buildTimestampCondition(FilterType f) { + + Object rawValue = f.getValue(); + + switch (f.getOperator()) { + + case IS_BETWEEN: + if (!(rawValue instanceof List list) || list.size() != 2) { + throw new IllegalArgumentException("@timestamp IS_BETWEEN requires a list of two values"); + } + + String from = String.valueOf(list.get(0)); + String to = String.valueOf(list.get(1)); + + return "@timestamp BETWEEN " + toSqlTime(from) + " AND " + toSqlTime(to); + + case IS_GREATER_THAN: + if (!(rawValue instanceof String singleGt)) { + throw new IllegalArgumentException("@timestamp IS_GREATER_THAN requires a single value"); + } + + return "@timestamp > " + toSqlTime(singleGt); + + case IS_LESS_THAN_OR_EQUALS: + if (!(rawValue instanceof String singleLe)) { + throw new IllegalArgumentException("@timestamp IS_LESS_THAN_OR_EQUALS requires a single value"); + } + + return "@timestamp <= " + toSqlTime(singleLe); + + + default: + throw new IllegalArgumentException("Unsupported timestamp operator: " + f.getOperator()); + } + } + + + /** + * Converts a logical time value into a SQL expression: + * - "now" -> NOW() + * - "now-24h" -> DATE_SUB(NOW(), INTERVAL 24 HOUR) + * - "now-15m" -> DATE_SUB(NOW(), INTERVAL 15 MINUTE) + * - "now-7d" -> DATE_SUB(NOW(), INTERVAL 7 DAY) + * - any other -> quoted literal (absolute timestamp) + */ + private String toSqlTime(String value) { + if ("now".equals(value)) { + return "NOW()"; + } + + if (value.startsWith("now-")) { + // Example: now-24h, now-15m, now-7d + String number = value.substring(4, value.length() - 1); + char unit = value.toLowerCase().charAt(value.length() - 1); + + String sqlUnit = switch (unit) { + case 'm' -> "MINUTE"; + case 'h' -> "HOUR"; + case 'd' -> "DAY"; + default -> throw new IllegalArgumentException("Invalid time unit in value: " + value); + }; + + return "DATE_SUB(NOW(), INTERVAL " + number + " " + 
sqlUnit + ")"; + } + + // Absolute timestamp value + return "'" + value + "'"; + } + + // ------------------------------------------------------------------------- + // GENERAL OPERATORS + // ------------------------------------------------------------------------- + + /** + * Maps a FilterType to a SQL condition string. + * All non-@timestamp operators are handled here. + */ + private String toSqlCondition(FilterType f) { + + String field = f.getField(); + Object rawValue = f.getValue(); + List list = asList(rawValue); // safe conversion + + return switch (f.getOperator()) { + + // --------------------------------------------------------------------- + // Equality + // --------------------------------------------------------------------- + case IS -> field + " = '" + list.get(0) + "'"; + + case IS_NOT -> field + " <> '" + list.get(0) + "'"; + + // --------------------------------------------------------------------- + // Text contains + // --------------------------------------------------------------------- + case CONTAIN -> field + " LIKE '%" + list.get(0) + "%'"; + + case DOES_NOT_CONTAIN -> field + " NOT LIKE '%" + list.get(0) + "%'"; + + case CONTAIN_ONE_OF -> + "(" + list.stream() + .map(v -> field + " LIKE '%" + v + "%'") + .collect(Collectors.joining(" OR ")) + ")"; + + case DOES_NOT_CONTAIN_ONE_OF -> + "(" + list.stream() + .map(v -> field + " NOT LIKE '%" + v + "%'") + .collect(Collectors.joining(" AND ")) + ")"; + + // --------------------------------------------------------------------- + // List membership + // --------------------------------------------------------------------- + case IS_ONE_OF -> + field + " IN (" + joinQuoted(list) + ")"; + + case IS_NOT_ONE_OF -> + field + " NOT IN (" + joinQuoted(list) + ")"; + + case IS_ONE_OF_TERMS -> + field + " IN (" + joinQuoted(list) + ")"; + + case IS_ONE_OF_TERMS_OR -> + "(" + list.stream() + .map(v -> field + " = '" + v + "'") + .collect(Collectors.joining(" OR ")) + ")"; + + // 
--------------------------------------------------------------------- + // Existence + // --------------------------------------------------------------------- + case EXIST -> field + " IS NOT NULL"; + + case DOES_NOT_EXIST -> field + " IS NULL"; + + // --------------------------------------------------------------------- + // Ranges (non-timestamp fields) + // --------------------------------------------------------------------- + case IS_BETWEEN -> { + if (list.size() != 2) { + throw new IllegalArgumentException("IS_BETWEEN requires exactly 2 values"); + } + yield field + " BETWEEN '" + list.get(0) + "' AND '" + list.get(1) + "'"; + } + + case IS_NOT_BETWEEN -> { + if (list.size() != 2) { + throw new IllegalArgumentException("IS_NOT_BETWEEN requires exactly 2 values"); + } + yield field + " NOT BETWEEN '" + list.get(0) + "' AND '" + list.get(1) + "'"; + } + + case IS_GREATER_THAN -> + field + " > '" + list.get(0) + "'"; + + case IS_LESS_THAN_OR_EQUALS -> + field + " <= '" + list.get(0) + "'"; + + // --------------------------------------------------------------------- + // Starts / ends with + // --------------------------------------------------------------------- + case START_WITH -> + field + " LIKE '" + list.get(0) + "%'"; + + case NOT_START_WITH -> + field + " NOT LIKE '" + list.get(0) + "%'"; + + case ENDS_WITH -> + field + " LIKE '%" + list.get(0) + "'"; + + case NOT_ENDS_WITH -> + field + " NOT LIKE '%" + list.get(0) + "'"; + + // --------------------------------------------------------------------- + // Value in multiple fields + // --------------------------------------------------------------------- + case IS_IN_FIELDS -> + "'" + list.get(0) + "' IN (" + String.join(", ", list) + ")"; + + case IS_NOT_IN_FIELDS -> + "'" + list.get(0) + "' NOT IN (" + String.join(", ", list) + ")"; + + // --------------------------------------------------------------------- + default -> throw new IllegalArgumentException("Unsupported operator: " + f.getOperator()); + }; 
+ } + + /** + * Joins a list of values into a comma-separated list of quoted literals. + * Example: ["a","b"] -> 'a', 'b' + */ + private String joinQuoted(List values) { + return values.stream() + .map(v -> "'" + v + "'") + .collect(Collectors.joining(", ")); + } + + // ------------------------------------------------------------------------- + // AND / OR COMBINATION + // ------------------------------------------------------------------------- + + /** + * Combines AND and OR condition lists into a single SQL expression. + * - AND conditions are grouped in parentheses. + * - OR conditions are grouped in parentheses. + * - If both exist: (AND...) AND (OR...) + */ + private String combineConditions(List ands, List ors) { + + String andPart = ands.isEmpty() + ? "" + : "(" + String.join(" AND ", ands) + ")"; + + String orPart = ors.isEmpty() + ? "" + : "(" + String.join(" OR ", ors) + ")"; + + if (!andPart.isEmpty() && !orPart.isEmpty()) { + return andPart + " AND " + orPart; + } + + return andPart + orPart; + } + + // ------------------------------------------------------------------------- + // MERGING WITH BASE SQL + // ------------------------------------------------------------------------- + + /** + * Merges the generated WHERE clause into the base SQL. + * - If base SQL already has WHERE, appends "AND ". + * - Otherwise, appends "WHERE ". 
+ */ + private String mergeSql(String sql, String whereClause) { + if (whereClause == null || whereClause.isBlank()) { + return sql; + } + + String normalized = normalizeForSearch(sql); + + // Case 1: SQL already contains WHERE → append AND + int whereIndex = normalized.indexOf(" where "); + if (whereIndex != -1) { + // Find where the WHERE clause ends (before GROUP BY / ORDER BY / LIMIT) + int endOfWhere = findEndOfWhereClause(normalized, whereIndex + 7); + return sql.substring(0, endOfWhere) + + " AND " + whereClause + " " + + sql.substring(endOfWhere); + } + + // Case 2: Insert WHERE after FROM + int fromIndex = normalized.indexOf(" from "); + if (fromIndex != -1) { + int insertPos = findEndOfFromTarget(sql, normalized, fromIndex + 6); + return sql.substring(0, insertPos) + + " WHERE " + whereClause + " " + + sql.substring(insertPos); + } + + // Case 3: fallback + return sql + " WHERE " + whereClause; + } + + private int findEndOfFromTarget(String originalSql, String normalizedSql, int start) { + + List keywords = List.of(" group by ", " order by ", " limit ", " having ", " where "); + + int nextKeywordPos = normalizedSql.length(); + + for (String kw : keywords) { + int idx = normalizedSql.indexOf(kw, start); + if (idx != -1 && idx < nextKeywordPos) { + nextKeywordPos = idx; + } + } + + // Convert normalized index back to original index + String before = normalizedSql.substring(0, nextKeywordPos); + String lastToken = before.trim().substring(before.trim().lastIndexOf(" ") + 1); + + int originalIndex = originalSql.toLowerCase().indexOf(lastToken); + + return originalIndex == -1 ? 
originalSql.length() : originalIndex + lastToken.length(); + } + + private int findEndOfWhereClause(String normalized, int start) { + List keywords = List.of(" group by ", " order by ", " limit ", " having "); + + int nextKeywordPos = normalized.length(); + + for (String kw : keywords) { + int idx = normalized.indexOf(kw, start); + if (idx != -1 && idx < nextKeywordPos) { + nextKeywordPos = idx; + } + } + + return nextKeywordPos; + } + + private List asList(Object value) { + if (value == null) { + return List.of(); + } + if (value instanceof List list) { + return list.stream().map(String::valueOf).toList(); + } + return List.of(String.valueOf(value)); // single value → list of one + } + + private String normalizeForSearch(String sql) { + return sql + .replace("\n", " ") + .replace("\r", " ") + .replace("\t", " ") + .replaceAll(" +", " ") + .toLowerCase(); + } +} + diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/ResponseParser.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/ResponseParser.java index b8721e4ff..895e620ca 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/ResponseParser.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/ResponseParser.java @@ -2,10 +2,16 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.park.utmstack.domain.chart_builder.UtmVisualization; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch.core.SearchResponse; import java.util.List; +import java.util.Map; public interface ResponseParser { List parse(UtmVisualization visualization, SearchResponse result); + + default List parse(UtmVisualization visualization, SearchSqlResponse result) { + return null; + } } diff --git 
a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/bar_chart/ResponseParserForBarChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/bar_chart/ResponseParserForBarChart.java index 342d9cb9a..c8ba956ee 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/bar_chart/ResponseParserForBarChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/bar_chart/ResponseParserForBarChart.java @@ -10,6 +10,7 @@ import com.utmstack.opensearch_connector.parsers.DateHistogramAggregateParser; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.*; import org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.CollectionUtils; @@ -201,4 +202,49 @@ private void parseMetric(SearchResponse result, List metrics) { throw new RuntimeException(ctx + ": " + e.getLocalizedMessage()); } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + try { + BarChartResult retValue = new BarChartResult(); + + BarChartResult.Serie serie = new BarChartResult.Serie(); + serie.setMetricId("1"); + + for (Object rowObj : result.getData()) { + if (!(rowObj instanceof Map)) { + continue; + } + Map row = (Map) rowObj; + + String category = null; + Double value = 0.0; + + for (Map.Entry entry : row.entrySet()) { + Object val = entry.getValue(); + if (val instanceof Number) { + value = ((Number) val).doubleValue(); + if (serie.getName() == null) { + serie.setName(entry.getKey() != null ? entry.getKey() : "metric"); + } + } else { + category = val != null ? 
val.toString() : "UNKNOWN"; + } + } + + if (category != null) { + retValue.addCategory(category); + serie.addData(value); + } + } + + retValue.addSerie(serie); + + return Collections.singletonList(retValue); + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage()); + } + } + } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/coordinate_map/ResponseParserForCoordinateMapChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/coordinate_map/ResponseParserForCoordinateMapChart.java index 580fa06ab..d3605edae 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/coordinate_map/ResponseParserForCoordinateMapChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/coordinate_map/ResponseParserForCoordinateMapChart.java @@ -11,14 +11,17 @@ import com.park.utmstack.util.exceptions.UtmIpInfoException; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch.core.SearchResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; +import org.springframework.util.Assert; import org.springframework.util.StringUtils; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; @Component @@ -110,5 +113,56 @@ public static boolean isValidIPv6(String ip) { return ip.matches(regex); } + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + List retValue = new ArrayList<>(); + + try { + Assert.notNull(visualization, "Param visualization must not be null"); + List data = 
result.getData(); + + for (int i = 0; i < data.size(); i++) { + Object rowObj = data.get(i); + if (!(rowObj instanceof Map)) continue; + Map row = (Map) rowObj; + + String ip = null; + + for (Map.Entry entry : row.entrySet()) { + Object val = entry.getValue(); + if (val == null) continue; + + String strVal = val.toString(); + if (ip == null && isValidIP(strVal)) { + ip = strVal; + } + } + if (!StringUtils.hasText(ip)) continue; + + GeoIp ipInfo; + try { + ipInfo = ipInfoService.getIpInfo(ip); + if (ipInfo == null) continue; + } catch (UtmIpInfoException e) { + log.error(e.getMessage()); + continue; + } + + CoordinateMapChartResult chartResult = new CoordinateMapChartResult(); + chartResult.setName(ip); + chartResult.setValue(new Double[] { + ipInfo.getLatitude(), + ipInfo.getLongitude(), + (double) i + }); + + retValue.add(chartResult); + } + return retValue; + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage(), e); + } + } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/gauge_goal_chart/ResponseParserForGaugeGoalChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/gauge_goal_chart/ResponseParserForGaugeGoalChart.java index 3a7275a9e..1fe119474 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/gauge_goal_chart/ResponseParserForGaugeGoalChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/gauge_goal_chart/ResponseParserForGaugeGoalChart.java @@ -9,6 +9,7 @@ import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.ResponseParser; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.*; import 
org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.Assert; @@ -111,4 +112,51 @@ private List parseTermAggregation(Map r } return rturn; } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + try { + Assert.notNull(visualization, "Param visualization must not be null"); + + List results = new ArrayList<>(); + + for (Object rowObj : result.getData()) { + if (!(rowObj instanceof Map)) continue; + Map row = (Map) rowObj; + + String bucketKey = null; + String bucketId = null; + + Double metricValue = null; + String metricId = null; + + for (Map.Entry entry : row.entrySet()) { + String key = entry.getKey(); + Object val = entry.getValue(); + + if (val == null) continue; + + if (val instanceof Number) { + metricValue = ((Number) val).doubleValue(); + metricId = key; + } else { + bucketKey = val.toString(); + bucketId = key; + } + } + + if (metricValue == null) metricValue = 0.0; + if (metricId == null) metricId = "metric"; + if (bucketKey == null) bucketKey = "UNKNOWN"; + if (bucketId == null) bucketId = "bucket"; + + results.add(new GaugeGoalChartResult(metricId, metricValue, bucketKey, bucketId)); + } + + return results; + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage(), e); + } + } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/heat_map_chart/ResponseParserForHeatMapChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/heat_map_chart/ResponseParserForHeatMapChart.java index db8b02b29..1cacf3713 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/heat_map_chart/ResponseParserForHeatMapChart.java +++ 
b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/heat_map_chart/ResponseParserForHeatMapChart.java @@ -10,8 +10,10 @@ import com.utmstack.opensearch_connector.parsers.DateHistogramAggregateParser; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.Aggregate; import org.opensearch.client.opensearch.core.SearchResponse; +import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -150,4 +152,64 @@ private void parseBuckets(Bucket bucket, Map aggregation, Met } } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + try { + Assert.notNull(visualization, "Param visualization must not be null"); + + HeatMapChartResult retValue = new HeatMapChartResult(); + + String metricAlias = "Count"; + if (!result.getData().isEmpty() && result.getData().get(0) instanceof Map) { + Map firstRow = (Map) result.getData().get(0); + for (Map.Entry entry : firstRow.entrySet()) { + if (entry.getValue() instanceof Number) { + metricAlias = entry.getKey(); + break; + } + } + } + + retValue.addYAxis(metricAlias); + + int index = 0; + for (Object rowObj : result.getData()) { + if (!(rowObj instanceof Map)) continue; + Map row = (Map) rowObj; + + String bucketKey = null; + Double metricValue = null; + + for (Map.Entry entry : row.entrySet()) { + Object val = entry.getValue(); + if (val == null) continue; + + if (bucketKey == null && val instanceof String) { + bucketKey = entry.getKey() + ":=" + val.toString(); + } + if (metricValue == null && val instanceof Number) { + metricValue = ((Number) val).doubleValue(); + } + } + + if (bucketKey == null || metricValue == 
null) continue; + + retValue.addXAxis(bucketKey); + + Double[] data = new Double[3]; + data[0] = (double) index; + data[1] = 0.0; + data[2] = metricValue; + + retValue.addData(data); + index++; + } + + return Collections.singletonList(retValue); + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage(), e); + } + } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/list_chart/ResponseParserForListChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/list_chart/ResponseParserForListChart.java index 7866b7382..72cd0a410 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/list_chart/ResponseParserForListChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/list_chart/ResponseParserForListChart.java @@ -8,6 +8,7 @@ import com.park.utmstack.util.MapUtil; import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.ResponseParser; import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.impl.table_chart.TableChartResult; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -83,4 +84,44 @@ private void extractColumns(Bucket bucket) { throw new RuntimeException(ctx + ": " + e.getLocalizedMessage()); } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SQL)"; + try { + if (result == null || result.getData() == null || result.getData().isEmpty()) { + return Collections.singletonList(retValue); + } + + List> rows = + (List>) (List) result.getData(); + + Map firstRow = rows.get(0); + List columns = new ArrayList<>(firstRow.keySet()); + + for (String col : 
columns) { + retValue.addColumn(col); + } + + for (Map row : rows) { + List> cells = new ArrayList<>(); + + for (String column : columns) { + Object rawValue = row.get(column); + + TableChartResult.Cell cell = new TableChartResult.Cell<>(); + cell.setValue(rawValue != null ? rawValue.toString() : null); + + cells.add(cell); + } + + retValue.addRow(cells); + } + + return Collections.singletonList(retValue); + + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage(), e); + } + } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/metric_chart/ResponseParserForMetricChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/metric_chart/ResponseParserForMetricChart.java index 4032beebc..465cc59f1 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/metric_chart/ResponseParserForMetricChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/metric_chart/ResponseParserForMetricChart.java @@ -9,6 +9,7 @@ import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.ResponseParser; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.*; import org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.Assert; @@ -112,4 +113,53 @@ private List parseTermAggregation(Map resu throw new RuntimeException(ctx + ": " + e.getLocalizedMessage()); } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + try { + Assert.notNull(visualization, "Param visualization must not be null"); + + List results = new 
ArrayList<>(); + + for (Object rowObj : result.getData()) { + if (!(rowObj instanceof Map)) continue; + Map row = (Map) rowObj; + + String bucketKey = null; + String bucketId = null; + + Double metricValue = null; + String metricId = null; + + for (Map.Entry entry : row.entrySet()) { + String key = entry.getKey(); + Object val = entry.getValue(); + + if (val == null) continue; + + if (val instanceof Number) { + metricValue = ((Number) val).doubleValue(); + metricId = key; + } else { + bucketKey = val.toString(); + bucketId = key; + } + } + + if (metricValue == null) metricValue = 0.0; + if (metricId == null) metricId = "metric"; + if (bucketKey == null) bucketKey = "UNKNOWN"; + if (bucketId == null) bucketId = "bucket"; + + results.add(new MetricChartResult(metricId, metricValue, bucketKey, bucketId)); + } + + return results; + + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage(), e); + } + } + } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/PieChartResult.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/PieChartResult.java index fd25b9566..53a1958dc 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/PieChartResult.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/PieChartResult.java @@ -1,5 +1,10 @@ package com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.impl.pie_chart; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor public class PieChartResult { private String metricId; private String bucketKey; @@ -13,35 +18,5 @@ public PieChartResult(String metricId, double value, String bucketKey, String bu this.bucketId = bucketId; } - public String getBucketKey() { - return bucketKey; - } - - public void setBucketKey(String bucketKey) { - 
this.bucketKey = bucketKey; - } - - public Double getValue() { - return value; - } - - public void setValue(Double value) { - this.value = value; - } - public String getMetricId() { - return metricId; - } - - public void setMetricId(String metricId) { - this.metricId = metricId; - } - - public String getBucketId() { - return bucketId; - } - - public void setBucketId(String bucketId) { - this.bucketId = bucketId; - } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/ResponseParserForPieChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/ResponseParserForPieChart.java index 9820049ce..abbf9911a 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/ResponseParserForPieChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/pie_chart/ResponseParserForPieChart.java @@ -10,15 +10,14 @@ import com.park.utmstack.util.exceptions.UtmChartBuilderException; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.*; import org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; +import java.util.*; +import java.util.stream.Collectors; public class ResponseParserForPieChart implements ResponseParser { private static final String CLASSNAME = "ResponseParserForPieChart"; @@ -33,7 +32,6 @@ public List parse(UtmVisualization visualization, SearchResponse Bucket bucket = aggregationType.getBucket(); - // In pie chart only one metric is allowed if 
(CollectionUtils.isEmpty(aggregationType.getMetrics()) || aggregationType.getMetrics().size() > 1) throw new UtmChartBuilderException("In pie charts it is required one and only one metric type"); @@ -113,4 +111,41 @@ private List parseTermAggregation(Map result, throw new RuntimeException(ctx + ": " + e.getLocalizedMessage()); } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + return result.getData().stream() + .map(r -> { + if (!(r instanceof Map)) { + return new PieChartResult("UNKNOWN", 0.0, "N/A", "N/A"); + } + + Map map = (Map) r; + + String bucketKey = null; + String bucketId = null; + + double value = 0.0; + String metricId = null; + + for (Map.Entry entry : map.entrySet()) { + String key = entry.getKey().toString(); + Object val = entry.getValue(); + + if (val instanceof Number) { + value = ((Number) val).doubleValue(); + metricId = key; + } else { + bucketKey = val != null ? val.toString() : null; + bucketId = key; + } + } + + if (metricId == null) metricId = "metric"; + if (bucketId == null) bucketId = "bucket"; + + return new PieChartResult(metricId, value, bucketKey, bucketId); + }) + .collect(Collectors.toList()); + } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/table_chart/ResponseParserForTableChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/table_chart/ResponseParserForTableChart.java index dc2a48957..14241814b 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/table_chart/ResponseParserForTableChart.java +++ b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/table_chart/ResponseParserForTableChart.java @@ -9,6 +9,7 @@ import com.utmstack.opensearch_connector.parsers.DateHistogramAggregateParser; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import 
com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.*; import org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.Assert; @@ -153,4 +154,39 @@ private void parseMetric(SearchResponse result, List metrics throw new RuntimeException(ctx + ": " + e.getMessage()); } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + try { + Assert.notNull(visualization, ctx + ": Param visualization must not be null"); + + TableChartResult table = new TableChartResult(); + + if (!result.getData().isEmpty() && result.getData().get(0) instanceof Map) { + Map firstRow = (Map) result.getData().get(0); + for (String key : firstRow.keySet()) { + table.addColumn("->" + key); + } + } + + for (Object rowObj : result.getData()) { + if (!(rowObj instanceof Map)) continue; + Map row = (Map) rowObj; + + List> cells = new ArrayList<>(); + for (Map.Entry entry : row.entrySet()) { + Object val = entry.getValue(); + boolean isMetric = val instanceof Number; + cells.add(new TableChartResult.Cell<>(val, isMetric)); + } + + table.addRow(cells); + } + + return Collections.singletonList(table); + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage(), e); + } + } } diff --git a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/tag_cloud_chart/ResponseParserForTagCloudChart.java b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/tag_cloud_chart/ResponseParserForTagCloudChart.java index 9cce0153a..07da916bc 100644 --- a/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/tag_cloud_chart/ResponseParserForTagCloudChart.java +++ 
b/backend/src/main/java/com/park/utmstack/util/chart_builder/elasticsearch_dsl/responses/impl/tag_cloud_chart/ResponseParserForTagCloudChart.java @@ -9,6 +9,7 @@ import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.ResponseParser; import com.utmstack.opensearch_connector.parsers.TermAggregateParser; import com.utmstack.opensearch_connector.types.BucketAggregation; +import com.utmstack.opensearch_connector.types.SearchSqlResponse; import org.opensearch.client.opensearch._types.aggregations.*; import org.opensearch.client.opensearch.core.SearchResponse; import org.springframework.util.Assert; @@ -78,4 +79,52 @@ private List parseTermAggregation(Map ag throw new RuntimeException(ctx + ": " + e.getMessage()); } } + + @Override + public List parse(UtmVisualization visualization, SearchSqlResponse result) { + final String ctx = CLASSNAME + ".parse(SearchSqlResponse)"; + try { + Assert.notNull(visualization, "Param visualization must not be null"); + + List results = new ArrayList<>(); + + for (Object rowObj : result.getData()) { + if (!(rowObj instanceof Map)) { + continue; + } + + Map row = (Map) rowObj; + + String bucketKey = null; + Double metricValue = 0.0; + + String bucketId = null; + String metricId = null; + + for (Map.Entry entry : row.entrySet()) { + String column = entry.getKey(); + Object val = entry.getValue(); + + if (val instanceof Number) { + metricValue = ((Number) val).doubleValue(); + metricId = column; + } else { + bucketKey = val != null ? 
val.toString() : "UNKNOWN"; + bucketId = column; + } + } + + results.add(new TagCloudChartResult( + bucketKey, + metricValue, + metricId, + bucketId + )); + } + + return results; + } catch (Exception e) { + throw new RuntimeException(ctx + ": " + e.getMessage()); + } + } } diff --git a/backend/src/main/java/com/park/utmstack/web/rest/chart_builder/UtmVisualizationResource.java b/backend/src/main/java/com/park/utmstack/web/rest/chart_builder/UtmVisualizationResource.java index d34e067ef..c1671c4c0 100644 --- a/backend/src/main/java/com/park/utmstack/web/rest/chart_builder/UtmVisualizationResource.java +++ b/backend/src/main/java/com/park/utmstack/web/rest/chart_builder/UtmVisualizationResource.java @@ -9,15 +9,24 @@ import com.park.utmstack.service.chart_builder.UtmVisualizationQueryService; import com.park.utmstack.service.chart_builder.UtmVisualizationService; import com.park.utmstack.service.dto.chart_builder.UtmVisualizationCriteria; +import com.park.utmstack.service.dto.visualization.UtmVisualizationDto; +import com.park.utmstack.service.dto.visualization.enums.QueryLanguageEnum; +import com.park.utmstack.service.dto.visualization.mapper.UtmVisualizationMapper; import com.park.utmstack.service.elasticsearch.ElasticsearchService; +import com.park.utmstack.service.elasticsearch.sql.SqlQueryFilterService; import com.park.utmstack.util.ResponseUtil; +import com.park.utmstack.util.UtilPagination; import com.park.utmstack.util.chart_builder.elasticsearch_dsl.requests.RequestDsl; import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.ResponseParser; import com.park.utmstack.util.chart_builder.elasticsearch_dsl.responses.ResponseParserFactory; +import com.park.utmstack.util.elastic.SqlPaginationUtil; import com.park.utmstack.util.exceptions.UtmChartBuilderException; import com.park.utmstack.web.rest.errors.BadRequestAlertException; import com.park.utmstack.web.rest.util.HeaderUtil; import com.park.utmstack.web.rest.util.PaginationUtil; +import 
com.utmstack.opensearch_connector.types.SearchSqlResponse; +import com.utmstack.opensearch_connector.types.SqlQueryRequest; +import lombok.RequiredArgsConstructor; import org.opensearch.client.opensearch.core.SearchResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,15 +46,13 @@ import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneOffset; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Optional; +import java.util.*; /** * REST controller for managing UtmVisualization. */ @RestController +@RequiredArgsConstructor @RequestMapping("/api") public class UtmVisualizationResource { @@ -60,20 +67,9 @@ public class UtmVisualizationResource { private final ApplicationEventService applicationEventService; private final UtmStackService utmStackService; private final ElasticsearchService elasticsearchService; + private final UtmVisualizationMapper utmVisualizationMapper; + private final SqlQueryFilterService sqlQueryFilterService; - public UtmVisualizationResource(UtmVisualizationService visualizationService, - UtmVisualizationQueryService visualizationQueryService, - ResponseParserFactory responseParserFactory, - ApplicationEventService applicationEventService, - UtmStackService utmStackService, - ElasticsearchService elasticsearchService) { - this.visualizationService = visualizationService; - this.visualizationQueryService = visualizationQueryService; - this.responseParserFactory = responseParserFactory; - this.applicationEventService = applicationEventService; - this.elasticsearchService = elasticsearchService; - this.utmStackService = utmStackService; - } /** * POST /utm-visualizations : Create a new utmVisualization. 
@@ -83,7 +79,7 @@ public UtmVisualizationResource(UtmVisualizationService visualizationService, * Request) if the utmVisualization has already an ID */ @PostMapping("/utm-visualizations") - public ResponseEntity createUtmVisualization(@Valid @RequestBody UtmVisualization utmVisualization) { + public ResponseEntity createUtmVisualization(@Valid @RequestBody UtmVisualizationDto utmVisualization) { final String ctx = CLASSNAME + ".createUtmVisualization"; UtmVisualization result = null; @@ -91,8 +87,20 @@ public ResponseEntity createUtmVisualization(@Valid @RequestBo if (utmVisualization.getId() != null) throw new BadRequestAlertException("A new utmVisualization cannot already have an ID", ENTITY_NAME, "idexists"); - RequestDsl requestQuery = new RequestDsl(utmVisualization); - utmVisualization.setQuery(requestQuery.getSearchSourceBuilder().toString()); + if (utmVisualization.getQueryLanguage() == QueryLanguageEnum.DSL) { + if (utmVisualization.getPattern() == null) { + throw new BadRequestAlertException("DSL visualization requires a pattern", ENTITY_NAME, "patternmissing"); + } + if (utmVisualization.getAggregationType() == null) { + throw new BadRequestAlertException("DSL visualization requires an aggregationType", ENTITY_NAME, "aggregationmissing"); + } + RequestDsl requestQuery = new RequestDsl(utmVisualizationMapper.toEntity(utmVisualization)); + utmVisualization.setQuery(requestQuery.getSearchSourceBuilder().toString()); + } else { + if (utmVisualization.getSqlQuery() == null || utmVisualization.getSqlQuery().isBlank()) { + throw new BadRequestAlertException("SQL visualization requires a sqlQuery", ENTITY_NAME, "sqlmissing"); + } + } SecurityUtils.getCurrentUserLogin().ifPresent(utmVisualization::setUserCreated); utmVisualization.setCreatedDate(LocalDateTime.now().toInstant(ZoneOffset.UTC)); @@ -104,7 +112,8 @@ public ResponseEntity createUtmVisualization(@Valid @RequestBo utmVisualization.setSystemOwner(true); } - result = 
visualizationService.save(utmVisualization); + result = visualizationService.save(utmVisualizationMapper.toEntity(utmVisualization)); + return ResponseEntity.created(new URI("/api/utm-visualizations/" + result.getId())) .headers(HeaderUtil.createEntityCreationAlert(ENTITY_NAME, result.getId().toString())).body(result); } catch (DataIntegrityViolationException e) { @@ -176,21 +185,27 @@ public ResponseEntity createBatchUtmVisualization(@Valid @RequestBody Crea * couldn't be updated */ @PutMapping("/utm-visualizations") - public ResponseEntity updateUtmVisualization(@Valid @RequestBody UtmVisualization utmVisualization) { + public ResponseEntity updateUtmVisualization(@Valid @RequestBody UtmVisualizationDto utmVisualization) { final String ctx = CLASSNAME + ".updateUtmVisualization"; if (utmVisualization.getId() == null) { throw new BadRequestAlertException("Invalid id", ENTITY_NAME, "idnull"); } UtmVisualization result = null; try { - RequestDsl requestQuery = new RequestDsl(utmVisualization); - utmVisualization.setQuery(requestQuery.getSearchSourceBuilder().toString()); + if (utmVisualization.getQueryLanguage() == QueryLanguageEnum.DSL) { + RequestDsl requestQuery = new RequestDsl(utmVisualizationMapper.toEntity(utmVisualization)); + utmVisualization.setQuery(requestQuery.getSearchSourceBuilder().toString()); + } else { + if (utmVisualization.getSqlQuery() == null || utmVisualization.getSqlQuery().isBlank()) { + throw new BadRequestAlertException("SQL visualization requires a sqlQuery", ENTITY_NAME, "sqlmissing"); + } + } SecurityUtils.getCurrentUserLogin().ifPresent(utmVisualization::setUserModified); utmVisualization.setModifiedDate(Instant.now()); utmVisualization.setSystemOwner(utmVisualization.getSystemOwner() == null ? 
utmVisualization.getId() < 1000000 : utmVisualization.getSystemOwner()); - result = visualizationService.save(utmVisualization); + result = visualizationService.save(utmVisualizationMapper.toEntity(utmVisualization)); return ResponseEntity.ok().headers(HeaderUtil.createEntityUpdateAlert(ENTITY_NAME, utmVisualization.getId().toString())).body(result); } catch (DataIntegrityViolationException e) { String msg = ctx + ": " + e.getMostSpecificCause().getMessage().replaceAll("\n", ""); @@ -281,26 +296,34 @@ public ResponseEntity bulkDelete(@RequestParam List ids) { } @PostMapping("/utm-visualizations/run") - public ResponseEntity> run(@RequestBody UtmVisualization visualization, + public ResponseEntity> run(@RequestBody @Valid UtmVisualizationDto visualization, @RequestParam(value = "page", required = false) Integer page, @RequestParam(value = "size", required = false) Integer size, @RequestParam(defaultValue = "200") int top) throws UtmChartBuilderException { final String ctx = CLASSNAME + ".run"; try { Assert.notNull(visualization, "Param utmVisualization must not be null"); + ResponseParser responseParser = responseParserFactory.instance(visualization.getChartType()); + UtmVisualization utmVisualization = utmVisualizationMapper.toEntity(visualization); + + if (visualization.getQueryLanguage() == QueryLanguageEnum.SQL && Objects.nonNull(visualization.getSqlQuery()) && !visualization.getSqlQuery().trim().isEmpty()) { + String query = sqlQueryFilterService.applyFilters(visualization.getSqlQuery(), visualization.getFilterType()); + SearchSqlResponse response = elasticsearchService.searchBySql(new SqlQueryRequest(query, null), Map.class); + return ResponseEntity.ok().body(responseParser.parse(utmVisualization, response)); + } if (!elasticsearchService.indexExist(visualization.getPattern().getPattern())) return ResponseEntity.ok(Collections.emptyList()); - RequestDsl requestQuery = new RequestDsl(visualization); + RequestDsl requestQuery = new RequestDsl(utmVisualization); 
SearchResponse result; if(Objects.nonNull(page) && Objects.nonNull(size)){ result = elasticsearchService.search(requestQuery.getSearchSourceBuilder( PageRequest.of(page, size), top).build(), ObjectNode.class); } else { result = elasticsearchService.search(requestQuery.getSearchSourceBuilder().build(), ObjectNode.class); } - ResponseParser responseParser = responseParserFactory.instance(visualization.getChartType()); - return ResponseEntity.ok().body(responseParser.parse(visualization, result)); + + return ResponseEntity.ok().body(responseParser.parse(utmVisualization, result)); } catch (Exception e) { String msg = ctx + ": " + e.getMessage(); log.error(msg); diff --git a/backend/src/main/resources/config/liquibase/changelog/20251203001_add_column_sqlQuery_to_visualization.xml b/backend/src/main/resources/config/liquibase/changelog/20251203001_add_column_sqlQuery_to_visualization.xml new file mode 100644 index 000000000..f9af91470 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20251203001_add_column_sqlQuery_to_visualization.xml @@ -0,0 +1,12 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260122001_update_is_activatable_macos_module.xml b/backend/src/main/resources/config/liquibase/changelog/20260122001_update_is_activatable_macos_module.xml new file mode 100644 index 000000000..fb7ea96de --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260122001_update_is_activatable_macos_module.xml @@ -0,0 +1,21 @@ + + + + + + + + + module_name = 'MACOS' + + + + + name = 'MacOS' + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260122002_update_o365_visualizations_filters.xml b/backend/src/main/resources/config/liquibase/changelog/20260122002_update_o365_visualizations_filters.xml new file mode 100644 index 000000000..dd4f46973 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260122002_update_o365_visualizations_filters.xml @@ -0,0 
+1,52 @@ + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260122003_update_o365_visualizations_agg.xml b/backend/src/main/resources/config/liquibase/changelog/20260122003_update_o365_visualizations_agg.xml new file mode 100644 index 000000000..a3661e718 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260122003_update_o365_visualizations_agg.xml @@ -0,0 +1,60 @@ + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260122004_update_azure_visualizations.xml b/backend/src/main/resources/config/liquibase/changelog/20260122004_update_azure_visualizations.xml new file mode 100644 index 000000000..91a06c523 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260122004_update_azure_visualizations.xml @@ -0,0 +1,143 @@ + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260126001_update_regex_for_configuration_parameter_base_url.xml b/backend/src/main/resources/config/liquibase/changelog/20260126001_update_regex_for_configuration_parameter_base_url.xml new file mode 100644 index 000000000..f0a6470b2 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260126001_update_regex_for_configuration_parameter_base_url.xml @@ -0,0 +1,16 @@ + + + + + + + + conf_param_short = 'utmstack.mail.baseUrl' + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260128001_update_azure_correlation_rules.xml b/backend/src/main/resources/config/liquibase/changelog/20260128001_update_azure_correlation_rules.xml new file mode 100644 index 000000000..0b3c4300b --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260128001_update_azure_correlation_rules.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260128002_update_o365_correlation_rules.xml 
b/backend/src/main/resources/config/liquibase/changelog/20260128002_update_o365_correlation_rules.xml new file mode 100644 index 000000000..ed7611c3d --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260128002_update_o365_correlation_rules.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260129001_update_filter_sophos_xg.xml b/backend/src/main/resources/config/liquibase/changelog/20260129001_update_filter_sophos_xg.xml new file mode 100644 index 000000000..b8f73266f --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260129001_update_filter_sophos_xg.xml @@ -0,0 +1,760 @@ + + + + + + + ' + - fieldName: log.syslogVersion + pattern: '{{.integer}}' + - fieldName: log.syslogDeviceTime + pattern: '{{.year}}-{{.monthNumber}}-{{.monthDay}}{{.space}}{{.time}}' + - fieldName: log.0trash + pattern: '{{.data}}\=' + - fieldName: log.syslogHostIP + pattern: '{{.ipv4}}' + - fieldName: log.notDefined + pattern: '{{.integer}}' + - fieldName: log.restData + pattern: '{{.greedy}}' + source: raw + + - grok: + patterns: + - fieldName: log.syslogPriority + pattern: '\<{{.integer}}\>' + - fieldName: log.restData + pattern: '{{.greedy}}' + source: raw + + - kv: + fieldSplit: " " + valueSplit: "=" + source: log.restData + + # Extracting common fields for all logs + - grok: + patterns: + - fieldName: log.1trash + pattern: '{{.data}}log_type=' + - fieldName: log.type + pattern: '\"{{.data}}\"' + - fieldName: log.2trash + pattern: '{{.data}}log_component=' + - fieldName: log.component + pattern: '\"{{.data}}\"' + - fieldName: log.3trash + pattern: '{{.data}}log_subtype=' + - fieldName: log.subType + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + # Parsing the raw field for log_type="Content Filtering" log_component="HTTP" log_subtype="Allowed" + - grok: + patterns: + - fieldName: log.4trash + pattern: 
'{{.data}}user_agent=' + - fieldName: log.userAgent + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.5trash + pattern: '{{.data}}download_file_name=' + - fieldName: log.downloadfilename + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.6trash + pattern: '{{.data}}upload_file_name=' + - fieldName: log.uploadfilename + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + # Parsing the raw field for log_type="Content Filtering" log_component="Application" log_subtype="Denied" + - grok: + patterns: + - fieldName: log.4trash + pattern: '{{.data}}category=' + - fieldName: log.category + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.5trash + pattern: '{{.data}}application_name=' + - fieldName: log.applicationName + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.6trash + pattern: '{{.data}}application_technology=' + - fieldName: log.applicationTech + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.7trash + pattern: '{{.data}}application_category=' + - fieldName: log.applicationCategory + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.8trash + pattern: '{{.data}}message=' + - fieldName: log.logMessage + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + # Parsing the raw field for log_type="Firewall" log_component="Firewall Rule|Invalid Traffic" log_subtype="Allowed|Denied" + - grok: 
+ patterns: + - fieldName: log.4trash + pattern: '{{.data}}ether_type=' + - fieldName: log.etherType + pattern: '{{.data}}\)' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + # Parsing the raw field for log_type="Event" log_component="DDNS|DHCP Server|Firewall Authentication" log_subtype="System|Authentication" + - grok: + patterns: + - fieldName: log.4trash + pattern: '{{.data}}raw_data=' + - fieldName: log.rawData + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.5trash + pattern: '{{.data}}raw_data=' + - fieldName: log.rawData + pattern: '\"{{.data}}\"' + source: log.restData + + - grok: + patterns: + - fieldName: log.6trash + pattern: '{{.data}}usergroupname=' + - fieldName: log.userGroupName + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.7trash + pattern: '{{.data}}auth_client=' + - fieldName: log.authClient + pattern: '\"{{.data}}\"' + - fieldName: log.irrelevant + pattern: '{{.greedy}}' + source: log.restData + + - grok: + patterns: + - fieldName: log.8trash + pattern: '{{.data}}message=' + - fieldName: log.logMessage + pattern: '\"{{.data}}\"' + source: log.restData + + # Renaming useful fields + - rename: + from: + - log.appiscloud + to: log.appIsCloud + + - rename: + from: + - log.category + to: log.categoryApp + + - rename: + from: + - log.categorytype + to: log.categoryType + + - rename: + from: + - log.uploadfilename + to: log.uploadFileName + + - rename: + from: + - log.downloadfiletype + to: log.downloadFileType + + - rename: + from: + - log.downloadfilename + to: log.downloadFileName + + - rename: + from: + - log.uploadfiletype + to: log.uploadFileType + + - rename: + from: + - log.contenttype + to: log.contentType + + - rename: + from: + - log.conid + to: log.connectionId + + - rename: + from: + - log.deviceid + to: 
log.deviceId + + - rename: + from: + - log.domain + to: target.domain + + - rename: + from: + - log.dstip + to: target.ip + + - rename: + from: + - log.dstport + to: target.port + + - rename: + from: + - log.exceptions + to: log.webExceptions + + - rename: + from: + - log.fwruleid + to: log.firewallRuleId + + - rename: + from: + - log.iap + to: log.webPolicy + + - rename: + from: + - log.logid + to: log.Id + + - rename: + from: + - log.bridgedisplayname + to: log.bridgeDisplayName + + - rename: + from: + - log.bridgename + to: log.bridgeName + + - rename: + from: + - log.recvbytes + to: origin.bytesReceived + + - rename: + from: + - log.sentbytes + to: origin.bytesSent + + - rename: + from: + - log.srcip + to: origin.ip + + - rename: + from: + - log.srcport + to: origin.port + + - rename: + from: + - log.statuscode + to: log.statusCode + + - rename: + from: + - log.protocol + to: protocol + + - rename: + from: + - log.usedquota + to: log.responseTime + + - rename: + from: + - log.usergp + to: log.userGroup + + - rename: + from: + - log.username + to: origin.user + + - rename: + from: + - log.srccountrycode + to: log.srcCountryCode + + - rename: + from: + - log.dstcountrycode + to: log.dstCountryCode + + - rename: + from: + - log.appresolvedby + to: log.appResolvedBy + + - rename: + from: + - log.applicationrisk + to: log.applicationRisk + + - rename: + from: + - log.applicationfilterpolicy + to: log.applicationFilterPolicy + + - rename: + from: + - log.device + to: log.deviceType + + - rename: + from: + - log.devicename + to: log.deviceName + + - rename: + from: + - log.domainname + to: log.domainName + + - rename: + from: + - log.timezone + to: log.timeZone + + - rename: + from: + - log.filename + to: target.file + + - rename: + from: + - log.filepath + to: target.path + + - rename: + from: + - log.indisplayinterface + to: log.inDisplayInterface + + - rename: + from: + - log.ininterface + to: log.inInterface + + - rename: + from: + - log.outdisplayinterface + to: 
log.outDisplayInterface + + - rename: + from: + - log.outinterface + to: log.outInterface + + - rename: + from: + - log.vlanid + to: log.vLanId + + - rename: + from: + - log.ftpcommand + to: command + + - rename: + from: + - log.FTPurl + to: target.url + + - rename: + from: + - log.FTPdirection + to: log.ftpDirection + + - rename: + from: + - log.ipspolicyid + to: log.ipsPolicyId + + - rename: + from: + - log.appfilterpolicyid + to: log.appFilterPolicyId + + - rename: + from: + - log.natruleid + to: log.natRuleId + + - rename: + from: + - log.srcmac + to: origin.mac + + - rename: + from: + - log.policytype + to: log.policyType + + - rename: + from: + - log.updatedip + to: log.updatedIp + + - rename: + from: + - log.clienthostname + to: log.clientHostName + + - rename: + from: + - log.ipaddress + to: origin.ip + + - rename: + from: + - log.clientphysicaladdress + to: log.clientPhysicalAddress + + - rename: + from: + - log.timestamp + to: log.timesTamp + + - rename: + from: + - log.starttime + to: log.startTime + + - rename: + from: + - log.authmechanism + to: log.authMechanism + + - rename: + from: + - log.eventtype + to: log.eventType + + - rename: + from: + - log.executionpath + to: log.executionPath + + - rename: + from: + - log.loginuser + to: log.loginUser + + - rename: + from: + - log.processuser + to: log.processUser + + - rename: + from: + - log.dstmac + to: target.mac + + - rename: + from: + - log.dstdomain + to: target.domain + + # Removing unnecessary characters + - trim: + function: prefix + substring: '"' + fields: + - log.categoryApp + - log.categoryType + - log.component + - log.subType + - log.type + - log.logMessage + - log.contentType + - log.deviceName + - log.domainName + - target.domain + - log.inDisplayInterface + - log.inInterface + - log.outDisplayInterface + - log.outInterface + - log.vLanId + - log.deviceType + - command + - log.clientHostName + - log.ipAddress + - log.clientPhysicalAddress + - log.authMechanism + - log.eventType + - 
log.executionPath + - log.loginUser + - log.processUser + - log.rawData + - log.userGroupName + - log.authClient + - target.file + - target.path + - log.ftpDirection + - target.url + - log.downloadFileName + - log.downloadFileType + - log.uploadFileName + - log.uploadFileType + - protocol + - log.reason + - log.referer + - log.status + - log.statusCode + - log.url + - log.virus + - log.userAgent + - log.userGroup + - origin.user + - log.bridgeName + - log.bridgeDisplayName + - log.timeZone + - log.applicationCategory + - log.applicationName + - log.applicationTech + - log.appResolvedBy + + - trim: + function: suffix + substring: '"' + fields: + - log.categoryApp + - log.categoryType + - log.component + - log.subType + - log.type + - log.logMessage + - log.contentType + - log.deviceName + - log.domainName + - target.domain + - log.inDisplayInterface + - log.inInterface + - log.outDisplayInterface + - log.outInterface + - log.vLanId + - log.deviceType + - command + - log.clientHostName + - log.ipAddress + - log.clientPhysicalAddress + - log.authMechanism + - log.eventType + - log.executionPath + - log.loginUser + - log.processUser + - log.rawData + - log.userGroupName + - log.authClient + - target.file + - target.path + - log.ftpDirection + - target.url + - log.downloadFileName + - log.downloadFileType + - log.uploadFileName + - log.uploadFileType + - protocol + - log.reason + - log.referer + - log.status + - log.statusCode + - log.url + - log.virus + - log.userAgent + - log.userGroup + - origin.user + - log.bridgeName + - log.bridgeDisplayName + - log.timeZone + - log.applicationCategory + - log.applicationName + - log.applicationTech + - log.appResolvedBy + + - trim: + function: prefix + substring: '<' + fields: + - log.syslogPriority + + - trim: + function: suffix + substring: '>' + fields: + - log.syslogPriority + + # Adding geolocation to target ip + - dynamic: + plugin: com.utmstack.geolocation + params: + source: target.ip + destination: target.geolocation + 
where: exists("target.ip") + + # Adding geolocation to origin ip + - dynamic: + plugin: com.utmstack.geolocation + params: + source: origin.ip + destination: origin.geolocation + where: exists("origin.ip") + + # Field conversions + - cast: + fields: + - target.port + to: int + + - cast: + fields: + - log.statusCode + - origin.bytesReceived + - origin.bytesSent + to: float + + # Renaming "log.statusCode" to "statusCode" to add it to the event structure + - rename: + from: + - log.statusCode + to: statusCode + + # Adding actionResult + # denied by default + - add: + function: 'string' + params: + key: actionResult + value: 'denied' + + - add: + function: 'string' + params: + key: actionResult + value: 'accepted' + where: safe("statusCode", 0.0) >= double(200) && safe("statusCode", 0.0) <= double(299) || (safe("statusCode", 0.0) >= double(300) && safe("statusCode", 0.0) <= double(399) && safe("origin.bytesReceived", 0.0) > double(0)) + + # Removing unused fields + - delete: + fields: + - log.0trash + - log.1trash + - log.2trash + - log.3trash + - log.4trash + - log.5trash + - log.6trash + - log.7trash + - log.8trash + - log.irrelevant + - log.rawdata + - log.message + - log.restData + - log.useragent + - log.activityname + - log.httpresponsecode + - log.overrideauthorizer + - log.overridename + - log.overridetoken + - log.transactionid + - log.applicationcategory + - log.applicationname + - log.applicationtechnology + - log.avpolicyname + - log.ethertype + - log.bridgedisplayname + - log.bridgename + - log.usergroupname + - log.authclient + - log.logcomponent + - log.logsubtype + - log.name + - log.logtype +$$ + WHERE id=801; + ]]> + + + \ No newline at end of file diff --git a/backend/src/main/resources/config/liquibase/data/azure.rules.sql b/backend/src/main/resources/config/liquibase/data/azure.rules.sql new file mode 100644 index 000000000..266fed786 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/data/azure.rules.sql @@ -0,0 +1,1223 @@ +insert 
into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1593, 'API Management Security Events', 3, 3, 2, 'Defense Evasion, Persistence, Privilege Escalation, Initial Access', 'T1078 - Valid Accounts', 'Detects suspicious API Management activities including authentication failures, unauthorized access attempts, or API policy violations in Azure API Management services. + +Next Steps: +1. Review the specific API endpoint and operation that triggered the alert +2. Investigate the source IP address and user identity associated with the request +3. Check for patterns of failed authentication attempts from the same source +4. Verify if the API access request aligns with legitimate business needs +5. Review API Management policies and access controls +6. Check for any recent changes to API permissions or policies +7. 
Consider implementing additional rate limiting or IP restrictions if needed +', '["https://learn.microsoft.com/en-us/azure/api-management/api-management-howto-use-azure-monitor","https://attack.mitre.org/techniques/T1078/"]', '(contains("log.operationName", "Microsoft.ApiManagement") || equals("log.category", "GatewayLogs")) && (oneOf("statusCode", [401, 403]) || equals("actionResult", "denied"))', '2026-01-29 16:18:52.034045', true, true, 'origin', '["origin.ip"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1594, 'Azure Event Hub Authorization Rule Created or Updated', 2, 3, 2, 'Cloud Storage Object', 'Collection', 'Identifies when an Event Hub Authorization Rule is created or updated in Azure. An authorization rule is associated with specific rights (Listen, Send, Manage), and carries a pair of cryptographic keys. When you create an Event Hubs namespace, a policy rule named RootManageSharedAccessKey is created for the namespace. This has manage permissions for the entire namespace and it''s recommended that you treat this rule like an administrative root account and don''t use it in your application. 
Adversaries may create or modify authorization rules to establish persistence, exfiltrate data, or maintain access to Event Hub streams.', '["https://attack.mitre.org/tactics/TA0009/","https://attack.mitre.org/techniques/T1537/","https://attack.mitre.org/tactics/TA0010/","https://learn.microsoft.com/en-us/azure/event-hubs/authorize-access-shared-access-signature"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.EVENTHUB/NAMESPACES/AUTHORIZATIONRULES/WRITE") || contains("log.operationName", "Microsoft.EventHub/namespaces/authorizationRules/write")) && (equals("log.resultType", "0"))', '2026-01-29 16:18:53.218299', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1595, 'Azure AD Conditional Access Policy Bypass Attempt', 3, 3, 1, 'Defense Evasion, Persistence, Privilege Escalation, Initial Access', 'T1078 - Valid Accounts', 'Detects potential attempts to bypass Azure AD Conditional Access policies through policy tampering or unauthorized modifications. Monitors for policy updates and deletions that could weaken security controls such as MFA requirements, device compliance checks, or location-based restrictions. Adversaries may modify or delete conditional access policies to facilitate unauthorized access, bypass security controls, or establish persistence. + +Next Steps: +1. Immediately review the conditional access policy changes made and document all modifications +2. Verify the identity and authorization of the user who made the changes, including verifying they have legitimate administrative access +3. 
Check if the policy modifications align with approved change management processes and security approval workflows +4. Review Azure AD sign-in logs for any unusual authentication patterns or successful logins following the policy change +5. Assess if the modified policy creates security gaps, weakens access controls, or allows unauthorized access paths +6. Cross-reference the timing of policy changes with any recent security incidents or suspicious activities +7. Consider immediately reverting unauthorized changes and implementing stronger approval workflows for future policy modifications +8. Audit all other conditional access policies for similar unauthorized modifications +', '["https://danielchronlund.com/2022/01/07/the-attackers-guide-to-azure-ad-conditional-access/","https://learn.microsoft.com/en-us/entra/identity/conditional-access/overview","https://attack.mitre.org/techniques/T1078/"]', '(equalsIgnoreCase("log.category", "AuditLogs") || contains("log.category", "Audit")) && (oneOf("log.operationName", ["Update policy", "Delete policy", "Delete conditional access policy", "Update conditional access policy"]) || contains("log.operationName", "conditionalAccessPolicies")) && equals("log.resultType", "0")', '2026-01-29 16:18:54.353112', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1596, 'Azure Container Registry Critical Vulnerability Detected', 3, 3, 2, 'Initial Access', 'T1190 - Exploit Public-Facing Application', 'Detects critical or high severity vulnerabilities in container images within Azure Container Registry, including newly pushed images or recently scanned images with security issues. + +Next Steps: +1. 
Review the vulnerability details and affected container image +2. Assess the impact and exploitability of the vulnerability +3. Update or patch the container image to address the vulnerability +4. Implement security scanning in CI/CD pipeline to prevent future issues +5. Consider quarantining affected images until patched +6. Monitor for any exploitation attempts against vulnerable containers +', '["https://learn.microsoft.com/en-us/azure/defender-for-cloud/defender-for-container-registries-introduction","https://attack.mitre.org/techniques/T1190/"]', 'contains("log.OperationName", "Microsoft.ContainerRegistry") && (equalsIgnoreCase("log.ResultType", "VulnerabilityFound") || equalsIgnoreCase("log.Category", "SecurityAssessment")) && (oneOf("severity", ["critical", "high"]) || greaterOrEqual("statusCode", "400"))', '2026-01-29 16:18:55.466506', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1597, 'Azure Key Vault Modified', 3, 3, 2, 'Unsecured Credentials', 'Credential Access', 'Identifies modifications to a Key Vault in Azure. The Key Vault is a service that safeguards encryption keys and secrets like certificates, connection strings, and passwords. Because this data is sensitive and business critical, access to key vaults should be secured to allow only authorized applications and users. 
Adversaries may modify Key Vault configurations to weaken security controls, add unauthorized access policies, or change network rules to facilitate credential theft and unauthorized access to sensitive secrets.', '["https://attack.mitre.org/techniques/T1552/","https://attack.mitre.org/tactics/TA0006/","https://learn.microsoft.com/en-us/azure/key-vault/general/security-features"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.KEYVAULT/VAULTS/WRITE") || contains("log.operationName", "Microsoft.KeyVault/vaults/write")) && (equals("log.resultType", "0"))', '2026-01-29 16:18:56.863184', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1598, 'Azure Storage Account Key Regenerated', 3, 2, 2, 'Application Access Token', 'Credential Access', 'Identifies a rotation to storage account access keys in Azure. Regenerating access keys can affect any applications or Azure services that are dependent on the storage account key. Adversaries may regenerate a key as a means of acquiring credentials to access systems and resources, potentially locking out legitimate users while maintaining their own access. 
This technique can be used to establish persistence, disrupt operations, or facilitate data exfiltration from Azure Storage.', '["https://attack.mitre.org/techniques/T1528/","https://attack.mitre.org/tactics/TA0006/","https://learn.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.STORAGE/STORAGEACCOUNTS/REGENERATEKEY/ACTION") || contains("log.operationName", "Microsoft.Storage/storageAccounts/regeneratekey/action")) && (equals("log.resultType", "0"))', '2026-01-29 16:18:57.886582', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1599, 'Azure Defender for Cloud Critical Security Alert', 3, 3, 2, 'Intrusion Detection', 'T1001 - Initial Access', 'Detects critical and high severity alerts from Microsoft Defender for Cloud (formerly Azure Security Center) indicating potential active threats, malware infections, successful breach attempts, or suspicious activities that require immediate response. These alerts leverage advanced threat detection, behavioral analytics, and machine learning to identify security incidents across Azure resources. + +Next Steps: +1. Review the full alert details in Microsoft Defender for Cloud portal +2. Verify the affected resource and assess the scope of potential compromise +3. Check for related suspicious activities on the affected resource and correlated events +4. Implement immediate containment measures if threat is confirmed +5. Review security policies and configurations for the affected resource +6. 
Document the incident and update security procedures as needed +7. Investigate the CompromisedEntity and Entities fields for IOCs +', '["https://learn.microsoft.com/en-us/azure/defender-for-cloud/alerts-overview","https://learn.microsoft.com/en-us/azure/defender-for-cloud/alerts-schemas","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/securityalert","https://attack.mitre.org/tactics/TA0001/"]', 'equalsIgnoreCase("log.AlertSeverity", "High") || equalsIgnoreCase("severity", "High") && (equalsIgnoreCase("log.ProductName", "Azure Security Center") || equalsIgnoreCase("log.ProductName", "Microsoft Defender for Cloud"))', '2026-01-29 16:18:59.231442', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1600, 'Azure Diagnostic Settings Deletion', 1, 3, 3, 'Defense Evasion', 'T1562.008 - Impair Defenses: Disable Cloud Logs', 'Detects the deletion of diagnostic settings in Azure, which are critical for sending platform logs, metrics, and activity data to destinations like Log Analytics workspaces, Event Hubs, or storage accounts. Adversaries delete diagnostic settings to evade detection by disabling security monitoring and audit logging capabilities. 
+ +This technique is commonly observed when attackers: +- Attempt to hide malicious activities from security teams +- Disable logging before executing destructive operations +- Remove evidence trails of their presence in the environment +- Prevent detection of lateral movement or data exfiltration + +Legitimate deletions are rare and typically occur only during: +- Infrastructure decommissioning or major reconfigurations +- Cost optimization initiatives (but should be heavily scrutinized) +- Migration to new monitoring solutions + +Next Steps: +1. Immediately verify if the deletion was authorized and documented +2. Identify who performed the operation and from which IP address +3. Check if diagnostic settings were immediately recreated (potential test) +4. Review recent activities on the affected resource for suspicious behavior +5. Verify if other resources had their diagnostic settings deleted +6. Restore diagnostic settings immediately to resume monitoring +7. Investigate the caller''s account for potential compromise +8. 
Check for other defense evasion techniques in the timeline +', '["https://attack.mitre.org/techniques/T1562/008/","https://attack.mitre.org/tactics/TA0005/","https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/diagnostic-settings","https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.INSIGHTS/DIAGNOSTICSETTINGS/DELETE") || contains("log.operationName", "Delete diagnostic setting")) && (equalsIgnoreCase("log.resultType", "0"))', '2026-01-29 16:19:00.321909', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1601, 'Application Gateway WAF Security Alerts', 3, 3, 2, 'Initial Access', 'T1190 - Exploit Public-Facing Application', 'Detects Web Application Firewall alerts from Azure Application Gateway indicating potential web attacks or malicious activity. This rule triggers when WAF blocks or detects suspicious requests that match OWASP security rules, including SQL injection, cross-site scripting (XSS), command injection, and other common web exploits. + +**Next Steps:** +1. Review the specific WAF rule ID and message details to understand the attack type +2. Analyze the source IP address for reputation and geographic location +3. Examine the request URL, headers, and payload for attack indicators +4. Check for additional requests from the same source IP within the time window +5. Verify if this is a legitimate application behavior or actual attack attempt +6. Consider implementing additional WAF rules or IP blocking if confirmed malicious +7. 
Review application logs for any successful bypass attempts +8. Investigate ruleId and ruleGroup to understand the specific OWASP rule triggered +', '["https://learn.microsoft.com/en-us/azure/web-application-firewall/ag/web-application-firewall-logs","https://learn.microsoft.com/en-us/azure/application-gateway/application-gateway-diagnostics","https://attack.mitre.org/techniques/T1190/"]', 'equalsIgnoreCase("log.category", "ApplicationGatewayFirewallLog") && +(equalsIgnoreCase("log.properties.action", "Blocked") || +equalsIgnoreCase("log.properties.action", "Matched") || +!equals("log.properties.ruleId", "")) +', '2026-01-29 16:19:01.377231', true, true, 'origin', '["origin.ip","log.properties.ruleId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1602, 'Azure Event Hub Deletion', 1, 3, 3, 'Defense Evasion', 'T1562.008 - Impair Defenses: Disable Cloud Logs', 'Detects the deletion of an Azure Event Hub, which is a critical event processing service that ingests and processes large volumes of events, logs, and telemetry data. Event Hubs are commonly used for security monitoring, log aggregation, and SIEM integration. Adversaries may delete Event Hubs to evade detection by disrupting log collection pipelines and preventing security events from reaching monitoring systems. 
+ +Threat Context: +- Event Hubs are often used to stream logs to SIEM solutions +- Deletion interrupts security monitoring and incident detection capabilities +- Can be part of anti-forensics activities to cover tracks +- May indicate an attempt to blind security operations before further attacks + +Legitimate Use Cases: +- Decommissioning unused Event Hubs during cost optimization +- Infrastructure cleanup during application retirement +- Migration to new Event Hub namespaces or different logging solutions +- Testing and development environment cleanup + +Suspicious Indicators: +- Event Hub actively receiving logs suddenly deleted +- Deletion performed by non-administrative accounts +- Multiple Event Hubs deleted in quick succession +- Deletion outside change management windows +- Deletion from unusual locations or IP addresses +- Event Hub connected to production SIEM or security monitoring + +Next Steps: +1. Verify if the deletion was authorized via change management process +2. Identify who performed the deletion (caller) and their role +3. Check if the Event Hub was actively receiving security logs +4. Determine the impact on security monitoring and log collection +5. Review recent authentication activity for the caller account +6. Check for other suspicious activities in the timeline (diagnostic settings changes, etc.) +7. Verify if backups of the Event Hub configuration exist +8. If unauthorized, restore the Event Hub and investigate for account compromise +9. 
Review authorization rules and access policies for remaining Event Hubs +', '["https://attack.mitre.org/techniques/T1562/008/","https://attack.mitre.org/tactics/TA0005/","https://learn.microsoft.com/en-us/azure/event-hubs/monitor-event-hubs","https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.EVENTHUB/NAMESPACES/EVENTHUBS/DELETE") || contains("log.operationName", "Delete EventHub")) && (equalsIgnoreCase("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS"))', '2026-01-29 16:19:02.478107', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1603, 'Azure Firewall Policy Deletion', 1, 3, 3, 'Defense Evasion', 'T1562.004 - Impair Defenses: Disable or Modify System Firewall', 'Detects the deletion of an Azure Firewall Policy, which defines network and application rules, threat intelligence settings, and security configurations for Azure Firewall. Adversaries delete firewall policies to disable network security controls, eliminate barriers to lateral movement, enable unrestricted outbound communication, or facilitate data exfiltration without detection. 
+ +Threat Context: +- Firewall policies control network traffic filtering and security rules +- Deletion removes critical network security controls and visibility +- Often precedes lateral movement, data exfiltration, or command-and-control establishment +- Can be part of ransomware attacks to disable network segmentation +- May indicate preparation for destructive attacks by removing protective barriers + +Azure Firewall Policies Control: +- Network filtering rules (allow/deny traffic between networks) +- Application rules (L7 filtering based on FQDNs, URLs, HTTP/HTTPS) +- NAT rules (destination NAT for inbound connections) +- Threat intelligence-based filtering +- IDPS (Intrusion Detection and Prevention) settings +- TLS inspection configurations + +Legitimate Use Cases: +- Migration to new firewall policies or consolidated policy structures +- Decommissioning of test/development environments +- Replacement during security architecture redesign +- Cleanup of unused or deprecated policies + +Suspicious Indicators: +- Active production firewall policy deleted +- Deletion by non-network/security administrators +- Multiple firewall policies deleted in sequence +- Deletion during off-hours or outside change windows +- Policy protecting critical workloads suddenly removed +- Deletion followed by suspicious network activity + +Next Steps: +1. Immediately verify if the deletion was authorized via change management +2. Identify who performed the deletion and verify their administrative role +3. Determine which Azure Firewalls were using the deleted policy +4. Check if affected firewalls now have no policy (completely unprotected) +5. Review the deleted policy''s rules to understand security impact +6. Verify if a replacement policy was immediately applied +7. Check for suspicious network traffic patterns after deletion +8. Look for other security control modifications in the timeline +9. If unauthorized, restore the policy from backup immediately +10. 
Investigate the caller''s account for potential compromise +', '["https://attack.mitre.org/techniques/T1562/004/","https://attack.mitre.org/tactics/TA0005/","https://learn.microsoft.com/en-us/azure/firewall/policy-rule-sets","https://learn.microsoft.com/en-us/azure/firewall-manager/policy-overview"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.NETWORK/FIREWALLPOLICIES/DELETE") || contains("log.operationName", "Delete Firewall Policy")) && (equalsIgnoreCase("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS"))', '2026-01-29 16:19:03.534595', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1604, 'Azure Network Watcher Deletion', 1, 3, 3, 'Defense Evasion', 'T1562.008 - Impair Defenses: Disable Cloud Logs', 'Detects the deletion of an Azure Network Watcher instance, which provides critical network monitoring, diagnostic, and visibility tools for Azure virtual networks. Network Watcher enables flow logs, packet capture, connection monitoring, network topology visualization, and NSG diagnostics. Adversaries delete Network Watcher instances to blind network monitoring capabilities, hide lateral movement, evade detection of data exfiltration, and eliminate network forensic capabilities. 
+ +Threat Context: +- Network Watcher provides visibility into network traffic patterns and connections +- Deletion disables NSG flow logs, stopping network traffic logging +- Removes packet capture capabilities needed for incident investigation +- Eliminates connection monitoring and network topology visibility +- Often precedes lateral movement or data exfiltration activities +- Can be part of anti-forensics to eliminate evidence of network activity + +Network Watcher Capabilities Lost: +- NSG Flow Logs: Track all traffic flowing through network security groups +- Packet Capture: Capture network packets for forensic analysis +- Connection Monitor: Monitor connectivity and latency between resources +- IP Flow Verify: Test NSG rules and diagnose connectivity issues +- Next Hop: Determine routing paths and identify routing problems +- VPN Diagnostics: Troubleshoot VPN gateway and connection issues +- Network Topology: Visualize network architecture and dependencies +- Traffic Analytics: Analyze flow log data for security insights + +Legitimate Use Cases: +- Region consolidation or migration to different monitoring solutions +- Cost optimization by disabling in unused regions +- Decommissioning of development/test environments +- Replacement during infrastructure redesign + +Suspicious Indicators: +- Network Watcher deleted in production regions +- Deletion by non-network/security administrators +- Multiple Network Watchers deleted across regions +- Deletion during off-hours or outside maintenance windows +- Deletion preceded or followed by suspicious network activity +- NSG flow logs were actively collecting security data +- No replacement monitoring solution configured + +Next Steps: +1. Immediately verify if deletion was authorized via change management +2. Identify who performed the deletion and verify their role +3. Determine which regions lost Network Watcher coverage +4. Check if NSG flow logs were active and where they were stored +5. 
Verify if any security incidents occurred around deletion time +6. Review recent network activity for suspicious patterns +7. Check for other security control modifications in timeline +8. Examine the caller''s account for potential compromise +9. If unauthorized, immediately recreate Network Watcher instances +10. Restore NSG flow logging and verify log retention +11. Review stored flow logs for evidence of malicious activity before deletion +', '["https://attack.mitre.org/techniques/T1562/008/","https://attack.mitre.org/tactics/TA0005/","https://learn.microsoft.com/en-us/azure/network-watcher/network-watcher-overview","https://learn.microsoft.com/en-us/azure/network-watcher/network-watcher-monitoring-overview"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.NETWORK/NETWORKWATCHERS/DELETE") || contains("log.operationName", "Delete Network Watcher")) && (equalsIgnoreCase("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS"))', '2026-01-29 16:19:04.640528', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1605, 'Azure Blob Container Access Level Modification', 3, 2, 1, 'Exfiltration', 'T1537 - Transfer Data to Cloud Account', 'Detects modifications to Azure Blob Storage container access levels, particularly changes that enable anonymous public read access. While anonymous public access is a legitimate feature for sharing data broadly (e.g., CDN content, public downloads), it presents a critical security risk when applied to containers with sensitive data. 
Adversaries may modify container access levels to exfiltrate data, establish command-and-control infrastructure, or expose confidential information without authentication. + +Threat Context: +- Container access level changes can expose sensitive data publicly +- Anonymous access allows data access without authentication +- Commonly used for data exfiltration in breached environments +- Can be leveraged to stage malware or C2 infrastructure +- May indicate unauthorized data disclosure or insider threats + +Azure Blob Container Access Levels: +- Private (No public access): Default, requires authentication +- Blob (Anonymous read for blobs): Individual blobs accessible publicly +- Container (Anonymous read for container and blobs): Full container browsing enabled + +Legitimate Use Cases: +- Publishing static website content or assets +- Sharing large datasets for public consumption +- CDN origin configuration for public content delivery +- Open-source project artifact hosting +- Public documentation or media distribution + +Suspicious Indicators: +- Production storage accounts suddenly made public +- Containers with "confidential", "private", "backup", "logs" in names made public +- Access level changes during off-hours +- Changes by non-storage/DevOps administrators +- Multiple containers made public in succession +- Recently created containers immediately made public +- Containers in finance, HR, or sensitive workload resource groups + +Next Steps: +1. Immediately verify if the access change was authorized +2. Identify which container was modified and review its contents +3. Determine if the container contains sensitive or confidential data +4. Check who performed the modification (caller) and their role +5. Review recent access logs for anonymous access attempts +6. If unauthorized and sensitive, immediately revert to private access +7. Audit other containers in the same storage account for similar changes +8. 
Check for data download activity from public IPs after the change +9. Review the caller''s account for potential compromise +10. Verify if allowBlobPublicAccess is disabled at storage account level +11. Consider implementing Azure Policy to prevent public access +12. Document incident if sensitive data was exposed +', '["https://attack.mitre.org/techniques/T1537/","https://attack.mitre.org/tactics/TA0010/","https://learn.microsoft.com/en-us/azure/storage/blobs/anonymous-read-access-configure","https://learn.microsoft.com/en-us/azure/storage/blobs/anonymous-read-access-prevent"]', '(equalsIgnoreCase("log.category", "Administrative") || contains("log.category", "Activity")) && (equalsIgnoreCase("log.operationName", "MICROSOFT.STORAGE/STORAGEACCOUNTS/BLOBSERVICES/CONTAINERS/WRITE") || contains("log.operationName", "Update container")) && (equalsIgnoreCase("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS"))', '2026-01-29 16:19:05.781178', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1606, 'ExpressRoute Configuration Changes', 2, 3, 3, 'Discovery', 'T1046 - Network Service Discovery', 'Detects configuration changes to Azure ExpressRoute circuits which could indicate unauthorized network modifications or attempts to bypass security controls. ExpressRoute circuits provide private connectivity between Azure and on-premises infrastructure, making unauthorized changes particularly concerning for network security. + +Next Steps: +1. Verify the change was authorized and performed by legitimate administrators +2. Review the specific configuration changes made to the ExpressRoute circuit +3. 
Check if the change aligns with documented change management procedures +4. Investigate the source IP and user account that performed the change +5. Validate that no unauthorized access to critical network segments occurred +6. Review related network logs for any suspicious activity following the change +', '["https://learn.microsoft.com/en-us/azure/expressroute/monitor-expressroute","https://attack.mitre.org/techniques/T1046/"]', 'oneOf("log.operationName", ["Microsoft.Network/expressRouteCircuits/write", "Microsoft.Network/expressRouteCircuits/delete"]) || (contains("log.resourceId", "/expressRouteCircuits/") && contains("log.operationName", "write"))', '2026-01-29 16:19:07.019042', true, true, 'origin', '["log.resourceId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1607, 'Azure Service Principal Credentials Added', 3, 3, 2, 'Persistence', 'Account Manipulation: Additional Cloud Credentials', 'Detects when new credentials (certificates or secrets) are added to Azure service principals through Azure AD/Entra ID Audit Logs. + +**Security Context:** +Adversaries may add credentials to service principals to maintain persistent access to victim Azure accounts. By hijacking an application with granted permissions through adding rogue secrets or certificates, attackers can access protected data and bypass MFA requirements. This technique is commonly used after initial compromise to establish long-term persistence. + +**Detection Logic:** +This rule monitors AuditLogs for successful "Add service principal" operations, which indicate new credentials being added to service principals. The operation captures both certificate and secret additions. 
+ +**Investigation Steps:** +1. Identify the actor who added the credentials: Check log.propertiesInitiatedBy for the user or service principal +2. Review the target service principal: Examine log.propertiesTargetResources for the affected service principal name and ID +3. Verify if the action was authorized: Correlate with change management tickets +4. Check service principal permissions: Review what resources this service principal can access +5. Examine recent sign-in activity: Look for unusual authentication patterns using the service principal +6. Review credential type: Determine if a certificate or secret was added via log.propertiesModifiedProperties + +**Recommended Actions:** +- If unauthorized, immediately revoke the newly added credentials +- Review and rotate all credentials for the affected service principal +- Audit all resources accessible by the service principal for signs of compromise +- Enable alerts for future credential additions to critical service principals +- Implement conditional access policies and privileged identity management + +**MITRE ATT&CK Reference:** T1098.001 - Account Manipulation: Additional Cloud Credentials + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- Service Principal credentials: https://learn.microsoft.com/en-us/azure/active-directory/develop/howto-create-service-principal-portal +', '["https://attack.mitre.org/techniques/T1098/001/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/azure/active-directory/develop/howto-create-service-principal-portal"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +contains("log.operationName", "Add service principal") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:08.188259', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, 
rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1608, 'Azure Function App Security Alert', 3, 3, 2, 'Credential Access', 'T1078 - Valid Accounts', 'Detects security-related errors and exceptions in Azure Function Apps from the FunctionAppLogs table, including authentication failures, authorization denials, execution exceptions, and suspicious patterns. This rule identifies potential security incidents such as credential access attempts, unauthorized function invocations, code injection attempts, or misconfigured security settings. + +Threat Context: +- Function Apps often have access to sensitive data and backend systems +- Authentication/authorization errors may indicate credential stuffing or brute force +- Exception details can reveal code injection attempts or exploitation +- Failed executions may indicate tampering with function code or configurations +- Function Apps can be abused for lateral movement or data exfiltration + +Azure Functions Log Categories (host.json): +- Host.Results: Function execution results and performance metrics +- Host.Aggregator: Aggregated performance and invocation metrics +- Function: Individual function execution logs and custom logging +- Host.Executor: Function host execution details + +What This Rule Detects: +- Exceptions with populated ExceptionDetails field +- Warning level logs (Level >= 3) in Function and Host.Results categories +- Error level logs (Level >= 4) in Function and Host.Results categories +- Authentication and authorization related errors +- Function execution failures that may indicate security issues + +Legitimate Scenarios (Reduce False Positives): +- Transient connectivity issues to dependencies +- Expected validation errors from user input +- Planned maintenance 
or deployment errors +- Development/testing activities in non-production environments + +Suspicious Indicators: +- Repeated authentication failures from same source +- Authorization errors accessing sensitive functions +- SQL injection or command injection patterns in exceptions +- Unusual error rates or patterns during off-hours +- Errors from unexpected geographic locations +- Function invocations with suspicious payloads + +Investigation and Response Steps: +1. Review the ExceptionDetails and Message fields for error context +2. Identify the specific FunctionName experiencing errors +3. Check the source IP address and correlate with threat intelligence +4. Query for error frequency and patterns: + FunctionAppLogs | where FunctionName == "" | summarize count() by bin(TimeGenerated, 5m) +5. Review recent code deployments or configuration changes +6. Check Function App authentication settings (Easy Auth, API keys, managed identities) +7. Verify if function keys or connection strings were recently exposed +8. Examine the HostInstanceId to identify affected instances +9. Review Application Insights for correlated traces and dependencies +10. If credential abuse suspected, rotate function keys and connection strings +11. Check for unauthorized code deployments via ARM templates or DevOps pipelines +12. Review network security group rules and private endpoint configurations +13. 
Enable detailed logging temporarily for deeper investigation if needed +', '["https://learn.microsoft.com/en-us/azure/azure-functions/monitor-functions","https://learn.microsoft.com/en-us/azure/azure-functions/configure-monitoring","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/functionapplogs","https://attack.mitre.org/techniques/T1078/"]', '(contains("log.Category", "Host.Results") || contains("log.Category", "Function")) && (greaterOrEqual("log.Level", 3) || exists("log.ExceptionDetails"))', '2026-01-29 16:19:09.247701', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1609, 'Azure Resource Group Deletion', 2, 3, 3, 'Impact', 'T1485 - Data Destruction', 'Detects the successful deletion of Azure resource groups through Azure Activity Logs. + +**Security Context:** +Resource group deletion is a high-impact administrative action that permanently removes all contained resources. Adversaries may delete resource groups to destroy evidence, disrupt operations, or cause financial impact. This is an irreversible action that can result in significant data loss and service disruption. + +**Detection Logic:** +This rule monitors Activity Logs for successful resource group deletion operations. The operation name "MICROSOFT.RESOURCES/SUBSCRIPTIONS/RESOURCEGROUPS/DELETE" specifically identifies when an entire resource group is removed from a subscription. + +**Investigation Steps:** +1. Identify the actor: Check log.propertiesCaller for the user or service principal who performed the deletion +2. Review the deleted resource group: Examine log.resourceId and log.resourceGroupName for the affected resources +3. 
Check authorization: Verify if the deletion was authorized through change management procedures +4. Assess impact: Determine what resources were contained in the deleted resource group +5. Review recent activity: Look for suspicious authentication or privilege escalation events before the deletion +6. Check for bulk operations: Identify if multiple resource groups were deleted in a short timeframe +7. Examine timing: Verify if the deletion occurred during normal business hours + +**Recommended Actions:** +- If unauthorized, immediately investigate the actor''s account for compromise +- Review Azure Resource Graph changes history to identify all deleted resources +- Check if resource group locks were bypassed or removed before deletion +- Verify backup availability and initiate recovery procedures if needed +- Enable resource locks on critical resource groups to prevent accidental or malicious deletion +- Implement approval workflows for resource group deletion operations +- Enable Azure Policy to enforce retention policies + +**Note:** Azure Resource Groups use soft-delete for certain resource types. Recovery may be possible within the retention period depending on the resources contained. 
+ +**MITRE ATT&CK Reference:** T1485 - Data Destruction + +**Azure Documentation:** +- AzureActivity table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity +- Resource Group deletion: https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/delete-resource-group +', '["https://attack.mitre.org/techniques/T1485/","https://attack.mitre.org/tactics/TA0040/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity","https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/delete-resource-group"]', 'equalsIgnoreCase("log.category", "Administrative") && +equalsIgnoreCase("log.operationName", "MICROSOFT.RESOURCES/SUBSCRIPTIONS/RESOURCEGROUPS/DELETE") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:10.352945', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1610, 'Azure Active Directory High Risk Sign-in', 3, 3, 2, 'Initial Access', 'T1078 - Valid Accounts', 'Identifies high risk Azure Active Directory (AD) sign-ins by leveraging Microsoft''s Identity Protection machine learning and heuristics. Identity Protection categorizes risk into three tiers: low, medium, and high. While Microsoft does not provide specific details about how risk is calculated, each level brings higher confidence that the user or sign-in is compromised. 
This rule triggers on ''high'' risk level sign-ins, which indicate strong indicators of compromise such as impossible travel, anonymous IP usage, or leaked credentials.', '["https://attack.mitre.org/techniques/T1078/","https://learn.microsoft.com/en-us/entra/id-protection/concept-identity-protection-risks","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/signinlogs"]', 'equalsIgnoreCase("log.category", "SignInLogs") && equalsIgnoreCase("log.properties.RiskLevelDuringSignIn", "high") && equalsIgnoreCase("log.propertiesTokenIssuerType", "AzureAD") && equals("log.resultType", "0")', '2026-01-29 16:19:11.521069', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1611, 'Azure Active Directory PowerShell Sign-in', 1, 2, 3, 'Initial Access', 'T1078 - Valid Accounts', 'Identifies a sign-in using the Azure Active Directory PowerShell module. PowerShell for Azure Active Directory allows for managing settings from the command line, which is intended for users who are members of an admin role. 
This activity could indicate legitimate administrative access or potential unauthorized access if the account has been compromised.', '["https://attack.mitre.org/techniques/T1078/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/signinlogs","https://learn.microsoft.com/en-us/entra/identity/monitoring-health/reference-azure-monitor-sign-ins-log-schema"]', 'equalsIgnoreCase("log.category", "SignInLogs") && equalsIgnoreCase("log.propertiesAppDisplayName", "Azure Active Directory PowerShell") && equalsIgnoreCase("log.propertiesTokenIssuerType", "AzureAD") && equals("log.resultType", "0") && equalsIgnoreCase("log.resultSignature", "SUCCESS")', '2026-01-29 16:19:12.599594', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1612, 'Azure Application Credential Modification', 3, 3, 2, 'Defense Evasion', 'T1098.001 - Account Manipulation: Additional Cloud Credentials', 'Detects when a new credential (certificate or secret) is added to an Azure AD application. Applications can use certificates or secret strings to authenticate when requesting tokens. Adversaries may add additional authentication credentials to existing applications to establish persistence, evade defenses, or enable privilege escalation by impersonating legitimate applications. + +This technique is commonly used in post-compromise scenarios where attackers: +- Add secrets to high-privilege applications to maintain access +- Create backdoor authentication methods to evade MFA requirements +- Establish persistence mechanisms that survive password resets +- Enable token-based authentication for automated attacks + +Next Steps: +1. 
Verify if the credential modification was authorized and expected +2. Identify who performed the operation (check InitiatedBy field) +3. Review the affected application''s permissions and access scope +4. Check for subsequent suspicious sign-in activity using the application +5. Audit other applications for similar unauthorized modifications +6. If unauthorized, immediately remove the suspicious credentials +7. Review application usage logs for potential abuse +8. Investigate the source IP address and user agent of the modification +', '["https://attack.mitre.org/techniques/T1098/001/","https://attack.mitre.org/tactics/TA0005/","https://learn.microsoft.com/en-us/azure/active-directory/reports-monitoring/concept-audit-logs","https://learn.microsoft.com/en-us/entra/identity/monitoring-health/reference-audit-activities"]', '(equalsIgnoreCase("log.category", "AuditLogs") || contains("log.category", "Audit")) && (contains("log.operationName", "Certificates and secrets management") || equalsIgnoreCase("log.operationName", "Add service principal credentials") || equalsIgnoreCase("log.operationName", "Update application") || equalsIgnoreCase("log.operationName", "Update application - Certificates and secrets management")) && (equalsIgnoreCase("log.resultType", "0"))', '2026-01-29 16:19:13.655604', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1613, 'Possible Consent Grant Attack via Azure-Registered Application', 3, 3, 2, 'Initial Access', 'T1566 - Phishing', 'Detects when a user grants permissions to an Azure-registered application or when an administrator grants tenant-wide permissions to an application. 
An adversary may create an Azure-registered application that requests access to data such as contact information, email, or documents. Consent grant attacks are commonly used in phishing campaigns where malicious OAuth applications trick users into granting excessive permissions, enabling data exfiltration or unauthorized access to organizational resources.', '["https://attack.mitre.org/techniques/T1566/","https://learn.microsoft.com/en-us/entra/identity/enterprise-apps/manage-consent-requests","https://learn.microsoft.com/en-us/defender-cloud-apps/investigate-risky-oauth"]', '(equalsIgnoreCase("log.category", "AuditLogs") || contains("log.category", "Audit")) && equalsIgnoreCase("log.operationName", "Consent to application") && equals("log.resultType", "0")', '2026-01-29 16:19:14.772015', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1614, 'Azure Key Vault Excessive Access Detected', 3, 2, 1, 'Collection', 'T1530 - Data from Cloud Storage Object', 'Detects unusual spikes in Azure Key Vault access patterns. Monitors for multiple secret retrieval operations from the same source, which could indicate credential harvesting or data exfiltration attempts. + +Next Steps: +1. Investigate the source IP address and verify if it''s a legitimate system or user +2. Review the specific secrets/keys being accessed and their criticality +3. Check for any recent changes to Key Vault access policies +4. Correlate with user authentication logs to identify the account responsible +5. Verify if the access pattern aligns with normal business operations +6. 
Consider implementing additional access controls or monitoring if suspicious activity is confirmed +', '["https://learn.microsoft.com/en-us/azure/key-vault/general/logging","https://attack.mitre.org/techniques/T1530/"]', 'equalsIgnoreCase("log.category", "AuditEvent") && oneOf("log.operationName", ["SecretGet", "SecretList", "KeyGet"]) && exists("origin.ip")', '2026-01-29 16:19:15.960290', true, true, 'origin', '["origin.ip","log.resourceId"]', '[{"indexPattern":"v11-log-azure-*","with":[{"field":"origin.ip.keyword","operator":"filter_term","value":"{{.origin.ip}}"},{"field":"log.category.keyword","operator":"filter_term","value":"AuditEvent"}],"or":null,"within":"now-10m","count":20}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1615, 'Azure External Guest User Invitation', 2, 2, 1, 'Valid Accounts', 'Initial Access', 'Identifies an invitation to an external user in Azure Active Directory (Azure AD / Microsoft Entra ID). Azure AD B2B collaboration allows you to invite people from outside your organization to be guest users in your cloud account and grant them access to resources. Unless there is a business need to provision guest access, it is best practice to avoid creating guest users. Guest users could potentially be overlooked indefinitely, leading to a potential security vulnerability. 
Adversaries may leverage guest accounts to establish initial access, maintain persistence, or move laterally within the organization.', '["https://attack.mitre.org/techniques/T1078/","https://learn.microsoft.com/en-us/entra/external-id/what-is-b2b","https://learn.microsoft.com/en-us/entra/identity/users/users-restrict-guest-permissions"]', '(equalsIgnoreCase("log.category", "AuditLogs") || contains("log.category", "Audit")) && oneOf("log.operationName", ["Invite external user", "Invite user"]) && equals("log.resultType", "0")', '2026-01-29 16:19:17.137532', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1616, 'Azure Automation Account Created', 2, 3, 2, 'Persistence', 'Valid Accounts: Cloud Accounts', 'Detects the creation of Azure Automation accounts through Azure Activity Logs. + +**Security Context:** +Azure Automation accounts provide a platform to automate management tasks and orchestrate actions across Azure and hybrid environments. Adversaries may create Automation accounts to establish persistence by deploying malicious runbooks, webhooks, or scheduled tasks that execute with privileged credentials. This allows them to maintain long-term access and execute commands without direct interactive login. + +**Detection Logic:** +This rule monitors Activity Logs for successful creation or update operations on Automation accounts. The operation "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/WRITE" captures both new account creation and modifications to existing accounts. + +**Investigation Steps:** +1. Identify the creator: Check log.propertiesCaller for the user or service principal who created the account +2. 
Review account configuration: Examine the Automation account name and resource group (log.resourceId) +3. Verify authorization: Confirm if the creation was part of legitimate infrastructure deployment +4. Inspect runbooks: Check if any runbooks have been created or imported into the new account +5. Review webhooks: Identify any webhooks configured that could be triggered externally +6. Check schedules: Look for scheduled tasks that may execute malicious code +7. Examine credentials: Verify if credentials or certificates have been added to the account +8. Review RBAC assignments: Check what permissions were granted to the Automation account''s managed identity +9. Correlate with other events: Look for suspicious authentication or privilege escalation before creation + +**Recommended Actions:** +- If unauthorized, immediately disable the Automation account +- Review and audit all runbooks, webhooks, and schedules within the account +- Check Run As accounts and credential assets for suspicious additions +- Examine execution history for any jobs that have already run +- Revoke any managed identity or Run As account permissions +- Enable diagnostic logging for the Automation account +- Implement approval workflows for Automation account creation +- Use Azure Policy to restrict Automation account creation to authorized users + +**Note:** The WRITE operation captures both creation and updates. To distinguish new accounts, correlate with Resource Graph changes or check for absence of previous activity. 
+ +**MITRE ATT&CK Reference:** T1078.004 - Valid Accounts: Cloud Accounts + +**Azure Documentation:** +- AzureActivity table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity +- Azure Automation security: https://learn.microsoft.com/en-us/azure/automation/automation-security-overview +', '["https://attack.mitre.org/techniques/T1078/004/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity","https://learn.microsoft.com/en-us/azure/automation/automation-security-overview"]', 'equalsIgnoreCase("log.category", "Administrative") && +equalsIgnoreCase("log.operationName", "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/WRITE") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:18.236122', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1617, 'Azure Automation Runbook Created or Modified', 2, 3, 2, 'Persistence', 'Command and Scripting Interpreter', 'Detects creation, modification, or publishing of Azure Automation runbooks through Azure Activity Logs. + +**Security Context:** +Azure Automation runbooks are scripts (PowerShell, Python, etc.) that execute automated tasks with assigned credentials and permissions. Adversaries may create malicious runbooks or modify existing ones to execute arbitrary code, establish persistence, perform lateral movement, or exfiltrate data. Since runbooks can run on schedules or be triggered via webhooks, they provide a powerful mechanism for maintaining long-term access without interactive login. 
+ +**Detection Logic:** +This rule monitors Activity Logs for three critical runbook operations: +- DRAFT/WRITE: Creating or updating a draft runbook +- WRITE: Creating or updating a published runbook +- PUBLISH/ACTION: Publishing a draft runbook to make it executable + +All three operations indicate potential malicious activity when performed by unauthorized actors. + +**Investigation Steps:** +1. Identify the actor: Check log.propertiesCaller for who created/modified the runbook +2. Review runbook details: Examine log.resourceId for runbook name and Automation account +3. Verify authorization: Confirm if the action was part of legitimate automation deployment +4. Inspect runbook content: Review the actual script code for malicious commands +5. Check runbook type: Identify if it''s PowerShell, Python, or other scripting language +6. Review execution history: Look for any jobs that have already executed this runbook +7. Examine triggers: Check for webhooks or schedules that will execute the runbook +8. Analyze credentials used: Verify what Run As accounts or credential assets the runbook accesses +9. Check network activity: Look for suspicious connections if the runbook has executed +10. Correlate timing: Check if creation/modification follows suspicious authentication events + +**Recommended Actions:** +- If unauthorized, immediately unpublish or delete the malicious runbook +- Review runbook code for indicators of compromise (C2 connections, data exfiltration, etc.) 
+- Disable any associated webhooks or schedules +- Revoke credentials that the runbook may have accessed +- Enable version control for runbooks to track all changes +- Implement code review requirements for runbook modifications +- Use Azure Policy to enforce runbook creation restrictions +- Enable diagnostic logging for all Automation accounts +- Monitor runbook job execution logs for suspicious activity + +**Common Malicious Patterns:** +- Runbooks that create new users or modify permissions +- Scripts that exfiltrate data to external storage +- Code that establishes reverse shells or C2 connections +- Runbooks that disable security controls or delete logs + +**MITRE ATT&CK Reference:** T1059 - Command and Scripting Interpreter + +**Azure Documentation:** +- AzureActivity table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity +- Runbook management: https://learn.microsoft.com/en-us/azure/automation/manage-runbooks +', '["https://attack.mitre.org/techniques/T1059/","https://azure.microsoft.com/en-in/blog/azure-automation-runbook-management/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity","https://learn.microsoft.com/en-us/azure/automation/manage-runbooks"]', 'equalsIgnoreCase("log.category", "Administrative") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) && +oneOf("log.operationName", [ + "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/RUNBOOKS/DRAFT/WRITE", + "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/RUNBOOKS/WRITE", + "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/RUNBOOKS/PUBLISH/ACTION" +]) +', '2026-01-29 16:19:19.325349', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, 
rule_after_events_def, rule_group_by_def) values (1618, 'Azure Automation Webhook Created', 2, 3, 2, 'Persistence', 'Command and Scripting Interpreter', 'Detects the creation of Azure Automation webhooks through Azure Activity Logs. + +**Security Context:** +Azure Automation webhooks provide an HTTP endpoint that enables external systems to trigger runbook execution. Each webhook has a unique URL that can execute runbooks with passed parameters, making it a powerful automation mechanism. Adversaries can abuse webhooks to establish persistence by creating backdoor triggers that execute malicious runbooks from external locations, bypassing interactive authentication and logging requirements. This technique is well-documented in offensive Azure toolkits like PowerZure. + +**Detection Logic:** +This rule monitors Activity Logs for webhook creation and update operations: +- WEBHOOKS/ACTION: Generate or regenerate webhook URL +- WEBHOOKS/WRITE: Create or update webhook configuration + +Both operations indicate potential malicious activity when creating backdoor access to runbook execution. + +**Investigation Steps:** +1. Identify the creator: Check log.propertiesCaller for who created the webhook +2. Review webhook details: Examine log.resourceId for webhook name and associated Automation account +3. Verify authorization: Confirm if webhook creation was part of legitimate automation workflow +4. Identify target runbook: Determine which runbook the webhook is configured to trigger +5. Review runbook content: Inspect the linked runbook code for malicious commands +6. Check webhook expiry: Verify the expiration date (long-lived webhooks are suspicious) +7. Examine webhook URL: Determine if the URL has been accessed or shared externally +8. Analyze timing: Check if creation follows suspicious authentication or privilege escalation +9. Review network logs: Look for external HTTP POST requests to the webhook URL +10. 
Correlate with runbook jobs: Check execution history for suspicious job runs + +**Recommended Actions:** +- If unauthorized, immediately disable or delete the webhook +- Rotate the webhook URL if compromise is suspected +- Review and audit the associated runbook for malicious code +- Check webhook execution logs for any triggered jobs +- Examine firewall logs for external connections attempting to trigger the webhook +- Implement IP restrictions on webhook access when possible +- Enable diagnostic logging for Automation accounts +- Use Azure Policy to restrict webhook creation to authorized personnel +- Implement webhook URL lifecycle management with short expiration periods +- Monitor for webhook regeneration attempts + +**Common Attack Patterns:** +- Creating webhooks linked to malicious runbooks for command execution +- Using webhooks as covert C2 channels for remote access +- Establishing persistence through externally-triggered automation +- Bypassing MFA by triggering privileged runbooks via webhooks +- Creating long-lived (multi-year) webhook URLs for sustained access + +**Known Threat Tools:** +- PowerZure: Create-Backdoor function specifically targets webhook creation +- SpecterOps documented webhook abuse for Azure persistence + +**MITRE ATT&CK Reference:** T1059 - Command and Scripting Interpreter + +**Azure Documentation:** +- AzureActivity table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity +- Webhook documentation: https://learn.microsoft.com/en-us/azure/automation/automation-webhooks +', 
'["https://attack.mitre.org/techniques/T1059/","https://powerzure.readthedocs.io/en/latest/Functions/operational.html#create-backdoor","https://github.com/hausec/PowerZure","https://posts.specterops.io/attacking-azure-azure-ad-and-introducing-powerzure-ca70b330511a","https://www.ciraltos.com/webhooks-and-azure-automation-runbooks/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/azureactivity","https://learn.microsoft.com/en-us/azure/automation/automation-webhooks"]', 'equalsIgnoreCase("log.category", "Administrative") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) && +oneOf("log.operationName", [ + "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/WEBHOOKS/ACTION", + "MICROSOFT.AUTOMATION/AUTOMATIONACCOUNTS/WEBHOOKS/WRITE" +]) +', '2026-01-29 16:19:20.472132', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1619, 'Network Security Group Modifications', 3, 3, 2, 'Defense Evasion', 'T1562.007 - Impair Defenses: Disable or Modify Cloud Firewall', 'Detects modifications to Azure Network Security Groups which could indicate attempts to bypass network security controls or create backdoor access. Network Security Groups control traffic flow to Azure resources and unauthorized changes could expose critical infrastructure. + +Next Steps: +1. Verify the legitimacy of the NSG modification with the responsible administrator +2. Review the specific security rules that were added, modified, or deleted +3. Check if the modification aligns with approved change management processes +4. Investigate the source IP and user account that performed the change +5. 
Review other recent Azure activity from the same user or IP address +6. Validate that the NSG changes don''t expose critical resources to unauthorized access +7. Check if the changes affect production workloads or sensitive environments +', '["https://learn.microsoft.com/en-us/azure/virtual-network/virtual-network-nsg-manage-log","https://attack.mitre.org/techniques/T1562/007/"]', 'contains("log.operationName", "Microsoft.Network/networkSecurityGroups") && contains("log.operationName", ["/write", "/delete", "/securityRules/write"]) && equalsIgnoreCase("log.category", "Administrative") && equalsIgnoreCase("log.resultType", "Success")', '2026-01-29 16:19:21.527856', true, true, 'origin', '["origin.ip","log.resourceId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1620, 'Azure Conditional Access Policy Modified', 3, 3, 2, 'Persistence', 'Account Manipulation: Additional Cloud Credentials', 'Detects modifications to Azure Conditional Access policies through Azure AD/Entra ID Audit Logs. + +**Security Context:** +Azure Conditional Access policies are critical security controls that enforce access requirements such as multi-factor authentication (MFA), device compliance, location restrictions, and application-specific rules. Adversaries who gain sufficient privileges may modify these policies to weaken security controls, create exceptions for their compromised accounts, exclude malicious users from MFA requirements, or establish persistent access by bypassing security mechanisms. + +**Detection Logic:** +This rule monitors AuditLogs for successful "Update policy" operations in Azure AD/Entra ID. 
These operations indicate changes to existing Conditional Access policy configurations, including modifications to: +- User and group inclusions/exclusions +- Application scope changes +- Location-based access rules +- Grant controls (MFA, device compliance, etc.) +- Session controls +- Policy state (enabled/disabled/report-only) + +**Investigation Steps:** +1. Identify the modifier: Check log.propertiesInitiatedBy for who modified the policy +2. Review policy details: Examine log.propertiesTargetResources for the affected policy name and ID +3. Verify authorization: Confirm if the modification was part of approved security changes +4. Compare policy versions: Review log.propertiesModifiedProperties to identify specific changes made +5. Check policy before/after states: Look for weakening of security controls (MFA removed, users excluded, etc.) +6. Analyze timing: Determine if modification follows suspicious authentication or privilege escalation +7. Review affected users/apps: Identify which users, groups, or applications are impacted by the change +8. Check for exclusions: Look for specific users or groups being excluded from security requirements +9. Examine policy state: Verify if policy was disabled or moved to report-only mode +10. 
Correlate with sign-ins: Check for unusual sign-in activity after policy modification + +**Recommended Actions:** +- If unauthorized, immediately revert the policy to its previous secure state +- Review all Conditional Access policies for unauthorized modifications +- Enable change tracking for Conditional Access policies +- Implement privileged access management for policy modification rights +- Use PIM (Privileged Identity Management) with approval for Conditional Access Administrator role +- Enable alerts for all Conditional Access policy changes +- Maintain documented baseline configurations for all policies +- Implement policy-as-code for version control and change management +- Review and audit accounts with Conditional Access Administrator role +- Consider emergency access accounts and their exclusions + +**Common Malicious Modifications:** +- Excluding attacker-controlled accounts from MFA requirements +- Disabling location-based restrictions +- Removing device compliance requirements +- Adding exceptions for legacy authentication protocols +- Changing policy state from "enabled" to "report-only" or "disabled" +- Expanding policy exclusions to include compromised accounts + +**MITRE ATT&CK Reference:** T1098.001 - Account Manipulation: Additional Cloud Credentials + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- Conditional Access: https://learn.microsoft.com/en-us/entra/identity/conditional-access/overview +', '["https://attack.mitre.org/techniques/T1098/001/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/entra/identity/conditional-access/overview","https://learn.microsoft.com/en-us/entra/identity/conditional-access/howto-conditional-access-policy-all-users-mfa"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +equalsIgnoreCase("log.operationName", "Update policy") && +(equals("log.resultType", "0") 
|| equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:22.706259', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1621, 'Azure Global Administrator Role Addition to PIM User', 3, 3, 3, 'Persistence', 'Account Manipulation: Additional Cloud Credentials', 'Detects when users are granted Global Administrator (Company Administrator) role assignments through Azure AD/Entra ID Privileged Identity Management (PIM). + +**Security Context:** +The Global Administrator role is the most powerful administrative role in Azure AD/Entra ID, granting complete control over all aspects of the directory and services that use Azure AD identities. PIM enables just-in-time privileged access through eligible (requires activation) or time-bound assignments. Adversaries who gain sufficient privileges may add themselves or other compromised accounts to this role to establish persistence and maintain full administrative control over the tenant. + +**Detection Logic:** +This rule monitors AuditLogs for successful PIM role assignments specifically for the Global Administrator role. It detects both: +- **Eligible assignments (permanent)**: User can activate the role when needed +- **Active assignments (time-bound)**: Role is directly active for a specified duration + +The rule identifies these assignments through the operation names and filters for the Global Administrator role specifically. + +**Investigation Steps:** +1. Identify the assignor: Check log.propertiesInitiatedBy for who made the role assignment +2. Identify the assignee: Examine log.propertiesTargetResources for the user receiving the role +3. 
Verify authorization: Confirm if this assignment was part of approved privileged access request +4. Check assignment type: Determine if it''s eligible (requires activation) or time-bound (direct) +5. Review duration: For time-bound assignments, check the duration of the assignment +6. Analyze timing: Determine if assignment follows suspicious authentication or compromise indicators +7. Review justification: Check if a business justification was provided in log.propertiesAdditionalDetails +8. Check user history: Review the assignee''s account for recent suspicious activity +9. Examine recent actions: Look for privileged operations performed immediately after assignment +10. Correlate with sign-ins: Check for unusual authentication patterns before/after assignment + +**Recommended Actions:** +- If unauthorized, immediately revoke the Global Administrator role assignment +- Review all recent PIM role assignments for anomalies +- Enable PIM approval workflows for Global Administrator role assignments +- Implement maximum assignment duration limits for time-bound assignments +- Require MFA and justification for all Global Administrator activations +- Enable PIM alerts for high-privilege role assignments +- Audit accounts with Privileged Role Administrator permissions +- Review and limit the number of permanent Global Administrator assignments +- Enable Azure AD Identity Protection to detect compromised credentials +- Implement break-glass emergency access accounts following best practices + +**PIM Assignment Types:** +- **Eligible (permanent)**: User must activate the role when needed, typically with MFA and justification +- **Active (time-bound)**: Role is directly assigned for a limited duration without activation required +- Both types should be monitored as adversaries may use either for persistence + +**Common Attack Patterns:** +- Compromised Privileged Role Administrator adding backdoor accounts +- Insider threat establishing persistent administrative access +- 
Privilege escalation from lower-privilege administrative roles +- Adding service principals or managed identities to Global Administrator role +- Creating long-duration time-bound assignments for sustained access + +**MITRE ATT&CK Reference:** T1098.001 - Account Manipulation: Additional Cloud Credentials + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- PIM for Azure AD roles: https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-configure +', '["https://attack.mitre.org/techniques/T1098/001/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-configure","https://learn.microsoft.com/en-us/entra/identity/role-based-access-control/permissions-reference#global-administrator"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) && +(contains("log.operationName", "Add eligible member to role") || contains("log.operationName", "Add member to role")) && +(contains("log.properties.targetResources.displayName", "Global Administrator") || contains("log.properties.targetResources.displayName", "Company Administrator")) +', '2026-01-29 16:19:23.832434', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1622, 'Azure Privileged Identity Management Role Settings Modified', 3, 3, 2, 'Persistence', 'Account Manipulation: Additional Cloud Credentials', 'Detects modifications to Azure AD/Entra ID Privileged Identity Management (PIM) role 
settings through Audit Logs. + +**Security Context:** +PIM role settings define critical security controls for privileged role assignments, including: +- Approval requirements for role activation +- Multi-factor authentication (MFA) enforcement +- Maximum activation duration +- Justification requirements +- Notification settings +- Eligibility and assignment duration limits + +Adversaries with sufficient privileges may modify these settings to weaken security controls, remove approval requirements, extend activation durations, or disable MFA requirements, making it easier to abuse privileged roles for persistent access. + +**Detection Logic:** +This rule monitors AuditLogs for successful "Update role setting in PIM" operations, which capture modifications to role configuration policies that govern how privileged roles are activated and managed. + +**Investigation Steps:** +1. Identify the modifier: Check log.propertiesInitiatedBy for who changed the role settings +2. Identify affected role: Examine log.propertiesTargetResources for which role''s settings were modified +3. Review specific changes: Analyze log.propertiesModifiedProperties to identify what settings changed (before/after values) +4. Verify authorization: Confirm if the modification was part of approved policy changes +5. Check for security weakening: Look for: + - Removal or reduction of approval requirements + - Disabling MFA for activation + - Extending maximum activation durations + - Removing justification requirements + - Disabling notifications to administrators +6. Analyze timing: Determine if modification follows suspicious authentication or privilege escalation +7. Review role sensitivity: Assess the criticality of the affected role (Global Admin, Privileged Role Admin, etc.) +8. Check for pattern: Look for multiple role setting modifications in short timeframe +9. Examine subsequent activations: Monitor for role activations after settings were weakened +10. 
Correlate with user behavior: Check if modifier has history of legitimate administrative actions + +**Recommended Actions:** +- If unauthorized, immediately revert role settings to secure baseline +- Review all PIM role settings for unauthorized modifications +- Enable change notifications for PIM role setting updates +- Implement approval workflows for modifying PIM role settings +- Use PIM for Privileged Role Administrator role itself +- Maintain documented baseline configurations for all PIM role settings +- Enable alerts for PIM configuration changes +- Audit accounts with permissions to modify PIM settings +- Implement policy-as-code for PIM role configurations +- Review and document approved security baselines for each privileged role + +**Common Malicious Modifications:** +- Removing MFA requirement for role activation +- Disabling approval workflows for high-privilege roles +- Extending maximum activation duration from hours to days +- Removing justification requirements +- Disabling email notifications to security teams +- Extending maximum eligible assignment duration +- Removing requirement for assignment end date +- Allowing permanent assignments without expiration + +**PIM Role Settings Categories:** +- **Activation**: MFA, approval, justification, duration +- **Assignment**: Maximum duration, expiration requirements, permanent assignments +- **Notification**: Alerts to admins, assignees, and approvers + +**MITRE ATT&CK Reference:** T1098.001 - Account Manipulation: Additional Cloud Credentials + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- PIM role settings: https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-how-to-change-default-settings +', 
'["https://attack.mitre.org/techniques/T1098/001/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-how-to-change-default-settings","https://learn.microsoft.com/en-us/entra/id-governance/privileged-identity-management/pim-configure"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +contains("log.operationName", "Update role setting") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:24.831514', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1623, 'Azure Service Principal Addition', 2, 3, 2, 'Persistence', 'T1136.003 - Create Account: Cloud Account', 'Detects when a new service principal is created in Azure Active Directory (Entra ID). A service principal is an identity created for use with applications, hosted services, and automated tools to access Azure resources. While service principals are legitimate and necessary for automation, adversaries may create rogue service principals to establish persistent access, escalate privileges, or move laterally within an Azure environment. 
+ +Threat Context: +- Service principals can be granted powerful permissions across Azure subscriptions +- Unlike user accounts, service principals often lack MFA protection +- Credentials (secrets/certificates) can persist for years without rotation +- Service principals can be used for automated attacks without triggering user behavior analytics + +Legitimate Use Cases: +- DevOps pipelines and CI/CD automation +- Application authentication and service-to-service communication +- Terraform/Bicep/ARM template deployments +- Monitoring and management tools + +Suspicious Indicators: +- Creation by non-administrative users +- Creation outside business hours +- Service principal granted high privileges immediately after creation +- Multiple service principals created in quick succession +- Creation from unusual IP addresses or locations + +Next Steps: +1. Verify if the service principal creation was authorized and documented +2. Identify who created it (check InitiatedBy field) and verify their role +3. Review the permissions/roles assigned to the new service principal +4. Check if credentials (secrets/certificates) were added immediately after +5. Examine the source IP address and location of the creation event +6. Verify if the service principal has been used for authentication +7. Cross-reference with change management tickets or DevOps records +8. 
If unauthorized, immediately disable the service principal and rotate credentials +', '["https://attack.mitre.org/techniques/T1136/003/","https://attack.mitre.org/tactics/TA0003/","https://learn.microsoft.com/en-us/entra/identity/monitoring-health/reference-audit-activities","https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals"]', '(equalsIgnoreCase("log.category", "AuditLogs") || contains("log.category", "Audit")) && (equalsIgnoreCase("log.operationName", "Add service principal") || contains("log.operationName", "Create service principal")) && (equalsIgnoreCase("log.resultType", "0") || equalsIgnoreCase("actionResult", "success"))', '2026-01-29 16:19:25.821239', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1624, 'User Added as Owner for Azure Application', 3, 3, 2, 'Persistence', 'Account Manipulation: Additional Cloud Credentials', 'Detects when a user or service principal is added as an owner for an Azure AD/Entra ID application registration through Audit Logs. + +**Security Context:** +Azure AD application registrations represent identities used for authentication to Azure and other Microsoft services. 
Application owners have full administrative control over the application, including the ability to: +- Add or rotate credentials (certificates and secrets) +- Modify API permissions +- Add additional owners +- Configure redirect URIs and authentication settings +- Delete the application + +Adversaries may add themselves or compromised accounts as application owners to establish persistence, as this grants them the ability to generate new credentials for the application''s service principal, enabling long-term access even if the original compromise vector is remediated. + +**Detection Logic:** +This rule monitors AuditLogs for successful "Add owner to application" operations, which capture when ownership permissions are granted on Azure AD application registrations. + +**Investigation Steps:** +1. Identify the adder: Check log.propertiesInitiatedBy for who added the owner +2. Identify new owner: Examine log.propertiesTargetResources for the user/principal being added as owner +3. Identify application: Review log.propertiesTargetResources for the affected application details +4. Verify authorization: Confirm if the owner addition was part of legitimate administrative action +5. Review application sensitivity: Check what API permissions and resources the application has access to +6. Check application credentials: Look for credential additions after owner was added +7. Analyze timing: Determine if owner addition follows suspicious authentication or privilege escalation +8. Review new owner privileges: Assess if the new owner already has elevated permissions +9. Check for pattern: Look for multiple ownership additions across different applications +10. 
Examine subsequent actions: Monitor for credential generation, permission changes, or authentication using the application + +**Recommended Actions:** +- If unauthorized, immediately remove the malicious owner from the application +- Review and rotate all credentials (secrets and certificates) for the affected application +- Audit all API permissions granted to the application +- Review authentication activity using the application''s service principal +- Check for any configuration changes made after the owner was added +- Enable application owner change alerts for critical applications +- Implement approval workflows for adding owners to sensitive applications +- Audit accounts with permissions to modify application ownership +- Review and document expected owners for all applications +- Consider using managed identities instead of application registrations where possible + +**Application Owner Capabilities:** +Application owners can perform all configuration actions on the application, including: +- **Credential Management**: Add/remove certificates and client secrets +- **Permission Management**: Request and consent to API permissions +- **Authentication Configuration**: Modify redirect URIs, token settings +- **Ownership Management**: Add/remove other owners +- **Application Deletion**: Remove the application entirely + +**Common Attack Patterns:** +- Adding backdoor account as owner after initial compromise +- Privilege escalation by compromising application with broad permissions +- Establishing persistence through credential rotation capabilities +- Insider threats adding personal accounts as owners +- Service principal abuse by adding owners to high-privilege applications +- Lateral movement by gaining control of applications with cross-tenant access + +**Related Detections:** +- Service principal credential additions +- Application permission changes +- Service principal authentication anomalies +- Cross-tenant application access + +**MITRE ATT&CK 
Reference:** T1098.001 - Account Manipulation: Additional Cloud Credentials + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- Application owners: https://learn.microsoft.com/en-us/entra/identity/role-based-access-control/permissions-reference#application-administrator +', '["https://attack.mitre.org/techniques/T1098/001/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/entra/identity/role-based-access-control/permissions-reference#application-administrator","https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +equalsIgnoreCase("log.operationName", "Add owner to application") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:26.937445', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1625, 'MFA Disabled for Privileged Azure AD User', 3, 3, 1, 'Credential Access, Defense Evasion, Persistence', 'T1556 - Modify Authentication Process', 'Detects when Multi-Factor Authentication (MFA) is disabled for privileged users in Azure AD. This could indicate an attempt to weaken security controls for unauthorized access. + +Next Steps: +1. Verify if the MFA disable action was authorized and legitimate +2. Check who initiated the change and from which IP address +3. Review the user''s recent login activity and permissions +4. Ensure the user account has not been compromised +5. Re-enable MFA if the change was unauthorized +6. 
Consider implementing conditional access policies to prevent unauthorized MFA changes +', '["https://learn.microsoft.com/en-us/entra/identity/authentication/howto-mfa-reporting","https://attack.mitre.org/techniques/T1556/"]', 'oneOf("log.operationName", ["Disable Strong Authentication", "Update user"]) && equalsIgnoreCase("log.service", "Authentication Methods") && contains("target.user", ["admin", "globaladmin"])', '2026-01-29 16:19:27.953421', true, true, 'origin', '["target.user"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1626, 'User Added as Owner for Azure Service Principal', 3, 3, 2, 'Persistence', 'Account Manipulation: Additional Cloud Credentials', 'Detects when a user or service principal is added as an owner for an Azure AD/Entra ID service principal (Enterprise Application) through Audit Logs. + +**Security Context:** +Service principals are the local representation of application objects in a specific Azure AD tenant, also known as Enterprise Applications. 
They define what an application can actually do in that specific tenant, including: +- API permissions and consented scopes +- Role assignments to Azure resources +- Authentication and access policies +- Conditional Access policy applicability + +Service principal owners have significant control over the identity, including the ability to: +- Manage credentials for the service principal +- Modify application assignments and permissions +- Configure authentication settings +- Add or remove additional owners +- Delete the service principal + +Adversaries may add themselves or compromised accounts as service principal owners to establish persistence, as this grants the ability to authenticate as the service principal and access all resources it has permissions to. + +**Key Difference: Application vs Service Principal:** +- **Application Registration**: The global definition of the app across all tenants +- **Service Principal (Enterprise App)**: The local instance/identity in a specific tenant +- Application owners control the app definition; Service Principal owners control the tenant-specific instance + +**Detection Logic:** +This rule monitors AuditLogs for successful "Add owner to service principal" operations, which capture when ownership permissions are granted on service principal objects. + +**Investigation Steps:** +1. Identify the adder: Check log.propertiesInitiatedBy for who added the owner +2. Identify new owner: Examine log.propertiesTargetResources for the user/principal being added as owner +3. Identify service principal: Review log.propertiesTargetResources for the affected service principal details +4. Verify authorization: Confirm if the owner addition was part of legitimate administrative action +5. Review service principal permissions: Check what API permissions and Azure role assignments the SP has +6. Check for credential additions: Look for certificate or secret additions after owner was added +7. 
Analyze timing: Determine if owner addition follows suspicious authentication or privilege escalation +8. Review new owner privileges: Assess if the new owner already has elevated permissions elsewhere +9. Check authentication history: Monitor for authentication attempts using the service principal +10. Examine subsequent actions: Look for permission changes, role assignments, or configuration modifications + +**Recommended Actions:** +- If unauthorized, immediately remove the malicious owner from the service principal +- Review and rotate all credentials associated with the service principal +- Audit all API permissions and Azure role assignments for the service principal +- Review authentication activity using the service principal''s credentials +- Check for any configuration changes made after the owner was added +- Enable service principal owner change alerts for critical applications +- Implement approval workflows for adding owners to sensitive service principals +- Audit accounts with permissions to modify service principal ownership +- Review and document expected owners for all service principals +- Consider implementing Managed Identities where service principals are currently used + +**Service Principal Owner Capabilities:** +Service principal owners can perform critical configuration actions: +- **Credential Management**: Add/remove certificates and client secrets +- **Permission Management**: View and manage API permissions (consent may require admin) +- **Authentication Configuration**: Modify authentication policies and settings +- **Ownership Management**: Add/remove other owners +- **Assignment Management**: Manage user/group assignments to the application +- **Service Principal Deletion**: Remove the service principal from the tenant + +**Common Attack Patterns:** +- Adding backdoor account as owner after compromising admin credentials +- Privilege escalation by controlling high-permission service principals +- Establishing persistence through 
credential generation capabilities +- Lateral movement by gaining control of service principals with cross-resource access +- Insider threats adding personal accounts as owners +- Compromising service principals with privileged Azure RBAC roles +- Abusing service principals with delegated API permissions + +**Related Detections:** +- Service principal credential additions (certificates/secrets) +- Application owner additions (companion detection) +- Service principal permission changes +- Service principal authentication anomalies +- Azure RBAC role assignments to service principals + +**MITRE ATT&CK Reference:** T1098.001 - Account Manipulation: Additional Cloud Credentials + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- Service principals: https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals +', '["https://attack.mitre.org/techniques/T1098/001/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals","https://learn.microsoft.com/en-us/entra/identity/enterprise-apps/overview-assign-app-owners"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +equalsIgnoreCase("log.operationName", "Add owner to service principal") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:29.489211', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1627, 'Multi-Factor Authentication Disabled for an Azure User', 3, 3, 2, 'Persistence', 'Modify Authentication 
Process', 'Detects when multi-factor authentication (MFA) is disabled for an Azure AD/Entra ID user account through Audit Logs. + +**Security Context:** +Multi-factor authentication is a critical security control that requires users to provide additional verification beyond just a password. Disabling MFA for user accounts significantly weakens authentication security and is a common technique used by adversaries to maintain persistent access. Once MFA is disabled, attackers can authenticate using only compromised credentials without triggering additional verification steps, making detection more difficult. + +**Detection Logic:** +This rule monitors AuditLogs for successful "Disable Strong Authentication" operations, which represent the per-user MFA setting being turned off in Azure AD/Entra ID. This operation is distinct from Conditional Access MFA policies and represents the legacy per-user MFA enforcement method. + +**Investigation Steps:** +1. Identify the disabler: Check log.propertiesInitiatedBy for who disabled MFA +2. Identify affected user: Examine log.propertiesTargetResources for the user whose MFA was disabled +3. Verify authorization: Confirm if the MFA disabling was part of legitimate administrative action +4. Review user privilege: Determine if the affected user has elevated permissions (admins, privileged roles) +5. Check timing: Analyze if MFA was disabled after suspicious authentication events +6. Review authentication history: Look for failed authentication attempts before MFA disabling +7. Check for compromise indicators: Search for unusual sign-in patterns, impossible travel, or risky sign-ins +8. Examine subsequent logins: Monitor for authentication activity immediately after MFA disabling +9. Review MFA methods: Check what MFA methods the user had registered before disabling +10. 
Correlate with other events: Look for privilege escalation or data access after MFA disabling + +**Recommended Actions:** +- If unauthorized, immediately re-enable MFA for the affected user +- Force password reset for the affected account +- Review all authentication activity for the affected user +- Check for compromised credentials using Azure AD Identity Protection +- Revoke all active sessions for the affected user +- Enable Conditional Access policies instead of per-user MFA for better control +- Implement PIM approval workflows for modifying MFA settings +- Enable alerts for MFA changes on privileged accounts +- Audit accounts with permissions to modify user authentication settings +- Review and restrict who can disable MFA (typically requires User Administrator or higher) + +**Modern MFA Management:** +- **Per-user MFA (legacy)**: This detection targets the legacy per-user MFA setting +- **Conditional Access**: Modern approach using policies instead of per-user settings +- **Authentication Methods Policy**: Newer method for managing FIDO2, passwordless, etc. + +Organizations should migrate from per-user MFA to Conditional Access policies for more granular control. 
+ +**Common Attack Patterns:** +- Disabling MFA after compromising an administrator account +- Removing MFA from privileged accounts for easier persistent access +- Disabling MFA before credential harvesting or lateral movement +- Insider threats removing MFA from their own accounts +- Disabling MFA on service accounts to enable automated authentication attacks + +**Related Detections:** +- MFA method removal/changes +- Conditional Access policy modifications +- Authentication methods policy changes +- Privileged role assignments without MFA + +**MITRE ATT&CK Reference:** T1556 - Modify Authentication Process + +**Azure Documentation:** +- AuditLogs table: https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs +- Per-user MFA: https://learn.microsoft.com/en-us/entra/identity/authentication/howto-mfa-userstates +', '["https://attack.mitre.org/techniques/T1556/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/auditlogs","https://learn.microsoft.com/en-us/entra/identity/authentication/howto-mfa-userstates","https://learn.microsoft.com/en-us/entra/identity/authentication/concept-mfa-licensing"]', 'equalsIgnoreCase("log.category", "AuditLogs") && +equalsIgnoreCase("log.operationName", "Disable Strong Authentication") && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:30.685602', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1628, 'Azure AD Privilege Escalation Attempt Detected', 3, 3, 1, 'Defense Evasion, Persistence, Privilege Escalation, Initial Access', 'T1078 - Valid Accounts', 'Detects attempts to escalate privileges in Azure AD 
through role assignments. Monitors for the Microsoft.Authorization/roleAssignments/write operation which indicates a user or service principal is being granted additional permissions. + +Next Steps: +1. Verify the legitimacy of the role assignment by checking with the requesting user or administrator +2. Review the specific role being assigned and ensure it follows the principle of least privilege +3. Check if this is part of a scheduled maintenance or approved change request +4. Investigate the source IP address and user context for any suspicious patterns +5. Review Azure AD audit logs for any other suspicious activities from the same user or IP +6. If unauthorized, immediately revoke the role assignment and investigate potential account compromise +', '["https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-alert","https://attack.mitre.org/techniques/T1078/"]', 'equalsIgnoreCase("log.operationName", "Microsoft.Authorization/roleAssignments/write") && equalsIgnoreCase("log.category", "Administrative")', '2026-01-29 16:19:32.384127', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1629, 'Resource Group Mass Modifications', 2, 3, 3, 'Impact', 'T1496 - Resource Hijacking', 'Detects mass modifications to Azure resource groups which could indicate unauthorized infrastructure changes or resource hijacking attempts. This rule triggers when multiple resource group write or delete operations are performed by the same user from the same IP address within a 15-minute window. + +Next Steps: +1. Investigate the user account (log.aadObjectId) performing the modifications +2. 
Review the specific resource groups being modified +3. Check if the operations align with scheduled maintenance or legitimate business activities +4. Verify the source IP address and geolocation for suspicious activity +5. Review Azure Activity Logs for the full scope of changes made +6. Check for any privilege escalation or unauthorized access to the account +7. If malicious, immediately revoke access and assess impact on affected resources +', '["https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log-schema","https://attack.mitre.org/techniques/T1496/"]', 'contains("log.operationName", "Microsoft.Resources/subscriptions/resourceGroups") && contains("log.operationName", ["/write", "/delete"]) && equalsIgnoreCase("log.category", "Administrative") && equalsIgnoreCase("log.resultSignature", "Succeeded")', '2026-01-29 16:19:34.088773', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1630, 'Azure Service Principal Multiple Failed Authentications', 3, 3, 2, 'Initial Access', 'T1078.004 - Valid Accounts: Cloud Accounts', 'Detects potential brute force or credential guessing attacks against Azure service principals by identifying multiple failed authentication attempts from the same source IP within a 2-hour window. + +**Security Context:** +Service principals are non-interactive identities used by applications and services to authenticate to Azure AD. 
Multiple failed authentication attempts may indicate: +- Brute force attacks trying to guess client secrets +- Replay attacks with expired or invalid credentials +- Misconfigured applications repeatedly failing to authenticate +- Compromised credentials being tested + +**Detection Logic:** +- **Trigger**: A failed service principal authentication (resultType != "0") +- **Correlation**: Looks back 2 hours for 5+ failed authentications from the same service principal and source IP + +This correlation approach reduces false positives by only alerting when there''s a pattern of multiple failures, not just a single authentication error. + +**Investigation Steps:** +1. Identify the service principal: Check log.propertiesServicePrincipalId and log.propertiesAppDisplayName +2. Review failure reasons: Examine log.propertiesResultDescription for specific error codes +3. Analyze source location: Check origin.ip and log.propertiesLocation for geographic anomalies +4. Review failure pattern: Check timestamps and frequency of authentication attempts +5. Verify credential type: Determine if certificate or secret authentication was attempted +6. Check service principal ownership: Review who owns the application/service principal +7. Examine recent credential changes: Look for recent secret/certificate additions in AuditLogs +8. Review service principal permissions: Check API permissions and Azure RBAC roles +9. Look for successful authentications: Check if any attempts succeeded after the failures +10. 
Correlate with other events: Search for related suspicious activities (permission changes, resource access) + +**Recommended Actions:** +- If legitimate: Review application configuration and fix authentication issues +- If suspicious: Immediately rotate all credentials (secrets and certificates) for the service principal +- Review and audit all API permissions and Azure RBAC role assignments +- Check for unauthorized credential additions in AuditLogs +- Implement IP restrictions or Conditional Access for service principals where possible +- Enable Azure AD Identity Protection for service principal risk detection +- Consider implementing Managed Identities instead of service principals where possible +- Review authentication logs for successful breaches after failed attempts + +**Common Failure Result Types:** +- **50126**: Invalid credentials (wrong password/secret) +- **50053**: Account locked due to too many sign-in attempts +- **50057**: Account disabled +- **700016**: Application not found in directory +- **7000215**: Invalid client secret provided + +**MITRE ATT&CK Reference:** T1078.004 - Valid Accounts: Cloud Accounts +', '["https://attack.mitre.org/techniques/T1078/004/","https://www.cloud-architekt.net/auditing-of-msi-and-service-principals/","https://learn.microsoft.com/en-us/azure/azure-monitor/reference/tables/signinlogs"]', 'equalsIgnoreCase("log.category", "SignInLogs") && +exists("log.propertiesServicePrincipalId") && +!equals("log.resultType", "0") +', '2026-01-29 16:19:35.088874', true, true, 'origin', null, '[{"indexPattern":"v11-log-azure-*","with":[{"field":"log.propertiesServicePrincipalId.keyword","operator":"filter_term","value":"{{.log.propertiesServicePrincipalId}}"},{"field":"origin.ip.keyword","operator":"filter_term","value":"{{.origin.ip}}"},{"field":"log.category.keyword","operator":"filter_term","value":"SignInLogs"}],"or":null,"within":"now-2h","count":5}]', null); +insert into public.utm_correlation_rules (id, rule_name, 
rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1631, 'SQL Database Firewall Rule Modifications', 3, 2, 1, 'Lateral Movement', 'Remote Services', 'Detects modifications to Azure SQL Database firewall rules which could allow unauthorized access to sensitive data. This includes both creation and deletion of firewall rules that control network access to SQL databases. + +Next Steps: +1. Verify the legitimacy of the firewall rule modification +2. Check if the change was authorized and documented +3. Review the source IP and user making the modification +4. Assess if the new firewall rule creates security risks +5. Monitor for subsequent database access attempts from newly allowed IPs +6. Review Azure Activity Logs for related database activities +', '["https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log-schema","https://attack.mitre.org/techniques/T1021/"]', 'contains("log.operationName", "Microsoft.Sql/servers") && (contains("log.operationName", "/firewallRules/write") || contains("log.operationName", "/firewallRules/delete")) && equalsIgnoreCase("log.category", "Administrative") && equalsIgnoreCase("actionResult", "accepted")', '2026-01-29 16:19:36.077882', true, true, 'origin', '["origin.ip","log.resourceId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1632, 'Azure Storage Account Public Access Enabled', 3, 2, 1, 'Collection', 'T1530 - Data from Cloud Storage Object', 'Detects when public (anonymous) 
access is enabled on Azure Storage Accounts or Blob containers, creating a critical security risk by allowing unauthenticated access to potentially sensitive data. + +**Security Context:** +Azure Storage Accounts can be configured to allow public access at two levels: +1. **Account Level**: `allowBlobPublicAccess` property enables/disables public access for the entire storage account +2. **Container Level**: Individual blob containers can be set to "Blob" or "Container" public access levels + +Public access levels: +- **None (Private)**: No anonymous access - requires authentication +- **Blob**: Anonymous read access for blobs only +- **Container**: Anonymous read access for blobs and container metadata + +**Risk Scenarios:** +- Data exfiltration without authentication +- Exposure of sensitive files, databases, backups, or credentials +- Compliance violations (GDPR, HIPAA, PCI-DSS) +- Ransomware actors scanning for exposed storage accounts +- Unauthorized data modification if write permissions misconfigured + +**Detection Logic:** +Monitors Activity Logs for successful WRITE operations on: +- Storage account properties (allowBlobPublicAccess setting) +- Blob service configurations (publicAccess on containers) + +**Investigation Steps:** +1. Identify the storage account: Check log.resourceId for the full resource path +2. Review who made the change: Check log.identityClaimUid (user/service principal) +3. Verify the specific change: Check log.propertiesAllowBlobPublicAccess or log.propertiesPublicAccess values +4. Determine change justification: Review log.callerIpAddress and check if change request was approved +5. Audit current configuration: Use Azure CLI/Portal to check current publicAccess settings +6. Scan for exposed data: Review containers and blobs for sensitive information +7. Check access logs: Look for anonymous requests in storage analytics logs +8. Review network rules: Verify if firewall/VNET rules provide additional protection +9. 
Verify encryption: Ensure encryption at rest and in transit is enabled +10. Check for data exfiltration: Review Storage Analytics logs for unusual download patterns + +**Recommended Actions:** +- **Immediate**: If unauthorized, disable public access immediately via Azure Portal or CLI +- **CLI Command**: `az storage account update --name <account-name> --allow-blob-public-access false` +- Review all containers for public access: `az storage container list --account-name <account-name>` +- Enable Azure Defender for Storage for threat detection +- Implement Azure Private Link to restrict access to private networks +- Configure network rules (firewall/VNET) to limit access +- Enable storage account access logs and monitoring +- Implement Shared Access Signatures (SAS) with expiration for temporary access +- Use Azure RBAC instead of public access for authorized users +- Enable soft delete and versioning for blob protection +- Set up alerts for anonymous access attempts in storage analytics + +**Azure Security Best Practices:** +- Disable public access at the account level by default +- Use Azure Private Endpoints for private connectivity +- Require secure transfer (HTTPS) for all operations +- Enable Azure AD authentication instead of shared keys +- Implement least privilege access with Azure RBAC +- Enable Azure Policy to prevent public access across subscriptions + +**Common Legitimate Use Cases:** +- Hosting static website content (images, CSS, JS) +- Distributing public software/packages +- Sharing public datasets or documentation + +Even for legitimate cases, consider alternatives like Azure CDN with authentication or Azure Static Web Apps. 
+ +**MITRE ATT&CK Reference:** T1530 - Data from Cloud Storage Object +', '["https://attack.mitre.org/techniques/T1530/","https://learn.microsoft.com/en-us/azure/storage/blobs/anonymous-read-access-configure","https://learn.microsoft.com/en-us/azure/storage/common/storage-network-security","https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log-schema"]', 'equalsIgnoreCase("log.category", "Administrative") && +contains("log.operationName", "Microsoft.Storage/storageAccounts") && +(contains("log.operationName", "write") || contains("log.operationName", "blobServices")) && +(equals("log.resultType", "0") || equalsIgnoreCase("actionResult", "SUCCESS")) +', '2026-01-29 16:19:37.423512', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1633, 'Virtual Machine Suspicious Activities', 2, 2, 3, 'Defense Evasion', 'T1578 - Modify Cloud Compute Infrastructure', 'Detects suspicious activities on Azure Virtual Machines including rapid creation, deletion, or configuration changes that could indicate compromise or abuse. This rule triggers when multiple VM operations are performed from the same IP address within a short timeframe. + +Next Steps: +1. Review the specific VM operations performed and verify if they are legitimate business activities +2. Check the user account and IP address associated with the activities for any signs of compromise +3. Examine the timing and frequency of operations to determine if they follow normal usage patterns +4. Verify if the operations were performed during expected business hours +5. Check for any associated alerts or anomalies in authentication logs +6. 
Review VM configurations and access logs for any unauthorized changes +7. Contact the resource owner to confirm if the activities were authorized +', '["https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log-schema","https://attack.mitre.org/techniques/T1578/"]', 'contains("log.operationName", "Microsoft.Compute/virtualMachines") && (contains("log.operationName", "/write") || contains("log.operationName", "/delete") || contains("log.operationName", "/restart/action") || contains("log.operationName", "/powerOff/action")) && equalsIgnoreCase("log.category", "Administrative") && equalsIgnoreCase("log.resultSignature", "Succeeded") && exists("origin.ip")', '2026-01-29 16:19:38.433747', true, true, 'origin', '["origin.ip"]', '[{"indexPattern":"v11-log-azure-*","with":[{"field":"origin.ip.keyword","operator":"filter_term","value":"{{.origin.ip}}"}],"or":null,"within":"now-10m","count":8}]', null); + +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1593, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1594, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1595, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1596, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1597, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1598, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1599, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1600, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1601, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1602, 3, null); 
+insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1603, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1604, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1605, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1606, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1607, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1608, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1609, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1610, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1611, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1612, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1613, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1614, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1615, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1616, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1617, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1618, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1619, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1620, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, 
last_update) values (1621, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1622, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1623, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1624, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1625, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1626, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1627, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1628, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1629, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1630, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1631, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1632, 3, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1633, 3, null); diff --git a/backend/src/main/resources/config/liquibase/data/o365.rules.sql b/backend/src/main/resources/config/liquibase/data/o365.rules.sql new file mode 100644 index 000000000..4d43dbe1e --- /dev/null +++ b/backend/src/main/resources/config/liquibase/data/o365.rules.sql @@ -0,0 +1,478 @@ +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1546, 'Office 365 Anti-Phishing Policy 
Bypass Detected', 3, 3, 1, 'Defense Evasion', 'T1562.001 - Impair Defenses: Disable or Modify Tools', 'Detects potential bypasses or modifications to anti-phishing policies including changes to safe sender lists, domain exclusions, or policy disabling that could allow phishing emails to reach users. This rule identifies successful administrative actions on anti-phishing configurations that could weaken email security defenses. + +Next Steps: +1. Review the specific anti-phishing policy changes made by the user +2. Verify if the changes were authorized and documented in change management +3. Check if the user has legitimate administrative privileges for email security policies +4. Examine the timing and context of the changes (e.g., during business hours vs. off-hours) +5. Look for any subsequent phishing emails that may have bypassed detection +6. Consider rolling back unauthorized changes and implementing additional approval workflows +7. Monitor for any unusual email activity following the policy modifications +', '["https://learn.microsoft.com/en-us/defender-office-365/anti-phishing-policies-about","https://attack.mitre.org/techniques/T1562/001/"]', '(contains("action", "AntiPhish") || equals("action", "Set-AntiPhishPolicy") || equals("action", "Remove-AntiPhishPolicy") || equals("action", "New-AntiPhishPolicy") || equals("action", "Disable-AntiPhishRule") || contains("action","SafeSender") || contains("action","BypassedSender")) && equals("actionResult", "Success") && exists("origin.user")', '2026-01-28 22:54:21.386731', true, true, 'origin', null, '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"log.Operation.keyword","operator":"filter_match","value":"AntiPhish"}],"or":null,"within":"now-4h","count":2}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, 
rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1547, 'Office 365 App Consent Grants Detected', 3, 3, 1, 'Persistence, Privilege Escalation', 'T1098.003 - Account Manipulation: Additional Cloud Credentials', 'Detects when OAuth consent is granted to an application in Office 365. Attackers may use malicious OAuth apps to gain persistent access to user data without requiring credentials. This technique allows attackers to maintain access even after password changes. + +Next Steps: +1. Review the application that received consent and verify its legitimacy +2. Check the permissions granted to the application +3. Investigate the user who granted consent for suspicious activity +4. Review application audit logs for any unauthorized data access +5. If malicious, revoke the application consent and remove the app registration +6. Consider implementing application consent policies to prevent unauthorized app installations +', '["https://learn.microsoft.com/en-us/defender-office-365/detect-and-remediate-illicit-consent-grants","https://attack.mitre.org/techniques/T1098/003/"]', 'equals("action", "Consent to application") && equals("actionResult", "Success")', '2026-01-28 22:54:22.331652', true, true, 'origin', '["origin.user","log.appAccessContextClientAppId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1548, 'Audit Log Tampering Detection', 3, 3, 2, 'Defense Evasion', 'T1070.001 - Indicator Removal: Clear Windows Event Logs', 'Detects attempts to tamper with audit logs by disabling auditing, modifying audit configurations, or clearing audit 
data. This could indicate an attempt to hide malicious activities. + +Next Steps: +1. Investigate the user account that performed the audit configuration changes +2. Review recent activities by this user to identify potential malicious actions +3. Check if the audit configuration was restored after being disabled +4. Correlate with other security events around the same timeframe +5. Verify if this was an authorized administrative action +6. Review any recent privilege escalation or account compromise indicators +', '["https://learn.microsoft.com/en-us/purview/audit-log-enable-disable","https://attack.mitre.org/techniques/T1070/001/"]', '(oneOf("action", ["Set-AdminAuditLogConfig", "Remove-AdminAuditLogConfig", "Disable-OrganizationCustomization", "Set-OrganizationConfig"]) || +(equals("log.Workload", "Exchange") && contains("log.ObjectId", "AdminAuditLog")) || +(contains("log.Parameters", "UnifiedAuditLogIngestionEnabled") && contains("log.Parameters", "false"))) && +exists("origin.user") && +equals("actionResult", "Succeeded") +', '2026-01-28 22:54:23.212737', true, true, 'origin', null, '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"Set-AdminAuditLogConfig"}],"or":[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"Remove-AdminAuditLogConfig"}],"or":null,"within":"now-24h","count":1}],"within":"now-24h","count":1}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1549, 'Azure AD Integration 
Suspicious Activity', 3, 3, 2, 'Persistence, Privilege Escalation', 'T1098 - Account Manipulation', 'Detects suspicious Azure Active Directory integration events including multiple failed authentication attempts, unusual role assignments, or bulk user modifications that could indicate an attempted compromise of identity management systems. This rule identifies patterns of authentication failures, privilege escalations, and bulk account modifications that may suggest malicious activity targeting the organization''s identity infrastructure. + +Next Steps: +1. Review the specific Azure AD activity details and affected user accounts +2. Analyze the frequency and timing of the detected events for patterns +3. Verify the legitimacy of any role assignments or user modifications +4. Check for concurrent suspicious activities from the same IP address or user +5. Review Azure AD sign-in logs for additional context around failed authentications +6. Validate whether the activities align with known business processes or authorized administrative tasks +7. Consider implementing additional monitoring for the affected accounts +8. 
If malicious activity is confirmed, immediately review and revoke any unauthorized permissions or access +', '["https://learn.microsoft.com/en-us/purview/audit-log-activities","https://attack.mitre.org/techniques/T1098/"]', 'equals("log.Workload", "AzureActiveDirectory") && +( + (equals("action", "UserLoginFailed") && equals("actionResult", "Failed")) || + (equals("action", "Add member to role") && equals("actionResult", "Success")) || + (equals("action", "Update user") && equals("actionResult", "Success")) || + (equals("action", "Delete user") && equals("actionResult", "Success")) || + (equals("action", "Add service principal") && equals("actionResult", "Success")) +) && exists("origin.user") +', '2026-01-28 22:54:24.351490', true, true, 'origin', '["origin.user","origin.ip"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"log.Workload.keyword","operator":"filter_term","value":"AzureActiveDirectory"}],"or":null,"within":"now-15m","count":10}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1550, 'Unauthorized Calendar Sharing Modification', 3, 2, 1, 'Collection', 'T1213 - Data from Information Repositories', 'Detects modifications to calendar sharing permissions that could expose sensitive scheduling information to unauthorized users. This rule identifies when users modify calendar delegation or folder permissions on calendar folders, which could indicate unauthorized access attempts or data exposure risks. + +Next Steps: +1. Verify if the calendar sharing modification was authorized by the calendar owner +2. 
Review the specific permissions granted and recipient of the sharing permissions +3. Check if the user performing the action has legitimate business need for calendar access +4. Investigate any unusual patterns of calendar sharing modifications by the same user +5. Review related authentication logs for the user account +6. Consider implementing additional approval workflows for calendar sharing modifications +', '["https://docs.microsoft.com/en-us/microsoft-365/compliance/audit-log-activities","https://attack.mitre.org/techniques/T1213/"]', 'oneOf("action", ["UpdateCalendarDelegation", "AddFolderPermissions", "ModifyFolderPermissions", "RemoveFolderPermissions", "Set-MailboxFolderPermission", "Add-MailboxFolderPermission"]) && +equals("actionResult", "Success") && +contains("log.folderPath", "Calendar") +', '2026-01-28 22:54:25.212572', true, true, 'origin', '["lastEvent.origin.user","log.Item_FolderPath"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1551, 'Communication Compliance Alert', 2, 2, 1, 'Discovery', 'T1087 - Account Discovery', 'Detects communication compliance policy violations including potentially threatening, harassing, or discriminatory language in messages, sensitive information sharing, or regulatory compliance violations in communications. + +Next Steps: +1. Review the specific compliance violation details and message content +2. Investigate the user''s recent communication patterns +3. Check if this is part of a pattern of policy violations +4. Escalate to HR or compliance team if inappropriate content is confirmed +5. Consider additional training or disciplinary action based on severity +6. 
Review and update communication policies if needed +', '["https://learn.microsoft.com/en-us/purview/communication-compliance","https://attack.mitre.org/techniques/T1087/"]', 'equals("action", "CommunicationComplianceAlert") || (equals("log.ComplianceType", "CommunicationCompliance") && equals("actionResult", "PolicyMatch")) || (contains("log.PolicyType", "Communication") && oneOf("log.Severity", ["Medium", "High", "Critical"]))', '2026-01-28 22:54:26.109483', true, true, 'origin', '["origin.user"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1552, 'Suspicious Compliance Alert Activity', 3, 3, 2, 'Defense Evasion', 'T1562.001 - Impair Defenses: Disable or Modify Tools', 'Detects suspicious patterns in compliance-related activities including alert suppression, policy modifications, or audit log tampering that could indicate attempts to evade security monitoring or hide malicious activities in Office 365 Security & Compliance Center. + +Next Steps: +1. Review the specific compliance action performed and verify if it was authorized +2. Check if the user has legitimate administrative privileges for compliance operations +3. Analyze the timing and frequency of compliance changes - multiple rapid changes may indicate malicious activity +4. Examine what compliance policies, rules, or alerts were modified and assess the security impact +5. Review audit logs for any related suspicious activities before and after this event +6. Verify if the changes align with documented change management processes +7. Check for any correlation with other security alerts or unusual user behavior +8. If unauthorized, immediately review and restore appropriate compliance settings +9. 
Consider implementing additional monitoring on compliance configuration changes +10. Escalate to security team if evidence suggests malicious intent or privilege abuse +', '["https://learn.microsoft.com/en-us/purview/audit-log-activities","https://attack.mitre.org/techniques/T1562/001/"]', 'equals("log.Workload", "SecurityComplianceCenter") && +( + equals("action", "AlertTriggered") || + equals("action", "AlertEntityGenerated") || + equals("action", "AlertUpdated") || + equals("action", "ComplianceSettingChanged") || + equals("action", "Set-ComplianceSecurityFilter") || + equals("action", "New-ComplianceSecurityFilter") || + equals("action", "Remove-ComplianceSecurityFilter") || + equals("action", "Set-AdminAuditLogConfig") || + equals("action", "Set-OrganizationConfig") || + contains("action", "CompliancePolicy") || + contains("action", "ComplianceRule") || + contains("action", "ComplianceTag") +) && +equals("actionResult", "Success") +', '2026-01-28 22:54:26.973857', true, true, 'origin', '["origin.user","action","log.ObjectId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1553, 'Conditional Access Bypass Detection', 3, 3, 2, 'Defense Evasion, Lateral Movement', 'T1550 - Use Alternate Authentication Material', 'Detects attempts to bypass conditional access policies through various methods including legacy authentication, trusted location manipulation, or session token abuse. This rule identifies when users successfully authenticate despite conditional access policy failures or use legacy authentication methods that may bypass modern security controls. + +Next Steps: +1. Verify the legitimacy of the authentication attempt with the user +2. 
Review conditional access policy configuration and compliance +3. Check if the authentication method used is approved for the user''s role +4. Investigate the source IP address and device details +5. Review recent policy changes that might have caused legitimate bypass +6. Consider implementing stricter conditional access policies for high-risk users +7. Monitor for additional suspicious activities from the same user or IP +', '["https://learn.microsoft.com/en-us/azure/active-directory/conditional-access/overview","https://attack.mitre.org/techniques/T1550/"]', '(oneOf("action", ["UserLoggedIn", "UserLoginFailed"]) && + ((oneOf("log.propertiesConditionalAccessStatus", ["failure", "notApplied"]) && equals("actionResult", "Success")) || + (oneOf("log.AuthenticationMethod", ["Legacy Authentication", "Basic Authentication"])) || + (oneOf("log.ClientAppUsed", ["Exchange ActiveSync", "IMAP4", "POP3", "SMTP Auth"])) || + (equals("log.DeviceDetail", "{}") && contains("log.Location", "trusted")) || + (equals("log.IsInteractive", "false") && equals("log.appAccessContextClientAppId", "")))) && +!equals("origin.user", "") && +!equals("origin.ip", "") +', '2026-01-28 22:54:27.768609', true, true, 'origin', '["origin.user","origin.ip"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1554, 'Data Loss Prevention Policy Violation', 3, 2, 1, 'Collection', 'T1213 - Data from Information Repositories', 'Detects violations of Data Loss Prevention (DLP) policies including attempts to share, access, or exfiltrate sensitive information such as credit card numbers, social security numbers, or confidential business data. 
+ +This rule triggers when Office 365 DLP policies detect unauthorized handling of sensitive data across Exchange, SharePoint, OneDrive, Teams, or Security Compliance Center workloads. + +**Next Steps:** +1. **Immediate Response:** Review the specific DLP policy violation details and sensitive data types involved +2. **User Investigation:** Verify if the user action was intentional and authorized, check user''s role and data access permissions +3. **Data Assessment:** Determine what sensitive information was involved and potential exposure scope +4. **Policy Review:** Evaluate if DLP policy settings are appropriate or need adjustment +5. **Incident Documentation:** Record the violation details, investigation findings, and remediation actions taken +6. **User Training:** If unintentional, provide additional data handling training to the user +7. **System Monitoring:** Monitor for additional violations from the same user or similar patterns +', '["https://learn.microsoft.com/en-us/purview/dlp-learn-about-dlp","https://attack.mitre.org/techniques/T1213/"]', '( + equals("action", "DLPRuleMatch") || + equals("action", "DlpPolicyMatch") || + equals("action", "DLPRuleUndo") || + contains("log.PolicyDetails", "DLP") || + contains("log.ExceptionInfo", "DLP") +) && +( + equals("log.Workload", "Exchange") || + equals("log.Workload", "SharePoint") || + equals("log.Workload", "OneDrive") || + equals("log.Workload", "Teams") || + equals("log.Workload", "SecurityComplianceCenter") +) && +!equals("actionResult", "Failed") +', '2026-01-28 22:54:28.714965', true, true, 'origin', '["lastEvent.origin.user","log.PolicyId","log.SensitiveInfoTypeData"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, 
rule_group_by_def) values (1555, 'eDiscovery Abuse Detection', 3, 2, 1, 'Collection', 'T1074 - Data Staged', 'Detects potential abuse of eDiscovery features including excessive searches, exports, or unauthorized access to sensitive data through eDiscovery operations. This rule identifies users performing multiple eDiscovery actions within a short timeframe, which could indicate malicious data staging or exfiltration attempts. + +Next Steps: +1. Review the user''s authorization and role assignments for eDiscovery operations +2. Examine the scope and content of the searches performed +3. Verify if the searches align with legitimate business requirements or ongoing legal matters +4. Check for unusual patterns in search queries or export activities +5. Investigate if sensitive data was accessed or exported without proper justification +6. Review data classification and sensitivity of accessed content +7. Correlate with other suspicious activities from the same user account +8. Consider implementing additional monitoring for the user if activity appears unauthorized +', '["https://learn.microsoft.com/en-us/purview/ediscovery-search-for-activities-in-the-audit-log","https://attack.mitre.org/techniques/T1074/"]', 'oneOf("action", ["SearchStarted", "SearchExported", "SearchCreated", "CaseAdded", "HoldCreated", "SearchExportDownloaded", "SearchPreviewed", "SearchResultsPurged", "RemoveSearchResultsSentToZoom", "RemoveSearchExported", "RemoveSearchPreviewed", "RemoveSearchResultsPurged", "SearchResultsSentToZoom", "ViewedSearchExported", "ViewedSearchPreviewed"]) && +!equals("origin.user", "") && +equals("actionResult", "Succeeded") && +exists("log.action") +', '2026-01-28 22:54:29.551792', true, true, 'origin', '["origin.user"]', 
'[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"log.action.keyword","operator":"filter_term","value":"{{.log.action}}"}],"or":null,"within":"now-1h","count":10}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1556, 'Exchange Admin Configuration Changes', 3, 3, 2, 'Persistence, Privilege Escalation', 'T1098 - Account Manipulation', 'Detects changes to Exchange administrative configuration that could impact security settings, user permissions, or mail flow policies. This rule monitors for successful execution of critical Exchange administrative cmdlets that can affect organizational security posture. + +Next Steps: +1. Review the specific administrative action performed and verify it was authorized +2. Check if the user performing the action has appropriate privileges +3. Examine the timing and frequency of administrative changes +4. Validate configuration changes against change management policies +5. Review any related audit logs for the time period around the change +6. Confirm the source IP and location of the administrative session +7. 
If unauthorized, immediately review affected configurations and revert if necessary +', '["https://docs.microsoft.com/en-us/exchange/security-and-compliance/exchange-auditing-reports/view-administrator-audit-log","https://attack.mitre.org/techniques/T1098/"]', 'oneOf("action", ["Set-AdminAuditLogConfig", "Set-TransportRule", "Set-MalwareFilterPolicy", "Set-HostedContentFilterPolicy", "Set-DkimSigningConfig", "Set-OrganizationConfig", "Set-RoleGroup", "Add-RoleGroupMember", "Remove-RoleGroupMember", "New-ManagementRoleAssignment", "Remove-ManagementRoleAssignment"]) && +equals("actionResult", "Succeeded") +', '2026-01-28 22:54:30.428878', true, true, 'origin', '["origin.user","action"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1557, 'Suspicious External Sharing Activity', 3, 2, 1, 'Exfiltration', 'T1567 - Exfiltration Over Web Service', 'Detects unauthorized or suspicious external sharing activities in SharePoint and OneDrive that could indicate data exfiltration attempts or policy violations when sharing sensitive content with external parties. + +Next Steps: +1. Review the shared content and assess its sensitivity level +2. Verify if the external sharing was authorized by data owners +3. Check if the recipient has legitimate business need for access +4. Validate against organizational data sharing policies +5. Examine user''s recent activity for other suspicious sharing patterns +6. Consider revoking access if sharing was unauthorized +7. Update DLP policies if needed to prevent future violations +8. 
Document findings and coordinate with data protection team +', '["https://learn.microsoft.com/en-us/purview/audit-log-activities","https://attack.mitre.org/techniques/T1567/"]', '(equals("log.Workload", "SharePoint") || equals("log.Workload", "OneDrive")) && +( + equals("action", "SharingInvitationCreated") || + equals("action", "AnonymousLinkCreated") || + equals("action", "AnonymousLinkUsed") || + equals("action", "SecureLinkCreated") || + equals("action", "SharingSet") || + equals("action", "CompanyLinkCreated") || + equals("action", "AddedToSecureLink") +) && +equals("actionResult", "Success") && +( + equals("log.TargetUserOrGroupType", "Guest") || + contains("log.SiteUrl", "external") || + contains("log.EventData", "AllowExternalSharing") +) +', '2026-01-28 22:54:31.270262', true, true, 'origin', '["origin.user","log.ObjectId"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1558, 'Abnormal Guest User Invitation Activity', 3, 2, 1, 'Persistence', 'Account Creation', 'Detects unusual spikes in guest user invitations which could indicate an attempt to establish persistence through external accounts or potential data exfiltration preparation by adding unauthorized external collaborators. + +Next Steps: +1. Review the user account initiating the invitations for any signs of compromise +2. Verify the legitimacy of the invited external users and their business justification +3. Check if the inviting user has appropriate permissions for guest invitations +4. Examine the invited users'' email domains for suspicious or unexpected organizations +5. Review any subsequent activity by the invited guest users +6. 
Validate that the invitation frequency aligns with normal business processes +7. Consider implementing additional approval workflows for guest user invitations +', '["https://learn.microsoft.com/en-us/purview/audit-log-activities","https://attack.mitre.org/techniques/T1136/"]', 'equals("log.Workload", "AzureActiveDirectory") && +( + equals("action", "Invite external user") || + equals("action", "InviteGuest") || + equals("action", "Add guest to group") || + equals("action", "Guest user invite redeemed") +) && +equals("actionResult", "Success") && exists("origin.user") +', '2026-01-28 22:54:32.172440', true, true, 'origin', '["origin.user"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"Invite external user"}],"or":null,"within":"now-1h","count":5}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1559, 'Office 365 Mail Flow Rule Modified', 3, 3, 2, 'Defense Evasion', 'T1564.008 - Hide Artifacts: Email Hiding Rules', 'Detects modifications to mail flow rules (transport rules) in Office 365. Attackers may create or modify mail flow rules to redirect, delete, or hide emails, bypassing security controls or exfiltrating data. Mail flow rules can be used to automatically delete emails containing specific keywords, forward sensitive emails to external addresses, or modify email content. + +Next Steps: +1. Review the specific mail flow rule that was modified, including its conditions and actions +2. Verify if the change was authorized and performed by a legitimate administrator +3. 
Check if the rule involves email forwarding to external domains or deletion of emails +4. Examine recent email traffic to identify any emails that may have been affected by the rule +5. Review mailbox audit logs for the affected user accounts +6. Check for other administrative changes made by the same user around the same time +7. If unauthorized, disable the malicious rule immediately and investigate the compromised account +', '["https://admindroid.com/how-to-audit-transport-rule-changes-report-in-microsoft-365","https://attack.mitre.org/techniques/T1564/008/"]', '(contains("action", "TransportRule") || equals("action", "New-TransportRule") || equals("action", "Set-TransportRule") || equals("action", "Remove-TransportRule") || equals("action", "Enable-TransportRule") || equals("action", "Disable-TransportRule")) && equals("actionResult", "Succeeded")', '2026-01-28 22:54:33.181131', true, true, 'origin', '["origin.user","log.action"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1560, 'Mass Email Deletion Detected', 2, 3, 2, 'Collection', 'T1114 - Email Collection', 'Detects when a user performs mass deletion of emails which could indicate data destruction, covering tracks, or malicious insider activity. Monitors for multiple HardDelete or SoftDelete operations within a short time window. + +Next Steps: +1. Verify the legitimacy of the user performing the deletions +2. Check if this aligns with any scheduled maintenance or cleanup activities +3. Investigate the content and importance of deleted emails if possible +4. Review user''s recent access patterns and behavior +5. Check for any concurrent suspicious activities from the same user +6. 
Validate if the user has appropriate permissions for bulk email operations +7. Consider temporarily restricting the user''s access pending investigation +', '["https://learn.microsoft.com/en-us/purview/audit-mailboxes","https://attack.mitre.org/techniques/T1114/"]', 'oneOf("action", ["HardDelete", "SoftDelete"]) && !equals("origin.user", "") && !equals("origin.ip", "")', '2026-01-28 22:54:34.228904', true, true, 'origin', '["origin.user"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"HardDelete"}],"or":[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"SoftDelete"}],"or":null,"within":"now-15m","count":25}],"within":"now-15m","count":25}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1561, 'Suspicious Mail Forwarding Rule Creation', 3, 2, 1, 'Collection', 'T1114.001 - Email Collection: Local Email Collection', 'Detects creation or modification of inbox rules that forward emails to external recipients, which could indicate data exfiltration attempts. This rule monitors for the creation of new inbox rules or modifications to existing rules that contain forwarding parameters. + +Next Steps: +1. Review the specific forwarding parameters in the log.Parameters field to identify the destination email address +2. Verify if the forwarding destination is a legitimate business email or an external/suspicious address +3. 
Check if the user who created the rule has legitimate business justification for email forwarding +4. Review recent authentication logs for the affected user account for signs of compromise +5. Examine the timing of the rule creation - if created outside business hours or immediately after login, investigate further +6. Check for other suspicious activities by the same user account around the same timeframe +7. If malicious, disable the forwarding rule and reset user credentials +', '["https://docs.microsoft.com/en-us/microsoft-365/compliance/auditing-troubleshooting-scenarios","https://attack.mitre.org/techniques/T1114/001/"]', 'oneOf("action", ["NewInboxRule", "Set-InboxRule", "UpdateInboxRules"]) && +equals("actionResult", "Succeeded") && +(contains("log.Parameters", "ForwardTo") || contains("log.Parameters", "ForwardAsAttachmentTo") || contains("log.Parameters", "RedirectTo")) +', '2026-01-28 22:54:35.351689', true, true, 'origin', '["origin.user","origin.ip"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1562, 'Multi-Geo Data Violations', 3, 2, 1, 'Exfiltration', 'T1030 - Data Transfer Size Limits', 'Detects violations of multi-geo data residency policies including unauthorized data movements between regions, cross-geo access violations, or attempts to bypass geo-restrictions. This rule identifies activities such as site geo moves, cross-geo file operations, and data location parameter modifications that may indicate unauthorized data transfer or policy violations. + +Next Steps: +1. Review the user''s authorization level for multi-geo operations +2. Verify if the geo movement was approved through proper change management +3. 
Check data classification and residency requirements for affected content +4. Examine the source and destination locations for compliance violations +5. Review audit logs for related suspicious activities by the same user +6. Validate that the operation aligns with organizational data governance policies +', '["https://learn.microsoft.com/en-us/microsoft-365/enterprise/microsoft-365-multi-geo","https://attack.mitre.org/techniques/T1030/"]', '(oneOf("action", ["SiteGeoMoveScheduled", "SiteGeoMoveCompleted", "SiteGeoMoveCancelled", "AllowedDataLocationAdded", "GeoQuotaAllocated", "MigrationJobCompleted"]) || +(equals("log.Workload", "OneDrive") && contains("log.ItemName", "cross-geo")) || +(!equals("log.SourceFileName", "") && !equals("log.DestinationFileName", "") && contains("log.SourceRelativeUrl", "geo") && contains("log.DestinationRelativeUrl", "geo")) || +(contains("log.Parameters", "DataLocation") || contains("log.Parameters", "PreferredDataLocation"))) && +!equals("origin.user", "") && +equals("actionResult", "Succeeded") && +!equals("origin.ip", "") +', '2026-01-28 22:54:36.478882', true, true, 'origin', '["origin.user","origin.ip"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1563, 'Office 365 OAuth Application Anomalous Activity', 3, 3, 2, 'Credential Access', 'T1528 - Steal Application Access Token', 'Detects anomalous OAuth application activities including suspicious consent patterns, high-privilege permission grants, or rapid consent events from a single user which may indicate compromised accounts or malicious OAuth apps. 
This rule identifies when users consent to OAuth applications with high-risk permissions such as Mail access, Files access, or User.ReadWrite permissions, followed by multiple consent events from the same IP address. + +Next Steps: +- Review the OAuth application details and permissions granted +- Verify if the user intentionally consented to the application +- Check for any suspicious activities from the user account after consent +- Investigate the OAuth application''s reputation and publisher +- Consider revoking application permissions if deemed malicious +- Review other users who may have consented to the same application +- Implement conditional access policies to restrict high-risk OAuth consents +', '["https://office365itpros.com/2023/12/15/oauth-apps-security/","https://attack.mitre.org/techniques/T1528/"]', 'equals("action", "Consent to application") && equals("actionResult", "Success") && (contains("log.Scope", "Mail.") || contains("log.Scope", "Files.") || contains("log.Scope", "User.ReadWrite") || contains("log.Scope", ".All")) && exists("origin.ip")', '2026-01-28 22:54:37.608661', true, true, 'origin', '["origin.ip","log.appAccessContextClientAppId"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.ip.keyword","operator":"filter_term","value":"{{.origin.ip}}"},{"field":"action.keyword","operator":"filter_term","value":"Consent to application"}],"or":null,"within":"now-1h","count":5}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1564, 'OneDrive Mass File Access Detected', 3, 1, 2, 'Collection', 'T1530 - Data from Cloud Storage Object', 'Detects when a user accesses an abnormally high number of files in OneDrive within a short time period 
(200+ files in 30 minutes), which could indicate automated data collection, reconnaissance, or preparation for data exfiltration. This behavior is often associated with insider threats, compromised accounts, or malicious scripts designed to harvest sensitive data from cloud storage. + +Next Steps: +1. Immediately verify the legitimacy of the user account and check for signs of compromise +2. Review the specific files accessed to determine sensitivity and business impact +3. Check for concurrent suspicious activities like bulk downloads or external sharing +4. Examine authentication logs for unusual login patterns or locations +5. Verify if the access pattern aligns with the user''s normal work responsibilities +6. Consider temporarily restricting the user''s OneDrive access pending investigation +7. Review OneDrive sharing permissions and external access configurations +8. Check for any automated tools or scripts that might be accessing the files +9. Correlate with other security events from the same user or IP address +10. 
Document findings and escalate to incident response team if malicious activity is confirmed +', '["https://o365reports.com/2024/01/30/audit-file-access-in-sharepoint-online-using-powershell/","https://attack.mitre.org/techniques/T1530/"]', 'oneOf("action", ["FileAccessed", "FileAccessedExtended", "FilePreviewed"]) && !equals("origin.user", "") && equals("log.Workload", "OneDrive")', '2026-01-28 22:54:38.734195', true, true, 'origin', '["origin.user","origin.ip"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"FileAccessed"},{"field":"log.Workload.keyword","operator":"filter_term","value":"OneDrive"}],"or":[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"FileAccessedExtended"},{"field":"log.Workload.keyword","operator":"filter_term","value":"OneDrive"}],"or":null,"within":"now-30m","count":67},{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"FilePreviewed"},{"field":"log.Workload.keyword","operator":"filter_term","value":"OneDrive"}],"or":null,"within":"now-30m","count":67}],"within":"now-30m","count":67}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1565, 'Power Apps Data Connector Suspicious Activity', 3, 2, 1, 'Collection', 'T1530 - Data from Cloud Storage Object', 'Detects creation or modification of Power Apps data connectors that could lead to 
unauthorized data access or exfiltration from corporate data sources. This rule identifies suspicious Power Apps activities including app creation/modification, data connection changes, and data export/import operations that may indicate an attempt to access or exfiltrate sensitive organizational data. + +Next Steps: +1. Review the specific Power Apps activity and determine if it was authorized +2. Verify the user''s role and permissions for Power Apps and data connectors +3. Examine the data sources being connected to and assess their sensitivity +4. Check for any recent data exports or unusual data access patterns +5. Review the user''s recent activity for other suspicious behavior +6. Validate the legitimacy of any new apps or data connections created +7. Consider implementing additional monitoring for the affected user account +8. If unauthorized, immediately revoke access and investigate potential data compromise +', '["https://docs.microsoft.com/en-us/power-platform/admin/audit-data-user-activity","https://attack.mitre.org/techniques/T1530/"]', 'oneOf("action", ["CreateApp", "EditApp", "DeleteApp", "ShareApp", "UnshareApp", "CreateDataConnection", "UpdateDataConnection", "DeleteDataConnection", "ExportData", "ImportData"]) && +equals("actionResult", "Success") && +equals("log.Workload", "PowerApps") +', '2026-01-28 22:54:39.898236', true, true, 'origin', '["origin.user","origin.ip"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1566, 'Suspicious Power Automate Flow Activity', 3, 3, 2, 'Collection', 'Automated Collection', 'Detects creation or modification of Power Automate flows that could be used for automated data exfiltration or unauthorized process 
automation. This rule identifies suspicious Power Automate activity including flow creation, modification, sharing, and connection management that may indicate malicious automation attempts. + +Next Steps: +1. Investigate the user account performing the Power Automate actions - verify if this is legitimate business activity +2. Review the specific flows created or modified - analyze their triggers, actions, and data access patterns +3. Check if the flows are accessing sensitive data sources (SharePoint, OneDrive, databases, external APIs) +4. Examine flow sharing patterns - verify if flows are being shared with unauthorized users or external accounts +5. Review connection creation/deletion activities - check for connections to suspicious external services +6. Correlate with other Office 365 audit logs to identify broader attack patterns +7. If malicious, immediately disable the flows and revoke user permissions +8. Consider implementing Power Platform DLP policies to prevent future abuse +', '["https://docs.microsoft.com/en-us/power-platform/admin/audit-data-user-activity","https://attack.mitre.org/techniques/T1119/"]', 'oneOf("action", ["CreateFlow", "EditFlow", "DeleteFlow", "EnableFlow", "DisableFlow", "ShareFlow", "UnshareFlow", "CreateConnection", "DeleteConnection"]) && +equals("actionResult", "Success") && equals("log.Workload", "PowerAutomate") +', '2026-01-28 22:54:41.063492', true, true, 'origin', '["origin.user","action"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1567, 'Office 365 Safe Attachment Policy Violation', 3, 3, 2, 'Initial Access', 'T1566.001 - Phishing: Spearphishing Attachment', 'Detects violations of Safe Attachment policies 
including malicious files blocked during detonation, policy modifications that reduce protection, or attempts to bypass attachment scanning that could lead to malware delivery. This rule identifies when Safe Attachment policies are modified, disabled, or when malicious attachments are detected and blocked by Office 365 security controls. + +Next Steps: +1. Review the specific Safe Attachment policy change or violation that occurred +2. Verify if the policy modification was authorized and legitimate +3. Check if any malicious attachments were successfully blocked or if any bypassed controls +4. Investigate the user account that performed the action for signs of compromise +5. Review email logs for any related suspicious email activity +6. Validate current Safe Attachment policy configurations are properly restrictive +7. Consider implementing additional email security controls if gaps are identified +', '["https://learn.microsoft.com/en-us/defender-office-365/safe-attachments-about","https://attack.mitre.org/techniques/T1566/001/"]', '(contains("action", "SafeAttachment") || equals("action", "Set-SafeAttachmentPolicy") || equals("action", "Remove-SafeAttachmentPolicy") || equals("action", "Disable-SafeAttachmentRule") || contains("action", "MalwareDetected") || contains("action", "AttachmentBlocked") || contains("action", "DetonationBlock")) && equals("actionResult", "Succeeded")', '2026-01-28 22:54:42.191549', true, true, 'origin', '["origin.user","category"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1568, 'Safe Links Click Pattern Anomaly', 2, 3, 1, 'Execution', 'T1204.001 - User Execution: Malicious Link', 'Detects unusual patterns in Safe Links click 
behavior that may indicate phishing attempts or malicious URL access. This rule monitors for multiple clicks on suspicious URLs from the same user within a short timeframe, which could indicate either a persistent phishing campaign or user behavior that poses security risks. + +Next Steps: +1. Review the user''s activity timeline to identify the source of the malicious links +2. Check email security logs for the original messages containing the blocked URLs +3. Verify if other users received similar phishing emails +4. Assess whether the user''s account has been compromised +5. Implement additional security awareness training for the affected user +6. Consider blocking the malicious domains at the network level +', '["https://learn.microsoft.com/en-us/defender-office-365/safe-links-about","https://attack.mitre.org/techniques/T1204/001/"]', 'equals("action", "ClickedSafeLink") && equals("actionResult", "Blocked") && !equals("origin.user", "")', '2026-01-28 22:54:43.272229', true, true, 'origin', '["origin.user"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"ClickedSafeLink"}],"or":null,"within":"now-30m","count":5}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1569, 'SharePoint Mass File Download Detected', 3, 1, 2, 'Collection', 'T1213 - Data from Information Repositories', 'Detects when a user downloads an unusually large number of files from SharePoint or OneDrive within a short time period, which could indicate data exfiltration or insider threat activity. 
This rule triggers when a user downloads 100 or more files within 30 minutes. + +**Next Steps:** +1. Review the user''s recent activity patterns and determine if the download volume is consistent with their role and responsibilities +2. Check if the user has legitimate business justification for downloading large amounts of data +3. Examine the types of files downloaded and their sensitivity levels +4. Verify the user''s location and device used for the downloads +5. Look for any concurrent suspicious activities such as unusual login times or access from new locations +6. Contact the user''s manager to validate the business need for the file downloads +7. Consider implementing additional monitoring or restrictions if the activity appears suspicious +8. Review data loss prevention (DLP) policies and ensure they are properly configured +', '["https://www.sharepointdiary.com/2020/10/how-to-track-document-downloads-using-audit-log-in-sharepoint-online.html","https://attack.mitre.org/techniques/T1213/"]', 'equals("action", "FileDownloaded") && !equals("origin.user", "") && oneOf("log.Workload", ["SharePoint", "OneDrive"])', '2026-01-28 22:54:44.403294', true, true, 'origin', '["origin.user","origin.ip"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"action.keyword","operator":"filter_term","value":"FileDownloaded"}],"or":null,"within":"now-30m","count":100}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1570, 'Suspicious Email Forwarding Rule Created', 3, 2, 1, 'Collection', 'T1114.003 - Email Collection: Email Forwarding Rule', 'Detects creation or modification of inbox rules 
that forward emails to external domains, which is a common technique used by attackers to exfiltrate emails and maintain persistence after compromising an account. Attackers often create these rules to automatically forward sensitive emails to external addresses under their control. + +Next Steps: +1. Verify if the user creating the rule is legitimate and authorized +2. Check the destination email address in the forwarding rule for external domains +3. Review recent authentication logs for the affected user account +4. Examine other administrative actions performed by this user recently +5. Check for any other suspicious inbox rules created by the same user +6. Validate if the forwarding address belongs to a legitimate business contact +7. Consider temporarily disabling the forwarding rule pending investigation +8. Review email logs to see what emails may have already been forwarded +', '["https://redcanary.com/blog/threat-detection/email-forwarding-rules/","https://attack.mitre.org/techniques/T1114/003/"]', 'oneOf("action", ["New-InboxRule", "Set-InboxRule"]) && !equals("origin.user", "") && (contains("log.Parameters", "ForwardTo") || contains("log.Parameters", "RedirectTo") || contains("log.Parameters", "ForwardAsAttachmentTo"))', '2026-01-28 22:54:45.534003', true, true, 'origin', '["origin.user"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1571, 'Suspicious Teams Message Export Activity', 3, 2, 1, 'Collection', 'T1114 - Email Collection', 'Detects when Teams messages are exported or accessed in bulk through API calls, which could indicate an attempt to exfiltrate chat history and shared files from Microsoft Teams. + +Next Steps: +1. 
Review the user''s recent Teams activity and authentication events +2. Check if the export activity was authorized or part of legitimate business operations +3. Examine the volume and scope of data accessed or exported +4. Verify the user''s current permissions and access levels +5. Look for any concurrent suspicious activities from the same user account +6. Contact the user to confirm if they initiated these export operations +7. Review any third-party applications that may have access to Teams data +', '["https://learn.microsoft.com/en-us/purview/audit-teams-audit-log-events","https://attack.mitre.org/techniques/T1114/"]', 'oneOf("action", ["MessagesListed", "MessagesExported", "RecordingExported", "TranscriptsExported"]) && !equals("origin.user", "") && equals("log.Workload", "MicrosoftTeams")', '2026-01-28 22:54:46.573120', true, true, 'origin', '["origin.user","log.appAccessContextClientAppId"]', '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{.origin.user}}"},{"field":"log.Workload.keyword","operator":"filter_term","value":"MicrosoftTeams"}],"or":null,"within":"now-1h","count":20}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1572, 'Threat Intelligence Alert Pattern', 3, 3, 2, 'Initial Access', 'Phishing', 'Detects threat intelligence alerts from Office 365 security services indicating known malicious activities, threat actor patterns, or indicators of compromise matching threat intelligence feeds. These alerts are generated when Microsoft''s threat intelligence systems identify suspicious activities, malicious file attachments, URLs, or communication patterns associated with known threat actors. 
+ +Next Steps: +1. Immediately review the alert details and associated threat intelligence indicators +2. Investigate the affected user account for any compromise indicators +3. Check email activity and file access patterns for the affected user +4. Verify if any malicious files were opened or downloaded +5. Review authentication logs for unusual sign-in patterns +6. Examine network connections and data transfer activities +7. Check for lateral movement attempts within the organization +8. Implement containment measures if compromise is confirmed +9. Update security policies and user training based on attack vectors +10. Report findings to incident response team and document lessons learned +', '["https://learn.microsoft.com/en-us/microsoft-365/security/defender/threat-analytics","https://attack.mitre.org/techniques/T1566/"]', 'equals("action", "ThreatIntelligenceAlertTriggered") || (equals("log.AlertType", "ThreatIntelligence") && !equals("actionResult", "Success"))', '2026-01-28 22:54:47.664261', true, true, 'origin', '["origin.user","target.user"]', '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1573, 'Microsoft 365 New Inbox Rule Created', 3, 2, 1, 'Email Collection', 'Collection', 'Credential Access consists of techniques for stealing credentials like account names and passwords. Techniques used to get credentials include keylogging or credential dumping. Using legitimate credentials can give adversaries access to systems, make them harder to detect, and provide the opportunity to create more accounts to help achieve their goals.
Identifies when a new Inbox rule is created in Microsoft 365. Inbox rules process messages in the Inbox based on conditions and take actions, such as moving a message to a specified folder or deleting a message. Adequate permissions are required on the mailbox to create an Inbox rule.', '["https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/responding-to-a-compromised-email-account?view=o365-worldwide","https://docs.microsoft.com/en-us/powershell/module/exchange/new-inboxrule?view=exchange-ps","https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/detect-and-remediate-outlook-rules-forms-attack?view=o365-worldwide","https://attack.mitre.org/techniques/T1114/","https://attack.mitre.org/techniques/T1114/003/","https://attack.mitre.org/tactics/TA0009/"]', 'equals("log.workLoad", "Exchange") && equals("action", "New-InboxRule") && oneOf("actionResult", ["Success","Succeeded","PartiallySucceeded","True"])', '2026-01-28 22:56:09.186372', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1574, 'Attempts to Brute Force a Microsoft 365 User Account', 3, 3, 3, 'Brute Force', 'Credential Access', 'Credential Access consists of techniques for stealing credentials like account names and passwords. Techniques used to get credentials include keylogging or credential dumping. Using legitimate credentials can give adversaries access to systems, make them harder to detect, and provide the opportunity to create more accounts to help achieve their goals.
Identifies attempts to brute force a Microsoft 365 user account. An adversary may attempt a brute force attack to obtain unauthorized access to user accounts.', '["https://attack.mitre.org/techniques/T1110/","https://attack.mitre.org/tactics/TA0006/"]', 'oneOf("log.workLoad", ["Exchange", "AzureActiveDirectory"]) && oneOf("action", ["UserLoginFailed", "PasswordLogonInitialAuthUsingPassword"]) && oneOf("actionResult", ["Failed", "False"]) && exists("origin.user")', '2026-01-28 22:56:10.186443', true, true, 'origin', null, '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.user.keyword","operator":"filter_term","value":"{{origin.user}}"}],"or":null,"within":"now-60s","count":5}]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1575, 'Potential Password Spraying of Microsoft 365 User Accounts', 3, 2, 2, 'Brute Force', 'Credential Access', 'Credential Access consists of techniques for stealing credentials like account names and passwords. Techniques used to get credentials include keylogging or credential dumping. Using legitimate credentials can give adversaries access to systems, make them harder to detect, and provide the opportunity to create more accounts to help achieve their goals.
Identifies a high number (5) of failed Microsoft 365 user authentication attempts from a single IP address within 60 seconds, which could be indicative of a password spraying attack. An adversary may attempt a password spraying attack to obtain unauthorized access to user accounts.', '["https://attack.mitre.org/techniques/T1110/","https://attack.mitre.org/tactics/TA0006/"]', 'oneOf("log.workLoad", ["Exchange", "AzureActiveDirectory"]) && oneOf("action", ["UserLoginFailed", "PasswordLogonInitialAuthUsingPassword"]) && oneOf("actionResult", ["Failed", "False"]) && exists("origin.ip")', '2026-01-28 22:56:11.274900', true, true, 'origin', null, '[{"indexPattern":"v11-log-o365-*","with":[{"field":"origin.ip.keyword","operator":"filter_term","value":"{{origin.ip}}"}],"or":null,"within":"now-60s","count":5}]', null);
+insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1576, 'O365 Excessive Single Sign-On Logon Errors', 2, 3, 2, 'Brute Force', 'Credential Access', 'Credential Access consists of techniques for stealing credentials like account names and passwords. Techniques used to get credentials include keylogging or credential dumping. Using legitimate credentials can give adversaries access to systems, make them harder to detect, and provide the opportunity to create more accounts to help achieve their goals.
Identifies accounts with a high number of single sign-on (SSO) logon errors. Excessive logon errors may indicate an attempt to brute force a password or SSO token.', '["https://attack.mitre.org/techniques/T1110/","https://attack.mitre.org/tactics/TA0006/"]', 'equals("log.workLoad", "AzureActiveDirectory") && equals("log.LogonError", "SsoArtifactInvalidOrExpired")', '2026-01-28 22:56:12.281662', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1577, 'Microsoft 365 Exchange DLP Policy Removed', 3, 3, 2, 'Impair Defenses', 'Defense Evasion', 'Defense Evasion consists of techniques that adversaries use to avoid detection throughout their compromise. Techniques used for defense evasion include uninstalling/disabling security software or obfuscating/encrypting data and scripts. Adversaries also leverage and abuse trusted processes to hide and masquerade their malware. Other tactics’ techniques are cross-listed here when those techniques include the added benefit of subverting defenses.
Identifies when a Data Loss Prevention (DLP) policy is removed in Microsoft 365. An adversary may remove a DLP policy to evade existing DLP monitoring.', '["https://docs.microsoft.com/en-us/microsoft-365/compliance/data-loss-prevention-policies?view=o365-worldwide","https://attack.mitre.org/techniques/T1562/","https://attack.mitre.org/tactics/TA0005/"]', 'equals("log.workLoad", "Exchange") && equals("action", "Remove-DlpPolicy") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:13.342286', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1578, 'Microsoft 365 Exchange Malware Filter Policy Deletion', 3, 3, 2, 'Impair Defenses', 'Defense Evasion', 'Defense Evasion consists of techniques that adversaries use to avoid detection throughout their compromise. Techniques used for defense evasion include uninstalling/disabling security software or obfuscating/encrypting data and scripts. Adversaries also leverage and abuse trusted processes to hide and masquerade their malware. Other tactics’ techniques are cross-listed here when those techniques include the added benefit of subverting defenses.
Identifies when a malware filter policy has been deleted in Microsoft 365. A malware filter policy is used to alert administrators that an internal user sent a message that contained malware. This may indicate an account or machine compromise that would need to be investigated. Deletion of a malware filter policy may be done to evade detection.', '["https://attack.mitre.org/techniques/T1562/","https://attack.mitre.org/tactics/TA0005/"]', 'equals("log.workLoad", "Exchange") && equals("action", "Remove-MalwareFilterPolicy") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:14.461010', true, true, 'origin', null, '[]', null);
+insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1579, 'Microsoft 365 Exchange Safe Attachment Rule Disabled', 3, 3, 2, 'Impair Defenses', 'Defense Evasion', 'Defense Evasion consists of techniques that adversaries use to avoid detection throughout their compromise. Techniques used for defense evasion include uninstalling/disabling security software or obfuscating/encrypting data and scripts. Adversaries also leverage and abuse trusted processes to hide and masquerade their malware. Other tactics’ techniques are cross-listed here when those techniques include the added benefit of subverting defenses.
Identifies when a safe attachment rule is disabled in Microsoft 365. Safe attachment rules can extend malware protections to include routing all messages and attachments without a known malware signature to a special hypervisor environment. An adversary or insider threat may disable a safe attachment rule to exfiltrate data or evade defenses.', '["https://attack.mitre.org/techniques/T1562/","https://attack.mitre.org/tactics/TA0005/"]', 'equals("log.workLoad", "Exchange") && equals("action", "Disable-SafeAttachmentRule") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:15.632685', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1580, 'Microsoft 365 Detection of a connection to a .onion domain', 3, 2, 2, 'Software', 'Command and Control', 'Tor is a software suite and network that provides increased anonymity on the Internet. It creates a multi-hop proxy network and utilizes multilayer encryption to protect both the message and routing information. 
Tor utilizes Onion Routing, in which messages are encrypted with multiple layers of encryption; at each step in the proxy network, the topmost layer is decrypted and the contents forwarded on to the next node until it reaches its destination.', '["https://attack.mitre.org/software/S0183/","https://attack.mitre.org/software/"]', 'contains("log.siteUrl", ".onion")', '2026-01-28 22:56:16.800635', true, true, 'target', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1581, 'Microsoft 365 Exchange Malware Filter Rule Modification', 3, 3, 2, 'Impair Defenses', 'Defense Evasion', 'Defense Evasion consists of techniques that adversaries use to avoid detection throughout their compromise. Techniques used for defense evasion include uninstalling/disabling security software or obfuscating/encrypting data and scripts. Adversaries also leverage and abuse trusted processes to hide and masquerade their malware. Other tactics’ techniques are cross-listed here when those techniques include the added benefit of subverting defenses.
Identifies when a malware filter rule has been deleted or disabled in Microsoft 365. An adversary or insider threat may want to modify a malware filter rule to evade detection.', '["https://attack.mitre.org/techniques/T1562/","https://attack.mitre.org/tactics/TA0005/"]', 'equals("log.workLoad", "Exchange") && oneOf("action", ["Remove-MalwareFilterRule","Disable-MalwareFilterRule"]) && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:17.977905', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1582, 'Microsoft 365 Exchange Transport Rule Creation', 3, 2, 1, 'Transfer Data to Cloud Account', 'Exfiltration', 'Exfiltration consists of techniques that adversaries may use to steal data from your network. Once they’ve collected data, adversaries often package it to avoid detection while removing it. This can include compression and encryption. Techniques for getting data out of a target network typically include transferring it over their command and control channel or an alternate channel and may also include putting size limits on the transmission.
Identifies a transport rule creation in Microsoft 365. Exchange Online mail transport rules should be set to not forward email to domains outside of your organization as a best practice. An adversary may create transport rules to exfiltrate data.', '["https://docs.microsoft.com/en-us/exchange/security-and-compliance/mail-flow-rules/mail-flow-rules","https://attack.mitre.org/techniques/T1537/","https://attack.mitre.org/tactics/TA0010/"]', 'equals("log.workLoad", "Exchange") && equals("action", "New-TransportRule") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:19.145972', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1583, 'Microsoft 365 Exchange Transport Rule Modification', 3, 2, 1, 'Transfer Data to Cloud Account', 'Exfiltration', 'Exfiltration consists of techniques that adversaries may use to steal data from your network. Once they’ve collected data, adversaries often package it to avoid detection while removing it. This can include compression and encryption. Techniques for getting data out of a target network typically include transferring it over their command and control channel or an alternate channel and may also include putting size limits on the transmission.
Identifies when a transport rule has been disabled or deleted in Microsoft 365. Mail flow rules (also known as transport rules) are used to identify and take action on messages that flow through your organization. An adversary or insider threat may modify a transport rule to exfiltrate data or evade defenses.', '["https://docs.microsoft.com/en-us/exchange/security-and-compliance/mail-flow-rules/mail-flow-rules","https://attack.mitre.org/techniques/T1537/","https://attack.mitre.org/tactics/TA0010/"]', 'equals("log.Workload", "Exchange") && oneOf("action", ["Remove-TransportRule","Disable-TransportRule"]) && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:20.319715', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1584, 'Microsoft 365 Exchange Anti-Phish Policy Deletion', 2, 3, 2, 'Phishing', 'Initial Access', 'Adversaries may send phishing messages to gain access to victim systems. All forms of phishing are electronically delivered social engineering. Phishing can be targeted, known as spearphishing. In spearphishing, a specific individual, company, or industry will be targeted by the adversary. More generally, adversaries can conduct non-targeted phishing, such as in mass malware spam campaigns.
Adversaries may send victims emails containing malicious attachments or links, typically to execute malicious code on victim systems. Phishing may also be conducted via third-party services, like social media platforms. Phishing may also involve social engineering techniques, such as posing as a trusted source.
Identifies the deletion of an anti-phishing policy in Microsoft 365. By default, Microsoft 365 includes built-in features that help protect users from phishing attacks. Anti-phishing polices increase this protection by refining settings to better detect and prevent attacks.', '["https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/set-up-anti-phishing-policies?view=o365-worldwide","https://attack.mitre.org/techniques/T1566/","https://attack.mitre.org/tactics/TA0001/"]', 'equals("log.Workload", "Exchange") && equals("action", "Remove-AntiPhishPolicy") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:21.363846', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1585, 'Microsoft 365 Exchange Anti-Phish Rule Modification', 2, 3, 2, 'Phishing', 'Initial Access', 'Adversaries may send phishing messages to gain access to victim systems. All forms of phishing are electronically delivered social engineering. Phishing can be targeted, known as spearphishing. In spearphishing, a specific individual, company, or industry will be targeted by the adversary. More generally, adversaries can conduct non-targeted phishing, such as in mass malware spam campaigns.
Adversaries may send victims emails containing malicious attachments or links, typically to execute malicious code on victim systems. Phishing may also be conducted via third-party services, like social media platforms. Phishing may also involve social engineering techniques, such as posing as a trusted source.
Identifies the modification of an anti-phishing rule in Microsoft 365. By default, Microsoft 365 includes built-in features that help protect users from phishing attacks. Anti-phishing rules increase this protection by refining settings to better detect and prevent attacks.', '["https://attack.mitre.org/techniques/T1566/","https://attack.mitre.org/tactics/TA0001/"]', 'equals("log.Workload", "Exchange") && oneOf("action", ["Remove-AntiPhishRule","Disable-AntiPhishRule"]) && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:22.461878', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1586, 'Microsoft 365 Exchange Safe Link Policy Disabled', 3, 2, 2, 'Phishing', 'Initial Access', 'When it comes to protecting its users, Microsoft takes the threat of phishing seriously. Spoofing is a common technique that''s used by attackers. Spoofed messages appear to originate from someone or somewhere other than the actual source. This technique is often used in phishing campaigns that are designed to obtain user credentials. The anti-spoofing technology in EOP specifically examines forgery of the From header in the message body (used to display the message sender in email clients). When EOP has high confidence that the From header is forged, the message is identified as spoofed.
Identifies when a Safe Link policy is disabled in Microsoft 365. Safe Link policies for Office applications extend phishing protection to documents that contain hyperlinks, even after they have been delivered to a user.', '["https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/atp-safe-links?view=o365-worldwide","https://attack.mitre.org/techniques/T1566/"]', 'equals("log.Workload", "Exchange") && equals("action", "Disable-SafeLinksRule") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:23.594850', true, true, 'origin', null, '[]', null);
+insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1587, 'Microsoft 365 Teams Custom Application Interaction Allowed', 2, 2, 2, 'Teams Configuration Management', 'Maintain access', 'Identifies when custom applications are allowed in Microsoft Teams. If an organization requires applications other than those available in the Teams app store, custom applications can be developed as packages and uploaded. 
An adversary may abuse this behavior to establish persistence in an environment.', '["https://docs.microsoft.com/en-us/microsoftteams/platform/concepts/deploy-and-publish/apps-upload"]', 'equals("log.Workload", "Exchange") && equals("action", "TeamsTenantSettingChanged") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:24.760455', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1588, 'Microsoft 365 Exchange DKIM Signing Configuration Disabled', 3, 3, 1, 'Phishing', 'Spoofing', 'When it comes to protecting its users, Microsoft takes the threat of phishing seriously. Spoofing is a common technique that''s used by attackers. Spoofed messages appear to originate from someone or somewhere other than the actual source. This technique is often used in phishing campaigns that are designed to obtain user credentials. The anti-spoofing technology in EOP specifically examines forgery of the From header in the message body (used to display the message sender in email clients). When EOP has high confidence that the From header is forged, the message is identified as spoofed.
Identifies when a DomainKeys Identified Mail (DKIM) signing configuration is disabled in Microsoft 365. With DKIM in Microsoft 365, messages that are sent from Exchange Online will be cryptographically signed. This will allow the receiving email system to validate that the messages were generated by a server that the organization authorized and not being spoofed.', '["https://docs.microsoft.com/en-us/microsoft-365/security/office-365-security/anti-spoofing-protection?view=o365-worldwide"]', 'equals("log.Workload", "Exchange") && equals("action", "Set-DkimSigningConfig") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:25.849264', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1589, 'Microsoft 365 Exchange Management Group Role Assignment', 2, 3, 2, 'Account Manipulation', 'Persistence', 'Adversaries may manipulate accounts to maintain access to victim systems. Account manipulation may consist of any action that preserves adversary access to a compromised account, such as modifying credentials or permission groups. These actions could also include account activity designed to subvert security policies, such as performing iterative password updates to bypass password duration policies and preserve the life of compromised credentials. In order to create or manipulate accounts, the adversary must already have sufficient permissions on systems or the domain.
Identifies when a new role is assigned to a management group in Microsoft 365. An adversary may attempt to add a role in order to maintain persistence in an environment.', '["https://docs.microsoft.com/en-us/microsoft-365/admin/add-users/about-admin-roles?view=o365-worldwide","https://attack.mitre.org/tactics/TA0003/","https://attack.mitre.org/techniques/T1098"]', 'equals("log.Workload", "Exchange") && equals("action", "New-ManagementRoleAssignment") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:27.027346', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1590, 'Microsoft 365 Teams External Access Enabled', 3, 2, 2, 'Account Manipulation', 'Persistence', 'Adversaries may manipulate accounts to maintain access to victim systems. Account manipulation may consist of any action that preserves adversary access to a compromised account, such as modifying credentials or permission groups. These actions could also include account activity designed to subvert security policies, such as performing iterative password updates to bypass password duration policies and preserve the life of compromised credentials. In order to create or manipulate accounts, the adversary must already have sufficient permissions on systems or the domain.
Identifies when external access is enabled in Microsoft Teams. External access lets Teams and Skype for Business users communicate with other users that are outside their organization. An adversary may enable external access or add an allowed domain to exfiltrate data or maintain persistence in an environment.', '["https://docs.microsoft.com/en-us/microsoftteams/manage-external-access","https://attack.mitre.org/tactics/TA0003/","https://attack.mitre.org/techniques/T1098"]', 'oneOf("log.Workload", ["SkypeForBusiness","MicrosoftTeams"]) && equals("action", "Set-CsTenantFederationConfiguration") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:28.190516', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1591, 'Microsoft 365 Teams Guest Access Enabled', 3, 2, 2, 'Account Manipulation', 'Persistence', 'Adversaries may manipulate accounts to maintain access to victim systems. Account manipulation may consist of any action that preserves adversary access to a compromised account, such as modifying credentials or permission groups. These actions could also include account activity designed to subvert security policies, such as performing iterative password updates to bypass password duration policies and preserve the life of compromised credentials. In order to create or manipulate accounts, the adversary must already have sufficient permissions on systems or the domain.
Identifies when guest access is enabled in Microsoft Teams. Guest access in Teams allows people outside the organization to access teams and channels. An adversary may enable guest access to maintain persistence in an environment.
The Microsoft 365 Fleet integration, Filebeat module, or similarly structured data is required to be compatible with this rule.', '["https://attack.mitre.org/tactics/TA0003/","https://attack.mitre.org/techniques/T1098"]', 'oneOf("log.Workload", ["SkypeForBusiness","MicrosoftTeams"]) && equals("action", "New-ManagementRoleAssignment") && oneOf("actionResult", ["Success","PartiallySucceeded","True"])', '2026-01-28 22:56:29.355607', true, true, 'origin', null, '[]', null); +insert into public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) values (1592, 'Microsoft 365 Possible Successful Password Guessing detected', 3, 2, 2, 'Credential Access', 'Brute Force: Password Guessing', 'Adversaries with no prior knowledge of legitimate credentials within the system or environment may guess passwords to attempt access to accounts. Without knowledge of the password for an account, an adversary may opt to systematically guess the password using a repetitive or iterative mechanism. An adversary may guess login credentials without prior knowledge of system or environment passwords during an operation by using a list of common passwords. 
Password guessing may or may not take into account the target''s policies on password complexity or use policies that may lock accounts out after a number of failed attempts.', '["https://attack.mitre.org/tactics/TA0006","https://attack.mitre.org/techniques/T1110/001/"]', 'oneOf("log.Workload", ["Exchange","AzureActiveDirectory"]) && equals("log.Operation", "UserLoginFailed") && oneOf("log.ResultStatus", ["Failed","False"]) && exists("log.UserId") && exists("log.ClientIp")', '2026-01-28 22:56:30.452230', true, true, 'origin', null, '[{"indexPattern":"v11-log-o365-*","with":[{"field":"log.Workload.keyword","operator":"filter_term","value":"Exchange, AzureActiveDirectory"},{"field":"log.Operation.keyword","operator":"filter_term","value":"UserLoginFailed"},{"field":"log.ResultStatus.keyword","operator":"filter_term","value":"Failed,False"},{"field":"log.UserId.keyword","operator":"filter_term","value":"{{.log.UserId}}"},{"field":"log.ClientIp.keyword","operator":"filter_term","value":"{{.log.ClientIp}}"}],"or":null,"within":"now-1m","count":10}]', null); + + +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1546, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1547, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1548, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1549, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1550, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1551, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1552, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1553, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, 
last_update) values (1554, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1555, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1556, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1557, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1558, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1559, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1560, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1561, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1562, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1563, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1564, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1565, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1566, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1567, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1568, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1569, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1570, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1571, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1572, 4, null); +insert into 
public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1573, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1574, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1575, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1576, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1577, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1578, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1579, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1580, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1581, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1582, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1583, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1584, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1585, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1586, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1587, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1588, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1589, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1590, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) 
values (1591, 4, null); +insert into public.utm_group_rules_data_type (rule_id, data_type_id, last_update) values (1592, 4, null); diff --git a/backend/src/main/resources/config/liquibase/master.xml b/backend/src/main/resources/config/liquibase/master.xml index 0dcd8b3ee..71b5ddba4 100644 --- a/backend/src/main/resources/config/liquibase/master.xml +++ b/backend/src/main/resources/config/liquibase/master.xml @@ -309,6 +309,23 @@ + + + + + + + + + + + + + + + + + diff --git a/filters/README.md b/filters/README.md index 8484c0ea4..ef2da9a6b 100644 --- a/filters/README.md +++ b/filters/README.md @@ -1 +1,3 @@ # UTMStack Filters + +Documentation on how to create and maintain custom filters can be found in: https://github.com/utmstack/UTMStack/wiki diff --git a/filters/pfsense/pfsense_fw.yml b/filters/pfsense/pfsense_fw.yml index 00e8ae088..338f642d3 100644 --- a/filters/pfsense/pfsense_fw.yml +++ b/filters/pfsense/pfsense_fw.yml @@ -21,7 +21,7 @@ pipeline: - grok: patterns: - fieldName: log.priority - pattern: '\<{{.data}}\>' + pattern: '\<{{.integer}}\>' - fieldName: log.syslogVersion pattern: '{{.integer}}' - fieldName: log.deviceTime @@ -31,6 +31,21 @@ pipeline: - fieldName: log.msgAll pattern: '{{.greedy}}' source: raw + where: regexMatch("raw", "\\d{4}-\\d{2}-\\d{2}") + + # Parsing syslog format date (OPNsense/pfSense) + - grok: + patterns: + - fieldName: log.priority + pattern: '\<{{.integer}}\>' + - fieldName: log.deviceTime + pattern: '{{.monthName}}{{.space}}{{.monthDay}}{{.space}}{{.time}}{{.space}}' + - fieldName: log.syslogHost + pattern: '{{.hostname}}{{.space}}' + - fieldName: log.msgAll + pattern: '{{.greedy}}' + source: raw + where: regexMatch("raw", "<\\d+>[A-Z][a-z]{2}\\s+\\d{1,2}\\s+\\d{2}") #......................................................................# # Removing unnecessary characters of the syslogHeader @@ -116,7 +131,7 @@ pipeline: - log.tcpWindow - log.urg - log.tcpOptions - where: 
log.csvMsg.matches("(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(4|6),(.+)(tcp|TCP|Tcp)") + where: regexMatch("log.csvMsg", "(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(4|6),(.+)(tcp|TCP|Tcp)") # .......................................................................# - csv: @@ -146,13 +161,13 @@ pipeline: - log.srcPort - log.dstPort - log.dataLength - where: log.csvMsg.matches("(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(4|6),(.+)(udp|UDP|Udp)") + where: regexMatch("log.csvMsg", "(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(4|6),(.+)(udp|UDP|Udp)") #......................................................................# - csv: source: log.csvMsg separator: "," - columns: + headers: - log.ruleNumber - log.subRuleNumber - log.anchor @@ -179,13 +194,13 @@ pipeline: - log.icmpData3 - log.icmpData4 - log.icmpData5 - where: log.csvMsg.matches("(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(4|6),(.+)(icmp|ICMP|Icmp)") + where: regexMatch("log.csvMsg", "(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(4|6),(.+)(icmp|ICMP|Icmp)") #......................................................................# - csv: source: log.csvMsg separator: "," - columns: + headers: - log.ruleNumber - log.subRuleNumber - log.anchor @@ -212,11 +227,11 @@ pipeline: - log.tcpWindow - log.urg - log.tcpOptions - where: log.csvMsg.matches("(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(6|17),(.+)(tcp|TCP|Tcp)") + where: regexMatch("log.csvMsg", "(.+),(\\s)?(match|\\w+),(block|pass),(in|out),(6|17),(.+)(tcp|TCP|Tcp)") #......................................................................# - csv: - source: csvMsg + source: log.csvMsg separator: "," headers: - log.ruleNumber @@ -239,7 +254,7 @@ pipeline: - log.srcPort - log.dstPort - log.dataLength - where: log.csvMsg.matches("(.+),(match|\\w+),(block|pass),(in|out),6,(.+)(udp|UDP|Udp)") + where: regexMatch("log.csvMsg", "(.+),(match|\\w+),(block|pass),(in|out),6,(.+)(udp|UDP|Udp)") 
#......................................................................# - csv: @@ -269,7 +284,7 @@ pipeline: - log.icmpData3 - log.icmpData4 - log.icmpData5 - where: log.csvMsg.matches("(.+),(match|\\w+),(block|pass),(in|out),(6|17),(.+)(icmp|ICMP|Icmp)") + where: regexMatch("log.csvMsg", "(.+),(match|\\w+),(block|pass),(in|out),(6|17),(.+)(icmp|ICMP|Icmp)") # ................................................# # Rename fields @@ -291,7 +306,7 @@ pipeline: - rename: from: - - log.destIp + - log.dstIp to: target.ip - rename: @@ -301,12 +316,7 @@ pipeline: - rename: from: - - log.destPort - to: target.port - - - rename: - from: - - log.destPort + - log.dstPort to: target.port # ................................................# diff --git a/filters/sophos/sophos_xg_firewall.yml b/filters/sophos/sophos_xg_firewall.yml index 7d4f0f9c7..980737252 100644 --- a/filters/sophos/sophos_xg_firewall.yml +++ b/filters/sophos/sophos_xg_firewall.yml @@ -1,4 +1,4 @@ -# Sophos_XG filter, version 3.0.2 +# Sophos_XG filter, version 3.0.4 # Supports SF 20.0 version log types # See manual: https://docs.sophos.com/nsg/sophos-firewall/20.0/pdf/sf-syslog-guide-20.0.pdf # and documentation https://docs.sophos.com/nsg/sophos-firewall/20.0/Help/en-us/webhelp/onlinehelp/AdministratorHelp/Logs/TroubleshootingLogs/LogFileDetails/index.html#https-ftp-waf @@ -11,7 +11,7 @@ pipeline: - grok: patterns: - fieldName: log.syslogPriority - pattern: '\<{{.data}}\>' + pattern: '\<{{.integer}}\>' - fieldName: log.syslogVersion pattern: '{{.integer}}' - fieldName: log.syslogDeviceTime @@ -29,7 +29,7 @@ pipeline: - grok: patterns: - fieldName: log.syslogPriority - pattern: '\<{{.data}}\>' + pattern: '\<{{.integer}}\>' - fieldName: log.restData pattern: '{{.greedy}}' source: raw diff --git a/frontend/src/app/app-module/app-module-view/app-module-view.component.ts b/frontend/src/app/app-module/app-module-view/app-module-view.component.ts index 526e15a15..1cbd53a10 100644 --- 
a/frontend/src/app/app-module/app-module-view/app-module-view.component.ts +++ b/frontend/src/app/app-module/app-module-view/app-module-view.component.ts @@ -92,7 +92,6 @@ export class AppModuleViewComponent implements OnInit, OnDestroy { return res.body ? res.body.sort((a, b) => a > b ? 1 : -1) : []; }), catchError(error => { - console.log(error); this.utmToastService.showError('Failed to fetch categories', 'An error occurred while fetching module data.'); return of([]); @@ -116,14 +115,12 @@ export class AppModuleViewComponent implements OnInit, OnDestroy { } filterByCategory($event: any) { - console.log('filter'); this.req['moduleCategory.equals'] = $event; this.refreshModules(); } onSearch($event: string) { - console.log('search'); this.req.page = 0; this.req['prettyName.contains'] = $event; this.refreshModules(); diff --git a/frontend/src/app/app-module/guides/guide-syslog/guide-syslog.component.ts b/frontend/src/app/app-module/guides/guide-syslog/guide-syslog.component.ts index 42d382e3c..0f855f2d7 100644 --- a/frontend/src/app/app-module/guides/guide-syslog/guide-syslog.component.ts +++ b/frontend/src/app/app-module/guides/guide-syslog/guide-syslog.component.ts @@ -1,8 +1,8 @@ import {Component, Input, OnInit} from '@angular/core'; import {UtmModulesEnum} from '../../shared/enum/utm-module.enum'; +import {PLATFORMS} from "../shared/constant"; import {Step} from '../shared/step'; import {SYSLOGSTEPS} from './syslog.steps'; -import {PLATFORMS} from "../shared/constant"; @Component({ selector: 'app-guide-syslog', @@ -45,7 +45,6 @@ export class GuideSyslogComponent implements OnInit { {module: UtmModulesEnum.DECEPTIVE_BYTES, port: '7010 TCP'}, {module: UtmModulesEnum.DECEPTIVE_BYTES, port: '7010 UDP'}, - {module: UtmModulesEnum.SOPHOS_XG, port: '7008 TCP'}, {module: UtmModulesEnum.SOPHOS_XG, port: '7008 UDP'}, {module: UtmModulesEnum.SYSLOG, port: '7014 TCP'}, diff --git a/frontend/src/app/app-module/services/module.service.ts 
b/frontend/src/app/app-module/services/module.service.ts index 6b960b13e..e6e8da2c3 100644 --- a/frontend/src/app/app-module/services/module.service.ts +++ b/frontend/src/app/app-module/services/module.service.ts @@ -40,7 +40,6 @@ export class ModuleService { }), shareReplay(1), catchError(error => { - console.error(error); this.utmToastService.showError( 'Failed to fetch servers', 'An error occurred while fetching server list.' diff --git a/frontend/src/app/graphic-builder/chart-builder/chart-builder.component.html b/frontend/src/app/graphic-builder/chart-builder/chart-builder.component.html index 7181df14a..f3338af6f 100644 --- a/frontend/src/app/graphic-builder/chart-builder/chart-builder.component.html +++ b/frontend/src/app/graphic-builder/chart-builder/chart-builder.component.html @@ -1,30 +1,30 @@
-
-
- Visualization builder +
+
+ {{mode==='edit' ? visualization && visualization.name : 'Visualization builder'}}
-
- {{visualization.name}} -
-
- - -
+ + + +
-
+
@@ -37,7 +37,7 @@
- +
-
+ + +
+
+
+ {{ errorMessage }} +
+
+
- - + + + + @@ -184,7 +203,7 @@
-
+
{ this.chart = params[VisualizationQueryParamsEnum.CHART]; this.mode = params[VisualizationQueryParamsEnum.MODE]; - this.pattern = params[VisualizationQueryParamsEnum.PATTERN_NAME]; - this.patternId = Number(params[VisualizationQueryParamsEnum.PATTERN_ID]); if (params[VisualizationQueryParamsEnum.CALLBACK]) { this.callback = params[VisualizationQueryParamsEnum.CALLBACK]; } @@ -89,12 +105,22 @@ export class ChartBuilderComponent implements OnInit, AfterViewChecked { ngOnInit() { this.tempId = Math.floor(Math.random() * (1000000 - 20000 + 1) + 20000); - this.getFields(); if (this.mode === 'edit') { this.visualizationService.find(this.visualizationId).subscribe(vis => { this.visualization = vis.body; + if (this.visualization.sqlQuery) { + this.visualization.queryLanguage = ChartBuilderQueryLanguageEnum.SQL; + this.sqlQuery = this.visualization.sqlQuery; + this.isSqlMode = true; + } else { + this.visualization.queryLanguage = ChartBuilderQueryLanguageEnum.DSL; + this.pattern = this.visualization.pattern.pattern; + this.patternId = this.visualization.pattern.id; + this.isSqlMode = false; + } const defaultFilterTime = this.getDefaultFilterTimeFromVisualization(this.visualization.filterType); this.defaultTime = defaultFilterTime ? 
defaultFilterTime : new ElasticFilterDefaultTime('now-24h', 'now'); + this.loading = false; }); } else { this.visualization = { @@ -107,17 +133,18 @@ export class ChartBuilderComponent implements OnInit, AfterViewChecked { }, chartAction: new ChartActionType(false), filterType: [{field: '@timestamp', operator: ElasticOperatorsEnum.IS_BETWEEN, value: ['now-24h', 'now']}], - idPattern: this.patternId, + idPattern: null, chartType: this.chart, eventType: this.type, userCreated: null, name: '', - pattern: { - id: this.patternId, - pattern: this.pattern - }, + pattern: null, + queryLanguage: ChartBuilderQueryLanguageEnum.DSL }; + this.loading = false; } + + this.getIndexPatterns(); } getFields() { @@ -137,20 +164,39 @@ export class ChartBuilderComponent implements OnInit, AfterViewChecked { runVisualization() { this.running = true; + if (this.isSqlMode) { + this.errorMessage = this.sqlValidationService.validateSqlQuery(this.sqlQuery); + if (this.errorMessage) { + this.running = false; + return; + } + this.visualization.sqlQuery = this.sqlQuery; + this.visualization.queryLanguage = ChartBuilderQueryLanguageEnum.SQL; + } else { + this.visualization.queryLanguage = ChartBuilderQueryLanguageEnum.DSL; + } this.runVisualizationBehavior.$run.next(this.tempId); } saveVisualization() { + if (this.isSqlMode && this.sqlQuery === '') { + this.errorMessage = 'SQL Query cannot be empty'; + return; + } const modal = this.modalService.open(VisualizationSaveComponent, {centered: true}); + if (this.isSqlMode) { + this.nullifyUnusedFields(); + this.visualization.queryLanguage = ChartBuilderQueryLanguageEnum.SQL; + this.visualization.sqlQuery = this.sqlQuery; + } else { + this.visualization.queryLanguage = ChartBuilderQueryLanguageEnum.DSL; + this.visualization.sqlQuery = ''; + } modal.componentInstance.visualization = this.visualization; modal.componentInstance.callback = this.callback; modal.componentInstance.mode = this.mode; } - chartIconResolver(): string { - return 
UTM_CHART_ICONS[this.chart]; - } - onFilterChange($event: ElasticFilterType[]) { this.configChange = true; this.visualization.filterType = $event; @@ -277,4 +323,64 @@ export class ChartBuilderComponent implements OnInit, AfterViewChecked { this.location.back(); } } + + toggleSqlMode($event: boolean) { + this.visualization.sqlQuery = ''; + this.isSqlMode = $event; + } + + indexPatternSelected(pattern: UtmIndexPattern) { + this.pattern = pattern.pattern; + this.visualization.pattern = pattern; + this.visualization.idPattern = pattern.id; + this.patternId = pattern.id; + this.getFields(); + } + + loadFieldNames() { + return [ + ...this.localFieldService.getPatternStoredFields(ALERT_INDEX_PATTERN).map(f => f.name), + ...this.localFieldService.getPatternStoredFields(LOG_INDEX_PATTERN).map(f => f.name) + ]; + } + + indexPatternLoaded(indexPatternNames: string[]) { + this.indexPatternNames = indexPatternNames; + } + + nullifyUnusedFields() { + this.visualization.aggregationType = null; + } + + clearMessages(): void { + this.errorMessage = ''; + } + + getIndexPatterns() { + const req = { + page: 0, + size: 1000, + sort: 'id,asc', + 'isActive.equals': true, + }; + this.indexPatternService.query(req) + .pipe( + map((res: HttpResponse) => res.body || []) + ) + .subscribe({ + next: data => { + this.indexPatternNames = data.map((pattern: UtmIndexPattern) => pattern.pattern); + this.indexPattern = data; + }, + error: () => this.indexPatternNames = [], + }); + } + + onIndexPatternChange($event: string) { + const indexPattern = this.indexPattern.find(p => p.pattern === $event); + + if (indexPattern) { + this.indexPatternSelected(indexPattern); + } + } } diff --git a/frontend/src/app/graphic-builder/shared/components/viewer/chart-view/chart-view.component.ts b/frontend/src/app/graphic-builder/shared/components/viewer/chart-view/chart-view.component.ts index 7377d17db..830c62476 100644 --- 
a/frontend/src/app/graphic-builder/shared/components/viewer/chart-view/chart-view.component.ts +++ b/frontend/src/app/graphic-builder/shared/components/viewer/chart-view/chart-view.component.ts @@ -1,24 +1,15 @@ -import { - ChangeDetectionStrategy, - Component, - EventEmitter, - Input, - OnChanges, - OnDestroy, - OnInit, - Output, SimpleChanges -} from '@angular/core'; -import { Observable, of, Subject} from 'rxjs'; +import {ChangeDetectionStrategy, Component, EventEmitter, Input, OnDestroy, OnInit, Output} from '@angular/core'; +import {Observable, of, Subject} from 'rxjs'; import {catchError, filter, switchMap, takeUntil, tap} from 'rxjs/operators'; import {UtmToastService} from '../../../../../shared/alert/utm-toast.service'; import {DashboardBehavior} from '../../../../../shared/behaviors/dashboard.behavior'; -import EChartOption = echarts.EChartOption; -import {TimeFilterBehavior} from "../../../../../shared/behaviors/time-filter.behavior"; +import {TimeFilterBehavior} from '../../../../../shared/behaviors/time-filter.behavior'; import {ChartFactory} from '../../../../../shared/chart/factories/echart-factory/chart-factory'; import {VisualizationType} from '../../../../../shared/chart/types/visualization.type'; import { ElasticFilterDefaultTime } from '../../../../../shared/components/utm/filters/elastic-filter-time/elastic-filter-time.component'; +import {ChartBuilderQueryLanguageEnum} from '../../../../../shared/enums/chart-builder-query-language.enum'; import {ChartTypeEnum} from '../../../../../shared/enums/chart-type.enum'; import {ElasticOperatorsEnum} from '../../../../../shared/enums/elastic-operators.enum'; import {RefreshService, RefreshType} from '../../../../../shared/services/util/refresh.service'; @@ -30,6 +21,7 @@ import {RunVisualizationService} from '../../../services/run-visualization.servi import {UtmChartClickActionService} from '../../../services/utm-chart-click-action.service'; import {rebuildVisualizationFilterTime} from 
'../../../util/chart-filter/chart-filter.util'; import {resolveDefaultVisualizationTime} from '../../../util/visualization/visualization-render.util'; +import EChartOption = echarts.EChartOption; // @ts-ignore require('echarts-wordcloud'); @@ -90,7 +82,8 @@ export class ChartViewComponent implements OnInit, OnDestroy { .subscribe(id => { if (id && this.chartId === id) { this.refreshService.sendRefresh(this.refreshType); - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); + this.defaultTime = this.visualization.filterType && this.visualization.queryLanguage !== ChartBuilderQueryLanguageEnum.SQL ? + resolveDefaultVisualizationTime(this.visualization) : new ElasticFilterDefaultTime('now-30d', 'now'); } }); @@ -132,12 +125,12 @@ export class ChartViewComponent implements OnInit, OnDestroy { }); if (!this.defaultTime) { - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); - - if (!this.defaultTime) { - this.refreshService.sendRefresh(this.refreshType); - } + this.defaultTime = this.visualization.filterType ? resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); + } + if (this.building) { + this.refreshService.sendRefresh(this.refreshType); } } @@ -165,6 +158,8 @@ export class ChartViewComponent implements OnInit, OnDestroy { runVisualization() { this.loadingOption = true; + this.visualization.queryLanguage = this.visualization.sqlQuery ? 
ChartBuilderQueryLanguageEnum.SQL + : ChartBuilderQueryLanguageEnum.DSL; return this.runVisualizationService.run(this.visualization) .pipe( tap((data) => { @@ -173,7 +168,8 @@ export class ChartViewComponent implements OnInit, OnDestroy { this.runWithError = false; this.onChartChange(data); }), - catchError(() => { + catchError((err) => { + console.error(err); this.loadingOption = false; this.runWithError = true; this.echartOption = null; diff --git a/frontend/src/app/graphic-builder/shared/components/viewer/goal-view/goal-view.component.ts b/frontend/src/app/graphic-builder/shared/components/viewer/goal-view/goal-view.component.ts index 94882ebd9..b5439a20e 100644 --- a/frontend/src/app/graphic-builder/shared/components/viewer/goal-view/goal-view.component.ts +++ b/frontend/src/app/graphic-builder/shared/components/viewer/goal-view/goal-view.component.ts @@ -3,11 +3,15 @@ import {Observable, of, Subject} from 'rxjs'; import {catchError, filter, switchMap, takeUntil, tap} from 'rxjs/operators'; import {UtmToastService} from '../../../../../shared/alert/utm-toast.service'; import {DashboardBehavior} from '../../../../../shared/behaviors/dashboard.behavior'; +import {TimeFilterBehavior} from '../../../../../shared/behaviors/time-filter.behavior'; import {EchartClickAction} from '../../../../../shared/chart/types/action/echart-click-action'; import {UtmGoalOption} from '../../../../../shared/chart/types/charts/goal/utm-goal-option'; import {MetricResponse} from '../../../../../shared/chart/types/metric/metric-response'; import {VisualizationType} from '../../../../../shared/chart/types/visualization.type'; -import {ElasticFilterDefaultTime} from '../../../../../shared/components/utm/filters/elastic-filter-time/elastic-filter-time.component'; +import { + ElasticFilterDefaultTime +} from '../../../../../shared/components/utm/filters/elastic-filter-time/elastic-filter-time.component'; +import {ChartBuilderQueryLanguageEnum} from 
'../../../../../shared/enums/chart-builder-query-language.enum'; import {ChartTypeEnum} from '../../../../../shared/enums/chart-type.enum'; import {RefreshService, RefreshType} from '../../../../../shared/services/util/refresh.service'; import {TimeFilterType} from '../../../../../shared/types/time-filter.type'; @@ -18,7 +22,6 @@ import {RunVisualizationService} from '../../../services/run-visualization.servi import {UtmChartClickActionService} from '../../../services/utm-chart-click-action.service'; import {rebuildVisualizationFilterTime} from '../../../util/chart-filter/chart-filter.util'; import {resolveDefaultVisualizationTime} from '../../../util/visualization/visualization-render.util'; -import {TimeFilterBehavior} from "../../../../../shared/behaviors/time-filter.behavior"; @Component({ selector: 'app-goal-view', @@ -66,7 +69,9 @@ export class GoalViewComponent implements OnInit, OnDestroy { .subscribe((id) => { if (id && this.chartId === id) { this.refreshService.sendRefresh(this.refreshType); - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); + this.defaultTime = this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); } }); this.dashboardBehavior.$filterDashboard @@ -94,31 +99,17 @@ export class GoalViewComponent implements OnInit, OnDestroy { }); if (!this.defaultTime) { - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); - - if (!this.defaultTime) { - this.refreshService.sendRefresh(this.refreshType); - } + this.defaultTime = this.visualization.filterType ? 
resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); + } + if (this.building) { + this.refreshService.sendRefresh(this.refreshType); } } runVisualization() { this.runningChart = true; - /*this.runVisualizationService.run(this.visualization).subscribe(data => { - this.runningChart = false; - this.runned.emit('runned'); - this.data = data; - this.extractGoals(); - this.error = false; - }, error => { - this.runningChart = false; - this.error = true; - this.runned.emit('runned'); - this.toastService.showError('Error', - 'Error occurred while running visualization'); - });*/ - return this.runVisualizationService.run(this.visualization) .pipe( tap((data) => { @@ -154,9 +145,9 @@ export class GoalViewComponent implements OnInit, OnDestroy { this.goals = []; const config: UtmGoalOption[] = this.visualization.chartConfig; if (data) { - for (const d of data) { - const metricIndex = this.visualization.aggregationType.metrics.findIndex(value => Number(value.id) === Number(d.metricId)); - const optionIndex = config.findIndex(value => Number(value.metricId) === Number(d.metricId)); + data.forEach((d, index) => { + const metricId = isNaN(Number(d.metricId)) ? index + 1 : Number(d.metricId); + const optionIndex = config.findIndex(value => Number(value.metricId) === metricId); const max = (config[optionIndex].max ? config[optionIndex].max : this.calcTotal(data)); const goal = new UtmGoalOption(Number(d.metricId), this.calcPercent(max, d.value, config[optionIndex].decimal), @@ -168,11 +159,13 @@ export class GoalViewComponent implements OnInit, OnDestroy { config[optionIndex].cap, config[optionIndex].type, config[optionIndex].thresholds, - d.bucketKey ? d.bucketKey : extractMetricLabel(d.metricId, this.visualization), + this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + d.bucketKey ? 
d.bucketKey : extractMetricLabel(d.metricId, this.visualization) + : d.metricId, config[optionIndex].foregroundColor ); this.goals.push(goal); - } + }); } return this.goals; } diff --git a/frontend/src/app/graphic-builder/shared/components/viewer/map-view/map-view.component.ts b/frontend/src/app/graphic-builder/shared/components/viewer/map-view/map-view.component.ts index f0fe83e1d..7892cd982 100644 --- a/frontend/src/app/graphic-builder/shared/components/viewer/map-view/map-view.component.ts +++ b/frontend/src/app/graphic-builder/shared/components/viewer/map-view/map-view.component.ts @@ -11,6 +11,7 @@ import {UtmScatterMapOptionType} from '../../../../../shared/chart/types/charts/ import {LeafletMapType} from '../../../../../shared/chart/types/map/leaflet/leaflet-map.type'; import {VisualizationType} from '../../../../../shared/chart/types/visualization.type'; import {ElasticFilterDefaultTime} from '../../../../../shared/components/utm/filters/elastic-filter-time/elastic-filter-time.component'; +import {ChartBuilderQueryLanguageEnum} from '../../../../../shared/enums/chart-builder-query-language.enum'; import {ChartTypeEnum} from '../../../../../shared/enums/chart-type.enum'; import {RefreshService, RefreshType} from '../../../../../shared/services/util/refresh.service'; import {TimeFilterType} from '../../../../../shared/types/time-filter.type'; @@ -295,7 +296,9 @@ export class MapViewComponent implements OnInit, AfterViewInit, OnDestroy { .subscribe(id => { if (id && this.chartId === id) { this.refreshService.sendRefresh(this.refreshType); - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); + this.defaultTime = this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? 
+ resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); } }); this.dashboardBehavior.$filterDashboard @@ -323,12 +326,12 @@ export class MapViewComponent implements OnInit, AfterViewInit, OnDestroy { }); if (!this.defaultTime) { - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); - - if (!this.defaultTime) { - this.refreshService.sendRefresh(this.refreshType); - } + this.defaultTime = this.visualization.filterType ? resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); + } + if (this.building) { + this.refreshService.sendRefresh(this.refreshType); } } @@ -396,7 +399,7 @@ export class MapViewComponent implements OnInit, AfterViewInit, OnDestroy { 'padding-top:10px' + 'font: 13px / 20px Poppins, sans-serif;' + 'pointer-events: none;">' + - getBucketLabel(0, this.visualization) + + this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? getBucketLabel(0, this.visualization) : '' + '
' + ' Number(value.metricId) === Number(d.metricId)); kpi.push({ value: d.value, - label: extractMetricLabel(this.visualization.aggregationType.metrics[metricIndex].id, this.visualization), - group: this.extractGroupName(d.bucketKey), + label: this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + extractMetricLabel(this.visualization.aggregationType.metrics[metricIndex].id, this.visualization) + : d.metricId ? d.metricId : '', + group: this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + this.extractGroupName(d.bucketKey) + : d.bucketId + ' - ' + d.bucketKey, bucketKey: d.bucketKey, color: optionIndex > -1 ? this.visualization.chartConfig[optionIndex].color : null, icon: optionIndex > -1 ? this.visualization.chartConfig[optionIndex].icon : null, diff --git a/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.html b/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.html index a581a2c20..26707872f 100644 --- a/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.html +++ b/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.html @@ -48,7 +48,7 @@ *ngFor="let column of data.columns" [sortable]="column.split('->')[0]" appColumnSortable class="font-weight-semibold cursor-pointer" scope="col"> - {{column.split('->')[1]}} + {{ column.split('->')[1] || column.split('->')[0] }} diff --git a/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.ts b/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.ts index fb87dd3d0..4572a0baa 100644 --- a/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.ts +++ b/frontend/src/app/graphic-builder/shared/components/viewer/table-view/table-view.component.ts @@ -13,7 +13,7 @@ import {Observable, of, Subject} from 'rxjs'; import 
{catchError, filter, map, switchMap, takeUntil, tap} from 'rxjs/operators'; import {UtmToastService} from '../../../../../shared/alert/utm-toast.service'; import {DashboardBehavior} from '../../../../../shared/behaviors/dashboard.behavior'; -import {TimeFilterBehavior} from "../../../../../shared/behaviors/time-filter.behavior"; +import {TimeFilterBehavior} from '../../../../../shared/behaviors/time-filter.behavior'; import {EchartClickAction} from '../../../../../shared/chart/types/action/echart-click-action'; import {UtmTableOptionType} from '../../../../../shared/chart/types/charts/table/utm-table-option.type'; import {TableBuilderResponseType} from '../../../../../shared/chart/types/response/table-builder-response.type'; @@ -22,6 +22,7 @@ import {ElasticFilterDefaultTime} from '../../../../../shared/components/utm/fil import {SortableDirective} from '../../../../../shared/directives/sortable/sortable.directive'; import {SortDirection} from '../../../../../shared/directives/sortable/type/sort-direction.type'; import {SortEvent} from '../../../../../shared/directives/sortable/type/sort-event'; +import {ChartBuilderQueryLanguageEnum} from '../../../../../shared/enums/chart-builder-query-language.enum'; import {ChartTypeEnum} from '../../../../../shared/enums/chart-type.enum'; import {ChartValueSeparator} from '../../../../../shared/enums/chart-value-separator'; import {RefreshService, RefreshType} from '../../../../../shared/services/util/refresh.service'; @@ -98,7 +99,9 @@ export class TableViewComponent implements OnInit, OnChanges, OnDestroy { .subscribe(id => { if (id && this.chartId === id) { this.refreshService.sendRefresh(this.refreshType); - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); + this.defaultTime = this.visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? 
+ resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); } }); this.dashboardBehavior.$filterDashboard @@ -126,12 +129,12 @@ export class TableViewComponent implements OnInit, OnChanges, OnDestroy { }); if (!this.defaultTime) { - this.defaultTime = resolveDefaultVisualizationTime(this.visualization); - - if (!this.defaultTime) { - this.refreshService.sendRefresh(this.refreshType); - } + this.defaultTime = this.visualization.filterType ? resolveDefaultVisualizationTime(this.visualization) + : new ElasticFilterDefaultTime('now-30d', 'now'); + } + if (this.building) { + this.refreshService.sendRefresh(this.refreshType); } } diff --git a/frontend/src/app/graphic-builder/shared/services/run-visualization.service.ts b/frontend/src/app/graphic-builder/shared/services/run-visualization.service.ts index 3ec455312..b44e331b7 100644 --- a/frontend/src/app/graphic-builder/shared/services/run-visualization.service.ts +++ b/frontend/src/app/graphic-builder/shared/services/run-visualization.service.ts @@ -1,8 +1,9 @@ import {Injectable} from '@angular/core'; import {Observable} from 'rxjs'; import {VisualizationType} from '../../../shared/chart/types/visualization.type'; +import {ChartBuilderQueryLanguageEnum} from '../../../shared/enums/chart-builder-query-language.enum'; +import {createRequestOption} from '../../../shared/util/request-util'; import {VisualizationService} from '../../visualization/shared/services/visualization.service'; -import {createRequestOption} from "../../../shared/util/request-util"; @Injectable({ providedIn: 'root' @@ -18,6 +19,7 @@ export class RunVisualizationService { * @param request optional pagination */ run(visualization: VisualizationType, request: any = {}): Observable { + visualization.queryLanguage = !visualization.sqlQuery ? 
ChartBuilderQueryLanguageEnum.DSL : ChartBuilderQueryLanguageEnum.SQL; const req = createRequestOption(request); return new Observable(subscriber => { if (typeof visualization.chartConfig !== 'string') { diff --git a/frontend/src/app/graphic-builder/shared/util/visualization/visualization-render.util.ts b/frontend/src/app/graphic-builder/shared/util/visualization/visualization-render.util.ts index e52e292cd..db4af2c3a 100644 --- a/frontend/src/app/graphic-builder/shared/util/visualization/visualization-render.util.ts +++ b/frontend/src/app/graphic-builder/shared/util/visualization/visualization-render.util.ts @@ -12,6 +12,6 @@ export function resolveDefaultVisualizationTime(visualization: VisualizationType const to = visualization.filterType[indexTime].value[1]; return new ElasticFilterDefaultTime(from, to); } else { - return null; + return new ElasticFilterDefaultTime('now-30d', 'now'); } } diff --git a/frontend/src/app/graphic-builder/visualization/visualization-create/visualization-create.component.html b/frontend/src/app/graphic-builder/visualization/visualization-create/visualization-create.component.html index cdb227404..49d3b9914 100644 --- a/frontend/src/app/graphic-builder/visualization/visualization-create/visualization-create.component.html +++ b/frontend/src/app/graphic-builder/visualization/visualization-create/visualization-create.component.html @@ -1,9 +1,6 @@
- -
-
Select a visualization
Cancel + + +
+
+ + diff --git a/frontend/src/app/graphic-builder/visualization/visualization-header/visualization-header.component.scss b/frontend/src/app/graphic-builder/visualization/visualization-header/visualization-header.component.scss new file mode 100644 index 000000000..a5235bc4d --- /dev/null +++ b/frontend/src/app/graphic-builder/visualization/visualization-header/visualization-header.component.scss @@ -0,0 +1,12 @@ +.gap-2 { + gap: .5rem; +} + +.header-border { + border: 1px solid #dee2e6; + border-bottom: none; +} + +.visualization-actions { + height: 36px; +} diff --git a/frontend/src/app/graphic-builder/visualization/visualization-header/visualization-header.component.ts b/frontend/src/app/graphic-builder/visualization/visualization-header/visualization-header.component.ts new file mode 100644 index 000000000..d07ac6ee0 --- /dev/null +++ b/frontend/src/app/graphic-builder/visualization/visualization-header/visualization-header.component.ts @@ -0,0 +1,53 @@ +import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core'; +import {NgbActiveModal} from '@ng-bootstrap/ng-bootstrap'; +import {UTM_CHART_ICONS} from '../../../shared/constants/icons-chart.const'; +import {UtmIndexPattern} from '../../../shared/types/index-pattern/utm-index-pattern'; + +@Component({ + selector: 'app-visualization-header', + templateUrl: './visualization-header.component.html', + styleUrls: ['./visualization-header.component.scss'] +}) +export class VisualizationHeaderComponent implements OnInit { + @Input() chartType: string; + @Output() indexPatternInitialized = new EventEmitter(); + @Output() indexPatternSelected = new EventEmitter(); + @Output() cancelled = new EventEmitter(); + @Output() saved = new EventEmitter(); + @Input() sqlMode = false; + @Input() showModeToggle = true; + @Output() sqlModeToggled = new EventEmitter(); + @Input() pattern: UtmIndexPattern; + + constructor(public activeModal: NgbActiveModal) { + } + + ngOnInit() { + + } + + indexPatternChange(pattern: 
UtmIndexPattern) { + this.indexPatternSelected.emit(pattern); + } + + indexPatternLoaded(indexPatternNames: string[]) { + this.indexPatternInitialized.emit(indexPatternNames); + } + + cancel() { + this.cancelled.emit(); + } + + save() { + this.saved.emit(); + } + + toggleSqlMode() { + this.sqlMode = !this.sqlMode; + this.sqlModeToggled.emit(this.sqlMode); + } + + chartIconResolver(): string { + return this.chartType ? UTM_CHART_ICONS[this.chartType] : ''; + } +} diff --git a/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.html b/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.html index c5f96805c..8aa078e5d 100644 --- a/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.html +++ b/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.html @@ -117,7 +117,7 @@
Visualizations
- {{vis.pattern.pattern}} + {{vis.pattern? vis.pattern.pattern : ''}} diff --git a/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.scss b/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.scss index 0da1e85ca..3293357d8 100644 --- a/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.scss +++ b/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.scss @@ -17,3 +17,7 @@ .min-h-0 { min-height: 0; } + +.container-fluid{ + overflow: auto; +} diff --git a/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.ts b/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.ts index 0d7036efd..05adf6938 100644 --- a/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.ts +++ b/frontend/src/app/graphic-builder/visualization/visualization-list/visualization-list.component.ts @@ -75,8 +75,10 @@ export class VisualizationListComponent implements OnInit { editVisualization(vis: VisualizationType) { this.spinner.show('loadingSpinner'); const queryParams = {}; - queryParams[VisualizationQueryParamsEnum.PATTERN_NAME] = vis.pattern.pattern; - queryParams[VisualizationQueryParamsEnum.PATTERN_ID] = vis.pattern.id; + if (vis.pattern) { + queryParams[VisualizationQueryParamsEnum.PATTERN_NAME] = vis.pattern.pattern; + queryParams[VisualizationQueryParamsEnum.PATTERN_ID] = vis.pattern.id; + } queryParams[VisualizationQueryParamsEnum.CHART] = vis.chartType; queryParams[VisualizationQueryParamsEnum.MODE] = 'edit'; queryParams[VisualizationQueryParamsEnum.VISUALIZATION_ID] = vis.id; diff --git a/frontend/src/app/graphic-builder/visualization/visualization-save/visualization-save.component.ts b/frontend/src/app/graphic-builder/visualization/visualization-save/visualization-save.component.ts index 
2ec090cfb..759f88a57 100644 --- a/frontend/src/app/graphic-builder/visualization/visualization-save/visualization-save.component.ts +++ b/frontend/src/app/graphic-builder/visualization/visualization-save/visualization-save.component.ts @@ -37,7 +37,11 @@ export class VisualizationSaveComponent implements OnInit { ngOnInit() { this.initFormSaveVis(); - cleanVisualizationData(this.visualization).then(visualization => { + const visualizationPromise = this.visualization.chartType === ChartTypeEnum.TAG_CLOUD_CHART + ? Promise.resolve(this.visualization) + : cleanVisualizationData(this.visualization); + + visualizationPromise.then(visualization => { this.visualizationToSave = visualization; if (this.mode === 'edit') { this.visSaveForm.get('name').setValue(this.visualizationToSave.name); @@ -46,6 +50,7 @@ export class VisualizationSaveComponent implements OnInit { }); } + initFormSaveVis() { this.visSaveForm = this.fb.group( { diff --git a/frontend/src/app/graphic-builder/visualization/visualization-shared.module.ts b/frontend/src/app/graphic-builder/visualization/visualization-shared.module.ts index 275c9073d..e07226ba0 100644 --- a/frontend/src/app/graphic-builder/visualization/visualization-shared.module.ts +++ b/frontend/src/app/graphic-builder/visualization/visualization-shared.module.ts @@ -14,6 +14,8 @@ import {VisualizationDeleteComponent} from './visualization-delete/visualization import {VisualizationImportComponent} from './visualization-import/visualization-import.component'; import {VisualizationListComponent} from './visualization-list/visualization-list.component'; import {VisualizationSaveComponent} from './visualization-save/visualization-save.component'; +import {VisualizationHeaderComponent} from "./visualization-header/visualization-header.component"; +import {ResizableModule} from "angular-resizable-element"; @NgModule({ declarations: [ @@ -24,7 +26,8 @@ import {VisualizationSaveComponent} from './visualization-save/visualization-sav 
VisualizationImportComponent, VisualizationFilterComponent, VisualizationChangeNameComponent, - VisualizationSelectComponent + VisualizationSelectComponent, + VisualizationHeaderComponent ], entryComponents: [ VisualizationDeleteComponent, @@ -41,12 +44,14 @@ import {VisualizationSaveComponent} from './visualization-save/visualization-sav FormsModule, ReactiveFormsModule, GraphicBuilderSharedModule, + ResizableModule, ], schemas: [CUSTOM_ELEMENTS_SCHEMA, NO_ERRORS_SCHEMA], exports: [ VisualizationChangeNameComponent, - VisualizationSelectComponent + VisualizationSelectComponent, + VisualizationHeaderComponent ], providers: [InputClassResolve] }) diff --git a/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.component.ts b/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.component.ts index b4683d73d..b62592634 100644 --- a/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.component.ts +++ b/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.component.ts @@ -124,7 +124,6 @@ export class ImportRuleComponent implements OnInit, OnDestroy { dataTypes: file.dataTypes && file.dataTypes.length > 0 ? 
file.dataTypes : [] }; }); - console.log('filesWithDataTypes', filesWithDataTypes); // Fetch and filter data types for each file forkJoin( @@ -147,7 +146,7 @@ export class ImportRuleComponent implements OnInit, OnDestroy { integrity: file.impact.integrity || 0, availability: file.impact.availability || 0, definition: file.where || '', - afterEvents: file.afterEvents || [], + afterEvents: file.afterEvents || file.correlation || [], dataTypes: filteredDataTypes.filter(dt => !!dt) })) ), diff --git a/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.service.ts b/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.service.ts index 100c69594..9f928cb06 100644 --- a/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.service.ts +++ b/frontend/src/app/rule-management/app-rule/components/import-rules/import-rule.service.ts @@ -157,6 +157,7 @@ export class ImportRuleService { 'impact', 'where', 'afterEvents', + 'correlation', 'name', 'adversary', 'confidentiality', diff --git a/frontend/src/app/shared/chart/factories/echart-factory/charts/gauge.ts b/frontend/src/app/shared/chart/factories/echart-factory/charts/gauge.ts index b7cb9c639..bb1839f16 100644 --- a/frontend/src/app/shared/chart/factories/echart-factory/charts/gauge.ts +++ b/frontend/src/app/shared/chart/factories/echart-factory/charts/gauge.ts @@ -1,4 +1,5 @@ import {extractMetricLabel} from '../../../../../graphic-builder/chart-builder/chart-property-builder/shared/functions/visualization-util'; +import {ChartBuilderQueryLanguageEnum} from '../../../../enums/chart-builder-query-language.enum'; import {SeriesGauge} from '../../../types/charts/chart-properties/series/gauge/series-gauge'; import {UtmGaugeOptionType} from '../../../types/charts/gauge/utm-gauge-option.type'; import {GaugeBuilderResponseType} from '../../../types/response/gauge-builder-response.type'; @@ -70,7 +71,9 @@ export class Gauge implements ChartBuildInterface { 
options.serie[0].splitLine.length = options.serie[0].axisLine.lineStyle.width + 5; options.serie[0].data = [ { - name: extractMetricLabel(data[0].metricId, visualization), + name: visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + extractMetricLabel(data[0].metricId, visualization) + : data[0].metricId, value: data[0].value } ]; diff --git a/frontend/src/app/shared/chart/factories/echart-factory/charts/heatmap.ts b/frontend/src/app/shared/chart/factories/echart-factory/charts/heatmap.ts index f08290f0b..bb6563014 100644 --- a/frontend/src/app/shared/chart/factories/echart-factory/charts/heatmap.ts +++ b/frontend/src/app/shared/chart/factories/echart-factory/charts/heatmap.ts @@ -1,4 +1,5 @@ import {extractMetricLabel} from '../../../../../graphic-builder/chart-builder/chart-property-builder/shared/functions/visualization-util'; +import {ChartBuilderQueryLanguageEnum} from '../../../../enums/chart-builder-query-language.enum'; import {ChartTypeEnum} from '../../../../enums/chart-type.enum'; import {HeatMapPropertiesType} from '../../../types/charts/heatmap/heat-map-properties.type'; import {HeatMapResponseType} from '../../../types/response/heat-map-response.type'; @@ -19,7 +20,9 @@ export class Heatmap implements ChartBuildInterface { heatMapOptions.visualMap.max = undefined; heatMapOptions.visualMap.min = undefined; heatMapOptions.series = [{ - name: extractMetricLabel(0, visualization), + name: visualization.queryLanguage === ChartBuilderQueryLanguageEnum. DSL ? 
+ extractMetricLabel(0, visualization) + : '', type: 'heatmap', data: data[0].data, }]; diff --git a/frontend/src/app/shared/chart/factories/echart-factory/charts/line-bar.ts b/frontend/src/app/shared/chart/factories/echart-factory/charts/line-bar.ts index 73a84bcdd..08fce111d 100644 --- a/frontend/src/app/shared/chart/factories/echart-factory/charts/line-bar.ts +++ b/frontend/src/app/shared/chart/factories/echart-factory/charts/line-bar.ts @@ -8,6 +8,7 @@ import {BarLineResponseType} from '../../../types/response/bar-line-response.typ import {VisualizationType} from '../../../types/visualization.type'; import {ChartBuildInterface} from '../chart-build.interface'; import {ChartOption} from '../chart-option'; +import {ChartBuilderQueryLanguageEnum} from "../../../../enums/chart-builder-query-language.enum"; export class LineBar implements ChartBuildInterface { chartEnumType = ChartTypeEnum; @@ -37,8 +38,8 @@ export class LineBar implements ChartBuildInterface { * -data[0].series.length mean that have only one metric * - visualization type for single must have bar type */ - if (!visualization.aggregationType.bucket || - visualization.aggregationType.bucket.subBucket || + if ((visualization.aggregationType && !visualization.aggregationType.bucket) || + (visualization.aggregationType && visualization.aggregationType && visualization.aggregationType.bucket.subBucket) || data[0].series.length > 1 || (visualization.chartType === this.chartEnumType.LINE_CHART || visualization.chartType === this.chartEnumType.AREA_LINE_CHART)) { @@ -105,7 +106,8 @@ export class LineBar implements ChartBuildInterface { for (let j = 0; j < data[0].series.length; j++) { const metricId = Number(data[0].series[j].metricId); const index = serie.findIndex(value => Number(value.metricId) === metricId); - const metricLabel = extractMetricLabel(data[0].series[j].metricId, visualization); + const metricLabel = visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? 
+ extractMetricLabel(data[0].series[j].metricId, visualization) : ''; const stackBy = data[0].series[j].name === '' ? metricLabel : data[0].series[j].name; if (index !== -1) { const ser = { diff --git a/frontend/src/app/shared/chart/factories/echart-factory/charts/pie.ts b/frontend/src/app/shared/chart/factories/echart-factory/charts/pie.ts index 5d2d32240..3f5154e56 100644 --- a/frontend/src/app/shared/chart/factories/echart-factory/charts/pie.ts +++ b/frontend/src/app/shared/chart/factories/echart-factory/charts/pie.ts @@ -2,6 +2,7 @@ import { extractMetricLabel, getBucketLabel } from '../../../../../graphic-builder/chart-builder/chart-property-builder/shared/functions/visualization-util'; +import {ChartBuilderQueryLanguageEnum} from '../../../../enums/chart-builder-query-language.enum'; import {Legend} from '../../../types/charts/chart-properties/legend/legend'; import {SeriesPie} from '../../../types/charts/chart-properties/series/pie/series-pie'; import {ItemStyle} from '../../../types/charts/chart-properties/style/item-style'; @@ -63,7 +64,10 @@ export class Pie implements ChartBuildInterface { extractSeries(data: any[], visualization: VisualizationType): string[] { const series: string[] = []; for (const dat of data) { - series.push(dat.bucketKey ? dat.bucketKey : extractMetricLabel(dat.metricId, visualization)); + series.push(dat.bucketKey ? dat.bucketKey : + visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + extractMetricLabel(dat.metricId, visualization) + : ''); } return series; } @@ -73,7 +77,10 @@ export class Pie implements ChartBuildInterface { for (const dat of data) { values.push( { - name: dat.bucketKey ? dat.bucketKey : extractMetricLabel(dat.metricId, visualization), + name: dat.bucketKey ? dat.bucketKey : + visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? 
+ extractMetricLabel(dat.metricId, visualization) + : '', value: Number(dat.value.toFixed(2)) } ); @@ -82,7 +89,10 @@ export class Pie implements ChartBuildInterface { } private extractSerieName(data: PieBuilderResponseType[], visualization: VisualizationType): string { - return getBucketLabel(0, visualization); + if (visualization.queryLanguage === ChartBuilderQueryLanguageEnum.SQL) { + return data.length > 0 ? data[0].bucketId : 'SQL Series'; + } else { + return getBucketLabel(0, visualization); + } } - } diff --git a/frontend/src/app/shared/chart/factories/echart-factory/charts/tag-cloud.ts b/frontend/src/app/shared/chart/factories/echart-factory/charts/tag-cloud.ts index 54b6b14c0..71002ded0 100644 --- a/frontend/src/app/shared/chart/factories/echart-factory/charts/tag-cloud.ts +++ b/frontend/src/app/shared/chart/factories/echart-factory/charts/tag-cloud.ts @@ -2,6 +2,7 @@ import { extractMetricLabel, getBucketLabel } from '../../../../../graphic-builder/chart-builder/chart-property-builder/shared/functions/visualization-util'; +import {ChartBuilderQueryLanguageEnum} from '../../../../enums/chart-builder-query-language.enum'; import {UtmTagCloudOptionType} from '../../../types/charts/tag-cloud/utm-tag-cloud-option.type'; import {PieBuilderResponseType} from '../../../types/response/pie-builder-response.type'; import {VisualizationType} from '../../../types/visualization.type'; @@ -16,7 +17,12 @@ export class TagCloud implements ChartBuildInterface { buildChart(data?: any[], visualization?: VisualizationType): ChartOption { const tagOptions: UtmTagCloudOptionType = visualization.chartConfig; tagOptions.series[0].data = this.extractTagValues(data, visualization); - tagOptions.series[0].name = getBucketLabel(0, visualization); + + tagOptions.series[0].name = + visualization.queryLanguage === ChartBuilderQueryLanguageEnum.SQL + ? (data && data.length > 0 && data[0].bucketId ? 
data[0].bucketId : 'Count') + : getBucketLabel(0, visualization); + return tagOptions; } @@ -28,7 +34,10 @@ export class TagCloud implements ChartBuildInterface { for (const dat of data) { values.push( { - name: dat.bucketKey ? dat.bucketKey : extractMetricLabel(dat.metricId, visualization), + name: dat.bucketKey ? dat.bucketKey + : visualization.queryLanguage === ChartBuilderQueryLanguageEnum.DSL ? + extractMetricLabel(dat.metricId, visualization) + : 'ALL', value: Number(dat.value.toFixed(2)), textStyle: this.createRandomItemStyle(tagOptions.series[0].color), } diff --git a/frontend/src/app/shared/chart/types/visualization.type.ts b/frontend/src/app/shared/chart/types/visualization.type.ts index 858b74736..291b8f932 100644 --- a/frontend/src/app/shared/chart/types/visualization.type.ts +++ b/frontend/src/app/shared/chart/types/visualization.type.ts @@ -1,3 +1,4 @@ +import {ChartBuilderQueryLanguageEnum} from "../../enums/chart-builder-query-language.enum"; import {ChartTypeEnum} from '../../enums/chart-type.enum'; import {DataNatureTypeEnum} from '../../enums/nature-data.enum'; import {ElasticFilterType} from '../../types/filter/elastic-filter.type'; @@ -23,6 +24,7 @@ export class VisualizationType { showTime?: boolean; systemOwner?: boolean; page?: any; - + sqlQuery?: string; + queryLanguage?: ChartBuilderQueryLanguageEnum; } diff --git a/frontend/src/app/shared/components/code-editor/code-editor.component.ts b/frontend/src/app/shared/components/code-editor/code-editor.component.ts index c24804b6e..f00bdb24d 100644 --- a/frontend/src/app/shared/components/code-editor/code-editor.component.ts +++ b/frontend/src/app/shared/components/code-editor/code-editor.component.ts @@ -54,6 +54,7 @@ export class CodeEditorComponent implements OnInit, OnDestroy, ControlValueAcces @Input() consoleOptions?: ConsoleOptions; @Output() execute = new EventEmitter(); @Output() clearData = new EventEmitter(); + @Output() indexPatternChange = new EventEmitter(); @Input() queryError: 
string | null = null; @Input() customKeywords: string[] = []; @@ -88,13 +89,6 @@ export class CodeEditorComponent implements OnInit, OnDestroy, ControlValueAcces this.consoleOptions = { ...this.defaultOptions, ...this.consoleOptions }; } - ngOnDestroy(): void { - if (this.completionProvider) { - this.completionProvider.dispose(); - this.completionProvider = undefined; - } - } - onEditorInit(editorInstance: monaco.editor.IStandaloneCodeEditor) { this.completionProvider = monaco.languages.registerCompletionItemProvider('sql', { provideCompletionItems: () => { @@ -165,6 +159,7 @@ export class CodeEditorComponent implements OnInit, OnDestroy, ControlValueAcces } clearMessages(): void { + this.extractIndexPattern(this.sqlQuery); this.errorMessage = ''; this.successMessage = ''; } @@ -184,4 +179,43 @@ export class CodeEditorComponent implements OnInit, OnDestroy, ControlValueAcces setDisabledState?(isDisabled: boolean): void { // Optional: handle disabled state } + + extractIndexPattern(sql: string): string | null { + const normalized = sql + .replace(/\s+/g, ' ') + .toLowerCase(); + + const fromIndex = normalized.indexOf(' from '); + if (fromIndex === -1) { return null; } + + const start = fromIndex + 6; + + const keywords = [' where ', ' group by ', ' order by ', ' limit ', ' having ']; + + let end = normalized.length; + for (const kw of keywords) { + const idx = normalized.indexOf(kw, start); + if (idx !== -1 && idx < end) { + end = idx; + } + } + + const originalFragment = normalized.substring(start, end).trim(); + + if (originalFragment.length > 0) { + const indexPatternSelected = this.customKeywords.find(keyword => keyword === originalFragment); + + if (indexPatternSelected) { + this.indexPatternChange.emit(indexPatternSelected); + } + } + } + + ngOnDestroy(): void { + if (this.completionProvider) { + this.completionProvider.dispose(); + this.completionProvider = undefined; + } + } + } diff --git 
a/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.html b/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.html index a007af815..605267720 100644 --- a/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.html +++ b/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.html @@ -29,7 +29,7 @@ class="d-flex justify-content-start align-items-center w-100 pl-2">
Source
- {{ pattern.pattern }} + {{ pattern ? pattern.pattern : '' }}
diff --git a/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.ts b/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.ts index 79c624cd5..18634adf5 100644 --- a/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.ts +++ b/frontend/src/app/shared/components/utm/index-pattern/index-pattern-select/index-pattern-select.component.ts @@ -76,8 +76,9 @@ export class IndexPatternSelectComponent implements OnInit { } } - getListPatterns(){ - return this.patterns.map(pattern => ({ id: pattern.id, name: pattern.pattern, selected: this.pattern.id == pattern.id })); + getListPatterns() { + return this.patterns.map(pattern => ({ id: pattern.id, name: pattern.pattern, selected: + (this.pattern ? this.pattern.id === pattern.id : false) })); } selectedPattern($event,popover) { diff --git a/frontend/src/app/shared/components/utm/util/utm-file-upload/utm-file-upload.component.ts b/frontend/src/app/shared/components/utm/util/utm-file-upload/utm-file-upload.component.ts index 243f4a244..c44e9da60 100644 --- a/frontend/src/app/shared/components/utm/util/utm-file-upload/utm-file-upload.component.ts +++ b/frontend/src/app/shared/components/utm/util/utm-file-upload/utm-file-upload.component.ts @@ -15,7 +15,7 @@ export class UtmFileUploadComponent implements OnInit { @Input() shoPreview = false; @Input() validateFileSize = false; @Input() maxFileSize = 10 * 1024; - maxFiles = 10; + maxFiles = 50; maxFilesError = false; @Output() fileEmit = new EventEmitter(); @Output() errorEmit = new EventEmitter(); diff --git a/frontend/src/app/shared/enums/chart-builder-query-language.enum.ts b/frontend/src/app/shared/enums/chart-builder-query-language.enum.ts new file mode 100644 index 000000000..2bd98f847 --- /dev/null +++ b/frontend/src/app/shared/enums/chart-builder-query-language.enum.ts @@ -0,0 +1,4 @@ +export enum 
ChartBuilderQueryLanguageEnum { + DSL = 'DSL', + SQL = 'SQL' +} diff --git a/installer/docker/compose.go b/installer/docker/compose.go index 53d7e2e7f..81cc43247 100644 --- a/installer/docker/compose.go +++ b/installer/docker/compose.go @@ -113,8 +113,8 @@ func (c *Compose) Populate(conf *config.Config, stack *StackConfig) error { conf.UpdatesFolder + ":/updates", }, Ports: []string{ - "9000:50051", - "9001:8080", + "9000:9000", + "9001:9001", }, Environment: []string{ "DB_PATH=/data/utmstack.db", @@ -218,7 +218,7 @@ func (c *Compose) Populate(conf *config.Config, stack *StackConfig) error { "INTERNAL_KEY=" + conf.InternalKey, "ENCRYPTION_KEY=" + conf.InternalKey, "GRPC_AGENT_MANAGER_HOST=agentmanager", - "GRPC_AGENT_MANAGER_PORT=50051", + "GRPC_AGENT_MANAGER_PORT=9000", "EVENT_PROCESSOR_HOST=event-processor-manager", "EVENT_PROCESSOR_PORT=9002", }, diff --git a/installer/services/postgres.go b/installer/services/postgres.go index df16c6639..c7a471a43 100644 --- a/installer/services/postgres.go +++ b/installer/services/postgres.go @@ -6,6 +6,7 @@ import ( _ "github.com/lib/pq" "github.com/utmstack/UTMStack/installer/config" + "github.com/utmstack/UTMStack/installer/utils" ) func InitPgUtmstack(c *config.Config) error { @@ -83,36 +84,31 @@ CONSTRAINT utm_client_pkey PRIMARY KEY (id) return nil } -func GetAdminEmail(c *config.Config) (string, error) { - psqlconn := fmt.Sprintf("host=localhost port=5432 user=postgres password=%s sslmode=disable database=utmstack", - c.Password) - db, err := sql.Open("postgres", psqlconn) +func GetAdminEmail(_ *config.Config) (string, error) { + // Get postgres container ID + containerIDs, err := utils.RunCmdWithOutput("docker", "ps", "-q", "-f", "name=utmstack_postgres") if err != nil { - return "", err + return "", fmt.Errorf("error getting postgres container: %v", err) } - defer db.Close() - err = db.Ping() - if err != nil { - return "", err + if len(containerIDs) == 0 { + return "", fmt.Errorf("postgres container not found") } - 
var email string - err = db.QueryRow(` - SELECT email - FROM jhi_user - WHERE login = 'admin' AND created_by = 'system' AND email != 'admin@localhost' - LIMIT 1 - `).Scan(&email) + containerID := containerIDs[0] + // Execute query inside the container + query := "SELECT email FROM jhi_user WHERE login = 'admin' AND created_by = 'system' AND email != 'admin@localhost' LIMIT 1" + output, err := utils.RunCmdWithOutput("docker", "exec", containerID, "psql", "-U", "postgres", "-d", "utmstack", "-t", "-c", query) if err != nil { - if err == sql.ErrNoRows { - return "", nil - } - return "", err + return "", fmt.Errorf("error executing query: %v", err) + } + + if len(output) == 0 { + return "", nil } - return email, nil + return output[0], nil } func InitPgUserAuditor(c *config.Config) error { diff --git a/installer/templates/front-end.go b/installer/templates/front-end.go index e8214645c..76d48cc52 100644 --- a/installer/templates/front-end.go +++ b/installer/templates/front-end.go @@ -12,7 +12,7 @@ const FrontEnd string = `server { } set $utmstack_backend http://backend:8080; - set $utmstack_agent_manager http://agentmanager:8080; + set $utmstack_agent_manager http://agentmanager:9001; set $utmstack_backend_auth http://backend:8080/api/authenticate; set $utmstack_ws http://backend:8080/ws; set $utmstack_saml2 http://backend:8080/login/saml2/; diff --git a/plugins/README.md b/plugins/README.md index 8d069a221..8112cc76c 100644 --- a/plugins/README.md +++ b/plugins/README.md @@ -1,1706 +1,3 @@ -# ThreatWinds EventProcessor and UTMStack Integration +# UTMStack Plugins -This documentation provides a comprehensive guide on how to implement rules for analysis, and pipelines for data -extraction, enrichment, and transformation within the EventProcessor and UTMStack ecosystem. It is designed to be a -practical reference for developers working with these systems. - -## Table of Contents - -1. [Introduction](#introduction) -2. [Architecture Overview](#architecture-overview) -3. 
[Components](#components) -4. [Implementing Rules](#implementing-rules) -5. [Implementing Filters](#implementing-filters) -6. [Best Practices](#best-practices) -7. [Development Workflow](#development-workflow) -8. [Advanced Features](#advanced-features) -9. [Integration with Other Systems](#integration-with-other-systems) -10. [Performance Optimization](#performance-optimization) -11. [Troubleshooting](#troubleshooting) -12. [Real-World Use Cases](#real-world-use-cases) -13. [Custom Plugin Development](#custom-plugin-development) -14. [Scaling the System](#scaling-the-system) -15. [Migration and Upgrades](#migration-and-upgrades) -16. [Community Resources and Support](#community-resources-and-support) - -## Introduction - -This documentation provides a comprehensive guide on how to implement rules for analysis, extraction, and data -transformation within the EventProcessor and UTMStack ecosystem. The EventProcessor is a security event processing -engine that uses a plugin-based architecture to process, analyze, and transform security events from various sources. -UTMStack integrates with the EventProcessor to provide a complete security monitoring solution. - -## Architecture Overview - -The EventProcessor and UTMStack integration consists of several components: - -1. **EventProcessor**: The core engine that processes security events. -2. **EventProcessor Plugins**: Plugins that extend the functionality of the EventProcessor. -3. **UTMStack Plugins**: Plugins that integrate UTMStack with the EventProcessor. -4. **go-sdk**: A Go SDK that provides common functionality for both the EventProcessor and plugins. -5. **Rules**: YAML files that define analysis rules for detecting security threats. -6. **Filters**: YAML files that define how to extract and transform data from raw events. - -The EventProcessor uses a plugin architecture where plugins are separate processes that communicate with the -EventProcessor via gRPC over Unix sockets. 
This allows for a flexible and extensible system where new functionality can -be added without modifying the core engine. - -## Components - -### EventProcessor - -The EventProcessor is the core engine that processes security events. It loads plugins, routes events to the appropriate -plugins, and manages the overall flow of data through the system. - -### EventProcessor Plugins - -The EventProcessor has several built-in plugins for different tasks: - -- **Input Plugins**: Collect or receive logs from external sources (http-input, grpc-input). -- **Parsing Plugins**: Extract and enrich data and transform logs (add, cast, csv, delete, expand, grok, json, kv, - reformat, rename, trim). -- **Analysis Plugins**: Process logs to detect security attacks (analysis). - -### UTMStack Plugins - -UTMStack has several plugins that integrate with the EventProcessor: - -- **Input Plugins**: Collect events from various sources (aws, azure, bitdefender, gcp, o365, sophos, inputs). -- **Parsing Plugins**: Extract and enrich data and transform logs (geolocation). -- **Analysis Plugins**: Process and analyze events (events). -- **Correlation Plugins**: Detect relationships between alerts (alerts, soc-ai). -- **Notification Plugins**: Send notifications or statistics to internal and external systems (stats). -- **Sidecar Plugins**: Background task plugins with multiple purposes, like maintaining the system healthy or managing - configurations (config). - -### go-sdk - -The go-sdk provides common functionality for both the EventProcessor and plugins. It defines the interfaces and types -used for communication between components. - -Key files in the go-sdk: - -- `plugins/plugins.proto`: Defines the Protocol Buffers messages and services used by the plugin system. -- `plugins/config.go`: Provides configuration functionality for plugins. -- `plugins/cel.go`: Provides Common Expression Language (CEL) functionality for rules. 
- -## Implementing Rules - -Rules are YAML files that define how to analyze events to detect security threats. They're used by the analysis plugin -to generate alerts when specific conditions are met. - -### Rule Structure - -A rule is defined as a YAML object with the following fields: - -```yaml -- id: 1 # Unique identifier for the rule - dataTypes: # Types of data this rule applies to - - google - name: Hello # Name of the rule - impact: # Impact information - confidentiality: 0 # Impact on confidentiality (0-5) - integrity: 0 # Impact on integrity (0-5) - availability: 3 # Impact on availability (0-5) - category: Testing Category # Category of the rule - technique: Testing Technique # Technique used by the threat - adversary: origin # Which side is considered the adversary (origin or target) - references: # External references - - https://quantfall.com - description: This is a testing rule. # Description of the rule - where: safe(origin.geolocation.country, "") == "United States" # Expression to evaluate - afterEvents: # Additional events to search for - - indexPattern: v11-log-* # Index pattern to search in - with: # Conditions for the search - - field: origin.ip.keyword # Field to match - operator: filter_term # Operator (filter_term, must_not_term, filter_match, must_not_match) - value: '{{origin.ip}}' # Value to match (can use variables from the event) - within: now-12h # Time window for the search - count: 1 # Number of events to match - deduplicateBy: # Fields used for deduplication - - adversary.ip - - adversary.country -``` - -### Rule Fields - -- **id**: A unique identifier for the rule. -- **dataTypes**: An array of data types that this rule applies to. The rule will only be evaluated for events with these - data types. -- **name**: The name of the rule. -- **impact**: The impact of the threat detected by this rule, with scores for confidentiality, integrity, and - availability. -- **category**: The category of the rule. 
-- **technique**: The technique used by the threat. -- **adversary**: Which side is considered the adversary (origin or target). -- **references**: An array of external references for more information about the threat. -- **description**: A description of the rule. -- **where**: An expression to evaluate. -- **afterEvents**: Additional events to search for. - - **indexPattern**: The index pattern to search in. - - **with**: Conditions for the search. - - **field**: The field to match. - - **operator**: The operator to use for matching. Possible values: - - **filter_match**: Equality operator. Matches events where the field equals the value (Using full-text - search). - - **filter_term**: Equality operator. Matches events where the field equals the value (Using term search). - - **must_not_match**: Not equal operator. Matches events where the field does not equal the value (Using - full-text search). - - **must_not_term**: Not equal operator. Matches events where the field does not equal the value (Using term - search). - - **value**: The value to match (can use variables from the event using the `{{field.path}}` syntax). - - **within**: The time window for the search. - - **count**: The number of events to match. (Max 50). -- **deduplicateBy**: Fields used for deduplication of alerts. - -### Rule Evaluation - -When an event is received, the analysis plugin evaluates all rules that apply to the event's data type. For each rule: - -1. The variables are extracted from the event. -2. The expression is evaluated using the variables. -3. If the expression evaluates to true, the afterEvents searches are performed. -4. If all conditions are met, an alert is generated. - -## Implementing Filters - -Filters are YAML files that define how to extract and transform data from raw events. They are used by the parsing -plugin to convert raw events into a standardized format that can be used by the EventProcessor and analyzed by rules. 
- -### Filter Structure - -A filter is defined as a YAML object with the following structure: - -```yaml -pipeline: - - dataTypes: # Types of data this filter applies to - - wineventlog - steps: # Processing steps to apply - - json: # Parse the raw data as JSON - source: raw - - rename: # Rename fields - from: - - log.host.ip - to: origin.ip - # More steps... -``` - -### Filter Steps - -Filters can include various types of steps for processing events. Each step type serves a specific purpose in the data -transformation pipeline: - -#### 1. **json** - Parse JSON Data - -Parses a field containing JSON data into structured log fields. - -**Required Fields:** - -- `source`: The field containing JSON data - -**Example:** - -```yaml -- json: - source: raw -``` - -#### 2. **rename** - Rename Fields - -Renames existing fields to match standardized field naming conventions. - -**Required Fields:** - -- `from`: Array of source field names -- `to`: Target field name - -**Example:** - -```yaml -- rename: - from: - - log.host.ip - - log.source.ip - to: origin.ip -``` - -#### 3. **cast** - Type Conversion - -Converts field values to specified data types. - -**Required Fields:** - -- `fields`: Array of field names to convert -- `to`: Target data type (`int`, `float`, `float64`, `string`, `[]string`, etc.) - -**Example:** - -```yaml -- cast: - fields: - - origin.port - - statusCode - to: int - -- cast: - fields: - - log.local.ips - to: '[]string' -``` - -#### 4. **delete** - Remove Fields - -Removes specified fields from the log structure. - -**Required Fields:** - -- `fields`: Array of field names to remove - -**Optional Fields:** - -- `where`: Conditional logic for when to apply the deletion - -**Example:** - -```yaml -- delete: - fields: - - log.method - - log.service - - log.metadata - where: exists(action) -``` - -#### 5. **grok** - Pattern-based Parsing - -Extracts structured data from unstructured text using pattern matching. 
Each grok step uses a list of patterns that are -applied sequentially to extract multiple fields from the source. - -**Required Fields:** - -- `patterns`: Array of pattern definitions, each with `fieldName` and `pattern` -- `source`: Source field to parse - -**Note:** Use `fieldName` (camelCase) in pattern definitions. Some legacy filters may use `field_name` but `fieldName` -is the standard. - -**Optional Fields:** - -- `where`: Conditional logic for when to apply parsing - -**Pattern Structure:** - -- Each pattern in the list defines one field to extract -- Patterns are applied in sequence to parse complex log formats -- Multiple grok steps can be used for different parsing stages - -**Built-in Patterns:** - -- `{{.ipv4}}`, `{{.ipv6}}` - IP addresses -- `{{.integer}}`, `{{.word}}` - Numbers and words -- `{{.data}}`, `{{.greedy}}` - Generic data patterns -- `{{.time}}`, `{{.year}}`, `{{.monthNumber}}`, `{{.monthDay}}` - Time patterns -- `{{.hostname}}`, `{{.day}}`, `{{.monthName}}` - Host and date patterns - -**Examples:** - -Basic Apache log parsing: - -```yaml -- grok: - patterns: - - fieldName: origin.ip - pattern: '{{.ipv4}}|{{.ipv6}}' - - fieldName: origin.user - pattern: '{{.word}}|(-)' - - fieldName: deviceTime - pattern: '\[{{.data}}\]' - - fieldName: log.request - pattern: '\"{{.data}}\"' - - fieldName: log.statusCode - pattern: '{{.integer}}' - source: log.message -``` - -Complex Cisco ASA parsing: - -```yaml -- grok: - patterns: - - fieldName: log.ciscoTime - pattern: '({{.day}}\s)?{{.monthName}}\s{{.monthDay}}\s{{.year}}\s{{.time}}' - - fieldName: log.localIp - pattern: '{{.ipv4}}|{{.ipv6}}|{{.hostname}}' - - fieldName: log.asaHeader - pattern: '{{.data}}ASA-' - - fieldName: log.severity - pattern: '{{.integer}}' - - fieldName: log.messageId - pattern: '-{{.integer}}' - - fieldName: log.msg - pattern: '{{.greedy}}' - source: raw -``` - -Parsing with port extraction: - -```yaml -- grok: - patterns: - - fieldName: origin.ip - pattern: 
'({{.ipv4}}|{{.ipv6}})' - - fieldName: origin.port - pattern: '/{{.integer}}' - - fieldName: target.ip - pattern: '({{.ipv4}}|{{.ipv6}})' - - fieldName: target.port - pattern: '/{{.integer}}' - source: log.connectionInfo -``` - -#### 6. **kv** - Key-Value Parsing - -Parses key-value formatted data into structured fields. - -**Required Fields:** - -- `fieldSplit`: Character(s) that separate key-value pairs -- `valueSplit`: Character(s) that separate keys from values -- `source`: Source field containing key-value data - -**Optional Fields:** - -- `where`: Conditional logic for when to apply parsing - -**Example:** - -```yaml -- kv: - fieldSplit: " " - valueSplit: "=" - source: raw -``` - -#### 7. **trim** - String Trimming - -Removes specified characters from the beginning or end of field values. - -**Required Fields:** - -- `function`: Trim operation (`prefix`, `suffix`, `substring`, `regex`, `space`) -- `substring`: Character(s) to remove -- `fields`: Array of fields to trim - -**Optional Fields:** - -- `where`: Conditional logic for when to apply trimming - -**Example:** - -```yaml -- trim: - function: prefix - substring: '[' - fields: - - deviceTime - - log.severityLabel - -- trim: - function: suffix - substring: ':' - fields: - - origin.ip -``` - -#### 8. **add** - Add New Fields - -Adds new fields with specified values, often used for field enrichment and normalization. - -**Required Fields:** - -- `function`: Add function type (`string`) -- `params`: Parameters including `key` and `value` - -**Optional Fields:** - -- `where`: Conditional logic for when to add the field - -**Example:** - -```yaml -- add: - function: 'string' - params: - key: actionResult - value: 'accepted' - where: safe("statusCode", 0.0) >= double(200) && safe("statusCode", 0.0) <= double(200)) - -- add: - function: 'string' - params: - key: action - value: 'get' - where: - variables: safe("log.method", "") == "GET" -``` - -#### 9. 
**reformat** - Field Reformatting - -Reformats field values, particularly useful for timestamp conversion. - -**Required Fields:** - -- `fields`: Array of fields to reformat -- `function`: Reformatting function (`time`) -- `fromFormat`: Source format pattern -- `toFormat`: Target format pattern - -**Example:** - -```yaml -- reformat: - fields: - - deviceTime - function: time - fromFormat: '14/Feb/2022:15:40:53 -0500' - toFormat: '2024-09-23T15:57:40.338364445Z' -``` - -#### 10. **expand** - Field Expansion - -Expands complex nested fields or structured data into individual fields. - -**Required Fields:** - -- `source`: Source field containing data to expand -- `to`: Target field name for expanded data - -**Optional Fields:** - -- `where`: Conditional logic for when to apply expansion - -**Example:** - -```yaml -- expand: - source: log.jsonPayload.structuredRdata - to: log.jsonPayloadStructuredRdata - where: exists(log.jsonPayload.structuredRdata) -``` - -#### 11. **csv** - CSV Data Parsing - -Parses CSV-formatted data into structured fields using defined column mappings. - -**Required Fields:** - -- `source`: Source field containing CSV data -- `separator`: Character used to separate CSV values (typically `","`) -- `headers`: Array of field names for CSV columns - -**Optional Fields:** - -- `where`: Conditional logic for when to apply CSV parsing - -**Note:** The protocol buffer definition only supports `headers`, not `columns`. Some filters may incorrectly use -`columns` but `headers` is the standard. - -**Example:** - -```yaml -- csv: - source: log.csvMsg - separator: "," - headers: - - log.ruleNumber - - log.subRuleNumber - - log.anchor - - log.tracker - - log.realInterface - - log.reason - - log.action -``` - -#### 12. **dynamic** - Dynamic Plugin Integration - -Calls external plugins for specialized processing like geolocation enrichment. 
- -**Required Fields:** - -- `plugin`: Plugin identifier -- `params`: Plugin-specific parameters - -**Optional Fields:** - -- `where`: Conditional logic for when to apply the plugin - -**Example:** - -```yaml -- dynamic: - plugin: com.utmstack.geolocation - params: - source: origin.ip - destination: origin.geolocation - where: exists(origin.ip) -``` - -### Conditional Logic in Steps - -All steps support conditional logic using the `where` clause: - -**Structure:** - -```yaml -where: "conditional_expression" -``` - -**Supported Data Types:** - -- `string`, `int`, `float`, `bool` -- `[]string`, `[]int` (arrays) -- Custom types as needed - -**Expression Examples:** - -- `has(origin.ip)` - Check if variable exists and is valid -- `statusCode >= 200 && statusCode <= 299` - Range checking -- `log.method == "GET"` - String comparison -- `severity.contains("error")` - String operations - -### Complete Filter Example - -Here's a comprehensive example showing multiple step types working together (based on Apache access log processing): - -```yaml -pipeline: - - dataTypes: - - apache - steps: - # 1. Parse JSON structure from raw input - - json: - source: raw - - # 2. Rename fields to standardized mapping - - rename: - from: - - log.host.hostname - to: origin.host - - rename: - from: - - log.host.ip - to: log.local.ips - - # 3. Type conversion for array fields - - cast: - to: '[]string' - fields: - - log.local.ips - - # 4. Parse Apache Common Log Format using grok - - grok: - patterns: - - fieldName: origin.ip - pattern: '{{.ipv4}}|{{.ipv6}}' - - fieldName: origin.user - pattern: '{{.word}}|(-)' - - fieldName: deviceTime - pattern: '\[{{.data}}\]' - - fieldName: log.request - pattern: '\"{{.data}}\"' - - fieldName: log.statusCode - pattern: '{{.integer}}' - - fieldName: origin.bytesReceived - pattern: '{{.integer}}|(-)' - source: log.message - - # 5. 
Clean up parsed fields - - trim: - function: prefix - substring: '[' - fields: - - deviceTime - - trim: - function: suffix - substring: ']' - fields: - - deviceTime - - # 6. Parse HTTP request components - - grok: - patterns: - - fieldName: log.method - pattern: '{{.word}}' - - fieldName: origin.path - pattern: '(.*)\s+' - - fieldName: protocol - pattern: '{{.greedy}}' - source: log.request - - # 7. Add geolocation data using dynamic plugin - - dynamic: - plugin: com.utmstack.geolocation - params: - source: origin.ip - destination: origin.geolocation - where: exists(origin.ip) - - # 8. Normalize HTTP methods to standardized actions - - add: - function: 'string' - params: - key: action - value: 'get' - where: safe(log.method, "") == "GET" - - # 9. Add result classification based on status codes - - add: - function: 'string' - params: - key: actionResult - value: 'accepted' - where: safe(log.statusCode, 0.0) >= double(200) && safe(log.statusCode, 0.0) <= double(299)) - - # 10. Convert numeric fields - - cast: - fields: - - log.statusCode - - origin.bytesReceived - to: int - - # 11. Reformat timestamp - - reformat: - fields: - - deviceTime - function: time - fromFormat: '14/Feb/2022:15:40:53 -0500' - toFormat: '2024-09-23T15:57:40.338364445Z' - - # 12. Clean up temporary fields - - delete: - fields: - - log.method - - log.service - - log.agent - where: exists(action) -``` - -This example demonstrates: - -- **Sequential processing**: Steps are applied in order -- **Field transformation**: Renaming, casting, and reformatting -- **Pattern extraction**: Using grok for complex log parsing -- **Conditional logic**: Adding fields based on conditions -- **External integration**: Geolocation enrichment via dynamic plugin -- **Data cleanup**: Trimming and deleting unnecessary fields - -### Filter Evaluation - -When an event is received, the parsing plugin selects the appropriate filter based on the event's data type. 
The filter -is then applied to the event, transforming it according to the defined steps. Each step modifies the event structure, -preparing it for analysis and correlation by downstream plugins. - -## Best Practices - -### Rule Development - -1. **Start Simple**: Begin with simple rules that match specific patterns, then refine them as needed. -2. **Test Thoroughly**: Test rules with a variety of events to ensure they work as expected. -3. **Use Variables**: Use variables to make rules more readable and maintainable. -4. **Document Rules**: Include a clear description and references in each rule. -5. **Consider Performance**: Complex rules can impact performance, so optimize them as needed. - -### Filter Development - -1. **Standardize Field Names**: Use consistent field names across all filters. -2. **Remove Unnecessary Fields**: Delete fields that are not needed for analysis to reduce storage requirements. -3. **Handle Edge Cases**: Consider how to handle missing or malformed data. -4. **Document Filters**: Include comments to explain the purpose of each step. -5. **Test with Real Data**: Test filters with real data to ensure they work as expected. - -## Development Workflow - -This section provides a step-by-step guide for developing and implementing new rules and filters. - -### Rule Development Workflow - -1. **Identify the Security Threat**: Determine what security threat you want to detect. -2. **Understand the Data**: Examine the events that would indicate this threat. -3. **Create a Rule File**: Create a new YAML file in the rules directory. -4. **Define Basic Metadata**: Set the id, name, description, and other metadata. -5. **Define Data Types**: Specify which data types this rule applies to. -6. **Define Impact**: Set the confidentiality, integrity, and availability impact scores. -7. **Define Where Conditions**: Create variables and an expression to identify events of interest. -8. 
**Define After Events**: If needed, specify additional events to search for. -9. **Define Deduplication**: Specify fields to use for deduplicating alerts. -10. **Test the Rule**: Deploy the rule and test it with sample events. -11. **Refine the Rule**: Adjust the rule based on testing results. -12. **Document the Rule**: Add comments and references to explain the rule. - -### Filter Development Workflow - -1. **Identify the Data Source**: Determine what data source you want to process. -2. **Understand the Raw Format**: Examine the raw events from this source. -3. **Create a Filter File**: Create a new YAML file in the appropriate filters directory. -4. **Define Data Types**: Specify which data types this filter applies to. -5. **Define Parsing Steps**: Add steps to parse and transform the raw data. -6. **Test the Filter**: Deploy the filter and test it with sample events. -7. **Refine the Filter**: Adjust the filter based on testing results. -8. **Document the Filter**: Add comments to explain the filter. - -## Advanced Features - -This section covers advanced features of the EventProcessor and UTMStack ecosystem that can be used to create more -sophisticated rules and filters. - -### Advanced Rule Features - -#### Complex Expressions - -The `where` field in rules supports complex expressions using the Common Expression Language (CEL). 
CEL is a -powerful expression language that allows for complex logic, including: - -- **Logical Operators**: `&&` (AND), `||` (OR), `!` (NOT) -- **Comparison Operators**: `==`, `!=`, `<`, `<=`, `>`, `>=` -- **String Operations**: `startsWith()`, `endsWith()`, `contains()` -- **Array Operations**: `in`, `size()` -- **Mathematical Operations**: `+`, `-`, `*`, `/`, `%` - -Example of a complex expression: - -```yaml -where: has(origin.country) && !(origin.country in ["United States", "Canada", "United Kingdom"]) && (origin.user != "" && origin.user.startsWith("admin")) -``` - -#### Nested AfterEvents - -The `afterEvents` field in rules supports nested searches using the `or` field. This allows for more complex correlation -logic: - -```yaml -afterEvents: - - indexPattern: v11-log-* - with: - - field: origin.ip.keyword - operator: filter_term - value: '{{origin.ip}}' - within: now-12h - count: 1 - or: - - indexPattern: v11-alert-* - with: - - field: adversary.ip.keyword - operator: filter_term - value: '{{origin.ip}}' - within: now-24h - count: 2 -``` - -In this example, the rule will match if either: - -1. There is at least 1 event in the `v11-log-*` index with the same origin IP within the last 12 hours, OR -2. There are at least 2 alerts in the `v11-alert-*` index with the same adversary IP within the last 24 hours. - -#### Dynamic Values - -Rule fields can use dynamic values from the event using the `{{field.path}}` syntax. This is particularly useful in the -`afterEvents` section: - -```yaml -afterEvents: - - indexPattern: v11-log-* - with: - - field: origin.user.keyword - operator: filter_term - value: '{{origin.user}}' - - field: origin.ip.keyword - operator: must_not_term - value: '{{origin.ip}}' - within: now-24h - count: 3 -``` - -This example searches for events with the same user but a different IP address, which could indicate a compromised -account. 
- -### Advanced Filter Features - -#### Multi-Stage Pipelines - -Filters can include multiple stages in the pipeline, each with its own set of steps: - -```yaml -pipeline: - - dataTypes: - - wineventlog - steps: - - json: - source: raw - - rename: - from: - - log.host.ip - to: origin.ip - - dataTypes: - - wineventlog - steps: - - grok: - source: message - pattern: "%{WORD:action} %{IP:target.ip}" - target: parsed -``` - -This allows for more modular and maintainable filters, especially for complex data sources. - -## Integration with Other Systems - -The EventProcessor and UTMStack ecosystem can integrate with various other systems to enhance its capabilities. - -### Integration with Threat Intelligence Platforms - -UTMStack can integrate with threat intelligence platforms to enrich events with threat intelligence data: - -1. **ThreatWinds**: UTMStack has native integration with ThreatWinds for threat intelligence. -2. **MISP**: UTMStack can integrate with MISP to consume threat intelligence feeds via third party plugins. -3. **AlienVault OTX**: UTMStack can integrate with AlienVault OTX to consume threat intelligence feeds via third party - plugins. - -### Integration with Ticketing Systems - -UTMStack can integrate with ticketing systems to create tickets for alerts: - -1. **JIRA**: UTMStack can create JIRA tickets for alerts using the JIRA API via third party plugins. -2. **ServiceNow**: UTMStack can create ServiceNow incidents for alerts using the ServiceNow API via third party plugins. -3. **GitHub Issues**: UTMStack can create GitHub issues for alerts using the GitHub API via third party plugins. - -### Integration with Communication Platforms - -UTMStack can integrate with communication platforms to send notifications for alerts: - -1. **Email**: UTMStack can send email notifications for alerts. -2. **Slack**: UTMStack can send Slack notifications for alerts using the Slack API via third party plugins. -3. 
**Microsoft Teams**: UTMStack can send Microsoft Teams notifications for alerts using the Microsoft Teams API via - third party plugins. - -## Performance Optimization - -This section provides guidance on optimizing the performance of the EventProcessor and UTMStack ecosystem. - -### Rule Optimization - -1. **Limit Data Types**: Specify only the data types that the rule applies to. This reduces the number of events that - need to be evaluated. -2. **Use Efficient Expressions**: Use efficient expressions in the `where` field. Avoid complex expressions - that require a lot of processing. -3. **Limit AfterEvents Searches**: Limit the number of `afterEvents` searches and the time window for each search. This - reduces the load on the search engine. -4. **Use Deduplication**: Use the `deduplicateBy` field to prevent alert fatigue. - -### Filter Optimization - -1. **Limit Data Types**: Specify only the data types that the filter applies to. This reduces the number of events that - need to be processed. -2. **Use Efficient Steps**: Use efficient steps in the filter pipeline. Avoid complex steps that require a lot of - processing. -3. **Remove Unnecessary Fields**: Remove fields that are not needed for analysis to reduce storage requirements. -4. **Use Conditional Steps**: Use conditional steps to apply different processing based on the event type. This can - reduce the number of steps that need to be applied to each event. - -### System Optimization - -1. **Hardware Resources**: Ensure that the system has sufficient hardware resources (CPU, memory, disk) to handle the - expected event volume. -2. **Cluster Configuration**: Configure the OpenSearch cluster with the appropriate number of nodes, shards, and - replicas for the expected event volume. -3. **JVM Settings**: Configure the JVM settings for OpenSearch to optimize memory usage. -4. **Network Configuration**: Ensure that the network configuration is optimized for the expected event volume. 
- -## Troubleshooting - -### Common Issues - -1. **Rule Not Triggering**: Check that the event matches the dataTypes and where conditions. -2. **Filter Not Processing**: Check that the event matches the dataTypes and that the filter steps are correct. -3. **Missing Fields**: Check that the fields referenced in rules and filters exist in the events. -4. **Performance Issues**: Check for complex rules or filters that may be impacting performance. - -### Debugging - -1. **Check Logs**: Look for error messages in the EventProcessor and plugin logs. -2. **Test Rules Individually**: Test rules one at a time to isolate issues. -3. **Validate YAML**: Ensure that rule and filter YAML files are valid. -4. **Check Field Names**: Verify that field names in rules and filters match the actual field names in events. -5. **Use Test Events**: Create test events that should trigger your rules and verify they work as expected. - -## Real-World Use Cases - -This section provides real-world examples of how the EventProcessor and UTMStack ecosystem can be used to solve security -challenges. - -### Detecting Brute Force Attacks - -A common security challenge is detecting brute force attacks against authentication systems. Here's how you can use the -EventProcessor and UTMStack to detect such attacks: - -1. **Create a Filter**: Create a filter that extracts relevant information from authentication logs, such as the source - IP, username, and authentication result. - -```yaml -pipeline: - - dataTypes: - - auth_logs - steps: - - json: - source: raw - - rename: - from: - - log.source.ip - to: origin.ip - - rename: - from: - - log.auth.username - to: origin.user - - rename: - from: - - log.auth.result - to: actionResult -``` - -2. **Create a Rule**: Create a rule that detects multiple failed authentication attempts from the same IP address. 
- -```yaml -- id: 201 - dataTypes: - - auth_logs - name: Brute Force Attack Detection - impact: - confidentiality: 4 - integrity: 3 - availability: 2 - category: Authentication - technique: Brute Force - adversary: origin - references: - - https://attack.mitre.org/techniques/T1110/ - description: Detects multiple failed authentication attempts from the same IP address. - where: has(origin.ip) && actionResult == "failure" - afterEvents: - - indexPattern: v11-log-auth_logs - with: - - field: origin.ip.keyword - operator: filter_term - value: '{{origin.ip}}' - - field: actionResult.keyword - operator: filter_term - value: 'failure' - within: now-1h - count: 5 - deduplicateBy: - - origin.ip -``` - -This rule will generate an alert if there are at least 5 failed authentication attempts from the same IP address within -the last hour. - -### Detecting Data Exfiltration - -Another common security challenge is detecting data exfiltration. Here's how you can use the EventProcessor and UTMStack -to detect unusual data transfers: - -1. **Create a Filter**: Create a filter that extracts relevant information from network logs, such as the source IP, - destination IP, and data transfer size. - -```yaml -pipeline: - - dataTypes: - - network_logs - steps: - - json: - source: raw - - rename: - from: - - log.source.ip - to: origin.ip - - rename: - from: - - log.destination.ip - to: target.ip - - rename: - from: - - log.network.bytes - to: origin.bytesSent -``` - -2. **Create a Rule**: Create a rule that detects large data transfers to unusual destinations. - -```yaml -- id: 202 - dataTypes: - - network_logs - name: Data Exfiltration Detection - impact: - confidentiality: 5 - integrity: 2 - availability: 1 - category: Exfiltration - technique: Data Transfer - adversary: origin - references: - - https://attack.mitre.org/techniques/T1048/ - description: Detects large data transfers to unusual destinations. 
- where: has(origin.ip) && has(target.ip) && has(origin.bytesSent) origin.bytesSent && > 10000000 && !(target.ip in ["10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"]) - afterEvents: - - indexPattern: v11-log-network_logs - with: - - field: origin.ip.keyword - operator: filter_term - value: '{{origin.ip}}' - - field: target.ip.keyword - operator: filter_term - value: '{{target.ip}}' - within: now-24h - count: 1 - deduplicateBy: - - origin.ip - - target.ip -``` - -This rule will generate an alert if there is a data transfer larger than 10MB to a destination outside the internal -network. - -### Detecting Insider Threats - -Insider threats can be difficult to detect. Here's how you can use the EventProcessor and UTMStack to detect unusual -user behavior that might indicate an insider threat: - -1. **Create a Filter**: Create a filter that extracts relevant information from user activity logs, such as the - username, action, and time. - -```yaml -pipeline: - - dataTypes: - - user_activity - steps: - - json: - source: raw - - rename: - from: - - log.user.name - to: origin.user - - rename: - from: - - log.activity.action - to: action - - rename: - from: - - log.activity.time - to: deviceTime -``` - -2. **Create a Rule**: Create a rule that detects unusual user activity outside normal working hours. - -```yaml -- id: 203 - dataTypes: - - user_activity - name: Unusual User Activity - impact: - confidentiality: 3 - integrity: 4 - availability: 2 - category: Insider Threat - technique: Unusual Activity - adversary: origin - references: - - https://attack.mitre.org/techniques/T1078/ - description: Detects unusual user activity outside normal working hours. 
- where: has(origin.user) && has(deviceTime) && has(action) && (time.hour < 8 || time.hour > 18) && action in ["file_access", "database_query", "admin_action"] - afterEvents: - - indexPattern: v11-log-user_activity - with: - - field: origin.user.keyword - operator: filter_term - value: '{{origin.user}}' - - field: action.keyword - operator: filter_term - value: '{{action}}' - within: now-7d - count: 1 - deduplicateBy: - - origin.user - - action -``` - -This rule will generate an alert if a user performs sensitive actions outside normal working hours (8 AM to 6 PM). - -## Custom Plugin Development - -The EventProcessor and UTMStack ecosystem is designed to be extensible through plugins. This section provides guidance -on developing custom plugins to extend the functionality of the system. - -### Plugin Types - -There are five main types of plugins that you can develop for the EventProcessor, each serving specific purposes in the -security event processing pipeline: - -1. **Input Plugins**: Collect or receive logs from external sources (e.g., syslog, APIs, files). -2. **Parsing Plugins**: Extract and transform logs to match a defined mapping, enabling analysis/correlation and - allowing UTMStack users to explore logs in the Logs Explorer and visualize aggregated data in dashboards. -3. **Analysis Plugins**: Process logs to detect security attacks and generate alerts. -4. **Correlation Plugins**: Detect when different alerts are associated or have correlation relationships. -5. **Notification Plugins**: Send notifications to UTMStack and external systems (e.g., Slack channels, ticketing - systems, email). - -### Plugin Structure - -A plugin is a separate process that communicates with the EventProcessor via gRPC over Unix sockets. Each plugin type -implements a specific gRPC service interface: - -1. **Input Plugins**: Implement the `Engine` service (bidirectional streaming) to send logs to the EventProcessor -2. 
**Parsing Plugins**: Implement the `Parsing` service (unary RPC) with `ParseLog(Transform) -> Draft` method -3. **Analysis Plugins**: Implement the `Analysis` service (server streaming) with `Analyze(Event) -> stream Alert` - method -4. **Correlation Plugins**: Implement the `Correlation` service to process and correlate alerts -5. **Notification Plugins**: Implement the `Notification` service to send notifications to external systems - -The basic structure of a plugin includes: - -1. **Main Package**: The entry point for the plugin. -2. **gRPC Server**: Implements the appropriate gRPC service interface based on plugin type. -3. **Plugin Logic**: Implements the specific functionality for the plugin's purpose in the pipeline. -4. **Unix Socket**: Creates and listens on a named Unix socket for communication with the EventProcessor. - -### Developing an Input Plugin - -Input plugins collect or receive logs from external sources and send them to the EventProcessor. They implement the -`Engine` service interface. - -Here's a step-by-step guide for developing a custom input plugin: - -1. **Create a New Go Module**: Create a new Go module for your plugin. - -```bash -mkdir my-input-plugin -cd my-input-plugin -go mod init github.com/myorg/my-input-plugin -``` - -2. **Add Dependencies**: Add the go-sdk as a dependency. - -```bash -go get github.com/threatwinds/go-sdk -``` - -3. **Create the Main Package**: Create a main.go file with the basic structure of the input plugin. 
- -```go -package main - -import ( - "context" - "os" - "time" - - "github.com/threatwinds/go-sdk/catcher" - "github.com/threatwinds/go-sdk/plugins" - "github.com/threatwinds/go-sdk/utils" - "google.golang.org/grpc" -) - -func main() { - // Try to set up the connection - socketsFolder, err := utils.MkdirJoin(plugins.WorkDir, "sockets") - if err != nil { - _ = catcher.Error("cannot create sockets folder", err, nil) - time.Sleep(5 * time.Second) // Wait before retrying - os.Exit(1) - } - - socket := socketsFolder.FileJoin("engine_server.sock") - - conn, err := grpc.NewClient( - fmt.Sprintf("unix://%s", socket), - grpc.WithTransportCredentials(insecure.NewCredentials())) - if err != nil { - _ = catcher.Error("cannot connect to engine server", err, map[string]any{}) - time.Sleep(5 * time.Second) // Wait before retrying - os.Exit(1) - } - - defer conn.Close() - - client := plugins.NewEngineClient(conn) - - inputStreamClient, err := client.Input(context.Background()) - if err != nil { - _ = catcher.Error("cannot create input client", err, map[string]any{}) - time.Sleep(5 * time.Second) // Wait before retrying - os.Exit(1) - } - - var logsChannel = make(chan string, 100) - - // Start collecting data from an external source - go collectData(logsChannel) - - // Start sending data to the EventProcessor - go sendData(inputStreamClient, logsChannel) - - // Keep the plugin running - select {} -} - -func sendData(inputStreamClient grpc.BidiStreamingClient[plugins.Log, plugins.Ack], logs chan string) { - for { - log := <-logs - // Implement bidirectional streaming logic here - } -} - -func collectData(logs chan string) { - // Implement data collection logic here - logs <- "collected data" -} -``` - -4. **Build the Plugin**: Build the plugin as a standalone executable. - -```bash -go build -o my-input-plugin -``` - -5. **Deploy the Plugin**: Deploy the plugin to the plugins directory of the EventProcessor. 
- -```bash -cp my-input-plugin /path/to/eventprocessor/plugins/ -``` - -### Developing a Parsing Plugin - -Parsing plugins extract and transform logs to match a defined mapping that enables analysis/correlation and allows -UTMStack users to explore logs in the Logs Explorer. They implement the `Parsing` service interface. - -Here's a step-by-step guide for developing a custom parsing plugin: - -1. **Create a New Go Module**: Create a new Go module for your plugin. - -```bash -mkdir my-parsing-plugin -cd my-parsing-plugin -go mod init github.com/myorg/my-parsing-plugin -``` - -2. **Add Dependencies**: Add the go-sdk as a dependency. - -```bash -go get github.com/threatwinds/go-sdk -``` - -3. **Create the Main Package**: Create a main.go file with the basic structure of the parsing plugin. - -```go -package main - -import ( - "context" - "net" - "os" - - "github.com/threatwinds/go-sdk/catcher" - "github.com/threatwinds/go-sdk/plugins" - "github.com/threatwinds/go-sdk/utils" - "google.golang.org/grpc" -) - -type parsingServer struct { - plugins.UnimplementedParsingServer -} - -func main() { - // Create socket directory - filePath, err := utils.MkdirJoin(plugins.WorkDir, "sockets") - if err != nil { - _ = catcher.Error("cannot create socket directory", err, nil) - os.Exit(1) - } - - // Create socket path for parsing plugin - socketPath := filePath.FileJoin("my_parsing.sock") - _ = os.Remove(socketPath) - - // Resolve Unix address - unixAddress, err := net.ResolveUnixAddr("unix", socketPath) - if err != nil { - _ = catcher.Error("cannot resolve unix address", err, nil) - os.Exit(1) - } - - // Listen on Unix socket - listener, err := net.ListenUnix("unix", unixAddress) - if err != nil { - _ = catcher.Error("cannot listen to unix socket", err, nil) - os.Exit(1) - } - - // Create gRPC server - grpcServer := grpc.NewServer() - plugins.RegisterParsingServer(grpcServer, &parsingServer{}) - - // Serve gRPC - if err := grpcServer.Serve(listener); err != nil { - _ = 
catcher.Error("cannot serve grpc", err, nil) - os.Exit(1) - } -} - -func (p *parsingServer) ParseLog(ctx context.Context, transform *plugins.Transform) (*plugins.Draft, error) { - // Implement your parsing logic here - // Note: transform.Draft contains log as string, not structured data - // The actual log parsing is handled by the EventProcessor - - return transform.Draft, nil -} -``` - -4. **Implement the Parsing Logic**: The `ParseLog` method receives a `Transform` containing the current draft and step - configuration, and returns a modified `Draft`. - -5. **Build the Plugin**: Build the plugin as a standalone executable. - -```bash -go build -o my-parsing-plugin -``` - -6. **Deploy the Plugin**: Deploy the plugin to the plugins directory of the EventProcessor. - -```bash -cp my-parsing-plugin /path/to/eventprocessor/plugins/ -``` - -### Developing an Analysis Plugin - -Analysis plugins process logs to detect security attacks and generate alerts. They implement the `Analysis` service -interface. 
- -```go -package main - -import ( - "net" - "os" - - "github.com/threatwinds/go-sdk/catcher" - "github.com/threatwinds/go-sdk/plugins" - "github.com/threatwinds/go-sdk/utils" - "google.golang.org/grpc" -) - -type analysisServer struct { - plugins.UnimplementedAnalysisServer -} - -func main() { - // Create socket directory - filePath, err := utils.MkdirJoin(plugins.WorkDir, "sockets") - if err != nil { - _ = catcher.Error("cannot create socket directory", err, nil) - os.Exit(1) - } - - // Create socket path for analysis plugin - socketPath := filePath.FileJoin("my_analysis.sock") - _ = os.Remove(socketPath) - - // Resolve Unix address - unixAddress, err := net.ResolveUnixAddr("unix", socketPath) - if err != nil { - _ = catcher.Error("cannot resolve unix address", err, nil) - os.Exit(1) - } - - // Listen on Unix socket - listener, err := net.ListenUnix("unix", unixAddress) - if err != nil { - _ = catcher.Error("cannot listen to unix socket", err, nil) - os.Exit(1) - } - - // Create gRPC server - grpcServer := grpc.NewServer() - plugins.RegisterAnalysisServer(grpcServer, &analysisServer{}) - - // Serve gRPC - if err := grpcServer.Serve(listener); err != nil { - _ = catcher.Error("cannot serve grpc", err, nil) - os.Exit(1) - } -} - -func (a *analysisServer) Analyze(event *plugins.Event, srv grpc.ServerStreamingServer[plugins.Alert]) error { - // Implement your analysis logic here - // Example: Detect failed login attempts - - return nil // No alert generated -} -``` - -### Developing a Correlation Plugin - -Correlation plugins detect when different alerts are associated or have correlation relationships. 
- -```go -package main - -import ( - "context" - "net" - "os" - - "github.com/threatwinds/go-sdk/catcher" - "github.com/threatwinds/go-sdk/plugins" - "github.com/threatwinds/go-sdk/utils" - "google.golang.org/grpc" -) - -type correlationServer struct { - plugins.UnimplementedCorrelationServer -} - -func main() { - // Create socket directory - filePath, err := utils.MkdirJoin(plugins.WorkDir, "sockets") - if err != nil { - _ = catcher.Error("cannot create socket directory", err, nil) - os.Exit(1) - } - - // Create socket path for correlation plugin - socketPath := filePath.FileJoin("my_correlation.sock") - _ = os.Remove(socketPath) - - // Resolve Unix address - unixAddress, err := net.ResolveUnixAddr("unix", socketPath) - if err != nil { - _ = catcher.Error("cannot resolve unix address", err, nil) - os.Exit(1) - } - - // Listen on Unix socket - listener, err := net.ListenUnix("unix", unixAddress) - if err != nil { - _ = catcher.Error("cannot listen to unix socket", err, nil) - os.Exit(1) - } - - // Create gRPC server - grpcServer := grpc.NewServer() - plugins.RegisterCorrelationServer(grpcServer, &correlationServer{}) - - // Serve gRPC - if err := grpcServer.Serve(listener); err != nil { - _ = catcher.Error("cannot serve grpc", err, nil) - os.Exit(1) - } -} - -func (c *correlationServer) Correlate(ctx context.Context, alert *plugins.Alert) (*emptypb.Empty, error) { - // Implement your correlation logic here - // Example: Correlate multiple failed logins from same IP to create incident - - return nil, nil -} -``` - -### Developing a Notification Plugin - -Notification plugins send notifications to UTMStack and external systems (e.g., Slack channels, ticketing systems). 
- -```go -package main - -import ( - "context" - "net" - "os" - - "github.com/threatwinds/go-sdk/catcher" - "github.com/threatwinds/go-sdk/plugins" - "github.com/threatwinds/go-sdk/utils" - "google.golang.org/grpc" -) - -type notificationServer struct { - plugins.UnimplementedNotificationServer -} - -func main() { - // Create socket directory - filePath, err := utils.MkdirJoin(plugins.WorkDir, "sockets") - if err != nil { - _ = catcher.Error("cannot create socket directory", err, nil) - os.Exit(1) - } - - // Create socket path for notification plugin - socketPath := filePath.FileJoin("my_notification.sock") - _ = os.Remove(socketPath) - - // Resolve Unix address - unixAddress, err := net.ResolveUnixAddr("unix", socketPath) - if err != nil { - _ = catcher.Error("cannot resolve unix address", err, nil) - os.Exit(1) - } - - // Listen on Unix socket - listener, err := net.ListenUnix("unix", unixAddress) - if err != nil { - _ = catcher.Error("cannot listen to unix socket", err, nil) - os.Exit(1) - } - - // Create gRPC server - grpcServer := grpc.NewServer() - plugins.RegisterNotificationServer(grpcServer, ¬ificationServer{}) - - // Serve gRPC - if err := grpcServer.Serve(listener); err != nil { - _ = catcher.Error("cannot serve grpc", err, nil) - os.Exit(1) - } -} - -func (n *notificationServer) Notify(ctx context.Context, msg *plugins.Message) (*emptypb.Empty, error) { - // Implement your notification logic here - // Example: Send alert to Slack channel - - return &emptypb.Empty{}, nil -} -``` - -### Plugin Configuration - -Plugins can be configured using environment variables or configuration files. The go-sdk provides utilities for reading -configuration values: - -```go -value := plugins.PluginCfg("plugin_name", false).Get("my_config_key").String() -``` - -### Plugin Testing - -It's important to test your plugins thoroughly before deploying them to production. Here are some testing strategies: - -1. **Unit Testing**: Write unit tests for your plugin logic. -2. 
**Integration Testing**: Test your plugin with the EventProcessor in a development environment. -3. **Load Testing**: Test your plugin under load to ensure it can handle the expected event volume. - -## Scaling the System - -This section provides guidance on scaling the EventProcessor and UTMStack ecosystem to handle increasing event volumes. - -UTMStack can scale both vertically and horizontally to accommodate growing workloads and data volumes. - -### Horizontal Scaling - -For horizontal scaling, UTMStack only requires adding more nodes to Docker Swarm: - -1. **Add Docker Swarm Nodes**: Simply add more nodes to your Docker Swarm cluster. -2. **OpenSearch Node Affinity**: Add more OpenSearch nodes with node affinity to ensure proper data distribution. -3. **Automatic Component Scaling**: The rest of the components will automatically scale based on the number of Docker - nodes available. - -This approach simplifies scaling operations and ensures that your UTMStack deployment can grow seamlessly with your -needs. - -### Vertical Scaling - -You can also scale the system vertically by increasing the resources of existing nodes: - -1. **Increase CPU**: Add more CPU cores to handle more events. -2. **Increase Memory**: Add more memory to improve caching and reduce disk I/O. -3. **Faster Disks**: Use faster disks (e.g., SSDs) to improve I/O performance. - -### Database Scaling - -The OpenSearch database is a critical component that can be scaled to handle more data: - -1. **Add OpenSearch Nodes**: Add more OpenSearch nodes to the cluster with proper node affinity settings. -2. **Increase Shards**: Increase the number of shards to distribute the data more effectively. - -### Queue Scaling - -UTMStack handles event volume spikes through its built-in scaling mechanisms without relying on external message queue -systems like Kafka, RabbitMQ, or Redis. The system's architecture is designed to efficiently process events directly -within the Docker Swarm infrastructure. 
- -### Monitoring and Alerting - -Implement monitoring and alerting to detect scaling issues: - -1. **Prometheus**: Use Prometheus to collect metrics. -2. **Grafana**: Use Grafana to visualize metrics and set up alerts. - -## Migration and Upgrades - -Migration and upgrades in UTMStack are managed automatically by the UTMStack installer, eliminating the need for manual -intervention. The installer handles all aspects of the upgrade process, including: - -1. **Version Compatibility Checks**: The installer automatically verifies compatibility between components. -2. **Database Schema Updates**: Any required schema changes are applied automatically. -3. **Component Upgrades**: All components are upgraded in the correct order. - -In future releases, UTMStack will include an updates service that will further streamline this process by: - -1. **Automatic Update Detection**: Notifying administrators when updates are available. -2. **Scheduled Updates**: Allowing updates to be scheduled during maintenance windows. -3. **Rolling Updates**: Implementing updates with minimal downtime. -4. **Update Verification**: Verifying the success of updates and automatically rolling back if issues are detected. - -This automated approach ensures that your UTMStack deployment remains up-to-date with minimal effort and risk. - -## Community Resources and Support - -This section provides information on community resources and support options for the EventProcessor and UTMStack -ecosystem. - -### Documentation - -Access comprehensive documentation: - -1. **Official Documentation**: Visit the official documentation - at [UTMStack Documentation](https://documentation.utmstack.com). -2. **GitHub Repositories**: Check the GitHub repositories for READMEs and wikis. -3. **Code Comments**: Look at the code comments for detailed information about specific functions. - -### Community Forums - -Engage with the community: - -1. 
**GitHub Discussions**: Participate in discussions on GitHub and get community support. -2. **Discord**: Join the UTMStack Discord server for development discussions and collaboration. - -### Training and Tutorials - -Learn from training materials and tutorials: - -1. **Official Tutorials**: Follow the official tutorials on the UTMStack website. -2. **YouTube Channel**: Watch tutorial videos on the UTMStack YouTube channel. -3. **Webinars**: Attend webinars on new features and best practices. - -### Commercial Support - -Get commercial support if needed: - -1. **Professional Services**: Engage professional services for implementation and customization. -2. **Technical Support**: Purchase technical support for production environments. -3. **Training Services**: Get training for your team from UTMStack experts. - -### Contributing - -Contribute to the project: - -1. **Bug Reports**: Report bugs on GitHub. -2. **Feature Requests**: Submit feature requests on GitHub. -3. **Pull Requests**: Contribute code through pull requests. -4. **Documentation**: Help improve the documentation. -5. **Community Support**: Help answer questions from other users. 
+Documentation on how to create and maintain custom plugins can be found in: https://github.com/utmstack/UTMStack/wiki diff --git a/plugins/alerts/go.mod b/plugins/alerts/go.mod index a6cbc9aae..bfcbdb687 100644 --- a/plugins/alerts/go.mod +++ b/plugins/alerts/go.mod @@ -4,7 +4,7 @@ go 1.25.5 require ( github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 github.com/tidwall/gjson v1.18.0 google.golang.org/protobuf v1.36.11 ) @@ -13,8 +13,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -48,8 +48,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect diff --git a/plugins/alerts/go.sum b/plugins/alerts/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/alerts/go.sum +++ b/plugins/alerts/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 
h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod 
h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/alerts/main.go b/plugins/alerts/main.go index 8ac956215..cd49f6d40 100644 --- a/plugins/alerts/main.go +++ b/plugins/alerts/main.go @@ -56,10 +56,11 @@ type AlertFields struct { } func main() { - openSearchUrl := 
plugins.PluginCfg("org.opensearch", false).Get("opensearch").String() + openSearchUrl := plugins.PluginCfg("org.opensearch").Get("opensearch").String() err := sdkos.Connect([]string{openSearchUrl}, "", "") if err != nil { _ = catcher.Error("cannot connect to OpenSearch", err, map[string]any{"process": "plugin_com.utmstack.alerts"}) + time.Sleep(5 * time.Second) os.Exit(1) } @@ -68,6 +69,7 @@ func main() { _ = catcher.Error("com.utmstack.alerts", err, map[string]any{ "process": "plugin_com.utmstack.alerts", }) + time.Sleep(5 * time.Second) os.Exit(1) } } diff --git a/plugins/aws/config/config.go b/plugins/aws/config/config.go index 3c314d88c..784c39b68 100644 --- a/plugins/aws/config/config.go +++ b/plugins/aws/config/config.go @@ -41,7 +41,7 @@ func GetConfig() *ConfigurationSection { func StartConfigurationSystem() { for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.aws"}) time.Sleep(reconnectDelay) diff --git a/plugins/aws/go.mod b/plugins/aws/go.mod index 36dc8f25e..d85c1dfa4 100644 --- a/plugins/aws/go.mod +++ b/plugins/aws/go.mod @@ -7,7 +7,7 @@ require ( github.com/aws/aws-sdk-go-v2/config v1.32.7 github.com/aws/aws-sdk-go-v2/credentials v1.19.7 github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 ) require ( @@ -36,8 +36,8 @@ require ( github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 // indirect github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 // indirect github.com/aws/smithy-go v1.24.0 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect 
github.com/gin-contrib/sse v1.1.0 // indirect @@ -66,8 +66,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/plugins/aws/go.sum b/plugins/aws/go.sum index 98f5f0339..b0cc86430 100644 --- a/plugins/aws/go.sum +++ b/plugins/aws/go.sum @@ -36,10 +36,10 @@ github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x 
v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -123,8 +123,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -174,10 +174,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= 
+google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/azure/config/config.go b/plugins/azure/config/config.go index 220103675..63e0d0a97 100644 --- a/plugins/azure/config/config.go +++ b/plugins/azure/config/config.go @@ -41,7 +41,7 @@ func GetConfig() *ConfigurationSection { func StartConfigurationSystem() { for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.azure"}) time.Sleep(reconnectDelay) diff --git a/plugins/azure/go.mod b/plugins/azure/go.mod index c92123581..a2bf8c395 100644 --- a/plugins/azure/go.mod +++ b/plugins/azure/go.mod @@ -6,7 +6,7 @@ require ( github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs/v2 v2.0.1 github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.4 github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -18,8 +18,8 @@ require ( github.com/Azure/go-amqp v1.5.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - 
github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -54,8 +54,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/azure/go.sum b/plugins/azure/go.sum index 451e4dad1..f07ebba16 100644 --- a/plugins/azure/go.sum +++ b/plugins/azure/go.sum @@ -22,10 +22,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= 
+github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE= @@ -123,8 +123,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -174,10 +174,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod 
h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/bitdefender/config/config.go b/plugins/bitdefender/config/config.go index d089c5744..a5cd5310f 100644 --- a/plugins/bitdefender/config/config.go +++ b/plugins/bitdefender/config/config.go @@ -54,7 +54,7 @@ func StartConfigurationSystem() { if err := utils.ConnectionChecker(UrlCheckConnection); err != nil { _ = catcher.Error("External connection failure detected: %v", err, map[string]any{"process": "plugin_com.utmstack.bitdefender"}) } - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.bitdefender"}) time.Sleep(reconnectDelay) diff --git a/plugins/bitdefender/go.mod b/plugins/bitdefender/go.mod index 739945191..7dc65e835 100644 --- a/plugins/bitdefender/go.mod +++ b/plugins/bitdefender/go.mod @@ -5,7 
+5,7 @@ go 1.25.5 require ( github.com/google/uuid v1.6.0 github.com/gorilla/mux v1.8.1 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -14,8 +14,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -50,8 +50,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/bitdefender/go.sum b/plugins/bitdefender/go.sum index 9464eeeda..c26cade06 100644 --- a/plugins/bitdefender/go.sum +++ b/plugins/bitdefender/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod 
h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -93,8 +93,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -144,10 +144,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod 
h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/bitdefender/server/certs.go b/plugins/bitdefender/server/certs.go index 6dc270bfa..103ae4b7e 100644 --- a/plugins/bitdefender/server/certs.go +++ b/plugins/bitdefender/server/certs.go @@ -18,7 +18,7 @@ var ( func loadCerts() (tls.Certificate, error) { certsFolderConfig := "" for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, nil) 
time.Sleep(reconnectDelay) diff --git a/plugins/config/go.mod b/plugins/config/go.mod index fd039eb7e..2ba8d1179 100644 --- a/plugins/config/go.mod +++ b/plugins/config/go.mod @@ -4,7 +4,7 @@ go 1.25.5 require ( github.com/lib/pq v1.10.9 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 gopkg.in/yaml.v3 v3.0.1 sigs.k8s.io/yaml v1.6.0 ) @@ -13,8 +13,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -50,8 +50,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 // indirect google.golang.org/protobuf v1.36.11 // indirect ) diff --git a/plugins/config/go.sum b/plugins/config/go.sum index 3734a6b8b..a0ab31a8b 100644 --- a/plugins/config/go.sum +++ b/plugins/config/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= 
-github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -93,8 +93,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -144,10 +144,10 @@ 
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/config/main.go b/plugins/config/main.go index 13e259542..c15b6c4ac 100644 --- a/plugins/config/main.go +++ b/plugins/config/main.go @@ -406,7 +406,7 @@ func main() { // connect to postgres database func connect() (*sql.DB, error) { - pCfg := plugins.PluginCfg("com.utmstack", false) + pCfg := plugins.PluginCfg("com.utmstack") password := pCfg.Get("postgresql.password").String() 
server := pCfg.Get("postgresql.server").String() port := pCfg.Get("postgresql.port").Int() diff --git a/plugins/crowdstrike/config/config.go b/plugins/crowdstrike/config/config.go index 272f15def..bb96f60dd 100644 --- a/plugins/crowdstrike/config/config.go +++ b/plugins/crowdstrike/config/config.go @@ -41,7 +41,7 @@ func GetConfig() *ConfigurationSection { func StartConfigurationSystem() { for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) time.Sleep(reconnectDelay) diff --git a/plugins/crowdstrike/go.mod b/plugins/crowdstrike/go.mod index de6071d76..2d7c073e3 100644 --- a/plugins/crowdstrike/go.mod +++ b/plugins/crowdstrike/go.mod @@ -5,7 +5,7 @@ go 1.25.5 require ( github.com/crowdstrike/gofalcon v0.19.0 github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -15,8 +15,8 @@ require ( github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/blang/semver/v4 v4.0.0 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect @@ -71,7 +71,7 @@ require ( github.com/tidwall/pretty v1.2.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.3.1 // indirect - go.mongodb.org/mongo-driver v1.17.6 // indirect + go.mongodb.org/mongo-driver v1.17.7 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect go.opentelemetry.io/otel v1.39.0 // 
indirect go.opentelemetry.io/otel/metric v1.39.0 // indirect @@ -86,8 +86,8 @@ require ( golang.org/x/sync v0.19.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/crowdstrike/go.sum b/plugins/crowdstrike/go.sum index fd960ef76..a5109a5ba 100644 --- a/plugins/crowdstrike/go.sum +++ b/plugins/crowdstrike/go.sum @@ -6,10 +6,10 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= 
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= @@ -152,8 +152,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -170,8 +170,8 @@ github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= github.com/wI2L/jsondiff v0.7.0 h1:1lH1G37GhBPqCfp/lrs91rf/2j3DktX6qYAKZkLuCQQ= github.com/wI2L/jsondiff v0.7.0/go.mod h1:KAEIojdQq66oJiHhDyQez2x+sRit0vIzC9KeK0yizxM= -go.mongodb.org/mongo-driver v1.17.6 h1:87JUG1wZfWsr6rIz3ZmpH90rL5tea7O3IHuSwHUpsss= -go.mongodb.org/mongo-driver v1.17.6/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.mongodb.org/mongo-driver v1.17.7 h1:a9w+U3Vt67eYzcfq3k/OAv284/uUUkL0uP75VE5rCOU= +go.mongodb.org/mongo-driver v1.17.7/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= go.opentelemetry.io/auto/sdk v1.2.1/go.mod 
h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= @@ -209,10 +209,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/events/go.mod b/plugins/events/go.mod index 494f45e3e..1e9ac27e5 100644 --- a/plugins/events/go.mod +++ b/plugins/events/go.mod @@ -3,7 +3,7 @@ module github.com/utmstack/UTMStack/plugins/events go 1.25.5 require ( - 
github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 github.com/tidwall/gjson v1.18.0 ) @@ -11,8 +11,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -47,8 +47,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 // indirect google.golang.org/protobuf v1.36.11 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/plugins/events/go.sum b/plugins/events/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/events/go.sum +++ b/plugins/events/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 
h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 
h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/events/main.go b/plugins/events/main.go index d08f63c45..df247387e 100644 --- a/plugins/events/main.go +++ b/plugins/events/main.go @@ -3,6 +3,7 @@ package main import ( "io" "os" + "time" "github.com/threatwinds/go-sdk/catcher" "github.com/threatwinds/go-sdk/plugins" @@ -17,6 +18,7 @@ func main() { _ = catcher.Error("failed to start analysis plugin", err, map[string]any{ "process": "plugin_com.utmstack.events", }) + time.Sleep(5 * time.Second) os.Exit(1) } } diff --git a/plugins/events/queue.go b/plugins/events/queue.go index 81ab8f606..53160949b 100644 
--- a/plugins/events/queue.go +++ b/plugins/events/queue.go @@ -33,7 +33,7 @@ func startQueue() { retryDelay := 2 * time.Second for retry := 0; retry < maxRetries; retry++ { - osUrl := plugins.PluginCfg("org.opensearch", false).Get("opensearch").String() + osUrl := plugins.PluginCfg("org.opensearch").Get("opensearch").String() err := sdkos.Connect([]string{osUrl}, "", "") if err == nil { diff --git a/plugins/feeds/go.mod b/plugins/feeds/go.mod index e6ea5f3fd..68b1d5bd7 100644 --- a/plugins/feeds/go.mod +++ b/plugins/feeds/go.mod @@ -5,7 +5,7 @@ go 1.25.5 require ( github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0 github.com/opensearch-project/opensearch-go/v2 v2.3.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 golang.org/x/sync v0.19.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -14,8 +14,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -51,8 +51,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 // indirect google.golang.org/protobuf v1.36.11 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/plugins/feeds/go.sum 
b/plugins/feeds/go.sum index 228a371a5..7154bbee2 100644 --- a/plugins/feeds/go.sum +++ b/plugins/feeds/go.sum @@ -19,10 +19,10 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.19.0/go.mod h1:BgQOMsg8av8jset59jely github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -113,8 +113,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod 
h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -197,10 +197,10 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc 
v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/feeds/main.go b/plugins/feeds/main.go index c6be6eeff..bfef0d981 100644 --- a/plugins/feeds/main.go +++ b/plugins/feeds/main.go @@ -37,7 +37,8 @@ func main() { ctx := context.Background() app, err := initializer.NewApp(ctx) if err != nil { - catcher.Error("failed to initialize application", err, nil) + _ = catcher.Error("failed to initialize application", err, nil) + time.Sleep(5 * time.Second) os.Exit(1) } @@ -60,7 +61,8 @@ func main() { defer shutdownCancel() if err := app.Shutdown(shutdownCtx); err != nil { - catcher.Error("error during shutdown", err, nil) + _ = catcher.Error("error during shutdown", err, nil) + time.Sleep(5 * time.Second) } catcher.Info("ThreadWinds Ingestion Service stopped", nil) diff --git a/plugins/feeds/utils/env.go b/plugins/feeds/utils/env.go index 779b3dc6a..a1643bf4d 100644 --- a/plugins/feeds/utils/env.go +++ b/plugins/feeds/utils/env.go @@ -2,6 +2,7 @@ package utils import ( "os" + "time" "github.com/threatwinds/go-sdk/catcher" ) @@ -9,11 +10,13 @@ import ( func Getenv(key string) string { value, defined := os.LookupEnv(key) if !defined { - catcher.Error("Error loading environment variable, environment variable does not exist", nil, map[string]any{"key": key}) + _ = catcher.Error("Error loading environment variable, environment variable does not exist", nil, map[string]any{"key": key}) + time.Sleep(5 * time.Second) os.Exit(1) } if (value == "") || (value == " ") { - catcher.Error("Error loading environment variable, empty environment variable", nil, map[string]any{"key": key}) + _ = catcher.Error("Error loading environment variable, empty environment variable", nil, map[string]any{"key": key}) + time.Sleep(5 * time.Second) os.Exit(1) } return value diff --git a/plugins/gcp/config/config.go b/plugins/gcp/config/config.go index e6f9daa17..480fff34f 100644 --- 
a/plugins/gcp/config/config.go +++ b/plugins/gcp/config/config.go @@ -41,7 +41,7 @@ func GetConfig() *ConfigurationSection { func StartConfigurationSystem() { for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.gcp"}) time.Sleep(reconnectDelay) diff --git a/plugins/gcp/go.mod b/plugins/gcp/go.mod index 135fe4c07..b08ac0228 100644 --- a/plugins/gcp/go.mod +++ b/plugins/gcp/go.mod @@ -5,8 +5,8 @@ go 1.25.5 require ( cloud.google.com/go/pubsub v1.50.1 github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 - google.golang.org/api v0.260.0 + github.com/threatwinds/go-sdk v1.1.9 + google.golang.org/api v0.263.0 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -14,15 +14,15 @@ require ( require ( cel.dev/expr v0.25.1 // indirect cloud.google.com/go v0.123.0 // indirect - cloud.google.com/go/auth v0.18.0 // indirect + cloud.google.com/go/auth v0.18.1 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/compute/metadata v0.9.0 // indirect cloud.google.com/go/iam v1.5.3 // indirect cloud.google.com/go/pubsub/v2 v2.3.0 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -75,9 +75,9 @@ require ( golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect golang.org/x/time v0.14.0 // indirect - google.golang.org/genproto v0.0.0-20260120221211-b8f7ae30c516 // indirect - 
google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/gcp/go.sum b/plugins/gcp/go.sum index c3c4547e6..68097e872 100644 --- a/plugins/gcp/go.sum +++ b/plugins/gcp/go.sum @@ -3,16 +3,16 @@ cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE= cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU= -cloud.google.com/go/auth v0.18.0 h1:wnqy5hrv7p3k7cShwAU/Br3nzod7fxoqG+k0VZ+/Pk0= -cloud.google.com/go/auth v0.18.0/go.mod h1:wwkPM1AgE1f2u6dG443MiWoD8C3BtOywNsUMcUTVDRo= +cloud.google.com/go/auth v0.18.1 h1:IwTEx92GFUo2pJ6Qea0EU3zYvKnTAeRCODxfA/G5UWs= +cloud.google.com/go/auth v0.18.1/go.mod h1:GfTYoS9G3CWpRA3Va9doKN9mjPGRS+v41jmZAhBzbrA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs= cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= cloud.google.com/go/iam v1.5.3 h1:+vMINPiDF2ognBJ97ABAYYwRgsaqxPbQDlMnbHMjolc= cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU= -cloud.google.com/go/kms v1.24.0 h1:SWltUuoPhTdv9q/P0YEAWQfoYT32O5HdfPgTiWMvrH8= -cloud.google.com/go/kms v1.24.0/go.mod 
h1:QDH3z2SJ50lfNOE8EokKC1G40i7I0f8xTMCoiptcb5g= +cloud.google.com/go/kms v1.25.0 h1:gVqvGGUmz0nYCmtoxWmdc1wli2L1apgP8U4fghPGSbQ= +cloud.google.com/go/kms v1.25.0/go.mod h1:XIdHkzfj0bUO3E+LvwPg+oc7s58/Ns8Nd8Sdtljihbk= cloud.google.com/go/longrunning v0.8.0 h1:LiKK77J3bx5gDLi4SMViHixjD2ohlkwBi+mKA7EhfW8= cloud.google.com/go/longrunning v0.8.0/go.mod h1:UmErU2Onzi+fKDg2gR7dusz11Pe26aknR4kHmJJqIfk= cloud.google.com/go/pubsub v1.50.1 h1:fzbXpPyJnSGvWXF1jabhQeXyxdbCIkXTpjXHy7xviBM= @@ -24,10 +24,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -160,8 +160,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify 
v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -253,19 +253,19 @@ golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/api v0.260.0 h1:XbNi5E6bOVEj/uLXQRlt6TKuEzMD7zvW/6tNwltE4P4= -google.golang.org/api v0.260.0/go.mod h1:Shj1j0Phr/9sloYrKomICzdYgsSDImpTxME8rGLaZ/o= +google.golang.org/api v0.263.0 h1:UFs7qn8gInIdtk1ZA6eXRXp5JDAnS4x9VRsRVCeKdbk= +google.golang.org/api v0.263.0/go.mod h1:fAU1xtNNisHgOF5JooAs8rRaTkl2rT3uaoNGo9NS3R8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto 
v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20260120221211-b8f7ae30c516 h1:JWDYM5UaYOOCf35C9PZ8f6+JWlUWwS5J66LYZTxuHUQ= -google.golang.org/genproto v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:SpjiK7gGN2j/djoQMxLl3QOe/J/XxNzC5M+YLecVVWU= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto v0.0.0-20260128011058-8636f8732409 h1:VQZ/yAbAtjkHgH80teYd2em3xtIkkHd7ZhqfH2N9CsM= +google.golang.org/genproto v0.0.0-20260128011058-8636f8732409/go.mod h1:rxKD3IEILWEu3P44seeNOAwZN4SaoKaQ/2eTg4mM6EM= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= diff --git a/plugins/geolocation/go.mod b/plugins/geolocation/go.mod index 6af7e3e61..82e6ad8fc 100644 --- a/plugins/geolocation/go.mod +++ 
b/plugins/geolocation/go.mod @@ -3,7 +3,7 @@ module github.com/utmstack/UTMStack/plugins/geolocation go 1.25.5 require ( - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 github.com/tidwall/gjson v1.18.0 github.com/tidwall/sjson v1.2.5 ) @@ -12,8 +12,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -48,8 +48,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 // indirect google.golang.org/protobuf v1.36.11 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/plugins/geolocation/go.sum b/plugins/geolocation/go.sum index f493fcebe..96b622d6e 100644 --- a/plugins/geolocation/go.sum +++ b/plugins/geolocation/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 
h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= @@ -143,10 +143,10 @@ golang.org/x/text v0.33.0 
h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/geolocation/main.go b/plugins/geolocation/main.go index aa128016d..1e4bec8cf 100644 --- a/plugins/geolocation/main.go +++ b/plugins/geolocation/main.go @@ -3,6 +3,7 @@ package main import ( "context" "os" + "time" "github.com/threatwinds/go-sdk/catcher" "github.com/threatwinds/go-sdk/plugins" @@ -20,6 +21,7 @@ func main() { _ = catcher.Error("com.utmstack.geolocation", err, map[string]any{ "process": 
"plugin_com.utmstack.geolocation", }) + time.Sleep(5 * time.Second) os.Exit(1) } } diff --git a/plugins/inputs/auth.go b/plugins/inputs/auth.go index dc03dff92..391402ef7 100644 --- a/plugins/inputs/auth.go +++ b/plugins/inputs/auth.go @@ -53,7 +53,7 @@ func (auth *LogAuthService) SyncAuth() { } func (auth *LogAuthService) syncKeys(typ agent.ConnectorType) { - pConfig := plugins.PluginCfg("com.utmstack", false) + pConfig := plugins.PluginCfg("com.utmstack") agentManager := pConfig.Get("agentManager").String() internalKey := pConfig.Get("internalKey").String() diff --git a/plugins/inputs/backend.go b/plugins/inputs/backend.go index 4b3c9a471..b248038e9 100644 --- a/plugins/inputs/backend.go +++ b/plugins/inputs/backend.go @@ -10,7 +10,7 @@ import ( ) func createPanelRequest(method string, endpoint string) (*http.Request, error) { - pConfig := plugins.PluginCfg("com.utmstack", false) + pConfig := plugins.PluginCfg("com.utmstack") backend := pConfig.Get("backend").String() internalKey := pConfig.Get("internalKey").String() diff --git a/plugins/inputs/go.mod b/plugins/inputs/go.mod index 2c2bd3cdd..479bb46bf 100644 --- a/plugins/inputs/go.mod +++ b/plugins/inputs/go.mod @@ -5,7 +5,7 @@ go 1.25.5 require ( github.com/gin-gonic/gin v1.11.0 github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -14,8 +14,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -49,8 +49,8 @@ require ( golang.org/x/net v0.49.0 
// indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/inputs/go.sum b/plugins/inputs/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/inputs/go.sum +++ b/plugins/inputs/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 
h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/inputs/health.go b/plugins/inputs/health.go index 453b064ef..332edc00a 100644 --- a/plugins/inputs/health.go +++ b/plugins/inputs/health.go @@ -24,7 +24,7 @@ func CheckAgentManagerHealth() { tlsCredentials := credentials.NewTLS(tlsConfig) for { - pConfig := plugins.PluginCfg("com.utmstack", false) + pConfig := plugins.PluginCfg("com.utmstack") agentManager := pConfig.Get("agentManager").String() internalKey := pConfig.Get("internalKey").String() diff --git a/plugins/inputs/main.go b/plugins/inputs/main.go index 7db70f9c3..e21fe4eb1 100644 --- a/plugins/inputs/main.go +++ b/plugins/inputs/main.go @@ -72,7 +72,7 @@ func main() { } func loadCerts() (string, string, error) { - certsFolderPath := plugins.PluginCfg("com.utmstack", false).Get("certsFolder").String() + certsFolderPath := plugins.PluginCfg("com.utmstack").Get("certsFolder").String() certsFolder, err := utils.MkdirJoin(certsFolderPath) if err != nil { diff --git a/plugins/inputs/middlewares.go b/plugins/inputs/middlewares.go index 2c5d0f0f4..d9d5c0351 100644 --- a/plugins/inputs/middlewares.go +++ b/plugins/inputs/middlewares.go @@ -118,7 +118,7 @@ func (m *Middlewares) authFromContext(ctx context.Context) error { return status.Error(codes.PermissionDenied, "invalid connection key") } } else if 
len(authInternalKey) > 0 { - internalKey := plugins.PluginCfg("com.utmstack", false).Get("internalKey").String() + internalKey := plugins.PluginCfg("com.utmstack").Get("internalKey").String() if internalKey != authInternalKey[0] { return status.Error(codes.PermissionDenied, "internal key does not match") } diff --git a/plugins/modules-config/go.mod b/plugins/modules-config/go.mod index b90aebd97..fdc0111f5 100644 --- a/plugins/modules-config/go.mod +++ b/plugins/modules-config/go.mod @@ -12,8 +12,8 @@ require ( github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 github.com/crowdstrike/gofalcon v0.19.0 github.com/gin-gonic/gin v1.11.0 - github.com/threatwinds/go-sdk v1.1.7 - google.golang.org/api v0.260.0 + github.com/threatwinds/go-sdk v1.1.9 + google.golang.org/api v0.263.0 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -21,7 +21,7 @@ require ( require ( cel.dev/expr v0.25.1 // indirect cloud.google.com/go v0.123.0 // indirect - cloud.google.com/go/auth v0.18.0 // indirect + cloud.google.com/go/auth v0.18.1 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/compute/metadata v0.9.0 // indirect cloud.google.com/go/iam v1.5.3 // indirect @@ -44,8 +44,8 @@ require ( github.com/aws/smithy-go v1.24.0 // indirect github.com/blang/semver/v4 v4.0.0 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -104,7 +104,7 @@ require ( github.com/tidwall/pretty v1.2.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.3.1 // indirect - go.mongodb.org/mongo-driver v1.17.6 // indirect + go.mongodb.org/mongo-driver v1.17.7 // 
indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.64.0 // indirect @@ -123,9 +123,9 @@ require ( golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect golang.org/x/time v0.14.0 // indirect - google.golang.org/genproto v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/modules-config/go.sum b/plugins/modules-config/go.sum index 60d45bc04..0324da313 100644 --- a/plugins/modules-config/go.sum +++ b/plugins/modules-config/go.sum @@ -3,16 +3,16 @@ cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE= cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU= -cloud.google.com/go/auth v0.18.0 h1:wnqy5hrv7p3k7cShwAU/Br3nzod7fxoqG+k0VZ+/Pk0= -cloud.google.com/go/auth v0.18.0/go.mod h1:wwkPM1AgE1f2u6dG443MiWoD8C3BtOywNsUMcUTVDRo= +cloud.google.com/go/auth v0.18.1 h1:IwTEx92GFUo2pJ6Qea0EU3zYvKnTAeRCODxfA/G5UWs= +cloud.google.com/go/auth v0.18.1/go.mod h1:GfTYoS9G3CWpRA3Va9doKN9mjPGRS+v41jmZAhBzbrA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= 
cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs= cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= cloud.google.com/go/iam v1.5.3 h1:+vMINPiDF2ognBJ97ABAYYwRgsaqxPbQDlMnbHMjolc= cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU= -cloud.google.com/go/kms v1.24.0 h1:SWltUuoPhTdv9q/P0YEAWQfoYT32O5HdfPgTiWMvrH8= -cloud.google.com/go/kms v1.24.0/go.mod h1:QDH3z2SJ50lfNOE8EokKC1G40i7I0f8xTMCoiptcb5g= +cloud.google.com/go/kms v1.25.0 h1:gVqvGGUmz0nYCmtoxWmdc1wli2L1apgP8U4fghPGSbQ= +cloud.google.com/go/kms v1.25.0/go.mod h1:XIdHkzfj0bUO3E+LvwPg+oc7s58/Ns8Nd8Sdtljihbk= cloud.google.com/go/longrunning v0.8.0 h1:LiKK77J3bx5gDLi4SMViHixjD2ohlkwBi+mKA7EhfW8= cloud.google.com/go/longrunning v0.8.0/go.mod h1:UmErU2Onzi+fKDg2gR7dusz11Pe26aknR4kHmJJqIfk= cloud.google.com/go/pubsub v1.50.1 h1:fzbXpPyJnSGvWXF1jabhQeXyxdbCIkXTpjXHy7xviBM= @@ -76,10 +76,10 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod 
h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -282,8 +282,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -302,8 +302,8 @@ github.com/wI2L/jsondiff v0.7.0 h1:1lH1G37GhBPqCfp/lrs91rf/2j3DktX6qYAKZkLuCQQ= github.com/wI2L/jsondiff v0.7.0/go.mod h1:KAEIojdQq66oJiHhDyQez2x+sRit0vIzC9KeK0yizxM= go.einride.tech/aip v0.73.0 h1:bPo4oqBo2ZQeBKo4ZzLb1kxYXTY1ysJhpvQyfuGzvps= go.einride.tech/aip v0.73.0/go.mod h1:Mj7rFbmXEgw0dq1dqJ7JGMvYCZZVxmGOR3S4ZcV5LvQ= -go.mongodb.org/mongo-driver v1.17.6 h1:87JUG1wZfWsr6rIz3ZmpH90rL5tea7O3IHuSwHUpsss= -go.mongodb.org/mongo-driver v1.17.6/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.mongodb.org/mongo-driver v1.17.7 h1:a9w+U3Vt67eYzcfq3k/OAv284/uUUkL0uP75VE5rCOU= +go.mongodb.org/mongo-driver v1.17.7/go.mod 
h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= @@ -377,19 +377,19 @@ golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/api v0.260.0 h1:XbNi5E6bOVEj/uLXQRlt6TKuEzMD7zvW/6tNwltE4P4= -google.golang.org/api v0.260.0/go.mod h1:Shj1j0Phr/9sloYrKomICzdYgsSDImpTxME8rGLaZ/o= +google.golang.org/api v0.263.0 h1:UFs7qn8gInIdtk1ZA6eXRXp5JDAnS4x9VRsRVCeKdbk= +google.golang.org/api v0.263.0/go.mod h1:fAU1xtNNisHgOF5JooAs8rRaTkl2rT3uaoNGo9NS3R8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20260120221211-b8f7ae30c516 h1:JWDYM5UaYOOCf35C9PZ8f6+JWlUWwS5J66LYZTxuHUQ= -google.golang.org/genproto v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:SpjiK7gGN2j/djoQMxLl3QOe/J/XxNzC5M+YLecVVWU= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod 
h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto v0.0.0-20260128011058-8636f8732409 h1:VQZ/yAbAtjkHgH80teYd2em3xtIkkHd7ZhqfH2N9CsM= +google.golang.org/genproto v0.0.0-20260128011058-8636f8732409/go.mod h1:rxKD3IEILWEu3P44seeNOAwZN4SaoKaQ/2eTg4mM6EM= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= diff --git a/plugins/modules-config/main.go b/plugins/modules-config/main.go index 457749215..4dda275af 100644 --- a/plugins/modules-config/main.go +++ b/plugins/modules-config/main.go @@ -2,6 +2,8 @@ package main import ( "fmt" + "os" + "time" "github.com/threatwinds/go-sdk/catcher" "github.com/threatwinds/go-sdk/plugins" @@ -18,13 +20,14 @@ func main() { return } - utmConfig := plugins.PluginCfg("com.utmstack", false) + utmConfig := plugins.PluginCfg("com.utmstack") InternalKey = utmConfig.Get("internalKey").String() BackendService = utmConfig.Get("backend").String() if InternalKey == "" || BackendService == "" { _ = catcher.Error("error getting configuration", 
fmt.Errorf("internal key or backend service is empty"), map[string]any{"process": "plugin_com.utmstack.modules-config"}) - return + time.Sleep(5 * time.Second) + os.Exit(1) } go startGRPCServer() diff --git a/plugins/o365/config/config.go b/plugins/o365/config/config.go index 2d672f5d0..770c000d3 100644 --- a/plugins/o365/config/config.go +++ b/plugins/o365/config/config.go @@ -41,7 +41,7 @@ func GetConfig() *ConfigurationSection { func StartConfigurationSystem() { for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.o365"}) time.Sleep(reconnectDelay) diff --git a/plugins/o365/go.mod b/plugins/o365/go.mod index c2b2890b3..c755c130b 100644 --- a/plugins/o365/go.mod +++ b/plugins/o365/go.mod @@ -4,7 +4,7 @@ go 1.25.5 require ( github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -13,8 +13,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -49,8 +49,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api 
v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/o365/go.sum b/plugins/o365/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/o365/go.sum +++ b/plugins/o365/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= 
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc 
v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/soc-ai/config/config.go b/plugins/soc-ai/config/config.go index 3d411aec3..05882ad7b 100644 --- a/plugins/soc-ai/config/config.go +++ b/plugins/soc-ai/config/config.go @@ -53,7 +53,7 @@ func StartConfigurationSystem() { GetConfig() for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.soc-ai"}) time.Sleep(reconnectDelay) diff --git a/plugins/soc-ai/go.mod b/plugins/soc-ai/go.mod index ce04339b7..44b6744e3 100644 --- a/plugins/soc-ai/go.mod +++ b/plugins/soc-ai/go.mod @@ -17,8 +17,8 @@ require ( require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -37,7 +37,7 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/stoewer/go-strcase v1.3.1 // indirect - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 github.com/tidwall/gjson v1.18.0 // indirect github.com/tidwall/match v1.2.0 // indirect github.com/tidwall/pretty v1.2.1 // indirect @@ -49,8 +49,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect 
golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/protobuf v1.36.11 gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect diff --git a/plugins/soc-ai/go.sum b/plugins/soc-ai/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/soc-ai/go.sum +++ b/plugins/soc-ai/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 
h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/soc-ai/main.go b/plugins/soc-ai/main.go index e2e437220..dd3c5f710 100644 --- a/plugins/soc-ai/main.go +++ b/plugins/soc-ai/main.go @@ -26,6 +26,7 @@ func main() { _ = catcher.Error("failed to start correlation plugin", err, map[string]any{ "process": "plugin_com.utmstack.soc-ai", }) + time.Sleep(5 * time.Second) os.Exit(1) } } diff --git a/plugins/sophos/config/config.go b/plugins/sophos/config/config.go index 118b494c5..90788d498 100644 --- a/plugins/sophos/config/config.go +++ b/plugins/sophos/config/config.go @@ -41,7 +41,7 @@ func GetConfig() *ConfigurationSection { func StartConfigurationSystem() { for { - pluginConfig := plugins.PluginCfg("com.utmstack", false) + pluginConfig := plugins.PluginCfg("com.utmstack") if !pluginConfig.Exists() { _ = catcher.Error("plugin configuration not found", nil, map[string]any{"process": "plugin_com.utmstack.sophos"}) time.Sleep(reconnectDelay) diff --git a/plugins/sophos/go.mod b/plugins/sophos/go.mod index e74d7d712..fbd307c8c 100644 --- a/plugins/sophos/go.mod +++ b/plugins/sophos/go.mod @@ -4,7 +4,7 @@ go 1.25.5 require ( github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 ) @@ -13,8 +13,8 @@ require ( 
cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -49,8 +49,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect ) diff --git a/plugins/sophos/go.sum b/plugins/sophos/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/sophos/go.sum +++ b/plugins/sophos/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 
h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 
h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/stats/go.mod b/plugins/stats/go.mod index bd1024a6b..178d81425 100644 --- a/plugins/stats/go.mod +++ b/plugins/stats/go.mod @@ -4,7 +4,7 @@ go 1.25.5 require ( github.com/google/uuid v1.6.0 - github.com/threatwinds/go-sdk v1.1.7 + github.com/threatwinds/go-sdk v1.1.9 google.golang.org/protobuf v1.36.11 ) @@ -12,8 +12,8 @@ require ( cel.dev/expr v0.25.1 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bytedance/gopkg v0.1.3 // indirect - github.com/bytedance/sonic v1.14.2 // indirect - github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect github.com/cloudwego/base64x v0.1.6 // indirect github.com/gabriel-vasile/mimetype v1.4.12 // 
indirect github.com/gin-contrib/sse v1.1.0 // indirect @@ -48,8 +48,8 @@ require ( golang.org/x/net v0.49.0 // indirect golang.org/x/sys v0.40.0 // indirect golang.org/x/text v0.33.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect google.golang.org/grpc v1.78.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/yaml v1.6.0 // indirect diff --git a/plugins/stats/go.sum b/plugins/stats/go.sum index 36e5cfc21..9978feb4f 100644 --- a/plugins/stats/go.sum +++ b/plugins/stats/go.sum @@ -4,10 +4,10 @@ github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYW github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= 
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -91,8 +91,8 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.7 h1:2IJAWTCxZU4BDFiavPjH8MqpA/mam1QyIsjySbZLlRo= -github.com/threatwinds/go-sdk v1.1.7/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/go-sdk v1.1.9 h1:4i8UZczXyGbRJsUEHRgaS2oQ03VTRjh/DYyTtGdSfRA= +github.com/threatwinds/go-sdk v1.1.9/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= @@ -142,10 +142,10 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516 h1:vmC/ws+pLzWjj/gzApyoZuSVrDtF1aod4u/+bbj8hgM= -google.golang.org/genproto/googleapis/api v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:p3MLuOwURrGBRoEyFHBT3GjUwaCQVKeNqqWxlcISGdw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod 
h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:merA0rdPeUV3YIIfHHcH4qBkiQAc1nfCKSI7lB4cV2M= +google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 h1:H86B94AW+VfJWDqFeEbBPhEtHzJwJfTbgE2lZa54ZAQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/plugins/stats/main.go b/plugins/stats/main.go index 54bb11e62..d88e983ce 100644 --- a/plugins/stats/main.go +++ b/plugins/stats/main.go @@ -24,12 +24,13 @@ func main() { statisticsQueue = make(chan map[plugins.Topic]plugins.DataProcessingMessage, runtime.NumCPU()*100) statsMap = make(map[plugins.Topic]map[string]map[string]int64) - pCfg := plugins.PluginCfg("org.opensearch", false) + pCfg := plugins.PluginCfg("org.opensearch") osUrl := pCfg.Get("opensearch").String() err := sdkos.Connect([]string{osUrl}, "", "") if err != nil { _ = catcher.Error("failed when connecting to OpenSearch", err, map[string]any{"process": "plugin_com.utmstack.stats"}) + time.Sleep(5 * time.Second) os.Exit(1) } @@ -55,6 +56,7 @@ func main() { _ = catcher.Error("failed to start notification plugin", err, map[string]any{ "process": "plugin_com.utmstack.stats", }) + time.Sleep(5 * time.Second) os.Exit(1) }