diff --git a/.github/workflows/v11-deployment-pipeline.yml b/.github/workflows/v11-deployment-pipeline.yml index 538de44a3..93cb8d925 100644 --- a/.github/workflows/v11-deployment-pipeline.yml +++ b/.github/workflows/v11-deployment-pipeline.yml @@ -310,18 +310,29 @@ jobs: - name: Check out code into the right branch uses: actions/checkout@v4 - - name: Build UTMStack Collector + - name: Build UTMStack Collectors run: | echo "Building UTMStack Collector..." cd ${{ github.workspace }}/utmstack-collector GOOS=linux GOARCH=amd64 go build -o utmstack_collector -v -ldflags "-X 'github.com/utmstack/UTMStack/utmstack-collector/config.REPLACE_KEY=${{ secrets.AGENT_SECRET_PREFIX }}'" . + echo "Building UTMStack AS400 Collector..." + + cd ${{ github.workspace }}/as400 + GOOS=linux GOARCH=amd64 go build -o utmstack_as400_collector_service -v -ldflags "-X 'github.com/utmstack/UTMStack/as400/config.REPLACE_KEY=${{ secrets.AGENT_SECRET_PREFIX }}'" . + + cd ${{ github.workspace }}/as400/updater + GOOS=linux GOARCH=amd64 go build -o utmstack_as400_updater_service -v . + - name: Upload collector binary as artifact uses: actions/upload-artifact@v4 with: - name: utmstack-collector - path: ${{ github.workspace }}/utmstack-collector/utmstack_collector + name: utmstack-collectors + path: | + ${{ github.workspace }}/utmstack-collector/utmstack_collector + ${{ github.workspace }}/as400/utmstack_as400_collector_service + ${{ github.workspace }}/as400/updater/utmstack_as400_updater_service retention-days: 1 build_agent_manager: @@ -339,10 +350,10 @@ jobs: name: signed-agents path: ${{ github.workspace }}/agent - - name: Download UTMStack Collector from artifacts + - name: Download UTMStack Collectors from artifacts uses: actions/download-artifact@v4 with: - name: utmstack-collector + name: utmstack-collectors path: ${{ github.workspace }}/utmstack-collector - name: Download signed macOS agents from artifacts @@ -357,12 +368,16 @@ jobs: GOOS=linux GOARCH=amd64 go build -o agent-manager -v . 
mkdir -p ./dependencies/collector - curl -sSL "https://storage.googleapis.com/utmstack-updates/dependencies/collector/linux-as400-collector.zip" -o ./dependencies/collector/linux-as400-collector.zip - curl -sSL "https://storage.googleapis.com/utmstack-updates/dependencies/collector/windows-as400-collector.zip" -o ./dependencies/collector/windows-as400-collector.zip - - cp "${{ github.workspace }}/utmstack-collector/utmstack_collector" ./dependencies/collector/ + cp "${{ github.workspace }}/utmstack-collector/utmstack-collector/utmstack_collector" ./dependencies/collector/ cp "${{ github.workspace }}/utmstack-collector/version.json" ./dependencies/collector/ + mkdir -p ./dependencies/collector/as400 + curl -sSL "https://storage.googleapis.com/utmstack-updates/dependencies/collector/as400-collector.jar" -o ./dependencies/collector/as400/as400-collector.jar + + cp "${{ github.workspace }}/as400/version.json" ./dependencies/collector/as400/ + cp "${{ github.workspace }}/utmstack-collector/as400/utmstack_as400_collector_service" ./dependencies/collector/as400/ + cp "${{ github.workspace }}/utmstack-collector/as400/updater/utmstack_as400_updater_service" ./dependencies/collector/as400/ + mkdir -p ./dependencies/agent/ # Linux agents @@ -405,6 +420,7 @@ jobs: with: context: ./agent-manager push: true + provenance: false tags: ghcr.io/utmstack/utmstack/agent-manager:${{ needs.setup_deployment.outputs.tag }} build_event_processor: @@ -460,6 +476,7 @@ jobs: context: . 
file: ./event_processor.Dockerfile push: true + provenance: false tags: ghcr.io/utmstack/utmstack/eventprocessor:${{ needs.setup_deployment.outputs.tag }} build-args: | BASE_IMAGE=ghcr.io/threatwinds/eventprocessor/base:${{ needs.setup_deployment.outputs.event_processor_tag }} diff --git a/as400/README.md b/as400/README.md new file mode 100644 index 000000000..ad53f9f1f --- /dev/null +++ b/as400/README.md @@ -0,0 +1,50 @@ +# UTMStack AS400 Collector + +Log collection service for IBM AS/400 (iSeries) systems that integrates with the UTMStack platform for security analysis and event correlation. + +## General Description + +UTMStack AS400 Collector is a service written in Go that acts as a bridge between IBM AS/400 systems and the UTMStack platform. The service is installed on an intermediate server, connects to multiple remotely configured AS/400 systems, collects security logs, and transmits them in real-time to the UTMStack server for analysis. + +### Key Features + +- **Multi-Server Collection**: Support for multiple AS/400 systems simultaneously +- **Remote Configuration**: Management of AS/400 servers from the UTMStack panel via gRPC streaming +- **Local Persistence**: Temporary log storage in SQLite to ensure delivery in case of network failures +- **Auto-Updates**: Automatic update service included +- **Automatic Reconnection**: Robust handling of disconnections with automatic retries +- **Configurable Retention**: Control of local database size by retention in megabytes +- **Security**: AES encryption for credentials and TLS communication with the server + +## Requirements + +- **Operating System**: Linux (recommended) +- **Connectivity**: Network access to: + - UTMStack server (ports 9000, 9001, 50051) + - AS/400 systems to monitor +- **Java**: Installed automatically during installation +- **Privileges**: Administrator/root permissions to install the service + +### Installation Process + +1. Verify connectivity with the UTMStack server +2. 
Download dependencies (collector Java JAR, updater) +3. Install Java Runtime if necessary +4. Register the collector with UTMStack's Agent Manager +5. Create and enable the system service +6. Install the auto-update service + +## Configuration of AS/400 Servers + +Configuration of AS/400 servers to monitor is performed **from the UTMStack panel**, not locally. The collector automatically receives configuration. + +### Parameters per Server + +- **Tenant**: Identifier name of the group/server +- **Hostname**: IP address or hostname of the AS/400 +- **User ID**: Connection user to the AS/400 +- **Password**: Password (automatically encrypted) + +## License + +This project is part of UTMStack. Consult the main project license for more information. \ No newline at end of file diff --git a/as400/agent/collector.pb.go b/as400/agent/collector.pb.go new file mode 100644 index 000000000..bcf8d63b7 --- /dev/null +++ b/as400/agent/collector.pb.go @@ -0,0 +1,858 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.36.9 +// protoc v3.21.12 +// source: collector.proto + +package agent + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" + unsafe "unsafe" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CollectorModule int32 + +const ( + CollectorModule_AS_400 CollectorModule = 0 + CollectorModule_UTMSTACK CollectorModule = 1 +) + +// Enum value maps for CollectorModule. 
+var ( + CollectorModule_name = map[int32]string{ + 0: "AS_400", + 1: "UTMSTACK", + } + CollectorModule_value = map[string]int32{ + "AS_400": 0, + "UTMSTACK": 1, + } +) + +func (x CollectorModule) Enum() *CollectorModule { + p := new(CollectorModule) + *p = x + return p +} + +func (x CollectorModule) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CollectorModule) Descriptor() protoreflect.EnumDescriptor { + return file_collector_proto_enumTypes[0].Descriptor() +} + +func (CollectorModule) Type() protoreflect.EnumType { + return &file_collector_proto_enumTypes[0] +} + +func (x CollectorModule) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CollectorModule.Descriptor instead. +func (CollectorModule) EnumDescriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{0} +} + +type RegisterRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Ip string `protobuf:"bytes,1,opt,name=ip,proto3" json:"ip,omitempty"` + Hostname string `protobuf:"bytes,2,opt,name=hostname,proto3" json:"hostname,omitempty"` + Version string `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` + Collector CollectorModule `protobuf:"varint,4,opt,name=collector,proto3,enum=agent.CollectorModule" json:"collector,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *RegisterRequest) Reset() { + *x = RegisterRequest{} + mi := &file_collector_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RegisterRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RegisterRequest) ProtoMessage() {} + +func (x *RegisterRequest) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RegisterRequest.ProtoReflect.Descriptor instead. +func (*RegisterRequest) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{0} +} + +func (x *RegisterRequest) GetIp() string { + if x != nil { + return x.Ip + } + return "" +} + +func (x *RegisterRequest) GetHostname() string { + if x != nil { + return x.Hostname + } + return "" +} + +func (x *RegisterRequest) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *RegisterRequest) GetCollector() CollectorModule { + if x != nil { + return x.Collector + } + return CollectorModule_AS_400 +} + +type ListCollectorResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Rows []*Collector `protobuf:"bytes,1,rep,name=rows,proto3" json:"rows,omitempty"` + Total int32 `protobuf:"varint,2,opt,name=total,proto3" json:"total,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListCollectorResponse) Reset() { + *x = ListCollectorResponse{} + mi := &file_collector_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListCollectorResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListCollectorResponse) ProtoMessage() {} + +func (x *ListCollectorResponse) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListCollectorResponse.ProtoReflect.Descriptor instead. 
+func (*ListCollectorResponse) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{1} +} + +func (x *ListCollectorResponse) GetRows() []*Collector { + if x != nil { + return x.Rows + } + return nil +} + +func (x *ListCollectorResponse) GetTotal() int32 { + if x != nil { + return x.Total + } + return 0 +} + +type Collector struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + Status Status `protobuf:"varint,2,opt,name=status,proto3,enum=agent.Status" json:"status,omitempty"` + CollectorKey string `protobuf:"bytes,3,opt,name=collector_key,json=collectorKey,proto3" json:"collector_key,omitempty"` + Ip string `protobuf:"bytes,4,opt,name=ip,proto3" json:"ip,omitempty"` + Hostname string `protobuf:"bytes,5,opt,name=hostname,proto3" json:"hostname,omitempty"` + Version string `protobuf:"bytes,6,opt,name=version,proto3" json:"version,omitempty"` + Module CollectorModule `protobuf:"varint,7,opt,name=module,proto3,enum=agent.CollectorModule" json:"module,omitempty"` + LastSeen string `protobuf:"bytes,8,opt,name=last_seen,json=lastSeen,proto3" json:"last_seen,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Collector) Reset() { + *x = Collector{} + mi := &file_collector_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Collector) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Collector) ProtoMessage() {} + +func (x *Collector) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Collector.ProtoReflect.Descriptor instead. 
+func (*Collector) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{2} +} + +func (x *Collector) GetId() int32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Collector) GetStatus() Status { + if x != nil { + return x.Status + } + return Status_ONLINE +} + +func (x *Collector) GetCollectorKey() string { + if x != nil { + return x.CollectorKey + } + return "" +} + +func (x *Collector) GetIp() string { + if x != nil { + return x.Ip + } + return "" +} + +func (x *Collector) GetHostname() string { + if x != nil { + return x.Hostname + } + return "" +} + +func (x *Collector) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *Collector) GetModule() CollectorModule { + if x != nil { + return x.Module + } + return CollectorModule_AS_400 +} + +func (x *Collector) GetLastSeen() string { + if x != nil { + return x.LastSeen + } + return "" +} + +type CollectorMessages struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to StreamMessage: + // + // *CollectorMessages_Config + // *CollectorMessages_Result + StreamMessage isCollectorMessages_StreamMessage `protobuf_oneof:"stream_message"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CollectorMessages) Reset() { + *x = CollectorMessages{} + mi := &file_collector_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CollectorMessages) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CollectorMessages) ProtoMessage() {} + +func (x *CollectorMessages) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CollectorMessages.ProtoReflect.Descriptor instead. 
+func (*CollectorMessages) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{3} +} + +func (x *CollectorMessages) GetStreamMessage() isCollectorMessages_StreamMessage { + if x != nil { + return x.StreamMessage + } + return nil +} + +func (x *CollectorMessages) GetConfig() *CollectorConfig { + if x != nil { + if x, ok := x.StreamMessage.(*CollectorMessages_Config); ok { + return x.Config + } + } + return nil +} + +func (x *CollectorMessages) GetResult() *ConfigKnowledge { + if x != nil { + if x, ok := x.StreamMessage.(*CollectorMessages_Result); ok { + return x.Result + } + } + return nil +} + +type isCollectorMessages_StreamMessage interface { + isCollectorMessages_StreamMessage() +} + +type CollectorMessages_Config struct { + Config *CollectorConfig `protobuf:"bytes,1,opt,name=config,proto3,oneof"` +} + +type CollectorMessages_Result struct { + Result *ConfigKnowledge `protobuf:"bytes,2,opt,name=result,proto3,oneof"` +} + +func (*CollectorMessages_Config) isCollectorMessages_StreamMessage() {} + +func (*CollectorMessages_Result) isCollectorMessages_StreamMessage() {} + +type CollectorConfig struct { + state protoimpl.MessageState `protogen:"open.v1"` + CollectorId string `protobuf:"bytes,1,opt,name=collector_id,json=collectorId,proto3" json:"collector_id,omitempty"` + Groups []*CollectorConfigGroup `protobuf:"bytes,2,rep,name=groups,proto3" json:"groups,omitempty"` + RequestId string `protobuf:"bytes,3,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CollectorConfig) Reset() { + *x = CollectorConfig{} + mi := &file_collector_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CollectorConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CollectorConfig) ProtoMessage() {} + +func (x *CollectorConfig) ProtoReflect() protoreflect.Message { 
+ mi := &file_collector_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CollectorConfig.ProtoReflect.Descriptor instead. +func (*CollectorConfig) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{4} +} + +func (x *CollectorConfig) GetCollectorId() string { + if x != nil { + return x.CollectorId + } + return "" +} + +func (x *CollectorConfig) GetGroups() []*CollectorConfigGroup { + if x != nil { + return x.Groups + } + return nil +} + +func (x *CollectorConfig) GetRequestId() string { + if x != nil { + return x.RequestId + } + return "" +} + +type CollectorConfigGroup struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + GroupName string `protobuf:"bytes,2,opt,name=group_name,json=groupName,proto3" json:"group_name,omitempty"` + GroupDescription string `protobuf:"bytes,3,opt,name=group_description,json=groupDescription,proto3" json:"group_description,omitempty"` + Configurations []*CollectorGroupConfigurations `protobuf:"bytes,4,rep,name=configurations,proto3" json:"configurations,omitempty"` + CollectorId int32 `protobuf:"varint,5,opt,name=collector_id,json=collectorId,proto3" json:"collector_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CollectorConfigGroup) Reset() { + *x = CollectorConfigGroup{} + mi := &file_collector_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CollectorConfigGroup) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CollectorConfigGroup) ProtoMessage() {} + +func (x *CollectorConfigGroup) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[5] + if x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CollectorConfigGroup.ProtoReflect.Descriptor instead. +func (*CollectorConfigGroup) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{5} +} + +func (x *CollectorConfigGroup) GetId() int32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *CollectorConfigGroup) GetGroupName() string { + if x != nil { + return x.GroupName + } + return "" +} + +func (x *CollectorConfigGroup) GetGroupDescription() string { + if x != nil { + return x.GroupDescription + } + return "" +} + +func (x *CollectorConfigGroup) GetConfigurations() []*CollectorGroupConfigurations { + if x != nil { + return x.Configurations + } + return nil +} + +func (x *CollectorConfigGroup) GetCollectorId() int32 { + if x != nil { + return x.CollectorId + } + return 0 +} + +type CollectorGroupConfigurations struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + GroupId int32 `protobuf:"varint,2,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + ConfKey string `protobuf:"bytes,3,opt,name=conf_key,json=confKey,proto3" json:"conf_key,omitempty"` + ConfValue string `protobuf:"bytes,4,opt,name=conf_value,json=confValue,proto3" json:"conf_value,omitempty"` + ConfName string `protobuf:"bytes,5,opt,name=conf_name,json=confName,proto3" json:"conf_name,omitempty"` + ConfDescription string `protobuf:"bytes,6,opt,name=conf_description,json=confDescription,proto3" json:"conf_description,omitempty"` + ConfDataType string `protobuf:"bytes,7,opt,name=conf_data_type,json=confDataType,proto3" json:"conf_data_type,omitempty"` + ConfRequired bool `protobuf:"varint,8,opt,name=conf_required,json=confRequired,proto3" json:"conf_required,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache 
+} + +func (x *CollectorGroupConfigurations) Reset() { + *x = CollectorGroupConfigurations{} + mi := &file_collector_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CollectorGroupConfigurations) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CollectorGroupConfigurations) ProtoMessage() {} + +func (x *CollectorGroupConfigurations) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CollectorGroupConfigurations.ProtoReflect.Descriptor instead. +func (*CollectorGroupConfigurations) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{6} +} + +func (x *CollectorGroupConfigurations) GetId() int32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *CollectorGroupConfigurations) GetGroupId() int32 { + if x != nil { + return x.GroupId + } + return 0 +} + +func (x *CollectorGroupConfigurations) GetConfKey() string { + if x != nil { + return x.ConfKey + } + return "" +} + +func (x *CollectorGroupConfigurations) GetConfValue() string { + if x != nil { + return x.ConfValue + } + return "" +} + +func (x *CollectorGroupConfigurations) GetConfName() string { + if x != nil { + return x.ConfName + } + return "" +} + +func (x *CollectorGroupConfigurations) GetConfDescription() string { + if x != nil { + return x.ConfDescription + } + return "" +} + +func (x *CollectorGroupConfigurations) GetConfDataType() string { + if x != nil { + return x.ConfDataType + } + return "" +} + +func (x *CollectorGroupConfigurations) GetConfRequired() bool { + if x != nil { + return x.ConfRequired + } + return false +} + +type ConfigKnowledge struct { + state protoimpl.MessageState `protogen:"open.v1"` + Accepted string 
`protobuf:"bytes,1,opt,name=accepted,proto3" json:"accepted,omitempty"` + RequestId string `protobuf:"bytes,2,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ConfigKnowledge) Reset() { + *x = ConfigKnowledge{} + mi := &file_collector_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ConfigKnowledge) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ConfigKnowledge) ProtoMessage() {} + +func (x *ConfigKnowledge) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ConfigKnowledge.ProtoReflect.Descriptor instead. +func (*ConfigKnowledge) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{7} +} + +func (x *ConfigKnowledge) GetAccepted() string { + if x != nil { + return x.Accepted + } + return "" +} + +func (x *ConfigKnowledge) GetRequestId() string { + if x != nil { + return x.RequestId + } + return "" +} + +type ConfigRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Module CollectorModule `protobuf:"varint,1,opt,name=module,proto3,enum=agent.CollectorModule" json:"module,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ConfigRequest) Reset() { + *x = ConfigRequest{} + mi := &file_collector_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ConfigRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ConfigRequest) ProtoMessage() {} + +func (x *ConfigRequest) ProtoReflect() protoreflect.Message { + mi := &file_collector_proto_msgTypes[8] + if x != nil { + 
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ConfigRequest.ProtoReflect.Descriptor instead. +func (*ConfigRequest) Descriptor() ([]byte, []int) { + return file_collector_proto_rawDescGZIP(), []int{8} +} + +func (x *ConfigRequest) GetModule() CollectorModule { + if x != nil { + return x.Module + } + return CollectorModule_AS_400 +} + +var File_collector_proto protoreflect.FileDescriptor + +const file_collector_proto_rawDesc = "" + + "\n" + + "\x0fcollector.proto\x12\x05agent\x1a\fcommon.proto\"\x8d\x01\n" + + "\x0fRegisterRequest\x12\x0e\n" + + "\x02ip\x18\x01 \x01(\tR\x02ip\x12\x1a\n" + + "\bhostname\x18\x02 \x01(\tR\bhostname\x12\x18\n" + + "\aversion\x18\x03 \x01(\tR\aversion\x124\n" + + "\tcollector\x18\x04 \x01(\x0e2\x16.agent.CollectorModuleR\tcollector\"S\n" + + "\x15ListCollectorResponse\x12$\n" + + "\x04rows\x18\x01 \x03(\v2\x10.agent.CollectorR\x04rows\x12\x14\n" + + "\x05total\x18\x02 \x01(\x05R\x05total\"\xfa\x01\n" + + "\tCollector\x12\x0e\n" + + "\x02id\x18\x01 \x01(\x05R\x02id\x12%\n" + + "\x06status\x18\x02 \x01(\x0e2\r.agent.StatusR\x06status\x12#\n" + + "\rcollector_key\x18\x03 \x01(\tR\fcollectorKey\x12\x0e\n" + + "\x02ip\x18\x04 \x01(\tR\x02ip\x12\x1a\n" + + "\bhostname\x18\x05 \x01(\tR\bhostname\x12\x18\n" + + "\aversion\x18\x06 \x01(\tR\aversion\x12.\n" + + "\x06module\x18\a \x01(\x0e2\x16.agent.CollectorModuleR\x06module\x12\x1b\n" + + "\tlast_seen\x18\b \x01(\tR\blastSeen\"\x89\x01\n" + + "\x11CollectorMessages\x120\n" + + "\x06config\x18\x01 \x01(\v2\x16.agent.CollectorConfigH\x00R\x06config\x120\n" + + "\x06result\x18\x02 \x01(\v2\x16.agent.ConfigKnowledgeH\x00R\x06resultB\x10\n" + + "\x0estream_message\"\x88\x01\n" + + "\x0fCollectorConfig\x12!\n" + + "\fcollector_id\x18\x01 \x01(\tR\vcollectorId\x123\n" + + "\x06groups\x18\x02 \x03(\v2\x1b.agent.CollectorConfigGroupR\x06groups\x12\x1d\n" + + "\n" + + 
"request_id\x18\x03 \x01(\tR\trequestId\"\xe2\x01\n" + + "\x14CollectorConfigGroup\x12\x0e\n" + + "\x02id\x18\x01 \x01(\x05R\x02id\x12\x1d\n" + + "\n" + + "group_name\x18\x02 \x01(\tR\tgroupName\x12+\n" + + "\x11group_description\x18\x03 \x01(\tR\x10groupDescription\x12K\n" + + "\x0econfigurations\x18\x04 \x03(\v2#.agent.CollectorGroupConfigurationsR\x0econfigurations\x12!\n" + + "\fcollector_id\x18\x05 \x01(\x05R\vcollectorId\"\x96\x02\n" + + "\x1cCollectorGroupConfigurations\x12\x0e\n" + + "\x02id\x18\x01 \x01(\x05R\x02id\x12\x19\n" + + "\bgroup_id\x18\x02 \x01(\x05R\agroupId\x12\x19\n" + + "\bconf_key\x18\x03 \x01(\tR\aconfKey\x12\x1d\n" + + "\n" + + "conf_value\x18\x04 \x01(\tR\tconfValue\x12\x1b\n" + + "\tconf_name\x18\x05 \x01(\tR\bconfName\x12)\n" + + "\x10conf_description\x18\x06 \x01(\tR\x0fconfDescription\x12$\n" + + "\x0econf_data_type\x18\a \x01(\tR\fconfDataType\x12#\n" + + "\rconf_required\x18\b \x01(\bR\fconfRequired\"L\n" + + "\x0fConfigKnowledge\x12\x1a\n" + + "\baccepted\x18\x01 \x01(\tR\baccepted\x12\x1d\n" + + "\n" + + "request_id\x18\x02 \x01(\tR\trequestId\"?\n" + + "\rConfigRequest\x12.\n" + + "\x06module\x18\x01 \x01(\x0e2\x16.agent.CollectorModuleR\x06module*+\n" + + "\x0fCollectorModule\x12\n" + + "\n" + + "\x06AS_400\x10\x00\x12\f\n" + + "\bUTMSTACK\x10\x012\xee\x02\n" + + "\x10CollectorService\x12B\n" + + "\x11RegisterCollector\x12\x16.agent.RegisterRequest\x1a\x13.agent.AuthResponse\"\x00\x12>\n" + + "\x0fDeleteCollector\x12\x14.agent.DeleteRequest\x1a\x13.agent.AuthResponse\"\x00\x12C\n" + + "\rListCollector\x12\x12.agent.ListRequest\x1a\x1c.agent.ListCollectorResponse\"\x00\x12K\n" + + "\x0fCollectorStream\x12\x18.agent.CollectorMessages\x1a\x18.agent.CollectorMessages\"\x00(\x010\x01\x12D\n" + + "\x12GetCollectorConfig\x12\x14.agent.ConfigRequest\x1a\x16.agent.CollectorConfig\"\x002d\n" + + "\x15PanelCollectorService\x12K\n" + + 
"\x17RegisterCollectorConfig\x12\x16.agent.CollectorConfig\x1a\x16.agent.ConfigKnowledge\"\x00B5Z3github.com/utmstack/UTMStack/docker-collector/agentb\x06proto3" + +var ( + file_collector_proto_rawDescOnce sync.Once + file_collector_proto_rawDescData []byte +) + +func file_collector_proto_rawDescGZIP() []byte { + file_collector_proto_rawDescOnce.Do(func() { + file_collector_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_collector_proto_rawDesc), len(file_collector_proto_rawDesc))) + }) + return file_collector_proto_rawDescData +} + +var file_collector_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_collector_proto_msgTypes = make([]protoimpl.MessageInfo, 9) +var file_collector_proto_goTypes = []any{ + (CollectorModule)(0), // 0: agent.CollectorModule + (*RegisterRequest)(nil), // 1: agent.RegisterRequest + (*ListCollectorResponse)(nil), // 2: agent.ListCollectorResponse + (*Collector)(nil), // 3: agent.Collector + (*CollectorMessages)(nil), // 4: agent.CollectorMessages + (*CollectorConfig)(nil), // 5: agent.CollectorConfig + (*CollectorConfigGroup)(nil), // 6: agent.CollectorConfigGroup + (*CollectorGroupConfigurations)(nil), // 7: agent.CollectorGroupConfigurations + (*ConfigKnowledge)(nil), // 8: agent.ConfigKnowledge + (*ConfigRequest)(nil), // 9: agent.ConfigRequest + (Status)(0), // 10: agent.Status + (*DeleteRequest)(nil), // 11: agent.DeleteRequest + (*ListRequest)(nil), // 12: agent.ListRequest + (*AuthResponse)(nil), // 13: agent.AuthResponse +} +var file_collector_proto_depIdxs = []int32{ + 0, // 0: agent.RegisterRequest.collector:type_name -> agent.CollectorModule + 3, // 1: agent.ListCollectorResponse.rows:type_name -> agent.Collector + 10, // 2: agent.Collector.status:type_name -> agent.Status + 0, // 3: agent.Collector.module:type_name -> agent.CollectorModule + 5, // 4: agent.CollectorMessages.config:type_name -> agent.CollectorConfig + 8, // 5: agent.CollectorMessages.result:type_name -> 
agent.ConfigKnowledge + 6, // 6: agent.CollectorConfig.groups:type_name -> agent.CollectorConfigGroup + 7, // 7: agent.CollectorConfigGroup.configurations:type_name -> agent.CollectorGroupConfigurations + 0, // 8: agent.ConfigRequest.module:type_name -> agent.CollectorModule + 1, // 9: agent.CollectorService.RegisterCollector:input_type -> agent.RegisterRequest + 11, // 10: agent.CollectorService.DeleteCollector:input_type -> agent.DeleteRequest + 12, // 11: agent.CollectorService.ListCollector:input_type -> agent.ListRequest + 4, // 12: agent.CollectorService.CollectorStream:input_type -> agent.CollectorMessages + 9, // 13: agent.CollectorService.GetCollectorConfig:input_type -> agent.ConfigRequest + 5, // 14: agent.PanelCollectorService.RegisterCollectorConfig:input_type -> agent.CollectorConfig + 13, // 15: agent.CollectorService.RegisterCollector:output_type -> agent.AuthResponse + 13, // 16: agent.CollectorService.DeleteCollector:output_type -> agent.AuthResponse + 2, // 17: agent.CollectorService.ListCollector:output_type -> agent.ListCollectorResponse + 4, // 18: agent.CollectorService.CollectorStream:output_type -> agent.CollectorMessages + 5, // 19: agent.CollectorService.GetCollectorConfig:output_type -> agent.CollectorConfig + 8, // 20: agent.PanelCollectorService.RegisterCollectorConfig:output_type -> agent.ConfigKnowledge + 15, // [15:21] is the sub-list for method output_type + 9, // [9:15] is the sub-list for method input_type + 9, // [9:9] is the sub-list for extension type_name + 9, // [9:9] is the sub-list for extension extendee + 0, // [0:9] is the sub-list for field type_name +} + +func init() { file_collector_proto_init() } +func file_collector_proto_init() { + if File_collector_proto != nil { + return + } + file_common_proto_init() + file_collector_proto_msgTypes[3].OneofWrappers = []any{ + (*CollectorMessages_Config)(nil), + (*CollectorMessages_Result)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ 
+ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_collector_proto_rawDesc), len(file_collector_proto_rawDesc)), + NumEnums: 1, + NumMessages: 9, + NumExtensions: 0, + NumServices: 2, + }, + GoTypes: file_collector_proto_goTypes, + DependencyIndexes: file_collector_proto_depIdxs, + EnumInfos: file_collector_proto_enumTypes, + MessageInfos: file_collector_proto_msgTypes, + }.Build() + File_collector_proto = out.File + file_collector_proto_goTypes = nil + file_collector_proto_depIdxs = nil +} diff --git a/as400/agent/collector_grpc.pb.go b/as400/agent/collector_grpc.pb.go new file mode 100644 index 000000000..a924c7361 --- /dev/null +++ b/as400/agent/collector_grpc.pb.go @@ -0,0 +1,370 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v3.21.12 +// source: collector.proto + +package agent + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + CollectorService_RegisterCollector_FullMethodName = "/agent.CollectorService/RegisterCollector" + CollectorService_DeleteCollector_FullMethodName = "/agent.CollectorService/DeleteCollector" + CollectorService_ListCollector_FullMethodName = "/agent.CollectorService/ListCollector" + CollectorService_CollectorStream_FullMethodName = "/agent.CollectorService/CollectorStream" + CollectorService_GetCollectorConfig_FullMethodName = "/agent.CollectorService/GetCollectorConfig" +) + +// CollectorServiceClient is the client API for CollectorService service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type CollectorServiceClient interface { + RegisterCollector(ctx context.Context, in *RegisterRequest, opts ...grpc.CallOption) (*AuthResponse, error) + DeleteCollector(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*AuthResponse, error) + ListCollector(ctx context.Context, in *ListRequest, opts ...grpc.CallOption) (*ListCollectorResponse, error) + CollectorStream(ctx context.Context, opts ...grpc.CallOption) (grpc.BidiStreamingClient[CollectorMessages, CollectorMessages], error) + GetCollectorConfig(ctx context.Context, in *ConfigRequest, opts ...grpc.CallOption) (*CollectorConfig, error) +} + +type collectorServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewCollectorServiceClient(cc grpc.ClientConnInterface) CollectorServiceClient { + return &collectorServiceClient{cc} +} + +func (c *collectorServiceClient) RegisterCollector(ctx context.Context, in *RegisterRequest, opts ...grpc.CallOption) (*AuthResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(AuthResponse) + err := c.cc.Invoke(ctx, CollectorService_RegisterCollector_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *collectorServiceClient) DeleteCollector(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*AuthResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(AuthResponse) + err := c.cc.Invoke(ctx, CollectorService_DeleteCollector_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *collectorServiceClient) ListCollector(ctx context.Context, in *ListRequest, opts ...grpc.CallOption) (*ListCollectorResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) 
+ out := new(ListCollectorResponse) + err := c.cc.Invoke(ctx, CollectorService_ListCollector_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *collectorServiceClient) CollectorStream(ctx context.Context, opts ...grpc.CallOption) (grpc.BidiStreamingClient[CollectorMessages, CollectorMessages], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &CollectorService_ServiceDesc.Streams[0], CollectorService_CollectorStream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[CollectorMessages, CollectorMessages]{ClientStream: stream} + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type CollectorService_CollectorStreamClient = grpc.BidiStreamingClient[CollectorMessages, CollectorMessages] + +func (c *collectorServiceClient) GetCollectorConfig(ctx context.Context, in *ConfigRequest, opts ...grpc.CallOption) (*CollectorConfig, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(CollectorConfig) + err := c.cc.Invoke(ctx, CollectorService_GetCollectorConfig_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CollectorServiceServer is the server API for CollectorService service. +// All implementations must embed UnimplementedCollectorServiceServer +// for forward compatibility. 
+type CollectorServiceServer interface { + RegisterCollector(context.Context, *RegisterRequest) (*AuthResponse, error) + DeleteCollector(context.Context, *DeleteRequest) (*AuthResponse, error) + ListCollector(context.Context, *ListRequest) (*ListCollectorResponse, error) + CollectorStream(grpc.BidiStreamingServer[CollectorMessages, CollectorMessages]) error + GetCollectorConfig(context.Context, *ConfigRequest) (*CollectorConfig, error) + mustEmbedUnimplementedCollectorServiceServer() +} + +// UnimplementedCollectorServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedCollectorServiceServer struct{} + +func (UnimplementedCollectorServiceServer) RegisterCollector(context.Context, *RegisterRequest) (*AuthResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RegisterCollector not implemented") +} +func (UnimplementedCollectorServiceServer) DeleteCollector(context.Context, *DeleteRequest) (*AuthResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteCollector not implemented") +} +func (UnimplementedCollectorServiceServer) ListCollector(context.Context, *ListRequest) (*ListCollectorResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListCollector not implemented") +} +func (UnimplementedCollectorServiceServer) CollectorStream(grpc.BidiStreamingServer[CollectorMessages, CollectorMessages]) error { + return status.Errorf(codes.Unimplemented, "method CollectorStream not implemented") +} +func (UnimplementedCollectorServiceServer) GetCollectorConfig(context.Context, *ConfigRequest) (*CollectorConfig, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetCollectorConfig not implemented") +} +func (UnimplementedCollectorServiceServer) mustEmbedUnimplementedCollectorServiceServer() {} +func 
(UnimplementedCollectorServiceServer) testEmbeddedByValue() {} + +// UnsafeCollectorServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to CollectorServiceServer will +// result in compilation errors. +type UnsafeCollectorServiceServer interface { + mustEmbedUnimplementedCollectorServiceServer() +} + +func RegisterCollectorServiceServer(s grpc.ServiceRegistrar, srv CollectorServiceServer) { + // If the following call pancis, it indicates UnimplementedCollectorServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&CollectorService_ServiceDesc, srv) +} + +func _CollectorService_RegisterCollector_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RegisterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CollectorServiceServer).RegisterCollector(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CollectorService_RegisterCollector_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CollectorServiceServer).RegisterCollector(ctx, req.(*RegisterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CollectorService_DeleteCollector_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CollectorServiceServer).DeleteCollector(ctx, in) + } + info 
:= &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CollectorService_DeleteCollector_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CollectorServiceServer).DeleteCollector(ctx, req.(*DeleteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CollectorService_ListCollector_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CollectorServiceServer).ListCollector(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CollectorService_ListCollector_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CollectorServiceServer).ListCollector(ctx, req.(*ListRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CollectorService_CollectorStream_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(CollectorServiceServer).CollectorStream(&grpc.GenericServerStream[CollectorMessages, CollectorMessages]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. 
+type CollectorService_CollectorStreamServer = grpc.BidiStreamingServer[CollectorMessages, CollectorMessages] + +func _CollectorService_GetCollectorConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CollectorServiceServer).GetCollectorConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CollectorService_GetCollectorConfig_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CollectorServiceServer).GetCollectorConfig(ctx, req.(*ConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// CollectorService_ServiceDesc is the grpc.ServiceDesc for CollectorService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var CollectorService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "agent.CollectorService", + HandlerType: (*CollectorServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "RegisterCollector", + Handler: _CollectorService_RegisterCollector_Handler, + }, + { + MethodName: "DeleteCollector", + Handler: _CollectorService_DeleteCollector_Handler, + }, + { + MethodName: "ListCollector", + Handler: _CollectorService_ListCollector_Handler, + }, + { + MethodName: "GetCollectorConfig", + Handler: _CollectorService_GetCollectorConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "CollectorStream", + Handler: _CollectorService_CollectorStream_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "collector.proto", +} + +const ( + PanelCollectorService_RegisterCollectorConfig_FullMethodName = "/agent.PanelCollectorService/RegisterCollectorConfig" +) + +// PanelCollectorServiceClient is the client API for 
PanelCollectorService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type PanelCollectorServiceClient interface { + RegisterCollectorConfig(ctx context.Context, in *CollectorConfig, opts ...grpc.CallOption) (*ConfigKnowledge, error) +} + +type panelCollectorServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewPanelCollectorServiceClient(cc grpc.ClientConnInterface) PanelCollectorServiceClient { + return &panelCollectorServiceClient{cc} +} + +func (c *panelCollectorServiceClient) RegisterCollectorConfig(ctx context.Context, in *CollectorConfig, opts ...grpc.CallOption) (*ConfigKnowledge, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ConfigKnowledge) + err := c.cc.Invoke(ctx, PanelCollectorService_RegisterCollectorConfig_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PanelCollectorServiceServer is the server API for PanelCollectorService service. +// All implementations must embed UnimplementedPanelCollectorServiceServer +// for forward compatibility. +type PanelCollectorServiceServer interface { + RegisterCollectorConfig(context.Context, *CollectorConfig) (*ConfigKnowledge, error) + mustEmbedUnimplementedPanelCollectorServiceServer() +} + +// UnimplementedPanelCollectorServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedPanelCollectorServiceServer struct{} + +func (UnimplementedPanelCollectorServiceServer) RegisterCollectorConfig(context.Context, *CollectorConfig) (*ConfigKnowledge, error) { + return nil, status.Errorf(codes.Unimplemented, "method RegisterCollectorConfig not implemented") +} +func (UnimplementedPanelCollectorServiceServer) mustEmbedUnimplementedPanelCollectorServiceServer() {} +func (UnimplementedPanelCollectorServiceServer) testEmbeddedByValue() {} + +// UnsafePanelCollectorServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to PanelCollectorServiceServer will +// result in compilation errors. +type UnsafePanelCollectorServiceServer interface { + mustEmbedUnimplementedPanelCollectorServiceServer() +} + +func RegisterPanelCollectorServiceServer(s grpc.ServiceRegistrar, srv PanelCollectorServiceServer) { + // If the following call pancis, it indicates UnimplementedPanelCollectorServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&PanelCollectorService_ServiceDesc, srv) +} + +func _PanelCollectorService_RegisterCollectorConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CollectorConfig) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PanelCollectorServiceServer).RegisterCollectorConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PanelCollectorService_RegisterCollectorConfig_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PanelCollectorServiceServer).RegisterCollectorConfig(ctx, req.(*CollectorConfig)) + } + return interceptor(ctx, in, info, handler) +} + +// PanelCollectorService_ServiceDesc is the grpc.ServiceDesc for PanelCollectorService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var PanelCollectorService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "agent.PanelCollectorService", + HandlerType: (*PanelCollectorServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "RegisterCollectorConfig", + Handler: _PanelCollectorService_RegisterCollectorConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "collector.proto", +} diff --git a/as400/agent/common.pb.go b/as400/agent/common.pb.go new file mode 100644 index 000000000..0d4ccf71d --- /dev/null +++ b/as400/agent/common.pb.go @@ -0,0 +1,413 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.12 +// source: common.proto + +package agent + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Status int32 + +const ( + Status_ONLINE Status = 0 + Status_OFFLINE Status = 1 + Status_UNKNOWN Status = 2 +) + +// Enum value maps for Status. +var ( + Status_name = map[int32]string{ + 0: "ONLINE", + 1: "OFFLINE", + 2: "UNKNOWN", + } + Status_value = map[string]int32{ + "ONLINE": 0, + "OFFLINE": 1, + "UNKNOWN": 2, + } +) + +func (x Status) Enum() *Status { + p := new(Status) + *p = x + return p +} + +func (x Status) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Status) Descriptor() protoreflect.EnumDescriptor { + return file_common_proto_enumTypes[0].Descriptor() +} + +func (Status) Type() protoreflect.EnumType { + return &file_common_proto_enumTypes[0] +} + +func (x Status) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Status.Descriptor instead. +func (Status) EnumDescriptor() ([]byte, []int) { + return file_common_proto_rawDescGZIP(), []int{0} +} + +type ConnectorType int32 + +const ( + ConnectorType_AGENT ConnectorType = 0 + ConnectorType_COLLECTOR ConnectorType = 1 +) + +// Enum value maps for ConnectorType. 
+var ( + ConnectorType_name = map[int32]string{ + 0: "AGENT", + 1: "COLLECTOR", + } + ConnectorType_value = map[string]int32{ + "AGENT": 0, + "COLLECTOR": 1, + } +) + +func (x ConnectorType) Enum() *ConnectorType { + p := new(ConnectorType) + *p = x + return p +} + +func (x ConnectorType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ConnectorType) Descriptor() protoreflect.EnumDescriptor { + return file_common_proto_enumTypes[1].Descriptor() +} + +func (ConnectorType) Type() protoreflect.EnumType { + return &file_common_proto_enumTypes[1] +} + +func (x ConnectorType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ConnectorType.Descriptor instead. +func (ConnectorType) EnumDescriptor() ([]byte, []int) { + return file_common_proto_rawDescGZIP(), []int{1} +} + +type ListRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PageNumber int32 `protobuf:"varint,1,opt,name=page_number,json=pageNumber,proto3" json:"page_number,omitempty"` + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + SearchQuery string `protobuf:"bytes,3,opt,name=search_query,json=searchQuery,proto3" json:"search_query,omitempty"` + SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` +} + +func (x *ListRequest) Reset() { + *x = ListRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_common_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListRequest) ProtoMessage() {} + +func (x *ListRequest) ProtoReflect() protoreflect.Message { + mi := &file_common_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + 
if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListRequest.ProtoReflect.Descriptor instead. +func (*ListRequest) Descriptor() ([]byte, []int) { + return file_common_proto_rawDescGZIP(), []int{0} +} + +func (x *ListRequest) GetPageNumber() int32 { + if x != nil { + return x.PageNumber + } + return 0 +} + +func (x *ListRequest) GetPageSize() int32 { + if x != nil { + return x.PageSize + } + return 0 +} + +func (x *ListRequest) GetSearchQuery() string { + if x != nil { + return x.SearchQuery + } + return "" +} + +func (x *ListRequest) GetSortBy() string { + if x != nil { + return x.SortBy + } + return "" +} + +type AuthResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` +} + +func (x *AuthResponse) Reset() { + *x = AuthResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_common_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AuthResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AuthResponse) ProtoMessage() {} + +func (x *AuthResponse) ProtoReflect() protoreflect.Message { + mi := &file_common_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AuthResponse.ProtoReflect.Descriptor instead. 
+func (*AuthResponse) Descriptor() ([]byte, []int) { + return file_common_proto_rawDescGZIP(), []int{1} +} + +func (x *AuthResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *AuthResponse) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +type DeleteRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + DeletedBy string `protobuf:"bytes,1,opt,name=deleted_by,json=deletedBy,proto3" json:"deleted_by,omitempty"` +} + +func (x *DeleteRequest) Reset() { + *x = DeleteRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_common_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteRequest) ProtoMessage() {} + +func (x *DeleteRequest) ProtoReflect() protoreflect.Message { + mi := &file_common_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteRequest) Descriptor() ([]byte, []int) { + return file_common_proto_rawDescGZIP(), []int{2} +} + +func (x *DeleteRequest) GetDeletedBy() string { + if x != nil { + return x.DeletedBy + } + return "" +} + +var File_common_proto protoreflect.FileDescriptor + +var file_common_proto_rawDesc = []byte{ + 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x22, 0x87, 0x01, 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x6e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x61, 0x67, 0x65, + 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, + 0x69, 0x7a, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x5f, 0x71, 0x75, + 0x65, 0x72, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x65, 0x61, 0x72, 0x63, + 0x68, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, + 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x22, + 0x30, 0x0a, 0x0c, 0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x22, 0x2e, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x62, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x42, + 0x79, 0x2a, 0x2e, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0a, 0x0a, 0x06, 0x4f, + 0x4e, 0x4c, 0x49, 
0x4e, 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4f, 0x46, 0x46, 0x4c, 0x49, + 0x4e, 0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, + 0x02, 0x2a, 0x29, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x47, 0x45, 0x4e, 0x54, 0x10, 0x00, 0x12, 0x0d, 0x0a, + 0x09, 0x43, 0x4f, 0x4c, 0x4c, 0x45, 0x43, 0x54, 0x4f, 0x52, 0x10, 0x01, 0x42, 0x32, 0x5a, 0x30, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x75, 0x74, 0x6d, 0x73, 0x74, + 0x61, 0x63, 0x6b, 0x2f, 0x55, 0x54, 0x4d, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x2f, 0x61, 0x67, 0x65, + 0x6e, 0x74, 0x2d, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x2f, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_common_proto_rawDescOnce sync.Once + file_common_proto_rawDescData = file_common_proto_rawDesc +) + +func file_common_proto_rawDescGZIP() []byte { + file_common_proto_rawDescOnce.Do(func() { + file_common_proto_rawDescData = protoimpl.X.CompressGZIP(file_common_proto_rawDescData) + }) + return file_common_proto_rawDescData +} + +var file_common_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_common_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_common_proto_goTypes = []interface{}{ + (Status)(0), // 0: agent.Status + (ConnectorType)(0), // 1: agent.ConnectorType + (*ListRequest)(nil), // 2: agent.ListRequest + (*AuthResponse)(nil), // 3: agent.AuthResponse + (*DeleteRequest)(nil), // 4: agent.DeleteRequest +} +var file_common_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_common_proto_init() } +func file_common_proto_init() { + if File_common_proto != nil { + return 
+ } + if !protoimpl.UnsafeEnabled { + file_common_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_common_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AuthResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_common_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_common_proto_rawDesc, + NumEnums: 2, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_common_proto_goTypes, + DependencyIndexes: file_common_proto_depIdxs, + EnumInfos: file_common_proto_enumTypes, + MessageInfos: file_common_proto_msgTypes, + }.Build() + File_common_proto = out.File + file_common_proto_rawDesc = nil + file_common_proto_goTypes = nil + file_common_proto_depIdxs = nil +} diff --git a/as400/agent/delete.go b/as400/agent/delete.go new file mode 100644 index 000000000..2abf27b7e --- /dev/null +++ b/as400/agent/delete.go @@ -0,0 +1,43 @@ +package agent + +import ( + "context" + "os/user" + "strconv" + + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/conn" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc/metadata" +) + +func DeleteAgent(cnf *config.Config) error { + connection, err := conn.GetAgentManagerConnection(cnf) + if err != nil { + return utils.Logger.ErrorF("error connecting to Agent Manager: %v", err) + } + + collectorClient := 
NewCollectorServiceClient(connection) + ctx, cancel := context.WithCancel(context.Background()) + ctx = metadata.AppendToOutgoingContext(ctx, "key", cnf.CollectorKey) + ctx = metadata.AppendToOutgoingContext(ctx, "id", strconv.Itoa(int(cnf.CollectorID))) + ctx = metadata.AppendToOutgoingContext(ctx, "type", "collector") + defer cancel() + + currentUser, err := user.Current() + if err != nil { + return utils.Logger.ErrorF("error getting user: %v", err) + } + + delReq := &DeleteRequest{ + DeletedBy: currentUser.Username, + } + + _, err = collectorClient.DeleteCollector(ctx, delReq) + if err != nil { + utils.Logger.ErrorF("error removing UTMStack AS400 Collector from Agent Manager %v", err) + } + + utils.Logger.LogF(100, "UTMStack AS400 Collector removed successfully from agent manager") + return nil +} diff --git a/as400/agent/ping.pb.go b/as400/agent/ping.pb.go new file mode 100644 index 000000000..21ddaf763 --- /dev/null +++ b/as400/agent/ping.pb.go @@ -0,0 +1,218 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.12 +// source: ping.proto + +package agent + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type PingRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type ConnectorType `protobuf:"varint,1,opt,name=type,proto3,enum=agent.ConnectorType" json:"type,omitempty"` +} + +func (x *PingRequest) Reset() { + *x = PingRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ping_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PingRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PingRequest) ProtoMessage() {} + +func (x *PingRequest) ProtoReflect() protoreflect.Message { + mi := &file_ping_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PingRequest.ProtoReflect.Descriptor instead. 
+func (*PingRequest) Descriptor() ([]byte, []int) { + return file_ping_proto_rawDescGZIP(), []int{0} +} + +func (x *PingRequest) GetType() ConnectorType { + if x != nil { + return x.Type + } + return ConnectorType_AGENT +} + +type PingResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Received string `protobuf:"bytes,1,opt,name=received,proto3" json:"received,omitempty"` +} + +func (x *PingResponse) Reset() { + *x = PingResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ping_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PingResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PingResponse) ProtoMessage() {} + +func (x *PingResponse) ProtoReflect() protoreflect.Message { + mi := &file_ping_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PingResponse.ProtoReflect.Descriptor instead. 
+func (*PingResponse) Descriptor() ([]byte, []int) { + return file_ping_proto_rawDescGZIP(), []int{1} +} + +func (x *PingResponse) GetReceived() string { + if x != nil { + return x.Received + } + return "" +} + +var File_ping_proto protoreflect.FileDescriptor + +var file_ping_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x70, 0x69, 0x6e, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, 0x61, 0x67, + 0x65, 0x6e, 0x74, 0x1a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x22, 0x37, 0x0a, 0x0b, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x28, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x14, + 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x2a, 0x0a, 0x0c, 0x50, 0x69, + 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, + 0x63, 0x65, 0x69, 0x76, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, + 0x63, 0x65, 0x69, 0x76, 0x65, 0x64, 0x32, 0x42, 0x0a, 0x0b, 0x50, 0x69, 0x6e, 0x67, 0x53, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x33, 0x0a, 0x04, 0x50, 0x69, 0x6e, 0x67, 0x12, 0x12, 0x2e, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x13, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x28, 0x01, 0x42, 0x32, 0x5a, 0x30, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x75, 0x74, 0x6d, 0x73, 0x74, 0x61, 0x63, + 0x6b, 0x2f, 0x55, 0x54, 0x4d, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x2f, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x2d, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x2f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_ping_proto_rawDescOnce sync.Once + file_ping_proto_rawDescData = 
file_ping_proto_rawDesc +) + +func file_ping_proto_rawDescGZIP() []byte { + file_ping_proto_rawDescOnce.Do(func() { + file_ping_proto_rawDescData = protoimpl.X.CompressGZIP(file_ping_proto_rawDescData) + }) + return file_ping_proto_rawDescData +} + +var file_ping_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_ping_proto_goTypes = []interface{}{ + (*PingRequest)(nil), // 0: agent.PingRequest + (*PingResponse)(nil), // 1: agent.PingResponse + (ConnectorType)(0), // 2: agent.ConnectorType +} +var file_ping_proto_depIdxs = []int32{ + 2, // 0: agent.PingRequest.type:type_name -> agent.ConnectorType + 0, // 1: agent.PingService.Ping:input_type -> agent.PingRequest + 1, // 2: agent.PingService.Ping:output_type -> agent.PingResponse + 2, // [2:3] is the sub-list for method output_type + 1, // [1:2] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_ping_proto_init() } +func file_ping_proto_init() { + if File_ping_proto != nil { + return + } + file_common_proto_init() + if !protoimpl.UnsafeEnabled { + file_ping_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PingRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ping_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PingResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_ping_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_ping_proto_goTypes, + DependencyIndexes: 
file_ping_proto_depIdxs, + MessageInfos: file_ping_proto_msgTypes, + }.Build() + File_ping_proto = out.File + file_ping_proto_rawDesc = nil + file_ping_proto_goTypes = nil + file_ping_proto_depIdxs = nil +} diff --git a/as400/agent/ping_grpc.pb.go b/as400/agent/ping_grpc.pb.go new file mode 100644 index 000000000..f283dc80a --- /dev/null +++ b/as400/agent/ping_grpc.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v3.21.12 +// source: ping.proto + +package agent + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + PingService_Ping_FullMethodName = "/agent.PingService/Ping" +) + +// PingServiceClient is the client API for PingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type PingServiceClient interface { + Ping(ctx context.Context, opts ...grpc.CallOption) (grpc.ClientStreamingClient[PingRequest, PingResponse], error) +} + +type pingServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewPingServiceClient(cc grpc.ClientConnInterface) PingServiceClient { + return &pingServiceClient{cc} +} + +func (c *pingServiceClient) Ping(ctx context.Context, opts ...grpc.CallOption) (grpc.ClientStreamingClient[PingRequest, PingResponse], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &PingService_ServiceDesc.Streams[0], PingService_Ping_FullMethodName, cOpts...) 
+ if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[PingRequest, PingResponse]{ClientStream: stream} + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type PingService_PingClient = grpc.ClientStreamingClient[PingRequest, PingResponse] + +// PingServiceServer is the server API for PingService service. +// All implementations must embed UnimplementedPingServiceServer +// for forward compatibility. +type PingServiceServer interface { + Ping(grpc.ClientStreamingServer[PingRequest, PingResponse]) error + mustEmbedUnimplementedPingServiceServer() +} + +// UnimplementedPingServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedPingServiceServer struct{} + +func (UnimplementedPingServiceServer) Ping(grpc.ClientStreamingServer[PingRequest, PingResponse]) error { + return status.Errorf(codes.Unimplemented, "method Ping not implemented") +} +func (UnimplementedPingServiceServer) mustEmbedUnimplementedPingServiceServer() {} +func (UnimplementedPingServiceServer) testEmbeddedByValue() {} + +// UnsafePingServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to PingServiceServer will +// result in compilation errors. +type UnsafePingServiceServer interface { + mustEmbedUnimplementedPingServiceServer() +} + +func RegisterPingServiceServer(s grpc.ServiceRegistrar, srv PingServiceServer) { + // If the following call pancis, it indicates UnimplementedPingServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&PingService_ServiceDesc, srv) +} + +func _PingService_Ping_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(PingServiceServer).Ping(&grpc.GenericServerStream[PingRequest, PingResponse]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type PingService_PingServer = grpc.ClientStreamingServer[PingRequest, PingResponse] + +// PingService_ServiceDesc is the grpc.ServiceDesc for PingService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var PingService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "agent.PingService", + HandlerType: (*PingServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Ping", + Handler: _PingService_Ping_Handler, + ClientStreams: true, + }, + }, + Metadata: "ping.proto", +} diff --git a/as400/agent/ping_imp.go b/as400/agent/ping_imp.go new file mode 100644 index 000000000..44cca9b3b --- /dev/null +++ b/as400/agent/ping_imp.go @@ -0,0 +1,90 @@ +package agent + +import ( + "context" + "strings" + "time" + + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/conn" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +var ( + timeToSleep = 10 * time.Second + pingInterval = 15 * time.Second +) + +func StartPing(cnf *config.Config, ctx context.Context) { + var connErrMsgWritten, errorLogged bool + + for { + connection, err := conn.GetAgentManagerConnection(cnf) + if err != nil { + if !connErrMsgWritten { + utils.Logger.ErrorF("error connecting to Agent Manager: %v", err) + connErrMsgWritten = true + } else { + utils.Logger.LogF(100, "error connecting to Agent Manager: %v", err) 
+ } + time.Sleep(timeToSleep) + continue + } + + client := NewPingServiceClient(connection) + stream, err := client.Ping(ctx) + if err != nil { + if !connErrMsgWritten { + utils.Logger.ErrorF("failed to start Ping Stream: %v", err) + connErrMsgWritten = true + } else { + utils.Logger.LogF(100, "failed to start Ping Stream: %v", err) + } + time.Sleep(timeToSleep) + continue + } + + utils.Logger.LogF(100, "Ping Stream started") + connErrMsgWritten = false + + ticker := time.NewTicker(pingInterval) + + for range ticker.C { + err := stream.Send(&PingRequest{Type: ConnectorType_AGENT}) + if err != nil { + if strings.Contains(err.Error(), "EOF") { + utils.Logger.LogF(100, "error sending Ping request: %v", err) + time.Sleep(timeToSleep) + break + } + st, ok := status.FromError(err) + if ok && (st.Code() == codes.Unavailable || st.Code() == codes.Canceled) { + if !errorLogged { + utils.Logger.ErrorF("error sending Ping request: %v", err) + errorLogged = true + } else { + utils.Logger.LogF(100, "error sending Ping request: %v", err) + } + time.Sleep(timeToSleep) + break + } else { + if !errorLogged { + utils.Logger.ErrorF("error sending Ping request: %v", err) + errorLogged = true + } else { + utils.Logger.LogF(100, "error sending Ping request: %v", err) + } + time.Sleep(timeToSleep) + continue + } + } + + errorLogged = false + utils.Logger.LogF(100, "Ping request sent") + } + + ticker.Stop() + } +} diff --git a/as400/agent/register.go b/as400/agent/register.go new file mode 100644 index 000000000..2be2e4362 --- /dev/null +++ b/as400/agent/register.go @@ -0,0 +1,63 @@ +package agent + +import ( + "context" + + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/conn" + "github.com/utmstack/UTMStack/as400/models" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc/metadata" +) + +func RegisterCollector(cnf *config.Config, UTMKey string) error { + connection, err := conn.GetAgentManagerConnection(cnf) + if err != nil { + return 
utils.Logger.ErrorF("error connecting to Agent Manager: %v", err) + } + + collectorClient := NewCollectorServiceClient(connection) + ctx, cancel := context.WithCancel(context.Background()) + ctx = metadata.AppendToOutgoingContext(ctx, "connection-key", UTMKey) + defer cancel() + + ip, err := utils.GetIPAddress() + if err != nil { + return utils.Logger.ErrorF("error getting ip address: %v", err) + } + + osInfo, err := utils.GetOsInfo() + if err != nil { + return utils.Logger.ErrorF("error getting os info: %v", err) + } + + version := models.Version{} + err = utils.ReadJson(config.VersionPath, &version) + if err != nil { + return utils.Logger.ErrorF("error reading version file: %v", err) + } + + request := &RegisterRequest{ + Ip: ip, + Hostname: osInfo.Hostname, + Version: version.Version, + Collector: CollectorModule_AS_400, + } + + utils.Logger.Info("Registering UTMStack AS400 Collector with Agent Manager...") + utils.Logger.Info("Collector Details: IP=%s, Hostname=%s, Version=%s, Module=%s", + ip, osInfo.Hostname, version.Version, CollectorModule_AS_400.String()) + + response, err := collectorClient.RegisterCollector(ctx, request) + if err != nil { + return utils.Logger.ErrorF("failed to register collector: %v", err) + } + + cnf.CollectorID = uint(response.Id) + cnf.CollectorKey = response.Key + + utils.Logger.Info("UTMStack AS400 Collector registered successfully") + utils.Logger.Info("Collector ID: %d", cnf.CollectorID) + + return nil +} diff --git a/as400/agent/uninstall.go b/as400/agent/uninstall.go new file mode 100644 index 000000000..7c1884e3f --- /dev/null +++ b/as400/agent/uninstall.go @@ -0,0 +1,17 @@ +package agent + +import ( + "fmt" + "path/filepath" + + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/utils" +) + +func UninstallAll() error { + err := utils.Execute(filepath.Join(utils.GetMyPath(), fmt.Sprintf(config.ServiceLogFile, "")), utils.GetMyPath(), "uninstall") + if err != nil { + return 
utils.Logger.ErrorF("%v", err) + } + return nil +} diff --git a/as400/collector/as400.go b/as400/collector/as400.go new file mode 100644 index 000000000..b99faa3f6 --- /dev/null +++ b/as400/collector/as400.go @@ -0,0 +1,258 @@ +package collector + +import ( + "bufio" + "context" + "io" + "os" + "os/exec" + "strings" + "sync" + "syscall" + "time" + + "github.com/threatwinds/go-sdk/entities" + "github.com/threatwinds/go-sdk/plugins" + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/logservice" + "github.com/utmstack/UTMStack/as400/utils" +) + +type AS400Collector struct { + configStreamManager *ConfigStreamManager + collectorJarPath string + configFilePath string + currentProcess *exec.Cmd + processMutex sync.Mutex + isRunning bool + ctx context.Context + cancel context.CancelFunc + hostname string +} + +func NewAS400Collector() *AS400Collector { + hostname, err := os.Hostname() + if err != nil { + utils.Logger.ErrorF("error getting hostname: %v", err) + hostname = "unknown" + } + + collector := &AS400Collector{ + collectorJarPath: config.CollectorJarPath, + configFilePath: config.ConfigFilePath, + hostname: hostname, + } + + collector.configStreamManager = NewConfigStreamManager(collector.handleConfigurationChange) + + return collector +} + +func (c *AS400Collector) Start(ctx context.Context, cnf *config.Config) error { + utils.Logger.Info("Starting AS400 Collector...") + + c.ctx, c.cancel = context.WithCancel(ctx) + + if !utils.CheckIfPathExist(c.collectorJarPath) { + return utils.Logger.ErrorF("AS400 collector JAR not found at: %s", c.collectorJarPath) + } + + if utils.CheckIfPathExist(c.configFilePath) { + utils.Logger.Info("Found existing configuration file, starting JAR...") + if err := c.startCollectorProcess(); err != nil { + utils.Logger.ErrorF("Error starting JAR with existing config: %v", err) + } + } + + go c.configStreamManager.Start(cnf, c.ctx) + + utils.Logger.Info("AS400 started, waiting for configuration...") + + 
return nil +} + +func (c *AS400Collector) Stop() error { + utils.Logger.Info("Stopping AS400 Collector...") + + if c.cancel != nil { + c.cancel() + } + + if err := c.stopCollectorProcess(); err != nil { + utils.Logger.ErrorF("Error stopping collector process: %v", err) + } + + utils.Logger.Info("AS400 Collector stopped") + return nil +} + +func (c *AS400Collector) handleConfigurationChange(newConfig *AS400CollectorConfig) { + // No servers configured - stop JAR if running and remove config file + if len(newConfig.Servers) == 0 { + utils.Logger.Info("No servers configured, stopping collector...") + if err := c.stopCollectorProcess(); err != nil { + utils.Logger.ErrorF("Error stopping JAR: %v", err) + } + if utils.CheckIfPathExist(c.configFilePath) { + if err := os.Remove(c.configFilePath); err != nil { + utils.Logger.ErrorF("Error removing config file: %v", err) + } + } + return + } + + if err := c.saveConfig(newConfig); err != nil { + utils.Logger.ErrorF("Error saving configuration: %v", err) + return + } + + if !c.isRunning { + if err := c.startCollectorProcess(); err != nil { + utils.Logger.ErrorF("Error starting JAR: %v", err) + } + } +} + +func (c *AS400Collector) saveConfig(config *AS400CollectorConfig) error { + if err := EncryptPasswords(config); err != nil { + return utils.Logger.ErrorF("error encrypting passwords: %v", err) + } + + if err := utils.WriteJSON(c.configFilePath, config); err != nil { + return utils.Logger.ErrorF("error writing config file: %v", err) + } + + utils.Logger.Info("Configuration saved: %d servers", len(config.Servers)) + return nil +} + +func (c *AS400Collector) startCollectorProcess() error { + c.processMutex.Lock() + defer c.processMutex.Unlock() + + if c.isRunning { + return nil + } + + utils.Logger.Info("Starting AS400 collector JAR...") + + cmd := exec.CommandContext(c.ctx, "java", "-jar", c.collectorJarPath, "RUN") + cmd.Dir = utils.GetMyPath() + cmd.Env = append(os.Environ(), "AS400_SECRET="+config.REPLACE_KEY) + + stdout, 
err := cmd.StdoutPipe() + if err != nil { + return utils.Logger.ErrorF("error creating stdout pipe: %v", err) + } + + stderr, err := cmd.StderrPipe() + if err != nil { + return utils.Logger.ErrorF("error creating stderr pipe: %v", err) + } + + if err := cmd.Start(); err != nil { + return utils.Logger.ErrorF("error starting JAR: %v", err) + } + + c.currentProcess = cmd + c.isRunning = true + + utils.Logger.Info("JAR started (PID: %d)", cmd.Process.Pid) + + go c.processCollectorLogs(stdout) + go c.processCollectorErrors(stderr) + go c.monitorProcess(cmd) + + return nil +} + +func (c *AS400Collector) processCollectorLogs(stdout io.ReadCloser) { + scanner := bufio.NewScanner(stdout) + for scanner.Scan() { + logLine := scanner.Text() + + validatedLog, _, err := entities.ValidateString(logLine, false) + if err != nil { + utils.Logger.ErrorF("invalid log: %v", err) + continue + } + + logservice.LogQueue <- &plugins.Log{ + DataType: string(config.DataType), + DataSource: c.hostname, + Raw: validatedLog, + } + } + + if err := scanner.Err(); err != nil { + utils.Logger.ErrorF("error reading stdout: %v", err) + } +} + +func (c *AS400Collector) processCollectorErrors(stderr io.ReadCloser) { + scanner := bufio.NewScanner(stderr) + for scanner.Scan() { + line := scanner.Text() + // Log4j2 writes INFO/DEBUG/WARN to stderr too, filter them + if strings.Contains(line, " INFO ") || strings.Contains(line, " DEBUG ") { + utils.Logger.Info("JAR: %s", line) + } else if strings.Contains(line, " WARN ") { + utils.Logger.Info("JAR warning: %s", line) + } else { + utils.Logger.ErrorF("JAR error: %s", line) + } + } + + if err := scanner.Err(); err != nil { + utils.Logger.ErrorF("error reading stderr: %v", err) + } +} + +func (c *AS400Collector) stopCollectorProcess() error { + c.processMutex.Lock() + defer c.processMutex.Unlock() + + if !c.isRunning || c.currentProcess == nil { + return nil + } + + utils.Logger.Info("Stopping JAR (PID: %d)...", c.currentProcess.Process.Pid) + + if err := 
c.currentProcess.Process.Signal(syscall.SIGTERM); err != nil { + utils.Logger.ErrorF("error sending SIGTERM: %v", err) + } + + done := make(chan error, 1) + go func() { + done <- c.currentProcess.Wait() + }() + + select { + case <-time.After(10 * time.Second): + utils.Logger.Info("Forcing SIGKILL...") + c.currentProcess.Process.Kill() + <-done + case <-done: + } + + c.isRunning = false + c.currentProcess = nil + + utils.Logger.Info("JAR stopped") + return nil +} + +func (c *AS400Collector) monitorProcess(cmd *exec.Cmd) { + err := cmd.Wait() + + c.processMutex.Lock() + c.isRunning = false + c.currentProcess = nil + c.processMutex.Unlock() + + if err != nil { + utils.Logger.ErrorF("JAR exited with error: %v", err) + } else { + utils.Logger.Info("JAR exited") + } +} diff --git a/as400/collector/config.go b/as400/collector/config.go new file mode 100644 index 000000000..170cbb21d --- /dev/null +++ b/as400/collector/config.go @@ -0,0 +1,211 @@ +package collector + +import ( + "context" + "strings" + "time" + + aesCrypt "github.com/AtlasInsideCorp/AtlasInsideAES" + pb "github.com/utmstack/UTMStack/as400/agent" + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/conn" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type ConfigStreamManager struct { + currentConfig *AS400CollectorConfig + onConfigChange func(*AS400CollectorConfig) +} + +type AS400CollectorConfig struct { + Servers []AS400ServerConfig `json:"servers"` +} + +type AS400ServerConfig struct { + Tenant string `json:"tenant"` + Hostname string `json:"hostname"` + UserId string `json:"userId"` + Password string `json:"password"` +} + +func NewConfigStreamManager(onConfigChange func(*AS400CollectorConfig)) *ConfigStreamManager { + return &ConfigStreamManager{ + onConfigChange: onConfigChange, + } +} + +func (csm *ConfigStreamManager) Start(cnf *config.Config, ctx context.Context) { + for { + select { + case 
<-ctx.Done(): + return + default: + connection, err := conn.GetAgentManagerConnection(cnf) + if err != nil { + utils.Logger.ErrorF("error connecting to backend: %v", err) + time.Sleep(10 * time.Second) + continue + } + + stream, err := pb.NewCollectorServiceClient(connection).CollectorStream(ctx) + if err != nil { + utils.Logger.ErrorF("error opening stream: %v", err) + time.Sleep(10 * time.Second) + continue + } + + utils.Logger.Info("Config stream connected") + csm.handleStream(stream) + time.Sleep(5 * time.Second) + } + } +} + +func (csm *ConfigStreamManager) handleStream(stream pb.CollectorService_CollectorStreamClient) { + for { + msg, err := stream.Recv() + if err != nil { + if strings.Contains(err.Error(), "EOF") { + return + } + if st, ok := status.FromError(err); ok && (st.Code() == codes.Unavailable || st.Code() == codes.Canceled) { + return + } + utils.Logger.ErrorF("stream error: %v", err) + return + } + + if protoConfig := msg.GetConfig(); protoConfig != nil { + csm.processConfiguration(stream, protoConfig) + } + } +} + +func (csm *ConfigStreamManager) processConfiguration(stream pb.CollectorService_CollectorStreamClient, protoConfig *pb.CollectorConfig) { + requestID := protoConfig.GetRequestId() + + as400Config, err := csm.protoToConfig(protoConfig) + if err != nil { + utils.Logger.ErrorF("invalid config: %v", err) + csm.sendAcknowledgment(stream, requestID, false) + return + } + + if err := csm.validateConfig(as400Config); err != nil { + utils.Logger.ErrorF("validation failed: %v", err) + csm.sendAcknowledgment(stream, requestID, false) + return + } + + if csm.currentConfig != nil && csm.configEquals(csm.currentConfig, as400Config) { + csm.sendAcknowledgment(stream, requestID, true) + return + } + + csm.currentConfig = as400Config + utils.Logger.Info("Config received: %d servers", len(as400Config.Servers)) + + if csm.onConfigChange != nil { + csm.onConfigChange(as400Config) + } + + csm.sendAcknowledgment(stream, requestID, true) +} + +func (csm 
*ConfigStreamManager) protoToConfig(protoConfig *pb.CollectorConfig) (*AS400CollectorConfig, error) { + config := &AS400CollectorConfig{ + Servers: make([]AS400ServerConfig, 0), + } + + for _, group := range protoConfig.GetGroups() { + server := AS400ServerConfig{ + Tenant: group.GetGroupName(), + } + + utils.Logger.Info("Processing group: %s", server.Tenant) + + confs := group.GetConfigurations() + utils.Logger.Info(" Configurations count: %d", len(confs)) + + for _, conf := range confs { + key := conf.GetConfKey() + value := conf.GetConfValue() + + switch key { + case "hostname", "collector.as400.hostname": + server.Hostname = value + case "userId", "collector.as400.user": + server.UserId = value + case "password", "collector.as400.password": + server.Password = value + default: + utils.Logger.Info(" WARNING: Unknown config key '%s' (ignored)", key) + } + } + + config.Servers = append(config.Servers, server) + } + + return config, nil +} + +func (csm *ConfigStreamManager) validateConfig(config *AS400CollectorConfig) error { + // Empty config is valid - means no servers to collect from + for i, s := range config.Servers { + if s.Tenant == "" || s.Hostname == "" || s.UserId == "" || s.Password == "" { + return utils.Logger.ErrorF("server %d (%s): missing required fields", i, s.Tenant) + } + } + return nil +} + +func (csm *ConfigStreamManager) configEquals(a, b *AS400CollectorConfig) bool { + if len(a.Servers) != len(b.Servers) { + return false + } + + for i := range a.Servers { + if a.Servers[i].Tenant != b.Servers[i].Tenant || + a.Servers[i].Hostname != b.Servers[i].Hostname || + a.Servers[i].UserId != b.Servers[i].UserId || + a.Servers[i].Password != b.Servers[i].Password { + return false + } + } + + return true +} + +func (csm *ConfigStreamManager) sendAcknowledgment(stream pb.CollectorService_CollectorStreamClient, requestId string, accepted bool) { + acceptedStr := "false" + if accepted { + acceptedStr = "true" + } + + ack := &pb.CollectorMessages{ + 
StreamMessage: &pb.CollectorMessages_Result{ + Result: &pb.ConfigKnowledge{ + Accepted: acceptedStr, + RequestId: requestId, + }, + }, + } + + if err := stream.Send(ack); err != nil { + utils.Logger.ErrorF("ack send failed: %v", err) + } +} + +func EncryptPasswords(cfg *AS400CollectorConfig) error { + for i := range cfg.Servers { + encPassword, err := aesCrypt.AESEncrypt(cfg.Servers[i].Password, []byte(config.REPLACE_KEY)) + if err != nil { + return err + } + cfg.Servers[i].Password = encPassword + } + + return nil +} diff --git a/as400/config/config.go b/as400/config/config.go new file mode 100644 index 000000000..ba1ced37a --- /dev/null +++ b/as400/config/config.go @@ -0,0 +1,168 @@ +package config + +import ( + "os" + "sync" + + aesCrypt "github.com/AtlasInsideCorp/AtlasInsideAES" + "github.com/google/uuid" + "github.com/utmstack/UTMStack/as400/utils" +) + +type MSGDS struct { + DataSource string + Message string +} + +type InstallationUUID struct { + UUID string `yaml:"uuid"` +} + +type Config struct { + Server string `yaml:"server"` + CollectorID uint `yaml:"collector-id"` + CollectorKey string `yaml:"collector-key"` + SkipCertValidation bool `yaml:"insecure"` +} + +func GetInitialConfig() (*Config, string) { + cnf := Config{ + Server: os.Args[2], + } + skip := os.Args[4] + if skip == "yes" { + cnf.SkipCertValidation = true + } else { + cnf.SkipCertValidation = false + } + return &cnf, os.Args[3] +} + +var ( + cnf = Config{} + confOnce sync.Once + installationId = "" + installationIdOnce sync.Once +) + +func GetCurrentConfig() (*Config, error) { + var errR error + confOnce.Do(func() { + uuidExists := utils.CheckIfPathExist(UUIDFileName) + + var encryptConfig Config + if err := utils.ReadYAML(ConfigurationFile, &encryptConfig); err != nil { + errR = utils.Logger.ErrorF("error reading config file: %v", err) + return + } + + var key []byte + var err error + if uuidExists { + id, err := GetUUID() + if err != nil { + errR = utils.Logger.ErrorF("failed to get uuid: 
%v", err) + return + } + + key, err = utils.GenerateKeyByUUID(REPLACE_KEY, id) + if err != nil { + errR = utils.Logger.ErrorF("error geneating key: %v", err) + return + } + } else { + key, err = utils.GenerateKey(REPLACE_KEY) + if err != nil { + errR = utils.Logger.ErrorF("error geneating key: %v", err) + return + } + } + + collectorKey, err := aesCrypt.AESDecrypt(encryptConfig.CollectorKey, key) + if err != nil { + errR = utils.Logger.ErrorF("error encoding collector key: %v", err) + return + } + + cnf.Server = encryptConfig.Server + cnf.CollectorID = encryptConfig.CollectorID + cnf.CollectorKey = collectorKey + cnf.SkipCertValidation = encryptConfig.SkipCertValidation + + if !uuidExists { + if err := SaveConfig(&cnf); err != nil { + errR = utils.Logger.ErrorF("error writing config file: %v", err) + return + } + } + }) + if errR != nil { + return nil, errR + } + return &cnf, nil +} + +func SaveConfig(cnf *Config) error { + id, err := GenerateNewUUID() + if err != nil { + return utils.Logger.ErrorF("failed to generate uuid: %v", err) + } + + key, err := utils.GenerateKeyByUUID(REPLACE_KEY, id) + if err != nil { + return utils.Logger.ErrorF("error geneating key: %v", err) + } + + collectorKey, err := aesCrypt.AESEncrypt(cnf.CollectorKey, key) + if err != nil { + return utils.Logger.ErrorF("error encoding agent key: %v", err) + } + + encryptConf := &Config{ + Server: cnf.Server, + CollectorID: cnf.CollectorID, + CollectorKey: collectorKey, + SkipCertValidation: cnf.SkipCertValidation, + } + + if err := utils.WriteYAML(ConfigurationFile, encryptConf); err != nil { + return err + } + return nil +} + +func GenerateNewUUID() (string, error) { + id, err := uuid.NewRandom() + if err != nil { + return "", utils.Logger.ErrorF("failed to generate uuid: %v", err) + } + + InstallationUUID := InstallationUUID{ + UUID: id.String(), + } + + if err = utils.WriteYAML(UUIDFileName, InstallationUUID); err != nil { + return "", utils.Logger.ErrorF("error writing uuid file: %v", err) + 
} + + return InstallationUUID.UUID, nil +} + +func GetUUID() (string, error) { + var errR error + installationIdOnce.Do(func() { + var id = InstallationUUID{} + if err := utils.ReadYAML(UUIDFileName, &id); err != nil { + errR = utils.Logger.ErrorF("error reading uuid file: %v", err) + return + } + + installationId = id.UUID + }) + + if errR != nil { + return "", errR + } + + return installationId, nil +} diff --git a/as400/config/const.go b/as400/config/const.go new file mode 100644 index 000000000..f46f67881 --- /dev/null +++ b/as400/config/const.go @@ -0,0 +1,29 @@ +package config + +import ( + "path/filepath" + + "github.com/utmstack/UTMStack/as400/utils" +) + +var REPLACE_KEY string = "" + +const ( + DataType string = "ibm-as400" +) + +var ( + DependUrl = "https://%s:%s/private/dependencies/collector/as400/%s" + AgentManagerPort = "9000" + LogAuthProxyPort = "50051" + DependenciesPort = "9001" + + ServiceLogFile = filepath.Join(utils.GetMyPath(), "logs", "utmstack_as400_collector.log") + UUIDFileName = filepath.Join(utils.GetMyPath(), "uuid.yml") + ConfigurationFile = filepath.Join(utils.GetMyPath(), "config.yml") + RetentionConfigFile = filepath.Join(utils.GetMyPath(), "retention.json") + LogsDBFile = filepath.Join(utils.GetMyPath(), "logs_process", "logs.db") + VersionPath = filepath.Join(utils.GetMyPath(), "version.json") + CollectorJarPath = filepath.Join(utils.GetMyPath(), "as400-collector.jar") + ConfigFilePath = filepath.Join(utils.GetMyPath(), "local_storage", "server.json") +) diff --git a/as400/conn/conn.go b/as400/conn/conn.go new file mode 100644 index 000000000..6d6104cc4 --- /dev/null +++ b/as400/conn/conn.go @@ -0,0 +1,106 @@ +package conn + +import ( + "crypto/tls" + "sync" + "time" + + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/credentials" +) + +const ( + maxMessageSize = 1024 * 1024 * 1024 + 
maxConnectionAttempts = 3 + initialReconnectDelay = 10 * time.Second + maxReconnectDelay = 60 * time.Second +) + +var ( + correlationConn *grpc.ClientConn + correlationConnOnce sync.Once + agentManagerConn *grpc.ClientConn + agentManagerConnOnce sync.Once +) + +func GetAgentManagerConnection(cnf *config.Config) (*grpc.ClientConn, error) { + var err error + agentManagerConnOnce.Do(func() { + agentManagerConn, err = connectToServer(cnf.Server, config.AgentManagerPort, cnf.SkipCertValidation) + if err != nil { + err = utils.Logger.ErrorF("error connecting to Agent Manager: %v", err) + } + }) + if err != nil { + return nil, err + } + + state := agentManagerConn.GetState() + if state == connectivity.Shutdown || state == connectivity.TransientFailure { + agentManagerConn.Close() + agentManagerConn, err = connectToServer(cnf.Server, config.AgentManagerPort, cnf.SkipCertValidation) + if err != nil { + return nil, utils.Logger.ErrorF("error connecting to Agent Manager: %v", err) + } + } + + return agentManagerConn, nil +} + +func GetCorrelationConnection(cnf *config.Config) (*grpc.ClientConn, error) { + var err error + correlationConnOnce.Do(func() { + correlationConn, err = connectToServer(cnf.Server, config.LogAuthProxyPort, cnf.SkipCertValidation) + if err != nil { + err = utils.Logger.ErrorF("error connecting to Correlation: %v", err) + } + }) + if err != nil { + return nil, err + } + + state := correlationConn.GetState() + if state == connectivity.Shutdown || state == connectivity.TransientFailure { + correlationConn.Close() + correlationConn, err = connectToServer(cnf.Server, config.LogAuthProxyPort, cnf.SkipCertValidation) + if err != nil { + return nil, utils.Logger.ErrorF("error connecting to Correlation: %v", err) + } + } + + return correlationConn, nil +} + +func connectToServer(addrs, port string, skip bool) (*grpc.ClientConn, error) { + connectionAttemps := 0 + reconnectDelay := initialReconnectDelay + + serverAddress := addrs + ":" + port + var conn 
*grpc.ClientConn + var err error + + for { + if connectionAttemps >= maxConnectionAttempts { + return nil, utils.Logger.ErrorF("failed to connect to Server: %v", err) + } + + conn, err = grpc.NewClient( + serverAddress, + grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(maxMessageSize)), + grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{InsecureSkipVerify: skip}))) + if err != nil { + connectionAttemps++ + utils.Logger.ErrorF("error connecting to Server, trying again in %.0f seconds", reconnectDelay.Seconds()) + time.Sleep(reconnectDelay) + reconnectDelay = utils.IncrementReconnectDelay(reconnectDelay, maxReconnectDelay) + continue + } + + break + } + + return conn, nil +} diff --git a/as400/database/db.go b/as400/database/db.go new file mode 100644 index 000000000..c85b06379 --- /dev/null +++ b/as400/database/db.go @@ -0,0 +1,129 @@ +package database + +import ( + "errors" + "fmt" + "log" + "os" + "path/filepath" + "sync" + + "github.com/glebarez/sqlite" + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/utils" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +var ( + dbInstance *Database + dbOnce sync.Once +) + +type Database struct { + db *gorm.DB + locker sync.RWMutex +} + +func (d *Database) Migrate(data interface{}) error { + return d.db.AutoMigrate(data) +} + +func (d *Database) Create(data interface{}) error { + return d.db.Create(data).Error +} + +func (d *Database) Find(data interface{}, field string, value interface{}) (bool, error) { + err := d.db.Where(fmt.Sprintf("%v = ?", field), value).Find(data).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return false, nil + } + return false, err + } + return true, nil +} + +func (d *Database) GetAll(data interface{}) error { + if err := d.db.Find(data).Error; err != nil { + return err + } + return nil +} + +func (d *Database) Update(data interface{}, searchField string, searchValue string, modifyField string, newValue interface{}) error { + 
return d.db.Model(data).Where(fmt.Sprintf("%v = ?", searchField), searchValue).Update(modifyField, newValue).Error +} + +func (d *Database) Delete(data interface{}, field string, value string) error { + return d.db.Where(fmt.Sprintf("%v = ?", field), value).Delete(data).Error +} + +func (d *Database) DeleteOld(data interface{}, retentionMegabytes int) (int, error) { + currentSize, err := GetDatabaseSizeInMB() + if err != nil { + return 0, utils.Logger.ErrorF("error getting database size: %v", err) + } + + var rowsAffected int + for currentSize > retentionMegabytes { + result := d.db.Where("1 = 1").Order("created_at ASC").Limit(10).Delete(data) + if result.Error != nil { + return rowsAffected, result.Error + } + rowsAffected += int(result.RowsAffected) + d.db.Exec("VACUUM;") + currentSize, err = GetDatabaseSizeInMB() + if err != nil { + return rowsAffected, utils.Logger.ErrorF("error getting database size: %v", err) + } + } + + return rowsAffected, nil +} + +func (d *Database) Lock() { + d.locker.Lock() +} + +func (d *Database) Unlock() { + d.locker.Unlock() +} + +func GetDB() *Database { + dbOnce.Do(func() { + path := filepath.Join(utils.GetMyPath(), "logs_process") + err := utils.CreatePathIfNotExist(path) + if err != nil { + log.Fatalf("error creating database path: %v", err) + } + path = config.LogsDBFile + if _, err := os.Stat(path); os.IsNotExist(err) { + file, err := os.Create(path) + if err != nil { + log.Fatalf("error creating database file: %v", err) + } + file.Close() + } + + conn, err := gorm.Open(sqlite.Open(path), &gorm.Config{ + Logger: logger.Default.LogMode(logger.Silent), + }) + if err != nil { + log.Fatalf("error connecting with database: %v", err) + } + + dbInstance = &Database{db: conn} + + }) + + return dbInstance +} + +func GetDatabaseSizeInMB() (int, error) { + fileInfo, err := os.Stat(config.LogsDBFile) + if err != nil { + return 0, err + } + return int(fileInfo.Size() / (1024 * 1024)), nil +} diff --git a/as400/go.mod b/as400/go.mod new 
file mode 100644 index 000000000..920bc5e32 --- /dev/null +++ b/as400/go.mod @@ -0,0 +1,71 @@ +module github.com/utmstack/UTMStack/as400 + +go 1.25.0 + +require ( + github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0 + github.com/elastic/go-sysinfo v1.15.4 + github.com/glebarez/sqlite v1.11.0 + github.com/google/uuid v1.6.0 + github.com/kardianos/service v1.2.4 + github.com/threatwinds/go-sdk v1.0.45 + github.com/threatwinds/logger v1.2.2 + google.golang.org/grpc v1.75.1 + google.golang.org/protobuf v1.36.9 + gopkg.in/yaml.v2 v2.4.0 + gorm.io/gorm v1.31.0 +) + +require ( + cel.dev/expr v0.24.0 // indirect + github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/bytedance/sonic v1.14.0 // indirect + github.com/bytedance/sonic/loader v0.3.0 // indirect + github.com/cloudwego/base64x v0.1.5 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/elastic/go-windows v1.0.2 // indirect + github.com/gabriel-vasile/mimetype v1.4.9 // indirect + github.com/gin-contrib/sse v1.1.0 // indirect + github.com/gin-gonic/gin v1.10.1 // indirect + github.com/glebarez/go-sqlite v1.21.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.27.0 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/google/cel-go v0.26.0 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + 
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect + github.com/stoewer/go-strcase v1.3.1 // indirect + github.com/tidwall/gjson v1.18.0 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.3.0 // indirect + go.yaml.in/yaml/v2 v2.4.2 // indirect + golang.org/x/arch v0.19.0 // indirect + golang.org/x/crypto v0.40.0 // indirect + golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 // indirect + golang.org/x/net v0.42.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250721164621-a45f3dfb1074 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250721164621-a45f3dfb1074 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + howett.net/plist v0.0.0-20181124034731-591f970eefbb // indirect + modernc.org/libc v1.22.5 // indirect + modernc.org/mathutil v1.5.0 // indirect + modernc.org/memory v1.5.0 // indirect + modernc.org/sqlite v1.23.1 // indirect + sigs.k8s.io/yaml v1.5.0 // indirect +) diff --git a/as400/go.sum b/as400/go.sum new file mode 100644 index 000000000..a8826419f --- /dev/null +++ b/as400/go.sum @@ -0,0 +1,194 @@ +cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= +cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0 h1:TBiBl9KCa4i4epY0/q9WSC4ugavL6+6JUkOXWDnMM6I= +github.com/AtlasInsideCorp/AtlasInsideAES v1.0.0/go.mod h1:cRhQ3TS/VEfu/z+qaciyuDZdtxgaXgaX8+G6Wa5NzBk= +github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= +github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= +github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= 
+github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= +github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= +github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4= +github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= +github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/elastic/go-sysinfo v1.15.4 h1:A3zQcunCxik14MgXu39cXFXcIw2sFXZ0zL886eyiv1Q= +github.com/elastic/go-sysinfo v1.15.4/go.mod h1:ZBVXmqS368dOn/jvijV/zHLfakWTYHBZPk3G244lHrU= +github.com/elastic/go-windows v1.0.2 h1:yoLLsAsV5cfg9FLhZ9EXZ2n2sQFKeDYrHenkcivY4vI= +github.com/elastic/go-windows v1.0.2/go.mod h1:bGcDpBzXgYSqM0Gx3DM4+UxFj300SZLixie9u9ixLM8= +github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= +github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= +github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ= +github.com/gin-gonic/gin 
v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo= +github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k= +github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw= +github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= +github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/cel-go v0.26.0 h1:DPGjXackMpJWH680oGY4lZhYjIameYmR+/6RBdDGmaI= +github.com/google/cel-go 
v0.26.0/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PUIaryMM= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= +github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kardianos/service v1.2.4 h1:XNlGtZOYNx2u91urOdg/Kfmc+gfmuIo1Dd3rEi2OgBk= +github.com/kardianos/service v1.2.4/go.mod h1:E4V9ufUuY82F7Ztlu1eN9VXWIQxg8NoLQlmFe0MtrXc= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 
h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod 
h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/stoewer/go-strcase v1.3.1 h1:iS0MdW+kVTxgMoE1LAZyMiYJFKlOzLooE4MxjirtkAs= +github.com/stoewer/go-strcase v1.3.1/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/threatwinds/go-sdk v1.0.45 h1:KZ3s3HviNRrOkg5EqjFnoauANFFzTqjNFyshPLY2SoI= +github.com/threatwinds/go-sdk v1.0.45/go.mod h1:tcWn6r6vqID/W/nL3UKfc5NafA3V/cSkiLvfJnwB58c= +github.com/threatwinds/logger v1.2.2 h1:sVuT8yhbecPqP4tT8EwHfp1czNC6e1wdkE1ihNnuBdA= 
+github.com/threatwinds/logger v1.2.2/go.mod h1:Amq0QI1y7fkTpnBUgeGVu2Z/C4u4ys2pNLUOuj3UAAU= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= +github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= +go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= +go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc= +go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps= +go.opentelemetry.io/otel/trace 
v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= +go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE= +go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI= +golang.org/x/arch v0.19.0 h1:LmbDQUodHThXE+htjrnmVD73M//D9GTH6wFZjyDkjyU= +golang.org/x/arch v0.19.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4= +golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/api v0.0.0-20250721164621-a45f3dfb1074 h1:mVXdvnmR3S3BQOqHECm9NGMjYiRtEvDYcqAqedTXY6s= +google.golang.org/genproto/googleapis/api v0.0.0-20250721164621-a45f3dfb1074/go.mod h1:vYFwMYFbmA8vl6Z/krj/h7+U/AqpHknwJX4Uqgfyc7I= +google.golang.org/genproto/googleapis/rpc 
v0.0.0-20250721164621-a45f3dfb1074 h1:qJW29YvkiJmXOYMu5Tf8lyrTp3dOS+K4z6IixtLaCf8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250721164621-a45f3dfb1074/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI= +google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= +google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/gorm v1.31.0 h1:0VlycGreVhK7RF/Bwt51Fk8v0xLiiiFdbGDPIZQ7mJY= +gorm.io/gorm v1.31.0/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs= +howett.net/plist v0.0.0-20181124034731-591f970eefbb h1:jhnBjNi9UFpfpl8YZhA9CrOqpnJdvzuiHsl/dnxl11M= +howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod 
h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0= +modernc.org/libc v1.22.5 h1:91BNch/e5B0uPbJFgqbxXuOnxBQjlS//icfQEGmvyjE= +modernc.org/libc v1.22.5/go.mod h1:jj+Z7dTNX8fBScMVNRAYZ/jF91K8fdT2hYMThc3YjBY= +modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ= +modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/memory v1.5.0 h1:N+/8c5rE6EqugZwHii4IFsaJ7MUhoWX07J5tC/iI5Ds= +modernc.org/memory v1.5.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= +modernc.org/sqlite v1.23.1 h1:nrSBg4aRQQwq59JpvGEQ15tNxoO5pX/kUjcRNwSAGQM= +modernc.org/sqlite v1.23.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= +sigs.k8s.io/yaml v1.5.0 h1:M10b2U7aEUY6hRtU870n2VTPgR5RZiL/I6Lcc2F4NUQ= +sigs.k8s.io/yaml v1.5.0/go.mod h1:wZs27Rbxoai4C0f8/9urLZtZtF3avA3gKvGyPdDqTO4= diff --git a/as400/logservice/processor.go b/as400/logservice/processor.go new file mode 100644 index 000000000..ac7799c75 --- /dev/null +++ b/as400/logservice/processor.go @@ -0,0 +1,268 @@ +package logservice + +import ( + "context" + "errors" + "os" + "strconv" + "strings" + "sync" + "time" + + "github.com/google/uuid" + "github.com/threatwinds/go-sdk/plugins" + + "github.com/utmstack/UTMStack/as400/agent" + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/conn" + "github.com/utmstack/UTMStack/as400/database" + "github.com/utmstack/UTMStack/as400/models" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +type LogProcessor struct { + db *database.Database + connErrWritten bool + ackErrWritten bool + sendErrWritten bool +} + +var ( + processor LogProcessor + processorOnce sync.Once + LogQueue = make(chan *plugins.Log) + timeToSleep = 10 * time.Second + timeCLeanLogs = 10 * time.Minute +) + +func GetLogProcessor() LogProcessor 
{ + processorOnce.Do(func() { + processor = LogProcessor{ + db: database.GetDB(), + connErrWritten: false, + ackErrWritten: false, + sendErrWritten: false, + } + }) + return processor +} + +func (l *LogProcessor) ProcessLogs(cnf *config.Config, ctx context.Context) { + go l.CleanCountedLogs() + + for { + ctxEof, cancelEof := context.WithCancel(context.Background()) + connection, err := conn.GetCorrelationConnection(cnf) + if err != nil { + if !l.connErrWritten { + utils.Logger.ErrorF("error connecting to Correlation: %v", err) + l.connErrWritten = true + } + time.Sleep(10 * time.Second) + continue + } + + client := plugins.NewIntegrationClient(connection) + plClient := createClient(client, ctx, cnf) + l.connErrWritten = false + + go l.handleAcknowledgements(plClient, ctxEof, cancelEof) + l.processLogs(plClient, ctxEof, cancelEof) + } +} + +func (l *LogProcessor) handleAcknowledgements(plClient plugins.Integration_ProcessLogClient, ctx context.Context, cancel context.CancelFunc) { + for { + select { + case <-ctx.Done(): + return + default: + ack, err := plClient.Recv() + if err != nil { + if strings.Contains(err.Error(), "EOF") { + time.Sleep(timeToSleep) + cancel() + return + } + st, ok := status.FromError(err) + if ok && (st.Code() == codes.Unavailable || st.Code() == codes.Canceled) { + if !l.ackErrWritten { + utils.Logger.ErrorF("failed to receive ack: %v", err) + l.ackErrWritten = true + } + time.Sleep(timeToSleep) + cancel() + return + } else { + if !l.ackErrWritten { + utils.Logger.ErrorF("failed to receive ack: %v", err) + l.ackErrWritten = true + } + time.Sleep(timeToSleep) + continue + } + } + + l.ackErrWritten = false + + l.db.Lock() + err = l.db.Update(&models.Log{}, "id", ack.LastId, "processed", true) + if err != nil { + utils.Logger.ErrorF("failed to update log: %v", err) + } + l.db.Unlock() + } + } +} + +func (l *LogProcessor) processLogs(plClient plugins.Integration_ProcessLogClient, ctx context.Context, cancel context.CancelFunc) { + for { + select 
{ + case <-ctx.Done(): + utils.Logger.Info("context done, exiting processLogs") + return + case newLog := <-LogQueue: + id, err := uuid.NewRandom() + if err != nil { + utils.Logger.ErrorF("failed to generate uuid: %v", err) + continue + } + + newLog.Id = id.String() + l.db.Lock() + err = l.db.Create(&models.Log{ID: newLog.Id, Log: newLog.Raw, Type: newLog.DataType, CreatedAt: time.Now(), DataSource: newLog.DataSource, Processed: false}) + if err != nil { + utils.Logger.ErrorF("failed to save log: %v :log: %s", err, newLog.Raw) + } + l.db.Unlock() + + err = plClient.Send(newLog) + if err != nil { + if strings.Contains(err.Error(), "EOF") { + time.Sleep(timeToSleep) + cancel() + return + } + st, ok := status.FromError(err) + if ok && (st.Code() == codes.Unavailable || st.Code() == codes.Canceled) { + if !l.sendErrWritten { + utils.Logger.ErrorF("failed to send log: %v :log: %s", err, newLog.Raw) + l.sendErrWritten = true + } + time.Sleep(timeToSleep) + cancel() + return + } else { + if !l.sendErrWritten { + utils.Logger.ErrorF("failed to send log: %v :log: %s", err, newLog.Raw) + l.sendErrWritten = true + } + time.Sleep(timeToSleep) + continue + } + } + l.sendErrWritten = false + } + } +} + +func (l *LogProcessor) CleanCountedLogs() { + ticker := time.NewTicker(timeCLeanLogs) + defer ticker.Stop() + for range ticker.C { + dataRetention, err := GetDataRetention() + if err != nil { + utils.Logger.ErrorF("error getting data retention: %s", err) + continue + } + l.db.Lock() + _, err = l.db.DeleteOld(&models.Log{}, dataRetention) + if err != nil { + utils.Logger.ErrorF("error deleting old logs: %s", err) + } + l.db.Unlock() + + unprocessed := make([]models.Log, 0, 10) + l.db.Lock() + found, err := l.db.Find(&unprocessed, "processed", false) + l.db.Unlock() + if err != nil { + utils.Logger.ErrorF("error finding unprocessed logs: %s", err) + continue + } + + if found { + for _, log := range unprocessed { + LogQueue <- &plugins.Log{ + Id: log.ID, + Raw: log.Log, + DataType: 
log.Type, + DataSource: log.DataSource, + Timestamp: log.CreatedAt.Format(time.RFC3339Nano), + } + } + } + } +} + +func createClient(client plugins.IntegrationClient, ctx context.Context, cnf *config.Config) plugins.Integration_ProcessLogClient { + var connErrMsgWritten bool + invalidKeyCounter := 0 + for { + authCtx := metadata.AppendToOutgoingContext(ctx, + "key", cnf.CollectorKey, + "id", strconv.Itoa(int(cnf.CollectorID)), + "type", "collector") + + plClient, err := client.ProcessLog(authCtx) + if err != nil { + if strings.Contains(err.Error(), "invalid agent key") { + invalidKeyCounter++ + if invalidKeyCounter >= 20 { + utils.Logger.Info("Uninstalling collector: reason: collector has been removed from the panel...") + _ = agent.UninstallAll() + os.Exit(1) + } + } else { + invalidKeyCounter = 0 + } + if !connErrMsgWritten { + utils.Logger.ErrorF("failed to create input client: %v", err) + connErrMsgWritten = true + } + time.Sleep(timeToSleep) + continue + } + return plClient + } +} + +func SetDataRetention(retention string) error { + if retention == "" { + retention = "20" + } + + retentionInt, err := strconv.Atoi(retention) + if err != nil { + return errors.New("retention must be a number (number of megabytes)") + } + + if retentionInt < 1 { + return errors.New("retention must be greater than 0") + } + + return utils.WriteJSON(config.RetentionConfigFile, models.DataRetention{Retention: retentionInt}) +} + +func GetDataRetention() (int, error) { + retention := models.DataRetention{} + err := utils.ReadJson(config.RetentionConfigFile, &retention) + if err != nil { + return 0, err + } + + return retention.Retention, nil +} diff --git a/as400/main.go b/as400/main.go new file mode 100644 index 000000000..6717783bc --- /dev/null +++ b/as400/main.go @@ -0,0 +1,205 @@ +package main + +import ( + "fmt" + "os" + "time" + + pb "github.com/utmstack/UTMStack/as400/agent" + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/database" + 
"github.com/utmstack/UTMStack/as400/logservice" + "github.com/utmstack/UTMStack/as400/models" + "github.com/utmstack/UTMStack/as400/serv" + "github.com/utmstack/UTMStack/as400/updates" + "github.com/utmstack/UTMStack/as400/utils" +) + +func main() { + utils.InitLogger(config.ServiceLogFile) + + if len(os.Args) > 1 { + arg := os.Args[1] + + isInstalled, err := utils.CheckIfServiceIsInstalled("UTMStackAS400Collector") + if err != nil { + fmt.Println("Error checking if service is installed: ", err) + os.Exit(1) + } + if arg != "install" && !isInstalled { + fmt.Println("UTMStackAS400Collector service is not installed") + os.Exit(1) + } else if arg == "install" && isInstalled { + fmt.Println("UTMStackAS400Collector service is already installed") + os.Exit(1) + } + + switch arg { + case "run": + serv.RunService() + case "install": + utils.PrintBanner() + fmt.Println("Installing UTMStackAS400Collector service ...") + + cnf, utmKey := config.GetInitialConfig() + + fmt.Print("Checking server connection ... ") + if err := utils.ArePortsReachable(cnf.Server, config.AgentManagerPort, config.LogAuthProxyPort, config.DependenciesPort); err != nil { + fmt.Println("\nError trying to connect to server: ", err) + os.Exit(1) + } + fmt.Println("[OK]") + + fmt.Print("Downloading dependencies ... ") + if err := updates.DownloadVersion(cnf.Server, cnf.SkipCertValidation); err != nil { + fmt.Println("\nError downloading version: ", err) + os.Exit(1) + } + if err := updates.DownloadJar(cnf.Server, cnf.SkipCertValidation); err != nil { + fmt.Println("\nError downloading jar: ", err) + os.Exit(1) + } + if err := updates.DownloadUpdater(cnf.Server, cnf.SkipCertValidation); err != nil { + fmt.Println("\nError downloading updater: ", err) + os.Exit(1) + } + fmt.Println("[OK]") + + fmt.Print("Installing Java ... ") + if err := utils.InstallJava(); err != nil { + fmt.Println("\nError installing java: ", err) + os.Exit(1) + } + fmt.Println("[OK]") + + fmt.Print("Configuring collector ... 
") + err = pb.RegisterCollector(cnf, utmKey) + if err != nil { + fmt.Println("\nError registering collector: ", err) + os.Exit(1) + } + if err = config.SaveConfig(cnf); err != nil { + fmt.Println("\nError saving config: ", err) + os.Exit(1) + } + + if err := logservice.SetDataRetention(""); err != nil { + fmt.Println("\nError setting retention: ", err) + os.Exit(1) + } + fmt.Println("[OK]") + + fmt.Print(("Creating service ... ")) + serv.InstallService() + fmt.Println("[OK]") + + fmt.Print("Installing updater service ... ") + if err := utils.InstallUpdater(); err != nil { + fmt.Println("\nError installing updater: ", err) + os.Exit(1) + } + fmt.Println("[OK]") + + fmt.Println("UTMStackAS400Collector service installed correctly") + + case "change-retention": + fmt.Println("Changing log retention ...") + retention := os.Args[2] + + if err := logservice.SetDataRetention(retention); err != nil { + fmt.Println("Error trying to change retention: ", err) + os.Exit(1) + } + + fmt.Printf("Retention changed correctly to %s\n", retention) + time.Sleep(5 * time.Second) + + case "clean-logs": + fmt.Println("Cleaning old logs ...") + db := database.GetDB() + datR, err := logservice.GetDataRetention() + if err != nil { + fmt.Println("Error getting retention: ", err) + os.Exit(1) + } + _, err = db.DeleteOld(models.Log{}, datR) + if err != nil { + fmt.Println("Error cleaning logs: ", err) + os.Exit(1) + } + fmt.Println("Logs cleaned correctly") + time.Sleep(5 * time.Second) + + case "uninstall": + fmt.Println("Uninstalling UTMStackAS400Collector service ...") + + fmt.Print("Uninstalling updater service ... 
") + if err := utils.UninstallUpdater(); err != nil { + fmt.Println("\nWarning uninstalling updater: ", err) + } else { + fmt.Println("[OK]") + } + + cnf, err := config.GetCurrentConfig() + if err != nil { + fmt.Println("Error getting config: ", err) + os.Exit(1) + } + if err = pb.DeleteAgent(cnf); err != nil { + utils.Logger.ErrorF("error deleting collector: %v", err) + } + + os.Remove(config.ConfigurationFile) + + serv.UninstallService() + + fmt.Println("Uninstalling java") + if err := utils.UninstallJava(); err != nil { + utils.Logger.ErrorF("error uninstalling java: %v", err) + } + + fmt.Println("[OK]") + fmt.Println("UTMStackAS400Collector service uninstalled correctly") + os.Exit(1) + case "help": + Help() + default: + fmt.Println("unknown option") + } + } else { + serv.RunService() + } +} + +func Help() { + fmt.Println("### UTMStack Collector ###") + fmt.Println("Usage:") + fmt.Println(" To run the service: ./utmstack_as400_collector run") + fmt.Println(" To install the service: ./utmstack_as400_collector install") + fmt.Println(" To change log retention: ./utmstack_as400_collector change-retention <retention>") + fmt.Println(" To clean old logs: ./utmstack_as400_collector clean-logs") + fmt.Println(" To uninstall the service: ./utmstack_as400_collector uninstall") + fmt.Println(" To debug UTMStack installation: ./utmstack_as400_collector debug-utmstack") + fmt.Println(" For help (this message): ./utmstack_as400_collector help") + fmt.Println() + fmt.Println("Options:") + fmt.Println(" run Run the UTMStackAS400Collector service") + fmt.Println(" install Install the UTMStackAS400Collector service") + fmt.Println(" change-retention Change the log retention to <retention>. Retention must be a number of megabytes. 
Example: 20") + fmt.Println(" clean-logs Clean old logs from the database") + fmt.Println(" uninstall Uninstall the UTMStackAS400Collector service") + fmt.Println(" debug-utmstack Debug UTMStack installation validation") + fmt.Println(" help Display this help message") + fmt.Println() + fmt.Println("Requirements:") + fmt.Println(" - UTMStack must be installed on this system") + fmt.Println(" - File /utmstack.yaml must exist in root directory") + fmt.Println(" - Directory /utmstack/ must exist") + fmt.Println() + fmt.Println("Note:") + fmt.Println(" - Make sure to run commands with appropriate permissions.") + fmt.Println(" - All commands require administrative privileges.") + fmt.Println(" - For detailed logs, check the service log file.") + fmt.Println() + os.Exit(0) +} diff --git a/as400/models/data.go b/as400/models/data.go new file mode 100644 index 000000000..051820585 --- /dev/null +++ b/as400/models/data.go @@ -0,0 +1,5 @@ +package models + +type DataRetention struct { + Retention int `json:"retention"` +} diff --git a/as400/models/schema.go b/as400/models/schema.go new file mode 100644 index 000000000..7e9fa46b7 --- /dev/null +++ b/as400/models/schema.go @@ -0,0 +1,14 @@ +package models + +import ( + "time" +) + +type Log struct { + ID string `gorm:"index"` + CreatedAt time.Time + DataSource string + Type string + Log string + Processed bool +} diff --git a/as400/models/version.go b/as400/models/version.go new file mode 100644 index 000000000..ad40993be --- /dev/null +++ b/as400/models/version.go @@ -0,0 +1,6 @@ +package models + +type Version struct { + Version string `json:"version"` + JarVersion string `json:"jar_version"` +} diff --git a/as400/serv/config.go b/as400/serv/config.go new file mode 100644 index 000000000..20cc8f75a --- /dev/null +++ b/as400/serv/config.go @@ -0,0 +1,17 @@ +package serv + +import ( + "github.com/kardianos/service" +) + +// GetConfigServ creates and returns a pointer to a service configuration structure. 
+func GetConfigServ() *service.Config { + svcConfig := &service.Config{ + Name: "UTMStackAS400Collector", + DisplayName: "UTMStack AS400 Collector", + Description: "UTMStack AS400 Collector Service", + Arguments: []string{"run"}, + } + + return svcConfig +} diff --git a/as400/serv/install.go b/as400/serv/install.go new file mode 100644 index 000000000..06ab38a06 --- /dev/null +++ b/as400/serv/install.go @@ -0,0 +1,29 @@ +package serv + +import ( + "fmt" + "os" + + "github.com/kardianos/service" +) + +func InstallService() { + svcConfig := GetConfigServ() + prg := new(program) + newService, err := service.New(prg, svcConfig) + if err != nil { + fmt.Println("\nError creating new service: ", err) + os.Exit(1) + } + err = newService.Install() + if err != nil { + fmt.Println("\nError installing new service: ", err) + os.Exit(1) + } + + err = newService.Start() + if err != nil { + fmt.Println("\nError starting new service: ", err) + os.Exit(1) + } +} diff --git a/as400/serv/run.go b/as400/serv/run.go new file mode 100644 index 000000000..782dcbf07 --- /dev/null +++ b/as400/serv/run.go @@ -0,0 +1,21 @@ +package serv + +import ( + "github.com/kardianos/service" + "github.com/utmstack/UTMStack/as400/utils" +) + +func RunService() { + svcConfig := GetConfigServ() + p := new(program) + + newService, err := service.New(p, svcConfig) + if err != nil { + utils.Logger.Fatal("error creating new service: %v", err) + } + + err = newService.Run() + if err != nil { + utils.Logger.Fatal("error running new service: %v", err) + } +} diff --git a/as400/serv/service.go b/as400/serv/service.go new file mode 100644 index 000000000..5c49326c8 --- /dev/null +++ b/as400/serv/service.go @@ -0,0 +1,72 @@ +package serv + +import ( + "context" + "os" + "os/signal" + "strconv" + "syscall" + + "github.com/kardianos/service" + + pb "github.com/utmstack/UTMStack/as400/agent" + collectors "github.com/utmstack/UTMStack/as400/collector" + "github.com/utmstack/UTMStack/as400/config" + 
"github.com/utmstack/UTMStack/as400/database" + "github.com/utmstack/UTMStack/as400/logservice" + "github.com/utmstack/UTMStack/as400/models" + "github.com/utmstack/UTMStack/as400/utils" + "google.golang.org/grpc/metadata" +) + +type program struct { + as400 *collectors.AS400Collector +} + +func (p *program) Start(_ service.Service) error { + go p.run() + return nil +} + +func (p *program) Stop(_ service.Service) error { + if p.as400 != nil { + utils.Logger.Info("Stopping AS400 Collector...") + return p.as400.Stop() + } + return nil +} + +func (p *program) run() { + utils.InitLogger(config.ServiceLogFile) + cnf, err := config.GetCurrentConfig() + if err != nil { + utils.Logger.Fatal("error getting config: %v", err) + } + + db := database.GetDB() + err = db.Migrate(models.Log{}) + if err != nil { + utils.Logger.ErrorF("error migrating logs table: %v", err) + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + ctx = metadata.AppendToOutgoingContext(ctx, "key", cnf.CollectorKey) + ctx = metadata.AppendToOutgoingContext(ctx, "id", strconv.Itoa(int(cnf.CollectorID))) + ctx = metadata.AppendToOutgoingContext(ctx, "type", "collector") + + go pb.StartPing(cnf, ctx) + + logProcessor := logservice.GetLogProcessor() + go logProcessor.ProcessLogs(cnf, ctx) + + // Start AS400 Collector with configuration stream + p.as400 = collectors.NewAS400Collector() + if err := p.as400.Start(ctx, cnf); err != nil { + utils.Logger.Fatal("error starting AS400 collector: %v", err) + } + + signals := make(chan os.Signal, 1) + signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM) + <-signals +} diff --git a/as400/serv/uninstall.go b/as400/serv/uninstall.go new file mode 100644 index 000000000..f7b669dd8 --- /dev/null +++ b/as400/serv/uninstall.go @@ -0,0 +1,16 @@ +package serv + +import ( + "github.com/utmstack/UTMStack/as400/utils" +) + +func UninstallService() { + err := utils.StopService("UTMStackAS400Collector") + if err != nil { + utils.Logger.Fatal("error 
stopping UTMStackAS400Collector: %v", err) + } + err = utils.UninstallService("UTMStackAS400Collector") + if err != nil { + utils.Logger.Fatal("error uninstalling UTMStackAS400Collector: %v", err) + } +} diff --git a/as400/updater/config/config.go b/as400/updater/config/config.go new file mode 100644 index 000000000..8daefeb23 --- /dev/null +++ b/as400/updater/config/config.go @@ -0,0 +1,54 @@ +package config + +import ( + "fmt" + "os" + "path/filepath" + "sync" + + "gopkg.in/yaml.v3" +) + +type Config struct { + Server string `json:"server" yaml:"server"` + SkipCertValidation bool `json:"insecure" yaml:"insecure"` +} + +var ( + cnf = Config{} + confOnce sync.Once +) + +func GetCurrentConfig() (*Config, error) { + var errR error + confOnce.Do(func() { + ex, err := os.Executable() + if err != nil { + errR = fmt.Errorf("error getting executable path: %v", err) + return + } + exPath := filepath.Dir(ex) + + configPath := filepath.Join(exPath, "config.yml") + content, err := os.ReadFile(configPath) + if err != nil { + errR = fmt.Errorf("error reading config file: %v", err) + return + } + + var loadedConfig Config + err = yaml.Unmarshal(content, &loadedConfig) + if err != nil { + errR = fmt.Errorf("error parsing config file: %v", err) + return + } + + cnf.Server = loadedConfig.Server + cnf.SkipCertValidation = loadedConfig.SkipCertValidation + }) + + if errR != nil { + return nil, errR + } + return &cnf, nil +} diff --git a/as400/updater/config/const.go b/as400/updater/config/const.go new file mode 100644 index 000000000..4be0bbacb --- /dev/null +++ b/as400/updater/config/const.go @@ -0,0 +1,23 @@ +package config + +import ( + "path/filepath" + + "github.com/utmstack/UTMStack/as400/updater/utils" +) + +const ( + SERV_LOG = "utmstack_as400_updater.log" + SERV_COLLECTOR_NAME = "UTMStackAS400Collector" + JAR_FILE = "as400-collector.jar" +) + +var ( + DependUrl = "https://%s:%s/private/dependencies/collector/as400/%s" + AgentManagerPort = "9000" + LogAuthProxyPort = "50051" 
+ DependenciesPort = "9001" + + ServiceFile = "utmstack_as400_collector_service%s" + VersionPath = filepath.Join(utils.GetMyPath(), "version.json") +) diff --git a/as400/updater/go.mod b/as400/updater/go.mod new file mode 100644 index 000000000..6b8aba325 --- /dev/null +++ b/as400/updater/go.mod @@ -0,0 +1,49 @@ +module github.com/utmstack/UTMStack/as400/updater + +go 1.25.5 + +require ( + github.com/kardianos/service v1.2.4 + github.com/threatwinds/go-sdk v1.1.1 + github.com/threatwinds/logger v1.2.3 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + github.com/bytedance/gopkg v0.1.3 // indirect + github.com/bytedance/sonic v1.14.2 // indirect + github.com/bytedance/sonic/loader v0.4.0 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/gabriel-vasile/mimetype v1.4.12 // indirect + github.com/gin-contrib/sse v1.1.0 // indirect + github.com/gin-gonic/gin v1.11.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.30.1 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/goccy/go-yaml v1.19.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/quic-go/qpack v0.6.0 // indirect + github.com/quic-go/quic-go v0.59.0 // indirect + github.com/tidwall/gjson v1.18.0 // indirect + github.com/tidwall/match v1.2.0 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.3.1 // indirect + go.yaml.in/yaml/v2 v2.4.3 // indirect + 
golang.org/x/arch v0.23.0 // indirect + golang.org/x/crypto v0.47.0 // indirect + golang.org/x/net v0.49.0 // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/text v0.33.0 // indirect + google.golang.org/protobuf v1.36.11 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect + sigs.k8s.io/yaml v1.6.0 // indirect +) diff --git a/as400/updater/go.sum b/as400/updater/go.sum new file mode 100644 index 000000000..b8177de20 --- /dev/null +++ b/as400/updater/go.sum @@ -0,0 +1,121 @@ +github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= +github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= +github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= +github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= +github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= +github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= +github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= 
+github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk= +github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= +github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM= +github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kardianos/service v1.2.4 h1:XNlGtZOYNx2u91urOdg/Kfmc+gfmuIo1Dd3rEi2OgBk= 
+github.com/kardianos/service v1.2.4/go.mod h1:E4V9ufUuY82F7Ztlu1eN9VXWIQxg8NoLQlmFe0MtrXc= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 
+github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8= +github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII= +github.com/quic-go/quic-go v0.59.0 h1:OLJkp1Mlm/aS7dpKgTc6cnpynnD2Xg7C1pwL6vy/SAw= +github.com/quic-go/quic-go v0.59.0/go.mod h1:upnsH4Ju1YkqpLXC305eW3yDZ4NfnNbmQRCMWS58IKU= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/threatwinds/go-sdk v1.1.1 h1:K1rtmjhYokGvj5z/o/wwn8gD6LvkgKfxv2MyMUezRUQ= +github.com/threatwinds/go-sdk v1.1.1/go.mod h1:N19iqJPaNAoWwZTCuFvV0hIvT0D1jOR1KkKYgAoPLmw= +github.com/threatwinds/logger v1.2.3 h1:V2SVAXzbq+/huCvIWOfqzMTH+WBHJxankyBgVG2hy1Y= +github.com/threatwinds/logger v1.2.3/go.mod h1:N+bJKvF4FQNJZLfQpVYWpr6D8iEAFnAQfHYqH5iR1TI= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod 
h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/match v1.2.0 h1:0pt8FlkOwjN2fPt4bIl4BoNxb98gGHN2ObFEDkrfZnM= +github.com/tidwall/match v1.2.0/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY= +github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= +go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= +go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/arch v0.23.0 h1:lKF64A2jF6Zd8L0knGltUnegD62JMFBiCPBmQpToHhg= +golang.org/x/arch v0.23.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= +golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.40.0 
h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs= +sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4= diff --git a/as400/updater/main.go b/as400/updater/main.go new file mode 100644 index 000000000..2fe98c9ee --- /dev/null +++ b/as400/updater/main.go @@ -0,0 +1,44 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/utmstack/UTMStack/as400/updater/config" + "github.com/utmstack/UTMStack/as400/updater/service" + "github.com/utmstack/UTMStack/as400/updater/utils" +) + +func main() { + path := utils.GetMyPath() + utils.InitLogger(filepath.Join(path, "logs", config.SERV_LOG)) + + if len(os.Args) > 1 { + switch os.Args[1] { + case "install": + fmt.Println("Installing UTMStack AS400 Updater 
service...") + + fmt.Print(("Creating service ... ")) + service.InstallService() + fmt.Println("[OK]") + + fmt.Println("UTMStack AS400 Updater service installed correctly") + return + case "uninstall": + fmt.Println("Uninstalling UTMStack AS400 Updater service...") + service.UninstallService() + fmt.Println("Service uninstalled successfully") + return + case "start": + fmt.Println("Starting UTMStack AS400 Updater service...") + return + case "stop": + fmt.Println("Stopping UTMStack AS400 Updater service...") + // Will be handled by systemd + return + } + } + + service.RunService() +} diff --git a/as400/updater/models/version.go b/as400/updater/models/version.go new file mode 100644 index 000000000..ad40993be --- /dev/null +++ b/as400/updater/models/version.go @@ -0,0 +1,6 @@ +package models + +type Version struct { + Version string `json:"version"` + JarVersion string `json:"jar_version"` +} diff --git a/as400/updater/service/config.go b/as400/updater/service/config.go new file mode 100644 index 000000000..00f13bb8e --- /dev/null +++ b/as400/updater/service/config.go @@ -0,0 +1,15 @@ +package service + +import ( + "github.com/kardianos/service" +) + +func GetConfigServ() *service.Config { + svcConfig := &service.Config{ + Name: "UTMStackAS400Updater", + DisplayName: "UTMStack AS400 Updater", + Description: "UTMStack AS400 Collector Updater Service", + } + + return svcConfig +} diff --git a/as400/updater/service/install.go b/as400/updater/service/install.go new file mode 100644 index 000000000..bae17e789 --- /dev/null +++ b/as400/updater/service/install.go @@ -0,0 +1,52 @@ +package service + +import ( + "fmt" + "os" + + "github.com/kardianos/service" + "github.com/utmstack/UTMStack/as400/updater/utils" +) + +func InstallService() { + svcConfig := GetConfigServ() + prg := new(program) + newService, err := service.New(prg, svcConfig) + if err != nil { + fmt.Println("\nError creating new service: ", err) + os.Exit(1) + } + err = newService.Install() + if err != nil { + 
fmt.Println("\nError installing new service: ", err) + os.Exit(1) + } + + err = newService.Start() + if err != nil { + fmt.Println("\nError starting new service: ", err) + os.Exit(1) + } + utils.UpdaterLogger.Info("updater service installed successfully") +} + +func UninstallService() { + svcConfig := GetConfigServ() + prg := new(program) + newService, err := service.New(prg, svcConfig) + if err != nil { + fmt.Println("\nError creating new service: ", err) + os.Exit(1) + } + + err = newService.Stop() + if err != nil { + fmt.Println("\nWarning stopping service: ", err) + } + + err = newService.Uninstall() + if err != nil { + fmt.Println("\nError uninstalling service: ", err) + os.Exit(1) + } +} diff --git a/as400/updater/service/service.go b/as400/updater/service/service.go new file mode 100644 index 000000000..f598f5f9c --- /dev/null +++ b/as400/updater/service/service.go @@ -0,0 +1,43 @@ +package service + +import ( + "github.com/kardianos/service" + "github.com/utmstack/UTMStack/as400/updater/config" + "github.com/utmstack/UTMStack/as400/updater/updates" + "github.com/utmstack/UTMStack/as400/updater/utils" +) + +type program struct{} + +func (p *program) Start(s service.Service) error { + go p.run() + return nil +} + +func (p *program) Stop(s service.Service) error { + return nil +} + +func (p *program) run() { + cnf, err := config.GetCurrentConfig() + if err != nil { + utils.UpdaterLogger.ErrorF("error getting config: %v", err) + return + } + + updates.UpdateDependencies(cnf) +} + +func RunService() { + svcConfig := GetConfigServ() + prg := new(program) + newService, err := service.New(prg, svcConfig) + if err != nil { + utils.UpdaterLogger.Fatal("error creating service: %v", err) + } + + err = newService.Run() + if err != nil { + utils.UpdaterLogger.Fatal("error running service: %v", err) + } +} diff --git a/as400/updater/updates/update.go b/as400/updater/updates/update.go new file mode 100644 index 000000000..d8cac2d7c --- /dev/null +++ 
b/as400/updater/updates/update.go @@ -0,0 +1,243 @@ +package updates + +import ( + "fmt" + "os" + "path/filepath" + "time" + + "github.com/utmstack/UTMStack/as400/updater/config" + "github.com/utmstack/UTMStack/as400/updater/models" + "github.com/utmstack/UTMStack/as400/updater/utils" +) + +const ( + checkEvery = 5 * time.Minute +) + +var currentVersion = models.Version{} + +func UpdateDependencies(cnf *config.Config) { + if utils.CheckIfPathExist(config.VersionPath) { + err := utils.ReadJson(config.VersionPath, ¤tVersion) + if err != nil { + utils.UpdaterLogger.ErrorF("error reading version file: %v", err) + } + } + + for { + time.Sleep(checkEvery) + + if err := utils.DownloadFile(fmt.Sprintf(config.DependUrl, cnf.Server, config.DependenciesPort, "version.json"), map[string]string{}, "version_new.json", utils.GetMyPath(), cnf.SkipCertValidation); err != nil { + utils.UpdaterLogger.ErrorF("error downloading version.json: %v", err) + continue + } + newVersion := models.Version{} + err := utils.ReadJson(filepath.Join(utils.GetMyPath(), "version_new.json"), &newVersion) + if err != nil { + utils.UpdaterLogger.ErrorF("error reading version file: %v", err) + continue + } + + binaryNeedsUpdate := newVersion.Version != currentVersion.Version + jarNeedsUpdate := newVersion.JarVersion != currentVersion.JarVersion + + if binaryNeedsUpdate || jarNeedsUpdate { + if binaryNeedsUpdate { + utils.UpdaterLogger.Info("New version of agent found: %s -> %s", currentVersion.Version, newVersion.Version) + } + if jarNeedsUpdate { + utils.UpdaterLogger.Info("New version of JAR found: %s -> %s", currentVersion.JarVersion, newVersion.JarVersion) + } + + if binaryNeedsUpdate { + if err := utils.DownloadFile(fmt.Sprintf(config.DependUrl, cnf.Server, config.DependenciesPort, fmt.Sprintf(config.ServiceFile, "")), map[string]string{}, fmt.Sprintf(config.ServiceFile, "_new"), utils.GetMyPath(), cnf.SkipCertValidation); err != nil { + utils.UpdaterLogger.ErrorF("error downloading agent: %v", err) 
+ os.Remove(filepath.Join(utils.GetMyPath(), "version_new.json")) + continue + } + + if err = utils.Execute("chmod", utils.GetMyPath(), "-R", "755", filepath.Join(utils.GetMyPath(), fmt.Sprintf(config.ServiceFile, "_new"))); err != nil { + utils.UpdaterLogger.ErrorF("error executing chmod: %v", err) + } + } + + if jarNeedsUpdate { + if err := utils.DownloadFile(fmt.Sprintf(config.DependUrl, cnf.Server, config.DependenciesPort, config.JAR_FILE), map[string]string{}, config.JAR_FILE+"_new", utils.GetMyPath(), cnf.SkipCertValidation); err != nil { + utils.UpdaterLogger.ErrorF("error downloading JAR: %v", err) + if binaryNeedsUpdate { + os.Remove(filepath.Join(utils.GetMyPath(), fmt.Sprintf(config.ServiceFile, "_new"))) + } + os.Remove(filepath.Join(utils.GetMyPath(), "version_new.json")) + continue + } + } + + utils.UpdaterLogger.Info("Starting update process...") + err = runUpdateProcess(binaryNeedsUpdate, jarNeedsUpdate) + if err != nil { + utils.UpdaterLogger.ErrorF("error updating service: %v", err) + os.Remove(filepath.Join(utils.GetMyPath(), "version_new.json")) + if binaryNeedsUpdate { + os.Remove(filepath.Join(utils.GetMyPath(), fmt.Sprintf(config.ServiceFile, "_new"))) + } + if jarNeedsUpdate { + os.Remove(filepath.Join(utils.GetMyPath(), config.JAR_FILE+"_new")) + } + } else { + utils.UpdaterLogger.Info("Update completed successfully") + if utils.CheckIfPathExist(config.VersionPath) { + err := utils.ReadJson(config.VersionPath, ¤tVersion) + if err != nil { + utils.UpdaterLogger.ErrorF("error reading updated version file: %v", err) + } + } + } + } else { + os.Remove(filepath.Join(utils.GetMyPath(), "version_new.json")) + } + } +} + +func runUpdateProcess(updateBinary, updateJar bool) error { + path := utils.GetMyPath() + + newBin := fmt.Sprintf(config.ServiceFile, "_new") + oldBin := fmt.Sprintf(config.ServiceFile, "") + backupBin := fmt.Sprintf(config.ServiceFile, ".old") + + if updateBinary { + agentNew := filepath.Join(path, newBin) + if _, err := 
os.Stat(agentNew); err != nil { + return fmt.Errorf("no _new binary found to update") + } + } + + if updateJar { + jarNew := filepath.Join(path, config.JAR_FILE+"_new") + if _, err := os.Stat(jarNew); err != nil { + return fmt.Errorf("no _new JAR found to update") + } + utils.UpdaterLogger.Info("New JAR file found, will be updated") + } + + if err := utils.StopService(config.SERV_COLLECTOR_NAME); err != nil { + return fmt.Errorf("error stopping agent: %v", err) + } + + time.Sleep(10 * time.Second) + + if updateBinary { + utils.UpdaterLogger.Info("Updating binary...") + backupPath := filepath.Join(path, backupBin) + if utils.CheckIfPathExist(backupPath) { + utils.UpdaterLogger.Info("Removing previous backup: %s", backupPath) + if err := os.Remove(backupPath); err != nil { + utils.UpdaterLogger.ErrorF("could not remove old backup: %v", err) + } + } + + if err := os.Rename(filepath.Join(path, oldBin), backupPath); err != nil { + return fmt.Errorf("error backing up old binary: %v", err) + } + + if err := os.Rename(filepath.Join(path, newBin), filepath.Join(path, oldBin)); err != nil { + os.Rename(backupPath, filepath.Join(path, oldBin)) + return fmt.Errorf("error renaming new binary: %v", err) + } + utils.UpdaterLogger.Info("Binary updated successfully") + } + + if updateJar { + utils.UpdaterLogger.Info("Updating JAR file...") + jarBackup := filepath.Join(path, config.JAR_FILE+".old") + jarCurrent := filepath.Join(path, config.JAR_FILE) + + if utils.CheckIfPathExist(jarBackup) { + if err := os.Remove(jarBackup); err != nil { + utils.UpdaterLogger.ErrorF("could not remove old JAR backup: %v", err) + } + } + + if utils.CheckIfPathExist(jarCurrent) { + if err := os.Rename(jarCurrent, jarBackup); err != nil { + utils.UpdaterLogger.ErrorF("error backing up JAR: %v", err) + } + } + + jarNew := filepath.Join(path, config.JAR_FILE+"_new") + if err := os.Rename(jarNew, jarCurrent); err != nil { + utils.UpdaterLogger.ErrorF("error installing new JAR: %v", err) + if 
utils.CheckIfPathExist(jarBackup) { + os.Rename(jarBackup, jarCurrent) + } + } else { + utils.UpdaterLogger.Info("JAR file updated successfully") + } + } + + if err := utils.StartService(config.SERV_COLLECTOR_NAME); err != nil { + rollbackAgent(oldBin, backupBin, path, updateBinary, updateJar) + return fmt.Errorf("error starting agent: %v", err) + } + + time.Sleep(30 * time.Second) + + isHealthy, err := utils.CheckIfServiceIsActive(config.SERV_COLLECTOR_NAME) + if err != nil || !isHealthy { + utils.UpdaterLogger.Info("New version failed health check, rolling back...") + rollbackAgent(oldBin, backupBin, path, updateBinary, updateJar) + return fmt.Errorf("rollback completed: new version failed health check") + } + + utils.UpdaterLogger.Info("Health check passed for agent") + + versionNewPath := filepath.Join(path, "version_new.json") + versionPath := filepath.Join(path, "version.json") + if utils.CheckIfPathExist(versionNewPath) { + if err := os.Rename(versionNewPath, versionPath); err != nil { + utils.UpdaterLogger.ErrorF("error updating version file: %v", err) + } else { + utils.UpdaterLogger.Info("Version file updated successfully") + } + } + + return nil +} + +func rollbackAgent(currentBin, backupBin, path string, binaryWasUpdated, jarWasUpdated bool) { + utils.UpdaterLogger.Info("Rolling back agent to previous version...") + + utils.StopService(config.SERV_COLLECTOR_NAME) + time.Sleep(5 * time.Second) + + if binaryWasUpdated { + utils.UpdaterLogger.Info("Rolling back binary...") + os.Remove(filepath.Join(path, currentBin)) + os.Rename(filepath.Join(path, backupBin), filepath.Join(path, currentBin)) + utils.UpdaterLogger.Info("Binary rolled back successfully") + } + + if jarWasUpdated { + utils.UpdaterLogger.Info("Rolling back JAR file...") + jarCurrent := filepath.Join(path, config.JAR_FILE) + jarBackup := filepath.Join(path, config.JAR_FILE+".old") + + if utils.CheckIfPathExist(jarBackup) { + os.Remove(jarCurrent) + os.Rename(jarBackup, jarCurrent) + 
utils.UpdaterLogger.Info("JAR file rolled back successfully") + } + } + + utils.StartService(config.SERV_COLLECTOR_NAME) + os.Remove(filepath.Join(path, "version_new.json")) + if jarWasUpdated { + os.Remove(filepath.Join(path, config.JAR_FILE+"_new")) + } + if binaryWasUpdated { + os.Remove(filepath.Join(path, fmt.Sprintf(config.ServiceFile, "_new"))) + } + + utils.UpdaterLogger.Info("Rollback completed for agent") +} diff --git a/as400/updater/utils/cmd.go b/as400/updater/utils/cmd.go new file mode 100644 index 000000000..eae4140d9 --- /dev/null +++ b/as400/updater/utils/cmd.go @@ -0,0 +1,39 @@ +package utils + +import ( + "errors" + "os/exec" + + twsdk "github.com/threatwinds/go-sdk/entities" +) + +func ExecuteWithResult(c string, dir string, arg ...string) (string, bool) { + cmd := exec.Command(c, arg...) + + cmd.Dir = dir + if errors.Is(cmd.Err, exec.ErrDot) { + cmd.Err = nil + } + + out, err := cmd.Output() + if err != nil { + return string(out[:]) + err.Error(), true + } + + if string(out[:]) == "" { + return "Command executed successfully but no output", false + } + validUtf8Out, _, err := twsdk.ValidateString(string(out[:]), false) + if err != nil { + return string(out[:]) + err.Error(), true + } + + return validUtf8Out, false +} + +func Execute(c string, dir string, arg ...string) error { + cmd := exec.Command(c, arg...) 
+ cmd.Dir = dir + + return cmd.Run() +} diff --git a/as400/updater/utils/download.go b/as400/updater/utils/download.go new file mode 100644 index 000000000..055c44881 --- /dev/null +++ b/as400/updater/utils/download.go @@ -0,0 +1,48 @@ +package utils + +import ( + "crypto/tls" + "fmt" + "io" + "net/http" + "os" + "path/filepath" +) + +func DownloadFile(url string, headers map[string]string, fileName string, path string, skipTlsVerification bool) error { + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return fmt.Errorf("error creating new request: %v", err) + } + for key, value := range headers { + req.Header.Add(key, value) + } + + client := &http.Client{} + client.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: skipTlsVerification}, + } + + resp, err := client.Do(req) + if err != nil { + return fmt.Errorf("error sending request: %v", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("expected status %d; got %d", http.StatusOK, resp.StatusCode) + } + + out, err := os.Create(filepath.Join(path, fileName)) + if err != nil { + return fmt.Errorf("error creating file: %v", err) + } + defer func() { _ = out.Close() }() + + _, err = io.Copy(out, resp.Body) + if err != nil { + return fmt.Errorf("error copying file: %v", err) + } + + return nil +} diff --git a/as400/updater/utils/files.go b/as400/updater/utils/files.go new file mode 100644 index 000000000..6b0180721 --- /dev/null +++ b/as400/updater/utils/files.go @@ -0,0 +1,92 @@ +package utils + +import ( + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" +) + +func GetMyPath() string { + ex, err := os.Executable() + if err != nil { + return "" + } + exPath := filepath.Dir(ex) + return exPath +} + +func CreatePathIfNotExist(path string) error { + if _, err := os.Stat(path); os.IsNotExist(err) { + if err := os.MkdirAll(path, 0755); err != nil { + return fmt.Errorf("error creating path: %v", err) + } + } 
else if err != nil { + return fmt.Errorf("error checking path: %v", err) + } + return nil +} + +func CheckIfPathExist(path string) bool { + if _, err := os.Stat(path); os.IsNotExist(err) { + return false + } + return true +} + +func ReadJson(fileName string, data interface{}) error { + content, err := os.ReadFile(fileName) + if err != nil { + return err + } + + err = json.Unmarshal(content, data) + if err != nil { + return err + } + + return nil +} + +func WriteStringToFile(fileName string, body string) error { + file, err := os.OpenFile(fileName, os.O_CREATE|os.O_RDWR|os.O_TRUNC, os.ModePerm) + if err != nil { + return err + } + defer func() { _ = file.Close() }() + + _, err = file.WriteString(body) + return err +} + +func WriteJSON(path string, data interface{}) error { + jsonData, err := json.MarshalIndent(data, "", " ") + if err != nil { + return err + } + + err = WriteStringToFile(path, string(jsonData)) + if err != nil { + return err + } + + return nil +} + +func copyFile(src, dst string) error { + sourceFile, err := os.Open(src) + if err != nil { + return err + } + defer sourceFile.Close() + + destFile, err := os.Create(dst) + if err != nil { + return err + } + defer destFile.Close() + + _, err = io.Copy(destFile, sourceFile) + return err +} diff --git a/as400/updater/utils/logger.go b/as400/updater/utils/logger.go new file mode 100644 index 000000000..fc0cbb757 --- /dev/null +++ b/as400/updater/utils/logger.go @@ -0,0 +1,20 @@ +package utils + +import ( + "sync" + + "github.com/threatwinds/logger" +) + +var ( + UpdaterLogger *logger.Logger + loggerOnceInstance sync.Once +) + +func InitLogger(filename string) { + loggerOnceInstance.Do(func() { + UpdaterLogger = logger.NewLogger( + &logger.Config{Format: "text", Level: 100, Output: filename, Retries: 3, Wait: 5}, + ) + }) +} diff --git a/as400/updater/utils/services.go b/as400/updater/utils/services.go new file mode 100644 index 000000000..26b73bd18 --- /dev/null +++ b/as400/updater/utils/services.go @@ -0,0 
+1,55 @@ +package utils + +import ( + "fmt" + "strings" +) + +func CheckIfServiceIsActive(serv string) (bool, error) { + path := GetMyPath() + output, errB := ExecuteWithResult("systemctl", path, "is-active", serv) + if errB { + return false, nil + } + serviceStatus := strings.ToLower(strings.TrimSpace(output)) + return serviceStatus == "active", nil +} + +func RestartService(serv string) error { + path := GetMyPath() + isRunning, err := CheckIfServiceIsActive(serv) + if err != nil { + return fmt.Errorf("error checking if %s service is active: %v", serv, err) + } + + if isRunning { + err := Execute("systemctl", path, "restart", serv) + if err != nil { + return fmt.Errorf("error restarting service: %v", err) + } + } else { + err := Execute("systemctl", path, "start", serv) + if err != nil { + return fmt.Errorf("error starting service: %v", err) + } + } + return nil +} + +func StopService(name string) error { + path := GetMyPath() + err := Execute("systemctl", path, "stop", name) + if err != nil { + return fmt.Errorf("error stopping service: %v", err) + } + return nil +} + +func StartService(name string) error { + path := GetMyPath() + err := Execute("systemctl", path, "start", name) + if err != nil { + return fmt.Errorf("error starting service: %v", err) + } + return nil +} diff --git a/as400/updater/utils/zip.go b/as400/updater/utils/zip.go new file mode 100644 index 000000000..ecb690a38 --- /dev/null +++ b/as400/updater/utils/zip.go @@ -0,0 +1,52 @@ +package utils + +import ( + "archive/zip" + "io" + "os" + "path" + "path/filepath" +) + +func Unzip(zipFile, destPath string) error { + archive, err := zip.OpenReader(zipFile) + if err != nil { + return err + } + defer archive.Close() + + for _, f := range archive.File { + err := func() error { + filePath := path.Join(destPath, f.Name) + if f.FileInfo().IsDir() { + os.MkdirAll(filePath, os.ModePerm) + return nil + } + if err := os.MkdirAll(filepath.Dir(filePath), os.ModePerm); err != nil { + return err + } + + 
dstFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + if err != nil { + return err + } + defer dstFile.Close() + + fileInArchive, err := f.Open() + if err != nil { + return err + } + defer fileInArchive.Close() + + if _, err := io.Copy(dstFile, fileInArchive); err != nil { + return err + } + + return nil + }() + if err != nil { + return err + } + } + return nil +} diff --git a/as400/updates/dependencies.go b/as400/updates/dependencies.go new file mode 100644 index 000000000..58ec14490 --- /dev/null +++ b/as400/updates/dependencies.go @@ -0,0 +1,34 @@ +package updates + +import ( + "fmt" + + "github.com/utmstack/UTMStack/as400/config" + "github.com/utmstack/UTMStack/as400/utils" +) + +func DownloadVersion(address string, insecure bool) error { + if err := utils.DownloadFile(fmt.Sprintf(config.DependUrl, address, config.DependenciesPort, "version.json"), map[string]string{}, "version.json", utils.GetMyPath(), insecure); err != nil { + return fmt.Errorf("error downloading version.json : %v", err) + } + + return nil + +} + +func DownloadUpdater(address string, insecure bool) error { + if err := utils.DownloadFile(fmt.Sprintf(config.DependUrl, address, config.DependenciesPort, "utmstack_as400_updater_service"), map[string]string{}, "utmstack_as400_updater_service", utils.GetMyPath(), insecure); err != nil { + return fmt.Errorf("error downloading utmstack_as400_updater_service : %v", err) + } + + return nil +} + +func DownloadJar(address string, insecure bool) error { + if err := utils.DownloadFile(fmt.Sprintf(config.DependUrl, address, config.DependenciesPort, "as400-collector.jar"), map[string]string{}, "as400-collector.jar", utils.GetMyPath(), insecure); err != nil { + return fmt.Errorf("error downloading as400-collector.jar : %v", err) + } + + return nil + +} diff --git a/as400/utils/address.go b/as400/utils/address.go new file mode 100644 index 000000000..8a6a4945d --- /dev/null +++ b/as400/utils/address.go @@ -0,0 +1,24 @@ +package 
utils + +import ( + "errors" + "net" +) + +func GetIPAddress() (string, error) { + ipAddress, err := net.InterfaceAddrs() + if err != nil { + return "", err + } + + for _, addr := range ipAddress { + ipNet, ok := addr.(*net.IPNet) + if ok && !ipNet.IP.IsLoopback() { + if ipNet.IP.To4() != nil { + return ipNet.IP.String(), nil + } + } + } + + return "", errors.New("failed to get IP address") +} diff --git a/as400/utils/banner.go b/as400/utils/banner.go new file mode 100644 index 000000000..cb7c45f59 --- /dev/null +++ b/as400/utils/banner.go @@ -0,0 +1,17 @@ +package utils + +import "fmt" + +func PrintBanner() { + banner := "\n" + + "..........................................................................\n" + + " _ _ _ _____ _ _ \n" + + " | | | | | | / ____| | | | | \n" + + " | | | | | |_ _ __ ___ | (___ | |_ __ _ ___ | | __ \n" + + " | | | | | __| | '_ ` _ \\ \\___ \\ | __| / _` | / __| | |/ / \n" + + " | |__| | | |_ | | | | | | ____) | | |_ | (_| | | (__ | < \n" + + " \\____/ \\__| |_| |_| |_| |_____/ \\__| \\__,_| \\___| |_|\\_\\ \n" + + ".........................................................................." + + fmt.Println(banner) +} diff --git a/as400/utils/cmd.go b/as400/utils/cmd.go new file mode 100644 index 000000000..5de1f108c --- /dev/null +++ b/as400/utils/cmd.go @@ -0,0 +1,15 @@ +package utils + +import ( + "os" + "os/exec" +) + +func Execute(c string, dir string, arg ...string) error { + cmd := exec.Command(c, arg...) 
+ cmd.Dir = dir + cmd.Env = append(os.Environ(), "DEBIAN_FRONTEND=noninteractive") + cmd.Stdin = nil + + return cmd.Run() +} diff --git a/as400/utils/crypt.go b/as400/utils/crypt.go new file mode 100644 index 000000000..48fa35deb --- /dev/null +++ b/as400/utils/crypt.go @@ -0,0 +1,22 @@ +package utils + +import ( + "encoding/base64" +) + +func GenerateKey(baseKey string) ([]byte, error) { + info, err := GetOsInfo() + if err != nil { + return nil, Logger.ErrorF("error getting os info: %v", err) + } + + data := []byte(info.Hostname + info.Mac + info.OsType) + base64Key := base64.StdEncoding.EncodeToString(data) + return []byte(baseKey + base64Key), nil +} + +func GenerateKeyByUUID(baseKey string, uuid string) ([]byte, error) { + data := []byte(baseKey + uuid) + base64Key := base64.StdEncoding.EncodeToString(data) + return []byte(base64Key), nil +} diff --git a/as400/utils/delay.go b/as400/utils/delay.go new file mode 100644 index 000000000..e3000ea71 --- /dev/null +++ b/as400/utils/delay.go @@ -0,0 +1,11 @@ +package utils + +import "time" + +func IncrementReconnectDelay(delay time.Duration, maxReconnectDelay time.Duration) time.Duration { + delay *= 2 + if delay > maxReconnectDelay { + delay = maxReconnectDelay + } + return delay +} diff --git a/as400/utils/download.go b/as400/utils/download.go new file mode 100644 index 000000000..c57753cbf --- /dev/null +++ b/as400/utils/download.go @@ -0,0 +1,73 @@ +package utils + +import ( + "crypto/tls" + "fmt" + "io" + "net/http" + "os" + "path/filepath" +) + +func DownloadFile(url string, headers map[string]string, fileName string, path string, skipTlsVerification bool) error { + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return fmt.Errorf("error creating new request: %v", err) + } + for key, value := range headers { + req.Header.Add(key, value) + } + + client := &http.Client{} + client.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: skipTlsVerification}, + } + + resp, err 
:= client.Do(req) + if err != nil { + return fmt.Errorf("error sending request: %v", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("expected status %d; got %d", http.StatusOK, resp.StatusCode) + } + + out, err := os.Create(filepath.Join(path, fileName)) + if err != nil { + return fmt.Errorf("error creating file: %v", err) + } + defer func() { _ = out.Close() }() + + _, err = io.Copy(out, resp.Body) + if err != nil { + return fmt.Errorf("error copying file: %v", err) + } + + return nil +} + +func InstallJava() error { + err := Execute("apt-get", "", "update") + if err != nil { + return err + } + err = Execute("apt", "", "install", "-y", "openjdk-17-jdk") + if err != nil { + return err + } + return nil + +} + +func UninstallJava() error { + err := Execute("apt", "", "remove", "-y", "openjdk-17-jdk") + if err != nil { + return err + } + err = Execute("apt", "", "autoremove", "-y") + if err != nil { + return err + } + return nil +} diff --git a/as400/utils/files.go b/as400/utils/files.go new file mode 100644 index 000000000..24b45e352 --- /dev/null +++ b/as400/utils/files.go @@ -0,0 +1,110 @@ +package utils + +import ( + "encoding/json" + "os" + "path/filepath" + + "gopkg.in/yaml.v2" +) + +func GetMyPath() string { + ex, err := os.Executable() + if err != nil { + return "" + } + exPath := filepath.Dir(ex) + return exPath +} + +func ReadYAML(path string, result interface{}) error { + file, err := os.Open(path) + if err != nil { + return err + } + defer func() { _ = file.Close() }() + + d := yaml.NewDecoder(file) + if err := d.Decode(result); err != nil { + return err + } + + return nil +} + +func WriteStringToFile(fileName string, body string) error { + // Create directory if it doesn't exist + dir := filepath.Dir(fileName) + if err := os.MkdirAll(dir, 0755); err != nil { + return err + } + + file, err := os.OpenFile(fileName, os.O_CREATE|os.O_RDWR|os.O_TRUNC, os.ModePerm) + if err != nil { + return err 
+ } + defer func() { _ = file.Close() }() + + _, err = file.WriteString(body) + return err +} + +func WriteYAML(url string, data interface{}) error { + config, err := yaml.Marshal(data) + if err != nil { + return err + } + + err = WriteStringToFile(url, string(config)) + if err != nil { + return err + } + + return nil +} + +func WriteJSON(path string, data interface{}) error { + jsonData, err := json.MarshalIndent(data, "", " ") + if err != nil { + return err + } + + err = WriteStringToFile(path, string(jsonData)) + if err != nil { + return err + } + + return nil +} + +func ReadJson(fileName string, data interface{}) error { + content, err := os.ReadFile(fileName) + if err != nil { + return err + } + + err = json.Unmarshal(content, data) + if err != nil { + return err + } + + return nil +} + +func CreatePathIfNotExist(path string) error { + if _, err := os.Stat(path); os.IsNotExist(err) { + if err := os.MkdirAll(path, 0755); err != nil { + return Logger.ErrorF("error creating path: %v", err) + } + } else if err != nil { + return Logger.ErrorF("error checking path: %v", err) + } + return nil +} + +func CheckIfPathExist(path string) bool { + if _, err := os.Stat(path); os.IsNotExist(err) { + return false + } + return true +} diff --git a/as400/utils/host.go b/as400/utils/host.go new file mode 100644 index 000000000..e79c972c3 --- /dev/null +++ b/as400/utils/host.go @@ -0,0 +1,21 @@ +package utils + +import "net" + +func GetHostAliases(hostname string) ([]string, error) { + var aliases []string + addresses, err := net.LookupHost(hostname) + if err != nil { + return nil, err + } + + for _, address := range addresses { + newAliases, err := net.LookupAddr(address) + if err != nil { + return nil, err + } + aliases = append(aliases, newAliases...) 
+ } + + return aliases, nil +} diff --git a/as400/utils/logger.go b/as400/utils/logger.go new file mode 100644 index 000000000..a93ac7455 --- /dev/null +++ b/as400/utils/logger.go @@ -0,0 +1,44 @@ +package utils + +import ( + "path/filepath" + "sync" + + "github.com/threatwinds/logger" +) + +var ( + Logger *logger.Logger + loggerOnceInstance sync.Once + logLevelConfigFile = filepath.Join(GetMyPath(), "log_level.yml") + LogLevelMap = map[string]int{ + "debug": 100, + "info": 200, + "notice": 300, + "warning": 400, + "error": 500, + "critical": 502, + "alert": 509, + } +) + +type LogLevels struct { + Level string `yaml:"level"` +} + +func InitLogger(filename string) { + logLevel := LogLevels{} + err := ReadYAML(logLevelConfigFile, &logLevel) + if err != nil { + logLevel.Level = "info" + } + logLevelInt := 200 + if val, ok := LogLevelMap[logLevel.Level]; ok { + logLevelInt = val + } + loggerOnceInstance.Do(func() { + Logger = logger.NewLogger( + &logger.Config{Format: "text", Level: logLevelInt, Output: filename, Retries: 3, Wait: 5}, + ) + }) +} diff --git a/as400/utils/os.go b/as400/utils/os.go new file mode 100644 index 000000000..1b095729f --- /dev/null +++ b/as400/utils/os.go @@ -0,0 +1,62 @@ +package utils + +import ( + "os" + "os/user" + "strconv" + "strings" + + "github.com/elastic/go-sysinfo" +) + +type OSInfo struct { + Hostname string + OsType string + Platform string + CurrentUser string + Mac string + OsMajorVersion string + OsMinorVersion string + Aliases string + Addresses string +} + +func GetOsInfo() (OSInfo, error) { + var info OSInfo + + hostInfo, err := sysinfo.Host() + if err != nil { + return info, Logger.ErrorF("error getting host info: %v", err) + } + info.OsType = hostInfo.Info().OS.Type + info.Platform = hostInfo.Info().OS.Platform + info.Mac = strings.Join(hostInfo.Info().MACs, ",") + info.OsMajorVersion = strconv.Itoa(hostInfo.Info().OS.Major) + info.OsMinorVersion = strconv.Itoa(hostInfo.Info().OS.Minor) + info.Addresses = 
strings.Join(hostInfo.Info().IPs, ",") + + hostName, err := os.Hostname() + if err != nil { + return info, Logger.ErrorF("error getting hostname: %v", err) + } + info.Hostname = hostName + + currentUser, err := user.Current() + if err != nil { + return info, Logger.ErrorF("error getting user: %v", err) + } + info.CurrentUser = currentUser.Username + + aliases, err := GetHostAliases(hostInfo.Info().Hostname) + if err != nil { + aliases = aliases[:0] + aliases = append(aliases, "") + } + if len(aliases) == 1 && strings.Contains(aliases[0], "any") { + aliases = aliases[:0] + aliases = append(aliases, "") + } + info.Aliases = strings.Join(aliases, ",") + + return info, nil +} diff --git a/as400/utils/port.go b/as400/utils/port.go new file mode 100644 index 000000000..af1e06596 --- /dev/null +++ b/as400/utils/port.go @@ -0,0 +1,29 @@ +package utils + +import ( + "fmt" + "net" + "time" +) + +func ArePortsReachable(ip string, ports ...string) error { + var conn net.Conn + var err error + +external: + for _, port := range ports { + for i := 0; i < 3; i++ { + conn, err = net.DialTimeout("tcp", fmt.Sprintf("%s:%s", ip, port), 5*time.Second) + if err == nil { + conn.Close() + continue external + } + time.Sleep(5 * time.Second) + } + if err != nil { + return Logger.ErrorF("cannot connect to %s on port %s: %v", ip, port, err) + } + } + + return nil +} diff --git a/as400/utils/services.go b/as400/utils/services.go new file mode 100644 index 000000000..9e9a19808 --- /dev/null +++ b/as400/utils/services.go @@ -0,0 +1,71 @@ +package utils + +import ( + "fmt" + "os" +) + +func StopService(name string) error { + path := GetMyPath() + err := Execute("systemctl", path, "stop", name) + if err != nil { + return Logger.ErrorF("error stopping service: %v", err) + } + return nil +} + +func UninstallService(name string) error { + path := GetMyPath() + err := Execute("systemctl", path, "disable", name) + if err != nil { + return Logger.ErrorF("error uninstalling service: %v", err) + } + err = 
Execute("rm", "/etc/systemd/system/", "/etc/systemd/system/"+name+".service") + if err != nil { + return Logger.ErrorF("error uninstalling service: %v", err) + } + return nil +} + +func CheckIfServiceIsInstalled(serv string) (bool, error) { + path := GetMyPath() + err := Execute("systemctl", path, "status", serv) + return err == nil, nil +} + +func CreateLinuxService(serviceName string, execStart string) error { + servicePath := "/etc/systemd/system/" + serviceName + ".service" + if !CheckIfPathExist(servicePath) { + file, err := os.Create(servicePath) + if err != nil { + return Logger.ErrorF("error creating %s file: %v", servicePath, err) + } + defer func() { _ = file.Close() }() + + serviceContent := fmt.Sprintf(`[Unit] +Description=%s +After=network.target + +[Service] +ExecStart=%s +Restart=always + +[Install] +WantedBy=multi-user.target +`, serviceName, execStart) + + _, err = file.WriteString(serviceContent) + if err != nil { + return err + } + + err = file.Sync() + if err != nil { + return err + } + } else { + return Logger.ErrorF("service %s already exists", serviceName) + } + + return nil +} diff --git a/as400/utils/updater.go b/as400/utils/updater.go new file mode 100644 index 000000000..6c2543d7b --- /dev/null +++ b/as400/utils/updater.go @@ -0,0 +1,31 @@ +package utils + +import "fmt" + +func InstallUpdater() error { + updaterPath := GetMyPath() + "/utmstack_as400_updater_service" + + if err := Execute("chmod", GetMyPath(), "+x", updaterPath); err != nil { + return fmt.Errorf("error setting execute permissions: %v", err) + } + + if err := Execute(updaterPath, GetMyPath(), "install"); err != nil { + return fmt.Errorf("error installing updater service: %v", err) + } + + return nil +} + +func UninstallUpdater() error { + updaterPath := GetMyPath() + "/utmstack_as400_updater_service" + + if !CheckIfPathExist(updaterPath) { + return nil + } + + if err := Execute(updaterPath, GetMyPath(), "uninstall"); err != nil { + return fmt.Errorf("error uninstalling 
updater service: %v", err) + } + + return nil +} diff --git a/as400/version.json b/as400/version.json new file mode 100644 index 000000000..ef90da4ab --- /dev/null +++ b/as400/version.json @@ -0,0 +1,4 @@ +{ + "version": "1.0.0", + "jar_version": "1.0.0" +} diff --git a/backend/mvnw b/backend/mvnw old mode 100644 new mode 100755 diff --git a/backend/pom.xml b/backend/pom.xml index 9bead905b..022fdd657 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -8,6 +8,7 @@ com.atlasinside utmstack ${revision} + war UTMStack-API @@ -300,7 +301,7 @@ io.grpc grpc-okhttp - ${io.grpc.version} + ${io.grpc.version} io.grpc @@ -334,13 +335,6 @@ protobuf-java ${protoc.version} - - - com.utmstack.grpc.jclient - collector-client-4j - 1.2.13 - - org.springframework.boot diff --git a/backend/src/main/java/com/park/utmstack/config/CollectorConfiguration.java b/backend/src/main/java/com/park/utmstack/config/CollectorConfiguration.java deleted file mode 100644 index ec16e1cee..000000000 --- a/backend/src/main/java/com/park/utmstack/config/CollectorConfiguration.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.park.utmstack.config; - -import com.utmstack.grpc.connection.GrpcConnection; -import com.utmstack.grpc.exception.GrpcConnectionException; -import com.utmstack.grpc.jclient.config.interceptors.impl.GrpcEmptyAuthInterceptor; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class CollectorConfiguration { - private GrpcConnection collectorConnection; - - @Value("${grpc.server.address}") - private String serverAddress; - - @Value("${grpc.server.port}") - private Integer serverPort; - - @Bean - public GrpcConnection collectorConnection() throws GrpcConnectionException { - this.collectorConnection = new GrpcConnection(); - this.collectorConnection.createChannel(serverAddress, serverPort, new GrpcEmptyAuthInterceptor()); - return 
// ---- patch context (unchanged intent): GrpcConfiguration.java drops the
// now-unused io.grpc.ManagedChannelBuilder import; ApplicationEventType
// gains COLLECTOR_DELETE_ATTEMPT/SUCCESS and RESET_USER_PASSWORD_ATTEMPT/
// SUCCESS before UNDEFINED.

// ---- file: backend/src/main/java/com/park/utmstack/domain/collector/validators/UniqueServerName.java ----
package com.park.utmstack.domain.collector.validators;

import javax.validation.Constraint;
import javax.validation.Payload;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Field-level constraint asserting that a collector's configuration list
 * contains no duplicate "Hostname" values.
 */
@Target({ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
@Constraint(validatedBy = UniqueServerNameValidator.class)
public @interface UniqueServerName {
    String message() default "Server name must be unique.";

    // FIX(review): the patch text shows raw "Class[]" — the generic type
    // parameters were stripped (markup-sanitized "<...>"). The Bean
    // Validation contract requires exactly these signatures.
    Class<?>[] groups() default {};

    Class<? extends Payload>[] payload() default {};
}

// ---- file: backend/src/main/java/com/park/utmstack/domain/collector/validators/UniqueServerNameValidator.java ----
package com.park.utmstack.domain.collector.validators;

import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Validates {@link UniqueServerName}: rejects null lists and lists whose
 * "Hostname"-named entries contain any repeated configuration value.
 * FIX(review): generic parameters on ConstraintValidator and the method
 * signature were stripped in the patch text; reconstructed to match the
 * annotation's target type.
 */
public class UniqueServerNameValidator
        implements ConstraintValidator<UniqueServerName, List<UtmModuleGroupConfiguration>> {

    @Override
    public boolean isValid(List<UtmModuleGroupConfiguration> keys, ConstraintValidatorContext context) {
        if (keys == null) {
            return false;
        }

        long duplicates = keys.stream()
            .filter(k -> "Hostname".equals(k.getConfName()))
            .collect(Collectors.groupingBy(UtmModuleGroupConfiguration::getConfValue, Collectors.counting()))
            .values().stream()
            .filter(count -> count > 1)
            .count();

        return duplicates == 0;
    }
}

// ---- file: backend/src/main/java/com/park/utmstack/grpc/client/CollectorServiceClient.java ----
// (header only; the imports and class body continue beyond this span)
package com.park.utmstack.grpc.client;

import agent.CollectorOuterClass;
import agent.CollectorServiceGrpc;
com.park.utmstack.grpc.interceptor.CollectorAuthInterceptor; +import com.park.utmstack.service.grpc.AuthResponse; +import com.park.utmstack.service.grpc.DeleteRequest; +import com.park.utmstack.service.grpc.ListRequest; +import com.park.utmstack.util.exceptions.ApiException; +import io.grpc.ManagedChannel; +import io.grpc.StatusRuntimeException; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.HttpStatus; + +@Slf4j +public class CollectorServiceClient { + + private final ManagedChannel channel; + private final CollectorServiceGrpc.CollectorServiceBlockingStub baseStub; + + public CollectorServiceClient(ManagedChannel channel) { + this.channel = channel; + this.baseStub = CollectorServiceGrpc.newBlockingStub(channel); + } + + public CollectorOuterClass.ListCollectorResponse listCollectors(ListRequest request) { + String ctx = "CollectorServiceClient.listCollectors"; + try { + return baseStub.listCollector(request); + } catch (StatusRuntimeException e) { + log.error("{}: An error occurred while listing collectors: {}", ctx, e.getMessage()); + throw new ApiException(String.format("%s: gRPC error listing collectors", ctx), HttpStatus.INTERNAL_SERVER_ERROR); + } + } + + public AuthResponse deleteCollector(int collectorId, String collectorKey) { + + try { + CollectorServiceGrpc.CollectorServiceBlockingStub stub = + baseStub.withInterceptors( + new CollectorAuthInterceptor( + String.valueOf(collectorId), + collectorKey + ) + ); + + DeleteRequest request = DeleteRequest.newBuilder() + .setDeletedBy(String.valueOf(collectorId)) + .build(); + + return stub.deleteCollector(request); + + } catch (StatusRuntimeException e) { + log.error("{}: An error occurred while deleting collector:{}", collectorId, e.getMessage()); + throw new ApiException(String.format("%s: gRPC error deleting collector", collectorId), HttpStatus.INTERNAL_SERVER_ERROR); + } + } + + + public CollectorOuterClass.CollectorConfig getCollectorConfig(int collectorId, String collectorKey, 
CollectorOuterClass.CollectorModule module) { + + try { + CollectorServiceGrpc.CollectorServiceBlockingStub stub = + baseStub.withInterceptors( + new CollectorAuthInterceptor( + String.valueOf(collectorId), + collectorKey + ) + ); + + CollectorOuterClass.ConfigRequest request = + CollectorOuterClass.ConfigRequest.newBuilder() + .setModule(module) + .build(); + + return stub.getCollectorConfig(request); + + } catch (StatusRuntimeException e) { + log.error("{}: An error occurred while getting collector:{}", collectorId, e.getMessage()); + throw new ApiException(String.format("%s: gRPC error getting collector config", collectorId), HttpStatus.INTERNAL_SERVER_ERROR); + } + } + + public void shutdown() { + channel.shutdown(); + } +} + diff --git a/backend/src/main/java/com/park/utmstack/grpc/client/PanelCollectorServiceClient.java b/backend/src/main/java/com/park/utmstack/grpc/client/PanelCollectorServiceClient.java new file mode 100644 index 000000000..db0e30e76 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/grpc/client/PanelCollectorServiceClient.java @@ -0,0 +1,34 @@ +package com.park.utmstack.grpc.client; + +import agent.CollectorOuterClass; +import agent.PanelCollectorServiceGrpc; +import io.grpc.ManagedChannel; +import io.grpc.StatusRuntimeException; + +public class PanelCollectorServiceClient { + + private final ManagedChannel channel; + private final PanelCollectorServiceGrpc.PanelCollectorServiceBlockingStub baseStub; + + public PanelCollectorServiceClient(ManagedChannel channel) { + this.channel = channel; + this.baseStub = PanelCollectorServiceGrpc.newBlockingStub(channel); + } + + public CollectorOuterClass.ConfigKnowledge insertCollectorConfig(CollectorOuterClass.CollectorConfig config) { + + try { + return baseStub.registerCollectorConfig(config); + + } catch (StatusRuntimeException e) { + throw new RuntimeException("gRPC error inserting collector config: " + e.getMessage(), e); + } catch (Exception e) { + throw new 
RuntimeException("Unexpected error inserting collector config: " + e.getMessage(), e); + } + } + + public void shutdown() { + channel.shutdown(); + } +} + diff --git a/backend/src/main/java/com/park/utmstack/grpc/connection/GrpcConnection.java b/backend/src/main/java/com/park/utmstack/grpc/connection/GrpcConnection.java new file mode 100644 index 000000000..93c42aede --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/grpc/connection/GrpcConnection.java @@ -0,0 +1,34 @@ +package com.park.utmstack.grpc.connection; + +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class GrpcConnection { + + private ManagedChannel channel; + private final String host; + private final int port; + + public void connect() { + this.channel = ManagedChannelBuilder + .forAddress(host, port) + .usePlaintext() + .build(); + } + + public ManagedChannel getChannel() { + if (channel == null) { + throw new IllegalStateException("Channel not initialized. 
Call connect() first."); + } + return channel; + } + + public void shutdown() { + if (channel != null && !channel.isShutdown()) { + channel.shutdown(); + } + } +} + diff --git a/backend/src/main/java/com/park/utmstack/grpc/interceptor/CollectorAuthInterceptor.java b/backend/src/main/java/com/park/utmstack/grpc/interceptor/CollectorAuthInterceptor.java new file mode 100644 index 000000000..701a568fb --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/grpc/interceptor/CollectorAuthInterceptor.java @@ -0,0 +1,45 @@ +package com.park.utmstack.grpc.interceptor; + +import io.grpc.*; + +public class CollectorAuthInterceptor implements ClientInterceptor { + + private static final Metadata.Key ID_HEADER = + Metadata.Key.of("id", Metadata.ASCII_STRING_MARSHALLER); + + private static final Metadata.Key KEY_HEADER = + Metadata.Key.of("key", Metadata.ASCII_STRING_MARSHALLER); + + private static final Metadata.Key TYPE_HEADER = + Metadata.Key.of("type", Metadata.ASCII_STRING_MARSHALLER); + + private final String collectorId; + private final String collectorKey; + + public CollectorAuthInterceptor(String collectorId, String collectorKey) { + this.collectorId = collectorId; + this.collectorKey = collectorKey; + } + + @Override + public ClientCall interceptCall( + MethodDescriptor methodDescriptor, + CallOptions callOptions, + Channel channel) { + + return new ForwardingClientCall.SimpleForwardingClientCall<>( + channel.newCall(methodDescriptor, callOptions)) { + + @Override + public void start(Listener responseListener, Metadata headers) { + + headers.put(ID_HEADER, collectorId); + headers.put(KEY_HEADER, collectorKey); + headers.put(TYPE_HEADER, "collector"); + + super.start(responseListener, headers); + } + }; + } +} + diff --git a/backend/src/main/java/com/park/utmstack/repository/UtmModuleGroupConfigurationRepository.java b/backend/src/main/java/com/park/utmstack/repository/UtmModuleGroupConfigurationRepository.java index bcf25f983..3c0e59ac9 100644 --- 
a/backend/src/main/java/com/park/utmstack/repository/UtmModuleGroupConfigurationRepository.java +++ b/backend/src/main/java/com/park/utmstack/repository/UtmModuleGroupConfigurationRepository.java @@ -10,7 +10,7 @@ /** * Spring Data repository for the UtmModuleGroupConfiguration entity. */ -@SuppressWarnings("unused") + @Repository public interface UtmModuleGroupConfigurationRepository extends JpaRepository { diff --git a/backend/src/main/java/com/park/utmstack/repository/collector/UtmCollectorRepository.java b/backend/src/main/java/com/park/utmstack/repository/collector/UtmCollectorRepository.java index faf4f3ee1..3d187ac96 100644 --- a/backend/src/main/java/com/park/utmstack/repository/collector/UtmCollectorRepository.java +++ b/backend/src/main/java/com/park/utmstack/repository/collector/UtmCollectorRepository.java @@ -31,5 +31,5 @@ Page searchByFilters(@Param("assetIpMacName") String assetIpMacNam void updateGroup(@Param("collectorsIds") List collectorsIds, @Param("assetGroupId") Long assetGroupId); - Optional> findAllByGroupId(Long groupId); + List findAllByGroupIdIn(List groupIds); } diff --git a/backend/src/main/java/com/park/utmstack/repository/network_scan/UtmNetworkScanRepository.java b/backend/src/main/java/com/park/utmstack/repository/network_scan/UtmNetworkScanRepository.java index ee72ae4eb..af0158bd0 100644 --- a/backend/src/main/java/com/park/utmstack/repository/network_scan/UtmNetworkScanRepository.java +++ b/backend/src/main/java/com/park/utmstack/repository/network_scan/UtmNetworkScanRepository.java @@ -43,18 +43,18 @@ public interface UtmNetworkScanRepository extends JpaRepository searchByFilters(@Param("assetIpMacName") String assetIpMacName, @Param("assetOs") List assetOs, + @Param("hasAssetOs") boolean hasAssetOs, @Param("assetAlias") List assetAlias, + @Param("hasAssetAlias") boolean hasAssetAlias, @Param("assetType") List assetType, + @Param("hasAssetType") boolean hasAssetType, @Param("assetAlive") List assetAlive, + @Param("hasAssetAlive") 
boolean hasAssetAlive, @Param("assetStatus") List assetStatus, + @Param("hasAssetStatus") boolean hasAssetStatus, @Param("serverName") List serverName, + @Param("hasServerName") boolean hasServerName, @Param("ports") List ports, + @Param("hasPorts") boolean hasPorts, @Param("initDate") Instant initDate, @Param("endDate") Instant endDate, @Param("groups") List groups, + @Param("hasGroups") boolean hasGroups, @Param("registeredMode") AssetRegisteredMode registeredMode, @Param("isAgent") List isAgent, + @Param("hasIsAgent") boolean hasIsAgent, @Param("assetOsPlatform") List assetOsPlatform, + @Param("hasAssetOsPlatform") boolean hasAssetOsPlatform, @Param("dataTypes") List dataTypes, + @Param("hasDataTypes") boolean hasDataTypes, Pageable pageable); @Modifying diff --git a/backend/src/main/java/com/park/utmstack/service/UserService.java b/backend/src/main/java/com/park/utmstack/service/UserService.java index 5dacde192..04736f5b9 100644 --- a/backend/src/main/java/com/park/utmstack/service/UserService.java +++ b/backend/src/main/java/com/park/utmstack/service/UserService.java @@ -14,6 +14,7 @@ import com.park.utmstack.util.exceptions.CurrentUserLoginNotFoundException; import com.park.utmstack.web.rest.errors.BadRequestAlertException; import com.park.utmstack.web.rest.errors.InvalidPasswordException; +import com.park.utmstack.web.rest.errors.ResetKeyExpiredException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.context.event.ApplicationReadyEvent; @@ -79,15 +80,35 @@ public void init() { } } - public Optional completePasswordReset(String newPassword, String key) { - log.debug("Reset user password for reset key {}", key); - return userRepository.findOneByResetKey(key).filter( - user -> user.getResetDate().isAfter(Instant.now().minusSeconds(86400))).map(user -> { - user.setPassword(passwordEncoder.encode(newPassword)); - user.setResetKey(null); - user.setResetDate(null); - return user; - }); + public void 
completePasswordReset(String newPassword, String key) { + final String ctx = CLASS_NAME + ".completePasswordReset"; + log.debug("{}: Processing password reset with key: {}", ctx, key); + + Optional userOptional = userRepository.findOneByResetKey(key); + + if (userOptional.isEmpty()) { + log.info("{}: No user found with reset key", ctx); + throw new CurrentUserLoginNotFoundException( + "The password reset link is invalid or no longer exists. Please request a new password reset." + ); + } + + User user = userOptional.get(); + Instant resetDeadline = Instant.now().minusSeconds(86400); + + if (!user.getResetDate().isAfter(resetDeadline)) { + log.error("{}: Reset key expired for user: {}", ctx, user.getLogin()); + throw new ResetKeyExpiredException( + "The password reset link has expired. Password reset links are valid for 24 hours. Please request a new one." + ); + } + + user.setPassword(passwordEncoder.encode(newPassword)); + user.setResetKey(null); + user.setResetDate(null); + user.setActivated(true); + + log.info("{}: Password reset completed successfully for user: {}", ctx, user.getLogin()); } public Optional requestPasswordReset(String mail) { diff --git a/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupConfigurationService.java b/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupConfigurationService.java index d8d00387c..e2b649c2c 100644 --- a/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupConfigurationService.java +++ b/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupConfigurationService.java @@ -5,11 +5,13 @@ import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; import com.park.utmstack.domain.application_modules.enums.ModuleName; import com.park.utmstack.repository.UtmModuleGroupConfigurationRepository; +import com.park.utmstack.repository.UtmModuleGroupRepository; import 
com.park.utmstack.repository.application_modules.UtmModuleRepository; import com.park.utmstack.event_processor.EventProcessorManagerService; import com.park.utmstack.util.CipherUtil; import com.park.utmstack.util.exceptions.ApiException; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; import org.springframework.http.HttpStatus; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -28,14 +30,13 @@ @Service @Transactional @RequiredArgsConstructor +@Slf4j public class UtmModuleGroupConfigurationService { private static final String CLASSNAME = "UtmModuleGroupConfigurationService"; private final UtmModuleGroupConfigurationRepository moduleConfigurationRepository; private final UtmModuleRepository moduleRepository; - private final EventProcessorManagerService eventProcessorManagerService; - public void createConfigurationKeys(List keys) throws Exception { final String ctx = CLASSNAME + ".createConfigurationKeys"; @@ -52,9 +53,8 @@ public void createConfigurationKeys(List keys) thro * Update configuration of the application modules * * @param keys List of configuration keys to save - * @throws Exception In case of any error */ - public UtmModule updateConfigurationKeys(Long moduleId, List keys) throws Exception { + public UtmModule updateConfigurationKeys(Long moduleId, List keys) { final String ctx = CLASSNAME + ".updateConfigurationKeys"; try { if (CollectionUtils.isEmpty(keys)) @@ -77,7 +77,8 @@ public UtmModule updateConfigurationKeys(Long moduleId, List new ApiException(String.format("Module with ID %1$s not found", moduleId), HttpStatus.NOT_FOUND)); } catch (Exception e) { - throw new Exception(ctx + ": " + e.getMessage()); + log.error("{}: Error updating configuration keys: {}", ctx, e.getMessage()); + throw new ApiException(String.format("%s: Error updating configuration keys: %s", ctx, e.getMessage()), HttpStatus.INTERNAL_SERVER_ERROR); } } diff --git 
a/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupService.java b/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupService.java index 117c64155..28e89d7a1 100644 --- a/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupService.java +++ b/backend/src/main/java/com/park/utmstack/service/application_modules/UtmModuleGroupService.java @@ -1,21 +1,36 @@ package com.park.utmstack.service.application_modules; import com.park.utmstack.aop.logging.Loggable; +import com.park.utmstack.config.Constants; import com.park.utmstack.domain.application_events.enums.ApplicationEventType; import com.park.utmstack.domain.application_modules.UtmModule; import com.park.utmstack.domain.application_modules.UtmModuleGroup; +import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; +import com.park.utmstack.event_processor.EventProcessorManagerService; +import com.park.utmstack.repository.UtmModuleGroupConfigurationRepository; import com.park.utmstack.repository.UtmModuleGroupRepository; +import com.park.utmstack.repository.application_modules.UtmModuleRepository; import com.park.utmstack.service.application_events.ApplicationEventService; +import com.park.utmstack.service.dto.application_modules.ModuleActivationDTO; +import com.park.utmstack.service.dto.application_modules.ModuleDTO; +import com.park.utmstack.service.dto.application_modules.UtmModuleMapper; +import com.park.utmstack.service.dto.collectors.dto.CollectorConfigDTO; +import com.park.utmstack.util.CipherUtil; +import com.park.utmstack.util.exceptions.ApiException; import lombok.RequiredArgsConstructor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.http.HttpStatus; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; 
import javax.persistence.EntityNotFoundException; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; /** * Service Implementation for managing UtmConfigurationGroup. @@ -31,6 +46,10 @@ public class UtmModuleGroupService { private final UtmModuleGroupRepository moduleGroupRepository; private final UtmModuleService moduleService; private final ApplicationEventService applicationEventService; + private final UtmModuleRepository moduleRepository; + private final UtmModuleGroupConfigurationRepository moduleGroupConfigurationRepository; + private final EventProcessorManagerService eventProcessorManagerService; + private final UtmModuleMapper moduleMapper; /** @@ -69,36 +88,63 @@ public Optional findOne(Long id) { return moduleGroupRepository.findById(id); } - /** - * Delete the utmConfigurationGroup by id. - * - * @param id the id of the entity - */ - - public void delete(Long id) { - final String ctx = CLASSNAME + ".delete"; - long start = System.currentTimeMillis(); + @Transactional + public void deleteGroup(Long id) { UtmModuleGroup moduleGroup = this.moduleGroupRepository.findById(id) .orElseThrow(() -> new EntityNotFoundException("Configuration group not found with ID: " + id)); + Long moduleId = moduleGroup.getModule().getId(); String moduleName = String.valueOf(moduleGroup.getModule().getModuleName()); + Map extra = Map.of( - "ModuleId", moduleGroup.getModule().getId(), + "ModuleId", moduleId, "ModuleName", moduleName, "GroupId", id ); - String attemptMsg = String.format("Initiating deletion of configuration group (ID: %d) for module '%s'", id, moduleName); + String attemptMsg = String.format( + "Initiating deletion of configuration group (ID: %d) for module '%s'", + id, moduleName + ); applicationEventService.createEvent(attemptMsg, ApplicationEventType.CONFIG_GROUP_DELETE_ATTEMPT, extra); moduleGroupRepository.deleteById(id); - long duration = System.currentTimeMillis() - start; - String successMsg = 
String.format("Configuration group (ID: %d) for module '%s' deleted successfully in %dms", id, moduleName, duration); + String successMsg = String.format( + "Configuration group (ID: %d) for module '%s' deleted successfully", + id, moduleName + ); applicationEventService.createEvent(successMsg, ApplicationEventType.CONFIG_GROUP_DELETE_SUCCESS, extra); } + public void deleteAndFetch(Long id) { + + try { + Long moduleId = moduleGroupRepository.findById(id) + .orElseThrow(() -> new EntityNotFoundException("Configuration group not found with ID: " + id)) + .getModule() + .getId(); + + deleteGroup(id); + + UtmModule module = moduleService.findOne(moduleId) + .orElseThrow(() -> new EntityNotFoundException("Module not found with id " + moduleId)); + + ModuleDTO moduleDTO = moduleMapper.toDto(module, false); + + moduleDTO.setModuleGroups( + moduleDTO.getModuleGroups().stream().filter(g -> !g.getId().equals(id)).collect(Collectors.toSet()) + ); + eventProcessorManagerService.updateModule(moduleDTO); + + } catch (Exception e) { + log.error("{}: Error deleting configuration group with ID {}: {}", CLASSNAME, id, e.getMessage()); + throw new ApiException(String.format("%s: Error deleting configuration group with ID %d", CLASSNAME, id), HttpStatus.INTERNAL_SERVER_ERROR); + } + + } + public void deleteAllByModuleId(Long id) { UtmModule module = this.moduleService.findOne(id) @@ -126,13 +172,15 @@ public List findAllByModuleId(Long moduleId) throws Exception { } } - public List findAllByCollectorId(String collectorId) throws Exception { - final String ctx = CLASSNAME + ".findAllByModuleName"; + public List findAllByCollectorId(String collectorId) { + String ctx = CLASSNAME + ".findAllByCollectorId"; try { return moduleGroupRepository.findAllByCollector(collectorId); } catch (Exception e) { - throw new Exception(ctx + ": " + e.getMessage()); + log.error("{}: Error finding module groups by collector id {}: {}", ctx, collectorId, e.getMessage()); + throw new 
ApiException(String.format("%s: Error finding module groups by collector id %s", ctx, collectorId), HttpStatus.INTERNAL_SERVER_ERROR); } + } public List findAllWithCollector() throws Exception { @@ -143,4 +191,84 @@ public List findAllWithCollector() throws Exception { throw new Exception(ctx + ": " + e.getMessage()); } } + + @Transactional + public void deleteCollectorById(Long collectorId) { + + List groups = moduleGroupRepository.findAllByCollector(collectorId.toString()); + + if (groups.isEmpty()) { + return; + } + + UtmModuleGroup group = groups.get(0); + + if (group != null) { + handleModuleDeactivationIfNeeded(group, collectorId); + } + + moduleGroupRepository.deleteAllByCollector(collectorId.toString()); + } + + private void handleModuleDeactivationIfNeeded(UtmModuleGroup group, Long collectorId) { + + UtmModule module = moduleRepository.findById(group.getModuleId()) + .orElseThrow(() -> new IllegalStateException("Module not found")); + + if (!module.getModuleActive()) { + return; + } + + boolean otherCollectorsExist = + moduleGroupRepository.findAllByModuleId(module.getId()) + .stream() + .anyMatch(m -> !m.getCollector().equals(collectorId.toString())); + + if (!otherCollectorsExist) { + moduleService.activateDeactivate( + ModuleActivationDTO.builder() + .serverId(module.getServerId()) + .moduleName(module.getModuleName()) + .activationStatus(false) + .build() + ); + } + } + + public void updateCollectorConfigurationKeys(CollectorConfigDTO collectorConfig) { + final String ctx = CLASSNAME + ".updateCollectorConfigurationKeys"; + try { + + List dbConfigs = moduleGroupRepository + .findAllByModuleIdAndCollector(collectorConfig.getModuleId(), + String.valueOf(collectorConfig.getCollector().getId())); + + List keys = collectorConfig.getKeys(); + + if (collectorConfig.getKeys().isEmpty()) { + moduleGroupRepository.deleteAll(dbConfigs); + } else { + for (UtmModuleGroupConfiguration key : keys) { + if (key.getConfDataType().equals("password")) { + 
key.setConfValue(CipherUtil.encrypt(key.getConfValue(), System.getenv(Constants.ENV_ENCRYPTION_KEY))); + } + } + List keyGroupIds = keys.stream() + .map(UtmModuleGroupConfiguration::getGroupId) + .toList(); + + List groupsToDelete = dbConfigs.stream() + .filter(utmModuleGroup -> !keyGroupIds.contains(utmModuleGroup.getId())) + .collect(Collectors.toList()); + + moduleGroupRepository.deleteAll(groupsToDelete); + moduleGroupConfigurationRepository.saveAll(keys); + } + + } catch (Exception e) { + log.error("{}: Error updating collector configuration keys for collector id {}: {}", ctx, collectorConfig.getCollector().getId(), e.getMessage()); + throw new ApiException(String.format("%s: Error updating collector configuration keys for collector id %d", ctx, collectorConfig.getCollector().getId()), HttpStatus.INTERNAL_SERVER_ERROR); + } + } + } diff --git a/backend/src/main/java/com/park/utmstack/service/collectors/BulkCollectorConfigResponseDTO.java b/backend/src/main/java/com/park/utmstack/service/collectors/BulkCollectorConfigResponseDTO.java new file mode 100644 index 000000000..13158bf5b --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/collectors/BulkCollectorConfigResponseDTO.java @@ -0,0 +1,13 @@ +package com.park.utmstack.service.collectors; + +import lombok.Builder; +import lombok.Data; + +import java.util.List; + +@Data +@Builder +public class BulkCollectorConfigResponseDTO { + private List results; +} + diff --git a/backend/src/main/java/com/park/utmstack/service/collectors/CollectorConfigBuilder.java b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorConfigBuilder.java new file mode 100644 index 000000000..cd821aa30 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorConfigBuilder.java @@ -0,0 +1,108 @@ +package com.park.utmstack.service.collectors; + +import agent.CollectorOuterClass.CollectorConfig; +import agent.CollectorOuterClass.CollectorConfigGroup; +import 
agent.CollectorOuterClass.CollectorGroupConfigurations; +import com.park.utmstack.config.Constants; +import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; +import com.park.utmstack.repository.UtmModuleGroupConfigurationRepository; +import com.park.utmstack.service.dto.collectors.dto.CollectorConfigDTO; +import com.park.utmstack.service.dto.collectors.dto.CollectorDTO; +import com.park.utmstack.service.application_modules.UtmModuleGroupService; +import com.park.utmstack.util.CipherUtil; +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +@Component +@RequiredArgsConstructor +public class CollectorConfigBuilder { + + private final UtmModuleGroupService moduleGroupService; + private final UtmModuleGroupConfigurationRepository configRepo; + + public CollectorConfig build(CollectorConfigDTO dto) { + + List processed = processPasswords(dto.getKeys()); + + return buildCollectorConfig(processed, dto.getCollector()); + } + + + private List processPasswords(List configs) { + + return configs.stream().map(config -> { + + if (Constants.CONF_TYPE_PASSWORD.equals(config.getConfDataType())) { + + UtmModuleGroupConfiguration original = configRepo.findById(config.getId()) + .orElseThrow(() -> new RuntimeException("Configuration id " + config.getId() + " not found")); + + if (Objects.equals(config.getConfValue(), original.getConfValue())) { + config.setConfValue( + CipherUtil.decrypt(original.getConfValue(), System.getenv(Constants.ENV_ENCRYPTION_KEY)) + ); + } + } + + return config; + + }).toList(); + } + + + private CollectorConfig buildCollectorConfig(List keys, CollectorDTO collectorDTO) { + + List groupIds = keys.stream() + .map(UtmModuleGroupConfiguration::getGroupId) + .distinct() + .toList(); + + List groups = new ArrayList<>(); + + for (Long groupId : groupIds) { + + moduleGroupService.findOne(groupId).ifPresent(group -> { + + 
List configs = + keys.stream() + .filter(k -> k.getGroupId().equals(groupId)) + .map(this::mapToCollectorGroupConfigurations) + .toList(); + + groups.add( + CollectorConfigGroup.newBuilder() + .setGroupName(group.getGroupName()) + .setGroupDescription(group.getGroupDescription()) + .addAllConfigurations(configs) + .setCollectorId(collectorDTO.getId()) + .build() + ); + }); + } + + return CollectorConfig.newBuilder() + .setCollectorId(String.valueOf(collectorDTO.getId())) + .setRequestId(String.valueOf(System.currentTimeMillis())) + .addAllGroups(groups) + .build(); + } + + + private CollectorGroupConfigurations mapToCollectorGroupConfigurations( + UtmModuleGroupConfiguration c) { + + return CollectorGroupConfigurations.newBuilder() + .setConfKey(c.getConfKey()) + .setConfName(c.getConfName()) + .setConfDescription(c.getConfDescription()) + .setConfDataType(c.getConfDataType()) + .setConfValue(c.getConfValue()) + .setConfRequired(c.getConfRequired()) + .build(); + } +} + diff --git a/backend/src/main/java/com/park/utmstack/service/collectors/CollectorConfigResultDTO.java b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorConfigResultDTO.java new file mode 100644 index 000000000..3b59756c0 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorConfigResultDTO.java @@ -0,0 +1,13 @@ +package com.park.utmstack.service.collectors; + +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class CollectorConfigResultDTO { + private int collectorId; + private boolean success; + private String errorMessage; +} + diff --git a/backend/src/main/java/com/park/utmstack/service/collectors/CollectorGrpcService.java b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorGrpcService.java new file mode 100644 index 000000000..073900217 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorGrpcService.java @@ -0,0 +1,36 @@ +package com.park.utmstack.service.collectors; + 
+import agent.CollectorOuterClass.*; +import com.park.utmstack.grpc.client.CollectorServiceClient; +import com.park.utmstack.grpc.client.PanelCollectorServiceClient; +import com.park.utmstack.service.grpc.ListRequest; +import io.grpc.ManagedChannel; +import org.springframework.stereotype.Service; + +@Service +public class CollectorGrpcService { + + private final CollectorServiceClient collectorClient; + private final PanelCollectorServiceClient panelClient; + + public CollectorGrpcService(ManagedChannel channel) { + this.collectorClient = new CollectorServiceClient(channel); + this.panelClient = new PanelCollectorServiceClient(channel); + } + + public ListCollectorResponse listCollectors(ListRequest request) { + return collectorClient.listCollectors(request); + } + + public CollectorConfig getCollectorConfig(int id, String key, CollectorModule module) { + return collectorClient.getCollectorConfig(id, key, module); + } + + public void deleteCollector(int id, String key) { + collectorClient.deleteCollector(id, key); + } + + public ConfigKnowledge upsertCollectorConfig(CollectorConfig config) { + return panelClient.insertCollectorConfig(config); + } +} diff --git a/backend/src/main/java/com/park/utmstack/service/collectors/CollectorOpsService.java b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorOpsService.java index a18fae336..5e9579ae3 100644 --- a/backend/src/main/java/com/park/utmstack/service/collectors/CollectorOpsService.java +++ b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorOpsService.java @@ -1,76 +1,16 @@ package com.park.utmstack.service.collectors; -import agent.CollectorOuterClass.CollectorGroupConfigurations; -import agent.CollectorOuterClass.ListCollectorResponse; -import agent.CollectorOuterClass.ConfigKnowledge; -import agent.CollectorOuterClass.CollectorConfig; -import agent.CollectorOuterClass.Collector; -import agent.CollectorOuterClass.CollectorModule; -import 
agent.CollectorOuterClass.CollectorConfigGroup; -import agent.CollectorOuterClass.ConfigRequest; -import agent.Common; -import agent.Common.ListRequest; -import agent.Common.AuthResponse; -import agent.Common.DeleteRequest; -import com.park.utmstack.config.Constants; -import com.park.utmstack.domain.application_modules.UtmModule; -import com.park.utmstack.domain.application_modules.UtmModuleGroup; -import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; -import com.park.utmstack.domain.collector.UtmCollector; -import com.park.utmstack.domain.network_scan.AssetGroupFilter; -import com.park.utmstack.domain.network_scan.UtmAssetGroup; -import com.park.utmstack.repository.UtmModuleGroupConfigurationRepository; -import com.park.utmstack.repository.UtmModuleGroupRepository; -import com.park.utmstack.repository.application_modules.UtmModuleRepository; -import com.park.utmstack.repository.collector.UtmCollectorRepository; -import com.park.utmstack.security.SecurityUtils; -import com.park.utmstack.service.application_modules.UtmModuleGroupService; -import com.park.utmstack.service.application_modules.UtmModuleService; -import com.park.utmstack.service.dto.application_modules.ModuleActivationDTO; -import com.park.utmstack.service.dto.collectors.CollectorHostnames; -import com.park.utmstack.service.dto.collectors.CollectorModuleEnum; -import com.park.utmstack.service.dto.collectors.dto.CollectorConfigKeysDTO; -import com.park.utmstack.service.dto.collectors.dto.ListCollectorsResponseDTO; -import com.park.utmstack.service.dto.collectors.dto.CollectorDTO; -import com.park.utmstack.service.dto.network_scan.AssetGroupDTO; -import com.park.utmstack.service.validators.collector.CollectorValidatorService; -import com.park.utmstack.util.CipherUtil; -import com.park.utmstack.web.rest.errors.BadRequestAlertException; -import com.utmstack.grpc.connection.GrpcConnection; -import com.utmstack.grpc.exception.CollectorConfigurationGrpcException; -import 
com.utmstack.grpc.exception.CollectorServiceGrpcException; -import com.utmstack.grpc.exception.GrpcConnectionException; -import com.utmstack.grpc.service.CollectorService; -import com.utmstack.grpc.service.PanelCollectorService; import io.grpc.*; -import org.springframework.dao.InvalidDataAccessResourceUsageException; -import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.validation.BeanPropertyBindingResult; -import org.springframework.validation.Errors; - -import javax.persistence.EntityManager; -import java.math.BigInteger; + import java.util.*; -import java.util.stream.Collectors; -@Service public class CollectorOpsService { + /* private final String CLASSNAME = "CollectorOpsService"; private final Logger log = LoggerFactory.getLogger(CollectorOpsService.class); - private final GrpcConnection grpcConnection; - private final PanelCollectorService panelCollectorService; - private final CollectorService collectorService; + private final ManagedChannel channel; + private final PanelCollectorServiceClient panelCollectorService; + private final CollectorServiceClient collectorService; private final UtmModuleGroupService moduleGroupService; private final UtmModuleGroupConfigurationRepository utmModuleGroupConfigurationRepository; @@ -90,7 +30,7 @@ public class CollectorOpsService { private final CollectorValidatorService collectorValidatorService; - public CollectorOpsService(GrpcConnection grpcConnection, + public CollectorOpsService(ManagedChannel channel, UtmModuleGroupService moduleGroupService, 
UtmModuleGroupConfigurationRepository utmModuleGroupConfigurationRepository, UtmCollectorRepository utmCollectorRepository, @@ -100,9 +40,10 @@ public CollectorOpsService(GrpcConnection grpcConnection, UtmModuleService utmModuleService, UtmModuleRepository utmModuleRepository, CollectorValidatorService collectorValidatorService) throws GrpcConnectionException { - this.grpcConnection = grpcConnection; - this.panelCollectorService = new PanelCollectorService(grpcConnection); - this.collectorService = new CollectorService(grpcConnection); + + this.channel = channel; + this.panelCollectorService = new PanelCollectorServiceClient(channel); + this.collectorService = new CollectorServiceClient(channel); this.moduleGroupService = moduleGroupService; this.utmModuleGroupConfigurationRepository = utmModuleGroupConfigurationRepository; this.utmCollectorRepository = utmCollectorRepository; @@ -114,12 +55,12 @@ public CollectorOpsService(GrpcConnection grpcConnection, this.collectorValidatorService = collectorValidatorService; } - /** + *//** * Method to update a collector's configuration. * * @param config is the configuration of the collectors to update. * @throws CollectorConfigurationGrpcException if the action can't be performed. - */ + *//* public ConfigKnowledge upsertCollectorConfig(CollectorConfig config) throws CollectorConfigurationGrpcException { final String ctx = CLASSNAME + ".upsertCollectorConfig"; @@ -130,44 +71,24 @@ public ConfigKnowledge upsertCollectorConfig(CollectorConfig config) throws Coll } try { - return panelCollectorService.insertCollectorConfig(config, internalKey); + return panelCollectorService.insertCollectorConfig(config); } catch (Exception e) { String msg = ctx + ": " + e.getMessage(); throw new CollectorConfigurationGrpcException(msg); } } - /** - * Method to get collectors list. - * - * @param request is the request with all the pagination and search params used to list collectors - * according to those params. 
- * @throws CollectorServiceGrpcException if the action can't be performed or the request is malformed. - */ - public ListCollectorsResponseDTO listCollector(ListRequest request) throws CollectorServiceGrpcException { - final String ctx = CLASSNAME + ".listCollector"; - - String internalKey = System.getenv(Constants.ENV_INTERNAL_KEY); - - if (!StringUtils.hasText(internalKey)) { - throw new BadRequestAlertException(ctx + ": Internal key not configured.", ctx, CLASSNAME); - } - - try { - return mapToListCollectorsResponseDTO(collectorService.listCollector(request, internalKey)); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - throw new CollectorServiceGrpcException(msg); - } + public ListCollectorsResponseDTO listCollector(ListRequest request) { + return mapToListCollectorsResponseDTO(collectorService.listCollectors(request)); } - /** + *//** * Method to List all UtmCollector's hostnames. * * @param request is the request with all the pagination and search params used to list collectors. * according to those params. * @throws CollectorServiceGrpcException if the action can't be performed or the request is malformed. - */ + *//* public CollectorHostnames listCollectorHostnames(ListRequest request) throws CollectorServiceGrpcException { final String ctx = CLASSNAME + ".ListCollectorHostnames"; @@ -178,10 +99,10 @@ public CollectorHostnames listCollectorHostnames(ListRequest request) throws Col } try { - ListCollectorResponse response = collectorService.listCollector(request, internalKey); + ListCollectorResponse response = collectorService.listCollectors(request); CollectorHostnames collectorHostnames = new CollectorHostnames(); - response.getRowsList().forEach(c->{ + response.getRowsList().forEach(c -> { collectorHostnames.getHostname().add(c.getHostname()); }); @@ -192,52 +113,24 @@ public CollectorHostnames listCollectorHostnames(ListRequest request) throws Col } } - /** + *//** * Method to get collectors by hostname and module. 
* * @param request contains the filter information used to search. - * @throws CollectorServiceGrpcException if the action can't be performed or the request is malformed. - */ - public ListCollectorsResponseDTO getCollectorsByHostnameAndModule(ListRequest request) throws CollectorServiceGrpcException { - final String ctx = CLASSNAME + ".GetCollectorsByHostnameAndModule"; - - String internalKey = System.getenv(Constants.ENV_INTERNAL_KEY); - - if (!StringUtils.hasText(internalKey)) { - throw new BadRequestAlertException(ctx + ": Internal key not configured.", ctx, CLASSNAME); - } - - try { - return mapToListCollectorsResponseDTO(collectorService.listCollector(request, internalKey)); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - throw new CollectorServiceGrpcException(msg); - } + *//* + public ListCollectorsResponseDTO getCollectorsByHostnameAndModule(ListRequest request) { + return mapToListCollectorsResponseDTO(collectorService.listCollectors(request)); } - /** - * Method to get a collector config from agent manager via gRPC. - * - * @param request represents the CollectorModule to get the configurations from. - * @param auth is the authentication parameters used to filter in order to get the collector configuration. - * @throws CollectorServiceGrpcException if the action can't be performed or the request is malformed. 
- */ - public CollectorConfig getCollectorConfig(ConfigRequest request, AuthResponse auth) throws CollectorServiceGrpcException { - final String ctx = CLASSNAME + ".getCollectorConfig"; - - - try { - return collectorService.requestCollectorConfig(request, auth); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - throw new CollectorServiceGrpcException(msg); - } + public CollectorConfig getCollectorConfig(CollectorDTO collectorDTO) { + return collectorService.getCollectorConfig(collectorDTO.getId(), collectorDTO.getCollectorKey(), + CollectorModule.valueOf(collectorDTO.getModule().toString())); } - /** + *//** * Method to transform a ListCollectorResponse to ListCollectorsResponseDTO - */ - private ListCollectorsResponseDTO mapToListCollectorsResponseDTO(ListCollectorResponse response) throws Exception { + *//* + private ListCollectorsResponseDTO mapToListCollectorsResponseDTO(ListCollectorResponse response) { final String ctx = CLASSNAME + ".mapToListCollectorsResponseDTO"; try { ListCollectorsResponseDTO dto = new ListCollectorsResponseDTO(); @@ -253,13 +146,13 @@ private ListCollectorsResponseDTO mapToListCollectorsResponseDTO(ListCollectorRe return dto; } catch (Exception e) { - throw new Exception(ctx + ": " + e.getMessage()); + throw new ApiException(String.format("%s: Error mapping ListCollectorResponse to ListCollectorsResponseDTO: %s", ctx, e.getMessage()), HttpStatus.INTERNAL_SERVER_ERROR); } } - /** + *//** * Method to map from List to CollectorConfig - */ + *//* public CollectorConfig mapToCollectorConfig(List keys, CollectorDTO collectorDTO) { final String ctx = CLASSNAME + ".mapToCollectorConfig"; @@ -293,16 +186,16 @@ public CollectorConfig mapToCollectorConfig(List ke return collectorConfig; } - /** + *//** * Method to transform a UtmCollector to CollectorDTO - */ + *//* private CollectorDTO protoToCollectorDto(Collector collector) { return new CollectorDTO(this.utmCollectorService.saveCollector(collector)); } - /** + *//** * 
Method to map from UtmModuleGroupConfiguration to CollectorGroupConfigurations - */ + *//* private CollectorGroupConfigurations mapToCollectorGroupConfigurations(UtmModuleGroupConfiguration moduleConfig) { return CollectorGroupConfigurations.newBuilder() .setConfKey(moduleConfig.getConfKey()) @@ -313,58 +206,48 @@ private CollectorGroupConfigurations mapToCollectorGroupConfigurations(UtmModule .setConfRequired(moduleConfig.getConfRequired()).build(); } - /** + *//** * Method to remove a collector, will be used to remove in the UtmNetworkScanService * * @param hostname the hostname of the collector to remove * @param module the module of the collector to remove - */ + *//* public void deleteCollector(String hostname, CollectorModuleEnum module) { final String ctx = CLASSNAME + ".deleteCollector"; - try { - String currentUser = SecurityUtils.getCurrentUserLogin().orElseThrow(() -> new RuntimeException("No current user login")); - - Optional collectorToSearch = getCollectorsByHostnameAndModule( - getListRequestByHostnameAndModule(hostname, module)).getCollectors() - .stream().findFirst(); - try { - if (collectorToSearch.isEmpty()) { - log.error(String.format("%1$s: UtmCollector %2$s could not be deleted because no information was obtained from collector manager", ctx, hostname)); - return; - } - } catch (StatusRuntimeException e) { - if (e.getStatus().getCode() == Status.Code.NOT_FOUND) { - log.error(String.format("%1$s: UtmCollector %2$s could not be deleted because was not found", ctx, hostname)); - return; - } - } - DeleteRequest collectorDelete = DeleteRequest.newBuilder().setDeletedBy(currentUser).build(); - AuthResponse auth = Common.AuthResponse.newBuilder() - .setId(collectorToSearch.get().getId()) - .setKey(collectorToSearch.get().getCollectorKey()) - .build(); - collectorService.deleteCollector(collectorDelete, auth); + var request = getListRequestByHostnameAndModule(hostname, module); + List collectors = 
getCollectorsByHostnameAndModule(request).getCollectors(); - } catch (Exception e) { - String msg = ctx + ": " + e.getLocalizedMessage(); - log.error(msg); - throw new RuntimeException(msg); + Optional found = collectors.stream().findFirst(); + if (found.isEmpty()) { + log.error("{}: Collector {} not found in Agent Manager", ctx, hostname); + return; } + + CollectorDTO collector = found.get(); + + collectorService.deleteCollector( + collector.getId(), + collector.getCollectorKey() + ); + + log.info("{}: Collector {} deleted successfully", ctx, hostname); + } - public List mapPasswordConfiguration( List configs) { - return configs.stream().peek(config -> { + public List mapPasswordConfiguration(List configs) { + + return configs.stream().peek(config -> { if (config.getConfDataType().equals("password")) { final UtmModuleGroupConfiguration utmModuleGroupConfiguration = utmModuleGroupConfigurationRepository.findById(config.getId()) .orElseThrow(() -> new RuntimeException(String.format("Configuration id %s not found", config.getId()))); - if (config.getConfValue().equals(utmModuleGroupConfiguration.getConfValue())){ + if (config.getConfValue().equals(utmModuleGroupConfiguration.getConfValue())) { config.setConfValue(CipherUtil.decrypt(utmModuleGroupConfiguration.getConfValue(), ENCRYPTION_KEY)); } } - }).collect(Collectors.toList()); + }).collect(Collectors.toList()); } @Transactional @@ -377,7 +260,8 @@ public void updateGroup(List collectorsIds, Long assetGroupId) throws Exce throw new Exception(ctx + ": " + e.getMessage()); } } - @Transactional + + *//*@Transactional public Page searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) throws Exception { final String ctx = CLASSNAME + ".searchGroupsByFilter"; try { @@ -399,7 +283,7 @@ public Page searchGroupsByFilter(AssetGroupFilter filter, Pageabl } catch (Exception e) { throw new Exception(ctx + ": " + e.getMessage()); } - } + }*//* private String searchQueryBuilder(AssetGroupFilter filters) { StringBuilder 
sb = new StringBuilder(); @@ -463,7 +347,7 @@ private String paginateAndSort(String query, Pageable pageable) { sb.append("ORDER BY "); boolean firstProperty = true; - List orders = sort.stream().collect(Collectors.toList()); + List orders = sort.stream().toList(); for (Sort.Order order : orders) { sb.append(String.format(firstProperty ? "%1$s %2$s" : ", %1$s %2$s", order.getProperty(), order.getDirection().name())); @@ -492,20 +376,20 @@ public void deleteCollector(Long id) throws Exception { this.deleteCollector(collector.get().getHostname(), CollectorModuleEnum.valueOf(collector.get().getModule())); List modules = this.utmModuleGroupRepository.findAllByCollector(id.toString()); - if(!modules.isEmpty()){ + if (!modules.isEmpty()) { UtmModule module = utmModuleRepository.findById(modules.get(0).getModuleId()).get(); - if(module.getModuleActive()){ + if (module.getModuleActive()) { modules = this.utmModuleGroupRepository.findAllByModuleId(module.getId()) - .stream().filter( m -> !m.getCollector().equals(id.toString())) + .stream().filter(m -> !m.getCollector().equals(id.toString())) .toList(); - if(modules.isEmpty()){ + if (modules.isEmpty()) { this.utmModuleService.activateDeactivate(ModuleActivationDTO.builder() - .serverId(module.getServerId()) - .moduleName(module.getModuleName()) - .activationStatus(false) + .serverId(module.getServerId()) + .moduleName(module.getModuleName()) + .activationStatus(false) .build()); } } @@ -517,7 +401,7 @@ public void deleteCollector(Long id) throws Exception { } - public String validateCollectorConfig(CollectorConfigKeysDTO collectorConfig) { + public String validateCollectorConfig(CollectorConfigDTO collectorConfig) { Errors errors = new BeanPropertyBindingResult(collectorConfig, "updateConfigurationKeysBody"); collectorValidatorService.validate(collectorConfig, errors); @@ -528,18 +412,11 @@ public String validateCollectorConfig(CollectorConfigKeysDTO collectorConfig) { } public CollectorConfig 
cacheCurrentCollectorConfig(CollectorDTO collectorDTO) throws CollectorServiceGrpcException { - return this.getCollectorConfig( - ConfigRequest.newBuilder() - .setModule(CollectorModule.valueOf(collectorDTO.getModule().toString())) - .build(), - AuthResponse.newBuilder() - .setId(collectorDTO.getId()) - .setKey(collectorDTO.getCollectorKey()) - .build()); + return this.getCollectorConfig(collectorDTO); } public void updateCollectorConfigViaGrpc( - CollectorConfigKeysDTO collectorConfig, + CollectorConfigDTO collectorConfig, CollectorDTO collectorDTO) throws CollectorConfigurationGrpcException { this.upsertCollectorConfig( @@ -547,15 +424,15 @@ public void updateCollectorConfigViaGrpc( this.mapPasswordConfiguration(collectorConfig.getKeys()), collectorDTO)); } - public void updateCollectorConfigurationKeys(CollectorConfigKeysDTO collectorConfig) throws Exception { + public void updateCollectorConfigurationKeys(CollectorConfigDTO collectorConfig) throws Exception { final String ctx = CLASSNAME + ".updateCollectorConfigurationKeys"; try { List configs = utmModuleGroupRepository .findAllByModuleIdAndCollector(collectorConfig.getModuleId(), - String.valueOf(collectorConfig.getCollector().getId())); + String.valueOf(collectorConfig.getCollector().getId())); List keys = collectorConfig.getKeys(); - if (CollectionUtils.isEmpty(collectorConfig.getKeys())){ + if (CollectionUtils.isEmpty(collectorConfig.getKeys())) { utmModuleGroupRepository.deleteAll(configs); } else { for (UtmModuleGroupConfiguration key : keys) { @@ -590,12 +467,11 @@ public ListRequest getListRequestByHostnameAndModule(String hostname, CollectorM } else if (module != null) { query = "module.Is=" + module.name(); } - ListRequest request = ListRequest.newBuilder() + return ListRequest.newBuilder() .setPageNumber(1) .setPageSize(1000000) .setSearchQuery(query) .setSortBy("id,desc") .build(); - return request; - } + }*/ } diff --git 
a/backend/src/main/java/com/park/utmstack/service/collectors/CollectorService.java b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorService.java new file mode 100644 index 000000000..4b0cfea27 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/collectors/CollectorService.java @@ -0,0 +1,397 @@ +package com.park.utmstack.service.collectors; + +import agent.CollectorOuterClass; +import com.park.utmstack.domain.collector.UtmCollector; +import com.park.utmstack.domain.network_scan.AssetGroupFilter; +import com.park.utmstack.domain.network_scan.UtmAssetGroup; +import com.park.utmstack.repository.collector.UtmCollectorRepository; +import com.park.utmstack.service.application_modules.UtmModuleGroupService; +import com.park.utmstack.service.dto.collectors.CollectorModuleEnum; +import com.park.utmstack.service.dto.collectors.dto.CollectorConfigDTO; +import com.park.utmstack.service.dto.collectors.dto.CollectorDTO; +import com.park.utmstack.service.dto.collectors.dto.ListCollectorsResponseDTO; +import com.park.utmstack.service.dto.network_scan.AssetGroupDTO; +import com.park.utmstack.service.grpc.ListRequest; +import com.park.utmstack.util.exceptions.ApiException; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.http.HttpStatus; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +import javax.persistence.EntityManager; +import javax.persistence.Query; +import java.util.*; +import java.util.stream.Collectors; + +@Service +@Slf4j +@RequiredArgsConstructor +public class CollectorService { + + private final CollectorGrpcService collectorGrpcService; + private final UtmModuleGroupService moduleGroupService; 
+ private final CollectorConfigBuilder CollectorConfigBuilder; + private final UtmCollectorService utmCollectorService; + private final UtmCollectorRepository collectorRepository; + private final EntityManager em; + private final String CLASSNAME = "CollectorService"; + + private static final Set ALLOWED_SORT_COLUMNS = Set.of( + "id", + "group_name", + "created_date", + "type" + ); + + + public void upsertCollectorConfig(CollectorConfigDTO collectorConfig) { + + this.moduleGroupService.updateCollectorConfigurationKeys(collectorConfig); + + CollectorOuterClass.CollectorConfig collector = CollectorConfigBuilder.build(collectorConfig); + collectorGrpcService.upsertCollectorConfig(collector); + } + + public BulkCollectorConfigResponseDTO upsertCollectorsConfig(List collectors) { + + List results = collectors.stream() + .map(this::processSingleCollectorConfig) + .toList(); + + return BulkCollectorConfigResponseDTO.builder() + .results(results) + .build(); + } + + public ListCollectorsResponseDTO listCollector(String hostname, Integer pageNumber, Integer pageSize, String sortBy, CollectorModuleEnum module) { + + String cts = CLASSNAME + ".listCollector"; + + try { + + var request = buildListRequest(pageNumber, pageSize, module, hostname, sortBy); + + CollectorOuterClass.ListCollectorResponse collectorResponse = collectorGrpcService.listCollectors(request); + return mapToListCollectorsResponseDTO(collectorResponse); + + } catch (Exception e) { + log.error("{}: Error listing collectors with hostname={} and module={}: {}", cts, hostname, module, e.getMessage(), e); + throw new ApiException(String.format("%s: Error listing collectors.", cts), HttpStatus.INTERNAL_SERVER_ERROR); + } + + } + + private Optional findCollectorByHostname(String hostname, CollectorModuleEnum module) { + + ListCollectorsResponseDTO response = this.listCollector(hostname, 0, 1, null, module); + + if (response.getCollectors() != null && !response.getCollectors().isEmpty()) { + return 
Optional.of(response.getCollectors().get(0)); + } else { + return Optional.empty(); + } + } + + public void deleteCollector(Long id) { + + String ctx = CLASSNAME + ".deleteCollector"; + + Optional collector = utmCollectorService.findById(id); + + if (collector.isEmpty()) { + + log.error("{}: Collector with id {} not found", ctx, id); + throw new ApiException(String.format("%s: Collector with id %d not found", ctx, id), HttpStatus.NOT_FOUND); + + } /*else if (collector.get().isActive()) { + + var collectorToDelete = collector.get(); + + Optional collectorDTO = this.findCollectorByHostname( + collector.get().getHostname(), + CollectorModuleEnum.valueOf(collectorToDelete.getModule())); + + if (collectorDTO.isEmpty()) { + + log.error("{}: Collector with id {} not found in Agent Manager", ctx, id); + throw new ApiException(String.format("%s: Collector with id %d not found in Agent Manager", ctx, id), HttpStatus.NOT_FOUND); + + } else { + var c = collectorDTO.get(); + collectorGrpcService.deleteCollector(c.getId(), c.getCollectorKey()); + } + + this.moduleGroupService.deleteCollectorById(collectorToDelete.getId()); + + }*/ + + this.utmCollectorService.deleteCollector(id); + } + + public Page searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) { + + final String ctx = CLASSNAME + ".searchGroupsByFilter"; + try { + + Query countQuery = buildSearchQuery(filter, true, pageable); + long total = ((Number) countQuery.getSingleResult()).longValue(); + + Query dataQuery = buildSearchQuery(filter, false, pageable); + List groups = dataQuery.getResultList(); + + enrichGroups(groups); + + List dtos = groups.stream() + .map(AssetGroupDTO::new) + .toList(); + + return new PageImpl<>(dtos, pageable, total); + + } catch (Exception ex) { + log.error("{}: Error searching collector groups with filters {}: {}", ctx, filter, ex.getMessage(), ex); + throw new ApiException(String.format("%s: Error searching asset groups with filters.", ctx), HttpStatus.INTERNAL_SERVER_ERROR); + } 
+ + + } + + private String searchQueryBuilder(AssetGroupFilter filters) { + StringBuilder sb = new StringBuilder(); + sb.append("SELECT DISTINCT ag.* FROM utm_asset_group ag "); + sb.append("LEFT JOIN utm_collectors c ON ag.id = c.group_id "); + + List conditions = new ArrayList<>(); + + if (filters != null) { + + if (filters.getAssetType() != null) { + conditions.add("ag.type = :type"); + } + + if (filters.getId() != null) { + conditions.add("ag.id = :id"); + } + + if (StringUtils.hasText(filters.getGroupName())) { + conditions.add("LOWER(ag.group_name) LIKE :groupName"); + } + + if (filters.getInitDate() != null && filters.getEndDate() != null) { + conditions.add("ag.created_date BETWEEN :initDate AND :endDate"); + } + + if (!CollectionUtils.isEmpty(filters.getAssetIp())) { + conditions.add("c.ip IN (:ips)"); + } + + if (!CollectionUtils.isEmpty(filters.getAssetName())) { + conditions.add("c.hostname IN (:names)"); + } + } + + if (!conditions.isEmpty()) { + sb.append(" WHERE "); + sb.append(String.join(" AND ", conditions)); + } + + return sb.toString(); + } + + + private ListCollectorsResponseDTO getListCollector(ListRequest request) { + + CollectorOuterClass.ListCollectorResponse collectorResponse = collectorGrpcService.listCollectors(request); + return mapToListCollectorsResponseDTO(collectorResponse); + } + + + private ListCollectorsResponseDTO mapToListCollectorsResponseDTO(CollectorOuterClass.ListCollectorResponse response) { + final String ctx = CLASSNAME + ".mapToListCollectorsResponseDTO"; + try { + ListCollectorsResponseDTO dto = new ListCollectorsResponseDTO(); + + List collectorDTOS = response.getRowsList().stream() + .map(this::protoToCollectorDto) + .collect(Collectors.toList()); + + this.utmCollectorService.synchronize(collectorDTOS); + + dto.setCollectors(collectorDTOS); + dto.setTotal(response.getTotal()); + + return dto; + } catch (Exception e) { + log.error("{}: Error mapping ListCollectorResponse to ListCollectorsResponseDTO: {}", ctx, 
e.getMessage()); + throw new ApiException(String.format("%s: Error mapping ListCollectorResponse to ListCollectorsResponseDTO: %s", ctx, e.getMessage()), HttpStatus.INTERNAL_SERVER_ERROR); + } + } + + private CollectorDTO protoToCollectorDto(CollectorOuterClass.Collector collector) { + UtmCollector utmCollector = this.utmCollectorService.saveCollector(collector); + return new CollectorDTO(utmCollector); + } + + private CollectorConfigResultDTO processSingleCollectorConfig(CollectorConfigDTO dto) { + + try { + this.upsertCollectorConfig(dto); + + return CollectorConfigResultDTO.builder() + .collectorId(dto.getCollector().getId()) + .success(true) + .build(); + + } catch (Exception e) { + + return CollectorConfigResultDTO.builder() + .collectorId(dto.getCollector().getId()) + .success(false) + .errorMessage(e.getMessage()) + .build(); + } + } + + private Query buildSearchQuery(AssetGroupFilter filter, boolean countQuery, Pageable pageable) { + + StringBuilder sql = new StringBuilder(); + Map params = new HashMap<>(); + + sql.append(""" + SELECT DISTINCT ag.* + FROM utm_asset_group ag + LEFT JOIN utm_collectors c ON ag.id = c.group_id + WHERE 1=1 + """); + + if (filter.getAssetType() != null) { + sql.append(" AND ag.type = :type "); + params.put("type", filter.getAssetType()); + } + + if (filter.getId() != null) { + sql.append(" AND ag.id = :id "); + params.put("id", filter.getId()); + } + + if (StringUtils.hasText(filter.getGroupName())) { + sql.append(" AND LOWER(ag.group_name) LIKE :groupName "); + params.put("groupName", "%" + filter.getGroupName().toLowerCase() + "%"); + } + + if (filter.getInitDate() != null && filter.getEndDate() != null) { + sql.append(" AND ag.created_date BETWEEN :initDate AND :endDate "); + params.put("initDate", filter.getInitDate()); + params.put("endDate", filter.getEndDate()); + } + + if (!CollectionUtils.isEmpty(filter.getAssetIp())) { + sql.append(" AND c.ip IN :ips "); + params.put("ips", filter.getAssetIp()); + } + + if 
(!CollectionUtils.isEmpty(filter.getAssetName())) { + sql.append(" AND c.hostname IN :names "); + params.put("names", filter.getAssetName()); + } + + if (countQuery) { + sql.insert(0, "SELECT COUNT(*) FROM ("); + sql.append(") AS total"); + } else { + sql.append(buildOrderAndPagination(pageable)); + } + + Query q; + + if (countQuery) { + q = em.createNativeQuery(sql.toString()); + } else { + q = em.createNativeQuery(sql.toString(), UtmAssetGroup.class); + } + + params.forEach(q::setParameter); + + return q; + } + + + private String buildOrderAndPagination(Pageable pageable) { + StringBuilder sb = new StringBuilder(" "); + + Sort sort = pageable.getSort(); + + if (sort.isSorted()) { + sb.append(" ORDER BY "); + + List clauses = sort.stream() + .map(order -> { + validateSortColumn(order.getProperty()); + return order.getProperty() + " " + order.getDirection().name(); + }) + .toList(); + + sb.append(String.join(", ", clauses)); + } + + if (pageable.isPaged()) { + sb.append(" OFFSET ").append(pageable.getOffset()); + sb.append(" LIMIT ").append(pageable.getPageSize()); + } + + return sb.toString(); + } + + private void validateSortColumn(String column) { + if (!ALLOWED_SORT_COLUMNS.contains(column)) { + throw new IllegalArgumentException("Invalid sort column: " + column); + } + } + + private void enrichGroups(List groups) { + if (groups.isEmpty()) return; + + List ids = groups.stream().map(UtmAssetGroup::getId).toList(); + + Map> collectors = + collectorRepository.findAllByGroupIdIn(ids) + .stream() + .collect(Collectors.groupingBy(UtmCollector::getGroupId)); + + groups.forEach(g -> + g.setCollectors(collectors.getOrDefault(g.getId(), List.of())) + ); + } + + private ListRequest buildListRequest(Integer pageNumber, Integer pageSize, + CollectorModuleEnum module, String hostName, String sortBy) { + return ListRequest.newBuilder() + .setPageNumber(pageNumber != null ? pageNumber : 0) + .setPageSize(pageSize != null ? 
pageSize : 10) + .setSearchQuery(buildSearchQuery(module, hostName)) + .setSortBy(sortBy != null ? sortBy : "") + .build(); + } + + private String buildSearchQuery(CollectorModuleEnum module, String hostName) { + List conditions = new ArrayList<>(); + + if (module != null) { + conditions.add("module.Is=" + module.name()); + } + + if (StringUtils.hasText(hostName)) { + conditions.add("hostname.Is=" + hostName); + } + + return String.join("&", conditions); + } + + +} + diff --git a/backend/src/main/java/com/park/utmstack/service/collectors/UtmCollectorService.java b/backend/src/main/java/com/park/utmstack/service/collectors/UtmCollectorService.java index b7be4f3ca..b75d7459f 100644 --- a/backend/src/main/java/com/park/utmstack/service/collectors/UtmCollectorService.java +++ b/backend/src/main/java/com/park/utmstack/service/collectors/UtmCollectorService.java @@ -1,21 +1,31 @@ package com.park.utmstack.service.collectors; import agent.CollectorOuterClass; +import com.park.utmstack.domain.application_modules.UtmModule; +import com.park.utmstack.domain.application_modules.UtmModuleGroup; import com.park.utmstack.domain.collector.UtmCollector; import com.park.utmstack.domain.network_scan.NetworkScanFilter; import com.park.utmstack.repository.collector.UtmCollectorRepository; +import com.park.utmstack.service.dto.application_modules.ModuleActivationDTO; import com.park.utmstack.service.dto.collectors.dto.CollectorDTO; +import com.park.utmstack.util.exceptions.ApiException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.dao.InvalidDataAccessResourceUsageException; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.http.HttpStatus; import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.Assert; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import 
java.util.List; import java.util.Objects; +import java.util.Optional; + +import static com.park.utmstack.config.RestTemplateConfiguration.CLASSNAME; @Service public class UtmCollectorService { @@ -31,7 +41,7 @@ public UtmCollectorService(UtmCollectorRepository utmCollectorRepository) { this.utmCollectorRepository = utmCollectorRepository; } - public UtmCollector saveCollector(CollectorOuterClass.Collector collector){ + public UtmCollector saveCollector(CollectorOuterClass.Collector collector) { UtmCollector utmCollector = utmCollectorRepository.findById(Long.valueOf(collector.getId())) .orElse(new UtmCollector()); @@ -76,24 +86,40 @@ public Page searchByFilters(NetworkScanFilter f, Pageable p) throw Page page = filter(f, p); return page.map(CollectorDTO::new); } catch (Exception e) { - throw new RuntimeException(ctx + ": " + e.getMessage()); + log.error("{}: Error searching collectors with filters {}", ctx, e.getMessage(), e); + throw new ApiException(String.format("%s: Error searching collectors with filters", ctx), HttpStatus.INTERNAL_SERVER_ERROR); } } - private Page filter(NetworkScanFilter f, Pageable p) throws Exception { - final String ctx = CLASSNAME + ".filter"; + private Page filter(NetworkScanFilter f, Pageable p) { + + return utmCollectorRepository.searchByFilters( + f.getAssetIpMacName() == null ? null : "%" + f.getAssetIpMacName() + "%", + f.getDiscoveredInitDate(), + f.getDiscoveredEndDate(), + f.getGroups(), p); + + } + + @Transactional + public void updateGroup(List collectorsIds, Long assetGroupId) { + String ctx = CLASSNAME + ".updateGroup"; + try { - return utmCollectorRepository.searchByFilters( - f.getAssetIpMacName() == null ? 
null : "%" + f.getAssetIpMacName() + "%", - f.getDiscoveredInitDate(), - f.getDiscoveredEndDate(), - f.getGroups(),p); - - } catch (InvalidDataAccessResourceUsageException e) { - String msg = ctx + ": " + e.getMostSpecificCause().getMessage().replaceAll("\n", ""); - throw new Exception(msg); - } catch (Exception e) { - throw new Exception(ctx + ": " + e.getMessage()); + utmCollectorRepository.updateGroup(collectorsIds, assetGroupId); + } catch (Exception ex) { + log.error("{}: Error updating group for collectors {}: {}", ctx, collectorsIds, ex.getMessage(), ex); + throw new ApiException(String.format("%s: Error updating group for collectors %s", ctx, collectorsIds), HttpStatus.INTERNAL_SERVER_ERROR); } + + } + + Optional findById(Long id) { + return utmCollectorRepository.findById(id); + } + + @Transactional + public void deleteCollector(Long id) { + utmCollectorRepository.deleteById(id); } } diff --git a/backend/src/main/java/com/park/utmstack/service/dto/collectors/CollectorHostnames.java b/backend/src/main/java/com/park/utmstack/service/dto/collectors/CollectorHostnames.java deleted file mode 100644 index 08870817a..000000000 --- a/backend/src/main/java/com/park/utmstack/service/dto/collectors/CollectorHostnames.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.park.utmstack.service.dto.collectors; - -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -import java.util.ArrayList; -import java.util.List; - -@Getter -@Setter -@NoArgsConstructor -public class CollectorHostnames { - public List hostname = new ArrayList(); -} diff --git a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorConfigDTO.java b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorConfigDTO.java new file mode 100644 index 000000000..fe78c2f11 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorConfigDTO.java @@ -0,0 +1,26 @@ +package 
com.park.utmstack.service.dto.collectors.dto; + +import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; +import com.park.utmstack.domain.collector.validators.UniqueServerName; +import lombok.Getter; +import lombok.Setter; + +import javax.validation.constraints.NotEmpty; +import javax.validation.constraints.NotNull; +import java.util.List; + +@Setter +@Getter +public class CollectorConfigDTO { + + @NotNull + CollectorDTO collector; + + @NotNull + private Long moduleId; + + @NotNull + @UniqueServerName + private List<UtmModuleGroupConfiguration> keys; + +} diff --git a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorConfigKeysDTO.java b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorConfigKeysDTO.java deleted file mode 100644 index 40af52707..000000000 --- a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorConfigKeysDTO.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.park.utmstack.service.dto.collectors.dto; - -import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; - -import javax.validation.constraints.NotNull; -import java.util.List; - -public class CollectorConfigKeysDTO { - @NotNull - CollectorDTO collector; - @NotNull - private Long moduleId; - @NotNull - private List<UtmModuleGroupConfiguration> keys; - - public Long getModuleId() { - return moduleId; - } - - public void setModuleId(Long moduleId) { - this.moduleId = moduleId; - } - - public List<UtmModuleGroupConfiguration> getKeys() { - return keys; - } - - public void setKeys(List<UtmModuleGroupConfiguration> keys) { - this.keys = keys; - } - - public CollectorDTO getCollector() { - return collector; - } - - public void setCollector(CollectorDTO collector) { - this.collector = collector; - } -} diff --git a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorDTO.java b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorDTO.java index 703656752..3dbb43e3f 100644 ---
a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorDTO.java +++ b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/CollectorDTO.java @@ -1,12 +1,15 @@ package com.park.utmstack.service.dto.collectors.dto; -import agent.CollectorOuterClass.Collector; import com.park.utmstack.domain.collector.UtmCollector; import com.park.utmstack.domain.network_scan.UtmAssetGroup; import com.park.utmstack.service.dto.collectors.CollectorModuleEnum; import com.park.utmstack.service.dto.collectors.CollectorStatusEnum; +import lombok.Getter; +import lombok.Setter; +@Setter +@Getter public class CollectorDTO { private int id; private CollectorStatusEnum status; @@ -39,91 +42,4 @@ public CollectorDTO(UtmCollector collector) { this.active = collector.isActive(); } - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public CollectorStatusEnum getStatus() { - return status; - } - - public void setStatus(CollectorStatusEnum status) { - this.status = status; - } - - public String getCollectorKey() { - return collectorKey; - } - - public void setCollectorKey(String collectorKey) { - this.collectorKey = collectorKey; - } - - public String getIp() { - return ip; - } - - public void setIp(String ip) { - this.ip = ip; - } - - public String getHostname() { - return hostname; - } - - public void setHostname(String hostname) { - this.hostname = hostname; - } - - public String getVersion() { - return version; - } - - public void setVersion(String version) { - this.version = version; - } - - public CollectorModuleEnum getModule() { - return module; - } - - public void setModule(CollectorModuleEnum module) { - this.module = module; - } - - public String getLastSeen() { - return lastSeen; - } - - public void setLastSeen(String lastSeen) { - this.lastSeen = lastSeen; - } - - public String getGroupId() { - return groupId; - } - - public void setGroupId(String groupId) { - this.groupId = groupId; - } - - public 
UtmAssetGroup getGroup() { - return group; - } - - public void setGroup(UtmAssetGroup group) { - this.group = group; - } - - public boolean isActive() { - return active; - } - - public void setActive(boolean active) { - this.active = active; - } } diff --git a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/ListCollectorsResponseDTO.java b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/ListCollectorsResponseDTO.java index 4130476a7..d38813f75 100644 --- a/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/ListCollectorsResponseDTO.java +++ b/backend/src/main/java/com/park/utmstack/service/dto/collectors/dto/ListCollectorsResponseDTO.java @@ -1,26 +1,14 @@ package com.park.utmstack.service.dto.collectors.dto; import com.park.utmstack.service.dto.collectors.dto.CollectorDTO; +import lombok.Getter; +import lombok.Setter; import java.util.List; +@Setter +@Getter public class ListCollectorsResponseDTO { private List<CollectorDTO> collectors; private int total; - - public List<CollectorDTO> getCollectors() { - return collectors; - } - - public void setCollectors(List<CollectorDTO> collectors) { - this.collectors = collectors; - } - - public int getTotal() { - return total; - } - - public void setTotal(int total) { - this.total = total; - } } diff --git a/backend/src/main/java/com/park/utmstack/service/dto/network_scan/UpdateGroupDTO.java b/backend/src/main/java/com/park/utmstack/service/dto/network_scan/UpdateGroupDTO.java new file mode 100644 index 000000000..32015f109 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/dto/network_scan/UpdateGroupDTO.java @@ -0,0 +1,18 @@ +package com.park.utmstack.service.dto.network_scan; + +import lombok.Data; + +import javax.validation.constraints.NotEmpty; +import javax.validation.constraints.NotNull; +import java.util.List; + +@Data +public class UpdateGroupDTO { + + @NotEmpty(message = "assetsIds cannot be empty") + private List<Long> assetsIds; + + @NotNull(message = "assetGroupId is required") + private
Long assetGroupId; +} + diff --git a/backend/src/main/java/com/park/utmstack/service/dto/network_scan/UpdateTypeRequestBody.java b/backend/src/main/java/com/park/utmstack/service/dto/network_scan/UpdateTypeRequestBody.java new file mode 100644 index 000000000..c22ecd393 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/service/dto/network_scan/UpdateTypeRequestBody.java @@ -0,0 +1,18 @@ +package com.park.utmstack.service.dto.network_scan; + +import lombok.Data; +import lombok.Getter; +import lombok.Setter; + +import javax.validation.constraints.NotEmpty; +import java.util.List; + +@Data +@Getter +@Setter +public class UpdateTypeRequestBody { + @NotEmpty + private List assetsIds; + + private Long assetTypeId; +} \ No newline at end of file diff --git a/backend/src/main/java/com/park/utmstack/service/network_scan/UtmAssetGroupService.java b/backend/src/main/java/com/park/utmstack/service/network_scan/UtmAssetGroupService.java index 81fe9e03e..59848eefd 100644 --- a/backend/src/main/java/com/park/utmstack/service/network_scan/UtmAssetGroupService.java +++ b/backend/src/main/java/com/park/utmstack/service/network_scan/UtmAssetGroupService.java @@ -75,33 +75,26 @@ public Page findAll(Pageable pageable) { return utmAssetGroupRepository.findAll(pageable); } - public Page searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) throws Exception { - final String ctx = CLASSNAME + ".searchGroupsByFilter"; - try { - String query = searchQueryBuilder(filter); - String queryWithPaginationAndSort = paginateAndSort(query, pageable); - BigInteger count = (BigInteger) em.createNativeQuery(String.format("SELECT count(*) FROM (%1$s) AS total", query)).getSingleResult(); - List results = new ArrayList<>(em.createNativeQuery(queryWithPaginationAndSort, UtmAssetGroup.class).getResultList()); - - if (!CollectionUtils.isEmpty(results)) { - results.forEach(g -> { - Optional> assetsOpt = networkScanRepository.findAllByGroupId(g.getId()); - - if (assetsOpt.isPresent()) { - 
g.setAssets(assetsOpt.get()); - List collect = assetsOpt.get().stream().map(UtmNetworkScan::getAssetName).collect(Collectors.toList()); - List metrics = assetMetricsRepository.findAllByAssetNameIn(collect); - g.setMetrics(metrics); - } - }); - } - return new PageImpl<>(results.stream().map(AssetGroupDTO::new).collect(Collectors.toList()), pageable, count.longValue()); - } catch (InvalidDataAccessResourceUsageException e) { - String msg = ctx + ": " + e.getMostSpecificCause().getMessage().replaceAll("\n", ""); - throw new Exception(msg); - } catch (Exception e) { - throw new Exception(ctx + ": " + e.getMessage()); + public Page searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) { + + String query = searchQueryBuilder(filter); + String queryWithPaginationAndSort = paginateAndSort(query, pageable); + BigInteger count = (BigInteger) em.createNativeQuery(String.format("SELECT count(*) FROM (%1$s) AS total", query)).getSingleResult(); + List results = new ArrayList<>(em.createNativeQuery(queryWithPaginationAndSort, UtmAssetGroup.class).getResultList()); + + if (!CollectionUtils.isEmpty(results)) { + results.forEach(g -> { + Optional> assetsOpt = networkScanRepository.findAllByGroupId(g.getId()); + + if (assetsOpt.isPresent()) { + g.setAssets(assetsOpt.get()); + List collect = assetsOpt.get().stream().map(UtmNetworkScan::getAssetName).collect(Collectors.toList()); + List metrics = assetMetricsRepository.findAllByAssetNameIn(collect); + g.setMetrics(metrics); + } + }); } + return new PageImpl<>(results.stream().map(AssetGroupDTO::new).collect(Collectors.toList()), pageable, count.longValue()); } @@ -145,61 +138,61 @@ private String searchQueryBuilder(AssetGroupFilter filters) { // groupName if (StringUtils.hasText(filters.getGroupName())) { sb.append(where ? 
"WHERE " : "AND ") - .append(String.format("lower(utm_asset_group.group_name) LIKE '%%%1$s%%'\n", - filters.getGroupName().toLowerCase())); + .append(String.format("lower(utm_asset_group.group_name) LIKE '%%%1$s%%'\n", + filters.getGroupName().toLowerCase())); where = false; } // createdDate if (Objects.nonNull(filters.getInitDate()) && Objects.nonNull(filters.getEndDate())) { sb.append(where ? "WHERE " : "AND ") - .append(String.format("(utm_asset_group.created_date BETWEEN '%1$s' AND '%2$s')\n", - filters.getInitDate(), filters.getEndDate())); + .append(String.format("(utm_asset_group.created_date BETWEEN '%1$s' AND '%2$s')\n", + filters.getInitDate(), filters.getEndDate())); where = false; } // assetType if (!CollectionUtils.isEmpty(filters.getType())) { String types = filters.getType().stream() - .map(type -> String.format("'%1$s'", type)).collect(Collectors.joining(",")); + .map(type -> String.format("'%1$s'", type)).collect(Collectors.joining(",")); sb.append(where ? "WHERE " : "AND ") - .append(String.format("utm_network_scan.asset_type_id IN (SELECT utm_asset_types.id FROM utm_asset_types WHERE utm_asset_types.type_name IN (%1$s))\n", types)); + .append(String.format("utm_network_scan.asset_type_id IN (SELECT utm_asset_types.id FROM utm_asset_types WHERE utm_asset_types.type_name IN (%1$s))\n", types)); where = false; } // serverName if (!CollectionUtils.isEmpty(filters.getProbe())) { String probes = filters.getProbe().stream() - .map(probe -> String.format("'%1$s'", probe)).collect(Collectors.joining(",")); + .map(probe -> String.format("'%1$s'", probe)).collect(Collectors.joining(",")); sb.append(where ? 
"WHERE " : "AND ") - .append(String.format("utm_network_scan.server_name IN (%1$s)\n", probes)); + .append(String.format("utm_network_scan.server_name IN (%1$s)\n", probes)); where = false; } // assetOs if (!CollectionUtils.isEmpty(filters.getOs())) { String oss = filters.getOs().stream() - .map(os -> String.format("'%1$s'", os)).collect(Collectors.joining(",")); + .map(os -> String.format("'%1$s'", os)).collect(Collectors.joining(",")); sb.append(where ? "WHERE " : "AND ") - .append(String.format("utm_network_scan.asset_os IN (%1$s)\n", oss)); + .append(String.format("utm_network_scan.asset_os IN (%1$s)\n", oss)); where = false; } // assetIp if (!CollectionUtils.isEmpty(filters.getAssetIp())) { String ips = filters.getAssetIp().stream() - .map(ip -> String.format("'%1$s'", ip)).collect(Collectors.joining(",")); + .map(ip -> String.format("'%1$s'", ip)).collect(Collectors.joining(",")); sb.append(where ? "WHERE " : "AND ") - .append(String.format("utm_network_scan.asset_ip IN (%1$s)\n", ips)); + .append(String.format("utm_network_scan.asset_ip IN (%1$s)\n", ips)); where = false; } // assetName if (!CollectionUtils.isEmpty(filters.getAssetName())) { String names = filters.getAssetName().stream() - .map(name -> String.format("'%1$s'", name)).collect(Collectors.joining(",")); + .map(name -> String.format("'%1$s'", name)).collect(Collectors.joining(",")); sb.append(where ? 
"WHERE " : "AND ") - .append(String.format("utm_network_scan.asset_name IN (%1$s)\n", names)); + .append(String.format("utm_network_scan.asset_name IN (%1$s)\n", names)); } return sb.toString(); diff --git a/backend/src/main/java/com/park/utmstack/service/network_scan/UtmNetworkScanService.java b/backend/src/main/java/com/park/utmstack/service/network_scan/UtmNetworkScanService.java index 4da77b211..1901434ba 100644 --- a/backend/src/main/java/com/park/utmstack/service/network_scan/UtmNetworkScanService.java +++ b/backend/src/main/java/com/park/utmstack/service/network_scan/UtmNetworkScanService.java @@ -274,20 +274,23 @@ public ByteArrayOutputStream getNetworkScanReport(NetworkScanFilter f, Pageable private Page filter(NetworkScanFilter f, Pageable p) throws Exception { final String ctx = CLASSNAME + ".filter"; try { - - /*if (page.getTotalPages() > 0) { - List utmDataInputStatuses = utmDataInputStatusRepository.findAll().stream().sorted(Comparator.comparing(UtmDataInputStatus::getSource)).collect(Collectors.toList()); - page.forEach(m -> m.setMetrics(assetMetricsRepository.findAllByAssetName(m.getAssetName()))); - page.forEach(m -> m.setDataInputList(utmDataInputStatuses.stream().filter( - inputStatus -> inputStatus.getSource().equalsIgnoreCase(m.getAssetName()) || - inputStatus.getSource().equalsIgnoreCase(m.getAssetIp())).collect(Collectors.toList()))); - }*/ - return networkScanRepository.searchByFilters( f.getAssetIpMacName() == null ? 
null : "%" + f.getAssetIpMacName() + "%", - f.getOs(), f.getAlias(), f.getType(), f.getAlive(), f.getStatus(), - f.getProbe(), f.getOpenPorts(), f.getDiscoveredInitDate(), - f.getDiscoveredEndDate(), f.getGroups(), f.getRegisteredMode(), f.getAgent(), f.getOsPlatform(), f.getDataTypes(), p); + f.getOs(), !CollectionUtils.isEmpty(f.getOs()), + f.getAlias(), !CollectionUtils.isEmpty(f.getAlias()), + f.getType(), !CollectionUtils.isEmpty(f.getType()), + f.getAlive(), !CollectionUtils.isEmpty(f.getAlive()), + f.getStatus(), !CollectionUtils.isEmpty(f.getStatus()), + f.getProbe(), !CollectionUtils.isEmpty(f.getProbe()), + f.getOpenPorts(), !CollectionUtils.isEmpty(f.getOpenPorts()), + f.getDiscoveredInitDate(), + f.getDiscoveredEndDate(), + f.getGroups(), !CollectionUtils.isEmpty(f.getGroups()), + f.getRegisteredMode(), + f.getAgent(), !CollectionUtils.isEmpty(f.getAgent()), + f.getOsPlatform(), !CollectionUtils.isEmpty(f.getOsPlatform()), + f.getDataTypes(), !CollectionUtils.isEmpty(f.getDataTypes()), + p); } catch (InvalidDataAccessResourceUsageException e) { String msg = ctx + ": " + e.getMostSpecificCause().getMessage().replaceAll("\n", ""); throw new Exception(msg); diff --git a/backend/src/main/java/com/park/utmstack/service/validators/collector/CollectorValidatorService.java b/backend/src/main/java/com/park/utmstack/service/validators/collector/CollectorValidatorService.java index ceaccbfaf..02d1bca1b 100644 --- a/backend/src/main/java/com/park/utmstack/service/validators/collector/CollectorValidatorService.java +++ b/backend/src/main/java/com/park/utmstack/service/validators/collector/CollectorValidatorService.java @@ -1,8 +1,7 @@ package com.park.utmstack.service.validators.collector; import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; -import com.park.utmstack.service.dto.collectors.dto.CollectorConfigKeysDTO; -import com.park.utmstack.web.rest.application_modules.UtmModuleGroupConfigurationResource; +import 
com.park.utmstack.service.dto.collectors.dto.CollectorConfigDTO; import org.springframework.stereotype.Service; import org.springframework.validation.Errors; import org.springframework.validation.Validator; @@ -14,12 +13,12 @@ public class CollectorValidatorService implements Validator { @Override public boolean supports(Class clazz) { - return CollectorConfigKeysDTO.class.equals(clazz); + return CollectorConfigDTO.class.equals(clazz); } @Override public void validate(Object target, Errors errors) { - CollectorConfigKeysDTO updateConfigurationKeysBody = (CollectorConfigKeysDTO) target; + CollectorConfigDTO updateConfigurationKeysBody = (CollectorConfigDTO) target; Map hostNames = updateConfigurationKeysBody.getKeys().stream() .filter(config -> config.getConfName().equals("Hostname")) diff --git a/backend/src/main/java/com/park/utmstack/util/exceptions/CurrentUserLoginNotFoundException.java b/backend/src/main/java/com/park/utmstack/util/exceptions/CurrentUserLoginNotFoundException.java index edc0d38a7..7ee8ad0b2 100644 --- a/backend/src/main/java/com/park/utmstack/util/exceptions/CurrentUserLoginNotFoundException.java +++ b/backend/src/main/java/com/park/utmstack/util/exceptions/CurrentUserLoginNotFoundException.java @@ -1,7 +1,9 @@ package com.park.utmstack.util.exceptions; -public class CurrentUserLoginNotFoundException extends RuntimeException { +import org.springframework.http.HttpStatus; + +public class CurrentUserLoginNotFoundException extends ApiException { public CurrentUserLoginNotFoundException(String message) { - super(message); + super(message, HttpStatus.NOT_FOUND); } } diff --git a/backend/src/main/java/com/park/utmstack/web/rest/AccountResource.java b/backend/src/main/java/com/park/utmstack/web/rest/AccountResource.java index ec9ab0fe4..219c85241 100644 --- a/backend/src/main/java/com/park/utmstack/web/rest/AccountResource.java +++ b/backend/src/main/java/com/park/utmstack/web/rest/AccountResource.java @@ -1,6 +1,8 @@ package 
com.park.utmstack.web.rest; +import com.park.utmstack.aop.logging.AuditEvent; +import com.park.utmstack.aop.logging.Loggable; import com.park.utmstack.domain.User; import com.park.utmstack.domain.application_events.enums.ApplicationEventType; import com.park.utmstack.repository.UserRepository; @@ -70,7 +72,7 @@ public ResponseEntity isAuthenticated(HttpServletRequest request) { log.error(msg); applicationEventService.createEvent(msg, ApplicationEventType.ERROR); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); + HeaderUtil.createFailureAlert("", "", msg)).body(null); } } @@ -85,8 +87,8 @@ public UserDTO getAccount() { final String ctx = CLASSNAME + ".getAccount"; try { return userService.getUserWithAuthorities() - .map(UserDTO::new) - .orElseThrow(() -> new InternalServerErrorException("User could not be found")); + .map(UserDTO::new) + .orElseThrow(() -> new InternalServerErrorException("User could not be found")); } catch (InternalServerErrorException e) { String msg = ctx + ": " + e.getMessage(); log.error(msg); @@ -116,7 +118,7 @@ public void saveAccount(@Valid @RequestBody UserDTO userDTO) { throw new InternalServerErrorException("User could not be found"); userService.updateUser(userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail(), - userDTO.getLangKey(), userDTO.getImageUrl()); + userDTO.getLangKey(), userDTO.getImageUrl()); } catch (Exception e) { String msg = ctx + ": " + e.getMessage(); log.error(msg); @@ -156,8 +158,8 @@ public void requestPasswordReset(@RequestBody String mail) { final String ctx = CLASSNAME + ".requestPasswordReset"; try { mailService.sendPasswordResetMail( - userService.requestPasswordReset(mail) - .orElseThrow(EmailNotFoundException::new)); + userService.requestPasswordReset(mail) + .orElseThrow(EmailNotFoundException::new)); } catch (Exception e) { String msg = ctx + ": " + e.getMessage(); log.error(msg); @@ -166,34 +168,23 @@ public void 
requestPasswordReset(@RequestBody String mail) { } } - /** - * POST /account/reset-password/finish : Finish to reset the password of the user - * - * @param keyAndPassword the generated key and the new password - * @throws InvalidPasswordException 400 (Bad Request) if the password is incorrect - * @throws RuntimeException 500 (Internal Server Error) if the password could not be reset - */ + @AuditEvent( + attemptType = ApplicationEventType.RESET_USER_PASSWORD_ATTEMPT, + attemptMessage = "Attempt to reset user password initiated", + successType = ApplicationEventType.RESET_USER_PASSWORD_SUCCESS, + successMessage = "User password reset successfully" + ) @PostMapping(path = "/account/reset-password/finish") public void finishPasswordReset(@RequestBody KeyAndPasswordVM keyAndPassword) { - final String ctx = CLASSNAME + ".finishPasswordReset"; - try { - validatePasswordLength(keyAndPassword.getNewPassword()); - Optional user = - userService.completePasswordReset(keyAndPassword.getNewPassword(), keyAndPassword.getKey()); - if (user.isEmpty()) - throw new InternalServerErrorException("No user was found for this reset key"); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - throw new RuntimeException(msg); - } + validatePasswordLength(keyAndPassword.getNewPassword()); + userService.completePasswordReset(keyAndPassword.getNewPassword(), keyAndPassword.getKey()); + } private void validatePasswordLength(String password) { if (!StringUtils.hasText(password) || password.length() < ManagedUserVM.PASSWORD_MIN_LENGTH || - password.length() > ManagedUserVM.PASSWORD_MAX_LENGTH) + password.length() > ManagedUserVM.PASSWORD_MAX_LENGTH) throw new InvalidPasswordException(); } } diff --git a/backend/src/main/java/com/park/utmstack/web/rest/application_modules/UtmModuleGroupResource.java 
b/backend/src/main/java/com/park/utmstack/web/rest/application_modules/UtmModuleGroupResource.java index 9f10dbe3c..a8090ad4d 100644 --- a/backend/src/main/java/com/park/utmstack/web/rest/application_modules/UtmModuleGroupResource.java +++ b/backend/src/main/java/com/park/utmstack/web/rest/application_modules/UtmModuleGroupResource.java @@ -7,12 +7,16 @@ import com.park.utmstack.domain.application_modules.UtmModuleGroupConfiguration; import com.park.utmstack.domain.application_modules.factory.ModuleFactory; import com.park.utmstack.domain.application_modules.types.ModuleConfigurationKey; +import com.park.utmstack.event_processor.EventProcessorManagerService; import com.park.utmstack.service.application_events.ApplicationEventService; import com.park.utmstack.service.application_modules.UtmModuleGroupConfigurationService; import com.park.utmstack.service.application_modules.UtmModuleGroupService; import com.park.utmstack.service.application_modules.UtmModuleService; +import com.park.utmstack.service.dto.application_modules.ModuleDTO; +import com.park.utmstack.service.dto.application_modules.UtmModuleMapper; import com.park.utmstack.web.rest.util.HeaderUtil; import com.park.utmstack.web.rest.vm.ModuleGroupVM; +import lombok.RequiredArgsConstructor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.dao.DataIntegrityViolationException; @@ -32,6 +36,7 @@ * REST controller for managing UtmConfigurationGroup. 
*/ @RestController +@RequiredArgsConstructor @RequestMapping("/api") public class UtmModuleGroupResource { @@ -45,18 +50,8 @@ public class UtmModuleGroupResource { private final ModuleFactory moduleFactory; private final UtmModuleService moduleService; private final UtmModuleGroupConfigurationService moduleGroupConfigurationService; - - public UtmModuleGroupResource(UtmModuleGroupService moduleGroupService, - ApplicationEventService eventService, - ModuleFactory moduleFactory, - UtmModuleService moduleService, - UtmModuleGroupConfigurationService moduleGroupConfigurationService) { - this.moduleGroupService = moduleGroupService; - this.eventService = eventService; - this.moduleFactory = moduleFactory; - this.moduleService = moduleService; - this.moduleGroupConfigurationService = moduleGroupConfigurationService; - } + private final UtmModuleMapper moduleMapper; + private final EventProcessorManagerService eventProcessorManagerService; @PostMapping("/utm-configuration-groups") @AuditEvent( @@ -165,7 +160,7 @@ public ResponseEntity getConfigurationGroup(@PathVariable Long g public ResponseEntity deleteSingleModuleGroup(@RequestParam Long groupId) { final String ctx = CLASSNAME + ".deleteSingleModuleGroup"; - moduleGroupService.delete(groupId); + moduleGroupService.deleteAndFetch(groupId); return ResponseEntity.ok().build(); } diff --git a/backend/src/main/java/com/park/utmstack/web/rest/collectors/UtmCollectorResource.java b/backend/src/main/java/com/park/utmstack/web/rest/collectors/UtmCollectorResource.java index fd962beae..e421dc29b 100644 --- a/backend/src/main/java/com/park/utmstack/web/rest/collectors/UtmCollectorResource.java +++ b/backend/src/main/java/com/park/utmstack/web/rest/collectors/UtmCollectorResource.java @@ -1,376 +1,120 @@ package com.park.utmstack.web.rest.collectors; -import agent.CollectorOuterClass.CollectorConfig; -import agent.Common.ListRequest; +import com.park.utmstack.aop.logging.AuditEvent; import 
com.park.utmstack.domain.application_events.enums.ApplicationEventType; import com.park.utmstack.domain.application_modules.UtmModuleGroup; import com.park.utmstack.domain.network_scan.AssetGroupFilter; import com.park.utmstack.domain.network_scan.NetworkScanFilter; -import com.park.utmstack.service.application_events.ApplicationEventService; -import com.park.utmstack.service.application_modules.UtmModuleGroupConfigurationService; import com.park.utmstack.service.application_modules.UtmModuleGroupService; -import com.park.utmstack.service.collectors.CollectorOpsService; +import com.park.utmstack.service.collectors.CollectorService; import com.park.utmstack.service.collectors.UtmCollectorService; import com.park.utmstack.service.dto.collectors.CollectorActionEnum; -import com.park.utmstack.service.dto.collectors.CollectorHostnames; -import com.park.utmstack.service.dto.collectors.dto.CollectorConfigKeysDTO; +import com.park.utmstack.service.dto.collectors.dto.CollectorConfigDTO; import com.park.utmstack.service.dto.collectors.dto.CollectorDTO; import com.park.utmstack.service.dto.collectors.CollectorModuleEnum; -import com.park.utmstack.service.dto.collectors.dto.ErrorResponse; import com.park.utmstack.service.dto.collectors.dto.ListCollectorsResponseDTO; import com.park.utmstack.service.dto.network_scan.AssetGroupDTO; -import com.park.utmstack.util.ResponseUtil; -import com.park.utmstack.web.rest.errors.BadRequestAlertException; -import com.park.utmstack.web.rest.errors.InternalServerErrorException; -import com.park.utmstack.web.rest.network_scan.UtmNetworkScanResource; +import com.park.utmstack.service.dto.network_scan.UpdateGroupDTO; import com.park.utmstack.web.rest.util.HeaderUtil; import com.park.utmstack.web.rest.util.PaginationUtil; -import com.utmstack.grpc.exception.CollectorConfigurationGrpcException; -import com.utmstack.grpc.exception.CollectorServiceGrpcException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
lombok.RequiredArgsConstructor; import org.springdoc.api.annotations.ParameterObject; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.http.HttpHeaders; -import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import javax.validation.Valid; -import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; /** * REST controller for managing {@link UtmCollectorResource}. */ @RestController -@RequestMapping("/api") +@RequiredArgsConstructor +@RequestMapping("/api/collectors") public class UtmCollectorResource { - private static final String CLASSNAME = "UtmCollectorResource"; - private final CollectorOpsService collectorService; - private final Logger log = LoggerFactory.getLogger(UtmCollectorResource.class); - private final ApplicationEventService applicationEventService; - private final UtmModuleGroupConfigurationService moduleGroupConfigurationService; - private final UtmModuleGroupService moduleGroupService; - - private final ApplicationEventService eventService; - private final UtmCollectorService utmCollectorService; - - public UtmCollectorResource(CollectorOpsService collectorService, - ApplicationEventService applicationEventService, - UtmModuleGroupConfigurationService moduleGroupConfigurationService, - UtmModuleGroupService moduleGroupService, - ApplicationEventService eventService, - UtmCollectorService utmCollectorService) { - - this.collectorService = collectorService; - this.applicationEventService = applicationEventService; - this.moduleGroupConfigurationService = moduleGroupConfigurationService; - this.moduleGroupService = moduleGroupService; - this.eventService = eventService; - this.utmCollectorService = utmCollectorService; + private final CollectorService collectorService; + + @AuditEvent( + attemptType = ApplicationEventType.CONFIG_UPDATE_ATTEMPT, + 
successType = ApplicationEventType.CONFIG_UPDATE_SUCCESS, + attemptMessage = "Attempt to upsert collector configuration initiated", + successMessage = "Collector configuration upserted successfully" + ) + @PostMapping("/config") + public ResponseEntity upsertCollectorConfig(@Valid @RequestBody CollectorConfigDTO collectorConfig, + @RequestParam(name = "action", defaultValue = "CREATE") CollectorActionEnum action) { + + collectorService.upsertCollectorConfig(collectorConfig); + return ResponseEntity.noContent().build(); } - /** - * {@code POST /collector-config} : Create or update the collector configs. - * - * @param collectorConfig the collector configs to be created/updated in the agent manager and updated in database. - * @return the {@link ResponseEntity} with status {@code 204 (No Content)}, status {@code 400 (Bad request)} if the internal key is not set, - * status {@code 502 (Bad Gateway)} if the agent manager returns an error, or with status {@code 500 (Internal Server Error)} if the database couldn't - * persist the configurations. 
- */ - @PostMapping("/collector-config") - public ResponseEntity upsertCollectorConfig( - @Valid @RequestBody CollectorConfigKeysDTO collectorConfig, - @RequestParam(name = "action", defaultValue = "CREATE") CollectorActionEnum action) { - - final String ctx = CLASSNAME + ".upsertCollectorConfig"; - CollectorConfig cacheConfig = null; - - // Validate collector configuration - String validationErrorMessage = this.collectorService.validateCollectorConfig(collectorConfig); - if (validationErrorMessage != null) { - return logAndResponse(new ErrorResponse(validationErrorMessage, HttpStatus.PRECONDITION_FAILED)); - } - - try { - cacheConfig = this.collectorService.cacheCurrentCollectorConfig(collectorConfig.getCollector()); - this.upsert(collectorConfig); - return ResponseEntity.noContent().build(); - - } catch (Exception e) { - return handleUpdateError(e, cacheConfig, collectorConfig.getCollector()); - } - } - - /** - * {@code GET /collectors-list} : Get all collectors list by module. - * - * @param pageNumber the page number to show results from. - * @param pageSize the number of items to show in the page. - * @param module the module used to filter the collectors list. If no value is set, returns collectors by all modules - * @param sortBy the criteria to sort the results. - * @return the {@link ResponseEntity} with status {@code 204 (No Content)}, status {@code 400 (Bad request)} if the internal key is not set, - * or with status {@code 502 (Bad Gateway)} if the agent manager returns an error. 
- */ - @GetMapping("/collectors-list") - public ResponseEntity listCollectorsByModule(@RequestParam(required = false) Integer pageNumber, - @RequestParam(required = false) Integer pageSize, + @GetMapping + public ResponseEntity listCollectorsByModule(@RequestParam(required = false, defaultValue = "0") Integer pageNumber, + @RequestParam(required = false, defaultValue = "10") Integer pageSize, + @RequestParam(required = false) String hostname, @RequestParam(required = false) CollectorModuleEnum module, @RequestParam(required = false) String sortBy) { - final String ctx = CLASSNAME + ".listCollectorsByModule"; - try { - ListRequest request = ListRequest.newBuilder() - .setPageNumber(pageNumber != null ? pageNumber : 0) - .setPageSize(pageSize != null ? pageSize : 1000000) - .setSearchQuery(module != null ? "module.Is=" + module.name() : "") - .setSortBy(sortBy != null ? sortBy : "") - .build(); - ListCollectorsResponseDTO response = collectorService.listCollector(request); - HttpHeaders headers = new HttpHeaders(); - headers.add("X-Total-Count", Long.toString(response.getTotal())); - return ResponseEntity.ok().headers(headers).body(response); - } catch (BadRequestAlertException e) { - String msg = ctx + ": " + e.getLocalizedMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(HttpStatus.BAD_REQUEST, msg); - } catch (CollectorServiceGrpcException e) { - String msg = ctx + ": UtmCollector manager is not available or was an error getting the collector list. " + e.getLocalizedMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(HttpStatus.BAD_GATEWAY, msg); - } - } - /** - * {@code GET /collector-hostnames} : Get all collector hostnames by module. - * - * @param pageNumber the page number to show results from. - * @param pageSize the number of items to show in the page. 
- * @param module the module used to filter the collectors list. If no value is set, returns collectors by all modules - * @param sortBy the criteria to sort the results. - * @return the {@link ResponseEntity} with status {@code 200 (Ok)}, status {@code 400 (Bad request)} if the internal key is not set, - * or with status {@code 502 (Bad Gateway)} if the agent manager returns an error. - */ - @GetMapping("/collector-hostnames") - public ResponseEntity listCollectorHostNames(@RequestParam(required = false) Integer pageNumber, - @RequestParam(required = false) Integer pageSize, - @RequestParam(required = false) CollectorModuleEnum module, - @RequestParam(required = false) String sortBy) { - final String ctx = CLASSNAME + ".listCollectorHostNames"; - try { - ListRequest request = ListRequest.newBuilder() - .setPageNumber(pageNumber != null ? pageNumber : 0) - .setPageSize(pageSize != null ? pageSize : 1000000) - .setSearchQuery(module != null ? "module.Is=" + module : "") - .setSortBy(sortBy != null ? sortBy : "") - .build(); - return ResponseEntity.ok().body(collectorService.listCollectorHostnames(request)); - } catch (BadRequestAlertException e) { - String msg = ctx + ": " + e.getLocalizedMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(HttpStatus.BAD_REQUEST, msg); - } catch (CollectorServiceGrpcException e) { - String msg = ctx + ": UtmCollector manager is not available or the parameters are wrong, please check." 
+ e.getLocalizedMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(HttpStatus.BAD_GATEWAY, msg); - } + ListCollectorsResponseDTO response = collectorService.listCollector(hostname, pageNumber, pageSize, sortBy, module); + HttpHeaders headers = new HttpHeaders(); + headers.add("X-Total-Count", Long.toString(response.getTotal())); + return ResponseEntity.ok().headers(headers).body(response); } - /** - * {@code GET /collector-by-hostname-and-module} : Get collector's list according to the request params. - * - * @param hostname the host name to search for. - * @param module the collector module to search for - * @return the {@link ResponseEntity} with status {@code 204 (No Content)}, status {@code 400 (Bad request)} if the internal key is not set, - * or with status {@code 502 (Bad Gateway)} if the agent manager returns an error. - */ - @GetMapping("/collector-by-hostname-and-module") - public ResponseEntity listCollectorByHostNameAndModule(@RequestParam String hostname, - @RequestParam CollectorModuleEnum module) { - final String ctx = CLASSNAME + ".listCollectorByHostNameAndModule"; - try { - return ResponseEntity.ok().body(collectorService.listCollector( - collectorService.getListRequestByHostnameAndModule(hostname, module))); - } catch (BadRequestAlertException e) { - String msg = ctx + ": " + e.getLocalizedMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(HttpStatus.BAD_REQUEST, msg); - } catch (CollectorServiceGrpcException e) { - String msg = ctx + ": UtmCollector manager is not available or was an error getting configuration. 
" + e.getLocalizedMessage(); - log.error(msg); - applicationEventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(HttpStatus.BAD_GATEWAY, msg); - } - } - - @GetMapping("/groups-by-collectors/{collectorId}") + @GetMapping("/{collectorId}/module-groups") public ResponseEntity> getModuleGroups(@PathVariable String collectorId) { - final String ctx = CLASSNAME + ".getModuleGroups"; - try { - return ResponseEntity.ok(moduleGroupService.findAllByCollectorId(collectorId)); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - log.error(msg); - eventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); - } - } - @PutMapping("/updateGroup") - public ResponseEntity updateGroup(@Valid @RequestBody UtmNetworkScanResource.UpdateGroupRequestBody body) { - final String ctx = CLASSNAME + ".updateGroup"; - try { - collectorService.updateGroup(body.getAssetsIds(), body.getAssetGroupId()); - return ResponseEntity.ok().build(); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - log.error(msg); - eventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); - } - } - - @GetMapping("/searchGroupsByFilter") - public ResponseEntity> searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) { - final String ctx = CLASSNAME + ".searchGroupsByFilter"; - try { + return ResponseEntity.ok(moduleGroupService.findAllByCollectorId(collectorId)); - Page page = collectorService.searchGroupsByFilter(filter, pageable); - HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/utm-asset-groups/searchGroupsByFilter"); - return ResponseEntity.ok().headers(headers).body(page.getContent()); - } catch (Exception e) { - String msg = ctx + ": " 
+ e.getMessage(); - log.error(msg); - eventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); - } } - @GetMapping("/search-by-filters") - public ResponseEntity> searchByFilters(@ParameterObject NetworkScanFilter filters, - @ParameterObject Pageable pageable) { - final String ctx = CLASSNAME + ".searchByFilters"; - try { - collectorService.listCollector(ListRequest.newBuilder() - .setPageNumber(0) - .setPageSize(1000000) - .setSortBy("") - .build()); - Page page = this.utmCollectorService.searchByFilters(filters, pageable); - HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/api/search-by-filters"); - return ResponseEntity.ok().headers(headers).body(page.getContent()); - } catch (Exception e) { - String msg = ctx + ": " + e.getMessage(); - log.error(msg); - eventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); - } - } + @PutMapping("/asset-group") + public ResponseEntity updateGroup(@Valid @RequestBody UpdateGroupDTO body) { - @DeleteMapping("/collectors/{id}") - public ResponseEntity deleteCollector(@PathVariable Long id) { + utmCollectorService.updateGroup(body.getAssetsIds(), body.getAssetGroupId()); - try { - log.debug("REST request to delete UtmCollector : {}", id); - collectorService.deleteCollector(id); - return ResponseEntity.ok().headers(HeaderUtil.createEntityDeletionAlert("UtmCollector", id.toString())).build(); - } catch (Exception e) { - applicationEventService.createEvent(e.getMessage(), ApplicationEventType.ERROR); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("UtmCollector", null, e.getMessage())).body(null); - } + return ResponseEntity.ok().build(); } - @PostMapping("/collectors-config") - public 
ResponseEntity> upsertCollectorsConfig(@RequestBody List collectors) { - Map results = new HashMap<>(); - final String ctx = CLASSNAME + ".upsertCollectorsConfig"; - CollectorConfig cacheConfig = null; - - List> collectorsResults = new ArrayList<>(); - for (CollectorConfigKeysDTO collectorConfig : collectors) { - Map collectorResult = new HashMap<>(); - collectorResult.put("collectorId", collectorConfig.getCollector().getId()); - try { - cacheConfig = this.collectorService.cacheCurrentCollectorConfig(collectorConfig.getCollector()); - this.upsert(collectorConfig); - collectorResult.put("status", "success"); - } catch (Exception e) { - ErrorResponse error = this.getError(e, cacheConfig); - collectorResult.put("status", "failure"); - collectorResult.put("errorMessage", error.getMessage()); - } - collectorsResults.add(collectorResult); - } - - results.put("results", collectorsResults); - return ResponseEntity.status(HttpStatus.MULTI_STATUS).body(results); - } - - private ResponseEntity handleUpdateError(Exception e, CollectorConfig cacheConfig, CollectorDTO collectorDTO) { - return logAndResponse(this.getError(e, cacheConfig)); - } - private ErrorResponse getError(Exception e, CollectorConfig cacheConfig) { - String msg; - HttpStatus status; + @GetMapping("/asset-groups") + public ResponseEntity> searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) { - try { - if (e instanceof InternalServerErrorException) { - /*collectorService.upsertCollectorConfig(cacheConfig);*/ - msg = "The collector configuration couldn't be persisted on database: " + e.getLocalizedMessage(); - status = HttpStatus.INTERNAL_SERVER_ERROR; - } else if (e instanceof CollectorConfigurationGrpcException || e instanceof CollectorServiceGrpcException) { - msg = "UtmCollector manager is not available or the configuration is wrong: " + e.getLocalizedMessage(); - status = HttpStatus.BAD_GATEWAY; + Page page = collectorService.searchGroupsByFilter(filter, pageable); + HttpHeaders headers = 
PaginationUtil.generatePaginationHttpHeaders(page, "/utm-asset-groups/searchGroupsByFilter"); + return ResponseEntity.ok().headers(headers).body(page.getContent()); - } else if (e instanceof BadRequestAlertException) { - msg = e.getLocalizedMessage(); - status = HttpStatus.BAD_REQUEST; + } - } else { - msg = "Unexpected error: " + e.getLocalizedMessage(); - status = HttpStatus.INTERNAL_SERVER_ERROR; - } + @GetMapping("/search-by-filters") + public ResponseEntity> searchByFilters(@ParameterObject NetworkScanFilter filters, + @ParameterObject Pageable pageable) { - } catch (Exception rollbackException) { - msg = "Failed to rollback the configuration: " + rollbackException.getLocalizedMessage(); - status = HttpStatus.INTERNAL_SERVER_ERROR; - } + Page page = this.utmCollectorService.searchByFilters(filters, pageable); + HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/api/search-by-filters"); + return ResponseEntity.ok().headers(headers).body(page.getContent()); - return new ErrorResponse(msg, status); } - private ResponseEntity logAndResponse(ErrorResponse error) { - log.error(error.getMessage()); - applicationEventService.createEvent(error.getMessage(), ApplicationEventType.ERROR); - return ResponseUtil.buildErrorResponse(error.getStatus(), error.getMessage()); + @AuditEvent( + attemptType = ApplicationEventType.COLLECTOR_DELETE_ATTEMPT, + successType = ApplicationEventType.COLLECTOR_DELETE_SUCCESS, + attemptMessage = "Attempt to delete collector initiated", + successMessage = "Collector deleted successfully" + ) + @DeleteMapping("/{id}") + public ResponseEntity deleteCollector(@PathVariable Long id) { + collectorService.deleteCollector(id); + return ResponseEntity.ok().headers(HeaderUtil.createEntityDeletionAlert("UtmCollector", id.toString())).build(); } - private void upsert(CollectorConfigKeysDTO collectorConfig) throws Exception { - - // Update local database with new configuration - 
this.collectorService.updateCollectorConfigurationKeys(collectorConfig); - - // Attempt to update collector configuration via gRPC - this.collectorService.updateCollectorConfigViaGrpc(collectorConfig, collectorConfig.getCollector()); - } } diff --git a/backend/src/main/java/com/park/utmstack/web/rest/errors/ResetKeyExpiredException.java b/backend/src/main/java/com/park/utmstack/web/rest/errors/ResetKeyExpiredException.java new file mode 100644 index 000000000..4297e20c1 --- /dev/null +++ b/backend/src/main/java/com/park/utmstack/web/rest/errors/ResetKeyExpiredException.java @@ -0,0 +1,11 @@ +package com.park.utmstack.web.rest.errors; + +import com.park.utmstack.util.exceptions.ApiException; +import org.springframework.http.HttpStatus; + +public class ResetKeyExpiredException extends ApiException { + + public ResetKeyExpiredException(String message) { + super(message, HttpStatus.BAD_REQUEST); + } +} diff --git a/backend/src/main/java/com/park/utmstack/web/rest/network_scan/UtmAssetGroupResource.java b/backend/src/main/java/com/park/utmstack/web/rest/network_scan/UtmAssetGroupResource.java index a9b3faa79..beb20df0b 100644 --- a/backend/src/main/java/com/park/utmstack/web/rest/network_scan/UtmAssetGroupResource.java +++ b/backend/src/main/java/com/park/utmstack/web/rest/network_scan/UtmAssetGroupResource.java @@ -64,8 +64,8 @@ public ResponseEntity createUtmAssetGroup(@Valid @RequestBody Utm utmAssetGroup.setCreatedDate(Instant.now()); UtmAssetGroup result = utmAssetGroupService.save(utmAssetGroup); return ResponseEntity.created(new URI("/api/utm-asset-groups/" + result.getId())) - .headers(HeaderUtil.createEntityCreationAlert(ENTITY_NAME, result.getId().toString())) - .body(result); + .headers(HeaderUtil.createEntityCreationAlert(ENTITY_NAME, result.getId().toString())) + .body(result); } catch (DataIntegrityViolationException e) { String msg = ctx + ": " + e.getMostSpecificCause().getMessage().replaceAll("\n", ""); log.error(msg); @@ -76,7 +76,7 @@ public 
ResponseEntity createUtmAssetGroup(@Valid @RequestBody Utm log.error(msg); eventService.createEvent(msg, ApplicationEventType.ERROR); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); + HeaderUtil.createFailureAlert("", "", msg)).body(null); } } @@ -98,8 +98,8 @@ public ResponseEntity updateUtmAssetGroup(@Valid @RequestBody Utm UtmAssetGroup result = utmAssetGroupService.save(utmAssetGroup); return ResponseEntity.ok() - .headers(HeaderUtil.createEntityUpdateAlert(ENTITY_NAME, utmAssetGroup.getId().toString())) - .body(result); + .headers(HeaderUtil.createEntityUpdateAlert(ENTITY_NAME, utmAssetGroup.getId().toString())) + .body(result); } catch (DataIntegrityViolationException e) { String msg = ctx + ": " + e.getMostSpecificCause().getMessage().replaceAll("\n", ""); log.error(msg); @@ -110,7 +110,7 @@ public ResponseEntity updateUtmAssetGroup(@Valid @RequestBody Utm log.error(msg); eventService.createEvent(msg, ApplicationEventType.ERROR); return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); + HeaderUtil.createFailureAlert("", "", msg)).body(null); } } @@ -121,21 +121,18 @@ public ResponseEntity updateUtmAssetGroup(@Valid @RequestBody Utm * @return the ResponseEntity with status 200 (OK) and the list of utmAssetGroups in body */ @GetMapping("/utm-asset-groups/searchGroupsByFilter") - public ResponseEntity> searchGroupsByFilter(AssetGroupFilter filter, Pageable pageable) { - final String ctx = CLASSNAME + ".searchGroupsByFilter"; - try { - Page page = utmAssetGroupService.searchGroupsByFilter(filter, pageable); - HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/utm-asset-groups/searchGroupsByFilter"); - return ResponseEntity.ok().headers(headers).body(utmAssetGroupService.searchGroupsByFilter(filter, pageable).getContent()); - } catch (Exception e) { - String msg = ctx + ": " + 
e.getMessage(); - log.error(msg); - eventService.createEvent(msg, ApplicationEventType.ERROR); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).headers( - HeaderUtil.createFailureAlert("", "", msg)).body(null); - } + public ResponseEntity> searchGroupsByFilter( + AssetGroupFilter filter, Pageable pageable) { + + Page page = utmAssetGroupService.searchGroupsByFilter(filter, pageable); + HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/utm-asset-groups/searchGroupsByFilter"); + + return ResponseEntity.ok() + .headers(headers) + .body(page.getContent()); } + /** * GET /utm-asset-groups/:id : get the "id" utmAssetGroup. * diff --git a/backend/src/main/proto/collector.proto b/backend/src/main/proto/collector.proto new file mode 100644 index 000000000..99fd4d551 --- /dev/null +++ b/backend/src/main/proto/collector.proto @@ -0,0 +1,87 @@ +syntax = "proto3"; + +option go_package = "github.com/utmstack/UTMStack/agent-manager/agent"; +import "common.proto"; + +package agent; + +service CollectorService { + rpc RegisterCollector(RegisterRequest) returns (AuthResponse) {} + rpc DeleteCollector(DeleteRequest) returns (AuthResponse) {} + rpc ListCollector (ListRequest) returns (ListCollectorResponse) {} + rpc CollectorStream(stream CollectorMessages) returns (stream CollectorMessages) {} + rpc GetCollectorConfig (ConfigRequest) returns (CollectorConfig) {} +} + +service PanelCollectorService { + rpc RegisterCollectorConfig(CollectorConfig) returns (ConfigKnowledge) {} +} + +enum CollectorModule{ + AS_400 = 0; + UTMSTACK = 1; +} + +message RegisterRequest { + string ip = 1; + string hostname = 2; + string version = 3; + CollectorModule collector = 4; +} + +message ListCollectorResponse { + repeated Collector rows = 1; + int32 total = 2; +} + +message Collector { + int32 id = 1; + Status status = 2; + string collector_key = 3; + string ip = 4; + string hostname = 5; + string version = 6; + CollectorModule module = 7; + string last_seen = 
8; +} + +message CollectorMessages { + oneof stream_message { + CollectorConfig config = 1; + ConfigKnowledge result = 2; + } +} + +message CollectorConfig { + string collector_id = 1; + repeated CollectorConfigGroup groups = 2; + string request_id = 3; +} + +message CollectorConfigGroup { + int32 id = 1; + string group_name = 2; + string group_description = 3; + repeated CollectorGroupConfigurations configurations = 4; + int32 collector_id = 5; +} + +message CollectorGroupConfigurations { + int32 id = 1; + int32 group_id = 2; + string conf_key = 3; + string conf_value = 4; + string conf_name = 5; + string conf_description = 6; + string conf_data_type = 7; + bool conf_required = 8; +} + +message ConfigKnowledge{ + string accepted = 1; + string request_id = 2; +} + +message ConfigRequest { + CollectorModule module = 1; +} diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316001_remove_windows_token_manipulation_rule.xml b/backend/src/main/resources/config/liquibase/changelog/20260316001_remove_windows_token_manipulation_rule.xml new file mode 100644 index 000000000..faadd5e47 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260316001_remove_windows_token_manipulation_rule.xml @@ -0,0 +1,14 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316002_modify_crowdstrike_rules.xml b/backend/src/main/resources/config/liquibase/changelog/20260316002_modify_crowdstrike_rules.xml new file mode 100644 index 000000000..c3c2f38d7 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260316002_modify_crowdstrike_rules.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316003_update_filter_azure.xml b/backend/src/main/resources/config/liquibase/changelog/20260316003_update_filter_azure.xml new file mode 100644 index 000000000..b1b1d13e2 --- /dev/null +++ 
b/backend/src/main/resources/config/liquibase/changelog/20260316003_update_filter_azure.xml @@ -0,0 +1,926 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316004_update_filter_crowdstrike.xml b/backend/src/main/resources/config/liquibase/changelog/20260316004_update_filter_crowdstrike.xml new file mode 100644 index 000000000..65aef5634 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260316004_update_filter_crowdstrike.xml @@ -0,0 +1,735 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316005_update_filter_linux.xml b/backend/src/main/resources/config/liquibase/changelog/20260316005_update_filter_linux.xml new file mode 100644 index 000000000..cd999fd11 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260316005_update_filter_linux.xml @@ -0,0 +1,410 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316006_update_filter_o365.xml b/backend/src/main/resources/config/liquibase/changelog/20260316006_update_filter_o365.xml new file mode 100644 index 000000000..4f0c6488a --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260316006_update_filter_o365.xml @@ -0,0 +1,128 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260316007_update_as400.xml b/backend/src/main/resources/config/liquibase/changelog/20260316007_update_as400.xml new file mode 100644 index 000000000..6c820049b --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260316007_update_as400.xml @@ -0,0 +1,105 @@ + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260319001_remove_run_key_susp_process.xml b/backend/src/main/resources/config/liquibase/changelog/20260319001_remove_run_key_susp_process.xml new file mode 100644 index 000000000..e2f7aa003 --- /dev/null +++ 
b/backend/src/main/resources/config/liquibase/changelog/20260319001_remove_run_key_susp_process.xml @@ -0,0 +1,14 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260319002_update_windosw_filter.xml b/backend/src/main/resources/config/liquibase/changelog/20260319002_update_windosw_filter.xml new file mode 100644 index 000000000..0918f3862 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260319002_update_windosw_filter.xml @@ -0,0 +1,2934 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260320001_update_window_filter.xml b/backend/src/main/resources/config/liquibase/changelog/20260320001_update_window_filter.xml new file mode 100644 index 000000000..aaa829405 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260320001_update_window_filter.xml @@ -0,0 +1,2979 @@ + + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260324001_remove_rundll32_no_arguments.xml b/backend/src/main/resources/config/liquibase/changelog/20260324001_remove_rundll32_no_arguments.xml new file mode 100644 index 000000000..924c56b54 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260324001_remove_rundll32_no_arguments.xml @@ -0,0 +1,14 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260324002_remove_rundll32_abuse.xml b/backend/src/main/resources/config/liquibase/changelog/20260324002_remove_rundll32_abuse.xml new file mode 100644 index 000000000..ca25a497d --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260324002_remove_rundll32_abuse.xml @@ -0,0 +1,14 @@ + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260324003_update_o365_filter.xml b/backend/src/main/resources/config/liquibase/changelog/20260324003_update_o365_filter.xml new file mode 100644 index 000000000..6c22d6332 --- /dev/null +++ 
b/backend/src/main/resources/config/liquibase/changelog/20260324003_update_o365_filter.xml @@ -0,0 +1,132 @@ + + + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/changelog/20260325001_update_azure.xml b/backend/src/main/resources/config/liquibase/changelog/20260325001_update_azure.xml new file mode 100644 index 000000000..3fc1c4ea8 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/changelog/20260325001_update_azure.xml @@ -0,0 +1,935 @@ + + + + + + + + + + + + + + diff --git a/backend/src/main/resources/config/liquibase/data/20260316/crowdstrike/utm_correlation_rules.sql b/backend/src/main/resources/config/liquibase/data/20260316/crowdstrike/utm_correlation_rules.sql new file mode 100644 index 000000000..9a1426999 --- /dev/null +++ b/backend/src/main/resources/config/liquibase/data/20260316/crowdstrike/utm_correlation_rules.sql @@ -0,0 +1,17 @@ +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1339, 'CrowdStrike Hunting: Windows Event Log Clearing', 0, 3, 2, 'Defense Evasion', 'Indicator Removal: Clear Windows Event Logs', 'A raw process execution was detected attempting to clear Windows Event Logs. 
Adversaries use this technique to cover their tracks after compromising a host.', '["https://attack.mitre.org/techniques/T1070/001/"]', e'equals("log.event_simpleName", "ProcessRollup2") && exists("log.CommandLine") && regexMatch("log.CommandLine", "(?i).*(wevtutil\\\\s+cl.*|Clear-EventLog.*|Remove-EventLog.*).*")', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.ComputerName","lastEvent.log.UserName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1340, 'Suspicious Native Downloaders (LoLBin)', 2, 2, 0, 'Command and Control', 'Ingress Tool Transfer', 'Execution of native binaries like certutil, bitsadmin, curl, or wget was detected making external connections, potentially indicating Ingress Tool Transfer by an adversary.', '["https://attack.mitre.org/techniques/T1105/"]', e'equals("log.event_simpleName", "ProcessRollup2") && exists("log.CommandLine") && regexMatch("log.CommandLine", "(?i).*(certutil.*-urlcache|bitsadmin.*-transfer|curl.*http|wget.*http).*")', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.ComputerName","lastEvent.log.CommandLine"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1341, 'Suspicious Encoded PowerShell Execution', 3, 2, 1, 'Execution', 'Command and Scripting Interpreter: PowerShell', 'A PowerShell process was spawned with arguments indicating base64 encoded commands (-enc, 
-EncodedCommand). Malware and threat actors often use this to evade string-based detection.', '["https://attack.mitre.org/techniques/T1059/001/"]', e'equals("log.event_simpleName", "ProcessRollup2") && exists("log.CommandLine") && regexMatch("log.CommandLine", "(?i).*(powershell|pwsh).*-(e|en|enc|encodedcommand|ec)\\\\s+.*")', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.ComputerName","lastEvent.log.CommandLine"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1342, 'Suspicious Downloader Execution (Linux/macOS)', 2, 2, 0, 'Command and Control', 'Ingress Tool Transfer', 'Execution of native downloaders like curl or wget was detected on a Linux or macOS endpoint making HTTP connections, potentially indicating Ingress Tool Transfer by an adversary.', '["https://attack.mitre.org/techniques/T1105/"]', e'exists("log.event_platform") && oneOf("log.event_platform", ["Mac", "Lin"]) && exists("log.event.CommandLine") && regexMatch("log.event.CommandLine", "(?i).*(curl|wget).*http.*")', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.event.ComputerName","lastEvent.log.event.CommandLine"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1343, 'Security Policy Disabled or Deleted', 1, 3, 3, 'Defense Evasion', 'Impair Defenses: Disable or Modify Tools', 'An administrator or actor has disabled or deleted a security prevention 
policy in Falcon, which may leave endpoints vulnerable.', '["https://attack.mitre.org/techniques/T1562/001/"]', e'exists("log.eventOperationName") && oneOf("log.eventOperationName", ["disable_policy", "delete_policy", "remove_policy"])', '2026-03-02 23:03:22.378920', true, true, 'origin', null, '[]', '["lastEvent.log.eventUserId","lastEvent.log.eventOperationName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1344, 'Security Defenses Impaired or Policy Disabled', 0, 3, 3, 'Defense Evasion', 'Impair Defenses: Disable or Modify Tools', 'An action was taken on the endpoint that resulted in a critical sensor process or security policy being disabled locally. This strongly indicates defense evasion tampering.', '["https://attack.mitre.org/techniques/T1562/001/"]', e'equals("event.PatternDispositionFlags.PolicyDisabled", true) || oneOf("event.PatternDispositionValue", [8192, 8208, 8320, 8704, 9216, 10240, 12304, 73728, 73744])', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.event.ComputerName","lastEvent.log.event.PatternDispositionDescription"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1345, 'Real-Time Response (RTR) Session Execution', 3, 3, 1, 'Execution', 'Remote Services', 'A user or API has initiated a remote response (RTR) session on an endpoint. 
This grants deep access to the host.', '["https://attack.mitre.org/techniques/T1021/"]', e'equals("log.metadataEventType", "RemoteResponseSessionStartEvent")', '2026-03-02 23:03:24.925716', true, true, 'origin', null, '[]', '["lastEvent.log.metadataEventType","lastEvent.log.eventUserId"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1346, 'OS Credential Dumping Activity', 3, 1, 0, 'Credential Access', 'OS Credential Dumping: LSASS Memory', 'The endpoint agent detected activity commonly associated with OS Credential Dumping. This includes attempts to read or dump LSASS memory using known tools.', '["https://attack.mitre.org/techniques/T1003/001/"]', e'equals("log.event_simpleName", "ProcessRollup2") && exists("log.CommandLine") && regexMatch("log.CommandLine", "(?i).*(procdump.*lsass|mimikatz|sekurlsa|lsass\\\\.dmp).*")', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.ComputerName","lastEvent.log.UserName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1347, 'Multiple Authentication Failures (Possible Brute Force Attack)', 3, 0, 0, 'Credential Access', 'Brute Force: Password Guessing', 'A user or IP address has failed multiple authentication attempts on the CrowdStrike Falcon console within a short period of time.', '["https://attack.mitre.org/techniques/T1110/001/"]', e'equals("log.metadataEventType", "AuthActivityAuditEvent") && 
equals("log.eventSuccess", false) && exists("origin.ip")', '2026-03-02 23:03:27.493691', true, true, 'origin', '["origin.ip"]', '[{"indexPattern":"v11-log-crowdstrike","with":[{"field":"origin.ip","operator":"filter_term","value":"{{.origin.ip}}"},{"field":"log.eventSuccess","operator":"filter_term","value":"false"}],"or":null,"within":"now-15m","count":5}]', null); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1348, 'Major Incident Generated (CrowdScore)', 3, 3, 3, 'Lateral Movement', 'Lateral Tool Transfer', 'The CrowdScore engine has consolidated multiple detections into a critical incident, a possible indicator of Lateral Movement or widespread intrusion.', '["https://attack.mitre.org/techniques/T1570/","https://attack.mitre.org/tactics/TA0008/"]', e'equals("log.metadataEventType", "IncidentSummaryEvent")', '2026-03-02 23:03:28.702741', true, true, 'origin', null, '[]', '["lastEvent.log.metadataEventType"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1349, 'IP Whitelisting Modification', 1, 3, 2, 'Defense Evasion', 'Impair Defenses: Disable or Modify Cloud Firewall', 'IP addresses have been added to or removed from the CrowdStrike whitelist. 
An attacker could use this to evade network blocking.', '["https://attack.mitre.org/techniques/T1562/007/"]', e'exists("log.eventOperationName") && oneOf("log.eventOperationName", ["ip_rules_added", "ip_rules_removed"])', '2026-03-02 23:03:30.070795', true, true, 'origin', null, '[]', '["lastEvent.log.eventUserId","lastEvent.log.eventOperationName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1350, 'Inhibit System Recovery (Shadow Copy Deletion)', 0, 3, 3, 'Impact', 'Inhibit System Recovery', 'The Falcon agent detected command line activity attempting to delete Volume Shadow Copies or disable recovery options. This is a highly reliable precursor to Ransomware encryption.', '["https://attack.mitre.org/techniques/T1490/"]', e'equals("log.event_simpleName", "ProcessRollup2") && exists("log.CommandLine") && regexMatch("log.CommandLine", "(?i).*(vssadmin.*delete shadows|wmic.*shadowcopy.*delete|bcdedit.*recoveryenabled.*no).*")', '2026-03-16 10:00:00.000000', true, true, 'origin', null, '[]', '["lastEvent.log.ComputerName","lastEvent.log.UserName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1351, 'Endpoint or XDR Detection Alert', 3, 3, 2, 'Threat Detection', 'Command and Scripting Interpreter', 'A critical detection summary has been generated from Falcon EPP or XDR indicating malicious activity or attack patterns.', '["https://attack.mitre.org/techniques/T1059/"]', 
e'exists("log.metadataEventType") && oneOf("log.metadataEventType", ["EppDetectionSummaryEvent", "XdrDetectionSummaryEvent"])', '2026-03-02 23:03:32.606143', true, true, 'origin', null, '[]', '["lastEvent.log.metadataEventType"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1352, 'Endpoint Network Containment Action', 0, 2, 3, 'Impact', 'Account Access Removal', 'Containment of a host on the network has been requested, or a previously applied containment has been lifted.', '["https://attack.mitre.org/techniques/T1531/"]', e'exists("log.eventOperationName") && oneOf("log.eventOperationName", ["containment_requested", "lift_containment_requested"])', '2026-03-02 23:03:33.875437', true, true, 'origin', null, '[]', '["lastEvent.log.eventUserId","lastEvent.log.eventOperationName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1353, 'Deletion or Deactivation of User Account', 0, 3, 3, 'Account Manipulation', 'Account Manipulation', 'An administrator has deactivated or deleted a user account in the Falcon console. 
This indicates account manipulation.', '["https://attack.mitre.org/techniques/T1098/"]', e'exists("log.eventOperationName") && oneOf("log.eventOperationName", ["deactivateUser", "deleteUser"])', '2026-03-02 23:03:35.273093', true, true, 'origin', null, '[]', '["lastEvent.log.eventUserId","lastEvent.log.eventOperationName"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1354, 'Custom Indicator of Compromise (IoC) Detected', 3, 3, 1, 'Threat Detection', 'User Execution', 'The sensor has detected activity that matches an IoC (Hash, Domain, IP) supplied and entered by the client.', '["https://attack.mitre.org/techniques/T1204/"]', e'equals("log.metadataEventType", "CustomerIOCEvent")', '2026-03-02 23:03:36.627153', true, true, 'origin', null, '[]', '["lastEvent.log.metadataEventType"]'); +INSERT INTO public.utm_correlation_rules (id, rule_name, rule_confidentiality, rule_integrity, rule_availability, rule_category, rule_technique, rule_description, rule_references_def, rule_definition_def, rule_last_update, rule_active, system_owner, rule_adversary, rule_deduplicate_by_def, rule_after_events_def, rule_group_by_def) VALUES (1355, 'Critical Role Modification (Privilege Escalation)', 3, 3, 1, 'Privilege Escalation', 'Account Manipulation: Additional Cloud Roles', 'New roles have been granted or updated for a user within the CrowdStrike administration console.', '["https://attack.mitre.org/techniques/T1098/003/"]', e'exists("log.eventOperationName") && oneOf("log.eventOperationName", ["grantUserRoles", "updateUserRoles"])', '2026-03-02 23:03:38.226516', true, true, 'origin', null, '[]', '["lastEvent.log.eventUserId","lastEvent.log.eventOperationName"]'); diff --git 
a/backend/src/main/resources/config/liquibase/data/20260316/crowdstrike/utm_group_rules_data_type.sql b/backend/src/main/resources/config/liquibase/data/20260316/crowdstrike/utm_group_rules_data_type.sql new file mode 100644 index 000000000..ed6c158dc --- /dev/null +++ b/backend/src/main/resources/config/liquibase/data/20260316/crowdstrike/utm_group_rules_data_type.sql @@ -0,0 +1,17 @@ +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1339, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1340, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1341, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1342, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1343, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1344, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1345, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1346, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1347, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1348, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1349, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1350, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO 
public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1351, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1352, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1353, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1354, 51, '2026-03-16 10:00:00.000000'); +INSERT INTO public.utm_group_rules_data_type (rule_id, data_type_id, last_update) VALUES (1355, 51, '2026-03-16 10:00:00.000000'); diff --git a/backend/src/main/resources/config/liquibase/master.xml b/backend/src/main/resources/config/liquibase/master.xml index ddba358f1..86c16ca8f 100644 --- a/backend/src/main/resources/config/liquibase/master.xml +++ b/backend/src/main/resources/config/liquibase/master.xml @@ -38,7 +38,7 @@ - + @@ -140,15 +140,15 @@ - + - + - + - + - + @@ -511,5 +511,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/filters/azure/azure-eventhub.yml b/filters/azure/azure-eventhub.yml index 1ffaf26f7..bac4fa8ec 100644 --- a/filters/azure/azure-eventhub.yml +++ b/filters/azure/azure-eventhub.yml @@ -1,4 +1,4 @@ -# Azure Envent-Hub filter, version 2.0.4 +# Azure Envent-Hub filter, version 2.0.6 # # Documentations # 1- https://docs.microsoft.com/en-us/azure/azure-monitor/essentials/activity-log @@ -603,6 +603,216 @@ pipeline: - log.properties.userType to: log.propertiesUserType + - rename: + from: + - log.properties.WAFEvaluationTime + to: log.propertiesWAFEvaluationTime + + - rename: + from: + - log.properties.WAFMode + to: log.propertiesWAFMode + + - rename: + from: + - log.properties.WAFPolicyID + to: log.propertiesWAFPolicyID + + - rename: + from: + - log.properties.backendSslCipher + to: log.propertiesBackendSslCipher + + - rename: + from: + - log.properties.backendSslProtocol + to: log.propertiesBackendSslProtocol + 
+ - rename: + from: + - log.properties.clientIP + to: origin.ip + + - rename: + from: + - log.properties.clientPort + to: origin.port + + - rename: + from: + - log.properties.clientResponseTime + to: log.propertiesClientResponseTime + + - rename: + from: + - log.properties.connectionSerialNumber + to: log.propertiesConnectionSerialNumber + + - rename: + from: + - log.properties.contentType + to: log.propertiesContentType + + - rename: + from: + - log.properties.error_info + to: log.propertiesErrorInfo + + - rename: + from: + - log.properties.host + to: origin.host + + - rename: + from: + - log.properties.httpMethod + to: log.propertiesHttpMethod + + - rename: + from: + - log.properties.httpStatus + to: statusCode + + - rename: + from: + - log.properties.httpStatusCode + to: statusCode + + - rename: + from: + - log.properties.httpVersion + to: log.propertiesHttpVersion + + - rename: + from: + - log.properties.instanceId + to: log.propertiesInstanceId + + - rename: + from: + - log.properties.noOfConnectionRequests + to: log.propertiesNoOfConnectionRequests + + - rename: + from: + - log.properties.originalHost + to: log.propertiesOriginalHost + + - rename: + from: + - log.properties.originalRequestUriWithArgs + to: log.propertiesOriginalRequestUriWithArgs + + - rename: + from: + - log.properties.receivedBytes + to: log.propertiesReceivedBytes + + - rename: + from: + - log.properties.requestQuery + to: log.propertiesRequestQuery + + - rename: + from: + - log.properties.requestUri + to: log.propertiesRequestUri + + - rename: + from: + - log.properties.sentBytes + to: log.propertiesSentBytes + + - rename: + from: + - log.properties.serverConnectTime + to: log.propertiesServerConnectTime + + - rename: + from: + - log.properties.serverHeaderTime + to: log.propertiesServerHeaderTime + + - rename: + from: + - log.properties.serverResponseLatency + to: log.propertiesServerResponseLatency + + - rename: + from: + - log.properties.serverRouted + to: log.propertiesServerRouted + 
+ - rename: + from: + - log.properties.serverStatus + to: log.propertiesServerStatus + + - rename: + from: + - log.properties.sslCipher + to: log.propertiesSslCipher + + - rename: + from: + - log.properties.sslClientCertificateFingerprint + to: log.propertiesSslClientCertificateFingerprint + + - rename: + from: + - log.properties.sslClientCertificateIssuerName + to: log.propertiesSslClientCertificateIssuerName + + - rename: + from: + - log.properties.sslClientVerify + to: log.propertiesSslClientVerify + + - rename: + from: + - log.properties.sslEnabled + to: log.propertiesSslEnabled + + - rename: + from: + - log.properties.sslProtocol + to: log.propertiesSslProtocol + + - rename: + from: + - log.properties.timeTaken + to: log.propertiesTimeTaken + + - rename: + from: + - log.properties.transactionId + to: log.propertiesTransactionId + + - rename: + from: + - log.properties.upstreamSourcePort + to: log.propertiesUpstreamSourcePort + + - rename: + from: + - log.properties.result + to: actionResult + + - rename: + from: + - log.Properties.Result + to: actionResult + + - rename: + from: + - log.properties.resultType + to: actionResult + + - rename: + from: + - log.properties.userAgent + to: log.propertiesUserAgent + # .......................................................................# # Adding severity based on log.level # .......................................................................# @@ -700,20 +910,4 @@ pipeline: fields: - origin.bytesSent - origin.bytesReceived - to: float - - # .......................................................................# - # Adding actionResult - # denied by default - # .......................................................................# - - add: - function: 'string' - params: - key: actionResult - value: 'denied' - - add: - function: 'string' - params: - key: actionResult - value: 'accepted' - where: (greaterOrEqual("statusCode", 200) && lessOrEqual("statusCode", 299)) || (greaterOrEqual("statusCode", 300) && 
lessOrEqual("statusCode", 399) && greaterThan("origin.bytesReceived", 0)) \ No newline at end of file + to: float \ No newline at end of file diff --git a/filters/crowdstrike/crowdstrike.yml b/filters/crowdstrike/crowdstrike.yml index 72eb5c40c..ab6bcd03d 100644 --- a/filters/crowdstrike/crowdstrike.yml +++ b/filters/crowdstrike/crowdstrike.yml @@ -1,4 +1,4 @@ -# Crowdstrike module filter, version 1.1.1 +# Crowdstrike module filter, version 1.2.0 # Based in docs and samples provided # # Documentations @@ -16,218 +16,630 @@ pipeline: # .......................................................................# - rename: from: - - log.RawMessage.event.Attributes.APIClientID + - log.event.Attributes.APIClientID to: log.eventAttributesAPIClientID - rename: from: - - log.RawMessage.event.Attributes.actor_cid + - log.event.Attributes.actor_cid to: log.eventAttributesActorCid - rename: from: - - log.RawMessage.event.Attributes.actor_user + - log.event.Attributes.actor_user to: log.eventAttributesActorUser - rename: from: - - log.RawMessage.event.Attributes.actor_user_uuid + - log.event.Attributes.actor_user_uuid to: log.eventAttributesActorUserUUID - rename: from: - - log.RawMessage.event.Attributes.name + - log.event.Attributes.name to: log.eventAttributesName - rename: from: - - log.RawMessage.event.Attributes.trace_id + - log.event.Attributes.trace_id to: log.eventAttributesTraceID - rename: from: - - log.RawMessage.event.Attributes.cid + - log.event.Attributes.cid to: log.eventAttributesCid - rename: from: - - log.RawMessage.event.Attributes.consumes + - log.event.Attributes.consumes to: log.eventAttributesConsumes - rename: from: - - log.RawMessage.event.Attributes.elapsed_microseconds + - log.event.Attributes.elapsed_microseconds to: log.eventAttributesElapsedMicroseconds - rename: from: - - log.RawMessage.event.Attributes.elapsed_time + - log.event.Attributes.elapsed_time to: log.eventAttributesElapsedTime - rename: from: - - log.RawMessage.event.Attributes.produces + 
- log.event.Attributes.produces to: log.eventAttributesProduces - rename: from: - - log.RawMessage.event.Attributes.received_time + - log.event.Attributes.received_time to: log.eventAttributesReceivedTime - rename: from: - - log.RawMessage.event.Attributes.request_content_type + - log.event.Attributes.request_content_type to: log.eventAttributesRequestContentType - rename: from: - - log.RawMessage.event.Attributes.request_method + - log.event.Attributes.request_method to: log.eventAttributesRequestMethod - rename: from: - - log.RawMessage.event.Attributes.request_uri_length + - log.event.Attributes.request_uri_length to: log.eventAttributesRequestURILength - rename: from: - - log.RawMessage.event.Attributes.status_code + - log.event.Attributes.status_code to: log.statusCode - rename: from: - - log.RawMessage.event.Attributes.sub_component_1 + - log.event.Attributes.sub_component_1 to: log.eventAttributesSubComponent1 - rename: from: - - log.RawMessage.event.Attributes.sub_component_2 + - log.event.Attributes.sub_component_2 to: log.eventAttributesSubComponent2 - rename: from: - - log.RawMessage.event.Attributes.sub_component_3 + - log.event.Attributes.sub_component_3 to: log.eventAttributesSubComponent3 - rename: from: - - log.RawMessage.event.Attributes.trace_id + - log.event.Attributes.trace_id to: log.eventAttributesTraceID - rename: from: - - log.RawMessage.event.Attributes.user_agent + - log.event.Attributes.user_agent to: log.eventAttributesUserAgent - rename: from: - - log.RawMessage.event.Attributes.eventType + - log.event.Attributes.eventType to: log.eventAttributesEventType - rename: from: - - log.RawMessage.event.Attributes.offset + - log.event.Attributes.offset to: log.eventAttributesOffset - rename: from: - - log.RawMessage.event.Attributes.partition + - log.event.Attributes.partition to: log.eventAttributesPartition - rename: from: - - log.RawMessage.event.Attributes.request_accept + - log.event.Attributes.request_accept to: 
log.eventAttributesRequestAccept - rename: from: - - log.RawMessage.event.Attributes.request_path + - log.event.Attributes.request_path to: log.eventAttributesRequestPath - rename: from: - - log.RawMessage.event.Attributes.request_query + - log.event.Attributes.request_query to: log.eventAttributesRequestQuery - rename: from: - - log.RawMessage.event.Attributes.scopes + - log.event.Attributes.scopes to: log.eventAttributesScopes - rename: from: - - log.RawMessage.event.AuditKeyValues + - log.event.Attributes.target_name + to: log.eventAttributesTargetName + + - rename: + from: + - log.event.Attributes.success + to: log.eventAttributesSuccess + + - rename: + from: + - log.event.Attributes.target_cid + to: log.eventAttributesTargetCID + + - rename: + from: + - log.event.Attributes.target_uuid + to: log.eventAttributesTargetUUID + + - rename: + from: + - log.event.Attributes.id + to: log.eventAttributesID + + - rename: + from: + - log.event.Attributes.scope(s) + to: log.eventAttributesScopes + + - rename: + from: + - log.event.Attributes.appId + to: log.eventAttributesAppId + + - rename: + from: + - log.event.AuditKeyValues to: log.eventAuditKeyValues - rename: from: - - log.RawMessage.event.Message + - log.event.Message to: log.eventMessage - rename: from: - - log.RawMessage.event.OperationName + - log.event.OperationName to: log.eventOperationName - rename: from: - - log.RawMessage.event.ServiceName + - log.event.ServiceName to: log.eventServiceName - rename: from: - - log.RawMessage.event.Source + - log.event.Source to: log.eventSource - rename: from: - - log.RawMessage.event.ServiceName + - log.event.ServiceName to: log.eventServiceName - rename: from: - - log.RawMessage.event.SourceIp + - log.event.SourceIp to: origin.ip - rename: from: - - log.RawMessage.event.Success + - log.event.Success to: log.eventSuccess - rename: from: - - log.RawMessage.event.UTCTimestamp + - log.event.UTCTimestamp to: log.eventUTCTimestamp - rename: from: - - log.RawMessage.event.UserId 
+ - log.event.UserId to: log.eventUserId - rename: from: - - log.RawMessage.metadata.customerIDString + - log.metadata.customerIDString to: log.metadataCustomerIDString - rename: from: - - log.RawMessage.metadata.eventCreationTime + - log.metadata.eventCreationTime to: log.metadataEventCreationTime - rename: from: - - log.RawMessage.metadata.eventType + - log.metadata.eventType to: log.metadataEventType - rename: from: - - log.RawMessage.metadata.offset + - log.metadata.offset to: log.metadataOffset - rename: from: - - log.RawMessage.metadata.version + - log.metadata.version to: log.metadataVersion + + - rename: + from: + - log.event.Message + to: log.eventMessage + + - rename: + from: + - log.event.AgentId + to: log.eventAgentId + + - rename: + from: + - log.event.AggregateId + to: log.eventAggregateId + + - rename: + from: + - log.event.CloudIndicator + to: log.eventCloudIndicator + + - rename: + from: + - log.event.CommandLine + to: log.eventCommandLine + + - rename: + from: + - log.event.CompositeId + to: log.eventCompositeId + + - rename: + from: + - log.event.DataDomains + to: log.eventDataDomains + + - rename: + from: + - log.event.Description + to: log.eventDescription + + - rename: + from: + - log.event.FalconHostLink + to: log.eventFalconHostLink + + - rename: + from: + - log.event.FileName + to: origin.filename + + - rename: + from: + - log.event.FilePath + to: origin.path + + - rename: + from: + - log.event.GrandParentCommandLine + to: log.eventGrandParentCommandLine + + - rename: + from: + - log.event.GrandParentImageFileName + to: log.eventGrandParentImageFileName + + - rename: + from: + - log.event.GrandParentImageFilePath + to: log.eventGrandParentImageFilePath + + - rename: + from: + - log.event.Hostname + to: origin.host + + - rename: + from: + - log.event.LocalIP + to: origin.ip + + - rename: + from: + - log.event.LocalIPv6 + to: log.eventLocalIPv6 + + - rename: + from: + - log.event.LogonDomain + to: log.eventLogonDomain + + - rename: + from: + 
- log.event.MACAddress + to: origin.mac + + - rename: + from: + - log.event.MD5String + to: origin.md5 + + - rename: + from: + - log.event.MitreAttack + to: log.eventMitreAttack + + - rename: + from: + - log.event.Name + to: log.eventName + + - rename: + from: + - log.event.Objective + to: log.eventObjective + + - rename: + from: + - log.event.ParentCommandLine + to: log.eventParentCommandLine + + - rename: + from: + - log.event.ParentImageFileName + to: log.eventParentImageFileName + + - rename: + from: + - log.event.ParentImageFilePath + to: log.eventParentImageFilePath + + - rename: + from: + - log.event.ParentProcessId + to: log.eventParentProcessId + + - rename: + from: + - log.event.PatternDispositionDescription + to: log.eventPatternDispositionDescription + + - rename: + from: + - log.event.PatternDispositionFlags.BlockingUnsupportedOrDisabled + to: log.eventPatternDispositionFlagsBlockingUnsupportedOrDisabled + + - rename: + from: + - log.event.PatternDispositionFlags.BootupSafeguardEnabled + to: log.eventPatternDispositionFlagsBootupSafeguardEnabled + + - rename: + from: + - log.event.PatternDispositionFlags.ContainmentFileSystem + to: log.eventPatternDispositionFlagsContainmentFileSystem + + - rename: + from: + - log.event.PatternDispositionFlags.CriticalProcessDisabled + to: log.eventPatternDispositionFlagsCriticalProcessDisabled + + - rename: + from: + - log.event.PatternDispositionFlags.Detect + to: log.eventPatternDispositionFlagsDetect + + - rename: + from: + - log.event.PatternDispositionFlags.FsOperationBlocked + to: log.eventPatternDispositionFlagsFsOperationBlocked + + - rename: + from: + - log.event.PatternDispositionFlags.HandleOperationDowngraded + to: log.eventPatternDispositionFlagsHandleOperationDowngraded + + - rename: + from: + - log.event.PatternDispositionFlags.InddetMask + to: log.eventPatternDispositionFlagsInddetMask + + - rename: + from: + - log.event.PatternDispositionFlags.Indicator + to: log.eventPatternDispositionFlagsIndicator 
+ + - rename: + from: + - log.event.PatternDispositionFlags.KillActionFailed + to: log.eventPatternDispositionFlagsKillActionFailed + + - rename: + from: + - log.event.PatternDispositionFlags.KillParent + to: log.eventPatternDispositionFlagsKillParent + + - rename: + from: + - log.event.PatternDispositionFlags.KillProcess + to: log.eventPatternDispositionFlagsKillProcess + + - rename: + from: + - log.event.PatternDispositionFlags.KillSubProcess + to: log.eventPatternDispositionFlagsKillSubProcess + + - rename: + from: + - log.event.PatternDispositionFlags.OperationBlocked + to: log.eventPatternDispositionFlagsOperationBlocked + + - rename: + from: + - log.event.PatternDispositionFlags.PolicyDisabled + to: log.eventPatternDispositionFlagsPolicyDisabled + + - rename: + from: + - log.event.PatternDispositionFlags.ProcessBlocked + to: log.eventPatternDispositionFlagsProcessBlocked + + - rename: + from: + - log.event.PatternDispositionFlags.QuarantineFile + to: log.eventPatternDispositionFlagsQuarantineFile + + - rename: + from: + - log.event.PatternDispositionFlags.QuarantineMachine + to: log.eventPatternDispositionFlagsQuarantineMachine + + - rename: + from: + - log.event.PatternDispositionFlags.RegistryOperationBlocked + to: log.eventPatternDispositionFlagsRegistryOperationBlocked + + - rename: + from: + - log.event.PatternDispositionFlags.Rooting + to: log.eventPatternDispositionFlagsRooting + + - rename: + from: + - log.event.PatternDispositionFlags.SensorOnly + to: log.eventPatternDispositionFlagsSensorOnly + + - rename: + from: + - log.event.PatternDispositionFlags.SuspendParent + to: log.eventPatternDispositionFlagsSuspendParent + + - rename: + from: + - log.event.PatternDispositionFlags.SuspendProcess + to: log.eventPatternDispositionFlagsSuspendProcess + + - rename: + from: + - log.event.PatternDispositionValue + to: log.eventPatternDispositionValue + + - rename: + from: + - log.event.PatternId + to: log.eventPatternId + + - rename: + from: + - 
log.event.PlatformId + to: log.eventPlatformId + + - rename: + from: + - log.event.PlatformName + to: origin.operatingSystem + + - rename: + from: + - log.event.ProcessEndTime + to: log.eventProcessEndTime + + - rename: + from: + - log.event.ProcessId + to: log.eventProcessId + + - rename: + from: + - log.event.ProcessStartTime + to: log.eventProcessStartTime + + - rename: + from: + - log.event.RiskScore + to: log.eventRiskScore + + - rename: + from: + - log.event.SHA1String + to: origin.sha1 + + - rename: + from: + - log.event.SHA256String + to: origin.sha256 + + - rename: + from: + - log.event.Severity + to: log.eventSeverity + + - rename: + from: + - log.event.SeverityName + to: log.eventSeverityName + + - rename: + from: + - log.event.SourceProducts + to: log.eventSourceProducts + + - rename: + from: + - log.event.SourceVendors + to: log.eventSourceVendors + + - rename: + from: + - log.event.Tactic + to: log.eventTactic + + - rename: + from: + - log.event.Technique + to: log.eventTechnique + + - rename: + from: + - log.event.Type + to: log.eventType + + - rename: + from: + - log.event.UserName + to: origin.user + + - rename: + from: + - log.event.FilesAccessed + to: log.eventFilesAccessed + + # .......................................................................# + # Remove unnecessary characters + # .......................................................................# + - trim: + function: prefix + substring: '"' + fields: + - log.eventCommandLine + + - trim: + function: suffix + substring: '"' + fields: + - log.eventCommandLine + + - trim: + function: prefix + substring: '[{' + fields: + - log.eventCommandLine + + - trim: + function: suffix + substring: '}]' + fields: + - log.eventCommandLine # .......................................................................# # Reformat and field conversions @@ -299,13 +711,6 @@ pipeline: - delete: fields: - log.statusCode - - log.RawMessage.event.Attributes - - log.RawMessage.event.UserIp - - log.metadata - - 
log.event.AuditKeyValues - - log.event.OperationName - - log.event.ServiceName - - log.event.Success - - log.event.UTCTimestamp - - log.event.UserId - - log.event.UserIp \ No newline at end of file + - log.event.UserIp + - log.event.Attributes.user_ip + - log.event.Attributes.action_target_name \ No newline at end of file diff --git a/filters/filebeat/system_linux_module.yml b/filters/filebeat/system_linux_module.yml index 5bbbac822..eaac62e6b 100644 --- a/filters/filebeat/system_linux_module.yml +++ b/filters/filebeat/system_linux_module.yml @@ -287,6 +287,24 @@ pipeline: to: log.messageId where: exists("log.MESSAGEID") + - rename: + from: + - log.CPUUSAGENSEC + to: log.cpuUsageNsec + where: exists("log.CPUUSAGENSEC") + + - rename: + from: + - log.MEMORYPEAK + to: log.memoryPeak + where: exists("log.MEMORYPEAK") + + - rename: + from: + - log.MEMORYSWAPPEAK + to: log.memorySwapPeak + where: exists("log.MEMORYSWAPPEAK") + # ======================================== # PHASE 3: STANDARD SCHEMA MAPPING # ======================================== diff --git a/filters/ibm/ibm_as_400.yml b/filters/ibm/ibm_as_400.yml index 0c94221aa..11522baef 100644 --- a/filters/ibm/ibm_as_400.yml +++ b/filters/ibm/ibm_as_400.yml @@ -1,12 +1,88 @@ -# IBM AS 400 filter version 3.0.0 -# Support Java Collector Syslog messsages +# IBM AS 400 filter version 4.0.0 +# Support for JT400 Java Collector with full field mapping pipeline: - dataTypes: - ibm-as400 steps: + # ======================================== + # PHASE 1: EXTRACTION + # ======================================== + + # Drop non-AS400 logs (internal system logs, noise) + - drop: + where: 'startsWith("raw", "[") || contains("raw", "RunExecutor") || contains("raw", "ETL pool")' + + # Parse JSON logs from Java collector + - json: + source: raw + where: 'startsWith("raw", "{")' + + # Fallback: Parse non-JSON logs (raw syslog format) - grok: + source: raw patterns: - - field_name: log.message - pattern: '(.*)' - source: raw \ No newline 
at end of file + - fieldName: log.message + pattern: '{{.greedy}}' + where: '!startsWith("raw", "{")' + + # ======================================== + # PHASE 2: NORMALIZATION (Standard Schema) + # ======================================== + + # Map action/event classification + - rename: + from: + - log.eventType + to: action + where: exists("log.eventType") + + # Map severity level + - rename: + from: + - log.severityLabel + to: severity + where: exists("log.severityLabel") + + # Map origin side attributes + - rename: + from: + - log.hostname + to: origin.host + where: exists("log.hostname") + + - rename: + from: + - log.jobUser + to: origin.user + where: exists("log.jobUser") + + - rename: + from: + - log.jobName + to: origin.process + where: exists("log.jobName") + + - rename: + from: + - log.sourceIp + to: origin.ip + where: exists("log.sourceIp") + + - rename: + from: + - log.file + to: origin.file + where: exists("log.file") + + # ======================================== + # PHASE 3: ENRICHMENT + # ======================================== + + # Add geolocation for source IP + - dynamic: + plugin: com.utmstack.geolocation + params: + source: origin.ip + destination: origin.geolocation + where: exists("origin.ip") \ No newline at end of file diff --git a/filters/office365/o365.yml b/filters/office365/o365.yml index c829fa0d4..2127a1783 100755 --- a/filters/office365/o365.yml +++ b/filters/office365/o365.yml @@ -1,4 +1,4 @@ -# Microsoft 365 filter, version 1.0.3 +# Microsoft 365 filter, version 1.0.4 # Based on Official documentation # See https://learn.microsoft.com/en-us/compliance/assurance/assurance-microsoft-365-audit-log-collection @@ -60,7 +60,7 @@ pipeline: - rename: from: - log.ClientIP - to: log.clientIP + to: origin.ip - rename: from: @@ -92,6 +92,14 @@ pipeline: - log.DestFolder.Path to: log.destFolderPath + # Adding geolocation to origin ip + - dynamic: + plugin: com.utmstack.geolocation + params: + source: origin.ip + destination: 
origin.geolocation + where: exists("origin.ip") + # Drop unnecessary events - drop: where: oneOf("action", ['SupervisionRuleMatch', 'SupervisoryReviewTag', 'ComplianceManagerAutomationChange', 'LabelContentExplorerAccessedItem', 'CreateCopilotPlugin', 'CreateCopilotPromptBook', 'DeleteCopilotPlugin', 'DeleteCopilotPromptBook', 'DisableCopilotPlugin', 'DisableCopilotPromptBook', 'EnableCopilotPlugin', 'EnableCopilotPromptBook', 'CopilotInteraction', 'UpdateCopilotPlugin', 'UpdateCopilotPromptBook', 'UpdateCopilotSettings', 'ApproveDisposal', 'ExtendRetention', 'RelabelItem', 'SearchUpdated', 'CaseUpdated', 'SearchPermissionUpdated', 'HoldUpdated', 'PreviewItemDownloaded', 'PreviewItemListed', 'SearchCreated', 'CaseAdded', 'HoldCreated', 'SearchRemoved', 'HoldRemoved', 'SearchExportDownloaded', 'SearchPreviewed', 'SearchResultsPurged', 'RemovedSearchResultsSentToZoom', 'RemovedSearchExported', 'RemovedSearchPreviewed', 'RemovedSearchResultsPurged', 'SearchReportRemoved', 'SearchResultsSentToZoom', 'SearchStarted', 'SearchExported', 'SearchReport', 'SearchStopped', 'SearchViewed', 'ViewedSearchExported', 'ViewedSearchPreviewed', 'AddRemediatedData', 'BurnJob', 'CreateWorkingSet', 'CreateWorkingSetSearch', 'CreateTag', 'DeleteWorkingSetSearch', 'DeleteTag', 'DownloadDocument', 'UpdateTag', 'ExportJob', 'UpdateWorkingSetSearch', 'PreviewWorkingSetSearch', 'ErrorRemediationJob', 'TagFiles', 'TagJob', 'ViewDocument', 'Copy', 'Create', 'ApplyRecordLabel', 'HardDelete', 'Send', 'Update', 'FileAccessed', 'FileAccessedExtended', 'ComplianceSettingChanged', 'LockRecord', 'UnlockRecord', 'FileCheckedIn', 'FileCheckedOut', 'FileCopied', 'FileDeletedFirstStageRecycleBin', 'FileDeletedSecondStageRecycleBin', 'RecordDelete', 'DocumentSensitivityMismatchDetected', 'FileCheckOutDiscarded', 'FileDownloaded', 'FileModifiedExtended', 'FilePreviewed', 'SearchQueryPerformed', 'FileRecycled', 'FolderRecycled', 'FileVersionsAllMinorsRecycled', 'FileVersionsAllRecycled', 
'FileVersionRecycled', 'FileRestored', 'FileUploaded', 'PageViewed', 'PageViewedExtended', 'ClientViewSignaled', 'PagePrefetched', 'FolderCopied', 'FolderCreated', 'FolderDeletedFirstStageRecycleBin', 'FolderDeletedSecondStageRecycleBin', 'FolderRestored', 'InformationBarriersInsightsReportCompleted', 'InformationBarriersInsightsReportOneDriveSectionQueried', 'InformationBarriersInsightsReportSchedule', 'InformationBarriersInsightsReportSharePointSectionQueried', 'updateddeviceconfiguration', 'UpdatedPolicyConfigPriority', 'BackupPolicyActivated', 'RestoreTaskActivated', 'BackupItemAdded', 'BackupItemRemoved', 'RestoreTaskCompleted', 'DraftRestoreTaskCreated', 'NewBackupPolicyCreated', 'DraftRestoreTaskDeleted', 'DraftRestoreTaskEdited', 'BackupPolicyPaused', 'GetBackupItem', 'ViewBackupPolicyDetails', 'GetRestoreTaskDetails', 'ListAllBackupPolicies', 'ListAllBackupItemsInPolicies', 'ListAllBackupItemsInTenant', 'ListAllBackupItemsInWorkload', 'GetAllRestoreArtifactsInTask', 'ListAllRestorePoints', 'ListAllRestoreTasks', 'BackupItemRestoreCompleted', 'BackupItemRestoreTriggered', 'SetAdvancedFeatures', 'RunAntiVirusScan', 'LogsCollection', 'TaggingConfigurationUpdated', 'AlertExcelDownloaded', 'RemediationActionAdded', 'RemediationActionUpdated', 'SensorCreated', 'SensorDeploymentAccessKeyReceived', 'SensorDeploymentAccessKeyUpdated', 'SensorActivationMethodConfigurationUpdated', 'DomainControllerCoverageExcelDownloaded', 'MonitoringAlertUpdated', 'ReportDownloaded', 'AlertNotificationsRecipientAdded', 'MonitoringAlertNotificationRecipientAdded', 'WorkspaceCreated', 'AddCommentToIncident.', 'AssignUserToIncident', 'UpdateIncidentStatus', 'AddTagsToIncident', 'RemoveTagsFromIncident', 'CreateComment', 'CreateForm', 'MoveForm', 'ViewForm', 'PreviewForm', 'ExportForm', 'AllowShareFormForCopy', 'DisallowShareFormForCopy', 'AddFormCoauthor', 'RemoveFormCoauthor', 'ViewRuntimeForm', 'CreateResponse', 'UpdateResponse', 'ViewResponses', 'ViewResponse', 'GetSummaryLink', 
'DeleteSummaryLink', 'ProInvitation', 'ListForms', 'SubmitResponse', 'ConnectToExcelWorkbook', 'CollectionCreated', 'CollectionUpdated', 'CollectionHardDeleted', 'CollectionSoftDeleted', 'CollectionRenamed', 'MovedFormIntoCollection', 'MovedFormOutofCollection', 'PlanCopied', 'TaskAssigned', 'TaskCompleted', 'PlanListRead', 'TaskListRead', 'ProjectCreated', 'RoadmapCreated', 'RoadmapItemCreated', 'TaskCreated', 'ProjectListAccessed', 'RoadmapAccessed', 'RoadmapItemAccessed', 'TaskAccessed', 'AuditSearchCreated', 'AuditSearchCompleted', 'AuditSearchCancelled', 'AuditSearchExportJobCreated', 'AuditSearchExportJobCompleted', 'AuditSearchExportResultsDownloaded', 'EntityCreated', 'ClassificationAdded', 'ClassificationDefinitionCreated', 'GlossaryTermAssigned', 'GlossaryTermCreated', 'BotAddedToTeam', 'ChannelAdded', 'ConnectorAdded', 'MeetingDetail', 'MeetingParticipantDetail', 'MemberAdded', 'TabAdded', 'SensitivityLabelApplied', 'SensitivityLabelChanged', 'ChatCreated', 'TeamCreated', 'MessageDeleted', 'MessageEditedHasLink', 'MessagesExported', 'RecordingExported', 'TranscriptsExported', 'FailedValidation', 'ChatRetrieved', 'MessageHostedContentsListed', 'PerformedCardAction', 'MessageSent', 'AINotesUpdate', 'LiveNotesUpdate', 'AppPublishedToCatalog', 'MessageRead', 'InviteeResponded', 'ChannelOwnerResponded', 'MessagesListed', 'MessageCreatedHasLink', 'MessageCreatedNotification', 'MessageDeletedNotification', 'MessageUpdatedNotification', 'InviteSent', 'SubscribedToMessages', 'AppUpdatedInCatalog', 'ChatUpdated', 'MessageUpdated', 'TabUpdated', 'AppUpgraded', 'MessageSent', 'ScheduleGroupAdded', 'ShiftAdded', 'TimeOffAdded', 'OpenShiftAdded', 'ScheduleShared', 'ClockedIn', 'ClockedOut', 'BreakEnded', 'TimeClockEntryAdded', 'RequestAdded', 'RequestRespondedTo', 'WorkforceIntegrationAdded', 'OffShiftDialogAccepted', 'CreateUpdateRequest', 'EditUpdateRequest', 'SubmitUpdate', 'ViewUpdate', 'AcceptedSharingLinkOnFolder', 'FolderSharingLinkShared', 
'LinkedEntityCreated', 'SubTaskCreated', 'TaskCreated', 'TaskRead', 'TaskListCreated', 'TaskListRead', 'AccessedOdataLink', 'CanceledQuery', 'DeletedResult', 'DownloadedReport', 'ExecutedQuery', 'UploadedOrgData', 'ViewedExplore', 'QuarantineReleaseRequestDeny', 'QuarantinePreview', 'QuarantineReleaseRequest', 'QuarantineViewHeader', 'UpdateUsageReportsPrivacySetting', 'NewAdaptiveScope', 'NewComplianceTag', 'NewRetentionCompliancePolicy', 'RemoveAdaptiveScope', 'RemoveComplianceTag', 'SetRestrictiveRetentionUI', 'ExchangeDataProactivelyPreserved', 'SharePointDataProactivelyPreserved', 'ListCreated', 'ListColumnCreated', 'ListContentTypeCreated', 'ListItemCreated', 'SiteColumnCreated', 'SiteContentTypeCreated', 'ListContentTypeDeleted', 'SiteColumnDeleted', 'SiteContentTypeDeleted', 'ListItemRecycled', 'ListItemRestored', 'ListColumnUpdated', 'ListContentTypeUpdated', 'SiteColumnUpdated', 'SiteContentTypeUpdated', 'SharingInvitationCreated', 'AccessRequestUpdated', 'SharingInvitationUpdated', 'SharingInvitationRevoked', 'AllowedDataLocationAdded', 'SiteGeoMoveCancelled', 'MigrationJobCompleted', 'SiteGeoMoveCompleted', 'SiteCollectionCreated', 'HubSiteOrphanHubDeleted', 'PreviewModeEnabledSet', 'LegacyWorkflowEnabledSet', 'OfficeOnDemandSet', 'PeopleResultsScopeSet', 'NewsFeedEnabledSet', 'HubSiteJoined', 'SiteCollectionQuotaModified', 'HubSiteRegistered', 'SiteGeoMoveScheduled', 'GeoQuotaAllocated', 'SiteAdminChangeRequest', 'ManagedSyncClientAllowed', 'FileSyncDownloadedFull', 'FileSyncUploadedFull', 'DataShareCreated', 'DataShareDeleted', 'GenerateCopyOfLakeData', 'DownloadCopyOfLakeData', 'SoftDeleteSettingsUpdated', 'CloseConversation', 'OpenConversation', 'MessageCreation', 'MessageDeleted', 'FileDownloaded', 'DataExport', 'ThreadAccessFailure', 'MarkedMessageChanged', 'RemoveCuratedTopic', 'UsagePolicyAcceptance', 'AdminThreadMuted', 'AdminThreadUnmuted', 'FileUpdateDescription', 'MessageUpdated', 'FileVisited', 'ThreadViewed', 'PulseSubmit', 'PulseCreate', 
'PulseExtendDeadline', 'PulseInvite', 'PulseCancel', 'PulseShareResults', 'PulseCreateDraft', 'PulseDeleteDraft']) diff --git a/filters/windows/windows-events.yml b/filters/windows/windows-events.yml index 458bf89e5..9f5f109f5 100644 --- a/filters/windows/windows-events.yml +++ b/filters/windows/windows-events.yml @@ -45,11 +45,6 @@ pipeline: - log.data.SubjectUserSid to: log.eventDataSubjectUserSid - - rename: - from: - - log.data.SubjectUserSid - to: log.eventDataSubjectUserSid - - rename: from: - log.data.PrivilegeList @@ -334,6 +329,51 @@ pipeline: - log.execution.ProcessID to: log.executionProcessID + - rename: + from: + - log.data.ObjectType + to: log.eventDataObjectType + + - rename: + from: + - log.data.AccessList + to: log.eventDataAccessList + + - rename: + from: + - log.data.HandleId + to: log.eventDataHandleId + + - rename: + from: + - log.data.ObjectName + to: log.eventDataObjectName + + - rename: + from: + - log.data.ResourceAttributes + to: log.eventDataResourceAttributes + + - rename: + from: + - log.data.OldSd + to: log.eventDataOldSd + + - rename: + from: + - log.data.NewSd + to: log.eventDataNewSd + + - rename: + from: + - log.data.ObjectServer + to: log.eventDataObjectServer + + - rename: + from: + - log.data.TransactionId + to: log.eventDataTransactionId + - cast: to: "int" fields: @@ -348,6 +388,7 @@ pipeline: - log.logonGuid - log.eventDataSchema - log.processThread + - log.eventDataTransactionId - trim: function: suffix @@ -358,6 +399,7 @@ pipeline: - log.logonGuid - log.eventDataSchema - log.processThread + - log.eventDataTransactionId # Drop unnecessary events - drop: @@ -2282,210 +2324,210 @@ pipeline: params: key: log.accessType value: 'read' - where: equals("log.eeventDataAccessMask", "0x1") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "1") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'For a file object, the right to read the corresponding file data. 
For a directory object, the right to read the corresponding directory data.\n For a directory, the right to list the contents of the directory.\n For registry objects, this is, Query key value.' - where: equals("log.eeventDataAccessMask", "0x1") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "1") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'write' - where: equals("log.eeventDataAccessMask", "0x2") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "2") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'For a file object, the right to write data to the file.\n For a directory object, the right to create a file in the directory.\n For registry objects, this is, Set key value.' - where: equals("log.eeventDataAccessMask", "0x2") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "2") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'append' - where: equals("log.eeventDataAccessMask", "0x4") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "4") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'For a file object, the right to append data to the file. (For local files, write operations will not overwrite existing data if this flag is specified without FILE_WRITE_DATA.)\n For a directory object, the right to create a subdirectory.\n For a named pipe, the right to create a pipe.' 
- where: equals("log.eeventDataAccessMask", "0x4") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "4") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'read_extended_attributes' - where: equals("log.eeventDataAccessMask", "0x8") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "8") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to read extended file attributes.\n For registry objects, this is, Enumerate sub-keys.' - where: equals("log.eeventDataAccessMask", "0x8") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "8") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'write_extended_attributes' - where: equals("log.eeventDataAccessMask", "0x10") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "16") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to write extended file attributes.' - where: equals("log.eeventDataAccessMask", "0x10") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "16") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'execute' - where: equals("log.eeventDataAccessMask", "0x20") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "32") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'For a native code file, the right to execute the file. This access right given to scripts may cause the script to be executable, depending on the script interpreter.\n For a directory, the right to traverse the directory. By default, users are assigned the BYPASS_TRAVERSE_CHECKING privilege, which ignores the FILE_TRAVERSE access right.' 
- where: equals("log.eeventDataAccessMask", "0x20") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "32") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'delete_child' - where: equals("log.eeventDataAccessMask", "0x40") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "64") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'For a directory, the right to delete a directory and all the files it contains, including read-only files.' - where: equals("log.eeventDataAccessMask", "0x40") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "64") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'read_attributes' - where: equals("log.eeventDataAccessMask", "0x80") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "128") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to read file attributes.' - where: equals("log.eeventDataAccessMask", "0x80") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "128") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'write_attributes' - where: equals("log.eeventDataAccessMask", "0x100") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "256") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to write file attributes.' 
- where: equals("log.eeventDataAccessMask", "0x100") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "256") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'delete' - where: equals("log.eeventDataAccessMask", "0x10000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "65536") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to delete the object.' - where: equals("log.eeventDataAccessMask", "0x10000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "65536") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'read_control' - where: equals("log.eeventDataAccessMask", "0x20000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "131072") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to read information in the security descriptor object, without including the information in the system access control list (SACL).' - where: equals("log.eeventDataAccessMask", "0x20000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "131072") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'write_dac' - where: equals("log.eeventDataAccessMask", "0x40000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "262144") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to modify the discretionary access control list (DACL) in the security descriptor object.' 
- where: equals("log.eeventDataAccessMask", "0x40000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "262144") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'write_owner' - where: equals("log.eeventDataAccessMask", "0x80000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "524288") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to change the owner in the security descriptor object' - where: equals("log.eeventDataAccessMask", "0x80000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "524288") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'synchronize' - where: equals("log.eeventDataAccessMask", "0x100000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "1048576") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The right to use the object for synchronization. This enables a thread to wait until the object is in the signaled state. Some object types do not support this access right.' - where: equals("log.eeventDataAccessMask", "0x100000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "1048576") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessType value: 'access_sys_sec' - where: equals("log.eeventDataAccessMask", "0x1000000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "16777216") && equals("log.eventCode", 4663) - add: function: 'string' params: key: log.accessDescription value: 'The ACCESS_SYS_SEC access right controls the ability to get or set the SACL in an security descriptor object.' 
- where: equals("log.eeventDataAccessMask", "0x1000000") && equals("log.eventCode", 4663) + where: equals("log.eventDataAccessMask", "16777216") && equals("log.eventCode", 4663) # Decoding the "eventStatus" field - add: diff --git a/frontend/src/app/app-management/layout/app-management-sidebar/app-management-sidebar.component.html b/frontend/src/app/app-management/layout/app-management-sidebar/app-management-sidebar.component.html index f31ac2421..809572998 100644 --- a/frontend/src/app/app-management/layout/app-management-sidebar/app-management-sidebar.component.html +++ b/frontend/src/app/app-management/layout/app-management-sidebar/app-management-sidebar.component.html @@ -138,7 +138,7 @@ - + diff --git a/frontend/src/app/app-module/conf/int-generic-group-config/int-config-types/collector-configuration.ts b/frontend/src/app/app-module/conf/int-generic-group-config/int-config-types/collector-configuration.ts index a73555dc7..dc564b951 100644 --- a/frontend/src/app/app-module/conf/int-generic-group-config/int-config-types/collector-configuration.ts +++ b/frontend/src/app/app-module/conf/int-generic-group-config/int-config-types/collector-configuration.ts @@ -2,13 +2,10 @@ import {HttpResponse} from '@angular/common/http'; import {Injectable} from '@angular/core'; import {Observable, of} from 'rxjs'; import {catchError, finalize, map, switchMap, tap} from 'rxjs/operators'; -import {ModalService} from '../../../../core/modal/modal.service'; import {UtmToastService} from '../../../../shared/alert/utm-toast.service'; -import {EncryptService} from '../../../../shared/services/util/encrypt.service'; import {ModuleChangeStatusBehavior} from '../../../shared/behavior/module-change-status.behavior'; import {UtmModulesEnum} from '../../../shared/enum/utm-module.enum'; import {UtmModuleCollectorService} from '../../../shared/services/utm-module-collector.service'; -import {UtmModuleGroupConfService} from '../../../shared/services/utm-module-group-conf.service'; import 
{UtmModuleGroupService} from '../../../shared/services/utm-module-group.service'; import {UtmListCollectorType} from '../../../shared/type/utm-list-collector-type'; import {UtmModuleCollectorType} from '../../../shared/type/utm-module-collector.type'; @@ -23,9 +20,6 @@ export class CollectorConfiguration extends IntegrationConfig { constructor(private utmModuleGroupService: UtmModuleGroupService, private toast: UtmToastService, - private encryptService: EncryptService, - private utmModuleGroupConfService: UtmModuleGroupConfService, - private modalService: ModalService, private moduleChangeStatusBehavior: ModuleChangeStatusBehavior, private collectorService: UtmModuleCollectorService) { super(); @@ -73,14 +67,21 @@ export class CollectorConfiguration extends IntegrationConfig { }; return this.saveCollector(group); } else { - group = { - ...group, - collector: null - }; - return this.utmModuleGroupService.delete(group.id); + return of(null); } } + deleteAllConfigs(collectorId: number = null): Observable { + const collector = this.collectors.find(c => c.id === collectorId); + if (collector && collector.collector !== '') { + const group = { + ...collector, + groups: [] + }; + return this.saveCollector(group); + } + } + validateUniqueHostNameByCollector(group: UtmModuleGroupType) { const configs = []; diff --git a/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.html b/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.html index 6bf13f5a0..5dfb2f720 100644 --- a/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.html +++ b/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.html @@ -42,8 +42,8 @@ [disabled]="(!collectorValid(collector.groups) || savingConfig) || (collectorValid(collector.groups) && changes && changes.keys.length === 0) || (!pendingChangesForCollector(collector.groups))"> - - Save 
collector + + Save configuration diff --git a/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.ts b/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.ts index fca5fc706..03424208e 100644 --- a/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.ts +++ b/frontend/src/app/app-module/conf/int-generic-group-config/int-generic-group-config.component.ts @@ -23,7 +23,7 @@ import {IntegrationConfigFactory} from './int-config-types/IntegrationConfigFact templateUrl: './int-generic-group-config.component.html', styleUrls: ['./int-generic-group-config.component.css'] }) -export class IntGenericGroupConfigComponent implements OnInit, OnChanges, OnDestroy { +export class IntGenericGroupConfigComponent implements OnInit, OnDestroy { @Input() serverId: number; @Input() moduleId: number; @Input() groupType = GroupTypeEnum.TENANT; @@ -93,34 +93,6 @@ export class IntGenericGroupConfigComponent implements OnInit, OnChanges, OnDest return (this.config as CollectorConfiguration).collectorList; } - ngOnChanges(changes: SimpleChanges) { - if (changes.disablePreAction && changes.disablePreAction.currentValue) { - const collectors = []; - this.collectorList.forEach( c => { - collectors.push({ - moduleId: this.moduleId, - keys: this.configs, - collector: { - ...c, - group: null, - } - }); - }); - this.collectorService.bulkCreate(collectors) - .pipe(map(response => response.body.results)) - .subscribe( results => { - if (results.every( r => r.status === 'success')) { - this.moduleChangeStatusBehavior.setStatus(false, false); - this.getGroups().subscribe(); - } else { - this.toast.showError('Error Removing Collector Configuration', - 'An error occurred while trying to remove the collector configuration. 
Please try again.'); - } - }, - error => console.log(error)); - } - } - getGroups() { this.loading = true; return this.config.getIntegrationConfigs(this.moduleId) @@ -330,6 +302,7 @@ export class IntGenericGroupConfigComponent implements OnInit, OnChanges, OnDest } saveCollectorConfig(collector: any, action = 'CREATE') { + this.savingConfig = true; (this.config as CollectorConfiguration).saveCollector(collector) .subscribe(response => { this.savingConfig = false; @@ -376,11 +349,21 @@ export class IntGenericGroupConfigComponent implements OnInit, OnChanges, OnDest deleteModal.componentInstance.confirmBtnType = 'delete'; deleteModal.result.then(() => { if (collector && collector.collector !== '') { - const collectorToSave = { - ...collector, - groups: [] - }; - this.deleteAction(collectorToSave); + (this.config as CollectorConfiguration).deleteAllConfigs(collector.id) + .subscribe({ + next: () => { + if (this.groups.length === 1) { + this.moduleChangeStatusBehavior.setStatus(false, false); + } + + this.toast.showSuccessBottom('Collector configuration deleted successfully'); + this.getGroups().subscribe(); + }, + error: () => { + this.toast.showError('Error deleting collector configuration', + 'An error occurred while trying to delete the collector configuration. 
Please try again.'); + } + }); } }); } diff --git a/frontend/src/app/app-module/guides/guide-as400/constants.ts b/frontend/src/app/app-module/guides/guide-as400/constants.ts index 45663c52f..1086b9fc5 100644 --- a/frontend/src/app/app-module/guides/guide-as400/constants.ts +++ b/frontend/src/app/app-module/guides/guide-as400/constants.ts @@ -1,6 +1,6 @@ export const PLATFORM = [ - { + /*{ id: 1, name: 'WINDOWS', install: `New-Item -ItemType Directory -Force -Path "C:\\Program Files\\UTMStack\\UTMStack Collectors\\AS400"; ` + @@ -24,25 +24,33 @@ export const PLATFORM = [ `-Recurse -Force -ErrorAction Stop; Write-Host "UTMStack AS400 Collector removed successfully."`, shell: 'Open Windows Powershell terminal as “ADMINISTRATOR”' - }, - { - id: 2, - name: 'LINUX UBUNTU', - install: `sudo bash -c "apt update -y && apt install wget unzip -y && mkdir -p ` + - `/opt/utmstack-linux-collectors/as400 && cd /opt/utmstack-linux-collectors/as400 && ` + - `wget --no-check-certificate ` + - `https://V_IP:9001/private/dependencies/collector/linux-as400-collector.zip ` + - `&& unzip linux-as400-collector.zip && rm linux-as400-collector.zip && chmod -R 755 ` + - `utmstack_collectors_installer && ./utmstack_collectors_installer install as400 ` + - `V_IP V_TOKEN"`, - - - uninstall: `sudo bash -c " cd /opt/utmstack-linux-collectors/as400 && ./utmstack_collectors_installer ` + - `uninstall as400 && echo 'Removing UTMStack AS400 Collector dependencies...' 
&& sleep 5 && rm ` + - `-rf /opt/utmstack-linux-collectors/as400 && echo 'UTMStack AS400 Collector removed successfully.'"`, - - shell: 'Linux bash terminal' - } + },*/ + { + id: 2, + name: 'LINUX UBUNTU', + + install: `sudo bash -c " + apt update -y && \ + apt install wget -y && \ + mkdir -p /opt/utmstack-as400-collector && \ + wget --no-check-certificate -P /opt/utmstack-as400-collector https://V_IP:9001/private/dependencies/collector/as400/utmstack_as400_collector_service && \ + chmod -R 755 /opt/utmstack-as400-collector/utmstack_as400_collector_service && \ + /opt/utmstack-as400-collector/utmstack_as400_collector_service install V_IP V_TOKEN yes + "`, + + uninstall: `sudo bash -c " + /opt/utmstack-as400-collector/utmstack_as400_collector_service uninstall || true; \ + systemctl stop UTMStackAS400Collector 2>/dev/null || true; \ + systemctl disable UTMStackAS400Collector 2>/dev/null || true; \ + rm -f /etc/systemd/system/UTMStackAS400Collector.service 2>/dev/null || true; \ + echo 'Removing UTMStack AS400 dependencies...' && sleep 10; \ + rm -rf /opt/utmstack-as400-collector 2>/dev/null || true; \ + echo 'UTMStack AS400 dependencies removed successfully.' + "`, + + shell: 'Linux bash terminal' + } + ]; export const ACTIONS = [ diff --git a/frontend/src/app/app-module/guides/guide-as400/guide-as400.component.html b/frontend/src/app/app-module/guides/guide-as400/guide-as400.component.html index b8aebf00e..85e8266f3 100644 --- a/frontend/src/app/app-module/guides/guide-as400/guide-as400.component.html +++ b/frontend/src/app/app-module/guides/guide-as400/guide-as400.component.html @@ -10,7 +10,23 @@

The UTMStack AS400 Collector communicates over ports 9000, 9001 and 50051. Please make sure these ports are open.
    - +
  1. +

    + 1 + Check pre-installation requirements +

    +
      +
    • Compatible with IBM i (AS400) systems that allow remote access to system logs.
    • +
    • The AS400 Collector requires access to IBM i Host Server services. Make sure the following ports are open from the collector host to the AS400:
    • +
        +
      • 8476 – Signon Server (authentication)
      • +
      • 446 – DRDA/JDBC (SQL-based log access, when available)
      • +
      • 8471, 8470, 8475, 8472 – Required when SQL log access is not available and the collector uses the classic JT400 method
      • +
      +
    +
  2. + +
  3. diff --git a/frontend/src/app/app-module/guides/guide-cisco/guide-cisco.component.html b/frontend/src/app/app-module/guides/guide-cisco/guide-cisco.component.html index 88e3d8b4e..b5168fed0 100644 --- a/frontend/src/app/app-module/guides/guide-cisco/guide-cisco.component.html +++ b/frontend/src/app/app-module/guides/guide-cisco/guide-cisco.component.html @@ -29,7 +29,7 @@

  4. 2 - Enable log collector and this integration in the configuration file which + Enable log collector and this integration in the configuration file which you can find where your UTMStack Agent is located, in the path:

    diff --git a/frontend/src/app/app-module/guides/guide-office365/guide-office365.component.html b/frontend/src/app/app-module/guides/guide-office365/guide-office365.component.html index f187ee87a..9f29021e0 100644 --- a/frontend/src/app/app-module/guides/guide-office365/guide-office365.component.html +++ b/frontend/src/app/app-module/guides/guide-office365/guide-office365.component.html @@ -41,7 +41,7 @@

    4 In the newly created App registration, go to Certificates & Secrets, and create a new one by clicking on "New client secret". - Add a description. Set it to expires in 730 days (24 months) and click on Add. Copy the value in a safe place. + Add a description. Set it to expires in 730 days (24 months) and click on Add. Copy the value in a safe place.

    Certificates & secret @@ -94,30 +94,6 @@
  5. 10 - Go to - https://compliance.microsoft.com and sign in. -

    - -
  6. -
  7. -

    - 11 - In the left navigation pane of the compliance portal, select Audit. If auditing isn't turned on for your organization, - a banner is displayed prompting you start recording user and admin activity. -

    - Overview -
  8. -
  9. -

    - 12 - Select the Start recording user and admin activity banner. It may take up to 60 minutes for the change to take effect. -

    -
  10. -
  11. -

    - 13 Return to Azure Active Directory and go to the Registered Apps (step number 3) and make a note the info that appears in the Overview section (client ID, tenant ID) and the secret.

    @@ -126,7 +102,7 @@
  12. - 14 + 11 Insert information in the following inputs.You can add more than one o365 configuration by clicking on Add tenant button.

    @@ -140,7 +116,7 @@
  13. - 15 + 12 Click on the button shown below, to activate the UTMStack features related to this integration

    -
    +
    - + class="table-responsive resizable-table-responsive main-container"> +
    - +
    @@ -70,7 +70,7 @@
    diff --git a/frontend/src/app/data-management/file-management/file-view/file-view.component.scss b/frontend/src/app/data-management/file-management/file-view/file-view.component.scss index e69de29bb..a63d9574a 100644 --- a/frontend/src/app/data-management/file-management/file-view/file-view.component.scss +++ b/frontend/src/app/data-management/file-management/file-view/file-view.component.scss @@ -0,0 +1,4 @@ +.main-container{ + max-height: 70dvh; + overflow-y: auto; +} diff --git a/frontend/src/app/data-management/file-management/shared/const/file-acces-mask.constant.ts b/frontend/src/app/data-management/file-management/shared/const/file-acces-mask.constant.ts index 4f7be32f5..dabdf2b2b 100644 --- a/frontend/src/app/data-management/file-management/shared/const/file-acces-mask.constant.ts +++ b/frontend/src/app/data-management/file-management/shared/const/file-acces-mask.constant.ts @@ -4,7 +4,7 @@ import {FileAccessMaskCodeType} from '../types/file-access-mask-code.type'; export const ACCESS_MASK_CODES: FileAccessMaskCodeType[] = [ { access: 'ReadData (or ListDirectory)', - hex: AccessMaskEnum.READ_DATA, + hex: `${AccessMaskEnum.READ_DATA}`, description: 'ReadData - For a file object, the right to read ' + 'the corresponding file data. For a directory object, the right to read the' + ' corresponding directory data.\n' + @@ -12,14 +12,14 @@ export const ACCESS_MASK_CODES: FileAccessMaskCodeType[] = [ }, { access: 'WriteData (or AddFile)', - hex: AccessMaskEnum.WRITE_DATA, + hex:`${ AccessMaskEnum.WRITE_DATA}`, description: 'WriteData - For a file object, the right to write data to the file. ' + 'For a directory object, the right to create a file in the directory (FILE_ADD_FILE).\n' + 'AddFile - For a directory, the right to create a file in the directory..' 
}, { access: 'AppendData (or AddSubdirectory or CreatePipeInstance)', - hex: AccessMaskEnum.APPEND_DATA, + hex:`${ AccessMaskEnum.APPEND_DATA}`, description: 'AppendData - For a file object, the right to append data to the file.' + ' (For local files, write operations will not overwrite existing data if this flag ' + 'is specified without FILE_WRITE_DATA.) For a directory object, the right to create a ' + @@ -29,17 +29,17 @@ export const ACCESS_MASK_CODES: FileAccessMaskCodeType[] = [ }, { access: 'ReadEA(For registry objects, this is Enumerate sub-keys.)', - hex: AccessMaskEnum.READ_EA, + hex:`${ AccessMaskEnum.READ_EA}`, description: 'The right to read extended file attributes.' }, { access: 'WriteEA', - hex: AccessMaskEnum.WRITE_EA, + hex:`${ AccessMaskEnum.WRITE_EA}`, description: 'The right to write extended file attributes.' }, { access: 'Execute/Traverse', - hex: AccessMaskEnum.EXECUTE_TRAVERSE, + hex:`${ AccessMaskEnum.EXECUTE_TRAVERSE}`, description: 'Execute - For a native code file, the right to execute' + ' the file. This access right given to scripts may cause the ' + 'script to be executable, depending on the script interpreter.\n' + @@ -50,52 +50,52 @@ export const ACCESS_MASK_CODES: FileAccessMaskCodeType[] = [ }, { access: 'Delete child', - hex: AccessMaskEnum.DELETE_CHILD, + hex:`${ AccessMaskEnum.DELETE_CHILD}`, description: 'For a directory, the right to delete a directory and all the ' + 'files it contains, including read-only files.' }, { access: 'Read attributes', - hex: AccessMaskEnum.READ_ATTRIBUTES, + hex:`${ AccessMaskEnum.READ_ATTRIBUTES}`, description: 'The right to read file attributes.' }, { access: 'Write attributes', - hex: AccessMaskEnum.WRITE_ATTRIBUTES, + hex:`${ AccessMaskEnum.WRITE_ATTRIBUTES}`, description: 'The right to write file attributes.' }, { access: 'Delete', - hex: AccessMaskEnum.DELETE, + hex:`${ AccessMaskEnum.DELETE}`, description: 'The right to delete the object.' 
}, { access: 'Read control', - hex: AccessMaskEnum.READ_CONTROL, + hex:`${ AccessMaskEnum.READ_CONTROL}`, description: 'The right to read the information in the object\'s security' + ' descriptor, not including the information' + ' in the system access control list (SACL).' }, { access: 'Write DAC', - hex: AccessMaskEnum.WRITE_AC, + hex:`${ AccessMaskEnum.WRITE_AC}`, description: 'The right to modify the discretionary access control list' + ' (DACL) in the object\'s security descriptor.' }, { access: 'Write OWNER', - hex: AccessMaskEnum.WRITE_OWNER, + hex:`${ AccessMaskEnum.WRITE_OWNER}`, description: 'The right to change the owner in the object\'s security descriptor' }, { access: 'Synchronize', - hex: AccessMaskEnum.SYNCHRONIZE, + hex:`${ AccessMaskEnum.SYNCHRONIZE}`, description: 'The right to use the object for synchronization. This enables a thread to ' + 'wait until the object is in the signaled state. Some object type do not support this access right.' }, { access: 'Access SYS_SEC', - hex: AccessMaskEnum.ACCESS_SYS_SEC, + hex:`${ AccessMaskEnum.ACCESS_SYS_SEC}`, description: 'The ACCESS_SYS_SEC access right controls the ability to get or set the SACL' + ' in an object\'s security descriptor.' 
} diff --git a/frontend/src/app/data-management/file-management/shared/const/file-field.constant.ts b/frontend/src/app/data-management/file-management/shared/const/file-field.constant.ts index f273552d1..88d435794 100644 --- a/frontend/src/app/data-management/file-management/shared/const/file-field.constant.ts +++ b/frontend/src/app/data-management/file-management/shared/const/file-field.constant.ts @@ -94,12 +94,6 @@ export const FILE_FIELDS: UtmFieldType[] = [ type: ElasticDataTypesEnum.STRING, visible: false, }, - { - label: 'Subject domain name', - field: FileFieldEnum.FILE_SUBJECT_DOMAIN_NAME_FIELD, - type: ElasticDataTypesEnum.STRING, - visible: false, - }, { label: 'Subject logon ID', field: FileFieldEnum.FILE_SUBJECT_LOGON_ID_FIELD, @@ -112,12 +106,6 @@ export const FILE_FIELDS: UtmFieldType[] = [ type: ElasticDataTypesEnum.STRING, visible: false, }, - { - label: 'Host architecture', - field: FileFieldEnum.FILE_HOST_ARCHITECTURE_FIELD, - type: ElasticDataTypesEnum.STRING, - visible: false, - }, { label: 'Host ID', field: FileFieldEnum.FILE_HOST_ID_FIELD, @@ -136,30 +124,6 @@ export const FILE_FIELDS: UtmFieldType[] = [ type: ElasticDataTypesEnum.STRING, visible: false, }, - { - label: 'OS Build', - field: FileFieldEnum.FILE_HOTS_OS_BUILD_FIELD, - type: ElasticDataTypesEnum.STRING, - visible: false, - }, - { - label: 'OS Family', - field: FileFieldEnum.FILE_HOST_OS_FAMILY_FIELD, - type: ElasticDataTypesEnum.STRING, - visible: false, - }, - { - label: 'OS Platform', - field: FileFieldEnum.FILE_HOST_OS_PLATFORM_FIELD, - type: ElasticDataTypesEnum.STRING, - visible: false, - }, - { - label: 'OS Version', - field: FileFieldEnum.FILE_HOST_OS_VERSION_FIELD, - type: ElasticDataTypesEnum.STRING, - visible: false, - }, { label: 'Keywords', field: FileFieldEnum.FILE_KEYWORD_FIELD, @@ -833,4 +797,4 @@ export const DELETED_FILE_EVENT_ID_NUMBER = [4663]; export const CREATED_FILE_EVENT_ID_NUMBER = 4663; export const FILE_OBJECT_TYPE_VALUE = ['File', 'Folder']; -// 
NETWORK SHARE FIELDS + diff --git a/frontend/src/app/data-management/file-management/shared/enum/access-mask.enum.ts b/frontend/src/app/data-management/file-management/shared/enum/access-mask.enum.ts index 731e18dcb..4243328f5 100644 --- a/frontend/src/app/data-management/file-management/shared/enum/access-mask.enum.ts +++ b/frontend/src/app/data-management/file-management/shared/enum/access-mask.enum.ts @@ -1,17 +1,17 @@ export enum AccessMaskEnum { - READ_DATA = '0x1', - WRITE_DATA = '0x2', - APPEND_DATA = '0x4', - READ_EA = '0x8', - WRITE_EA = '0x10', - EXECUTE_TRAVERSE = '0x20', - DELETE_CHILD = '0x40', - READ_ATTRIBUTES = '0x80', - WRITE_ATTRIBUTES = '0x100', - DELETE = '0x10000', - READ_CONTROL = '0x20000', - WRITE_AC = '0x40000', - WRITE_OWNER = '0x80000', - SYNCHRONIZE = '0x100000', - ACCESS_SYS_SEC = '0x1000000' + READ_DATA = 0x1, + WRITE_DATA = 0x2, + APPEND_DATA = 0x4, + READ_EA = 0x8, + WRITE_EA = 0x10, + EXECUTE_TRAVERSE = 0x20, + DELETE_CHILD = 0x40, + READ_ATTRIBUTES = 0x80, + WRITE_ATTRIBUTES = 0x100, + DELETE = 0x10000, + READ_CONTROL = 0x20000, + WRITE_AC = 0x40000, + WRITE_OWNER = 0x80000, + SYNCHRONIZE = 0x100000, + ACCESS_SYS_SEC = 0x1000000 } diff --git a/frontend/src/app/data-management/file-management/shared/enum/file-field.enum.ts b/frontend/src/app/data-management/file-management/shared/enum/file-field.enum.ts index 364e076e4..5c3fd3477 100644 --- a/frontend/src/app/data-management/file-management/shared/enum/file-field.enum.ts +++ b/frontend/src/app/data-management/file-management/shared/enum/file-field.enum.ts @@ -1,44 +1,34 @@ export enum FileFieldEnum { FILE_TIMESTAMP_FIELD = '@timestamp', - FILE_VERSION_FIELD = '@version', - FILE_ID_FIELD = '_id', - FILE_BEAT_HOSTNAME_FIELD = 'logx.wineventlog.beat.hostname', - FILE_BEAT_NAME_FIELD = 'logx.wineventlog.beat.name', - FILE_BEAT_VERSION_FIELD = 'logx.wineventlog.beat.version', - FILE_COMPUTER_NAME_FIELD = 'logx.wineventlog.computer_name', - FILE_ACCESS_LIST_FIELD = 
'logx.wineventlog.event_data.AccessList', - FILE_ACCESS_MASK_FIELD = 'logx.wineventlog.event_data.AccessMask', - FILE_HANDLE_ID_FIELD = 'logx.wineventlog.event_data.HandleId', - FILE_OBJECT_NAME_FIELD = 'logx.wineventlog.event_data.ObjectName', - FILE_OBJECT_SERVER_FIELD = 'logx.wineventlog.event_data.ObjectServer', - FILE_OBJECT_TYPE_FIELD = 'logx.wineventlog.event_data.ObjectType', - FILE_PROCESS_ID_FIELD = 'logx.wineventlog.event_data.ProcessId', - FILE_PROCESS_NAME_FIELD = 'logx.wineventlog.event_data.ProcessName', - FILE_RESOURCE_ATT_FIELD = 'logx.wineventlog.event_data.ResourceAttributes', - FILE_SUBJECT_DOMAIN_NAME_FIELD = 'logx.wineventlog.event_data.SubjectDomainName', - FILE_SUBJECT_LOGON_ID_FIELD = 'logx.wineventlog.event_data.SubjectLogonId', - FILE_SUBJECT_USER_NAME_FIELD = 'logx.wineventlog.event_data.SubjectUserName', - FILE_SUBJECT_USER_ID_FIELD = 'logx.wineventlog.event_data.SubjectUserSid', - FILE_EVENT_ID_FIELD = 'logx.wineventlog.event_id', - FILE_EVENT_NAME_FIELD = 'logx.wineventlog.event_name', - FILE_HOST_ARCHITECTURE_FIELD = 'logx.wineventlog.host.architecture', - FILE_HOST_ID_FIELD = 'logx.wineventlog.host.id', - FILE_HOST_NAME_FIELD = 'logx.wineventlog.host.name', - FILE_HOST_OS_NAME_FIELD = 'logx.wineventlog.host.os.name', - FILE_MESSAGE_FIELD = 'logx.wineventlog.message', - FILE_NEW_SDDL_FIELD = 'logx.wineventlog.event_data.NewSd', - FILE_OLD_SDDL_FIELD = 'logx.wineventlog.event_data.OldSd', - FILE_HOTS_OS_BUILD_FIELD = 'logx.wineventlog.host.os.build', - FILE_HOST_OS_FAMILY_FIELD = 'logx.wineventlog.host.os.family', - FILE_HOST_OS_PLATFORM_FIELD = 'logx.wineventlog.host.os.platform', - FILE_HOST_OS_VERSION_FIELD = 'logx.wineventlog.host.os.version', - FILE_KEYWORD_FIELD = 'logx.wineventlog.keywords', - FILE_LEVEL_FIELD = 'logx.wineventlog.level', - FILE_LOG_NAME_FIELD = 'logx.wineventlog.log_name', - FILE_OPCODE_FIELD = 'logx.wineventlog.opcode', - FILE_PROCESS_ID_SECONDARY_FIELD = 'logx.wineventlog.process_id', - 
FILE_PROVIDER_GUID_FIELD = 'logx.wineventlog.provider_guid', - FILE_SHARE_NAME_FIELD = 'logx.wineventlog.event_data.ShareName', - FILE_SHARE_PATH_FIELD = 'logx.wineventlog.event_data.ShareLocalPath', - FILE_SHARE_IPPORT_FIELD = 'logx.wineventlog.event_data.IpPort', + FILE_ACCESS_LIST_FIELD = 'log.eventDataAccessList', + FILE_ACCESS_MASK_FIELD = 'log.eventDataAccessMask', + FILE_HANDLE_ID_FIELD = 'log.eventDataHandleId', + FILE_OBJECT_NAME_FIELD = 'log.eventDataObjectName', + FILE_OBJECT_SERVER_FIELD = 'log.eventDataObjectServer', + FILE_OBJECT_TYPE_FIELD = 'log.eventDataObjectType', + FILE_PROCESS_ID_FIELD = 'log.eventDataProcessId', + FILE_PROCESS_NAME_FIELD = 'log.eventDataProcessName', + FILE_RESOURCE_ATT_FIELD = 'log.eventDataResourceAttributes', + FILE_SUBJECT_DOMAIN_NAME_FIELD = 'log.eventDataSubjectDomainName', + FILE_SUBJECT_LOGON_ID_FIELD = 'log.eventDataSubjectLogonId', + FILE_SUBJECT_USER_NAME_FIELD = 'log.eventDataSubjectUserName', + FILE_SUBJECT_USER_ID_FIELD = 'log.eventDataSubjectUserSid', + FILE_EVENT_ID_FIELD = 'log.eventCode', + FILE_EVENT_NAME_FIELD = 'log.eventName', + FILE_HOST_ARCHITECTURE_FIELD = 'log.cpuArchitecture', + FILE_HOST_ID_FIELD = 'id', + FILE_HOST_NAME_FIELD = 'origin.host', + FILE_HOST_OS_NAME_FIELD = 'log.computer', + FILE_MESSAGE_FIELD = 'log.eventName', + FILE_NEW_SDDL_FIELD = 'log.eventDataNewSd', + FILE_OLD_SDDL_FIELD = 'log.eventDataOldSd', + FILE_HOTS_OS_BUILD_FIELD = 'log.host.os.build', + FILE_HOST_OS_FAMILY_FIELD = 'log.host.os.family', + FILE_HOST_OS_PLATFORM_FIELD = 'log.host.os.platform', + FILE_HOST_OS_VERSION_FIELD = 'log.host.os.version', + FILE_KEYWORD_FIELD = 'log.keywords', + FILE_OPCODE_FIELD = 'log.opcode', + FILE_PROVIDER_GUID_FIELD = 'log.providerGuid', + FILE_SHARE_NAME_FIELD = 'log.eventDataShareName', + FILE_SHARE_PATH_FIELD = 'log.eventDataShareLocalPath', } diff --git a/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish-component.scss 
b/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish-component.scss index 797f122f9..2b212590d 100644 --- a/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish-component.scss +++ b/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish-component.scss @@ -1,5 +1,4 @@ -@import "../../../../../../assets/styles/theme"; -@import "../../../../../../assets/styles/var"; +@import "../../../../../../assets/modules/auth"; .reset-pass-container { .card { @@ -36,3 +35,7 @@ } } +.form-control { + padding: .375rem .75rem !important; +} + diff --git a/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.html b/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.html index 03ea39b52..6439040f7 100644 --- a/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.html +++ b/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.html @@ -1,5 +1,5 @@
    -
    +
    Password reset
    @@ -13,13 +13,13 @@
    Password reset
    The password reset key is missing.
    - Your password couldn't be reset. Remember a password request is only valid for 24 hours. + {{ error }}

    - Your password has been reset. Please - sign in. + Your password has been reset.

    -
    +
    The password and its confirmation do not match!
    @@ -72,12 +72,20 @@
    Password reset
    - +
    + +
    diff --git a/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.ts b/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.ts index 7cbfd227a..512d2b64f 100644 --- a/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.ts +++ b/frontend/src/app/shared/components/auth/password-reset/finish/password-reset-finish.component.ts @@ -3,6 +3,7 @@ import {ActivatedRoute, Router} from '@angular/router'; import {NgbModalRef} from '@ng-bootstrap/ng-bootstrap'; import {ModalService} from '../../../../../core/modal/modal.service'; import {PasswordResetFinishService} from './password-reset-finish.service'; +import {HttpResponse} from "@angular/common/http"; @Component({ @@ -23,7 +24,6 @@ export class PasswordResetFinishComponent implements OnInit, AfterViewInit { constructor( private passwordResetFinishService: PasswordResetFinishService, - private loginModalService: ModalService, private route: ActivatedRoute, private router: Router, private elementRef: ElementRef, @@ -33,10 +33,14 @@ export class PasswordResetFinishComponent implements OnInit, AfterViewInit { ngOnInit() { this.route.queryParams.subscribe(params => { - this.key = params.key; + if (params && params.key) { + this.key = params.key; + this.keyMissing = false + } else { + this.keyMissing = true; + } }); this.resetAccount = {}; - this.keyMissing = !this.key; } ngAfterViewInit() { @@ -58,9 +62,10 @@ export class PasswordResetFinishComponent implements OnInit, AfterViewInit { this.success = 'OK'; this.sending = false; }, - () => { + (error: HttpResponse) => { + this.error = error.headers.get('x-utmstack-error') || + 'An internal error has occurred, please try again.'; this.success = null; - this.error = 'ERROR'; this.sending = false; } ); diff --git a/frontend/src/environments/environment.ts b/frontend/src/environments/environment.ts index c14489291..586cb5a3b 100644 --- 
a/frontend/src/environments/environment.ts +++ b/frontend/src/environments/environment.ts @@ -4,8 +4,8 @@ export const environment = { production: false, - SERVER_API_URL: 'https://192.168.1.18/', - //SERVER_API_URL: 'http://localhost:8080/', + // SERVER_API_URL: 'https://192.168.1.18/', + SERVER_API_URL: 'http://localhost:8080/', SERVER_API_CONTEXT: '', SESSION_AUTH_TOKEN: window.location.host.split(':')[0].toLocaleUpperCase(), WEBSOCKET_URL: '//localhost:8080', diff --git a/installer/templates/front-end.go b/installer/templates/front-end.go index 76d48cc52..dc960dcd9 100644 --- a/installer/templates/front-end.go +++ b/installer/templates/front-end.go @@ -15,10 +15,25 @@ const FrontEnd string = `server { set $utmstack_agent_manager http://agentmanager:9001; set $utmstack_backend_auth http://backend:8080/api/authenticate; set $utmstack_ws http://backend:8080/ws; - set $utmstack_saml2 http://backend:8080/login/saml2/; set $shared_key {{.SharedKey}}; set $shared_key_header $http_x_shared_key; + location /saml2/ { + proxy_pass http://backend:8080/saml2/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /login/saml2/ { + proxy_pass http://backend:8080/login/saml2/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + location /api { proxy_pass $utmstack_backend; proxy_set_header Host $host; diff --git a/plugins/crowdstrike/check.go b/plugins/crowdstrike/check.go index 063aa7cd4..d5c55e50f 100644 --- a/plugins/crowdstrike/check.go +++ b/plugins/crowdstrike/check.go @@ -58,7 +58,7 @@ func infiniteRetryIfXError(f func() error, exception string) error { _ = catcher.Error("An error occurred (%s), will keep retrying indefinitely...", err, map[string]any{"process": 
"plugin_com.utmstack.crowdstrike"}) xErrorWasLogged = true } - time.Sleep(wait) + time.Sleep(reconnectDelay) continue } diff --git a/plugins/crowdstrike/config/config.go b/plugins/crowdstrike/config/config.go index beacd83c3..4cb1926be 100644 --- a/plugins/crowdstrike/config/config.go +++ b/plugins/crowdstrike/config/config.go @@ -23,13 +23,17 @@ const ( ) var ( - cnf *ConfigurationSection - mu sync.Mutex - + cnf *ConfigurationSection + mu sync.Mutex + configUpdateChan chan *ConfigurationSection internalKey string modulesConfigHost string ) +func init() { + configUpdateChan = make(chan *ConfigurationSection, 1) +} + func GetConfig() *ConfigurationSection { mu.Lock() defer mu.Unlock() @@ -39,6 +43,10 @@ func GetConfig() *ConfigurationSection { return cnf } +func GetConfigUpdateChannel() <-chan *ConfigurationSection { + return configUpdateChan +} + func StartConfigurationSystem() { for { pluginConfig := plugins.PluginCfg("com.utmstack") @@ -133,7 +141,18 @@ func StartConfigurationSystem() { switch message := in.Payload.(type) { case *BiDirectionalMessage_Config: catcher.Info("Received configuration update", map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) + + mu.Lock() cnf = message.Config + mu.Unlock() + + select { + case configUpdateChan <- message.Config: + + default: + + catcher.Info("Configuration update channel full, skipping notification", map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) + } } } } diff --git a/plugins/crowdstrike/main.go b/plugins/crowdstrike/main.go index 8ced226be..f07a4b67b 100644 --- a/plugins/crowdstrike/main.go +++ b/plugins/crowdstrike/main.go @@ -14,6 +14,7 @@ import ( "github.com/crowdstrike/gofalcon/falcon/client" "github.com/crowdstrike/gofalcon/falcon/client/event_streams" "github.com/crowdstrike/gofalcon/falcon/models" + "github.com/crowdstrike/gofalcon/falcon/models/streaming_models" "github.com/google/uuid" "github.com/threatwinds/go-sdk/catcher" "github.com/threatwinds/go-sdk/plugins" @@ -23,7 +24,28 
@@ import ( const ( defaultTenant = "ce66672c-e36d-4761-a8c8-90058fee1a24" urlCheckConnection = "https://falcon.crowdstrike.com" - wait = 1 * time.Second + reconnectDelay = 5 * time.Second +) + +type streamKey struct { + groupID int32 + groupName string +} + +type activeStream struct { + ctx context.Context + cancel context.CancelFunc + processor *CrowdStrikeProcessor + dataSource string + offsets map[string]uint64 + streamStartTime uint64 + wg sync.WaitGroup + mu sync.Mutex +} + +var ( + activeStreams = make(map[streamKey]*activeStream) + activeStreamsMu sync.RWMutex ) func main() { @@ -32,70 +54,305 @@ func main() { return } + if err := connectionChecker(urlCheckConnection); err != nil { + _ = catcher.Error("Failed to establish connectivity, plugin will not start", err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + return + } + go config.StartConfigurationSystem() - for t := 0; t < 2*runtime.NumCPU(); t++ { + for i := 0; i < 2*runtime.NumCPU(); i++ { go func() { plugins.SendLogsFromChannel("com.utmstack.crowdstrike") }() } - delay := 5 * time.Minute - ticker := time.NewTicker(delay) - defer ticker.Stop() + go watchConfigurationChanges() - for range ticker.C { - if err := connectionChecker(urlCheckConnection); err != nil { - _ = catcher.Error("External connection failure detected: %v", err, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) + select {} +} + +func watchConfigurationChanges() { + time.Sleep(3 * time.Second) + + initialConfig := config.GetConfig() + if initialConfig != nil && initialConfig.ModuleActive { + updateStreams(initialConfig) + } + + for newConfig := range config.GetConfigUpdateChannel() { + if newConfig == nil || !newConfig.ModuleActive { + stopAllStreams() + continue } - moduleConfig := config.GetConfig() - if moduleConfig != nil && moduleConfig.ModuleActive { - var wg sync.WaitGroup - wg.Add(len(moduleConfig.ModuleGroups)) - for _, grp := range moduleConfig.ModuleGroups { - go func(group 
*config.ModuleGroup) { - defer wg.Done() - var invalid bool - for _, c := range group.ModuleGroupConfigurations { - if strings.TrimSpace(c.ConfValue) == "" { - invalid = true - break - } - } - - if !invalid { - pullCrowdStrikeEvents(group) - } - }(grp) + updateStreams(newConfig) + } +} + +func updateStreams(newConfig *config.ConfigurationSection) { + activeStreamsMu.Lock() + defer activeStreamsMu.Unlock() + + newGroups := make(map[streamKey]*config.ModuleGroup) + for _, grp := range newConfig.ModuleGroups { + key := streamKey{groupID: grp.Id, groupName: grp.GroupName} + newGroups[key] = grp + } + + for key, stream := range activeStreams { + if _, exists := newGroups[key]; !exists { + stream.cancel() + + go func(s *activeStream, k streamKey) { + s.wg.Wait() + activeStreamsMu.Lock() + delete(activeStreams, k) + activeStreamsMu.Unlock() + }(stream, key) + } + } + + for key, group := range newGroups { + if !isGroupValid(group) { + continue + } + + existingStream, exists := activeStreams[key] + + if exists { + newProcessor := buildProcessor(group) + if processorChanged(existingStream.processor, newProcessor) { + existingStream.cancel() + + go func(s *activeStream, k streamKey, g *config.ModuleGroup) { + s.wg.Wait() + activeStreamsMu.Lock() + delete(activeStreams, k) + startStream(k, g) + activeStreamsMu.Unlock() + }(existingStream, key, group) + } + } else { + startStream(key, group) + } + } +} + +func startStream(key streamKey, group *config.ModuleGroup) { + ctx, cancel := context.WithCancel(context.Background()) + + processor := buildProcessor(group) + + stream := &activeStream{ + ctx: ctx, + cancel: cancel, + processor: processor, + dataSource: group.GroupName, + offsets: make(map[string]uint64), + streamStartTime: uint64(time.Now().UnixMilli()), + } + + activeStreams[key] = stream + + go maintainStreamConnection(stream) +} + +func stopAllStreams() { + activeStreamsMu.Lock() + + if len(activeStreams) == 0 { + activeStreamsMu.Unlock() + return + } + + for _, stream := 
range activeStreams { + stream.cancel() + } + + var wg sync.WaitGroup + for _, stream := range activeStreams { + wg.Add(1) + go func(s *activeStream) { + defer wg.Done() + s.wg.Wait() + }(stream) + } + + activeStreamsMu.Unlock() + + wg.Wait() + + activeStreamsMu.Lock() + for key := range activeStreams { + delete(activeStreams, key) + } + activeStreamsMu.Unlock() +} + +func maintainStreamConnection(stream *activeStream) { + for { + err := runEventStream(stream) + if err != nil { + select { + case <-stream.ctx.Done(): + return + case <-time.After(reconnectDelay): } - wg.Wait() } } } -func pullCrowdStrikeEvents(group *config.ModuleGroup) { - processor := getCrowdStrikeProcessor(group) +func runEventStream(stream *activeStream) error { + apiClient, err := stream.processor.createClient() + if err != nil { + return catcher.Error("failed to create client", err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + } - events, err := processor.getEvents() + ctx, cancel := context.WithTimeout(stream.ctx, 2*time.Minute) + defer cancel() + + jsonFormat := "json" + response, err := apiClient.EventStreams.ListAvailableStreamsOAuth2( + &event_streams.ListAvailableStreamsOAuth2Params{ + AppID: stream.processor.AppName, + Format: &jsonFormat, + Context: ctx, + }, + ) if err != nil { - _ = catcher.Error("cannot get CrowdStrike events", err, map[string]any{ - "group": group.GroupName, + return catcher.Error("failed to list streams", err, map[string]any{ "process": "plugin_com.utmstack.crowdstrike", }) - return } - for _, event := range events { - _ = plugins.EnqueueLog(&plugins.Log{ - Id: uuid.NewString(), - TenantId: defaultTenant, - DataType: "crowdstrike", - DataSource: group.GroupName, - Timestamp: time.Now().UTC().Format(time.RFC3339Nano), - Raw: event, - }, "com.utmstack.crowdstrike") + if err = falcon.AssertNoError(response.Payload.Errors); err != nil { + return catcher.Error("CrowdStrike API error", err, map[string]any{ + "process": 
"plugin_com.utmstack.crowdstrike", + }) + } + + availableStreams := response.Payload.Resources + + for _, streamV2 := range availableStreams { + if streamV2.DataFeedURL == nil { + catcher.Error("Stream has nil DataFeedURL, skipping", nil, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + continue + } + + streamID := *streamV2.DataFeedURL + + stream.wg.Add(1) + go func(streamResource *models.MainAvailableStreamV2, sid string) { + defer stream.wg.Done() + maintainIndividualStream(stream, apiClient, streamResource, sid) + }(streamV2, streamID) } + + <-stream.ctx.Done() + + stream.wg.Wait() + + return nil +} + +func maintainIndividualStream(stream *activeStream, apiClient *client.CrowdStrikeAPISpecification, + streamResource *models.MainAvailableStreamV2, streamID string) { + + for { + select { + case <-stream.ctx.Done(): + return + default: + stream.mu.Lock() + currentOffset := stream.offsets[streamID] + stream.mu.Unlock() + + falconStream, err := falcon.NewStream(stream.ctx, apiClient, stream.processor.AppName, streamResource, currentOffset) + if err != nil { + catcher.Error("failed to create stream, will retry", err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + } else { + err = processStreamEvents(stream, falconStream, streamID) + falconStream.Close() + + if err != nil { + catcher.Error("stream error, will reconnect", err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + } + } + + if err != nil { + select { + case <-stream.ctx.Done(): + return + case <-time.After(reconnectDelay): + continue + } + } + } + } +} + +func processStreamEvents(stream *activeStream, falconStream *falcon.StreamingHandle, streamID string) error { + for { + select { + case <-stream.ctx.Done(): + return nil + + case err := <-falconStream.Errors: + if err.Fatal { + return catcher.Error("fatal stream error", err.Err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + } + catcher.Error("Non-fatal stream 
error", err.Err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + + case event := <-falconStream.Events: + if event.Metadata.EventCreationTime > stream.streamStartTime { + processEvent(event, stream.dataSource) + + stream.mu.Lock() + stream.offsets[streamID] = event.Metadata.Offset + stream.mu.Unlock() + } + } + } +} + +func processEvent(event *streaming_models.EventItem, dataSource string) { + var eventData string + if len(event.RawMessage) > 0 { + eventData = string(event.RawMessage) + } else { + eventJSON, err := json.Marshal(event) + if err != nil { + catcher.Error("Failed to marshal event", err, map[string]any{ + "process": "plugin_com.utmstack.crowdstrike", + }) + return + } + eventData = string(eventJSON) + } + + _ = plugins.EnqueueLog(&plugins.Log{ + Id: uuid.NewString(), + TenantId: defaultTenant, + DataType: "crowdstrike", + DataSource: dataSource, + Timestamp: time.Now().UTC().Format(time.RFC3339Nano), + Raw: eventData, + }, "com.utmstack.crowdstrike") } type CrowdStrikeProcessor struct { @@ -105,8 +362,21 @@ type CrowdStrikeProcessor struct { AppName string } -func getCrowdStrikeProcessor(group *config.ModuleGroup) CrowdStrikeProcessor { - processor := CrowdStrikeProcessor{} +func isGroupValid(group *config.ModuleGroup) bool { + if group == nil { + return false + } + + for _, cnf := range group.ModuleGroupConfigurations { + if strings.TrimSpace(cnf.ConfValue) == "" { + return false + } + } + return true +} + +func buildProcessor(group *config.ModuleGroup) *CrowdStrikeProcessor { + processor := &CrowdStrikeProcessor{} for _, cnf := range group.ModuleGroupConfigurations { switch cnf.ConfKey { @@ -123,6 +393,16 @@ func getCrowdStrikeProcessor(group *config.ModuleGroup) CrowdStrikeProcessor { return processor } +func processorChanged(old, new *CrowdStrikeProcessor) bool { + if old == nil || new == nil { + return true + } + return old.ClientID != new.ClientID || + old.ClientSecret != new.ClientSecret || + old.Cloud != new.Cloud || + 
old.AppName != new.AppName +} + func (p *CrowdStrikeProcessor) createClient() (*client.CrowdStrikeAPISpecification, error) { if p.ClientID == "" || p.ClientSecret == "" { return nil, catcher.Error("cannot create CrowdStrike client", @@ -172,96 +452,3 @@ func extractCloudFromURL(cloudValue string) (falcon.CloudType, error) { return falcon.CloudValidate(trimmed) } - -func (p *CrowdStrikeProcessor) getEvents() ([]string, error) { - client, err := p.createClient() - if err != nil { - return nil, err - } - - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) - defer cancel() - - json := "json" - response, err := client.EventStreams.ListAvailableStreamsOAuth2( - &event_streams.ListAvailableStreamsOAuth2Params{ - AppID: p.AppName, - Format: &json, - Context: ctx, - }, - ) - if err != nil { - return nil, catcher.Error("cannot list available streams", err, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) - } - - if err = falcon.AssertNoError(response.Payload.Errors); err != nil { - return nil, catcher.Error("CrowdStrike API error", err, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) - } - - availableStreams := response.Payload.Resources - if len(availableStreams) == 0 { - _ = catcher.Error("no available streams found", nil, map[string]any{ - "app_id": p.AppName, - "process": "plugin_com.utmstack.crowdstrike", - }) - return []string{}, nil - } - - var events []string - for _, availableStream := range availableStreams { - streamEvents, err := p.getStreamEvents(ctx, client, availableStream) - if err != nil { - _ = catcher.Error("cannot get stream events", err, map[string]any{ - "stream": availableStream, - "process": "plugin_com.utmstack.crowdstrike", - }) - continue - } - events = append(events, streamEvents...) 
- } - - return events, nil -} - -func (p *CrowdStrikeProcessor) getStreamEvents(ctx context.Context, client *client.CrowdStrikeAPISpecification, availableStream interface{}) ([]string, error) { - stream_v2, ok := availableStream.(*models.MainAvailableStreamV2) - if !ok { - return nil, catcher.Error("invalid stream type", fmt.Errorf("cannot convert to MainAvailableStreamV2"), map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) - } - - stream, err := falcon.NewStream(ctx, client, p.AppName, stream_v2, 0) - if err != nil { - return nil, catcher.Error("cannot create stream", err, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) - } - defer stream.Close() - - var events []string - timeout := time.NewTimer(30 * time.Second) - defer timeout.Stop() - - for { - select { - case err := <-stream.Errors: - if err.Fatal { - return events, catcher.Error("fatal stream error", err.Err, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) - } else { - _ = catcher.Error("stream error", err.Err, map[string]any{"process": "plugin_com.utmstack.crowdstrike"}) - } - case event := <-stream.Events: - eventJSON, err := json.Marshal(event) - if err != nil { - _ = catcher.Error("cannot marshal event", err, map[string]any{"process": "crowdstrike-plugin"}) - continue - } - events = append(events, string(eventJSON)) - - if len(events) >= 100 { - return events, nil - } - case <-timeout.C: - return events, nil - case <-ctx.Done(): - return events, nil - } - } -} diff --git a/plugins/geolocation/go.sum b/plugins/geolocation/go.sum index ad1333a84..69740a5d4 100644 --- a/plugins/geolocation/go.sum +++ b/plugins/geolocation/go.sum @@ -88,8 +88,6 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod 
h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/threatwinds/go-sdk v1.1.14 h1:9XqqGPZvDHHuJ/XkfMsDl3fe7Adfi1fMh/PpQFkUkJU= -github.com/threatwinds/go-sdk v1.1.14/go.mod h1:Kfu26gkSZDpNNkPvuQbTAW3dWIQ66pVIrNYW1YBG3Kg= github.com/threatwinds/go-sdk v1.1.15 h1:LvyaNT78y0whlq9ioR0aRKcRCxt0gX0s90X9z1fF5c4= github.com/threatwinds/go-sdk v1.1.15/go.mod h1:Kfu26gkSZDpNNkPvuQbTAW3dWIQ66pVIrNYW1YBG3Kg= github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= @@ -113,19 +111,14 @@ go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= -go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= -go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0= go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= -go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18= go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= -go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8= go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= -go.opentelemetry.io/otel/trace v1.39.0 
h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= @@ -134,43 +127,25 @@ go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/arch v0.23.0 h1:lKF64A2jF6Zd8L0knGltUnegD62JMFBiCPBmQpToHhg= golang.org/x/arch v0.23.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= -golang.org/x/arch v0.24.0 h1:qlJ3M9upxvFfwRM51tTg3Yl+8CP9vCC1E7vlFpgv99Y= -golang.org/x/arch v0.24.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= -golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= -golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU= golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU= -golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa h1:Zt3DZoOFFYkKhDT3v7Lm9FDMEV06GpzjG2jrqW+QTE0= -golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa/go.mod h1:K79w1Vqn7PoiZn+TkNpx3BUWUQksGO3JcVX6qIjytmA= golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= -golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= -golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= golang.org/x/sys v0.40.0/go.mod 
h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= -golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= -golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= -golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= google.golang.org/genproto/googleapis/api v0.0.0-20260203192932-546029d2fa20 h1:7ei4lp52gK1uSejlA8AZl5AJjeLUOHBQscRQZUgAcu0= google.golang.org/genproto/googleapis/api v0.0.0-20260203192932-546029d2fa20/go.mod h1:ZdbssH/1SOVnjnDlXzxDHK2MCidiqXtbYccJNzNYPEE= -google.golang.org/genproto/googleapis/api v0.0.0-20260223185530-2f722ef697dc h1:ULD+ToGXUIU6Pkzr1ARxdyvwfHbelw+agoFDRbLg4TU= -google.golang.org/genproto/googleapis/api v0.0.0-20260223185530-2f722ef697dc/go.mod h1:M5krXqk4GhBKvB596udGL3UyjL4I1+cTbK0orROM9ng= google.golang.org/genproto/googleapis/rpc v0.0.0-20260203192932-546029d2fa20 h1:Jr5R2J6F6qWyzINc+4AM8t5pfUz6beZpHp678GNrMbE= google.golang.org/genproto/googleapis/rpc v0.0.0-20260203192932-546029d2fa20/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260223185530-2f722ef697dc h1:51Wupg8spF+5FC6D+iMKbOddFjMckETnNnEiZ+HX37s= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260223185530-2f722ef697dc/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= -google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY= -google.golang.org/grpc v1.79.1/go.mod 
h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=