Merged

Commits (29)
1bbda29
Allow hostname and ip change in agent machines
Kbayero Feb 7, 2025
97abffa
Updating agent.proto and rebuild classes. Upgrading liquibase and pro…
c3s4rfred Feb 10, 2025
35b69a6
feat: update compliance reports data
mjabascal10 Feb 10, 2025
a210cb5
Adding update agent attributes services, vms and other used classes
c3s4rfred Feb 10, 2025
7afcb7e
fix: prevent deletion of system-owned dashboards and visualizations
mjabascal10 Feb 10, 2025
79c7f98
feat: show lock icon for private IP addresses instead of country flag
mjabascal10 Feb 10, 2025
8391d6d
Fixing pom.xml error with protoc version and regenerating grpc classe…
c3s4rfred Feb 10, 2025
49e1efb
Adding documentation to the POST method used to update agent's attrib…
c3s4rfred Feb 10, 2025
802f986
Removing unused condition.
c3s4rfred Feb 10, 2025
12b887e
fix tls insecure connection between agent and master
Kbayero Feb 11, 2025
1fc3e0d
Merge branch 'bugfix/10.5.20/update-agent-hostname' of github.com:utm…
Kbayero Feb 11, 2025
ccac933
Merge branch 'bugfix/10.5.20/add-compliance-data' into bugfix/10.5.20…
mjabascal10 Feb 11, 2025
f38062c
feat: set ip address to agent
mjabascal10 Feb 11, 2025
fd438fd
chore: enable creation of new alert when a datasource is down
mjabascal10 Feb 11, 2025
75e1e0e
Merge remote-tracking branch 'origin/bugfix/10.5.20/update-agent-host…
mjabascal10 Feb 11, 2025
cfb3a7b
Merge branch 'main' into bugfix/10.5.20/update-agent-hostname
mjabascal10 Feb 12, 2025
bdaacec
chore: enable creation of new alert when a datasource is down
mjabascal10 Feb 12, 2025
340141e
Merge remote-tracking branch 'origin/bugfix/10.5.20/update-agent-host…
mjabascal10 Feb 12, 2025
5bbbf0d
fix negative operator logic
javjodar Feb 13, 2025
f998073
Refactor and optimize geo and TI processing.
osmontero Feb 17, 2025
4f8c615
Adding SSL to grpc connections for agents and collectors. (using coll…
c3s4rfred Feb 18, 2025
d74489e
Merge branch 'bugfix/10.5.20/update-agent-hostname' of https://github…
c3s4rfred Feb 18, 2025
2644caf
Bugfix Kernel modules aren't loaded because incorrect function call
osmontero Feb 18, 2025
1a32f9f
Remove unused geoip_data volume configuration
osmontero Feb 18, 2025
b84bbc5
Merge remote-tracking branch 'origin/bugfix/10.5.20/update-agent-host…
osmontero Feb 18, 2025
b59757b
fix: improve pipeline failure message handling and formatting.
mjabascal10 Feb 18, 2025
9228cfb
Merge remote-tracking branch 'origin/main' into bugfix/10.5.20/update…
mjabascal10 Feb 18, 2025
ab26074
Merge branch 'main' into bugfix/10.5.20/update-agent-hostname
mjabascal10 Feb 18, 2025
d596434
Update CHANGELOG.md
c3s4rfred Feb 18, 2025
Files changed
9 changes: 7 additions & 2 deletions CHANGELOG.md
@@ -1,8 +1,13 @@
# UTMStack 10.5.20 Release Notes
## Bug Fixes
- Fixed the IP location component to accurately determine whether an IP address is public or private.
- Fixed communication from/to agents using secure connections.
- Fixed negative operator evaluation matching on the wrong input value due to insufficient checking in the correlation engine.
- Reorganized GeoIP database and threat intelligence loading into more modular functions for improved maintainability and code readability. Simplified caching, removed an unused database function, and restructured rule-handling logic. Addressed minor variable renames and logging adjustments for consistency.
- Removed unused Docker volume configuration for GeoIP.
- Fixed kernel modules not being loaded due to an incorrect function call.

## New Features
- Introduced new standards, sections, dashboards, and visualizations to compliance reports.
- Update ip address to agent
- Alert generation for down data sources
- Update ip address to agent.
- Alert generation for down data sources.
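
As background for the IP location change listed above, here is a minimal Go sketch of how an address can be classified as public or private so the UI can show a lock icon instead of a country flag. The helper name and the exact set of non-public ranges are assumptions for illustration, not code from this PR.

```go
package main

import (
	"fmt"
	"net"
)

// isPublicIP reports whether addr is a routable public address.
// Private (RFC 1918 / RFC 4193), loopback, link-local and unspecified
// addresses are treated as non-public. The helper name is hypothetical.
func isPublicIP(addr string) bool {
	ip := net.ParseIP(addr)
	if ip == nil {
		return false // unparsable input is never treated as public
	}
	return !(ip.IsPrivate() || ip.IsLoopback() ||
		ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast() ||
		ip.IsUnspecified())
}

func main() {
	for _, a := range []string{"192.168.1.10", "8.8.8.8", "fd00::1"} {
		fmt.Printf("%-15s public=%v\n", a, isPublicIP(a))
	}
}
```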
2 changes: 1 addition & 1 deletion backend/pom.xml
@@ -326,7 +326,7 @@
<dependency>
<groupId>com.utmstack.grpc.jclient</groupId>
<artifactId>collector-client-4j</artifactId>
<version>1.2.5</version>
<version>2.0.1</version>
</dependency>

<!-- WebSocket dependency -->
GrpcConfiguration.java
@@ -2,12 +2,15 @@

import com.park.utmstack.security.GrpcInterceptor;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.netty.GrpcSslContexts;
import io.grpc.netty.NettyChannelBuilder;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PreDestroy;
import javax.net.ssl.SSLException;

@Configuration
public class GrpcConfiguration {
@@ -20,11 +23,10 @@ public class GrpcConfiguration {
private Integer serverPort;

@Bean
public ManagedChannel managedChannel() {
this.channel = ManagedChannelBuilder.forAddress(serverAddress, serverPort)
public ManagedChannel managedChannel() throws SSLException {
this.channel = NettyChannelBuilder.forAddress(serverAddress, serverPort)
.intercept(new GrpcInterceptor())
.usePlaintext()
.enableRetry()
.sslContext(GrpcSslContexts.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build())
.build();
return this.channel;
}
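
The change above moves the backend's gRPC channel from plaintext to TLS while still trusting any server certificate (InsecureTrustManagerFactory). For the Go-based agents and collectors, the equivalent client-side setup would look roughly like the sketch below; the server address and the decision to skip certificate verification are illustrative assumptions, not code taken from the agent.

```go
package main

import (
	"crypto/tls"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials"
)

func main() {
	// TLS transport that encrypts traffic but does not verify the
	// server certificate, mirroring InsecureTrustManagerFactory in Java.
	creds := credentials.NewTLS(&tls.Config{InsecureSkipVerify: true})

	conn, err := grpc.Dial("utmstack-backend:50051", // address is illustrative
		grpc.WithTransportCredentials(creds))
	if err != nil {
		log.Fatalf("could not open gRPC channel: %v", err)
	}
	defer conn.Close()
	log.Println("TLS channel established (certificate not verified)")
}
```

In a hardened deployment the InsecureSkipVerify flag would normally be replaced by a pinned CA bundle in tls.Config.RootCAs.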
15 changes: 12 additions & 3 deletions correlation/Dockerfile
@@ -1,6 +1,6 @@
FROM ubuntu:22.04
RUN apt update
RUN apt install -y ca-certificates git
FROM ubuntu:24.04
RUN apt-get update
RUN apt-get install -y ca-certificates git wget
COPY correlation /app/
COPY docs/swagger.json /app/docs/
COPY docs/swagger.yaml /app/docs/
@@ -9,4 +9,13 @@ COPY run.sh /
RUN chmod +x /app/correlation
RUN chmod +x /run.sh
RUN update-ca-certificates
RUN wget -O /app/asn-blocks-v4.csv https://cdn.utmstack.com/geoip/asn-blocks-v4.csv
RUN wget -O /app/asn-blocks-v6.csv https://cdn.utmstack.com/geoip/asn-blocks-v6.csv
RUN wget -O /app/blocks-v4.csv https://cdn.utmstack.com/geoip/blocks-v4.csv
RUN wget -O /app/blocks-v6.csv https://cdn.utmstack.com/geoip/blocks-v6.csv
RUN wget -O /app/locations-en.csv https://cdn.utmstack.com/geoip/locations-en.csv
RUN wget -O /app/ip_blocklist.list https://intelligence.threatwinds.com/feeds/public/ip/cumulative.list
RUN wget -O /app/domain_blocklist.list https://intelligence.threatwinds.com/feeds/public/domain/cumulative.list
RUN wget -O /app/hostname_blocklist.list https://intelligence.threatwinds.com/feeds/public/hostname/cumulative.list

ENTRYPOINT [ "/run.sh" ]
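
The Dockerfile now bakes the GeoIP CSVs and ThreatWinds blocklists into the image at build time. Below is a minimal sketch of how one of those flat files could be loaded into an in-memory set for fast lookups; the loadBlocklist helper is hypothetical, and only the /app/ip_blocklist.list path comes from the Dockerfile above.

```go
package main

import (
	"bufio"
	"log"
	"os"
	"strings"
)

// loadBlocklist reads one entry per line into a set for O(1) lookups.
// The function name is hypothetical; the file path comes from the Dockerfile.
func loadBlocklist(path string) (map[string]struct{}, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	set := make(map[string]struct{})
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		line := strings.TrimSpace(sc.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue // skip blanks and comments
		}
		set[line] = struct{}{}
	}
	return set, sc.Err()
}

func main() {
	ips, err := loadBlocklist("/app/ip_blocklist.list")
	if err != nil {
		log.Fatalf("loading blocklist: %v", err)
	}
	log.Printf("loaded %d blocklisted IPs", len(ips))
}
```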
2 changes: 2 additions & 0 deletions correlation/api/newLogHandler.go
@@ -3,6 +3,7 @@ package api
import (
"encoding/json"
"fmt"
"github.com/utmstack/UTMStack/correlation/ti"
"io"
"log"
"net/http"
@@ -74,6 +75,7 @@ func NewLog(c *gin.Context) {
}

cache.AddToCache(l)
ti.Enqueue(l)
search.AddToQueue(l)
response["status"] = "queued"
c.JSON(http.StatusOK, response)
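
The handler change enqueues every log into the ti package in addition to the cache and search queues. The ti internals are not part of this diff; the sketch below only illustrates the buffered-channel enqueue/worker pattern such a package typically uses, and the buffer size, worker shape and JSON field path are assumptions.

```go
package ti

import (
	"log"

	"github.com/tidwall/gjson"
)

// queue buffers incoming log documents for asynchronous TI matching.
// The buffer size is an assumption for illustration.
var queue = make(chan string, 100000)

// Enqueue hands a raw JSON log to the TI worker without blocking the
// HTTP handler (the event is dropped if the buffer is full).
func Enqueue(doc string) {
	select {
	case queue <- doc:
	default:
		log.Println("ti: queue full, dropping document")
	}
}

// ProcessQueue would run in its own goroutine and check indicators
// (for example the source IP) against the loaded blocklists.
func ProcessQueue(blocklist map[string]struct{}) {
	for doc := range queue {
		src := gjson.Get(doc, "logx.utm.source.ip").String() // field path is illustrative
		if _, hit := blocklist[src]; hit {
			log.Printf("ti: blocklisted source IP seen: %s", src)
		}
	}
}
```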
42 changes: 21 additions & 21 deletions correlation/cache/cache.go
@@ -13,15 +13,15 @@ import (

const bufferSize int = 1000000

var cacheStorageMutex = &sync.RWMutex{}
var storageMutex = &sync.RWMutex{}

var CacheStorage []string
var storage []string

func Status() {
for {
log.Printf("Logs in cache: %v", len(CacheStorage))
if len(CacheStorage) != 0 {
est := gjson.Get(CacheStorage[0], "@timestamp").String()
log.Printf("Logs in cache: %v", len(storage))
if len(storage) != 0 {
est := gjson.Get(storage[0], "@timestamp").String()
log.Printf("Old document in cache: %s", est)
}
time.Sleep(60 * time.Second)
@@ -31,8 +31,8 @@

func Search(allOf []rules.AllOf, oneOf []rules.OneOf, seconds int64) []string {
var elements []string
cacheStorageMutex.RLock()
defer cacheStorageMutex.RUnlock()
storageMutex.RLock()
defer storageMutex.RUnlock()

cToBreak := 0
ait := time.Now().UTC().Unix() - func() int64 {
@@ -43,8 +43,8 @@ func Search(allOf []rules.AllOf, oneOf []rules.OneOf, seconds int64) []string {
return seconds
}
}()
for i := len(CacheStorage) - 1; i >= 0; i-- {
est := gjson.Get(CacheStorage[i], "@timestamp").String()
for i := len(storage) - 1; i >= 0; i-- {
est := gjson.Get(storage[i], "@timestamp").String()
eit, err := time.Parse(time.RFC3339Nano, est)
if err != nil {
log.Printf("Could not parse @timestamp: %v", err)
@@ -61,23 +61,23 @@
var allCatch bool
var oneCatch bool
for _, of := range oneOf {
oneCatch = evalElement(CacheStorage[i], of.Field, of.Operator, of.Value)
oneCatch = evalElement(storage[i], of.Field, of.Operator, of.Value)
if oneCatch {
break
}
}
for _, af := range allOf {
allCatch = evalElement(CacheStorage[i], af.Field, af.Operator, af.Value)
allCatch = evalElement(storage[i], af.Field, af.Operator, af.Value)
if !allCatch {
break
}
}
if (len(allOf) == 0 || allCatch) && (len(oneOf) == 0 || oneCatch) {
elements = append(elements, CacheStorage[i])
elements = append(elements, storage[i])
}
}
}

return elements
}

@@ -97,9 +97,9 @@ func ProcessQueue() {
go func() {
for {
l := <-logs
cacheStorageMutex.Lock()
CacheStorage = append(CacheStorage, l)
cacheStorageMutex.Unlock()
storageMutex.Lock()
storage = append(storage, l)
storageMutex.Unlock()
}
}()
}
@@ -109,11 +109,11 @@ func Clean() {
for {
var clean bool

if len(CacheStorage) > 1 {
if len(storage) > 1 {
if utils.AssignedMemory >= 80 {
clean = true
} else {
old := gjson.Get(CacheStorage[0], "@timestamp").String()
old := gjson.Get(storage[0], "@timestamp").String()
oldTime, err := time.Parse(time.RFC3339Nano, old)
if err != nil {
log.Printf("Could not parse old log timestamp. Cleaning up")
Expand All @@ -129,9 +129,9 @@ func Clean() {
}

if clean {
cacheStorageMutex.Lock()
CacheStorage = CacheStorage[1:]
cacheStorageMutex.Unlock()
storageMutex.Lock()
storage = storage[1:]
storageMutex.Unlock()
} else {
time.Sleep(5 * time.Second)
}
13 changes: 5 additions & 8 deletions correlation/cache/cache_test.go
@@ -1,11 +1,8 @@
package cache_test
package cache

import (
"testing"


"github.com/utmstack/UTMStack/correlation/cache"
"github.com/utmstack/UTMStack/correlation/rules"
"testing"
)

func TestSearch(t *testing.T) {
@@ -16,7 +13,7 @@ func TestSearch(t *testing.T) {
`{"@timestamp":"2022-01-01T00:00:03.000Z","field1":"value1","field2":"value2"}`,
`{"@timestamp":"2022-01-01T00:00:04.000Z","field1":"value1","field2":"value2"}`,
}
cache.CacheStorage = cacheStorage
storage = cacheStorage
allOf := []rules.AllOf{
{Field: "field1", Operator: "==", Value: "value1"},
}
@@ -31,7 +28,7 @@
`{"@timestamp":"2022-01-01T00:00:01.000Z","field1":"value1","field2":"value2"}`,
`{"@timestamp":"2022-01-01T00:00:00.000Z","field1":"value1","field2":"value2"}`,
}
result := cache.Search(allOf, oneOf, int64(seconds))
result := Search(allOf, oneOf, int64(seconds))
if len(result) != len(expected) {
t.Errorf("Expected %d elements, but got %d", len(expected), len(result))
}
@@ -40,4 +37,4 @@
t.Errorf("Expected %s, but got %s", expected[i], r)
}
}
}
}
79 changes: 56 additions & 23 deletions correlation/cache/operators.go
@@ -1,6 +1,7 @@
package cache

import (
"fmt"
"net"
"regexp"
"strconv"
@@ -9,17 +10,18 @@
"github.com/tidwall/gjson"
)

func inCIDR(addr, network string) bool {
func inCIDR(addr, network string) (bool, error) {
_, subnet, err := net.ParseCIDR(network)
if err == nil {
ip := net.ParseIP(addr)
if ip != nil {
if subnet.Contains(ip) {
return true
return true, nil
}
}
return false, fmt.Errorf("invalid IP address")
}
return false
return false, err
}

func equal(val1, val2 string) bool {
@@ -52,27 +54,26 @@ func endWith(str, suff string) bool {
return strings.HasSuffix(str, suff)
}

func expresion(exp, str string) bool {
func expresion(exp, str string) (bool, error) {
re, err := regexp.Compile(exp)
if err == nil {
if re.MatchString(str) {
return true
return true, nil
}
}
return false
return false, err
}

func minThan(min, may string) bool {
minN, err := strconv.ParseFloat(min, 64)
if err != nil {
return false
func parseFloats(val1, val2 string) (float64, float64, error) {
f1, err1 := strconv.ParseFloat(val1, 64)
if err1 != nil {
return 0, 0, err1
}
mayN, err := strconv.ParseFloat(may, 64)
if err != nil {
return false
f2, err2 := strconv.ParseFloat(val2, 64)
if err2 != nil {
return 0, 0, err2
}

return minN < mayN
return f1, f2, nil
}

func compare(operator, val1, val2 string) bool {
@@ -104,23 +105,55 @@ func compare(operator, val1, val2 string) bool {
case "not end with":
return !endWith(val1, val2)
case "regexp":
return expresion(val2, val1)
matched, err := expresion(val2, val1)
if err != nil {
return false
}
return matched
case "not regexp":
return !expresion(val2, val1)
matched, err := expresion(val2, val1)
if err != nil {
return false
}
return !matched
case "<":
return minThan(val1, val2)
f1, f2, err := parseFloats(val1, val2)
if err != nil {
return false
}
return f1 < f2
case ">":
return !minThan(val1, val2)
f1, f2, err := parseFloats(val1, val2)
if err != nil {
return false
}
return f1 > f2
case "<=":
return equal(val1, val2) || minThan(val1, val2)
f1, f2, err := parseFloats(val1, val2)
if err != nil {
return false
}
return f1 <= f2
case ">=":
return equal(val1, val2) || !minThan(val1, val2)
f1, f2, err := parseFloats(val1, val2)
if err != nil {
return false
}
return f1 >= f2
case "exist":
return true
case "in cidr":
return inCIDR(val1, val2)
matched, err := inCIDR(val1, val2)
if err == nil {
return matched
}
return false
case "not in cidr":
return !inCIDR(val1, val2)
matched, err := inCIDR(val1, val2)
if err == nil {
return !matched
}
return false
default:
return false
}
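
To sanity-check the reworked numeric, regexp and CIDR branches, a table-driven test in the same package could look like the sketch below. The cases are illustrative and are not part of this PR.

```go
package cache

import "testing"

// TestCompareOperators sketches how the reworked branches could be
// exercised; the cases below are assumptions for illustration.
func TestCompareOperators(t *testing.T) {
	cases := []struct {
		operator, val1, val2 string
		want                 bool
	}{
		{"<", "3", "10", true},
		{">=", "10", "10", true},
		{"<", "abc", "10", false}, // unparsable number fails closed
		{"regexp", "utmstack-01", `^utmstack-\d+$`, true},
		{"not regexp", "collector-7", `^utmstack-\d+$`, true},
		{"in cidr", "10.0.0.5", "10.0.0.0/8", true},
		{"in cidr", "8.8.8.8", "10.0.0.0/8", false},
	}
	for _, c := range cases {
		if got := compare(c.operator, c.val1, c.val2); got != c.want {
			t.Errorf("compare(%q, %q, %q) = %v, want %v",
				c.operator, c.val1, c.val2, got, c.want)
		}
	}
}
```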