Compare commits

...

8 Commits

Author SHA1 Message Date
gadotroee
8c187179b0 Adding tests to the bucket statistics (#1160) 2022-06-23 11:04:12 +03:00
lirazyehezkel
d7d802830f TRA-4274 fix ui common linter issues (#1161)
* fix lint issues

* fix all ui-common lint issues

* max warnings 0
2022-06-22 09:56:27 +03:00
AmitUp9
4d64dd4b04 TRA-4602_timeline bar charts to traffic statistics (#1159)
* pie chart for protocols and methods by requests and volume

* protocols legend

* timeline bar chart component created

* timeline can view requests and volume

* sorting the bar charts by timestamp

* disable view of <1% pieces in pie

* space added to the end of the file

* package.json update

* cr fixes

* remove space

* remove unnecessary react fragment

Co-authored-by: Liraz Yehezkel <lirazy@up9.com>
2022-06-21 14:54:56 +03:00
gadotroee
9a40895e9c Add timing endpoint (timeline data) for stats (#1157) 2022-06-21 14:33:04 +03:00
M. Mert Yıldıran
f9a9c05f48 Add ARM64 support to eBPF (#1151)
* Define and use `BPF_CFLAGS` environment variable

* Add eBPF dependencies to `builder-from-amd64-to-arm64v8` and `builder-native-base`

* Add eBPF dependencies to `builder-from-arm64v8-to-amd64`

* Only compile x86 arch of Capstone for x86 target

* Build and install `libbpf` from source

* Fix `builder-from-arm64v8-to-amd64`

* Add `BPF_TARGET` environment variable

* Fix the eBPF verifier error on ARM64

* Fix `go_crypto_tls_ex_uprobe`

* Fix the check

* #run_acceptance_tests

* Fix the build script

* Include ARM64 files

* Bring back `x86.o`

* Generate both endianness

* Fix Dockerfile

* #run_acceptance_tests

* Determine the endianness on runtime if it's possible in Go (default little-endian) #run_acceptance_tests

* Revert "Determine the endianness on runtime if it's possible in Go (default little-endian) #run_acceptance_tests"

This reverts commit a2c83c6040.

* Remove big-endian files #run_acceptance_tests

* Fix Dockerfile #run_acceptance_tests

Co-authored-by: Ubuntu <ubuntu@ip-172-31-33-233.eu-central-1.compute.internal>
2022-06-20 16:16:56 +03:00
lirazyehezkel
c9d4f88de8 TRA-4602 Traffic statistics - Pie Chart (#1153)
* pie chart for protocols and methods by requests and volume

* protocols legend

* methods legend

* pie chart component

* code refactor

* no message

Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-06-19 17:19:06 +03:00
gadotroee
6f117d0a84 Accumulative stats for protocol and methods (#1144)
only data for the pie chart currently
2022-06-19 16:59:35 +03:00
David Levanon
99cb0b4f44 Close gopacket connection immediately, basic throttling and assembler mutex removal
* close gopacket conn immediately

* increase last ack threshold to 3 seconds

* remove empty condition

* add periodic stats

* protect connections from concurrent updates

* implement basic throttling based on live streams count

* remove assembler mutex

* pr fixes

* change max conns default to 500

* create connectionId type

* fix linter
2022-06-19 16:47:03 +03:00
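The throttling and last-ACK items above amount to two per-packet gate checks applied before reassembly. A minimal, self-contained sketch of that decision logic (names like gatekeeper and shouldDrop are illustrative, not from the code; the real implementation is the isRecentlyClosed/shouldThrottle pair in the tcpAssembler diff further down, with a default of 500 max live streams and a 3-second last-ACK threshold):

package main

import (
	"fmt"
	"time"

	"github.com/hashicorp/golang-lru/simplelru"
)

// gatekeeper is an illustrative stand-in for the new tcpAssembler state:
// an LRU of recently closed connections plus a set of live connections.
type gatekeeper struct {
	lastClosed       *simplelru.LRU  // connection id -> close time (UnixMilli)
	live             map[string]bool // currently tracked live connections
	maxLiveStreams   int
	lastAckThreshold time.Duration
}

// shouldDrop mirrors the two checks described above: ignore late ACKs of
// recently closed connections, and throttle brand-new connections once the
// live-stream count exceeds the configured maximum.
func (g *gatekeeper) shouldDrop(connID string) bool {
	if closedAt, ok := g.lastClosed.Get(connID); ok &&
		time.Since(time.UnixMilli(closedAt.(int64))) < g.lastAckThreshold {
		return true
	}
	if _, live := g.live[connID]; !live && len(g.live) > g.maxLiveStreams {
		return true
	}
	return false
}

func main() {
	lru, _ := simplelru.NewLRU(1000, func(key interface{}, value interface{}) {})
	g := &gatekeeper{
		lastClosed:       lru,
		live:             map[string]bool{},
		maxLiveStreams:   500,
		lastAckThreshold: 3 * time.Second,
	}
	fmt.Println(g.shouldDrop("5.6.7.8:1234#1.2.3.4:80")) // false: nothing tracked yet
}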
60 changed files with 1461 additions and 229 deletions

View File

@@ -25,7 +25,18 @@ RUN npm run build
### Base builder image for native builds architecture
FROM golang:1.17-alpine AS builder-native-base
ENV CGO_ENABLED=1 GOOS=linux
RUN apk add --no-cache libpcap-dev g++ perl-utils curl build-base binutils-gold bash
RUN apk add --no-cache \
libpcap-dev \
g++ \
perl-utils \
curl \
build-base \
binutils-gold \
bash \
clang \
llvm \
libbpf-dev \
linux-headers
COPY devops/install-capstone.sh .
RUN ./install-capstone.sh
@@ -33,23 +44,27 @@ RUN ./install-capstone.sh
### Intermediate builder image for x86-64 to x86-64 native builds
FROM builder-native-base AS builder-from-amd64-to-amd64
ENV GOARCH=amd64
ENV BPF_TARGET=amd64 BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_x86"
### Intermediate builder image for AArch64 to AArch64 native builds
FROM builder-native-base AS builder-from-arm64v8-to-arm64v8
ENV GOARCH=arm64
ENV BPF_TARGET=arm64 BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_arm64"
### Builder image for x86-64 to AArch64 cross-compilation
FROM up9inc/linux-arm64-musl-go-libpcap-capstone AS builder-from-amd64-to-arm64v8
FROM up9inc/linux-arm64-musl-go-libpcap-capstone-bpf AS builder-from-amd64-to-arm64v8
ENV CGO_ENABLED=1 GOOS=linux
ENV GOARCH=arm64 CGO_CFLAGS="-I/work/libpcap -I/work/capstone/include"
ENV BPF_TARGET=arm64 BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_arm64 -I/usr/xcc/aarch64-linux-musl-cross/aarch64-linux-musl/include/"
### Builder image for AArch64 to x86-64 cross-compilation
FROM up9inc/linux-x86_64-musl-go-libpcap-capstone AS builder-from-arm64v8-to-amd64
FROM up9inc/linux-x86_64-musl-go-libpcap-capstone-bpf AS builder-from-arm64v8-to-amd64
ENV CGO_ENABLED=1 GOOS=linux
ENV GOARCH=amd64 CGO_CFLAGS="-I/libpcap -I/capstone/include"
ENV BPF_TARGET=amd64 BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_x86 -I/usr/local/musl/x86_64-unknown-linux-musl/include/"
### Final builder image where the build happens
@@ -88,6 +103,11 @@ ARG GIT_BRANCH
ARG BUILD_TIMESTAMP
ARG VER=0.0
WORKDIR /app/tap/tlstapper
RUN rm tlstapper_bpf*
RUN GOARCH=${BUILDARCH} go generate tls_tapper.go
WORKDIR /app/agent-build
RUN go build -ldflags="-extldflags=-static -s -w \

View File

@@ -11,16 +11,15 @@ import (
"strings"
"time"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/oas"
"github.com/up9inc/mizu/agent/pkg/servicemap"
"github.com/up9inc/mizu/agent/pkg/har"
"github.com/up9inc/mizu/agent/pkg/holder"
"github.com/up9inc/mizu/agent/pkg/providers"
"github.com/up9inc/mizu/agent/pkg/oas"
"github.com/up9inc/mizu/agent/pkg/servicemap"
"github.com/up9inc/mizu/agent/pkg/resolver"
"github.com/up9inc/mizu/agent/pkg/utils"
@@ -144,13 +143,14 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
continue
}
providers.EntryAdded(len(data))
entryInserter := dependency.GetInstance(dependency.EntriesInserter).(EntryInserter)
if err := entryInserter.Insert(mizuEntry); err != nil {
logger.Log.Errorf("Error inserting entry, err: %v", err)
}
summary := extension.Dissector.Summarize(mizuEntry)
providers.EntryAdded(len(data), summary)
serviceMapGenerator := dependency.GetInstance(dependency.ServiceMapGeneratorDependency).(servicemap.ServiceMapSink)
serviceMapGenerator.NewTCPEntry(mizuEntry.Source, mizuEntry.Destination, &item.Protocol)

View File

@@ -79,6 +79,15 @@ func GetGeneralStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetGeneralStats())
}
func GetAccumulativeStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetAccumulativeStats())
}
func GetAccumulativeStatsTiming(c *gin.Context) {
// for now hardcoded 10 bars of 5 minutes interval
c.JSON(http.StatusOK, providers.GetAccumulativeStatsTiming(300, 10))
}
func GetCurrentResolvingInformation(c *gin.Context) {
c.JSON(http.StatusOK, holder.GetResolver().GetMap())
}

View File

@@ -2,7 +2,12 @@ package providers
import (
"reflect"
"sync"
"time"
"github.com/jinzhu/copier"
"github.com/up9inc/mizu/logger"
"github.com/up9inc/mizu/tap/api"
)
type GeneralStats struct {
@@ -12,7 +17,49 @@ type GeneralStats struct {
LastEntryTimestamp int
}
var generalStats = GeneralStats{}
type BucketStats []*TimeFrameStatsValue
type TimeFrameStatsValue struct {
BucketTime time.Time `json:"timestamp"`
ProtocolStats map[string]ProtocolStats `json:"protocols"`
}
type ProtocolStats struct {
MethodsStats map[string]*SizeAndEntriesCount `json:"methods"`
Color string `json:"color"`
}
type SizeAndEntriesCount struct {
EntriesCount int `json:"entriesCount"`
VolumeInBytes int `json:"volumeInBytes"`
}
type AccumulativeStatsCounter struct {
Name string `json:"name"`
EntriesCount int `json:"entriesCount"`
VolumeSizeBytes int `json:"volumeSizeBytes"`
}
type AccumulativeStatsProtocol struct {
AccumulativeStatsCounter
Color string `json:"color"`
Methods []*AccumulativeStatsCounter `json:"methods"`
}
type AccumulativeStatsProtocolTime struct {
ProtocolsData []*AccumulativeStatsProtocol `json:"protocols"`
Time int64 `json:"timestamp"`
}
var (
generalStats = GeneralStats{}
bucketsStats = BucketStats{}
bucketStatsLocker = sync.Mutex{}
)
const (
InternalBucketThreshold = time.Minute * 1
)
func ResetGeneralStats() {
generalStats = GeneralStats{}
@@ -22,7 +69,31 @@ func GetGeneralStats() GeneralStats {
return generalStats
}
func EntryAdded(size int) {
func GetAccumulativeStats() []*AccumulativeStatsProtocol {
bucketStatsCopy := getBucketStatsCopy()
if len(bucketStatsCopy) == 0 {
return make([]*AccumulativeStatsProtocol, 0)
}
methodsPerProtocolAggregated, protocolToColor := getAggregatedStatsAllTime(bucketStatsCopy)
return convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated, protocolToColor)
}
func GetAccumulativeStatsTiming(intervalSeconds int, numberOfBars int) []*AccumulativeStatsProtocolTime {
bucketStatsCopy := getBucketStatsCopy()
if len(bucketStatsCopy) == 0 {
return make([]*AccumulativeStatsProtocolTime, 0)
}
firstBucketTime := getFirstBucketTime(time.Now().UTC(), intervalSeconds, numberOfBars)
methodsPerProtocolPerTimeAggregated, protocolToColor := getAggregatedResultTimingFromSpecificTime(intervalSeconds, bucketStatsCopy, firstBucketTime)
return convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggregated, protocolToColor)
}
func EntryAdded(size int, summery *api.BaseEntry) {
generalStats.EntriesCount++
generalStats.EntriesVolumeInGB += float64(size) / (1 << 30)
@@ -32,5 +103,186 @@ func EntryAdded(size int) {
generalStats.FirstEntryTimestamp = currentTimestamp
}
addToBucketStats(size, summery)
generalStats.LastEntryTimestamp = currentTimestamp
}
func addToBucketStats(size int, summery *api.BaseEntry) {
entryTimeBucketRounded := getBucketFromTimeStamp(summery.Timestamp)
if len(bucketsStats) == 0 {
bucketsStats = append(bucketsStats, &TimeFrameStatsValue{
BucketTime: entryTimeBucketRounded,
ProtocolStats: map[string]ProtocolStats{},
})
}
bucketOfEntry := bucketsStats[len(bucketsStats)-1]
if bucketOfEntry.BucketTime != entryTimeBucketRounded {
bucketOfEntry = &TimeFrameStatsValue{
BucketTime: entryTimeBucketRounded,
ProtocolStats: map[string]ProtocolStats{},
}
bucketsStats = append(bucketsStats, bucketOfEntry)
}
if _, found := bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation]; !found {
bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation] = ProtocolStats{
MethodsStats: map[string]*SizeAndEntriesCount{},
Color: summery.Protocol.BackgroundColor,
}
}
if _, found := bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation].MethodsStats[summery.Method]; !found {
bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation].MethodsStats[summery.Method] = &SizeAndEntriesCount{
VolumeInBytes: 0,
EntriesCount: 0,
}
}
bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation].MethodsStats[summery.Method].EntriesCount += 1
bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation].MethodsStats[summery.Method].VolumeInBytes += size
}
func getBucketFromTimeStamp(timestamp int64) time.Time {
entryTimeStampAsTime := time.UnixMilli(timestamp)
return entryTimeStampAsTime.Add(-1 * InternalBucketThreshold / 2).Round(InternalBucketThreshold)
}
func getFirstBucketTime(endTime time.Time, intervalSeconds int, numberOfBars int) time.Time {
lastBucketTime := endTime.Add(-1 * time.Second * time.Duration(intervalSeconds) / 2).Round(time.Second * time.Duration(intervalSeconds))
firstBucketTime := lastBucketTime.Add(-1 * time.Second * time.Duration(intervalSeconds*(numberOfBars-1)))
return firstBucketTime
}
func convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggregated map[time.Time]map[string]map[string]*AccumulativeStatsCounter, protocolToColor map[string]string) []*AccumulativeStatsProtocolTime {
finalResult := make([]*AccumulativeStatsProtocolTime, 0)
for timeKey, item := range methodsPerProtocolPerTimeAggregated {
protocolsData := make([]*AccumulativeStatsProtocol, 0)
for protocolName := range item {
entriesCount := 0
volumeSizeBytes := 0
methods := make([]*AccumulativeStatsCounter, 0)
for _, methodAccData := range methodsPerProtocolPerTimeAggregated[timeKey][protocolName] {
entriesCount += methodAccData.EntriesCount
volumeSizeBytes += methodAccData.VolumeSizeBytes
methods = append(methods, methodAccData)
}
protocolsData = append(protocolsData, &AccumulativeStatsProtocol{
AccumulativeStatsCounter: AccumulativeStatsCounter{
Name: protocolName,
EntriesCount: entriesCount,
VolumeSizeBytes: volumeSizeBytes,
},
Color: protocolToColor[protocolName],
Methods: methods,
})
}
finalResult = append(finalResult, &AccumulativeStatsProtocolTime{
Time: timeKey.UnixMilli(),
ProtocolsData: protocolsData,
})
}
return finalResult
}
func convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated map[string]map[string]*AccumulativeStatsCounter, protocolToColor map[string]string) []*AccumulativeStatsProtocol {
protocolsData := make([]*AccumulativeStatsProtocol, 0)
for protocolName, value := range methodsPerProtocolAggregated {
entriesCount := 0
volumeSizeBytes := 0
methods := make([]*AccumulativeStatsCounter, 0)
for _, methodAccData := range value {
entriesCount += methodAccData.EntriesCount
volumeSizeBytes += methodAccData.VolumeSizeBytes
methods = append(methods, methodAccData)
}
protocolsData = append(protocolsData, &AccumulativeStatsProtocol{
AccumulativeStatsCounter: AccumulativeStatsCounter{
Name: protocolName,
EntriesCount: entriesCount,
VolumeSizeBytes: volumeSizeBytes,
},
Color: protocolToColor[protocolName],
Methods: methods,
})
}
return protocolsData
}
func getBucketStatsCopy() BucketStats {
bucketStatsCopy := BucketStats{}
bucketStatsLocker.Lock()
if err := copier.Copy(&bucketStatsCopy, bucketsStats); err != nil {
logger.Log.Errorf("Error while copying src stats into temporary copied object")
return nil
}
bucketStatsLocker.Unlock()
return bucketStatsCopy
}
func getAggregatedResultTimingFromSpecificTime(intervalSeconds int, bucketStats BucketStats, firstBucketTime time.Time) (map[time.Time]map[string]map[string]*AccumulativeStatsCounter, map[string]string) {
protocolToColor := map[string]string{}
methodsPerProtocolPerTimeAggregated := map[time.Time]map[string]map[string]*AccumulativeStatsCounter{}
bucketStatsIndex := len(bucketStats) - 1
for bucketStatsIndex >= 0 {
currentBucketTime := bucketStats[bucketStatsIndex].BucketTime
if currentBucketTime.After(firstBucketTime) || currentBucketTime.Equal(firstBucketTime) {
resultBucketRoundedKey := currentBucketTime.Add(-1 * time.Second * time.Duration(intervalSeconds) / 2).Round(time.Second * time.Duration(intervalSeconds))
for protocolName, data := range bucketStats[bucketStatsIndex].ProtocolStats {
if _, ok := protocolToColor[protocolName]; !ok {
protocolToColor[protocolName] = data.Color
}
for methodName, dataOfMethod := range data.MethodsStats {
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey] = map[string]map[string]*AccumulativeStatsCounter{}
}
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName] = map[string]*AccumulativeStatsCounter{}
}
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName] = &AccumulativeStatsCounter{
Name: methodName,
EntriesCount: 0,
VolumeSizeBytes: 0,
}
}
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName].EntriesCount += dataOfMethod.EntriesCount
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName].VolumeSizeBytes += dataOfMethod.VolumeInBytes
}
}
}
bucketStatsIndex--
}
return methodsPerProtocolPerTimeAggregated, protocolToColor
}
func getAggregatedStatsAllTime(bucketStatsCopy BucketStats) (map[string]map[string]*AccumulativeStatsCounter, map[string]string) {
protocolToColor := make(map[string]string, 0)
methodsPerProtocolAggregated := make(map[string]map[string]*AccumulativeStatsCounter, 0)
for _, countersOfTimeFrame := range bucketStatsCopy {
for protocolName, value := range countersOfTimeFrame.ProtocolStats {
if _, ok := protocolToColor[protocolName]; !ok {
protocolToColor[protocolName] = value.Color
}
for method, countersValue := range value.MethodsStats {
if _, found := methodsPerProtocolAggregated[protocolName]; !found {
methodsPerProtocolAggregated[protocolName] = map[string]*AccumulativeStatsCounter{}
}
if _, found := methodsPerProtocolAggregated[protocolName][method]; !found {
methodsPerProtocolAggregated[protocolName][method] = &AccumulativeStatsCounter{
Name: method,
EntriesCount: 0,
VolumeSizeBytes: 0,
}
}
methodsPerProtocolAggregated[protocolName][method].EntriesCount += countersValue.EntriesCount
methodsPerProtocolAggregated[protocolName][method].VolumeSizeBytes += countersValue.VolumeInBytes
}
}
}
return methodsPerProtocolAggregated, protocolToColor
}
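A quick worked example of the bucket arithmetic above: entries are first floored into one-minute buckets (InternalBucketThreshold), and the timeline endpoint re-rounds those buckets onto the requested interval and walks back numberOfBars - 1 intervals from the last one, so the controller's hardcoded GetAccumulativeStatsTiming(300, 10) covers a 50-minute window. A minimal sketch of the same rounding, using the values from the tests below (illustrative only; the real logic is getBucketFromTimeStamp and getFirstBucketTime above):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Floor an entry timestamp to its one-minute bucket, the same trick
	// used by getBucketFromTimeStamp: shift back by half the bucket size,
	// then Round (which rounds halfway values up).
	bucket := time.Minute
	entry := time.Date(2022, 1, 1, 10, 34, 45, 0, time.UTC)
	fmt.Println(entry.Add(-bucket / 2).Round(bucket)) // 2022-01-01 10:34:00 +0000 UTC

	// First bucket of a 10-bar, 300-second timeline ending at the given time:
	// floor the end time to the interval, then step back 9 intervals.
	interval := 300 * time.Second
	end := time.Date(2022, 1, 1, 10, 34, 45, 0, time.UTC)
	last := end.Add(-interval / 2).Round(interval)
	first := last.Add(-9 * interval)
	fmt.Println(first) // 2022-01-01 09:45:00 +0000 UTC
}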

View File

@@ -0,0 +1,331 @@
package providers
import (
"fmt"
"reflect"
"testing"
"time"
)
func TestGetBucketOfTimeStamp(t *testing.T) {
tests := map[int64]time.Time{
time.Date(2022, time.Month(1), 1, 10, 34, 45, 0, time.Local).UnixMilli(): time.Date(2022, time.Month(1), 1, 10, 34, 00, 0, time.Local),
time.Date(2022, time.Month(1), 1, 10, 34, 00, 0, time.Local).UnixMilli(): time.Date(2022, time.Month(1), 1, 10, 34, 00, 0, time.Local),
time.Date(2022, time.Month(1), 1, 10, 59, 01, 0, time.Local).UnixMilli(): time.Date(2022, time.Month(1), 1, 10, 59, 00, 0, time.Local),
}
for key, value := range tests {
t.Run(fmt.Sprintf("%v", key), func(t *testing.T) {
actual := getBucketFromTimeStamp(key)
if actual != value {
t.Errorf("unexpected result - expected: %v, actual: %v", value, actual)
}
})
}
}
type DataForBucketBorderFunction struct {
EndTime time.Time
IntervalInSeconds int
NumberOfBars int
}
func TestGetBucketBorders(t *testing.T) {
tests := map[DataForBucketBorderFunction]time.Time{
DataForBucketBorderFunction{
time.Date(2022, time.Month(1), 1, 10, 34, 45, 0, time.UTC),
300,
10,
}: time.Date(2022, time.Month(1), 1, 9, 45, 0, 0, time.UTC),
DataForBucketBorderFunction{
time.Date(2022, time.Month(1), 1, 10, 35, 45, 0, time.UTC),
60,
5,
}: time.Date(2022, time.Month(1), 1, 10, 31, 00, 0, time.UTC),
}
for key, value := range tests {
t.Run(fmt.Sprintf("%v", key), func(t *testing.T) {
actual := getFirstBucketTime(key.EndTime, key.IntervalInSeconds, key.NumberOfBars)
if actual != value {
t.Errorf("unexpected result - expected: %v, actual: %v", value, actual)
}
})
}
}
func TestGetAggregatedStatsAllTime(t *testing.T) {
bucketStatsForTest := BucketStats{
&TimeFrameStatsValue{
BucketTime: time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC),
ProtocolStats: map[string]ProtocolStats{
"http": {
MethodsStats: map[string]*SizeAndEntriesCount{
"get": {
EntriesCount: 1,
VolumeInBytes: 2,
},
"post": {
EntriesCount: 2,
VolumeInBytes: 3,
},
},
},
"kafka": {
MethodsStats: map[string]*SizeAndEntriesCount{
"listTopics": {
EntriesCount: 5,
VolumeInBytes: 6,
},
},
},
},
},
&TimeFrameStatsValue{
BucketTime: time.Date(2022, time.Month(1), 1, 10, 01, 00, 0, time.UTC),
ProtocolStats: map[string]ProtocolStats{
"http": {
MethodsStats: map[string]*SizeAndEntriesCount{
"get": {
EntriesCount: 1,
VolumeInBytes: 2,
},
"post": {
EntriesCount: 2,
VolumeInBytes: 3,
},
},
},
"redis": {
MethodsStats: map[string]*SizeAndEntriesCount{
"set": {
EntriesCount: 5,
VolumeInBytes: 6,
},
},
},
},
},
}
expected := map[string]map[string]*AccumulativeStatsCounter{
"http": {
"post": {
Name: "post",
EntriesCount: 4,
VolumeSizeBytes: 6,
},
"get": {
Name: "get",
EntriesCount: 2,
VolumeSizeBytes: 4,
},
},
"kafka": {
"listTopics": {
Name: "listTopics",
EntriesCount: 5,
VolumeSizeBytes: 6,
},
},
"redis": {
"set": {
Name: "set",
EntriesCount: 5,
VolumeSizeBytes: 6,
},
},
}
actual, _ := getAggregatedStatsAllTime(bucketStatsForTest)
if !reflect.DeepEqual(actual, expected) {
t.Errorf("unexpected result - expected: %v, actual: %v", 3, len(actual))
}
}
func TestGetAggregatedStatsFromSpecificTime(t *testing.T) {
bucketStatsForTest := BucketStats{
&TimeFrameStatsValue{
BucketTime: time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC),
ProtocolStats: map[string]ProtocolStats{
"http": {
MethodsStats: map[string]*SizeAndEntriesCount{
"get": {
EntriesCount: 1,
VolumeInBytes: 2,
},
},
},
"kafka": {
MethodsStats: map[string]*SizeAndEntriesCount{
"listTopics": {
EntriesCount: 5,
VolumeInBytes: 6,
},
},
},
},
},
&TimeFrameStatsValue{
BucketTime: time.Date(2022, time.Month(1), 1, 10, 01, 00, 0, time.UTC),
ProtocolStats: map[string]ProtocolStats{
"http": {
MethodsStats: map[string]*SizeAndEntriesCount{
"get": {
EntriesCount: 1,
VolumeInBytes: 2,
},
"post": {
EntriesCount: 2,
VolumeInBytes: 3,
},
},
},
"redis": {
MethodsStats: map[string]*SizeAndEntriesCount{
"set": {
EntriesCount: 5,
VolumeInBytes: 6,
},
},
},
},
},
}
expected := map[time.Time]map[string]map[string]*AccumulativeStatsCounter{
time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC): {
"http": {
"post": {
Name: "post",
EntriesCount: 2,
VolumeSizeBytes: 3,
},
"get": {
Name: "get",
EntriesCount: 2,
VolumeSizeBytes: 4,
},
},
"kafka": {
"listTopics": {
Name: "listTopics",
EntriesCount: 5,
VolumeSizeBytes: 6,
},
},
"redis": {
"set": {
Name: "set",
EntriesCount: 5,
VolumeSizeBytes: 6,
},
},
},
}
actual, _ := getAggregatedResultTimingFromSpecificTime(300, bucketStatsForTest, time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC))
if !reflect.DeepEqual(actual, expected) {
t.Errorf("unexpected result - expected: %v, actual: %v", 3, len(actual))
}
}
func TestGetAggregatedStatsFromSpecificTimeMultipleBuckets(t *testing.T) {
bucketStatsForTest := BucketStats{
&TimeFrameStatsValue{
BucketTime: time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC),
ProtocolStats: map[string]ProtocolStats{
"http": {
MethodsStats: map[string]*SizeAndEntriesCount{
"get": {
EntriesCount: 1,
VolumeInBytes: 2,
},
},
},
"kafka": {
MethodsStats: map[string]*SizeAndEntriesCount{
"listTopics": {
EntriesCount: 5,
VolumeInBytes: 6,
},
},
},
},
},
&TimeFrameStatsValue{
BucketTime: time.Date(2022, time.Month(1), 1, 10, 01, 00, 0, time.UTC),
ProtocolStats: map[string]ProtocolStats{
"http": {
MethodsStats: map[string]*SizeAndEntriesCount{
"get": {
EntriesCount: 1,
VolumeInBytes: 2,
},
"post": {
EntriesCount: 2,
VolumeInBytes: 3,
},
},
},
"redis": {
MethodsStats: map[string]*SizeAndEntriesCount{
"set": {
EntriesCount: 5,
VolumeInBytes: 6,
},
},
},
},
},
}
expected := map[time.Time]map[string]map[string]*AccumulativeStatsCounter{
time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC): {
"http": {
"get": {
Name: "get",
EntriesCount: 1,
VolumeSizeBytes: 2,
},
},
"kafka": {
"listTopics": {
Name: "listTopics",
EntriesCount: 5,
VolumeSizeBytes: 6,
},
},
},
time.Date(2022, time.Month(1), 1, 10, 01, 00, 0, time.UTC): {
"http": {
"post": {
Name: "post",
EntriesCount: 2,
VolumeSizeBytes: 3,
},
"get": {
Name: "get",
EntriesCount: 1,
VolumeSizeBytes: 2,
},
},
"redis": {
"set": {
Name: "set",
EntriesCount: 5,
VolumeSizeBytes: 6,
},
},
},
}
actual, _ := getAggregatedResultTimingFromSpecificTime(60, bucketStatsForTest, time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC))
if !reflect.DeepEqual(actual, expected) {
t.Errorf("unexpected result - expected: %v, actual: %v", 3, len(actual))
}
}

View File

@@ -3,8 +3,10 @@ package providers_test
import (
"fmt"
"testing"
"time"
"github.com/up9inc/mizu/agent/pkg/providers"
"github.com/up9inc/mizu/tap/api"
)
func TestNoEntryAddedCount(t *testing.T) {
@@ -22,10 +24,13 @@ func TestNoEntryAddedCount(t *testing.T) {
func TestEntryAddedCount(t *testing.T) {
tests := []int{1, 5, 10, 100, 500, 1000}
entryBucketKey := time.Date(2021, 1, 1, 10, 0, 0, 0, time.UTC)
valueLessThanBucketThreshold := time.Second * 130
mockSummery := &api.BaseEntry{Protocol: api.Protocol{Name: "mock"}, Method: "mock-method", Timestamp: entryBucketKey.Add(valueLessThanBucketThreshold).UnixNano()}
for _, entriesCount := range tests {
t.Run(fmt.Sprintf("%d", entriesCount), func(t *testing.T) {
for i := 0; i < entriesCount; i++ {
providers.EntryAdded(0)
providers.EntryAdded(0, mockSummery)
}
entriesStats := providers.GetGeneralStats()
@@ -38,7 +43,14 @@ func TestEntryAddedCount(t *testing.T) {
t.Errorf("unexpected result - expected: %v, actual: %v", 0, entriesStats.EntriesVolumeInGB)
}
t.Cleanup(providers.ResetGeneralStats)
t.Cleanup(func() {
providers.ResetGeneralStats()
generalStats := providers.GetGeneralStats()
if generalStats.EntriesCount != 0 {
t.Errorf("unexpected result - expected: %v, actual: %v", 0, generalStats.EntriesCount)
}
})
})
}
}
@@ -49,12 +61,14 @@ func TestEntryAddedVolume(t *testing.T) {
var expectedEntriesCount int
var expectedVolumeInGB float64
mockSummery := &api.BaseEntry{Protocol: api.Protocol{Name: "mock"}, Method: "mock-method", Timestamp: time.Date(2021, 1, 1, 10, 0, 0, 0, time.UTC).UnixNano()}
for _, data := range tests {
t.Run(fmt.Sprintf("%d", len(data)), func(t *testing.T) {
expectedEntriesCount++
expectedVolumeInGB += float64(len(data)) / (1 << 30)
providers.EntryAdded(len(data))
providers.EntryAdded(len(data), mockSummery)
entriesStats := providers.GetGeneralStats()
@@ -67,5 +81,4 @@ func TestEntryAddedVolume(t *testing.T) {
}
})
}
}

View File

@@ -16,6 +16,8 @@ func StatusRoutes(ginApp *gin.Engine) {
routeGroup.GET("/tap", controllers.GetTappingStatus)
routeGroup.GET("/general", controllers.GetGeneralStats) // get general stats about entries in DB
routeGroup.GET("/accumulative", controllers.GetAccumulativeStats)
routeGroup.GET("/accumulativeTiming", controllers.GetAccumulativeStatsTiming)
routeGroup.GET("/resolving", controllers.GetCurrentResolvingInformation)
}
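With the routes above registered, the two new endpoints can be queried like the existing status endpoints. A hedged usage sketch in Go — the http://localhost:8899 base URL and the /status prefix are assumptions for illustration (neither appears in this diff); the response fields follow the JSON tags on AccumulativeStatsCounter and AccumulativeStatsProtocol defined earlier:

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// protocolStats mirrors the JSON tags on AccumulativeStatsCounter /
// AccumulativeStatsProtocol; nested method entries reuse the same shape.
type protocolStats struct {
	Name            string          `json:"name"`
	EntriesCount    int             `json:"entriesCount"`
	VolumeSizeBytes int             `json:"volumeSizeBytes"`
	Color           string          `json:"color"`
	Methods         []protocolStats `json:"methods"`
}

func main() {
	// Base URL and route prefix are assumptions for illustration only.
	resp, err := http.Get("http://localhost:8899/status/accumulative")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var stats []protocolStats
	if err := json.NewDecoder(resp.Body).Decode(&stats); err != nil {
		panic(err)
	}
	for _, p := range stats {
		fmt.Printf("%s: %d entries, %d bytes\n", p.Name, p.EntriesCount, p.VolumeSizeBytes)
	}
}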

View File

@@ -25,3 +25,6 @@ RUN curl https://github.com/capstone-engine/capstone/archive/4.0.2.tar.gz -Lo ./
WORKDIR /work/capstone
RUN CAPSTONE_ARCHS="aarch64" CAPSTONE_STATIC=yes ./make.sh \
&& cp /work/capstone/libcapstone.a /usr/xcc/aarch64-linux-musl-cross/lib/gcc/aarch64-linux-musl/*/
# Install eBPF related dependencies
RUN apt-get -y install clang llvm libbpf-dev

View File

@@ -0,0 +1,4 @@
#!/bin/bash
set -e
docker build . -t up9inc/linux-arm64-musl-go-libpcap-capstone-bpf && docker push up9inc/linux-arm64-musl-go-libpcap-capstone-bpf

View File

@@ -1,4 +0,0 @@
#!/bin/bash
set -e
docker build . -t up9inc/linux-arm64-musl-go-libpcap-capstone && docker push up9inc/linux-arm64-musl-go-libpcap-capstone

View File

@@ -1,5 +1,18 @@
FROM messense/rust-musl-cross:x86_64-musl AS builder-from-arm64v8-to-amd64
WORKDIR /
# Install eBPF related dependencies
RUN apt-get update
RUN apt-get -y install clang llvm libelf-dev pkg-config
# Build and install libbpf from source
RUN curl https://github.com/libbpf/libbpf/archive/refs/tags/v0.8.0.tar.gz -Lo ./libbpf.tar.gz \
&& tar -xzf libbpf.tar.gz && mv ./libbpf-* ./libbpf
WORKDIR /libbpf/src
RUN make && make install
WORKDIR /
ENV CROSS_TRIPLE x86_64-unknown-linux-musl
ENV CROSS_ROOT /usr/local/musl
@@ -12,7 +25,6 @@ ENV AS=${CROSS_ROOT}/bin/${CROSS_TRIPLE}-as \
FC=${CROSS_ROOT}/bin/${CROSS_TRIPLE}-gfortran
# Install Go
WORKDIR /
RUN curl https://go.dev/dl/go1.17.6.linux-arm64.tar.gz -Lo ./go.linux-arm64.tar.gz \
&& curl https://go.dev/dl/go1.17.6.linux-arm64.tar.gz.asc -Lo ./go.linux-arm64.tar.gz.asc \
&& curl https://dl.google.com/dl/linux/linux_signing_key.pub -Lo linux_signing_key.pub \
@@ -35,5 +47,5 @@ WORKDIR /
RUN curl https://github.com/capstone-engine/capstone/archive/4.0.2.tar.gz -Lo ./capstone.tar.gz \
&& tar -xzf capstone.tar.gz && mv ./capstone-* ./capstone
WORKDIR /capstone
RUN ./make.sh \
RUN CAPSTONE_ARCHS="x86" CAPSTONE_STATIC=yes ./make.sh \
&& cp /capstone/libcapstone.a /usr/local/musl/lib/gcc/x86_64-unknown-linux-musl/*/

View File

@@ -0,0 +1,4 @@
#!/bin/bash
set -e
docker build . -t up9inc/linux-x86_64-musl-go-libpcap-capstone-bpf && docker push up9inc/linux-x86_64-musl-go-libpcap-capstone-bpf

View File

@@ -1,4 +0,0 @@
#!/bin/bash
set -e
docker build . -t up9inc/linux-x86_64-musl-go-libpcap-capstone && docker push up9inc/linux-x86_64-musl-go-libpcap-capstone

View File

@@ -57,7 +57,7 @@ def extract_samples(f: typing.IO) -> typing.Tuple[pd.Series, pd.Series, pd.Serie
append_sample('"matchedPairs"', line, matched_samples)
append_sample('"liveTcpStreams"', line, live_samples)
append_sample('"processedBytes"', line, processed_samples)
append_sample('mem', line, heap_samples)
append_sample('heap-alloc', line, heap_samples)
append_sample('goroutines', line, goroutines_samples)
cpu_samples = pd.Series(cpu_samples)

View File

@@ -16,6 +16,8 @@ type AppStats struct {
MatchedPairs uint64 `json:"matchedPairs"`
DroppedTcpStreams uint64 `json:"droppedTcpStreams"`
LiveTcpStreams uint64 `json:"liveTcpStreams"`
IgnoredLastAckCount uint64 `json:"ignoredLastAckCount"`
ThrottledPackets uint64 `json:"throttledPackets"`
}
func (as *AppStats) IncMatchedPairs() {
@@ -39,6 +41,14 @@ func (as *AppStats) IncIgnoredPacketsCount() {
atomic.AddUint64(&as.IgnoredPacketsCount, 1)
}
func (as *AppStats) IncIgnoredLastAckCount() {
atomic.AddUint64(&as.IgnoredLastAckCount, 1)
}
func (as *AppStats) IncThrottledPackets() {
atomic.AddUint64(&as.ThrottledPackets, 1)
}
func (as *AppStats) IncReassembledTcpPayloadsCount() {
atomic.AddUint64(&as.ReassembledTcpPayloadsCount, 1)
}
@@ -74,6 +84,8 @@ func (as *AppStats) DumpStats() *AppStats {
currentAppStats.TlsConnectionsCount = resetUint64(&as.TlsConnectionsCount)
currentAppStats.MatchedPairs = resetUint64(&as.MatchedPairs)
currentAppStats.DroppedTcpStreams = resetUint64(&as.DroppedTcpStreams)
currentAppStats.IgnoredLastAckCount = resetUint64(&as.IgnoredLastAckCount)
currentAppStats.ThrottledPackets = resetUint64(&as.ThrottledPackets)
currentAppStats.LiveTcpStreams = as.LiveTcpStreams
return currentAppStats

View File

@@ -10,14 +10,11 @@ import (
)
type CleanerStats struct {
flushed int
closed int
deleted int
}
type Cleaner struct {
assembler *reassembly.Assembler
assemblerMutex *sync.Mutex
cleanPeriod time.Duration
connectionTimeout time.Duration
stats CleanerStats
@@ -28,11 +25,6 @@ type Cleaner struct {
func (cl *Cleaner) clean() {
startCleanTime := time.Now()
cl.assemblerMutex.Lock()
logger.Log.Debugf("Assembler Stats before cleaning %s", cl.assembler.Dump())
flushed, closed := cl.assembler.FlushCloseOlderThan(startCleanTime.Add(-cl.connectionTimeout))
cl.assemblerMutex.Unlock()
cl.streamsMap.Range(func(k, v interface{}) bool {
reqResMatchers := v.(api.TcpStream).GetReqResMatchers()
for _, reqResMatcher := range reqResMatchers {
@@ -47,8 +39,6 @@ func (cl *Cleaner) clean() {
cl.statsMutex.Lock()
logger.Log.Debugf("Assembler Stats after cleaning %s", cl.assembler.Dump())
cl.stats.flushed += flushed
cl.stats.closed += closed
cl.statsMutex.Unlock()
}
@@ -67,17 +57,12 @@ func (cl *Cleaner) dumpStats() CleanerStats {
cl.statsMutex.Lock()
stats := CleanerStats{
flushed: cl.stats.flushed,
closed: cl.stats.closed,
deleted: cl.stats.deleted,
}
cl.stats.flushed = 0
cl.stats.closed = 0
cl.stats.deleted = 0
cl.statsMutex.Unlock()
return stats
}

View File

@@ -45,6 +45,7 @@ var quiet = flag.Bool("quiet", false, "Be quiet regarding errors")
var hexdumppkt = flag.Bool("dumppkt", false, "Dump packet as hex")
var procfs = flag.String("procfs", "/proc", "The procfs directory, used when mapping host volumes into a container")
var ignoredPorts = flag.String("ignore-ports", "", "A comma separated list of ports to ignore")
var maxLiveStreams = flag.Int("max-live-streams", 500, "Maximum live streams to handle concurrently")
// capture
var iface = flag.String("i", "en0", "Interface to read packets from")
@@ -59,8 +60,10 @@ var tls = flag.Bool("tls", false, "Enable TLS tapper")
var memprofile = flag.String("memprofile", "", "Write memory profile")
type TapOpts struct {
HostMode bool
IgnoredPorts []uint16
HostMode bool
IgnoredPorts []uint16
maxLiveStreams int
staleConnectionTimeout time.Duration
}
var extensions []*api.Extension // global
@@ -89,7 +92,13 @@ func StartPassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem,
diagnose.StartMemoryProfiler(os.Getenv(MemoryProfilingDumpPath), os.Getenv(MemoryProfilingTimeIntervalSeconds))
}
assembler := initializePassiveTapper(opts, outputItems, streamsMap)
assembler, err := initializePassiveTapper(opts, outputItems, streamsMap)
if err != nil {
logger.Log.Errorf("Error initializing tapper %w", err)
return
}
go startPassiveTapper(streamsMap, assembler)
}
@@ -124,7 +133,7 @@ func printNewTapTargets(success bool) {
}
}
func printPeriodicStats(cleaner *Cleaner) {
func printPeriodicStats(cleaner *Cleaner, assembler *tcpAssembler) {
statsPeriod := time.Second * time.Duration(*statsevery)
ticker := time.NewTicker(statsPeriod)
@@ -162,8 +171,10 @@ func printPeriodicStats(cleaner *Cleaner) {
}
}
logger.Log.Infof(
"mem: %d, goroutines: %d, cpu: %f, cores: %d/%d, rss: %f",
"heap-alloc: %d, heap-idle: %d, heap-objects: %d, goroutines: %d, cpu: %f, cores: %d/%d, rss: %f",
memStats.HeapAlloc,
memStats.HeapIdle,
memStats.HeapObjects,
runtime.NumGoroutine(),
sysInfo.CPU,
logicalCoreCount,
@@ -172,15 +183,19 @@ func printPeriodicStats(cleaner *Cleaner) {
// Since the last print
cleanStats := cleaner.dumpStats()
assemblerStats := assembler.DumpStats()
logger.Log.Infof(
"cleaner - flushed connections: %d, closed connections: %d, deleted messages: %d",
cleanStats.flushed,
cleanStats.closed,
assemblerStats.flushedConnections,
assemblerStats.closedConnections,
cleanStats.deleted,
)
currentAppStats := diagnose.AppStats.DumpStats()
appStatsJSON, _ := json.Marshal(currentAppStats)
logger.Log.Infof("app stats - %v", string(appStatsJSON))
// At the moment
logger.Log.Infof("assembler-stats: %s, packet-source-stats: %s", assembler.Dump(), packetSourceManager.Stats())
}
}
@@ -208,7 +223,7 @@ func initializePacketSources() error {
return err
}
func initializePassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem, streamsMap api.TcpStreamMap) *tcpAssembler {
func initializePassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem, streamsMap api.TcpStreamMap) (*tcpAssembler, error) {
diagnose.InitializeErrorsMap(*debug, *verbose, *quiet)
diagnose.InitializeTapperInternalStats()
@@ -219,10 +234,10 @@ func initializePassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelI
}
opts.IgnoredPorts = append(opts.IgnoredPorts, buildIgnoredPortsList(*ignoredPorts)...)
opts.maxLiveStreams = *maxLiveStreams
opts.staleConnectionTimeout = time.Duration(*staleTimeoutSeconds) * time.Second
assembler := NewTcpAssembler(outputItems, streamsMap, opts)
return assembler
return NewTcpAssembler(outputItems, streamsMap, opts)
}
func startPassiveTapper(streamsMap api.TcpStreamMap, assembler *tcpAssembler) {
@@ -233,14 +248,13 @@ func startPassiveTapper(streamsMap api.TcpStreamMap, assembler *tcpAssembler) {
staleConnectionTimeout := time.Second * time.Duration(*staleTimeoutSeconds)
cleaner := Cleaner{
assembler: assembler.Assembler,
assemblerMutex: &assembler.assemblerMutex,
cleanPeriod: cleanPeriod,
connectionTimeout: staleConnectionTimeout,
streamsMap: streamsMap,
}
cleaner.start()
go printPeriodicStats(&cleaner)
go printPeriodicStats(&cleaner, assembler)
assembler.processPackets(*hexdumppkt, mainPacketInputChan)

View File

@@ -160,3 +160,19 @@ func (m *PacketSourceManager) Close() {
src.close()
}
}
func (m *PacketSourceManager) Stats() string {
result := ""
for _, source := range m.sources {
stats, err := source.Stats()
if err != nil {
result = result + fmt.Sprintf("[%s: err:%s]", source.String(), err)
} else {
result = result + fmt.Sprintf("[%s: rec: %d dropped: %d]", source.String(), stats.PacketsReceived, stats.PacketsDropped)
}
}
return result
}

View File

@@ -116,6 +116,10 @@ func (source *tcpPacketSource) close() {
}
}
func (source *tcpPacketSource) Stats() (stat *pcap.Stats, err error) {
return source.handle.Stats()
}
func (source *tcpPacketSource) readPackets(ipdefrag bool, packets chan<- TcpPacketInfo) {
if dbgctl.MizuTapperDisablePcap {
return

View File

@@ -2,14 +2,15 @@ package tap
import (
"encoding/hex"
"fmt"
"os"
"os/signal"
"sync"
"time"
"github.com/google/gopacket"
"github.com/google/gopacket/layers"
"github.com/google/gopacket/reassembly"
"github.com/hashicorp/golang-lru/simplelru"
"github.com/up9inc/mizu/logger"
"github.com/up9inc/mizu/tap/api"
"github.com/up9inc/mizu/tap/dbgctl"
@@ -17,14 +18,33 @@ import (
"github.com/up9inc/mizu/tap/source"
)
const PACKETS_SEEN_LOG_THRESHOLD = 1000
const (
lastClosedConnectionsMaxItems = 1000
packetsSeenLogThreshold = 1000
lastAckThreshold = time.Duration(3) * time.Second
)
type connectionId string
func NewConnectionId(c string) connectionId {
return connectionId(c)
}
type AssemblerStats struct {
flushedConnections int
closedConnections int
}
type tcpAssembler struct {
*reassembly.Assembler
streamPool *reassembly.StreamPool
streamFactory *tcpStreamFactory
assemblerMutex sync.Mutex
ignoredPorts []uint16
streamPool *reassembly.StreamPool
streamFactory *tcpStreamFactory
ignoredPorts []uint16
lastClosedConnections *simplelru.LRU // Actual type is map[string]int64 which is "connId -> lastSeen"
liveConnections map[connectionId]bool
maxLiveStreams int
staleConnectionTimeout time.Duration
stats AssemblerStats
}
// Context
@@ -38,108 +58,166 @@ func (c *context) GetCaptureInfo() gopacket.CaptureInfo {
return c.CaptureInfo
}
func NewTcpAssembler(outputItems chan *api.OutputChannelItem, streamsMap api.TcpStreamMap, opts *TapOpts) *tcpAssembler {
func NewTcpAssembler(outputItems chan *api.OutputChannelItem, streamsMap api.TcpStreamMap, opts *TapOpts) (*tcpAssembler, error) {
var emitter api.Emitter = &api.Emitting{
AppStats: &diagnose.AppStats,
OutputChannel: outputItems,
}
streamFactory := NewTcpStreamFactory(emitter, streamsMap, opts)
streamPool := reassembly.NewStreamPool(streamFactory)
assembler := reassembly.NewAssembler(streamPool)
lastClosedConnections, err := simplelru.NewLRU(lastClosedConnectionsMaxItems, func(key interface{}, value interface{}) {})
if err != nil {
return nil, err
}
a := &tcpAssembler{
ignoredPorts: opts.IgnoredPorts,
lastClosedConnections: lastClosedConnections,
liveConnections: make(map[connectionId]bool),
maxLiveStreams: opts.maxLiveStreams,
staleConnectionTimeout: opts.staleConnectionTimeout,
stats: AssemblerStats{},
}
a.streamFactory = NewTcpStreamFactory(emitter, streamsMap, opts, a)
a.streamPool = reassembly.NewStreamPool(a.streamFactory)
a.Assembler = reassembly.NewAssembler(a.streamPool)
maxBufferedPagesTotal := GetMaxBufferedPagesPerConnection()
maxBufferedPagesPerConnection := GetMaxBufferedPagesTotal()
logger.Log.Infof("Assembler options: maxBufferedPagesTotal=%d, maxBufferedPagesPerConnection=%d, opts=%v",
maxBufferedPagesTotal, maxBufferedPagesPerConnection, opts)
assembler.AssemblerOptions.MaxBufferedPagesTotal = maxBufferedPagesTotal
assembler.AssemblerOptions.MaxBufferedPagesPerConnection = maxBufferedPagesPerConnection
a.Assembler.AssemblerOptions.MaxBufferedPagesTotal = maxBufferedPagesTotal
a.Assembler.AssemblerOptions.MaxBufferedPagesPerConnection = maxBufferedPagesPerConnection
return &tcpAssembler{
Assembler: assembler,
streamPool: streamPool,
streamFactory: streamFactory,
ignoredPorts: opts.IgnoredPorts,
}
return a, nil
}
func (a *tcpAssembler) processPackets(dumpPacket bool, packets <-chan source.TcpPacketInfo) {
signalChan := make(chan os.Signal, 1)
signal.Notify(signalChan, os.Interrupt)
ticker := time.NewTicker(a.staleConnectionTimeout)
for packetInfo := range packets {
packetsCount := diagnose.AppStats.IncPacketsCount()
if packetsCount%PACKETS_SEEN_LOG_THRESHOLD == 0 {
logger.Log.Debugf("Packets seen: #%d", packetsCount)
}
packet := packetInfo.Packet
data := packet.Data()
diagnose.AppStats.UpdateProcessedBytes(uint64(len(data)))
if dumpPacket {
logger.Log.Debugf("Packet content (%d/0x%x) - %s", len(data), len(data), hex.Dump(data))
}
tcp := packet.Layer(layers.LayerTypeTCP)
if tcp != nil {
diagnose.AppStats.IncTcpPacketsCount()
tcp := tcp.(*layers.TCP)
if a.shouldIgnorePort(uint16(tcp.DstPort)) {
diagnose.AppStats.IncIgnoredPacketsCount()
} else {
c := context{
CaptureInfo: packet.Metadata().CaptureInfo,
Origin: packetInfo.Source.Origin,
}
diagnose.InternalStats.Totalsz += len(tcp.Payload)
if !dbgctl.MizuTapperDisableTcpReassembly {
a.assemblerMutex.Lock()
a.AssembleWithContext(packet.NetworkLayer().NetworkFlow(), tcp, &c)
a.assemblerMutex.Unlock()
}
}
}
done := *maxcount > 0 && int64(diagnose.AppStats.PacketsCount) >= *maxcount
if done {
errorMapLen, _ := diagnose.TapErrors.GetErrorsSummary()
logger.Log.Infof("Processed %v packets (%v bytes) in %v (errors: %v, errTypes:%v)",
diagnose.AppStats.PacketsCount,
diagnose.AppStats.ProcessedBytes,
time.Since(diagnose.AppStats.StartTime),
diagnose.TapErrors.ErrorsCount,
errorMapLen)
}
out:
for {
select {
case packetInfo, ok := <-packets:
if !ok {
break out
}
if a.processPacket(packetInfo, dumpPacket) {
break out
}
case <-signalChan:
logger.Log.Infof("Caught SIGINT: aborting")
done = true
default:
// NOP: continue
}
if done {
break
break out
case <-ticker.C:
a.periodicClean()
}
}
a.assemblerMutex.Lock()
closed := a.FlushAll()
a.assemblerMutex.Unlock()
logger.Log.Debugf("Final flush: %d closed", closed)
}
func (a *tcpAssembler) processPacket(packetInfo source.TcpPacketInfo, dumpPacket bool) bool {
packetsCount := diagnose.AppStats.IncPacketsCount()
if packetsCount%packetsSeenLogThreshold == 0 {
logger.Log.Debugf("Packets seen: #%d", packetsCount)
}
packet := packetInfo.Packet
data := packet.Data()
diagnose.AppStats.UpdateProcessedBytes(uint64(len(data)))
if dumpPacket {
logger.Log.Debugf("Packet content (%d/0x%x) - %s", len(data), len(data), hex.Dump(data))
}
tcp := packet.Layer(layers.LayerTypeTCP)
if tcp != nil {
a.processTcpPacket(packetInfo.Source.Origin, packet, tcp.(*layers.TCP))
}
done := *maxcount > 0 && int64(diagnose.AppStats.PacketsCount) >= *maxcount
if done {
errorMapLen, _ := diagnose.TapErrors.GetErrorsSummary()
logger.Log.Infof("Processed %v packets (%v bytes) in %v (errors: %v, errTypes:%v)",
diagnose.AppStats.PacketsCount,
diagnose.AppStats.ProcessedBytes,
time.Since(diagnose.AppStats.StartTime),
diagnose.TapErrors.ErrorsCount,
errorMapLen)
}
return done
}
func (a *tcpAssembler) processTcpPacket(origin api.Capture, packet gopacket.Packet, tcp *layers.TCP) {
diagnose.AppStats.IncTcpPacketsCount()
if a.shouldIgnorePort(uint16(tcp.DstPort)) || a.shouldIgnorePort(uint16(tcp.SrcPort)) {
diagnose.AppStats.IncIgnoredPacketsCount()
return
}
id := getConnectionId(packet.NetworkLayer().NetworkFlow().Src().String(),
packet.TransportLayer().TransportFlow().Src().String(),
packet.NetworkLayer().NetworkFlow().Dst().String(),
packet.TransportLayer().TransportFlow().Dst().String())
if a.isRecentlyClosed(id) {
diagnose.AppStats.IncIgnoredLastAckCount()
return
}
if a.shouldThrottle(id) {
diagnose.AppStats.IncThrottledPackets()
return
}
c := context{
CaptureInfo: packet.Metadata().CaptureInfo,
Origin: origin,
}
diagnose.InternalStats.Totalsz += len(tcp.Payload)
if !dbgctl.MizuTapperDisableTcpReassembly {
a.AssembleWithContext(packet.NetworkLayer().NetworkFlow(), tcp, &c)
}
}
func (a *tcpAssembler) tcpStreamCreated(stream *tcpStream) {
a.liveConnections[stream.connectionId] = true
}
func (a *tcpAssembler) tcpStreamClosed(stream *tcpStream) {
a.lastClosedConnections.Add(stream.connectionId, time.Now().UnixMilli())
delete(a.liveConnections, stream.connectionId)
}
func (a *tcpAssembler) isRecentlyClosed(c connectionId) bool {
if closedTimeMillis, ok := a.lastClosedConnections.Get(c); ok {
timeSinceClosed := time.Since(time.UnixMilli(closedTimeMillis.(int64)))
if timeSinceClosed < lastAckThreshold {
return true
}
}
return false
}
func (a *tcpAssembler) shouldThrottle(c connectionId) bool {
if _, ok := a.liveConnections[c]; ok {
return false
}
return len(a.liveConnections) > a.maxLiveStreams
}
func (a *tcpAssembler) dumpStreamPool() {
a.streamPool.Dump()
}
func (a *tcpAssembler) waitAndDump() {
a.streamFactory.WaitGoRoutines()
a.assemblerMutex.Lock()
logger.Log.Debugf("%s", a.Dump())
a.assemblerMutex.Unlock()
}
func (a *tcpAssembler) shouldIgnorePort(port uint16) bool {
@@ -151,3 +229,26 @@ func (a *tcpAssembler) shouldIgnorePort(port uint16) bool {
return false
}
func (a *tcpAssembler) periodicClean() {
flushed, closed := a.FlushCloseOlderThan(time.Now().Add(-a.staleConnectionTimeout))
stats := a.stats
stats.closedConnections += closed
stats.flushedConnections += flushed
}
func (a *tcpAssembler) DumpStats() AssemblerStats {
result := a.stats
a.stats = AssemblerStats{}
return result
}
func getConnectionId(saddr string, sport string, daddr string, dport string) connectionId {
s := fmt.Sprintf("%s:%s", saddr, sport)
d := fmt.Sprintf("%s:%s", daddr, dport)
if s > d {
return NewConnectionId(fmt.Sprintf("%s#%s", s, d))
} else {
return NewConnectionId(fmt.Sprintf("%s#%s", d, s))
}
}
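The connection id above is deliberately direction-agnostic: the lexicographically larger "addr:port" endpoint always comes first, so a packet and its reply map to the same key in liveConnections and lastClosedConnections. A small self-contained illustration (connectionKey is a stand-in re-implementation of getConnectionId from the diff above):

package main

import "fmt"

// connectionKey orders the two "addr:port" endpoints so that both
// directions of the same TCP conversation produce one canonical key.
func connectionKey(saddr, sport, daddr, dport string) string {
	s := fmt.Sprintf("%s:%s", saddr, sport)
	d := fmt.Sprintf("%s:%s", daddr, dport)
	if s > d {
		return fmt.Sprintf("%s#%s", s, d)
	}
	return fmt.Sprintf("%s#%s", d, s)
}

func main() {
	a := connectionKey("1.2.3.4", "80", "5.6.7.8", "1234")
	b := connectionKey("5.6.7.8", "1234", "1.2.3.4", "80")
	fmt.Println(a == b, a) // true 5.6.7.8:1234#1.2.3.4:80
}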

View File

@@ -151,6 +151,6 @@ func (t *tcpReassemblyStream) ReassemblyComplete(ac reassembly.AssemblerContext)
if t.tcpStream.GetIsTapTarget() && !t.tcpStream.GetIsClosed() {
t.tcpStream.close()
}
// do not remove the connection to allow last ACK
return false
return true
}

View File

@@ -8,6 +8,11 @@ import (
"github.com/up9inc/mizu/tap/dbgctl"
)
type tcpStreamCallbacks interface {
tcpStreamCreated(stream *tcpStream)
tcpStreamClosed(stream *tcpStream)
}
/* It's a connection (bidirectional)
* Implements gopacket.reassembly.Stream interface (Accept, ReassembledSG, ReassemblyComplete)
* ReassembledSG gets called when new reassembled data is ready (i.e. bytes in order, no duplicates, complete)
@@ -25,16 +30,25 @@ type tcpStream struct {
reqResMatchers []api.RequestResponseMatcher
createdAt time.Time
streamsMap api.TcpStreamMap
connectionId connectionId
callbacks tcpStreamCallbacks
sync.Mutex
}
func NewTcpStream(isTapTarget bool, streamsMap api.TcpStreamMap, capture api.Capture) *tcpStream {
return &tcpStream{
isTapTarget: isTapTarget,
streamsMap: streamsMap,
origin: capture,
createdAt: time.Now(),
func NewTcpStream(isTapTarget bool, streamsMap api.TcpStreamMap, capture api.Capture,
connectionId connectionId, callbacks tcpStreamCallbacks) *tcpStream {
t := &tcpStream{
isTapTarget: isTapTarget,
streamsMap: streamsMap,
origin: capture,
createdAt: time.Now(),
connectionId: connectionId,
callbacks: callbacks,
}
t.callbacks.tcpStreamCreated(t)
return t
}
func (t *tcpStream) getId() int64 {
@@ -56,9 +70,9 @@ func (t *tcpStream) close() {
t.isClosed = true
t.streamsMap.Delete(t.id)
t.client.close()
t.server.close()
t.callbacks.tcpStreamClosed(t)
}
func (t *tcpStream) addCounterPair(counterPair *api.CounterPair) {

View File

@@ -19,14 +19,15 @@ import (
* Generates a new tcp stream for each new tcp connection. Closes the stream when the connection closes.
*/
type tcpStreamFactory struct {
wg sync.WaitGroup
emitter api.Emitter
streamsMap api.TcpStreamMap
ownIps []string
opts *TapOpts
wg sync.WaitGroup
emitter api.Emitter
streamsMap api.TcpStreamMap
ownIps []string
opts *TapOpts
streamsCallbacks tcpStreamCallbacks
}
func NewTcpStreamFactory(emitter api.Emitter, streamsMap api.TcpStreamMap, opts *TapOpts) *tcpStreamFactory {
func NewTcpStreamFactory(emitter api.Emitter, streamsMap api.TcpStreamMap, opts *TapOpts, streamsCallbacks tcpStreamCallbacks) *tcpStreamFactory {
var ownIps []string
if localhostIPs, err := getLocalhostIPs(); err != nil {
@@ -39,10 +40,11 @@ func NewTcpStreamFactory(emitter api.Emitter, streamsMap api.TcpStreamMap, opts
}
return &tcpStreamFactory{
emitter: emitter,
streamsMap: streamsMap,
ownIps: ownIps,
opts: opts,
emitter: emitter,
streamsMap: streamsMap,
ownIps: ownIps,
opts: opts,
streamsCallbacks: streamsCallbacks,
}
}
@@ -57,7 +59,8 @@ func (factory *tcpStreamFactory) New(net, transport gopacket.Flow, tcpLayer *lay
props := factory.getStreamProps(srcIp, srcPort, dstIp, dstPort)
isTapTarget := props.isTapTarget
stream := NewTcpStream(isTapTarget, factory.streamsMap, getPacketOrigin(ac))
connectionId := getConnectionId(srcIp, srcPort, dstIp, dstPort)
stream := NewTcpStream(isTapTarget, factory.streamsMap, getPacketOrigin(ac), connectionId, factory.streamsCallbacks)
reassemblyStream := NewTcpReassemblyStream(fmt.Sprintf("%s:%s", net, transport), tcpLayer, fsmOptions, stream)
if stream.GetIsTapTarget() {
stream.setId(factory.streamsMap.NextId())

View File

@@ -6,17 +6,22 @@ MIZU_HOME=$(realpath ../../../)
docker build -t mizu-ebpf-builder . || exit 1
BPF_TARGET=amd64
BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_x86"
ARCH=$(uname -m)
if [[ $ARCH == "aarch64" ]]; then
BPF_TARGET=arm64
BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_arm64"
fi
docker run --rm \
--name mizu-ebpf-builder \
-v $MIZU_HOME:/mizu \
-v $(go env GOPATH):/root/go \
-it mizu-ebpf-builder \
sh -c "
go generate tap/tlstapper/tls_tapper.go
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpfeb.go
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpfeb.o
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpfel.go
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpfel.o
BPF_TARGET=\"$BPF_TARGET\" BPF_CFLAGS=\"$BPF_CFLAGS\" go generate tap/tlstapper/tls_tapper.go
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpf*
" || exit 1
popd

View File

@@ -80,10 +80,6 @@ static __always_inline void send_chunk(struct pt_regs *ctx, __u8* buffer, __u64
}
static __always_inline void output_ssl_chunk(struct pt_regs *ctx, struct ssl_info* info, int count_bytes, __u64 id, __u32 flags) {
if (count_bytes <= 0) {
return;
}
if (count_bytes > (CHUNK_SIZE * MAX_CHUNKS_PER_OPERATION)) {
log_error(ctx, LOG_ERROR_BUFFER_TOO_BIG, id, count_bytes, 0l);
return;

View File

@@ -128,6 +128,15 @@ static __always_inline void go_crypto_tls_ex_uprobe(struct pt_regs *ctx, struct
return;
}
// In case of read, the length is determined on return
if (flags == FLAGS_IS_READ_BIT) {
info.buffer_len = GO_ABI_INTERNAL_PT_REGS_R1(ctx); // n in return n, nil
// This check achieves ignoring 0 length reads (the reads result with an error)
if (info.buffer_len <= 0) {
return;
}
}
output_ssl_chunk(ctx, &info, info.buffer_len, pid_tgid, flags);
return;

View File

@@ -101,6 +101,9 @@ static __always_inline void ssl_uretprobe(struct pt_regs *ctx, struct bpf_map_de
}
int count_bytes = get_count_bytes(ctx, &info, id);
if (count_bytes <= 0) {
return;
}
output_ssl_chunk(ctx, &info, count_bytes, id, flags);
}

View File

@@ -113,7 +113,11 @@ func getOffsets(filePath string) (offsets map[string]*goExtendedOffset, err erro
return
}
syms, err := se.Symbols()
var syms []elf.Symbol
syms, err = se.Symbols()
if err != nil {
return
}
for _, sym := range syms {
offset := sym.Value
@@ -158,7 +162,7 @@ func getOffsets(filePath string) (offsets map[string]*goExtendedOffset, err erro
}
symBytes := textSectionData[symStartingIndex:symEndingIndex]
// disasemble the symbol
// disassemble the symbol
var instructions []gapstone.Instruction
instructions, err = engine.Disasm(symBytes, sym.Value, 0)
if err != nil {

View File

@@ -34,7 +34,7 @@ type tlsPoller struct {
extension *api.Extension
procfs string
pidToNamespace sync.Map
fdCache *simplelru.LRU // Actual typs is map[string]addressPair
fdCache *simplelru.LRU // Actual type is map[string]addressPair
evictedCounter int
}

View File

@@ -12,7 +12,7 @@ import (
const GLOABL_TAP_PID = 0
//go:generate go run github.com/cilium/ebpf/cmd/bpf2go@0d0727ef53e2f53b1731c73f4c61e0f58693083a -type tls_chunk tlsTapper bpf/tls_tapper.c -- -O2 -g -D__TARGET_ARCH_x86
//go:generate go run github.com/cilium/ebpf/cmd/bpf2go@0d0727ef53e2f53b1731c73f4c61e0f58693083a -target $BPF_TARGET -cflags $BPF_CFLAGS -type tls_chunk tlsTapper bpf/tls_tapper.c
type TlsTapper struct {
bpfObjects tlsTapperObjects
@@ -161,14 +161,14 @@ func setupRLimit() error {
}
func (t *TlsTapper) tapSsllibPid(pid uint32, sslLibrary string, namespace string) error {
logger.Log.Infof("Tapping TLS (pid: %v) (sslLibrary: %v)", pid, sslLibrary)
newSsl := sslHooks{}
if err := newSsl.installUprobes(&t.bpfObjects, sslLibrary); err != nil {
return err
}
logger.Log.Infof("Tapping TLS (pid: %v) (sslLibrary: %v)", pid, sslLibrary)
t.sslHooksStructs = append(t.sslHooksStructs, newSsl)
t.poller.addPid(pid, namespace)

Binary file not shown.

Binary file not shown.

View File

@@ -1,6 +1,6 @@
// Code generated by bpf2go; DO NOT EDIT.
//go:build arm64be || armbe || mips || mips64 || mips64p32 || ppc64 || s390 || s390x || sparc || sparc64
// +build arm64be armbe mips mips64 mips64p32 ppc64 s390 s390x sparc sparc64
//go:build arm64
// +build arm64
package tlstapper
@@ -208,5 +208,5 @@ func _TlsTapperClose(closers ...io.Closer) error {
}
// Do not access this directly.
//go:embed tlstapper_bpfeb.o
//go:embed tlstapper_bpfel_arm64.o
var _TlsTapperBytes []byte

Binary file not shown.

View File

@@ -1,6 +1,6 @@
// Code generated by bpf2go; DO NOT EDIT.
//go:build 386 || amd64 || amd64p32 || arm || arm64 || mips64le || mips64p32le || mipsle || ppc64le || riscv64
// +build 386 amd64 amd64p32 arm arm64 mips64le mips64p32le mipsle ppc64le riscv64
//go:build 386 || amd64
// +build 386 amd64
package tlstapper
@@ -208,5 +208,5 @@ func _TlsTapperClose(closers ...io.Closer) error {
}
// Do not access this directly.
//go:embed tlstapper_bpfel.o
//go:embed tlstapper_bpfel_x86.o
var _TlsTapperBytes []byte

Binary file not shown.

View File

@@ -20,7 +20,7 @@
"test:lint": "eslint .",
"predeploy": "cd example && npm install && npm run build",
"deploy": "gh-pages -d example/build",
"eslint": "eslint . --ext .js,.jsx,.ts,.tsx"
"eslint": "eslint . --ext .js,.jsx,.ts,.tsx --max-warnings=0"
},
"peerDependencies": {
"@craco/craco": "^6.4.3",
@@ -60,6 +60,7 @@
"react-scrollable-feed-virtualized": "^1.4.9",
"react-syntax-highlighter": "^15.5.0",
"react-toastify": "^8.2.0",
"recharts": "^2.1.10",
"redoc": "^2.0.0-rc.71",
"styled-components": "^5.3.5",
"web-vitals": "^2.1.4",

View File

@@ -109,7 +109,7 @@ export const EntriesList: React.FC<EntriesListProps> = ({
if (scrollTo) {
scrollableRef.current.scrollToIndex(data.data.length - 1);
}
}, [setLoadMoreTop, setIsLoadingTop, entries, setEntries, query, setNoMoreDataTop, leftOffTop, setLeftOffTop, setQueriedTotal, setTruncatedTimestamp, scrollableRef]);
}, [setLoadMoreTop, setIsLoadingTop, entries, setEntries, query, setNoMoreDataTop, leftOffTop, setLeftOffTop, setQueriedTotal, setTruncatedTimestamp, scrollableRef, trafficViewerApi]);
useEffect(() => {
if (!isWsConnectionClosed || !loadMoreTop || noMoreDataTop) return;
@@ -121,7 +121,7 @@ export const EntriesList: React.FC<EntriesListProps> = ({
useEffect(() => {
if (!focusedEntryId && entries.length > 0)
setFocusedEntryId(entries[0].id);
}, [focusedEntryId, entries])
}, [focusedEntryId, entries, setFocusedEntryId])
useEffect(() => {
const newEntries = [...entries];
@@ -131,7 +131,7 @@ export const EntriesList: React.FC<EntriesListProps> = ({
setNoMoreDataTop(false);
setEntries(newEntries);
}
}, [entries])
}, [entries, setLeftOffTop, setNoMoreDataTop, setEntries])
if(ws.current && !ws.current.onmessage) {
ws.current.onmessage = (e) => {

View File

@@ -15,7 +15,6 @@ import outgoingIconFailure from "assets/outgoing-traffic-failure.svg"
import outgoingIconNeutral from "assets/outgoing-traffic-neutral.svg"
import {useRecoilState} from "recoil";
import focusedEntryIdAtom from "../../recoil/focusedEntryId";
import queryAtom from "../../recoil/query";
interface TCPInterface {
ip: string
@@ -66,7 +65,6 @@ enum CaptureTypes {
export const EntryItem: React.FC<EntryProps> = ({entry, style, headingMode, namespace}) => {
const [focusedEntryId, setFocusedEntryId] = useRecoilState(focusedEntryIdAtom);
const [queryState, setQuery] = useRecoilState(queryAtom);
const isSelected = focusedEntryId === entry.id;
const classification = getClassification(entry.status)

View File

@@ -1,4 +1,4 @@
import React, { useEffect, useRef, useState } from "react";
import React, {useCallback, useEffect, useRef, useState} from "react";
import { Filters } from "../Filters/Filters";
import { EntriesList } from "../EntriesList/EntriesList";
import makeStyles from '@mui/styles/makeStyles';
@@ -59,7 +59,6 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
}) => {
const classes = useLayoutStyles();
const setEntries = useSetRecoilState(entriesAtom);
const setFocusedEntryId = useSetRecoilState(focusedEntryIdAtom);
const query = useRecoilValue(queryAtom);
@@ -68,40 +67,44 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
const [noMoreDataTop, setNoMoreDataTop] = useState(false);
const [isSnappedToBottom, setIsSnappedToBottom] = useState(true);
const [wsReadyState, setWsReadyState] = useState(0);
const setLeftOffTop = useSetRecoilState(leftOffTopAtom);
const scrollableRef = useRef(null);
const ws = useRef(null);
const closeWebSocket = useCallback(() => {
if (ws?.current?.readyState === WebSocket.OPEN) {
ws.current.close();
return true;
}
}, [])
useEffect(() => {
if(shouldCloseWebSocket){
closeWebSocket()
setShouldCloseWebSocket(false);
}
}, [shouldCloseWebSocket])
}, [shouldCloseWebSocket, setShouldCloseWebSocket, closeWebSocket])
useEffect(() => {
reopenConnection()
}, [webSocketUrl])
const ws = useRef(null);
const openEmptyWebSocket = () => {
openWebSocket(DEFAULT_LEFTOFF, query, true, DEFAULT_FETCH, DEFAULT_FETCH_TIMEOUT_MS);
}
const closeWebSocket = () => {
if (ws?.current?.readyState === WebSocket.OPEN) {
ws.current.close();
return true;
}
}
const sendQueryWhenWsOpen = useCallback((leftOff: string, query: string, fetch: number, fetchTimeoutMs: number) => {
setTimeout(() => {
if (ws?.current?.readyState === WebSocket.OPEN) {
ws.current.send(JSON.stringify({
"leftOff": leftOff,
"query": query,
"enableFullEntries": false,
"fetch": fetch,
"timeoutMs": fetchTimeoutMs
}));
} else {
sendQueryWhenWsOpen(leftOff, query, fetch, fetchTimeoutMs);
}
}, 500)
}, [])
const listEntry = useRef(null);
const openWebSocket = (leftOff: string, query: string, resetEntries: boolean, fetch: number, fetchTimeoutMs: number) => {
const openWebSocket = useCallback((leftOff: string, query: string, resetEntries: boolean, fetch: number, fetchTimeoutMs: number) => {
if (resetEntries) {
setFocusedEntryId(null);
setEntries([]);
@@ -127,24 +130,11 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
}
} catch (e) {
}
}
const sendQueryWhenWsOpen = (leftOff: string, query: string, fetch: number, fetchTimeoutMs: number) => {
setTimeout(() => {
if (ws?.current?.readyState === WebSocket.OPEN) {
ws.current.send(JSON.stringify({
"leftOff": leftOff,
"query": query,
"enableFullEntries": false,
"fetch": fetch,
"timeoutMs": fetchTimeoutMs
}));
} else {
sendQueryWhenWsOpen(leftOff, query, fetch, fetchTimeoutMs);
}
}, 500)
}
}, [setFocusedEntryId, setEntries, setLeftOffTop, setNoMoreDataTop, ws, sendQueryWhenWsOpen, webSocketUrl])
const openEmptyWebSocket = useCallback(() => {
openWebSocket(DEFAULT_LEFTOFF, query, true, DEFAULT_FETCH, DEFAULT_FETCH_TIMEOUT_MS);
}, [openWebSocket, query])
useEffect(() => {
setTrafficViewerApiState({...trafficViewerApiProp, webSocket: {close: closeWebSocket}});
@@ -156,7 +146,7 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
console.error(error);
}
})()
}, []);
}, [trafficViewerApiProp, closeWebSocket, setTappingStatus, setTrafficViewerApiState]);
const toggleConnection = () => {
if (!closeWebSocket()) {
@@ -166,12 +156,17 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
}
}
const reopenConnection = async () => {
const reopenConnection = useCallback(async () => {
closeWebSocket()
openEmptyWebSocket();
scrollableRef.current.jumpToBottom();
setIsSnappedToBottom(true);
}
}, [scrollableRef, setIsSnappedToBottom, closeWebSocket, openEmptyWebSocket])
useEffect(() => {
reopenConnection()
// eslint-disable-next-line
}, [webSocketUrl])
useEffect(() => {
return () => {

View File

@@ -11,7 +11,7 @@ interface LinkProps {
}
export const Link: React.FC<LinkProps> = ({ link, className, title, children }) => {
return <a href={link} className={className} title={title} target="_blank">
return <a href={link} className={className} title={title} target="_blank" rel="noreferrer">
{children}
</a>
}
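The change above adds rel="noreferrer" to a link rendered with target="_blank", which (together with the implied noopener) prevents the opened page from reaching back into this app via window.opener. A minimal usage sketch of the component, assuming it is imported from this file; the URL is purely illustrative:

import React from "react";
import { Link } from "./Link"; // assumed relative path

// Opens in a new tab without leaking a window.opener reference.
export const DocsLink: React.FC = () => (
  <Link link="https://example.com/docs" title="Documentation" className="docs-link">
    Read the docs
  </Link>
);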

View File

@@ -66,7 +66,7 @@ const SelectList: React.FC<Props> = ({ items, tableName, checkedValues = [], mul
}
setCheckedValues(newChecked)
}, [searchValue, checkedValues, filteredValuesKeys])
}, [checkedValues, filteredValuesKeys, items, setCheckedValues])
const dataFieldFunc = (listValue) => listValue.component ? listValue.component :
<span className={styles.nowrap} title={listValue.value}>

View File

@@ -197,14 +197,14 @@ export const ServiceMapModal: React.FC<ServiceMapModalProps> = ({ isOpen, onClos
<Fade in={isOpen}>
<Box sx={modalStyle}>
<div className={styles.closeIcon}>
<img src={closeIcon} alt="close" onClick={() => onClose()} style={{ cursor: "pointer", userSelect: "none" }}></img>
<img src={closeIcon} alt="close" onClick={() => onClose()} style={{ cursor: "pointer", userSelect: "none" }}/>
</div>
<div className={styles.headerContainer}>
<div className={styles.headerSection}>
<span className={styles.title}>Services</span>
<Button size="medium"
variant="contained"
startIcon={<img src={isFilterClicked ? filterIconClicked : filterIcon} className="custom" alt="refresh" style={{ height: "26px", width: "26px" }}></img>}
startIcon={<img src={isFilterClicked ? filterIconClicked : filterIcon} className="custom" alt="refresh" style={{ height: "26px", width: "26px" }}/>}
className={commonClasses.outlinedButton + " " + commonClasses.imagedButton + ` ${isFilterClicked ? commonClasses.clickedButton : ""}`}
onClick={() => setIsFilterClicked(prevState => !prevState)}
style={{ textTransform: 'unset' }}>

View File

@@ -0,0 +1,4 @@
.barChartContainer
width: 100%
display: flex
justify-content: center

View File

@@ -0,0 +1,83 @@
import styles from "./TimelineBarChart.module.sass";
import { StatsMode } from "../TrafficStatsModal"
import React, { useCallback, useEffect, useMemo, useState } from "react";
import {
BarChart,
Bar,
XAxis,
YAxis,
Tooltip,
Legend
} from "recharts";
import { Utils } from "../../../../helpers/Utils";
interface TimelineBarChartProps {
timeLineBarChartMode: string;
data: any;
}
export const TimelineBarChart: React.FC<TimelineBarChartProps> = ({ timeLineBarChartMode, data }) => {
const [protocolStats, setProtocolStats] = useState([]);
const [protocolsNamesAndColors, setProtocolsNamesAndColors] = useState([]);
const padTo2Digits = useCallback((num) => {
return String(num).padStart(2, '0');
}, [])
const getHoursAndMinutes = useCallback((protocolTimeKey) => {
const time = new Date(protocolTimeKey)
const hoursAndMinutes = padTo2Digits(time.getHours()) + ':' + padTo2Digits(time.getMinutes());
return hoursAndMinutes;
}, [padTo2Digits])
const createUniqueObjArray = useCallback((objArray) => {
return [
...new Map(objArray.map((item) => [item["name"], item])).values(),
];
}, [])
useEffect(() => {
if (!data) return;
const protocolsBarsData = [];
const prtcNames = [];
data.forEach(protocolObj => {
let obj: { [k: string]: any } = {};
obj.timestamp = getHoursAndMinutes(protocolObj.timestamp);
protocolObj.protocols.forEach(protocol => {
obj[`${protocol.name}`] = protocol[StatsMode[timeLineBarChartMode]];
prtcNames.push({ name: protocol.name, color: protocol.color });
})
protocolsBarsData.push(obj);
})
const uniqueObjArray = createUniqueObjArray(prtcNames);
protocolsBarsData.sort((a, b) => a.timestamp < b.timestamp ? -1 : 1);
setProtocolStats(protocolsBarsData);
setProtocolsNamesAndColors(uniqueObjArray);
}, [data, timeLineBarChartMode, setProtocolStats, setProtocolsNamesAndColors, createUniqueObjArray, getHoursAndMinutes])
const bars = useMemo(() => protocolsNamesAndColors.map((protocolToDisplay) => {
return <Bar key={protocolToDisplay.name} dataKey={protocolToDisplay.name} stackId="a" fill={protocolToDisplay.color} />
}), [protocolsNamesAndColors])
return (
<div className={styles.barChartContainer}>
<BarChart
width={730}
height={250}
data={protocolStats}
margin={{
top: 20,
right: 30,
left: 20,
bottom: 5
}}
>
<XAxis dataKey="timestamp" />
<YAxis tickFormatter={(value) => timeLineBarChartMode === "VOLUME" ? Utils.humanFileSize(value) : value} />
<Tooltip formatter={(value) => timeLineBarChartMode === "VOLUME" ? Utils.humanFileSize(value) : value + " Requests"} />
<Legend />
{bars}
</BarChart>
</div>
);
}
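The effect above flattens the timeline payload into one object per bucket (a formatted timestamp plus one numeric field per protocol), so recharts can stack a Bar per protocol. A minimal usage sketch; the data shape is inferred from how the effect indexes it, and the values are made up for illustration:

import React from "react";
import { TimelineBarChart } from "./TimelineBarChart"; // assumed relative path

// Illustrative payload; the real one comes from /status/accumulativeTiming.
const sampleTimeline = [
  {
    timestamp: 1655800000000, // anything new Date() accepts
    protocols: [
      { name: "http", color: "#205cf5", entriesCount: 120, volumeSizeBytes: 53248 },
      { name: "redis", color: "#a41e11", entriesCount: 40, volumeSizeBytes: 8192 }
    ]
  }
];

// "REQUESTS" selects entriesCount, "VOLUME" selects volumeSizeBytes (see StatsMode below).
export const TimelineExample: React.FC = () => (
  <TimelineBarChart timeLineBarChartMode="REQUESTS" data={sampleTimeline} />
);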

View File

@@ -0,0 +1,16 @@
.breadCrumbsContainer
margin-top: 15px
height: 15px
.breadCrumbs
color: #494677
text-align: left
.clickableTag
margin-right: 5px
border-bottom: 1px black solid
cursor: pointer
.nonClickableTag
margin-left: 5px
font-weight: 600

View File

@@ -0,0 +1,129 @@
import React, {useEffect, useMemo, useState} from "react";
import styles from "./TrafficPieChart.module.sass";
import {Cell, Legend, Pie, PieChart, Tooltip} from "recharts";
import {Utils} from "../../../../helpers/Utils";
import {StatsMode as PieChartMode} from "../TrafficStatsModal"
const COLORS = ['#e6194b', '#3cb44b', '#ffe119', '#4363d8', '#f58231', '#911eb4', '#46f0f0', '#f032e6', '#bcf60c', '#fabebe', '#008080', '#e6beff', '#9a6324', '#fffac8', '#800000', '#aaffc3', '#808000', '#ffd8b1', '#000075', '#808080', '#ffffff', '#000000'];
const RADIAN = Math.PI / 180;
const renderCustomizedLabel = ({
cx,
cy,
midAngle,
innerRadius,
outerRadius,
percent,
index
}: any) => {
const radius = innerRadius + (outerRadius - innerRadius) * 0.5;
const x = cx + radius * Math.cos(-midAngle * RADIAN);
const y = cy + radius * Math.sin(-midAngle * RADIAN);
if (Number((percent * 100).toFixed(0)) <= 1) return;
return (
<text
x={x}
y={y}
fill="white"
textAnchor={x > cx ? "start" : "end"}
dominantBaseline="central"
>
{`${(percent * 100).toFixed(0)}%`}
</text>
);
};
interface TrafficPieChartProps {
pieChartMode: string;
data: any;
}
export const TrafficPieChart: React.FC<TrafficPieChartProps> = ({pieChartMode , data}) => {
const [protocolsStats, setProtocolsStats] = useState([]);
const [commandStats, setCommandStats] = useState(null);
const [selectedProtocol, setSelectedProtocol] = useState(null as string);
useEffect(() => {
if (!data) return;
const protocolsPieData = data.map(protocol => {
return {
name: protocol.name,
value: protocol[PieChartMode[pieChartMode]],
color: protocol.color
}
})
setProtocolsStats(protocolsPieData)
}, [data, pieChartMode])
useEffect(() => {
if (!selectedProtocol) {
setCommandStats(null);
return;
}
const commandsPieData = data.find(protocol => protocol.name === selectedProtocol).methods.map(command => {
return {
name: command.name,
value: command[PieChartMode[pieChartMode]]
}
})
setCommandStats(commandsPieData);
}, [selectedProtocol, pieChartMode, data])
const pieLegend = useMemo(() => {
if (!data) return;
let legend;
if (!selectedProtocol) {
legend = data.map(protocol => <div key={protocol.name} style={{marginBottom: 5, display: "flex"}}>
<div style={{height: 15, width: 30, background: protocol?.color}}/>
<span style={{marginLeft: 5}}>
{protocol.name}
</span>
</div>)
} else {
legend = data.find(protocol => protocol.name === selectedProtocol).methods.map((method, index) => <div
key={method.name} style={{marginBottom: 5, display: "flex"}}>
<div style={{height: 15, width: 30, background: COLORS[index % COLORS.length]}}/>
<span style={{marginLeft: 5}}>
{method.name}
</span>
</div>)
}
return <div>{legend}</div>;
}, [data, selectedProtocol])
return (
<div>
<div className={styles.breadCrumbsContainer}>
{selectedProtocol && <div className={styles.breadCrumbs}>
<span className={styles.clickableTag} onClick={() => setSelectedProtocol(null)}>protocols</span>
<span>/</span>
<span className={styles.nonClickableTag}>{selectedProtocol}</span>
</div>}
</div>
{protocolsStats?.length > 0 && <div style={{width: "100%", display: "flex", justifyContent: "center"}}>
<PieChart width={300} height={300}>
<Pie
data={commandStats || protocolsStats}
dataKey="value"
cx={150}
cy={125}
labelLine={false}
label={renderCustomizedLabel}
outerRadius={125}
fill="#8884d8"
onClick={(section) => !commandStats && setSelectedProtocol(section.name)}>
{(commandStats || protocolsStats).map((entry, index) => (
<Cell key={`cell-${index}`} fill={entry.color || COLORS[index % COLORS.length]}/>)
)}
</Pie>
<Legend wrapperStyle={{position: "absolute", width: "auto", height: "auto", right: -150, top: 0}} content={pieLegend}/>
<Tooltip formatter={(value) => pieChartMode === "VOLUME" ? Utils.humanFileSize(value) : value + " Requests"}/>
</PieChart>
</div>}
</div>
);
}
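Drill-down here is driven by the slice onClick: clicking a protocol stores its name in selectedProtocol, which swaps the pie data to that protocol's methods and shows the breadcrumb for going back. A usage sketch under the same assumed payload shape as the timeline example, with made-up numbers:

import React from "react";
import { TrafficPieChart } from "./TrafficPieChart"; // assumed relative path

// Illustrative payload; the real one comes from /status/accumulative.
const samplePie = [
  {
    name: "http",
    color: "#205cf5",
    entriesCount: 160,
    volumeSizeBytes: 61440,
    methods: [
      { name: "GET", entriesCount: 100, volumeSizeBytes: 40960 },
      { name: "POST", entriesCount: 60, volumeSizeBytes: 20480 }
    ]
  }
];

export const PieExample: React.FC = () => (
  <TrafficPieChart pieChartMode="VOLUME" data={samplePie} />
);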

View File

@@ -0,0 +1,19 @@
.title
color: #494677
font-family: Source Sans Pro,Lucida Grande,Tahoma,sans-serif
font-size: 28px
font-weight: 600
.closeIcon
position: absolute
right: 20px
top: 20px
.mainContainer
padding: 30px
text-align: center
.select
border: none
border-bottom: 1px black solid
outline: none

View File

@@ -0,0 +1,97 @@
import React, { useEffect, useState } from "react";
import { Backdrop, Box, Fade, Modal } from "@mui/material";
import styles from "./TrafficStatsModal.module.sass";
import closeIcon from "assets/close.svg";
import { TrafficPieChart } from "./TrafficPieChart/TrafficPieChart";
import { TimelineBarChart } from "./TimelineBarChart/TimelineBarChart";
import spinnerImg from "assets/spinner.svg";
const modalStyle = {
position: 'absolute',
top: '6%',
left: '50%',
transform: 'translate(-50%, 0%)',
width: '50vw',
height: '82vh',
bgcolor: 'background.paper',
borderRadius: '5px',
boxShadow: 24,
p: 4,
color: '#000',
};
export enum StatsMode {
REQUESTS = "entriesCount",
VOLUME = "volumeSizeBytes"
}
interface TrafficStatsModalProps {
isOpen: boolean;
onClose: () => void;
getPieStatsDataApi: () => Promise<any>
getTimelineStatsDataApi: () => Promise<any>
}
export const TrafficStatsModal: React.FC<TrafficStatsModalProps> = ({ isOpen, onClose, getPieStatsDataApi, getTimelineStatsDataApi }) => {
const modes = Object.keys(StatsMode).filter(x => !(parseInt(x) >= 0));
const [statsMode, setStatsMode] = useState(modes[0]);
const [pieStatsData, setPieStatsData] = useState(null);
const [timelineStatsData, setTimelineStatsData] = useState(null);
const [isLoading, setIsLoading] = useState(false);
useEffect(() => {
if (isOpen && getPieStatsDataApi) {
(async () => {
try {
setIsLoading(true);
const pieData = await getPieStatsDataApi();
setPieStatsData(pieData);
const timelineData = await getTimelineStatsDataApi();
setTimelineStatsData(timelineData);
} catch (e) {
console.error(e)
} finally {
setIsLoading(false)
}
})()
}
}, [isOpen, getPieStatsDataApi, getTimelineStatsDataApi, setPieStatsData, setTimelineStatsData])
return (
<Modal
aria-labelledby="transition-modal-title"
aria-describedby="transition-modal-description"
open={isOpen}
onClose={onClose}
closeAfterTransition
BackdropComponent={Backdrop}
BackdropProps={{ timeout: 500 }}>
<Fade in={isOpen}>
<Box sx={modalStyle}>
<div className={styles.closeIcon}>
<img src={closeIcon} alt="close" onClick={() => onClose()} style={{ cursor: "pointer", userSelect: "none" }} />
</div>
<div className={styles.title}>Traffic Statistics</div>
<div className={styles.mainContainer}>
<div>
<span style={{ marginRight: 15 }}>Breakdown By</span>
<select className={styles.select} value={statsMode} onChange={(e) => setStatsMode(e.target.value)}>
{modes.map(mode => <option key={mode} value={mode}>{mode}</option>)}
</select>
</div>
<div>
{isLoading ? <div style={{ textAlign: "center", marginTop: 20 }}>
<img alt="spinner" src={spinnerImg} style={{ height: 50 }} />
</div> :
<div>
<TrafficPieChart pieChartMode={statsMode} data={pieStatsData} />
<TimelineBarChart timeLineBarChartMode={statsMode} data={timelineStatsData} />
</div>}
</div>
</div>
</Box>
</Fade>
</Modal>
);
}
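The modal itself only orchestrates loading: on open it awaits the two injected API calls, shows the spinner while they are in flight, and passes the results to the two charts together with the selected StatsMode. The shapes below are not part of the commit; they are inferred here from how TrafficPieChart and TimelineBarChart index the data, as a hedged reference:

// Assumed shapes, reverse-engineered from the chart components above.
interface MethodStats {
  name: string;
  entriesCount: number;     // StatsMode.REQUESTS
  volumeSizeBytes: number;  // StatsMode.VOLUME
}

interface ProtocolStats extends MethodStats {
  color: string;
  methods: MethodStats[];
}

interface TimelineBucket {
  timestamp: number | string; // anything new Date() accepts
  protocols: ProtocolStats[];
}

// getPieStatsDataApi is assumed to resolve to ProtocolStats[]
// getTimelineStatsDataApi is assumed to resolve to TimelineBucket[]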

View File

@@ -0,0 +1,4 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M18.591 9.99997C18.591 14.7446 14.7447 18.5909 10.0001 18.5909C5.25546 18.5909 1.40918 14.7446 1.40918 9.99997C1.40918 5.25534 5.25546 1.40906 10.0001 1.40906C14.7447 1.40906 18.591 5.25534 18.591 9.99997Z" fill="#E9EBF8" stroke="#BCCEFD"/>
<path d="M13.1604 8.23038L11.95 7.01994L10.1392 8.83078L8.32832 7.01994L7.11789 8.23038L8.92872 10.0412L7.12046 11.8495L8.33089 13.0599L10.1392 11.2517L11.9474 13.0599L13.1579 11.8495L11.3496 10.0412L13.1604 8.23038Z" fill="#205CF5"/>
</svg>


View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" style="margin: auto; background: none; display: block; shape-rendering: auto;" width="200px" height="200px" viewBox="0 0 100 100" preserveAspectRatio="xMidYMid">
<circle cx="50" cy="50" fill="none" stroke="#1d3f72" stroke-width="10" r="35" stroke-dasharray="164.93361431346415 56.97787143782138" transform="rotate(275.903 50 50)">
<animateTransform attributeName="transform" type="rotate" repeatCount="indefinite" dur="1s" values="0 50 50;360 50 50" keyTimes="0;1"></animateTransform>
</circle>
<!-- [ldio] generated by https://loading.io/ --></svg>


View File

@@ -4,4 +4,26 @@ const IP_ADDRESS_REGEX = /([0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3})(:([0-9]{
export class Utils {
static isIpAddress = (address: string): boolean => IP_ADDRESS_REGEX.test(address)
static lineNumbersInString = (code:string): number => code.split("\n").length;
static humanFileSize(bytes, si=false, dp=1) {
const thresh = si ? 1000 : 1024;
if (Math.abs(bytes) < thresh) {
return bytes + ' B';
}
const units = si
? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
let u = -1;
const r = 10**dp;
do {
bytes /= thresh;
++u;
} while (Math.round(Math.abs(bytes) * r) / r >= thresh && u < units.length - 1);
return bytes.toFixed(dp) + ' ' + units[u];
}
}
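humanFileSize repeatedly divides by 1024 (or 1000 when si is true) until the value drops below the threshold, then prints it with dp decimals; the charts use it to label the VOLUME axis and tooltips. A few illustrative calls:

import { Utils } from "./helpers/Utils"; // path depends on the caller's location

Utils.humanFileSize(512);            // "512 B"  (below the 1024 threshold)
Utils.humanFileSize(1536);           // "1.5 KiB"
Utils.humanFileSize(1536, true);     // "1.5 kB" (SI units, threshold 1000)
Utils.humanFileSize(5 * 1024 ** 2);  // "5.0 MiB"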

View File

@@ -4,8 +4,9 @@ import { StatusBar } from './components/UI';
import useWS, { DEFAULT_LEFTOFF } from './hooks/useWS';
import OasModal from './components/modals/OasModal/OasModal';
import { ServiceMapModal } from './components/modals/ServiceMapModal/ServiceMapModal';
import { TrafficStatsModal } from './components/modals/TrafficStatsModal/TrafficStatsModal';
export { CodeEditorWrap as QueryForm } from './components/Filters/Filters';
export { UI, StatusBar, OasModal, ServiceMapModal }
export { UI, StatusBar, OasModal, ServiceMapModal, TrafficStatsModal }
export { useWS, DEFAULT_LEFTOFF }
export default TrafficViewer;
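With this re-export in place, a host app can pull the new modal straight from the package entry point alongside the existing ones, matching the import used in App.tsx below:

import TrafficViewer, { TrafficStatsModal, ServiceMapModal, OasModal, useWS } from "@up9/mizu-common";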

View File

@@ -1,5 +1,4 @@
import { atom } from "recoil";
import TrafficViewerApi from "../../components/TrafficViewer/TrafficViewerApi";
const TrafficViewerApiAtom = atom({
key: "TrafficViewerApiAtom",

View File

@@ -5,9 +5,11 @@ import { ServiceMapModal } from '@up9/mizu-common';
import { useRecoilState } from "recoil";
import serviceMapModalOpenAtom from "./recoil/serviceMapModalOpen";
import oasModalOpenAtom from './recoil/oasModalOpen/atom';
import trafficStatsModalOpenAtom from "./recoil/trafficStatsModalOpen";
import { OasModal } from '@up9/mizu-common';
import Api from './helpers/api';
import { ThemeProvider, StyledEngineProvider, createTheme } from '@mui/material';
import {ThemeProvider, StyledEngineProvider, createTheme} from '@mui/material';
import { TrafficStatsModal } from '@up9/mizu-common';
const api = Api.getInstance()
@@ -15,6 +17,7 @@ const App = () => {
const [serviceMapModalOpen, setServiceMapModalOpen] = useRecoilState(serviceMapModalOpenAtom);
const [oasModalOpen, setOasModalOpen] = useRecoilState(oasModalOpenAtom)
const [trafficStatsModalOpen, setTrafficStatsModalOpen] = useRecoilState(trafficStatsModalOpenAtom);
return (
<StyledEngineProvider injectFirst>
@@ -33,6 +36,7 @@ const App = () => {
openModal={oasModalOpen}
handleCloseModal={() => setOasModalOpen(false)}
/>}
<TrafficStatsModal isOpen={trafficStatsModalOpen} onClose={() => setTrafficStatsModalOpen(false)} getPieStatsDataApi={api.getPieStats} getTimelineStatsDataApi={api.getTimelineStats}/>
</div>
</ThemeProvider>
</StyledEngineProvider>

View File

@@ -10,6 +10,8 @@ import "@up9/mizu-common/dist/index.css"
import oasModalOpenAtom from "../../../recoil/oasModalOpen/atom";
import serviceMap from "../../assets/serviceMap.svg";
import services from "../../assets/services.svg";
import trafficStatsIcon from "../../assets/trafficStats.svg";
import trafficStatsModalOpenAtom from "../../../recoil/trafficStatsModalOpen";
const api = Api.getInstance();
@@ -17,6 +19,7 @@ export const TrafficPage: React.FC = () => {
const commonClasses = useCommonStyles();
const [serviceMapModalOpen, setServiceMapModalOpen] = useRecoilState(serviceMapModalOpenAtom);
const [openOasModal, setOpenOasModal] = useRecoilState(oasModalOpenAtom);
const [trafficStatsModalOpen, setTrafficStatsModalOpen] = useRecoilState(trafficStatsModalOpenAtom);
const [shouldCloseWebSocket, setShouldCloseWebSocket] = useState(false);
const trafficViewerApi = { ...api }
@@ -26,13 +29,17 @@ export const TrafficPage: React.FC = () => {
setOpenOasModal(true);
}
const handleOpenStatsModal = () => {
setShouldCloseWebSocket(true)
setTrafficStatsModalOpen(true);
}
const openServiceMapModalDebounce = debounce(() => {
setShouldCloseWebSocket(true)
setServiceMapModalOpen(true)
}, 500);
const actionButtons = (window["isOasEnabled"] || window["isServiceMapEnabled"]) &&
<div style={{ display: 'flex', height: "100%" }}>
const actionButtons = <div style={{ display: 'flex', height: "100%" }}>
{window["isOasEnabled"] && <Button
startIcon={<img className="custom" src={services} alt="services" />}
size="large"
@@ -48,15 +55,24 @@ export const TrafficPage: React.FC = () => {
variant="contained"
className={commonClasses.outlinedButton + " " + commonClasses.imagedButton}
onClick={openServiceMapModalDebounce}
style={{ textTransform: 'unset' }}>
style={{ marginRight: 25, textTransform: 'unset' }}>
Service Map
</Button>}
<Button
startIcon={<img className="custom" src={trafficStatsIcon} alt="traffic stats" />}
size="large"
variant="contained"
className={commonClasses.outlinedButton + " " + commonClasses.imagedButton}
style={{ textTransform: 'unset' }}
onClick={handleOpenStatsModal}>
Traffic Stats
</Button>
</div>
return (
<>
<TrafficViewer webSocketUrl={MizuWebsocketURL} shouldCloseWebSocket={shouldCloseWebSocket} setShouldCloseWebSocket={setShouldCloseWebSocket}
trafficViewerApiProp={trafficViewerApi} actionButtons={actionButtons} isShowStatusBar={!(openOasModal || serviceMapModalOpen)} isDemoBannerView={false} />
trafficViewerApiProp={trafficViewerApi} actionButtons={actionButtons} isShowStatusBar={!(openOasModal || serviceMapModalOpen || trafficStatsModalOpen)} isDemoBannerView={false} />
</>
);
};
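Both modal openers set shouldCloseWebSocket so TrafficViewer tears down its live stream before a modal takes over, and the service-map opener is additionally wrapped in a 500 ms debounce so rapid double-clicks only open it once. The debounce helper itself is not shown in this diff; a lodash-style sketch of what it is assumed to do:

// Hedged sketch of a trailing-edge debounce (the project may well use lodash.debounce).
function debounce<T extends (...args: any[]) => void>(fn: T, waitMs: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: Parameters<T>) => {
    if (timer) clearTimeout(timer);
    timer = setTimeout(() => fn(...args), waitMs);
  };
}

const openOnce = debounce(() => console.log("open service map"), 500);
openOnce(); // called twice in quick succession...
openOnce(); // ...fires only once, 500 ms after the last call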

View File

@@ -0,0 +1,10 @@
<svg width="22" height="16" viewBox="0 0 22 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="1.5" y="2.5" width="9" height="12" rx="1.5" stroke="#A0B2FF"/>
<rect x="5" y="4" width="3.42857" height="2" rx="1" fill="#205CF5"/>
<rect x="3" y="7" width="6" height="2" rx="1" fill="#205CF5"/>
<rect x="3" y="11" width="6" height="2" rx="1" fill="#205CF5"/>
<rect x="-0.5" y="0.5" width="9" height="12" rx="1.5" transform="matrix(-1 0 0 1 19 2)" stroke="#A0B2FF"/>
<rect width="3" height="2" rx="1" transform="matrix(-1 0 0 1 15 4)" fill="#205CF5"/>
<rect width="6" height="2" rx="1" transform="matrix(-1 0 0 1 18 7)" fill="#205CF5"/>
<rect width="6" height="2" rx="1" transform="matrix(-1 0 0 1 18 11)" fill="#205CF5"/>
</svg>


View File

@@ -110,4 +110,14 @@ export default class Api {
headers
});
}
getPieStats = async () => {
const response = await client.get("/status/accumulative");
return response.data;
}
getTimelineStats = async () => {
const response = await client.get("/status/accumulativeTiming");
return response.data;
}
}
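These two helpers are thin wrappers over the shared HTTP client and back the two props the modal receives in App.tsx. A usage sketch, reusing the Api.getInstance() pattern already shown in this diff:

import Api from "./helpers/api";

const api = Api.getInstance();

// The same calls TrafficStatsModal makes when it opens.
(async () => {
  const pie = await api.getPieStats();           // GET /status/accumulative
  const timeline = await api.getTimelineStats(); // GET /status/accumulativeTiming
  console.log(pie, timeline);
})();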

View File

@@ -0,0 +1,8 @@
import { atom } from "recoil"
const trafficStatsModalOpenAtom = atom({
key: "trafficStatsModalOpenAtom",
default: false
})
export default trafficStatsModalOpenAtom;
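Any component under the Recoil root can now toggle the stats modal through this atom: TrafficPage writes it and App.tsx reads it to render the modal. A minimal sketch:

import React from "react";
import { useSetRecoilState } from "recoil";
import trafficStatsModalOpenAtom from "./recoil/trafficStatsModalOpen"; // path as used in App.tsx

export const OpenStatsButton: React.FC = () => {
  // Writing true opens the TrafficStatsModal rendered in App.tsx.
  const setTrafficStatsModalOpen = useSetRecoilState(trafficStatsModalOpenAtom);
  return <button onClick={() => setTrafficStatsModalOpen(true)}>Traffic Stats</button>;
};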

View File

@@ -0,0 +1,2 @@
import atom from "./atom";
export default atom;