Mirror of https://github.com/kubeshark/kubeshark.git (synced 2026-02-18 03:50:06 +00:00)

Compare commits: 35.0-dev18 ... 35.0 (12 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 8612135197 | |
| | 5f73c2d50a | |
| | d6944d467c | |
| | 57078517a4 | |
| | b4bc09637c | |
| | 302333b4ae | |
| | df642de4ce | |
| | f102031c45 | |
| | a5ddb162e8 | |
| | 1fcc22c356 | |
| | 960d39f27d | |
| | 0709b861d6 | |
@@ -30,7 +30,6 @@ require (
github.com/up9inc/mizu/tap/extensions/kafka v0.0.0
github.com/up9inc/mizu/tap/extensions/redis v0.0.0
github.com/wI2L/jsondiff v0.1.1
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0
k8s.io/api v0.23.3
k8s.io/apimachinery v0.23.3
k8s.io/client-go v0.23.3

@@ -707,8 +707,6 @@ github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6Ut
github.com/xlab/treeprint v1.1.0 h1:G/1DjNkPpfZCFt9CSh6b5/nY4VimlbHF3Rh4obvtzDk=
github.com/xlab/treeprint v1.1.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -12,7 +12,6 @@ import (
"time"

"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/oas"
"github.com/up9inc/mizu/agent/pkg/servicemap"

@@ -101,20 +100,13 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
for item := range outputItems {
extension := extensionsMap[item.Protocol.Name]
resolvedSource, resolvedDestionation, namespace := resolveIP(item.ConnectionInfo)
resolvedSource, resolvedDestination, namespace := resolveIP(item.ConnectionInfo)

if namespace == "" && item.Namespace != tapApi.UnknownNamespace {
namespace = item.Namespace
}

mizuEntry := extension.Dissector.Analyze(item, resolvedSource, resolvedDestionation, namespace)
if extension.Protocol.Name == "http" {
harEntry, err := har.NewEntry(mizuEntry.Request, mizuEntry.Response, mizuEntry.StartTime, mizuEntry.ElapsedTime)
if err == nil {
rules, _, _ := models.RunValidationRulesState(*harEntry, mizuEntry.Destination.Name)
mizuEntry.Rules = rules
}
}
mizuEntry := extension.Dissector.Analyze(item, resolvedSource, resolvedDestination, namespace)

data, err := json.Marshal(mizuEntry)
if err != nil {
@@ -9,6 +9,7 @@ import (
"github.com/op/go-logging"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/api"
"github.com/up9inc/mizu/agent/pkg/providers"
"github.com/up9inc/mizu/agent/pkg/utils"
"github.com/up9inc/mizu/logger"
tapApi "github.com/up9inc/mizu/tap/api"

@@ -81,6 +82,7 @@ func LoadExtensions() {
})

api.InitMaps(ExtensionsMap, ProtocolsMap)
providers.InitProtocolToColor(ProtocolsMap)
}

func ConfigureBasenineServer(host string, port string, dbSize int64, logLevel logging.Level, insertionFilter string) {
@@ -79,13 +79,8 @@ func GetGeneralStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetGeneralStats())
}

func GetAccumulativeStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetAccumulativeStats())
}

func GetAccumulativeStatsTiming(c *gin.Context) {
// for now hardcoded 10 bars of 5 minutes interval
c.JSON(http.StatusOK, providers.GetAccumulativeStatsTiming(300, 10))
func GetTrafficStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetTrafficStats())
}

func GetCurrentResolvingInformation(c *gin.Context) {
@@ -8,7 +8,6 @@ import (

basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/app"
"github.com/up9inc/mizu/agent/pkg/har"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/logger"
"github.com/up9inc/mizu/shared"

@@ -95,24 +94,10 @@ func (e *BasenineEntriesProvider) GetEntry(singleEntryRequest *models.SingleEntr
return nil, err
}

var rules []map[string]interface{}
var isRulesEnabled bool
if protocol.Name == "http" {
harEntry, _ := har.NewEntry(entry.Request, entry.Response, entry.StartTime, entry.ElapsedTime)
_, rulesMatched, _isRulesEnabled := models.RunValidationRulesState(*harEntry, entry.Destination.Name)
isRulesEnabled = _isRulesEnabled
inrec, _ := json.Marshal(rulesMatched)
if err := json.Unmarshal(inrec, &rules); err != nil {
logger.Log.Error(err)
}
}

return &tapApi.EntryWrapper{
Protocol: *protocol,
Representation: string(representation),
Data: entry,
Base: base,
Rules: rules,
IsRulesEnabled: isRulesEnabled,
}, nil
}
@@ -4,7 +4,6 @@ import (
"encoding/json"

"github.com/up9inc/mizu/agent/pkg/har"
"github.com/up9inc/mizu/agent/pkg/rules"
tapApi "github.com/up9inc/mizu/tap/api"

basenine "github.com/up9inc/basenine/client/go"

@@ -143,9 +142,3 @@ type ExtendedCreator struct {
*har.Creator
Source *string `json:"_source"`
}

func RunValidationRulesState(harEntry har.Entry, service string) (tapApi.ApplicableRules, []rules.RulesMatched, bool) {
resultPolicyToSend, isEnabled := rules.MatchRequestPolicy(harEntry, service)
statusPolicyToSend, latency, numberOfRules := rules.PassedValidationRules(resultPolicyToSend)
return tapApi.ApplicableRules{Status: statusPolicyToSend, Latency: latency, NumberOfRules: numberOfRules}, resultPolicyToSend, isEnabled
}
@@ -2,6 +2,7 @@ package providers

import (
"reflect"
"strings"
"sync"
"time"

@@ -26,7 +27,6 @@ type TimeFrameStatsValue struct {

type ProtocolStats struct {
MethodsStats map[string]*SizeAndEntriesCount `json:"methods"`
Color string `json:"color"`
}

type SizeAndEntriesCount struct {
@@ -51,46 +51,103 @@ type AccumulativeStatsProtocolTime struct {
Time int64 `json:"timestamp"`
}

type TrafficStatsResponse struct {
PieStats []*AccumulativeStatsProtocol `json:"pie"`
TimelineStats []*AccumulativeStatsProtocolTime `json:"timeline"`
}

var (
generalStats = GeneralStats{}
bucketsStats = BucketStats{}
bucketStatsLocker = sync.Mutex{}
protocolToColor = map[string]string{}
)

const (
InternalBucketThreshold = time.Minute * 1
MaxNumberOfBars = 30
)

func ResetGeneralStats() {
generalStats = GeneralStats{}
}

func GetGeneralStats() GeneralStats {
return generalStats
func GetGeneralStats() *GeneralStats {
return &generalStats
}

func GetAccumulativeStats() []*AccumulativeStatsProtocol {
bucketStatsCopy := getBucketStatsCopy()
if len(bucketStatsCopy) == 0 {
func InitProtocolToColor(protocolMap map[string]*api.Protocol) {
for item, value := range protocolMap {
protocolToColor[strings.Split(item, "/")[2]] = value.BackgroundColor
}
}

func GetTrafficStats() *TrafficStatsResponse {
bucketsStatsCopy := getBucketStatsCopy()
interval := calculateInterval(bucketsStatsCopy[0].BucketTime.Unix(), bucketsStatsCopy[len(bucketsStatsCopy)-1].BucketTime.Unix()) // in seconds

return &TrafficStatsResponse{
PieStats: getAccumulativeStats(bucketsStatsCopy),
TimelineStats: getAccumulativeStatsTiming(bucketsStatsCopy, interval),
}
}

func calculateInterval(firstTimestamp int64, lastTimestamp int64) time.Duration {
validDurations := []time.Duration{
time.Minute,
time.Minute * 2,
time.Minute * 3,
time.Minute * 5,
time.Minute * 10,
time.Minute * 15,
time.Minute * 20,
time.Minute * 30,
time.Minute * 45,
time.Minute * 60,
time.Minute * 75,
time.Minute * 90, // 1.5 minutes
time.Minute * 120, // 2 hours
time.Minute * 150, // 2.5 hours
time.Minute * 180, // 3 hours
time.Minute * 240, // 4 hours
time.Minute * 300, // 5 hours
time.Minute * 360, // 6 hours
time.Minute * 420, // 7 hours
time.Minute * 480, // 8 hours
time.Minute * 540, // 9 hours
time.Minute * 600, // 10 hours
time.Minute * 660, // 11 hours
time.Minute * 720, // 12 hours
time.Minute * 1440, // 24 hours
}
duration := time.Duration(lastTimestamp-firstTimestamp) * time.Second / time.Duration(MaxNumberOfBars)
for _, validDuration := range validDurations {
if validDuration-duration >= 0 {
return validDuration
}
}
return duration.Round(validDurations[len(validDurations)-1])
}

func getAccumulativeStats(stats BucketStats) []*AccumulativeStatsProtocol {
if len(stats) == 0 {
return make([]*AccumulativeStatsProtocol, 0)
}

methodsPerProtocolAggregated, protocolToColor := getAggregatedStatsAllTime(bucketStatsCopy)
methodsPerProtocolAggregated := getAggregatedStats(stats)

return convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated, protocolToColor)
return convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated)
}

func GetAccumulativeStatsTiming(intervalSeconds int, numberOfBars int) []*AccumulativeStatsProtocolTime {
bucketStatsCopy := getBucketStatsCopy()
if len(bucketStatsCopy) == 0 {
func getAccumulativeStatsTiming(stats BucketStats, interval time.Duration) []*AccumulativeStatsProtocolTime {
if len(stats) == 0 {
return make([]*AccumulativeStatsProtocolTime, 0)
}

firstBucketTime := getFirstBucketTime(time.Now().UTC(), intervalSeconds, numberOfBars)
methodsPerProtocolPerTimeAggregated := getAggregatedResultTiming(interval, stats)

methodsPerProtocolPerTimeAggregated, protocolToColor := getAggregatedResultTimingFromSpecificTime(intervalSeconds, bucketStatsCopy, firstBucketTime)

return convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggregated, protocolToColor)
return convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggregated)
}

func EntryAdded(size int, summery *api.BaseEntry) {
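The new `calculateInterval` shown above picks the smallest "nice" bucket width that keeps the timeline at or below `MaxNumberOfBars` bars: it divides the covered time range into that many slices and rounds up to the first allowed duration. A minimal standalone sketch of the same idea (the function name, the reduced duration table, and the example timestamps below are illustrative, not taken from the diff):

```go
package main

import (
	"fmt"
	"time"
)

const maxNumberOfBars = 30

// pickInterval divides the covered range into maxNumberOfBars slices and
// rounds up to the first allowed duration, mirroring the selection logic above.
func pickInterval(firstTimestamp, lastTimestamp int64) time.Duration {
	allowed := []time.Duration{
		time.Minute, 5 * time.Minute, 10 * time.Minute,
		30 * time.Minute, time.Hour, 24 * time.Hour,
	}
	raw := time.Duration(lastTimestamp-firstTimestamp) * time.Second / maxNumberOfBars
	for _, d := range allowed {
		if d >= raw {
			return d
		}
	}
	return allowed[len(allowed)-1]
}

func main() {
	// A 3-hour capture (10800 s) needs buckets of at least 360 s,
	// so the first allowed value >= 6 min is 10 min.
	fmt.Println(pickInterval(0, 3*60*60)) // 10m0s
}
```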
@@ -128,7 +185,6 @@ func addToBucketStats(size int, summery *api.BaseEntry) {
if _, found := bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation]; !found {
bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation] = ProtocolStats{
MethodsStats: map[string]*SizeAndEntriesCount{},
Color: summery.Protocol.BackgroundColor,
}
}
if _, found := bucketOfEntry.ProtocolStats[summery.Protocol.Abbreviation].MethodsStats[summery.Method]; !found {

@@ -147,13 +203,7 @@ func getBucketFromTimeStamp(timestamp int64) time.Time {
return entryTimeStampAsTime.Add(-1 * InternalBucketThreshold / 2).Round(InternalBucketThreshold)
}

func getFirstBucketTime(endTime time.Time, intervalSeconds int, numberOfBars int) time.Time {
lastBucketTime := endTime.Add(-1 * time.Second * time.Duration(intervalSeconds) / 2).Round(time.Second * time.Duration(intervalSeconds))
firstBucketTime := lastBucketTime.Add(-1 * time.Second * time.Duration(intervalSeconds*(numberOfBars-1)))
return firstBucketTime
}

func convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggregated map[time.Time]map[string]map[string]*AccumulativeStatsCounter, protocolToColor map[string]string) []*AccumulativeStatsProtocolTime {
func convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggregated map[time.Time]map[string]map[string]*AccumulativeStatsCounter) []*AccumulativeStatsProtocolTime {
finalResult := make([]*AccumulativeStatsProtocolTime, 0)
for timeKey, item := range methodsPerProtocolPerTimeAggregated {
protocolsData := make([]*AccumulativeStatsProtocol, 0)

@@ -184,7 +234,7 @@ func convertAccumulativeStatsTimelineDictToArray(methodsPerProtocolPerTimeAggreg
return finalResult
}

func convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated map[string]map[string]*AccumulativeStatsCounter, protocolToColor map[string]string) []*AccumulativeStatsProtocol {
func convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated map[string]map[string]*AccumulativeStatsCounter) []*AccumulativeStatsProtocol {
protocolsData := make([]*AccumulativeStatsProtocol, 0)
for protocolName, value := range methodsPerProtocolAggregated {
entriesCount := 0
@@ -219,55 +269,44 @@ func getBucketStatsCopy() BucketStats {
return bucketStatsCopy
}

func getAggregatedResultTimingFromSpecificTime(intervalSeconds int, bucketStats BucketStats, firstBucketTime time.Time) (map[time.Time]map[string]map[string]*AccumulativeStatsCounter, map[string]string) {
protocolToColor := map[string]string{}
func getAggregatedResultTiming(interval time.Duration, stats BucketStats) map[time.Time]map[string]map[string]*AccumulativeStatsCounter {
methodsPerProtocolPerTimeAggregated := map[time.Time]map[string]map[string]*AccumulativeStatsCounter{}

bucketStatsIndex := len(bucketStats) - 1
bucketStatsIndex := len(stats) - 1
for bucketStatsIndex >= 0 {
currentBucketTime := bucketStats[bucketStatsIndex].BucketTime
if currentBucketTime.After(firstBucketTime) || currentBucketTime.Equal(firstBucketTime) {
resultBucketRoundedKey := currentBucketTime.Add(-1 * time.Second * time.Duration(intervalSeconds) / 2).Round(time.Second * time.Duration(intervalSeconds))
currentBucketTime := stats[bucketStatsIndex].BucketTime
resultBucketRoundedKey := currentBucketTime.Add(-1 * interval / 2).Round(interval)

for protocolName, data := range bucketStats[bucketStatsIndex].ProtocolStats {
if _, ok := protocolToColor[protocolName]; !ok {
protocolToColor[protocolName] = data.Color
for protocolName, data := range stats[bucketStatsIndex].ProtocolStats {
for methodName, dataOfMethod := range data.MethodsStats {

if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey] = map[string]map[string]*AccumulativeStatsCounter{}
}

for methodName, dataOfMethod := range data.MethodsStats {

if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey] = map[string]map[string]*AccumulativeStatsCounter{}
}
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName] = map[string]*AccumulativeStatsCounter{}
}
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName] = &AccumulativeStatsCounter{
Name: methodName,
EntriesCount: 0,
VolumeSizeBytes: 0,
}
}
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName].EntriesCount += dataOfMethod.EntriesCount
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName].VolumeSizeBytes += dataOfMethod.VolumeInBytes
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName] = map[string]*AccumulativeStatsCounter{}
}
if _, ok := methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName]; !ok {
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName] = &AccumulativeStatsCounter{
Name: methodName,
EntriesCount: 0,
VolumeSizeBytes: 0,
}
}
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName].EntriesCount += dataOfMethod.EntriesCount
methodsPerProtocolPerTimeAggregated[resultBucketRoundedKey][protocolName][methodName].VolumeSizeBytes += dataOfMethod.VolumeInBytes
}
}

bucketStatsIndex--
}
return methodsPerProtocolPerTimeAggregated, protocolToColor
return methodsPerProtocolPerTimeAggregated
}

func getAggregatedStatsAllTime(bucketStatsCopy BucketStats) (map[string]map[string]*AccumulativeStatsCounter, map[string]string) {
protocolToColor := make(map[string]string, 0)
func getAggregatedStats(bucketStatsCopy BucketStats) map[string]map[string]*AccumulativeStatsCounter {
methodsPerProtocolAggregated := make(map[string]map[string]*AccumulativeStatsCounter, 0)
for _, countersOfTimeFrame := range bucketStatsCopy {
for protocolName, value := range countersOfTimeFrame.ProtocolStats {
if _, ok := protocolToColor[protocolName]; !ok {
protocolToColor[protocolName] = value.Color
}

for method, countersValue := range value.MethodsStats {
if _, found := methodsPerProtocolAggregated[protocolName]; !found {
methodsPerProtocolAggregated[protocolName] = map[string]*AccumulativeStatsCounter{}

@@ -284,5 +323,5 @@ func getAggregatedStatsAllTime(bucketStatsCopy BucketStats) (map[string]map[stri
}
}
}
return methodsPerProtocolAggregated, protocolToColor
return methodsPerProtocolAggregated
}
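The timeline aggregation above keys each per-minute bucket by `currentBucketTime.Add(-1 * interval / 2).Round(interval)`, which effectively floors the bucket time onto the chosen interval grid. A small sketch of that rounding step (the helper name and the example timestamp are made up for illustration):

```go
package main

import (
	"fmt"
	"time"
)

// bucketKey floors a per-minute bucket time onto an interval grid,
// matching the rounding used by the timeline aggregation above.
func bucketKey(bucketTime time.Time, interval time.Duration) time.Time {
	return bucketTime.Add(-1 * interval / 2).Round(interval)
}

func main() {
	interval := 5 * time.Minute
	t := time.Date(2022, 1, 1, 10, 34, 0, 0, time.UTC)
	fmt.Println(bucketKey(t, interval)) // 2022-01-01 10:30:00 +0000 UTC
}
```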
@@ -26,38 +26,6 @@ func TestGetBucketOfTimeStamp(t *testing.T) {
}
}

type DataForBucketBorderFunction struct {
EndTime time.Time
IntervalInSeconds int
NumberOfBars int
}

func TestGetBucketBorders(t *testing.T) {
tests := map[DataForBucketBorderFunction]time.Time{
DataForBucketBorderFunction{
time.Date(2022, time.Month(1), 1, 10, 34, 45, 0, time.UTC),
300,
10,
}: time.Date(2022, time.Month(1), 1, 9, 45, 0, 0, time.UTC),
DataForBucketBorderFunction{
time.Date(2022, time.Month(1), 1, 10, 35, 45, 0, time.UTC),
60,
5,
}: time.Date(2022, time.Month(1), 1, 10, 31, 00, 0, time.UTC),
}

for key, value := range tests {
t.Run(fmt.Sprintf("%v", key), func(t *testing.T) {
actual := getFirstBucketTime(key.EndTime, key.IntervalInSeconds, key.NumberOfBars)
if actual != value {
t.Errorf("unexpected result - expected: %v, actual: %v", value, actual)
}
})
}
}

func TestGetAggregatedStatsAllTime(t *testing.T) {
bucketStatsForTest := BucketStats{
&TimeFrameStatsValue{

@@ -140,7 +108,7 @@ func TestGetAggregatedStatsAllTime(t *testing.T) {
},
},
}
actual, _ := getAggregatedStatsAllTime(bucketStatsForTest)
actual := getAggregatedStats(bucketStatsForTest)

if !reflect.DeepEqual(actual, expected) {
t.Errorf("unexpected result - expected: %v, actual: %v", 3, len(actual))

@@ -227,7 +195,7 @@ func TestGetAggregatedStatsFromSpecificTime(t *testing.T) {
},
},
}
actual, _ := getAggregatedResultTimingFromSpecificTime(300, bucketStatsForTest, time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC))
actual := getAggregatedResultTiming(time.Minute*5, bucketStatsForTest)

if !reflect.DeepEqual(actual, expected) {
t.Errorf("unexpected result - expected: %v, actual: %v", 3, len(actual))

@@ -323,7 +291,7 @@ func TestGetAggregatedStatsFromSpecificTimeMultipleBuckets(t *testing.T) {
},
},
}
actual, _ := getAggregatedResultTimingFromSpecificTime(60, bucketStatsForTest, time.Date(2022, time.Month(1), 1, 10, 00, 00, 0, time.UTC))
actual := getAggregatedResultTiming(time.Minute, bucketStatsForTest)

if !reflect.DeepEqual(actual, expected) {
t.Errorf("unexpected result - expected: %v, actual: %v", 3, len(actual))
@@ -171,8 +171,6 @@ func ExecuteRequest(replayData *Details, timeout time.Duration) *Response {
Representation: string(representation),
Data: entryUnmarshalled,
Base: base,
Rules: nil,
IsRulesEnabled: false,
},
ErrorMessage: "",
}

@@ -97,8 +97,6 @@ func TestValid(t *testing.T) {
Representation: string(representation),
Data: entry,
Base: base,
Rules: nil,
IsRulesEnabled: false,
}
t.Logf("%+v", result)
//data, _ := json.MarshalIndent(result, "", " ")

@@ -15,9 +15,8 @@ func StatusRoutes(ginApp *gin.Engine) {
routeGroup.GET("/connectedTappersCount", controllers.GetConnectedTappersCount)
routeGroup.GET("/tap", controllers.GetTappingStatus)

routeGroup.GET("/general", controllers.GetGeneralStats) // get general stats about entries in DB
routeGroup.GET("/accumulative", controllers.GetAccumulativeStats)
routeGroup.GET("/accumulativeTiming", controllers.GetAccumulativeStatsTiming)
routeGroup.GET("/general", controllers.GetGeneralStats)
routeGroup.GET("/trafficStats", controllers.GetTrafficStats)

routeGroup.GET("/resolving", controllers.GetCurrentResolvingInformation)
}
@@ -1,124 +0,0 @@
package rules

import (
"encoding/base64"
"encoding/json"
"fmt"
"reflect"
"regexp"
"strings"

"github.com/up9inc/mizu/agent/pkg/har"

"github.com/up9inc/mizu/logger"

"github.com/up9inc/mizu/shared"
"github.com/yalp/jsonpath"
)

type RulesMatched struct {
Matched bool `json:"matched"`
Rule shared.RulePolicy `json:"rule"`
}

func appendRulesMatched(rulesMatched []RulesMatched, matched bool, rule shared.RulePolicy) []RulesMatched {
return append(rulesMatched, RulesMatched{Matched: matched, Rule: rule})
}

func ValidatePath(URLFromRule string, URL string) bool {
if URLFromRule != "" {
matchPath, err := regexp.MatchString(URLFromRule, URL)
if err != nil || !matchPath {
return false
}
}
return true
}

func ValidateService(serviceFromRule string, service string) bool {
if serviceFromRule != "" {
matchService, err := regexp.MatchString(serviceFromRule, service)
if err != nil || !matchService {
return false
}
}
return true
}

func MatchRequestPolicy(harEntry har.Entry, service string) (resultPolicyToSend []RulesMatched, isEnabled bool) {
enforcePolicy, err := shared.DecodeEnforcePolicy(fmt.Sprintf("%s%s", shared.ConfigDirPath, shared.ValidationRulesFileName))
if err == nil && len(enforcePolicy.Rules) > 0 {
isEnabled = true
}
for _, rule := range enforcePolicy.Rules {
if !ValidatePath(rule.Path, harEntry.Request.URL) || !ValidateService(rule.Service, service) {
continue
}
if rule.Type == "json" {
var bodyJsonMap interface{}
contentTextDecoded, _ := base64.StdEncoding.DecodeString(harEntry.Response.Content.Text)
if err := json.Unmarshal(contentTextDecoded, &bodyJsonMap); err != nil {
continue
}
out, err := jsonpath.Read(bodyJsonMap, rule.Key)
if err != nil || out == nil {
continue
}
var matchValue bool
if reflect.TypeOf(out).Kind() == reflect.String {
matchValue, err = regexp.MatchString(rule.Value, out.(string))
if err != nil {
continue
}
logger.Log.Info(matchValue, rule.Value)
} else {
val := fmt.Sprint(out)
matchValue, err = regexp.MatchString(rule.Value, val)
if err != nil {
continue
}
}
resultPolicyToSend = appendRulesMatched(resultPolicyToSend, matchValue, rule)
} else if rule.Type == "header" {
for j := range harEntry.Response.Headers {
matchKey, err := regexp.MatchString(rule.Key, harEntry.Response.Headers[j].Name)
if err != nil {
continue
}
if matchKey {
matchValue, err := regexp.MatchString(rule.Value, harEntry.Response.Headers[j].Value)
if err != nil {
continue
}
resultPolicyToSend = appendRulesMatched(resultPolicyToSend, matchValue, rule)
}
}
} else {
resultPolicyToSend = appendRulesMatched(resultPolicyToSend, true, rule)
}
}
return
}

func PassedValidationRules(rulesMatched []RulesMatched) (bool, int64, int) {
var numberOfRulesMatched = len(rulesMatched)
var responseTime int64 = -1

if numberOfRulesMatched == 0 {
return false, 0, numberOfRulesMatched
}

for _, rule := range rulesMatched {
if !rule.Matched {
return false, responseTime, numberOfRulesMatched
} else {
if strings.ToLower(rule.Rule.Type) == "slo" {
if rule.Rule.ResponseTime < responseTime || responseTime == -1 {
responseTime = rule.Rule.ResponseTime
}
}
}
}

return true, responseTime, numberOfRulesMatched
}
@@ -53,7 +53,6 @@ func init() {
tapCmd.Flags().String(configStructs.HumanMaxEntriesDBSizeTapName, defaultTapConfig.HumanMaxEntriesDBSize, "Override the default max entries db size")
tapCmd.Flags().String(configStructs.InsertionFilterName, defaultTapConfig.InsertionFilter, "Set the insertion filter. Accepts string or a file path.")
tapCmd.Flags().Bool(configStructs.DryRunTapName, defaultTapConfig.DryRun, "Preview of all pods matching the regex, without tapping them")
tapCmd.Flags().String(configStructs.EnforcePolicyFile, defaultTapConfig.EnforcePolicyFile, "Yaml file path with policy rules")
tapCmd.Flags().Bool(configStructs.ServiceMeshName, defaultTapConfig.ServiceMesh, "Record decrypted traffic if the cluster is configured with a service mesh and with mtls")
tapCmd.Flags().Bool(configStructs.TlsName, defaultTapConfig.Tls, "Record tls traffic")
tapCmd.Flags().Bool(configStructs.ProfilerName, defaultTapConfig.Profiler, "Run pprof server")

@@ -12,7 +12,6 @@ import (
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/cli/utils"

"gopkg.in/yaml.v3"
core "k8s.io/api/core/v1"
k8serrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

@@ -45,16 +44,6 @@ func RunMizuTap() {

apiProvider = apiserver.NewProvider(GetApiServerUrl(config.Config.Tap.GuiPort), apiserver.DefaultRetries, apiserver.DefaultTimeout)

var err error
var serializedValidationRules string
if config.Config.Tap.EnforcePolicyFile != "" {
serializedValidationRules, err = readValidationRules(config.Config.Tap.EnforcePolicyFile)
if err != nil {
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Error reading policy file: %v", errormessage.FormatError(err)))
return
}
}

kubernetesProvider, err := getKubernetesProviderForCli()
if err != nil {
return

@@ -98,7 +87,7 @@ func RunMizuTap() {
}

logger.Log.Infof("Waiting for Mizu Agent to start...")
if state.mizuServiceAccountExists, err = resources.CreateTapMizuResources(ctx, kubernetesProvider, serializedValidationRules, serializedMizuConfig, config.Config.IsNsRestrictedMode(), config.Config.MizuResourcesNamespace, config.Config.AgentImage, config.Config.Tap.MaxEntriesDBSizeBytes(), config.Config.Tap.ApiServerResources, config.Config.ImagePullPolicy(), config.Config.LogLevel(), config.Config.Tap.Profiler); err != nil {
if state.mizuServiceAccountExists, err = resources.CreateTapMizuResources(ctx, kubernetesProvider, serializedMizuConfig, config.Config.IsNsRestrictedMode(), config.Config.MizuResourcesNamespace, config.Config.AgentImage, config.Config.Tap.MaxEntriesDBSizeBytes(), config.Config.Tap.ApiServerResources, config.Config.ImagePullPolicy(), config.Config.LogLevel(), config.Config.Tap.Profiler); err != nil {
var statusError *k8serrors.StatusError
if errors.As(err, &statusError) && (statusError.ErrStatus.Reason == metav1.StatusReasonAlreadyExists) {
logger.Log.Info("Mizu is already running in this namespace, change the `mizu-resources-namespace` configuration or run `mizu clean` to remove the currently running Mizu instance")

@@ -240,15 +229,6 @@ func getErrorDisplayTextForK8sTapManagerError(err kubernetes.K8sTapManagerError)
}
}

func readValidationRules(file string) (string, error) {
rules, err := shared.DecodeEnforcePolicy(file)
if err != nil {
return "", err
}
newContent, _ := yaml.Marshal(&rules)
return string(newContent), nil
}

func getMizuApiFilteringOptions() (*api.TrafficFilteringOptions, error) {
var compiledRegexSlice []*api.SerializableRegexp
@@ -23,7 +23,6 @@ const (
HumanMaxEntriesDBSizeTapName = "max-entries-db-size"
InsertionFilterName = "insertion-filter"
DryRunTapName = "dry-run"
EnforcePolicyFile = "traffic-validation-file"
ServiceMeshName = "service-mesh"
TlsName = "tls"
ProfilerName = "profiler"

@@ -42,7 +41,6 @@ type TapConfig struct {
HumanMaxEntriesDBSize string `yaml:"max-entries-db-size" default:"200MB"`
InsertionFilter string `yaml:"insertion-filter" default:""`
DryRun bool `yaml:"dry-run" default:"false"`
EnforcePolicyFile string `yaml:"traffic-validation-file"`
ApiServerResources shared.Resources `yaml:"api-server-resources"`
TapperResources shared.Resources `yaml:"tapper-resources"`
ServiceMesh bool `yaml:"service-mesh" default:"false"`

@@ -14,14 +14,14 @@ import (
core "k8s.io/api/core/v1"
)

func CreateTapMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedValidationRules string, serializedMizuConfig string, isNsRestrictedMode bool, mizuResourcesNamespace string, agentImage string, maxEntriesDBSizeBytes int64, apiServerResources shared.Resources, imagePullPolicy core.PullPolicy, logLevel logging.Level, profiler bool) (bool, error) {
func CreateTapMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedMizuConfig string, isNsRestrictedMode bool, mizuResourcesNamespace string, agentImage string, maxEntriesDBSizeBytes int64, apiServerResources shared.Resources, imagePullPolicy core.PullPolicy, logLevel logging.Level, profiler bool) (bool, error) {
if !isNsRestrictedMode {
if err := createMizuNamespace(ctx, kubernetesProvider, mizuResourcesNamespace); err != nil {
return false, err
}
}

if err := createMizuConfigmap(ctx, kubernetesProvider, serializedValidationRules, serializedMizuConfig, mizuResourcesNamespace); err != nil {
if err := createMizuConfigmap(ctx, kubernetesProvider, serializedMizuConfig, mizuResourcesNamespace); err != nil {
return false, err
}

@@ -71,8 +71,8 @@ func createMizuNamespace(ctx context.Context, kubernetesProvider *kubernetes.Pro
return err
}

func createMizuConfigmap(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedValidationRules string, serializedMizuConfig string, mizuResourcesNamespace string) error {
err := kubernetesProvider.CreateConfigMap(ctx, mizuResourcesNamespace, kubernetes.ConfigMapName, serializedValidationRules, serializedMizuConfig)
func createMizuConfigmap(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedMizuConfig string, mizuResourcesNamespace string) error {
err := kubernetesProvider.CreateConfigMap(ctx, mizuResourcesNamespace, kubernetes.ConfigMapName, serializedMizuConfig)
return err
}
@@ -6,7 +6,6 @@ const (
NodeNameEnvVar = "NODE_NAME"
ConfigDirPath = "/app/config/"
DataDirPath = "/app/data/"
ValidationRulesFileName = "validation-rules.yaml"
ConfigFileName = "mizu-config.json"
DefaultApiServerPort = 8899
LogLevelEnvVar = "LOG_LEVEL"

@@ -4,11 +4,9 @@ go 1.17

require (
github.com/docker/go-units v0.4.0
github.com/golang-jwt/jwt/v4 v4.2.0
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
github.com/up9inc/mizu/logger v0.0.0
github.com/up9inc/mizu/tap/api v0.0.0
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
k8s.io/api v0.23.3
k8s.io/apimachinery v0.23.3
k8s.io/client-go v0.23.3

@@ -38,11 +36,11 @@ require (
github.com/go-openapi/jsonreference v0.19.6 // indirect
github.com/go-openapi/swag v0.21.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/btree v1.0.1 // indirect
github.com/google/go-cmp v0.5.7 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/martian v2.1.0+incompatible // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/googleapis/gnostic v0.5.5 // indirect

@@ -81,6 +79,7 @@ require (
google.golang.org/protobuf v1.27.1 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
k8s.io/cli-runtime v0.23.3 // indirect
k8s.io/component-base v0.23.3 // indirect
k8s.io/klog/v2 v2.40.1 // indirect
@@ -282,7 +282,6 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=

@@ -685,11 +685,8 @@ func (provider *Provider) handleRemovalError(err error) error {
return err
}

func (provider *Provider) CreateConfigMap(ctx context.Context, namespace string, configMapName string, serializedValidationRules string, serializedMizuConfig string) error {
func (provider *Provider) CreateConfigMap(ctx context.Context, namespace string, configMapName string, serializedMizuConfig string) error {
configMapData := make(map[string]string)
if serializedValidationRules != "" {
configMapData[shared.ValidationRulesFileName] = serializedValidationRules
}
configMapData[shared.ConfigFileName] = serializedMizuConfig

configMap := &core.ConfigMap{
@@ -1,13 +1,8 @@
package shared

import (
"io/ioutil"
"strings"

"github.com/op/go-logging"
"github.com/up9inc/mizu/logger"

"gopkg.in/yaml.v3"
v1 "k8s.io/api/core/v1"
)

@@ -135,83 +130,3 @@ type HealthResponse struct {
type VersionResponse struct {
Ver string `json:"ver"`
}

type RulesPolicy struct {
Rules []RulePolicy `yaml:"rules"`
}

type RulePolicy struct {
Type string `yaml:"type"`
Service string `yaml:"service"`
Path string `yaml:"path"`
Method string `yaml:"method"`
Key string `yaml:"key"`
Value string `yaml:"value"`
ResponseTime int64 `yaml:"response-time"`
Name string `yaml:"name"`
}

type RulesMatched struct {
Matched bool `json:"matched"`
Rule RulePolicy `json:"rule"`
}

func (r *RulePolicy) validateType() bool {
permitedTypes := []string{"json", "header", "slo"}
_, found := Find(permitedTypes, r.Type)
if !found {
logger.Log.Errorf("Only json, header and slo types are supported on rule definition. This rule will be ignored. rule name: %s", r.Name)
found = false
}
if strings.ToLower(r.Type) == "slo" {
if r.ResponseTime <= 0 {
logger.Log.Errorf("When rule type is slo, the field response-time should be specified and have a value >= 1. rule name: %s", r.Name)
found = false
}
}
return found
}

func (rules *RulesPolicy) ValidateRulesPolicy() []int {
invalidIndex := make([]int, 0)
for i := range rules.Rules {
validated := rules.Rules[i].validateType()
if !validated {
invalidIndex = append(invalidIndex, i)
}
}
return invalidIndex
}

func Find(slice []string, val string) (int, bool) {
for i, item := range slice {
if item == val {
return i, true
}
}
return -1, false
}

func DecodeEnforcePolicy(path string) (RulesPolicy, error) {
content, err := ioutil.ReadFile(path)
enforcePolicy := RulesPolicy{}
if err != nil {
return enforcePolicy, err
}
err = yaml.Unmarshal(content, &enforcePolicy)
if err != nil {
return enforcePolicy, err
}
invalidIndex := enforcePolicy.ValidateRulesPolicy()
var k = 0
if len(invalidIndex) != 0 {
for i, rule := range enforcePolicy.Rules {
if !ContainsInt(invalidIndex, i) {
enforcePolicy.Rules[k] = rule
k++
}
}
enforcePolicy.Rules = enforcePolicy.Rules[:k]
}
return enforcePolicy, nil
}

@@ -10,15 +10,6 @@ func Contains(slice []string, containsValue string) bool {
return false
}

func ContainsInt(slice []int, containsValue int) bool {
for _, sliceValue := range slice {
if sliceValue == containsValue {
return true
}
}
return false
}

func Unique(slice []string) []string {
keys := make(map[string]bool)
var list []string
@@ -164,40 +164,30 @@ type Entry struct {
RequestSize int `json:"requestSize"`
ResponseSize int `json:"responseSize"`
ElapsedTime int64 `json:"elapsedTime"`
Rules ApplicableRules `json:"rules,omitempty"`
}

type EntryWrapper struct {
Protocol Protocol `json:"protocol"`
Representation string `json:"representation"`
Data *Entry `json:"data"`
Base *BaseEntry `json:"base"`
Rules []map[string]interface{} `json:"rulesMatched,omitempty"`
IsRulesEnabled bool `json:"isRulesEnabled"`
Protocol Protocol `json:"protocol"`
Representation string `json:"representation"`
Data *Entry `json:"data"`
Base *BaseEntry `json:"base"`
}

type BaseEntry struct {
Id string `json:"id"`
Protocol Protocol `json:"proto,omitempty"`
Capture Capture `json:"capture"`
Summary string `json:"summary,omitempty"`
SummaryQuery string `json:"summaryQuery,omitempty"`
Status int `json:"status"`
StatusQuery string `json:"statusQuery"`
Method string `json:"method,omitempty"`
MethodQuery string `json:"methodQuery,omitempty"`
Timestamp int64 `json:"timestamp,omitempty"`
Source *TCP `json:"src"`
Destination *TCP `json:"dst"`
IsOutgoing bool `json:"isOutgoing,omitempty"`
Latency int64 `json:"latency"`
Rules ApplicableRules `json:"rules,omitempty"`
}

type ApplicableRules struct {
Latency int64 `json:"latency,omitempty"`
Status bool `json:"status,omitempty"`
NumberOfRules int `json:"numberOfRules,omitempty"`
Id string `json:"id"`
Protocol Protocol `json:"proto,omitempty"`
Capture Capture `json:"capture"`
Summary string `json:"summary,omitempty"`
SummaryQuery string `json:"summaryQuery,omitempty"`
Status int `json:"status"`
StatusQuery string `json:"statusQuery"`
Method string `json:"method,omitempty"`
MethodQuery string `json:"methodQuery,omitempty"`
Timestamp int64 `json:"timestamp,omitempty"`
Source *TCP `json:"src"`
Destination *TCP `json:"dst"`
IsOutgoing bool `json:"isOutgoing,omitempty"`
Latency int64 `json:"latency"`
}

const (

@@ -13,4 +13,4 @@ test-pull-bin:

test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect13/amqp/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/amqp/\* expect

@@ -298,7 +298,6 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
}
}

@@ -13,4 +13,4 @@ test-pull-bin:

test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect13/http/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/http/\* expect

@@ -341,7 +341,6 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
}
}

@@ -13,4 +13,4 @@ test-pull-bin:

test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect13/kafka/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/kafka/\* expect

@@ -208,7 +208,6 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
}
}

@@ -13,4 +13,4 @@ test-pull-bin:

test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect13/redis/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/redis/\* expect

@@ -136,7 +136,6 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
}
}
@@ -146,9 +146,6 @@ export const EntryDetailed = () => {
<React.Fragment>
{!isLoading && entryData && <EntryViewer
representation={entryData.representation}
isRulesEnabled={entryData.isRulesEnabled}
rulesMatched={entryData.rulesMatched}
elapsedTime={entryData.data.elapsedTime}
color={entryData.protocol.backgroundColor}
/>}
</React.Fragment>

@@ -117,7 +117,8 @@ interface EntryBodySectionProps {
selector?: string,
}

export const formatRequest = (body: any, contentType: string, decodeBase64: boolean = true, isBase64Encoding: boolean = false, isPretty: boolean = true): string => {
export const formatRequest = (bodyRef: any, contentType: string, decodeBase64: boolean = true, isBase64Encoding: boolean = false, isPretty: boolean = true): string => {
const { body } = bodyRef
if (!decodeBase64 || !body) return body;

const chunk = body.slice(0, MAXIMUM_BYTES_TO_FORMAT);

@@ -126,7 +127,7 @@ export const formatRequest = (body: any, contentType: string, decodeBase64: bool
try {
if (jsonLikeFormats.some(format => contentType?.indexOf(format) > -1)) {
if (!isPretty) return bodyBuf;
return jsonBeautify(JSON.parse(bodyBuf), null, 2, 80);
return Utils.isJson(bodyBuf) ? jsonBeautify(JSON.parse(bodyBuf), null, 2, 80) : bodyBuf
} else if (xmlLikeFormats.some(format => contentType?.indexOf(format) > -1)) {
if (!isPretty) return bodyBuf;
return xmlBeautify(bodyBuf, {

@@ -144,12 +145,24 @@ export const formatRequest = (body: any, contentType: string, decodeBase64: bool
}
} catch (error) {
console.error(error)
bodyRef.body = bodyBuf
throw error
}

return bodyBuf;
}

export const formatRequestWithOutError = (body: any, contentType: string, decodeBase64: boolean = true, isBase64Encoding: boolean = false, isPretty: boolean = true): string => {
const bodyRef = { body }
try {
return formatRequest(bodyRef, contentType, decodeBase64, isBase64Encoding, isPretty)
} catch (error) {
console.warn(error)
}

return bodyRef.body
}

export const EntryBodySection: React.FC<EntryBodySectionProps> = ({
title,
color,
@@ -173,8 +186,9 @@ export const EntryBodySection: React.FC<EntryBodySectionProps> = ({
}, [isLineNumbersGreaterThenOne, isPretty])

const formatTextBody = useCallback((body) => {
const bodyRef = { body }
try {
return formatRequest(body, contentType, decodeBase64, isBase64Encoding, isPretty)
return formatRequest(bodyRef, contentType, decodeBase64, isBase64Encoding, isPretty)
} catch (error) {
if (String(error).includes("More than one message in")) {
if (isDecodeGrpc)

@@ -183,6 +197,8 @@ export const EntryBodySection: React.FC<EntryBodySectionProps> = ({
console.warn(error);
}
}

return bodyRef.body
}, [isPretty, contentType, isDecodeGrpc, decodeBase64, isBase64Encoding])

const formattedText = useMemo(() => formatTextBody(content), [formatTextBody, content]);
@@ -265,110 +281,3 @@ export const EntryTableSection: React.FC<EntrySectionProps> = ({ title, color, a
}
</React.Fragment>
}

interface EntryPolicySectionProps {
title: string,
color: string,
latency?: number,
arrayToIterate: any[],
}

interface EntryPolicySectionCollapsibleTitleProps {
label: string;
matched: string;
expanded: boolean;
setExpanded: any;
}

const EntryPolicySectionCollapsibleTitle: React.FC<EntryPolicySectionCollapsibleTitleProps> = ({ label, matched, expanded, setExpanded }) => {
return <div className={styles.title}>
<span
className={`${styles.button}
${expanded ? styles.expanded : ''}`}
onClick={() => {
setExpanded(!expanded)
}}
>
{expanded ? '-' : '+'}
</span>
<span>
<tr className={styles.dataLine}>
<td className={`${styles.dataKey} ${styles.rulesTitleSuccess}`}>{label}</td>
<td className={`${styles.dataKey} ${matched === 'Success' ? styles.rulesMatchedSuccess : styles.rulesMatchedFailure}`}>{matched}</td>
</tr>
</span>
</div>
}

interface EntryPolicySectionContainerProps {
label: string;
matched: string;
children?: any;
}

export const EntryPolicySectionContainer: React.FC<EntryPolicySectionContainerProps> = ({ label, matched, children }) => {
const [expanded, setExpanded] = useState(false);
return <CollapsibleContainer
className={styles.collapsibleContainer}
expanded={expanded}
title={<EntryPolicySectionCollapsibleTitle label={label} matched={matched} expanded={expanded} setExpanded={setExpanded} />}
>
{children}
</CollapsibleContainer>
}

export const EntryTablePolicySection: React.FC<EntryPolicySectionProps> = ({ title, color, latency, arrayToIterate }) => {
return <React.Fragment>
{
arrayToIterate && arrayToIterate.length > 0 ?
<React.Fragment>
<EntrySectionContainer title={title} color={color}>
<table>
<tbody>
{arrayToIterate.map(({ rule, matched }, index) => {
return (
<EntryPolicySectionContainer key={index} label={rule.Name} matched={matched && (rule.Type === 'slo' ? rule.ResponseTime >= latency : true) ? "Success" : "Failure"}>
{
<React.Fragment>
{
rule.Key &&
<tr className={styles.dataValue}><td><b>Key:</b></td> <td>{rule.Key}</td></tr>
}
{
rule.ResponseTime !== 0 &&
<tr className={styles.dataValue}><td><b>Response Time:</b></td> <td>{rule.ResponseTime}</td></tr>
}
{
rule.Method &&
<tr className={styles.dataValue}><td><b>Method:</b></td> <td>{rule.Method}</td></tr>
}
{
rule.Path &&
<tr className={styles.dataValue}><td><b>Path:</b></td> <td>{rule.Path}</td></tr>
}
{
rule.Service &&
<tr className={styles.dataValue}><td><b>Service:</b></td> <td>{rule.Service}</td></tr>
}
{
rule.Type &&
<tr className={styles.dataValue}><td><b>Type:</b></td> <td>{rule.Type}</td></tr>
}
{
rule.Value &&
<tr className={styles.dataValue}><td><b>Value:</b></td> <td>{rule.Value}</td></tr>
}
</React.Fragment>
}
</EntryPolicySectionContainer>
)
}
)
}
</tbody>
</table>
</EntrySectionContainer>
</React.Fragment> : <span className={styles.noRules}>No rules could be applied to this request.</span>
}
</React.Fragment>
}
@@ -1,8 +1,7 @@
|
||||
import React, { useState, useCallback } from "react"
|
||||
import React, { useState, useCallback, useEffect, useMemo } from "react"
|
||||
import { useRecoilValue, useSetRecoilState } from "recoil"
|
||||
import entryDataAtom from "../../../recoil/entryData"
|
||||
import SectionsRepresentation from "./SectionsRepresentation";
|
||||
import { EntryTablePolicySection } from "../EntrySections/EntrySections";
import { ReactComponent as ReplayIcon } from './replay.svg';
import styles from './EntryViewer.module.sass';
import { Tabs } from "../../UI";
@@ -10,65 +9,70 @@ import replayRequestModalOpenAtom from "../../../recoil/replayRequestModalOpen";

const enabledProtocolsForReplay = ["http"]

export const AutoRepresentation: React.FC<any> = ({ representation, isRulesEnabled, rulesMatched, elapsedTime, color, isDisplayReplay = false }) => {
export enum TabsEnum {
Request = 0,
Response = 1
}

export const AutoRepresentation: React.FC<any> = ({ representation, color, openedTab = TabsEnum.Request, isDisplayReplay = false }) => {
const entryData = useRecoilValue(entryDataAtom)
const setIsOpenRequestModal = useSetRecoilState(replayRequestModalOpenAtom)
const isReplayDisplayed = useCallback(() => {
return enabledProtocolsForReplay.find(x => x === entryData.protocol.name) && isDisplayReplay
}, [entryData.protocol.name, isDisplayReplay])

const TABS = [
{
tab: 'Request',
badge: isReplayDisplayed() && <span title="Replay Request"><ReplayIcon fill={color} stroke={color} style={{ marginLeft: "10px", cursor: "pointer", height: "22px" }} onClick={() => setIsOpenRequestModal(true)} /></span>
const { request, response } = JSON.parse(representation);

const TABS = useMemo(() => {
const arr = [
{
tab: 'Request',
badge: isReplayDisplayed() && <span title="Replay Request"><ReplayIcon fill={color} stroke={color} style={{ marginLeft: "10px", cursor: "pointer", height: "22px" }} onClick={() => setIsOpenRequestModal(true)} /></span>
}]

if (response) {
arr.push(
{
tab: 'Response',
badge: null
}
);
}
];

return arr
}, [color, isReplayDisplayed, response, setIsOpenRequestModal]);

const [currentTab, setCurrentTab] = useState(TABS[0].tab);

const getOpenedTabIndex = useCallback(() => {
const currentIndex = TABS.findIndex(current => current.tab === currentTab)
return currentIndex > -1 ? currentIndex : 0
}, [TABS, currentTab])

useEffect(() => {
if (openedTab) {
setCurrentTab(TABS[openedTab].tab)
}

// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])

// Don't fail even if `representation` is an empty string
if (!representation) {
return <React.Fragment></React.Fragment>;
}

const { request, response } = JSON.parse(representation);

let responseTabIndex = 0;
let rulesTabIndex = 0;

if (response) {
TABS.push(
{
tab: 'Response',
badge: null
}
);
responseTabIndex = TABS.length - 1;
}

if (isRulesEnabled) {
TABS.push(
{
tab: 'Rules',
badge: null
}
);
rulesTabIndex = TABS.length - 1;
}

return <div className={styles.Entry}>
{<div className={styles.body}>
<div className={styles.bodyHeader}>
<Tabs tabs={TABS} currentTab={currentTab} color={color} onChange={setCurrentTab} leftAligned />
</div>
{currentTab === TABS[0].tab && <React.Fragment>
{getOpenedTabIndex() === TabsEnum.Request && <React.Fragment>
<SectionsRepresentation data={request} color={color} requestRepresentation={request} />
</React.Fragment>}
{response && currentTab === TABS[responseTabIndex].tab && <React.Fragment>
{response && getOpenedTabIndex() === TabsEnum.Response && <React.Fragment>
<SectionsRepresentation data={response} color={color} />
</React.Fragment>}
{isRulesEnabled && currentTab === TABS[rulesTabIndex].tab && <React.Fragment>
<EntryTablePolicySection title={'Rule'} color={color} latency={elapsedTime} arrayToIterate={rulesMatched ? rulesMatched : []} />
</React.Fragment>}
</div>}
</div>;
}

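For context on the change above: `AutoRepresentation` no longer receives the rules-related props and instead accepts an `openedTab` prop (defaulting to `TabsEnum.Request`) so callers can pre-select a tab. Below is a minimal usage sketch in TypeScript, assuming only what the diff shows (the component and `TabsEnum` exported from AutoRepresentation); the `representation` payload is an illustrative placeholder, not the real section format.

import React from "react";
import { AutoRepresentation, TabsEnum } from "./AutoRepresentation";

// Illustrative payload only: a JSON string with `request`/`response` keys,
// matching what the component destructures after JSON.parse(representation).
const representation = JSON.stringify({
  request: [{ type: "table", title: "Headers", data: "[]" }],
  response: [{ type: "table", title: "Headers", data: "[]" }],
});

// Opens directly on the Response tab; no rules props are passed anymore.
const ResponsePreview: React.FC = () => (
  <AutoRepresentation
    representation={representation}
    color="#205cf5"
    openedTab={TabsEnum.Response}
  />
);

export default ResponsePreview;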
@@ -3,18 +3,12 @@ import { AutoRepresentation } from './AutoRepresentation';

interface Props {
representation: any;
isRulesEnabled: boolean;
rulesMatched: any;
color: string;
elapsedTime: number;
}

const EntryViewer: React.FC<Props> = ({ representation, isRulesEnabled, rulesMatched, elapsedTime, color }) => {
const EntryViewer: React.FC<Props> = ({representation, color}) => {
return <AutoRepresentation
representation={representation}
isRulesEnabled={isRulesEnabled}
rulesMatched={rulesMatched}
elapsedTime={elapsedTime}
color={color}
isDisplayReplay={true}
/>

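The `EntryViewer` wrapper now forwards only `representation` and `color`. The updated `Props` interface itself is not visible in this hunk, so the following is a hedged sketch of how it would presumably be slimmed down to match the new signature.

import React from "react";
import { AutoRepresentation } from "./AutoRepresentation";

// Hypothetical slimmed-down Props, mirroring the new EntryViewer signature;
// the rules-related fields (isRulesEnabled, rulesMatched, elapsedTime) are gone.
interface Props {
  representation: any;
  color: string;
}

const EntryViewer: React.FC<Props> = ({ representation, color }) => (
  <AutoRepresentation representation={representation} color={color} isDisplayReplay={true} />
);

export default EntryViewer;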
@@ -20,31 +20,6 @@
.rowSelected
  border: 1px $blue-color solid

.ruleSuccessRow
  background: #E8FFF1

.ruleSuccessRowSelected
  border: 1px #6FCF97 solid
  border-left: 5px #6FCF97 solid

.ruleFailureRow
  background: #FFE9EF

.ruleFailureRowSelected
  border: 1px $failure-color solid
  border-left: 5px $failure-color solid

.ruleNumberText
  font-size: 12px
  font-weight: 600
  white-space: nowrap

.ruleNumberTextFailure
  color: #DB2156

.ruleNumberTextSuccess
  color: #219653

.resolvedName
  text-overflow: ellipsis
  white-space: nowrap

@@ -37,13 +37,6 @@ interface Entry {
dst: TCPInterface,
isOutgoing?: boolean;
latency: number;
rules: Rules;
}

interface Rules {
status: boolean;
latency: number;
numberOfRules: number;
}

interface EntryProps {
@@ -67,7 +60,6 @@ export const EntryItem: React.FC<EntryProps> = ({entry, style, headingMode, name
const isSelected = focusedEntryId === entry.id;

const classification = getClassification(entry.status)
const numberOfRules = entry.rules.numberOfRules
let ingoingIcon;
let outgoingIcon;
switch(classification) {
@@ -87,35 +79,6 @@ export const EntryItem: React.FC<EntryProps> = ({entry, style, headingMode, name
break;
}
}
let additionalRulesProperties = "";
let ruleSuccess = true;
let rule = 'latency' in entry.rules
if (rule) {
if (entry.rules.latency !== -1) {
if (entry.rules.latency >= entry.latency || !('latency' in entry)) {
additionalRulesProperties = styles.ruleSuccessRow
ruleSuccess = true
} else {
additionalRulesProperties = styles.ruleFailureRow
ruleSuccess = false
}
if (isSelected) {
additionalRulesProperties += ` ${entry.rules.latency >= entry.latency ? styles.ruleSuccessRowSelected : styles.ruleFailureRowSelected}`
}
} else {
if (entry.rules.status) {
additionalRulesProperties = styles.ruleSuccessRow
ruleSuccess = true
} else {
additionalRulesProperties = styles.ruleFailureRow
ruleSuccess = false
}
if (isSelected) {
additionalRulesProperties += ` ${entry.rules.status ? styles.ruleSuccessRowSelected : styles.ruleFailureRowSelected}`
}
}
}

const isStatusCodeEnabled = ((entry.proto.name === "http" && "status" in entry) || entry.status !== 0);

@@ -123,7 +86,7 @@ export const EntryItem: React.FC<EntryProps> = ({entry, style, headingMode, name
<div
id={`entry-${entry.id}`}
className={`${styles.row}
${isSelected && !rule ? styles.rowSelected : additionalRulesProperties}`}
${isSelected ? styles.rowSelected : ""}`}
onClick={() => {
if (!setFocusedEntryId) return;
setFocusedEntryId(entry.id);
@@ -187,13 +150,7 @@ export const EntryItem: React.FC<EntryProps> = ({entry, style, headingMode, name
</Queryable>
</div>
</div>
{
rule ?
<div className={`${styles.ruleNumberText} ${ruleSuccess ? styles.ruleNumberTextSuccess : styles.ruleNumberTextFailure} ${rule ? styles.separatorRight : ""}`}>
{`Rules (${numberOfRules})`}
</div>
: ""
}

<div className={styles.separatorRight}>
{headingMode ? <Queryable
query={`namespace == "${namespace}"`}

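With the rules model dropped from `Entry`, the row styling above collapses to a single selected/unselected modifier. A minimal sketch of the resulting class computation, written as a standalone helper for illustration only (the component inlines this in the template string shown in the diff):

// Hypothetical helper mirroring the simplified row styling after the rules removal:
// the only remaining modifier is the selected state.
function rowClassName(styles: { row: string; rowSelected: string }, isSelected: boolean): string {
  return `${styles.row} ${isSelected ? styles.rowSelected : ""}`.trim();
}

// Example: rowClassName({ row: "row", rowSelected: "rowSelected" }, true) === "row rowSelected"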
@@ -21,6 +21,7 @@ import { TOAST_CONTAINER_ID } from "../../configs/Consts";
import leftOffTopAtom from "../../recoil/leftOffTop";
import { DEFAULT_LEFTOFF, DEFAULT_FETCH, DEFAULT_FETCH_TIMEOUT_MS } from '../../hooks/useWS';
import ReplayRequestModalContainer from "../modals/ReplayRequestModal/ReplayRequestModal";
import replayRequestModalOpenAtom from "../../recoil/replayRequestModalOpen";

const useLayoutStyles = makeStyles(() => ({
details: {
@@ -70,6 +71,7 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
const [wsReadyState, setWsReadyState] = useState(0);
const setLeftOffTop = useSetRecoilState(leftOffTopAtom);
const scrollableRef = useRef(null);
const isOpenReplayModal = useRecoilValue(replayRequestModalOpenAtom)

const ws = useRef(null);
@@ -88,6 +90,10 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
}
}, [shouldCloseWebSocket, setShouldCloseWebSocket, closeWebSocket])

useEffect(() => {
isOpenReplayModal && setShouldCloseWebSocket(true)
}, [isOpenReplayModal, setShouldCloseWebSocket])

const sendQueryWhenWsOpen = useCallback((leftOff: string, query: string, fetch: number, fetchTimeoutMs: number) => {
setTimeout(() => {
if (ws?.current?.readyState === WebSocket.OPEN) {

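The new effect above pauses the traffic stream by requesting a WebSocket close whenever the replay modal opens. It reads a Recoil atom imported from `../../recoil/replayRequestModalOpen`, whose definition is not part of this diff; the following is only a plausible sketch of that shared piece of state, with the key and default value assumed.

import { atom } from "recoil";

// Hypothetical shape of the shared modal-open flag; the actual key and default
// live in src/recoil/replayRequestModalOpen and are not shown in this diff.
const replayRequestModalOpenAtom = atom<boolean>({
  key: "replayRequestModalOpen",
  default: false,
});

export default replayRequestModalOpenAtom;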
@@ -14,9 +14,9 @@ import styles from './ReplayRequestModal.module.sass'
import closeIcon from "assets/close.svg"
import spinnerImg from "assets/spinner.svg"
import refreshImg from "assets/refresh.svg"
import { formatRequest } from "../../EntryDetailed/EntrySections/EntrySections";
import { formatRequestWithOutError } from "../../EntryDetailed/EntrySections/EntrySections";
import entryDataAtom from "../../../recoil/entryData";
import { AutoRepresentation } from "../../EntryDetailed/EntryViewer/AutoRepresentation";
import { AutoRepresentation, TabsEnum } from "../../EntryDetailed/EntryViewer/AutoRepresentation";
import useDebounce from "../../../hooks/useDebounce"
import replayRequestModalOpenAtom from "../../../recoil/replayRequestModalOpen";
import { Utils } from "../../../helpers/Utils";
@@ -175,7 +175,7 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
</Fragment>
break;
case RequestTabs.Body:
const formatedCode = formatRequest(postData || "", request?.postData?.mimeType)
const formatedCode = formatRequestWithOutError(postData || "", request?.postData?.mimeType)
innerComponent = <div className={styles.codeEditor}>
<CodeEditor language={request?.postData?.mimeType.split("/")[1]}
code={Utils.isJson(formatedCode) ? JSON.stringify(JSON.parse(formatedCode || "{}"), null, 2) : formatedCode}
@@ -239,7 +239,7 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
<span className={styles.sectionHeader}>RESPONSE</span>
</AccordionSummary>
<AccordionDetails>
<AutoRepresentation representation={response} color={entryData.protocol.backgroundColor} />
<AutoRepresentation representation={response} color={entryData.protocol.backgroundColor} openedTab={TabsEnum.Response} />
</AccordionDetails>
</Accordion>)}
</div>

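The body tab above switches from `formatRequest` to `formatRequestWithOutError`, presumably so that a request body that fails to parse is shown raw instead of surfacing an error. That helper lives in EntrySections and is not shown here; a hedged sketch of the tolerant behaviour it likely has:

// Hypothetical tolerant formatter: pretty-print JSON bodies, fall back to the
// raw text for anything that does not parse, and never throw.
function formatRequestWithOutError(body: string, mimeType?: string): string {
  if (mimeType?.includes("json")) {
    try {
      return JSON.stringify(JSON.parse(body), null, 2);
    } catch {
      return body; // swallow the parse error instead of reporting it
    }
  }
  return body;
}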
@@ -26,7 +26,7 @@ export const TimelineBarChart: React.FC<TimelineBarChartProps> = ({ timeLineBarC
if (!data) return;
const protocolsBarsData = [];
const prtcNames = [];
data.forEach(protocolObj => {
data.sort((a, b) => a.timestamp < b.timestamp ? -1 : 1).forEach(protocolObj => {
let newProtocolbj: { [k: string]: any } = {};
newProtocolbj.timestamp = Utils.getHoursAndMinutes(protocolObj.timestamp);
protocolObj.protocols.forEach(protocol => {
@@ -36,7 +36,6 @@ export const TimelineBarChart: React.FC<TimelineBarChartProps> = ({ timeLineBarC
protocolsBarsData.push(newProtocolbj);
})
const uniqueObjArray = Utils.creatUniqueObjArrayByProp(prtcNames, "name")
protocolsBarsData.sort((a, b) => a.timestamp < b.timestamp ? -1 : 1);
setProtocolStats(protocolsBarsData);
setProtocolsNamesAndColors(uniqueObjArray);
}, [data, timeLineBarChartMode])
@@ -49,7 +48,7 @@ export const TimelineBarChart: React.FC<TimelineBarChartProps> = ({ timeLineBarC
}
const commandsNames = [];
const protocolsCommands = [];
data.forEach(protocolObj => {
data.sort((a, b) => a.timestamp < b.timestamp ? -1 : 1).forEach(protocolObj => {
let newCommandlbj: { [k: string]: any } = {};
newCommandlbj.timestamp = Utils.getHoursAndMinutes(protocolObj.timestamp);
protocolObj.protocols.find(protocol => protocol.name === selectedProtocol)?.methods.forEach(command => {
@@ -59,21 +58,33 @@ export const TimelineBarChart: React.FC<TimelineBarChartProps> = ({ timeLineBarC
})
protocolsCommands.push(newCommandlbj);
})
protocolsCommands.sort((a, b) => a.timestamp < b.timestamp ? -1 : 1);
setcommandNames(commandsNames);
setCommandStats(protocolsCommands);
}, [data, timeLineBarChartMode, selectedProtocol])

const bars = useMemo(() => (commandNames || protocolsNamesAndColors).map((entry) => {
return <Bar key={entry.name || entry} dataKey={entry.name || entry} stackId="a" fill={entry.color || Utils.stringToColor(entry)} />
return <Bar key={entry.name || entry} dataKey={entry.name || entry} stackId="a" fill={entry.color || Utils.stringToColor(entry)} barSize={30} />
}), [protocolsNamesAndColors, commandNames])

const renderTick = (tickProps) => {
const { x, y, payload } = tickProps;
const { index, value } = payload;

if (index % 3 === 0) {
return <text x={x} y={y + 10} textAnchor="end">{`${value}`}</text>;
}
return null;
};

return (
<div className={styles.barChartContainer}>
{protocolStats.length > 0 && <BarChart
width={730}
width={750}
height={250}
data={commandStats || protocolStats}
barCategoryGap={0}
barSize={30}
margin={{
top: 20,
right: 30,
@@ -81,7 +92,7 @@ export const TimelineBarChart: React.FC<TimelineBarChartProps> = ({ timeLineBarC
bottom: 5
}}
>
<XAxis dataKey="timestamp" />
<XAxis dataKey="timestamp" tick={renderTick} tickLine={false} />
<YAxis tickFormatter={(value) => timeLineBarChartMode === "VOLUME" ? Utils.humanFileSize(value) : value} />
<Tooltip formatter={(value) => timeLineBarChartMode === "VOLUME" ? Utils.humanFileSize(value) : value + " Requests"} />
{bars}

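The protocol legend above is built with `Utils.creatUniqueObjArrayByProp(prtcNames, "name")`, a helper that is not part of this diff. Assuming it deduplicates objects by a property value, a minimal sketch:

// Hypothetical dedupe-by-property helper, keeping the first occurrence of each value.
function creatUniqueObjArrayByProp<T extends Record<string, any>>(arr: T[], prop: string): T[] {
  const seen = new Set<unknown>();
  return arr.filter(item => {
    if (seen.has(item[prop])) return false;
    seen.add(item[prop]);
    return true;
  });
}

// e.g. creatUniqueObjArrayByProp([{ name: "http" }, { name: "http" }, { name: "redis" }], "name")
// -> [{ name: "http" }, { name: "redis" }]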
@@ -25,3 +25,4 @@
border: none
border-bottom: 1px black solid
outline: none
width: 100px

@@ -13,7 +13,7 @@ const modalStyle = {
top: '6%',
left: '50%',
transform: 'translate(-50%, 0%)',
width: '50vw',
width: '60vw',
height: '82vh',
bgcolor: 'background.paper',
borderRadius: '5px',
@@ -30,32 +30,31 @@ export enum StatsMode {
interface TrafficStatsModalProps {
isOpen: boolean;
onClose: () => void;
getPieStatsDataApi: () => Promise<any>
getTimelineStatsDataApi: () => Promise<any>
getTrafficStatsDataApi: () => Promise<any>
}

export const PROTOCOLS = ["ALL PROTOCOLS","gRPC", "REDIS", "HTTP", "GQL", "AMQP", "KFAKA"];

export const PROTOCOLS = ["ALL", "gRPC", "REDIS", "HTTP", "GQL", "AMQP", "KAFKA"];
export const ALL_PROTOCOLS = PROTOCOLS[0];

export const TrafficStatsModal: React.FC<TrafficStatsModalProps> = ({ isOpen, onClose, getPieStatsDataApi, getTimelineStatsDataApi }) => {
export const TrafficStatsModal: React.FC<TrafficStatsModalProps> = ({ isOpen, onClose, getTrafficStatsDataApi }) => {

const modes = Object.keys(StatsMode).filter(x => !(parseInt(x) >= 0));
const [statsMode, setStatsMode] = useState(modes[0]);
const [selectedProtocol, setSelectedProtocol] = useState("ALL PROTOCOLS");
const [selectedProtocol, setSelectedProtocol] = useState(ALL_PROTOCOLS);
const [pieStatsData, setPieStatsData] = useState(null);
const [timelineStatsData, setTimelineStatsData] = useState(null);
const [isLoading, setIsLoading] = useState(false);
const commonClasses = useCommonStyles();

const getTrafficStats = useCallback(async () => {
if (isOpen && getPieStatsDataApi) {
if (isOpen && getTrafficStatsDataApi) {
(async () => {
try {
setIsLoading(true);
const pieData = await getPieStatsDataApi();
setPieStatsData(pieData);
const timelineData = await getTimelineStatsDataApi();
setTimelineStatsData(timelineData);
const statsData = await getTrafficStatsDataApi();
setPieStatsData(statsData.pie);
setTimelineStatsData(statsData.timeline);
} catch (e) {
console.error(e)
} finally {
@@ -63,7 +62,7 @@ export const TrafficStatsModal: React.FC<TrafficStatsModalProps> = ({ isOpen, on
}
})()
}
}, [isOpen, getPieStatsDataApi, getTimelineStatsDataApi, setPieStatsData, setTimelineStatsData])
}, [isOpen, getTrafficStatsDataApi, setPieStatsData, setTimelineStatsData])

useEffect(() => {
getTrafficStats();

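The modal above now makes a single call and splits the result into pie and timeline state. The payload shape is implied rather than shown in this diff; the sketch below is a hedged guess at what `statsData` is expected to look like (the timeline part is inferred from how TimelineBarChart reads `timestamp`, `protocols`, `name` and `methods`; the pie fields are illustrative only).

// Hypothetical shape of the consolidated /status/trafficStats payload,
// inferred from statsData.pie / statsData.timeline above.
interface TrafficStatsPayload {
  pie: Array<{ name: string; color?: string; entriesCount?: number }>; // illustrative fields
  timeline: Array<{
    timestamp: number;
    protocols: Array<{ name: string; methods: unknown[] }>;
  }>;
}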
@@ -36,7 +36,7 @@ const App = () => {
openModal={oasModalOpen}
handleCloseModal={() => setOasModalOpen(false)}
/>}
<TrafficStatsModal isOpen={trafficStatsModalOpen} onClose={() => setTrafficStatsModalOpen(false)} getPieStatsDataApi={api.getPieStats} getTimelineStatsDataApi={api.getTimelineStats}/>
<TrafficStatsModal isOpen={trafficStatsModalOpen} onClose={() => setTrafficStatsModalOpen(false)} getTrafficStatsDataApi={api.getTrafficStats}/>
</div>
</ThemeProvider>
</StyledEngineProvider>

@@ -116,13 +116,8 @@ export default class Api {
});
}

getPieStats = async () => {
const response = await client.get("/status/accumulative");
return response.data;
}

getTimelineStats = async () => {
const response = await client.get("/status/accumulativeTiming");
getTrafficStats = async () => {
const response = await client.get("/status/trafficStats");
return response.data;
}
}

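A short usage sketch of the consolidated client method, assuming the `Api` class is instantiated elsewhere as `api` (as in App.tsx above); the destructured field names follow the `statsData.pie` / `statsData.timeline` access shown in TrafficStatsModal and are otherwise an assumption.

// Hedged usage sketch: one request now feeds both the pie and timeline charts.
async function loadTrafficStats(api: { getTrafficStats: () => Promise<any> }) {
  const stats = await api.getTrafficStats(); // GET /status/trafficStats
  return { pie: stats.pie, timeline: stats.timeline };
}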