Compare commits

...

24 Commits

Author SHA1 Message Date
gadotroee
5525214d0a Fix memory of acceptanceTests minikube cluster (#1209) 2022-07-19 12:55:44 +03:00
RoyIsland
efd414a2ed Removed telemetry (#1208) 2022-07-19 12:29:48 +03:00
AmitUp9
b3e79ff244 Grooming Traffic Stats Modal - change font and time picker position (#1206)
* font change and time picker position update

* add font-family to variables scss
2022-07-17 17:19:44 +03:00
AmitUp9
d4b9fea5a7 fix elastic time picker ui css (#1204) 2022-07-14 11:11:05 +03:00
leon-up9
d11770681b full height (#1202)
Co-authored-by: Leon <>
2022-07-13 18:37:22 +03:00
gadotroee
e9719cba3a Add time range to stats (#1199) 2022-07-13 17:21:18 +03:00
leon-up9
15f7b889e2 height change (#1201)
Co-authored-by: Leon <>
2022-07-13 13:37:08 +03:00
RoyUP9
d98ac0e8f7 Removed redundant IgnoredUserAgents field (#1198) 2022-07-12 20:41:42 +03:00
gadotroee
a3c236ff0a Fix colors map initialization (#1200) 2022-07-12 20:05:21 +03:00
gadotroee
4b280ecd6d Hide Response tab if there is no response (#1197) 2022-07-12 18:38:39 +03:00
leon-up9
de554f5fb6 ui/ include scss files in common (#1195)
* include scss files

* exported color

Co-authored-by: Leon <>
2022-07-12 11:50:24 +03:00
RoyUP9
7c159fffc0 Added redact using insertion filter (#1196) 2022-07-12 10:19:24 +03:00
M. Mert Yıldıran
1f2f63d11b Implement AMQP request-response matcher (#1091)
* Implement the basis of AMQP request-response matching

* Fix `package.json`

* Add `ExchangeDeclareOk`

* Add `ConnectionCloseOk`

* Add `BasicConsumeOk`

* Add `QueueBindOk`

* Add `representEmptyResponse` and fix `BasicPublish` and `BasicDeliver`

* Fix ident and matcher, add `connectionOpen`, `channelOpen`, `connectionTune`, `basicCancel`

* Fix linter

* Fix the unit tests

* #run_acceptance_tests

* #run_acceptance_tests

* Fix the tests #run_acceptance_tests

* Log don't panic

* Don't skip AMQP acceptance tests #run_acceptance_tests

* Revert "Don't skip AMQP acceptance tests #run_acceptance_tests"

This reverts commit c60e9cf747.

* Remove `Details` section from `representEmpty`

* Add `This request or response has no data.` text
2022-07-11 17:33:25 +03:00
RoyUP9
e2544aea12 Remove duplication of Headers, Cookies and QueryString (#1192) 2022-07-11 13:16:22 +03:00
Nimrod Gilboa Markevich
57e60073f5 Generate bpf files before running tests (#1194) 2022-07-11 12:31:45 +03:00
Nimrod Gilboa Markevich
f220ad2f1a Delete ebpf object files (#1190)
Do not track object files in git.
Generate the files with `make bpf` or during `make agent`.
2022-07-11 12:08:20 +03:00
RoyUP9
5875ba0eb3 Fixed panic in socket cleanup (#1193) 2022-07-11 11:18:58 +03:00
leon-up9
9aaf3f1423 Ui/Download request replay (#1188)
* added icon

* download & upload

* button changes

* clean up

* changes

* pkj json

* img

* removed codeEditor options

* changes

Co-authored-by: Leon <>
2022-07-10 16:48:18 +03:00
Nimrod Gilboa Markevich
a2463b739a Improve tls info for openssl with kprobes (#1177)
Instead of going through the socket fd, addresses are obtained in kprobe/tcp_sendmsg on SSL write and in kprobe/tcp_recvmsg on SSL read. The TCP kprobes and the OpenSSL uprobes communicate through the id->sslInfo BPF map.
2022-07-07 19:11:54 +03:00
AmitUp9
c010d336bb add date to timeline ticks (#1191) 2022-07-07 14:09:41 +03:00
RoyUP9
710411e112 Replaced ProtocolId with Protocol Summary (#1189) 2022-07-07 12:05:59 +03:00
AmitUp9
274fbeb34a warning cleaning from console (#1187)
* warning cleaning from console

* code cleaning
2022-07-06 13:13:04 +03:00
leon-up9
38c05a6634 UI/feature flag for replay modal (#1186)
* context added

* import added

* chnages

* ui enabled

* moved to Consts

* changes to recoil

* change

* new useEffect

Co-authored-by: Leon <>
2022-07-06 11:19:46 +03:00
leon-up9
d857935889 move icon to right side (#1185)
Co-authored-by: Leon <>
2022-07-05 16:35:03 +03:00
121 changed files with 62004 additions and 11468 deletions

View File

@@ -32,6 +32,10 @@ jobs:
id: agent_modified_files
run: devops/check_modified_files.sh agent/
- name: Generate eBPF object files and go bindings
id: generate_ebpf
run: make bpf
- name: Go lint - agent
uses: golangci/golangci-lint-action@v2
if: steps.agent_modified_files.outputs.matched == 'true'

View File

@@ -40,6 +40,10 @@ jobs:
run: |
./devops/install-capstone.sh
- name: Generate eBPF object files and go bindings
id: generate_ebpf
run: make bpf
- name: Check CLI modified files
id: cli_modified_files
run: devops/check_modified_files.sh cli/

.gitignore vendored
View File

@@ -56,3 +56,6 @@ tap/extensions/*/expect
# Ignore *.log files
*.log
# Object files
*.o

View File

@@ -8,7 +8,7 @@ SHELL=/bin/bash
# HELP
# This will output the help for each task
# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
.PHONY: help ui agent agent-debug cli tap docker
.PHONY: help ui agent agent-debug cli tap docker bpf clean-bpf
help: ## This help.
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
@@ -20,6 +20,13 @@ TS_SUFFIX="$(shell date '+%s')"
GIT_BRANCH="$(shell git branch | grep \* | cut -d ' ' -f2 | tr '[:upper:]' '[:lower:]' | tr '/' '_')"
BUCKET_PATH=static.up9.io/mizu/$(GIT_BRANCH)
export VER?=0.0
ARCH=$(shell uname -m)
ifeq ($(ARCH),$(filter $(ARCH),aarch64 arm64))
BPF_O_ARCH_LABEL=arm64
else
BPF_O_ARCH_LABEL=x86
endif
BPF_O_FILES = tap/tlstapper/tlstapper46_bpfel_$(BPF_O_ARCH_LABEL).o tap/tlstapper/tlstapper_bpfel_$(BPF_O_ARCH_LABEL).o
ui: ## Build UI.
@(cd ui; npm i ; npm run build; )
@@ -31,11 +38,17 @@ cli: ## Build CLI.
cli-debug: ## Build CLI.
@echo "building cli"; cd cli && $(MAKE) build-debug
agent: ## Build agent.
agent: bpf ## Build agent.
@(echo "building mizu agent .." )
@(cd agent; go build -o build/mizuagent main.go)
@ls -l agent/build
bpf: $(BPF_O_FILES)
$(BPF_O_FILES): $(wildcard tap/tlstapper/bpf/**/*.[ch])
@(echo "building tlstapper bpf")
@(./tap/tlstapper/bpf-builder/build.sh)
agent-debug: ## Build agent for debug.
@(echo "building mizu agent for debug.." )
@(cd agent; go build -gcflags="all=-N -l" -o build/mizuagent main.go)
@@ -76,6 +89,9 @@ clean-cli: ## Clean CLI.
clean-docker: ## Run clean docker
@(echo "DOCKER cleanup - NOT IMPLEMENTED YET " )
clean-bpf:
@(rm $(BPF_O_FILES) ; echo "bpf cleanup done" )
test-lint: ## Run lint on all modules
cd agent && golangci-lint run
cd shared && golangci-lint run

View File

@@ -11,7 +11,6 @@ module.exports = defineConfig({
testUrl: 'http://localhost:8899/',
redactHeaderContent: 'User-Header[REDACTED]',
redactBodyContent: '{ "User": "[REDACTED]" }',
regexMaskingBodyContent: '[REDACTED]',
greenFilterColor: 'rgb(210, 250, 210)',
redFilterColor: 'rgb(250, 214, 220)',
bodyJsonClass: '.hljs',

View File

@@ -1,7 +0,0 @@
import {isValueExistsInElement} from "../testHelpers/TrafficHelper";
it('Loading Mizu', function () {
cy.visit(Cypress.env('testUrl'));
});
isValueExistsInElement(true, Cypress.env('regexMaskingBodyContent'), Cypress.env('bodyJsonClass'));

View File

@@ -18,7 +18,6 @@ require (
github.com/fsnotify/fsnotify v1.5.1 // indirect
github.com/go-logr/logr v1.2.2 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/go-cmp v0.5.7 // indirect
github.com/google/gofuzz v1.2.0 // indirect
@@ -29,7 +28,6 @@ require (
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/up9inc/mizu/logger v0.0.0 // indirect
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect
golang.org/x/sys v0.0.0-20220207234003-57398862261d // indirect

View File

@@ -206,7 +206,6 @@ github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zV
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang-jwt/jwt/v4 v4.2.0 h1:besgBTC8w8HjP6NzQdxwKH9Z5oQMZ24ThTrHp3cZ8eU=
github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=

View File

@@ -27,7 +27,7 @@ else
fi
echo "Starting minikube..."
minikube start --cpus 2 --memory 6946
minikube start --cpus 2 --memory 6000
echo "Creating mizu tests namespaces"
kubectl create namespace mizu-tests --dry-run=client -o yaml | kubectl apply -f -

View File

@@ -2,10 +2,8 @@ package acceptanceTests
import (
"archive/zip"
"bytes"
"fmt"
"io/ioutil"
"net/http"
"os/exec"
"path"
"strings"
@@ -343,7 +341,7 @@ func TestTapRedact(t *testing.T) {
tapNamespace := GetDefaultTapNamespace()
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
tapCmdArgs = append(tapCmdArgs, "--redact")
tapCmdArgs = append(tapCmdArgs, "--redact", "--set", "tap.redact-patterns.request-headers=User-Header", "--set", "tap.redact-patterns.request-body=User")
tapCmd := exec.Command(cliPath, tapCmdArgs...)
t.Logf("running command: %v", tapCmd.String())
@@ -429,60 +427,6 @@ func TestTapNoRedact(t *testing.T) {
RunCypressTests(t, "npx cypress run --spec \"cypress/e2e/tests/NoRedact.js\"")
}
func TestTapRegexMasking(t *testing.T) {
if testing.Short() {
t.Skip("ignored acceptance test")
}
cliPath, cliPathErr := GetCliPath()
if cliPathErr != nil {
t.Errorf("failed to get cli path, err: %v", cliPathErr)
return
}
tapCmdArgs := GetDefaultTapCommandArgs()
tapNamespace := GetDefaultTapNamespace()
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
tapCmdArgs = append(tapCmdArgs, "--redact")
tapCmdArgs = append(tapCmdArgs, "-r", "Mizu")
tapCmd := exec.Command(cliPath, tapCmdArgs...)
t.Logf("running command: %v", tapCmd.String())
t.Cleanup(func() {
if err := CleanupCommand(tapCmd); err != nil {
t.Logf("failed to cleanup tap command, err: %v", err)
}
})
if err := tapCmd.Start(); err != nil {
t.Errorf("failed to start tap command, err: %v", err)
return
}
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
if err := WaitTapPodsReady(apiServerUrl); err != nil {
t.Errorf("failed to start tap pods on time, err: %v", err)
return
}
proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)
for i := 0; i < DefaultEntriesCount; i++ {
response, requestErr := http.Post(fmt.Sprintf("%v/post", proxyUrl), "text/plain", bytes.NewBufferString("Mizu"))
if _, requestErr = ExecuteHttpRequest(response, requestErr); requestErr != nil {
t.Errorf("failed to send proxy request, err: %v", requestErr)
return
}
}
RunCypressTests(t, "npx cypress run --spec \"cypress/e2e/tests/RegexMasking.js\"")
}
func TestTapIgnoredUserAgents(t *testing.T) {
if testing.Short() {
t.Skip("ignored acceptance test")

View File

@@ -215,12 +215,11 @@ func DeleteKubeFile(kubeContext string, namespace string, filename string) error
func getDefaultCommandArgs() []string {
agentImageValue := os.Getenv("MIZU_CI_IMAGE")
setFlag := "--set"
telemetry := "telemetry=false"
agentImage := fmt.Sprintf("agent-image=%s", agentImageValue)
imagePullPolicy := "image-pull-policy=IfNotPresent"
headless := "headless=true"
return []string{setFlag, telemetry, setFlag, agentImage, setFlag, imagePullPolicy, setFlag, headless}
return []string{setFlag, agentImage, setFlag, imagePullPolicy, setFlag, headless}
}
func GetDefaultTapCommandArgs() []string {

View File

@@ -48,7 +48,6 @@ require (
github.com/Masterminds/semver v1.5.0 // indirect
github.com/PuerkitoBio/purell v1.1.1 // indirect
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5 // indirect
github.com/chanced/dynamic v0.0.0-20211210164248-f8fadb1d735b // indirect
github.com/cilium/ebpf v0.9.0 // indirect

View File

@@ -101,7 +101,6 @@ github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=

View File

@@ -22,7 +22,7 @@ func (e *DefaultEntryStreamerSocketConnector) SendEntry(socketId int, entry *tap
if params.EnableFullEntries {
message, _ = models.CreateFullEntryWebSocketMessage(entry)
} else {
protocol, ok := protocolsMap[entry.ProtocolId]
protocol, ok := protocolsMap[entry.Protocol.ToString()]
if !ok {
return fmt.Errorf("protocol not found, protocol: %v", protocol)
}

View File

@@ -126,7 +126,7 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
serviceMapGenerator.NewTCPEntry(mizuEntry.Source, mizuEntry.Destination, &item.Protocol)
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGeneratorSink)
oasGenerator.HandleEntry(mizuEntry, &item.Protocol)
oasGenerator.HandleEntry(mizuEntry)
}
}

View File

@@ -97,7 +97,9 @@ func websocketHandler(c *gin.Context, eventHandlers EventHandlers, isTapper bool
websocketIdsLock.Unlock()
defer func() {
socketCleanup(socketId, connectedWebsockets[socketId])
if socketConnection := connectedWebsockets[socketId]; socketConnection != nil {
socketCleanup(socketId, socketConnection)
}
}()
eventHandlers.WebSocketConnect(c, socketId, isTapper)
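
The nil check added above is what the "Fixed panic in socket cleanup" commit addresses: once a socket has already been dropped from connectedWebsockets, the map lookup yields nil, and running cleanup on that nil connection panics. A minimal standalone sketch of the pattern (the connection type and cleanup function below are illustrative stand-ins, not the agent's actual types):

package main

import "fmt"

// Illustrative stand-in for the agent's connection bookkeeping.
type socketConnection struct{ id int }

var connectedWebsockets = map[int]*socketConnection{}

func socketCleanup(id int, conn *socketConnection) {
	// Dereferencing conn here would panic if conn were nil.
	fmt.Printf("cleaning up socket %d (conn id %d)\n", id, conn.id)
}

func main() {
	connectedWebsockets[1] = &socketConnection{id: 1}

	for _, socketId := range []int{1, 2} {
		// Lookup of a missing key returns nil; guard before cleaning up.
		if conn := connectedWebsockets[socketId]; conn != nil {
			socketCleanup(socketId, conn)
		} else {
			fmt.Printf("socket %d already removed, skipping cleanup\n", socketId)
		}
	}
}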

View File

@@ -36,11 +36,13 @@ var (
)
var ProtocolHttp = &tapApi.Protocol{
Name: "http",
ProtocolSummary: tapApi.ProtocolSummary{
Name: "http",
Version: "1.1",
Abbreviation: "HTTP",
},
LongName: "Hypertext Transfer Protocol -- HTTP/1.1",
Abbreviation: "HTTP",
Macro: "http",
Version: "1.1",
BackgroundColor: "#205cf5",
ForegroundColor: "#ffffff",
FontSize: 12,

View File

@@ -1,7 +1,10 @@
package controllers
import (
"fmt"
"net/http"
"strconv"
"time"
core "k8s.io/api/core/v1"
@@ -80,7 +83,24 @@ func GetGeneralStats(c *gin.Context) {
}
func GetTrafficStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetTrafficStats())
startTime, endTime, err := getStartEndTime(c)
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
c.JSON(http.StatusOK, providers.GetTrafficStats(startTime, endTime))
}
func getStartEndTime(c *gin.Context) (time.Time, time.Time, error) {
startTimeValue, err := strconv.Atoi(c.Query("startTimeMs"))
if err != nil {
return time.UnixMilli(0), time.UnixMilli(0), fmt.Errorf("invalid start time: %v", err)
}
endTimeValue, err := strconv.Atoi(c.Query("endTimeMs"))
if err != nil {
return time.UnixMilli(0), time.UnixMilli(0), fmt.Errorf("invalid end time: %v", err)
}
return time.UnixMilli(int64(startTimeValue)), time.UnixMilli(int64(endTimeValue)), nil
}
func GetCurrentResolvingInformation(c *gin.Context) {
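
GetTrafficStats now takes the requested window from the startTimeMs and endTimeMs query parameters as millisecond Unix timestamps, answering 400 if either is missing or non-numeric. A hedged client-side sketch of building such a request; the base URL and the /status/trafficStats path are assumptions for illustration, only the parameter names come from this diff:

package main

import (
	"fmt"
	"net/url"
	"time"
)

func main() {
	// Assumed endpoint; only startTimeMs/endTimeMs come from the handler above.
	base := "http://localhost:8899/status/trafficStats"

	end := time.Now()
	start := end.Add(-15 * time.Minute)

	q := url.Values{}
	q.Set("startTimeMs", fmt.Sprintf("%d", start.UnixMilli()))
	q.Set("endTimeMs", fmt.Sprintf("%d", end.UnixMilli()))

	// The handler parses these with strconv.Atoi and time.UnixMilli.
	fmt.Println(base + "?" + q.Encode())
}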

View File

@@ -38,7 +38,7 @@ func (e *BasenineEntriesProvider) GetEntries(entriesRequest *models.EntriesReque
return nil, nil, err
}
protocol, ok := app.ProtocolsMap[entry.ProtocolId]
protocol, ok := app.ProtocolsMap[entry.Protocol.ToString()]
if !ok {
return nil, nil, fmt.Errorf("protocol not found, protocol: %v", protocol)
}
@@ -77,7 +77,7 @@ func (e *BasenineEntriesProvider) GetEntry(singleEntryRequest *models.SingleEntr
return nil, errors.New(string(bytes))
}
protocol, ok := app.ProtocolsMap[entry.ProtocolId]
protocol, ok := app.ProtocolsMap[entry.Protocol.ToString()]
if !ok {
return nil, fmt.Errorf("protocol not found, protocol: %v", protocol)
}

View File

@@ -11,75 +11,30 @@ import (
"github.com/up9inc/mizu/logger"
)
// Keep it because we might want cookies in the future
//func BuildCookies(rawCookies []interface{}) []har.Cookie {
// cookies := make([]har.Cookie, 0, len(rawCookies))
//
// for _, cookie := range rawCookies {
// c := cookie.(map[string]interface{})
// expiresStr := ""
// if c["expires"] != nil {
// expiresStr = c["expires"].(string)
// }
// expires, _ := time.Parse(time.RFC3339, expiresStr)
// httpOnly := false
// if c["httponly"] != nil {
// httpOnly, _ = strconv.ParseBool(c["httponly"].(string))
// }
// secure := false
// if c["secure"] != nil {
// secure, _ = strconv.ParseBool(c["secure"].(string))
// }
// path := ""
// if c["path"] != nil {
// path = c["path"].(string)
// }
// domain := ""
// if c["domain"] != nil {
// domain = c["domain"].(string)
// }
//
// cookies = append(cookies, har.Cookie{
// Name: c["name"].(string),
// Value: c["value"].(string),
// Path: path,
// Domain: domain,
// HTTPOnly: httpOnly,
// Secure: secure,
// Expires: expires,
// Expires8601: expiresStr,
// })
// }
//
// return cookies
//}
func BuildHeaders(rawHeaders []interface{}) ([]Header, string, string, string, string, string) {
func BuildHeaders(rawHeaders map[string]interface{}) ([]Header, string, string, string, string, string) {
var host, scheme, authority, path, status string
headers := make([]Header, 0, len(rawHeaders))
for _, header := range rawHeaders {
h := header.(map[string]interface{})
for key, value := range rawHeaders {
headers = append(headers, Header{
Name: h["name"].(string),
Value: h["value"].(string),
Name: key,
Value: value.(string),
})
if h["name"] == "Host" {
host = h["value"].(string)
if key == "Host" {
host = value.(string)
}
if h["name"] == ":authority" {
authority = h["value"].(string)
if key == ":authority" {
authority = value.(string)
}
if h["name"] == ":scheme" {
scheme = h["value"].(string)
if key == ":scheme" {
scheme = value.(string)
}
if h["name"] == ":path" {
path = h["value"].(string)
if key == ":path" {
path = value.(string)
}
if h["name"] == ":status" {
status = h["value"].(string)
if key == ":status" {
status = value.(string)
}
}
@@ -119,8 +74,8 @@ func BuildPostParams(rawParams []interface{}) []Param {
}
func NewRequest(request map[string]interface{}) (harRequest *Request, err error) {
headers, host, scheme, authority, path, _ := BuildHeaders(request["_headers"].([]interface{}))
cookies := make([]Cookie, 0) // BuildCookies(request["_cookies"].([]interface{}))
headers, host, scheme, authority, path, _ := BuildHeaders(request["headers"].(map[string]interface{}))
cookies := make([]Cookie, 0)
postData, _ := request["postData"].(map[string]interface{})
mimeType := postData["mimeType"]
@@ -134,12 +89,20 @@ func NewRequest(request map[string]interface{}) (harRequest *Request, err error)
}
queryString := make([]QueryString, 0)
for _, _qs := range request["_queryString"].([]interface{}) {
qs := _qs.(map[string]interface{})
queryString = append(queryString, QueryString{
Name: qs["name"].(string),
Value: qs["value"].(string),
})
for key, value := range request["queryString"].(map[string]interface{}) {
if valuesInterface, ok := value.([]interface{}); ok {
for _, valueInterface := range valuesInterface {
queryString = append(queryString, QueryString{
Name: key,
Value: valueInterface.(string),
})
}
} else {
queryString = append(queryString, QueryString{
Name: key,
Value: value.(string),
})
}
}
url := fmt.Sprintf("http://%s%s", host, request["url"].(string))
@@ -172,8 +135,8 @@ func NewRequest(request map[string]interface{}) (harRequest *Request, err error)
}
func NewResponse(response map[string]interface{}) (harResponse *Response, err error) {
headers, _, _, _, _, _status := BuildHeaders(response["_headers"].([]interface{}))
cookies := make([]Cookie, 0) // BuildCookies(response["_cookies"].([]interface{}))
headers, _, _, _, _, _status := BuildHeaders(response["headers"].(map[string]interface{}))
cookies := make([]Cookie, 0)
content, _ := response["content"].(map[string]interface{})
mimeType := content["mimeType"]
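
NewRequest now reads queryString as a map in which each value is either a single string or a list of strings, and fans list values out into separate QueryString entries. A small sketch of that fan-out over an assumed input shape:

package main

import "fmt"

type QueryString struct {
	Name  string
	Value string
}

func main() {
	// Assumed example payload: one repeated parameter and one single-valued one.
	raw := map[string]interface{}{
		"id":   []interface{}{"1", "2"},
		"name": "mizu",
	}

	queryString := make([]QueryString, 0)
	for key, value := range raw {
		if values, ok := value.([]interface{}); ok {
			// Repeated parameters become one entry per value.
			for _, v := range values {
				queryString = append(queryString, QueryString{Name: key, Value: v.(string)})
			}
		} else {
			queryString = append(queryString, QueryString{Name: key, Value: value.(string)})
		}
	}

	fmt.Println(queryString) // e.g. [{id 1} {id 2} {name mizu}] (map order not guaranteed)
}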

View File

@@ -16,7 +16,7 @@ var (
)
type OasGeneratorSink interface {
HandleEntry(mizuEntry *api.Entry, protocol *api.Protocol)
HandleEntry(mizuEntry *api.Entry)
}
type OasGenerator interface {
@@ -58,12 +58,12 @@ func (g *defaultOasGenerator) IsStarted() bool {
return g.started
}
func (g *defaultOasGenerator) HandleEntry(mizuEntry *api.Entry, protocol *api.Protocol) {
func (g *defaultOasGenerator) HandleEntry(mizuEntry *api.Entry) {
if !g.started {
return
}
if protocol.Name == "http" {
if mizuEntry.Protocol.Name == "http" {
dest := mizuEntry.Destination.Name
if dest == "" {
logger.Log.Debugf("OAS: Unresolved entry %d", mizuEntry.Id)
@@ -85,7 +85,7 @@ func (g *defaultOasGenerator) HandleEntry(mizuEntry *api.Entry, protocol *api.Pr
g.handleHARWithSource(entryWSource)
} else {
logger.Log.Debugf("OAS: Unsupported protocol in entry %d: %s", mizuEntry.Id, protocol.Name)
logger.Log.Debugf("OAS: Unsupported protocol in entry %d: %s", mizuEntry.Id, mizuEntry.Protocol.Name)
}
}

View File

@@ -5,7 +5,6 @@ import (
"encoding/hex"
"fmt"
"reflect"
"strings"
"sync"
"time"
@@ -82,13 +81,12 @@ func GetGeneralStats() *GeneralStats {
func InitProtocolToColor(protocolMap map[string]*api.Protocol) {
for item, value := range protocolMap {
splitted := strings.SplitN(item, "/", 3)
protocolToColor[splitted[len(splitted)-1]] = value.BackgroundColor
protocolToColor[api.GetProtocolSummary(item).Abbreviation] = value.BackgroundColor
}
}
func GetTrafficStats() *TrafficStatsResponse {
bucketsStatsCopy := getBucketStatsCopy()
func GetTrafficStats(startTime time.Time, endTime time.Time) *TrafficStatsResponse {
bucketsStatsCopy := getFilteredBucketStatsCopy(startTime, endTime)
return &TrafficStatsResponse{
Protocols: getAvailableProtocols(bucketsStatsCopy),
@@ -264,7 +262,7 @@ func convertAccumulativeStatsDictToArray(methodsPerProtocolAggregated map[string
return protocolsData
}
func getBucketStatsCopy() BucketStats {
func getFilteredBucketStatsCopy(startTime time.Time, endTime time.Time) BucketStats {
bucketStatsCopy := BucketStats{}
bucketStatsLocker.Lock()
if err := copier.Copy(&bucketStatsCopy, bucketsStats); err != nil {
@@ -272,7 +270,18 @@ func getBucketStatsCopy() BucketStats {
return nil
}
bucketStatsLocker.Unlock()
return bucketStatsCopy
filteredBucketStatsCopy := BucketStats{}
interval := InternalBucketThreshold
for _, bucket := range bucketStatsCopy {
if (bucket.BucketTime.After(startTime.Add(-1*interval/2).Round(interval)) && bucket.BucketTime.Before(endTime.Add(-1*interval/2).Round(interval))) ||
bucket.BucketTime.Equal(startTime.Add(-1*interval/2).Round(interval)) ||
bucket.BucketTime.Equal(endTime.Add(-1*interval/2).Round(interval)) {
filteredBucketStatsCopy = append(filteredBucketStatsCopy, bucket)
}
}
return filteredBucketStatsCopy
}
func getAggregatedResultTiming(stats BucketStats, interval time.Duration) map[time.Time]map[string]map[string]*AccumulativeStatsCounter {
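
The start/end comparison above relies on t.Add(-interval/2).Round(interval) acting as a floor to the bucket boundary, so the requested window is snapped onto whole buckets before filtering. A small standalone check of that rounding behavior; the one-minute interval is an assumption for illustration, since the diff does not show the value of InternalBucketThreshold:

package main

import (
	"fmt"
	"time"
)

// floorToBucket mirrors the expression used in getFilteredBucketStatsCopy:
// shifting back half an interval and rounding lands on the bucket start.
func floorToBucket(t time.Time, interval time.Duration) time.Time {
	return t.Add(-1 * interval / 2).Round(interval)
}

func main() {
	interval := time.Minute // assumed bucket size for the example
	base := time.Date(2021, 1, 1, 10, 0, 0, 0, time.UTC)

	for _, offset := range []time.Duration{0, 10 * time.Second, 59 * time.Second} {
		t := base.Add(offset)
		fmt.Printf("%s -> %s\n", t.Format("15:04:05"), floorToBucket(t, interval).Format("15:04:05"))
	}
	// All three print 10:00:00: any time inside a bucket maps to that bucket's start.
}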

View File

@@ -26,7 +26,7 @@ func TestEntryAddedCount(t *testing.T) {
entryBucketKey := time.Date(2021, 1, 1, 10, 0, 0, 0, time.UTC)
valueLessThanBucketThreshold := time.Second * 130
mockSummery := &api.BaseEntry{Protocol: api.Protocol{Name: "mock"}, Method: "mock-method", Timestamp: entryBucketKey.Add(valueLessThanBucketThreshold).UnixNano()}
mockSummery := &api.BaseEntry{Protocol: api.Protocol{ProtocolSummary: api.ProtocolSummary{Name: "mock"}}, Method: "mock-method", Timestamp: entryBucketKey.Add(valueLessThanBucketThreshold).UnixNano()}
for _, entriesCount := range tests {
t.Run(fmt.Sprintf("%d", entriesCount), func(t *testing.T) {
for i := 0; i < entriesCount; i++ {
@@ -61,7 +61,7 @@ func TestEntryAddedVolume(t *testing.T) {
var expectedEntriesCount int
var expectedVolumeInGB float64
mockSummery := &api.BaseEntry{Protocol: api.Protocol{Name: "mock"}, Method: "mock-method", Timestamp: time.Date(2021, 1, 1, 10, 0, 0, 0, time.UTC).UnixNano()}
mockSummery := &api.BaseEntry{Protocol: api.Protocol{ProtocolSummary: api.ProtocolSummary{Name: "mock"}}, Method: "mock-method", Timestamp: time.Date(2021, 1, 1, 10, 0, 0, 0, time.UTC).UnixNano()}
for _, data := range tests {
t.Run(fmt.Sprintf("%d", len(data)), func(t *testing.T) {

View File

@@ -50,11 +50,13 @@ var (
IP: fmt.Sprintf("%s.%s", Ip, UnresolvedNodeName),
}
ProtocolHttp = &tapApi.Protocol{
Name: "http",
ProtocolSummary: tapApi.ProtocolSummary{
Name: "http",
Version: "1.1",
Abbreviation: "HTTP",
},
LongName: "Hypertext Transfer Protocol -- HTTP/1.1",
Abbreviation: "HTTP",
Macro: "http",
Version: "1.1",
BackgroundColor: "#205cf5",
ForegroundColor: "#ffffff",
FontSize: 12,
@@ -63,11 +65,13 @@ var (
Priority: 0,
}
ProtocolRedis = &tapApi.Protocol{
Name: "redis",
ProtocolSummary: tapApi.ProtocolSummary{
Name: "redis",
Version: "3.x",
Abbreviation: "REDIS",
},
LongName: "Redis Serialization Protocol",
Abbreviation: "REDIS",
Macro: "redis",
Version: "3.x",
BackgroundColor: "#a41e11",
ForegroundColor: "#ffffff",
FontSize: 11,

View File

@@ -4,9 +4,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"time"
"github.com/up9inc/mizu/cli/utils"
@@ -93,45 +91,3 @@ func (provider *Provider) ReportTappedPods(pods []core.Pod) error {
}
}
}
func (provider *Provider) GetGeneralStats() (map[string]interface{}, error) {
generalStatsUrl := fmt.Sprintf("%s/status/general", provider.url)
response, requestErr := utils.Get(generalStatsUrl, provider.client)
if requestErr != nil {
return nil, fmt.Errorf("failed to get general stats for telemetry, err: %w", requestErr)
}
defer response.Body.Close()
data, readErr := ioutil.ReadAll(response.Body)
if readErr != nil {
return nil, fmt.Errorf("failed to read general stats for telemetry, err: %v", readErr)
}
var generalStats map[string]interface{}
if parseErr := json.Unmarshal(data, &generalStats); parseErr != nil {
return nil, fmt.Errorf("failed to parse general stats for telemetry, err: %v", parseErr)
}
return generalStats, nil
}
func (provider *Provider) GetVersion() (string, error) {
versionUrl, _ := url.Parse(fmt.Sprintf("%s/metadata/version", provider.url))
req := &http.Request{
Method: http.MethodGet,
URL: versionUrl,
}
statusResp, err := utils.Do(req, provider.client)
if err != nil {
return "", err
}
defer statusResp.Body.Close()
versionResponse := &shared.VersionResponse{}
if err := json.NewDecoder(statusResp.Body).Decode(&versionResponse); err != nil {
return "", err
}
return versionResponse.Ver, nil
}

View File

@@ -4,7 +4,6 @@ import (
"github.com/creasty/defaults"
"github.com/spf13/cobra"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/logger"
)
@@ -12,7 +11,6 @@ var checkCmd = &cobra.Command{
Use: "check",
Short: "Check the Mizu installation for potential problems",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("check", nil)
runMizuCheck()
return nil
},

View File

@@ -2,14 +2,12 @@ package cmd
import (
"github.com/spf13/cobra"
"github.com/up9inc/mizu/cli/telemetry"
)
var cleanCmd = &cobra.Command{
Use: "clean",
Short: "Removes all mizu resources",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("clean", nil)
performCleanCommand()
return nil
},

View File

@@ -7,7 +7,6 @@ import (
"github.com/spf13/cobra"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/logger"
)
@@ -16,8 +15,6 @@ var configCmd = &cobra.Command{
Use: "config",
Short: "Generate config with default values",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("config", config.Config.Config)
configWithDefaults, err := config.GetConfigWithDefaults()
if err != nil {
logger.Log.Errorf("Failed generating config with defaults, err: %v", err)

View File

@@ -4,7 +4,6 @@ import (
"github.com/creasty/defaults"
"github.com/spf13/cobra"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/logger"
)
@@ -12,7 +11,6 @@ var installCmd = &cobra.Command{
Use: "install",
Short: "Installs mizu components",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("install", nil)
runMizuInstall()
return nil
},

View File

@@ -9,7 +9,6 @@ import (
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/errormessage"
"github.com/up9inc/mizu/cli/mizu/fsUtils"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/logger"
)
@@ -17,8 +16,6 @@ var logsCmd = &cobra.Command{
Use: "logs",
Short: "Create a zip file with logs for Github issue or troubleshoot",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("logs", config.Config.Logs)
kubernetesProvider, err := getKubernetesProviderForCli()
if err != nil {
return nil

View File

@@ -48,7 +48,6 @@ func init() {
tapCmd.Flags().Uint16P(configStructs.GuiPortTapName, "p", defaultTapConfig.GuiPort, "Provide a custom port for the web interface webserver")
tapCmd.Flags().StringSliceP(configStructs.NamespacesTapName, "n", defaultTapConfig.Namespaces, "Namespaces selector")
tapCmd.Flags().BoolP(configStructs.AllNamespacesTapName, "A", defaultTapConfig.AllNamespaces, "Tap all namespaces")
tapCmd.Flags().StringSliceP(configStructs.PlainTextFilterRegexesTapName, "r", defaultTapConfig.PlainTextFilterRegexes, "List of regex expressions that are used to filter matching values from text/plain http bodies")
tapCmd.Flags().Bool(configStructs.EnableRedactionTapName, defaultTapConfig.EnableRedaction, "Enables redaction of potentially sensitive request/response headers and body values")
tapCmd.Flags().String(configStructs.HumanMaxEntriesDBSizeTapName, defaultTapConfig.HumanMaxEntriesDBSize, "Override the default max entries db size")
tapCmd.Flags().String(configStructs.InsertionFilterName, defaultTapConfig.InsertionFilter, "Set the insertion filter. Accepts string or a file path.")

View File

@@ -9,7 +9,6 @@ import (
"time"
"github.com/up9inc/mizu/cli/resources"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/cli/utils"
core "k8s.io/api/core/v1"
@@ -109,8 +108,6 @@ func RunMizuTap() {
}
func finishTapExecution(kubernetesProvider *kubernetes.Provider) {
telemetry.ReportTapTelemetry(apiProvider, config.Config.Tap, state.startTime)
finishMizuExecution(kubernetesProvider, config.Config.IsNsRestrictedMode(), config.Config.MizuResourcesNamespace)
}
@@ -126,7 +123,6 @@ func getTapMizuAgentConfig() *shared.MizuAgentConfig {
AgentDatabasePath: shared.DataDirPath,
ServiceMap: config.Config.ServiceMap,
OAS: config.Config.OAS,
Telemetry: config.Config.Telemetry,
}
return &mizuAgentConfig
@@ -151,17 +147,18 @@ func printTappedPodsPreview(ctx context.Context, kubernetesProvider *kubernetes.
}
}
func startTapperSyncer(ctx context.Context, cancel context.CancelFunc, provider *kubernetes.Provider, targetNamespaces []string, mizuApiFilteringOptions api.TrafficFilteringOptions, startTime time.Time) error {
func startTapperSyncer(ctx context.Context, cancel context.CancelFunc, provider *kubernetes.Provider, targetNamespaces []string, startTime time.Time) error {
tapperSyncer, err := kubernetes.CreateAndStartMizuTapperSyncer(ctx, provider, kubernetes.TapperSyncerConfig{
TargetNamespaces: targetNamespaces,
PodFilterRegex: *config.Config.Tap.PodRegex(),
MizuResourcesNamespace: config.Config.MizuResourcesNamespace,
AgentImage: config.Config.AgentImage,
TapperResources: config.Config.Tap.TapperResources,
ImagePullPolicy: config.Config.ImagePullPolicy(),
LogLevel: config.Config.LogLevel(),
IgnoredUserAgents: config.Config.Tap.IgnoredUserAgents,
MizuApiFilteringOptions: mizuApiFilteringOptions,
TargetNamespaces: targetNamespaces,
PodFilterRegex: *config.Config.Tap.PodRegex(),
MizuResourcesNamespace: config.Config.MizuResourcesNamespace,
AgentImage: config.Config.AgentImage,
TapperResources: config.Config.Tap.TapperResources,
ImagePullPolicy: config.Config.ImagePullPolicy(),
LogLevel: config.Config.LogLevel(),
MizuApiFilteringOptions: api.TrafficFilteringOptions{
IgnoredUserAgents: config.Config.Tap.IgnoredUserAgents,
},
MizuServiceAccountExists: state.mizuServiceAccountExists,
ServiceMesh: config.Config.Tap.ServiceMesh,
Tls: config.Config.Tap.Tls,
@@ -229,27 +226,6 @@ func getErrorDisplayTextForK8sTapManagerError(err kubernetes.K8sTapManagerError)
}
}
func getMizuApiFilteringOptions() (*api.TrafficFilteringOptions, error) {
var compiledRegexSlice []*api.SerializableRegexp
if config.Config.Tap.PlainTextFilterRegexes != nil && len(config.Config.Tap.PlainTextFilterRegexes) > 0 {
compiledRegexSlice = make([]*api.SerializableRegexp, 0)
for _, regexStr := range config.Config.Tap.PlainTextFilterRegexes {
compiledRegex, err := api.CompileRegexToSerializableRegexp(regexStr)
if err != nil {
return nil, err
}
compiledRegexSlice = append(compiledRegexSlice, compiledRegex)
}
}
return &api.TrafficFilteringOptions{
PlainTextMaskingRegexes: compiledRegexSlice,
IgnoredUserAgents: config.Config.Tap.IgnoredUserAgents,
EnableRedaction: config.Config.Tap.EnableRedaction,
}, nil
}
func watchApiServerPod(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc) {
podExactRegex := regexp.MustCompile(fmt.Sprintf("^%s$", kubernetes.ApiServerPodName))
podWatchHelper := kubernetes.NewPodWatchHelper(kubernetesProvider, podExactRegex)
@@ -367,8 +343,7 @@ func watchApiServerEvents(ctx context.Context, kubernetesProvider *kubernetes.Pr
func postApiServerStarted(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc) {
startProxyReportErrorIfAny(kubernetesProvider, ctx, cancel, config.Config.Tap.GuiPort)
options, _ := getMizuApiFilteringOptions()
if err := startTapperSyncer(ctx, cancel, kubernetesProvider, state.targetNamespaces, *options, state.startTime); err != nil {
if err := startTapperSyncer(ctx, cancel, kubernetesProvider, state.targetNamespaces, state.startTime); err != nil {
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Error starting mizu tapper syncer: %v", errormessage.FormatError(err)))
cancel()
}

View File

@@ -6,7 +6,6 @@ import (
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/logger"
"github.com/creasty/defaults"
@@ -18,8 +17,6 @@ var versionCmd = &cobra.Command{
Use: "version",
Short: "Print version info",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("version", config.Config.Version)
if config.Config.Version.DebugInfo {
timeStampInt, _ := strconv.ParseInt(mizu.BuildTimestamp, 10, 0)
logger.Log.Infof("Version: %s \nBranch: %s (%s)", mizu.Ver, mizu.Branch, mizu.GitCommitHash)

View File

@@ -3,9 +3,7 @@ package cmd
import (
"github.com/creasty/defaults"
"github.com/spf13/cobra"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/telemetry"
"github.com/up9inc/mizu/logger"
)
@@ -13,7 +11,6 @@ var viewCmd = &cobra.Command{
Use: "view",
Short: "Open GUI in browser",
RunE: func(cmd *cobra.Command, args []string) error {
go telemetry.ReportRun("view", config.Config.View)
runMizuView()
return nil
},

View File

@@ -31,7 +31,6 @@ type ConfigStruct struct {
AgentImage string `yaml:"agent-image,omitempty" readonly:""`
ImagePullPolicyStr string `yaml:"image-pull-policy" default:"Always"`
MizuResourcesNamespace string `yaml:"mizu-resources-namespace" default:"mizu"`
Telemetry bool `yaml:"telemetry" default:"true"`
DumpLogs bool `yaml:"dump-logs" default:"false"`
KubeConfigPathStr string `yaml:"kube-config-path"`
KubeContext string `yaml:"kube-context"`

View File

@@ -6,6 +6,7 @@ import (
"io/ioutil"
"os"
"regexp"
"strings"
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/shared"
@@ -15,38 +16,43 @@ import (
)
const (
GuiPortTapName = "gui-port"
NamespacesTapName = "namespaces"
AllNamespacesTapName = "all-namespaces"
PlainTextFilterRegexesTapName = "regex-masking"
EnableRedactionTapName = "redact"
HumanMaxEntriesDBSizeTapName = "max-entries-db-size"
InsertionFilterName = "insertion-filter"
DryRunTapName = "dry-run"
ServiceMeshName = "service-mesh"
TlsName = "tls"
ProfilerName = "profiler"
MaxLiveStreamsName = "max-live-streams"
GuiPortTapName = "gui-port"
NamespacesTapName = "namespaces"
AllNamespacesTapName = "all-namespaces"
EnableRedactionTapName = "redact"
HumanMaxEntriesDBSizeTapName = "max-entries-db-size"
InsertionFilterName = "insertion-filter"
DryRunTapName = "dry-run"
ServiceMeshName = "service-mesh"
TlsName = "tls"
ProfilerName = "profiler"
MaxLiveStreamsName = "max-live-streams"
)
type TapConfig struct {
PodRegexStr string `yaml:"regex" default:".*"`
GuiPort uint16 `yaml:"gui-port" default:"8899"`
ProxyHost string `yaml:"proxy-host" default:"127.0.0.1"`
Namespaces []string `yaml:"namespaces"`
AllNamespaces bool `yaml:"all-namespaces" default:"false"`
PlainTextFilterRegexes []string `yaml:"regex-masking"`
IgnoredUserAgents []string `yaml:"ignored-user-agents"`
EnableRedaction bool `yaml:"redact" default:"false"`
HumanMaxEntriesDBSize string `yaml:"max-entries-db-size" default:"200MB"`
InsertionFilter string `yaml:"insertion-filter" default:""`
DryRun bool `yaml:"dry-run" default:"false"`
ApiServerResources shared.Resources `yaml:"api-server-resources"`
TapperResources shared.Resources `yaml:"tapper-resources"`
ServiceMesh bool `yaml:"service-mesh" default:"false"`
Tls bool `yaml:"tls" default:"false"`
Profiler bool `yaml:"profiler" default:"false"`
MaxLiveStreams int `yaml:"max-live-streams" default:"500"`
PodRegexStr string `yaml:"regex" default:".*"`
GuiPort uint16 `yaml:"gui-port" default:"8899"`
ProxyHost string `yaml:"proxy-host" default:"127.0.0.1"`
Namespaces []string `yaml:"namespaces"`
AllNamespaces bool `yaml:"all-namespaces" default:"false"`
IgnoredUserAgents []string `yaml:"ignored-user-agents"`
EnableRedaction bool `yaml:"redact" default:"false"`
RedactPatterns struct {
RequestHeaders []string `yaml:"request-headers"`
ResponseHeaders []string `yaml:"response-headers"`
RequestBody []string `yaml:"request-body"`
ResponseBody []string `yaml:"response-body"`
RequestQueryParams []string `yaml:"request-query-params"`
} `yaml:"redact-patterns"`
HumanMaxEntriesDBSize string `yaml:"max-entries-db-size" default:"200MB"`
InsertionFilter string `yaml:"insertion-filter" default:""`
DryRun bool `yaml:"dry-run" default:"false"`
ApiServerResources shared.Resources `yaml:"api-server-resources"`
TapperResources shared.Resources `yaml:"tapper-resources"`
ServiceMesh bool `yaml:"service-mesh" default:"false"`
Tls bool `yaml:"tls" default:"false"`
Profiler bool `yaml:"profiler" default:"false"`
MaxLiveStreams int `yaml:"max-live-streams" default:"500"`
}
func (config *TapConfig) PodRegex() *regexp.Regexp {
@@ -71,9 +77,48 @@ func (config *TapConfig) GetInsertionFilter() string {
}
}
}
redactFilter := getRedactFilter(config)
if insertionFilter != "" && redactFilter != "" {
return fmt.Sprintf("(%s) and (%s)", insertionFilter, redactFilter)
} else if insertionFilter == "" && redactFilter != "" {
return redactFilter
}
return insertionFilter
}
func getRedactFilter(config *TapConfig) string {
if !config.EnableRedaction {
return ""
}
var redactValues []string
for _, requestHeader := range config.RedactPatterns.RequestHeaders {
redactValues = append(redactValues, fmt.Sprintf("request.headers['%s']", requestHeader))
}
for _, responseHeader := range config.RedactPatterns.ResponseHeaders {
redactValues = append(redactValues, fmt.Sprintf("response.headers['%s']", responseHeader))
}
for _, requestBody := range config.RedactPatterns.RequestBody {
redactValues = append(redactValues, fmt.Sprintf("request.postData.text.json()...%s", requestBody))
}
for _, responseBody := range config.RedactPatterns.ResponseBody {
redactValues = append(redactValues, fmt.Sprintf("response.content.text.json()...%s", responseBody))
}
for _, requestQueryParams := range config.RedactPatterns.RequestQueryParams {
redactValues = append(redactValues, fmt.Sprintf("request.queryString['%s']", requestQueryParams))
}
if len(redactValues) == 0 {
return ""
}
return fmt.Sprintf("redact(\"%s\")", strings.Join(redactValues, "\",\""))
}
func (config *TapConfig) Validate() error {
_, compileErr := regexp.Compile(config.PodRegexStr)
if compileErr != nil {
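
With redaction enabled, getRedactFilter turns the configured patterns into a redact(...) insertion-filter expression, which GetInsertionFilter then and-s with any user-supplied filter. A standalone sketch of the string it produces; this mirrors the unexported helper rather than calling it, using the same patterns as the acceptance test flags (--set tap.redact-patterns.request-headers=User-Header --set tap.redact-patterns.request-body=User):

package main

import (
	"fmt"
	"strings"
)

func main() {
	requestHeaders := []string{"User-Header"}
	requestBody := []string{"User"}

	var redactValues []string
	for _, h := range requestHeaders {
		redactValues = append(redactValues, fmt.Sprintf("request.headers['%s']", h))
	}
	for _, b := range requestBody {
		redactValues = append(redactValues, fmt.Sprintf("request.postData.text.json()...%s", b))
	}

	// Prints: redact("request.headers['User-Header']","request.postData.text.json()...User")
	fmt.Printf("redact(\"%s\")\n", strings.Join(redactValues, "\",\""))
}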

View File

@@ -4,7 +4,6 @@ go 1.17
require (
github.com/creasty/defaults v1.5.2
github.com/denisbrodbeck/machineid v1.0.1
github.com/google/go-github/v37 v37.0.0
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
github.com/spf13/cobra v1.3.0

View File

@@ -145,8 +145,6 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/daviddengcn/go-colortext v0.0.0-20160507010035-511bcaf42ccd/go.mod h1:dv4zxwHi5C/8AeI+4gX4dCWOIvNi7I6JCSX0HvlKPgE=
github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=

View File

@@ -14,8 +14,6 @@ var (
Platform = ""
)
const DEVENVVAR = "MIZU_DISABLE_TELEMTRY"
func GetMizuFolderPath() string {
home, homeDirErr := os.UserHomeDir()
if homeDirErr != nil {

View File

@@ -5,7 +5,6 @@ import (
"fmt"
"io/ioutil"
"net/http"
"os"
"runtime"
"strings"
"time"
@@ -18,10 +17,6 @@ import (
)
func CheckNewerVersion(versionChan chan string) {
if _, present := os.LookupEnv(mizu.DEVENVVAR); present {
versionChan <- ""
return
}
logger.Log.Debugf("Checking for newer version...")
start := time.Now()
client := github.NewClient(nil)

View File

@@ -1,97 +0,0 @@
package telemetry
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"os"
"time"
"github.com/denisbrodbeck/machineid"
"github.com/up9inc/mizu/cli/apiserver"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/mizu"
"github.com/up9inc/mizu/logger"
)
const telemetryUrl = "https://us-east4-up9-prod.cloudfunctions.net/mizu-telemetry"
func ReportRun(cmd string, args interface{}) {
if !shouldRunTelemetry() {
logger.Log.Debug("not reporting telemetry")
return
}
argsBytes, _ := json.Marshal(args)
argsMap := map[string]interface{}{
"cmd": cmd,
"args": string(argsBytes),
}
if err := sendTelemetry(argsMap); err != nil {
logger.Log.Debug(err)
return
}
logger.Log.Debugf("successfully reported telemetry for cmd %v", cmd)
}
func ReportTapTelemetry(apiProvider *apiserver.Provider, args interface{}, startTime time.Time) {
if !shouldRunTelemetry() {
logger.Log.Debug("not reporting telemetry")
return
}
generalStats, err := apiProvider.GetGeneralStats()
if err != nil {
logger.Log.Debugf("[ERROR] failed to get general stats from api server %v", err)
return
}
argsBytes, _ := json.Marshal(args)
argsMap := map[string]interface{}{
"cmd": "tap",
"args": string(argsBytes),
"executionTimeInSeconds": int(time.Since(startTime).Seconds()),
"apiCallsCount": generalStats["EntriesCount"],
"trafficVolumeInGB": generalStats["EntriesVolumeInGB"],
}
if err := sendTelemetry(argsMap); err != nil {
logger.Log.Debug(err)
return
}
logger.Log.Debug("successfully reported telemetry of tap command")
}
func shouldRunTelemetry() bool {
if _, present := os.LookupEnv(mizu.DEVENVVAR); present {
return false
}
if !config.Config.Telemetry {
return false
}
return mizu.Branch == "main" || mizu.Branch == "develop"
}
func sendTelemetry(argsMap map[string]interface{}) error {
argsMap["component"] = "mizu_cli"
argsMap["buildTimestamp"] = mizu.BuildTimestamp
argsMap["branch"] = mizu.Branch
argsMap["version"] = mizu.Ver
argsMap["platform"] = mizu.Platform
if machineId, err := machineid.ProtectedID("mizu"); err == nil {
argsMap["machineId"] = machineId
}
jsonValue, _ := json.Marshal(argsMap)
if resp, err := http.Post(telemetryUrl, "application/json", bytes.NewBuffer(jsonValue)); err != nil {
return fmt.Errorf("ERROR: failed sending telemetry, err: %v, response %v", err, resp)
}
return nil
}

View File

@@ -57,7 +57,7 @@ log "Writing output to $MIZU_BENCHMARK_OUTPUT_DIR"
cd $MIZU_HOME || exit 1
export HOST_MODE=0
export SENSITIVE_DATA_FILTERING_OPTIONS='{"EnableRedaction": false}'
export SENSITIVE_DATA_FILTERING_OPTIONS='{}'
export MIZU_DEBUG_DISABLE_PCAP=false
export MIZU_DEBUG_DISABLE_TCP_REASSEMBLY=false
export MIZU_DEBUG_DISABLE_TCP_STREAM=false

View File

@@ -43,7 +43,6 @@ type TapperSyncerConfig struct {
TapperResources shared.Resources
ImagePullPolicy core.PullPolicy
LogLevel logging.Level
IgnoredUserAgents []string
MizuApiFilteringOptions api.TrafficFilteringOptions
MizuServiceAccountExists bool
ServiceMesh bool

View File

@@ -43,7 +43,6 @@ type MizuAgentConfig struct {
AgentDatabasePath string `json:"agentDatabasePath"`
ServiceMap bool `json:"serviceMap"`
OAS OASConfig `json:"oas"`
Telemetry bool `json:"telemetry"`
}
type WebSocketMessageMetadata struct {

View File

@@ -2,7 +2,9 @@ package api
import (
"bufio"
"fmt"
"net"
"strings"
"sync"
"time"
@@ -14,12 +16,29 @@ const UnknownNamespace = ""
var UnknownIp = net.IP{0, 0, 0, 0}
var UnknownPort uint16 = 0
type ProtocolSummary struct {
Name string `json:"name"`
Version string `json:"version"`
Abbreviation string `json:"abbr"`
}
func (protocol *ProtocolSummary) ToString() string {
return fmt.Sprintf("%s?%s?%s", protocol.Name, protocol.Version, protocol.Abbreviation)
}
func GetProtocolSummary(inputString string) *ProtocolSummary {
splitted := strings.SplitN(inputString, "?", 3)
return &ProtocolSummary{
Name: splitted[0],
Version: splitted[1],
Abbreviation: splitted[2],
}
}
type Protocol struct {
Name string `json:"name"`
ProtocolSummary
LongName string `json:"longName"`
Abbreviation string `json:"abbr"`
Macro string `json:"macro"`
Version string `json:"version"`
BackgroundColor string `json:"backgroundColor"`
ForegroundColor string `json:"foregroundColor"`
FontSize int8 `json:"fontSize"`
@@ -151,7 +170,7 @@ func (e *Emitting) Emit(item *OutputChannelItem) {
type Entry struct {
Id string `json:"id"`
ProtocolId string `json:"protocol"`
Protocol ProtocolSummary `json:"protocol"`
Capture Capture `json:"capture"`
Source *TCP `json:"src"`
Destination *TCP `json:"dst"`
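
ProtocolSummary.ToString joins name, version and abbreviation with "?", GetProtocolSummary splits it back, and that string replaces the old name/version/abbr key of protocolsMap; entries now carry only the summary and the full Protocol is looked up by this key. A quick round-trip sketch using local copies of the two helpers (importing the tap/api module is not assumed here):

package main

import (
	"fmt"
	"strings"
)

type ProtocolSummary struct {
	Name         string
	Version      string
	Abbreviation string
}

// Same encoding as tap/api: "name?version?abbr".
func (p *ProtocolSummary) ToString() string {
	return fmt.Sprintf("%s?%s?%s", p.Name, p.Version, p.Abbreviation)
}

func GetProtocolSummary(s string) *ProtocolSummary {
	parts := strings.SplitN(s, "?", 3)
	return &ProtocolSummary{Name: parts[0], Version: parts[1], Abbreviation: parts[2]}
}

func main() {
	http11 := &ProtocolSummary{Name: "http", Version: "1.1", Abbreviation: "HTTP"}

	key := http11.ToString() // "http?1.1?HTTP" - used as the protocolsMap key
	fmt.Println(key)

	// Entries carry only the summary; the full Protocol is resolved by this key.
	fmt.Println(GetProtocolSummary(key).Abbreviation) // "HTTP"
}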

View File

@@ -1,7 +1,5 @@
package api
type TrafficFilteringOptions struct {
IgnoredUserAgents []string
PlainTextMaskingRegexes []*SerializableRegexp
EnableRedaction bool
IgnoredUserAgents []string
}

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/amqp/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect16/amqp/\* expect

View File

@@ -4,16 +4,20 @@ go 1.17
require (
github.com/stretchr/testify v1.7.0
github.com/up9inc/mizu/logger v0.0.0
github.com/up9inc/mizu/tap/api v0.0.0
)
require (
github.com/davecgh/go-spew v1.1.0 // indirect
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/up9inc/mizu/tap/dbgctl v0.0.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
)
replace github.com/up9inc/mizu/logger v0.0.0 => ../../../logger
replace github.com/up9inc/mizu/tap/api v0.0.0 => ../../api
replace github.com/up9inc/mizu/tap/dbgctl v0.0.0 => ../../dbgctl

View File

@@ -1,5 +1,7 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=

View File

@@ -5,8 +5,8 @@ import (
"fmt"
"sort"
"strconv"
"time"
"github.com/up9inc/mizu/logger"
"github.com/up9inc/mizu/tap/api"
)
@@ -25,14 +25,14 @@ var connectionMethodMap = map[int]string{
61: "connection unblocked",
}
// var channelMethodMap = map[int]string{
// 10: "channel open",
// 11: "channel open-ok",
// 20: "channel flow",
// 21: "channel flow-ok",
// 40: "channel close",
// 41: "channel close-ok",
// }
var channelMethodMap = map[int]string{
10: "channel open",
11: "channel open-ok",
20: "channel flow",
21: "channel flow-ok",
40: "channel close",
41: "channel close-ok",
}
var exchangeMethodMap = map[int]string{
10: "exchange declare",
@@ -94,29 +94,41 @@ type AMQPWrapper struct {
Details interface{} `json:"details"`
}
func emitAMQP(event interface{}, _type string, method string, connectionInfo *api.ConnectionInfo, captureTime time.Time, captureSize int, emitter api.Emitter, capture api.Capture) {
request := &api.GenericMessage{
IsRequest: true,
CaptureTime: captureTime,
Payload: AMQPPayload{
Data: &AMQPWrapper{
Method: method,
Url: "",
Details: event,
},
},
type emptyResponse struct {
}
const emptyMethod = "empty"
func getIdent(reader api.TcpReader, methodFrame *MethodFrame) (ident string) {
tcpID := reader.GetTcpID()
// To match methods to their Ok(s)
methodId := methodFrame.MethodId - methodFrame.MethodId%10
if reader.GetIsClient() {
ident = fmt.Sprintf(
"%s_%s_%s_%s_%d_%d_%d",
tcpID.SrcIP,
tcpID.DstIP,
tcpID.SrcPort,
tcpID.DstPort,
methodFrame.ChannelId,
methodFrame.ClassId,
methodId,
)
} else {
ident = fmt.Sprintf(
"%s_%s_%s_%s_%d_%d_%d",
tcpID.DstIP,
tcpID.SrcIP,
tcpID.DstPort,
tcpID.SrcPort,
methodFrame.ChannelId,
methodFrame.ClassId,
methodId,
)
}
item := &api.OutputChannelItem{
Protocol: protocol,
Capture: capture,
Timestamp: captureTime.UnixNano() / int64(time.Millisecond),
ConnectionInfo: connectionInfo,
Pair: &api.RequestResponsePair{
Request: *request,
Response: api.GenericMessage{},
},
}
emitter.Emit(item)
return
}
func representProperties(properties map[string]interface{}, rep []interface{}) ([]interface{}, string, string) {
@@ -460,6 +472,36 @@ func representQueueDeclare(event map[string]interface{}) []interface{} {
return rep
}
func representQueueDeclareOk(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Queue",
Value: event["queue"].(string),
Selector: `response.queue`,
},
{
Name: "Message Count",
Value: fmt.Sprintf("%g", event["messageCount"].(float64)),
Selector: `response.messageCount`,
},
{
Name: "Consumer Count",
Value: fmt.Sprintf("%g", event["consumerCount"].(float64)),
Selector: `response.consumerCount`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
return rep
}
func representExchangeDeclare(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
@@ -571,7 +613,7 @@ func representConnectionStart(event map[string]interface{}) []interface{} {
x, _ := json.Marshal(value)
outcome = string(x)
default:
panic("Unknown data type for the server property!")
logger.Log.Info("Unknown data type for the server property!")
}
headers = append(headers, api.TableData{
Name: name,
@@ -593,6 +635,65 @@ func representConnectionStart(event map[string]interface{}) []interface{} {
return rep
}
func representConnectionStartOk(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Mechanism",
Value: event["mechanism"].(string),
Selector: `response.mechanism`,
},
{
Name: "Mechanism",
Value: event["mechanism"].(string),
Selector: `response.response`,
},
{
Name: "Locale",
Value: event["locale"].(string),
Selector: `response.locale`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
if event["clientProperties"] != nil {
headers := make([]api.TableData, 0)
for name, value := range event["clientProperties"].(map[string]interface{}) {
var outcome string
switch v := value.(type) {
case string:
outcome = v
case map[string]interface{}:
x, _ := json.Marshal(value)
outcome = string(x)
default:
logger.Log.Info("Unknown data type for the client property!")
}
headers = append(headers, api.TableData{
Name: name,
Value: outcome,
Selector: fmt.Sprintf(`response.clientProperties["%s"]`, name),
})
}
sort.Slice(headers, func(i, j int) bool {
return headers[i].Name < headers[j].Name
})
headersMarshaled, _ := json.Marshal(headers)
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Client Properties",
Data: string(headersMarshaled),
})
}
return rep
}
func representConnectionClose(event map[string]interface{}) []interface{} {
replyCode := ""
@@ -750,3 +851,122 @@ func representBasicConsume(event map[string]interface{}) []interface{} {
return rep
}
func representBasicConsumeOk(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Consumer Tag",
Value: event["consumerTag"].(string),
Selector: `response.consumerTag`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
return rep
}
func representConnectionOpen(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Virtual Host",
Value: event["virtualHost"].(string),
Selector: `request.virtualHost`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
return rep
}
func representConnectionTune(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Channel Max",
Value: fmt.Sprintf("%g", event["channelMax"].(float64)),
Selector: `request.channelMax`,
},
{
Name: "Frame Max",
Value: fmt.Sprintf("%g", event["frameMax"].(float64)),
Selector: `request.frameMax`,
},
{
Name: "Heartbeat",
Value: fmt.Sprintf("%g", event["heartbeat"].(float64)),
Selector: `request.heartbeat`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
return rep
}
func representBasicCancel(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Consumer Tag",
Value: event["consumerTag"].(string),
Selector: `response.consumerTag`,
},
{
Name: "NoWait",
Value: strconv.FormatBool(event["noWait"].(bool)),
Selector: `request.noWait`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
return rep
}
func representBasicCancelOk(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
details, _ := json.Marshal([]api.TableData{
{
Name: "Consumer Tag",
Value: event["consumerTag"].(string),
Selector: `response.consumerTag`,
},
})
rep = append(rep, api.SectionData{
Type: api.TABLE,
Title: "Details",
Data: string(details),
})
return rep
}
func representEmpty(event map[string]interface{}) []interface{} {
rep := make([]interface{}, 0)
return rep
}
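
getIdent above is what pairs an AMQP method with its *-Ok reply: the method id is rounded down to the nearest ten (QueueDeclare is method 10, QueueDeclareOk is 11, both map to 10), and on the server side source and destination are swapped so both directions compute the same key. A toy illustration of the rounding and key construction; the addresses and ports are made up for the example, and the class id 50 is the AMQP 0-9-1 queue class:

package main

import "fmt"

// Minimal stand-in for the matcher key built in getIdent.
func ident(srcIP, dstIP, srcPort, dstPort string, channelId, classId, methodId int) string {
	// Round the method id down to its request method: 11 (declare-ok) -> 10 (declare).
	methodId = methodId - methodId%10
	return fmt.Sprintf("%s_%s_%s_%s_%d_%d_%d", srcIP, dstIP, srcPort, dstPort, channelId, classId, methodId)
}

func main() {
	// Client side: QueueDeclare (class 50, method 10) from 10.0.0.1 to 10.0.0.2.
	request := ident("10.0.0.1", "10.0.0.2", "51234", "5672", 1, 50, 10)

	// Server side: QueueDeclareOk (method 11); directions are already flipped back
	// here, as getIdent does for non-client readers before building the key.
	response := ident("10.0.0.1", "10.0.0.2", "51234", "5672", 1, 50, 11)

	fmt.Println(request == response) // true - a request and its Ok share one ident
}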

View File

@@ -13,11 +13,13 @@ import (
)
var protocol = api.Protocol{
Name: "amqp",
ProtocolSummary: api.ProtocolSummary{
Name: "amqp",
Version: "0-9-1",
Abbreviation: "AMQP",
},
LongName: "Advanced Message Queuing Protocol 0-9-1",
Abbreviation: "AMQP",
Macro: "amqp",
Version: "0-9-1",
BackgroundColor: "#ff6600",
ForegroundColor: "#ffffff",
FontSize: 12,
@@ -27,7 +29,7 @@ var protocol = api.Protocol{
}
var protocolsMap = map[string]*api.Protocol{
fmt.Sprintf("%s/%s/%s", protocol.Name, protocol.Version, protocol.Abbreviation): &protocol,
protocol.ToString(): &protocol,
}
type dissecting string
@@ -44,22 +46,12 @@ func (d dissecting) Ping() {
log.Printf("pong %s", protocol.Name)
}
const amqpRequest string = "amqp_request"
func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.TrafficFilteringOptions) error {
r := AmqpReader{b}
var remaining int
var header *HeaderFrame
connectionInfo := &api.ConnectionInfo{
ClientIP: reader.GetTcpID().SrcIP,
ClientPort: reader.GetTcpID().SrcPort,
ServerIP: reader.GetTcpID().DstIP,
ServerPort: reader.GetTcpID().DstPort,
IsOutgoing: true,
}
eventBasicPublish := &BasicPublish{
Exchange: "",
RoutingKey: "",
@@ -81,6 +73,10 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
var lastMethodFrameMessage Message
var ident string
isClient := reader.GetIsClient()
reqResMatcher := reader.GetReqResMatcher().(*requestResponseMatcher)
for {
frameVal, err := r.readFrame()
if err == io.EOF {
@@ -119,16 +115,22 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
switch lastMethodFrameMessage.(type) {
case *BasicPublish:
eventBasicPublish.Body = f.Body
emitAMQP(*eventBasicPublish, amqpRequest, basicMethodMap[40], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(isClient, ident, basicMethodMap[40], *eventBasicPublish, reader)
reqResMatcher.emitEvent(!isClient, ident, emptyMethod, &emptyResponse{}, reader)
case *BasicDeliver:
eventBasicDeliver.Body = f.Body
emitAMQP(*eventBasicDeliver, amqpRequest, basicMethodMap[60], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(!isClient, ident, basicMethodMap[60], *eventBasicDeliver, reader)
reqResMatcher.emitEvent(isClient, ident, emptyMethod, &emptyResponse{}, reader)
}
case *MethodFrame:
reader.GetParent().SetProtocol(&protocol)
lastMethodFrameMessage = f.Method
ident = getIdent(reader, f)
switch m := f.Method.(type) {
case *BasicPublish:
eventBasicPublish.Exchange = m.Exchange
@@ -144,7 +146,10 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
NoWait: m.NoWait,
Arguments: m.Arguments,
}
emitAMQP(*eventQueueBind, amqpRequest, queueMethodMap[20], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(isClient, ident, queueMethodMap[20], *eventQueueBind, reader)
case *QueueBindOk:
reqResMatcher.emitEvent(isClient, ident, queueMethodMap[21], m, reader)
case *BasicConsume:
eventBasicConsume := &BasicConsume{
@@ -156,7 +161,10 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
NoWait: m.NoWait,
Arguments: m.Arguments,
}
emitAMQP(*eventBasicConsume, amqpRequest, basicMethodMap[20], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(isClient, ident, basicMethodMap[20], *eventBasicConsume, reader)
case *BasicConsumeOk:
reqResMatcher.emitEvent(isClient, ident, basicMethodMap[21], m, reader)
case *BasicDeliver:
eventBasicDeliver.ConsumerTag = m.ConsumerTag
@@ -175,7 +183,10 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
NoWait: m.NoWait,
Arguments: m.Arguments,
}
emitAMQP(*eventQueueDeclare, amqpRequest, queueMethodMap[10], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(isClient, ident, queueMethodMap[10], *eventQueueDeclare, reader)
case *QueueDeclareOk:
reqResMatcher.emitEvent(isClient, ident, queueMethodMap[11], m, reader)
case *ExchangeDeclare:
eventExchangeDeclare := &ExchangeDeclare{
@@ -188,17 +199,19 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
NoWait: m.NoWait,
Arguments: m.Arguments,
}
emitAMQP(*eventExchangeDeclare, amqpRequest, exchangeMethodMap[10], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(isClient, ident, exchangeMethodMap[10], *eventExchangeDeclare, reader)
case *ExchangeDeclareOk:
reqResMatcher.emitEvent(isClient, ident, exchangeMethodMap[11], m, reader)
case *ConnectionStart:
eventConnectionStart := &ConnectionStart{
VersionMajor: m.VersionMajor,
VersionMinor: m.VersionMinor,
ServerProperties: m.ServerProperties,
Mechanisms: m.Mechanisms,
Locales: m.Locales,
}
emitAMQP(*eventConnectionStart, amqpRequest, connectionMethodMap[10], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
// In our tests, *ConnectionStart does not result in *ConnectionStartOk
reqResMatcher.emitEvent(!isClient, ident, connectionMethodMap[10], m, reader)
reqResMatcher.emitEvent(isClient, ident, emptyMethod, &emptyResponse{}, reader)
case *ConnectionStartOk:
// In our tests, *ConnectionStart does not result in *ConnectionStartOk
reqResMatcher.emitEvent(isClient, ident, connectionMethodMap[11], m, reader)
case *ConnectionClose:
eventConnectionClose := &ConnectionClose{
@@ -207,7 +220,40 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
ClassId: m.ClassId,
MethodId: m.MethodId,
}
emitAMQP(*eventConnectionClose, amqpRequest, connectionMethodMap[50], connectionInfo, reader.GetCaptureTime(), reader.GetReadProgress().Current(), reader.GetEmitter(), reader.GetParent().GetOrigin())
reqResMatcher.emitEvent(isClient, ident, connectionMethodMap[50], *eventConnectionClose, reader)
case *ConnectionCloseOk:
reqResMatcher.emitEvent(isClient, ident, connectionMethodMap[51], m, reader)
case *connectionOpen:
eventConnectionOpen := &connectionOpen{
VirtualHost: m.VirtualHost,
}
reqResMatcher.emitEvent(isClient, ident, connectionMethodMap[40], *eventConnectionOpen, reader)
case *connectionOpenOk:
reqResMatcher.emitEvent(isClient, ident, connectionMethodMap[41], m, reader)
case *channelOpen:
reqResMatcher.emitEvent(isClient, ident, channelMethodMap[10], m, reader)
case *channelOpenOk:
reqResMatcher.emitEvent(isClient, ident, channelMethodMap[11], m, reader)
case *connectionTune:
// In our tests, *connectionTune does not result in *connectionTuneOk
reqResMatcher.emitEvent(!isClient, ident, connectionMethodMap[30], m, reader)
reqResMatcher.emitEvent(isClient, ident, emptyMethod, &emptyResponse{}, reader)
case *connectionTuneOk:
// In our tests, *connectionTune does not result in *connectionTuneOk
reqResMatcher.emitEvent(isClient, ident, connectionMethodMap[31], m, reader)
case *basicCancel:
reqResMatcher.emitEvent(isClient, ident, basicMethodMap[30], m, reader)
case *basicCancelOk:
reqResMatcher.emitEvent(isClient, ident, basicMethodMap[31], m, reader)
}
default:
@@ -218,12 +264,20 @@ func (d dissecting) Dissect(b *bufio.Reader, reader api.TcpReader, options *api.
func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string, resolvedDestination string, namespace string) *api.Entry {
request := item.Pair.Request.Payload.(map[string]interface{})
response := item.Pair.Response.Payload.(map[string]interface{})
reqDetails := request["details"].(map[string]interface{})
resDetails := response["details"].(map[string]interface{})
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
if elapsedTime < 0 {
elapsedTime = 0
}
reqDetails["method"] = request["method"]
resDetails["method"] = response["method"]
return &api.Entry{
ProtocolId: fmt.Sprintf("%s/%s/%s", protocol.Name, protocol.Version, protocol.Abbreviation),
Capture: item.Capture,
Protocol: protocol.ProtocolSummary,
Capture: item.Capture,
Source: &api.TCP{
Name: resolvedSource,
IP: item.ConnectionInfo.ClientIP,
@@ -234,13 +288,15 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
IP: item.ConnectionInfo.ServerIP,
Port: item.ConnectionInfo.ServerPort,
},
Namespace: namespace,
Outgoing: item.ConnectionInfo.IsOutgoing,
Request: reqDetails,
RequestSize: item.Pair.Request.CaptureSize,
Timestamp: item.Timestamp,
StartTime: item.Pair.Request.CaptureTime,
ElapsedTime: 0,
Namespace: namespace,
Outgoing: item.ConnectionInfo.IsOutgoing,
Request: reqDetails,
Response: resDetails,
RequestSize: item.Pair.Request.CaptureSize,
ResponseSize: item.Pair.Response.CaptureSize,
Timestamp: item.Timestamp,
StartTime: item.Pair.Request.CaptureTime,
ElapsedTime: elapsedTime,
}
}
@@ -281,11 +337,26 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
case basicMethodMap[20]:
summary = entry.Request["queue"].(string)
summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
case connectionMethodMap[40]:
summary = entry.Request["virtualHost"].(string)
summaryQuery = fmt.Sprintf(`request.virtualHost == "%s"`, summary)
case connectionMethodMap[30]:
summary = fmt.Sprintf("%g", entry.Request["channelMax"].(float64))
summaryQuery = fmt.Sprintf(`request.channelMax == "%s"`, summary)
case connectionMethodMap[31]:
summary = fmt.Sprintf("%g", entry.Request["channelMax"].(float64))
summaryQuery = fmt.Sprintf(`request.channelMax == "%s"`, summary)
case basicMethodMap[30]:
summary = entry.Request["consumerTag"].(string)
summaryQuery = fmt.Sprintf(`request.consumerTag == "%s"`, summary)
case basicMethodMap[31]:
summary = entry.Request["consumerTag"].(string)
summaryQuery = fmt.Sprintf(`request.consumerTag == "%s"`, summary)
}
return &api.BaseEntry{
Id: entry.Id,
Protocol: *protocolsMap[entry.ProtocolId],
Protocol: *protocolsMap[entry.Protocol.ToString()],
Capture: entry.Capture,
Summary: summary,
SummaryQuery: summaryQuery,
@@ -304,6 +375,8 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
func (d dissecting) Represent(request map[string]interface{}, response map[string]interface{}) (object []byte, err error) {
representation := make(map[string]interface{})
var repRequest []interface{}
var repResponse []interface{}
switch request["method"].(string) {
case basicMethodMap[40]:
repRequest = representBasicPublish(request)
@@ -321,20 +394,56 @@ func (d dissecting) Represent(request map[string]interface{}, response map[strin
repRequest = representQueueBind(request)
case basicMethodMap[20]:
repRequest = representBasicConsume(request)
case connectionMethodMap[40]:
repRequest = representConnectionOpen(request)
case channelMethodMap[10]:
repRequest = representEmpty(request)
case connectionMethodMap[30]:
repRequest = representConnectionTune(request)
case basicMethodMap[30]:
repRequest = representBasicCancel(request)
}
switch response["method"].(string) {
case queueMethodMap[11]:
repResponse = representQueueDeclareOk(response)
case exchangeMethodMap[11]:
repResponse = representEmpty(response)
case connectionMethodMap[11]:
repResponse = representConnectionStartOk(response)
case connectionMethodMap[51]:
repResponse = representEmpty(response)
case basicMethodMap[21]:
repResponse = representBasicConsumeOk(response)
case queueMethodMap[21]:
repResponse = representEmpty(response)
case connectionMethodMap[41]:
repResponse = representEmpty(response)
case channelMethodMap[11]:
repResponse = representEmpty(request)
case connectionMethodMap[31]:
repResponse = representConnectionTune(request)
case basicMethodMap[31]:
repResponse = representBasicCancelOk(request)
case emptyMethod:
repResponse = representEmpty(response)
}
representation["request"] = repRequest
representation["response"] = repResponse
object, err = json.Marshal(representation)
return
}
func (d dissecting) Macros() map[string]string {
return map[string]string{
`amqp`: fmt.Sprintf(`protocol == "%s/%s/%s"`, protocol.Name, protocol.Version, protocol.Abbreviation),
`amqp`: fmt.Sprintf(`protocol.name == "%s"`, protocol.Name),
}
}
func (d dissecting) NewResponseRequestMatcher() api.RequestResponseMatcher {
return nil
return createResponseRequestMatcher()
}
var Dissector dissecting

View File

@@ -44,7 +44,7 @@ func TestRegister(t *testing.T) {
func TestMacros(t *testing.T) {
expectedMacros := map[string]string{
"amqp": `protocol == "amqp/0-9-1/AMQP"`,
"amqp": `protocol.name == "amqp"`,
}
dissector := NewDissector()
macros := dissector.Macros()

View File

@@ -0,0 +1,113 @@
package amqp
import (
"sync"
"time"
"github.com/up9inc/mizu/tap/api"
)
// Key is {client_addr}_{client_port}_{dest_addr}_{dest_port}_{channel_id}_{class_id}_{method_id}
type requestResponseMatcher struct {
openMessagesMap *sync.Map
}
func createResponseRequestMatcher() api.RequestResponseMatcher {
return &requestResponseMatcher{openMessagesMap: &sync.Map{}}
}
func (matcher *requestResponseMatcher) GetMap() *sync.Map {
return matcher.openMessagesMap
}
func (matcher *requestResponseMatcher) SetMaxTry(value int) {
}
func (matcher *requestResponseMatcher) emitEvent(isRequest bool, ident string, method string, event interface{}, reader api.TcpReader) {
reader.GetParent().SetProtocol(&protocol)
var item *api.OutputChannelItem
if isRequest {
item = matcher.registerRequest(ident, method, event, reader.GetCaptureTime(), reader.GetReadProgress().Current())
} else {
item = matcher.registerResponse(ident, method, event, reader.GetCaptureTime(), reader.GetReadProgress().Current())
}
if item != nil {
item.ConnectionInfo = &api.ConnectionInfo{
ClientIP: reader.GetTcpID().SrcIP,
ClientPort: reader.GetTcpID().SrcPort,
ServerIP: reader.GetTcpID().DstIP,
ServerPort: reader.GetTcpID().DstPort,
IsOutgoing: true,
}
item.Capture = reader.GetParent().GetOrigin()
reader.GetEmitter().Emit(item)
}
}
func (matcher *requestResponseMatcher) registerRequest(ident string, method string, request interface{}, captureTime time.Time, captureSize int) *api.OutputChannelItem {
requestAMQPMessage := api.GenericMessage{
IsRequest: true,
CaptureTime: captureTime,
CaptureSize: captureSize,
Payload: AMQPPayload{
Data: &AMQPWrapper{
Method: method,
Url: "",
Details: request,
},
},
}
if response, found := matcher.openMessagesMap.LoadAndDelete(ident); found {
// Type assertion always succeeds because all of the map's values are of *api.GenericMessage type
responseAMQPMessage := response.(*api.GenericMessage)
if responseAMQPMessage.IsRequest {
return nil
}
return matcher.preparePair(&requestAMQPMessage, responseAMQPMessage)
}
matcher.openMessagesMap.Store(ident, &requestAMQPMessage)
return nil
}
func (matcher *requestResponseMatcher) registerResponse(ident string, method string, response interface{}, captureTime time.Time, captureSize int) *api.OutputChannelItem {
responseAMQPMessage := api.GenericMessage{
IsRequest: false,
CaptureTime: captureTime,
CaptureSize: captureSize,
Payload: AMQPPayload{
Data: &AMQPWrapper{
Method: method,
Url: "",
Details: response,
},
},
}
if request, found := matcher.openMessagesMap.LoadAndDelete(ident); found {
// Type assertion always succeeds because all of the map's values are of *api.GenericMessage type
requestAMQPMessage := request.(*api.GenericMessage)
if !requestAMQPMessage.IsRequest {
return nil
}
return matcher.preparePair(requestAMQPMessage, &responseAMQPMessage)
}
matcher.openMessagesMap.Store(ident, &responseAMQPMessage)
return nil
}
func (matcher *requestResponseMatcher) preparePair(requestAMQPMessage *api.GenericMessage, responseAMQPMessage *api.GenericMessage) *api.OutputChannelItem {
return &api.OutputChannelItem{
Protocol: protocol,
Timestamp: requestAMQPMessage.CaptureTime.UnixNano() / int64(time.Millisecond),
ConnectionInfo: nil,
Pair: &api.RequestResponsePair{
Request: *requestAMQPMessage,
Response: *responseAMQPMessage,
},
}
}
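The key format documented at the top of this new file is what makes the pairing work: registerRequest and registerResponse both call LoadAndDelete on the same ident, so whichever side of a call arrives second completes the pair and gets emitted. getIdent itself is not shown in this compare view; a key consistent with that comment could be built roughly like this (field types assumed, illustrative only):
package amqp

import (
    "fmt"

    "github.com/up9inc/mizu/tap/api"
)

// buildIdentExample is a hypothetical helper mirroring the documented key layout:
// {client_addr}_{client_port}_{dest_addr}_{dest_port}_{channel_id}_{class_id}_{method_id}
func buildIdentExample(tcpID *api.TcpID, channelID, classID, methodID uint16) string {
    return fmt.Sprintf("%v_%v_%v_%v_%d_%d_%d",
        tcpID.SrcIP, tcpID.SrcPort, tcpID.DstIP, tcpID.DstPort,
        channelID, classID, methodID)
}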

View File

@@ -81,10 +81,10 @@ func (msg *ConnectionStart) read(r io.Reader) (err error) {
}
type ConnectionStartOk struct {
ClientProperties Table
Mechanism string
Response string
Locale string
ClientProperties Table `json:"clientProperties"`
Mechanism string `json:"mechanism"`
Response string `json:"response"`
Locale string `json:"locale"`
}
func (msg *ConnectionStartOk) read(r io.Reader) (err error) {
@@ -135,9 +135,9 @@ func (msg *connectionSecureOk) read(r io.Reader) (err error) {
}
type connectionTune struct {
ChannelMax uint16
FrameMax uint32
Heartbeat uint16
ChannelMax uint16 `json:"channelMax"`
FrameMax uint32 `json:"frameMax"`
Heartbeat uint16 `json:"heartbeat"`
}
func (msg *connectionTune) read(r io.Reader) (err error) {
@@ -181,7 +181,7 @@ func (msg *connectionTuneOk) read(r io.Reader) (err error) {
}
type connectionOpen struct {
VirtualHost string
VirtualHost string `json:"virtualHost"`
reserved1 string
reserved2 bool
}
@@ -580,9 +580,9 @@ func (msg *QueueDeclare) read(r io.Reader) (err error) {
}
type QueueDeclareOk struct {
Queue string
MessageCount uint32
ConsumerCount uint32
Queue string `json:"queue"`
MessageCount uint32 `json:"messageCount"`
ConsumerCount uint32 `json:"consumerCount"`
}
func (msg *QueueDeclareOk) read(r io.Reader) (err error) {
@@ -840,7 +840,7 @@ func (msg *BasicConsume) read(r io.Reader) (err error) {
}
type BasicConsumeOk struct {
ConsumerTag string
ConsumerTag string `json:"consumerTag"`
}
func (msg *BasicConsumeOk) read(r io.Reader) (err error) {
@@ -853,8 +853,8 @@ func (msg *BasicConsumeOk) read(r io.Reader) (err error) {
}
type basicCancel struct {
ConsumerTag string
NoWait bool
ConsumerTag string `json:"consumerTag"`
NoWait bool `json:"noWait"`
}
func (msg *basicCancel) read(r io.Reader) (err error) {
@@ -873,7 +873,7 @@ func (msg *basicCancel) read(r io.Reader) (err error) {
}
type basicCancelOk struct {
ConsumerTag string
ConsumerTag string `json:"consumerTag"`
}
func (msg *basicCancelOk) read(r io.Reader) (err error) {

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/http/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect16/http/\* expect

View File

@@ -18,10 +18,6 @@ func filterAndEmit(item *api.OutputChannelItem, emitter api.Emitter, options *ap
return
}
if options.EnableRedaction {
FilterSensitiveData(item, options)
}
replaceForwardedFor(item)
emitter.Emit(item)

View File

@@ -6,13 +6,16 @@ import (
"reflect"
"sort"
"strconv"
"strings"
"github.com/up9inc/mizu/tap/api"
)
func mapSliceRebuildAsMap(mapSlice []interface{}) (newMap map[string]interface{}) {
newMap = make(map[string]interface{})
for _, item := range mapSlice {
mergedMapSlice := mapSliceMergeRepeatedKeys(mapSlice)
for _, item := range mergedMapSlice {
h := item.(map[string]interface{})
newMap[h["name"].(string)] = h["value"]
}
@@ -20,6 +23,28 @@ func mapSliceRebuildAsMap(mapSlice []interface{}) (newMap map[string]interface{}
return
}
func mapSliceRebuildAsMergedMap(mapSlice []interface{}) (newMap map[string]interface{}) {
newMap = make(map[string]interface{})
mergedMapSlice := mapSliceMergeRepeatedKeys(mapSlice)
for _, item := range mergedMapSlice {
h := item.(map[string]interface{})
if valuesInterface, ok := h["value"].([]interface{}); ok {
var values []string
for _, valueInterface := range valuesInterface {
values = append(values, valueInterface.(string))
}
newMap[h["name"].(string)] = strings.Join(values, ",")
} else {
newMap[h["name"].(string)] = h["value"]
}
}
return
}
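mapSliceRebuildAsMergedMap collapses repeated keys — e.g. two Set-Cookie headers — into a single comma-joined value, so one map entry (and a selector such as response.headers["Set-Cookie"]) covers every occurrence. A minimal standalone sketch of that merge behavior, not the helper itself:
// Illustrative only: repeated name/value pairs collapse into one comma-joined entry.
package main

import (
    "fmt"
    "strings"
)

func main() {
    headers := []struct{ Name, Value string }{
        {"Set-Cookie", "a=1"},
        {"Set-Cookie", "b=2"},
        {"Content-Type", "application/json"},
    }
    merged := map[string]string{}
    for _, h := range headers {
        if prev, ok := merged[h.Name]; ok {
            merged[h.Name] = strings.Join([]string{prev, h.Value}, ",")
        } else {
            merged[h.Name] = h.Value
        }
    }
    fmt.Println(merged) // map[Content-Type:application/json Set-Cookie:a=1,b=2]
}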
func mapSliceMergeRepeatedKeys(mapSlice []interface{}) (newMapSlice []interface{}) {
newMapSlice = make([]interface{}, 0)
valuesMap := make(map[string][]interface{})
@@ -47,6 +72,24 @@ func mapSliceMergeRepeatedKeys(mapSlice []interface{}) (newMapSlice []interface{
return
}
func representMapAsTable(mapToTable map[string]interface{}, selectorPrefix string) (representation string) {
var table []api.TableData
keys := make([]string, 0, len(mapToTable))
for k := range mapToTable {
keys = append(keys, k)
}
sort.Strings(keys)
for _, key := range keys {
table = append(table, createTableForKey(key, mapToTable[key], selectorPrefix)...)
}
obj, _ := json.Marshal(table)
representation = string(obj)
return
}
func representMapSliceAsTable(mapSlice []interface{}, selectorPrefix string) (representation string) {
var table []api.TableData
for _, item := range mapSlice {
@@ -54,34 +97,7 @@ func representMapSliceAsTable(mapSlice []interface{}, selectorPrefix string) (re
key := h["name"].(string)
value := h["value"]
var reflectKind reflect.Kind
reflectType := reflect.TypeOf(value)
if reflectType == nil {
reflectKind = reflect.Interface
} else {
reflectKind = reflect.TypeOf(value).Kind()
}
switch reflectKind {
case reflect.Slice:
fallthrough
case reflect.Array:
for i, el := range value.([]interface{}) {
selector := fmt.Sprintf("%s.%s[%d]", selectorPrefix, key, i)
table = append(table, api.TableData{
Name: fmt.Sprintf("%s [%d]", key, i),
Value: el,
Selector: selector,
})
}
default:
selector := fmt.Sprintf("%s[\"%s\"]", selectorPrefix, key)
table = append(table, api.TableData{
Name: key,
Value: value,
Selector: selector,
})
}
table = append(table, createTableForKey(key, value, selectorPrefix)...)
}
obj, _ := json.Marshal(table)
@@ -89,6 +105,41 @@ func representMapSliceAsTable(mapSlice []interface{}, selectorPrefix string) (re
return
}
func createTableForKey(key string, value interface{}, selectorPrefix string) []api.TableData {
var table []api.TableData
var reflectKind reflect.Kind
reflectType := reflect.TypeOf(value)
if reflectType == nil {
reflectKind = reflect.Interface
} else {
reflectKind = reflect.TypeOf(value).Kind()
}
switch reflectKind {
case reflect.Slice:
fallthrough
case reflect.Array:
for i, el := range value.([]interface{}) {
selector := fmt.Sprintf("%s.%s[%d]", selectorPrefix, key, i)
table = append(table, api.TableData{
Name: fmt.Sprintf("%s [%d]", key, i),
Value: el,
Selector: selector,
})
}
default:
selector := fmt.Sprintf("%s[\"%s\"]", selectorPrefix, key)
table = append(table, api.TableData{
Name: key,
Value: value,
Selector: selector,
})
}
return table
}
func representSliceAsTable(slice []interface{}, selectorPrefix string) (representation string) {
var table []api.TableData
for i, item := range slice {

View File

@@ -15,11 +15,13 @@ import (
)
var http10protocol = api.Protocol{
Name: "http",
ProtocolSummary: api.ProtocolSummary{
Name: "http",
Version: "1.0",
Abbreviation: "HTTP",
},
LongName: "Hypertext Transfer Protocol -- HTTP/1.0",
Abbreviation: "HTTP",
Macro: "http",
Version: "1.0",
BackgroundColor: "#205cf5",
ForegroundColor: "#ffffff",
FontSize: 12,
@@ -29,11 +31,13 @@ var http10protocol = api.Protocol{
}
var http11protocol = api.Protocol{
Name: "http",
ProtocolSummary: api.ProtocolSummary{
Name: "http",
Version: "1.1",
Abbreviation: "HTTP",
},
LongName: "Hypertext Transfer Protocol -- HTTP/1.1",
Abbreviation: "HTTP",
Macro: "http",
Version: "1.1",
BackgroundColor: "#205cf5",
ForegroundColor: "#ffffff",
FontSize: 12,
@@ -43,11 +47,13 @@ var http11protocol = api.Protocol{
}
var http2Protocol = api.Protocol{
Name: "http",
ProtocolSummary: api.ProtocolSummary{
Name: "http",
Version: "2.0",
Abbreviation: "HTTP/2",
},
LongName: "Hypertext Transfer Protocol Version 2 (HTTP/2)",
Abbreviation: "HTTP/2",
Macro: "http2",
Version: "2.0",
BackgroundColor: "#244c5a",
ForegroundColor: "#ffffff",
FontSize: 11,
@@ -57,11 +63,13 @@ var http2Protocol = api.Protocol{
}
var grpcProtocol = api.Protocol{
Name: "http",
ProtocolSummary: api.ProtocolSummary{
Name: "http",
Version: "2.0",
Abbreviation: "gRPC",
},
LongName: "Hypertext Transfer Protocol Version 2 (HTTP/2) [ gRPC over HTTP/2 ]",
Abbreviation: "gRPC",
Macro: "grpc",
Version: "2.0",
BackgroundColor: "#244c5a",
ForegroundColor: "#ffffff",
FontSize: 11,
@@ -71,11 +79,13 @@ var grpcProtocol = api.Protocol{
}
var graphQL1Protocol = api.Protocol{
Name: "http",
ProtocolSummary: api.ProtocolSummary{
Name: "http",
Version: "1.1",
Abbreviation: "GQL",
},
LongName: "Hypertext Transfer Protocol -- HTTP/1.1 [ GraphQL over HTTP/1.1 ]",
Abbreviation: "GQL",
Macro: "gql",
Version: "1.1",
BackgroundColor: "#e10098",
ForegroundColor: "#ffffff",
FontSize: 12,
@@ -85,11 +95,13 @@ var graphQL1Protocol = api.Protocol{
}
var graphQL2Protocol = api.Protocol{
Name: "http",
ProtocolSummary: api.ProtocolSummary{
Name: "http",
Version: "2.0",
Abbreviation: "GQL",
},
LongName: "Hypertext Transfer Protocol Version 2 (HTTP/2) [ GraphQL over HTTP/2 ]",
Abbreviation: "GQL",
Macro: "gql",
Version: "2.0",
BackgroundColor: "#e10098",
ForegroundColor: "#ffffff",
FontSize: 12,
@@ -99,12 +111,12 @@ var graphQL2Protocol = api.Protocol{
}
var protocolsMap = map[string]*api.Protocol{
fmt.Sprintf("%s/%s/%s", http10protocol.Name, http10protocol.Version, http10protocol.Abbreviation): &http10protocol,
fmt.Sprintf("%s/%s/%s", http11protocol.Name, http11protocol.Version, http11protocol.Abbreviation): &http11protocol,
fmt.Sprintf("%s/%s/%s", http2Protocol.Name, http2Protocol.Version, http2Protocol.Abbreviation): &http2Protocol,
fmt.Sprintf("%s/%s/%s", grpcProtocol.Name, grpcProtocol.Version, grpcProtocol.Abbreviation): &grpcProtocol,
fmt.Sprintf("%s/%s/%s", graphQL1Protocol.Name, graphQL1Protocol.Version, graphQL1Protocol.Abbreviation): &graphQL1Protocol,
fmt.Sprintf("%s/%s/%s", graphQL2Protocol.Name, graphQL2Protocol.Version, graphQL2Protocol.Abbreviation): &graphQL2Protocol,
http10protocol.ToString(): &http10protocol,
http11protocol.ToString(): &http11protocol,
http2Protocol.ToString(): &http2Protocol,
grpcProtocol.ToString(): &grpcProtocol,
graphQL1Protocol.ToString(): &graphQL1Protocol,
graphQL2Protocol.ToString(): &graphQL2Protocol,
}
const (
@@ -274,19 +286,13 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
reqDetails["pathSegments"] = strings.Split(path, "/")[1:]
// Rearrange the maps for the querying
reqDetails["_headers"] = reqDetails["headers"]
reqDetails["headers"] = mapSliceRebuildAsMap(reqDetails["_headers"].([]interface{}))
resDetails["_headers"] = resDetails["headers"]
resDetails["headers"] = mapSliceRebuildAsMap(resDetails["_headers"].([]interface{}))
reqDetails["headers"] = mapSliceRebuildAsMergedMap(reqDetails["headers"].([]interface{}))
resDetails["headers"] = mapSliceRebuildAsMergedMap(resDetails["headers"].([]interface{}))
reqDetails["_cookies"] = reqDetails["cookies"]
reqDetails["cookies"] = mapSliceRebuildAsMap(reqDetails["_cookies"].([]interface{}))
resDetails["_cookies"] = resDetails["cookies"]
resDetails["cookies"] = mapSliceRebuildAsMap(resDetails["_cookies"].([]interface{}))
reqDetails["cookies"] = mapSliceRebuildAsMergedMap(reqDetails["cookies"].([]interface{}))
resDetails["cookies"] = mapSliceRebuildAsMergedMap(resDetails["cookies"].([]interface{}))
reqDetails["_queryString"] = reqDetails["queryString"]
reqDetails["_queryStringMerged"] = mapSliceMergeRepeatedKeys(reqDetails["_queryString"].([]interface{}))
reqDetails["queryString"] = mapSliceRebuildAsMap(reqDetails["_queryStringMerged"].([]interface{}))
reqDetails["queryString"] = mapSliceRebuildAsMap(reqDetails["queryString"].([]interface{}))
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
if elapsedTime < 0 {
@@ -294,8 +300,8 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
}
return &api.Entry{
ProtocolId: fmt.Sprintf("%s/%s/%s", item.Protocol.Name, item.Protocol.Version, item.Protocol.Abbreviation),
Capture: item.Capture,
Protocol: item.Protocol.ProtocolSummary,
Capture: item.Capture,
Source: &api.TCP{
Name: resolvedSource,
IP: item.ConnectionInfo.ClientIP,
@@ -328,7 +334,7 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
return &api.BaseEntry{
Id: entry.Id,
Protocol: *protocolsMap[entry.ProtocolId],
Protocol: *protocolsMap[entry.Protocol.ToString()],
Capture: entry.Capture,
Summary: summary,
SummaryQuery: summaryQuery,
@@ -385,19 +391,19 @@ func representRequest(request map[string]interface{}) (repRequest []interface{})
repRequest = append(repRequest, api.SectionData{
Type: api.TABLE,
Title: "Headers",
Data: representMapSliceAsTable(request["_headers"].([]interface{}), `request.headers`),
Data: representMapAsTable(request["headers"].(map[string]interface{}), `request.headers`),
})
repRequest = append(repRequest, api.SectionData{
Type: api.TABLE,
Title: "Cookies",
Data: representMapSliceAsTable(request["_cookies"].([]interface{}), `request.cookies`),
Data: representMapAsTable(request["cookies"].(map[string]interface{}), `request.cookies`),
})
repRequest = append(repRequest, api.SectionData{
Type: api.TABLE,
Title: "Query String",
Data: representMapSliceAsTable(request["_queryStringMerged"].([]interface{}), `request.queryString`),
Data: representMapAsTable(request["queryString"].(map[string]interface{}), `request.queryString`),
})
postData, _ := request["postData"].(map[string]interface{})
@@ -473,13 +479,13 @@ func representResponse(response map[string]interface{}) (repResponse []interface
repResponse = append(repResponse, api.SectionData{
Type: api.TABLE,
Title: "Headers",
Data: representMapSliceAsTable(response["_headers"].([]interface{}), `response.headers`),
Data: representMapAsTable(response["headers"].(map[string]interface{}), `response.headers`),
})
repResponse = append(repResponse, api.SectionData{
Type: api.TABLE,
Title: "Cookies",
Data: representMapSliceAsTable(response["_cookies"].([]interface{}), `response.cookies`),
Data: representMapAsTable(response["cookies"].(map[string]interface{}), `response.cookies`),
})
content, _ := response["content"].(map[string]interface{})
@@ -515,10 +521,10 @@ func (d dissecting) Represent(request map[string]interface{}, response map[strin
func (d dissecting) Macros() map[string]string {
return map[string]string{
`http`: fmt.Sprintf(`protocol == "%s/%s/%s" or protocol == "%s/%s/%s"`, http10protocol.Name, http10protocol.Version, http10protocol.Abbreviation, http11protocol.Name, http11protocol.Version, http11protocol.Abbreviation),
`http2`: fmt.Sprintf(`protocol == "%s/%s/%s"`, http2Protocol.Name, http2Protocol.Version, http2Protocol.Abbreviation),
`grpc`: fmt.Sprintf(`protocol == "%s/%s/%s"`, grpcProtocol.Name, grpcProtocol.Version, grpcProtocol.Abbreviation),
`gql`: fmt.Sprintf(`protocol == "%s/%s/%s" or protocol == "%s/%s/%s"`, graphQL1Protocol.Name, graphQL1Protocol.Version, graphQL1Protocol.Abbreviation, graphQL2Protocol.Name, graphQL2Protocol.Version, graphQL2Protocol.Abbreviation),
`http`: fmt.Sprintf(`protocol.abbr == "%s"`, http11protocol.Abbreviation),
`http2`: fmt.Sprintf(`protocol.abbr == "%s"`, http2Protocol.Abbreviation),
`grpc`: fmt.Sprintf(`protocol.abbr == "%s"`, grpcProtocol.Abbreviation),
`gql`: fmt.Sprintf(`protocol.abbr == "%s"`, graphQL1Protocol.Abbreviation),
}
}

View File

@@ -44,10 +44,10 @@ func TestRegister(t *testing.T) {
func TestMacros(t *testing.T) {
expectedMacros := map[string]string{
"http": `protocol == "http/1.0/HTTP" or protocol == "http/1.1/HTTP"`,
"http2": `protocol == "http/2.0/HTTP/2"`,
"grpc": `protocol == "http/2.0/gRPC"`,
"gql": `protocol == "http/1.1/GQL" or protocol == "http/2.0/GQL"`,
"http": `protocol.abbr == "HTTP"`,
"http2": `protocol.abbr == "HTTP/2"`,
"grpc": `protocol.abbr == "gRPC"`,
"gql": `protocol.abbr == "GQL"`,
}
dissector := NewDissector()
macros := dissector.Macros()

View File

@@ -1,30 +1,14 @@
package http
import (
"bytes"
"encoding/json"
"encoding/xml"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
"github.com/beevik/etree"
"github.com/up9inc/mizu/tap/api"
)
const maskedFieldPlaceholderValue = "[REDACTED]"
const userAgent = "user-agent"
//these values MUST be all lower case and contain no `-` or `_` characters
var personallyIdentifiableDataFields = []string{"token", "authorization", "authentication", "cookie", "userid", "password",
"username", "user", "key", "passcode", "pass", "auth", "authtoken", "jwt",
"bearer", "clientid", "clientsecret", "redirecturi", "phonenumber",
"zip", "zipcode", "address", "country", "firstname", "lastname",
"middlename", "fname", "lname", "birthdate"}
func IsIgnoredUserAgent(item *api.OutputChannelItem, options *api.TrafficFilteringOptions) bool {
if item.Protocol.Name != "http" {
return false
@@ -48,192 +32,3 @@ func IsIgnoredUserAgent(item *api.OutputChannelItem, options *api.TrafficFilteri
return false
}
func FilterSensitiveData(item *api.OutputChannelItem, options *api.TrafficFilteringOptions) {
request := item.Pair.Request.Payload.(HTTPPayload).Data.(*http.Request)
response := item.Pair.Response.Payload.(HTTPPayload).Data.(*http.Response)
filterHeaders(&request.Header)
filterHeaders(&response.Header)
filterUrl(request.URL)
filterRequestBody(request, options)
filterResponseBody(response, options)
}
func filterRequestBody(request *http.Request, options *api.TrafficFilteringOptions) {
contenType := getContentTypeHeaderValue(request.Header)
body, err := ioutil.ReadAll(request.Body)
if err != nil {
return
}
filteredBody, err := filterHttpBody(body, contenType, options)
if err == nil {
request.Body = ioutil.NopCloser(bytes.NewBuffer(filteredBody))
} else {
request.Body = ioutil.NopCloser(bytes.NewBuffer(body))
}
}
func filterResponseBody(response *http.Response, options *api.TrafficFilteringOptions) {
contentType := getContentTypeHeaderValue(response.Header)
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return
}
filteredBody, err := filterHttpBody(body, contentType, options)
if err == nil {
response.Body = ioutil.NopCloser(bytes.NewBuffer(filteredBody))
} else {
response.Body = ioutil.NopCloser(bytes.NewBuffer(body))
}
}
func filterHeaders(headers *http.Header) {
for key := range *headers {
if strings.ToLower(key) == userAgent {
continue
}
if strings.ToLower(key) == "cookie" {
headers.Del(key)
} else if isFieldNameSensitive(key) {
headers.Set(key, maskedFieldPlaceholderValue)
}
}
}
func getContentTypeHeaderValue(headers http.Header) string {
for key := range headers {
if strings.ToLower(key) == "content-type" {
return headers.Get(key)
}
}
return ""
}
func isFieldNameSensitive(fieldName string) bool {
if fieldName == ":authority" {
return false
}
name := strings.ToLower(fieldName)
name = strings.ReplaceAll(name, "_", "")
name = strings.ReplaceAll(name, "-", "")
name = strings.ReplaceAll(name, " ", "")
for _, sensitiveField := range personallyIdentifiableDataFields {
if strings.Contains(name, sensitiveField) {
return true
}
}
return false
}
func filterHttpBody(bytes []byte, contentType string, options *api.TrafficFilteringOptions) ([]byte, error) {
mimeType := strings.Split(contentType, ";")[0]
switch strings.ToLower(mimeType) {
case "application/json":
return filterJsonBody(bytes)
case "text/html":
fallthrough
case "application/xhtml+xml":
fallthrough
case "text/xml":
fallthrough
case "application/xml":
return filterXmlEtree(bytes)
case "text/plain":
if options != nil && options.PlainTextMaskingRegexes != nil {
return filterPlainText(bytes, options), nil
}
}
return bytes, nil
}
func filterPlainText(bytes []byte, options *api.TrafficFilteringOptions) []byte {
for _, regex := range options.PlainTextMaskingRegexes {
bytes = regex.ReplaceAll(bytes, []byte(maskedFieldPlaceholderValue))
}
return bytes
}
func filterXmlEtree(bytes []byte) ([]byte, error) {
if !IsValidXML(bytes) {
return nil, errors.New("Invalid XML")
}
xmlDoc := etree.NewDocument()
err := xmlDoc.ReadFromBytes(bytes)
if err != nil {
return nil, err
} else {
filterXmlElement(xmlDoc.Root())
}
return xmlDoc.WriteToBytes()
}
func IsValidXML(data []byte) bool {
return xml.Unmarshal(data, new(interface{})) == nil
}
func filterXmlElement(element *etree.Element) {
for i, attribute := range element.Attr {
if isFieldNameSensitive(attribute.Key) {
element.Attr[i].Value = maskedFieldPlaceholderValue
}
}
if element.ChildElements() == nil || len(element.ChildElements()) == 0 {
if isFieldNameSensitive(element.Tag) {
element.SetText(maskedFieldPlaceholderValue)
}
} else {
for _, element := range element.ChildElements() {
filterXmlElement(element)
}
}
}
func filterJsonBody(bytes []byte) ([]byte, error) {
var bodyJsonMap map[string]interface{}
err := json.Unmarshal(bytes, &bodyJsonMap)
if err != nil {
return nil, err
}
filterJsonMap(bodyJsonMap)
return json.Marshal(bodyJsonMap)
}
func filterJsonMap(jsonMap map[string]interface{}) {
for key, value := range jsonMap {
// Do not replace nil values with maskedFieldPlaceholderValue
if value == nil {
continue
}
nestedMap, isNested := value.(map[string]interface{})
if isNested {
filterJsonMap(nestedMap)
} else {
if isFieldNameSensitive(key) {
jsonMap[key] = maskedFieldPlaceholderValue
}
}
}
}
func filterUrl(url *url.URL) {
if len(url.RawQuery) > 0 {
newQueryArgs := make([]string, 0)
for urlQueryParamName, urlQueryParamValues := range url.Query() {
newValues := urlQueryParamValues
if isFieldNameSensitive(urlQueryParamName) {
newValues = []string{maskedFieldPlaceholderValue}
}
for _, paramValue := range newValues {
newQueryArgs = append(newQueryArgs, fmt.Sprintf("%s=%s", urlQueryParamName, paramValue))
}
}
url.RawQuery = strings.Join(newQueryArgs, "&")
}
}

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/kafka/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect15/kafka/\* expect

View File

@@ -11,11 +11,13 @@ import (
)
var _protocol = api.Protocol{
Name: "kafka",
ProtocolSummary: api.ProtocolSummary{
Name: "kafka",
Version: "12",
Abbreviation: "KAFKA",
},
LongName: "Apache Kafka Protocol",
Abbreviation: "KAFKA",
Macro: "kafka",
Version: "12",
BackgroundColor: "#000000",
ForegroundColor: "#ffffff",
FontSize: 11,
@@ -25,7 +27,7 @@ var _protocol = api.Protocol{
}
var protocolsMap = map[string]*api.Protocol{
fmt.Sprintf("%s/%s/%s", _protocol.Name, _protocol.Version, _protocol.Abbreviation): &_protocol,
_protocol.ToString(): &_protocol,
}
type dissecting string
@@ -70,8 +72,8 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
elapsedTime = 0
}
return &api.Entry{
ProtocolId: fmt.Sprintf("%s/%s/%s", _protocol.Name, _protocol.Version, _protocol.Abbreviation),
Capture: item.Capture,
Protocol: _protocol.ProtocolSummary,
Capture: item.Capture,
Source: &api.TCP{
Name: resolvedSource,
IP: item.ConnectionInfo.ClientIP,
@@ -195,7 +197,7 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
return &api.BaseEntry{
Id: entry.Id,
Protocol: *protocolsMap[entry.ProtocolId],
Protocol: *protocolsMap[entry.Protocol.ToString()],
Capture: entry.Capture,
Summary: summary,
SummaryQuery: summaryQuery,
@@ -250,7 +252,7 @@ func (d dissecting) Represent(request map[string]interface{}, response map[strin
func (d dissecting) Macros() map[string]string {
return map[string]string{
`kafka`: fmt.Sprintf(`protocol == "%s/%s/%s"`, _protocol.Name, _protocol.Version, _protocol.Abbreviation),
`kafka`: fmt.Sprintf(`protocol.name == "%s"`, _protocol.Name),
}
}

View File

@@ -44,7 +44,7 @@ func TestRegister(t *testing.T) {
func TestMacros(t *testing.T) {
expectedMacros := map[string]string{
"kafka": `protocol == "kafka/12/KAFKA"`,
"kafka": `protocol.name == "kafka"`,
}
dissector := NewDissector()
macros := dissector.Macros()

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect14/redis/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect15/redis/\* expect

View File

@@ -11,11 +11,13 @@ import (
)
var protocol = api.Protocol{
Name: "redis",
ProtocolSummary: api.ProtocolSummary{
Name: "redis",
Version: "3.x",
Abbreviation: "REDIS",
},
LongName: "Redis Serialization Protocol",
Abbreviation: "REDIS",
Macro: "redis",
Version: "3.x",
BackgroundColor: "#a41e11",
ForegroundColor: "#ffffff",
FontSize: 11,
@@ -25,7 +27,7 @@ var protocol = api.Protocol{
}
var protocolsMap = map[string]*api.Protocol{
fmt.Sprintf("%s/%s/%s", protocol.Name, protocol.Version, protocol.Abbreviation): &protocol,
protocol.ToString(): &protocol,
}
type dissecting string
@@ -78,8 +80,8 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
elapsedTime = 0
}
return &api.Entry{
ProtocolId: fmt.Sprintf("%s/%s/%s", protocol.Name, protocol.Version, protocol.Abbreviation),
Capture: item.Capture,
Protocol: protocol.ProtocolSummary,
Capture: item.Capture,
Source: &api.TCP{
Name: resolvedSource,
IP: item.ConnectionInfo.ClientIP,
@@ -123,7 +125,7 @@ func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
return &api.BaseEntry{
Id: entry.Id,
Protocol: *protocolsMap[entry.ProtocolId],
Protocol: *protocolsMap[entry.Protocol.ToString()],
Capture: entry.Capture,
Summary: summary,
SummaryQuery: summaryQuery,
@@ -151,7 +153,7 @@ func (d dissecting) Represent(request map[string]interface{}, response map[strin
func (d dissecting) Macros() map[string]string {
return map[string]string{
`redis`: fmt.Sprintf(`protocol == "%s/%s/%s"`, protocol.Name, protocol.Version, protocol.Abbreviation),
`redis`: fmt.Sprintf(`protocol.name == "%s"`, protocol.Name),
}
}

View File

@@ -45,7 +45,7 @@ func TestRegister(t *testing.T) {
func TestMacros(t *testing.T) {
expectedMacros := map[string]string{
"redis": `protocol == "redis/3.x/REDIS"`,
"redis": `protocol.name == "redis"`,
}
dissector := NewDissector()
macros := dissector.Macros()

View File

@@ -9,7 +9,7 @@ docker build -t mizu-ebpf-builder . || exit 1
BPF_TARGET=amd64
BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_x86"
ARCH=$(uname -m)
if [[ $ARCH == "aarch64" ]]; then
if [[ $ARCH == "aarch64" || $ARCH == "arm64" ]]; then
BPF_TARGET=arm64
BPF_CFLAGS="-O2 -g -D__TARGET_ARCH_arm64"
fi
@@ -18,7 +18,7 @@ docker run --rm \
--name mizu-ebpf-builder \
-v $MIZU_HOME:/mizu \
-v $(go env GOPATH):/root/go \
-it mizu-ebpf-builder \
mizu-ebpf-builder \
sh -c "
BPF_TARGET=\"$BPF_TARGET\" BPF_CFLAGS=\"$BPF_CFLAGS\" go generate tap/tlstapper/tls_tapper.go
chown $(id -u):$(id -g) tap/tlstapper/tlstapper*_bpf*

View File

@@ -12,7 +12,7 @@ Copyright (C) UP9 Inc.
#include "include/common.h"
static __always_inline int add_address_to_chunk(struct pt_regs *ctx, struct tls_chunk* chunk, __u64 id, __u32 fd) {
static __always_inline int add_address_to_chunk(struct pt_regs *ctx, struct tls_chunk* chunk, __u64 id, __u32 fd, struct ssl_info* info) {
__u32 pid = id >> 32;
__u64 key = (__u64) pid << 32 | fd;
@@ -22,14 +22,29 @@ static __always_inline int add_address_to_chunk(struct pt_regs *ctx, struct tls_
return 0;
}
int err = bpf_probe_read(chunk->address, sizeof(chunk->address), fdinfo->ipv4_addr);
chunk->flags |= (fdinfo->flags & FLAGS_IS_CLIENT_BIT);
int err;
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_FD_ADDRESS, id, err, 0l);
return 0;
switch (info->address_info.mode) {
case ADDRESS_INFO_MODE_UNDEFINED:
chunk->address_info.mode = ADDRESS_INFO_MODE_SINGLE;
err = bpf_probe_read(&chunk->address_info.sport, sizeof(chunk->address_info.sport), &fdinfo->ipv4_addr[2]);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_FD_ADDRESS, id, err, 0l);
return 0;
}
err = bpf_probe_read(&chunk->address_info.saddr, sizeof(chunk->address_info.saddr), &fdinfo->ipv4_addr[4]);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_FD_ADDRESS, id, err, 0l);
return 0;
}
break;
default:
bpf_probe_read(&chunk->address_info, sizeof(chunk->address_info), &info->address_info);
}
chunk->flags |= (fdinfo->flags & FLAGS_IS_CLIENT_BIT);
return 1;
}
@@ -104,7 +119,7 @@ static __always_inline void output_ssl_chunk(struct pt_regs *ctx, struct ssl_inf
chunk->len = count_bytes;
chunk->fd = info->fd;
if (!add_address_to_chunk(ctx, chunk, id, chunk->fd)) {
if (!add_address_to_chunk(ctx, chunk, id, chunk->fd, info)) {
// Without an address, we drop the chunk because there is not much to do with it in Go
//
return;

View File

@@ -7,9 +7,11 @@ Copyright (C) UP9 Inc.
#ifndef __COMMON__
#define __COMMON__
#define AF_INET 2 /* Internet IP Protocol */
const __s32 invalid_fd = -1;
static int add_address_to_chunk(struct pt_regs *ctx, struct tls_chunk* chunk, __u64 id, __u32 fd);
static int add_address_to_chunk(struct pt_regs *ctx, struct tls_chunk* chunk, __u64 id, __u32 fd, struct ssl_info* info);
static void send_chunk_part(struct pt_regs *ctx, __u8* buffer, __u64 id, struct tls_chunk* chunk, int start, int end);
static void send_chunk(struct pt_regs *ctx, __u8* buffer, __u64 id, struct tls_chunk* chunk);
static void output_ssl_chunk(struct pt_regs *ctx, struct ssl_info* info, int count_bytes, __u64 id, __u32 flags);

View File

@@ -15,6 +15,7 @@ Copyright (C) UP9 Inc.
#include "legacy_kernel.h"
#include <bpf/bpf_endian.h>
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>
#include <bpf/bpf_core_read.h>

View File

@@ -26,6 +26,11 @@ Copyright (C) UP9 Inc.
#define LOG_ERROR_PUTTING_CONNECT_INFO (14)
#define LOG_ERROR_GETTING_CONNECT_INFO (15)
#define LOG_ERROR_READING_CONNECT_INFO (16)
#define LOG_ERROR_READING_SOCKET_FAMILY (17)
#define LOG_ERROR_READING_SOCKET_DADDR (18)
#define LOG_ERROR_READING_SOCKET_SADDR (19)
#define LOG_ERROR_READING_SOCKET_DPORT (20)
#define LOG_ERROR_READING_SOCKET_SPORT (21)
// Sometimes we have the same error, happening from different locations.
// in order to be able to distinct between them in the log, we add an

View File

@@ -24,6 +24,21 @@ Copyright (C) UP9 Inc.
//
// Be careful when editing, alignment and padding should be exactly the same in go/c.
//
typedef enum {
ADDRESS_INFO_MODE_UNDEFINED,
ADDRESS_INFO_MODE_SINGLE,
ADDRESS_INFO_MODE_PAIR,
} address_info_mode;
struct address_info {
address_info_mode mode;
__be32 saddr;
__be32 daddr;
__be16 sport;
__be16 dport;
};
struct tls_chunk {
__u32 pid;
__u32 tgid;
@@ -32,7 +47,7 @@ struct tls_chunk {
__u32 recorded;
__u32 fd;
__u32 flags;
__u8 address[16];
struct address_info address_info;
__u8 data[CHUNK_SIZE]; // Must be N^2
};
@@ -41,6 +56,7 @@ struct ssl_info {
__u32 buffer_len;
__u32 fd;
__u64 created_at_nano;
struct address_info address_info;
// for ssl_write and ssl_read must be zero
// for ssl_write_ex and ssl_read_ex save the *written/*readbytes pointer.

View File

@@ -42,6 +42,8 @@ static __always_inline int get_count_bytes(struct pt_regs *ctx, struct ssl_info*
}
static __always_inline void ssl_uprobe(struct pt_regs *ctx, void* ssl, void* buffer, int num, struct bpf_map_def* map_fd, size_t *count_ptr) {
long err;
__u64 id = bpf_get_current_pid_tgid();
if (!should_tap(id >> 32)) {
@@ -53,7 +55,7 @@ static __always_inline void ssl_uprobe(struct pt_regs *ctx, void* ssl, void* buf
info.count_ptr = count_ptr;
info.buffer = buffer;
long err = bpf_map_update_elem(map_fd, &id, &info, BPF_ANY);
err = bpf_map_update_elem(map_fd, &id, &info, BPF_ANY);
if (err != 0) {
log_error(ctx, LOG_ERROR_PUTTING_SSL_CONTEXT, id, err, 0l);
@@ -66,7 +68,7 @@ static __always_inline void ssl_uretprobe(struct pt_regs *ctx, struct bpf_map_de
if (!should_tap(id >> 32)) {
return;
}
struct ssl_info *infoPtr = bpf_map_lookup_elem(map_fd, &id);
if (infoPtr == NULL) {
@@ -99,10 +101,10 @@ static __always_inline void ssl_uretprobe(struct pt_regs *ctx, struct bpf_map_de
return;
}
int count_bytes = get_count_bytes(ctx, &info, id);
if (count_bytes <= 0) {
return;
}
int count_bytes = get_count_bytes(ctx, &info, id);
if (count_bytes <= 0) {
return;
}
output_ssl_chunk(ctx, &info, count_bytes, id, flags);
}

View File

@@ -0,0 +1,79 @@
#include "include/headers.h"
#include "include/maps.h"
#include "include/log.h"
#include "include/logger_messages.h"
#include "include/pids.h"
#include "include/common.h"
static __always_inline void tcp_kprobe(struct pt_regs *ctx, struct bpf_map_def *map_fd, _Bool is_send) {
long err;
__u64 id = bpf_get_current_pid_tgid();
__u32 pid = id >> 32;
if (!should_tap(id >> 32)) {
return;
}
struct ssl_info *info_ptr = bpf_map_lookup_elem(map_fd, &id);
// Happens when the connection is not tls
if (info_ptr == NULL) {
return;
}
struct sock *sk = (struct sock *) PT_REGS_PARM1(ctx);
short unsigned int family;
err = bpf_probe_read(&family, sizeof(family), (void *)&sk->__sk_common.skc_family);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_SOCKET_FAMILY, id, err, 0l);
return;
}
if (family != AF_INET) {
return;
}
// daddr, saddr and dport are in network byte order (big endian)
// sport is in host byte order
__be32 saddr;
__be32 daddr;
__be16 dport;
__u16 sport;
err = bpf_probe_read(&saddr, sizeof(saddr), (void *)&sk->__sk_common.skc_rcv_saddr);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_SOCKET_SADDR, id, err, 0l);
return;
}
err = bpf_probe_read(&daddr, sizeof(daddr), (void *)&sk->__sk_common.skc_daddr);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_SOCKET_DADDR, id, err, 0l);
return;
}
err = bpf_probe_read(&dport, sizeof(dport), (void *)&sk->__sk_common.skc_dport);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_SOCKET_DPORT, id, err, 0l);
return;
}
err = bpf_probe_read(&sport, sizeof(sport), (void *)&sk->__sk_common.skc_num);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_SOCKET_SPORT, id, err, 0l);
return;
}
info_ptr->address_info.mode = ADDRESS_INFO_MODE_PAIR;
info_ptr->address_info.daddr = daddr;
info_ptr->address_info.saddr = saddr;
info_ptr->address_info.dport = dport;
info_ptr->address_info.sport = bpf_htons(sport);
}
SEC("kprobe/tcp_sendmsg")
void BPF_KPROBE(tcp_sendmsg) {
tcp_kprobe(ctx, &openssl_write_context, true);
}
SEC("kprobe/tcp_recvmsg")
void BPF_KPROBE(tcp_recvmsg) {
tcp_kprobe(ctx, &openssl_read_context, false);
}

View File

@@ -15,6 +15,7 @@ Copyright (C) UP9 Inc.
//
#include "common.c"
#include "openssl_uprobes.c"
#include "tcp_kprobes.c"
#include "go_uprobes.c"
#include "fd_tracepoints.c"
#include "fd_to_address_tracepoints.c"

View File

@@ -20,4 +20,9 @@ var bpfLogMessages = []string{
/*0014*/ "[%d] Unable to put connect info [err: %d]",
/*0015*/ "[%d] Unable to get connect info",
/*0016*/ "[%d] Unable to read connect info [err: %d]",
/*0017*/ "[%d] Unable to read socket family [err: %d]",
/*0018*/ "[%d] Unable to read socket daddr [err: %d]",
/*0019*/ "[%d] Unable to read socket saddr [err: %d]",
/*0019*/ "[%d] Unable to read socket dport [err: %d]",
/*0021*/ "[%d] Unable to read socket sport [err: %d]",
}

View File

@@ -1,38 +1,33 @@
package tlstapper
import (
"bytes"
"encoding/binary"
"net"
"unsafe"
"github.com/go-errors/errors"
"github.com/up9inc/mizu/tap/api"
)
const FlagsIsClientBit uint32 = 1 << 0
const FlagsIsReadBit uint32 = 1 << 1
const (
addressInfoModeUndefined = iota
addressInfoModeSingle
addressInfoModePair
)
func (c *tlsTapperTlsChunk) getAddress() (net.IP, uint16, error) {
address := bytes.NewReader(c.Address[:])
var family uint16
var port uint16
var ip32 uint32
func (c *tlsTapperTlsChunk) getSrcAddress() (net.IP, uint16) {
ip := intToIP(c.AddressInfo.Saddr)
port := ntohs(c.AddressInfo.Sport)
if err := binary.Read(address, binary.BigEndian, &family); err != nil {
return nil, 0, errors.Wrap(err, 0)
}
return ip, port
}
if err := binary.Read(address, binary.BigEndian, &port); err != nil {
return nil, 0, errors.Wrap(err, 0)
}
func (c *tlsTapperTlsChunk) getDstAddress() (net.IP, uint16) {
ip := intToIP(c.AddressInfo.Daddr)
port := ntohs(c.AddressInfo.Dport)
if err := binary.Read(address, binary.BigEndian, &ip32); err != nil {
return nil, 0, errors.Wrap(err, 0)
}
ip := net.IP{uint8(ip32 >> 24), uint8(ip32 >> 16), uint8(ip32 >> 8), uint8(ip32)}
return ip, port, nil
return ip, port
}
func (c *tlsTapperTlsChunk) isClient() bool {
@@ -59,26 +54,54 @@ func (c *tlsTapperTlsChunk) isRequest() bool {
return (c.isClient() && c.isWrite()) || (c.isServer() && c.isRead())
}
func (c *tlsTapperTlsChunk) getAddressPair() (addressPair, error) {
ip, port, err := c.getAddress()
func (c *tlsTapperTlsChunk) getAddressPair() (addressPair, bool) {
var (
srcIp, dstIp net.IP
srcPort, dstPort uint16
full bool
)
if err != nil {
return addressPair{}, err
switch c.AddressInfo.Mode {
case addressInfoModeSingle:
if c.isRequest() {
srcIp, srcPort = api.UnknownIp, api.UnknownPort
dstIp, dstPort = c.getSrcAddress()
} else {
srcIp, srcPort = c.getSrcAddress()
dstIp, dstPort = api.UnknownIp, api.UnknownPort
}
full = false
case addressInfoModePair:
if c.isRequest() {
srcIp, srcPort = c.getSrcAddress()
dstIp, dstPort = c.getDstAddress()
} else {
srcIp, srcPort = c.getDstAddress()
dstIp, dstPort = c.getSrcAddress()
}
full = true
case addressInfoModeUndefined:
srcIp, srcPort = api.UnknownIp, api.UnknownPort
dstIp, dstPort = api.UnknownIp, api.UnknownPort
full = false
}
if c.isRequest() {
return addressPair{
srcIp: api.UnknownIp,
srcPort: api.UnknownPort,
dstIp: ip,
dstPort: port,
}, nil
} else {
return addressPair{
srcIp: ip,
srcPort: port,
dstIp: api.UnknownIp,
dstPort: api.UnknownPort,
}, nil
}
return addressPair{
srcIp: srcIp,
srcPort: srcPort,
dstIp: dstIp,
dstPort: dstPort,
}, full
}
// intToIP converts IPv4 number to net.IP
func intToIP(ip32be uint32) net.IP {
return net.IPv4(uint8(ip32be), uint8(ip32be>>8), uint8(ip32be>>16), uint8(ip32be>>24))
}
// ntohs converts big endian (network byte order) to little endian (assuming that's the host byte order)
func ntohs(i16be uint16) uint16 {
b := make([]byte, 2)
binary.BigEndian.PutUint16(b, i16be)
return *(*uint16)(unsafe.Pointer(&b[0]))
}
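Because the kprobe stores sport with bpf_htons while saddr, daddr and dport already arrive in network byte order, the Go side converts everything back with these two helpers. An illustrative sanity check, assuming it sits in the tlstapper package next to them (the literals are what a little-endian host would read for 127.0.0.1 and port 80):
// Hypothetical test, not part of this diff.
package tlstapper

import (
    "net"
    "testing"
)

func TestAddressConversions(t *testing.T) {
    // 127.0.0.1 in network byte order, read into a uint32 on a little-endian host.
    if got := intToIP(0x0100007f); !got.Equal(net.IPv4(127, 0, 0, 1)) {
        t.Errorf("intToIP: got %v, want 127.0.0.1", got)
    }
    // Port 80 in network byte order, read into a uint16 on a little-endian host.
    if got := ntohs(0x5000); got != 80 {
        t.Errorf("ntohs: got %d, want 80", got)
    }
}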

View File

@@ -14,6 +14,8 @@ type sslHooks struct {
sslWriteExRetProbe link.Link
sslReadExProbe link.Link
sslReadExRetProbe link.Link
tcpSendmsg link.Link
tcpRecvmsg link.Link
}
func (s *sslHooks) installUprobes(bpfObjects *tlsTapperObjects, sslLibraryPath string) error {
@@ -103,6 +105,16 @@ func (s *sslHooks) installSslHooks(bpfObjects *tlsTapperObjects, sslLibrary *lin
}
}
s.tcpSendmsg, err = link.Kprobe("tcp_sendmsg", bpfObjects.TcpSendmsg, nil)
if err != nil {
return errors.Wrap(err, 0)
}
s.tcpRecvmsg, err = link.Kprobe("tcp_recvmsg", bpfObjects.TcpRecvmsg, nil)
if err != nil {
return errors.Wrap(err, 0)
}
return nil
}
@@ -149,5 +161,17 @@ func (s *sslHooks) close() []error {
}
}
if s.tcpSendmsg != nil {
if err := s.tcpSendmsg.Close(); err != nil {
returnValue = append(returnValue, err)
}
}
if s.tcpRecvmsg != nil {
if err := s.tcpRecvmsg.Close(); err != nil {
returnValue = append(returnValue, err)
}
}
return returnValue
}

View File

@@ -134,14 +134,9 @@ func (p *tlsPoller) pollChunksPerfBuffer(chunks chan<- *tlsTapperTlsChunk) {
func (p *tlsPoller) handleTlsChunk(chunk *tlsTapperTlsChunk, extension *api.Extension, emitter api.Emitter,
options *api.TrafficFilteringOptions, streamsMap api.TcpStreamMap) error {
address, err := p.getSockfdAddressPair(chunk)
address, err := p.getAddressPair(chunk)
if err != nil {
address, err = chunk.getAddressPair()
if err != nil {
return err
}
return err
}
key := buildTlsKey(address)
@@ -161,6 +156,22 @@ func (p *tlsPoller) handleTlsChunk(chunk *tlsTapperTlsChunk, extension *api.Exte
return nil
}
func (p *tlsPoller) getAddressPair(chunk *tlsTapperTlsChunk) (addressPair, error) {
addrPairFromChunk, full := chunk.getAddressPair()
if full {
return addrPairFromChunk, nil
}
addrPairFromSockfd, err := p.getSockfdAddressPair(chunk)
if err == nil {
return addrPairFromSockfd, nil
} else {
logger.Log.Error("failed to get address from sock fd:", err)
}
return addrPairFromChunk, err
}
func (p *tlsPoller) startNewTlsReader(chunk *tlsTapperTlsChunk, address *addressPair, key string,
emitter api.Emitter, extension *api.Extension, options *api.TrafficFilteringOptions,
streamsMap api.TcpStreamMap) *tlsReader {

View File

@@ -19,15 +19,21 @@ type tlsTapper46GoidOffsets struct {
}
type tlsTapper46TlsChunk struct {
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
Address [16]uint8
Data [4096]uint8
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
AddressInfo struct {
Mode int32
Saddr uint32
Daddr uint32
Sport uint16
Dport uint16
}
Data [4096]uint8
}
// loadTlsTapper46 returns the embedded CollectionSpec for tlsTapper46.
@@ -93,6 +99,8 @@ type tlsTapper46ProgramSpecs struct {
SysEnterWrite *ebpf.ProgramSpec `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.ProgramSpec `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.ProgramSpec `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.ProgramSpec `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.ProgramSpec `ebpf:"tcp_sendmsg"`
}
// tlsTapper46MapSpecs contains maps before they are loaded into the kernel.
@@ -189,6 +197,8 @@ type tlsTapper46Programs struct {
SysEnterWrite *ebpf.Program `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.Program `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.Program `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.Program `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.Program `ebpf:"tcp_sendmsg"`
}
func (p *tlsTapper46Programs) Close() error {
@@ -215,6 +225,8 @@ func (p *tlsTapper46Programs) Close() error {
p.SysEnterWrite,
p.SysExitAccept4,
p.SysExitConnect,
p.TcpRecvmsg,
p.TcpSendmsg,
)
}

View File

@@ -19,15 +19,21 @@ type tlsTapper46GoidOffsets struct {
}
type tlsTapper46TlsChunk struct {
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
Address [16]uint8
Data [4096]uint8
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
AddressInfo struct {
Mode int32
Saddr uint32
Daddr uint32
Sport uint16
Dport uint16
}
Data [4096]uint8
}
// loadTlsTapper46 returns the embedded CollectionSpec for tlsTapper46.
@@ -93,6 +99,8 @@ type tlsTapper46ProgramSpecs struct {
SysEnterWrite *ebpf.ProgramSpec `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.ProgramSpec `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.ProgramSpec `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.ProgramSpec `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.ProgramSpec `ebpf:"tcp_sendmsg"`
}
// tlsTapper46MapSpecs contains maps before they are loaded into the kernel.
@@ -189,6 +197,8 @@ type tlsTapper46Programs struct {
SysEnterWrite *ebpf.Program `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.Program `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.Program `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.Program `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.Program `ebpf:"tcp_sendmsg"`
}
func (p *tlsTapper46Programs) Close() error {
@@ -215,6 +225,8 @@ func (p *tlsTapper46Programs) Close() error {
p.SysEnterWrite,
p.SysExitAccept4,
p.SysExitConnect,
p.TcpRecvmsg,
p.TcpSendmsg,
)
}

View File

@@ -19,15 +19,21 @@ type tlsTapperGoidOffsets struct {
}
type tlsTapperTlsChunk struct {
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
Address [16]uint8
Data [4096]uint8
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
AddressInfo struct {
Mode int32
Saddr uint32
Daddr uint32
Sport uint16
Dport uint16
}
Data [4096]uint8
}
// loadTlsTapper returns the embedded CollectionSpec for tlsTapper.
@@ -93,6 +99,8 @@ type tlsTapperProgramSpecs struct {
SysEnterWrite *ebpf.ProgramSpec `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.ProgramSpec `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.ProgramSpec `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.ProgramSpec `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.ProgramSpec `ebpf:"tcp_sendmsg"`
}
// tlsTapperMapSpecs contains maps before they are loaded into the kernel.
@@ -189,6 +197,8 @@ type tlsTapperPrograms struct {
SysEnterWrite *ebpf.Program `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.Program `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.Program `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.Program `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.Program `ebpf:"tcp_sendmsg"`
}
func (p *tlsTapperPrograms) Close() error {
@@ -215,6 +225,8 @@ func (p *tlsTapperPrograms) Close() error {
p.SysEnterWrite,
p.SysExitAccept4,
p.SysExitConnect,
p.TcpRecvmsg,
p.TcpSendmsg,
)
}

View File

@@ -19,15 +19,21 @@ type tlsTapperGoidOffsets struct {
}
type tlsTapperTlsChunk struct {
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
Address [16]uint8
Data [4096]uint8
Pid uint32
Tgid uint32
Len uint32
Start uint32
Recorded uint32
Fd uint32
Flags uint32
AddressInfo struct {
Mode int32
Saddr uint32
Daddr uint32
Sport uint16
Dport uint16
}
Data [4096]uint8
}
// loadTlsTapper returns the embedded CollectionSpec for tlsTapper.
@@ -93,6 +99,8 @@ type tlsTapperProgramSpecs struct {
SysEnterWrite *ebpf.ProgramSpec `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.ProgramSpec `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.ProgramSpec `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.ProgramSpec `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.ProgramSpec `ebpf:"tcp_sendmsg"`
}
// tlsTapperMapSpecs contains maps before they are loaded into the kernel.
@@ -189,6 +197,8 @@ type tlsTapperPrograms struct {
SysEnterWrite *ebpf.Program `ebpf:"sys_enter_write"`
SysExitAccept4 *ebpf.Program `ebpf:"sys_exit_accept4"`
SysExitConnect *ebpf.Program `ebpf:"sys_exit_connect"`
TcpRecvmsg *ebpf.Program `ebpf:"tcp_recvmsg"`
TcpSendmsg *ebpf.Program `ebpf:"tcp_sendmsg"`
}
func (p *tlsTapperPrograms) Close() error {
@@ -215,6 +225,8 @@ func (p *tlsTapperPrograms) Close() error {
p.SysEnterWrite,
p.SysExitAccept4,
p.SysExitConnect,
p.TcpRecvmsg,
p.TcpSendmsg,
)
}

Binary file not shown.

File diff suppressed because it is too large

View File

@@ -26,14 +26,15 @@
"@craco/craco": "^6.4.3",
"@types/jest": "^26.0.24",
"@types/node": "^12.20.54",
"sass": "^1.52.3",
"react": "^17.0.2",
"react-copy-to-clipboard": "^5.1.0",
"react-dom": "^17.0.2",
"recoil": "^0.7.2"
"recoil": "^0.7.2",
"sass": "^1.52.3"
},
"dependencies": {
"@craco/craco": "^6.4.3",
"@elastic/eui": "^60.2.0",
"@emotion/react": "^11.9.0",
"@emotion/styled": "^11.8.1",
"@mui/icons-material": "^5.8.2",
@@ -65,12 +66,14 @@
"recharts": "^2.1.10",
"redoc": "^2.0.0-rc.71",
"styled-components": "^5.3.5",
"use-file-picker": "^1.4.2",
"web-vitals": "^2.1.4",
"xml-formatter": "^2.6.1"
},
"devDependencies": {
"@rollup/plugin-node-resolve": "^13.3.0",
"@svgr/rollup": "^6.2.1",
"@types/ace": "^0.0.48",
"cross-env": "^7.0.3",
"env-cmd": "^10.1.0",
"gh-pages": "^4.0.0",
@@ -83,7 +86,7 @@
"rollup-plugin-postcss": "^4.0.2",
"rollup-plugin-sass": "^1.2.12",
"rollup-plugin-scss": "^3.0.0",
"typescript": "^4.7.2"
"typescript": "^4.5.3"
},
"eslintConfig": {
"extends": [
@@ -92,6 +95,7 @@
]
},
"files": [
"src/*.scss",
"dist"
]
}

View File

@@ -17,6 +17,6 @@
width: 100%;
width: -moz-available;
width: -webkit-fill-available;
width: strech;
width: stretch;
}
}
}

View File

@@ -1,20 +1,20 @@
import React, {useCallback, useEffect, useMemo, useState} from "react";
import React, { useCallback, useEffect, useMemo, useState } from "react";
import styles from './EntriesList.module.sass';
import ScrollableFeedVirtualized from "react-scrollable-feed-virtualized";
import Moment from 'moment';
import {EntryItem} from "../EntryListItem/EntryListItem";
import { EntryItem } from "../EntryListItem/EntryListItem";
import down from "assets/downImg.svg";
import spinner from 'assets/spinner.svg';
import {RecoilState, useRecoilState, useRecoilValue, useSetRecoilState} from "recoil";
import { RecoilState, useRecoilState, useRecoilValue, useSetRecoilState } from "recoil";
import entriesAtom from "../../recoil/entries";
import queryAtom from "../../recoil/query";
import TrafficViewerApiAtom from "../../recoil/TrafficViewerApi";
import TrafficViewerApi from "../TrafficViewer/TrafficViewerApi";
import focusedEntryIdAtom from "../../recoil/focusedEntryId";
import {toast} from "react-toastify";
import {MAX_ENTRIES, TOAST_CONTAINER_ID} from "../../configs/Consts";
import { toast } from "react-toastify";
import { MAX_ENTRIES, TOAST_CONTAINER_ID } from "../../configs/Consts";
import tappingStatusAtom from "../../recoil/tappingStatus";
import leftOffTopAtom from "../../recoil/leftOffTop";
import Moment from "moment";
interface EntriesListProps {
listEntryREF: any;

View File

@@ -6,6 +6,7 @@ import { ReactComponent as ReplayIcon } from './replay.svg';
import styles from './EntryViewer.module.sass';
import { Tabs } from "../../UI";
import replayRequestModalOpenAtom from "../../../recoil/replayRequestModalOpen";
import entryDetailedConfigAtom, { EntryDetailedConfig } from "../../../recoil/entryDetailedConfig";
const enabledProtocolsForReplay = ["http"]
@@ -16,10 +17,11 @@ export enum TabsEnum {
export const AutoRepresentation: React.FC<any> = ({ representation, color, openedTab = TabsEnum.Request, isDisplayReplay = false }) => {
const entryData = useRecoilValue(entryDataAtom)
const { isReplayEnabled } = useRecoilValue<EntryDetailedConfig>(entryDetailedConfigAtom)
const setIsOpenRequestModal = useSetRecoilState(replayRequestModalOpenAtom)
const isReplayDisplayed = useCallback(() => {
return enabledProtocolsForReplay.find(x => x === entryData.protocol.name) && isDisplayReplay
}, [entryData.protocol.name, isDisplayReplay])
return enabledProtocolsForReplay.find(x => x === entryData.protocol.name) && isDisplayReplay && isReplayEnabled
}, [entryData.protocol.name, isDisplayReplay, isReplayEnabled])
const { request, response } = JSON.parse(representation);
@@ -27,20 +29,18 @@ export const AutoRepresentation: React.FC<any> = ({ representation, color, opene
const arr = [
{
tab: 'Request',
badge: isReplayDisplayed() && <span title="Replay Request"><ReplayIcon fill={color} stroke={color} style={{ marginLeft: "10px", cursor: "pointer", height: "22px" }} onClick={() => setIsOpenRequestModal(true)} /></span>
badge: null
}]
if (response) {
arr.push(
{
tab: 'Response',
badge: null
}
);
if (response && response.length > 0) {
arr.push({
tab: 'Response',
badge: null
});
}
return arr
}, [color, isReplayDisplayed, response, setIsOpenRequestModal]);
}, [response]);
const [currentTab, setCurrentTab] = useState(TABS[0].tab);
@@ -66,11 +66,12 @@ export const AutoRepresentation: React.FC<any> = ({ representation, color, opene
{<div className={styles.body}>
<div className={styles.bodyHeader}>
<Tabs tabs={TABS} currentTab={currentTab} color={color} onChange={setCurrentTab} leftAligned />
{isReplayDisplayed() && <span title="Replay Request"><ReplayIcon fill={color} stroke={color} style={{ marginLeft: "10px", cursor: "pointer", height: "22px" }} onClick={() => setIsOpenRequestModal(true)} /></span>}
</div>
{getOpenedTabIndex() === TabsEnum.Request && <React.Fragment>
<SectionsRepresentation data={request} color={color} requestRepresentation={request} />
</React.Fragment>}
{response && getOpenedTabIndex() === TabsEnum.Response && <React.Fragment>
{response && response.length > 0 && getOpenedTabIndex() === TabsEnum.Response && <React.Fragment>
<SectionsRepresentation data={response} color={color} />
</React.Fragment>}
</div>}

View File

@@ -52,8 +52,13 @@
border-radius: 4px
padding: 10px
position: relative
.bodyHeader
padding: 0 1rem
display: flex
align-items: center
justify-content: space-between
.endpointURL
font-size: .75rem
display: block

View File

@@ -22,6 +22,7 @@ import leftOffTopAtom from "../../recoil/leftOffTop";
import { DEFAULT_LEFTOFF, DEFAULT_FETCH, DEFAULT_FETCH_TIMEOUT_MS } from '../../hooks/useWS';
import ReplayRequestModalContainer from "../modals/ReplayRequestModal/ReplayRequestModal";
import replayRequestModalOpenAtom from "../../recoil/replayRequestModalOpen";
import entryDetailedConfigAtom, { EntryDetailedConfig } from "../../recoil/entryDetailedConfig";
const useLayoutStyles = makeStyles(() => ({
details: {
@@ -51,18 +52,22 @@ interface TrafficViewerProps {
webSocketUrl: string,
shouldCloseWebSocket: boolean,
setShouldCloseWebSocket: (flag: boolean) => void,
isDemoBannerView: boolean
isDemoBannerView: boolean,
entryDetailedConfig: EntryDetailedConfig
}
export const TrafficViewer: React.FC<TrafficViewerProps> = ({
trafficViewerApiProp,
actionButtons, isShowStatusBar, webSocketUrl,
shouldCloseWebSocket, setShouldCloseWebSocket, isDemoBannerView
}) => {
trafficViewerApiProp,
webSocketUrl,
actionButtons,
isShowStatusBar, isDemoBannerView,
shouldCloseWebSocket, setShouldCloseWebSocket,
entryDetailedConfig }) => {
const classes = useLayoutStyles();
const setEntries = useSetRecoilState(entriesAtom);
const setFocusedEntryId = useSetRecoilState(focusedEntryIdAtom);
const setEntryDetailedConfigAtom = useSetRecoilState(entryDetailedConfigAtom)
const query = useRecoilValue(queryAtom);
const setTrafficViewerApiState = useSetRecoilState(trafficViewerApiAtom as RecoilState<TrafficViewerApi>)
const [tappingStatus, setTappingStatus] = useRecoilState(tappingStatusAtom);
@@ -183,6 +188,10 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
};
}, []);
useEffect(() => {
setEntryDetailedConfigAtom(entryDetailedConfig)
}, [entryDetailedConfig, setEntryDetailedConfigAtom])
const getConnectionIndicator = () => {
switch (wsReadyState) {
case WebSocket.OPEN:
@@ -258,7 +267,7 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
</div>
</div>
<div className={classes.details} id="rightSideContainer">
<EntryDetailed/>
<EntryDetailed />
</div>
</div>}
</div>
@@ -266,25 +275,19 @@ export const TrafficViewer: React.FC<TrafficViewerProps> = ({
};
const MemorizedTrafficViewer = React.memo(TrafficViewer)
const TrafficViewerContainer: React.FC<TrafficViewerProps> = ({
trafficViewerApiProp,
actionButtons, isShowStatusBar = true,
webSocketUrl, shouldCloseWebSocket, setShouldCloseWebSocket, isDemoBannerView
}) => {
const TrafficViewerContainer: React.FC<TrafficViewerProps> = (props) => {
return <RecoilRoot>
<MemorizedTrafficViewer actionButtons={actionButtons} isShowStatusBar={isShowStatusBar} webSocketUrl={webSocketUrl}
shouldCloseWebSocket={shouldCloseWebSocket} setShouldCloseWebSocket={setShouldCloseWebSocket} trafficViewerApiProp={trafficViewerApiProp}
isDemoBannerView={isDemoBannerView}/>
<MemorizedTrafficViewer {...props} />
<ToastContainer enableMultiContainer containerId={TOAST_CONTAINER_ID}
position="bottom-right"
autoClose={5000}
hideProgressBar={false}
newestOnTop={false}
closeOnClick
rtl={false}
pauseOnFocusLoss
draggable
pauseOnHover/>
position="bottom-right"
autoClose={5000}
hideProgressBar={false}
newestOnTop={false}
closeOnClick
rtl={false}
pauseOnFocusLoss
draggable
pauseOnHover />
<ReplayRequestModalContainer />
</RecoilRoot>
}
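A hedged wiring sketch for the reworked container (prop names are taken from TrafficViewerProps in the diff above; the import paths, URL, api instance and flag values are placeholders): the new entryDetailedConfig prop is forwarded into entryDetailedConfigAtom, which is what gates the Replay icon in AutoRepresentation.

import React from 'react';
import TrafficViewerContainer from './TrafficViewer'; // assumed path and default export
import trafficViewerApi from './trafficViewerApi';    // assumed api instance

const App: React.FC = () => (
    <TrafficViewerContainer
        webSocketUrl="ws://localhost:8899/ws"           // placeholder URL
        trafficViewerApiProp={trafficViewerApi}
        actionButtons={<React.Fragment />}
        isShowStatusBar={true}
        isDemoBannerView={false}
        shouldCloseWebSocket={false}
        setShouldCloseWebSocket={() => { /* no-op for the sketch */ }}
        entryDetailedConfig={{ isReplayEnabled: true }} // feature flag for the Replay modal
    />
);

export default App;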

View File

@@ -37,11 +37,6 @@ const CodeEditor: React.FC<CodeEditorProps> = ({
theme="github"
onChange={onChange}
editorProps={{ $blockScrolling: true }}
setOptions={{
enableBasicAutocompletion: true,
enableLiveAutocompletion: true,
enableSnippets: true
}}
showPrintMargin={false}
value={code}
width="100%"

View File

@@ -0,0 +1,33 @@
import React from 'react';
import { useEffect } from 'react';
import { useFilePicker } from 'use-file-picker';
import { FileContent } from 'use-file-picker/dist/interfaces';
interface IFilePickerProps {
onLoadingComplete: (file: FileContent) => void;
elem: any
}
const FilePicker = ({ elem, onLoadingComplete }: IFilePickerProps) => {
const [openFileSelector, { filesContent }] = useFilePicker({
accept: ['.json'],
limitFilesConfig: { max: 1 },
maxFileSize: 1
});
const onFileSelectorClick = (e) => {
e.preventDefault();
e.stopPropagation();
openFileSelector();
}
useEffect(() => {
filesContent.length && onLoadingComplete(filesContent[0])
}, [filesContent, onLoadingComplete]);
return (<React.Fragment>
{React.cloneElement(elem, { onClick: onFileSelectorClick })}
</React.Fragment>)
}
export default FilePicker;
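A minimal usage sketch of the new FilePicker wrapper (the import path and the host component are assumptions; FilePicker and FileContent come from the file above). FilePicker clones the passed element, injects an onClick that opens the native file dialog, and reports the selected .json content through onLoadingComplete:

import React from 'react';
import { FileContent } from 'use-file-picker/dist/interfaces';
import FilePicker from './FilePicker'; // assumed relative path

const ImportJsonButton: React.FC<{ onJson: (parsed: unknown) => void }> = ({ onJson }) => {
    const onLoadingComplete = (file: FileContent) => {
        // file.content holds the raw text of the single selected .json file
        onJson(JSON.parse(file.content));
    };
    return <FilePicker elem={<button>Upload</button>} onLoadingComplete={onLoadingComplete} />;
};

export default ImportJsonButton;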

View File

@@ -75,5 +75,6 @@ const KeyValueTable: React.FC<KeyValueTableProps> = ({ data, onDataChange, keyPl
})}
</div>
}
export const convertParamsToArr = (paramsObj) => Object.entries(paramsObj).map(([key, value]) => { return { key, value } })
export const convertArrToKeyValueObject = (arr) => arr.reduce((acc, curr) => { acc[curr.key] = curr.value; return acc }, {})
export default KeyValueTable
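A small sketch of how the two helpers exported above complement each other (the sample data is illustrative; only convertParamsToArr and convertArrToKeyValueObject come from the file):

import { convertArrToKeyValueObject, convertParamsToArr } from './KeyValueTable'; // assumed relative path

// Plain object -> editable rows for the KeyValueTable component
const rows = convertParamsToArr({ page: "2", sort: "desc" });
// [{ key: "page", value: "2" }, { key: "sort", value: "desc" }]

// Rows -> plain object again, e.g. when building headers for a replayed request
const obj = convertArrToKeyValueObject(rows);
// { page: "2", sort: "desc" }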

View File

@@ -79,5 +79,12 @@
overflow: hidden
b::after
content: '\b'
content: '\b'
display: inline
.icon
width: 24px
height: 26px
stroke-width: 0px
fill: $blue-color
stroke: $blue-color

View File

@@ -1,25 +1,30 @@
import { Accordion, AccordionDetails, AccordionSummary, Backdrop, Box, Button, Fade, Modal } from "@mui/material";
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import DownloadIcon from '@mui/icons-material/FileDownloadOutlined';
import UploadIcon from '@mui/icons-material/UploadFile';
import closeIcon from "assets/close.svg";
import refreshImg from "assets/refresh.svg";
import { Accordion, AccordionDetails, AccordionSummary, Backdrop, Box, Button, Fade, Modal } from "@mui/material";
import React, { Fragment, useCallback, useEffect, useState } from "react";
import { useCommonStyles } from "../../../helpers/commonStyle";
import { Tabs } from "../../UI";
import KeyValueTable from "../../UI/KeyValueTable/KeyValueTable";
import CodeEditor from "../../UI/CodeEditor/CodeEditor";
import { useRecoilValue, RecoilState, useRecoilState } from "recoil";
import TrafficViewerApiAtom from "../../../recoil/TrafficViewerApi/atom";
import TrafficViewerApi from "../../TrafficViewer/TrafficViewerApi";
import { toast } from "react-toastify";
import { RecoilState, useRecoilState, useRecoilValue } from "recoil";
import { FileContent } from "use-file-picker/dist/interfaces";
import { TOAST_CONTAINER_ID } from "../../../configs/Consts";
import styles from './ReplayRequestModal.module.sass'
import closeIcon from "assets/close.svg"
import refreshImg from "assets/refresh.svg"
import { formatRequestWithOutError } from "../../EntryDetailed/EntrySections/EntrySections";
import entryDataAtom from "../../../recoil/entryData";
import { AutoRepresentation, TabsEnum } from "../../EntryDetailed/EntryViewer/AutoRepresentation";
import useDebounce from "../../../hooks/useDebounce"
import replayRequestModalOpenAtom from "../../../recoil/replayRequestModalOpen";
import { useCommonStyles } from "../../../helpers/commonStyle";
import { Utils } from "../../../helpers/Utils";
import useDebounce from "../../../hooks/useDebounce";
import entryDataAtom from "../../../recoil/entryData";
import replayRequestModalOpenAtom from "../../../recoil/replayRequestModalOpen";
import TrafficViewerApiAtom from "../../../recoil/TrafficViewerApi/atom";
import { formatRequestWithOutError } from "../../EntryDetailed/EntrySections/EntrySections";
import { AutoRepresentation, TabsEnum } from "../../EntryDetailed/EntryViewer/AutoRepresentation";
import TrafficViewerApi from "../../TrafficViewer/TrafficViewerApi";
import { Tabs } from "../../UI";
import CodeEditor from "../../UI/CodeEditor/CodeEditor";
import FilePicker from '../../UI/FilePicker/FilePicker';
import KeyValueTable, { convertArrToKeyValueObject, convertParamsToArr } from "../../UI/KeyValueTable/KeyValueTable";
import { LoadingWrapper } from "../../UI/withLoading/withLoading";
import { IReplayRequestData, KeyValuePair } from './interfaces';
import styles from './ReplayRequestModal.module.sass';
const modalStyle = {
position: 'absolute',
@@ -37,11 +42,6 @@ const modalStyle = {
paddingBottom: "15px"
};
interface ReplayRequestModalProps {
isOpen: boolean;
onClose: () => void;
}
enum RequestTabs {
Params = "params",
Headers = "headers",
@@ -51,8 +51,6 @@ enum RequestTabs {
const HTTP_METHODS = ["get", "post", "put", "head", "options", "delete"]
const TABS = [{ tab: RequestTabs.Headers }, { tab: RequestTabs.Params }, { tab: RequestTabs.Body }];
const convertParamsToArr = (paramsObj) => Object.entries(paramsObj).map(([key, value]) => { return { key, value } })
const getQueryStringParams = (link: String) => {
if (link) {
@@ -69,43 +67,61 @@ const decodeQueryParam = (p) => {
return decodeURIComponent(p.replace(/\+/g, ' '));
}
interface ReplayRequestModalProps {
isOpen: boolean;
onClose: () => void;
}
const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose }) => {
const entryData = useRecoilValue(entryDataAtom)
const request = entryData.data.request
const [method, setMethod] = useState(request?.method?.toLowerCase() as string)
const getHostUrl = useCallback(() => {
return entryData.data.dst.name ? entryData.data?.dst?.name : entryData.data.dst.ip
}, [entryData.data.dst.ip, entryData.data.dst.name])
const [hostPortInput, setHostPortInput] = useState(`${entryData.base.proto.name}://${getHostUrl()}:${entryData.data.dst.port}`)
const getHostPortVal = useCallback(() => {
return `${entryData.base.proto.name}://${getHostUrl()}:${entryData.data.dst.port}`
}, [entryData.base.proto.name, entryData.data.dst.port, getHostUrl])
const [hostPortInput, setHostPortInput] = useState(getHostPortVal())
const [pathInput, setPathInput] = useState(request.path);
const commonClasses = useCommonStyles();
const [currentTab, setCurrentTab] = useState(TABS[0].tab);
const [response, setResponse] = useState(null);
const [postData, setPostData] = useState(request?.postData?.text || JSON.stringify(request?.postData?.params));
const [params, setParams] = useState(convertParamsToArr(request?.queryString || {}))
const [headers, setHeaders] = useState(convertParamsToArr(request?.headers || {}))
const trafficViewerApi = useRecoilValue(TrafficViewerApiAtom as RecoilState<TrafficViewerApi>)
const [isLoading, setIsLoading] = useState(false)
const [requestExpanded, setRequestExpanded] = useState(true)
const [responseExpanded, setResponseExpanded] = useState(false)
const getInitialRequestData = useCallback((): IReplayRequestData => {
return {
method: request?.method?.toLowerCase() as string,
hostPort: `${entryData.base.proto.name}://${getHostUrl()}:${entryData.data.dst.port}`,
path: request.path,
postData: request.postData?.text || JSON.stringify(request.postData?.params),
headers: convertParamsToArr(request.headers || {}),
params: convertParamsToArr(request.queryString || {})
}
}, [entryData.base.proto.name, entryData.data.dst.port, getHostUrl, request.headers, request?.method, request.path, request.postData?.params, request.postData?.text, request.queryString])
const [requestDataModel, setRequestData] = useState<IReplayRequestData>(getInitialRequestData())
const debouncedPath = useDebounce(pathInput, 500);
const addParamsToUrl = useCallback((url: string, params: KeyValuePair[]) => {
const urlParams = new URLSearchParams("");
params.forEach(param => urlParams.append(param.key, param.value as string))
return `${url}?${urlParams.toString()}`
}, [])
const onParamsChange = useCallback((newParams) => {
setParams(newParams);
let newUrl = `${debouncedPath ? debouncedPath.split('?')[0] : ""}`
newParams.forEach(({ key, value }, index) => {
newUrl += index > 0 ? '&' : '?'
newUrl += `${key}` + (value ? `=${value}` : "")
})
newUrl = addParamsToUrl(newUrl, newParams)
setPathInput(newUrl)
}, [debouncedPath])
}, [addParamsToUrl, debouncedPath])
useEffect(() => {
const newParams = getQueryStringParams(debouncedPath);
setParams(convertParamsToArr(newParams))
const params = convertParamsToArr(getQueryStringParams(debouncedPath));
setRequestData({ ...requestDataModel, params })
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [debouncedPath])
const onModalClose = () => {
@@ -114,33 +130,28 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
onClose()
}
const resetModel = useCallback(() => {
setMethod(request?.method?.toLowerCase() as string)
setHostPortInput(`${entryData.base.proto.name}://${getHostUrl()}:${entryData.data.dst.port}`)
setPathInput(request.path);
const resetModal = useCallback((requestDataModel: IReplayRequestData, hostPortInputVal, pathVal) => {
setRequestData(requestDataModel)
setHostPortInput(hostPortInputVal)
setPathInput(addParamsToUrl(pathVal, requestDataModel.params));
setResponse(null);
setPostData(request?.postData?.text || JSON.stringify(request?.postData?.params));
setParams(convertParamsToArr(request?.queryString || {}))
setHeaders(convertParamsToArr(request?.headers || {}))
setRequestExpanded(true)
}, [entryData.base.proto.name, entryData.data.dst.port, getHostUrl, request?.headers, request?.method, request.path, request?.postData?.params, request?.postData?.text, request?.queryString])
setRequestExpanded(true);
}, [addParamsToUrl])
const onRefreshRequest = useCallback((event) => {
event.stopPropagation()
resetModel()
}, [resetModel])
event.stopPropagation();
const hostPortInputVal = getHostPortVal();
resetModal(getInitialRequestData(), hostPortInputVal, request.path);
}, [getHostPortVal, getInitialRequestData, request.path, resetModal])
const sendRequest = useCallback(async () => {
setResponse(null)
const headersData = headers.reduce((prev, corrent) => {
prev[corrent.key] = corrent.value
return prev
}, {})
const buildUrl = `${hostPortInput}${pathInput}`
const requestData = { url: buildUrl, headers: headersData, data: postData, method }
const headersData = convertArrToKeyValueObject(requestDataModel.headers)
try {
setIsLoading(true)
const requestData = { url: `${hostPortInput}${pathInput}`, headers: headersData, data: requestDataModel.postData, method: requestDataModel.method }
const response = await trafficViewerApi.replayRequest(requestData)
setResponse(response?.data?.representation)
if (response.errorMessage) {
@@ -150,7 +161,6 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
setRequestExpanded(false)
setResponseExpanded(true)
}
} catch (error) {
setRequestExpanded(true)
toast.error("Error occurred while fetching response", { containerId: TOAST_CONTAINER_ID });
@@ -159,27 +169,37 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
finally {
setIsLoading(false)
}
}, [hostPortInput, pathInput, requestDataModel.headers, requestDataModel.method, requestDataModel.postData, trafficViewerApi])
}, [headers, hostPortInput, method, pathInput, postData, trafficViewerApi])
const onDownloadRequest = useCallback((e) => {
e.stopPropagation()
const date = Utils.getNow()
Utils.exportToJson(requestDataModel, `${getHostUrl()} - ${date}`)
}, [getHostUrl, requestDataModel])
const onLoadingComplete = useCallback((fileContent: FileContent) => {
const requestData = JSON.parse(fileContent.content) as IReplayRequestData
resetModal(requestData, requestData.hostPort, requestData.path)
}, [resetModal])
let innerComponent
switch (currentTab) {
case RequestTabs.Params:
innerComponent = <div className={styles.keyValueContainer}><KeyValueTable data={params} onDataChange={onParamsChange} key={"params"} valuePlaceholder="New Param Value" keyPlaceholder="New param Key" /></div>
innerComponent = <div className={styles.keyValueContainer}><KeyValueTable data={requestDataModel.params} onDataChange={onParamsChange} key={"params"} valuePlaceholder="New Param Value" keyPlaceholder="New param Key" /></div>
break;
case RequestTabs.Headers:
innerComponent = <Fragment>
<div className={styles.keyValueContainer}><KeyValueTable data={headers} onDataChange={(heaedrs) => setHeaders(heaedrs)} key={"Header"} valuePlaceholder="New Headers Value" keyPlaceholder="New Headers Key" />
<div className={styles.keyValueContainer}><KeyValueTable data={requestDataModel.headers} onDataChange={(headers) => setRequestData({ ...requestDataModel, headers: headers })} key={"Header"} valuePlaceholder="New Headers Value" keyPlaceholder="New Headers Key" />
</div>
<span className={styles.note}><b>* </b> X-Mizu Header added to reuqests</span>
<span className={styles.note}><b>* </b> X-Mizu Header added to requests</span>
</Fragment>
break;
case RequestTabs.Body:
const formatedCode = formatRequestWithOutError(postData || "", request?.postData?.mimeType)
const formattedCode = formatRequestWithOutError(requestDataModel.postData || "", request?.postData?.mimeType)
innerComponent = <div className={styles.codeEditor}>
<CodeEditor language={request?.postData?.mimeType.split("/")[1]}
code={Utils.isJson(formatedCode) ? JSON.stringify(JSON.parse(formatedCode || "{}"), null, 2) : formatedCode}
onChange={setPostData} />
code={Utils.isJson(formattedCode) ? JSON.stringify(JSON.parse(formattedCode || "{}"), null, 2) : formattedCode}
onChange={(postData) => setRequestData({ ...requestDataModel, postData })} />
</div>
break;
default:
@@ -204,17 +224,43 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
<div className={styles.headerContainer}>
<div className={styles.headerSection}>
<span className={styles.title}>Replay Request</span>
<Button style={{ marginLeft: "2%", textTransform: 'unset' }}
startIcon={<img src={refreshImg} className="custom" alt="Refresh Request"></img>}
size="medium"
variant="contained"
className={commonClasses.outlinedButton + " " + commonClasses.imagedButton}
onClick={onRefreshRequest}
>
Refresh
</Button>
<Button style={{ marginLeft: "2%", textTransform: 'unset' }}
startIcon={<DownloadIcon className={`custom ${styles.icon}`} />}
size="medium"
variant="contained"
className={commonClasses.outlinedButton + " " + commonClasses.imagedButton}
onClick={onDownloadRequest}
>
Download
</Button>
<FilePicker onLoadingComplete={onLoadingComplete}
elem={<Button style={{ marginLeft: "2%", textTransform: 'unset' }}
startIcon={<UploadIcon className={`custom ${styles.icon}`} />}
size="medium"
variant="contained"
className={commonClasses.outlinedButton + " " + commonClasses.imagedButton}>
Upload
</Button>}
/>
</div>
</div>
<div className={styles.modalContainer}>
<Accordion TransitionProps={{ unmountOnExit: true }} expanded={requestExpanded} onChange={() => setRequestExpanded(!requestExpanded)}>
<AccordionSummary expandIcon={<ExpandMoreIcon />} aria-controls="response-content">
<span className={styles.sectionHeader}>REQUEST</span>
<img src={refreshImg} style={{ marginLeft: "10px" }} title="Refresh Reuqest" alt="Refresh Reuqest" onClick={onRefreshRequest} />
</AccordionSummary>
<AccordionDetails>
<div className={styles.path}>
<select className={styles.select} value={method} onChange={(e) => setMethod(e.target.value)}>
<select className={styles.select} value={requestDataModel.method} onChange={(e) => setRequestData({ ...requestDataModel, method: e.target.value })}>
{HTTP_METHODS.map(method => <option value={method} key={method}>{method}</option>)}
</select>
<input placeholder="Host:Port" value={hostPortInput} onChange={(event) => setHostPortInput(event.target.value)} className={`${commonClasses.textField} ${styles.hostPort}`} />
@@ -246,7 +292,7 @@ const ReplayRequestModal: React.FC<ReplayRequestModalProps> = ({ isOpen, onClose
</div>
</Box>
</Fade>
</Modal>
</Modal >
);
}

View File

@@ -0,0 +1,13 @@
export interface KeyValuePair {
key: string;
value: unknown;
}
export interface IReplayRequestData {
method: string;
hostPort: string;
path: string;
postData: string;
headers: KeyValuePair[]
params: KeyValuePair[]
}
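A hedged example of the JSON the Download button writes and the Upload button reads back (field names follow IReplayRequestData above; every concrete value is made up):

import { IReplayRequestData } from './interfaces'; // assumed relative path

const savedRequest: IReplayRequestData = {
    method: "post",
    hostPort: "http://front-end:80",                 // illustrative destination
    path: "/orders?limit=10",
    postData: JSON.stringify({ item: "example" }),
    headers: [{ key: "Content-Type", value: "application/json" }],
    params: [{ key: "limit", value: "10" }]
};

export default savedRequest;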

Some files were not shown because too many files have changed in this diff