Mirror of https://github.com/kubeshark/kubeshark.git (synced 2026-02-19 20:40:17 +00:00)

Compare commits (24 commits)
Commit SHA1s:
fdf552a9ec
cbff1837c1
71425928cc
82db4acb7d
9bd82191aa
83b657472b
2e5cf13b3f
4be7164f20
0abd7c06ff
c2739a68c2
d0ef6c9f97
a4f7e61a6e
a5fef90781
70cef9dc4b
0f3dd66d2d
5536e5bb44
3bab83754f
d011478a74
7fa1a191a6
65bb338ed6
c098ff3323
19b4810ded
beb8363722
1eb67c69d9
.github/workflows/pr_validation.yml (vendored, 30 changed lines)

@@ -12,7 +12,7 @@ concurrency:
jobs:
build-cli:
name: Build CLI
name: Build CLI executable
runs-on: ubuntu-latest
steps:
- name: Set up Go 1.16
@@ -27,35 +27,11 @@ jobs:
run: make cli

build-agent:
name: Build Agent
name: Build Agent docker image
runs-on: ubuntu-latest
steps:
- name: Set up Go 1.16
uses: actions/setup-go@v2
with:
go-version: '1.16'

- name: Check out code into the Go module directory
uses: actions/checkout@v2

- shell: bash
run: |
sudo apt-get install libpcap-dev

- name: Build Agent
run: make agent

build-ui:
name: Build UI
runs-on: ubuntu-latest
steps:
- name: Set up Node 16
uses: actions/setup-node@v2
with:
node-version: '16'

- name: Check out code into the Go module directory
uses: actions/checkout@v2

- name: Build UI
run: make ui
run: make agent-docker
Dockerfile (19 changed lines)

@@ -16,7 +16,7 @@ RUN npm run build-ent
### Base builder image for native builds architecture
FROM golang:1.16-alpine AS builder-native-base
ENV CGO_ENABLED=1 GOOS=linux
RUN apk add libpcap-dev g++
RUN apk add libpcap-dev g++ perl-utils

### Intermediate builder image for x86-64 to x86-64 native builds
@@ -77,20 +77,29 @@ RUN go build -ldflags="-extldflags=-static -s -w \
-X 'mizuserver/pkg/version.BuildTimestamp=${BUILD_TIMESTAMP}' \
-X 'mizuserver/pkg/version.SemVer=${SEM_VER}'" -o mizuagent .

# Download Basenine executable, verify the sha1sum
ADD https://github.com/up9inc/basenine/releases/download/v0.4.13/basenine_linux_${GOARCH} ./basenine_linux_${GOARCH}
ADD https://github.com/up9inc/basenine/releases/download/v0.4.13/basenine_linux_${GOARCH}.sha256 ./basenine_linux_${GOARCH}.sha256
RUN shasum -a 256 -c basenine_linux_${GOARCH}.sha256
RUN chmod +x ./basenine_linux_${GOARCH}
RUN mv ./basenine_linux_${GOARCH} ./basenine

### The shipped image
ARG TARGETARCH=amd64
FROM ${TARGETARCH}/busybox:latest

# gin-gonic runs in debug mode without this
ENV GIN_MODE=release

WORKDIR /app/data/
WORKDIR /app

# Copy binary and config files from /build to root folder of scratch container.
COPY --from=builder ["/app/agent-build/mizuagent", "."]
COPY --from=builder ["/app/agent-build/basenine", "/usr/local/bin/basenine"]
COPY --from=front-end ["/app/ui-build/build", "site"]
COPY --from=front-end ["/app/ui-build/build-ent", "site-standalone"]

# gin-gonic runs in debug mode without this
ENV GIN_MODE=release

# this script runs both apiserver and passivetapper and exits either if one of them exits, preventing a scenario where the container runs without one process
ENTRYPOINT "/app/mizuagent"
ENTRYPOINT ["/app/mizuagent"]
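The newly installed perl-utils package provides shasum, which the build now uses to check the downloaded Basenine binary against its published .sha256 file before shipping it. As a rough Go illustration of what that `shasum -a 256 -c` step amounts to (not part of the repository; file names below are placeholders):

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"strings"
)

// verify compares the SHA-256 of a downloaded file with the hex digest published
// next to it, roughly what "shasum -a 256 -c basenine_linux_${GOARCH}.sha256" does.
func verify(binPath, sumPath string) error {
	f, err := os.Open(binPath)
	if err != nil {
		return err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return err
	}
	got := hex.EncodeToString(h.Sum(nil))

	sumLine, err := os.ReadFile(sumPath) // expected format: "<hex digest>  <file name>"
	if err != nil {
		return err
	}
	fields := strings.Fields(string(sumLine))
	if len(fields) == 0 {
		return fmt.Errorf("empty checksum file %s", sumPath)
	}
	if got != fields[0] {
		return fmt.Errorf("checksum mismatch: got %s, want %s", got, fields[0])
	}
	return nil
}

func main() {
	// Placeholder names; in the image these are ./basenine_linux_${GOARCH} and its .sha256 file.
	if err := verify("basenine_linux_amd64", "basenine_linux_amd64.sha256"); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	fmt.Println("checksum OK")
}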
Makefile (4 changed lines)

@@ -47,6 +47,10 @@ agent-debug: ## Build agent for debug.
docker: ## Build and publish agent docker image.
$(MAKE) push-docker

agent-docker: ## Build agent docker image.
@echo "Building agent docker image"
@docker build -t up9inc/mizu:devlatest .

push: push-docker push-cli ## Build and publish agent docker image & CLI.

push-docker: ## Build and publish agent docker image.
@@ -4,13 +4,16 @@
"viewportHeight": 1080,
"video": false,
"screenshotOnRunFailure": false,
"defaultCommandTimeout": 5000,
"testFiles": [
"tests/GuiPort.js",
"tests/MultipleNamespaces.js",
"tests/Redact.js",
"tests/NoRedact.js",
"tests/Regex.js",
"tests/RegexMasking.js"
"tests/RegexMasking.js",
"tests/IgnoredUserAgents.js",
"tests/UiTest.js"
],

"env": {
@@ -1,14 +0,0 @@
{
"watchForFileChanges":false,
"viewportWidth": 1920,
"viewportHeight": 3500,
"video": false,
"screenshotOnRunFailure": false,
"testFiles": [
"tests/IgnoredUserAgents.js"
],

"env": {
"testUrl": "http://localhost:8899/"
}
}
@@ -7,3 +7,12 @@ export function isValueExistsInElement(shouldInclude, content, domPathToContaine
});
});
}

export function resizeToHugeMizu() {
cy.viewport(1920, 3500);
}

export function resizeToNormalMizu() {
cy.viewport(1920, 1080);
}
@@ -1,10 +1,11 @@
import {isValueExistsInElement} from "../testHelpers/TrafficHelper";
import {isValueExistsInElement, resizeToHugeMizu} from "../testHelpers/TrafficHelper";

it('Loading Mizu', function () {
cy.visit(Cypress.env('testUrl'));
});

it('going through each entry', function () {
resizeToHugeMizu();
cy.get('#total-entries').then(number => {
const getNum = () => {
const numOfEntries = number.text();
acceptanceTests/cypress/integration/tests/UiTest.js (new file, 218 lines)
@@ -0,0 +1,218 @@
|
||||
import {findLineAndCheck, getExpectedDetailsDict} from "../testHelpers/StatusBarHelper";
|
||||
import {resizeToHugeMizu, resizeToNormalMizu} from "../testHelpers/TrafficHelper";
|
||||
const greenFilterColor = 'rgb(210, 250, 210)';
|
||||
const redFilterColor = 'rgb(250, 214, 220)';
|
||||
const refreshWaitTimeout = 10000;
|
||||
|
||||
it('opening mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
it('top bar check', function () {
|
||||
const podName1 = 'httpbin', namespace1 = 'mizu-tests';
|
||||
const podName2 = 'httpbin2', namespace2 = 'mizu-tests';
|
||||
|
||||
cy.get('.podsCount').trigger('mouseover');
|
||||
findLineAndCheck(getExpectedDetailsDict(podName1, namespace1));
|
||||
findLineAndCheck(getExpectedDetailsDict(podName2, namespace2));
|
||||
cy.reload();
|
||||
});
|
||||
|
||||
it('filtering guide check', function () {
|
||||
cy.get('[title="Open Filtering Guide (Cheatsheet)"]').click();
|
||||
cy.get('#modal-modal-title').should('be.visible');
|
||||
cy.get('[lang="en"]').click(0, 0);
|
||||
cy.get('#modal-modal-title').should('not.exist');
|
||||
});
|
||||
|
||||
checkIllegalFilter('invalid filter');
|
||||
|
||||
checkFilter({
|
||||
name: 'http',
|
||||
leftSidePath: '> :nth-child(1) > :nth-child(1)',
|
||||
leftSideExpectedText: 'HTTP',
|
||||
rightSidePath: '[title=HTTP]',
|
||||
rightSideExpectedText: 'Hypertext Transfer Protocol -- HTTP/1.1',
|
||||
applyByEnter: true
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'response.status == 200',
|
||||
leftSidePath: '[title="Status Code"]',
|
||||
leftSideExpectedText: '200',
|
||||
rightSidePath: '> :nth-child(2) [title="Status Code"]',
|
||||
rightSideExpectedText: '200',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'src.name == ""',
|
||||
leftSidePath: '[title="Source Name"]',
|
||||
leftSideExpectedText: '[Unresolved]',
|
||||
rightSidePath: '> :nth-child(2) [title="Source Name"]',
|
||||
rightSideExpectedText: '[Unresolved]',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'method == "GET"',
|
||||
leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
|
||||
leftSideExpectedText: 'GET',
|
||||
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
|
||||
rightSideExpectedText: 'GET',
|
||||
applyByEnter: true
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'summary == "/get"',
|
||||
leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
|
||||
leftSideExpectedText: '/get',
|
||||
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
|
||||
rightSideExpectedText: '/get',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'dst.name == "httpbin.mizu-tests"',
|
||||
leftSidePath: '> :nth-child(3) > :nth-child(2) > :nth-child(3) > :nth-child(2)',
|
||||
leftSideExpectedText: 'httpbin.mizu-tests',
|
||||
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(2) > :nth-child(3) > :nth-child(2)',
|
||||
rightSideExpectedText: 'httpbin.mizu-tests',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'src.ip == "127.0.0.1"',
|
||||
leftSidePath: '[title="Source IP"]',
|
||||
leftSideExpectedText: '127.0.0.1',
|
||||
rightSidePath: '> :nth-child(2) [title="Source IP"]',
|
||||
rightSideExpectedText: '127.0.0.1',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilterNoResults('method == "POST"');
|
||||
|
||||
function checkFilterNoResults(filterName) {
|
||||
it(`checking the filter: ${filterName}. Expecting no results`, function () {
|
||||
cy.get('#total-entries').then(number => {
|
||||
const totalEntries = number.text();
|
||||
|
||||
// applying the filter
|
||||
cy.get('.w-tc-editor-text').type(filterName);
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', greenFilterColor);
|
||||
cy.get('[type="submit"]').click();
|
||||
|
||||
// waiting for the entries number to load
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
|
||||
// the DOM should show 0 entries
|
||||
cy.get('#entries-length').should('have.text', '0');
|
||||
|
||||
// going through every potential entry and verifies that it doesn't exist
|
||||
[...Array(parseInt(totalEntries)).keys()].map(shouldNotExist);
|
||||
|
||||
cy.get('[title="Fetch old records"]').click();
|
||||
cy.get('#noMoreDataTop', {timeout: refreshWaitTimeout}).should('be.visible');
|
||||
cy.get('#entries-length').should('have.text', '0'); // after loading all entries there should still be 0 entries
|
||||
|
||||
// reloading then waiting for the entries number to load
|
||||
cy.reload();
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function shouldNotExist(entryNum) {
|
||||
cy.get(`entry-${entryNum}`).should('not.exist');
|
||||
}
|
||||
|
||||
function checkIllegalFilter(illegalFilterName) {
|
||||
it(`should show red search bar with the input: ${illegalFilterName}`, function () {
|
||||
cy.get('#total-entries').then(number => {
|
||||
const totalEntries = number.text();
|
||||
|
||||
cy.get('.w-tc-editor-text').type(illegalFilterName);
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', redFilterColor);
|
||||
cy.get('[type="submit"]').click();
|
||||
|
||||
cy.get('[role="alert"]').should('be.visible');
|
||||
cy.get('.w-tc-editor-text').clear();
|
||||
|
||||
// reloading then waiting for the entries number to load
|
||||
cy.reload();
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
function checkFilter(filterDetails){
|
||||
const {name, leftSidePath, rightSidePath, rightSideExpectedText, leftSideExpectedText, applyByEnter} = filterDetails;
|
||||
const entriesForDeeperCheck = 5;
|
||||
|
||||
it(`checking the filter: ${name}`, function () {
|
||||
cy.get('#total-entries').then(number => {
|
||||
const totalEntries = number.text();
|
||||
|
||||
// checks the hover on the last entry (the only one in DOM at the beginning)
|
||||
leftOnHoverCheck(totalEntries - 1, leftSidePath, name);
|
||||
|
||||
// applying the filter with alt+enter or with the button
|
||||
cy.get('.w-tc-editor-text').type(`${name}${applyByEnter ? '{alt+enter}' : ''}`);
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', greenFilterColor);
|
||||
if (!applyByEnter)
|
||||
cy.get('[type="submit"]').click();
|
||||
|
||||
// only one entry in DOM after filtering, checking all four checks on it
|
||||
leftTextCheck(totalEntries - 1, leftSidePath, leftSideExpectedText);
|
||||
leftOnHoverCheck(totalEntries - 1, leftSidePath, name);
|
||||
rightTextCheck(rightSidePath, rightSideExpectedText);
|
||||
rightOnHoverCheck(rightSidePath, name);
|
||||
|
||||
cy.get('[title="Fetch old records"]').click();
|
||||
resizeToHugeMizu();
|
||||
|
||||
// waiting for the entries number to load
|
||||
cy.get('#entries-length', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
|
||||
// checking only 'leftTextCheck' on all entries because the rest of the checks require more time
|
||||
[...Array(parseInt(totalEntries)).keys()].forEach(entryNum => {
|
||||
leftTextCheck(entryNum, leftSidePath, leftSideExpectedText);
|
||||
});
|
||||
|
||||
// making the other 3 checks on the first X entries (longer time for each check)
|
||||
deeperChcek(leftSidePath, rightSidePath, name, leftSideExpectedText, rightSideExpectedText, entriesForDeeperCheck);
|
||||
|
||||
// reloading then waiting for the entries number to load
|
||||
resizeToNormalMizu();
|
||||
cy.reload();
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function deeperChcek(leftSidePath, rightSidePath, filterName, leftSideExpectedText, rightSideExpectedText, entriesNumToCheck) {
|
||||
[...Array(entriesNumToCheck).keys()].forEach(entryNum => {
|
||||
leftOnHoverCheck(entryNum, leftSidePath, filterName);
|
||||
|
||||
cy.get(`#list #entry-${entryNum}`).click();
|
||||
rightTextCheck(rightSidePath, rightSideExpectedText);
|
||||
rightOnHoverCheck(rightSidePath, filterName);
|
||||
});
|
||||
}
|
||||
|
||||
function leftTextCheck(entryNum, path, expectedText) {
|
||||
cy.get(`#list #entry-${entryNum} ${path}`).invoke('text').should('eq', expectedText);
|
||||
}
|
||||
|
||||
function leftOnHoverCheck(entryNum, path, filterName) {
|
||||
cy.get(`#list #entry-${entryNum} ${path}`).trigger('mouseover');
|
||||
cy.get(`#list #entry-${entryNum} .Queryable-Tooltip`).should('have.text', filterName);
|
||||
}
|
||||
|
||||
function rightTextCheck(path, expectedText) {
|
||||
cy.get(`.TrafficPage-Container > :nth-child(2) ${path}`).should('have.text', expectedText);
|
||||
}
|
||||
|
||||
function rightOnHoverCheck(path, expectedText) {
|
||||
cy.get(`.TrafficPage-Container > :nth-child(2) ${path}`).trigger('mouseover');
|
||||
cy.get(`.TrafficPage-Container > :nth-child(2) .Queryable-Tooltip`).should('have.text', expectedText);
|
||||
}
|
||||
@@ -62,35 +62,7 @@ func TestTap(t *testing.T) {
}
}

entriesCheckFunc := func() error {
timestamp := time.Now().UnixNano() / int64(time.Millisecond)

entries, err := getDBEntries(timestamp, entriesCount, 1*time.Second)
if err != nil {
return err
}
err = checkEntriesAtLeast(entries, 1)
if err != nil {
return err
}
entry := entries[0]

entryUrl := fmt.Sprintf("%v/entries/%v", apiServerUrl, entry["id"])
requestResult, requestErr := executeHttpGetRequest(entryUrl)
if requestErr != nil {
return fmt.Errorf("failed to get entry, err: %v", requestErr)
}

if requestResult == nil {
return fmt.Errorf("unexpected nil entry result")
}

return nil
}
if err := retriesExecute(shortRetriesCount, entriesCheckFunc); err != nil {
t.Errorf("%v", err)
return
}
runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/UiTest.js\"")
})
}
}
@@ -138,7 +110,7 @@ func TestTapGuiPort(t *testing.T) {
return
}

runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/GuiPort.js\" --env port=%d --config-file cypress/integration/configurations/Default.json", guiPort))
runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/GuiPort.js\" --env port=%d", guiPort))
})
}
}
@@ -184,7 +156,7 @@ func TestTapAllNamespaces(t *testing.T) {
return
}

runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/MultipleNamespaces.js\" --env name1=%v,name2=%v,name3=%v,namespace1=%v,namespace2=%v,namespace3=%v --config-file cypress/integration/configurations/Default.json",
runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/MultipleNamespaces.js\" --env name1=%v,name2=%v,name3=%v,namespace1=%v,namespace2=%v,namespace3=%v",
expectedPods[0].Name, expectedPods[1].Name, expectedPods[2].Name, expectedPods[0].Namespace, expectedPods[1].Namespace, expectedPods[2].Namespace))
}

@@ -233,7 +205,7 @@ func TestTapMultipleNamespaces(t *testing.T) {
return
}

runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/MultipleNamespaces.js\" --env name1=%v,name2=%v,name3=%v,namespace1=%v,namespace2=%v,namespace3=%v --config-file cypress/integration/configurations/Default.json",
runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/MultipleNamespaces.js\" --env name1=%v,name2=%v,name3=%v,namespace1=%v,namespace2=%v,namespace3=%v",
expectedPods[0].Name, expectedPods[1].Name, expectedPods[2].Name, expectedPods[0].Namespace, expectedPods[1].Namespace, expectedPods[2].Namespace))
}

@@ -279,7 +251,7 @@ func TestTapRegex(t *testing.T) {
return
}

runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/Regex.js\" --env name=%v,namespace=%v --config-file cypress/integration/configurations/Default.json",
runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/Regex.js\" --env name=%v,namespace=%v",
expectedPods[0].Name, expectedPods[0].Namespace))
}

@@ -377,7 +349,7 @@ func TestTapRedact(t *testing.T) {
}
}

runCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/Redact.js\" --config-file cypress/integration/configurations/Default.json"))
runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/Redact.js\"")
}

func TestTapNoRedact(t *testing.T) {
@@ -429,7 +401,7 @@ func TestTapNoRedact(t *testing.T) {
}
}

runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/NoRedact.js\" --config-file cypress/integration/configurations/Default.json")
runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/NoRedact.js\"")
}

func TestTapRegexMasking(t *testing.T) {
@@ -480,7 +452,7 @@ func TestTapRegexMasking(t *testing.T) {
}
}

runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/RegexMasking.js\" --config-file cypress/integration/configurations/Default.json")
runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/RegexMasking.js\"")

}

@@ -542,7 +514,7 @@ func TestTapIgnoredUserAgents(t *testing.T) {
}
}

runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/IgnoredUserAgents.js\" --config-file cypress/integration/configurations/HugeMizu.json")
runCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/IgnoredUserAgents.js\"")
}

func TestTapDumpLogs(t *testing.T) {
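The TestTap change above replaces the deleted per-entry checks with a single entriesCheckFunc that is retried via retriesExecute before the Cypress UiTest.js spec runs. retriesExecute itself is defined elsewhere in the test suite; a minimal standalone sketch of the retry-until-nil-error pattern it relies on might look like this (function name and delay are assumptions, not the suite's code):

package main

import (
	"errors"
	"fmt"
	"time"
)

// retryExecute re-runs check until it returns nil or the attempts are exhausted,
// mirroring how the acceptance test wraps entriesCheckFunc with retriesExecute.
func retryExecute(retries int, check func() error) error {
	var lastErr error
	for i := 0; i < retries; i++ {
		if lastErr = check(); lastErr == nil {
			return nil
		}
		time.Sleep(1 * time.Second) // assumed delay between attempts
	}
	return fmt.Errorf("check failed after %d retries: %w", retries, lastErr)
}

func main() {
	attempts := 0
	err := retryExecute(5, func() error {
		attempts++
		if attempts < 3 {
			return errors.New("entries not ready yet")
		}
		return nil
	})
	fmt.Println("err:", err, "attempts:", attempts)
}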
@@ -6,13 +6,13 @@ require (
github.com/antelman107/net-wait-go v0.0.0-20210623112055-cf684aebda7b
github.com/chanced/openapi v0.0.6
github.com/djherbis/atime v1.0.0
github.com/elastic/go-elasticsearch/v7 v7.16.0
github.com/getkin/kin-openapi v0.76.0
github.com/gin-contrib/static v0.0.1
github.com/gin-gonic/gin v1.7.7
github.com/go-playground/locales v0.13.0
github.com/go-playground/universal-translator v0.17.0
github.com/go-playground/validator/v10 v10.5.0
github.com/google/martian v2.1.0+incompatible
github.com/google/uuid v1.1.2
github.com/gorilla/websocket v1.4.2
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7

@@ -126,6 +126,8 @@ github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/elastic/go-elasticsearch/v7 v7.16.0 h1:GHsxDFXIAlhSleXun4kwA89P7kQFADRChqvgOPeYP5A=
github.com/elastic/go-elasticsearch/v7 v7.16.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
@@ -9,6 +9,7 @@ import (
"mizuserver/pkg/api"
"mizuserver/pkg/config"
"mizuserver/pkg/controllers"
"mizuserver/pkg/elastic"
"mizuserver/pkg/middlewares"
"mizuserver/pkg/models"
"mizuserver/pkg/oas"
@@ -61,7 +62,6 @@ const (
socketConnectionRetries = 30
socketConnectionRetryDelay = time.Second * 2
socketHandshakeTimeout = time.Second * 2
uiIndexPath = "./site/index.html"
)

func main() {
@@ -159,6 +159,7 @@ func enableExpFeatureIfNeeded() {
if config.Config.ServiceMap {
servicemap.GetInstance().SetConfig(config.Config)
}
elastic.GetInstance().Configure(config.Config.Elastic)
}

func configureBasenineServer(host string, port string) {
@@ -167,7 +168,7 @@ func configureBasenineServer(host string, port string) {
wait.WithWait(200*time.Millisecond),
wait.WithBreak(50*time.Millisecond),
wait.WithDeadline(5*time.Second),
wait.WithDebug(true),
wait.WithDebug(config.Config.LogLevel == logging.DEBUG),
).Do([]string{fmt.Sprintf("%s:%s", host, port)}) {
logger.Log.Panicf("Basenine is not available!")
}
@@ -246,16 +247,23 @@ func hostApi(socketHarOutputChannel chan<- *tapApi.OutputChannelItem) {

app.Use(DisableRootStaticCache())

if err := setUIFlags(); err != nil {
logger.Log.Errorf("Error setting ui mode, err: %v", err)
var staticFolder string
if config.Config.StandaloneMode {
staticFolder = "./site-standalone"
} else {
staticFolder = "./site"
}

if config.Config.StandaloneMode {
app.Use(static.ServeRoot("/", "./site-standalone"))
} else {
app.Use(static.ServeRoot("/", "./site"))
indexStaticFile := staticFolder + "/index.html"
if err := setUIFlags(indexStaticFile); err != nil {
logger.Log.Errorf("Error setting ui flags, err: %v", err)
}

app.Use(static.ServeRoot("/", staticFolder))
app.NoRoute(func(c *gin.Context) {
c.File(indexStaticFile)
})

app.Use(middlewares.CORSMiddleware()) // This has to be called after the static middleware, does not work if its called before

api.WebSocketRoutes(app, &eventHandlers, startTime)
@@ -276,7 +284,6 @@ func hostApi(socketHarOutputChannel chan<- *tapApi.OutputChannelItem) {
routes.EntriesRoutes(app)
routes.MetadataRoutes(app)
routes.StatusRoutes(app)
routes.NotFoundRoute(app)
utils.StartServer(app)
}

@@ -291,7 +298,7 @@ func DisableRootStaticCache() gin.HandlerFunc {
}
}

func setUIFlags() error {
func setUIFlags(uiIndexPath string) error {
read, err := ioutil.ReadFile(uiIndexPath)
if err != nil {
return err
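The hostApi change above collapses the standalone/normal branches into a single staticFolder, patches that folder's index.html through setUIFlags, and adds a NoRoute handler so unknown routes fall back to the SPA index. A minimal standalone sketch of that serving pattern with gin and gin-contrib/static (folder and port are placeholders, not the agent's wiring):

package main

import (
	"github.com/gin-contrib/static"
	"github.com/gin-gonic/gin"
)

func main() {
	app := gin.Default()

	// Pick the static folder the same way main.go now does (standalone vs. normal mode).
	staticFolder := "./site" // or "./site-standalone" in standalone mode
	indexStaticFile := staticFolder + "/index.html"

	// Serve the built UI from the chosen folder.
	app.Use(static.ServeRoot("/", staticFolder))

	// Any route the API does not know about falls back to index.html,
	// letting the single-page app handle routing on the client side.
	app.NoRoute(func(c *gin.Context) {
		c.File(indexStaticFile)
	})

	app.Run(":8899") // port assumed for the example
}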
@@ -5,6 +5,7 @@ import (
"context"
"encoding/json"
"fmt"
"mizuserver/pkg/elastic"
"mizuserver/pkg/har"
"mizuserver/pkg/holder"
"mizuserver/pkg/providers"
@@ -16,14 +17,15 @@ import (

"mizuserver/pkg/servicemap"

"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
"mizuserver/pkg/models"
"mizuserver/pkg/oas"
"mizuserver/pkg/resolver"
"mizuserver/pkg/utils"

"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"

basenine "github.com/up9inc/basenine/client/go"
)

@@ -150,6 +152,7 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
connection.SendText(string(data))

servicemap.GetInstance().NewTCPEntry(mizuEntry.Source, mizuEntry.Destination, &item.Protocol)
elastic.GetInstance().PushEntry(mizuEntry)
}
}
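With this change every entry broadcast over the websocket is also handed to the Elasticsearch exporter singleton. A hedged sketch of how the two calls visible in the diff fit together; it assumes compilation inside the agent module, and the connection values are placeholders:

package main

import (
	"mizuserver/pkg/elastic"

	"github.com/up9inc/mizu/shared"
)

func main() {
	// Called once at startup (enableExpFeatureIfNeeded in main.go). Leaving
	// Url/User/Password empty keeps the exporter disabled, as Configure logs.
	elastic.GetInstance().Configure(shared.ElasticConfig{
		Url:      "https://elastic.example.invalid:9200", // placeholder address
		User:     "elastic",                              // placeholder user
		Password: "changeme",                             // placeholder password
	})

	// Per captured entry the socket handler then calls:
	//   elastic.GetInstance().PushEntry(mizuEntry)
	// where mizuEntry is the *api.Entry built from the tapped traffic; PushEntry
	// skips non-HTTP entries and is a no-op when no configuration was supplied.
}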
agent/pkg/elastic/esClient.go (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
package elastic
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"github.com/elastic/go-elasticsearch/v7"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"github.com/up9inc/mizu/tap/api"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type client struct {
|
||||
es *elasticsearch.Client
|
||||
index string
|
||||
insertedCount int
|
||||
}
|
||||
|
||||
var instance *client
|
||||
var once sync.Once
|
||||
|
||||
func GetInstance() *client {
|
||||
once.Do(func() {
|
||||
instance = newClient()
|
||||
})
|
||||
return instance
|
||||
}
|
||||
|
||||
func (client *client) Configure(config shared.ElasticConfig) {
|
||||
if config.Url == "" || config.User == "" || config.Password == "" {
|
||||
logger.Log.Infof("No elastic configuration was supplied, elastic exporter disabled")
|
||||
return
|
||||
}
|
||||
transport := http.DefaultTransport
|
||||
tlsClientConfig := &tls.Config{InsecureSkipVerify: true}
|
||||
transport.(*http.Transport).TLSClientConfig = tlsClientConfig
|
||||
cfg := elasticsearch.Config{
|
||||
Addresses: []string{config.Url},
|
||||
Username: config.User,
|
||||
Password: config.Password,
|
||||
Transport: transport,
|
||||
}
|
||||
|
||||
es, err := elasticsearch.NewClient(cfg)
|
||||
if err != nil {
|
||||
logger.Log.Fatalf("Failed to initialize elastic client %v", err)
|
||||
}
|
||||
|
||||
// Have the client instance return a response
|
||||
res, err := es.Info()
|
||||
if err != nil {
|
||||
logger.Log.Fatalf("Elastic client.Info() ERROR: %v", err)
|
||||
} else {
|
||||
client.es = es
|
||||
client.index = "mizu_traffic_http_" + time.Now().Format("2006_01_02_15_04")
|
||||
client.insertedCount = 0
|
||||
logger.Log.Infof("Elastic client configured, index: %s, cluster info: %v", client.index, res)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
}
|
||||
|
||||
func newClient() *client {
|
||||
return &client{
|
||||
es: nil,
|
||||
index: "",
|
||||
}
|
||||
}
|
||||
|
||||
type httpEntry struct {
|
||||
Source *api.TCP `json:"src"`
|
||||
Destination *api.TCP `json:"dst"`
|
||||
Outgoing bool `json:"outgoing"`
|
||||
CreatedAt time.Time `json:"createdAt"`
|
||||
Request map[string]interface{} `json:"request"`
|
||||
Response map[string]interface{} `json:"response"`
|
||||
Summary string `json:"summary"`
|
||||
Method string `json:"method"`
|
||||
Status int `json:"status"`
|
||||
ElapsedTime int64 `json:"elapsedTime"`
|
||||
Path string `json:"path"`
|
||||
}
|
||||
|
||||
func (client *client) PushEntry(entry *api.Entry) {
|
||||
if client.es == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if entry.Protocol.Name != "http" {
|
||||
return
|
||||
}
|
||||
|
||||
entryToPush := httpEntry{
|
||||
Source: entry.Source,
|
||||
Destination: entry.Destination,
|
||||
Outgoing: entry.Outgoing,
|
||||
CreatedAt: entry.StartTime,
|
||||
Request: entry.Request,
|
||||
Response: entry.Response,
|
||||
Summary: entry.Summary,
|
||||
Method: entry.Method,
|
||||
Status: entry.Status,
|
||||
ElapsedTime: entry.ElapsedTime,
|
||||
Path: entry.Path,
|
||||
}
|
||||
|
||||
entryJson, err := json.Marshal(entryToPush)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("json.Marshal ERROR: %v", err)
|
||||
return
|
||||
}
|
||||
var buffer bytes.Buffer
|
||||
buffer.WriteString(string(entryJson))
|
||||
res, _ := client.es.Index(client.index, &buffer)
|
||||
if res.StatusCode == 201 {
|
||||
client.insertedCount += 1
|
||||
}
|
||||
}
|
||||
@@ -51,32 +51,42 @@ func fileSize(fname string) int64 {
return fi.Size()
}

func feedEntries(fromFiles []string) (err error) {
func feedEntries(fromFiles []string, isSync bool) (count int, err error) {
badFiles := make([]string, 0)
cnt := 0
for _, file := range fromFiles {
logger.Log.Info("Processing file: " + file)
ext := strings.ToLower(filepath.Ext(file))
eCnt := 0
switch ext {
case ".har":
err = feedFromHAR(file)
eCnt, err = feedFromHAR(file, isSync)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
badFiles = append(badFiles, file)
continue
}
case ".ldjson":
err = feedFromLDJSON(file)
eCnt, err = feedFromLDJSON(file, isSync)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
badFiles = append(badFiles, file)
continue
}
default:
return errors.New("Unsupported file extension: " + ext)
return 0, errors.New("Unsupported file extension: " + ext)
}
cnt += eCnt
}

return nil
for _, f := range badFiles {
logger.Log.Infof("Bad file: %s", f)
}

return cnt, nil
}

func feedFromHAR(file string) error {
func feedFromHAR(file string, isSync bool) (int, error) {
fd, err := os.Open(file)
if err != nil {
panic(err)
@@ -86,23 +96,43 @@ func feedFromHAR(file string) error {

data, err := ioutil.ReadAll(fd)
if err != nil {
return err
return 0, err
}

var harDoc har.HAR
err = json.Unmarshal(data, &harDoc)
if err != nil {
return err
return 0, err
}

cnt := 0
for _, entry := range harDoc.Log.Entries {
GetOasGeneratorInstance().PushEntry(&entry)
cnt += 1
feedEntry(&entry, isSync)
}

return nil
return cnt, nil
}

func feedFromLDJSON(file string) error {
func feedEntry(entry *har.Entry, isSync bool) {
if entry.Response.Status == 302 {
logger.Log.Debugf("Dropped traffic entry due to permanent redirect status: %s", entry.StartedDateTime)
}

if strings.Contains(entry.Request.URL, "taboola") {
logger.Log.Debugf("Interesting: %s", entry.Request.URL)
} else {
//return
}

if isSync {
GetOasGeneratorInstance().entriesChan <- *entry // blocking variant, right?
} else {
GetOasGeneratorInstance().PushEntry(entry)
}
}

func feedFromLDJSON(file string, isSync bool) (int, error) {
fd, err := os.Open(file)
if err != nil {
panic(err)
@@ -113,8 +143,8 @@ func feedFromLDJSON(file string) error {
reader := bufio.NewReader(fd)

var meta map[string]interface{}

buf := strings.Builder{}
cnt := 0
for {
substr, isPrefix, err := reader.ReadLine()
if err == io.EOF {
@@ -132,26 +162,28 @@ func feedFromLDJSON(file string) error {
if meta == nil {
err := json.Unmarshal([]byte(line), &meta)
if err != nil {
return err
return 0, err
}
} else {
var entry har.Entry
err := json.Unmarshal([]byte(line), &entry)
if err != nil {
logger.Log.Warningf("Failed decoding entry: %s", line)
} else {
cnt += 1
feedEntry(&entry, isSync)
}
GetOasGeneratorInstance().PushEntry(&entry)
}
}

return nil
return cnt, nil
}

func TestFilesList(t *testing.T) {
res, err := getFiles("./test_artifacts/")
t.Log(len(res))
t.Log(res)
if err != nil || len(res) != 2 {
if err != nil || len(res) != 3 {
t.Logf("Should return 2 files but returned %d", len(res))
t.FailNow()
}
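feedEntries and the two feeders now return how many entries were processed and take an isSync flag: in sync mode entries go straight into the generator's channel (blocking), otherwise through PushEntry. A hedged usage sketch in the style of the updated tests, living in the same oas package where these unexported functions are defined; the test name is mine and the file path is the test artifact added in this changeset:

package oas

import "testing"

// A sketch, not one of the repository's tests: start the generator, feed files
// synchronously so the returned count is meaningful, and report it.
func TestFeedUsageSketch(t *testing.T) {
	GetOasGeneratorInstance().Start()

	files := []string{"test_artifacts/params.har"} // .har and .ldjson are the supported extensions
	cnt, err := feedEntries(files, true)           // isSync=true pushes entries on entriesChan and blocks
	if err != nil {
		t.Fatalf("feeding failed: %v", err)
	}
	t.Logf("fed %d entries", cnt)
}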
@@ -12,6 +12,7 @@ var (
patEmail = regexp.MustCompile(`^\w+([-+.']\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*$`)
patHexLower = regexp.MustCompile(`(0x)?[0-9a-f]{6,}`)
patHexUpper = regexp.MustCompile(`(0x)?[0-9A-F]{6,}`)
patLongNum = regexp.MustCompile(`\d{6,}`)
)

func IsGibberish(str string) bool {
@@ -27,7 +28,7 @@ func IsGibberish(str string) bool {
return true
}

if patHexLower.MatchString(str) || patHexUpper.MatchString(str) {
if patHexLower.MatchString(str) || patHexUpper.MatchString(str) || patLongNum.MatchString(str) {
return true
}
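The added patLongNum pattern makes any run of six or more digits count as gibberish, so numeric IDs in URL segments get collapsed into path parameters the same way long hex IDs already are. A small standalone illustration of just that rule (the wrapper below is illustrative, not the repository's IsGibberish):

package main

import (
	"fmt"
	"regexp"
)

// Same pattern the diff adds: six or more consecutive digits.
var patLongNum = regexp.MustCompile(`\d{6,}`)

func main() {
	for _, segment := range []string{"170943", "12345", "orders", "a1b2c3d4e5f6"} {
		// "170943" matches (six digits, as in the new tree test case), "12345" does not
		// (only five), and alphabetic or short mixed segments are left alone by this rule.
		fmt.Printf("%-12s long-number gibberish: %v\n", segment, patLongNum.MatchString(segment))
	}
}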
@@ -17,17 +17,19 @@ var ignoredHeaders = []string{
"x-att-deviceid", "x-correlation-id", "correlation-id", "x-client-data",
"x-http-method-override", "x-real-ip", "x-request-id", "x-request-start", "x-requested-with", "x-uidh",
"x-same-domain", "x-content-type-options", "x-frame-options", "x-xss-protection",
"x-wap-profile", "x-scheme",
"newrelic", "x-cloud-trace-context", "sentry-trace",
"expires", "set-cookie", "p3p", "location", "content-security-policy", "content-security-policy-report-only",
"last-modified", "content-language",
"x-wap-profile", "x-scheme", "status", "x-cache", "x-application-context", "retry-after",
"newrelic", "x-cloud-trace-context", "sentry-trace", "x-cache-hits", "x-served-by", "x-span-name",
"expires", "set-cookie", "p3p", "content-security-policy", "content-security-policy-report-only",
"last-modified", "content-language", "x-varnish", "true-client-ip", "akamai-origin-hop",
"keep-alive", "etag", "alt-svc", "x-csrf-token", "x-ua-compatible", "vary", "x-powered-by",
"age", "allow", "www-authenticate",
"age", "allow", "www-authenticate", "expect-ct", "timing-allow-origin", "referrer-policy",
"x-aspnet-version", "x-aspnetmvc-version", "x-timer", "x-abuse-info", "x-mod-pagespeed",
"duration_ms", // UP9 custom
}

var ignoredHeaderPrefixes = []string{
":", "accept-", "access-control-", "if-", "sec-", "grpc-",
"x-forwarded-", "x-original-",
"x-forwarded-", "x-original-", "cf-",
"x-up9-", "x-envoy-", "x-hasura-", "x-b3-", "x-datadog-", "x-envoy-", "x-amz-", "x-newrelic-", "x-prometheus-",
"x-akamai-", "x-spotim-", "x-amzn-", "x-ratelimit-",
}
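The grown ignore lists keep CDN, caching, and tracing headers out of generated specs. isHeaderIgnored itself is not shown in this diff; a rough standalone sketch of how an exact-name plus prefix check over these two lists typically looks (illustrative only, with trimmed copies of the lists):

package main

import (
	"fmt"
	"strings"
)

// Trimmed copies of the lists from the diff, just for the illustration.
var ignoredHeaders = []string{"x-varnish", "true-client-ip", "retry-after", "expect-ct"}
var ignoredHeaderPrefixes = []string{"cf-", "x-forwarded-", "x-amzn-", "x-ratelimit-"}

// isIgnoredSketch is NOT the repository's isHeaderIgnored, only an illustration
// of an exact-match-then-prefix check over the two lists.
func isIgnoredSketch(name string) bool {
	name = strings.ToLower(name)
	for _, h := range ignoredHeaders {
		if name == h {
			return true
		}
	}
	for _, p := range ignoredHeaderPrefixes {
		if strings.HasPrefix(name, p) {
			return true
		}
	}
	return false
}

func main() {
	for _, h := range []string{"CF-Ray", "X-RateLimit-Remaining", "Content-Type", "X-Varnish"} {
		fmt.Printf("%-22s ignored: %v\n", h, isIgnoredSketch(h))
	}
}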
@@ -30,7 +30,7 @@ func NewGen(server string) *SpecGen {
spec.Version = "3.1.0"

info := openapi.Info{Title: server}
info.Version = "0.0"
info.Version = "1.0"
spec.Info = &info
spec.Paths = &openapi.Paths{Items: map[openapi.PathValue]*openapi.PathObj{}}

@@ -175,11 +175,18 @@ func (g *SpecGen) handlePathObj(entry *har.Entry) (string, error) {

if isExtIgnored(urlParsed.Path) {
logger.Log.Debugf("Dropped traffic entry due to ignored extension: %s", urlParsed.Path)
return "", nil
}

if entry.Request.Method == "OPTIONS" {
logger.Log.Debugf("Dropped traffic entry due to its method: %s", urlParsed.Path)
return "", nil
}

ctype := getRespCtype(&entry.Response)
if isCtypeIgnored(ctype) {
logger.Log.Debugf("Dropped traffic entry due to ignored response ctype: %s", ctype)
return "", nil
}

if entry.Response.Status < 100 {
@@ -192,9 +199,19 @@ func (g *SpecGen) handlePathObj(entry *har.Entry) (string, error) {
return "", nil
}

split := strings.Split(urlParsed.Path, "/")
if entry.Response.Status == 502 || entry.Response.Status == 503 || entry.Response.Status == 504 {
logger.Log.Debugf("Dropped traffic entry due to temporary server error: %s", entry.StartedDateTime)
return "", nil
}

var split []string
if urlParsed.RawPath != "" {
split = strings.Split(urlParsed.RawPath, "/")
} else {
split = strings.Split(urlParsed.Path, "/")
}
node := g.tree.getOrSet(split, new(openapi.PathObj))
opObj, err := handleOpObj(entry, node.ops)
opObj, err := handleOpObj(entry, node.pathObj)

if opObj != nil {
return opObj.OperationID, err
@@ -232,20 +249,16 @@ func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) e
// TODO: we don't handle the situation when header/qstr param can be defined on pathObj level. Also the path param defined on opObj

qstrGW := nvParams{
In: openapi.InQuery,
Pairs: func() []NVPair {
return qstrToNVP(req.QueryString)
},
In: openapi.InQuery,
Pairs: req.QueryString,
IsIgnored: func(name string) bool { return false },
GeneralizeName: func(name string) string { return name },
}
handleNameVals(qstrGW, &opObj.Parameters)

hdrGW := nvParams{
In: openapi.InHeader,
Pairs: func() []NVPair {
return hdrToNVP(req.Headers)
},
In: openapi.InHeader,
Pairs: req.Headers,
IsIgnored: isHeaderIgnored,
GeneralizeName: strings.ToLower,
}
@@ -348,7 +361,7 @@ func fillContent(reqResp reqResp, respContent openapi.Content, ctype string, err
isBinary, _, text = reqResp.Resp.Content.B64Decoded()
}

if !isBinary {
if !isBinary && text != "" {
var exampleMsg []byte
// try treating it as json
any, isJSON := anyJSON(text)
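handlePathObj now prefers url.RawPath over url.Path when splitting, so a percent-encoded segment (such as the %7B%7D suffix in the new tree test case) is split and inspected in its encoded form first and only unescaped per chunk inside getOrSet, instead of being decoded into something that looks like a "{param}" placeholder. A small standalone illustration of why that matters (the URL is a made-up example modeled on the test case):

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	u, err := url.Parse("http://example.invalid/getSvgs/childSkus/%7B%7D")
	if err != nil {
		panic(err)
	}

	// Path is already decoded; RawPath keeps the original encoding
	// (it is only non-empty when the two differ).
	fmt.Println("Path:   ", u.Path)    // /getSvgs/childSkus/{}
	fmt.Println("RawPath:", u.RawPath) // /getSvgs/childSkus/%7B%7D

	// Splitting the raw form keeps "%7B%7D" as one opaque chunk, so it is not
	// mistaken for a "{param}" placeholder; each chunk can be unescaped afterwards,
	// which is what getOrSet now does per segment.
	raw := u.RawPath
	if raw == "" {
		raw = u.Path
	}
	for _, chunk := range strings.Split(raw, "/") {
		decoded, _ := url.PathUnescape(chunk)
		fmt.Printf("chunk %-12q decoded %q\n", chunk, decoded)
	}
}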
@@ -3,48 +3,56 @@ package oas
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/chanced/openapi"
|
||||
"github.com/op/go-logging"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"io/ioutil"
|
||||
"mizuserver/pkg/har"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// if started via env, write file into subdir
|
||||
func writeFiles(label string, spec *openapi.OpenAPI) {
|
||||
func outputSpec(label string, spec *openapi.OpenAPI, t *testing.T) {
|
||||
content, err := json.MarshalIndent(spec, "", "\t")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if os.Getenv("MIZU_OAS_WRITE_FILES") != "" {
|
||||
path := "./oas-samples"
|
||||
err := os.MkdirAll(path, 0o755)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
content, err := json.MarshalIndent(spec, "", "\t")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
err = ioutil.WriteFile(path+"/"+label+".json", content, 0644)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
t.Logf("Written: %s", label)
|
||||
} else {
|
||||
t.Logf("%s", string(content))
|
||||
}
|
||||
}
|
||||
|
||||
func TestEntries(t *testing.T) {
|
||||
logger.InitLoggerStderrOnly(logging.INFO)
|
||||
files, err := getFiles("./test_artifacts/")
|
||||
// files, err = getFiles("/media/bigdisk/UP9")
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
GetOasGeneratorInstance().Start()
|
||||
loadStartingOAS()
|
||||
|
||||
if err := feedEntries(files); err != nil {
|
||||
cnt, err := feedEntries(files, true)
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
loadStartingOAS()
|
||||
waitQueueProcessed()
|
||||
|
||||
svcs := strings.Builder{}
|
||||
GetOasGeneratorInstance().ServiceSpecs.Range(func(key, val interface{}) bool {
|
||||
@@ -78,33 +86,35 @@ func TestEntries(t *testing.T) {
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
specText, _ := json.MarshalIndent(spec, "", "\t")
|
||||
t.Logf("%s", string(specText))
|
||||
outputSpec(svc, spec, t)
|
||||
|
||||
err = spec.Validate()
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
writeFiles(svc, spec)
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
logger.Log.Infof("Total entries: %d", cnt)
|
||||
}
|
||||
|
||||
func TestFileLDJSON(t *testing.T) {
|
||||
func TestFileSingle(t *testing.T) {
|
||||
GetOasGeneratorInstance().Start()
|
||||
file := "test_artifacts/output_rdwtyeoyrj.har.ldjson"
|
||||
err := feedFromLDJSON(file)
|
||||
// loadStartingOAS()
|
||||
file := "test_artifacts/params.har"
|
||||
files := []string{file}
|
||||
cnt, err := feedEntries(files, true)
|
||||
if err != nil {
|
||||
logger.Log.Warning("Failed processing file: " + err.Error())
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
loadStartingOAS()
|
||||
waitQueueProcessed()
|
||||
|
||||
GetOasGeneratorInstance().ServiceSpecs.Range(func(_, val interface{}) bool {
|
||||
GetOasGeneratorInstance().ServiceSpecs.Range(func(key, val interface{}) bool {
|
||||
svc := key.(string)
|
||||
gen := val.(*SpecGen)
|
||||
spec, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
@@ -112,8 +122,7 @@ func TestFileLDJSON(t *testing.T) {
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
specText, _ := json.MarshalIndent(spec, "", "\t")
|
||||
t.Logf("%s", string(specText))
|
||||
outputSpec(svc, spec, t)
|
||||
|
||||
err = spec.Validate()
|
||||
if err != nil {
|
||||
@@ -123,6 +132,19 @@ func TestFileLDJSON(t *testing.T) {
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
logger.Log.Infof("Processed entries: %d", cnt)
|
||||
}
|
||||
|
||||
func waitQueueProcessed() {
|
||||
for {
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
queue := len(GetOasGeneratorInstance().entriesChan)
|
||||
logger.Log.Infof("Queue: %d", queue)
|
||||
if queue < 1 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func loadStartingOAS() {
|
||||
@@ -155,20 +177,29 @@ func loadStartingOAS() {
|
||||
|
||||
func TestEntriesNegative(t *testing.T) {
|
||||
files := []string{"invalid"}
|
||||
err := feedEntries(files)
|
||||
_, err := feedEntries(files, false)
|
||||
if err == nil {
|
||||
t.Logf("Should have failed")
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
func TestEntriesPositive(t *testing.T) {
|
||||
files := []string{"test_artifacts/params.har"}
|
||||
_, err := feedEntries(files, false)
|
||||
if err != nil {
|
||||
t.Logf("Failed")
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadValidHAR(t *testing.T) {
|
||||
inp := `{"startedDateTime": "2021-02-03T07:48:12.959000+00:00", "time": 1, "request": {"method": "GET", "url": "http://unresolved_target/1.0.0/health", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [], "queryString": [], "headersSize": -1, "bodySize": -1}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [], "content": {"size": 2, "mimeType": "", "text": "OK"}, "redirectURL": "", "headersSize": -1, "bodySize": 2}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 1}}`
|
||||
var entry *har.Entry
|
||||
var err = json.Unmarshal([]byte(inp), &entry)
|
||||
if err != nil {
|
||||
t.Logf("Failed to decode entry: %s", err)
|
||||
// t.FailNow() demonstrates the problem of library
|
||||
t.FailNow() // demonstrates the problem of `martian` HAR library
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -13,10 +13,14 @@
|
||||
"time": 1022,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "http://trcc-api-service/proxies/",
|
||||
"url": "http://trcc-api-service/proxies;matrixparam=example/",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
{
|
||||
"name": "X-Custom-Demo-Header",
|
||||
"value": "demo"
|
||||
},
|
||||
{
|
||||
"name": "Sec-Fetch-Dest",
|
||||
"value": "empty"
|
||||
@@ -124,6 +128,10 @@
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
{
|
||||
"name": "X-Custom-Demo-Header2",
|
||||
"value": "demo2"
|
||||
},
|
||||
{
|
||||
"name": "Vary",
|
||||
"value": "Origin"
|
||||
@@ -24568,7 +24576,7 @@
|
||||
"bodySize": -1
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"status": 308,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
@@ -24635,7 +24643,7 @@
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "http://trcc-api-service/models/aws_kong5/suites/all/runs",
|
||||
"url": "http://trcc-api-service/models/aws_kong5/suites/all/runs.png",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
@@ -24894,7 +24902,7 @@
|
||||
"bodySize": -1
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"status": 0,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
agent/pkg/oas/test_artifacts/params.har (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
{
|
||||
"log": {
|
||||
"version": "1.2",
|
||||
"creator": {
|
||||
"name": "mitmproxy har_dump",
|
||||
"version": "0.1",
|
||||
"comment": "mitmproxy version mitmproxy 4.0.4"
|
||||
},
|
||||
"entries": [
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:14:43.864529+00:00",
|
||||
"time": 111,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/e21f7112-3d3b-4632-9da3-a4af2e0e9166/sub1",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0,
|
||||
"queryString": []
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"mimeType": "text/html",
|
||||
"text": "",
|
||||
"size": 0
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 245,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 22,
|
||||
"receive": 2,
|
||||
"wait": 87,
|
||||
"connect": -1,
|
||||
"ssl": -1
|
||||
},
|
||||
"serverIPAddress": "54.210.29.33"
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:18.747122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/952bea17-3776-11ea-9341-42010a84012a/sub2",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:19.747122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/952bea17-3776-11ea-9341-42010a84012a;mparam=matrixparam",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ package oas
|
||||
import (
|
||||
"github.com/chanced/openapi"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
@@ -10,25 +11,37 @@ import (
|
||||
type NodePath = []string
|
||||
|
||||
type Node struct {
|
||||
constant *string
|
||||
param *openapi.ParameterObj
|
||||
ops *openapi.PathObj
|
||||
parent *Node
|
||||
children []*Node
|
||||
constant *string
|
||||
pathParam *openapi.ParameterObj
|
||||
pathObj *openapi.PathObj
|
||||
parent *Node
|
||||
children []*Node
|
||||
}
|
||||
|
||||
func (n *Node) getOrSet(path NodePath, pathObjToSet *openapi.PathObj) (node *Node) {
|
||||
if pathObjToSet == nil {
|
||||
func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj) (node *Node) {
|
||||
if existingPathObj == nil {
|
||||
panic("Invalid function call")
|
||||
}
|
||||
|
||||
pathChunk := path[0]
|
||||
potentialMatrix := strings.SplitN(pathChunk, ";", 2)
|
||||
if len(potentialMatrix) > 1 {
|
||||
pathChunk = potentialMatrix[0]
|
||||
logger.Log.Warningf("URI matrix params are not supported: %s", potentialMatrix[1])
|
||||
}
|
||||
|
||||
chunkIsParam := strings.HasPrefix(pathChunk, "{") && strings.HasSuffix(pathChunk, "}")
|
||||
pathChunk, err := url.PathUnescape(pathChunk)
|
||||
if err != nil {
|
||||
logger.Log.Warningf("URI segment is not correctly encoded: %s", pathChunk)
|
||||
// any side effects on continuing?
|
||||
}
|
||||
|
||||
chunkIsGibberish := IsGibberish(pathChunk) && !IsVersionString(pathChunk)
|
||||
|
||||
var paramObj *openapi.ParameterObj
|
||||
if chunkIsParam && pathObjToSet != nil && pathObjToSet.Parameters != nil {
|
||||
paramObj = findParamByName(pathObjToSet.Parameters, openapi.InPath, pathChunk[1:len(pathChunk)-1])
|
||||
if chunkIsParam && existingPathObj != nil && existingPathObj.Parameters != nil {
|
||||
_, paramObj = findParamByName(existingPathObj.Parameters, openapi.InPath, pathChunk[1:len(pathChunk)-1])
|
||||
}
|
||||
|
||||
if paramObj == nil {
|
||||
@@ -46,34 +59,30 @@ func (n *Node) getOrSet(path NodePath, pathObjToSet *openapi.PathObj) (node *Nod
|
||||
n.children = append(n.children, node)
|
||||
|
||||
if paramObj != nil {
|
||||
node.param = paramObj
|
||||
node.pathParam = paramObj
|
||||
} else if chunkIsGibberish {
|
||||
initParams(&pathObjToSet.Parameters)
|
||||
|
||||
newParam := n.createParam()
|
||||
node.param = newParam
|
||||
|
||||
appended := append(*pathObjToSet.Parameters, newParam)
|
||||
pathObjToSet.Parameters = &appended
|
||||
node.pathParam = newParam
|
||||
} else {
|
||||
node.constant = &pathChunk
|
||||
}
|
||||
}
|
||||
|
||||
// add example if it's a param
|
||||
if node.param != nil && !chunkIsParam {
|
||||
exmp := &node.param.Examples
|
||||
// add example if it's a gibberish chunk
|
||||
if node.pathParam != nil && !chunkIsParam {
|
||||
exmp := &node.pathParam.Examples
|
||||
err := fillParamExample(&exmp, pathChunk)
|
||||
if err != nil {
|
||||
logger.Log.Warningf("Failed to add example to a parameter: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: eat up trailing slash, in a smart way: node.ops!=nil && path[1]==""
|
||||
// TODO: eat up trailing slash, in a smart way: node.pathObj!=nil && path[1]==""
|
||||
if len(path) > 1 {
|
||||
return node.getOrSet(path[1:], pathObjToSet)
|
||||
} else if node.ops == nil {
|
||||
node.ops = pathObjToSet
|
||||
return node.getOrSet(path[1:], existingPathObj)
|
||||
} else if node.pathObj == nil {
|
||||
node.pathObj = existingPathObj
|
||||
}
|
||||
|
||||
return node
|
||||
@@ -90,12 +99,16 @@ func (n *Node) createParam() *openapi.ParameterObj {
|
||||
} else if strings.HasSuffix(*n.constant, "s") && len(*n.constant) > 3 {
|
||||
name = *n.constant
|
||||
name = name[:len(name)-1] + "Id"
|
||||
} else if isAlpha(*n.constant) {
|
||||
name = *n.constant + "Id"
|
||||
}
|
||||
|
||||
name = cleanNonAlnum([]byte(name))
|
||||
}
|
||||
|
||||
newParam := createSimpleParam(name, "path", "string")
|
||||
x := n.countParentParams()
|
||||
if x > 1 {
|
||||
if x > 0 {
|
||||
newParam.Name = newParam.Name + strconv.Itoa(x)
|
||||
}
|
||||
|
||||
@@ -113,7 +126,7 @@ func (n *Node) searchInParams(paramObj *openapi.ParameterObj, chunkIsGibberish b
|
||||
// TODO: check the regex pattern of param? for exceptions etc
|
||||
|
||||
if paramObj != nil {
|
||||
// TODO: mergeParam(subnode.param, paramObj)
|
||||
// TODO: mergeParam(subnode.pathParam, paramObj)
|
||||
return subnode
|
||||
} else {
|
||||
return subnode
|
||||
@@ -147,15 +160,16 @@ func (n *Node) listPaths() *openapi.Paths {
|
||||
var strChunk string
|
||||
if n.constant != nil {
|
||||
strChunk = *n.constant
|
||||
} else if n.param != nil {
|
||||
strChunk = "{" + n.param.Name + "}"
|
||||
} else if n.pathParam != nil {
|
||||
strChunk = "{" + n.pathParam.Name + "}"
|
||||
} else {
|
||||
// this is the root node
|
||||
}
|
||||
|
||||
// add self
|
||||
if n.ops != nil {
|
||||
paths.Items[openapi.PathValue(strChunk)] = n.ops
|
||||
if n.pathObj != nil {
|
||||
fillPathParams(n, n.pathObj)
|
||||
paths.Items[openapi.PathValue(strChunk)] = n.pathObj
|
||||
}
|
||||
|
||||
// recurse into children
|
||||
@@ -175,6 +189,29 @@ func (n *Node) listPaths() *openapi.Paths {
|
||||
return paths
|
||||
}
|
||||
|
||||
func fillPathParams(n *Node, pathObj *openapi.PathObj) {
|
||||
// collect all path parameters from parent hierarchy
|
||||
node := n
|
||||
for {
|
||||
if node.pathParam != nil {
|
||||
initParams(&pathObj.Parameters)
|
||||
|
||||
idx, paramObj := findParamByName(pathObj.Parameters, openapi.InPath, node.pathParam.Name)
|
||||
if paramObj == nil {
|
||||
appended := append(*pathObj.Parameters, node.pathParam)
|
||||
pathObj.Parameters = &appended
|
||||
} else {
|
||||
(*pathObj.Parameters)[idx] = paramObj
|
||||
}
|
||||
}
|
||||
|
||||
node = node.parent
|
||||
if node == nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type PathAndOp struct {
|
||||
path string
|
||||
op *openapi.Operation
|
||||
@@ -194,7 +231,7 @@ func (n *Node) countParentParams() int {
|
||||
res := 0
|
||||
node := n
|
||||
for {
|
||||
if node.param != nil {
|
||||
if node.pathParam != nil {
|
||||
res++
|
||||
}
|
||||
|
||||
|
||||
@@ -8,19 +8,30 @@ import (

func TestTree(t *testing.T) {
testCases := []struct {
inp string
inp string
numParams int
label string
}{
{"/"},
{"/v1.0.0/config/launcher/sp_nKNHCzsN/f34efcae-6583-11eb-908a-00b0fcb9d4f6/vendor,init,conversation"},
{"/", 0, ""},
{"/v1.0.0/config/launcher/sp_nKNHCzsN/f34efcae-6583-11eb-908a-00b0fcb9d4f6/vendor,init,conversation", 1, "vendor,init,conversation"},
{"/v1.0.0/config/launcher/sp_nKNHCzsN/{f34efcae-6583-11eb-908a-00b0fcb9d4f6}/vendor,init,conversation", 0, "vendor,init,conversation"},
{"/getSvgs/size/small/brand/SFLY/layoutId/170943/layoutVersion/1/sizeId/742/surface/0/isLandscape/true/childSkus/%7B%7D", 1, ""},
}

tree := new(Node)
for _, tc := range testCases {
split := strings.Split(tc.inp, "/")
node := tree.getOrSet(split, new(openapi.PathObj))
pathObj := new(openapi.PathObj)
node := tree.getOrSet(split, pathObj)

fillPathParams(node, pathObj)

if node.constant == nil {
t.Errorf("nil constant: %s", tc.inp)
if node.constant != nil && *node.constant != tc.label {
t.Errorf("Constant does not match: %s != %s", *node.constant, tc.label)
}

if tc.numParams > 0 && (pathObj.Parameters == nil || len(*pathObj.Parameters) < tc.numParams) {
t.Errorf("Wrong num of params, expected: %d", tc.numParams)
}
}
}

@@ -71,9 +71,10 @@ func createSimpleParam(name string, in openapi.In, ptype openapi.SchemaType) *op
return &newParam
}

func findParamByName(params *openapi.ParameterList, in openapi.In, name string) (pathParam *openapi.ParameterObj) {
func findParamByName(params *openapi.ParameterList, in openapi.In, name string) (idx int, pathParam *openapi.ParameterObj) {
caseInsensitive := in == openapi.InHeader
for _, param := range *params {
for i, param := range *params {
idx = i
paramObj, err := param.ResolveParameter(paramResolver)
if err != nil {
logger.Log.Warningf("Failed to resolve reference: %s", err)
@@ -89,7 +90,8 @@ func findParamByName(params *openapi.ParameterList, in openapi.In, name string)
break
}
}
return pathParam

return idx, pathParam
}

func findHeaderByName(headers *openapi.Headers, name string) *openapi.HeaderObj {
@@ -107,37 +109,16 @@ func findHeaderByName(headers *openapi.Headers, name string) *openapi.HeaderObj
return nil
}

type NVPair struct {
Name string
Value string
}

type nvParams struct {
In openapi.In
Pairs func() []NVPair
Pairs []har.NVP
IsIgnored func(name string) bool
GeneralizeName func(name string) string
}

func qstrToNVP(list []har.QueryString) []NVPair {
res := make([]NVPair, len(list))
for idx, val := range list {
res[idx] = NVPair{Name: val.Name, Value: val.Value}
}
return res
}

func hdrToNVP(list []har.Header) []NVPair {
res := make([]NVPair, len(list))
for idx, val := range list {
res[idx] = NVPair{Name: val.Name, Value: val.Value}
}
return res
}

func handleNameVals(gw nvParams, params **openapi.ParameterList) {
visited := map[string]*openapi.ParameterObj{}
for _, pair := range gw.Pairs() {
for _, pair := range gw.Pairs {
if gw.IsIgnored(pair.Name) {
continue
}
@@ -145,7 +126,7 @@ func handleNameVals(gw nvParams, params **openapi.ParameterList) {
nameGeneral := gw.GeneralizeName(pair.Name)

initParams(params)
param := findParamByName(*params, gw.In, pair.Name)
_, param := findParamByName(*params, gw.In, pair.Name)
if param == nil {
param = createSimpleParam(nameGeneral, gw.In, openapi.TypeString)
appended := append(**params, param)
@@ -342,3 +323,26 @@ func anyJSON(text string) (anyVal interface{}, isJSON bool) {

return nil, false
}

func cleanNonAlnum(s []byte) string {
j := 0
for _, b := range s {
if ('a' <= b && b <= 'z') ||
('A' <= b && b <= 'Z') ||
('0' <= b && b <= '9') ||
b == ' ' {
s[j] = b
j++
}
}
return string(s[:j])
}

func isAlpha(s string) bool {
for _, r := range s {
if (r < 'a' || r > 'z') && (r < 'A' || r > 'Z') {
return false
}
}
return true
}
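Returning the index from findParamByName lets fillPathParams overwrite an existing parameter in place instead of appending a duplicate. A rough sketch of that find-or-replace ("upsert") pattern follows; the param type, findByName, and upsert are simplified stand-ins, not the project's OpenAPI types.

```go
package main

import "fmt"

type param struct {
	Name string
	In   string
}

// findByName returns the index of the matching parameter, or -1 and nil when
// there is no match (the diff's version instead returns the last visited index).
func findByName(params []*param, in, name string) (int, *param) {
	for i, p := range params {
		if p.In == in && p.Name == name {
			return i, p
		}
	}
	return -1, nil
}

// upsert replaces an existing parameter by index or appends a new one.
func upsert(params []*param, p *param) []*param {
	if idx, existing := findByName(params, p.In, p.Name); existing != nil {
		params[idx] = p
		return params
	}
	return append(params, p)
}

func main() {
	params := []*param{{Name: "userId", In: "path"}}
	params = upsert(params, &param{Name: "userId", In: "path"})
	fmt.Println(len(params)) // 1: replaced in place, not duplicated
}
```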
@@ -1,18 +0,0 @@
package routes

import (
"github.com/gin-gonic/gin"
"net/http"
)

// NotFoundRoute defines the 404 Error route.
func NotFoundRoute(app *gin.Engine) {
app.Use(
func(c *gin.Context) {
c.JSON(http.StatusNotFound, map[string]interface{}{
"error": true,
"msg": "sorry, endpoint is not found",
})
},
)
}
@@ -23,8 +23,8 @@ type Provider struct {
client *http.Client
}

const DefaultRetries = 20
const DefaultTimeout = 5 * time.Second
const DefaultRetries = 3
const DefaultTimeout = 2 * time.Second

func NewProvider(url string, retries int, timeout time.Duration) *Provider {
return &Provider{
@@ -36,16 +36,6 @@ func NewProvider(url string, retries int, timeout time.Duration) *Provider {
}
}

func NewProviderWithoutRetries(url string, timeout time.Duration) *Provider {
return &Provider{
url: url,
retries: 1,
client: &http.Client{
Timeout: timeout,
},
}
}

func (provider *Provider) TestConnection() error {
retriesLeft := provider.retries
for retriesLeft > 0 {
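With NewProviderWithoutRetries removed, a single-attempt provider is simply NewProvider with retries set to 1. The sketch below illustrates the retry-until-exhausted loop shape that TestConnection follows; the provider type, testConnection, and the probe callback are hypothetical simplifications, not the package's actual code.

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

type provider struct {
	retries int
	timeout time.Duration
}

// testConnection retries a probe until it succeeds or the retry budget is
// exhausted, mirroring the loop in Provider.TestConnection.
func (p *provider) testConnection(probe func() error) error {
	var lastErr error
	for left := p.retries; left > 0; left-- {
		if lastErr = probe(); lastErr == nil {
			return nil
		}
		time.Sleep(p.timeout) // back off between attempts
	}
	return lastErr
}

func main() {
	p := &provider{retries: 3, timeout: 10 * time.Millisecond}
	attempts := 0
	err := p.testConnection(func() error {
		attempts++
		if attempts < 3 {
			return errors.New("not ready")
		}
		return nil
	})
	fmt.Println(attempts, err) // 3 <nil>
}
```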
@@ -3,13 +3,14 @@ package cmd
import (
"context"
"fmt"
"regexp"

"github.com/up9inc/mizu/cli/apiserver"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/shared/kubernetes"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/shared/semver"
"regexp"
)

func runMizuCheck() {
@@ -96,7 +97,7 @@ func checkServerConnection(kubernetesProvider *kubernetes.Provider) bool {

serverUrl := GetApiServerUrl()

apiServerProvider := apiserver.NewProviderWithoutRetries(serverUrl, apiserver.DefaultTimeout)
apiServerProvider := apiserver.NewProvider(serverUrl, 1, apiserver.DefaultTimeout)
if err := apiServerProvider.TestConnection(); err == nil {
logger.Log.Infof("%v found Mizu server tunnel available and connected successfully to API server", fmt.Sprintf(uiUtils.Green, "√"))
return true
@@ -169,32 +170,32 @@ func checkAllResourcesExist(ctx context.Context, kubernetesProvider *kubernetes.
allResourcesExist := checkResourceExist(config.Config.MizuResourcesNamespace, "namespace", exist, err)

exist, err = kubernetesProvider.DoesConfigMapExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ConfigMapName)
allResourcesExist = checkResourceExist(kubernetes.ConfigMapName, "config map", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ConfigMapName, "config map", exist, err) && allResourcesExist

exist, err = kubernetesProvider.DoesServiceAccountExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ServiceAccountName)
allResourcesExist = checkResourceExist(kubernetes.ServiceAccountName, "service account", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ServiceAccountName, "service account", exist, err) && allResourcesExist

if config.Config.IsNsRestrictedMode() {
exist, err = kubernetesProvider.DoesRoleExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.RoleName)
allResourcesExist = checkResourceExist(kubernetes.RoleName, "role", exist, err)
allResourcesExist = checkResourceExist(kubernetes.RoleName, "role", exist, err) && allResourcesExist

exist, err = kubernetesProvider.DoesRoleBindingExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.RoleBindingName)
allResourcesExist = checkResourceExist(kubernetes.RoleBindingName, "role binding", exist, err)
allResourcesExist = checkResourceExist(kubernetes.RoleBindingName, "role binding", exist, err) && allResourcesExist
} else {
exist, err = kubernetesProvider.DoesClusterRoleExist(ctx, kubernetes.ClusterRoleName)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleName, "cluster role", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleName, "cluster role", exist, err) && allResourcesExist

exist, err = kubernetesProvider.DoesClusterRoleBindingExist(ctx, kubernetes.ClusterRoleBindingName)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleBindingName, "cluster role binding", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleBindingName, "cluster role binding", exist, err) && allResourcesExist
}

exist, err = kubernetesProvider.DoesServiceExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ApiServerPodName)
allResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "service", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "service", exist, err) && allResourcesExist

if isInstallCommand {
allResourcesExist = checkInstallResourcesExist(ctx, kubernetesProvider)
allResourcesExist = checkInstallResourcesExist(ctx, kubernetesProvider) && allResourcesExist
} else {
allResourcesExist = checkTapResourcesExist(ctx, kubernetesProvider)
allResourcesExist = checkTapResourcesExist(ctx, kubernetesProvider) && allResourcesExist
}

return allResourcesExist
@@ -205,13 +206,13 @@ func checkInstallResourcesExist(ctx context.Context, kubernetesProvider *kuberne
installResourcesExist := checkResourceExist(kubernetes.DaemonRoleName, "role", exist, err)

exist, err = kubernetesProvider.DoesRoleBindingExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.DaemonRoleBindingName)
installResourcesExist = checkResourceExist(kubernetes.DaemonRoleBindingName, "role binding", exist, err)
installResourcesExist = checkResourceExist(kubernetes.DaemonRoleBindingName, "role binding", exist, err) && installResourcesExist

exist, err = kubernetesProvider.DoesPersistentVolumeClaimExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.PersistentVolumeClaimName)
installResourcesExist = checkResourceExist(kubernetes.PersistentVolumeClaimName, "persistent volume claim", exist, err)
installResourcesExist = checkResourceExist(kubernetes.PersistentVolumeClaimName, "persistent volume claim", exist, err) && installResourcesExist

exist, err = kubernetesProvider.DoesDeploymentExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ApiServerPodName)
installResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "deployment", exist, err)
installResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "deployment", exist, err) && installResourcesExist

return installResourcesExist
}
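The pattern introduced here, `result = check(...) && result`, runs the check first (so it still executes and reports even after an earlier failure) and only then ANDs it with the accumulator, so one missing resource can no longer be masked by a later successful check. A tiny standalone illustration, with checkResource as a hypothetical stand-in for checkResourceExist:

```go
package main

import "fmt"

// checkResource is a stand-in for checkResourceExist: it reports the result
// and returns whether the resource exists.
func checkResource(name string, exists bool) bool {
	fmt.Printf("%-10s exists: %v\n", name, exists)
	return exists
}

func main() {
	// Run the check first, then AND with the accumulator, so an earlier
	// false result is preserved while every check still gets reported.
	all := checkResource("namespace", true)
	all = checkResource("configmap", false) && all
	all = checkResource("service", true) && all
	fmt.Println("all resources exist:", all) // false
}
```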
@@ -5,6 +5,10 @@ import (
"encoding/json"
"errors"
"fmt"
"path"
"regexp"
"time"

"github.com/up9inc/mizu/cli/apiserver"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/errormessage"
@@ -13,9 +17,6 @@ import (
"github.com/up9inc/mizu/cli/resources"
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/shared"
"path"
"regexp"
"time"

"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/shared/kubernetes"
@@ -35,10 +36,10 @@ func startProxyReportErrorIfAny(kubernetesProvider *kubernetes.Provider, ctx con
return
}

apiProvider = apiserver.NewProviderWithoutRetries(GetApiServerUrl(), time.Second) // short check for proxy
apiProvider = apiserver.NewProvider(GetApiServerUrl(), apiserver.DefaultRetries, apiserver.DefaultTimeout)
if err := apiProvider.TestConnection(); err != nil {
logger.Log.Debugf("Couldn't connect using proxy, stopping proxy and trying to create port-forward")
if err := httpServer.Shutdown(context.Background()); err != nil {
if err := httpServer.Shutdown(ctx); err != nil {
logger.Log.Debugf("Error occurred while stopping proxy %v", errormessage.FormatError(err))
}

@@ -50,7 +51,7 @@ func startProxyReportErrorIfAny(kubernetesProvider *kubernetes.Provider, ctx con
return
}

apiProvider = apiserver.NewProvider(GetApiServerUrl(), apiserver.DefaultRetries, apiserver.DefaultTimeout) // long check for port-forward
apiProvider = apiserver.NewProvider(GetApiServerUrl(), apiserver.DefaultRetries, apiserver.DefaultTimeout)
if err := apiProvider.TestConnection(); err != nil {
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Couldn't connect to API server, for more info check logs at %s", fsUtils.GetLogFilePath()))
cancel()

@@ -4,6 +4,7 @@ import (
"context"
"errors"
"fmt"

"github.com/creasty/defaults"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/errormessage"
@@ -41,7 +42,7 @@ func runMizuInstall() {

if err = resources.CreateInstallMizuResources(ctx, kubernetesProvider, serializedValidationRules,
serializedContract, serializedMizuConfig, config.Config.IsNsRestrictedMode(),
config.Config.MizuResourcesNamespace, config.Config.AgentImage, config.Config.BasenineImage,
config.Config.MizuResourcesNamespace, config.Config.AgentImage,
config.Config.KratosImage, config.Config.KetoImage,
nil, defaultMaxEntriesDBSizeBytes, defaultResources, config.Config.ImagePullPolicy(),
config.Config.LogLevel(), false); err != nil {
@@ -73,6 +74,7 @@ func getInstallMizuAgentConfig(maxDBSizeBytes int64, tapperResources shared.Reso
StandaloneMode: true,
ServiceMap: config.Config.ServiceMap,
OAS: config.Config.OAS,
Elastic: config.Config.Elastic,
}

return &mizuAgentConfig

@@ -23,7 +23,7 @@ var logsCmd = &cobra.Command{
if err != nil {
return nil
}
ctx, _ := context.WithCancel(context.Background())
ctx := context.Background()

if validationErr := config.Config.Logs.Validate(); validationErr != nil {
return errormessage.FormatError(validationErr)
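The logs command change removes a context.WithCancel whose CancelFunc was discarded, which is what `go vet`'s lostcancel check flags; when nothing needs to cancel the context, context.Background() is sufficient. A tiny illustration of the two forms (standalone example, not the command's code):

```go
package main

import (
	"context"
	"fmt"
)

func main() {
	// Before: ctx, _ := context.WithCancel(context.Background())
	// discards the CancelFunc, so the derived context can never be released
	// explicitly and `go vet` reports a lostcancel warning.
	ctx := context.Background() // after: no cancellation needed, no derived context

	fmt.Println(ctx.Err()) // <nil>
}
```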
@@ -124,7 +124,7 @@ func RunMizuTap() {
}

logger.Log.Infof("Waiting for Mizu Agent to start...")
if state.mizuServiceAccountExists, err = resources.CreateTapMizuResources(ctx, kubernetesProvider, serializedValidationRules, serializedContract, serializedMizuConfig, config.Config.IsNsRestrictedMode(), config.Config.MizuResourcesNamespace, config.Config.AgentImage, config.Config.BasenineImage, getSyncEntriesConfig(), config.Config.Tap.MaxEntriesDBSizeBytes(), config.Config.Tap.ApiServerResources, config.Config.ImagePullPolicy(), config.Config.LogLevel()); err != nil {
if state.mizuServiceAccountExists, err = resources.CreateTapMizuResources(ctx, kubernetesProvider, serializedValidationRules, serializedContract, serializedMizuConfig, config.Config.IsNsRestrictedMode(), config.Config.MizuResourcesNamespace, config.Config.AgentImage, getSyncEntriesConfig(), config.Config.Tap.MaxEntriesDBSizeBytes(), config.Config.Tap.ApiServerResources, config.Config.ImagePullPolicy(), config.Config.LogLevel()); err != nil {
var statusError *k8serrors.StatusError
if errors.As(err, &statusError) {
if statusError.ErrStatus.Reason == metav1.StatusReasonAlreadyExists {
@@ -164,6 +164,7 @@ func getTapMizuAgentConfig() *shared.MizuAgentConfig {
AgentDatabasePath: shared.DataDirPath,
ServiceMap: config.Config.ServiceMap,
OAS: config.Config.OAS,
Elastic: config.Config.Elastic,
}

return &mizuAgentConfig
@@ -342,7 +343,7 @@ func watchApiServerPod(ctx context.Context, kubernetesProvider *kubernetes.Provi

if modifiedPod.Status.Phase == core.PodRunning && !isPodReady {
isPodReady = true
postApiServerStarted(ctx, kubernetesProvider, cancel, err)
postApiServerStarted(ctx, kubernetesProvider, cancel)
}
case kubernetes.EventBookmark:
break
@@ -405,7 +406,7 @@ func watchApiServerEvents(ctx context.Context, kubernetesProvider *kubernetes.Pr
case "FailedScheduling", "Failed":
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Mizu API Server status: %s - %s", event.Reason, event.Note))
cancel()
break

}
case err, ok := <-errorChan:
if !ok {
@@ -421,11 +422,11 @@ func watchApiServerEvents(ctx context.Context, kubernetesProvider *kubernetes.Pr
}
}

func postApiServerStarted(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc, err error) {
func postApiServerStarted(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc) {
startProxyReportErrorIfAny(kubernetesProvider, ctx, cancel)

options, _ := getMizuApiFilteringOptions()
if err = startTapperSyncer(ctx, cancel, kubernetesProvider, state.targetNamespaces, *options, state.startTime); err != nil {
if err := startTapperSyncer(ctx, cancel, kubernetesProvider, state.targetNamespaces, *options, state.startTime); err != nil {
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Error starting mizu tapper syncer: %v", err))
cancel()
}

@@ -28,7 +28,6 @@ type ConfigStruct struct {
Auth configStructs.AuthConfig `yaml:"auth"`
Config configStructs.ConfigConfig `yaml:"config,omitempty"`
AgentImage string `yaml:"agent-image,omitempty" readonly:""`
BasenineImage string `yaml:"basenine-image,omitempty" readonly:""`
KratosImage string `yaml:"kratos-image,omitempty" readonly:""`
KetoImage string `yaml:"keto-image,omitempty" readonly:""`
ImagePullPolicyStr string `yaml:"image-pull-policy" default:"Always"`
@@ -41,6 +40,7 @@ type ConfigStruct struct {
LogLevelStr string `yaml:"log-level,omitempty" default:"INFO" readonly:""`
ServiceMap bool `yaml:"service-map,omitempty" default:"false" readonly:""`
OAS bool `yaml:"oas,omitempty" default:"false" readonly:""`
Elastic shared.ElasticConfig `yaml:"elastic"`
}

func (config *ConfigStruct) validate() error {
@@ -52,7 +52,6 @@ func (config *ConfigStruct) validate() error {
}

func (config *ConfigStruct) SetDefaults() {
config.BasenineImage = fmt.Sprintf("%s:%s", shared.BasenineImageRepo, shared.BasenineImageTag)
config.KratosImage = shared.KratosImageDefault
config.KetoImage = shared.KetoImageDefault
config.AgentImage = fmt.Sprintf("%s:%s", shared.MizuAgentImageRepo, mizu.SemVer)

@@ -15,7 +15,7 @@ import (
"k8s.io/apimachinery/pkg/util/intstr"
)

func CreateTapMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedValidationRules string, serializedContract string, serializedMizuConfig string, isNsRestrictedMode bool, mizuResourcesNamespace string, agentImage string, basenineImage string, syncEntriesConfig *shared.SyncEntriesConfig, maxEntriesDBSizeBytes int64, apiServerResources shared.Resources, imagePullPolicy core.PullPolicy, logLevel logging.Level) (bool, error) {
func CreateTapMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedValidationRules string, serializedContract string, serializedMizuConfig string, isNsRestrictedMode bool, mizuResourcesNamespace string, agentImage string, syncEntriesConfig *shared.SyncEntriesConfig, maxEntriesDBSizeBytes int64, apiServerResources shared.Resources, imagePullPolicy core.PullPolicy, logLevel logging.Level) (bool, error) {
if !isNsRestrictedMode {
if err := createMizuNamespace(ctx, kubernetesProvider, mizuResourcesNamespace); err != nil {
return false, err
@@ -42,7 +42,6 @@ func CreateTapMizuResources(ctx context.Context, kubernetesProvider *kubernetes.
Namespace: mizuResourcesNamespace,
PodName: kubernetes.ApiServerPodName,
PodImage: agentImage,
BasenineImage: basenineImage,
KratosImage: "",
KetoImage: "",
ServiceAccountName: serviceAccountName,
@@ -68,7 +67,7 @@ func CreateTapMizuResources(ctx context.Context, kubernetesProvider *kubernetes.
return mizuServiceAccountExists, nil
}

func CreateInstallMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedValidationRules string, serializedContract string, serializedMizuConfig string, isNsRestrictedMode bool, mizuResourcesNamespace string, agentImage string, basenineImage string, kratosImage string, ketoImage string, syncEntriesConfig *shared.SyncEntriesConfig, maxEntriesDBSizeBytes int64, apiServerResources shared.Resources, imagePullPolicy core.PullPolicy, logLevel logging.Level, noPersistentVolumeClaim bool) error {
func CreateInstallMizuResources(ctx context.Context, kubernetesProvider *kubernetes.Provider, serializedValidationRules string, serializedContract string, serializedMizuConfig string, isNsRestrictedMode bool, mizuResourcesNamespace string, agentImage string, kratosImage string, ketoImage string, syncEntriesConfig *shared.SyncEntriesConfig, maxEntriesDBSizeBytes int64, apiServerResources shared.Resources, imagePullPolicy core.PullPolicy, logLevel logging.Level, noPersistentVolumeClaim bool) error {
if err := createMizuNamespace(ctx, kubernetesProvider, mizuResourcesNamespace); err != nil {
return err
}
@@ -98,7 +97,6 @@ func CreateInstallMizuResources(ctx context.Context, kubernetesProvider *kuberne
Namespace: mizuResourcesNamespace,
PodName: kubernetes.ApiServerPodName,
PodImage: agentImage,
BasenineImage: basenineImage,
KratosImage: kratosImage,
KetoImage: ketoImage,
ServiceAccountName: serviceAccountName,

@@ -17,8 +17,6 @@ const (
MizuAgentImageRepo = "docker.io/up9inc/mizu"
BasenineHost = "127.0.0.1"
BaseninePort = "9099"
BasenineImageRepo = "docker.io/up9inc/basenine"
BasenineImageTag = "v0.4.13"
KratosImageDefault = "gcr.io/up9-docker-hub/mizu-kratos/stable:0.0.0"
KetoImageDefault = "gcr.io/up9-docker-hub/mizu-keto/stable:0.0.0"
)

@@ -169,7 +169,6 @@ type ApiServerOptions struct {
Namespace string
PodName string
PodImage string
BasenineImage string
KratosImage string
KetoImage string
ServiceAccountName string
@@ -275,7 +274,7 @@ func (provider *Provider) GetMizuApiServerPodObject(opts *ApiServerOptions, moun
},
{
Name: "basenine",
Image: opts.BasenineImage,
Image: opts.PodImage,
ImagePullPolicy: opts.ImagePullPolicy,
VolumeMounts: volumeMounts,
ReadinessProbe: &core.Probe{
@@ -299,7 +298,7 @@ func (provider *Provider) GetMizuApiServerPodObject(opts *ApiServerOptions, moun
"memory": memRequests,
},
},
Command: []string{"/basenine"},
Command: []string{"basenine"},
Args: []string{"-addr", "0.0.0.0", "-port", shared.BaseninePort, "-persistent"},
WorkingDir: shared.DataDirPath,
},
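Because the agent image now ships the Basenine binary in /usr/local/bin (see the Dockerfile change), the sidecar container can reuse the agent's own image and resolve "basenine" from PATH rather than referencing a separate Basenine image. A hedged sketch of building such a container spec with the client-go core types follows; the function, image tag, and values are illustrative only, and the snippet assumes the k8s.io/api dependency is available.

```go
package main

import (
	"fmt"

	core "k8s.io/api/core/v1"
)

// basenineContainer shows the shape of the sidecar after the change: same
// image as the agent pod, binary resolved from PATH instead of an absolute path.
func basenineContainer(podImage, port, dataDir string) core.Container {
	return core.Container{
		Name:            "basenine",
		Image:           podImage, // previously a dedicated Basenine image
		ImagePullPolicy: core.PullAlways,
		Command:         []string{"basenine"},
		Args:            []string{"-addr", "0.0.0.0", "-port", port, "-persistent"},
		WorkingDir:      dataDir,
	}
}

func main() {
	c := basenineContainer("docker.io/up9inc/mizu:latest", "9099", "/app/data")
	fmt.Println(c.Name, c.Image, c.Command)
}
```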
@@ -43,6 +43,13 @@ type MizuAgentConfig struct {
StandaloneMode bool `json:"standaloneMode"`
ServiceMap bool `json:"serviceMap"`
OAS bool `json:"oas"`
Elastic ElasticConfig `json:"elastic"`
}

type ElasticConfig struct {
User string `yaml:"user,omitempty" default:"" readonly:""`
Password string `yaml:"password,omitempty" default:"" readonly:""`
Url string `yaml:"url,omitempty" default:"" readonly:""`
}

type WebSocketMessageMetadata struct {

@@ -100,7 +100,6 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
http2Assembler = createHTTP2Assembler(b)
}

dissected := false
switchingProtocolsHTTP2 := false
for {
if switchingProtocolsHTTP2 {
@@ -121,7 +120,7 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
} else if err != nil {
continue
}
dissected = true
superIdentifier.Protocol = &http11protocol
} else if isClient {
var req *http.Request
switchingProtocolsHTTP2, req, err = handleHTTP1ClientStream(b, tcpID, counterPair, superTimer, emitter, options)
@@ -130,7 +129,7 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
} else if err != nil {
continue
}
dissected = true
superIdentifier.Protocol = &http11protocol

// In case of an HTTP2 upgrade, duplicate the HTTP1 request into HTTP2 with stream ID 1
if switchingProtocolsHTTP2 {
@@ -161,14 +160,14 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
} else if err != nil {
continue
}
dissected = true
superIdentifier.Protocol = &http11protocol
}
}

if !dissected {
if superIdentifier.Protocol == nil {
return err
}
superIdentifier.Protocol = &http11protocol

return nil
}

@@ -251,10 +251,10 @@ func ReadResponse(r io.Reader, tcpID *api.TcpID, superTimer *api.SuperTimer, emi
}

connectionInfo := &api.ConnectionInfo{
ClientIP: tcpID.SrcIP,
ClientPort: tcpID.SrcPort,
ServerIP: tcpID.DstIP,
ServerPort: tcpID.DstPort,
ClientIP: tcpID.DstIP,
ClientPort: tcpID.DstPort,
ServerIP: tcpID.SrcIP,
ServerPort: tcpID.SrcPort,
IsOutgoing: true,
}
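The ReadResponse change appears to correct which side of the TCP tuple is treated as the client: on the response path the packet's source is the server, so the destination side is the client. A small hedged sketch of that flip, using simplified stand-in types rather than the api package's own:

```go
package main

import "fmt"

type tcpID struct {
	SrcIP, SrcPort, DstIP, DstPort string
}

type connectionInfo struct {
	ClientIP, ClientPort, ServerIP, ServerPort string
}

// responseConnInfo mirrors the corrected mapping: for a response, the
// capture's source is the server, so the destination fields name the client.
func responseConnInfo(id tcpID) connectionInfo {
	return connectionInfo{
		ClientIP:   id.DstIP,
		ClientPort: id.DstPort,
		ServerIP:   id.SrcIP,
		ServerPort: id.SrcPort,
	}
}

func main() {
	info := responseConnInfo(tcpID{SrcIP: "10.0.0.2", SrcPort: "80", DstIP: "10.0.0.9", DstPort: "51000"})
	fmt.Printf("%+v\n", info)
}
```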
@@ -197,6 +197,12 @@ func (r *RedisInputStream) readLineBytes() ([]byte, error) {
line := make([]byte, N)
j := 0
for i := r.count; i <= N; i++ {
if i >= len(buf) {
return nil, errors.New("Redis buffer index mismatch.")
}
if i >= len(line) {
return nil, errors.New("Redis line index mismatch.")
}
line[j] = buf[i]
j++
}
@@ -295,27 +301,44 @@ func (p *RedisProtocol) Read() (packet *RedisPacket, err error) {
switch x.(type) {
case []interface{}:
array := x.([]interface{})
switch array[0].(type) {
case []uint8:
packet.Command = RedisCommand(strings.ToUpper(string(array[0].([]uint8))))
if len(array) > 1 {
packet.Key = string(array[1].([]uint8))
}
if len(array) > 2 {
packet.Value = string(array[2].([]uint8))
}
if len(array) > 3 {
packet.Value = fmt.Sprintf("[%s", packet.Value)
for _, item := range array[3:] {
packet.Value = fmt.Sprintf("%s, %s", packet.Value, item.([]uint8))
if len(array) > 0 {
switch array[0].(type) {
case []uint8:
packet.Command = RedisCommand(strings.ToUpper(string(array[0].([]uint8))))
if len(array) > 1 {
switch array[1].(type) {
case []uint8:
packet.Key = string(array[1].([]uint8))
case int64:
packet.Key = fmt.Sprintf("%d", array[1].(int64))
}
}
packet.Value = strings.TrimSuffix(packet.Value, ", ")
packet.Value = fmt.Sprintf("%s]", packet.Value)
if len(array) > 2 {
switch array[2].(type) {
case []uint8:
packet.Value = string(array[2].([]uint8))
case int64:
packet.Value = fmt.Sprintf("%d", array[2].(int64))
}
}
if len(array) > 3 {
packet.Value = fmt.Sprintf("[%s", packet.Value)
for _, item := range array[3:] {
switch item.(type) {
case []uint8:
packet.Value = fmt.Sprintf("%s, %s", packet.Value, item.([]uint8))
case int64:
packet.Value = fmt.Sprintf("%s, %d", packet.Value, item.(int64))
}
}
packet.Value = strings.TrimSuffix(packet.Value, ", ")
packet.Value = fmt.Sprintf("%s]", packet.Value)
}
default:
msg := fmt.Sprintf("Unrecognized element in Redis array: %v", reflect.TypeOf(array[0]))
err = errors.New(msg)
return
}
default:
msg := fmt.Sprintf("Unrecognized element in Redis array: %v", reflect.TypeOf(array[0]))
err = errors.New(msg)
return
}
case []uint8:
val := string(x.([]uint8))
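The hardened Redis parser guards every array element with a type switch so that bulk strings ([]uint8) and integer replies (int64) are both handled without an unchecked type assertion panicking. A standalone sketch of the same guard, with asString as a hypothetical helper rather than the extension's real code:

```go
package main

import "fmt"

// asString renders a single RESP element, tolerating the two element types
// the parser expects: bulk strings ([]uint8) and integers (int64).
func asString(v interface{}) (string, bool) {
	switch x := v.(type) {
	case []uint8:
		return string(x), true
	case int64:
		return fmt.Sprintf("%d", x), true
	default:
		return "", false // unrecognized element; the caller reports an error
	}
}

func main() {
	reply := []interface{}{[]uint8("SET"), []uint8("counter"), int64(42)}
	for _, el := range reply {
		if s, ok := asString(el); ok {
			fmt.Println(s)
		}
	}
}
```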
@@ -1,2 +1 @@
REACT_APP_OVERRIDE_WS_URL="ws://localhost:8899/ws"
REACT_APP_OVERRIDE_API_URL="http://localhost:8899/"
REACT_APP_OVERRIDE_IS_ENTERPRISE="false"

3 ui/.env.dev.basic Normal file
@@ -0,0 +1,3 @@
REACT_APP_OVERRIDE_WS_URL="ws://localhost:8899/ws"
REACT_APP_OVERRIDE_API_URL="http://localhost:8899/"
REACT_APP_OVERRIDE_IS_ENTERPRISE="false"

3 ui/.env.dev.enterprise Normal file
@@ -0,0 +1,3 @@
REACT_APP_OVERRIDE_WS_URL="ws://localhost:8899/ws"
REACT_APP_OVERRIDE_API_URL="http://localhost:8899/"
REACT_APP_OVERRIDE_IS_ENTERPRISE="true"

@@ -1,3 +1 @@
REACT_APP_OVERRIDE_WS_URL="ws://localhost:8899/ws"
REACT_APP_OVERRIDE_API_URL="http://localhost:8899/"
REACT_APP_OVERRIDE_IS_ENTERPRISE="true"

@@ -49,7 +49,8 @@
},
"scripts": {
"start": "craco start",
"start-ent": "./node_modules/.bin/env-cmd -f .env.enterprise craco start",
"start-dev": "./node_modules/.bin/env-cmd -f .env.dev.basic craco start",
"start-dev-ent": "./node_modules/.bin/env-cmd -f .env.dev.enterprise craco start",
"build": "./node_modules/.bin/env-cmd -f .env.basic craco build",
"build-ent": "BUILD_PATH='./build-ent' ./node_modules/.bin/env-cmd -f .env.enterprise craco build",
"test": "craco test",

@@ -137,12 +137,12 @@ export const EntryBodySection: React.FC<EntryBodySectionProps> = ({
const chunk = body.slice(0, MAXIMUM_BYTES_TO_FORMAT);
const bodyBuf = isBase64Encoding ? atob(chunk) : chunk;

if (!isPretty) return bodyBuf;

try {
if (jsonLikeFormats.some(format => contentType?.indexOf(format) > -1)) {
if (!isPretty) return bodyBuf;
return jsonBeautify(JSON.parse(bodyBuf), null, 2, 80);
} else if (xmlLikeFormats.some(format => contentType?.indexOf(format) > -1)) {
if (!isPretty) return bodyBuf;
return xmlBeautify(bodyBuf, {
indentation: ' ',
filter: (node) => node.type !== 'Comment',
@@ -152,7 +152,9 @@ export const EntryBodySection: React.FC<EntryBodySectionProps> = ({
} else if (protobufFormats.some(format => contentType?.indexOf(format) > -1)) {
// Replace all non printable characters (ASCII)
const protobufDecoder = new ProtobufDecoder(bodyBuf, true);
return jsonBeautify(protobufDecoder.decode().toSimple(), null, 2, 80);
const protobufDecoded = protobufDecoder.decode().toSimple();
if (!isPretty) return JSON.stringify(protobufDecoded);
return jsonBeautify(protobufDecoded, null, 2, 80);
}
} catch (error) {
console.error(error);

@@ -21,7 +21,6 @@ const OasModal = ({ openModal, handleCloseModal }) => {
const services = await api.getOasServices();
setOasServices(services);
} catch (e) {
toast.error("Error occurred while fetching services list");
console.error(e);
}
})();

@@ -1,14 +1,12 @@
import * as axios from "axios";

// When working locally cp `cp .env.example .env`
export const MizuWebsocketURL = process.env.REACT_APP_OVERRIDE_WS_URL ? process.env.REACT_APP_OVERRIDE_WS_URL :
window.location.protocol === 'https:' ? `wss://${window.location.host}/ws` : `ws://${window.location.host}/ws`;
window.location.protocol === 'https:' ? `wss://${window.location.host}/ws` : `ws://${window.location.host}/ws`;

export const FormValidationErrorType = "formError";

const CancelToken = axios.CancelToken;

// When working locally cp `cp .env.example .env`
const apiURL = process.env.REACT_APP_OVERRIDE_API_URL ? process.env.REACT_APP_OVERRIDE_API_URL : `${window.location.origin}/`;

export default class Api {

@@ -10,8 +10,11 @@ const useKeyPress = (eventConfigs, callback, node = null) => {
// handle what happens on key press
const handleKeyPress = useCallback(
(event) => {

// check if one of the key is part of the ones we want
if (eventConfigs.some((eventConfig) => Object.keys(eventConfig).every(nameKey => eventConfig[nameKey] === event[nameKey]))) {
event.stopPropagation()
event.preventDefault();
callbackRef.current(event);
}
},