Mirror of https://github.com/kubeshark/kubeshark.git, synced 2026-02-15 10:29:55 +00:00

Compare commits: 34 commits, 30.0-dev24 ... stable
| SHA1 |
|---|
| fd5b72a66e |
| 826f2cfb04 |
| e6f140adb2 |
| 1af2cd728d |
| d5f6093084 |
| 32ad2f17c3 |
| 553433622e |
| b6917ed0b7 |
| d22e5a9620 |
| e880a9224f |
| 67c3e5aab5 |
| ed791c988e |
| 3567f5c00b |
| 605c67c22f |
| 9b44838fed |
| 9f6abf7a32 |
| 609f85e6ae |
| d2188bfc22 |
| d5942c3c49 |
| f03c2adce4 |
| 93ebec09b9 |
| 06c7b9c748 |
| db66119182 |
| 2ae6153631 |
| 2dfb0b2549 |
| a2b774ae1f |
| 862f4252c3 |
| d5f057156e |
| 96ce694945 |
| 2d7ff95426 |
| af9daf91cf |
| ea82d1779b |
| 51ddbf3815 |
| 8bbdcb6877 |
@@ -2,6 +2,7 @@
.dockerignore
.editorconfig
.gitignore
.env.*
Dockerfile
Makefile
LICENSE
38 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -1,38 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Run `mizu <command> ...`
2. Click on '...'
3. Scroll down to '...'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Logs**
Upload logs:
1. Run the mizu command with `--set dump-logs=true` (e.g `mizu tap --set dump-logs=true`)
2. Try to reproduce the issue
3. <kbd>CTRL</kbd>+<kbd>C</kbd> on terminal tab which runs `mizu`
4. Upload the logs zip file from `~/.mizu/mizu_logs_**.zip`

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
 - OS: [e.g. macOS]
 - Web Browser: [e.g. Google Chrome]

**Additional context**
Add any other context about the problem here.
50 .github/workflows/acceptance_tests.yml (vendored)
@@ -1,50 +0,0 @@
|
||||
name: acceptance tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'main'
|
||||
push:
|
||||
branches:
|
||||
- 'develop'
|
||||
|
||||
jobs:
|
||||
run-acceptance-tests:
|
||||
name: Run acceptance tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up Go 1.17
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: '^1.17'
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Setup acceptance test
|
||||
run: ./acceptanceTests/setup.sh
|
||||
|
||||
- name: Create k8s users and change context
|
||||
env:
|
||||
USERNAME_UNRESTRICTED: user-with-clusterwide-access
|
||||
USERNAME_RESTRICTED: user-with-restricted-access
|
||||
run: |
|
||||
./acceptanceTests/create_user.sh "${USERNAME_UNRESTRICTED}"
|
||||
./acceptanceTests/create_user.sh "${USERNAME_RESTRICTED}"
|
||||
kubectl apply -f cli/cmd/permissionFiles/permissions-all-namespaces-tap.yaml
|
||||
kubectl config use-context ${USERNAME_UNRESTRICTED}
|
||||
|
||||
- name: Test
|
||||
run: make acceptance-test
|
||||
|
||||
- name: Slack notification on failure
|
||||
uses: ravsamhq/notify-slack-action@v1
|
||||
if: always()
|
||||
with:
|
||||
status: ${{ job.status }}
|
||||
notification_title: 'Mizu {workflow} has {status_message}'
|
||||
message_format: '{emoji} *{workflow}* {status_message} during <{run_url}|run>, after commit: <{commit_url}|{commit_sha}>'
|
||||
footer: 'Linked Repo <{repo_url}|{repo}>'
|
||||
notify_when: 'failure'
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
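Run outside CI, the job above amounts to the following sequence (a sketch assuming a local Minikube cluster and the repository root as the working directory; the two usernames are the ones defined in the workflow's `env` block):

```bash
./acceptanceTests/setup.sh
./acceptanceTests/create_user.sh user-with-clusterwide-access
./acceptanceTests/create_user.sh user-with-restricted-access
kubectl apply -f cli/cmd/permissionFiles/permissions-all-namespaces-tap.yaml
kubectl config use-context user-with-clusterwide-access
make acceptance-test
```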
|
||||
44 .github/workflows/build-custom-branch.yml (vendored)
@@ -1,44 +0,0 @@
|
||||
name: Build Custom Branch
|
||||
|
||||
on: push
|
||||
|
||||
concurrency:
|
||||
group: custom-branch-build-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Push custom branch image to GCR
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ contains(github.event.head_commit.message, '#build_and_publish_custom_image') }}
|
||||
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- id: 'auth'
|
||||
uses: 'google-github-actions/auth@v0'
|
||||
with:
|
||||
credentials_json: '${{ secrets.GCR_JSON_KEY }}'
|
||||
|
||||
- name: 'Set up Cloud SDK'
|
||||
uses: 'google-github-actions/setup-gcloud@v0'
|
||||
|
||||
- name: Get base image name
|
||||
shell: bash
|
||||
run: echo "##[set-output name=image;]$(echo gcr.io/up9-docker-hub/mizu/${GITHUB_REF#refs/heads/})"
|
||||
id: base_image_step
|
||||
|
||||
- name: Login to GCR
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: gcr.io
|
||||
username: _json_key
|
||||
password: ${{ secrets.GCR_JSON_KEY }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.base_image_step.outputs.image }}:latest
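The `Get base image name` step derives the GCR image path from the branch name with a shell parameter expansion. A small illustration of what it evaluates to (the branch name here is an example):

```bash
GITHUB_REF=refs/heads/my-feature-branch                       # set by GitHub Actions
echo "${GITHUB_REF#refs/heads/}"                              # my-feature-branch
echo "gcr.io/up9-docker-hub/mizu/${GITHUB_REF#refs/heads/}"   # gcr.io/up9-docker-hub/mizu/my-feature-branch
```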
|
||||
62 .github/workflows/build.yml (vendored)
@@ -1,62 +0,0 @@
|
||||
name: Build
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'develop'
|
||||
- 'main'
|
||||
|
||||
concurrency:
|
||||
group: mizu-pr-validation-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-cli:
|
||||
name: CLI executable build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check modified files
|
||||
id: modified_files
|
||||
run: devops/check_modified_files.sh cli/
|
||||
|
||||
- name: Set up Go 1.17
|
||||
if: steps.modified_files.outputs.matched == 'true'
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: '1.17'
|
||||
|
||||
- name: Build CLI
|
||||
if: steps.modified_files.outputs.matched == 'true'
|
||||
run: make cli
|
||||
|
||||
build-agent:
|
||||
name: Agent docker image build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check modified files
|
||||
id: modified_files
|
||||
run: devops/check_modified_files.sh agent/ shared/ tap/ ui/ Dockerfile
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.modified_files.outputs.matched == 'true'
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Build
|
||||
uses: docker/build-push-action@v2
|
||||
if: steps.modified_files.outputs.matched == 'true'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
tags: up9inc/mizu:devlatest
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
22 .github/workflows/inactive-issues-close.yaml (vendored)
@@ -1,22 +0,0 @@
|
||||
name: Close inactive issues
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
|
||||
jobs:
|
||||
close-issues:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/stale@v3
|
||||
with:
|
||||
days-before-issue-stale: 30
|
||||
days-before-issue-close: 14
|
||||
stale-issue-label: "stale"
|
||||
stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
|
||||
close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
|
||||
days-before-pr-stale: -1
|
||||
days-before-pr-close: -1
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
47 .github/workflows/publish-cli.yml (vendored, new file)
@@ -0,0 +1,47 @@
|
||||
name: public-cli
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- main
|
||||
- my-temp-release-check
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Cloud SDK
|
||||
uses: google-github-actions/setup-gcloud@master
|
||||
with:
|
||||
service_account_key: ${{ secrets.GCR_JSON_KEY }}
|
||||
export_default_credentials: true
|
||||
- uses: haya14busa/action-cond@v1
|
||||
id: condval
|
||||
with:
|
||||
cond: ${{ github.ref == 'refs/heads/main' }}
|
||||
if_true: "minor"
|
||||
if_false: "patch"
|
||||
- name: Auto Increment Semver Action
|
||||
uses: MCKanpolat/auto-semver-action@1.0.5
|
||||
id: versioning
|
||||
with:
|
||||
releaseType: ${{ steps.condval.outputs.value }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Get base image name
|
||||
shell: bash
|
||||
run: |
|
||||
echo "##[set-output name=build_timestamp;]$(echo $(date +%s))"
|
||||
echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: version_parameters
|
||||
- name: Build and Push CLI
|
||||
run: make push-cli SEM_VER='${{ steps.versioning.outputs.version }}' BUILD_TIMESTAMP='${{ steps.version_parameters.outputs.build_timestamp }}'
|
||||
- name: publish
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
artifacts: "cli/bin/*"
|
||||
commit: ${{ steps.version_parameters.outputs.branch }}
|
||||
tag: ${{ steps.versioning.outputs.version }}
|
||||
prerelease: ${{ github.ref != 'refs/heads/main' }}
|
||||
bodyFile: 'cli/bin/README.md'
|
||||
39 .github/workflows/publish-docker.yml (vendored, new file)
@@ -0,0 +1,39 @@
|
||||
name: publish-docker
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'develop'
|
||||
- 'main'
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Get base image name
|
||||
shell: bash
|
||||
run: echo "##[set-output name=image;]$(echo gcr.io/up9-docker-hub/mizu/${GITHUB_REF#refs/heads/})"
|
||||
id: base_image_step
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: crazy-max/ghaction-docker-meta@v2
|
||||
with:
|
||||
images: ${{ steps.base_image_step.outputs.image }}
|
||||
tags: |
|
||||
type=sha
|
||||
type=raw,${{ github.sha }}
|
||||
type=raw,latest
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: gcr.io
|
||||
username: _json_key
|
||||
password: ${{ secrets.GCR_JSON_KEY }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
292 .github/workflows/release.yml (vendored)
@@ -1,292 +0,0 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'develop'
|
||||
- 'main'
|
||||
|
||||
concurrency:
|
||||
group: mizu-publish-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
docker-registry:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
max-parallel: 2
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target:
|
||||
- amd64
|
||||
- arm64v8
|
||||
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Determine versioning strategy
|
||||
uses: haya14busa/action-cond@v1
|
||||
id: condval
|
||||
with:
|
||||
cond: ${{ github.ref == 'refs/heads/main' }}
|
||||
if_true: "stable"
|
||||
if_false: "dev"
|
||||
|
||||
- name: Auto Increment Ver Action
|
||||
uses: docker://igorgov/auto-inc-ver:v2.0.0
|
||||
id: versioning
|
||||
with:
|
||||
mode: ${{ steps.condval.outputs.value }}
|
||||
suffix: 'dev'
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Get version parameters
|
||||
shell: bash
|
||||
run: |
|
||||
echo "##[set-output name=build_timestamp;]$(echo $(date +%s))"
|
||||
echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: version_parameters
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
with:
|
||||
images: |
|
||||
up9inc/mizu
|
||||
tags: |
|
||||
type=raw,${{ steps.versioning.outputs.version }}
|
||||
type=raw,value=latest,enable=${{ steps.condval.outputs.value == 'stable' }}
|
||||
type=raw,value=dev-latest,enable=${{ steps.condval.outputs.value == 'dev' }}
|
||||
flavor: |
|
||||
latest=auto
|
||||
prefix=
|
||||
suffix=-${{ matrix.target }},onlatest=true
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USER }}
|
||||
password: ${{ secrets.DOCKERHUB_PASS }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
TARGETARCH=${{ matrix.target }}
|
||||
VER=${{ steps.versioning.outputs.version }}
|
||||
BUILD_TIMESTAMP=${{ steps.version_parameters.outputs.build_timestamp }}
|
||||
GIT_BRANCH=${{ steps.version_parameters.outputs.branch }}
|
||||
COMMIT_HASH=${{ github.sha }}
|
||||
|
||||
gcp-registry:
|
||||
name: Push Docker image to GCR
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
max-parallel: 2
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target:
|
||||
- amd64
|
||||
- arm64v8
|
||||
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- id: 'auth'
|
||||
uses: 'google-github-actions/auth@v0'
|
||||
with:
|
||||
credentials_json: '${{ secrets.GCR_JSON_KEY }}'
|
||||
|
||||
- name: 'Set up Cloud SDK'
|
||||
uses: 'google-github-actions/setup-gcloud@v0'
|
||||
|
||||
- name: Determine versioning strategy
|
||||
uses: haya14busa/action-cond@v1
|
||||
id: condval
|
||||
with:
|
||||
cond: ${{ github.ref == 'refs/heads/main' }}
|
||||
if_true: "stable"
|
||||
if_false: "dev"
|
||||
|
||||
- name: Auto Increment Ver Action
|
||||
uses: docker://igorgov/auto-inc-ver:v2.0.0
|
||||
id: versioning
|
||||
with:
|
||||
mode: ${{ steps.condval.outputs.value }}
|
||||
suffix: 'dev'
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Get version parameters
|
||||
shell: bash
|
||||
run: |
|
||||
echo "##[set-output name=build_timestamp;]$(echo $(date +%s))"
|
||||
echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: version_parameters
|
||||
|
||||
- name: Get base image name
|
||||
shell: bash
|
||||
run: echo "##[set-output name=image;]$(echo gcr.io/up9-docker-hub/mizu/${GITHUB_REF#refs/heads/})"
|
||||
id: base_image_step
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
with:
|
||||
images: |
|
||||
${{ steps.base_image_step.outputs.image }}
|
||||
tags: |
|
||||
type=raw,${{ steps.versioning.outputs.version }}
|
||||
type=raw,value=latest,enable=${{ steps.condval.outputs.value == 'stable' }}
|
||||
type=raw,value=dev-latest,enable=${{ steps.condval.outputs.value == 'dev' }}
|
||||
flavor: |
|
||||
latest=auto
|
||||
prefix=
|
||||
suffix=-${{ matrix.target }},onlatest=true
|
||||
|
||||
- name: Login to GCR
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: gcr.io
|
||||
username: _json_key
|
||||
password: ${{ secrets.GCR_JSON_KEY }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
TARGETARCH=${{ matrix.target }}
|
||||
VER=${{ steps.versioning.outputs.version }}
|
||||
BUILD_TIMESTAMP=${{ steps.version_parameters.outputs.build_timestamp }}
|
||||
GIT_BRANCH=${{ steps.version_parameters.outputs.branch }}
|
||||
COMMIT_HASH=${{ github.sha }}
|
||||
|
||||
docker-manifest:
|
||||
name: Create and Push a Docker Manifest
|
||||
runs-on: ubuntu-latest
|
||||
needs: [docker-registry]
|
||||
steps:
|
||||
- name: Determine versioning strategy
|
||||
uses: haya14busa/action-cond@v1
|
||||
id: condval
|
||||
with:
|
||||
cond: ${{ github.ref == 'refs/heads/main' }}
|
||||
if_true: "stable"
|
||||
if_false: "dev"
|
||||
|
||||
- name: Auto Increment Ver Action
|
||||
uses: docker://igorgov/auto-inc-ver:v2.0.0
|
||||
id: versioning
|
||||
with:
|
||||
mode: ${{ steps.condval.outputs.value }}
|
||||
suffix: 'dev'
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Get version parameters
|
||||
shell: bash
|
||||
run: |
|
||||
echo "##[set-output name=build_timestamp;]$(echo $(date +%s))"
|
||||
echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: version_parameters
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
with:
|
||||
images: |
|
||||
up9inc/mizu
|
||||
tags: |
|
||||
type=raw,${{ steps.versioning.outputs.version }}
|
||||
type=raw,value=latest,enable=${{ steps.condval.outputs.value == 'stable' }}
|
||||
type=raw,value=dev-latest,enable=${{ steps.condval.outputs.value == 'dev' }}
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USER }}
|
||||
password: ${{ secrets.DOCKERHUB_PASS }}
|
||||
|
||||
- name: Create manifest
|
||||
run: |
|
||||
while IFS= read -r line; do
|
||||
docker manifest create $line --amend $line-amd64 --amend $line-arm64v8
|
||||
done <<< "${{ steps.meta.outputs.tags }}"
|
||||
|
||||
- name: Push manifest
|
||||
run: |
|
||||
while IFS= read -r line; do
|
||||
docker manifest push $line
|
||||
done <<< "${{ steps.meta.outputs.tags }}"
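The two loops read the newline-separated tag list produced by the metadata step and stitch the per-architecture images (pushed by the earlier jobs with `-amd64` / `-arm64v8` suffixes) into one multi-arch tag. For a single tag such as `up9inc/mizu:X.Y.Z` (the version is a placeholder), each iteration expands to:

```bash
docker manifest create up9inc/mizu:X.Y.Z \
  --amend up9inc/mizu:X.Y.Z-amd64 \
  --amend up9inc/mizu:X.Y.Z-arm64v8
docker manifest push up9inc/mizu:X.Y.Z
```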
|
||||
|
||||
cli:
|
||||
name: Build the CLI and publish
|
||||
runs-on: ubuntu-latest
|
||||
needs: [docker-manifest, gcp-registry]
|
||||
steps:
|
||||
- name: Set up Go 1.17
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: '1.17'
|
||||
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- id: 'auth'
|
||||
uses: 'google-github-actions/auth@v0'
|
||||
with:
|
||||
credentials_json: '${{ secrets.GCR_JSON_KEY }}'
|
||||
|
||||
- name: 'Set up Cloud SDK'
|
||||
uses: 'google-github-actions/setup-gcloud@v0'
|
||||
|
||||
- name: Determine versioning strategy
|
||||
uses: haya14busa/action-cond@v1
|
||||
id: condval
|
||||
with:
|
||||
cond: ${{ github.ref == 'refs/heads/main' }}
|
||||
if_true: "stable"
|
||||
if_false: "dev"
|
||||
|
||||
- name: Auto Increment Ver Action
|
||||
uses: docker://igorgov/auto-inc-ver:v2.0.0
|
||||
id: versioning
|
||||
with:
|
||||
mode: ${{ steps.condval.outputs.value }}
|
||||
suffix: 'dev'
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Get version parameters
|
||||
shell: bash
|
||||
run: |
|
||||
echo "##[set-output name=build_timestamp;]$(echo $(date +%s))"
|
||||
echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: version_parameters
|
||||
|
||||
- name: Build and Push CLI
|
||||
run: make push-cli VER='${{ steps.versioning.outputs.version }}' BUILD_TIMESTAMP='${{ steps.version_parameters.outputs.build_timestamp }}'
|
||||
|
||||
- name: Log the version into a .txt file
|
||||
shell: bash
|
||||
run: |
|
||||
echo '${{ steps.versioning.outputs.version }}' >> cli/bin/version.txt
|
||||
|
||||
- name: Release
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
artifacts: "cli/bin/*"
|
||||
commit: ${{ steps.version_parameters.outputs.branch }}
|
||||
tag: ${{ steps.versioning.outputs.version }}
|
||||
prerelease: ${{ github.ref != 'refs/heads/main' }}
|
||||
bodyFile: 'cli/bin/README.md'
|
||||
90 .github/workflows/static_code_analysis.yml (vendored)
@@ -1,90 +0,0 @@
|
||||
name: Static code analysis
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'develop'
|
||||
- 'main'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
golangci:
|
||||
name: Go lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: '^1.17'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y libpcap-dev
|
||||
|
||||
- name: Go lint - agent
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: agent
|
||||
args: --timeout=3m
|
||||
|
||||
- name: Go lint - shared
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: shared
|
||||
args: --timeout=3m
|
||||
|
||||
- name: Go lint - tap
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: tap
|
||||
args: --timeout=3m
|
||||
|
||||
- name: Go lint - CLI
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: cli
|
||||
args: --timeout=3m
|
||||
|
||||
- name: Go lint - acceptanceTests
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: acceptanceTests
|
||||
args: --timeout=3m
|
||||
|
||||
- name: Go lint - tap/api
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: tap/api
|
||||
|
||||
- name: Go lint - tap/extensions/amqp
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: tap/extensions/amqp
|
||||
|
||||
- name: Go lint - tap/extensions/http
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: tap/extensions/http
|
||||
|
||||
- name: Go lint - tap/extensions/kafka
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: tap/extensions/kafka
|
||||
|
||||
- name: Go lint - tap/extensions/redis
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
version: latest
|
||||
working-directory: tap/extensions/redis
|
||||
66 .github/workflows/test.yml (vendored)
@@ -1,66 +0,0 @@
|
||||
name: Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'develop'
|
||||
- 'main'
|
||||
push: # needed to upload test coverage report to codecov
|
||||
branches:
|
||||
- 'develop'
|
||||
- 'main'
|
||||
|
||||
concurrency:
|
||||
group: mizu-tests-validation-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
run-unit-tests:
|
||||
name: Unit Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Set up Go 1.17
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: '^1.17'
|
||||
|
||||
- name: Install libpcap
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get install libpcap-dev
|
||||
|
||||
- name: Check CLI modified files
|
||||
id: cli_modified_files
|
||||
run: devops/check_modified_files.sh cli/
|
||||
|
||||
- name: CLI Test
|
||||
if: github.event_name == 'push' || steps.cli_modified_files.outputs.matched == 'true'
|
||||
run: make test-cli
|
||||
|
||||
- name: Check Agent modified files
|
||||
id: agent_modified_files
|
||||
run: devops/check_modified_files.sh agent/
|
||||
|
||||
- name: Agent Test
|
||||
if: github.event_name == 'push' || steps.agent_modified_files.outputs.matched == 'true'
|
||||
run: make test-agent
|
||||
|
||||
- name: Shared Test
|
||||
run: make test-shared
|
||||
|
||||
- name: Check extensions modified files
|
||||
id: ext_modified_files
|
||||
run: devops/check_modified_files.sh tap/extensions/
|
||||
|
||||
- name: Extensions Test
|
||||
if: github.event_name == 'push' || steps.ext_modified_files.outputs.matched == 'true'
|
||||
run: make test-extensions
|
||||
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
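The same checks can be reproduced locally with the Makefile targets this workflow calls; libpcap headers are the only system dependency it installs (a sketch for a Debian/Ubuntu host):

```bash
sudo apt-get install -y libpcap-dev
make test-cli
make test-agent
make test-shared
make test-extensions
```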
|
||||
|
||||
36 .gitignore (vendored)
@@ -15,41 +15,7 @@
|
||||
# vendor/
|
||||
.idea/
|
||||
build
|
||||
*.db
|
||||
|
||||
# Mac OS
|
||||
.DS_Store
|
||||
.vscode/
|
||||
|
||||
# Ignore the scripts that are created for development
|
||||
*dev.*
|
||||
|
||||
# Environment variables
|
||||
.env
|
||||
|
||||
# pprof
|
||||
pprof/*
|
||||
|
||||
# Database Files
|
||||
*.db
|
||||
*.gob
|
||||
|
||||
# Nohup Files - https://man7.org/linux/man-pages/man1/nohup.1p.html
|
||||
nohup.*
|
||||
|
||||
# Cypress tests
|
||||
cypress.env.json
|
||||
*/cypress/downloads
|
||||
*/cypress/fixtures
|
||||
*/cypress/plugins
|
||||
*/cypress/screenshots
|
||||
*/cypress/videos
|
||||
*/cypress/support
|
||||
|
||||
# Ignore test data in extensions
|
||||
tap/extensions/*/bin
|
||||
tap/extensions/*/expect
|
||||
|
||||
# UI folders to ignore
|
||||
**/node_modules/**
|
||||
**/dist/**
|
||||
*.editorconfig
|
||||
102 Dockerfile
@@ -1,103 +1,43 @@
|
||||
ARG BUILDARCH=amd64
|
||||
ARG TARGETARCH=amd64
|
||||
|
||||
### Front-end
|
||||
FROM node:16 AS front-end
|
||||
FROM node:14-slim AS site-build
|
||||
|
||||
WORKDIR /app/ui-build
|
||||
|
||||
COPY ui/package.json .
|
||||
COPY ui/package-lock.json .
|
||||
RUN npm i
|
||||
COPY ui .
|
||||
RUN npm i
|
||||
RUN npm run build
|
||||
|
||||
### Base builder image for native-architecture builds
|
||||
FROM golang:1.17-alpine AS builder-native-base
|
||||
ENV CGO_ENABLED=1 GOOS=linux
|
||||
RUN apk add libpcap-dev g++ perl-utils
|
||||
|
||||
FROM golang:1.16-alpine AS builder
|
||||
# Set the environment variables needed for our image.
|
||||
ENV CGO_ENABLED=1 GOOS=linux GOARCH=amd64
|
||||
|
||||
### Intermediate builder image for x86-64 to x86-64 native builds
|
||||
FROM builder-native-base AS builder-from-amd64-to-amd64
|
||||
ENV GOARCH=amd64
|
||||
RUN apk add libpcap-dev gcc g++ make
|
||||
|
||||
# Move to api working directory (/api-build).
|
||||
WORKDIR /app/api-build
|
||||
|
||||
### Intermediate builder image for AArch64 to AArch64 native builds
|
||||
FROM builder-native-base AS builder-from-arm64v8-to-arm64v8
|
||||
ENV GOARCH=arm64
|
||||
|
||||
|
||||
### Builder image for x86-64 to AArch64 cross-compilation
|
||||
FROM up9inc/linux-arm64-musl-go-libpcap AS builder-from-amd64-to-arm64v8
|
||||
ENV CGO_ENABLED=1 GOOS=linux
|
||||
ENV GOARCH=arm64 CGO_CFLAGS="-I/work/libpcap"
|
||||
|
||||
|
||||
### Final builder image where the build happens
|
||||
# Possible build strategies:
|
||||
# BUILDARCH=amd64 TARGETARCH=amd64
|
||||
# BUILDARCH=arm64v8 TARGETARCH=arm64v8
|
||||
# BUILDARCH=amd64 TARGETARCH=arm64v8
|
||||
ARG BUILDARCH=amd64
|
||||
ARG TARGETARCH=amd64
|
||||
FROM builder-from-${BUILDARCH}-to-${TARGETARCH} AS builder
|
||||
|
||||
# Move to agent working directory (/agent-build)
|
||||
WORKDIR /app/agent-build
|
||||
|
||||
COPY agent/go.mod agent/go.sum ./
|
||||
COPY api/go.mod api/go.sum ./
|
||||
COPY shared/go.mod shared/go.mod ../shared/
|
||||
COPY tap/go.mod tap/go.mod ../tap/
|
||||
COPY tap/api/go.mod ../tap/api/
|
||||
COPY tap/extensions/amqp/go.mod ../tap/extensions/amqp/
|
||||
COPY tap/extensions/http/go.mod ../tap/extensions/http/
|
||||
COPY tap/extensions/kafka/go.mod ../tap/extensions/kafka/
|
||||
COPY tap/extensions/redis/go.mod ../tap/extensions/redis/
|
||||
RUN go mod download
|
||||
# cheap trick to make the build faster (as long as go.mod did not change)
|
||||
RUN go list -f '{{.Path}}@{{.Version}}' -m all | sed 1d | grep -e 'go-cache' | xargs go get
|
||||
# cheap trick to make the build faster (as long as go.mod wasn't changed)
|
||||
RUN go list -f '{{.Path}}@{{.Version}}' -m all | sed 1d | grep -e 'go-cache' -e 'sqlite' | xargs go get
|
||||
|
||||
# Copy and build agent code
|
||||
# Copy and build api code
|
||||
COPY shared ../shared
|
||||
COPY tap ../tap
|
||||
COPY agent .
|
||||
|
||||
ARG COMMIT_HASH
|
||||
ARG GIT_BRANCH
|
||||
ARG BUILD_TIMESTAMP
|
||||
ARG VER=0.0
|
||||
|
||||
WORKDIR /app/agent-build
|
||||
|
||||
RUN go build -ldflags="-extldflags=-static -s -w \
|
||||
-X 'github.com/up9inc/mizu/agent/pkg/version.GitCommitHash=${COMMIT_HASH}' \
|
||||
-X 'github.com/up9inc/mizu/agent/pkg/version.Branch=${GIT_BRANCH}' \
|
||||
-X 'github.com/up9inc/mizu/agent/pkg/version.BuildTimestamp=${BUILD_TIMESTAMP}' \
|
||||
-X 'github.com/up9inc/mizu/agent/pkg/version.Ver=${VER}'" -o mizuagent .
|
||||
|
||||
# Download Basenine executable, verify the sha1sum
|
||||
ADD https://github.com/up9inc/basenine/releases/download/v0.6.5/basenine_linux_${GOARCH} ./basenine_linux_${GOARCH}
|
||||
ADD https://github.com/up9inc/basenine/releases/download/v0.6.5/basenine_linux_${GOARCH}.sha256 ./basenine_linux_${GOARCH}.sha256
|
||||
RUN shasum -a 256 -c basenine_linux_${GOARCH}.sha256
|
||||
RUN chmod +x ./basenine_linux_${GOARCH}
|
||||
RUN mv ./basenine_linux_${GOARCH} ./basenine
|
||||
COPY api .
|
||||
RUN go build -ldflags="-s -w" -o mizuagent .
|
||||
|
||||
|
||||
### The shipped image
|
||||
ARG TARGETARCH=amd64
|
||||
FROM ${TARGETARCH}/busybox:latest
|
||||
FROM alpine:3.13.5
|
||||
|
||||
# gin-gonic runs in debug mode without this
|
||||
ENV GIN_MODE=release
|
||||
|
||||
WORKDIR /app/data/
|
||||
RUN apk add bash libpcap-dev tcpdump
|
||||
WORKDIR /app
|
||||
|
||||
# Copy binary and config files from /build to root folder of scratch container.
|
||||
COPY --from=builder ["/app/agent-build/mizuagent", "."]
|
||||
COPY --from=builder ["/app/agent-build/basenine", "/usr/local/bin/basenine"]
|
||||
COPY --from=front-end ["/app/ui-build/build", "site"]
|
||||
COPY --from=builder ["/app/api-build/mizuagent", "."]
|
||||
COPY --from=site-build ["/app/ui-build/build", "site"]
|
||||
|
||||
COPY api/start.sh .
|
||||
|
||||
# this script runs both apiserver and passivetapper and exits if either of them exits, preventing a scenario where the container keeps running with only one of the two processes
|
||||
ENTRYPOINT ["/app/mizuagent"]
|
||||
ENTRYPOINT "/app/mizuagent"
|
||||
|
||||
83 Makefile
@@ -8,7 +8,7 @@ SHELL=/bin/bash
|
||||
# HELP
|
||||
# This will output the help for each task
|
||||
# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
|
||||
.PHONY: help ui agent agent-debug cli tap docker
|
||||
.PHONY: help ui api cli tap docker
|
||||
|
||||
help: ## This help.
|
||||
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
|
||||
@@ -19,42 +19,34 @@ help: ## This help.
|
||||
TS_SUFFIX="$(shell date '+%s')"
|
||||
GIT_BRANCH="$(shell git branch | grep \* | cut -d ' ' -f2 | tr '[:upper:]' '[:lower:]' | tr '/' '_')"
|
||||
BUCKET_PATH=static.up9.io/mizu/$(GIT_BRANCH)
|
||||
export VER?=0.0
|
||||
|
||||
ui: ## Build UI.
|
||||
ui: ## build UI
|
||||
@(cd ui; npm i ; npm run build; )
|
||||
@ls -l ui/build
|
||||
@ls -l ui/build
|
||||
|
||||
cli: ## Build CLI.
|
||||
cli: # build CLI
|
||||
@echo "building cli"; cd cli && $(MAKE) build
|
||||
|
||||
cli-debug: ## Build CLI for debug.
|
||||
@echo "building cli"; cd cli && $(MAKE) build-debug
|
||||
api: ## build API server
|
||||
@(echo "building API server .." )
|
||||
@(cd api; go build -o build/apiserver main.go)
|
||||
@ls -l api/build
|
||||
|
||||
agent: ## Build agent.
|
||||
@(echo "building mizu agent .." )
|
||||
@(cd agent; go build -o build/mizuagent main.go)
|
||||
@ls -l agent/build
|
||||
#tap: ## build tap binary
|
||||
# @(cd tap; go build -o build/tap ./src)
|
||||
# @ls -l tap/build
|
||||
|
||||
agent-debug: ## Build agent for debug.
|
||||
@(echo "building mizu agent for debug.." )
|
||||
@(cd agent; go build -gcflags="all=-N -l" -o build/mizuagent main.go)
|
||||
@ls -l agent/build
|
||||
docker: ## build Docker image
|
||||
@(echo "building docker image" )
|
||||
./build-push-featurebranch.sh
|
||||
|
||||
docker: ## Build and publish agent docker image.
|
||||
$(MAKE) push-docker
|
||||
push: push-docker push-cli ## build and publish Mizu docker image & CLI
|
||||
|
||||
agent-docker: ## Build agent docker image.
|
||||
@echo "Building agent docker image"
|
||||
@docker build -t up9inc/mizu:devlatest .
|
||||
|
||||
push: push-docker push-cli ## Build and publish agent docker image & CLI.
|
||||
|
||||
push-docker: ## Build and publish agent docker image.
|
||||
push-docker:
|
||||
@echo "publishing Docker image .. "
|
||||
devops/build-push-featurebranch.sh
|
||||
./build-push-featurebranch.sh
|
||||
|
||||
push-cli: ## Build and publish CLI.
|
||||
push-cli:
|
||||
@echo "publishing CLI .. "
|
||||
@cd cli; $(MAKE) build-all
|
||||
@echo "publishing file ${OUTPUT_FILE} .."
|
||||
@@ -62,43 +54,18 @@ push-cli: ## Build and publish CLI.
|
||||
gsutil cp -r ./cli/bin/* gs://${BUCKET_PATH}/
|
||||
gsutil setmeta -r -h "Cache-Control:public, max-age=30" gs://${BUCKET_PATH}/\*
|
||||
|
||||
clean: clean-ui clean-agent clean-cli clean-docker ## Clean all build artifacts.
|
||||
|
||||
clean-ui: ## Clean UI.
|
||||
clean: clean-ui clean-api clean-cli clean-docker ## Clean all build artifacts
|
||||
|
||||
clean-ui:
|
||||
@(rm -rf ui/build ; echo "UI cleanup done" )
|
||||
|
||||
clean-agent: ## Clean agent.
|
||||
@(rm -rf agent/build ; echo "agent cleanup done" )
|
||||
clean-api:
|
||||
@(rm -rf api/build ; echo "api cleanup done" )
|
||||
|
||||
clean-cli: ## Clean CLI.
|
||||
clean-cli:
|
||||
@(cd cli; make clean ; echo "CLI cleanup done" )
|
||||
|
||||
clean-docker: ## Run clean docker
|
||||
clean-docker:
|
||||
@(echo "DOCKER cleanup - NOT IMPLEMENTED YET " )
|
||||
|
||||
test-lint: ## Run lint on all modules
|
||||
cd agent && golangci-lint run
|
||||
cd shared && golangci-lint run
|
||||
cd tap && golangci-lint run
|
||||
cd cli && golangci-lint run
|
||||
cd acceptanceTests && golangci-lint run
|
||||
cd tap/api && golangci-lint run
|
||||
cd tap/extensions/ && for D in */; do cd $$D && golangci-lint run && cd ..; done
|
||||
|
||||
test-cli: ## Run cli tests
|
||||
@echo "running cli tests"; cd cli && $(MAKE) test
|
||||
|
||||
test-agent: ## Run agent tests
|
||||
@echo "running agent tests"; cd agent && $(MAKE) test
|
||||
|
||||
test-shared: ## Run shared tests
|
||||
@echo "running shared tests"; cd shared && $(MAKE) test
|
||||
|
||||
test-extensions: ## Run extensions tests
|
||||
@echo "running http tests"; cd tap/extensions/http && $(MAKE) test
|
||||
@echo "running redis tests"; cd tap/extensions/redis && $(MAKE) test
|
||||
@echo "running kafka tests"; cd tap/extensions/kafka && $(MAKE) test
|
||||
@echo "running amqp tests"; cd tap/extensions/amqp && $(MAKE) test
|
||||
|
||||
acceptance-test: ## Run acceptance tests
|
||||
@echo "running acceptance tests"; cd acceptanceTests && $(MAKE) test
|
||||
|
||||
50 README.md
@@ -1,42 +1,24 @@
|
||||

|
||||
# 水 mizu
|
||||
standalone web app traffic viewer for Kubernetes
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/up9inc/mizu/blob/main/LICENSE">
|
||||
<img alt="GitHub License" src="https://img.shields.io/github/license/up9inc/mizu?logo=GitHub&style=flat-square">
|
||||
</a>
|
||||
<a href="https://github.com/up9inc/mizu/releases/latest">
|
||||
<img alt="GitHub Latest Release" src="https://img.shields.io/github/v/release/up9inc/mizu?logo=GitHub&style=flat-square">
|
||||
</a>
|
||||
<a href="https://hub.docker.com/r/up9inc/mizu">
|
||||
<img alt="Docker pulls" src="https://img.shields.io/docker/pulls/up9inc/mizu?color=%23099cec&logo=Docker&style=flat-square">
|
||||
</a>
|
||||
<a href="https://hub.docker.com/r/up9inc/mizu">
|
||||
<img alt="Image size" src="https://img.shields.io/docker/image-size/up9inc/mizu/latest?logo=Docker&style=flat-square">
|
||||
</a>
|
||||
<a href="https://join.slack.com/t/up9/shared_invite/zt-tfjnduli-QzlR8VV4Z1w3YnPIAJfhlQ">
|
||||
<img alt="Slack" src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social">
|
||||
</a>
|
||||
</p>
|
||||
## Download
|
||||
|
||||
# The API Traffic Viewer for Kubernetes
|
||||
Download `mizu` for your platform and operating system
|
||||
|
||||
A simple-yet-powerful API traffic viewer for Kubernetes enabling you to view all API communication between microservices to help you debug and troubleshoot regressions.
|
||||
### Latest stable release
|
||||
|
||||
Think TCPDump and Wireshark re-invented for Kubernetes.
|
||||
* for macOS - `curl -o mizu https://github.com/up9inc/mizu/releases/download/latest/mizu_darwin_amd64 && chmod 755 mizu`
|
||||
* for Linux - `curl -o mizu https://github.com/up9inc/mizu/releases/download/latest/mizu_linux_amd64 && chmod 755 mizu`
|
||||
|
||||

|
||||
### Development (unstable) build
|
||||
Pick one from the [Releases](https://github.com/up9inc/mizu/releases) page.
|
||||
|
||||
## Quickstart and documentation
|
||||
## How to run
|
||||
|
||||
You can run Mizu on any Kubernetes cluster (version of 1.16.0 or higher) in a matter of seconds. See the [Mizu Getting Started Guide](https://getmizu.io/docs/) for how.
|
||||
1. Find the pod you'd like to tap in your Kubernetes cluster
|
||||
2. Run `mizu PODNAME` or `mizu REGEX`
|
||||
3. Open a browser at `http://localhost:8899` as instructed
|
||||
4. Watch the Web API traffic flowing (see the example below)
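A minimal session sketch; the pod-name regex is a placeholder, and the explicit `tap` subcommand form is the one used elsewhere in this changeset (issue template, acceptance tests):

```bash
mizu tap "front-end.*"        # tap every pod whose name matches the regex
# then open http://localhost:8899 in a browser, as instructed by the CLI output,
# and watch the captured API calls stream in
```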
|
||||
|
||||
For more comprehensive documentation, start with the [docs](https://getmizu.io/docs/mizu/mizu-cli).
|
||||
|
||||
## Working in this repo
|
||||
|
||||
We ❤️ pull requests! See [CONTRIBUTING.md](docs/CONTRIBUTING.md) for info on contributing changes. <br />
|
||||
In the wiki you can find an introduction to [mizu components](https://github.com/up9inc/mizu/wiki/Introduction-to-Mizu) and [development workflows](https://github.com/up9inc/mizu/wiki/Development-Workflows).
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
This project is for everyone. We ask that our users and contributors take a few minutes to review our [Code of Conduct](docs/CODE_OF_CONDUCT.md).
|
||||
## Examples
|
||||
TBD
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
|
||||
version: v1.14.0
|
||||
ignore:
|
||||
SNYK-GOLANG-GITHUBCOMPKGSFTP-569475:
|
||||
- '*':
|
||||
reason: None Given
|
||||
@@ -1,2 +0,0 @@
|
||||
test: ## Run acceptance tests.
|
||||
@go test ./... -timeout 1h -v
|
||||
@@ -1,284 +0,0 @@
|
||||
package acceptanceTests
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"testing"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type tapConfig struct {
|
||||
GuiPort uint16 `yaml:"gui-port"`
|
||||
}
|
||||
|
||||
type configStruct struct {
|
||||
Tap tapConfig `yaml:"tap"`
|
||||
}
|
||||
|
||||
func TestConfigRegenerate(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
configPath, configPathErr := GetConfigPath()
|
||||
if configPathErr != nil {
|
||||
t.Errorf("failed to get config path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
configCmdArgs := GetDefaultConfigCommandArgs()
|
||||
|
||||
configCmdArgs = append(configCmdArgs, "-r")
|
||||
|
||||
configCmd := exec.Command(cliPath, configCmdArgs...)
|
||||
t.Logf("running command: %v", configCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := os.Remove(configPath); err != nil {
|
||||
t.Logf("failed to delete config file, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := configCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start config command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := configCmd.Wait(); err != nil {
|
||||
t.Errorf("failed to wait config command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
_, readFileErr := ioutil.ReadFile(configPath)
|
||||
if readFileErr != nil {
|
||||
t.Errorf("failed to read config file, err: %v", readFileErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigGuiPort(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
tests := []uint16{8898}
|
||||
|
||||
for _, guiPort := range tests {
|
||||
t.Run(fmt.Sprintf("%d", guiPort), func(t *testing.T) {
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
configPath, configPathErr := GetConfigPath()
|
||||
if configPathErr != nil {
|
||||
t.Errorf("failed to get config path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
config := configStruct{}
|
||||
config.Tap.GuiPort = guiPort
|
||||
|
||||
configBytes, marshalErr := yaml.Marshal(config)
|
||||
if marshalErr != nil {
|
||||
t.Errorf("failed to marshal config, err: %v", marshalErr)
|
||||
return
|
||||
}
|
||||
|
||||
if writeErr := ioutil.WriteFile(configPath, configBytes, 0644); writeErr != nil {
|
||||
t.Errorf("failed to write config to file, err: %v", writeErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
|
||||
if err := os.Remove(configPath); err != nil {
|
||||
t.Logf("failed to delete config file, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(guiPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigSetGuiPort(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
ConfigFileGuiPort uint16
|
||||
SetGuiPort uint16
|
||||
}{
|
||||
{ConfigFileGuiPort: 8898, SetGuiPort: 8897},
|
||||
}
|
||||
|
||||
for _, guiPortStruct := range tests {
|
||||
t.Run(fmt.Sprintf("%d", guiPortStruct.SetGuiPort), func(t *testing.T) {
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
configPath, configPathErr := GetConfigPath()
|
||||
if configPathErr != nil {
|
||||
t.Errorf("failed to get config path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
config := configStruct{}
|
||||
config.Tap.GuiPort = guiPortStruct.ConfigFileGuiPort
|
||||
|
||||
configBytes, marshalErr := yaml.Marshal(config)
|
||||
if marshalErr != nil {
|
||||
t.Errorf("failed to marshal config, err: %v", marshalErr)
|
||||
return
|
||||
}
|
||||
|
||||
if writeErr := ioutil.WriteFile(configPath, configBytes, 0644); writeErr != nil {
|
||||
t.Errorf("failed to write config to file, err: %v", writeErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmdArgs = append(tapCmdArgs, "--set", fmt.Sprintf("tap.gui-port=%v", guiPortStruct.SetGuiPort))
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
|
||||
if err := os.Remove(configPath); err != nil {
|
||||
t.Logf("failed to delete config file, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(guiPortStruct.SetGuiPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConfigFlagGuiPort(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
ConfigFileGuiPort uint16
|
||||
FlagGuiPort uint16
|
||||
}{
|
||||
{ConfigFileGuiPort: 8898, FlagGuiPort: 8896},
|
||||
}
|
||||
|
||||
for _, guiPortStruct := range tests {
|
||||
t.Run(fmt.Sprintf("%d", guiPortStruct.FlagGuiPort), func(t *testing.T) {
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
configPath, configPathErr := GetConfigPath()
|
||||
if configPathErr != nil {
|
||||
t.Errorf("failed to get config path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
config := configStruct{}
|
||||
config.Tap.GuiPort = guiPortStruct.ConfigFileGuiPort
|
||||
|
||||
configBytes, marshalErr := yaml.Marshal(config)
|
||||
if marshalErr != nil {
|
||||
t.Errorf("failed to marshal config, err: %v", marshalErr)
|
||||
return
|
||||
}
|
||||
|
||||
if writeErr := ioutil.WriteFile(configPath, configBytes, 0644); writeErr != nil {
|
||||
t.Errorf("failed to write config to file, err: %v", writeErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmdArgs = append(tapCmdArgs, "-p", fmt.Sprintf("%v", guiPortStruct.FlagGuiPort))
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
|
||||
if err := os.Remove(configPath); err != nil {
|
||||
t.Logf("failed to delete config file, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(guiPortStruct.FlagGuiPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
}
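Taken together, the three tests pin down the GUI-port precedence: a value in the config file is used by default, `--set tap.gui-port=...` overrides it, and the dedicated `-p` flag overrides it as well. A hedged CLI sketch using the same port numbers as the tests (the config file location and the tap arguments are assumptions):

```bash
# config file written by the tests (path comes from GetConfigPath):
#   tap:
#     gui-port: 8898
mizu tap "<pod-regex>"                           # GUI served on 8898, taken from the config file
mizu tap "<pod-regex>" --set tap.gui-port=8897   # --set wins over the config file
mizu tap "<pod-regex>" -p 8896                   # the dedicated flag wins as well
```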
|
||||
@@ -1,37 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Create a user in Minikube cluster "minikube"
|
||||
# Create context for user
|
||||
# Usage:
|
||||
# ./create_user.sh <username>
|
||||
|
||||
set -e
|
||||
|
||||
NEW_USERNAME=$1
|
||||
CERT_DIR="${HOME}/certs"
|
||||
KEY_FILE="${CERT_DIR}/${NEW_USERNAME}.key"
|
||||
CRT_FILE="${CERT_DIR}/${NEW_USERNAME}.crt"
|
||||
MINIKUBE_KEY_FILE="${HOME}/.minikube/ca.key"
|
||||
MINIKUBE_CRT_FILE="${HOME}/.minikube/ca.crt"
|
||||
DAYS=1
|
||||
|
||||
echo "Creating user and context for username \"${NEW_USERNAME}\" in Minikube cluster"
|
||||
|
||||
if ! command -v openssl &> /dev/null
|
||||
then
|
||||
echo "Installing openssl"
|
||||
sudo apt-get update
|
||||
sudo apt-get install openssl
|
||||
fi
|
||||
|
||||
echo "Creating certificate for user \"${NEW_USERNAME}\""
|
||||
mkdir -p ${CERT_DIR}
|
||||
echo "Generating key \"${KEY_FILE}\""
|
||||
openssl genrsa -out "${KEY_FILE}" 2048
|
||||
echo "Generating crt \"${CRT_FILE}\""
|
||||
openssl req -new -key "${KEY_FILE}" -out "${CRT_FILE}" -subj "/CN=${NEW_USERNAME}/O=group1"
|
||||
openssl x509 -req -in "${CRT_FILE}" -CA "${MINIKUBE_CRT_FILE}" -CAkey "${MINIKUBE_KEY_FILE}" -CAcreateserial -out "${CRT_FILE}" -days $DAYS
|
||||
|
||||
echo "Creating context for user \"${NEW_USERNAME}\""
|
||||
kubectl config set-credentials "${NEW_USERNAME}" --client-certificate="${CRT_FILE}" --client-key="${KEY_FILE}"
|
||||
kubectl config set-context "${NEW_USERNAME}" --cluster=minikube --user="${NEW_USERNAME}"
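Usage, per the header comment; the username is a placeholder, and switching to the freshly created context afterwards mirrors what the acceptance-test workflow does:

```bash
./acceptanceTests/create_user.sh some-test-user
kubectl config use-context some-test-user
```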
|
||||
@@ -1,34 +0,0 @@
|
||||
{
|
||||
"watchForFileChanges":false,
|
||||
"viewportWidth": 1920,
|
||||
"viewportHeight": 1080,
|
||||
"video": false,
|
||||
"screenshotOnRunFailure": false,
|
||||
"defaultCommandTimeout": 6000,
|
||||
"testFiles": [
|
||||
"tests/GuiPort.js",
|
||||
"tests/MultipleNamespaces.js",
|
||||
"tests/Redact.js",
|
||||
"tests/NoRedact.js",
|
||||
"tests/Regex.js",
|
||||
"tests/RegexMasking.js",
|
||||
"tests/IgnoredUserAgents.js",
|
||||
"tests/UiTest.js",
|
||||
"tests/Redis.js",
|
||||
"tests/Rabbit.js",
|
||||
"tests/serviceMapFunction.js"
|
||||
],
|
||||
|
||||
"env": {
|
||||
"testUrl": "http://localhost:8899/",
|
||||
"redactHeaderContent": "User-Header[REDACTED]",
|
||||
"redactBodyContent": "{ \"User\": \"[REDACTED]\" }",
|
||||
"regexMaskingBodyContent": "[REDACTED]",
|
||||
"greenFilterColor": "rgb(210, 250, 210)",
|
||||
"redFilterColor": "rgb(250, 214, 220)",
|
||||
"bodyJsonClass": ".hljs",
|
||||
"mizuWidth": 1920,
|
||||
"normalMizuHeight": 1080,
|
||||
"hugeMizuHeight": 3500
|
||||
}
|
||||
}
|
||||
@@ -1,45 +0,0 @@
|
||||
const columns = {podName : 1, namespace : 2, tapping : 3};
|
||||
|
||||
function getDomPathInStatusBar(line, column) {
|
||||
return `[data-cy="expandedStatusBar"] > :nth-child(2) > > :nth-child(2) > :nth-child(${line}) > :nth-child(${column})`;
|
||||
}
|
||||
|
||||
export function checkLine(line, expectedValues) {
|
||||
cy.get(getDomPathInStatusBar(line, columns.podName)).invoke('text').then(podValue => {
|
||||
const podName = getOnlyPodName(podValue);
|
||||
expect(podName).to.equal(expectedValues.podName);
|
||||
|
||||
cy.get(getDomPathInStatusBar(line, columns.namespace)).invoke('text').then(namespaceValue => {
|
||||
expect(namespaceValue).to.equal(expectedValues.namespace);
|
||||
cy.get(getDomPathInStatusBar(line, columns.tapping)).children().should('have.attr', 'src').and("match", /success.*\.svg/);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function findLineAndCheck(expectedValues) {
|
||||
cy.get('[data-cy="expandedStatusBar"] > :nth-child(2) > > :nth-child(2) > > :nth-child(1)').then(pods => {
|
||||
cy.get('[data-cy="expandedStatusBar"] > :nth-child(2) > > :nth-child(2) > > :nth-child(2)').then(namespaces => {
|
||||
// organizing namespaces array
|
||||
const podObjectsArray = Object.values(pods ?? {});
|
||||
const namespacesObjectsArray = Object.values(namespaces ?? {});
|
||||
let lineNumber = -1;
|
||||
namespacesObjectsArray.forEach((namespaceObj, index) => {
|
||||
const currentLine = index + 1;
|
||||
lineNumber = (namespaceObj.getAttribute && namespaceObj.innerHTML === expectedValues.namespace && (getOnlyPodName(podObjectsArray[index].innerHTML)) === expectedValues.podName) ? currentLine : lineNumber;
|
||||
});
|
||||
lineNumber === -1 ? throwError(expectedValues) : checkLine(lineNumber, expectedValues);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function throwError(expectedValues) {
|
||||
throw new Error(`The pod named ${expectedValues.podName} doesn't match any namespace named ${expectedValues.namespace}`);
|
||||
}
|
||||
|
||||
export function getExpectedDetailsDict(podName, namespace) {
|
||||
return {podName : podName, namespace : namespace};
|
||||
}
|
||||
|
||||
function getOnlyPodName(podElementFullStr) {
|
||||
return podElementFullStr.substring(0, podElementFullStr.indexOf('-'));
|
||||
}
|
||||
@@ -1,189 +0,0 @@
|
||||
export const valueTabs = {
|
||||
response: 'RESPONSE',
|
||||
request: 'REQUEST',
|
||||
none: null
|
||||
}
|
||||
|
||||
const maxEntriesInDom = 13;
|
||||
|
||||
export function isValueExistsInElement(shouldInclude, content, domPathToContainer){
|
||||
it(`should ${shouldInclude ? '' : 'not'} include '${content}'`, function () {
|
||||
cy.get(domPathToContainer).then(htmlText => {
|
||||
const allTextString = htmlText.text();
|
||||
if (allTextString.includes(content) !== shouldInclude)
|
||||
throw new Error(`One of the containers part contains ${content}`)
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function resizeToHugeMizu() {
|
||||
cy.viewport(Cypress.env('mizuWidth'), Cypress.env('hugeMizuHeight'));
|
||||
}
|
||||
|
||||
export function resizeToNormalMizu() {
|
||||
cy.viewport(Cypress.env('mizuWidth'), Cypress.env('normalMizuHeight'));
|
||||
}
|
||||
|
||||
export function verifyMinimumEntries() {
|
||||
const entriesSent = Cypress.env('entriesCount');
|
||||
const minimumEntries = Math.round((0.75 * entriesSent));
|
||||
|
||||
it(`Making sure that mizu shows at least ${minimumEntries} entries`, function () {
|
||||
cy.get('#total-entries').then(number => {
|
||||
const getNum = () => {
|
||||
return parseInt(number.text());
|
||||
};
|
||||
|
||||
cy.wrap({num: getNum}).invoke('num').should('be.gt', minimumEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function leftTextCheck(entryNum, path, expectedText) {
|
||||
cy.get(`#list #entry-${entryNum} ${path}`).invoke('text').should('eq', expectedText);
|
||||
}
|
||||
|
||||
export function leftOnHoverCheck(entryNum, path, filterName) {
|
||||
cy.get(`#list #entry-${entryNum} ${path}`).trigger('mouseover');
|
||||
cy.get(`#list #entry-${entryNum} [data-cy='QueryableTooltip']`).invoke('text').should('match', new RegExp(filterName));
|
||||
}
|
||||
|
||||
export function rightTextCheck(path, expectedText) {
|
||||
cy.get(`#rightSideContainer ${path}`).should('have.text', expectedText);
|
||||
}
|
||||
|
||||
export function rightOnHoverCheck(path, expectedText) {
|
||||
cy.get(`#rightSideContainer ${path}`).trigger('mouseover');
|
||||
cy.get(`#rightSideContainer [data-cy='QueryableTooltip']`).invoke('text').should('match', new RegExp(expectedText));
|
||||
}
|
||||
|
||||
export function checkThatAllEntriesShown() {
|
||||
cy.get('#entries-length').then(number => {
|
||||
if (number.text() === '1')
|
||||
cy.get('[title="Fetch old records"]').click();
|
||||
});
|
||||
}
|
||||
|
||||
export function checkFilterByMethod(funcDict) {
|
||||
const {protocol, method, methodQuery, summary, summaryQuery} = funcDict;
|
||||
const summaryDict = getSummaryDict(summary, summaryQuery);
|
||||
const methodDict = getMethodDict(method, methodQuery);
|
||||
const protocolDict = getProtocolDict(protocol.name, protocol.text);
|
||||
|
||||
it(`Testing the method: ${method}`, function () {
|
||||
// applying filter
|
||||
cy.get('.w-tc-editor-text').clear().type(methodQuery);
|
||||
cy.get('[type="submit"]').click();
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
|
||||
|
||||
cy.get('#entries-length').then(number => {
|
||||
// if the entries list isn't expanded it expands here
|
||||
if (number.text() === '0' || number.text() === '1') // todo change when TRA-4262 is fixed
|
||||
cy.get('[title="Fetch old records"]').click();
|
||||
|
||||
cy.get('#entries-length').should('not.have.text', '0').and('not.have.text', '1').then(() => {
|
||||
cy.get(`#list [id]`).then(elements => {
|
||||
const listElmWithIdAttr = Object.values(elements);
|
||||
let doneCheckOnFirst = false;
|
||||
|
||||
cy.get('#entries-length').invoke('text').then(len => {
|
||||
resizeIfNeeded(len);
|
||||
listElmWithIdAttr.forEach(entry => {
|
||||
if (entry?.id && entry.id.match(RegExp(/entry-(\d{2}|\d{1})$/gm))) {
|
||||
const entryNum = getEntryNumById(entry.id);
|
||||
|
||||
leftTextCheck(entryNum, methodDict.pathLeft, methodDict.expectedText);
|
||||
leftTextCheck(entryNum, protocolDict.pathLeft, protocolDict.expectedTextLeft);
|
||||
if (summaryDict)
|
||||
leftTextCheck(entryNum, summaryDict.pathLeft, summaryDict.expectedText);
|
||||
|
||||
if (!doneCheckOnFirst) {
|
||||
deepCheck(funcDict, protocolDict, methodDict, entry);
|
||||
doneCheckOnFirst = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
resizeIfNeeded(len);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function resizeIfNeeded(entriesLen) {
|
||||
if (entriesLen > maxEntriesInDom){
|
||||
Cypress.config().viewportHeight === Cypress.env('normalMizuHeight') ?
|
||||
resizeToHugeMizu() : resizeToNormalMizu()
|
||||
}
|
||||
}
|
||||
|
||||
function deepCheck(generalDict, protocolDict, methodDict, entry) {
|
||||
const entryNum = getEntryNumById(entry.id);
|
||||
const {summary, value} = generalDict;
|
||||
const summaryDict = getSummaryDict(summary);
|
||||
|
||||
leftOnHoverCheck(entryNum, methodDict.pathLeft, methodDict.expectedOnHover);
|
||||
leftOnHoverCheck(entryNum, protocolDict.pathLeft, protocolDict.expectedOnHover);
|
||||
if (summaryDict)
|
||||
leftOnHoverCheck(entryNum, summaryDict.pathLeft, summaryDict.expectedOnHover);
|
||||
|
||||
cy.get(`#${entry.id}`).click();
|
||||
|
||||
rightTextCheck(methodDict.pathRight, methodDict.expectedText);
|
||||
rightTextCheck(protocolDict.pathRight, protocolDict.expectedTextRight);
|
||||
if (summaryDict)
|
||||
rightTextCheck(summaryDict.pathRight, summaryDict.expectedText);
|
||||
|
||||
rightOnHoverCheck(methodDict.pathRight, methodDict.expectedOnHover);
|
||||
rightOnHoverCheck(protocolDict.pathRight, protocolDict.expectedOnHover);
|
||||
if (summaryDict)
|
||||
rightOnHoverCheck(summaryDict.pathRight, summaryDict.expectedOnHover);
|
||||
|
||||
if (value) {
|
||||
if (value.tab === valueTabs.response)
|
||||
// temporary fix, change to some "data-cy" attribute,
|
||||
// this will fix the issue that happens because we have "response:" in the header of the right side
|
||||
cy.get('#rightSideContainer > :nth-child(3)').contains('Response').click();
|
||||
cy.get(Cypress.env('bodyJsonClass')).then(text => {
|
||||
expect(text.text()).to.match(value.regex)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getSummaryDict(value, query) {
|
||||
if (value) {
|
||||
return {
|
||||
pathLeft: '> :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
|
||||
pathRight: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
|
||||
expectedText: value,
|
||||
expectedOnHover: query
|
||||
};
|
||||
}
|
||||
else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function getMethodDict(value, query) {
|
||||
return {
|
||||
pathLeft: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
|
||||
pathRight: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
|
||||
expectedText: value,
|
||||
expectedOnHover: query
|
||||
};
|
||||
}
|
||||
|
||||
function getProtocolDict(protocol, protocolText) {
|
||||
return {
|
||||
pathLeft: '> :nth-child(1) > :nth-child(1)',
|
||||
pathRight: '> :nth-child(1) > :nth-child(1) > :nth-child(1) > :nth-child(1)',
|
||||
expectedTextLeft: protocol.toUpperCase(),
|
||||
expectedTextRight: protocolText,
|
||||
expectedOnHover: protocol.toLowerCase()
|
||||
};
|
||||
}
|
||||
|
||||
function getEntryNumById (id) {
|
||||
return parseInt(id.split('-')[1]);
|
||||
}
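The helpers above are consumed by the spec files that follow. As a rough illustration only (the spec name, entry index and expected texts here are invented, while the import path, selectors and env vars are the ones used throughout this file), a minimal spec built on these helpers could look like:

// Hypothetical usage sketch -- not part of the original suite.
import {checkThatAllEntriesShown, leftTextCheck, rightTextCheck} from "../testHelpers/TrafficHelper";

it('opening mizu', function () {
    cy.visit(Cypress.env('testUrl'));
});

it('spot-checking the first entry', function () {
    checkThatAllEntriesShown();
    // left-side protocol cell of entry 0, using the same path as getProtocolDict
    leftTextCheck(0, '> :nth-child(1) > :nth-child(1)', 'HTTP');
    cy.get('#list #entry-0').click();
    // right-side protocol title, again reusing the getProtocolDict path
    rightTextCheck('> :nth-child(1) > :nth-child(1) > :nth-child(1) > :nth-child(1)', 'Hypertext Transfer Protocol -- HTTP/1.1');
});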
|
||||
@@ -1,13 +0,0 @@
|
||||
import {findLineAndCheck, getExpectedDetailsDict} from "../testHelpers/StatusBarHelper";
|
||||
|
||||
it('check', function () {
|
||||
const podName = Cypress.env('name'), namespace = Cypress.env('namespace');
|
||||
const port = Cypress.env('port');
|
||||
cy.intercept('GET', `http://localhost:${port}/status/tap`).as('statusTap');
|
||||
|
||||
cy.visit(`http://localhost:${port}`);
|
||||
cy.wait('@statusTap').its('response.statusCode').should('match', /^2\d{2}/);
|
||||
|
||||
cy.get(`[data-cy="expandedStatusBar"]`).trigger('mouseover',{force: true});
|
||||
findLineAndCheck(getExpectedDetailsDict(podName, namespace));
|
||||
});
|
||||
@@ -1,29 +0,0 @@
|
||||
import {
|
||||
checkThatAllEntriesShown,
|
||||
isValueExistsInElement,
|
||||
resizeToHugeMizu,
|
||||
} from "../testHelpers/TrafficHelper";
|
||||
|
||||
it('Loading Mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
checkEntries();
|
||||
|
||||
function checkEntries() {
|
||||
it('checking all entries', function () {
|
||||
checkThatAllEntriesShown();
|
||||
resizeToHugeMizu();
|
||||
|
||||
cy.get('#total-entries').then(number => {
|
||||
const numOfEntries = parseInt(number.text());
|
||||
[...Array(numOfEntries).keys()].map(checkEntry);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function checkEntry(entryIndex) {
|
||||
cy.get(`#entry-${entryIndex}`).click();
|
||||
cy.get('#tbody-Headers').should('be.visible');
|
||||
isValueExistsInElement(false, 'Ignored-User-Agent', '#tbody-Headers');
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
import {findLineAndCheck, getExpectedDetailsDict} from '../testHelpers/StatusBarHelper';
|
||||
|
||||
it('opening', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
cy.get(`[data-cy="podsCountText"]`).trigger('mouseover');
|
||||
});
|
||||
|
||||
[1, 2, 3].map(doItFunc);
|
||||
|
||||
function doItFunc(number) {
|
||||
const podName = Cypress.env(`name${number}`);
|
||||
const namespace = Cypress.env(`namespace${number}`);
|
||||
|
||||
it(`verifying the pod (${podName}, ${namespace})`, function () {
|
||||
findLineAndCheck(getExpectedDetailsDict(podName, namespace));
|
||||
});
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import {isValueExistsInElement} from '../testHelpers/TrafficHelper';
|
||||
|
||||
it('Loading Mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
isValueExistsInElement(false, Cypress.env('redactHeaderContent'), '#tbody-Headers');
|
||||
isValueExistsInElement(false, Cypress.env('redactBodyContent'), Cypress.env('bodyJsonClass'));
|
||||
@@ -1,61 +0,0 @@
|
||||
import {checkFilterByMethod, valueTabs,} from "../testHelpers/TrafficHelper";
|
||||
|
||||
it('opening mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
const rabbitProtocolDetails = {name: 'AMQP', text: 'Advanced Message Queuing Protocol 0-9-1'};
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: rabbitProtocolDetails,
|
||||
method: 'exchange declare',
|
||||
methodQuery: 'request.method == "exchange declare"',
|
||||
summary: 'exchange',
|
||||
summaryQuery: 'request.exchange == "exchange"',
|
||||
value: null
|
||||
});
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: rabbitProtocolDetails,
|
||||
method: 'queue declare',
|
||||
methodQuery: 'request.method == "queue declare"',
|
||||
summary: 'queue',
|
||||
summaryQuery: 'request.queue == "queue"',
|
||||
value: null
|
||||
});
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: rabbitProtocolDetails,
|
||||
method: 'queue bind',
|
||||
methodQuery: 'request.method == "queue bind"',
|
||||
summary: 'queue',
|
||||
summaryQuery: 'request.queue == "queue"',
|
||||
value: null
|
||||
});
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: rabbitProtocolDetails,
|
||||
method: 'basic publish',
|
||||
methodQuery: 'request.method == "basic publish"',
|
||||
summary: 'exchange',
|
||||
summaryQuery: 'request.exchange == "exchange"',
|
||||
value: {tab: valueTabs.request, regex: /^message$/mg}
|
||||
});
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: rabbitProtocolDetails,
|
||||
method: 'basic consume',
|
||||
methodQuery: 'request.method == "basic consume"',
|
||||
summary: 'queue',
|
||||
summaryQuery: 'request.queue == "queue"',
|
||||
value: null
|
||||
});
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: rabbitProtocolDetails,
|
||||
method: 'basic deliver',
|
||||
methodQuery: 'request.method == "basic deliver"',
|
||||
summary: 'exchange',
|
||||
summaryQuery: 'request.queue == "exchange"',
|
||||
value: {tab: valueTabs.request, regex: /^message$/mg}
|
||||
});
|
||||
@@ -1,8 +0,0 @@
|
||||
import {isValueExistsInElement} from '../testHelpers/TrafficHelper';
|
||||
|
||||
it('Loading Mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
isValueExistsInElement(true, Cypress.env('redactHeaderContent'), '#tbody-Headers');
|
||||
isValueExistsInElement(true, Cypress.env('redactBodyContent'), Cypress.env('bodyJsonClass'));
|
||||
@@ -1,52 +0,0 @@
|
||||
import {checkFilterByMethod, valueTabs,} from "../testHelpers/TrafficHelper";
|
||||
|
||||
it('opening mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
const redisProtocolDetails = {name: 'redis', text: 'Redis Serialization Protocol'};
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: redisProtocolDetails,
|
||||
method: 'PING',
|
||||
methodQuery: 'request.command == "PING"',
|
||||
summary: null,
|
||||
summaryQuery: '',
|
||||
value: null
|
||||
})
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: redisProtocolDetails,
|
||||
method: 'SET',
|
||||
methodQuery: 'request.command == "SET"',
|
||||
summary: 'key',
|
||||
summaryQuery: 'request.key == "key"',
|
||||
value: {tab: valueTabs.request, regex: /^\[value, keepttl]$/mg}
|
||||
})
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: redisProtocolDetails,
|
||||
method: 'EXISTS',
|
||||
methodQuery: 'request.command == "EXISTS"',
|
||||
summary: 'key',
|
||||
summaryQuery: 'request.key == "key"',
|
||||
value: {tab: valueTabs.response, regex: /^1$/mg}
|
||||
})
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: redisProtocolDetails,
|
||||
method: 'GET',
|
||||
methodQuery: 'request.command == "GET"',
|
||||
summary: 'key',
|
||||
summaryQuery: 'request.key == "key"',
|
||||
value: {tab: valueTabs.response, regex: /^value$/mg}
|
||||
})
|
||||
|
||||
checkFilterByMethod({
|
||||
protocol: redisProtocolDetails,
|
||||
method: 'DEL',
|
||||
methodQuery: 'request.command == "DEL"',
|
||||
summary: 'key',
|
||||
summaryQuery: 'request.key == "key"',
|
||||
value: {tab: valueTabs.response, regex: /^1$|^0$/mg}
|
||||
})
|
||||
@@ -1,11 +0,0 @@
|
||||
import {getExpectedDetailsDict, checkLine} from '../testHelpers/StatusBarHelper';
|
||||
|
||||
|
||||
it('opening', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
cy.get(`[data-cy="podsCountText"]`).trigger('mouseover');
|
||||
|
||||
cy.get('[data-cy="expandedStatusBar"] > :nth-child(2) > > :nth-child(2) >').should('have.length', 1); // one line
|
||||
|
||||
checkLine(1, getExpectedDetailsDict(Cypress.env('name'), Cypress.env('namespace')));
|
||||
});
|
||||
@@ -1,7 +0,0 @@
|
||||
import {isValueExistsInElement} from "../testHelpers/TrafficHelper";
|
||||
|
||||
it('Loading Mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
isValueExistsInElement(true, Cypress.env('regexMaskingBodyContent'), Cypress.env('bodyJsonClass'));
|
||||
@@ -1,366 +0,0 @@
|
||||
import {findLineAndCheck, getExpectedDetailsDict} from "../testHelpers/StatusBarHelper";
|
||||
import {
|
||||
leftOnHoverCheck,
|
||||
leftTextCheck,
|
||||
resizeToHugeMizu,
|
||||
resizeToNormalMizu,
|
||||
rightOnHoverCheck,
|
||||
rightTextCheck,
|
||||
verifyMinimumEntries
|
||||
} from "../testHelpers/TrafficHelper";
|
||||
|
||||
const refreshWaitTimeout = 10000;
|
||||
|
||||
|
||||
const fullParam = Cypress.env('arrayDict'); // "Name:fooNamespace:barName:foo1Namespace:bar1"
|
||||
const podsArray = fullParam.split('Name:').slice(1); // ["fooNamespace:bar", "foo1Namespace:bar1"]
|
||||
podsArray.forEach((podStr, index) => {
|
||||
const podAndNamespaceArr = podStr.split('Namespace:'); // [foo, bar] / [foo1, bar1]
|
||||
podsArray[index] = getExpectedDetailsDict(podAndNamespaceArr[0], podAndNamespaceArr[1]);
|
||||
});
|
||||
|
||||
it('opening mizu', function () {
|
||||
cy.visit(Cypress.env('testUrl'));
|
||||
});
|
||||
|
||||
verifyMinimumEntries();
|
||||
|
||||
it('top bar check', function () {
|
||||
cy.get(`[data-cy="podsCountText"]`).trigger('mouseover');
|
||||
podsArray.map(findLineAndCheck);
|
||||
cy.reload();
|
||||
});
|
||||
|
||||
it('filtering guide check', function () {
|
||||
cy.reload();
|
||||
cy.get('[title="Open Filtering Guide (Cheatsheet)"]').click();
|
||||
cy.get('#modal-modal-title').should('be.visible');
|
||||
cy.get('[lang="en"]').click(0, 0);
|
||||
cy.get('#modal-modal-title').should('not.exist');
|
||||
});
|
||||
|
||||
it('right side sanity test', function () {
|
||||
cy.get('#entryDetailedTitleElapsedTime').then(timeInMs => {
|
||||
const time = timeInMs.text();
|
||||
if (time.startsWith('-')) {
|
||||
throw new Error(`The time in the top line cannot be negative ${time}`);
|
||||
}
|
||||
});
|
||||
|
||||
// temporary fix, change to some "data-cy" attribute,
|
||||
// this will fix the issue that happens because we have "response:" in the header of the right side
|
||||
cy.get('#rightSideContainer > :nth-child(3)').contains('Response').click();
|
||||
|
||||
cy.get('#rightSideContainer [title="Status Code"]').then(status => {
|
||||
const statusCode = status.text();
|
||||
cy.contains('Status').parent().next().then(statusInDetails => {
|
||||
const statusCodeInDetails = statusInDetails.text();
|
||||
|
||||
expect(statusCode).to.equal(statusCodeInDetails, 'The status code in the top line should match the status code in details');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
checkIllegalFilter('invalid filter');
|
||||
|
||||
checkFilter({
|
||||
name: 'http',
|
||||
leftSidePath: '> :nth-child(1) > :nth-child(1)',
|
||||
leftSideExpectedText: 'HTTP',
|
||||
rightSidePath: '[title=HTTP]',
|
||||
rightSideExpectedText: 'Hypertext Transfer Protocol -- HTTP/1.1',
|
||||
applyByEnter: true
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'response.status == 200',
|
||||
leftSidePath: '[title="Status Code"]',
|
||||
leftSideExpectedText: '200',
|
||||
rightSidePath: '> :nth-child(2) [title="Status Code"]',
|
||||
rightSideExpectedText: '200',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
if (Cypress.env('shouldCheckSrcAndDest')) {
|
||||
serviceMapCheck();
|
||||
|
||||
checkFilter({
|
||||
name: 'src.name == ""',
|
||||
leftSidePath: '[title="Source Name"]',
|
||||
leftSideExpectedText: '[Unresolved]',
|
||||
rightSidePath: '> :nth-child(2) [title="Source Name"]',
|
||||
rightSideExpectedText: '[Unresolved]',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: `dst.name == "httpbin.mizu-tests"`,
|
||||
leftSidePath: '> :nth-child(3) > :nth-child(2) > :nth-child(3) > :nth-child(2)',
|
||||
leftSideExpectedText: 'httpbin.mizu-tests',
|
||||
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(2) > :nth-child(3) > :nth-child(2)',
|
||||
rightSideExpectedText: 'httpbin.mizu-tests',
|
||||
applyByEnter: false
|
||||
});
|
||||
}
|
||||
|
||||
checkFilter({
|
||||
name: 'request.method == "GET"',
|
||||
leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
|
||||
leftSideExpectedText: 'GET',
|
||||
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
|
||||
rightSideExpectedText: 'GET',
|
||||
applyByEnter: true
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'request.path == "/get"',
|
||||
leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
|
||||
leftSideExpectedText: '/get',
|
||||
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
|
||||
rightSideExpectedText: '/get',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilter({
|
||||
name: 'src.ip == "127.0.0.1"',
|
||||
leftSidePath: '[title="Source IP"]',
|
||||
leftSideExpectedText: '127.0.0.1',
|
||||
rightSidePath: '> :nth-child(2) [title="Source IP"]',
|
||||
rightSideExpectedText: '127.0.0.1',
|
||||
applyByEnter: false
|
||||
});
|
||||
|
||||
checkFilterNoResults('request.method == "POST"');
|
||||
|
||||
function checkFilterNoResults(filterName) {
|
||||
it(`checking the filter: ${filterName}. Expecting no results`, function () {
|
||||
cy.get('#total-entries').then(number => {
|
||||
const totalEntries = number.text();
|
||||
|
||||
// applying the filter
|
||||
cy.get('.w-tc-editor-text').type(filterName);
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
|
||||
cy.get('[type="submit"]').click();
|
||||
|
||||
// waiting for the entries number to load
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
|
||||
// the DOM should show 0 entries
|
||||
cy.get('#entries-length').should('have.text', '0');
|
||||
|
||||
// going through every potential entry and verifying that it doesn't exist
|
||||
[...Array(parseInt(totalEntries)).keys()].map(shouldNotExist);
|
||||
|
||||
cy.get('[title="Fetch old records"]').click();
|
||||
cy.get('#noMoreDataTop', {timeout: refreshWaitTimeout}).should('be.visible');
|
||||
cy.get('#entries-length').should('have.text', '0'); // after loading all entries there should still be 0 entries
|
||||
|
||||
// reloading then waiting for the entries number to load
|
||||
cy.reload();
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function shouldNotExist(entryNum) {
|
||||
cy.get(`#entry-${entryNum}`).should('not.exist');
|
||||
}
|
||||
|
||||
function checkIllegalFilter(illegalFilterName) {
|
||||
it(`should show red search bar with the input: ${illegalFilterName}`, function () {
|
||||
cy.reload();
|
||||
cy.get('#total-entries').then(number => {
|
||||
const totalEntries = number.text();
|
||||
|
||||
cy.get('.w-tc-editor-text').type(illegalFilterName);
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('redFilterColor'));
|
||||
cy.get('[type="submit"]').click();
|
||||
|
||||
cy.get('[role="alert"]').should('be.visible');
|
||||
cy.get('.w-tc-editor-text').clear();
|
||||
|
||||
// reloading then waiting for the entries number to load
|
||||
cy.reload();
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
function checkFilter(filterDetails){
|
||||
const {name, leftSidePath, rightSidePath, rightSideExpectedText, leftSideExpectedText, applyByEnter} = filterDetails;
|
||||
const entriesForDeeperCheck = 5;
|
||||
|
||||
it(`checking the filter: ${name}`, function () {
|
||||
cy.get('#total-entries').should('not.have.text', '0').then(number => {
|
||||
const totalEntries = number.text();
|
||||
|
||||
// checks the hover on the last entry (the only one in DOM at the beginning)
|
||||
leftOnHoverCheck(totalEntries - 1, leftSidePath, name);
|
||||
|
||||
cy.get('.w-tc-editor-text').clear();
|
||||
// applying the filter with alt+enter or with the button
|
||||
cy.get('.w-tc-editor-text').type(`${name}${applyByEnter ? '{alt+enter}' : ''}`);
|
||||
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
|
||||
if (!applyByEnter)
|
||||
cy.get('[type="submit"]').click();
|
||||
|
||||
// only one entry in the DOM after filtering; running all the checks on it
|
||||
leftTextCheck(totalEntries - 1, leftSidePath, leftSideExpectedText);
|
||||
leftOnHoverCheck(totalEntries - 1, leftSidePath, name);
|
||||
rightTextCheck(rightSidePath, rightSideExpectedText);
|
||||
rightOnHoverCheck(rightSidePath, name);
|
||||
checkRightSideResponseBody();
|
||||
|
||||
cy.get('[title="Fetch old records"]').click();
|
||||
resizeToHugeMizu();
|
||||
|
||||
// waiting for the entries number to load
|
||||
cy.get('#entries-length', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
|
||||
// checking only 'leftTextCheck' on all entries because the rest of the checks require more time
|
||||
[...Array(parseInt(totalEntries)).keys()].forEach(entryNum => {
|
||||
leftTextCheck(entryNum, leftSidePath, leftSideExpectedText);
|
||||
});
|
||||
|
||||
// running the other 3 checks on the first X entries (longer time for each check)
deeperCheck(leftSidePath, rightSidePath, name, leftSideExpectedText, rightSideExpectedText, entriesForDeeperCheck);
|
||||
|
||||
// reloading then waiting for the entries number to load
|
||||
resizeToNormalMizu();
|
||||
cy.reload();
|
||||
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function deeperCheck(leftSidePath, rightSidePath, filterName, leftSideExpectedText, rightSideExpectedText, entriesNumToCheck) {
|
||||
[...Array(entriesNumToCheck).keys()].forEach(entryNum => {
|
||||
leftOnHoverCheck(entryNum, leftSidePath, filterName);
|
||||
|
||||
cy.get(`#list #entry-${entryNum}`).click();
|
||||
rightTextCheck(rightSidePath, rightSideExpectedText);
|
||||
rightOnHoverCheck(rightSidePath, filterName);
|
||||
});
|
||||
}
|
||||
|
||||
function checkRightSideResponseBody() {
|
||||
// temporary fix, change to some "data-cy" attribute,
|
||||
// this will fix the issue that happens because we have "response:" in the header of the right side
|
||||
cy.get('#rightSideContainer > :nth-child(3)').contains('Response').click();
|
||||
clickCheckbox('Decode Base64');
|
||||
|
||||
cy.get(`${Cypress.env('bodyJsonClass')}`).then(value => {
|
||||
const encodedBody = value.text();
|
||||
const decodedBody = atob(encodedBody);
|
||||
const responseBody = JSON.parse(decodedBody);
|
||||
|
||||
const expectedJsonBody = {
|
||||
args: RegExp({}),
|
||||
url: RegExp('http://.*/get'),
|
||||
headers: {
|
||||
"User-Agent": RegExp('[REDACTED]'),
|
||||
"Accept-Encoding": RegExp('gzip'),
|
||||
"X-Forwarded-Uri": RegExp('/api/v1/namespaces/.*/services/.*/proxy/get')
|
||||
}
|
||||
};
|
||||
|
||||
expect(responseBody.args).to.match(expectedJsonBody.args);
expect(responseBody.url).to.match(expectedJsonBody.url);
expect(responseBody.headers['User-Agent']).to.match(expectedJsonBody.headers['User-Agent']);
expect(responseBody.headers['Accept-Encoding']).to.match(expectedJsonBody.headers['Accept-Encoding']);
expect(responseBody.headers['X-Forwarded-Uri']).to.match(expectedJsonBody.headers['X-Forwarded-Uri']);
|
||||
|
||||
cy.get(`${Cypress.env('bodyJsonClass')}`).should('have.text', encodedBody);
|
||||
clickCheckbox('Decode Base64');
|
||||
|
||||
cy.get(`${Cypress.env('bodyJsonClass')} > `).its('length').should('be.gt', 1).then(linesNum => {
|
||||
cy.get(`${Cypress.env('bodyJsonClass')} > >`).its('length').should('be.gt', linesNum).then(jsonItemsNum => {
|
||||
checkPrettyAndLineNums(jsonItemsNum, decodedBody);
|
||||
|
||||
clickCheckbox('Line numbers');
|
||||
checkPrettyOrNothing(jsonItemsNum, decodedBody);
|
||||
|
||||
clickCheckbox('Pretty');
|
||||
checkPrettyOrNothing(jsonItemsNum, decodedBody);
|
||||
|
||||
clickCheckbox('Line numbers');
|
||||
checkOnlyLineNumbers(jsonItemsNum, decodedBody);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
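For reference, a decoded body that would satisfy the regex assertions in checkRightSideResponseBody above might look roughly like the following (all concrete values are illustrative; the real body is whatever httpbin returns through the Kubernetes proxy):

// Illustrative only -- the shape and header names are taken from the assertions above,
// the concrete values are invented.
const exampleDecodedBody = {
    args: {},
    url: "http://httpbin.mizu-tests/get",
    headers: {
        "User-Agent": "[REDACTED]",
        "Accept-Encoding": "gzip",
        "X-Forwarded-Uri": "/api/v1/namespaces/mizu-tests/services/httpbin/proxy/get"
    }
};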
|
||||
|
||||
function clickCheckbox(type) {
|
||||
cy.contains(`${type}`).prev().children().click();
|
||||
}
|
||||
|
||||
function checkPrettyAndLineNums(jsonItemsLen, decodedBody) {
|
||||
decodedBody = decodedBody.replaceAll(' ', '');
|
||||
cy.get(`${Cypress.env('bodyJsonClass')} >`).then(elements => {
|
||||
const lines = Object.values(elements);
|
||||
lines.forEach((line, index) => {
|
||||
if (line.getAttribute) {
|
||||
const cleanLine = getCleanLine(line);
|
||||
const currentLineFromDecodedText = decodedBody.substring(0, cleanLine.length);
|
||||
|
||||
expect(cleanLine).to.equal(currentLineFromDecodedText, `expected the text in line number ${index + 1} to match the text generated by the base64 decoding`);
|
||||
|
||||
decodedBody = decodedBody.substring(cleanLine.length);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function getCleanLine(lineElement) {
|
||||
return (lineElement.innerText.substring(0, lineElement.innerText.length - 1)).replaceAll(' ', '');
|
||||
}
|
||||
|
||||
function checkPrettyOrNothing(jsonItems, decodedBody) {
|
||||
cy.get(`${Cypress.env('bodyJsonClass')} > `).should('have.length', jsonItems).then(text => {
|
||||
const json = text.text();
|
||||
expect(json).to.equal(decodedBody);
|
||||
});
|
||||
}
|
||||
|
||||
function checkOnlyLineNumbers(jsonItems, decodedText) {
|
||||
cy.get(`${Cypress.env('bodyJsonClass')} >`).should('have.length', 1).and('have.text', decodedText);
|
||||
cy.get(`${Cypress.env('bodyJsonClass')} > >`).should('have.length', jsonItems)
|
||||
}
|
||||
|
||||
function serviceMapCheck() {
|
||||
it('service map test', function () {
|
||||
cy.intercept(`${Cypress.env('testUrl')}/servicemap/get`).as('serviceMapRequest');
|
||||
cy.get('#total-entries').should('not.have.text', '0').then(() => {
|
||||
cy.get('#total-entries').invoke('text').then(entriesNum => {
|
||||
cy.get('[alt="service-map"]').click();
|
||||
cy.wait('@serviceMapRequest').then(({response}) => {
|
||||
const body = response.body;
|
||||
const nodeParams = {
|
||||
destination: 'httpbin.mizu-tests',
|
||||
source: '127.0.0.1'
|
||||
};
|
||||
serviceMapAPICheck(body, parseInt(entriesNum), nodeParams);
|
||||
cy.reload();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function serviceMapAPICheck(body, entriesNum, nodeParams) {
|
||||
const {nodes, edges} = body;
|
||||
|
||||
expect(nodes.length).to.equal(Object.keys(nodeParams).length, `Expected nodes count`);
|
||||
|
||||
expect(edges.some(edge => edge.source.name === nodeParams.source)).to.be.true;
|
||||
expect(edges.some(edge => edge.destination.name === nodeParams.destination)).to.be.true;
|
||||
|
||||
let count = 0;
|
||||
edges.forEach(edge => {
|
||||
count += edge.count;
|
||||
if (edge.destination.name === nodeParams.destination) {
|
||||
expect(edge.source.name).to.equal(nodeParams.source);
|
||||
}
|
||||
});
|
||||
|
||||
expect(count).to.equal(entriesNum);
|
||||
}
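As a reading aid, serviceMapAPICheck above expects the /servicemap/get response to be shaped roughly as follows (the field names come from the destructuring and assertions above; the node object shape and the counts are assumptions for illustration):

// Illustrative payload -- only nodes.length, edge.source.name, edge.destination.name
// and the sum of edge.count are actually asserted by serviceMapAPICheck.
const exampleServiceMapBody = {
    nodes: [
        {name: "127.0.0.1"},          // node shape is assumed; only the node count is checked
        {name: "httpbin.mizu-tests"}
    ],
    edges: [
        {
            source: {name: "127.0.0.1"},
            destination: {name: "httpbin.mizu-tests"},
            count: 50 // edge counts must sum to the total entries number shown in the UI
        }
    ]
};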
|
||||
@@ -1,240 +0,0 @@
|
||||
package acceptanceTests
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"github.com/go-redis/redis/v8"
|
||||
amqp "github.com/rabbitmq/amqp091-go"
|
||||
"os/exec"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestRedis(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
kubernetesProvider, err := NewKubernetesProvider()
|
||||
if err != nil {
|
||||
t.Errorf("failed to create k8s provider, err %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
redisExternalIp, err := kubernetesProvider.GetServiceExternalIp(ctx, DefaultNamespaceName, "redis")
|
||||
if err != nil {
|
||||
t.Errorf("failed to get redis external ip, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
rdb := redis.NewClient(&redis.Options{
|
||||
Addr: fmt.Sprintf("%v:6379", redisExternalIp),
|
||||
})
|
||||
|
||||
for i := 0; i < DefaultEntriesCount/5; i++ {
|
||||
requestErr := rdb.Ping(ctx).Err()
|
||||
if requestErr != nil {
|
||||
t.Errorf("failed to send redis request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for i := 0; i < DefaultEntriesCount/5; i++ {
|
||||
requestErr := rdb.Set(ctx, "key", "value", -1).Err()
|
||||
if requestErr != nil {
|
||||
t.Errorf("failed to send redis request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for i := 0; i < DefaultEntriesCount/5; i++ {
|
||||
requestErr := rdb.Exists(ctx, "key").Err()
|
||||
if requestErr != nil {
|
||||
t.Errorf("failed to send redis request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for i := 0; i < DefaultEntriesCount/5; i++ {
|
||||
requestErr := rdb.Get(ctx, "key").Err()
|
||||
if requestErr != nil {
|
||||
t.Errorf("failed to send redis request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for i := 0; i < DefaultEntriesCount/5; i++ {
|
||||
requestErr := rdb.Del(ctx, "key").Err()
|
||||
if requestErr != nil {
|
||||
t.Errorf("failed to send redis request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
RunCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/Redis.js\"")
|
||||
}
|
||||
|
||||
func TestAmqp(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
kubernetesProvider, err := NewKubernetesProvider()
|
||||
if err != nil {
|
||||
t.Errorf("failed to create k8s provider, err %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
rabbitmqExternalIp, err := kubernetesProvider.GetServiceExternalIp(ctx, DefaultNamespaceName, "rabbitmq")
|
||||
if err != nil {
|
||||
t.Errorf("failed to get RabbitMQ external ip, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
conn, err := amqp.Dial(fmt.Sprintf("amqp://guest:guest@%v:5672/", rabbitmqExternalIp))
|
||||
if err != nil {
|
||||
t.Errorf("failed to connect to RabbitMQ, err: %v", err)
|
||||
return
|
||||
}
|
||||
defer conn.Close()
|
||||
|
||||
// Temporary fix for missing amqp entries
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
for i := 0; i < DefaultEntriesCount/5; i++ {
|
||||
ch, err := conn.Channel()
|
||||
if err != nil {
|
||||
t.Errorf("failed to open a channel, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
exchangeName := "exchange"
|
||||
err = ch.ExchangeDeclare(exchangeName, "direct", true, false, false, false, nil)
|
||||
if err != nil {
|
||||
t.Errorf("failed to declare an exchange, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
q, err := ch.QueueDeclare("queue", true, false, false, false, nil)
|
||||
if err != nil {
|
||||
t.Errorf("failed to declare a queue, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
routingKey := "routing_key"
|
||||
err = ch.QueueBind(q.Name, routingKey, exchangeName, false, nil)
|
||||
if err != nil {
|
||||
t.Errorf("failed to bind the queue, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
err = ch.Publish(exchangeName, routingKey, false, false,
|
||||
amqp.Publishing{
|
||||
DeliveryMode: amqp.Persistent,
|
||||
ContentType: "text/plain",
|
||||
Body: []byte("message"),
|
||||
})
|
||||
if err != nil {
|
||||
t.Errorf("failed to publish a message, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
msgChan, err := ch.Consume(q.Name, "Consumer", true, false, false, false, nil)
|
||||
if err != nil {
|
||||
t.Errorf("failed to create a consumer, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
select {
|
||||
case <-msgChan:
|
||||
break
|
||||
case <-time.After(3 * time.Second):
|
||||
t.Errorf("failed to consume a message on time")
|
||||
return
|
||||
}
|
||||
|
||||
err = ch.ExchangeDelete(exchangeName, false, false)
|
||||
if err != nil {
|
||||
t.Errorf("failed to delete the exchange, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
_, err = ch.QueueDelete(q.Name, false, false, false)
|
||||
if err != nil {
|
||||
t.Errorf("failed to delete the queue, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
ch.Close()
|
||||
}
|
||||
|
||||
RunCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/Rabbit.js\"")
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
module github.com/up9inc/mizu/acceptanceTests
|
||||
|
||||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/go-redis/redis/v8 v8.11.4
|
||||
github.com/rabbitmq/amqp091-go v1.3.0
|
||||
github.com/up9inc/mizu/shared v0.0.0
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
|
||||
k8s.io/apimachinery v0.23.3
|
||||
k8s.io/client-go v0.23.3
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/cespare/xxhash/v2 v2.1.2 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.1 // indirect
|
||||
github.com/go-logr/logr v1.2.2 // indirect
|
||||
github.com/gogo/protobuf v1.3.2 // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/go-cmp v0.5.7 // indirect
|
||||
github.com/google/gofuzz v1.2.0 // indirect
|
||||
github.com/googleapis/gnostic v0.5.5 // indirect
|
||||
github.com/imdario/mergo v0.3.12 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect
|
||||
golang.org/x/sys v0.0.0-20220207234003-57398862261d // indirect
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
|
||||
golang.org/x/text v0.3.7 // indirect
|
||||
golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
gopkg.in/inf.v0 v0.9.1 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
k8s.io/api v0.23.3 // indirect
|
||||
k8s.io/klog/v2 v2.40.1 // indirect
|
||||
k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf // indirect
|
||||
k8s.io/utils v0.0.0-20220127004650-9b3446523e65 // indirect
|
||||
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 // indirect
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.2.1 // indirect
|
||||
sigs.k8s.io/yaml v1.3.0 // indirect
|
||||
)
|
||||
|
||||
replace github.com/up9inc/mizu/shared v0.0.0 => ../shared
|
||||
|
||||
replace github.com/up9inc/mizu/tap/api v0.0.0 => ../tap/api
|
||||
File diff suppressed because it is too large
@@ -1,206 +0,0 @@
|
||||
package acceptanceTests
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"os/exec"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLogs(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
logsCmdArgs := GetDefaultLogsCommandArgs()
|
||||
|
||||
logsCmd := exec.Command(cliPath, logsCmdArgs...)
|
||||
t.Logf("running command: %v", logsCmd.String())
|
||||
|
||||
if err := logsCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start logs command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := logsCmd.Wait(); err != nil {
|
||||
t.Errorf("failed to wait logs command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
logsPath, logsPathErr := GetLogsPath()
|
||||
if logsPathErr != nil {
|
||||
t.Errorf("failed to get logs path, err: %v", logsPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
zipReader, zipError := zip.OpenReader(logsPath)
|
||||
if zipError != nil {
|
||||
t.Errorf("failed to get zip reader, err: %v", zipError)
|
||||
return
|
||||
}
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := zipReader.Close(); err != nil {
|
||||
t.Logf("failed to close zip reader, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
var logsFileNames []string
|
||||
for _, file := range zipReader.File {
|
||||
logsFileNames = append(logsFileNames, file.Name)
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu.mizu-api-server.mizu-api-server.log") {
|
||||
t.Errorf("api server logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu.mizu-api-server.basenine.log") {
|
||||
t.Errorf("basenine logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu_cli.log") {
|
||||
t.Errorf("cli logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu_events.log") {
|
||||
t.Errorf("events logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !ContainsPartOfValue(logsFileNames, "mizu.mizu-tapper-daemon-set") {
|
||||
t.Errorf("tapper logs not found")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func TestLogsPath(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
logsCmdArgs := GetDefaultLogsCommandArgs()
|
||||
|
||||
logsPath := "../logs.zip"
|
||||
logsCmdArgs = append(logsCmdArgs, "-f", logsPath)
|
||||
|
||||
logsCmd := exec.Command(cliPath, logsCmdArgs...)
|
||||
t.Logf("running command: %v", logsCmd.String())
|
||||
|
||||
if err := logsCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start logs command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := logsCmd.Wait(); err != nil {
|
||||
t.Errorf("failed to wait logs command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
zipReader, zipError := zip.OpenReader(logsPath)
|
||||
if zipError != nil {
|
||||
t.Errorf("failed to get zip reader, err: %v", zipError)
|
||||
return
|
||||
}
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := zipReader.Close(); err != nil {
|
||||
t.Logf("failed to close zip reader, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
var logsFileNames []string
|
||||
for _, file := range zipReader.File {
|
||||
logsFileNames = append(logsFileNames, file.Name)
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu.mizu-api-server.mizu-api-server.log") {
|
||||
t.Errorf("api server logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu.mizu-api-server.basenine.log") {
|
||||
t.Errorf("basenine logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu_cli.log") {
|
||||
t.Errorf("cli logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !Contains(logsFileNames, "mizu_events.log") {
|
||||
t.Errorf("events logs not found")
|
||||
return
|
||||
}
|
||||
|
||||
if !ContainsPartOfValue(logsFileNames, "mizu.mizu-tapper-daemon-set") {
|
||||
t.Errorf("tapper logs not found")
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -1,70 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
PREFIX=$HOME/local/bin
|
||||
VERSION=v1.22.0
|
||||
|
||||
echo "Attempting to install minikube and assorted tools to $PREFIX"
|
||||
|
||||
if ! [ -x "$(command -v kubectl)" ]; then
|
||||
echo "Installing kubectl version $VERSION"
|
||||
curl -LO "https://storage.googleapis.com/kubernetes-release/release/$VERSION/bin/linux/amd64/kubectl"
|
||||
chmod +x kubectl
|
||||
mv kubectl "$PREFIX"
|
||||
else
|
||||
echo "kubetcl is already installed"
|
||||
fi
|
||||
|
||||
if ! [ -x "$(command -v minikube)" ]; then
|
||||
echo "Installing minikube version $VERSION"
|
||||
curl -Lo minikube https://storage.googleapis.com/minikube/releases/$VERSION/minikube-linux-amd64
|
||||
chmod +x minikube
|
||||
mv minikube "$PREFIX"
|
||||
else
|
||||
echo "minikube is already installed"
|
||||
fi
|
||||
|
||||
echo "Starting minikube..."
|
||||
minikube start
|
||||
|
||||
echo "Creating mizu tests namespaces"
|
||||
kubectl create namespace mizu-tests
|
||||
kubectl create namespace mizu-tests2
|
||||
|
||||
echo "Creating httpbin deployments"
|
||||
kubectl create deployment httpbin --image=kennethreitz/httpbin -n mizu-tests
|
||||
kubectl create deployment httpbin2 --image=kennethreitz/httpbin -n mizu-tests
|
||||
|
||||
kubectl create deployment httpbin --image=kennethreitz/httpbin -n mizu-tests2
|
||||
|
||||
echo "Creating redis deployment"
|
||||
kubectl create deployment redis --image=redis -n mizu-tests
|
||||
|
||||
echo "Creating rabbitmq deployment"
|
||||
kubectl create deployment rabbitmq --image=rabbitmq -n mizu-tests
|
||||
|
||||
echo "Creating httpbin services"
|
||||
kubectl expose deployment httpbin --type=NodePort --port=80 -n mizu-tests
|
||||
kubectl expose deployment httpbin2 --type=NodePort --port=80 -n mizu-tests
|
||||
|
||||
kubectl expose deployment httpbin --type=NodePort --port=80 -n mizu-tests2
|
||||
|
||||
echo "Creating redis service"
|
||||
kubectl expose deployment redis --type=LoadBalancer --port=6379 -n mizu-tests
|
||||
|
||||
echo "Creating rabbitmq service"
|
||||
kubectl expose deployment rabbitmq --type=LoadBalancer --port=5672 -n mizu-tests
|
||||
|
||||
echo "Starting proxy"
|
||||
kubectl proxy --port=8080 &
|
||||
|
||||
echo "Setting minikube docker env"
|
||||
eval $(minikube docker-env)
|
||||
|
||||
echo "Build agent image"
|
||||
docker build -t mizu/ci:0.0 .
|
||||
|
||||
echo "Build cli"
|
||||
cd cli && make build GIT_BRANCH=ci SUFFIX=ci
|
||||
|
||||
echo "Starting tunnel"
|
||||
minikube tunnel &
|
||||
@@ -1,692 +0,0 @@
|
||||
package acceptanceTests
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestTap(t *testing.T) {
|
||||
basicTapTest(t, false)
|
||||
}
|
||||
|
||||
func basicTapTest(t *testing.T, shouldCheckSrcAndDest bool, extraArgs ...string) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
tests := []int{50}
|
||||
|
||||
for _, entriesCount := range tests {
|
||||
t.Run(fmt.Sprintf("%d", entriesCount), func(t *testing.T) {
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmdArgs = append(tapCmdArgs, extraArgs...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)
|
||||
for i := 0; i < entriesCount; i++ {
|
||||
if _, requestErr := ExecuteHttpGetRequest(fmt.Sprintf("%v/get", proxyUrl)); requestErr != nil {
|
||||
t.Errorf("failed to send proxy request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
expectedPods := []PodDescriptor{
|
||||
{Name: "httpbin", Namespace: "mizu-tests"},
|
||||
{Name: "httpbin2", Namespace: "mizu-tests"},
|
||||
}
|
||||
|
||||
var expectedPodsStr string
|
||||
for i := 0; i < len(expectedPods); i++ {
|
||||
expectedPodsStr += fmt.Sprintf("Name:%vNamespace:%v", expectedPods[i].Name, expectedPods[i].Namespace)
|
||||
}
|
||||
|
||||
RunCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/UiTest.js\" --env entriesCount=%d,arrayDict=%v,shouldCheckSrcAndDest=%v",
|
||||
entriesCount, expectedPodsStr, shouldCheckSrcAndDest))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTapGuiPort(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
tests := []uint16{8898}
|
||||
|
||||
for _, guiPort := range tests {
|
||||
t.Run(fmt.Sprintf("%d", guiPort), func(t *testing.T) {
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmdArgs = append(tapCmdArgs, "-p", fmt.Sprintf("%d", guiPort))
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(guiPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)
|
||||
for i := 0; i < DefaultEntriesCount; i++ {
|
||||
if _, requestErr := ExecuteHttpGetRequest(fmt.Sprintf("%v/get", proxyUrl)); requestErr != nil {
|
||||
t.Errorf("failed to send proxy request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
RunCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/GuiPort.js\" --env name=%v,namespace=%v,port=%d",
|
||||
"httpbin", "mizu-tests", guiPort))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTapAllNamespaces(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
expectedPods := []PodDescriptor{
|
||||
{Name: "httpbin", Namespace: "mizu-tests"},
|
||||
{Name: "httpbin2", Namespace: "mizu-tests"},
|
||||
{Name: "httpbin", Namespace: "mizu-tests2"},
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
tapCmdArgs = append(tapCmdArgs, "-A")
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
RunCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/MultipleNamespaces.js\" --env name1=%v,name2=%v,name3=%v,namespace1=%v,namespace2=%v,namespace3=%v",
|
||||
expectedPods[0].Name, expectedPods[1].Name, expectedPods[2].Name, expectedPods[0].Namespace, expectedPods[1].Namespace, expectedPods[2].Namespace))
|
||||
}
|
||||
|
||||
func TestTapMultipleNamespaces(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
expectedPods := []PodDescriptor{
|
||||
{Name: "httpbin", Namespace: "mizu-tests"},
|
||||
{Name: "httpbin2", Namespace: "mizu-tests"},
|
||||
{Name: "httpbin", Namespace: "mizu-tests2"},
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
var namespacesCmd []string
|
||||
for _, expectedPod := range expectedPods {
|
||||
namespacesCmd = append(namespacesCmd, "-n", expectedPod.Namespace)
|
||||
}
|
||||
tapCmdArgs = append(tapCmdArgs, namespacesCmd...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
RunCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/MultipleNamespaces.js\" --env name1=%v,name2=%v,name3=%v,namespace1=%v,namespace2=%v,namespace3=%v",
|
||||
expectedPods[0].Name, expectedPods[1].Name, expectedPods[2].Name, expectedPods[0].Namespace, expectedPods[1].Namespace, expectedPods[2].Namespace))
|
||||
}
|
||||
|
||||
func TestTapRegex(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
regexPodName := "httpbin2"
|
||||
expectedPods := []PodDescriptor{
|
||||
{Name: regexPodName, Namespace: "mizu-tests"},
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgsWithRegex(regexPodName)
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
RunCypressTests(t, fmt.Sprintf("npx cypress run --spec \"cypress/integration/tests/Regex.js\" --env name=%v,namespace=%v",
|
||||
expectedPods[0].Name, expectedPods[0].Namespace))
|
||||
}
|
||||
|
||||
func TestTapDryRun(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmdArgs = append(tapCmdArgs, "--dry-run")
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
resultChannel := make(chan string, 1)
|
||||
|
||||
go func() {
|
||||
if err := tapCmd.Wait(); err != nil {
|
||||
resultChannel <- "fail"
|
||||
return
|
||||
}
|
||||
resultChannel <- "success"
|
||||
}()
|
||||
|
||||
go func() {
|
||||
time.Sleep(ShortRetriesCount * time.Second)
|
||||
resultChannel <- "fail"
|
||||
}()
|
||||
|
||||
testResult := <-resultChannel
|
||||
if testResult != "success" {
|
||||
t.Errorf("unexpected result - dry run cmd not done")
|
||||
}
|
||||
}
|
||||
|
||||
func TestTapRedact(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("ignored acceptance test")
|
||||
}
|
||||
|
||||
cliPath, cliPathErr := GetCliPath()
|
||||
if cliPathErr != nil {
|
||||
t.Errorf("failed to get cli path, err: %v", cliPathErr)
|
||||
return
|
||||
}
|
||||
|
||||
tapCmdArgs := GetDefaultTapCommandArgs()
|
||||
|
||||
tapNamespace := GetDefaultTapNamespace()
|
||||
tapCmdArgs = append(tapCmdArgs, tapNamespace...)
|
||||
|
||||
tapCmd := exec.Command(cliPath, tapCmdArgs...)
|
||||
t.Logf("running command: %v", tapCmd.String())
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := CleanupCommand(tapCmd); err != nil {
|
||||
t.Logf("failed to cleanup tap command, err: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
if err := tapCmd.Start(); err != nil {
|
||||
t.Errorf("failed to start tap command, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
apiServerUrl := GetApiServerUrl(DefaultApiServerPort)
|
||||
|
||||
if err := WaitTapPodsReady(apiServerUrl); err != nil {
|
||||
t.Errorf("failed to start tap pods on time, err: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)
|
||||
requestHeaders := map[string]string{"User-Header": "Mizu"}
|
||||
requestBody := map[string]string{"User": "Mizu"}
|
||||
for i := 0; i < DefaultEntriesCount; i++ {
|
||||
if _, requestErr := ExecuteHttpPostRequestWithHeaders(fmt.Sprintf("%v/post", proxyUrl), requestHeaders, requestBody); requestErr != nil {
|
||||
t.Errorf("failed to send proxy request, err: %v", requestErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
RunCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/Redact.js\"")
|
||||
}
|
||||
|
||||
func TestTapNoRedact(t *testing.T) {
	if testing.Short() {
		t.Skip("ignored acceptance test")
	}

	cliPath, cliPathErr := GetCliPath()
	if cliPathErr != nil {
		t.Errorf("failed to get cli path, err: %v", cliPathErr)
		return
	}

	tapCmdArgs := GetDefaultTapCommandArgs()

	tapNamespace := GetDefaultTapNamespace()
	tapCmdArgs = append(tapCmdArgs, tapNamespace...)

	tapCmdArgs = append(tapCmdArgs, "--no-redact")

	tapCmd := exec.Command(cliPath, tapCmdArgs...)
	t.Logf("running command: %v", tapCmd.String())

	t.Cleanup(func() {
		if err := CleanupCommand(tapCmd); err != nil {
			t.Logf("failed to cleanup tap command, err: %v", err)
		}
	})

	if err := tapCmd.Start(); err != nil {
		t.Errorf("failed to start tap command, err: %v", err)
		return
	}

	apiServerUrl := GetApiServerUrl(DefaultApiServerPort)

	if err := WaitTapPodsReady(apiServerUrl); err != nil {
		t.Errorf("failed to start tap pods on time, err: %v", err)
		return
	}

	proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)
	requestHeaders := map[string]string{"User-Header": "Mizu"}
	requestBody := map[string]string{"User": "Mizu"}
	for i := 0; i < DefaultEntriesCount; i++ {
		if _, requestErr := ExecuteHttpPostRequestWithHeaders(fmt.Sprintf("%v/post", proxyUrl), requestHeaders, requestBody); requestErr != nil {
			t.Errorf("failed to send proxy request, err: %v", requestErr)
			return
		}
	}

	RunCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/NoRedact.js\"")
}

func TestTapRegexMasking(t *testing.T) {
	if testing.Short() {
		t.Skip("ignored acceptance test")
	}

	cliPath, cliPathErr := GetCliPath()
	if cliPathErr != nil {
		t.Errorf("failed to get cli path, err: %v", cliPathErr)
		return
	}

	tapCmdArgs := GetDefaultTapCommandArgs()

	tapNamespace := GetDefaultTapNamespace()
	tapCmdArgs = append(tapCmdArgs, tapNamespace...)

	tapCmdArgs = append(tapCmdArgs, "-r", "Mizu")

	tapCmd := exec.Command(cliPath, tapCmdArgs...)
	t.Logf("running command: %v", tapCmd.String())

	t.Cleanup(func() {
		if err := CleanupCommand(tapCmd); err != nil {
			t.Logf("failed to cleanup tap command, err: %v", err)
		}
	})

	if err := tapCmd.Start(); err != nil {
		t.Errorf("failed to start tap command, err: %v", err)
		return
	}

	apiServerUrl := GetApiServerUrl(DefaultApiServerPort)

	if err := WaitTapPodsReady(apiServerUrl); err != nil {
		t.Errorf("failed to start tap pods on time, err: %v", err)
		return
	}

	proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)
	for i := 0; i < DefaultEntriesCount; i++ {
		response, requestErr := http.Post(fmt.Sprintf("%v/post", proxyUrl), "text/plain", bytes.NewBufferString("Mizu"))
		if _, requestErr = ExecuteHttpRequest(response, requestErr); requestErr != nil {
			t.Errorf("failed to send proxy request, err: %v", requestErr)
			return
		}
	}

	RunCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/RegexMasking.js\"")
}

func TestTapIgnoredUserAgents(t *testing.T) {
	if testing.Short() {
		t.Skip("ignored acceptance test")
	}

	cliPath, cliPathErr := GetCliPath()
	if cliPathErr != nil {
		t.Errorf("failed to get cli path, err: %v", cliPathErr)
		return
	}

	tapCmdArgs := GetDefaultTapCommandArgs()

	tapNamespace := GetDefaultTapNamespace()
	tapCmdArgs = append(tapCmdArgs, tapNamespace...)

	ignoredUserAgentValue := "ignore"
	tapCmdArgs = append(tapCmdArgs, "--set", fmt.Sprintf("tap.ignored-user-agents=%v", ignoredUserAgentValue))

	tapCmd := exec.Command(cliPath, tapCmdArgs...)
	t.Logf("running command: %v", tapCmd.String())

	t.Cleanup(func() {
		if err := CleanupCommand(tapCmd); err != nil {
			t.Logf("failed to cleanup tap command, err: %v", err)
		}
	})

	if err := tapCmd.Start(); err != nil {
		t.Errorf("failed to start tap command, err: %v", err)
		return
	}

	apiServerUrl := GetApiServerUrl(DefaultApiServerPort)

	if err := WaitTapPodsReady(apiServerUrl); err != nil {
		t.Errorf("failed to start tap pods on time, err: %v", err)
		return
	}

	proxyUrl := GetProxyUrl(DefaultNamespaceName, DefaultServiceName)

	ignoredUserAgentCustomHeader := "Ignored-User-Agent"
	headers := map[string]string{"User-Agent": ignoredUserAgentValue, ignoredUserAgentCustomHeader: ""}
	for i := 0; i < DefaultEntriesCount; i++ {
		if _, requestErr := ExecuteHttpGetRequestWithHeaders(fmt.Sprintf("%v/get", proxyUrl), headers); requestErr != nil {
			t.Errorf("failed to send proxy request, err: %v", requestErr)
			return
		}
	}

	for i := 0; i < DefaultEntriesCount; i++ {
		if _, requestErr := ExecuteHttpGetRequest(fmt.Sprintf("%v/get", proxyUrl)); requestErr != nil {
			t.Errorf("failed to send proxy request, err: %v", requestErr)
			return
		}
	}

	RunCypressTests(t, "npx cypress run --spec \"cypress/integration/tests/IgnoredUserAgents.js\"")
}

func TestTapDumpLogs(t *testing.T) {
	if testing.Short() {
		t.Skip("ignored acceptance test")
	}

	cliPath, cliPathErr := GetCliPath()
	if cliPathErr != nil {
		t.Errorf("failed to get cli path, err: %v", cliPathErr)
		return
	}

	tapCmdArgs := GetDefaultTapCommandArgs()

	tapNamespace := GetDefaultTapNamespace()
	tapCmdArgs = append(tapCmdArgs, tapNamespace...)

	tapCmdArgs = append(tapCmdArgs, "--set", "dump-logs=true")

	tapCmd := exec.Command(cliPath, tapCmdArgs...)
	t.Logf("running command: %v", tapCmd.String())

	if err := tapCmd.Start(); err != nil {
		t.Errorf("failed to start tap command, err: %v", err)
		return
	}

	apiServerUrl := GetApiServerUrl(DefaultApiServerPort)

	if err := WaitTapPodsReady(apiServerUrl); err != nil {
		t.Errorf("failed to start tap pods on time, err: %v", err)
		return
	}

	if err := CleanupCommand(tapCmd); err != nil {
		t.Errorf("failed to cleanup tap command, err: %v", err)
		return
	}

	mizuFolderPath, mizuPathErr := GetMizuFolderPath()
	if mizuPathErr != nil {
		t.Errorf("failed to get mizu folder path, err: %v", mizuPathErr)
		return
	}

	files, readErr := ioutil.ReadDir(mizuFolderPath)
	if readErr != nil {
		t.Errorf("failed to read mizu folder files, err: %v", readErr)
		return
	}

	var dumpLogsPath string
	for _, file := range files {
		fileName := file.Name()
		if strings.Contains(fileName, "mizu_logs") {
			dumpLogsPath = path.Join(mizuFolderPath, fileName)
			break
		}
	}

	if dumpLogsPath == "" {
		t.Errorf("dump logs file not found")
		return
	}

	zipReader, zipError := zip.OpenReader(dumpLogsPath)
	if zipError != nil {
		t.Errorf("failed to get zip reader, err: %v", zipError)
		return
	}

	t.Cleanup(func() {
		if err := zipReader.Close(); err != nil {
			t.Logf("failed to close zip reader, err: %v", err)
		}
	})

	var logsFileNames []string
	for _, file := range zipReader.File {
		logsFileNames = append(logsFileNames, file.Name)
	}

	if !Contains(logsFileNames, "mizu.mizu-api-server.mizu-api-server.log") {
		t.Errorf("api server logs not found")
		return
	}

	if !Contains(logsFileNames, "mizu.mizu-api-server.basenine.log") {
		t.Errorf("basenine logs not found")
		return
	}

	if !Contains(logsFileNames, "mizu_cli.log") {
		t.Errorf("cli logs not found")
		return
	}

	if !Contains(logsFileNames, "mizu_events.log") {
		t.Errorf("events logs not found")
		return
	}

	if !ContainsPartOfValue(logsFileNames, "mizu.mizu-tapper-daemon-set") {
		t.Errorf("tapper logs not found")
		return
	}
}

func TestIpResolving(t *testing.T) {
	namespace := AllNamespaces

	t.Log("add permissions for ip-resolution for current user")
	if err := ApplyKubeFilesForTest(
		t,
		"minikube",
		namespace,
		"../cli/cmd/permissionFiles/permissions-all-namespaces-ip-resolution-optional.yaml",
	); err != nil {
		t.Errorf("failed to create k8s permissions, %v", err)
		return
	}

	basicTapTest(t, true)
}

func TestRestrictedMode(t *testing.T) {
	namespace := "mizu-tests"

	t.Log("creating permissions for restricted user")
	if err := ApplyKubeFilesForTest(
		t,
		"minikube",
		namespace,
		"../cli/cmd/permissionFiles/permissions-ns-tap.yaml",
	); err != nil {
		t.Errorf("failed to create k8s permissions, %v", err)
		return
	}

	t.Log("switching k8s context to user")
	if err := SwitchKubeContextForTest(t, "user-with-restricted-access"); err != nil {
		t.Errorf("failed to switch k8s context, %v", err)
		return
	}

	extraArgs := []string{"--set", fmt.Sprintf("mizu-resources-namespace=%s", namespace)}
	t.Run("basic tap", func(testingT *testing.T) { basicTapTest(testingT, false, extraArgs...) })
}
@@ -1,421 +0,0 @@
|
||||
package acceptanceTests
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
"k8s.io/client-go/tools/clientcmd"
|
||||
"k8s.io/client-go/util/homedir"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"syscall"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/up9inc/mizu/shared"
|
||||
)
|
||||
|
||||
const (
|
||||
LongRetriesCount = 100
|
||||
ShortRetriesCount = 10
|
||||
DefaultApiServerPort = shared.DefaultApiServerPort
|
||||
DefaultNamespaceName = "mizu-tests"
|
||||
DefaultServiceName = "httpbin"
|
||||
DefaultEntriesCount = 50
|
||||
WaitAfterTapPodsReady = 3 * time.Second
|
||||
AllNamespaces = ""
|
||||
)
|
||||
|
||||
type PodDescriptor struct {
|
||||
Name string
|
||||
Namespace string
|
||||
}
|
||||
|
||||
func GetCliPath() (string, error) {
|
||||
dir, filePathErr := os.Getwd()
|
||||
if filePathErr != nil {
|
||||
return "", filePathErr
|
||||
}
|
||||
|
||||
cliPath := path.Join(dir, "../cli/bin/mizu_ci")
|
||||
return cliPath, nil
|
||||
}
|
||||
|
||||
func GetMizuFolderPath() (string, error) {
|
||||
home, homeDirErr := os.UserHomeDir()
|
||||
if homeDirErr != nil {
|
||||
return "", homeDirErr
|
||||
}
|
||||
|
||||
return path.Join(home, ".mizu"), nil
|
||||
}
|
||||
|
||||
func GetConfigPath() (string, error) {
|
||||
mizuFolderPath, mizuPathError := GetMizuFolderPath()
|
||||
if mizuPathError != nil {
|
||||
return "", mizuPathError
|
||||
}
|
||||
|
||||
return path.Join(mizuFolderPath, "config.yaml"), nil
|
||||
}
|
||||
|
||||
func GetProxyUrl(namespace string, service string) string {
|
||||
return fmt.Sprintf("http://localhost:8080/api/v1/namespaces/%v/services/%v/proxy", namespace, service)
|
||||
}
|
||||
|
||||
func GetApiServerUrl(port uint16) string {
|
||||
return fmt.Sprintf("http://localhost:%v", port)
|
||||
}
|
||||
|
||||
func NewKubernetesProvider() (*KubernetesProvider, error) {
|
||||
home := homedir.HomeDir()
|
||||
configLoadingRules := &clientcmd.ClientConfigLoadingRules{ExplicitPath: filepath.Join(home, ".kube", "config")}
|
||||
clientConfig := clientcmd.NewNonInteractiveDeferredLoadingClientConfig(
|
||||
configLoadingRules,
|
||||
&clientcmd.ConfigOverrides{
|
||||
CurrentContext: "",
|
||||
},
|
||||
)
|
||||
|
||||
restClientConfig, err := clientConfig.ClientConfig()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
clientSet, err := kubernetes.NewForConfig(restClientConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &KubernetesProvider{clientSet}, nil
|
||||
}
|
||||
|
||||
type KubernetesProvider struct {
|
||||
clientSet *kubernetes.Clientset
|
||||
}
|
||||
|
||||
func (kp *KubernetesProvider) GetServiceExternalIp(ctx context.Context, namespace string, service string) (string, error) {
|
||||
serviceObj, err := kp.clientSet.CoreV1().Services(namespace).Get(ctx, service, metav1.GetOptions{})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
externalIp := serviceObj.Status.LoadBalancer.Ingress[0].IP
|
||||
return externalIp, nil
|
||||
}
|
||||
|
||||
func SwitchKubeContextForTest(t *testing.T, newContextName string) error {
|
||||
prevKubeContextName, err := GetKubeCurrentContextName()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := SetKubeCurrentContext(newContextName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := SetKubeCurrentContext(prevKubeContextName); err != nil {
|
||||
t.Errorf("failed to set Kubernetes context to %s, err: %v", prevKubeContextName, err)
|
||||
t.Errorf("cleanup failed, subsequent tests may be affected")
|
||||
}
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetKubeCurrentContextName() (string, error) {
|
||||
cmd := exec.Command("kubectl", "config", "current-context")
|
||||
|
||||
output, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("%v, %s", err, string(output))
|
||||
}
|
||||
|
||||
return string(bytes.TrimSpace(output)), nil
|
||||
}
|
||||
|
||||
func SetKubeCurrentContext(contextName string) error {
|
||||
cmd := exec.Command("kubectl", "config", "use-context", contextName)
|
||||
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("%v, %s", err, string(output))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ApplyKubeFilesForTest(t *testing.T, kubeContext string, namespace string, filename ...string) error {
|
||||
for i := range filename {
|
||||
fname := filename[i]
|
||||
if err := ApplyKubeFile(kubeContext, namespace, fname); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := DeleteKubeFile(kubeContext, namespace, fname); err != nil {
|
||||
t.Errorf(
|
||||
"failed to delete Kubernetes resources in namespace %s from filename %s, err: %v",
|
||||
namespace,
|
||||
fname,
|
||||
err,
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ApplyKubeFile(kubeContext string, namespace string, filename string) (error) {
|
||||
cmdArgs := []string{
|
||||
"apply",
|
||||
"--context", kubeContext,
|
||||
"-f", filename,
|
||||
}
|
||||
if namespace != AllNamespaces {
|
||||
cmdArgs = append(cmdArgs, "-n", namespace)
|
||||
}
|
||||
cmd := exec.Command("kubectl", cmdArgs...)
|
||||
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("%v, %s", err, string(output))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func DeleteKubeFile(kubeContext string, namespace string, filename string) error {
|
||||
cmdArgs := []string{
|
||||
"delete",
|
||||
"--context", kubeContext,
|
||||
"-f", filename,
|
||||
}
|
||||
if namespace != AllNamespaces {
|
||||
cmdArgs = append(cmdArgs, "-n", namespace)
|
||||
}
|
||||
cmd := exec.Command("kubectl", cmdArgs...)
|
||||
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("%v, %s", err, string(output))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getDefaultCommandArgs() []string {
|
||||
setFlag := "--set"
|
||||
telemetry := "telemetry=false"
|
||||
agentImage := "agent-image=mizu/ci:0.0"
|
||||
imagePullPolicy := "image-pull-policy=IfNotPresent"
|
||||
headless := "headless=true"
|
||||
|
||||
return []string{setFlag, telemetry, setFlag, agentImage, setFlag, imagePullPolicy, setFlag, headless}
|
||||
}
|
||||
|
||||
func GetDefaultTapCommandArgs() []string {
|
||||
tapCommand := "tap"
|
||||
defaultCmdArgs := getDefaultCommandArgs()
|
||||
|
||||
return append([]string{tapCommand}, defaultCmdArgs...)
|
||||
}
|
||||
|
||||
func GetDefaultTapCommandArgsWithRegex(regex string) []string {
|
||||
tapCommand := "tap"
|
||||
defaultCmdArgs := getDefaultCommandArgs()
|
||||
|
||||
return append([]string{tapCommand, regex}, defaultCmdArgs...)
|
||||
}
|
||||
|
||||
func GetDefaultLogsCommandArgs() []string {
|
||||
logsCommand := "logs"
|
||||
defaultCmdArgs := getDefaultCommandArgs()
|
||||
|
||||
return append([]string{logsCommand}, defaultCmdArgs...)
|
||||
}
|
||||
|
||||
func GetDefaultTapNamespace() []string {
|
||||
return []string{"-n", "mizu-tests"}
|
||||
}
|
||||
|
||||
func GetDefaultConfigCommandArgs() []string {
|
||||
configCommand := "config"
|
||||
defaultCmdArgs := getDefaultCommandArgs()
|
||||
|
||||
return append([]string{configCommand}, defaultCmdArgs...)
|
||||
}
|
||||
|
||||
func RunCypressTests(t *testing.T, cypressRunCmd string) {
|
||||
cypressCmd := exec.Command("bash", "-c", cypressRunCmd)
|
||||
t.Logf("running command: %v", cypressCmd.String())
|
||||
out, err := cypressCmd.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Errorf("error running cypress, error: %v, output: %v", err, string(out))
|
||||
return
|
||||
}
|
||||
|
||||
t.Logf("%s", out)
|
||||
}
|
||||
|
||||
func RetriesExecute(retriesCount int, executeFunc func() error) error {
|
||||
var lastError interface{}
|
||||
|
||||
for i := 0; i < retriesCount; i++ {
|
||||
if err := TryExecuteFunc(executeFunc); err != nil {
|
||||
lastError = err
|
||||
|
||||
time.Sleep(1 * time.Second)
|
||||
continue
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("reached max retries count, retries count: %v, last err: %v", retriesCount, lastError)
|
||||
}
|
||||
|
||||
func TryExecuteFunc(executeFunc func() error) (err interface{}) {
|
||||
defer func() {
|
||||
if panicErr := recover(); panicErr != nil {
|
||||
err = panicErr
|
||||
}
|
||||
}()
|
||||
|
||||
return executeFunc()
|
||||
}
|
||||
|
||||
func WaitTapPodsReady(apiServerUrl string) error {
|
||||
resolvingUrl := fmt.Sprintf("%v/status/connectedTappersCount", apiServerUrl)
|
||||
tapPodsReadyFunc := func() error {
|
||||
requestResult, requestErr := ExecuteHttpGetRequest(resolvingUrl)
|
||||
if requestErr != nil {
|
||||
return requestErr
|
||||
}
|
||||
|
||||
connectedTappersCount := requestResult.(float64)
|
||||
if connectedTappersCount == 0 {
|
||||
return fmt.Errorf("no connected tappers running")
|
||||
}
|
||||
time.Sleep(WaitAfterTapPodsReady)
|
||||
return nil
|
||||
}
|
||||
|
||||
return RetriesExecute(LongRetriesCount, tapPodsReadyFunc)
|
||||
}
|
||||
|
||||
func JsonBytesToInterface(jsonBytes []byte) (interface{}, error) {
|
||||
var result interface{}
|
||||
if parseErr := json.Unmarshal(jsonBytes, &result); parseErr != nil {
|
||||
return nil, parseErr
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func ExecuteHttpRequest(response *http.Response, requestErr error) (interface{}, error) {
|
||||
if requestErr != nil {
|
||||
return nil, requestErr
|
||||
} else if response.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("invalid status code %v", response.StatusCode)
|
||||
}
|
||||
|
||||
defer func() { response.Body.Close() }()
|
||||
|
||||
data, readErr := ioutil.ReadAll(response.Body)
|
||||
if readErr != nil {
|
||||
return nil, readErr
|
||||
}
|
||||
|
||||
return JsonBytesToInterface(data)
|
||||
}
|
||||
|
||||
func ExecuteHttpGetRequestWithHeaders(url string, headers map[string]string) (interface{}, error) {
|
||||
request, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for headerKey, headerValue := range headers {
|
||||
request.Header.Add(headerKey, headerValue)
|
||||
}
|
||||
|
||||
client := &http.Client{}
|
||||
response, requestErr := client.Do(request)
|
||||
return ExecuteHttpRequest(response, requestErr)
|
||||
}
|
||||
|
||||
func ExecuteHttpGetRequest(url string) (interface{}, error) {
|
||||
response, requestErr := http.Get(url)
|
||||
return ExecuteHttpRequest(response, requestErr)
|
||||
}
|
||||
|
||||
func ExecuteHttpPostRequestWithHeaders(url string, headers map[string]string, body interface{}) (interface{}, error) {
|
||||
requestBody, jsonErr := json.Marshal(body)
|
||||
if jsonErr != nil {
|
||||
return nil, jsonErr
|
||||
}
|
||||
|
||||
request, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(requestBody))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
request.Header.Add("Content-Type", "application/json")
|
||||
for headerKey, headerValue := range headers {
|
||||
request.Header.Add(headerKey, headerValue)
|
||||
}
|
||||
|
||||
client := &http.Client{}
|
||||
response, requestErr := client.Do(request)
|
||||
return ExecuteHttpRequest(response, requestErr)
|
||||
}
|
||||
|
||||
func CleanupCommand(cmd *exec.Cmd) error {
|
||||
if err := cmd.Process.Signal(syscall.SIGQUIT); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := cmd.Wait(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetLogsPath() (string, error) {
|
||||
dir, filePathErr := os.Getwd()
|
||||
if filePathErr != nil {
|
||||
return "", filePathErr
|
||||
}
|
||||
|
||||
logsPath := path.Join(dir, "mizu_logs.zip")
|
||||
return logsPath, nil
|
||||
}
|
||||
|
||||
func Contains(slice []string, containsValue string) bool {
|
||||
for _, sliceValue := range slice {
|
||||
if sliceValue == containsValue {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func ContainsPartOfValue(slice []string, containsValue string) bool {
|
||||
for _, sliceValue := range slice {
|
||||
if strings.Contains(sliceValue, containsValue) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
|
||||
version: v1.14.0
|
||||
ignore:
|
||||
SNYK-GOLANG-GITHUBCOMGINGONICGIN-1041736:
|
||||
- '*':
|
||||
reason: None Given
|
||||
@@ -1,2 +0,0 @@
|
||||
test: ## Run agent tests.
|
||||
@go test ./... -coverpkg=./... -race -coverprofile=coverage.out -covermode=atomic
|
||||
agent/go.mod
@@ -1,150 +0,0 @@
|
||||
module github.com/up9inc/mizu/agent
|
||||
|
||||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/antelman107/net-wait-go v0.0.0-20210623112055-cf684aebda7b
|
||||
github.com/chanced/openapi v0.0.8
|
||||
github.com/djherbis/atime v1.1.0
|
||||
github.com/elastic/go-elasticsearch/v7 v7.17.0
|
||||
github.com/getkin/kin-openapi v0.89.0
|
||||
github.com/gin-contrib/static v0.0.1
|
||||
github.com/gin-gonic/gin v1.7.7
|
||||
github.com/go-playground/locales v0.14.0
|
||||
github.com/go-playground/universal-translator v0.18.0
|
||||
github.com/go-playground/validator/v10 v10.10.0
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/gorilla/websocket v1.4.2
|
||||
github.com/nav-inc/datetime v0.1.3
|
||||
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
|
||||
github.com/orcaman/concurrent-map v1.0.0
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible
|
||||
github.com/stretchr/testify v1.7.0
|
||||
github.com/up9inc/basenine/client/go v0.0.0-20220317230530-8472d80307f6
|
||||
github.com/up9inc/mizu/shared v0.0.0
|
||||
github.com/up9inc/mizu/tap v0.0.0
|
||||
github.com/up9inc/mizu/tap/api v0.0.0
|
||||
github.com/up9inc/mizu/tap/extensions/amqp v0.0.0
|
||||
github.com/up9inc/mizu/tap/extensions/http v0.0.0
|
||||
github.com/up9inc/mizu/tap/extensions/kafka v0.0.0
|
||||
github.com/up9inc/mizu/tap/extensions/redis v0.0.0
|
||||
github.com/wI2L/jsondiff v0.1.1
|
||||
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0
|
||||
k8s.io/api v0.23.3
|
||||
k8s.io/apimachinery v0.23.3
|
||||
k8s.io/client-go v0.23.3
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go/compute v1.2.0 // indirect
|
||||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
|
||||
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
|
||||
github.com/Azure/go-autorest/autorest v0.11.24 // indirect
|
||||
github.com/Azure/go-autorest/autorest/adal v0.9.18 // indirect
|
||||
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
|
||||
github.com/Azure/go-autorest/logger v0.2.1 // indirect
|
||||
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
|
||||
github.com/MakeNowJust/heredoc v1.0.0 // indirect
|
||||
github.com/PuerkitoBio/purell v1.1.1 // indirect
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
|
||||
github.com/beevik/etree v1.1.0 // indirect
|
||||
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4 // indirect
|
||||
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5 // indirect
|
||||
github.com/chanced/dynamic v0.0.0-20211210164248-f8fadb1d735b // indirect
|
||||
github.com/cilium/ebpf v0.8.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/evanphx/json-patch v5.6.0+incompatible // indirect
|
||||
github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f // indirect
|
||||
github.com/fatih/camelcase v1.0.0 // indirect
|
||||
github.com/fvbommel/sortorder v1.0.2 // indirect
|
||||
github.com/ghodss/yaml v1.0.0 // indirect
|
||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||
github.com/go-errors/errors v1.4.2 // indirect
|
||||
github.com/go-logr/logr v1.2.2 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.19.5 // indirect
|
||||
github.com/go-openapi/jsonreference v0.19.6 // indirect
|
||||
github.com/go-openapi/swag v0.21.1 // indirect
|
||||
github.com/gogo/protobuf v1.3.2 // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/google/btree v1.0.1 // indirect
|
||||
github.com/google/go-cmp v0.5.7 // indirect
|
||||
github.com/google/gofuzz v1.2.0 // indirect
|
||||
github.com/google/gopacket v1.1.19 // indirect
|
||||
github.com/google/martian v2.1.0+incompatible // indirect
|
||||
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
|
||||
github.com/googleapis/gnostic v0.5.5 // indirect
|
||||
github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 // indirect
|
||||
github.com/imdario/mergo v0.3.12 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.14.2 // indirect
|
||||
github.com/leodido/go-urn v1.2.1 // indirect
|
||||
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
github.com/moby/spdystream v0.2.0 // indirect
|
||||
github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect
|
||||
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect
|
||||
github.com/ohler55/ojg v1.12.12 // indirect
|
||||
github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
|
||||
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/russross/blackfriday v1.6.0 // indirect
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.0.0 // indirect
|
||||
github.com/segmentio/kafka-go v0.4.27 // indirect
|
||||
github.com/spf13/cobra v1.3.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/tidwall/gjson v1.14.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tidwall/sjson v1.2.4 // indirect
|
||||
github.com/ugorji/go/codec v1.2.6 // indirect
|
||||
github.com/vishvananda/netns v0.0.0-20211101163701-50045581ed74 // indirect
|
||||
github.com/xlab/treeprint v1.1.0 // indirect
|
||||
go.starlark.net v0.0.0-20220203230714-bb14e151c28f // indirect
|
||||
golang.org/x/crypto v0.0.0-20220208050332-20e1d8d225ab // indirect
|
||||
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect
|
||||
golang.org/x/sys v0.0.0-20220207234003-57398862261d // indirect
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
|
||||
golang.org/x/text v0.3.7 // indirect
|
||||
golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
gopkg.in/inf.v0 v0.9.1 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
|
||||
k8s.io/cli-runtime v0.23.3 // indirect
|
||||
k8s.io/component-base v0.23.3 // indirect
|
||||
k8s.io/klog/v2 v2.40.1 // indirect
|
||||
k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf // indirect
|
||||
k8s.io/kubectl v0.23.3 // indirect
|
||||
k8s.io/utils v0.0.0-20220127004650-9b3446523e65 // indirect
|
||||
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 // indirect
|
||||
sigs.k8s.io/kustomize/api v0.11.1 // indirect
|
||||
sigs.k8s.io/kustomize/kyaml v0.13.3 // indirect
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.2.1 // indirect
|
||||
sigs.k8s.io/yaml v1.3.0 // indirect
|
||||
)
|
||||
|
||||
replace github.com/up9inc/mizu/shared v0.0.0 => ../shared
|
||||
|
||||
replace github.com/up9inc/mizu/tap v0.0.0 => ../tap
|
||||
|
||||
replace github.com/up9inc/mizu/tap/api v0.0.0 => ../tap/api
|
||||
|
||||
replace github.com/up9inc/mizu/tap/extensions/amqp v0.0.0 => ../tap/extensions/amqp
|
||||
|
||||
replace github.com/up9inc/mizu/tap/extensions/http v0.0.0 => ../tap/extensions/http
|
||||
|
||||
replace github.com/up9inc/mizu/tap/extensions/kafka v0.0.0 => ../tap/extensions/kafka
|
||||
|
||||
replace github.com/up9inc/mizu/tap/extensions/redis v0.0.0 => ../tap/extensions/redis
|
||||
agent/go.sum
File diff suppressed because it is too large
agent/main.go
@@ -1,374 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/gin-contrib/static"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/up9inc/mizu/agent/pkg/dependency"
|
||||
"github.com/up9inc/mizu/agent/pkg/elastic"
|
||||
"github.com/up9inc/mizu/agent/pkg/middlewares"
|
||||
"github.com/up9inc/mizu/agent/pkg/models"
|
||||
"github.com/up9inc/mizu/agent/pkg/oas"
|
||||
"github.com/up9inc/mizu/agent/pkg/routes"
|
||||
"github.com/up9inc/mizu/agent/pkg/servicemap"
|
||||
"github.com/up9inc/mizu/agent/pkg/up9"
|
||||
"github.com/up9inc/mizu/agent/pkg/utils"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/api"
|
||||
"github.com/up9inc/mizu/agent/pkg/app"
|
||||
"github.com/up9inc/mizu/agent/pkg/config"
|
||||
|
||||
"github.com/gorilla/websocket"
|
||||
"github.com/op/go-logging"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"github.com/up9inc/mizu/tap"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
)
|
||||
|
||||
var tapperMode = flag.Bool("tap", false, "Run in tapper mode without API")
|
||||
var apiServerMode = flag.Bool("api-server", false, "Run in API server mode with API")
|
||||
var standaloneMode = flag.Bool("standalone", false, "Run in standalone tapper and API mode")
|
||||
var apiServerAddress = flag.String("api-server-address", "", "Address of mizu API server")
|
||||
var namespace = flag.String("namespace", "", "Resolve IPs if they belong to resources in this namespace (default is all)")
|
||||
var harsReaderMode = flag.Bool("hars-read", false, "Run in hars-read mode")
|
||||
var harsDir = flag.String("hars-dir", "", "Directory to read hars from")
|
||||
|
||||
const (
|
||||
socketConnectionRetries = 30
|
||||
socketConnectionRetryDelay = time.Second * 2
|
||||
socketHandshakeTimeout = time.Second * 2
|
||||
)
|
||||
|
||||
func main() {
|
||||
initializeDependencies()
|
||||
logLevel := determineLogLevel()
|
||||
logger.InitLoggerStd(logLevel)
|
||||
flag.Parse()
|
||||
|
||||
app.LoadExtensions()
|
||||
|
||||
if !*tapperMode && !*apiServerMode && !*standaloneMode && !*harsReaderMode {
|
||||
panic("One of the flags --tap, --api or --standalone or --hars-read must be provided")
|
||||
}
|
||||
|
||||
if *standaloneMode {
|
||||
runInStandaloneMode()
|
||||
} else if *tapperMode {
|
||||
runInTapperMode()
|
||||
} else if *apiServerMode {
|
||||
utils.StartServer(runInApiServerMode(*namespace))
|
||||
} else if *harsReaderMode {
|
||||
runInHarReaderMode()
|
||||
}
|
||||
|
||||
signalChan := make(chan os.Signal, 1)
|
||||
signal.Notify(signalChan, os.Interrupt)
|
||||
<-signalChan
|
||||
|
||||
logger.Log.Info("Exiting")
|
||||
}
|
||||
|
||||
func hostApi(socketHarOutputChannel chan<- *tapApi.OutputChannelItem) *gin.Engine {
|
||||
app := gin.Default()
|
||||
|
||||
app.GET("/echo", func(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, "Here is Mizu agent")
|
||||
})
|
||||
|
||||
eventHandlers := api.RoutesEventHandlers{
|
||||
SocketOutChannel: socketHarOutputChannel,
|
||||
}
|
||||
|
||||
app.Use(disableRootStaticCache())
|
||||
|
||||
staticFolder := "./site"
|
||||
indexStaticFile := staticFolder + "/index.html"
|
||||
if err := setUIFlags(indexStaticFile); err != nil {
|
||||
logger.Log.Errorf("Error setting ui flags, err: %v", err)
|
||||
}
|
||||
|
||||
app.Use(static.ServeRoot("/", staticFolder))
|
||||
app.NoRoute(func(c *gin.Context) {
|
||||
c.File(indexStaticFile)
|
||||
})
|
||||
|
||||
app.Use(middlewares.CORSMiddleware()) // This has to be called after the static middleware; it does not work if it's called before
|
||||
|
||||
api.WebSocketRoutes(app, &eventHandlers)
|
||||
|
||||
if config.Config.OAS {
|
||||
routes.OASRoutes(app)
|
||||
}
|
||||
|
||||
if config.Config.ServiceMap {
|
||||
routes.ServiceMapRoutes(app)
|
||||
}
|
||||
|
||||
routes.QueryRoutes(app)
|
||||
routes.EntriesRoutes(app)
|
||||
routes.MetadataRoutes(app)
|
||||
routes.StatusRoutes(app)
|
||||
routes.DbRoutes(app)
|
||||
|
||||
return app
|
||||
}
|
||||
|
||||
func runInApiServerMode(namespace string) *gin.Engine {
|
||||
if err := config.LoadConfig(); err != nil {
|
||||
logger.Log.Fatalf("Error loading config file %v", err)
|
||||
}
|
||||
app.ConfigureBasenineServer(shared.BasenineHost, shared.BaseninePort, config.Config.MaxDBSizeBytes, config.Config.LogLevel, config.Config.InsertionFilter)
|
||||
api.StartResolving(namespace)
|
||||
|
||||
enableExpFeatureIfNeeded()
|
||||
|
||||
syncEntriesConfig := getSyncEntriesConfig()
|
||||
if syncEntriesConfig != nil {
|
||||
if err := up9.SyncEntries(syncEntriesConfig); err != nil {
|
||||
logger.Log.Error("Error syncing entries, err: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return hostApi(app.GetEntryInputChannel())
|
||||
}
|
||||
|
||||
func runInTapperMode() {
|
||||
logger.Log.Infof("Starting tapper, websocket address: %s", *apiServerAddress)
|
||||
if *apiServerAddress == "" {
|
||||
panic("API server address must be provided with --api-server-address when using --tap")
|
||||
}
|
||||
|
||||
hostMode := os.Getenv(shared.HostModeEnvVar) == "1"
|
||||
tapOpts := &tap.TapOpts{HostMode: hostMode}
|
||||
|
||||
filteredOutputItemsChannel := make(chan *tapApi.OutputChannelItem)
|
||||
|
||||
filteringOptions := getTrafficFilteringOptions()
|
||||
tap.StartPassiveTapper(tapOpts, filteredOutputItemsChannel, app.Extensions, filteringOptions)
|
||||
socketConnection, err := dialSocketWithRetry(*apiServerAddress, socketConnectionRetries, socketConnectionRetryDelay)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("Error connecting to socket server at %s %v", *apiServerAddress, err))
|
||||
}
|
||||
logger.Log.Infof("Connected successfully to websocket %s", *apiServerAddress)
|
||||
|
||||
go pipeTapChannelToSocket(socketConnection, filteredOutputItemsChannel)
|
||||
}
|
||||
|
||||
func runInStandaloneMode() {
|
||||
api.StartResolving(*namespace)
|
||||
|
||||
outputItemsChannel := make(chan *tapApi.OutputChannelItem)
|
||||
filteredOutputItemsChannel := make(chan *tapApi.OutputChannelItem)
|
||||
|
||||
filteringOptions := getTrafficFilteringOptions()
|
||||
hostMode := os.Getenv(shared.HostModeEnvVar) == "1"
|
||||
tapOpts := &tap.TapOpts{HostMode: hostMode}
|
||||
tap.StartPassiveTapper(tapOpts, outputItemsChannel, app.Extensions, filteringOptions)
|
||||
|
||||
go app.FilterItems(outputItemsChannel, filteredOutputItemsChannel)
|
||||
go api.StartReadingEntries(filteredOutputItemsChannel, nil, app.ExtensionsMap)
|
||||
|
||||
ginApp := hostApi(nil)
|
||||
utils.StartServer(ginApp)
|
||||
}
|
||||
|
||||
func runInHarReaderMode() {
|
||||
outputItemsChannel := make(chan *tapApi.OutputChannelItem, 1000)
|
||||
filteredHarChannel := make(chan *tapApi.OutputChannelItem)
|
||||
|
||||
go app.FilterItems(outputItemsChannel, filteredHarChannel)
|
||||
go api.StartReadingEntries(filteredHarChannel, harsDir, app.ExtensionsMap)
|
||||
ginApp := hostApi(nil)
|
||||
utils.StartServer(ginApp)
|
||||
}
|
||||
|
||||
func enableExpFeatureIfNeeded() {
|
||||
if config.Config.OAS {
|
||||
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGenerator)
|
||||
oasGenerator.Start()
|
||||
}
|
||||
if config.Config.ServiceMap {
|
||||
serviceMapGenerator := dependency.GetInstance(dependency.ServiceMapGeneratorDependency).(servicemap.ServiceMap)
|
||||
serviceMapGenerator.Enable()
|
||||
}
|
||||
elastic.GetInstance().Configure(config.Config.Elastic)
|
||||
}
|
||||
|
||||
func getSyncEntriesConfig() *shared.SyncEntriesConfig {
|
||||
syncEntriesConfigJson := os.Getenv(shared.SyncEntriesConfigEnvVar)
|
||||
if syncEntriesConfigJson == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
var syncEntriesConfig = &shared.SyncEntriesConfig{}
|
||||
err := json.Unmarshal([]byte(syncEntriesConfigJson), syncEntriesConfig)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("env var %s's value of %s is invalid! json must match the shared.SyncEntriesConfig struct, err: %v", shared.SyncEntriesConfigEnvVar, syncEntriesConfigJson, err))
|
||||
}
|
||||
|
||||
return syncEntriesConfig
|
||||
}
|
||||
|
||||
func disableRootStaticCache() gin.HandlerFunc {
|
||||
return func(c *gin.Context) {
|
||||
if c.Request.RequestURI == "/" {
|
||||
// Disable cache only for the main static route
|
||||
c.Writer.Header().Set("Cache-Control", "no-store")
|
||||
}
|
||||
|
||||
c.Next()
|
||||
}
|
||||
}
|
||||
|
||||
func setUIFlags(uiIndexPath string) error {
|
||||
read, err := ioutil.ReadFile(uiIndexPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
replacedContent := strings.Replace(string(read), "__IS_OAS_ENABLED__", strconv.FormatBool(config.Config.OAS), 1)
|
||||
replacedContent = strings.Replace(replacedContent, "__IS_SERVICE_MAP_ENABLED__", strconv.FormatBool(config.Config.ServiceMap), 1)
|
||||
|
||||
err = ioutil.WriteFile(uiIndexPath, []byte(replacedContent), 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getTrafficFilteringOptions() *tapApi.TrafficFilteringOptions {
|
||||
filteringOptionsJson := os.Getenv(shared.MizuFilteringOptionsEnvVar)
|
||||
if filteringOptionsJson == "" {
|
||||
return &tapApi.TrafficFilteringOptions{
|
||||
IgnoredUserAgents: []string{},
|
||||
}
|
||||
}
|
||||
var filteringOptions tapApi.TrafficFilteringOptions
|
||||
err := json.Unmarshal([]byte(filteringOptionsJson), &filteringOptions)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("env var %s's value of %s is invalid! json must match the api.TrafficFilteringOptions struct %v", shared.MizuFilteringOptionsEnvVar, filteringOptionsJson, err))
|
||||
}
|
||||
|
||||
return &filteringOptions
|
||||
}
|
||||
|
||||
func pipeTapChannelToSocket(connection *websocket.Conn, messageDataChannel <-chan *tapApi.OutputChannelItem) {
|
||||
if connection == nil {
|
||||
panic("Websocket connection is nil")
|
||||
}
|
||||
|
||||
if messageDataChannel == nil {
|
||||
panic("Channel of captured messages is nil")
|
||||
}
|
||||
|
||||
for messageData := range messageDataChannel {
|
||||
marshaledData, err := models.CreateWebsocketTappedEntryMessage(messageData)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("error converting message to json %v, err: %s, (%v,%+v)", messageData, err, err, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// NOTE: This is where the `*tapApi.OutputChannelItem` leaves the code
|
||||
// and goes into the intermediate WebSocket.
|
||||
err = connection.WriteMessage(websocket.TextMessage, marshaledData)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("error sending message through socket server %v, err: %s, (%v,%+v)", messageData, err, err, err)
|
||||
if errors.Is(err, syscall.EPIPE) {
|
||||
logger.Log.Warning("detected socket disconnection, reestablishing socket connection")
|
||||
connection, err = dialSocketWithRetry(*apiServerAddress, socketConnectionRetries, socketConnectionRetryDelay)
|
||||
if err != nil {
|
||||
logger.Log.Fatalf("error reestablishing socket connection: %v", err)
|
||||
} else {
|
||||
logger.Log.Info("recovered connection successfully")
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func determineLogLevel() (logLevel logging.Level) {
|
||||
logLevel, err := logging.LogLevel(os.Getenv(shared.LogLevelEnvVar))
|
||||
if err != nil {
|
||||
logLevel = logging.INFO
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func dialSocketWithRetry(socketAddress string, retryAmount int, retryDelay time.Duration) (*websocket.Conn, error) {
|
||||
var lastErr error
|
||||
dialer := &websocket.Dialer{ // we use our own dialer instead of the default due to the default's 45 sec handshake timeout, we occasionally encounter hanging socket handshakes when tapper tries to connect to api too soon
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
HandshakeTimeout: socketHandshakeTimeout,
|
||||
}
|
||||
for i := 1; i < retryAmount; i++ {
|
||||
socketConnection, _, err := dialer.Dial(socketAddress, nil)
|
||||
if err != nil {
lastErr = err
if i < retryAmount {
|
||||
logger.Log.Infof("socket connection to %s failed: %v, retrying %d out of %d in %d seconds...", socketAddress, err, i, retryAmount, retryDelay/time.Second)
|
||||
time.Sleep(retryDelay)
|
||||
}
|
||||
} else {
|
||||
go handleIncomingMessageAsTapper(socketConnection)
|
||||
return socketConnection, nil
|
||||
}
|
||||
}
|
||||
return nil, lastErr
|
||||
}
|
||||
|
||||
func handleIncomingMessageAsTapper(socketConnection *websocket.Conn) {
|
||||
for {
|
||||
if _, message, err := socketConnection.ReadMessage(); err != nil {
|
||||
logger.Log.Errorf("error reading message from socket connection, err: %s, (%v,%+v)", err, err, err)
|
||||
if errors.Is(err, syscall.EPIPE) {
|
||||
// socket has disconnected, we can safely stop this goroutine
|
||||
return
|
||||
}
|
||||
} else {
|
||||
var socketMessageBase shared.WebSocketMessageMetadata
|
||||
if err := json.Unmarshal(message, &socketMessageBase); err != nil {
|
||||
logger.Log.Errorf("Could not unmarshal websocket message %v", err)
|
||||
} else {
|
||||
switch socketMessageBase.MessageType {
|
||||
case shared.WebSocketMessageTypeTapConfig:
|
||||
var tapConfigMessage *shared.WebSocketTapConfigMessage
|
||||
if err := json.Unmarshal(message, &tapConfigMessage); err != nil {
|
||||
logger.Log.Errorf("received unknown message from socket connection: %s, err: %s, (%v,%+v)", string(message), err, err, err)
|
||||
} else {
|
||||
tap.UpdateTapTargets(tapConfigMessage.TapTargets)
|
||||
}
|
||||
case shared.WebSocketMessageTypeUpdateTappedPods:
|
||||
var tappedPodsMessage shared.WebSocketTappedPodsMessage
|
||||
if err := json.Unmarshal(message, &tappedPodsMessage); err != nil {
|
||||
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
|
||||
return
|
||||
}
|
||||
nodeName := os.Getenv(shared.NodeNameEnvVar)
|
||||
tap.UpdateTapTargets(tappedPodsMessage.NodeToTappedPodMap[nodeName])
|
||||
default:
|
||||
logger.Log.Warningf("Received socket message of type %s for which no handlers are defined", socketMessageBase.MessageType)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func initializeDependencies() {
|
||||
dependency.RegisterGenerator(dependency.ServiceMapGeneratorDependency, func() interface{} { return servicemap.GetDefaultServiceMapInstance() })
|
||||
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
|
||||
}
|
||||
@@ -1,107 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
|
||||
"github.com/getkin/kin-openapi/openapi3"
|
||||
"github.com/getkin/kin-openapi/openapi3filter"
|
||||
"github.com/getkin/kin-openapi/routers"
|
||||
legacyrouter "github.com/getkin/kin-openapi/routers/legacy"
|
||||
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/tap/api"
|
||||
)
|
||||
|
||||
const (
|
||||
ContractNotApplicable api.ContractStatus = 0
|
||||
ContractPassed api.ContractStatus = 1
|
||||
ContractFailed api.ContractStatus = 2
|
||||
)
|
||||
|
||||
func loadOAS(ctx context.Context) (doc *openapi3.T, contractContent string, router routers.Router, err error) {
|
||||
path := fmt.Sprintf("%s%s", shared.ConfigDirPath, shared.ContractFileName)
|
||||
bytes, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
contractContent = string(bytes)
|
||||
loader := &openapi3.Loader{Context: ctx}
|
||||
doc, _ = loader.LoadFromData(bytes)
|
||||
err = doc.Validate(ctx)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
router, _ = legacyrouter.NewRouter(doc)
|
||||
return
|
||||
}
|
||||
|
||||
func validateOAS(ctx context.Context, doc *openapi3.T, router routers.Router, req *http.Request, res *http.Response) (isValid bool, reqErr error, resErr error) {
|
||||
isValid = true
|
||||
reqErr = nil
|
||||
resErr = nil
|
||||
|
||||
// Find route
|
||||
route, pathParams, err := router.FindRoute(req)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Validate request
|
||||
requestValidationInput := &openapi3filter.RequestValidationInput{
|
||||
Request: req,
|
||||
PathParams: pathParams,
|
||||
Route: route,
|
||||
}
|
||||
if reqErr = openapi3filter.ValidateRequest(ctx, requestValidationInput); reqErr != nil {
|
||||
isValid = false
|
||||
}
|
||||
|
||||
responseValidationInput := &openapi3filter.ResponseValidationInput{
|
||||
RequestValidationInput: requestValidationInput,
|
||||
Status: res.StatusCode,
|
||||
Header: res.Header,
|
||||
}
|
||||
|
||||
if res.Body != nil {
|
||||
body, _ := ioutil.ReadAll(res.Body)
|
||||
res.Body = ioutil.NopCloser(bytes.NewBuffer(body))
|
||||
responseValidationInput.SetBodyBytes(body)
|
||||
}
|
||||
|
||||
// Validate response.
|
||||
if resErr = openapi3filter.ValidateResponse(ctx, responseValidationInput); resErr != nil {
|
||||
isValid = false
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func handleOAS(ctx context.Context, doc *openapi3.T, router routers.Router, req *http.Request, res *http.Response, contractContent string) (contract api.Contract) {
|
||||
contract = api.Contract{
|
||||
Content: contractContent,
|
||||
Status: ContractNotApplicable,
|
||||
}
|
||||
|
||||
isValid, reqErr, resErr := validateOAS(ctx, doc, router, req, res)
|
||||
if isValid {
|
||||
contract.Status = ContractPassed
|
||||
} else {
|
||||
contract.Status = ContractFailed
|
||||
if reqErr != nil {
|
||||
contract.RequestReason = reqErr.Error()
|
||||
} else {
|
||||
contract.RequestReason = ""
|
||||
}
|
||||
if resErr != nil {
|
||||
contract.ResponseReason = resErr.Error()
|
||||
} else {
|
||||
contract.ResponseReason = ""
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
@@ -1,208 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/dependency"
|
||||
"github.com/up9inc/mizu/agent/pkg/elastic"
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
"github.com/up9inc/mizu/agent/pkg/holder"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/servicemap"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/models"
|
||||
"github.com/up9inc/mizu/agent/pkg/oas"
|
||||
"github.com/up9inc/mizu/agent/pkg/resolver"
|
||||
"github.com/up9inc/mizu/agent/pkg/utils"
|
||||
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
)
|
||||
|
||||
var k8sResolver *resolver.Resolver
|
||||
|
||||
func StartResolving(namespace string) {
|
||||
errOut := make(chan error, 100)
|
||||
res, err := resolver.NewFromInCluster(errOut, namespace)
|
||||
if err != nil {
|
||||
logger.Log.Infof("error creating k8s resolver %s", err)
|
||||
return
|
||||
}
|
||||
ctx := context.Background()
|
||||
res.Start(ctx)
|
||||
go func() {
|
||||
for {
|
||||
err := <-errOut
|
||||
logger.Log.Infof("name resolving error %s", err)
|
||||
}
|
||||
}()
|
||||
|
||||
k8sResolver = res
|
||||
holder.SetResolver(res)
|
||||
}
|
||||
|
||||
func StartReadingEntries(harChannel <-chan *tapApi.OutputChannelItem, workingDir *string, extensionsMap map[string]*tapApi.Extension) {
|
||||
if workingDir != nil && *workingDir != "" {
|
||||
startReadingFiles(*workingDir)
|
||||
} else {
|
||||
startReadingChannel(harChannel, extensionsMap)
|
||||
}
|
||||
}
|
||||
|
||||
func startReadingFiles(workingDir string) {
|
||||
if err := os.MkdirAll(workingDir, os.ModePerm); err != nil {
|
||||
logger.Log.Errorf("Failed to make dir: %s, err: %v", workingDir, err)
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
dir, _ := os.Open(workingDir)
|
||||
dirFiles, _ := dir.Readdir(-1)
|
||||
|
||||
var harFiles []os.FileInfo
|
||||
for _, fileInfo := range dirFiles {
|
||||
if strings.HasSuffix(fileInfo.Name(), ".har") {
|
||||
harFiles = append(harFiles, fileInfo)
|
||||
}
|
||||
}
|
||||
sort.Sort(utils.ByModTime(harFiles))
|
||||
|
||||
if len(harFiles) == 0 {
|
||||
logger.Log.Infof("Waiting for new files")
|
||||
time.Sleep(3 * time.Second)
|
||||
continue
|
||||
}
|
||||
fileInfo := harFiles[0]
|
||||
inputFilePath := path.Join(workingDir, fileInfo.Name())
|
||||
file, err := os.Open(inputFilePath)
|
||||
utils.CheckErr(err)
|
||||
|
||||
var inputHar har.HAR
|
||||
decErr := json.NewDecoder(bufio.NewReader(file)).Decode(&inputHar)
|
||||
utils.CheckErr(decErr)
|
||||
|
||||
rmErr := os.Remove(inputFilePath)
|
||||
utils.CheckErr(rmErr)
|
||||
}
|
||||
}
|
||||
|
||||
func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extensionsMap map[string]*tapApi.Extension) {
|
||||
if outputItems == nil {
|
||||
panic("Channel of captured messages is nil")
|
||||
}
|
||||
|
||||
connection, err := basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
connection.InsertMode()
|
||||
|
||||
disableOASValidation := false
|
||||
ctx := context.Background()
|
||||
doc, contractContent, router, err := loadOAS(ctx)
|
||||
if err != nil {
|
||||
logger.Log.Infof("Disabled OAS validation: %s", err.Error())
|
||||
disableOASValidation = true
|
||||
}
|
||||
|
||||
for item := range outputItems {
|
||||
extension := extensionsMap[item.Protocol.Name]
|
||||
resolvedSource, resolvedDestination, namespace := resolveIP(item.ConnectionInfo)
mizuEntry := extension.Dissector.Analyze(item, resolvedSource, resolvedDestination, namespace)
|
||||
if extension.Protocol.Name == "http" {
|
||||
if !disableOASValidation {
|
||||
var httpPair tapApi.HTTPRequestResponsePair
|
||||
if err := json.Unmarshal([]byte(mizuEntry.HTTPPair), &httpPair); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
|
||||
contract := handleOAS(ctx, doc, router, httpPair.Request.Payload.RawRequest, httpPair.Response.Payload.RawResponse, contractContent)
|
||||
mizuEntry.ContractStatus = contract.Status
|
||||
mizuEntry.ContractRequestReason = contract.RequestReason
|
||||
mizuEntry.ContractResponseReason = contract.ResponseReason
|
||||
mizuEntry.ContractContent = contract.Content
|
||||
}
|
||||
|
||||
harEntry, err := har.NewEntry(mizuEntry.Request, mizuEntry.Response, mizuEntry.StartTime, mizuEntry.ElapsedTime)
|
||||
if err == nil {
|
||||
rules, _, _ := models.RunValidationRulesState(*harEntry, mizuEntry.Destination.Name)
|
||||
mizuEntry.Rules = rules
|
||||
}
|
||||
|
||||
entryWSource := oas.EntryWithSource{
|
||||
Entry: *harEntry,
|
||||
Source: mizuEntry.Source.Name,
|
||||
Destination: mizuEntry.Destination.Name,
|
||||
Id: mizuEntry.Id,
|
||||
}
|
||||
|
||||
if entryWSource.Destination == "" {
|
||||
entryWSource.Destination = mizuEntry.Destination.IP + ":" + mizuEntry.Destination.Port
|
||||
}
|
||||
|
||||
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGeneratorSink)
|
||||
oasGenerator.PushEntry(&entryWSource)
|
||||
}
|
||||
|
||||
data, err := json.Marshal(mizuEntry)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
providers.EntryAdded(len(data))
|
||||
|
||||
connection.SendText(string(data))
|
||||
|
||||
serviceMapGenerator := dependency.GetInstance(dependency.ServiceMapGeneratorDependency).(servicemap.ServiceMapSink)
|
||||
serviceMapGenerator.NewTCPEntry(mizuEntry.Source, mizuEntry.Destination, &item.Protocol)
|
||||
|
||||
elastic.GetInstance().PushEntry(mizuEntry)
|
||||
}
|
||||
}
|
||||
|
||||
func resolveIP(connectionInfo *tapApi.ConnectionInfo) (resolvedSource string, resolvedDestination string, namespace string) {
|
||||
if k8sResolver != nil {
|
||||
unresolvedSource := connectionInfo.ClientIP
|
||||
resolvedSourceObject := k8sResolver.Resolve(unresolvedSource)
|
||||
if resolvedSourceObject == nil {
|
||||
logger.Log.Debugf("Cannot find resolved name to source: %s", unresolvedSource)
|
||||
if os.Getenv("SKIP_NOT_RESOLVED_SOURCE") == "1" {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
resolvedSource = resolvedSourceObject.FullAddress
|
||||
}
|
||||
|
||||
unresolvedDestination := fmt.Sprintf("%s:%s", connectionInfo.ServerIP, connectionInfo.ServerPort)
|
||||
resolvedDestinationObject := k8sResolver.Resolve(unresolvedDestination)
|
||||
if resolvedDestinationObject == nil {
|
||||
logger.Log.Debugf("Cannot find resolved name to dest: %s", unresolvedDestination)
|
||||
if os.Getenv("SKIP_NOT_RESOLVED_DEST") == "1" {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
resolvedDestination = resolvedDestinationObject.FullAddress
|
||||
namespace = resolvedDestinationObject.Namespace
|
||||
}
|
||||
}
|
||||
return resolvedSource, resolvedDestination, namespace
|
||||
}
|
||||
|
||||
func CheckIsServiceIP(address string) bool {
|
||||
if k8sResolver == nil {
|
||||
return false
|
||||
}
|
||||
return k8sResolver.CheckIsServiceIP(address)
|
||||
}
|
||||
@@ -1,261 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/models"
|
||||
"github.com/up9inc/mizu/agent/pkg/utils"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/gorilla/websocket"
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
)
|
||||
|
||||
var extensionsMap map[string]*tapApi.Extension // global
|
||||
|
||||
func InitExtensionsMap(ref map[string]*tapApi.Extension) {
|
||||
extensionsMap = ref
|
||||
}
|
||||
|
||||
type EventHandlers interface {
|
||||
WebSocketConnect(socketId int, isTapper bool)
|
||||
WebSocketDisconnect(socketId int, isTapper bool)
|
||||
WebSocketMessage(socketId int, message []byte)
|
||||
}
|
||||
|
||||
type SocketConnection struct {
|
||||
connection *websocket.Conn
|
||||
lock *sync.Mutex
|
||||
eventHandlers EventHandlers
|
||||
isTapper bool
|
||||
}
|
||||
|
||||
type WebSocketParams struct {
|
||||
Query string `json:"query"`
|
||||
EnableFullEntries bool `json:"enableFullEntries"`
|
||||
}
|
||||
|
||||
var (
|
||||
websocketUpgrader = websocket.Upgrader{
|
||||
ReadBufferSize: 1024,
|
||||
WriteBufferSize: 1024,
|
||||
}
|
||||
|
||||
websocketIdsLock = sync.Mutex{}
|
||||
connectedWebsockets map[int]*SocketConnection
|
||||
connectedWebsocketIdCounter = 0
|
||||
SocketGetBrowserHandler gin.HandlerFunc
|
||||
SocketGetTapperHandler gin.HandlerFunc
|
||||
)
|
||||
|
||||
func init() {
|
||||
websocketUpgrader.CheckOrigin = func(r *http.Request) bool { return true } // like cors for web socket
|
||||
connectedWebsockets = make(map[int]*SocketConnection)
|
||||
}
|
||||
|
||||
func WebSocketRoutes(app *gin.Engine, eventHandlers EventHandlers) {
|
||||
SocketGetBrowserHandler = func(c *gin.Context) {
|
||||
websocketHandler(c.Writer, c.Request, eventHandlers, false)
|
||||
}
|
||||
|
||||
SocketGetTapperHandler = func(c *gin.Context) {
|
||||
websocketHandler(c.Writer, c.Request, eventHandlers, true)
|
||||
}
|
||||
|
||||
app.GET("/ws", func(c *gin.Context) {
|
||||
SocketGetBrowserHandler(c)
|
||||
})
|
||||
|
||||
app.GET("/wsTapper", func(c *gin.Context) { // TODO: add m2m authentication to this route
|
||||
SocketGetTapperHandler(c)
|
||||
})
|
||||
}
|
||||
|
||||
func websocketHandler(w http.ResponseWriter, r *http.Request, eventHandlers EventHandlers, isTapper bool) {
|
||||
ws, err := websocketUpgrader.Upgrade(w, r, nil)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed to set websocket upgrade: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
websocketIdsLock.Lock()
|
||||
|
||||
connectedWebsocketIdCounter++
|
||||
socketId := connectedWebsocketIdCounter
|
||||
connectedWebsockets[socketId] = &SocketConnection{connection: ws, lock: &sync.Mutex{}, eventHandlers: eventHandlers, isTapper: isTapper}
|
||||
|
||||
websocketIdsLock.Unlock()
|
||||
|
||||
var connection *basenine.Connection
|
||||
var isQuerySet bool
|
||||
|
||||
// `!isTapper` means it's a connection from the web UI
|
||||
if !isTapper {
|
||||
connection, err = basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed to establish a connection to Basenine: %v", err)
|
||||
socketCleanup(socketId, connectedWebsockets[socketId])
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
data := make(chan []byte)
|
||||
meta := make(chan []byte)
|
||||
|
||||
defer func() {
socketCleanup(socketId, connectedWebsockets[socketId])
// Signal the Basenine data/meta readers to stop only if they were started
if isQuerySet {
data <- []byte(basenine.CloseChannel)
meta <- []byte(basenine.CloseChannel)
}
if connection != nil {
connection.Close()
}
}()
|
||||
|
||||
eventHandlers.WebSocketConnect(socketId, isTapper)
|
||||
|
||||
startTimeBytes, _ := models.CreateWebsocketStartTimeMessage(utils.StartTime)
|
||||
|
||||
if err = SendToSocket(socketId, startTimeBytes); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
|
||||
var params WebSocketParams
|
||||
|
||||
for {
|
||||
_, msg, err := ws.ReadMessage()
|
||||
if err != nil {
|
||||
if _, ok := err.(*websocket.CloseError); ok {
|
||||
logger.Log.Debugf("Received websocket close message, socket id: %d", socketId)
|
||||
} else {
|
||||
logger.Log.Errorf("Error reading message, socket id: %d, error: %v", socketId, err)
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
if !isTapper && !isQuerySet {
|
||||
if err := json.Unmarshal(msg, ¶ms); err != nil {
|
||||
logger.Log.Errorf("Error unmarshalling parameters: %v", socketId, err)
|
||||
continue
|
||||
}
|
||||
|
||||
query := params.Query
|
||||
err = basenine.Validate(shared.BasenineHost, shared.BaseninePort, query)
|
||||
if err != nil {
|
||||
toastBytes, _ := models.CreateWebsocketToastMessage(&models.ToastMessage{
|
||||
Type: "error",
|
||||
AutoClose: 5000,
|
||||
Text: fmt.Sprintf("Syntax error: %s", err.Error()),
|
||||
})
|
||||
if err := SendToSocket(socketId, toastBytes); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
isQuerySet = true
|
||||
|
||||
handleDataChannel := func(c *basenine.Connection, data chan []byte) {
|
||||
for {
|
||||
bytes := <-data
|
||||
|
||||
if string(bytes) == basenine.CloseChannel {
|
||||
return
|
||||
}
|
||||
|
||||
var entry *tapApi.Entry
|
||||
err = json.Unmarshal(bytes, &entry)
|
||||
if err != nil {
|
||||
logger.Log.Debugf("Error unmarshalling entry: %v", err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
var message []byte
|
||||
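// Send either the full entry or a summarized base entry, depending on what the client requested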
if params.EnableFullEntries {
|
||||
message, _ = models.CreateFullEntryWebSocketMessage(entry)
|
||||
} else {
|
||||
extension := extensionsMap[entry.Protocol.Name]
|
||||
base := extension.Dissector.Summarize(entry)
|
||||
message, _ = models.CreateBaseEntryWebSocketMessage(base)
|
||||
}
|
||||
|
||||
if err := SendToSocket(socketId, message); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handleMetaChannel := func(c *basenine.Connection, meta chan []byte) {
|
||||
for {
|
||||
bytes := <-meta
|
||||
|
||||
if string(bytes) == basenine.CloseChannel {
|
||||
return
|
||||
}
|
||||
|
||||
var metadata *basenine.Metadata
|
||||
err = json.Unmarshal(bytes, &metadata)
|
||||
if err != nil {
|
||||
logger.Log.Debugf("Error unmarshalling metadata: %v", err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
metadataBytes, _ := models.CreateWebsocketQueryMetadataMessage(metadata)
|
||||
if err := SendToSocket(socketId, metadataBytes); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
go handleDataChannel(connection, data)
|
||||
go handleMetaChannel(connection, meta)
|
||||
|
||||
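// Stream both the query results and the query metadata back to this WebSocket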
connection.Query(query, data, meta)
|
||||
} else {
|
||||
eventHandlers.WebSocketMessage(socketId, msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func socketCleanup(socketId int, socketConnection *SocketConnection) {
|
||||
err := socketConnection.connection.Close()
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Error closing socket connection for socket id %d: %v", socketId, err)
|
||||
}
|
||||
|
||||
websocketIdsLock.Lock()
|
||||
delete(connectedWebsockets, socketId)
|
||||
websocketIdsLock.Unlock()
|
||||
|
||||
socketConnection.eventHandlers.WebSocketDisconnect(socketId, socketConnection.isTapper)
|
||||
}
|
||||
|
||||
func SendToSocket(socketId int, message []byte) error {
|
||||
socketObj := connectedWebsockets[socketId]
|
||||
if socketObj == nil {
|
||||
return fmt.Errorf("Socket %v is disconnected", socketId)
|
||||
}
|
||||
|
||||
var sent = false
|
||||
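// Watchdog: if the write below has not completed within 5 seconds, assume the socket is stuck and clean it up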
time.AfterFunc(time.Second*5, func() {
|
||||
if !sent {
|
||||
logger.Log.Error("Socket timed out")
|
||||
socketCleanup(socketId, socketObj)
|
||||
}
|
||||
})
|
||||
|
||||
socketObj.lock.Lock() // gorilla socket panics from concurrent writes to a single socket
|
||||
err := socketObj.connection.WriteMessage(websocket.TextMessage, message)
|
||||
socketObj.lock.Unlock()
|
||||
sent = true
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to write message to socket %v, err: %w", socketId, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"sync"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/models"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers/tappedPods"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers/tappers"
|
||||
"github.com/up9inc/mizu/agent/pkg/up9"
|
||||
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
var browserClientSocketUUIDs = make([]int, 0)
|
||||
var tapperClientSocketUUIDs = make([]int, 0)
|
||||
var socketListLock = sync.Mutex{}
|
||||
|
||||
type RoutesEventHandlers struct {
|
||||
EventHandlers
|
||||
SocketOutChannel chan<- *tapApi.OutputChannelItem
|
||||
}
|
||||
|
||||
func init() {
|
||||
go up9.UpdateAnalyzeStatus(BroadcastToBrowserClients)
|
||||
}
|
||||
|
||||
func (h *RoutesEventHandlers) WebSocketConnect(socketId int, isTapper bool) {
|
||||
if isTapper {
|
||||
logger.Log.Infof("Websocket event - Tapper connected, socket ID: %d", socketId)
|
||||
tappers.Connected()
|
||||
|
||||
socketListLock.Lock()
|
||||
tapperClientSocketUUIDs = append(tapperClientSocketUUIDs, socketId)
|
||||
socketListLock.Unlock()
|
||||
|
||||
nodeToTappedPodMap := tappedPods.GetNodeToTappedPodMap()
|
||||
SendTappedPods(socketId, nodeToTappedPodMap)
|
||||
} else {
|
||||
logger.Log.Infof("Websocket event - Browser socket connected, socket ID: %d", socketId)
|
||||
|
||||
socketListLock.Lock()
|
||||
browserClientSocketUUIDs = append(browserClientSocketUUIDs, socketId)
|
||||
socketListLock.Unlock()
|
||||
|
||||
BroadcastTappedPodsStatus()
|
||||
}
|
||||
}
|
||||
|
||||
func (h *RoutesEventHandlers) WebSocketDisconnect(socketId int, isTapper bool) {
|
||||
if isTapper {
|
||||
logger.Log.Infof("Websocket event - Tapper disconnected, socket ID: %d", socketId)
|
||||
tappers.Disconnected()
|
||||
|
||||
socketListLock.Lock()
|
||||
removeSocketUUIDFromTapperSlice(socketId)
|
||||
socketListLock.Unlock()
|
||||
} else {
|
||||
logger.Log.Infof("Websocket event - Browser socket disconnected, socket ID: %d", socketId)
|
||||
socketListLock.Lock()
|
||||
removeSocketUUIDFromBrowserSlice(socketId)
|
||||
socketListLock.Unlock()
|
||||
}
|
||||
}
|
||||
|
||||
func BroadcastToBrowserClients(message []byte) {
|
||||
for _, socketId := range browserClientSocketUUIDs {
|
||||
go func(socketId int) {
|
||||
if err := SendToSocket(socketId, message); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
}(socketId)
|
||||
}
|
||||
}
|
||||
|
||||
func BroadcastToTapperClients(message []byte) {
|
||||
for _, socketId := range tapperClientSocketUUIDs {
|
||||
go func(socketId int) {
|
||||
if err := SendToSocket(socketId, message); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
}(socketId)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *RoutesEventHandlers) WebSocketMessage(_ int, message []byte) {
|
||||
var socketMessageBase shared.WebSocketMessageMetadata
|
||||
err := json.Unmarshal(message, &socketMessageBase)
|
||||
if err != nil {
|
||||
logger.Log.Infof("Could not unmarshal websocket message %v", err)
|
||||
} else {
|
||||
switch socketMessageBase.MessageType {
|
||||
case shared.WebSocketMessageTypeTappedEntry:
|
||||
var tappedEntryMessage models.WebSocketTappedEntryMessage
|
||||
err := json.Unmarshal(message, &tappedEntryMessage)
|
||||
if err != nil {
|
||||
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
|
||||
} else {
|
||||
// NOTE: This is where the message comes back from the intermediate WebSocket to code.
|
||||
h.SocketOutChannel <- tappedEntryMessage.Data
|
||||
}
|
||||
case shared.WebSocketMessageTypeUpdateStatus:
|
||||
var statusMessage shared.WebSocketStatusMessage
|
||||
err := json.Unmarshal(message, &statusMessage)
|
||||
if err != nil {
|
||||
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
|
||||
} else {
|
||||
BroadcastToBrowserClients(message)
|
||||
}
|
||||
case shared.WebsocketMessageTypeOutboundLink:
|
||||
var outboundLinkMessage models.WebsocketOutboundLinkMessage
|
||||
err := json.Unmarshal(message, &outboundLinkMessage)
|
||||
if err != nil {
|
||||
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
|
||||
} else {
|
||||
handleTLSLink(outboundLinkMessage)
|
||||
}
|
||||
default:
|
||||
logger.Log.Infof("Received socket message of type %s for which no handlers are defined", socketMessageBase.MessageType)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func handleTLSLink(outboundLinkMessage models.WebsocketOutboundLinkMessage) {
|
||||
resolvedNameObject := k8sResolver.Resolve(outboundLinkMessage.Data.DstIP)
|
||||
if resolvedNameObject != nil {
|
||||
outboundLinkMessage.Data.DstIP = resolvedNameObject.FullAddress
|
||||
} else if outboundLinkMessage.Data.SuggestedResolvedName != "" {
|
||||
outboundLinkMessage.Data.DstIP = outboundLinkMessage.Data.SuggestedResolvedName
|
||||
}
|
||||
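// Deduplicate: skip links that were already broadcast recently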
cacheKey := fmt.Sprintf("%s -> %s:%d", outboundLinkMessage.Data.Src, outboundLinkMessage.Data.DstIP, outboundLinkMessage.Data.DstPort)
|
||||
_, isInCache := providers.RecentTLSLinks.Get(cacheKey)
|
||||
if isInCache {
|
||||
return
|
||||
} else {
|
||||
providers.RecentTLSLinks.SetDefault(cacheKey, outboundLinkMessage.Data)
|
||||
}
|
||||
marshaledMessage, err := json.Marshal(outboundLinkMessage)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Error marshaling outbound link message for broadcasting: %v", err)
|
||||
} else {
|
||||
logger.Log.Errorf("Broadcasting outboundlink message %s", string(marshaledMessage))
|
||||
BroadcastToBrowserClients(marshaledMessage)
|
||||
}
|
||||
}
|
||||
|
||||
func removeSocketUUIDFromBrowserSlice(uuidToRemove int) {
|
||||
newUUIDSlice := make([]int, 0, len(browserClientSocketUUIDs))
|
||||
for _, uuid := range browserClientSocketUUIDs {
|
||||
if uuid != uuidToRemove {
|
||||
newUUIDSlice = append(newUUIDSlice, uuid)
|
||||
}
|
||||
}
|
||||
browserClientSocketUUIDs = newUUIDSlice
|
||||
}
|
||||
|
||||
func removeSocketUUIDFromTapperSlice(uuidToRemove int) {
|
||||
newUUIDSlice := make([]int, 0, len(tapperClientSocketUUIDs))
|
||||
for _, uuid := range tapperClientSocketUUIDs {
|
||||
if uuid != uuidToRemove {
|
||||
newUUIDSlice = append(newUUIDSlice, uuid)
|
||||
}
|
||||
}
|
||||
tapperClientSocketUUIDs = newUUIDSlice
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/providers/tappedPods"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
func BroadcastTappedPodsStatus() {
|
||||
tappedPodsStatus := tappedPods.GetTappedPodsStatus()
|
||||
|
||||
message := shared.CreateWebSocketStatusMessage(tappedPodsStatus)
|
||||
if jsonBytes, err := json.Marshal(message); err != nil {
|
||||
logger.Log.Errorf("Could not Marshal message %v", err)
|
||||
} else {
|
||||
BroadcastToBrowserClients(jsonBytes)
|
||||
}
|
||||
}
|
||||
|
||||
func SendTappedPods(socketId int, nodeToTappedPodMap shared.NodeToPodsMap) {
|
||||
message := shared.CreateWebSocketTappedPodsMessage(nodeToTappedPodMap)
|
||||
if jsonBytes, err := json.Marshal(message); err != nil {
|
||||
logger.Log.Errorf("Could not Marshal message %v", err)
|
||||
} else {
|
||||
if err := SendToSocket(socketId, jsonBytes); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BroadcastTappedPodsToTappers(nodeToTappedPodMap shared.NodeToPodsMap) {
|
||||
message := shared.CreateWebSocketTappedPodsMessage(nodeToTappedPodMap)
|
||||
if jsonBytes, err := json.Marshal(message); err != nil {
|
||||
logger.Log.Errorf("Could not Marshal message %v", err)
|
||||
} else {
|
||||
BroadcastToTapperClients(jsonBytes)
|
||||
}
|
||||
}
|
||||
@@ -1,115 +0,0 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/antelman107/net-wait-go/wait"
|
||||
"github.com/op/go-logging"
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
"github.com/up9inc/mizu/agent/pkg/api"
|
||||
"github.com/up9inc/mizu/agent/pkg/utils"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
amqpExt "github.com/up9inc/mizu/tap/extensions/amqp"
|
||||
httpExt "github.com/up9inc/mizu/tap/extensions/http"
|
||||
kafkaExt "github.com/up9inc/mizu/tap/extensions/kafka"
|
||||
redisExt "github.com/up9inc/mizu/tap/extensions/redis"
|
||||
)
|
||||
|
||||
var (
|
||||
Extensions []*tapApi.Extension // global
|
||||
ExtensionsMap map[string]*tapApi.Extension // global
|
||||
)
|
||||
|
||||
func LoadExtensions() {
|
||||
Extensions = make([]*tapApi.Extension, 4)
|
||||
ExtensionsMap = make(map[string]*tapApi.Extension)
|
||||
|
||||
extensionAmqp := &tapApi.Extension{}
|
||||
dissectorAmqp := amqpExt.NewDissector()
|
||||
dissectorAmqp.Register(extensionAmqp)
|
||||
extensionAmqp.Dissector = dissectorAmqp
|
||||
Extensions[0] = extensionAmqp
|
||||
ExtensionsMap[extensionAmqp.Protocol.Name] = extensionAmqp
|
||||
|
||||
extensionHttp := &tapApi.Extension{}
|
||||
dissectorHttp := httpExt.NewDissector()
|
||||
dissectorHttp.Register(extensionHttp)
|
||||
extensionHttp.Dissector = dissectorHttp
|
||||
Extensions[1] = extensionHttp
|
||||
ExtensionsMap[extensionHttp.Protocol.Name] = extensionHttp
|
||||
|
||||
extensionKafka := &tapApi.Extension{}
|
||||
dissectorKafka := kafkaExt.NewDissector()
|
||||
dissectorKafka.Register(extensionKafka)
|
||||
extensionKafka.Dissector = dissectorKafka
|
||||
Extensions[2] = extensionKafka
|
||||
ExtensionsMap[extensionKafka.Protocol.Name] = extensionKafka
|
||||
|
||||
extensionRedis := &tapApi.Extension{}
|
||||
dissectorRedis := redisExt.NewDissector()
|
||||
dissectorRedis.Register(extensionRedis)
|
||||
extensionRedis.Dissector = dissectorRedis
|
||||
Extensions[3] = extensionRedis
|
||||
ExtensionsMap[extensionRedis.Protocol.Name] = extensionRedis
|
||||
|
||||
sort.Slice(Extensions, func(i, j int) bool {
|
||||
return Extensions[i].Protocol.Priority < Extensions[j].Protocol.Priority
|
||||
})
|
||||
|
||||
api.InitExtensionsMap(ExtensionsMap)
|
||||
}
|
||||
|
||||
func ConfigureBasenineServer(host string, port string, dbSize int64, logLevel logging.Level, insertionFilter string) {
|
||||
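// Wait until the Basenine TCP port is reachable (up to the 5 second deadline) before configuring it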
if !wait.New(
|
||||
wait.WithProto("tcp"),
|
||||
wait.WithWait(200*time.Millisecond),
|
||||
wait.WithBreak(50*time.Millisecond),
|
||||
wait.WithDeadline(5*time.Second),
|
||||
wait.WithDebug(logLevel == logging.DEBUG),
|
||||
).Do([]string{fmt.Sprintf("%s:%s", host, port)}) {
|
||||
logger.Log.Panicf("Basenine is not available!")
|
||||
}
|
||||
|
||||
if err := basenine.Limit(host, port, dbSize); err != nil {
|
||||
logger.Log.Panicf("Error while limiting database size: %v", err)
|
||||
}
|
||||
|
||||
// Define the macros
|
||||
for _, extension := range Extensions {
|
||||
macros := extension.Dissector.Macros()
|
||||
for macro, expanded := range macros {
|
||||
if err := basenine.Macro(host, port, macro, expanded); err != nil {
|
||||
logger.Log.Panicf("Error while adding a macro: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set the insertion filter that comes from the config
|
||||
if err := basenine.InsertionFilter(host, port, insertionFilter); err != nil {
|
||||
logger.Log.Errorf("Error while setting the insertion filter: %v", err)
|
||||
}
|
||||
|
||||
utils.StartTime = time.Now().UnixNano() / int64(time.Millisecond)
|
||||
}
|
||||
|
||||
func GetEntryInputChannel() chan *tapApi.OutputChannelItem {
|
||||
outputItemsChannel := make(chan *tapApi.OutputChannelItem)
|
||||
filteredOutputItemsChannel := make(chan *tapApi.OutputChannelItem)
|
||||
go FilterItems(outputItemsChannel, filteredOutputItemsChannel)
|
||||
go api.StartReadingEntries(filteredOutputItemsChannel, nil, ExtensionsMap)
|
||||
|
||||
return outputItemsChannel
|
||||
}
|
||||
|
||||
func FilterItems(inChannel <-chan *tapApi.OutputChannelItem, outChannel chan *tapApi.OutputChannelItem) {
|
||||
for message := range inChannel {
|
||||
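// Skip outgoing traffic that is addressed to a Kubernetes service IP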
if message.ConnectionInfo.IsOutgoing && api.CheckIsServiceIP(message.ConnectionInfo.ServerIP) {
|
||||
continue
|
||||
}
|
||||
|
||||
outChannel <- message
|
||||
}
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
)
|
||||
|
||||
// these values are used when the config.json file is not present
|
||||
const (
|
||||
defaultMaxDatabaseSizeBytes int64 = 200 * 1000 * 1000
|
||||
DefaultDatabasePath string = "./entries"
|
||||
)
|
||||
|
||||
var Config *shared.MizuAgentConfig
|
||||
|
||||
func LoadConfig() error {
|
||||
if Config != nil {
|
||||
return nil
|
||||
}
|
||||
filePath := fmt.Sprintf("%s%s", shared.ConfigDirPath, shared.ConfigFileName)
|
||||
|
||||
content, err := ioutil.ReadFile(filePath)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return applyDefaultConfig()
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(content, &Config); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func applyDefaultConfig() error {
|
||||
defaultConfig, err := getDefaultConfig()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
Config = defaultConfig
|
||||
return nil
|
||||
}
|
||||
|
||||
func getDefaultConfig() (*shared.MizuAgentConfig, error) {
|
||||
return &shared.MizuAgentConfig{
|
||||
MaxDBSizeBytes: defaultMaxDatabaseSizeBytes,
|
||||
AgentDatabasePath: DefaultDatabasePath,
|
||||
}, nil
|
||||
}
|
||||
@@ -1,28 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
"github.com/up9inc/mizu/agent/pkg/app"
|
||||
"github.com/up9inc/mizu/agent/pkg/config"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
)
|
||||
|
||||
func Flush(c *gin.Context) {
|
||||
if err := basenine.Flush(shared.BasenineHost, shared.BaseninePort); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
|
||||
} else {
|
||||
c.JSON(http.StatusOK, "Flushed.")
|
||||
}
|
||||
}
|
||||
|
||||
func Reset(c *gin.Context) {
|
||||
if err := basenine.Reset(shared.BasenineHost, shared.BaseninePort); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
|
||||
} else {
|
||||
app.ConfigureBasenineServer(shared.BasenineHost, shared.BaseninePort, config.Config.MaxDBSizeBytes, config.Config.LogLevel, config.Config.InsertionFilter)
|
||||
c.JSON(http.StatusOK, "Resetted.")
|
||||
}
|
||||
}
|
||||
@@ -1,155 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/app"
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
"github.com/up9inc/mizu/agent/pkg/models"
|
||||
"github.com/up9inc/mizu/agent/pkg/validation"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
)
|
||||
|
||||
func Error(c *gin.Context, err error) bool {
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Error getting entry: %v", err)
|
||||
_ = c.Error(err)
|
||||
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
|
||||
"error": true,
|
||||
"type": "error",
|
||||
"autoClose": "5000",
|
||||
"msg": err.Error(),
|
||||
})
|
||||
return true // signal that there was an error and the caller should return
|
||||
}
|
||||
return false // no error, can continue
|
||||
}
|
||||
|
||||
func GetEntries(c *gin.Context) {
|
||||
entriesRequest := &models.EntriesRequest{}
|
||||
|
||||
if err := c.BindQuery(entriesRequest); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
return
|
||||
}
|
||||
validationError := validation.Validate(entriesRequest)
|
||||
if validationError != nil {
|
||||
c.JSON(http.StatusBadRequest, validationError)
return
|
||||
}
|
||||
|
||||
if entriesRequest.TimeoutMs == 0 {
|
||||
entriesRequest.TimeoutMs = 3000
|
||||
}
|
||||
|
||||
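// Fetch a page of entries matching the query from Basenine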
data, meta, err := basenine.Fetch(shared.BasenineHost, shared.BaseninePort,
|
||||
entriesRequest.LeftOff, entriesRequest.Direction, entriesRequest.Query,
|
||||
entriesRequest.Limit, time.Duration(entriesRequest.TimeoutMs)*time.Millisecond)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, err)
return
|
||||
}
|
||||
|
||||
response := &models.EntriesResponse{}
|
||||
var dataSlice []interface{}
|
||||
|
||||
for _, row := range data {
|
||||
var entry *tapApi.Entry
|
||||
err = json.Unmarshal(row, &entry)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{
|
||||
"error": true,
|
||||
"type": "error",
|
||||
"autoClose": "5000",
|
||||
"msg": string(row),
|
||||
})
|
||||
return // exit
|
||||
}
|
||||
|
||||
extension := app.ExtensionsMap[entry.Protocol.Name]
|
||||
base := extension.Dissector.Summarize(entry)
|
||||
|
||||
dataSlice = append(dataSlice, base)
|
||||
}
|
||||
|
||||
var metadata *basenine.Metadata
|
||||
err = json.Unmarshal(meta, &metadata)
|
||||
if err != nil {
|
||||
logger.Log.Debugf("Error recieving metadata: %v", err.Error())
|
||||
}
|
||||
|
||||
response.Data = dataSlice
|
||||
response.Meta = metadata
|
||||
|
||||
c.JSON(http.StatusOK, response)
|
||||
}
|
||||
|
||||
func GetEntry(c *gin.Context) {
|
||||
singleEntryRequest := &models.SingleEntryRequest{}
|
||||
|
||||
if err := c.BindQuery(singleEntryRequest); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
return
|
||||
}
|
||||
validationError := validation.Validate(singleEntryRequest)
|
||||
if validationError != nil {
|
||||
c.JSON(http.StatusBadRequest, validationError)
return
|
||||
}
|
||||
|
||||
id, _ := strconv.Atoi(c.Param("id"))
|
||||
var entry *tapApi.Entry
|
||||
bytes, err := basenine.Single(shared.BasenineHost, shared.BaseninePort, id, singleEntryRequest.Query)
|
||||
if Error(c, err) {
|
||||
return // exit
|
||||
}
|
||||
err = json.Unmarshal(bytes, &entry)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusNotFound, gin.H{
|
||||
"error": true,
|
||||
"type": "error",
|
||||
"autoClose": "5000",
|
||||
"msg": string(bytes),
|
||||
})
|
||||
return // exit
|
||||
}
|
||||
|
||||
extension := app.ExtensionsMap[entry.Protocol.Name]
|
||||
base := extension.Dissector.Summarize(entry)
|
||||
var representation []byte
|
||||
representation, err = extension.Dissector.Represent(entry.Request, entry.Response)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusNotFound, gin.H{
|
||||
"error": true,
|
||||
"type": "error",
|
||||
"autoClose": "5000",
|
||||
"msg": err.Error(),
|
||||
})
|
||||
return // exit
|
||||
}
|
||||
|
||||
var rules []map[string]interface{}
|
||||
var isRulesEnabled bool
|
||||
if entry.Protocol.Name == "http" {
|
||||
harEntry, _ := har.NewEntry(entry.Request, entry.Response, entry.StartTime, entry.ElapsedTime)
|
||||
_, rulesMatched, _isRulesEnabled := models.RunValidationRulesState(*harEntry, entry.Destination.Name)
|
||||
isRulesEnabled = _isRulesEnabled
|
||||
inrec, _ := json.Marshal(rulesMatched)
|
||||
if err := json.Unmarshal(inrec, &rules); err != nil {
|
||||
logger.Log.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, tapApi.EntryWrapper{
|
||||
Protocol: entry.Protocol,
|
||||
Representation: string(representation),
|
||||
Data: entry,
|
||||
Base: base,
|
||||
Rules: rules,
|
||||
IsRulesEnabled: isRulesEnabled,
|
||||
})
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/up9inc/mizu/agent/pkg/version"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
)
|
||||
|
||||
func GetVersion(c *gin.Context) {
|
||||
resp := shared.VersionResponse{Ver: version.Ver}
|
||||
c.JSON(http.StatusOK, resp)
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/chanced/openapi"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/up9inc/mizu/agent/pkg/dependency"
|
||||
"github.com/up9inc/mizu/agent/pkg/oas"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
func GetOASServers(c *gin.Context) {
|
||||
m := make([]string, 0)
|
||||
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGenerator)
|
||||
oasGenerator.GetServiceSpecs().Range(func(key, value interface{}) bool {
|
||||
m = append(m, key.(string))
|
||||
return true
|
||||
})
|
||||
|
||||
c.JSON(http.StatusOK, m)
|
||||
}
|
||||
|
||||
func GetOASSpec(c *gin.Context) {
|
||||
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGenerator)
|
||||
res, ok := oasGenerator.GetServiceSpecs().Load(c.Param("id"))
|
||||
if !ok {
|
||||
c.JSON(http.StatusNotFound, gin.H{
|
||||
"error": true,
|
||||
"type": "error",
|
||||
"autoClose": "5000",
|
||||
"msg": "Service not found among specs",
|
||||
})
|
||||
return // exit
|
||||
}
|
||||
|
||||
gen := res.(*oas.SpecGen)
|
||||
spec, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{
|
||||
"error": true,
|
||||
"type": "error",
|
||||
"autoClose": "5000",
|
||||
"msg": err,
|
||||
})
|
||||
return // exit
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, spec)
|
||||
}
|
||||
|
||||
func GetOASAllSpecs(c *gin.Context) {
|
||||
res := map[string]*openapi.OpenAPI{}
|
||||
|
||||
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGenerator)
|
||||
oasGenerator.GetServiceSpecs().Range(func(key, value interface{}) bool {
|
||||
svc := key.(string)
|
||||
gen := value.(*oas.SpecGen)
|
||||
spec, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
logger.Log.Warningf("Failed to obtain spec for service %s: %s", svc, err)
|
||||
return true
|
||||
}
|
||||
res[svc] = spec
|
||||
return true
|
||||
})
|
||||
c.JSON(http.StatusOK, res)
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/dependency"
|
||||
"github.com/up9inc/mizu/agent/pkg/oas"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
func TestGetOASServers(t *testing.T) {
|
||||
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
|
||||
|
||||
recorder := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(recorder)
|
||||
oas.GetDefaultOasGeneratorInstance().Start()
|
||||
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
|
||||
|
||||
GetOASServers(c)
|
||||
t.Logf("Written body: %s", recorder.Body.String())
|
||||
}
|
||||
|
||||
func TestGetOASAllSpecs(t *testing.T) {
|
||||
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
|
||||
|
||||
recorder := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(recorder)
|
||||
oas.GetDefaultOasGeneratorInstance().Start()
|
||||
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
|
||||
|
||||
GetOASAllSpecs(c)
|
||||
t.Logf("Written body: %s", recorder.Body.String())
|
||||
}
|
||||
|
||||
func TestGetOASSpec(t *testing.T) {
|
||||
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
|
||||
|
||||
recorder := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(recorder)
|
||||
oas.GetDefaultOasGeneratorInstance().Start()
|
||||
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
|
||||
|
||||
c.Params = []gin.Param{{Key: "id", Value: "some"}}
|
||||
|
||||
GetOASSpec(c)
|
||||
t.Logf("Written body: %s", recorder.Body.String())
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
)
|
||||
|
||||
type ValidateResponse struct {
|
||||
Valid bool `json:"valid"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
func PostValidate(c *gin.Context) {
|
||||
query := c.PostForm("query")
|
||||
valid := true
|
||||
message := ""
|
||||
|
||||
err := basenine.Validate(shared.BasenineHost, shared.BaseninePort, query)
|
||||
if err != nil {
|
||||
valid = false
|
||||
message = err.Error()
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, ValidateResponse{
|
||||
Valid: valid,
|
||||
Message: message,
|
||||
})
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/dependency"
|
||||
"github.com/up9inc/mizu/agent/pkg/servicemap"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type ServiceMapController struct {
|
||||
service servicemap.ServiceMap
|
||||
}
|
||||
|
||||
func NewServiceMapController() *ServiceMapController {
|
||||
serviceMapGenerator := dependency.GetInstance(dependency.ServiceMapGeneratorDependency).(servicemap.ServiceMap)
|
||||
return &ServiceMapController{
|
||||
service: serviceMapGenerator,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *ServiceMapController) Status(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, s.service.GetStatus())
|
||||
}
|
||||
|
||||
func (s *ServiceMapController) Get(c *gin.Context) {
|
||||
response := &servicemap.ServiceMapResponse{
|
||||
Status: s.service.GetStatus(),
|
||||
Nodes: s.service.GetNodes(),
|
||||
Edges: s.service.GetEdges(),
|
||||
}
|
||||
c.JSON(http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (s *ServiceMapController) Reset(c *gin.Context) {
|
||||
s.service.Reset()
|
||||
s.Status(c)
|
||||
}
|
||||
@@ -1,147 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/dependency"
|
||||
"github.com/up9inc/mizu/agent/pkg/servicemap"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/suite"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
)
|
||||
|
||||
const (
|
||||
a = "aService"
|
||||
b = "bService"
|
||||
Ip = "127.0.0.1"
|
||||
Port = "80"
|
||||
)
|
||||
|
||||
var (
|
||||
TCPEntryA = &tapApi.TCP{
|
||||
Name: a,
|
||||
Port: Port,
|
||||
IP: fmt.Sprintf("%s.%s", Ip, a),
|
||||
}
|
||||
TCPEntryB = &tapApi.TCP{
|
||||
Name: b,
|
||||
Port: Port,
|
||||
IP: fmt.Sprintf("%s.%s", Ip, b),
|
||||
}
|
||||
)
|
||||
|
||||
var ProtocolHttp = &tapApi.Protocol{
|
||||
Name: "http",
|
||||
LongName: "Hypertext Transfer Protocol -- HTTP/1.1",
|
||||
Abbreviation: "HTTP",
|
||||
Macro: "http",
|
||||
Version: "1.1",
|
||||
BackgroundColor: "#205cf5",
|
||||
ForegroundColor: "#ffffff",
|
||||
FontSize: 12,
|
||||
ReferenceLink: "https://datatracker.ietf.org/doc/html/rfc2616",
|
||||
Ports: []string{"80", "443", "8080"},
|
||||
Priority: 0,
|
||||
}
|
||||
|
||||
type ServiceMapControllerSuite struct {
|
||||
suite.Suite
|
||||
|
||||
c *ServiceMapController
|
||||
w *httptest.ResponseRecorder
|
||||
g *gin.Context
|
||||
}
|
||||
|
||||
func (s *ServiceMapControllerSuite) SetupTest() {
|
||||
dependency.RegisterGenerator(dependency.ServiceMapGeneratorDependency, func() interface{} { return servicemap.GetDefaultServiceMapInstance() })
|
||||
|
||||
s.c = NewServiceMapController()
|
||||
s.c.service.Enable()
|
||||
s.c.service.(servicemap.ServiceMapSink).NewTCPEntry(TCPEntryA, TCPEntryB, ProtocolHttp)
|
||||
|
||||
s.w = httptest.NewRecorder()
|
||||
s.g, _ = gin.CreateTestContext(s.w)
|
||||
}
|
||||
|
||||
func (s *ServiceMapControllerSuite) TestGetStatus() {
|
||||
assert := s.Assert()
|
||||
|
||||
s.c.Status(s.g)
|
||||
assert.Equal(http.StatusOK, s.w.Code)
|
||||
|
||||
var status servicemap.ServiceMapStatus
|
||||
err := json.Unmarshal(s.w.Body.Bytes(), &status)
|
||||
assert.NoError(err)
|
||||
assert.Equal("enabled", status.Status)
|
||||
assert.Equal(1, status.EntriesProcessedCount)
|
||||
assert.Equal(2, status.NodeCount)
|
||||
assert.Equal(1, status.EdgeCount)
|
||||
}
|
||||
|
||||
func (s *ServiceMapControllerSuite) TestGet() {
|
||||
assert := s.Assert()
|
||||
|
||||
s.c.Get(s.g)
|
||||
assert.Equal(http.StatusOK, s.w.Code)
|
||||
|
||||
var response servicemap.ServiceMapResponse
|
||||
err := json.Unmarshal(s.w.Body.Bytes(), &response)
|
||||
assert.NoError(err)
|
||||
|
||||
// response status
|
||||
assert.Equal("enabled", response.Status.Status)
|
||||
assert.Equal(1, response.Status.EntriesProcessedCount)
|
||||
assert.Equal(2, response.Status.NodeCount)
|
||||
assert.Equal(1, response.Status.EdgeCount)
|
||||
|
||||
// response nodes
|
||||
aNode := servicemap.ServiceMapNode{
|
||||
Id: 1,
|
||||
Name: TCPEntryA.Name,
|
||||
Entry: TCPEntryA,
|
||||
Count: 1,
|
||||
}
|
||||
bNode := servicemap.ServiceMapNode{
|
||||
Id: 2,
|
||||
Name: TCPEntryB.Name,
|
||||
Entry: TCPEntryB,
|
||||
Count: 1,
|
||||
}
|
||||
assert.Contains(response.Nodes, aNode)
|
||||
assert.Contains(response.Nodes, bNode)
|
||||
assert.Len(response.Nodes, 2)
|
||||
|
||||
// response edges
|
||||
assert.Equal([]servicemap.ServiceMapEdge{
|
||||
{
|
||||
Source: aNode,
|
||||
Destination: bNode,
|
||||
Protocol: ProtocolHttp,
|
||||
Count: 1,
|
||||
},
|
||||
}, response.Edges)
|
||||
}
|
||||
|
||||
func (s *ServiceMapControllerSuite) TestGetReset() {
|
||||
assert := s.Assert()
|
||||
|
||||
s.c.Reset(s.g)
|
||||
assert.Equal(http.StatusOK, s.w.Code)
|
||||
|
||||
var status servicemap.ServiceMapStatus
|
||||
err := json.Unmarshal(s.w.Body.Bytes(), &status)
|
||||
assert.NoError(err)
|
||||
assert.Equal("enabled", status.Status)
|
||||
assert.Equal(0, status.EntriesProcessedCount)
|
||||
assert.Equal(0, status.NodeCount)
|
||||
assert.Equal(0, status.EdgeCount)
|
||||
}
|
||||
|
||||
func TestServiceMapControllerSuite(t *testing.T) {
|
||||
suite.Run(t, new(ServiceMapControllerSuite))
|
||||
}
|
||||
@@ -1,102 +0,0 @@
|
||||
package controllers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
core "k8s.io/api/core/v1"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/up9inc/mizu/agent/pkg/api"
|
||||
"github.com/up9inc/mizu/agent/pkg/holder"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers/tappedPods"
|
||||
"github.com/up9inc/mizu/agent/pkg/providers/tappers"
|
||||
"github.com/up9inc/mizu/agent/pkg/up9"
|
||||
"github.com/up9inc/mizu/agent/pkg/validation"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/kubernetes"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
func HealthCheck(c *gin.Context) {
|
||||
tappersStatus := make([]*shared.TapperStatus, 0)
|
||||
for _, value := range tappers.GetStatus() {
|
||||
tappersStatus = append(tappersStatus, value)
|
||||
}
|
||||
|
||||
response := shared.HealthResponse{
|
||||
TappedPods: tappedPods.Get(),
|
||||
ConnectedTappersCount: tappers.GetConnectedCount(),
|
||||
TappersStatus: tappersStatus,
|
||||
}
|
||||
c.JSON(http.StatusOK, response)
|
||||
}
|
||||
|
||||
func PostTappedPods(c *gin.Context) {
|
||||
var requestTappedPods []core.Pod
|
||||
if err := c.Bind(&requestTappedPods); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
|
||||
return
|
||||
}
|
||||
|
||||
podInfos := kubernetes.GetPodInfosForPods(requestTappedPods)
|
||||
|
||||
logger.Log.Infof("[Status] POST request: %d tapped pods", len(requestTappedPods))
|
||||
tappedPods.Set(podInfos)
|
||||
api.BroadcastTappedPodsStatus()
|
||||
|
||||
nodeToTappedPodMap := kubernetes.GetNodeHostToTappedPodsMap(requestTappedPods)
|
||||
tappedPods.SetNodeToTappedPodMap(nodeToTappedPodMap)
|
||||
api.BroadcastTappedPodsToTappers(nodeToTappedPodMap)
|
||||
}
|
||||
|
||||
func PostTapperStatus(c *gin.Context) {
|
||||
tapperStatus := &shared.TapperStatus{}
|
||||
if err := c.Bind(tapperStatus); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := validation.Validate(tapperStatus); err != nil {
|
||||
c.JSON(http.StatusBadRequest, err)
|
||||
return
|
||||
}
|
||||
|
||||
logger.Log.Infof("[Status] POST request, tapper status: %v", tapperStatus)
|
||||
tappers.SetStatus(tapperStatus)
|
||||
api.BroadcastTappedPodsStatus()
|
||||
}
|
||||
|
||||
func GetConnectedTappersCount(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, tappers.GetConnectedCount())
|
||||
}
|
||||
|
||||
func GetAuthStatus(c *gin.Context) {
|
||||
authStatus, err := providers.GetAuthStatus()
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, authStatus)
|
||||
}
|
||||
|
||||
func GetTappingStatus(c *gin.Context) {
|
||||
tappedPodsStatus := tappedPods.GetTappedPodsStatus()
|
||||
c.JSON(http.StatusOK, tappedPodsStatus)
|
||||
}
|
||||
|
||||
func AnalyzeInformation(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, up9.GetAnalyzeInfo())
|
||||
}
|
||||
|
||||
func GetGeneralStats(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, providers.GetGeneralStats())
|
||||
}
|
||||
|
||||
func GetRecentTLSLinks(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, providers.GetAllRecentTLSAddresses())
|
||||
}
|
||||
|
||||
func GetCurrentResolvingInformation(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, holder.GetResolver().GetMap())
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
package dependency
|
||||
|
||||
var typeInitializerMap = make(map[DependencyContainerType]func() interface{})
|
||||
|
||||
func RegisterGenerator(name DependencyContainerType, fn func() interface{}) {
|
||||
typeInitializerMap[name] = fn
|
||||
}
|
||||
|
||||
func GetInstance(name DependencyContainerType) interface{} {
|
||||
return typeInitializerMap[name]()
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
package dependency
|
||||
|
||||
type DependencyContainerType string
|
||||
|
||||
const (
|
||||
ServiceMapGeneratorDependency = "ServiceMapGeneratorDependency"
|
||||
OasGeneratorDependency = "OasGeneratorDependency"
|
||||
)
|
||||
@@ -1,116 +0,0 @@
|
||||
package elastic
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/elastic/go-elasticsearch/v7"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"github.com/up9inc/mizu/tap/api"
|
||||
)
|
||||
|
||||
type client struct {
|
||||
es *elasticsearch.Client
|
||||
index string
|
||||
insertedCount int
|
||||
}
|
||||
|
||||
var instance *client
|
||||
var once sync.Once
|
||||
|
||||
func GetInstance() *client {
|
||||
once.Do(func() {
|
||||
instance = newClient()
|
||||
})
|
||||
return instance
|
||||
}
|
||||
|
||||
func (client *client) Configure(config shared.ElasticConfig) {
|
||||
if config.Url == "" || config.User == "" || config.Password == "" {
|
||||
if client.es != nil {
|
||||
client.es = nil
|
||||
}
|
||||
logger.Log.Infof("No elastic configuration was supplied, elastic exporter disabled")
|
||||
return
|
||||
}
|
||||
// Use a dedicated transport so the shared http.DefaultTransport is not mutated
transport := &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}
|
||||
cfg := elasticsearch.Config{
|
||||
Addresses: []string{config.Url},
|
||||
Username: config.User,
|
||||
Password: config.Password,
|
||||
Transport: transport,
|
||||
}
|
||||
|
||||
es, err := elasticsearch.NewClient(cfg)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed to initialize elastic client %v", err)
|
||||
}
|
||||
|
||||
// Verify connectivity by requesting cluster info from the server
|
||||
res, err := es.Info()
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Elastic client.Info() ERROR: %v", err)
|
||||
} else {
|
||||
client.es = es
|
||||
client.index = "mizu_traffic_http_" + time.Now().Format("2006_01_02_15_04")
|
||||
client.insertedCount = 0
|
||||
logger.Log.Infof("Elastic client configured, index: %s, cluster info: %v", client.index, res)
|
||||
defer res.Body.Close()
}
|
||||
}
|
||||
|
||||
func newClient() *client {
|
||||
return &client{
|
||||
es: nil,
|
||||
index: "",
|
||||
}
|
||||
}
|
||||
|
||||
type httpEntry struct {
|
||||
Source *api.TCP `json:"src"`
|
||||
Destination *api.TCP `json:"dst"`
|
||||
Outgoing bool `json:"outgoing"`
|
||||
CreatedAt time.Time `json:"createdAt"`
|
||||
Request map[string]interface{} `json:"request"`
|
||||
Response map[string]interface{} `json:"response"`
|
||||
ElapsedTime int64 `json:"elapsedTime"`
|
||||
}
|
||||
|
||||
func (client *client) PushEntry(entry *api.Entry) {
|
||||
if client.es == nil {
|
||||
return
|
||||
}
|
||||
|
||||
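// Only HTTP entries are exported to Elasticsearch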
if entry.Protocol.Name != "http" {
|
||||
return
|
||||
}
|
||||
|
||||
entryToPush := httpEntry{
|
||||
Source: entry.Source,
|
||||
Destination: entry.Destination,
|
||||
Outgoing: entry.Outgoing,
|
||||
CreatedAt: entry.StartTime,
|
||||
Request: entry.Request,
|
||||
Response: entry.Response,
|
||||
ElapsedTime: entry.ElapsedTime,
|
||||
}
|
||||
|
||||
entryJson, err := json.Marshal(entryToPush)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("json.Marshal ERROR: %v", err)
|
||||
return
|
||||
}
|
||||
var buffer bytes.Buffer
|
||||
buffer.WriteString(string(entryJson))
|
||||
res, err := client.es.Index(client.index, &buffer)
if err != nil {
logger.Log.Errorf("Failed to index entry in elastic: %v", err)
return
}
defer res.Body.Close()
if res.StatusCode == 201 {
client.insertedCount += 1
}
|
||||
}
|
||||
@@ -1,375 +0,0 @@
|
||||
package har
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
/*
|
||||
HTTP Archive (HAR) format
|
||||
https://w3c.github.io/web-performance/specs/HAR/Overview.html
|
||||
*/
|
||||
|
||||
// HAR is a container type for deserialization
|
||||
type HAR struct {
|
||||
Log Log `json:"log"`
|
||||
}
|
||||
|
||||
// Log represents the root of the exported data. This object MUST be present and its name MUST be "log".
|
||||
type Log struct {
|
||||
// The object contains the following name/value pairs:
|
||||
|
||||
// Required. Version number of the format.
|
||||
Version string `json:"version"`
|
||||
// Required. An object of type creator that contains the name and version
|
||||
// information of the log creator application.
|
||||
Creator Creator `json:"creator"`
|
||||
// Optional. An object of type browser that contains the name and version
|
||||
// information of the user agent.
|
||||
Browser Browser `json:"browser"`
|
||||
// Optional. An array of objects of type page, each representing one exported
|
||||
// (tracked) page. Leave out this field if the application does not support
|
||||
// grouping by pages.
|
||||
Pages []Page `json:"pages,omitempty"`
|
||||
// Required. An array of objects of type entry, each representing one
|
||||
// exported (tracked) HTTP request.
|
||||
Entries []Entry `json:"entries"`
|
||||
// Optional. A comment provided by the user or the application. Sorting
|
||||
// entries by startedDateTime (starting from the oldest) is the preferred way
// to export data since it can make importing faster. However, the reader
|
||||
// application should always make sure the array is sorted (if required for
|
||||
// the import).
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
// Creator contains information about the log creator application
|
||||
type Creator struct {
|
||||
// Required. The name of the application that created the log.
|
||||
Name string `json:"name"`
|
||||
// Required. The version number of the application that created the log.
|
||||
Version string `json:"version"`
|
||||
// Optional. A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// Browser that created the log
|
||||
type Browser struct {
|
||||
// Required. The name of the browser that created the log.
|
||||
Name string `json:"name"`
|
||||
// Required. The version number of the browser that created the log.
|
||||
Version string `json:"version"`
|
||||
// Optional. A comment provided by the user or the browser.
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
// Page represents one exported (tracked) web page; there is one <page> object
// for every page and one <entry> object for every HTTP request. When an HTTP
// trace tool isn't able to group requests by a page, the <pages> object is
// empty and individual requests don't have a parent page.
|
||||
type Page struct {
|
||||
/* There is one <page> object for every exported web page and one <entry>
|
||||
object for every HTTP request. In case when an HTTP trace tool isn't able to
|
||||
group requests by a page, the <pages> object is empty and individual
|
||||
requests don't have a parent page.
|
||||
*/
|
||||
|
||||
// Date and time stamp for the beginning of the page load
|
||||
// (ISO 8601 YYYY-MM-DDThh:mm:ss.sTZD, e.g. 2009-07-24T19:20:30.45+01:00).
|
||||
StartedDateTime string `json:"startedDateTime"`
|
||||
// Unique identifier of a page within the log. Entries use it to refer to the parent page.
|
||||
ID string `json:"id"`
|
||||
// Page title.
|
||||
Title string `json:"title"`
|
||||
// Detailed timing info about page load.
|
||||
PageTiming PageTiming `json:"pageTiming"`
|
||||
// (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// PageTiming describes timings for various events (states) fired during the page load.
|
||||
// All times are specified in milliseconds. If a time info is not available appropriate field is set to -1.
|
||||
type PageTiming struct {
|
||||
// Content of the page loaded. Number of milliseconds since page load started
|
||||
// (page.startedDateTime). Use -1 if the timing does not apply to the current
|
||||
// request.
|
||||
// Depending on the browser, the onContentLoad property represents the DOMContentLoaded
|
||||
// event or document.readyState == interactive.
|
||||
OnContentLoad int `json:"onContentLoad"`
|
||||
// Page is loaded (onLoad event fired). Number of milliseconds since page
|
||||
// load started (page.startedDateTime). Use -1 if the timing does not apply
|
||||
// to the current request.
|
||||
OnLoad int `json:"onLoad"`
|
||||
// (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
// Entry represents one exported (tracked) HTTP request.
// The Pageref field is a unique, optional reference to the parent page;
// leave it out if the application does not support grouping by pages.
|
||||
type Entry struct {
|
||||
Pageref string `json:"pageref,omitempty"`
|
||||
// Date and time stamp of the request start
|
||||
// (ISO 8601 YYYY-MM-DDThh:mm:ss.sTZD).
|
||||
StartedDateTime string `json:"startedDateTime"`
|
||||
// Total elapsed time of the request in milliseconds. This is the sum of all
|
||||
// timings available in the timings object (i.e. not including -1 values) .
|
||||
Time int `json:"time"`
|
||||
// Detailed info about the request.
|
||||
Request Request `json:"request"`
|
||||
// Detailed info about the response.
|
||||
Response Response `json:"response"`
|
||||
// Info about cache usage.
|
||||
Cache Cache `json:"cache"`
|
||||
// Detailed timing info about request/response round trip.
|
||||
PageTimings PageTimings `json:"pageTimings"`
|
||||
// optional (new in 1.2) IP address of the server that was connected
|
||||
// (result of DNS resolution).
|
||||
ServerIPAddress string `json:"serverIPAddress,omitempty"`
|
||||
// optional (new in 1.2) Unique ID of the parent TCP/IP connection, can be
|
||||
// the client port number. Note that a port number doesn't have to be a unique
// identifier in cases where the port is shared by multiple connections. If the
|
||||
// port isn't available for the application, any other unique connection ID
|
||||
// can be used instead (e.g. connection index). Leave out this field if the
|
||||
// application doesn't support this info.
|
||||
Connection string `json:"connection,omitempty"`
|
||||
// (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// Request contains detailed info about performed request.
|
||||
type Request struct {
|
||||
// Request method (GET, POST, ...).
|
||||
Method string `json:"method"`
|
||||
// Absolute URL of the request (fragments are not included).
|
||||
URL string `json:"url"`
|
||||
// Request HTTP Version.
|
||||
HTTPVersion string `json:"httpVersion"`
|
||||
// List of cookie objects.
|
||||
Cookies []Cookie `json:"cookies"`
|
||||
// List of header objects.
|
||||
Headers []NVP `json:"headers"`
|
||||
// List of query parameter objects.
|
||||
QueryString []NVP `json:"queryString"`
|
||||
// Posted data.
|
||||
PostData PostData `json:"postData"`
|
||||
// Total number of bytes from the start of the HTTP request message until
|
||||
// (and including) the double CRLF before the body. Set to -1 if the info
|
||||
// is not available.
|
||||
HeaderSize int `json:"headerSize"`
|
||||
// Size of the request body (POST data payload) in bytes. Set to -1 if the
|
||||
// info is not available.
|
||||
BodySize int `json:"bodySize"`
|
||||
// (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
// Response contains detailed info about the response.
|
||||
type Response struct {
|
||||
// Response status.
|
||||
Status int `json:"status"`
|
||||
// Response status description.
|
||||
StatusText string `json:"statusText"`
|
||||
// Response HTTP Version.
|
||||
HTTPVersion string `json:"httpVersion"`
|
||||
// List of cookie objects.
|
||||
Cookies []Cookie `json:"cookies"`
|
||||
// List of header objects.
|
||||
Headers []NVP `json:"headers"`
|
||||
// Details about the response body.
|
||||
Content Content `json:"content"`
|
||||
// Redirection target URL from the Location response header.
|
||||
RedirectURL string `json:"redirectURL"`
|
||||
// Total number of bytes from the start of the HTTP response message until
|
||||
// (and including) the double CRLF before the body. Set to -1 if the info is
|
||||
// not available.
|
||||
// The size of received response-headers is computed only from headers that
|
||||
// are really received from the server. Additional headers appended by the
|
||||
// browser are not included in this number, but they appear in the list of
|
||||
// header objects.
|
||||
HeadersSize int `json:"headersSize"`
|
||||
// Size of the received response body in bytes. Set to zero in case of
|
||||
// responses coming from the cache (304). Set to -1 if the info is not
|
||||
// available.
|
||||
BodySize int `json:"bodySize"`
|
||||
// optional (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// Cookie contains list of all cookies (used in <request> and <response> objects).
|
||||
type Cookie struct {
|
||||
// The name of the cookie.
|
||||
Name string `json:"name"`
|
||||
// The cookie value.
|
||||
Value string `json:"value"`
|
||||
// optional The path pertaining to the cookie.
|
||||
Path string `json:"path,omitempty"`
|
||||
// optional The host of the cookie.
|
||||
Domain string `json:"domain,omitempty"`
|
||||
// optional Cookie expiration time.
|
||||
// (ISO 8601 YYYY-MM-DDThh:mm:ss.sTZD, e.g. 2009-07-24T19:20:30.123+02:00).
|
||||
Expires string `json:"expires,omitempty"`
|
||||
// optional Set to true if the cookie is HTTP only, false otherwise.
|
||||
HTTPOnly bool `json:"httpOnly,omitempty"`
|
||||
// optional (new in 1.2) True if the cookie was transmitted over ssl, false
|
||||
// otherwise.
|
||||
Secure bool `json:"secure,omitempty"`
|
||||
// optional (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// NVP is simply a name/value pair with a comment
|
||||
type NVP struct {
|
||||
Name string `json:"name"`
|
||||
Value string `json:"value"`
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// PostData describes posted data, if any (embedded in <request> object).
|
||||
type PostData struct {
|
||||
// Mime type of posted data.
|
||||
MimeType string `json:"mimeType"`
|
||||
// List of posted parameters (in case of URL encoded parameters).
|
||||
Params []PostParam `json:"params"`
|
||||
// Plain text posted data
|
||||
Text string `json:"text"`
|
||||
// optional (new in 1.2) A comment provided by the user or the
|
||||
// application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
func (d PostData) B64Decoded() (bool, []byte, string) {
|
||||
// there is a weird gap in HAR spec 1.2, that does not define encoding for binary POST bodies
|
||||
// we have own convention of putting `base64` into comment field to handle it similar to response `Content`
|
||||
return b64Decoded(d.Comment, d.Text)
|
||||
}
|
||||
|
||||
// PostParam is a list of posted parameters, if any (embedded in <postData> object).
|
||||
type PostParam struct {
|
||||
// name of a posted parameter.
|
||||
Name string `json:"name"`
|
||||
// optional value of a posted parameter or content of a posted file.
|
||||
Value string `json:"value,omitempty"`
|
||||
// optional name of a posted file.
|
||||
FileName string `json:"fileName,omitempty"`
|
||||
// optional content type of a posted file.
|
||||
ContentType string `json:"contentType,omitempty"`
|
||||
// optional (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// Content describes details about response content (embedded in <response> object).
|
||||
type Content struct {
|
||||
// Length of the returned content in bytes. Should be equal to
|
||||
// response.bodySize if there is no compression and bigger when the content
|
||||
// has been compressed.
|
||||
Size int `json:"size"`
|
||||
// optional Number of bytes saved. Leave out this field if the information
|
||||
// is not available.
|
||||
Compression int `json:"compression,omitempty"`
|
||||
// MIME type of the response text (value of the Content-Type response
|
||||
// header). The charset attribute of the MIME type is included (if
|
||||
// available).
|
||||
MimeType string `json:"mimeType"`
|
||||
// optional Response body sent from the server or loaded from the browser
// cache. This field is populated with textual content only. The text field
// is either HTTP decoded text or an encoded (e.g. "base64") representation of
// the response body. Leave out this field if the information is not
// available.
Text string `json:"text,omitempty"`
// optional (new in 1.2) Encoding used for the response text field, e.g.
// "base64". Leave out this field if the text field is HTTP decoded
// (decompressed & unchunked), then trans-coded from its original character
// set into UTF-8.
Encoding string `json:"encoding,omitempty"`
|
||||
// optional (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
func (c Content) B64Decoded() (bool, []byte, string) {
|
||||
return b64Decoded(c.Encoding, c.Text)
|
||||
}
|
||||
|
||||
func b64Decoded(enc string, text string) (isBinary bool, asBytes []byte, asString string) {
|
||||
if enc == "base64" {
|
||||
decoded, err := base64.StdEncoding.DecodeString(text)
|
||||
if err != nil {
|
||||
logger.Log.Warningf("Failed to decode content as base64: %s", text)
|
||||
return false, []byte(text), text
|
||||
}
|
||||
valid := utf8.Valid(decoded)
|
||||
return !valid, decoded, string(decoded)
|
||||
} else {
|
||||
return false, nil, text
|
||||
}
|
||||
}
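// Hypothetical helper (assumption, not part of the original file): a minimal
// sketch of how a caller can use B64Decoded to tell binary bodies apart from
// textual ones. The name contentAsString is illustrative only.
func contentAsString(c Content) (string, bool) {
	isBinary, _, asString := c.B64Decoded()
	if isBinary {
		return "", false // binary payload: the caller should use the byte form instead
	}
	return asString, true
}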
|
||||
|
||||
// Cache contains info about a request coming from browser cache.
|
||||
type Cache struct {
|
||||
// optional State of a cache entry before the request. Leave out this field
|
||||
// if the information is not available.
|
||||
BeforeRequest CacheObject `json:"beforeRequest,omitempty"`
|
||||
// optional State of a cache entry after the request. Leave out this field if
|
||||
// the information is not available.
|
||||
AfterRequest CacheObject `json:"afterRequest,omitempty"`
|
||||
// optional (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// CacheObject is used by both beforeRequest and afterRequest
|
||||
type CacheObject struct {
|
||||
// optional - Expiration time of the cache entry.
|
||||
Expires string `json:"expires,omitempty"`
|
||||
// The last time the cache entry was opened.
|
||||
LastAccess string `json:"lastAccess"`
|
||||
// Etag
|
||||
ETag string `json:"eTag"`
|
||||
// The number of times the cache entry has been opened.
|
||||
HitCount int `json:"hitCount"`
|
||||
// optional (new in 1.2) A comment provided by the user or the application.
|
||||
Comment string `json:"comment,omitempty"`
|
||||
}
|
||||
|
||||
// PageTimings describes various phases within request-response round trip.
// All times are specified in milliseconds.
type PageTimings struct {
// optional - Time spent in a queue waiting for a network connection. Use -1
// if the timing does not apply to the current request.
Blocked int `json:"blocked,omitempty"`
// optional - DNS resolution time. The time required to resolve a host name.
// Use -1 if the timing does not apply to the current request.
DNS int `json:"dns,omitempty"`
// optional - Time required to create TCP connection. Use -1 if the timing
// does not apply to the current request.
Connect int `json:"connect,omitempty"`
// Time required to send HTTP request to the server.
Send int `json:"send"`
// Waiting for a response from the server.
Wait int `json:"wait"`
// Time required to read entire response from the server (or cache).
Receive int `json:"receive"`
// optional (new in 1.2) - Time required for SSL/TLS negotiation. If this
// field is defined then the time is also included in the connect field (to
// ensure backward compatibility with HAR 1.1). Use -1 if the timing does not
// apply to the current request.
Ssl int `json:"ssl,omitempty"`
// optional (new in 1.2) - A comment provided by the user or the application.
Comment string `json:"comment,omitempty"`
}
|
||||
|
||||
// TestResult contains results for an individual HTTP request
|
||||
type TestResult struct {
|
||||
URL string `json:"url"`
|
||||
Status int `json:"status"` // 200, 500, etc.
|
||||
StartTime time.Time `json:"startTime"`
|
||||
EndTime time.Time `json:"endTime"`
|
||||
Latency int `json:"latency"` // milliseconds
|
||||
Method string `json:"method"`
|
||||
HarFile string `json:"harfile"`
|
||||
}
|
||||
|
||||
// aliases for martian lib compatibility
|
||||
|
||||
type Header = NVP
|
||||
type QueryString = NVP
|
||||
type Param = PostParam
|
||||
type Timings = PageTimings
|
||||
@@ -1,38 +0,0 @@
|
||||
package har
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestContentEncoded(t *testing.T) {
|
||||
testCases := []struct {
|
||||
text string
|
||||
isBinary bool
|
||||
expectedStr string
|
||||
binaryLen int
|
||||
}{
|
||||
{"not-base64", false, "not-base64", 10},
|
||||
{"dGVzdA==", false, "test", 4},
|
||||
{"test", true, "\f@A", 3}, // valid UTF-8 with some non-printable chars
|
||||
{"IsDggPCAgPiAgID8gICAgN/vv/e/v/u/v7/9v7+/vyIKIu+3kO+3ke+3ku+3k++3lO+3le+3lu+3l++3mO+3me+3mu+3m++3nO+3ne+3nu+3n++3oO+3oe+3ou+3o++3pO+3pe+3pu+3p++3qO+3qe+3qu+3q++3rO+3re+3ru+3ryIK", true, "test", 132}, // invalid UTF-8 (thus binary), taken from https://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
c := Content{
|
||||
Encoding: "base64",
|
||||
Text: tc.text,
|
||||
}
|
||||
isBinary, asBytes, asString := c.B64Decoded()
|
||||
_ = asBytes
|
||||
|
||||
if tc.isBinary != isBinary {
|
||||
t.Errorf("Binary flag mismatch: %t != %t", tc.isBinary, isBinary)
|
||||
}
|
||||
|
||||
if !isBinary && tc.expectedStr != asString {
|
||||
t.Errorf("Decode value mismatch: %s != %s", tc.expectedStr, asString)
|
||||
}
|
||||
|
||||
if tc.binaryLen != len(asBytes) {
|
||||
t.Errorf("Binary len mismatch: %d != %d", tc.binaryLen, len(asBytes))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,252 +0,0 @@
|
||||
package har
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
// Keep it because we might want cookies in the future
|
||||
//func BuildCookies(rawCookies []interface{}) []har.Cookie {
|
||||
// cookies := make([]har.Cookie, 0, len(rawCookies))
|
||||
//
|
||||
// for _, cookie := range rawCookies {
|
||||
// c := cookie.(map[string]interface{})
|
||||
// expiresStr := ""
|
||||
// if c["expires"] != nil {
|
||||
// expiresStr = c["expires"].(string)
|
||||
// }
|
||||
// expires, _ := time.Parse(time.RFC3339, expiresStr)
|
||||
// httpOnly := false
|
||||
// if c["httponly"] != nil {
|
||||
// httpOnly, _ = strconv.ParseBool(c["httponly"].(string))
|
||||
// }
|
||||
// secure := false
|
||||
// if c["secure"] != nil {
|
||||
// secure, _ = strconv.ParseBool(c["secure"].(string))
|
||||
// }
|
||||
// path := ""
|
||||
// if c["path"] != nil {
|
||||
// path = c["path"].(string)
|
||||
// }
|
||||
// domain := ""
|
||||
// if c["domain"] != nil {
|
||||
// domain = c["domain"].(string)
|
||||
// }
|
||||
//
|
||||
// cookies = append(cookies, har.Cookie{
|
||||
// Name: c["name"].(string),
|
||||
// Value: c["value"].(string),
|
||||
// Path: path,
|
||||
// Domain: domain,
|
||||
// HTTPOnly: httpOnly,
|
||||
// Secure: secure,
|
||||
// Expires: expires,
|
||||
// Expires8601: expiresStr,
|
||||
// })
|
||||
// }
|
||||
//
|
||||
// return cookies
|
||||
//}
|
||||
|
||||
func BuildHeaders(rawHeaders []interface{}) ([]Header, string, string, string, string, string) {
|
||||
var host, scheme, authority, path, status string
|
||||
headers := make([]Header, 0, len(rawHeaders))
|
||||
|
||||
for _, header := range rawHeaders {
|
||||
h := header.(map[string]interface{})
|
||||
|
||||
headers = append(headers, Header{
|
||||
Name: h["name"].(string),
|
||||
Value: h["value"].(string),
|
||||
})
|
||||
|
||||
if h["name"] == "Host" {
|
||||
host = h["value"].(string)
|
||||
}
|
||||
if h["name"] == ":authority" {
|
||||
authority = h["value"].(string)
|
||||
}
|
||||
if h["name"] == ":scheme" {
|
||||
scheme = h["value"].(string)
|
||||
}
|
||||
if h["name"] == ":path" {
|
||||
path = h["value"].(string)
|
||||
}
|
||||
if h["name"] == ":status" {
|
||||
status = h["value"].(string)
|
||||
}
|
||||
}
|
||||
|
||||
return headers, host, scheme, authority, path, status
|
||||
}
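// Worked example (illustrative, not part of the original file): for raw headers
//
//	[]interface{}{map[string]interface{}{"name": "Host", "value": "example.com"}}
//
// BuildHeaders returns a single Header{Name: "Host", Value: "example.com"} and
// host == "example.com"; scheme, authority, path and status stay empty unless
// the corresponding HTTP/2 pseudo-headers (":scheme", ":authority", ":path",
// ":status") are present.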
|
||||
|
||||
func BuildPostParams(rawParams []interface{}) []Param {
|
||||
params := make([]Param, 0, len(rawParams))
|
||||
for _, param := range rawParams {
|
||||
p := param.(map[string]interface{})
|
||||
name := ""
|
||||
if p["name"] != nil {
|
||||
name = p["name"].(string)
|
||||
}
|
||||
value := ""
|
||||
if p["value"] != nil {
|
||||
value = p["value"].(string)
|
||||
}
|
||||
fileName := ""
|
||||
if p["fileName"] != nil {
|
||||
fileName = p["fileName"].(string)
|
||||
}
|
||||
contentType := ""
|
||||
if p["contentType"] != nil {
|
||||
contentType = p["contentType"].(string)
|
||||
}
|
||||
|
||||
params = append(params, Param{
|
||||
Name: name,
|
||||
Value: value,
|
||||
FileName: fileName,
|
||||
ContentType: contentType,
|
||||
})
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
func NewRequest(request map[string]interface{}) (harRequest *Request, err error) {
|
||||
headers, host, scheme, authority, path, _ := BuildHeaders(request["_headers"].([]interface{}))
|
||||
cookies := make([]Cookie, 0) // BuildCookies(request["_cookies"].([]interface{}))
|
||||
|
||||
postData, _ := request["postData"].(map[string]interface{})
|
||||
mimeType := postData["mimeType"]
|
||||
if mimeType == nil || len(mimeType.(string)) == 0 {
|
||||
mimeType = "text/html"
|
||||
}
|
||||
text := postData["text"]
|
||||
postDataText := ""
|
||||
if text != nil {
|
||||
postDataText = text.(string)
|
||||
}
|
||||
|
||||
queryString := make([]QueryString, 0)
|
||||
for _, _qs := range request["_queryString"].([]interface{}) {
|
||||
qs := _qs.(map[string]interface{})
|
||||
queryString = append(queryString, QueryString{
|
||||
Name: qs["name"].(string),
|
||||
Value: qs["value"].(string),
|
||||
})
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("http://%s%s", host, request["url"].(string))
|
||||
if strings.HasPrefix(mimeType.(string), "application/grpc") {
|
||||
url = fmt.Sprintf("%s://%s%s", scheme, authority, path)
|
||||
}
|
||||
|
||||
harParams := make([]Param, 0)
|
||||
if postData["params"] != nil {
|
||||
harParams = BuildPostParams(postData["params"].([]interface{}))
|
||||
}
|
||||
|
||||
harRequest = &Request{
|
||||
Method: request["method"].(string),
|
||||
URL: url,
|
||||
HTTPVersion: request["httpVersion"].(string),
|
||||
HeaderSize: -1,
|
||||
BodySize: bytes.NewBufferString(postDataText).Len(),
|
||||
QueryString: queryString,
|
||||
Headers: headers,
|
||||
Cookies: cookies,
|
||||
PostData: PostData{
|
||||
MimeType: mimeType.(string),
|
||||
Params: harParams,
|
||||
Text: postDataText,
|
||||
},
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func NewResponse(response map[string]interface{}) (harResponse *Response, err error) {
|
||||
headers, _, _, _, _, _status := BuildHeaders(response["_headers"].([]interface{}))
|
||||
cookies := make([]Cookie, 0) // BuildCookies(response["_cookies"].([]interface{}))
|
||||
|
||||
content, _ := response["content"].(map[string]interface{})
|
||||
mimeType := content["mimeType"]
|
||||
if mimeType == nil || len(mimeType.(string)) == 0 {
|
||||
mimeType = "text/html"
|
||||
}
|
||||
encoding := content["encoding"]
|
||||
text := content["text"]
|
||||
bodyText := ""
|
||||
if text != nil {
|
||||
bodyText = text.(string)
|
||||
}
|
||||
|
||||
harContent := &Content{
|
||||
Encoding: encoding.(string),
|
||||
MimeType: mimeType.(string),
|
||||
Text: bodyText,
|
||||
Size: len(bodyText),
|
||||
}
|
||||
|
||||
status := int(response["status"].(float64))
|
||||
if strings.HasPrefix(mimeType.(string), "application/grpc") {
|
||||
if _status != "" {
|
||||
status, err = strconv.Atoi(_status)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed converting status to int %s (%v,%+v)", err, err, err)
|
||||
return nil, errors.New("failed converting response status to int for HAR")
|
||||
}
|
||||
}
|
||||
|
||||
harResponse = &Response{
|
||||
HTTPVersion: response["httpVersion"].(string),
|
||||
Status: status,
|
||||
StatusText: response["statusText"].(string),
|
||||
HeadersSize: -1,
|
||||
BodySize: bytes.NewBufferString(bodyText).Len(),
|
||||
Headers: headers,
|
||||
Cookies: cookies,
|
||||
Content: *harContent,
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func NewEntry(request map[string]interface{}, response map[string]interface{}, startTime time.Time, elapsedTime int64) (*Entry, error) {
|
||||
harRequest, err := NewRequest(request)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed converting request to HAR %s (%v,%+v)", err, err, err)
|
||||
return nil, errors.New("failed converting request to HAR")
|
||||
}
|
||||
|
||||
harResponse, err := NewResponse(response)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed converting response to HAR %s (%v,%+v)", err, err, err)
|
||||
return nil, errors.New("failed converting response to HAR")
|
||||
}
|
||||
|
||||
if elapsedTime < 1 {
|
||||
elapsedTime = 1
|
||||
}
|
||||
|
||||
harEntry := Entry{
|
||||
StartedDateTime: startTime.Format(time.RFC3339),
|
||||
Time: int(elapsedTime),
|
||||
Request: *harRequest,
|
||||
Response: *harResponse,
|
||||
Cache: Cache{},
|
||||
PageTimings: PageTimings{
|
||||
Send: -1,
|
||||
Wait: -1,
|
||||
Receive: int(elapsedTime),
|
||||
},
|
||||
}
|
||||
|
||||
return &harEntry, nil
|
||||
}
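// Usage sketch (assumption, not part of the original file): NewEntry is the
// typical way to turn a captured request/response pair into a HAR entry, e.g.
//
//	entry, err := NewEntry(reqMap, respMap, time.Now(), 42 /* elapsed ms */)
//	if err == nil {
//		// entry.Request.URL and entry.Time are now populated
//	}
//
// where reqMap and respMap are the decoded request/response maps (placeholders here).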
|
||||
@@ -1,13 +0,0 @@
|
||||
package holder

import "github.com/up9inc/mizu/agent/pkg/resolver"

var k8sResolver *resolver.Resolver

func SetResolver(param *resolver.Resolver) {
k8sResolver = param
}

func GetResolver() *resolver.Resolver {
return k8sResolver
}
|
||||
@@ -1,19 +0,0 @@
|
||||
package middlewares

import "github.com/gin-gonic/gin"

func CORSMiddleware() gin.HandlerFunc {
return func(c *gin.Context) {
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With, x-session-token")
c.Writer.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS, GET, PUT, DELETE")

if c.Request.Method == "OPTIONS" {
c.AbortWithStatus(204)
return
}

c.Next()
}
}
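
// Usage sketch (assumption, not part of the original file): the middleware is
// registered on the gin engine before the routes, e.g.
//
//	app := gin.Default()
//	app.Use(CORSMiddleware())
//	app.GET("/echo", func(c *gin.Context) { c.String(200, "ok") })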
|
||||
@@ -1,162 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
"github.com/up9inc/mizu/agent/pkg/rules"
|
||||
tapApi "github.com/up9inc/mizu/tap/api"
|
||||
|
||||
basenine "github.com/up9inc/basenine/client/go"
|
||||
"github.com/up9inc/mizu/shared"
|
||||
"github.com/up9inc/mizu/tap"
|
||||
)
|
||||
|
||||
type EntriesRequest struct {
|
||||
LeftOff int `form:"leftOff" validate:"required,min=-1"`
|
||||
Direction int `form:"direction" validate:"required,oneof='1' '-1'"`
|
||||
Query string `form:"query"`
|
||||
Limit int `form:"limit" validate:"required,min=1"`
|
||||
TimeoutMs int `form:"timeoutMs" validate:"min=1"`
|
||||
}
|
||||
|
||||
type SingleEntryRequest struct {
|
||||
Query string `form:"query"`
|
||||
}
|
||||
|
||||
type EntriesResponse struct {
|
||||
Data []interface{} `json:"data"`
|
||||
Meta *basenine.Metadata `json:"meta"`
|
||||
}
|
||||
|
||||
type WebSocketEntryMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data *tapApi.BaseEntry `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
type WebSocketFullEntryMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data *tapApi.Entry `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
type WebSocketTappedEntryMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data *tapApi.OutputChannelItem
|
||||
}
|
||||
|
||||
type WebsocketOutboundLinkMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data *tap.OutboundLink
|
||||
}
|
||||
|
||||
type AuthStatus struct {
|
||||
Email string `json:"email"`
|
||||
Model string `json:"model"`
|
||||
}
|
||||
|
||||
type ToastMessage struct {
|
||||
Type string `json:"type"`
|
||||
AutoClose uint `json:"autoClose"`
|
||||
Text string `json:"text"`
|
||||
}
|
||||
|
||||
type WebSocketToastMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data *ToastMessage `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
type WebSocketQueryMetadataMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data *basenine.Metadata `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
type WebSocketStartTimeMessage struct {
|
||||
*shared.WebSocketMessageMetadata
|
||||
Data int64 `json:"data"`
|
||||
}
|
||||
|
||||
func CreateBaseEntryWebSocketMessage(base *tapApi.BaseEntry) ([]byte, error) {
|
||||
message := &WebSocketEntryMessage{
|
||||
WebSocketMessageMetadata: &shared.WebSocketMessageMetadata{
|
||||
MessageType: shared.WebSocketMessageTypeEntry,
|
||||
},
|
||||
Data: base,
|
||||
}
|
||||
return json.Marshal(message)
|
||||
}
|
||||
|
||||
func CreateFullEntryWebSocketMessage(entry *tapApi.Entry) ([]byte, error) {
|
||||
message := &WebSocketFullEntryMessage{
|
||||
WebSocketMessageMetadata: &shared.WebSocketMessageMetadata{
|
||||
MessageType: shared.WebSocketMessageTypeFullEntry,
|
||||
},
|
||||
Data: entry,
|
||||
}
|
||||
return json.Marshal(message)
|
||||
}
|
||||
|
||||
func CreateWebsocketTappedEntryMessage(base *tapApi.OutputChannelItem) ([]byte, error) {
|
||||
message := &WebSocketTappedEntryMessage{
|
||||
WebSocketMessageMetadata: &shared.WebSocketMessageMetadata{
|
||||
MessageType: shared.WebSocketMessageTypeTappedEntry,
|
||||
},
|
||||
Data: base,
|
||||
}
|
||||
return json.Marshal(message)
|
||||
}
|
||||
|
||||
func CreateWebsocketToastMessage(base *ToastMessage) ([]byte, error) {
|
||||
message := &WebSocketToastMessage{
|
||||
WebSocketMessageMetadata: &shared.WebSocketMessageMetadata{
|
||||
MessageType: shared.WebSocketMessageTypeToast,
|
||||
},
|
||||
Data: base,
|
||||
}
|
||||
return json.Marshal(message)
|
||||
}
|
||||
|
||||
func CreateWebsocketQueryMetadataMessage(base *basenine.Metadata) ([]byte, error) {
|
||||
message := &WebSocketQueryMetadataMessage{
|
||||
WebSocketMessageMetadata: &shared.WebSocketMessageMetadata{
|
||||
MessageType: shared.WebSocketMessageTypeQueryMetadata,
|
||||
},
|
||||
Data: base,
|
||||
}
|
||||
return json.Marshal(message)
|
||||
}
|
||||
|
||||
func CreateWebsocketStartTimeMessage(base int64) ([]byte, error) {
|
||||
message := &WebSocketStartTimeMessage{
|
||||
WebSocketMessageMetadata: &shared.WebSocketMessageMetadata{
|
||||
MessageType: shared.WebSocketMessageTypeStartTime,
|
||||
},
|
||||
Data: base,
|
||||
}
|
||||
return json.Marshal(message)
|
||||
}
|
||||
|
||||
// ExtendedHAR is the top level object of a HAR log.
|
||||
type ExtendedHAR struct {
|
||||
Log *ExtendedLog `json:"log"`
|
||||
}
|
||||
|
||||
// ExtendedLog is the HAR HTTP request and response log.
|
||||
type ExtendedLog struct {
|
||||
// Version number of the HAR format.
|
||||
Version string `json:"version"`
|
||||
// Creator holds information about the log creator application.
|
||||
Creator *ExtendedCreator `json:"creator"`
|
||||
// Entries is a list containing requests and responses.
|
||||
Entries []*har.Entry `json:"entries"`
|
||||
}
|
||||
|
||||
type ExtendedCreator struct {
|
||||
*har.Creator
|
||||
Source *string `json:"_source"`
|
||||
}
|
||||
|
||||
func RunValidationRulesState(harEntry har.Entry, service string) (tapApi.ApplicableRules, []rules.RulesMatched, bool) {
|
||||
resultPolicyToSend, isEnabled := rules.MatchRequestPolicy(harEntry, service)
|
||||
statusPolicyToSend, latency, numberOfRules := rules.PassedValidationRules(resultPolicyToSend)
|
||||
return tapApi.ApplicableRules{Status: statusPolicyToSend, Latency: latency, NumberOfRules: numberOfRules}, resultPolicyToSend, isEnabled
|
||||
}
|
||||
@@ -1,119 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/chanced/openapi"
|
||||
"math"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Counter struct {
|
||||
Entries int `json:"entries"`
|
||||
Failures int `json:"failures"`
|
||||
FirstSeen float64 `json:"firstSeen"`
|
||||
LastSeen float64 `json:"lastSeen"`
|
||||
SumRT float64 `json:"sumRT"`
|
||||
SumDuration float64 `json:"sumDuration"`
|
||||
}
|
||||
|
||||
func (c *Counter) addEntry(ts float64, rt float64, succ bool, dur float64) {
|
||||
if dur < 0 {
|
||||
panic("Duration cannot be negative")
|
||||
}
|
||||
|
||||
c.Entries += 1
|
||||
c.SumRT += rt
|
||||
c.SumDuration += dur
|
||||
if !succ {
|
||||
c.Failures += 1
|
||||
}
|
||||
|
||||
if c.FirstSeen == 0 {
|
||||
c.FirstSeen = ts
|
||||
} else {
|
||||
c.FirstSeen = math.Min(c.FirstSeen, ts)
|
||||
}
|
||||
|
||||
c.LastSeen = math.Max(c.LastSeen, ts)
|
||||
}
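// Worked example (illustrative, not part of the original file): feeding two entries
//
//	c := Counter{}
//	c.addEntry(100.0, 0.2, true, 0)    // ts=100s, rt=0.2s, success, no known gap
//	c.addEntry(101.5, 0.4, false, 1.5) // 1.5s later, rt=0.4s, failed
//
// leaves c with Entries=2, Failures=1, FirstSeen=100.0, LastSeen=101.5,
// SumRT=0.6 and SumDuration=1.5.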
|
||||
|
||||
func (c *Counter) addOther(other *Counter) {
|
||||
c.Entries += other.Entries
|
||||
c.SumRT += other.SumRT
|
||||
c.Failures += other.Failures
|
||||
c.SumDuration += other.SumDuration
|
||||
|
||||
if c.FirstSeen == 0 {
|
||||
c.FirstSeen = other.FirstSeen
|
||||
} else {
|
||||
c.FirstSeen = math.Min(c.FirstSeen, other.FirstSeen)
|
||||
}
|
||||
|
||||
c.LastSeen = math.Max(c.LastSeen, other.LastSeen)
|
||||
}
|
||||
|
||||
type CounterMap map[string]*Counter
|
||||
|
||||
func (m *CounterMap) addOther(other *CounterMap) {
|
||||
for src, cmap := range *other {
|
||||
if existing, ok := (*m)[src]; ok {
|
||||
existing.addOther(cmap)
|
||||
} else {
|
||||
copied := *cmap
|
||||
(*m)[src] = &copied
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setCounterMsgIfOk(oldStr string, cnt *Counter) string {
|
||||
tpl := "Mizu observed %d entries (%d failed), at %.3f hits/s, average response time is %.3f seconds"
|
||||
if oldStr == "" || (strings.HasPrefix(oldStr, "Mizu ") && strings.HasSuffix(oldStr, " seconds")) {
|
||||
return fmt.Sprintf(tpl, cnt.Entries, cnt.Failures, cnt.SumDuration/float64(cnt.Entries), cnt.SumRT/float64(cnt.Entries))
|
||||
}
|
||||
return oldStr
|
||||
}
|
||||
|
||||
type CounterMaps struct {
|
||||
counterTotal Counter
|
||||
counterMapTotal CounterMap
|
||||
}
|
||||
|
||||
func (m *CounterMaps) processOp(opObj *openapi.Operation) error {
|
||||
if _, ok := opObj.Extensions.Extension(CountersTotal); ok {
|
||||
counter := new(Counter)
|
||||
err := opObj.Extensions.DecodeExtension(CountersTotal, counter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m.counterTotal.addOther(counter)
|
||||
|
||||
opObj.Description = setCounterMsgIfOk(opObj.Description, counter)
|
||||
}
|
||||
|
||||
if _, ok := opObj.Extensions.Extension(CountersPerSource); ok {
|
||||
counterMap := new(CounterMap)
|
||||
err := opObj.Extensions.DecodeExtension(CountersPerSource, counterMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m.counterMapTotal.addOther(counterMap)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *CounterMaps) processOas(oas *openapi.OpenAPI) error {
|
||||
if oas.Extensions == nil {
|
||||
oas.Extensions = openapi.Extensions{}
|
||||
}
|
||||
|
||||
err := oas.Extensions.SetExtension(CountersTotal, m.counterTotal)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = oas.Extensions.SetExtension(CountersPerSource, m.counterMapTotal)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,215 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
func getFiles(baseDir string) (result []string, err error) {
|
||||
result = make([]string, 0)
|
||||
logger.Log.Infof("Reading files from tree: %s", baseDir)
|
||||
|
||||
inputs := []string{baseDir}
|
||||
|
||||
// https://yourbasic.org/golang/list-files-in-directory/
|
||||
visitor := func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if info.Mode()&os.ModeSymlink != 0 {
|
||||
path, _ = os.Readlink(path)
|
||||
inputs = append(inputs, path)
|
||||
return nil
|
||||
}
|
||||
|
||||
ext := strings.ToLower(filepath.Ext(path))
|
||||
if !info.IsDir() && (ext == ".har" || ext == ".ldjson") {
|
||||
result = append(result, path)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
for len(inputs) > 0 {
|
||||
path := inputs[0]
|
||||
inputs = inputs[1:]
|
||||
err = filepath.Walk(path, visitor)
|
||||
}
|
||||
|
||||
sort.SliceStable(result, func(i, j int) bool {
|
||||
return fileSize(result[i]) < fileSize(result[j])
|
||||
})
|
||||
|
||||
logger.Log.Infof("Got files: %d", len(result))
|
||||
return result, err
|
||||
}
|
||||
|
||||
func fileSize(fname string) int64 {
|
||||
fi, err := os.Stat(fname)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return fi.Size()
|
||||
}
|
||||
|
||||
func feedEntries(fromFiles []string, isSync bool) (count int, err error) {
|
||||
badFiles := make([]string, 0)
|
||||
cnt := 0
|
||||
for _, file := range fromFiles {
|
||||
logger.Log.Info("Processing file: " + file)
|
||||
ext := strings.ToLower(filepath.Ext(file))
|
||||
eCnt := 0
|
||||
switch ext {
|
||||
case ".har":
|
||||
eCnt, err = feedFromHAR(file, isSync)
|
||||
if err != nil {
|
||||
logger.Log.Warning("Failed processing file: " + err.Error())
|
||||
badFiles = append(badFiles, file)
|
||||
continue
|
||||
}
|
||||
case ".ldjson":
|
||||
eCnt, err = feedFromLDJSON(file, isSync)
|
||||
if err != nil {
|
||||
logger.Log.Warning("Failed processing file: " + err.Error())
|
||||
badFiles = append(badFiles, file)
|
||||
continue
|
||||
}
|
||||
default:
|
||||
return 0, errors.New("Unsupported file extension: " + ext)
|
||||
}
|
||||
cnt += eCnt
|
||||
}
|
||||
|
||||
for _, f := range badFiles {
|
||||
logger.Log.Infof("Bad file: %s", f)
|
||||
}
|
||||
|
||||
return cnt, nil
|
||||
}
|
||||
|
||||
func feedFromHAR(file string, isSync bool) (int, error) {
|
||||
fd, err := os.Open(file)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
defer fd.Close()
|
||||
|
||||
data, err := ioutil.ReadAll(fd)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var harDoc har.HAR
|
||||
err = json.Unmarshal(data, &harDoc)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
cnt := 0
|
||||
for _, entry := range harDoc.Log.Entries {
|
||||
cnt += 1
|
||||
feedEntry(&entry, "", isSync, file)
|
||||
}
|
||||
|
||||
return cnt, nil
|
||||
}
|
||||
|
||||
func feedEntry(entry *har.Entry, source string, isSync bool, file string) {
|
||||
entry.Comment = file
|
||||
if entry.Response.Status == 302 {
|
||||
logger.Log.Debugf("Dropped traffic entry due to permanent redirect status: %s", entry.StartedDateTime)
|
||||
}
|
||||
|
||||
if strings.Contains(entry.Request.URL, "some") { // for debugging
|
||||
logger.Log.Debugf("Interesting: %s", entry.Request.URL)
|
||||
}
|
||||
|
||||
u, err := url.Parse(entry.Request.URL)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed to parse entry URL: %v, err: %v", entry.Request.URL, err)
|
||||
}
|
||||
|
||||
ews := EntryWithSource{Entry: *entry, Source: source, Destination: u.Host, Id: uint(0)}
|
||||
if isSync {
|
||||
GetDefaultOasGeneratorInstance().entriesChan <- ews // direct (blocking) channel send, unlike the non-blocking PushEntry
|
||||
} else {
|
||||
GetDefaultOasGeneratorInstance().PushEntry(&ews)
|
||||
}
|
||||
}
|
||||
|
||||
func feedFromLDJSON(file string, isSync bool) (int, error) {
|
||||
fd, err := os.Open(file)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
defer fd.Close()
|
||||
|
||||
reader := bufio.NewReader(fd)
|
||||
|
||||
var meta map[string]interface{}
|
||||
buf := strings.Builder{}
|
||||
cnt := 0
|
||||
source := ""
|
||||
for {
|
||||
substr, isPrefix, err := reader.ReadLine()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
|
||||
buf.WriteString(string(substr))
|
||||
if isPrefix {
|
||||
continue
|
||||
}
|
||||
|
||||
line := buf.String()
|
||||
buf.Reset()
|
||||
|
||||
if meta == nil {
|
||||
err := json.Unmarshal([]byte(line), &meta)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if s, ok := meta["_source"]; ok && s != nil {
|
||||
source = s.(string)
|
||||
}
|
||||
} else {
|
||||
var entry har.Entry
|
||||
err := json.Unmarshal([]byte(line), &entry)
|
||||
if err != nil {
|
||||
logger.Log.Warningf("Failed decoding entry: %s", line)
|
||||
} else {
|
||||
cnt += 1
|
||||
feedEntry(&entry, source, isSync, file)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return cnt, nil
|
||||
}
|
||||
|
||||
func TestFilesList(t *testing.T) {
|
||||
res, err := getFiles("./test_artifacts/")
|
||||
t.Log(len(res))
|
||||
t.Log(res)
|
||||
if err != nil || len(res) != 3 {
|
||||
t.Logf("Should return 2 files but returned %d", len(res))
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
@@ -1,185 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"math"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
var (
|
||||
patUuid4 = regexp.MustCompile(`(?i)[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}`)
|
||||
patEmail = regexp.MustCompile(`^\w+([-+.']\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*$`)
|
||||
patLongNum = regexp.MustCompile(`^\d{3,}$`)
|
||||
patLongNumB = regexp.MustCompile(`[^\d]\d{3,}`)
|
||||
patLongNumA = regexp.MustCompile(`\d{3,}[^\d]`)
|
||||
)
|
||||
|
||||
func IsGibberish(str string) bool {
|
||||
if IsVersionString(str) {
|
||||
return false
|
||||
}
|
||||
|
||||
if patEmail.MatchString(str) {
|
||||
return true
|
||||
}
|
||||
|
||||
if patUuid4.MatchString(str) {
|
||||
return true
|
||||
}
|
||||
|
||||
if patLongNum.MatchString(str) || patLongNumB.MatchString(str) || patLongNumA.MatchString(str) {
|
||||
return true
|
||||
}
|
||||
|
||||
//alNum := cleanStr(str, isAlNumRune)
|
||||
//alpha := cleanStr(str, isAlphaRune)
|
||||
// noiseAll := isNoisy(alNum)
|
||||
//triAll := isTrigramBad(strings.ToLower(alpha))
|
||||
// _ = noiseAll
|
||||
|
||||
isNotAlNum := func(r rune) bool { return !isAlNumRune(r) }
|
||||
chunks := strings.FieldsFunc(str, isNotAlNum)
|
||||
noisyLen := 0
|
||||
alnumLen := 0
|
||||
for _, chunk := range chunks {
|
||||
alnumLen += len(chunk)
|
||||
noise := isNoisy(chunk)
|
||||
tri := isTrigramBad(strings.ToLower(chunk))
|
||||
if noise || tri {
|
||||
noisyLen += len(chunk)
|
||||
}
|
||||
}
|
||||
|
||||
return float64(noisyLen) > 0
|
||||
|
||||
//if float64(noisyLen) > 0 {
|
||||
// return true
|
||||
//}
|
||||
|
||||
//if len(chunks) > 0 && float64(noisyLen) >= float64(alnumLen)/3.0 {
|
||||
// return true
|
||||
//}
|
||||
|
||||
//if triAll {
|
||||
//return true
|
||||
//}
|
||||
|
||||
// return false
|
||||
}
|
||||
|
||||
func noiseLevel(str string) (score float64) {
|
||||
// opinionated heuristic: certain character-class transitions mark non-human strings
|
||||
prev := *new(rune)
|
||||
cnt := 0.0
|
||||
for _, char := range str {
|
||||
cnt += 1
|
||||
if prev > 0 {
|
||||
switch {
|
||||
// continued class of upper/lower/digit adds no noise
|
||||
case unicode.IsUpper(prev) && unicode.IsUpper(char):
|
||||
case unicode.IsLower(prev) && unicode.IsLower(char):
|
||||
case unicode.IsDigit(prev) && unicode.IsDigit(char):
|
||||
|
||||
// upper =>
|
||||
case unicode.IsUpper(prev) && unicode.IsLower(char):
|
||||
score += 0.10
|
||||
case unicode.IsUpper(prev) && unicode.IsDigit(char):
|
||||
score += 0.5
|
||||
|
||||
// lower =>
|
||||
case unicode.IsLower(prev) && unicode.IsUpper(char):
|
||||
score += 0.75
|
||||
case unicode.IsLower(prev) && unicode.IsDigit(char):
|
||||
score += 0.5
|
||||
|
||||
// digit =>
|
||||
case unicode.IsDigit(prev) && unicode.IsUpper(char):
|
||||
score += 0.75
|
||||
case unicode.IsDigit(prev) && unicode.IsLower(char):
|
||||
score += 1.0
|
||||
|
||||
// the rest is 100% noise
|
||||
default:
|
||||
score += 1
|
||||
}
|
||||
}
|
||||
prev = char
|
||||
}
|
||||
|
||||
return score
|
||||
}
|
||||
|
||||
func IsVersionString(component string) bool {
|
||||
if component == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
hasV := false
|
||||
if strings.HasPrefix(component, "v") {
|
||||
component = component[1:]
|
||||
hasV = true
|
||||
}
|
||||
|
||||
for _, c := range component {
|
||||
if string(c) != "." && !unicode.IsDigit(c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if !hasV && !strings.Contains(component, ".") {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
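// Illustrative examples (not part of the original file): IsVersionString("1.0")
// and IsVersionString("v2") return true (digits and dots, or a leading "v"),
// while IsVersionString("2") and IsVersionString("v2a") return false.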
|
||||
|
||||
func trigramScore(str string) (float64, int) {
|
||||
tgScore := 0.0
|
||||
trigrams := ngrams(str, 3)
|
||||
if len(trigrams) > 0 {
|
||||
for _, trigram := range trigrams {
|
||||
score, found := corpus_trigrams[trigram]
|
||||
if found {
|
||||
tgScore += score
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tgScore, len(trigrams)
|
||||
}
|
||||
|
||||
func isTrigramBad(s string) bool {
|
||||
tgScore, cnt := trigramScore(s)
|
||||
|
||||
if cnt > 0 {
|
||||
val := math.Sqrt(tgScore) / float64(cnt)
|
||||
val2 := tgScore / float64(cnt)
|
||||
threshold := 0.005
|
||||
bad := val < threshold
|
||||
threshold2 := math.Log(float64(cnt)-2) * 0.1
|
||||
bad2 := val2 < threshold2
|
||||
return bad && bad2 // TODO: improve this logic to be clearer
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isNoisy(s string) bool {
|
||||
noise := noiseLevel(s)
|
||||
|
||||
if len(s) > 0 {
|
||||
val := (noise * noise) / float64(len(s))
|
||||
threshold := 0.6
|
||||
bad := val > threshold
|
||||
return bad
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func ngrams(s string, n int) []string {
|
||||
result := make([]string, 0)
|
||||
for i := 0; i < len(s)-n+1; i++ {
|
||||
result = append(result, s[i:i+n])
|
||||
}
|
||||
return result
|
||||
}
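// Worked example (illustrative): ngrams("abcd", 3) returns []string{"abc", "bcd"};
// for strings shorter than n the loop body never runs and the result stays empty.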
|
||||
@@ -1,212 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNegative(t *testing.T) {
|
||||
cases := []string{
|
||||
"",
|
||||
"{}",
|
||||
"0.0.29",
|
||||
"0.1",
|
||||
"1.0",
|
||||
"1.0.0",
|
||||
"2.1.73",
|
||||
"abTestV2",
|
||||
"actionText,setName,setAttribute,save,ignore,onEnd,getContext,end,get",
|
||||
"AddUserGroupLink",
|
||||
"advert-management.adBlockerMessage.html",
|
||||
"agents.author.1.json",
|
||||
"animated-gif",
|
||||
"b", // can be valid hexadecimal
|
||||
"big-danger-coronavirus-panic-greater-crisis",
|
||||
"breakout-box",
|
||||
"callback",
|
||||
"core.algorithm_execution.view",
|
||||
"core.devices.view",
|
||||
"data.json",
|
||||
"dialog.overlay.infinity.json",
|
||||
"domain-input",
|
||||
"embeddable", // lul, it's a valid HEX!
|
||||
"embeddable_blip",
|
||||
"E PLURIBUS UNUM",
|
||||
"etc",
|
||||
"eu-central-1a",
|
||||
"fcgi-bin",
|
||||
"footer.include.html",
|
||||
"fullHashes:find",
|
||||
"generate-feed",
|
||||
"GetAds",
|
||||
"GetCart",
|
||||
"GetUniversalVariableUser",
|
||||
"github-audit-exports",
|
||||
"g.js",
|
||||
"g.pixel",
|
||||
".html",
|
||||
"Hugo Michiels",
|
||||
"image.sbix",
|
||||
"index.html",
|
||||
"iPad",
|
||||
"Joanna Mazewski",
|
||||
"LibGit2Sharp",
|
||||
"Michael_Vaughan1.png",
|
||||
"New RSS feed has been generated",
|
||||
"nick-clegg",
|
||||
"opt-out",
|
||||
"pixel_details.html",
|
||||
"post.json",
|
||||
"profile-method-info",
|
||||
"project-id",
|
||||
"publisha.1.json",
|
||||
"publish_and_moderate",
|
||||
"Ronna McDaniel",
|
||||
"rtb-h",
|
||||
"runs",
|
||||
"sign-up",
|
||||
"some-uuid-maybe",
|
||||
"stable-4.0-version.json",
|
||||
"StartUpCheckout",
|
||||
"Steve Flunk",
|
||||
"sync_a9",
|
||||
"Ted Cruz",
|
||||
"test.png",
|
||||
"token",
|
||||
"ToList",
|
||||
"v2.1.3",
|
||||
"VersionCheck.php",
|
||||
"v Rusiji",
|
||||
"Walnut St",
|
||||
"web_widget",
|
||||
"zoom_in.cur",
|
||||
"xray",
|
||||
"web",
|
||||
"vipbets1",
|
||||
"trcc",
|
||||
"fbpixel",
|
||||
|
||||
// TODO below
|
||||
// "tcfv2",
|
||||
// "Matt-cartoon-255x206px-small.png",
|
||||
// "TheTelegraph_portal_white-320-small.png",
|
||||
// "testdata-10kB.js",
|
||||
}
|
||||
|
||||
for _, str := range cases {
|
||||
if IsGibberish(str) {
|
||||
t.Errorf("Mistakenly true: %s", str)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestPositive(t *testing.T) {
|
||||
cases := []string{
|
||||
"0a0d0174-b338-4520-a1c3-24f7e3d5ec50.html",
|
||||
"1024807212418223",
|
||||
"11ca096cbc224a67360493d44a9903",
|
||||
"1553183382779",
|
||||
"1554507871",
|
||||
"19180481",
|
||||
"203ef0f713abcebd8d62c35c0e3f12f87d71e5e4",
|
||||
"456795af-b48f-4a8d-9b37-3e932622c2f0",
|
||||
"601a2bdcc5b69137248ddbbf",
|
||||
"60fe9aaeaefe2400012df94f",
|
||||
"610bc3fd5a77a7fa25033fb0",
|
||||
"610bd0315a77a7fa25034368",
|
||||
"610bd0315a77a7fa25034368zh",
|
||||
"6120c057c7a97b03f6986f1b",
|
||||
"710a462e",
|
||||
"730970532670-compute@developer.gserviceaccount.com",
|
||||
"819db2242a648b305395537022523d65",
|
||||
"952bea17-3776-11ea-9341-42010a84012a",
|
||||
"a3226860758.html",
|
||||
"AAAA028295945",
|
||||
"arn-aws-ecs-eu-west-2-396248696294-cluster-london-01-ECSCluster-27iuIYva8nO4",
|
||||
"arn-aws-ecs-eu-west-2-396248696294-cluster-london-01-ECSCluster-27iuIYva8nO4", // ?
|
||||
"bnjksfd897345nl098asd53412kl98",
|
||||
"c738338322370b47a79251f7510dd", // prefixed hex
|
||||
"ci12NC01YzkyNTEzYzllMDRhLTAtYy5tb25pdG9yaW5nLmpzb24=", // long base64
|
||||
"css/login.0f48c49a34eb53ea4623.min.css",
|
||||
"d_fLLxlhzDilixeBEimaZ5",
|
||||
"e21f7112-3d3b-4632-9da3-a4af2e0e9166",
|
||||
"e8782afc112720300c049ff124434b79",
|
||||
"fb6cjraf9cejut2a",
|
||||
"i-0236530c66ed02200",
|
||||
"JEHJW4BKVFDRTMTUQLHKK5WVAU",
|
||||
"john.dow.1981@protonmail.com",
|
||||
"MDEyOk9yZ2FuaXphdGlvbjU3MzI0Nzk1",
|
||||
"MNUTGVFMGLEMFTBH0XSE5E02F6J2DS",
|
||||
"n63nd45qsj",
|
||||
"n9z9QGNiz",
|
||||
"NC4WTmcy",
|
||||
"proxy.3d2100fd7107262ecb55ce6847f01fa5.html",
|
||||
"QgAAAC6zw0qH2DJtnXe8Z7rUJP0FgAFKkOhcHdFWzL1ZYggtwBgiB3LSoele9o3ZqFh7iCBhHbVLAnMuJ0HF8hEw7UKecE6wd-MBXgeRMdubGydhAMZSmuUjRpqplML40bmrb8VjJKNZswD1Cg",
|
||||
"QgAAAC6zw0qH2DJtnXe8Z7rUJP0rG4sjLa_KVLlww5WEDJ__30J15en-K_6Y68jb_rU93e2TFY6fb0MYiQ1UrLNMQufqODHZUl39Lo6cXAOVOThjAMZSmuVH7n85JOYSCgzpvowMAVueGG0Xxg",
|
||||
"qwerqwerasdfqwer@protonmai.com",
|
||||
"r-ext-5579e00a95c90",
|
||||
"r-ext-5579e8b12f11e",
|
||||
"r-v4-5c92513c9e04a",
|
||||
"r-v4-5c92513c9e04a-0-c.monitoring.json",
|
||||
"segments-1563566437171.639994",
|
||||
"sp_ANQXRpqH_urn$3Auri$3Abase64$3A6698b0a3-97ad-52ce-8fc3-17d99e37a726",
|
||||
"sp_dxJTfx11_576742227280287872",
|
||||
"sp_NnUPB5wj_601a2bdcc5b69137248ddbbf",
|
||||
"sp_NxITuoE4_premiumchron-article-14302157_c_ryGQBs_r_yIWvwP",
|
||||
"t_52d94268-8810-4a7e-ba87-ffd657a6752f",
|
||||
"timeouts-1563566437171.639994",
|
||||
"u_YPF3GsGKMo02",
|
||||
|
||||
"0000000000 65535 f",
|
||||
"0000000178 00000 n",
|
||||
"0-10000",
|
||||
"01526123,",
|
||||
"0,18168,183955,3,4,1151616,5663,731,223,5104,207,3204,10,1051,175,364,1435,4,60,576,241,383,246,5,1102",
|
||||
"05/10/2020",
|
||||
"14336456724940333",
|
||||
"fb6cjraf9cejut2a",
|
||||
"JEHJW4BKVFDRTMTUQLHKK5WVAU",
|
||||
|
||||
// TODO
|
||||
/*
|
||||
"0,20",
|
||||
"0.001",
|
||||
"YISAtiX1",
|
||||
"Fxvd1timk", // questionable
|
||||
"B4GCSkORAJs",
|
||||
"D_4EDAqenHQ",
|
||||
"EICJp29EGOk",
|
||||
"Fxvd1timk",
|
||||
"GTqMZELYfQQ",
|
||||
"GZPTpLPEGmwHGWPC",
|
||||
"_HChnE9NDPY",
|
||||
"NwhjgIWHgGg",
|
||||
"production/tsbqksph4xswqjexfbec",
|
||||
"p/u/bguhrxupr23mw3nwxcrw",
|
||||
"nRSNapbJZnc",
|
||||
"zgfpbtolciznub5egzxk",
|
||||
"zufnu7aimadua9wrgwwo",
|
||||
"zznto1jzch9yjsbtbrul",
|
||||
*/
|
||||
}
|
||||
|
||||
for _, str := range cases {
|
||||
if !IsGibberish(str) {
|
||||
t.Errorf("Mistakenly false: %s", str)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestVersionStrings(t *testing.T) {
|
||||
cases := []string{
|
||||
"1.0",
|
||||
"1.0.0",
|
||||
"v2.1.3",
|
||||
"2.1.73",
|
||||
}
|
||||
|
||||
for _, str := range cases {
|
||||
if !IsVersionString(str) {
|
||||
t.Errorf("Mistakenly false: %s", str)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,77 +0,0 @@
|
||||
package oas
|
||||
|
||||
import "strings"
|
||||
|
||||
var ignoredExtensions = []string{"gif", "svg", "css", "png", "ico", "js", "woff2", "woff", "jpg", "jpeg", "swf", "ttf", "map", "webp", "otf", "mp3"}
|
||||
|
||||
var ignoredCtypePrefixes = []string{"image/", "font/", "video/", "audio/", "text/javascript"}
|
||||
var ignoredCtypes = []string{"application/javascript", "application/x-javascript", "text/css", "application/font-woff2", "application/font-woff", "application/x-font-woff"}
|
||||
|
||||
var ignoredHeaders = []string{
|
||||
"a-im", "accept",
|
||||
"authorization", "cache-control", "connection", "content-encoding", "content-length", "content-type", "cookie",
|
||||
"date", "dnt", "expect", "forwarded", "from", "front-end-https", "host", "http2-settings",
|
||||
"max-forwards", "origin", "pragma", "proxy-authorization", "proxy-connection", "range", "referer",
|
||||
"save-data", "te", "trailer", "transfer-encoding", "upgrade", "upgrade-insecure-requests", "x-download-options",
|
||||
"server", "user-agent", "via", "warning", "strict-transport-security", "x-permitted-cross-domain-policies",
|
||||
"x-att-deviceid", "x-correlation-id", "correlation-id", "x-client-data", "x-dns-prefetch-control",
|
||||
"x-http-method-override", "x-real-ip", "x-request-id", "x-request-start", "x-requested-with", "x-uidh",
|
||||
"x-same-domain", "x-content-type-options", "x-frame-options", "x-xss-protection",
|
||||
"x-wap-profile", "x-scheme", "status", "x-cache", "x-application-context", "retry-after",
|
||||
"newrelic", "x-cloud-trace-context", "sentry-trace", "x-cache-hits", "x-served-by", "x-span-name",
|
||||
"expires", "set-cookie", "p3p", "content-security-policy", "content-security-policy-report-only",
|
||||
"last-modified", "content-language", "x-varnish", "true-client-ip", "akamai-origin-hop",
|
||||
"keep-alive", "etag", "alt-svc", "x-csrf-token", "x-ua-compatible", "vary", "x-powered-by",
|
||||
"age", "allow", "www-authenticate", "expect-ct", "timing-allow-origin", "referrer-policy",
|
||||
"x-aspnet-version", "x-aspnetmvc-version", "x-timer", "x-abuse-info", "x-mod-pagespeed",
|
||||
"duration_ms", // UP9 custom
|
||||
}
|
||||
|
||||
var ignoredHeaderPrefixes = []string{
|
||||
":", "accept-", "access-control-", "if-", "sec-", "grpc-",
|
||||
"x-forwarded-", "x-original-", "cf-",
|
||||
"x-up9-", "x-envoy-", "x-hasura-", "x-b3-", "x-datadog-", "x-envoy-", "x-amz-", "x-newrelic-", "x-prometheus-",
|
||||
"x-akamai-", "x-spotim-", "x-amzn-", "x-ratelimit-", "x-goog-",
|
||||
}
|
||||
|
||||
func isCtypeIgnored(ctype string) bool {
|
||||
for _, prefix := range ignoredCtypePrefixes {
|
||||
if strings.HasPrefix(ctype, prefix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
for _, toIgnore := range ignoredCtypes {
|
||||
if ctype == toIgnore {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isExtIgnored(path string) bool {
|
||||
for _, extIgn := range ignoredExtensions {
|
||||
if strings.HasSuffix(path, "."+extIgn) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isHeaderIgnored(name string) bool {
|
||||
name = strings.ToLower(name)
|
||||
|
||||
for _, ignore := range ignoredHeaders {
|
||||
if name == ignore {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
for _, prefix := range ignoredHeaderPrefixes {
|
||||
if strings.HasPrefix(name, prefix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
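// Illustrative examples (not part of the original file): isExtIgnored("/static/logo.png")
// and isHeaderIgnored("X-Forwarded-For") return true, while isExtIgnored("/api/users")
// and isHeaderIgnored("X-Custom-Trace") return false.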
|
||||
@@ -1,149 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/url"
|
||||
"sync"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
)
|
||||
|
||||
var (
|
||||
syncOnce sync.Once
|
||||
instance *defaultOasGenerator
|
||||
)
|
||||
|
||||
type OasGeneratorSink interface {
|
||||
PushEntry(entryWithSource *EntryWithSource)
|
||||
}
|
||||
|
||||
type OasGenerator interface {
|
||||
Start()
|
||||
Stop()
|
||||
IsStarted() bool
|
||||
Reset()
|
||||
GetServiceSpecs() *sync.Map
|
||||
}
|
||||
|
||||
type defaultOasGenerator struct {
|
||||
started bool
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
serviceSpecs *sync.Map
|
||||
entriesChan chan EntryWithSource
|
||||
}
|
||||
|
||||
func GetDefaultOasGeneratorInstance() *defaultOasGenerator {
|
||||
syncOnce.Do(func() {
|
||||
instance = NewDefaultOasGenerator()
|
||||
logger.Log.Debug("OAS Generator Initialized")
|
||||
})
|
||||
return instance
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) Start() {
|
||||
if g.started {
|
||||
return
|
||||
}
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
g.cancel = cancel
|
||||
g.ctx = ctx
|
||||
g.entriesChan = make(chan EntryWithSource, 100) // buffer up to 100 entries for OAS processing
|
||||
g.serviceSpecs = &sync.Map{}
|
||||
g.started = true
|
||||
go g.runGenerator()
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) Stop() {
|
||||
if !g.started {
|
||||
return
|
||||
}
|
||||
g.cancel()
|
||||
g.Reset()
|
||||
g.started = false
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) IsStarted() bool {
|
||||
return g.started
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) runGenerator() {
|
||||
for {
|
||||
select {
|
||||
case <-g.ctx.Done():
|
||||
logger.Log.Infof("OAS Generator was canceled")
|
||||
return
|
||||
|
||||
case entryWithSource, ok := <-g.entriesChan:
|
||||
if !ok {
|
||||
logger.Log.Infof("OAS Generator - entries channel closed")
|
||||
break
|
||||
}
|
||||
entry := entryWithSource.Entry
|
||||
u, err := url.Parse(entry.Request.URL)
|
||||
if err != nil {
|
||||
logger.Log.Errorf("Failed to parse entry URL: %v, err: %v", entry.Request.URL, err)
|
||||
}
|
||||
|
||||
val, found := g.serviceSpecs.Load(entryWithSource.Destination)
|
||||
var gen *SpecGen
|
||||
if !found {
|
||||
gen = NewGen(u.Scheme + "://" + entryWithSource.Destination)
|
||||
g.serviceSpecs.Store(entryWithSource.Destination, gen)
|
||||
} else {
|
||||
gen = val.(*SpecGen)
|
||||
}
|
||||
|
||||
opId, err := gen.feedEntry(entryWithSource)
|
||||
if err != nil {
|
||||
txt, suberr := json.Marshal(entry)
|
||||
if suberr == nil {
|
||||
logger.Log.Debugf("Problematic entry: %s", txt)
|
||||
}
|
||||
|
||||
logger.Log.Warningf("Failed processing entry: %s", err)
|
||||
continue
|
||||
}
|
||||
|
||||
logger.Log.Debugf("Handled entry %s as opId: %s", entry.Request.URL, opId) // TODO: set opId back to entry?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) Reset() {
|
||||
g.serviceSpecs = &sync.Map{}
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) PushEntry(entryWithSource *EntryWithSource) {
|
||||
if !g.started {
|
||||
return
|
||||
}
|
||||
select {
|
||||
case g.entriesChan <- *entryWithSource:
|
||||
default:
|
||||
logger.Log.Warningf("OAS Generator - entry wasn't sent to channel because the channel has no buffer or there is no receiver")
|
||||
}
|
||||
}
|
||||
|
||||
func (g *defaultOasGenerator) GetServiceSpecs() *sync.Map {
|
||||
return g.serviceSpecs
|
||||
}
|
||||
|
||||
func NewDefaultOasGenerator() *defaultOasGenerator {
|
||||
return &defaultOasGenerator{
|
||||
started: false,
|
||||
ctx: nil,
|
||||
cancel: nil,
|
||||
serviceSpecs: nil,
|
||||
entriesChan: nil,
|
||||
}
|
||||
}
|
||||
|
||||
type EntryWithSource struct {
|
||||
Source string
|
||||
Destination string
|
||||
Entry har.Entry
|
||||
Id uint
|
||||
}
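// Usage sketch (assumption, not part of the original file): a caller starts the
// singleton generator and pushes entries into it; per-destination specs can then
// be read back from GetServiceSpecs(), e.g.
//
//	gen := GetDefaultOasGeneratorInstance()
//	gen.Start()
//	gen.PushEntry(&EntryWithSource{Source: "1.2.3.4", Destination: "example.com", Entry: harEntry})
//	gen.GetServiceSpecs().Range(func(dest, spec interface{}) bool {
//		// spec.(*SpecGen).GetSpec() builds the OpenAPI document for dest
//		return true
//	})
//
// harEntry above is a placeholder for a previously built har.Entry.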
|
||||
@@ -1,729 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"github.com/chanced/openapi"
|
||||
"github.com/google/uuid"
|
||||
"github.com/nav-inc/datetime"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"mime"
|
||||
"mime/multipart"
|
||||
"net/textproto"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
|
||||
"time"
|
||||
)
|
||||
|
||||
const LastSeenTS = "x-last-seen-ts"
|
||||
const CountersTotal = "x-counters-total"
|
||||
const CountersPerSource = "x-counters-per-source"
|
||||
const SampleId = "x-sample-entry"
|
||||
|
||||
type reqResp struct { // hello, generics in Go
|
||||
Req *har.Request
|
||||
Resp *har.Response
|
||||
}
|
||||
|
||||
type SpecGen struct {
|
||||
oas *openapi.OpenAPI
|
||||
tree *Node
|
||||
lock sync.Mutex
|
||||
}
|
||||
|
||||
func NewGen(server string) *SpecGen {
|
||||
spec := new(openapi.OpenAPI)
|
||||
spec.Version = "3.1.0"
|
||||
|
||||
info := openapi.Info{Title: server}
|
||||
info.Version = "1.0"
|
||||
spec.Info = &info
|
||||
spec.Paths = &openapi.Paths{Items: map[openapi.PathValue]*openapi.PathObj{}}
|
||||
|
||||
spec.Servers = make([]*openapi.Server, 0)
|
||||
spec.Servers = append(spec.Servers, &openapi.Server{URL: server})
|
||||
|
||||
gen := SpecGen{oas: spec, tree: new(Node)}
|
||||
return &gen
|
||||
}
|
||||
|
||||
func (g *SpecGen) StartFromSpec(oas *openapi.OpenAPI) {
|
||||
g.oas = oas
|
||||
g.tree = new(Node)
|
||||
for pathStr, pathObj := range oas.Paths.Items {
|
||||
pathSplit := strings.Split(string(pathStr), "/")
|
||||
g.tree.getOrSet(pathSplit, pathObj)
|
||||
|
||||
// clean "last entry timestamp" markers from the past
|
||||
for _, pathAndOp := range g.tree.listOps() {
|
||||
delete(pathAndOp.op.Extensions, LastSeenTS)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (g *SpecGen) feedEntry(entryWithSource EntryWithSource) (string, error) {
|
||||
g.lock.Lock()
|
||||
defer g.lock.Unlock()
|
||||
|
||||
opId, err := g.handlePathObj(&entryWithSource)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// NOTE: opId can be empty for some failed entries
|
||||
return opId, err
|
||||
}
|
||||
|
||||
func (g *SpecGen) GetSpec() (*openapi.OpenAPI, error) {
|
||||
g.lock.Lock()
|
||||
defer g.lock.Unlock()
|
||||
|
||||
g.tree.compact()
|
||||
|
||||
counters := CounterMaps{counterTotal: Counter{}, counterMapTotal: CounterMap{}}
|
||||
|
||||
for _, pathAndOp := range g.tree.listOps() {
|
||||
opObj := pathAndOp.op
|
||||
if opObj.Summary == "" {
|
||||
opObj.Summary = pathAndOp.path
|
||||
}
|
||||
|
||||
err := counters.processOp(opObj)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
err := counters.processOas(g.oas)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// put paths back from tree into OAS
|
||||
g.oas.Paths = g.tree.listPaths()
|
||||
|
||||
suggestTags(g.oas)
|
||||
|
||||
g.oas.Info.Description = setCounterMsgIfOk(g.oas.Info.Description, &counters.counterTotal)
|
||||
|
||||
// to make a deep copy, no better idea than marshal+unmarshal
|
||||
specText, err := json.MarshalIndent(g.oas, "", "\t")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
spec := new(openapi.OpenAPI)
|
||||
err = json.Unmarshal(specText, spec)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return spec, err
|
||||
}
|
||||
|
||||
func suggestTags(oas *openapi.OpenAPI) {
|
||||
paths := getPathsKeys(oas.Paths.Items)
|
||||
sort.Strings(paths) // make it stable in case of multiple candidates
|
||||
for len(paths) > 0 {
|
||||
group := make([]string, 0)
|
||||
group = append(group, paths[0])
|
||||
paths = paths[1:]
|
||||
|
||||
pathsClone := append(paths[:0:0], paths...)
|
||||
for _, path := range pathsClone {
|
||||
if getSimilarPrefix([]string{group[0], path}) != "" {
|
||||
group = append(group, path)
|
||||
paths = deleteFromSlice(paths, path)
|
||||
}
|
||||
}
|
||||
|
||||
common := getSimilarPrefix(group)
|
||||
|
||||
if len(group) > 1 {
|
||||
for _, path := range group {
|
||||
pathObj := oas.Paths.Items[openapi.PathValue(path)]
|
||||
for _, op := range getOps(pathObj) {
|
||||
if op.Tags == nil {
|
||||
op.Tags = make([]string, 0)
|
||||
}
|
||||
// only add tags if not present
|
||||
if len(op.Tags) == 0 {
|
||||
op.Tags = append(op.Tags, common)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func getPathsKeys(mymap map[openapi.PathValue]*openapi.PathObj) []string {
|
||||
keys := make([]string, len(mymap))
|
||||
|
||||
i := 0
|
||||
for k := range mymap {
|
||||
keys[i] = string(k)
|
||||
i++
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func (g *SpecGen) handlePathObj(entryWithSource *EntryWithSource) (string, error) {
|
||||
entry := entryWithSource.Entry
|
||||
urlParsed, err := url.Parse(entry.Request.URL)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if isExtIgnored(urlParsed.Path) {
|
||||
logger.Log.Debugf("Dropped traffic entry due to ignored extension: %s", urlParsed.Path)
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if entry.Request.Method == "OPTIONS" {
|
||||
logger.Log.Debugf("Dropped traffic entry due to its method: %s %s", entry.Request.Method, urlParsed.Path)
|
||||
return "", nil
|
||||
}
|
||||
|
||||
ctype := getRespCtype(&entry.Response)
|
||||
if isCtypeIgnored(ctype) {
|
||||
logger.Log.Debugf("Dropped traffic entry due to ignored response ctype: %s", ctype)
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if entry.Response.Status < 100 {
|
||||
logger.Log.Debugf("Dropped traffic entry due to status<100: %s", entry.StartedDateTime)
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if entry.Response.Status == 301 || entry.Response.Status == 308 {
|
||||
logger.Log.Debugf("Dropped traffic entry due to permanent redirect status: %s", entry.StartedDateTime)
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if entry.Response.Status == 502 || entry.Response.Status == 503 || entry.Response.Status == 504 {
|
||||
logger.Log.Debugf("Dropped traffic entry due to temporary server error: %s", entry.StartedDateTime)
|
||||
return "", nil
|
||||
}
|
||||
|
||||
var split []string
|
||||
if urlParsed.RawPath != "" {
|
||||
split = strings.Split(urlParsed.RawPath, "/")
|
||||
} else {
|
||||
split = strings.Split(urlParsed.Path, "/")
|
||||
}
|
||||
node := g.tree.getOrSet(split, new(openapi.PathObj))
|
||||
opObj, err := handleOpObj(entryWithSource, node.pathObj)
|
||||
|
||||
if opObj != nil {
|
||||
return opObj.OperationID, err
|
||||
}
|
||||
|
||||
return "", err
|
||||
}
|
||||
|
||||
func handleOpObj(entryWithSource *EntryWithSource, pathObj *openapi.PathObj) (*openapi.Operation, error) {
|
||||
entry := entryWithSource.Entry
|
||||
isSuccess := 100 <= entry.Response.Status && entry.Response.Status < 400
|
||||
opObj, wasMissing, err := getOpObj(pathObj, entry.Request.Method, isSuccess)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !isSuccess && wasMissing {
|
||||
logger.Log.Debugf("Dropped traffic entry due to failed status and no known endpoint at: %s", entry.StartedDateTime)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
err = handleRequest(&entry.Request, opObj, isSuccess)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = handleResponse(&entry.Response, opObj, isSuccess)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = handleCounters(opObj, isSuccess, entryWithSource)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return opObj, nil
|
||||
}
|
||||
|
||||
func handleCounters(opObj *openapi.Operation, success bool, entryWithSource *EntryWithSource) error {
|
||||
// TODO: if performance around DecodeExtension+SetExtension is bad, store counters as separate maps
|
||||
counter := Counter{}
|
||||
counterMap := CounterMap{}
|
||||
prevTs := 0.0
|
||||
if opObj.Extensions == nil {
|
||||
opObj.Extensions = openapi.Extensions{}
|
||||
} else {
|
||||
if _, ok := opObj.Extensions.Extension(CountersTotal); ok {
|
||||
err := opObj.Extensions.DecodeExtension(CountersTotal, &counter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if _, ok := opObj.Extensions.Extension(CountersPerSource); ok {
|
||||
err := opObj.Extensions.DecodeExtension(CountersPerSource, &counterMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if _, ok := opObj.Extensions.Extension(LastSeenTS); ok {
|
||||
err := opObj.Extensions.DecodeExtension(LastSeenTS, &prevTs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var counterPerSource *Counter
|
||||
if existing, ok := counterMap[entryWithSource.Source]; ok {
|
||||
counterPerSource = existing
|
||||
} else {
|
||||
counterPerSource = new(Counter)
|
||||
counterMap[entryWithSource.Source] = counterPerSource
|
||||
}
|
||||
|
||||
started, err := datetime.Parse(entryWithSource.Entry.StartedDateTime, time.UTC)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ts := float64(started.UnixNano()) / float64(time.Millisecond) / 1000
|
||||
rt := float64(entryWithSource.Entry.Time) / 1000
|
||||
|
||||
dur := 0.0
|
||||
if prevTs != 0 && ts >= prevTs {
|
||||
dur = ts - prevTs
|
||||
}
|
||||
|
||||
counter.addEntry(ts, rt, success, dur)
|
||||
counterPerSource.addEntry(ts, rt, success, dur)
|
||||
|
||||
err = opObj.Extensions.SetExtension(LastSeenTS, ts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = opObj.Extensions.SetExtension(CountersTotal, counter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = opObj.Extensions.SetExtension(CountersPerSource, counterMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = opObj.Extensions.SetExtension(SampleId, entryWithSource.Id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
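The timestamp handling above packs a lot into a few lines. A minimal, self-contained Go sketch of the same arithmetic follows: the entry start time becomes a float64 UNIX timestamp in seconds, the HAR `time` field (milliseconds) becomes a response time in seconds, and `dur` is the gap since the previous hit. The standard library `time.Parse` with RFC3339 stands in here for the `datetime.Parse` helper used above, and the literal values are illustrative only.

package main

import (
	"fmt"
	"time"
)

func main() {
	started, err := time.Parse(time.RFC3339, "2021-07-26T11:14:17.784151790Z")
	if err != nil {
		panic(err)
	}

	prevTs := 1627298056.5 // previously recorded last-seen timestamp, in seconds (illustrative)
	ts := float64(started.UnixNano()) / float64(time.Millisecond) / 1000 // seconds since epoch
	rt := float64(13) / 1000 // a HAR "time" of 13 ms becomes 0.013 s

	dur := 0.0
	if prevTs != 0 && ts >= prevTs {
		dur = ts - prevTs // time elapsed since the previous hit on this operation
	}
	fmt.Printf("ts=%.3f rt=%.3f dur=%.3f\n", ts, rt, dur)
}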
|
||||
func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) error {
|
||||
// TODO: we don't handle header/query-string params defined at the pathObj level, nor path params defined on the opObj
|
||||
urlParsed, err := url.Parse(req.URL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
qs := make([]har.NVP, 0)
|
||||
for name, vals := range urlParsed.Query() {
|
||||
for _, val := range vals {
|
||||
qs = append(qs, har.NVP{Name: name, Value: val})
|
||||
}
|
||||
}
|
||||
|
||||
if len(qs) != len(req.QueryString) {
|
||||
logger.Log.Warningf("QStr params in HAR do not match URL: %s", req.URL)
|
||||
}
|
||||
|
||||
qstrGW := nvParams{
|
||||
In: openapi.InQuery,
|
||||
Pairs: qs,
|
||||
IsIgnored: func(name string) bool { return false },
|
||||
GeneralizeName: func(name string) string { return name },
|
||||
}
|
||||
handleNameVals(qstrGW, &opObj.Parameters, false)
|
||||
|
||||
hdrGW := nvParams{
|
||||
In: openapi.InHeader,
|
||||
Pairs: req.Headers,
|
||||
IsIgnored: isHeaderIgnored,
|
||||
GeneralizeName: strings.ToLower,
|
||||
}
|
||||
handleNameVals(hdrGW, &opObj.Parameters, true)
|
||||
|
||||
if isSuccess {
|
||||
reqBody, err := getRequestBody(req, opObj)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if reqBody != nil {
|
||||
if req.PostData.Text == "" {
|
||||
reqBody.Required = false
|
||||
} else {
|
||||
|
||||
reqCtype, _ := getReqCtype(req)
|
||||
reqMedia, err := fillContent(reqResp{Req: req}, reqBody.Content, reqCtype)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_ = reqMedia
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
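For reference, here is a standalone sketch of the query-string flattening that handleRequest performs: url.Query() yields a map of name to value slices, which gets flattened into name/value pairs. The real code uses har.NVP; the nvp struct and the URL below are stand-ins for the sketch.

package main

import (
	"fmt"
	"net/url"
)

// nvp is an illustrative stand-in for har.NVP.
type nvp struct{ Name, Value string }

func main() {
	u, err := url.Parse("http://catalogue/catalogue?page=1&size=6&tags=")
	if err != nil {
		panic(err)
	}

	qs := make([]nvp, 0)
	for name, vals := range u.Query() {
		for _, val := range vals {
			qs = append(qs, nvp{Name: name, Value: val})
		}
	}
	fmt.Println(qs) // e.g. [{page 1} {size 6} {tags }] (map iteration order varies)
}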
|
||||
func handleResponse(resp *har.Response, opObj *openapi.Operation, isSuccess bool) error {
|
||||
// TODO: we don't support "default" response
|
||||
respObj, err := getResponseObj(resp, opObj, isSuccess)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
handleRespHeaders(resp.Headers, respObj)
|
||||
|
||||
respCtype := getRespCtype(resp)
|
||||
respContent := respObj.Content
|
||||
respMedia, err := fillContent(reqResp{Resp: resp}, respContent, respCtype)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = respMedia
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleRespHeaders(reqHeaders []har.Header, respObj *openapi.ResponseObj) {
|
||||
visited := map[string]*openapi.HeaderObj{}
|
||||
for _, pair := range reqHeaders {
|
||||
if isHeaderIgnored(pair.Name) {
|
||||
continue
|
||||
}
|
||||
|
||||
nameGeneral := strings.ToLower(pair.Name)
|
||||
|
||||
initHeaders(respObj)
|
||||
objHeaders := respObj.Headers
|
||||
param := findHeaderByName(&respObj.Headers, pair.Name)
|
||||
if param == nil {
|
||||
param = createHeader(openapi.TypeString)
|
||||
objHeaders[nameGeneral] = param
|
||||
}
|
||||
exmp := ¶m.Examples
|
||||
err := fillParamExample(&exmp, pair.Value)
|
||||
if err != nil {
|
||||
logger.Log.Warningf("Failed to add example to a parameter: %s", err)
|
||||
}
|
||||
visited[nameGeneral] = param
|
||||
}
|
||||
|
||||
// maintain "required" flag
|
||||
if respObj.Headers != nil {
|
||||
for name, param := range respObj.Headers {
|
||||
paramObj, err := param.ResolveHeader(headerResolver)
|
||||
if err != nil {
|
||||
logger.Log.Warningf("Failed to resolve param: %s", err)
|
||||
continue
|
||||
}
|
||||
|
||||
_, ok := visited[strings.ToLower(name)]
|
||||
if !ok {
|
||||
flag := false
|
||||
paramObj.Required = &flag
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func fillContent(reqResp reqResp, respContent openapi.Content, ctype string) (*openapi.MediaType, error) {
|
||||
content, found := respContent[ctype]
|
||||
if !found {
|
||||
respContent[ctype] = &openapi.MediaType{}
|
||||
content = respContent[ctype]
|
||||
}
|
||||
|
||||
var text string
|
||||
var isBinary bool
|
||||
if reqResp.Req != nil {
|
||||
isBinary, _, text = reqResp.Req.PostData.B64Decoded()
|
||||
} else {
|
||||
isBinary, _, text = reqResp.Resp.Content.B64Decoded()
|
||||
}
|
||||
|
||||
if !isBinary && text != "" {
|
||||
var exampleMsg []byte
|
||||
// try treating it as json
|
||||
any, isJSON := anyJSON(text)
|
||||
if isJSON {
|
||||
// re-marshal with forced indent
|
||||
if msg, err := json.MarshalIndent(any, "", "\t"); err != nil {
|
||||
panic("Failed to re-marshal value, super-strange")
|
||||
} else {
|
||||
exampleMsg = msg
|
||||
}
|
||||
} else {
|
||||
if msg, err := json.Marshal(text); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
exampleMsg = msg
|
||||
}
|
||||
}
|
||||
|
||||
if ctype == "application/x-www-form-urlencoded" && reqResp.Req != nil {
|
||||
handleFormDataUrlencoded(text, content)
|
||||
} else if strings.HasPrefix(ctype, "multipart/form-data") && reqResp.Req != nil {
|
||||
_, params := getReqCtype(reqResp.Req)
|
||||
handleFormDataMultipart(text, content, params)
|
||||
}
|
||||
|
||||
if content.Example == nil && len(exampleMsg) > len(content.Example) {
|
||||
content.Example = exampleMsg
|
||||
}
|
||||
}
|
||||
|
||||
return respContent[ctype], nil
|
||||
}
|
||||
|
||||
func handleFormDataUrlencoded(text string, content *openapi.MediaType) {
	formData, err := url.ParseQuery(text)
	if err != nil {
		logger.Log.Warningf("Could not decode urlencoded: %s", err)
		return
	}

	parts := make([]PartWithBody, 0)
	for name, vals := range formData {
		for _, val := range vals {
			part := new(multipart.Part)
			part.Header = textproto.MIMEHeader{}
			part.Header.Add("Content-Disposition", "form-data; name=\""+name+"\";")
			parts = append(parts, PartWithBody{part: part, body: []byte(val)})
		}
	}
	handleFormData(content, parts)
}
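A minimal sketch of what url.ParseQuery returns for a urlencoded body like the ones in the test HAR files: each name maps to a slice of values, and empty values are preserved. The body string below mirrors one of the fixture entries.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	formData, err := url.ParseQuery("agent-id=ade&callback-url=&token=sometoken")
	if err != nil {
		panic(err)
	}
	for name, vals := range formData {
		fmt.Printf("%s=%q\n", name, vals)
	}
	// Prints (in some order): agent-id=["ade"], callback-url=[""], token=["sometoken"]
}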
|
||||
func handleFormData(content *openapi.MediaType, parts []PartWithBody) {
|
||||
hadSchema := true
|
||||
if content.Schema == nil {
|
||||
hadSchema = false // will use it for required flags
|
||||
content.Schema = new(openapi.SchemaObj)
|
||||
content.Schema.Type = openapi.Types{openapi.TypeObject}
|
||||
content.Schema.Properties = openapi.Schemas{}
|
||||
}
|
||||
|
||||
props := &content.Schema.Properties
|
||||
seenNames := map[string]struct{}{} // set equivalent in Go, yikes
|
||||
for _, pwb := range parts {
|
||||
name := pwb.part.FormName()
|
||||
seenNames[name] = struct{}{}
|
||||
existing, found := (*props)[name]
|
||||
if !found {
|
||||
existing = new(openapi.SchemaObj)
|
||||
existing.Type = openapi.Types{openapi.TypeString}
|
||||
(*props)[name] = existing
|
||||
|
||||
ctype := pwb.part.Header.Get("content-type")
|
||||
if ctype != "" {
|
||||
if existing.Keywords == nil {
|
||||
existing.Keywords = map[string]json.RawMessage{}
|
||||
}
|
||||
existing.Keywords["contentMediaType"], _ = json.Marshal(ctype)
|
||||
}
|
||||
}
|
||||
|
||||
addSchemaExample(existing, string(pwb.body))
|
||||
}
|
||||
|
||||
// handle required flag
|
||||
if content.Schema.Required == nil {
|
||||
if !hadSchema {
|
||||
content.Schema.Required = make([]string, 0)
|
||||
for name := range seenNames {
|
||||
content.Schema.Required = append(content.Schema.Required, name)
|
||||
}
|
||||
sort.Strings(content.Schema.Required)
|
||||
} // else it's a known schema with no required fields
|
||||
} else {
|
||||
content.Schema.Required = intersectSliceWithMap(content.Schema.Required, seenNames)
|
||||
sort.Strings(content.Schema.Required)
|
||||
}
|
||||
}
|
||||
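The required-flag maintenance above hinges on intersecting the previously recorded required names with the names seen in the current body. A hypothetical stand-in for intersectSliceWithMap is sketched below to make that step concrete; the helper name, signature, and sample values are assumptions for illustration, not the package's actual implementation.

package main

import (
	"fmt"
	"sort"
)

// intersect keeps only the previously-required field names that were also seen
// in the current request, so a field missing from any observed body stops being required.
func intersect(required []string, seen map[string]struct{}) []string {
	out := make([]string, 0, len(required))
	for _, name := range required {
		if _, ok := seen[name]; ok {
			out = append(out, name)
		}
	}
	sort.Strings(out)
	return out
}

func main() {
	required := []string{"agent-id", "callback-url", "token", "optional"}
	seen := map[string]struct{}{"agent-id": {}, "token": {}}
	fmt.Println(intersect(required, seen)) // [agent-id token]
}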
|
||||
type PartWithBody struct {
	part *multipart.Part
	body []byte
}

func handleFormDataMultipart(text string, content *openapi.MediaType, ctypeParams map[string]string) {
	boundary, ok := ctypeParams["boundary"]
	if !ok {
		logger.Log.Errorf("Multipart header has no boundary")
		return
	}
	mpr := multipart.NewReader(strings.NewReader(text), boundary)

	parts := make([]PartWithBody, 0)
	for {
		part, err := mpr.NextPart()
		if err == io.EOF {
			break
		}
		if err != nil {
			logger.Log.Errorf("Cannot parse multipart body: %v", err)
			break
		}
		defer part.Close()

		body, err := ioutil.ReadAll(part)
		if err != nil {
			logger.Log.Errorf("Error reading multipart Part %s: %v", part.Header, err)
		}

		parts = append(parts, PartWithBody{part: part, body: body})
	}

	handleFormData(content, parts)
}
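A self-contained sketch of the multipart loop above, using the same --BOUNDARY convention as the form-multipart entry in the test HAR file; the body string here is a trimmed-down illustrative example.

package main

import (
	"fmt"
	"io"
	"mime/multipart"
	"strings"
)

func main() {
	body := "--BOUNDARY\r\nContent-Disposition: form-data; name=\"path\"\r\n\r\n/content/components\r\n--BOUNDARY--\r\n"
	mpr := multipart.NewReader(strings.NewReader(body), "BOUNDARY")

	for {
		part, err := mpr.NextPart()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		data, _ := io.ReadAll(part)
		fmt.Printf("field %q = %q\n", part.FormName(), string(data)) // field "path" = "/content/components"
	}
}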
func getRespCtype(resp *har.Response) string {
	ctype := resp.Content.MimeType
	for _, hdr := range resp.Headers {
		if strings.ToLower(hdr.Name) == "content-type" {
			ctype = hdr.Value
		}
	}

	mediaType, _, err := mime.ParseMediaType(ctype)
	if err != nil {
		return ""
	}
	return mediaType
}

func getReqCtype(req *har.Request) (ctype string, params map[string]string) {
	ctype = req.PostData.MimeType
	for _, hdr := range req.Headers {
		if strings.ToLower(hdr.Name) == "content-type" {
			ctype = hdr.Value
		}
	}

	if ctype == "" {
		return "", map[string]string{}
	}

	mediaType, params, err := mime.ParseMediaType(ctype)
	if err != nil {
		logger.Log.Errorf("Cannot parse Content-Type header %q: %v", ctype, err)
		return "", map[string]string{}
	}
	return mediaType, params
}
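As a quick illustration of the mime.ParseMediaType behavior both helpers rely on: it normalizes the media type to lower case and returns the parameters (charset, boundary, ...) as a map, which is where getReqCtype picks up the multipart boundary. The header value below is taken from the form-multipart test fixture.

package main

import (
	"fmt"
	"mime"
)

func main() {
	mediaType, params, err := mime.ParseMediaType("multipart/form-data; boundary=BOUNDARY")
	if err != nil {
		panic(err)
	}
	fmt.Println(mediaType)          // multipart/form-data
	fmt.Println(params["boundary"]) // BOUNDARY
}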
|
||||
func getResponseObj(resp *har.Response, opObj *openapi.Operation, isSuccess bool) (*openapi.ResponseObj, error) {
|
||||
statusStr := strconv.Itoa(resp.Status)
|
||||
|
||||
var response openapi.Response
|
||||
response, found := opObj.Responses[statusStr]
|
||||
if !found {
|
||||
if opObj.Responses == nil {
|
||||
opObj.Responses = map[string]openapi.Response{}
|
||||
}
|
||||
|
||||
opObj.Responses[statusStr] = &openapi.ResponseObj{Content: map[string]*openapi.MediaType{}}
|
||||
response = opObj.Responses[statusStr]
|
||||
}
|
||||
|
||||
resResponse, err := response.ResolveResponse(responseResolver)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if isSuccess {
|
||||
resResponse.Description = "Successful call with status " + statusStr
|
||||
} else {
|
||||
resResponse.Description = "Failed call with status " + statusStr
|
||||
}
|
||||
return resResponse, nil
|
||||
}
|
||||
|
||||
func getRequestBody(req *har.Request, opObj *openapi.Operation) (*openapi.RequestBodyObj, error) {
|
||||
if opObj.RequestBody == nil {
|
||||
// create it only if the request actually has a body
|
||||
if req.PostData.Text != "" {
|
||||
opObj.RequestBody = &openapi.RequestBodyObj{Description: "Generic request body", Required: true, Content: map[string]*openapi.MediaType{}}
|
||||
} else {
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
|
||||
reqBody, err := opObj.RequestBody.ResolveRequestBody(reqBodyResolver)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return reqBody, nil
|
||||
}
|
||||
|
||||
func getOpObj(pathObj *openapi.PathObj, method string, createIfNone bool) (*openapi.Operation, bool, error) {
	method = strings.ToLower(method)
	var op **openapi.Operation

	switch method {
	case "get":
		op = &pathObj.Get
	case "put":
		op = &pathObj.Put
	case "post":
		op = &pathObj.Post
	case "delete":
		op = &pathObj.Delete
	case "options":
		op = &pathObj.Options
	case "head":
		op = &pathObj.Head
	case "patch":
		op = &pathObj.Patch
	case "trace":
		op = &pathObj.Trace
	default:
		return nil, false, errors.New("unsupported HTTP method: " + method)
	}

	isMissing := false
	if *op == nil {
		isMissing = true
		if createIfNone {
			*op = &openapi.Operation{Responses: map[string]openapi.Response{}}
			newUUID := uuid.New().String()
			(**op).OperationID = newUUID
		} else {
			return nil, isMissing, nil
		}
	}

	return *op, isMissing, nil
}

@@ -1,277 +0,0 @@
|
||||
package oas
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/chanced/openapi"
|
||||
"github.com/op/go-logging"
|
||||
"github.com/up9inc/mizu/shared/logger"
|
||||
"github.com/wI2L/jsondiff"
|
||||
|
||||
"github.com/up9inc/mizu/agent/pkg/har"
|
||||
)
|
||||
|
||||
// if started via env, write file into subdir
|
||||
func outputSpec(label string, spec *openapi.OpenAPI, t *testing.T) string {
|
||||
content, err := json.MarshalIndent(spec, "", "\t")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if os.Getenv("MIZU_OAS_WRITE_FILES") != "" {
|
||||
path := "./oas-samples"
|
||||
err := os.MkdirAll(path, 0o755)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
err = ioutil.WriteFile(path+"/"+label+".json", content, 0644)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
t.Logf("Written: %s", label)
|
||||
} else {
|
||||
t.Logf("%s", string(content))
|
||||
}
|
||||
return string(content)
|
||||
}
|
||||
|
||||
func TestEntries(t *testing.T) {
|
||||
logger.InitLoggerStd(logging.INFO)
|
||||
files, err := getFiles("./test_artifacts/")
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
GetDefaultOasGeneratorInstance().Start()
|
||||
loadStartingOAS("test_artifacts/catalogue.json", "catalogue")
|
||||
loadStartingOAS("test_artifacts/trcc.json", "trcc-api-service")
|
||||
|
||||
go func() {
|
||||
for {
|
||||
time.Sleep(1 * time.Second)
|
||||
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
|
||||
svc := key.(string)
|
||||
t.Logf("Getting spec for %s", svc)
|
||||
gen := val.(*SpecGen)
|
||||
_, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
}()
|
||||
|
||||
cnt, err := feedEntries(files, true)
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
waitQueueProcessed()
|
||||
|
||||
svcs := strings.Builder{}
|
||||
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
|
||||
gen := val.(*SpecGen)
|
||||
svc := key.(string)
|
||||
svcs.WriteString(svc + ",")
|
||||
spec, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
return false
|
||||
}
|
||||
|
||||
err = spec.Validate()
|
||||
if err != nil {
|
||||
specText, _ := json.MarshalIndent(spec, "", "\t")
|
||||
t.Log(string(specText))
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
|
||||
svc := key.(string)
|
||||
gen := val.(*SpecGen)
|
||||
spec, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
outputSpec(svc, spec, t)
|
||||
|
||||
err = spec.Validate()
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
logger.Log.Infof("Total entries: %d", cnt)
|
||||
}
|
||||
|
||||
func TestFileSingle(t *testing.T) {
|
||||
GetDefaultOasGeneratorInstance().Start()
|
||||
GetDefaultOasGeneratorInstance().Reset()
|
||||
// loadStartingOAS()
|
||||
file := "test_artifacts/params.har"
|
||||
files := []string{file}
|
||||
cnt, err := feedEntries(files, true)
|
||||
if err != nil {
|
||||
logger.Log.Warning("Failed processing file: " + err.Error())
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
waitQueueProcessed()
|
||||
|
||||
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
|
||||
svc := key.(string)
|
||||
gen := val.(*SpecGen)
|
||||
spec, err := gen.GetSpec()
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
specText := outputSpec(svc, spec, t)
|
||||
|
||||
err = spec.Validate()
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
expected, err := ioutil.ReadFile(file + ".spec.json")
|
||||
if err != nil {
|
||||
t.Errorf(err.Error())
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
patFloatPrecision := regexp.MustCompile(`(\d+\.\d{1,2})(\d*)`)
|
||||
|
||||
expected = []byte(patUuid4.ReplaceAllString(string(expected), "<UUID4>"))
|
||||
specText = patUuid4.ReplaceAllString(specText, "<UUID4>")
|
||||
expected = []byte(patFloatPrecision.ReplaceAllString(string(expected), "$1"))
|
||||
specText = patFloatPrecision.ReplaceAllString(specText, "$1")
|
||||
|
||||
diff, err := jsondiff.CompareJSON(expected, []byte(specText))
|
||||
if err != nil {
|
||||
t.Errorf(err.Error())
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
if os.Getenv("MIZU_OAS_WRITE_FILES") != "" {
|
||||
err = ioutil.WriteFile(file+".spec.json", []byte(specText), 0644)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
if len(diff) > 0 {
|
||||
t.Errorf("Generated spec does not match expected:\n%s", diff.String())
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
logger.Log.Infof("Processed entries: %d", cnt)
|
||||
}
|
||||
|
||||
func waitQueueProcessed() {
|
||||
for {
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
queue := len(GetDefaultOasGeneratorInstance().entriesChan)
|
||||
logger.Log.Infof("Queue: %d", queue)
|
||||
if queue < 1 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func loadStartingOAS(file string, label string) {
|
||||
fd, err := os.Open(file)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
defer fd.Close()
|
||||
|
||||
data, err := ioutil.ReadAll(fd)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
var doc *openapi.OpenAPI
|
||||
err = json.Unmarshal(data, &doc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
gen := NewGen(label)
|
||||
gen.StartFromSpec(doc)
|
||||
|
||||
GetDefaultOasGeneratorInstance().GetServiceSpecs().Store(label, gen)
|
||||
}
|
||||
|
||||
func TestEntriesNegative(t *testing.T) {
|
||||
files := []string{"invalid"}
|
||||
_, err := feedEntries(files, false)
|
||||
if err == nil {
|
||||
t.Logf("Should have failed")
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
func TestEntriesPositive(t *testing.T) {
|
||||
files := []string{"test_artifacts/params.har"}
|
||||
_, err := feedEntries(files, false)
|
||||
if err != nil {
|
||||
t.Logf("Failed")
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadValidHAR(t *testing.T) {
|
||||
inp := `{"startedDateTime": "2021-02-03T07:48:12.959000+00:00", "time": 1, "request": {"method": "GET", "url": "http://unresolved_target/1.0.0/health", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [], "queryString": [], "headersSize": -1, "bodySize": -1}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [], "content": {"size": 2, "mimeType": "", "text": "OK"}, "redirectURL": "", "headersSize": -1, "bodySize": 2}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 1}}`
|
||||
var entry *har.Entry
|
||||
var err = json.Unmarshal([]byte(inp), &entry)
|
||||
if err != nil {
|
||||
t.Logf("Failed to decode entry: %s", err)
|
||||
t.FailNow() // demonstrates a problem in the `martian` HAR library
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadValid3_1(t *testing.T) {
|
||||
fd, err := os.Open("test_artifacts/catalogue.json")
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
defer fd.Close()
|
||||
|
||||
data, err := ioutil.ReadAll(fd)
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
var oas openapi.OpenAPI
|
||||
err = json.Unmarshal(data, &oas)
|
||||
if err != nil {
|
||||
t.Log(err)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
{
|
||||
"openapi": "3.1.0",
|
||||
"info": {
|
||||
"title": "Preloaded",
|
||||
"version": "0.1",
|
||||
"description": "Test file for loading pre-existing OAS"
|
||||
},
|
||||
"paths": {
|
||||
"/catalogue/{id}": {
|
||||
"parameters": [
|
||||
{
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"style": "simple",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"example": "some-uuid-maybe"
|
||||
}
|
||||
],
|
||||
"get": {
|
||||
"parameters": [ {
|
||||
"name": "non-required-header",
|
||||
"in": "header",
|
||||
"required": true,
|
||||
"style": "simple",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"example": "some-uuid-maybe"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/catalogue/{id}/details": {
|
||||
"parameters": [
|
||||
{
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"style": "simple",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"example": "some-uuid-maybe"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -1,13 +0,0 @@
|
||||
{"messageType": "http", "_source": "some-source", ",firstMessageTime": 1627298057.784151, "lastMessageTime": 1627298065.729303, "messageCount": 12}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:17.78415179Z", "time": 13, "request": {"method": "GET", "url": "http://catalogue/catalogue/size?tags=", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "x-some", "value": "demo val"},{"name": "Host", "value": "catalogue"}, {"name": "Connection", "value": "close"}], "queryString": [{"name": "tags", "value": ""}], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:17 GMT"}, {"name": "Content-Length", "value": "22"}], "content": {"size": 22, "mimeType": "application/json; charset=utf-8", "text": "eyJlcnIiOm51bGwsInNpemUiOjl9", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 22}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 13}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:17.784918698Z", "time": 19, "request": {"method": "GET", "url": "http://catalogue/catalogue?page=1&size=6&tags=", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}], "queryString": [{"name": "page", "value": "1"}, {"name": "size", "value": "6"}, {"name": "tags", "value": ""}], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:17 GMT"}, {"name": "Content-Length", "value": "1927"}], "content": {"size": 1927, "mimeType": "application/json; charset=utf-8", "text": "W3siaWQiOiIwM2ZlZjZhYy0xODk2LTRjZTgtYmQ2OS1iNzk4Zjg1YzZlMGIiLCJuYW1lIjoiSG9seSIsImRlc2NyaXB0aW9uIjoiU29ja3MgZml0IGZvciBhIE1lc3NpYWguIFlvdSB0b28gY2FuIGV4cGVyaWVuY2Ugd2Fsa2luZyBpbiB3YXRlciB3aXRoIHRoZXNlIHNwZWNpYWwgZWRpdGlvbiBiZWF1dGllcy4gRWFjaCBob2xlIGlzIGxvdmluZ2x5IHByb2dnbGVkIHRvIGxlYXZlIHNtb290aCBlZGdlcy4gVGhlIG9ubHkgc29jayBhcHByb3ZlZCBieSBhIGhpZ2hlciBwb3dlci4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9ob2x5XzEuanBlZyIsIi9jYXRhbG9ndWUvaW1hZ2VzL2hvbHlfMi5qcGVnIl0sInByaWNlIjo5OS45OSwiY291bnQiOjEsInRhZyI6WyJhY3Rpb24iLCJtYWdpYyJdfSx7ImlkIjoiMzM5NWE0M2UtMmQ4OC00MGRlLWI5NWYtZTAwZTE1MDIwODViIiwibmFtZSI6IkNvbG91cmZ1bCIsImRlc2NyaXB0aW9uIjoicHJvaWRlbnQgb2NjYWVjYXQgaXJ1cmUgZXQgZXhjZXB0ZXVyIGxhYm9yZSBtaW5pbSBuaXNpIGFtZXQgaXJ1cmUiLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jb2xvdXJmdWxfc29ja3MuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvY29sb3VyZnVsX3NvY2tzLmpwZyJdLCJwcmljZSI6MTgsImNvdW50Ijo0MzgsInRhZyI6WyJicm93biIsImJsdWUiXX0seyJpZCI6IjUxMGEwZDdlLThlODMtNDE5My1iNDgzLWUyN2UwOWRkYzM0ZCIsIm5hbWUiOiJTdXBlclNwb3J0IFhMIiwiZGVzY3JpcHRpb24iOiJSZWFkeSBmb3IgYWN0aW9uLiBFbmdpbmVlcnM6IGJlIHJlYWR5IHRvIHNtYXNoIHRoYXQgbmV4dCBidWchIEJlIHJlYWR5LCB3aXRoIHRoZXNlIHN1cGVyLWFjdGlvbi1zcG9ydC1tYXN0ZXJwaWVjZXMuIFRoaXMgcGFydGljdWxhciBlbmdpbmVlciB3YXMgY2hhc2VkIGF3YXkgZnJvbSB0aGUgb2ZmaWNlIHdpdGggYSBzdGljay4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9wdW1hXzEuanBlZyIsIi9jYXRhbG9ndWUvaW1hZ2VzL3B1bWFfMi5qcGVnIl0sInByaWNlIjoxNSwiY291bnQiOjgyMCwidGFnIjpbInNwb3J0IiwiZm9ybWFsIiwiYmxhY2siXX0seyJpZCI6IjgwOGEyZGUxLTFhYWEtNGMyNS1hOWI5LTY2MTJlOGYyOWEzOCIsIm5hbWUiOiJDcm9zc2VkIiwiZGVzY3JpcHRpb24iOiJBIG1hdHVyZSBzb2NrLCBjcm9zc2VkLCB3aXRoIGFuIGFpciBvZiBub25jaGFsYW5jZS4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jcm9zc18xLmpwZWciLCIvY2F0YWxvZ3VlL2ltYWdlcy9jcm9zc18yLmpwZWciXSwicHJpY2UiOjE3LjMyLCJjb3VudCI6NzM4LCJ0YWciOlsiYmx1ZSIsImFjdGlvbiIsInJlZCIsImZvcm1hbCJdfSx7ImlkIjoiODE5ZTFmYmYtOGI3ZS00ZjZkLTgxMWYtNjkzNTM0OTE2YThiIiwibmFtZSI6IkZpZ3Vlcm9hIiwiZGVzY3JpcHRpb24iOiJlbmltIG9mZmljaWEgYWxpcXVhIGV4Y2VwdGV1ciBlc3NlIGRlc2VydW50IHF1aXMgYWxpcXVpcCBub3N0cnVkIGFuaW0iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9XQVQuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvV0FUMi5qcGciXSwicHJpY2UiOjE0LCJjb3VudCI6ODA4LCJ0YWciOlsiZ3JlZW4iLCJmb3JtYWwiLCJibHVlIl19LHsiaWQiOiI4MzdhYjE0MS0zOTllLTRjMWYtOWFiYy1iYWNlNDAyOTZiYWMiLCJuYW1lIjoiQ2F0IHNvY2tzIiwiZGVzY3JpcHRpb24iOiJjb25zZXF1YXQgYW1ldCBjdXBpZGF0YXQgbWluaW0gbGFib3J1bSB0ZW1wb3IgZWxpdCBleCBjb25zZXF1YXQgaW4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jYXRzb2Nrcy5qcGciLCIvY2F0YWxvZ3VlL2ltYWdlcy9jYXRzb2NrczIuanBnIl0sInByaWNlIjoxNSwiY291bnQiOjE3NSwidGFnIjpbImJyb3duIiwiZm9ybWFsIiwiZ3JlZW4iXX1dCg==", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 1927}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 19}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:17.78418182Z", "time": 7, "request": {"method": "GET", "url": "http://catalogue/tags", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:17 GMT"}, {"name": "Content-Length", "value": "107"}], "content": {"size": 107, "mimeType": "application/json; charset=utf-8", "text": "eyJlcnIiOm51bGwsInRhZ3MiOlsiYnJvd24iLCJnZWVrIiwiZm9ybWFsIiwiYmx1ZSIsInNraW4iLCJyZWQiLCJhY3Rpb24iLCJzcG9ydCIsImJsYWNrIiwibWFnaWMiLCJncmVlbiJdfQ==", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 107}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 7}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:18.131501482Z", "time": 5, "request": {"method": "GET", "url": "http://catalogue/catalogue/3395a43e-2d88-40de-b95f-e00e1502085b", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}, {"name": "x-some", "value": "demoval"}], ",queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:17 GMT"}, {"name": "Content-Length", "value": "286"}], "content": {"size": 286, "mimeType": "application/json; charset=utf-8", "text": "eyJjb3VudCI6NDM4LCJkZXNjcmlwdGlvbiI6InByb2lkZW50IG9jY2FlY2F0IGlydXJlIGV0IGV4Y2VwdGV1ciBsYWJvcmUgbWluaW0gbmlzaSBhbWV0IGlydXJlIiwiaWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jb2xvdXJmdWxfc29ja3MuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvY29sb3VyZnVsX3NvY2tzLmpwZyJdLCJuYW1lIjoiQ29sb3VyZnVsIiwicHJpY2UiOjE4LCJ0YWciOlsiYnJvd24iLCJibHVlIl19", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 286}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 5}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:18.379836908Z", "time": 14, "request": {"method": "GET", "url": "http://carts/carts/mHK0P7zTktmV1zv57iWAvCTd43FFMHap/items", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "carts"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "X-Application-Context", "value": "carts:80"}, {"name": "Content-Type", "value": "application/json;charset=UTF-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:17 GMT"}, {"name": "Transfer-Encoding", "value": "chunked"}], "content": {"size": 113, "mimeType": "application/json;charset=UTF-8", "text": "W3siaWQiOiI2MGZlOThmYjg2YzBmYzAwMDg2OWE5MGMiLCJpdGVtSWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJxdWFudGl0eSI6MSwidW5pdFByaWNlIjoxOC4wfV0=", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": -1}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 14}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:22.920540124Z", "time": 3, "request": {"method": "GET", "url": "http://catalogue/catalogue/808a2de1-1aaa-4c25-a9b9-6612e8f29a38", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:22 GMT"}, {"name": "Content-Length", "value": "275"}], "content": {"size": 275, "mimeType": "application/json; charset=utf-8", "text": "eyJjb3VudCI6NzM4LCJkZXNjcmlwdGlvbiI6IkEgbWF0dXJlIHNvY2ssIGNyb3NzZWQsIHdpdGggYW4gYWlyIG9mIG5vbmNoYWxhbmNlLiIsImlkIjoiODA4YTJkZTEtMWFhYS00YzI1LWE5YjktNjYxMmU4ZjI5YTM4IiwiaW1hZ2VVcmwiOlsiL2NhdGFsb2d1ZS9pbWFnZXMvY3Jvc3NfMS5qcGVnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvY3Jvc3NfMi5qcGVnIl0sIm5hbWUiOiJDcm9zc2VkIiwicHJpY2UiOjE3LjMyLCJ0YWciOlsiYmx1ZSIsInJlZCIsImFjdGlvbiIsImZvcm1hbCJdfQ==", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 275}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 3}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:22.921609501Z", "time": 3, "request": {"method": "GET", "url": "http://catalogue/catalogue?sort=id&size=3&tags=blue", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}], "queryString": [{"name": "size", "value": "3"}, {"name": "tags", "value": "blue"}, {"name": "sort", "value": "id"}], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Length", "value": "789"}, {"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:22 GMT"}], "content": {"size": 789, "mimeType": "application/json; charset=utf-8", "text": "W3siaWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJuYW1lIjoiQ29sb3VyZnVsIiwiZGVzY3JpcHRpb24iOiJwcm9pZGVudCBvY2NhZWNhdCBpcnVyZSBldCBleGNlcHRldXIgbGFib3JlIG1pbmltIG5pc2kgYW1ldCBpcnVyZSIsImltYWdlVXJsIjpbIi9jYXRhbG9ndWUvaW1hZ2VzL2NvbG91cmZ1bF9zb2Nrcy5qcGciLCIvY2F0YWxvZ3VlL2ltYWdlcy9jb2xvdXJmdWxfc29ja3MuanBnIl0sInByaWNlIjoxOCwiY291bnQiOjQzOCwidGFnIjpbImJsdWUiXX0seyJpZCI6IjgwOGEyZGUxLTFhYWEtNGMyNS1hOWI5LTY2MTJlOGYyOWEzOCIsIm5hbWUiOiJDcm9zc2VkIiwiZGVzY3JpcHRpb24iOiJBIG1hdHVyZSBzb2NrLCBjcm9zc2VkLCB3aXRoIGFuIGFpciBvZiBub25jaGFsYW5jZS4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jcm9zc18xLmpwZWciLCIvY2F0YWxvZ3VlL2ltYWdlcy9jcm9zc18yLmpwZWciXSwicHJpY2UiOjE3LjMyLCJjb3VudCI6NzM4LCJ0YWciOlsiYmx1ZSJdfSx7ImlkIjoiODE5ZTFmYmYtOGI3ZS00ZjZkLTgxMWYtNjkzNTM0OTE2YThiIiwibmFtZSI6IkZpZ3Vlcm9hIiwiZGVzY3JpcHRpb24iOiJlbmltIG9mZmljaWEgYWxpcXVhIGV4Y2VwdGV1ciBlc3NlIGRlc2VydW50IHF1aXMgYWxpcXVpcCBub3N0cnVkIGFuaW0iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9XQVQuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvV0FUMi5qcGciXSwicHJpY2UiOjE0LCJjb3VudCI6ODA4LCJ0YWciOlsiYmx1ZSJdfV0K", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 789}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 3}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:22.923197848Z", "time": 3, "request": {"method": "GET", "url": "http://catalogue/catalogue/3395a43e-2d88-40de-b95f-e00e1502085b", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Host", "value": "catalogue"}, {"name": "Connection", "value": "close"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:22 GMT"}, {"name": "Content-Length", "value": "286"}], "content": {"size": 286, "mimeType": "application/json; charset=utf-8", "text": "eyJjb3VudCI6NDM4LCJkZXNjcmlwdGlvbiI6InByb2lkZW50IG9jY2FlY2F0IGlydXJlIGV0IGV4Y2VwdGV1ciBsYWJvcmUgbWluaW0gbmlzaSBhbWV0IGlydXJlIiwiaWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jb2xvdXJmdWxfc29ja3MuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvY29sb3VyZnVsX3NvY2tzLmpwZyJdLCJuYW1lIjoiQ29sb3VyZnVsIiwicHJpY2UiOjE4LCJ0YWciOlsiYnJvd24iLCJibHVlIl19", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 286}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 3}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:23.175549218Z", "time": 26, "request": {"method": "GET", "url": "http://carts/carts/mHK0P7zTktmV1zv57iWAvCTd43FFMHap/items", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "carts"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "X-Application-Context", "value": "carts:80"}, {"name": "Content-Type", "value": "application/json;charset=UTF-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:22 GMT"}, {"name": "Transfer-Encoding", "value": "chunked"}], "content": {"size": 113, "mimeType": "application/json;charset=UTF-8", "text": "W3siaWQiOiI2MGZlOThmYjg2YzBmYzAwMDg2OWE5MGMiLCJpdGVtSWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJxdWFudGl0eSI6MSwidW5pdFByaWNlIjoxOC4wfV0=", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": -1}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 26}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:25.239777333Z", "time": 10, "request": {"method": "GET", "url": "http://carts/carts/mHK0P7zTktmV1zv57iWAvCTd43FFMHap/items", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "carts"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json;charset=UTF-8"}, {"name": "Transfer-Encoding", "value": "chunked"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:25 GMT"}, {"name": "X-Application-Context", "value": "carts:80"}], "content": {"size": 113, "mimeType": "application/json;charset=UTF-8", "text": "W3siaWQiOiI2MGZlOThmYjg2YzBmYzAwMDg2OWE5MGMiLCJpdGVtSWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJxdWFudGl0eSI6MSwidW5pdFByaWNlIjoxOC4wfV0=", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": -1}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 10}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:25.725866772Z", "time": 3, "request": {"method": "GET", "url": "http://catalogue/catalogue?size=5", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}], "queryString": [{"name": "size", "value": "5"}], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Length", "value": "1643"}, {"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:25 GMT"}], "content": {"size": 1643, "mimeType": "application/json; charset=utf-8", "text": "W3siaWQiOiIwM2ZlZjZhYy0xODk2LTRjZTgtYmQ2OS1iNzk4Zjg1YzZlMGIiLCJuYW1lIjoiSG9seSIsImRlc2NyaXB0aW9uIjoiU29ja3MgZml0IGZvciBhIE1lc3NpYWguIFlvdSB0b28gY2FuIGV4cGVyaWVuY2Ugd2Fsa2luZyBpbiB3YXRlciB3aXRoIHRoZXNlIHNwZWNpYWwgZWRpdGlvbiBiZWF1dGllcy4gRWFjaCBob2xlIGlzIGxvdmluZ2x5IHByb2dnbGVkIHRvIGxlYXZlIHNtb290aCBlZGdlcy4gVGhlIG9ubHkgc29jayBhcHByb3ZlZCBieSBhIGhpZ2hlciBwb3dlci4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9ob2x5XzEuanBlZyIsIi9jYXRhbG9ndWUvaW1hZ2VzL2hvbHlfMi5qcGVnIl0sInByaWNlIjo5OS45OSwiY291bnQiOjEsInRhZyI6WyJhY3Rpb24iLCJtYWdpYyJdfSx7ImlkIjoiMzM5NWE0M2UtMmQ4OC00MGRlLWI5NWYtZTAwZTE1MDIwODViIiwibmFtZSI6IkNvbG91cmZ1bCIsImRlc2NyaXB0aW9uIjoicHJvaWRlbnQgb2NjYWVjYXQgaXJ1cmUgZXQgZXhjZXB0ZXVyIGxhYm9yZSBtaW5pbSBuaXNpIGFtZXQgaXJ1cmUiLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jb2xvdXJmdWxfc29ja3MuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvY29sb3VyZnVsX3NvY2tzLmpwZyJdLCJwcmljZSI6MTgsImNvdW50Ijo0MzgsInRhZyI6WyJicm93biIsImJsdWUiXX0seyJpZCI6IjUxMGEwZDdlLThlODMtNDE5My1iNDgzLWUyN2UwOWRkYzM0ZCIsIm5hbWUiOiJTdXBlclNwb3J0IFhMIiwiZGVzY3JpcHRpb24iOiJSZWFkeSBmb3IgYWN0aW9uLiBFbmdpbmVlcnM6IGJlIHJlYWR5IHRvIHNtYXNoIHRoYXQgbmV4dCBidWchIEJlIHJlYWR5LCB3aXRoIHRoZXNlIHN1cGVyLWFjdGlvbi1zcG9ydC1tYXN0ZXJwaWVjZXMuIFRoaXMgcGFydGljdWxhciBlbmdpbmVlciB3YXMgY2hhc2VkIGF3YXkgZnJvbSB0aGUgb2ZmaWNlIHdpdGggYSBzdGljay4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9wdW1hXzEuanBlZyIsIi9jYXRhbG9ndWUvaW1hZ2VzL3B1bWFfMi5qcGVnIl0sInByaWNlIjoxNSwiY291bnQiOjgyMCwidGFnIjpbInNwb3J0IiwiZm9ybWFsIiwiYmxhY2siXX0seyJpZCI6IjgwOGEyZGUxLTFhYWEtNGMyNS1hOWI5LTY2MTJlOGYyOWEzOCIsIm5hbWUiOiJDcm9zc2VkIiwiZGVzY3JpcHRpb24iOiJBIG1hdHVyZSBzb2NrLCBjcm9zc2VkLCB3aXRoIGFuIGFpciBvZiBub25jaGFsYW5jZS4iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jcm9zc18xLmpwZWciLCIvY2F0YWxvZ3VlL2ltYWdlcy9jcm9zc18yLmpwZWciXSwicHJpY2UiOjE3LjMyLCJjb3VudCI6NzM4LCJ0YWciOlsiYmx1ZSIsImFjdGlvbiIsInJlZCIsImZvcm1hbCJdfSx7ImlkIjoiODE5ZTFmYmYtOGI3ZS00ZjZkLTgxMWYtNjkzNTM0OTE2YThiIiwibmFtZSI6IkZpZ3Vlcm9hIiwiZGVzY3JpcHRpb24iOiJlbmltIG9mZmljaWEgYWxpcXVhIGV4Y2VwdGV1ciBlc3NlIGRlc2VydW50IHF1aXMgYWxpcXVpcCBub3N0cnVkIGFuaW0iLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9XQVQuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvV0FUMi5qcGciXSwicHJpY2UiOjE0LCJjb3VudCI6ODA4LCJ0YWciOlsiZ3JlZW4iLCJmb3JtYWwiLCJibHVlIl19XQo=", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 1643}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 3}}
|
||||
{"_id": "", "startedDateTime": "2021-07-26T11:14:25.729303217Z", "time": 3, "request": {"method": "GET", "url": "http://catalogue/catalogue/3395a43e-2d88-40de-b95f-e00e1502085b", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Connection", "value": "close"}, {"name": "Host", "value": "catalogue"}], "queryString": [], "headersSize": -1, "bodySize": 0}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [{"name": "Content-Type", "value": "application/json; charset=utf-8"}, {"name": "Date", "value": "Mon, 26 Jul 2021 11:14:25 GMT"}, {"name": "Content-Length", "value": "286"}], "content": {"size": 286, "mimeType": "application/json; charset=utf-8", "text": "eyJjb3VudCI6NDM4LCJkZXNjcmlwdGlvbiI6InByb2lkZW50IG9jY2FlY2F0IGlydXJlIGV0IGV4Y2VwdGV1ciBsYWJvcmUgbWluaW0gbmlzaSBhbWV0IGlydXJlIiwiaWQiOiIzMzk1YTQzZS0yZDg4LTQwZGUtYjk1Zi1lMDBlMTUwMjA4NWIiLCJpbWFnZVVybCI6WyIvY2F0YWxvZ3VlL2ltYWdlcy9jb2xvdXJmdWxfc29ja3MuanBnIiwiL2NhdGFsb2d1ZS9pbWFnZXMvY29sb3VyZnVsX3NvY2tzLmpwZyJdLCJuYW1lIjoiQ29sb3VyZnVsIiwicHJpY2UiOjE4LCJ0YWciOlsiYnJvd24iLCJibHVlIl19", "encoding": "base64"}, "redirectURL": "", "headersSize": -1, "bodySize": 286}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 3}}
|
||||
@@ -1,790 +0,0 @@
|
||||
{
|
||||
"log": {
|
||||
"version": "1.2",
|
||||
"creator": {
|
||||
"name": "mitmproxy har_dump",
|
||||
"version": "0.1",
|
||||
"comment": "mitmproxy version mitmproxy 4.0.4"
|
||||
},
|
||||
"entries": [
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:14:43.864529+00:00",
|
||||
"time": 111,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/e21f7112-3d3b-4632-9da3-a4af2e0e9166/sub1",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0,
|
||||
"queryString": []
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"mimeType": "text/html",
|
||||
"text": "",
|
||||
"size": 0
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 245,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 22,
|
||||
"receive": 2,
|
||||
"wait": 87,
|
||||
"connect": -1,
|
||||
"ssl": -1
|
||||
},
|
||||
"serverIPAddress": "54.210.29.33"
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:18.747122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/952bea17-3776-11ea-9341-42010a84012a/sub2",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:19.747122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/952bea17-3776-11ea-9341-42010a84012a;mparam=matrixparam",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:20.047122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/appears-once",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:20.747122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/appears-twice",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:21.747122+00:00",
|
||||
"time": 630,
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"url": "https://httpbin.org/appears-twice",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"queryString": [],
|
||||
"headersSize": 1542,
|
||||
"bodySize": 0
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "HTTP/1.1",
|
||||
"cookies": [],
|
||||
"headers": [],
|
||||
"content": {
|
||||
"size": 39,
|
||||
"compression": -20,
|
||||
"mimeType": "application/json",
|
||||
"text": "null"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": 248,
|
||||
"bodySize": 39
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": 14,
|
||||
"receive": 4,
|
||||
"wait": 350,
|
||||
"connect": 262,
|
||||
"ssl": -1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:20.747122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/form-urlencoded",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
{
|
||||
"name": "Content-Type",
|
||||
"value": "application/x-www-form-urlencoded"
|
||||
}
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": "agent-id=ade&callback-url=&token=sometoken"
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"size": 0,
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": -1,
|
||||
"bodySize": 0
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": -1,
|
||||
"wait": -1,
|
||||
"receive": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:21.747122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/form-urlencoded",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
{
|
||||
"name": "Content-Type",
|
||||
"value": "application/x-www-form-urlencoded"
|
||||
}
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": "agent-id=ade&callback-url=&token=sometoken-second-val&optional=another"
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"size": 0,
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": -1,
|
||||
"bodySize": 0
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": -1,
|
||||
"wait": -1,
|
||||
"receive": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:22.747122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/form-multipart",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
{
|
||||
"name": "Content-Type",
|
||||
"value": "multipart/form-data; boundary=BOUNDARY"
|
||||
}
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": "--BOUNDARY\r\nContent-Disposition: form-data; name=\"file\"; filename=\"metadata.json\"\r\nContent-Type: application/json\r\n\r\n{\"functions\": 123}\r\n--BOUNDARY\r\nContent-Disposition: form-data; name=\"path\"\r\n\r\n/content/components\r\n--BOUNDARY--\r\n"
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"size": 62,
|
||||
"mimeType": "",
|
||||
"text": "{}"
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": -1,
|
||||
"bodySize": 62
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": -1,
|
||||
"wait": -1,
|
||||
"receive": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:21.757122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/body-optional",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"size": 0,
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": -1,
|
||||
"bodySize": 0
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": -1,
|
||||
"wait": -1,
|
||||
"receive": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:21.747122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/body-optional",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
{
|
||||
"name": "Content-Type",
|
||||
"value": "application/json"
|
||||
}
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": "{\"key\", \"val\"}"
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"size": 0,
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": -1,
|
||||
"bodySize": 0
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": -1,
|
||||
"wait": -1,
|
||||
"receive": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:21.757122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/body-optional",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"content": {
|
||||
"size": 0,
|
||||
"mimeType": "",
|
||||
"text": ""
|
||||
},
|
||||
"redirectURL": "",
|
||||
"headersSize": -1,
|
||||
"bodySize": 0
|
||||
},
|
||||
"cache": {},
|
||||
"timings": {
|
||||
"send": -1,
|
||||
"wait": -1,
|
||||
"receive": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"startedDateTime": "2019-09-06T06:16:21.757122+00:00",
|
||||
"time": 1,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://httpbin.org/body-required",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
||||
"headers": [
|
||||
],
|
||||
"queryString": [],
|
||||
"headersSize": -1,
|
||||
"bodySize": -1,
|
||||
"postData": {
|
||||
"mimeType": "",
|
||||
"text": "body exists"
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"status": 200,
|
||||
"statusText": "OK",
|
||||
"httpVersion": "",
|
||||
"cookies": [],
|
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      },
      {
        "startedDateTime": "2019-09-06T06:16:22.000000+00:00",
        "time": 1,
        "request": {
          "method": "GET",
          "url": "https://httpbin.org/param-patterns/prefix-gibberish-fine/234324",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "queryString": [],
          "headersSize": -1,
          "bodySize": -1,
          "postData": {
            "mimeType": "",
            "text": ""
          }
        },
        "response": {
          "status": 200,
          "statusText": "OK",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      },
      {
        "startedDateTime": "2019-09-06T06:16:22.000001+00:00",
        "time": 1,
        "request": {
          "method": "GET",
          "url": "https://httpbin.org/param-patterns/prefix-gibberish-sfdlasdfkadf87sd93284q24r/1",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "queryString": [],
          "headersSize": -1,
          "bodySize": -1,
          "postData": {
            "mimeType": "",
            "text": ""
          }
        },
        "response": {
          "status": 200,
          "statusText": "OK",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      },
      {
        "startedDateTime": "2019-09-06T06:16:22.000002+00:00",
        "time": 1,
        "request": {
          "method": "GET",
          "url": "https://httpbin.org/param-patterns/prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf/static",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "queryString": [],
          "headersSize": -1,
          "bodySize": -1,
          "postData": {
            "mimeType": "",
            "text": ""
          }
        },
        "response": {
          "status": 200,
          "statusText": "OK",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      },
      {
        "startedDateTime": "2019-09-06T06:16:22.000003+00:00",
        "time": 1,
        "request": {
          "method": "GET",
          "url": "https://httpbin.org/param-patterns/prefix-gibberish-4jk5l2345h2452l4352435jlk45",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "queryString": [],
          "headersSize": -1,
          "bodySize": -1,
          "postData": {
            "mimeType": "",
            "text": ""
          }
        },
        "response": {
          "status": 200,
          "statusText": "OK",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      },
      {
        "startedDateTime": "2019-09-06T06:16:22.000004+00:00",
        "time": 1,
        "request": {
          "method": "GET",
          "url": "https://httpbin.org/param-patterns/prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "queryString": [],
          "headersSize": -1,
          "bodySize": -1,
          "postData": {
            "mimeType": "",
            "text": ""
          }
        },
        "response": {
          "status": 200,
          "statusText": "OK",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      },
      {
        "startedDateTime": "2019-09-06T06:16:22.000002+00:00",
        "time": 1,
        "request": {
          "method": "GET",
          "url": "https://httpbin.org/param-patterns/prefix-gibberish-afterwards/23421",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "queryString": [],
          "headersSize": -1,
          "bodySize": -1,
          "postData": {
            "mimeType": "",
            "text": ""
          }
        },
        "response": {
          "status": 200,
          "statusText": "OK",
          "httpVersion": "",
          "cookies": [],
          "headers": [
          ],
          "content": {
            "size": 0,
            "mimeType": "",
            "text": ""
          },
          "redirectURL": "",
          "headersSize": -1,
          "bodySize": 0
        },
        "cache": {},
        "timings": {
          "send": -1,
          "wait": -1,
          "receive": 1
        }
      }
    ]
  }
}
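The fixture above is plain HAR 1.2 data, so it can be inspected with nothing but the Go standard library. The following is a minimal sketch, not part of the repository: the struct here is an illustrative view covering only the fields this fixture sets, and the file path is hypothetical.

    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    // harFile is a minimal view of the fixture: only the fields the test data uses.
    type harFile struct {
        Log struct {
            Entries []struct {
                StartedDateTime string  `json:"startedDateTime"`
                Time            float64 `json:"time"`
                Request         struct {
                    Method string `json:"method"`
                    URL    string `json:"url"`
                } `json:"request"`
                Response struct {
                    Status int `json:"status"`
                } `json:"response"`
            } `json:"entries"`
        } `json:"log"`
    }

    func main() {
        raw, err := os.ReadFile("testdata/params.har") // hypothetical path
        if err != nil {
            panic(err)
        }
        var h harFile
        if err := json.Unmarshal(raw, &h); err != nil {
            panic(err)
        }
        for _, e := range h.Log.Entries {
            fmt.Println(e.Request.Method, e.Request.URL, e.Response.Status)
        }
    }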
@@ -1,843 +0,0 @@
{
  "openapi": "3.1.0",
  "info": {
    "title": "https://httpbin.org",
    "description": "Mizu observed 19 entries (0 failed), at 0.10 hits/s, average response time is 0.17 seconds",
    "version": "1.0"
  },
  "servers": [
    {
      "url": "https://httpbin.org"
    }
  ],
  "paths": {
    "/appears-once": {
      "get": {
        "summary": "/appears-once",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.63 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "application/json": {
                "example": null
              }
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750580.04,
            "lastSeen": 1567750580.04,
            "sumRT": 0.63,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750580.04,
          "lastSeen": 1567750580.04,
          "sumRT": 0.63,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750580.04,
        "x-sample-entry": 0
      }
    },
    "/appears-twice": {
      "get": {
        "summary": "/appears-twice",
        "description": "Mizu observed 2 entries (0 failed), at 0.50 hits/s, average response time is 0.63 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "application/json": {
                "example": null
              }
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 2,
            "failures": 0,
            "firstSeen": 1567750580.74,
            "lastSeen": 1567750581.74,
            "sumRT": 1.26,
            "sumDuration": 1
          }
        },
        "x-counters-total": {
          "entries": 2,
          "failures": 0,
          "firstSeen": 1567750580.74,
          "lastSeen": 1567750581.74,
          "sumRT": 1.26,
          "sumDuration": 1
        },
        "x-last-seen-ts": 1567750581.74,
        "x-sample-entry": 0
      }
    },
    "/body-optional": {
      "post": {
        "summary": "/body-optional",
        "description": "Mizu observed 3 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 3,
            "failures": 0,
            "firstSeen": 1567750581.74,
            "lastSeen": 1567750581.75,
            "sumRT": 0.00,
            "sumDuration": 0.01
          }
        },
        "x-counters-total": {
          "entries": 3,
          "failures": 0,
          "firstSeen": 1567750581.74,
          "lastSeen": 1567750581.75,
          "sumRT": 0.00,
          "sumDuration": 0.01
        },
        "x-last-seen-ts": 1567750581.75,
        "x-sample-entry": 0,
        "requestBody": {
          "description": "Generic request body",
          "content": {
            "application/json": {
              "example": "{\"key\", \"val\"}"
            }
          }
        }
      }
    },
    "/body-required": {
      "post": {
        "summary": "/body-required",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750581.75,
            "lastSeen": 1567750581.75,
            "sumRT": 0.00,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750581.75,
          "lastSeen": 1567750581.75,
          "sumRT": 0.00,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750581.75,
        "x-sample-entry": 0,
        "requestBody": {
          "description": "Generic request body",
          "content": {
            "": {
              "example": "body exists"
            }
          },
          "required": true
        }
      }
    },
    "/form-multipart": {
      "post": {
        "summary": "/form-multipart",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {
                "example": {}
              }
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750582.74,
            "lastSeen": 1567750582.74,
            "sumRT": 0.00,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750582.74,
          "lastSeen": 1567750582.74,
          "sumRT": 0.00,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750582.74,
        "x-sample-entry": 0,
        "requestBody": {
          "description": "Generic request body",
          "content": {
            "multipart/form-data": {
              "schema": {
                "type": "object",
                "required": [
                  "file",
                  "path"
                ],
                "properties": {
                  "file": {
                    "type": "string",
                    "contentMediaType": "application/json",
                    "examples": [
                      "{\"functions\": 123}"
                    ]
                  },
                  "path": {
                    "type": "string",
                    "examples": [
                      "/content/components"
                    ]
                  }
                }
              },
              "example": "--BOUNDARY\r\nContent-Disposition: form-data; name=\"file\"; filename=\"metadata.json\"\r\nContent-Type: application/json\r\n\r\n{\"functions\": 123}\r\n--BOUNDARY\r\nContent-Disposition: form-data; name=\"path\"\r\n\r\n/content/components\r\n--BOUNDARY--\r\n"
            }
          },
          "required": true
        }
      }
    },
    "/form-urlencoded": {
      "post": {
        "summary": "/form-urlencoded",
        "description": "Mizu observed 2 entries (0 failed), at 0.50 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 2,
            "failures": 0,
            "firstSeen": 1567750580.74,
            "lastSeen": 1567750581.74,
            "sumRT": 0.00,
            "sumDuration": 1
          }
        },
        "x-counters-total": {
          "entries": 2,
          "failures": 0,
          "firstSeen": 1567750580.74,
          "lastSeen": 1567750581.74,
          "sumRT": 0.00,
          "sumDuration": 1
        },
        "x-last-seen-ts": 1567750581.74,
        "x-sample-entry": 0,
        "requestBody": {
          "description": "Generic request body",
          "content": {
            "application/x-www-form-urlencoded": {
              "schema": {
                "type": "object",
                "required": [
                  "agent-id",
                  "callback-url",
                  "token"
                ],
                "properties": {
                  "agent-id": {
                    "type": "string",
                    "examples": [
                      "ade"
                    ]
                  },
                  "callback-url": {
                    "type": "string",
                    "examples": [
                      ""
                    ]
                  },
                  "optional": {
                    "type": "string",
                    "examples": [
                      "another"
                    ]
                  },
                  "token": {
                    "type": "string",
                    "examples": [
                      "sometoken",
                      "sometoken-second-val"
                    ]
                  }
                }
              },
              "example": "agent-id=ade\u0026callback-url=\u0026token=sometoken"
            }
          },
          "required": true
        }
      }
    },
    "/param-patterns/prefix-gibberish-fine/{prefixgibberishfineId}": {
      "get": {
        "tags": [
          "param-patterns"
        ],
        "summary": "/param-patterns/prefix-gibberish-fine/{prefixgibberishfineId}",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750582,
            "lastSeen": 1567750582,
            "sumRT": 0.00,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750582,
          "lastSeen": 1567750582,
          "sumRT": 0.00,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750582,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "prefixgibberishfineId",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string"
          },
          "examples": {
            "example #0": {
              "value": "234324"
            }
          }
        }
      ]
    },
    "/param-patterns/{parampatternId}": {
      "get": {
        "tags": [
          "param-patterns"
        ],
        "summary": "/param-patterns/{parampatternId}",
        "description": "Mizu observed 2 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 2,
            "failures": 0,
            "firstSeen": 1567750582.00,
            "lastSeen": 1567750582.00,
            "sumRT": 0.00,
            "sumDuration": 9.53e-7
          }
        },
        "x-counters-total": {
          "entries": 2,
          "failures": 0,
          "firstSeen": 1567750582.00,
          "lastSeen": 1567750582.00,
          "sumRT": 0.00,
          "sumDuration": 9.53e-7
        },
        "x-last-seen-ts": 1567750582.00,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "parampatternId",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": "^prefix-gibberish-.+"
          },
          "examples": {
            "example #0": {
              "value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
            },
            "example #1": {
              "value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
            },
            "example #2": {
              "value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
            },
            "example #3": {
              "value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
            },
            "example #4": {
              "value": "prefix-gibberish-afterwards"
            }
          }
        }
      ]
    },
    "/param-patterns/{parampatternId}/1": {
      "get": {
        "tags": [
          "param-patterns"
        ],
        "summary": "/param-patterns/{parampatternId}/1",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750582.00,
            "lastSeen": 1567750582.00,
            "sumRT": 0.00,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750582.00,
          "lastSeen": 1567750582.00,
          "sumRT": 0.00,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750582.00,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "parampatternId",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": "^prefix-gibberish-.+"
          },
          "examples": {
            "example #0": {
              "value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
            },
            "example #1": {
              "value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
            },
            "example #2": {
              "value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
            },
            "example #3": {
              "value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
            },
            "example #4": {
              "value": "prefix-gibberish-afterwards"
            }
          }
        }
      ]
    },
    "/param-patterns/{parampatternId}/static": {
      "get": {
        "tags": [
          "param-patterns"
        ],
        "summary": "/param-patterns/{parampatternId}/static",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750582.00,
            "lastSeen": 1567750582.00,
            "sumRT": 0.00,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750582.00,
          "lastSeen": 1567750582.00,
          "sumRT": 0.00,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750582.00,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "parampatternId",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": "^prefix-gibberish-.+"
          },
          "examples": {
            "example #0": {
              "value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
            },
            "example #1": {
              "value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
            },
            "example #2": {
              "value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
            },
            "example #3": {
              "value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
            },
            "example #4": {
              "value": "prefix-gibberish-afterwards"
            }
          }
        }
      ]
    },
    "/param-patterns/{parampatternId}/{param1}": {
      "get": {
        "tags": [
          "param-patterns"
        ],
        "summary": "/param-patterns/{parampatternId}/{param1}",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.00 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750582.00,
            "lastSeen": 1567750582.00,
            "sumRT": 0.00,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750582.00,
          "lastSeen": 1567750582.00,
          "sumRT": 0.00,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750582.00,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "param1",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string"
          },
          "examples": {
            "example #0": {
              "value": "23421"
            }
          }
        },
        {
          "name": "parampatternId",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": "^prefix-gibberish-.+"
          },
          "examples": {
            "example #0": {
              "value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
            },
            "example #1": {
              "value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
            },
            "example #2": {
              "value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
            },
            "example #3": {
              "value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
            },
            "example #4": {
              "value": "prefix-gibberish-afterwards"
            }
          }
        }
      ]
    },
    "/{Id}": {
      "get": {
        "summary": "/{Id}",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.63 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "application/json": {
                "example": null
              }
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750579.74,
            "lastSeen": 1567750579.74,
            "sumRT": 0.63,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750579.74,
          "lastSeen": 1567750579.74,
          "sumRT": 0.63,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750579.74,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "Id",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string"
          },
          "examples": {
            "example #0": {
              "value": "<UUID4>"
            },
            "example #1": {
              "value": "<UUID4>"
            }
          }
        }
      ]
    },
    "/{Id}/sub1": {
      "get": {
        "summary": "/{Id}/sub1",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.11 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "text/html": {}
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750483.86,
            "lastSeen": 1567750483.86,
            "sumRT": 0.11,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750483.86,
          "lastSeen": 1567750483.86,
          "sumRT": 0.11,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750483.86,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "Id",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string"
          },
          "examples": {
            "example #0": {
              "value": "<UUID4>"
            },
            "example #1": {
              "value": "<UUID4>"
            }
          }
        }
      ]
    },
    "/{Id}/sub2": {
      "get": {
        "summary": "/{Id}/sub2",
        "description": "Mizu observed 1 entries (0 failed), at 0.00 hits/s, average response time is 0.63 seconds",
        "operationId": "<UUID4>",
        "responses": {
          "200": {
            "description": "Successful call with status 200",
            "content": {
              "application/json": {
                "example": null
              }
            }
          }
        },
        "x-counters-per-source": {
          "": {
            "entries": 1,
            "failures": 0,
            "firstSeen": 1567750578.74,
            "lastSeen": 1567750578.74,
            "sumRT": 0.63,
            "sumDuration": 0
          }
        },
        "x-counters-total": {
          "entries": 1,
          "failures": 0,
          "firstSeen": 1567750578.74,
          "lastSeen": 1567750578.74,
          "sumRT": 0.63,
          "sumDuration": 0
        },
        "x-last-seen-ts": 1567750578.74,
        "x-sample-entry": 0
      },
      "parameters": [
        {
          "name": "Id",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string"
          },
          "examples": {
            "example #0": {
              "value": "<UUID4>"
            },
            "example #1": {
              "value": "<UUID4>"
            }
          }
        }
      ]
    }
  },
  "x-counters-per-source": {
    "": {
      "entries": 19,
      "failures": 0,
      "firstSeen": 1567750483.86,
      "lastSeen": 1567750582.74,
      "sumRT": 3.27,
      "sumDuration": 2.01
    }
  },
  "x-counters-total": {
    "entries": 19,
    "failures": 0,
    "firstSeen": 1567750483.86,
    "lastSeen": 1567750582.74,
    "sumRT": 3.27,
    "sumDuration": 2.01
  }
}
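The generated spec above carries Mizu-specific "x-counters-*" extensions next to the standard OpenAPI fields; the "description" strings are derived from them (sumRT divided by entries). The following is a hedged, standard-library-only sketch of reading those extensions back out of such a document; it is not code from the repository, and the struct name is illustrative.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // counters mirrors the x-counters-total object of the generated spec.
    type counters struct {
        Entries   int     `json:"entries"`
        Failures  int     `json:"failures"`
        FirstSeen float64 `json:"firstSeen"`
        LastSeen  float64 `json:"lastSeen"`
        SumRT     float64 `json:"sumRT"`
    }

    func main() {
        // Trimmed-down stand-in for the spec above.
        specJSON := []byte(`{"x-counters-total": {"entries": 19, "failures": 0, "sumRT": 3.27}}`)

        var spec map[string]json.RawMessage
        if err := json.Unmarshal(specJSON, &spec); err != nil {
            panic(err)
        }

        var total counters
        if err := json.Unmarshal(spec["x-counters-total"], &total); err != nil {
            panic(err)
        }

        // Average response time = sumRT / entries, matching the 0.17 s in the info description.
        fmt.Printf("avg RT: %.2f s over %d entries\n", total.SumRT/float64(total.Entries), total.Entries)
    }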
@@ -1,50 +0,0 @@
{
  "openapi": "3.1.0",
  "info": {
    "title": "Preloaded TRCC",
    "version": "0.1",
    "description": "Test file for loading pre-existing OAS"
  },
  "paths": {
    "/models/{id}": {
      "parameters": [
        {
          "name": "id",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": ".+(_|-|\\.).+"
          },
          "example": "some-uuid-maybe"
        }
      ]
    },
    "/models/{id}/{id2}": {
      "parameters": [
        {
          "name": "id",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": ".+(_|-|\\.).+"
          },
          "example": "some-uuid-maybe"
        },
        {
          "name": "id2",
          "in": "path",
          "required": true,
          "style": "simple",
          "schema": {
            "type": "string",
            "pattern": "\\d+"
          }
        }
      ]
    }
  }
}
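This preloaded spec constrains both path parameters with regex patterns, which the tree matcher in the next file respects when deciding whether a segment belongs to an existing parameter node. A quick standard-library sketch (not from the repository) of what those two patterns accept, using the sample values from the fixture:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        idPat := regexp.MustCompile(`.+(_|-|\.).+`) // pattern for {id}
        id2Pat := regexp.MustCompile(`\d+`)         // pattern for {id2}

        fmt.Println(idPat.MatchString("some-uuid-maybe")) // true: contains a "-" separator
        fmt.Println(idPat.MatchString("plainvalue"))      // false: no "_", "-" or "."
        fmt.Println(id2Pat.MatchString("12345"))          // true: digits
    }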
@@ -1,308 +0,0 @@
package oas

import (
    "encoding/json"
    "github.com/chanced/openapi"
    "github.com/up9inc/mizu/shared/logger"
    "net/url"
    "regexp"
    "strconv"
    "strings"
)

type NodePath = []string

type Node struct {
    constant  *string
    pathParam *openapi.ParameterObj
    pathObj   *openapi.PathObj
    parent    *Node
    children  []*Node
}

func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj) (node *Node) {
    if existingPathObj == nil {
        panic("Invalid function call")
    }

    pathChunk := path[0]
    potentialMatrix := strings.SplitN(pathChunk, ";", 2)
    if len(potentialMatrix) > 1 {
        pathChunk = potentialMatrix[0]
        logger.Log.Warningf("URI matrix params are not supported: %s", potentialMatrix[1])
    }

    chunkIsParam := strings.HasPrefix(pathChunk, "{") && strings.HasSuffix(pathChunk, "}")
    pathChunk, err := url.PathUnescape(pathChunk)
    if err != nil {
        logger.Log.Warningf("URI segment is not correctly encoded: %s", pathChunk)
        // any side effects on continuing?
    }

    chunkIsGibberish := IsGibberish(pathChunk) && !IsVersionString(pathChunk)

    var paramObj *openapi.ParameterObj
    if chunkIsParam && existingPathObj != nil && existingPathObj.Parameters != nil {
        _, paramObj = findParamByName(existingPathObj.Parameters, openapi.InPath, pathChunk[1:len(pathChunk)-1])
    }

    if paramObj == nil {
        node = n.searchInConstants(pathChunk)
    }

    if node == nil && pathChunk != "" {
        node = n.searchInParams(paramObj, pathChunk, chunkIsGibberish)
    }

    // still no node found, should create it
    if node == nil {
        node = new(Node)
        node.parent = n
        n.children = append(n.children, node)

        if paramObj != nil {
            node.pathParam = paramObj
        } else if chunkIsGibberish {
            newParam := n.createParam()
            node.pathParam = newParam
        } else {
            node.constant = &pathChunk
        }
    }

    // add example if it's a gibberish chunk
    if node.pathParam != nil && !chunkIsParam {
        exmp := &node.pathParam.Examples
        err := fillParamExample(&exmp, pathChunk)
        if err != nil {
            logger.Log.Warningf("Failed to add example to a parameter: %s", err)
        }

        if len(*exmp) >= 3 && node.pathParam.Schema.Pattern == nil { // is it enough to decide on 2 samples?
            node.pathParam.Schema.Pattern = getPatternFromExamples(exmp)
        }
    }

    // TODO: eat up trailing slash, in a smart way: node.pathObj!=nil && path[1]==""
    if len(path) > 1 {
        return node.getOrSet(path[1:], existingPathObj)
    } else if node.pathObj == nil {
        node.pathObj = existingPathObj
    }

    return node
}

func getPatternFromExamples(exmp *openapi.Examples) *openapi.Regexp {
    allInts := true
    strs := make([]string, 0)
    for _, example := range *exmp {
        exampleObj, err := example.ResolveExample(exampleResolver)
        if err != nil {
            continue
        }

        var value string
        err = json.Unmarshal(exampleObj.Value, &value)
        if err != nil {
            logger.Log.Warningf("Failed decoding parameter example into string: %s", err)
            continue
        }
        strs = append(strs, value)

        if _, err := strconv.Atoi(value); err != nil {
            allInts = false
        }
    }

    if allInts {
        re := new(openapi.Regexp)
        re.Regexp = regexp.MustCompile(`\d+`)
        return re
    } else {
        prefix := longestCommonXfixStr(strs, true)
        suffix := longestCommonXfixStr(strs, false)

        pat := ""
        separators := "-._/:|*,+" // TODO: we could also cut prefix till the last separator
        if len(prefix) > 0 && strings.Contains(separators, string(prefix[len(prefix)-1])) {
            pat = "^" + regexp.QuoteMeta(prefix)
        }

        pat += ".+"

        if len(suffix) > 0 && strings.Contains(separators, string(suffix[0])) {
            pat += regexp.QuoteMeta(suffix) + "$"
        }

        if pat != ".+" {
            re := new(openapi.Regexp)
            re.Regexp = regexp.MustCompile(pat)
            return re
        }
    }
    return nil
}

func (n *Node) createParam() *openapi.ParameterObj {
    name := "param"

    if n.constant != nil { // the node is already a param
        // REST assumption, not always correct
        if strings.HasSuffix(*n.constant, "es") && len(*n.constant) > 4 {
            name = *n.constant
            name = name[:len(name)-2] + "Id"
        } else if strings.HasSuffix(*n.constant, "s") && len(*n.constant) > 3 {
            name = *n.constant
            name = name[:len(name)-1] + "Id"
        } else {
            name = *n.constant + "Id"
        }

        name = cleanStr(name, isAlNumRune)
        if !isAlphaRune(rune(name[0])) {
            name = "_" + name
        }
    }

    newParam := createSimpleParam(name, "path", "string")
    x := n.countParentParams()
    if x > 0 {
        newParam.Name = newParam.Name + strconv.Itoa(x)
    }

    return newParam
}

func (n *Node) searchInParams(paramObj *openapi.ParameterObj, chunk string, chunkIsGibberish bool) *Node {
    // look among params
    for _, subnode := range n.children {
        if subnode.constant != nil {
            continue
        }

        if paramObj != nil {
            // TODO: mergeParam(subnode.pathParam, paramObj)
            return subnode
        } else if subnode.pathParam.Schema.Pattern != nil { // it has defined param pattern, have to respect it
            // TODO: and not in exceptions
            if subnode.pathParam.Schema.Pattern.Match([]byte(chunk)) {
                return subnode
            } else if chunkIsGibberish {
                // TODO: what to do if gibberish chunk does not match the pattern and not in exceptions?
                return nil
            } else {
                return nil
            }
        } else if chunkIsGibberish {
            return subnode
        }

    }
    return nil
}

func (n *Node) searchInConstants(pathChunk string) *Node {
    // look among constants
    for _, subnode := range n.children {
        if subnode.constant == nil {
            continue
        }

        if *subnode.constant == pathChunk {
            return subnode
        }
    }
    return nil
}

func (n *Node) compact() {
    // TODO
}

func (n *Node) listPaths() *openapi.Paths {
    paths := &openapi.Paths{Items: map[openapi.PathValue]*openapi.PathObj{}}

    var strChunk string
    if n.constant != nil {
        strChunk = *n.constant
    } else if n.pathParam != nil {
        strChunk = "{" + n.pathParam.Name + "}"
    } // else -> this is the root node

    // add self
    if n.pathObj != nil {
        fillPathParams(n, n.pathObj)
        paths.Items[openapi.PathValue(strChunk)] = n.pathObj
    }

    // recurse into children
    for _, child := range n.children {
        subPaths := child.listPaths()
        for path, pathObj := range subPaths.Items {
            var concat string
            if n.parent == nil {
                concat = string(path)
            } else {
                concat = strChunk + "/" + string(path)
            }
            paths.Items[openapi.PathValue(concat)] = pathObj
        }
    }

    return paths
}

func fillPathParams(n *Node, pathObj *openapi.PathObj) {
    // collect all path parameters from parent hierarchy
    node := n
    for {
        if node.pathParam != nil {
            initParams(&pathObj.Parameters)

            idx, paramObj := findParamByName(pathObj.Parameters, openapi.InPath, node.pathParam.Name)
            if paramObj == nil {
                appended := append(*pathObj.Parameters, node.pathParam)
                pathObj.Parameters = &appended
            } else {
                (*pathObj.Parameters)[idx] = paramObj
            }
        }

        node = node.parent
        if node == nil {
            break
        }
    }
}

type PathAndOp struct {
    path string
    op   *openapi.Operation
}

func (n *Node) listOps() []PathAndOp {
    res := make([]PathAndOp, 0)
    for path, pathObj := range n.listPaths().Items {
        for _, op := range getOps(pathObj) {
            res = append(res, PathAndOp{path: string(path), op: op})
        }
    }
    return res
}

func (n *Node) countParentParams() int {
    res := 0
    node := n
    for {
        if node.pathParam != nil {
            res++
        }

        if node.parent == nil {
            break
        }
        node = node.parent
    }
    return res
}
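A hedged usage sketch of the deleted tree above (not part of the repository): it must live in the same oas package, since getOrSet, fillPathParams and listPaths are unexported, and it mirrors the flow of TestTree in the next hunk.

    package oas

    import (
        "strings"
        "testing"

        "github.com/chanced/openapi"
    )

    // TestTreeSketch shows how a single observed URL is folded into the node tree.
    func TestTreeSketch(t *testing.T) {
        tree := new(Node)

        pathObj := new(openapi.PathObj)
        node := tree.getOrSet(strings.Split("/users/f34efcae-6583-11eb-908a-00b0fcb9d4f6/orders", "/"), pathObj)
        fillPathParams(node, pathObj) // attach the collected path params to the PathObj

        // listPaths folds the tree back into OpenAPI path templates; gibberish-looking
        // segments (such as the UUID above) are represented as {param} placeholders.
        for path := range tree.listPaths().Items {
            t.Log(path)
        }
    }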
@@ -1,38 +0,0 @@
package oas

import (
    "strings"
    "testing"

    "github.com/chanced/openapi"
)

func TestTree(t *testing.T) {
    testCases := []struct {
        inp       string
        numParams int
        label     string
    }{
        {"/", 0, ""},
        {"/v1.0.0/config/launcher/sp_nKNHCzsN/f34efcae-6583-11eb-908a-00b0fcb9d4f6/vendor,init,conversation", 1, "vendor,init,conversation"},
        {"/v1.0.0/config/launcher/sp_nKNHCzsN/{f34efcae-6583-11eb-908a-00b0fcb9d4f6}/vendor,init,conversation", 0, "vendor,init,conversation"},
        {"/getSvgs/size/small/brand/SFLY/layoutId/170943/layoutVersion/1/sizeId/742/surface/0/isLandscape/true/childSkus/%7B%7D", 1, "{}"},
    }

    tree := new(Node)
    for _, tc := range testCases {
        split := strings.Split(tc.inp, "/")
        pathObj := new(openapi.PathObj)
        node := tree.getOrSet(split, pathObj)

        fillPathParams(node, pathObj)

        if node.constant != nil && *node.constant != tc.label {
            t.Errorf("Constant does not match: %s != %s", *node.constant, tc.label)
        }

        if tc.numParams > 0 && (pathObj.Parameters == nil || len(*pathObj.Parameters) < tc.numParams) {
            t.Errorf("Wrong num of params, expected: %d", tc.numParams)
        }
    }
}
File diff suppressed because it is too large
@@ -1,476 +0,0 @@
package oas

import (
    "encoding/json"
    "errors"
    "strconv"
    "strings"

    "github.com/up9inc/mizu/agent/pkg/har"

    "github.com/chanced/openapi"
    "github.com/up9inc/mizu/shared/logger"
)

func exampleResolver(ref string) (*openapi.ExampleObj, error) {
    return nil, errors.New("JSON references are not supported at the moment: " + ref)
}

func responseResolver(ref string) (*openapi.ResponseObj, error) {
    return nil, errors.New("JSON references are not supported at the moment: " + ref)
}

func reqBodyResolver(ref string) (*openapi.RequestBodyObj, error) {
    return nil, errors.New("JSON references are not supported at the moment: " + ref)
}

func paramResolver(ref string) (*openapi.ParameterObj, error) {
    return nil, errors.New("JSON references are not supported at the moment: " + ref)
}

func headerResolver(ref string) (*openapi.HeaderObj, error) {
    return nil, errors.New("JSON references are not supported at the moment: " + ref)
}

func initParams(obj **openapi.ParameterList) {
    if *obj == nil {
        var params openapi.ParameterList = make([]openapi.Parameter, 0)
        *obj = &params
    }
}

func initHeaders(respObj *openapi.ResponseObj) {
    if respObj.Headers == nil {
        var created openapi.Headers = map[string]openapi.Header{}
        respObj.Headers = created
    }
}

func createSimpleParam(name string, in openapi.In, ptype openapi.SchemaType) *openapi.ParameterObj {
    if name == "" {
        panic("Cannot create parameter with empty name")
    }
    required := true // FFS! https://stackoverflow.com/questions/32364027/reference-a-boolean-for-assignment-in-a-struct/32364093
    schema := new(openapi.SchemaObj)
    schema.Type = openapi.Types{ptype}

    style := openapi.StyleSimple
    if in == openapi.InQuery {
        style = openapi.StyleForm
    }

    newParam := openapi.ParameterObj{
        Name:     name,
        In:       in,
        Style:    string(style),
        Examples: map[string]openapi.Example{},
        Schema:   schema,
        Required: &required,
    }
    return &newParam
}

func findParamByName(params *openapi.ParameterList, in openapi.In, name string) (idx int, pathParam *openapi.ParameterObj) {
    caseInsensitive := in == openapi.InHeader
    for i, param := range *params {
        idx = i
        paramObj, err := param.ResolveParameter(paramResolver)
        if err != nil {
            logger.Log.Warningf("Failed to resolve reference: %s", err)
            continue
        }

        if paramObj.In != in {
            continue
        }

        if paramObj.Name == name || (caseInsensitive && strings.EqualFold(paramObj.Name, name)) {
            pathParam = paramObj
            break
        }
    }

    return idx, pathParam
}

func findHeaderByName(headers *openapi.Headers, name string) *openapi.HeaderObj {
    for hname, param := range *headers {
        hdrObj, err := param.ResolveHeader(headerResolver)
        if err != nil {
            logger.Log.Warningf("Failed to resolve reference: %s", err)
            continue
        }

        if strings.EqualFold(hname, name) {
            return hdrObj
        }
    }
    return nil
}

type nvParams struct {
    In             openapi.In
    Pairs          []har.NVP
    IsIgnored      func(name string) bool
    GeneralizeName func(name string) string
}

func handleNameVals(gw nvParams, params **openapi.ParameterList, checkIgnore bool) {
    visited := map[string]*openapi.ParameterObj{}
    for _, pair := range gw.Pairs {
        if (checkIgnore && gw.IsIgnored(pair.Name)) || pair.Name == "" {
            continue
        }

        nameGeneral := gw.GeneralizeName(pair.Name)

        initParams(params)
        _, param := findParamByName(*params, gw.In, pair.Name)
        if param == nil {
            param = createSimpleParam(nameGeneral, gw.In, openapi.TypeString)
            appended := append(**params, param)
            *params = &appended
        }
        exmp := &param.Examples
        err := fillParamExample(&exmp, pair.Value)
        if err != nil {
            logger.Log.Warningf("Failed to add example to a parameter: %s", err)
        }
        visited[nameGeneral] = param
    }

    // maintain "required" flag
    if *params != nil {
        for _, param := range **params {
            paramObj, err := param.ResolveParameter(paramResolver)
            if err != nil {
                logger.Log.Warningf("Failed to resolve param: %s", err)
                continue
            }
            if paramObj.In != gw.In {
                continue
            }

            _, ok := visited[strings.ToLower(paramObj.Name)]
            if !ok {
                flag := false
                paramObj.Required = &flag
            }
        }
    }
}

func createHeader(ptype openapi.SchemaType) *openapi.HeaderObj {
    required := true // FFS! https://stackoverflow.com/questions/32364027/reference-a-boolean-for-assignment-in-a-struct/32364093
    schema := new(openapi.SchemaObj)
    schema.Type = make(openapi.Types, 0)
    schema.Type = append(schema.Type, ptype)

    style := openapi.StyleSimple
    newParam := openapi.HeaderObj{
        Style:    string(style),
        Examples: map[string]openapi.Example{},
        Schema:   schema,
        Required: &required,
    }
    return &newParam
}

func fillParamExample(param **openapi.Examples, exampleValue string) error {
    if **param == nil {
        **param = map[string]openapi.Example{}
    }

    cnt := 0
    for _, example := range **param {
        cnt++
        exampleObj, err := example.ResolveExample(exampleResolver)
        if err != nil {
            continue
        }

        var value string
        err = json.Unmarshal(exampleObj.Value, &value)
        if err != nil {
            logger.Log.Warningf("Failed decoding parameter example into string: %s", err)
            continue
        }

        if value == exampleValue || cnt >= 5 { // 5 examples is enough
            return nil
        }
    }

    valMsg, err := json.Marshal(exampleValue)
    if err != nil {
        return err
    }

    themap := **param
    themap["example #"+strconv.Itoa(cnt)] = &openapi.ExampleObj{Value: valMsg}

    return nil
}

// TODO: somehow generalize the two example setting functions, plus add body example handling

func addSchemaExample(existing *openapi.SchemaObj, bodyStr string) {
    if len(existing.Examples) < 5 {
        found := false
        for _, eVal := range existing.Examples {
            existingExample := ""
            err := json.Unmarshal(eVal, &existingExample)
            if err != nil {
                logger.Log.Debugf("Failed to unmarshal example: %v", eVal)
                continue
            }

            if existingExample == bodyStr {
                found = true
                break
            }
        }

        if !found {
            example, err := json.Marshal(bodyStr)
            if err != nil {
                logger.Log.Debugf("Failed to marshal example: %v", bodyStr)
                return
            }
            existing.Examples = append(existing.Examples, example)
        }
    }
}

func longestCommonXfix(strs [][]string, pre bool) []string { // https://github.com/jpillora/longestcommon
    empty := make([]string, 0)
    //short-circuit empty list
    if len(strs) == 0 {
        return empty
    }
    xfix := strs[0]
    //short-circuit single-element list
    if len(strs) == 1 {
        return xfix
    }
    //compare first to rest
    for _, str := range strs[1:] {
        xfixl := len(xfix)
        strl := len(str)
        //short-circuit empty strings
        if xfixl == 0 || strl == 0 {
            return empty
        }
        //maximum possible length
        maxl := xfixl
        if strl < maxl {
            maxl = strl
        }
        //compare letters
        if pre {
            //prefix, iterate left to right
            for i := 0; i < maxl; i++ {
                if xfix[i] != str[i] {
                    xfix = xfix[:i]
                    break
                }
            }
        } else {
            //suffix, iterate right to left
            for i := 0; i < maxl; i++ {
                xi := xfixl - i - 1
                si := strl - i - 1
                if xfix[xi] != str[si] {
                    xfix = xfix[xi+1:]
                    break
                }
            }
        }
    }
    return xfix
}

func longestCommonXfixStr(strs []string, pre bool) string { // https://github.com/jpillora/longestcommon
    //short-circuit empty list
    if len(strs) == 0 {
        return ""
    }
    xfix := strs[0]
    //short-circuit single-element list
    if len(strs) == 1 {
        return xfix
    }
    //compare first to rest
    for _, str := range strs[1:] {
        xfixl := len(xfix)
        strl := len(str)
        //short-circuit empty strings
        if xfixl == 0 || strl == 0 {
            return ""
        }
        //maximum possible length
        maxl := xfixl
        if strl < maxl {
            maxl = strl
        }
        //compare letters
        if pre {
            //prefix, iterate left to right
            for i := 0; i < maxl; i++ {
                if xfix[i] != str[i] {
                    xfix = xfix[:i]
                    break
                }
            }
        } else {
            //suffix, iterate right to left
            for i := 0; i < maxl; i++ {
                xi := xfixl - i - 1
                si := strl - i - 1
                if xfix[xi] != str[si] {
                    xfix = xfix[xi+1:]
                    break
                }
            }
        }
    }
    return xfix
}

func getSimilarPrefix(strs []string) string {
    chunked := make([][]string, 0)
    for _, item := range strs {
        chunked = append(chunked, strings.Split(item, "/"))
    }

    cmn := longestCommonXfix(chunked, true)
    res := make([]string, 0)
    for _, chunk := range cmn {
        if chunk != "api" && !IsVersionString(chunk) && !strings.HasPrefix(chunk, "{") {
            res = append(res, chunk)
        }
    }
    return strings.Join(res[1:], ".")
}

// returns all non-nil ops in PathObj
func getOps(pathObj *openapi.PathObj) []*openapi.Operation {
    ops := []**openapi.Operation{&pathObj.Get, &pathObj.Patch, &pathObj.Put, &pathObj.Options, &pathObj.Post, &pathObj.Trace, &pathObj.Head, &pathObj.Delete}
    res := make([]*openapi.Operation, 0)
    for _, opp := range ops {
        if *opp == nil {
            continue
        }
        res = append(res, *opp)
    }
    return res
}

// parses JSON into any possible value
func anyJSON(text string) (anyVal interface{}, isJSON bool) {
    isJSON = true
    asMap := map[string]interface{}{}
    err := json.Unmarshal([]byte(text), &asMap)
    if err == nil && asMap != nil {
        return asMap, isJSON
    }

    asArray := make([]interface{}, 0)
    err = json.Unmarshal([]byte(text), &asArray)
    if err == nil && asArray != nil {
        return asArray, isJSON
    }

    asString := ""
    sPtr := &asString
    err = json.Unmarshal([]byte(text), &sPtr)
    if err == nil && sPtr != nil {
        return asString, isJSON
    }

    asInt := 0
    intPtr := &asInt
    err = json.Unmarshal([]byte(text), &intPtr)
    if err == nil && intPtr != nil {
        return asInt, isJSON
    }

    asFloat := 0.0
    floatPtr := &asFloat
    err = json.Unmarshal([]byte(text), &floatPtr)
    if err == nil && floatPtr != nil {
        return asFloat, isJSON
    }

    asBool := false
    boolPtr := &asBool
    err = json.Unmarshal([]byte(text), &boolPtr)
    if err == nil && boolPtr != nil {
        return asBool, isJSON
    }

    if text == "null" {
        return nil, isJSON
    }

    return nil, false
}

func cleanStr(str string, criterion func(r rune) bool) string {
    s := []byte(str)
    j := 0
    for _, b := range s {
        if criterion(rune(b)) {
            s[j] = b
            j++
        }
    }
    return string(s[:j])
}

/*
func isAlpha(s string) bool {
    for _, r := range s {
        if isAlphaRune(r) {
            return false
        }
    }
    return true
}
*/

func isAlphaRune(r rune) bool {
    return !((r < 'a' || r > 'z') && (r < 'A' || r > 'Z'))
}

func isAlNumRune(b rune) bool {
    return isAlphaRune(b) || ('0' <= b && b <= '9')
}

func deleteFromSlice(s []string, val string) []string {
    temp := s[:0]
    for _, x := range s {
        if x != val {
            temp = append(temp, x)
        }
    }
    return temp
}

func sliceContains(s []string, e string) bool {
    for _, a := range s {
        if a == e {
            return true
        }
    }
    return false
}

func intersectSliceWithMap(required []string, names map[string]struct{}) []string {
    for name := range names {
        if !sliceContains(required, name) {
            required = deleteFromSlice(required, name)
        }
    }
    return required
}
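The common-prefix/suffix helpers above are what let getPatternFromExamples in tree.go derive the "^prefix-gibberish-.+" pattern seen in the OAS fixture. A hedged sketch (not in the original file, and assumed to sit in the same oas package) of that behavior on the fixture's sample values:

    package oas

    import "testing"

    // TestCommonPrefixSketch illustrates how the shared prefix of the observed
    // values yields the pattern anchor, while the lack of a shared suffix leaves
    // the pattern open-ended.
    func TestCommonPrefixSketch(t *testing.T) {
        vals := []string{
            "prefix-gibberish-sfdlasdfkadf87sd93284q24r",
            "prefix-gibberish-4jk5l2345h2452l4352435jlk45",
            "prefix-gibberish-afterwards",
        }

        prefix := longestCommonXfixStr(vals, true)  // "prefix-gibberish-"
        suffix := longestCommonXfixStr(vals, false) // "" (no common suffix)

        if prefix != "prefix-gibberish-" || suffix != "" {
            t.Errorf("unexpected prefix %q / suffix %q", prefix, suffix)
        }
    }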
@@ -1,59 +0,0 @@
package oas

import (
    "testing"
)

func TestAnyJSON(t *testing.T) {
    testCases := []struct {
        inp    string
        isJSON bool
        out    interface{}
    }{
        {`{"key": 1, "keyNull": null}`, true, nil},
        {`[{"key": "val"}, ["subarray"], "string", 1, 2.2, true, null]`, true, nil},
        {`"somestring"`, true, "somestring"},
        {"0", true, 0},
        {"0.5", true, 0.5},
        {"true", true, true},
        {"null", true, nil},
        {"sabbra cadabra", false, nil},
        {"0.1.2.3", false, nil},
    }
    for _, tc := range testCases {
        any, isJSON := anyJSON(tc.inp)
        if isJSON != tc.isJSON {
            t.Errorf("Parse flag mismatch: %t != %t", tc.isJSON, isJSON)
        } else if isJSON && tc.out != nil && tc.out != any {
            t.Errorf("%s != %s", any, tc.out)
        } else if tc.inp == "null" && any != nil {
            t.Errorf("null has to parse as nil (but got %s)", any)
        } else {
            t.Logf("%s => %s", tc.inp, any)
        }
    }
}

func TestStrRunes(t *testing.T) {
    if isAlphaRune('5') {
        t.Logf("Failed")
    }
    if !isAlphaRune('a') {
        t.Logf("Failed")
    }

    if !isAlNumRune('5') {
        t.Logf("Failed")
    }
    if isAlNumRune(' ') {
        t.Logf("Failed")
    }

    if cleanStr("-abc_567", isAlphaRune) != "abc" {
        t.Logf("Failed")
    }

    if cleanStr("-abc_567", isAlNumRune) != "abc567" {
        t.Logf("Failed")
    }
}
@@ -1,36 +0,0 @@
package providers

import (
    "reflect"
    "time"
)

type GeneralStats struct {
    EntriesCount        int
    EntriesVolumeInGB   float64
    FirstEntryTimestamp int
    LastEntryTimestamp  int
}

var generalStats = GeneralStats{}

func ResetGeneralStats() {
    generalStats = GeneralStats{}
}

func GetGeneralStats() GeneralStats {
    return generalStats
}

func EntryAdded(size int) {
    generalStats.EntriesCount++
    generalStats.EntriesVolumeInGB += float64(size) / (1 << 30)

    currentTimestamp := int(time.Now().Unix())

    if reflect.Value.IsZero(reflect.ValueOf(generalStats.FirstEntryTimestamp)) {
        generalStats.FirstEntryTimestamp = currentTimestamp
    }

    generalStats.LastEntryTimestamp = currentTimestamp
}
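EntryAdded accumulates volume as bytes divided by 2^30, i.e. binary gigabytes. A small worked example (not from the repository) showing the arithmetic for a 1 MiB entry:

    package main

    import "fmt"

    func main() {
        size := 1 << 20 // a 1 MiB entry
        // Same formula as EntryAdded: float64(size) / (1 << 30)
        fmt.Printf("%.5f GB\n", float64(size)/(1<<30)) // prints 0.00098
    }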
@@ -1,71 +0,0 @@
package providers_test

import (
    "fmt"
    "testing"

    "github.com/up9inc/mizu/agent/pkg/providers"
)

func TestNoEntryAddedCount(t *testing.T) {
    entriesStats := providers.GetGeneralStats()

    if entriesStats.EntriesCount != 0 {
        t.Errorf("unexpected result - expected: %v, actual: %v", 0, entriesStats.EntriesCount)
    }

    if entriesStats.EntriesVolumeInGB != 0 {
        t.Errorf("unexpected result - expected: %v, actual: %v", 0, entriesStats.EntriesVolumeInGB)
    }
}

func TestEntryAddedCount(t *testing.T) {
    tests := []int{1, 5, 10, 100, 500, 1000}

    for _, entriesCount := range tests {
        t.Run(fmt.Sprintf("%d", entriesCount), func(t *testing.T) {
            for i := 0; i < entriesCount; i++ {
                providers.EntryAdded(0)
            }

            entriesStats := providers.GetGeneralStats()

            if entriesStats.EntriesCount != entriesCount {
                t.Errorf("unexpected result - expected: %v, actual: %v", entriesCount, entriesStats.EntriesCount)
            }

            if entriesStats.EntriesVolumeInGB != 0 {
                t.Errorf("unexpected result - expected: %v, actual: %v", 0, entriesStats.EntriesVolumeInGB)
            }

            t.Cleanup(providers.ResetGeneralStats)
        })
    }
}

func TestEntryAddedVolume(t *testing.T) {
    // 6 bytes + 4 bytes
    tests := [][]byte{[]byte("volume"), []byte("test")}
    var expectedEntriesCount int
    var expectedVolumeInGB float64

    for _, data := range tests {
        t.Run(fmt.Sprintf("%d", len(data)), func(t *testing.T) {
            expectedEntriesCount++
            expectedVolumeInGB += float64(len(data)) / (1 << 30)

            providers.EntryAdded(len(data))

            entriesStats := providers.GetGeneralStats()

            if entriesStats.EntriesCount != expectedEntriesCount {
                t.Errorf("unexpected result - expected: %v, actual: %v", expectedEntriesCount, entriesStats.EntriesCount)
            }

            if entriesStats.EntriesVolumeInGB != expectedVolumeInGB {
                t.Errorf("unexpected result - expected: %v, actual: %v", expectedVolumeInGB, entriesStats.EntriesVolumeInGB)
            }
        })
    }
}
@@ -1,66 +0,0 @@
package providers

import (
    "encoding/json"
    "fmt"
    "os"
    "time"

    "github.com/patrickmn/go-cache"
    "github.com/up9inc/mizu/agent/pkg/models"
    "github.com/up9inc/mizu/shared"
    "github.com/up9inc/mizu/tap"
)

const tlsLinkRetainmentTime = time.Minute * 15

var (
    authStatus     *models.AuthStatus
    RecentTLSLinks = cache.New(tlsLinkRetainmentTime, tlsLinkRetainmentTime)
)

func GetAuthStatus() (*models.AuthStatus, error) {
    if authStatus == nil {
        syncEntriesConfigJson := os.Getenv(shared.SyncEntriesConfigEnvVar)
        if syncEntriesConfigJson == "" {
            authStatus = &models.AuthStatus{}
            return authStatus, nil
        }

        syncEntriesConfig := &shared.SyncEntriesConfig{}
        err := json.Unmarshal([]byte(syncEntriesConfigJson), syncEntriesConfig)
        if err != nil {
            return nil, fmt.Errorf("failed to unmarshal sync entries config, err: %v", err)
        }

        if syncEntriesConfig.Token == "" {
            authStatus = &models.AuthStatus{}
            return authStatus, nil
        }

        tokenEmail, err := shared.GetTokenEmail(syncEntriesConfig.Token)
        if err != nil {
            return nil, fmt.Errorf("failed to get token email, err: %v", err)
        }

        authStatus = &models.AuthStatus{
            Email: tokenEmail,
            Model: syncEntriesConfig.Workspace,
        }
    }

    return authStatus, nil
}

func GetAllRecentTLSAddresses() []string {
    recentTLSLinks := make([]string, 0)

    for _, outboundLinkItem := range RecentTLSLinks.Items() {
        outboundLink, castOk := outboundLinkItem.Object.(*tap.OutboundLink)
        if castOk {
            recentTLSLinks = append(recentTLSLinks, outboundLink.DstIP)
        }
    }

    return recentTLSLinks
}
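RecentTLSLinks is a go-cache instance with a 15-minute retention window, and GetAllRecentTLSAddresses only reads the DstIP of each cached OutboundLink. A hedged sketch (not from the repository) of how a producer might feed it; the key format and the idea that only DstIP needs to be set are assumptions:

    package main

    import (
        "fmt"

        "github.com/up9inc/mizu/agent/pkg/providers"
        "github.com/up9inc/mizu/tap"
    )

    func main() {
        // Record one outbound TLS destination under the default 15-minute expiration.
        providers.RecentTLSLinks.SetDefault("10.0.0.12:443", &tap.OutboundLink{DstIP: "10.0.0.12"})

        for _, addr := range providers.GetAllRecentTLSAddresses() {
            fmt.Println(addr) // 10.0.0.12
        }
    }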
@@ -1,69 +0,0 @@
package tappedPods

import (
    "os"
    "strings"
    "sync"

    "github.com/up9inc/mizu/agent/pkg/providers/tappers"
    "github.com/up9inc/mizu/agent/pkg/utils"
    "github.com/up9inc/mizu/shared"
    "github.com/up9inc/mizu/shared/logger"
)

const FilePath = shared.DataDirPath + "tapped-pods.json"

var (
    lock                    = &sync.Mutex{}
    syncOnce                sync.Once
    tappedPods              []*shared.PodInfo
    nodeHostToTappedPodsMap shared.NodeToPodsMap
)

func Get() []*shared.PodInfo {
    syncOnce.Do(func() {
        if err := utils.ReadJsonFile(FilePath, &tappedPods); err != nil {
            if !os.IsNotExist(err) {
                logger.Log.Errorf("Error reading tapped pods from file, err: %v", err)
            }
        }
    })

    return tappedPods
}

func Set(tappedPodsToSet []*shared.PodInfo) {
    lock.Lock()
    defer lock.Unlock()

    tappedPods = tappedPodsToSet
    if err := utils.SaveJsonFile(FilePath, tappedPods); err != nil {
        logger.Log.Errorf("Error saving tapped pods, err: %v", err)
    }
}

func GetTappedPodsStatus() []shared.TappedPodStatus {
    tappedPodsStatus := make([]shared.TappedPodStatus, 0)
    for _, pod := range Get() {
        var status string
        if tapperStatus, ok := tappers.GetStatus()[pod.NodeName]; ok {
            status = strings.ToLower(tapperStatus.Status)
        }

        isTapped := status == "running"
        tappedPodsStatus = append(tappedPodsStatus, shared.TappedPodStatus{Name: pod.Name, Namespace: pod.Namespace, IsTapped: isTapped})
    }

    return tappedPodsStatus
}

func SetNodeToTappedPodMap(nodeToTappedPodsMap shared.NodeToPodsMap) {
    summary := nodeToTappedPodsMap.Summary()
    logger.Log.Infof("Setting node to tapped pods map to %v", summary)

    nodeHostToTappedPodsMap = nodeToTappedPodsMap
}

func GetNodeToTappedPodMap() shared.NodeToPodsMap {
    return nodeHostToTappedPodsMap
}
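A hedged usage sketch (not from the repository) of the package above. The import path and the PodInfo struct literal are assumptions: only the Name, Namespace and NodeName fields are visible in this diff, so only they are used.

    package main

    import (
        "fmt"

        "github.com/up9inc/mizu/agent/pkg/providers/tappedPods" // assumed path, by analogy with providers/tappers
        "github.com/up9inc/mizu/shared"
    )

    func main() {
        tappedPods.Set([]*shared.PodInfo{
            {Name: "carts-db-0", Namespace: "sock-shop", NodeName: "node-a"},
        })

        // IsTapped turns true only once the tapper on "node-a" reports status "running".
        for _, s := range tappedPods.GetTappedPodsStatus() {
            fmt.Printf("%s/%s tapped=%t\n", s.Namespace, s.Name, s.IsTapped)
        }
    }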
@@ -1,83 +0,0 @@
package tappers

import (
	"os"
	"sync"

	"github.com/up9inc/mizu/agent/pkg/utils"
	"github.com/up9inc/mizu/shared"
	"github.com/up9inc/mizu/shared/logger"
)

const FilePath = shared.DataDirPath + "tappers-status.json"

var (
	lockStatus = &sync.Mutex{}
	syncOnce   sync.Once
	status     map[string]*shared.TapperStatus

	lockConnectedCount = &sync.Mutex{}
	connectedCount     int
)

func GetStatus() map[string]*shared.TapperStatus {
	initStatus()

	return status
}

func SetStatus(tapperStatus *shared.TapperStatus) {
	initStatus()

	lockStatus.Lock()
	defer lockStatus.Unlock()

	status[tapperStatus.NodeName] = tapperStatus

	saveStatus()
}

func ResetStatus() {
	lockStatus.Lock()
	defer lockStatus.Unlock()

	status = make(map[string]*shared.TapperStatus)

	saveStatus()
}

func GetConnectedCount() int {
	return connectedCount
}

func Connected() {
	lockConnectedCount.Lock()
	defer lockConnectedCount.Unlock()

	connectedCount++
}

func Disconnected() {
	lockConnectedCount.Lock()
	defer lockConnectedCount.Unlock()

	connectedCount--
}

func initStatus() {
	syncOnce.Do(func() {
		if err := utils.ReadJsonFile(FilePath, &status); err != nil {
			status = make(map[string]*shared.TapperStatus)

			if !os.IsNotExist(err) {
				logger.Log.Errorf("Error reading tappers status from file, err: %v", err)
			}
		}
	})
}

func saveStatus() {
	if err := utils.SaveJsonFile(FilePath, status); err != nil {
		logger.Log.Errorf("Error saving tappers status, err: %v", err)
	}
}
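A hypothetical caller sketch for the package above; the handler names, import path, and status strings are assumptions, while the tappers API and the shared.TapperStatus fields come from the code itself. GetTappedPodsStatus lowercases the stored status and compares it to "running", so the value used here stays consistent with that check.

package main

import (
	"fmt"

	"github.com/up9inc/mizu/agent/pkg/providers/tappers" // assumed import path
	"github.com/up9inc/mizu/shared"
)

// onTapperConnected would be called when a tapper's connection is established (illustrative).
func onTapperConnected(nodeName string) {
	tappers.Connected()
	tappers.SetStatus(&shared.TapperStatus{NodeName: nodeName, Status: "Running"}) // status value assumed
}

// onTapperDisconnected would be called when that connection drops (illustrative).
func onTapperDisconnected() {
	tappers.Disconnected()
}

func main() {
	onTapperConnected("node-1")
	fmt.Println("connected tappers:", tappers.GetConnectedCount())
	onTapperDisconnected()
}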
@@ -1,23 +0,0 @@
package resolver

import (
	cmap "github.com/orcaman/concurrent-map"
	"k8s.io/client-go/kubernetes"
	_ "k8s.io/client-go/plugin/pkg/client/auth/azure"
	_ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
	_ "k8s.io/client-go/plugin/pkg/client/auth/oidc"
	_ "k8s.io/client-go/plugin/pkg/client/auth/openstack"
	restclient "k8s.io/client-go/rest"
)

func NewFromInCluster(errOut chan error, namespace string) (*Resolver, error) {
	config, err := restclient.InClusterConfig()
	if err != nil {
		return nil, err
	}
	clientset, err := kubernetes.NewForConfig(config)
	if err != nil {
		return nil, err
	}
	return &Resolver{clientConfig: config, clientSet: clientset, nameMap: cmap.New(), serviceMap: cmap.New(), errOut: errOut, namespace: namespace}, nil
}
@@ -1,208 +0,0 @@
package resolver

import (
	"context"
	"errors"
	"fmt"

	"github.com/up9inc/mizu/shared/logger"
	k8serrors "k8s.io/apimachinery/pkg/api/errors"

	cmap "github.com/orcaman/concurrent-map"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/watch"
	"k8s.io/client-go/kubernetes"
	restclient "k8s.io/client-go/rest"
)

const (
	kubClientNullString = "None"
)

type Resolver struct {
	clientConfig *restclient.Config
	clientSet    *kubernetes.Clientset
	nameMap      cmap.ConcurrentMap
	serviceMap   cmap.ConcurrentMap
	isStarted    bool
	errOut       chan error
	namespace    string
}

type ResolvedObjectInfo struct {
	FullAddress string
	Namespace   string
}

func (resolver *Resolver) Start(ctx context.Context) {
	if !resolver.isStarted {
		resolver.isStarted = true

		go resolver.infiniteErrorHandleRetryFunc(ctx, resolver.watchServices)
		go resolver.infiniteErrorHandleRetryFunc(ctx, resolver.watchEndpoints)
		go resolver.infiniteErrorHandleRetryFunc(ctx, resolver.watchPods)
	}
}

func (resolver *Resolver) Resolve(name string) *ResolvedObjectInfo {
	resolvedName, isFound := resolver.nameMap.Get(name)
	if !isFound {
		return nil
	}
	return resolvedName.(*ResolvedObjectInfo)
}

func (resolver *Resolver) GetMap() cmap.ConcurrentMap {
	return resolver.nameMap
}

func (resolver *Resolver) CheckIsServiceIP(address string) bool {
	_, isFound := resolver.serviceMap.Get(address)
	return isFound
}

func (resolver *Resolver) watchPods(ctx context.Context) error {
	// empty namespace makes the client watch all namespaces
	watcher, err := resolver.clientSet.CoreV1().Pods(resolver.namespace).Watch(ctx, metav1.ListOptions{Watch: true})
	if err != nil {
		return err
	}
	for {
		select {
		case event := <-watcher.ResultChan():
			if event.Object == nil {
				return errors.New("error in kubectl pod watch")
			}
			if event.Type == watch.Deleted {
				pod := event.Object.(*corev1.Pod)
				resolver.saveResolvedName(pod.Status.PodIP, "", pod.Namespace, event.Type)
			}
		case <-ctx.Done():
			watcher.Stop()
			return nil
		}
	}
}

func (resolver *Resolver) watchEndpoints(ctx context.Context) error {
	// empty namespace makes the client watch all namespaces
	watcher, err := resolver.clientSet.CoreV1().Endpoints(resolver.namespace).Watch(ctx, metav1.ListOptions{Watch: true})
	if err != nil {
		return err
	}
	for {
		select {
		case event := <-watcher.ResultChan():
			if event.Object == nil {
				return errors.New("error in kubectl endpoint watch")
			}
			endpoint := event.Object.(*corev1.Endpoints)
			serviceHostname := fmt.Sprintf("%s.%s", endpoint.Name, endpoint.Namespace)
			if endpoint.Subsets != nil {
				for _, subset := range endpoint.Subsets {
					var ports []int32
					if subset.Ports != nil {
						for _, portMapping := range subset.Ports {
							if portMapping.Port > 0 {
								ports = append(ports, portMapping.Port)
							}
						}
					}
					if subset.Addresses != nil {
						for _, address := range subset.Addresses {
							resolver.saveResolvedName(address.IP, serviceHostname, endpoint.Namespace, event.Type)
							for _, port := range ports {
								ipWithPort := fmt.Sprintf("%s:%d", address.IP, port)
								resolver.saveResolvedName(ipWithPort, serviceHostname, endpoint.Namespace, event.Type)
							}
						}
					}

				}
			}
		case <-ctx.Done():
			watcher.Stop()
			return nil
		}
	}
}

func (resolver *Resolver) watchServices(ctx context.Context) error {
	// empty namespace makes the client watch all namespaces
	watcher, err := resolver.clientSet.CoreV1().Services(resolver.namespace).Watch(ctx, metav1.ListOptions{Watch: true})
	if err != nil {
		return err
	}
	for {
		select {
		case event := <-watcher.ResultChan():
			if event.Object == nil {
				return errors.New("error in kubectl service watch")
			}

			service := event.Object.(*corev1.Service)
			serviceHostname := fmt.Sprintf("%s.%s", service.Name, service.Namespace)
			if service.Spec.ClusterIP != "" && service.Spec.ClusterIP != kubClientNullString {
				resolver.saveResolvedName(service.Spec.ClusterIP, serviceHostname, service.Namespace, event.Type)
				if service.Spec.Ports != nil {
					for _, port := range service.Spec.Ports {
						if port.Port > 0 {
							resolver.saveResolvedName(fmt.Sprintf("%s:%d", service.Spec.ClusterIP, port.Port), serviceHostname, service.Namespace, event.Type)
						}
					}
				}
				resolver.saveServiceIP(service.Spec.ClusterIP, serviceHostname, service.Namespace, event.Type)
			}
			if service.Status.LoadBalancer.Ingress != nil {
				for _, ingress := range service.Status.LoadBalancer.Ingress {
					resolver.saveResolvedName(ingress.IP, serviceHostname, service.Namespace, event.Type)
				}
			}
		case <-ctx.Done():
			watcher.Stop()
			return nil
		}
	}
}

func (resolver *Resolver) saveResolvedName(key string, resolved string, namespace string, eventType watch.EventType) {
	if eventType == watch.Deleted {
		resolver.nameMap.Remove(resolved)
		resolver.nameMap.Remove(key)
		logger.Log.Infof("setting %s=nil", key)
	} else {
		resolver.nameMap.Set(key, &ResolvedObjectInfo{FullAddress: resolved, Namespace: namespace})
		resolver.nameMap.Set(resolved, &ResolvedObjectInfo{FullAddress: resolved, Namespace: namespace})
		logger.Log.Infof("setting %s=%s", key, resolved)
	}
}

func (resolver *Resolver) saveServiceIP(key string, resolved string, namespace string, eventType watch.EventType) {
	if eventType == watch.Deleted {
		resolver.serviceMap.Remove(key)
	} else {
		resolver.nameMap.Set(key, &ResolvedObjectInfo{FullAddress: resolved, Namespace: namespace})
	}
}

func (resolver *Resolver) infiniteErrorHandleRetryFunc(ctx context.Context, fun func(ctx context.Context) error) {
	for {
		err := fun(ctx)
		if err != nil {
			resolver.errOut <- err

			var statusError *k8serrors.StatusError
			if errors.As(err, &statusError) {
				if statusError.ErrStatus.Reason == metav1.StatusReasonForbidden {
					logger.Log.Infof("Resolver loop encountered permission error, aborting event listening - %v", err)
					return
				}
			}
		}
		if ctx.Err() != nil { // context was cancelled or errored
			return
		}
	}
}
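A minimal wiring sketch for the resolver above; the import path, example address, and error-draining loop are illustrative assumptions. NewFromInCluster builds the client set, Start launches the three watch loops, and Resolve maps an IP (or IP:port) back to a service hostname.

package main

import (
	"context"
	"fmt"

	"github.com/up9inc/mizu/agent/pkg/resolver" // assumed import path
	"github.com/up9inc/mizu/shared/logger"
)

func main() {
	errOut := make(chan error, 100)
	k8sResolver, err := resolver.NewFromInCluster(errOut, "") // empty namespace watches all namespaces
	if err != nil {
		logger.Log.Errorf("failed to create resolver: %v", err)
		return
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	k8sResolver.Start(ctx)

	// Drain watch errors so the retry loops never block on errOut.
	go func() {
		for watchErr := range errOut {
			logger.Log.Errorf("resolver watch error: %v", watchErr)
		}
	}()

	// Example lookup; the address is illustrative.
	if info := k8sResolver.Resolve("10.96.0.10:53"); info != nil {
		fmt.Printf("resolved to %s in namespace %s\n", info.FullAddress, info.Namespace)
	}
}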
@@ -1,15 +0,0 @@
package routes

import (
	"github.com/up9inc/mizu/agent/pkg/controllers"

	"github.com/gin-gonic/gin"
)

// DbRoutes defines the group of database routes.
func DbRoutes(app *gin.Engine) {
	routeGroup := app.Group("/db")

	routeGroup.GET("/flush", controllers.Flush)
	routeGroup.GET("/reset", controllers.Reset)
}
@@ -1,14 +0,0 @@
package routes

import (
	"github.com/gin-gonic/gin"
	"github.com/up9inc/mizu/agent/pkg/controllers"
)

// EntriesRoutes defines the group of har entries routes.
func EntriesRoutes(ginApp *gin.Engine) {
	routeGroup := ginApp.Group("/entries")

	routeGroup.GET("/", controllers.GetEntries)    // get entries (base/thin entries) and metadata
	routeGroup.GET("/:id", controllers.GetEntry)   // get single (full) entry
}
@@ -1,14 +0,0 @@
package routes

import (
	"github.com/up9inc/mizu/agent/pkg/controllers"

	"github.com/gin-gonic/gin"
)

// MetadataRoutes defines the group of metadata routes.
func MetadataRoutes(app *gin.Engine) {
	routeGroup := app.Group("/metadata")

	routeGroup.GET("/version", controllers.GetVersion)
}
@@ -1,15 +0,0 @@
package routes

import (
	"github.com/gin-gonic/gin"
	"github.com/up9inc/mizu/agent/pkg/controllers"
)

// OASRoutes defines the routes for accessing the OAS specs.
func OASRoutes(ginApp *gin.Engine) {
	routeGroup := ginApp.Group("/oas")

	routeGroup.GET("/", controllers.GetOASServers)      // list of servers in OAS map
	routeGroup.GET("/all", controllers.GetOASAllSpecs)  // all OAS specs in the map
	routeGroup.GET("/:id", controllers.GetOASSpec)      // get OAS spec for a given server
}
@@ -1,12 +0,0 @@
package routes

import (
	"github.com/gin-gonic/gin"
	"github.com/up9inc/mizu/agent/pkg/controllers"
)

// QueryRoutes defines the group of query routes.
func QueryRoutes(ginApp *gin.Engine) {
	routeGroup := ginApp.Group("/query")

	routeGroup.POST("/validate", controllers.PostValidate)
}
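Taken together, the route files above each attach one group to a gin engine. A hypothetical wiring sketch follows; the import path, listen address, and main wrapper are assumptions, not taken from this diff.

package main

import (
	"github.com/gin-gonic/gin"

	"github.com/up9inc/mizu/agent/pkg/routes" // assumed import path
)

func main() {
	ginApp := gin.Default()

	// Register each route group defined above on the same engine.
	routes.DbRoutes(ginApp)
	routes.EntriesRoutes(ginApp)
	routes.MetadataRoutes(ginApp)
	routes.OASRoutes(ginApp)
	routes.QueryRoutes(ginApp)

	if err := ginApp.Run(":8899"); err != nil { // listen address assumed
		panic(err)
	}
}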
Some files were not shown because too many files have changed in this diff.