Compare commits

...

123 Commits

Author SHA1 Message Date
lirazyehezkel
ee8dce4466 Clear entry detailed on workspace change (#1045) 2022-04-26 15:40:42 +03:00
David Levanon
b5c665b602 set capture time for every packet, so long living readers would be accurate (#1043) 2022-04-26 14:37:44 +03:00
David Levanon
fadd052f02 init tls entries capture time (#1042) 2022-04-25 13:42:52 +03:00
gadotroee
171b72831f Add docker buildx acceptance tests image build (#1039) 2022-04-25 13:06:46 +03:00
leon-up9
36910c84ed filter changes state on different events (#1040)
Co-authored-by: Leon <>
2022-04-24 17:20:14 +03:00
leon-up9
95a37c877e fix/ui/Service-map-filtering-is-not-updated-after-refresh_TRA-4497 (#1033)
* filter graph when refresh

* change to const

* listen only for getServicesForFilter

Co-authored-by: Leon <>
Co-authored-by: Igor Gov <iggvrv@gmail.com>
2022-04-24 11:53:30 +03:00
Igor Gov
7ce74c71b4 Merge branch 'main' into develop
# Conflicts:
#	ui-common/package.json
#	ui-common/src/components/TrafficViewer/EntryListItem/EntryListItem.module.sass
#	ui-common/src/components/TrafficViewer/EntryListItem/EntryListItem.tsx
#	ui/package.json
#	ui/src/components/Pages/TrafficPage/TrafficPage.tsx
#	ui/src/components/ServiceMapModal/ServiceMapModal.tsx
#	ui/src/components/ServiceMapModal/ServiceMapOptions.ts
2022-04-24 11:36:08 +03:00
RoyUP9
6a15bf1aed Fixed OAS showing unresolved services (#1037) 2022-04-24 11:28:50 +03:00
Igor Gov
2de57a6630 Fix: websocket timeouts when many traffic entries sent (#1038) 2022-04-24 11:23:59 +03:00
leon-up9
b5c5a8934b Fix/ui/Service-map-GUI-issues_TRA-4499 (#1034)
* LoadingOverlay z-index

* resizable max width
servicemap edges background color

* z-index as overlay

Co-authored-by: Leon <>
2022-04-24 10:28:00 +03:00
leon-up9
df269fa950 method changed to useCallback (#1032)
height for horizontal scroll like in Ent

Co-authored-by: Leon <>
Co-authored-by: lirazyehezkel <61656597+lirazyehezkel@users.noreply.github.com>
2022-04-24 10:20:19 +03:00
M. Mert Yıldıran
80ebd7199d Fix the CLI build error for Windows target (#1031) 2022-04-24 09:45:15 +03:00
lirazyehezkel
9b475a78f9 Started listening entry time (#1030) 2022-04-20 15:12:36 +03:00
RoyUP9
d76f24edb8 Fixed tapper socket error on failure (#1029) 2022-04-20 14:53:07 +03:00
lirazyehezkel
32ba653d12 TLS icon z index (#1028) 2022-04-20 14:34:16 +03:00
AmitUp9
e03db7b09b Bug/UI/tra 4473 grpc decoding error (#1025)
* note added and formatting added

* showing message on ui instead of console error
2022-04-20 11:20:14 +03:00
gadotroee
de533730d3 Change the affinity to be valid when we are tapping multiple nodes (#1023) 2022-04-19 19:35:17 +03:00
lirazyehezkel
d834fcc3cb Entries count changes frequently (#1021)
Co-authored-by: leon-up9 <97597983+leon-up9@users.noreply.github.com>
2022-04-19 19:19:43 +03:00
gadotroee
1624b0d7b9 Apply daemon set by field selector metadata.name instead of label (#1022) 2022-04-19 18:22:29 +03:00
M. Mert Yıldıran
9715bb046b GetEntry unmarshaling error indicates an error message sent by the database server (#1020) 2022-04-19 16:23:23 +03:00
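
The behavior described in #1020, as a minimal sketch assuming Basenine replies with a plain-text error string when it cannot return an entry (package and helper names below are illustrative, not the actual agent code):

package entries

import (
	"encoding/json"
	"fmt"
)

// getEntry is a hypothetical helper: if the payload does not unmarshal as JSON,
// it is assumed to be an error message sent by the database server.
func getEntry(raw []byte) (map[string]interface{}, error) {
	var entry map[string]interface{}
	if err := json.Unmarshal(raw, &entry); err != nil {
		return nil, fmt.Errorf("database server error: %s", string(raw))
	}
	return entry, nil
}
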
M. Mert Yıldıran
65e5ebe23c Upgrade Basenine to 0.7.3 (#1019)
* Upgrade Basenine to `0.7.3`

* Bring back `github.com/patrickmn/go-cache` that's removed by `go mod tidy`
2022-04-19 14:10:37 +03:00
leon-up9
30986c3b22 Link component (#1005)
* Link component

* change default use

* PR comments

Co-authored-by: Leon <>
2022-04-19 13:50:34 +03:00
Igor Gov
1e167f2757 Trigger by commit message acceptance test on PR to develop (#1017) 2022-04-19 09:12:30 +03:00
gadotroee
149e86d050 Run unit tests when tap/api changes (#1016)
* update test files paths
2022-04-19 08:51:32 +03:00
David Levanon
1213162b85 Add kube namespace to tls (TRA-4443) (#1013)
* add namespace to tls - initial commit
* add tls namespace to mizu entry
2022-04-18 16:12:51 +03:00
M. Mert Yıldıran
189c158150 Remove TLSWarning React component (#1014) 2022-04-18 15:21:25 +03:00
M. Mert Yıldıran
c5006e5f57 TRA-4501 Don't panic in case of a Basenine connection error and try to reconnect with 3 seconds intervals (#1012)
* Don't panic in case of a Basenine connection error and try to reconnect with 3 seconds intervals

* More improvements
2022-04-18 11:00:43 +03:00
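
A minimal sketch of the reconnect behavior described in #1012, assuming a plain TCP dial; the actual agent uses the Basenine client library, whose API may differ:

package database

import (
	"log"
	"net"
	"time"
)

// connectWithRetry is a sketch: instead of panicking on a Basenine connection
// error, keep retrying with 3-second intervals until the dial succeeds.
func connectWithRetry(addr string) net.Conn {
	for {
		conn, err := net.Dial("tcp", addr)
		if err == nil {
			return conn
		}
		log.Printf("basenine connection error: %v, retrying in 3 seconds", err)
		time.Sleep(3 * time.Second)
	}
}
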
M. Mert Yıldıran
d7fcf273c0 TRA-4494 Remove all non-functional OutboundLink code that was providing /status/recentTLSLinks endpoint (#1008)
* Remove non-critical TLS detected log that causes `slice bounds out of range` error

* Remove all non-functional `OutboundLink` code that was providing `/status/recentTLSLinks` endpoint

* Fix more unused code
2022-04-17 19:01:21 +03:00
lirazyehezkel
eca3267b47 Fix acceptance tests (#1011) 2022-04-17 18:05:19 +03:00
lirazyehezkel
a527fc6c51 FE miss WS entries (#1010) 2022-04-17 15:34:21 +03:00
RoyUP9
e104128df8 Fix for acceptance tests (#1006) 2022-04-14 15:01:15 +03:00
M. Mert Yıldıran
5bb904d068 Fix the acceptance tests noMoreDataTop issue (#1003) 2022-04-14 10:16:02 +03:00
M. Mert Yıldıran
1a5378b64b Increase the OOM risk in exchange of less idle CPU usage (#979)
* Increase the OOM risk in exchange of less idle CPU usage

* Read the interval from an environment variable named `CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS`

* Log the `getCloseTimedoutTcpChannelsInterval` return value
2022-04-14 09:58:05 +03:00
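
A sketch of how the interval from #979 could be read, using the environment variable name given in the commit message; the default value below is an assumption, as the real one is not stated here:

package tap

import (
	"log"
	"os"
	"strconv"
	"time"
)

// getCloseTimedoutTcpChannelsInterval reads the interval (in milliseconds) from
// CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS, falling back to an assumed default.
func getCloseTimedoutTcpChannelsInterval() time.Duration {
	defaultMs := 1000 // assumption: the actual default is not stated in the log
	raw := os.Getenv("CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS")
	if raw != "" {
		if ms, err := strconv.Atoi(raw); err == nil && ms > 0 {
			defaultMs = ms
		}
	}
	interval := time.Duration(defaultMs) * time.Millisecond
	log.Printf("closing timed-out TCP stream channels every %v", interval)
	return interval
}
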
Igor Gov
7cfe506897 Remove auto issue close if stale too long (#999)
* Remove auto issue close if stale too long
2022-04-13 18:58:02 +03:00
Lamercho
ab64046e8e Remove cluster.local hostnames (#1002)
Co-authored-by: Milen Minev <milen@eta.bg>
2022-04-13 16:31:17 +03:00
Nimrod Gilboa Markevich
62cbcf8857 Exclude labels with the enhancement label from becoming stale (#997) 2022-04-13 16:17:49 +03:00
gadotroee
8f3b0e8fee Fix acceptance tests entry id is string and not number (#1000) 2022-04-13 14:37:59 +03:00
David Levanon
32d473ea26 Add service mesh badge (#989)
Co-authored-by: M. Mert Yıldıran <mehmet@up9.com>
Co-authored-by: M. Mert Yıldıran <mehmet@up9.com>
Co-authored-by: Nimrod Gilboa Markevich <59927337+nimrod-up9@users.noreply.github.com>
2022-04-13 13:30:57 +03:00
M. Mert Yıldıran
97000293fd Fix the unit tests of protocol extensions (#998) 2022-04-13 12:23:08 +03:00
gadotroee
3ed9bc1e0d Upgrade Basenine version to v0.7.1 (#984)
* Basenine MongoDB mess

* Fix more

* Fix the `mongodb` container arguments

* Add Basenine ARM64 binary

* Make the changes related to `leftOff` becoming a string value

* Make `leftOffTop` state string

* Handle `CloseConnection` in `Fetch`

* Upgrade Basenine to `0.7.0`

* Revert the changes in `package.json` and `package-lock.json`

* Fix the `Dockerfile`

* Remove the binaries

* Increase the Basenine up deadline to 20 seconds

* Revert the changes in `shared/kubernetes/provider.go`

* Fix the OAS generator tests

* Protect from race condition

* Fix mutexes

* Fix unlock

* Fix logging data types

* Try to stabilize the tests

* Remove the `replace` statement

* revert the change done in 2899414f2b so that the leftOff is not changed

* Change `leftOffBottom` empty string default value to `latest`

* Upgrade Basenine to `0.7.1`

* Handle the Basenine client library errors better

* Use `DEFAULT_QUERY` constant

* Remove `min=-1`

* Replace some `Errorf`s with `Panicf`s

* Remove the closure in `runGenerator` method

* Remove an unnecessary check

Co-authored-by: M. Mert Yildiran <mehmet@up9.com>
Co-authored-by: Andrey Pokhilko <apc4@ya.ru>
Co-authored-by: undera <undera@undera-old-desktop.home>
Co-authored-by: AmitUp9 <96980485+AmitUp9@users.noreply.github.com>
2022-04-13 11:28:48 +03:00
David Levanon
86e5dcea19 Tap TCP connections even if the handshake was missed (#988)
Support long-lived connections. This commit improves support for Linkerd, which uses long-lived connections.
2022-04-13 11:17:37 +03:00
lirazyehezkel
81fe4af30d close ws on modals open (#994) 2022-04-12 17:56:03 +03:00
leon-up9
df1fd2c3a7 Ui/Resiszable (#990)
* fixed toast
fixed filter refresh on reload

* reverted

* sticky selectlist header

* apply check to filtered items

* grpc filter Bug

* should almost fix filtering

* working without disabled

* handle disabled items

* small refactor

* almost working with weird gesture

* test

* servicesFilterList height

* fixed to work

* refresh margin

* after PR notes

* remove redundant var

* pr review

* Pr comments

* removed line

* removed redundant

* nullable check

Co-authored-by: Leon <>
2022-04-12 16:27:20 +03:00
lirazyehezkel
f8496c0235 Fix screen layout (#993)
Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-04-12 11:20:02 +03:00
Nimrod Gilboa Markevich
2de7107c0a Use author instead of committer in slack alerts (#992) 2022-04-12 10:48:50 +03:00
leon-up9
22e3b3d8b2 ServiceMapModal filters (#981)
* fixed toast
fixed filter refresh on reload

* reverted

* sticky selectlist header

* apply check to filtered items

* grpc filter Bug

* should almost fix filtering

* working without disabled

* handle disabled items

* small refactor

* servicesFilterList height

* after PR notes

* remove redundant var

* pr review

Co-authored-by: Leon <>
2022-04-11 17:26:28 +03:00
lirazyehezkel
45611c4c13 TRA-4477 FE holds limit of 10000 entries (#987)
* FE holds limit of 10000 entries

* let to const
2022-04-11 15:04:42 +03:00
RoyUP9
bb425fa6e2 Fixed service map unresolved bug (#986) 2022-04-11 14:37:53 +03:00
lirazyehezkel
4bc83ebcb5 Fix WS error when switching from settings to traffic viewer (#985) 2022-04-11 14:21:31 +03:00
M. Mert Yıldıran
bbb44dae79 Fix the unit tests of protocol extensions (#982) 2022-04-09 06:56:09 -07:00
M. Mert Yıldıran
72a1aba3e5 TRA-4410 Display namespace field in the UI (#974) 2022-04-08 21:16:25 +03:00
RoyUP9
d8fb8ff710 Fix for OAS reset not working (#978) 2022-04-07 18:14:03 +03:00
Nimrod Gilboa Markevich
f344bd2633 Make minor changes to OasGenerator (#977)
* Added log message
* Remove Reset function from OasGenerator interface, use Stop+Start instead
* SetEntriesQuery returns a bool stating whether the query changed
2022-04-07 10:46:30 +03:00
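
The interface shape implied by #977, sketched with assumed signatures; the real OasGenerator interface in the agent may carry additional methods and parameters:

package oas

// OasGenerator sketches the interface shape after #977: the Reset method is
// removed (callers use Stop followed by Start instead) and SetEntriesQuery
// returns a bool stating whether the query changed.
type OasGenerator interface {
	Start()
	Stop()
	SetEntriesQuery(query string) bool
}
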
M. Mert Yıldıran
6575495fa5 Remove gRPC related modifications (#958)
* Remove gRPC related modifications

* Remove gRPC status text related modifications as well

* Fixing gRPC vertical image

detect grpc when content type is 'application/grpc' as well  (and not only from the grpc-status)

Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-04-06 18:50:36 +03:00
RoyUP9
cf5c03d45c Fixed service map returning nil values (#975) 2022-04-06 13:12:38 +03:00
Nimrod Gilboa Markevich
491da24c63 Add ability to set query in OAS Generator (#964) 2022-04-06 11:54:55 +03:00
leon-up9
832162ae0f Ui/Service-map-split-to-ui-common (#966)
* added serviceModal & selectList & noDataMessage
removed leftovers from split

* scroll fix

* sort by name

* search alignment

* space removed

* margin-bottom

* utils class

Co-authored-by: Leon <>
Co-authored-by: Igor Gov <iggvrv@gmail.com>
2022-04-05 13:23:46 +03:00
lirazyehezkel
866378b451 Mizu cant show more than 10000 entries (#973) 2022-04-05 12:25:01 +03:00
lirazyehezkel
0b0b9ce6d1 Performance fixes (#972) 2022-04-04 20:03:57 +03:00
RoyUP9
d99c632102 Fixed golint strings.Title is deprecated error (#971) 2022-04-04 18:06:22 +03:00
RamiBerm
76a6a77a14 Refactor ws (#961)
* Separate socket and basenine logic

* WIP

* Update socket_server_handlers.go

* Update socket_data_streamer.go and socket_server_handlers.go

* Update socket_server_handlers.go

* Merge branch 'develop' into refactor_ws
# Please enter a commit message to explain why this merge is necessary,
# especially if it merges an updated upstream into a topic branch.
#
# Lines starting with '#' will be ignored, and an empty message aborts
# the commit.

* empty commit for actions

* empty commit for actions

* commit for actions

* Revert "commit for actions"

This reverts commit 8ba2ecf7d3.

Co-authored-by: RoyUP9 <87927115+RoyUP9@users.noreply.github.com>
2022-04-04 17:33:53 +03:00
M. Mert Yıldıran
2bfc523bbc Handle reflect.TypeOf returning nil case (#970) 2022-04-04 16:25:18 +03:00
RoyUP9
66ba778384 Fixed golint modified files (#969) 2022-04-04 15:32:22 +03:00
leon-up9
7adbf7bf1b Ui/TRA-4461_service-map-&-OAS---GUI-changes (#962)
* OpenAPI renamed to Service Catalog

* Docs icon change
Hide Navbar on serviceMap modal open

* PR comments

Co-authored-by: Leon <>
Co-authored-by: RoyUP9 <87927115+RoyUP9@users.noreply.github.com>
2022-04-04 14:49:41 +03:00
Igor Gov
a97b5b3b38 Add conditional Go lint validation to CI (#967) 2022-04-04 14:35:47 +03:00
Nimrod Gilboa Markevich
aa8dcc5f5c Format commit message as code to handle multi line messages (#963)
Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-04-03 22:10:43 +03:00
lirazyehezkel
9d08dbdd5d UI performance fix 2022-04-03 21:35:09 +03:00
lirazyehezkel
b47718e094 TRA-4442 Improve UI performance (#960)
* Move ws entry listener to entriesList component

* unused code
2022-04-03 15:51:20 +03:00
Igor Gov
6a7fad430c Adding resolved prop to service map node (#959)
* Adding resolved prop to service map node

* fixing tests
2022-04-03 15:32:21 +03:00
lirazyehezkel
dc0365dfb0 HOTFIX: Fix TLS icon position
HOTFIX: Fix TLS icon position #patch
2022-03-31 11:28:33 +03:00
lirazyehezkel
59ad8d8fad TLS icon position (#956)
* TLS icon position

* cr fix
2022-03-31 11:26:58 +03:00
lirazyehezkel
84f641d8fe Merge branch 'main' into hotfix/TRA-4451_fix_TLS_icon_position 2022-03-30 17:20:39 +03:00
Liraz Yehezkel
ac94554d2f common versions 2022-03-30 17:14:56 +03:00
Nimrod Gilboa Markevich
2ef6afa395 Hotfix tap issues - restart service mesh tapping when tap targets change, fallback to source namespace (#953)
* Read from service mesh network namespaces upon update (#944)  #patch

* Set the entry namespace to the source namespace if the destination is not resolved (#950)
2022-03-30 17:13:56 +03:00
Liraz Yehezkel
697bfef861 TLS icon position 2022-03-30 17:03:21 +03:00
Nimrod Gilboa Markevich
a49443f101 Set the entry namespace to the source namespace if the destination is not resolved (#950)
* Set the entry namespace to the source namespace if the destination is not resolved
* Overwrite src namespace with dst namespace only if dst non-empty
2022-03-30 15:40:21 +03:00
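
The fallback rule from #950 and its follow-up, as a small sketch with illustrative names:

package api

// resolveEntryNamespace sketches the rule from #950: keep the destination
// namespace when it is non-empty, otherwise fall back to the source namespace.
func resolveEntryNamespace(srcNamespace, dstNamespace string) string {
	if dstNamespace != "" {
		return dstNamespace
	}
	return srcNamespace
}
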
lirazyehezkel
2427955aa4 Avoid overlap only for service map including under 10 services 2022-03-30 15:30:09 +03:00
lirazyehezkel
f8c0ddd69a HOTFIX: [FE] Avoid overlap only for service map including under 10 services
HOTFIX: [FE] Avoid overlap only for service map including under 10 services #patch
2022-03-30 15:11:08 +03:00
David Levanon
27a73e21fb Read from service mesh network namespaces upon update (#944) 2022-03-30 13:56:37 +03:00
Liraz Yehezkel
2253737625 Try to avoid overlap only for maps including under 10 services 2022-03-30 13:09:10 +03:00
Igor Gov
8eeb0e54c9 Changing unit tests workflow timeout to 30 minutes 2022-03-30 11:52:47 +03:00
Andrey Pokhilko
97db24aeba OAS: rework data feeding + sampleIDs (#917)
* Call OAS feeder

* Don't call old OAS code

* Rework calls

* Work on it

* Put back rules

* Make it compile

* start thinking of test

* Compiles

* Save

* Fixes

* Save

* Fixing

* Trying to fake conn

* add timeout

* Test timeout

* Fix tests

* Only build OAS for HTTP entries

* Remove some dead code

* Adding SampleIDs

* Cosmetics

* lint

* Revert rename

* Sample ID for content

* Cleanup

* Add more sample IDs

* Checking hypothesis

* Move assignment place a bit

* Cosmetics

* Update test.yml

Co-authored-by: undera <undera@undera-old-desktop.home>
Co-authored-by: Igor Gov <iggvrv@gmail.com>
2022-03-30 11:14:25 +03:00
RamiBerm
63cf7ac34e Refactor entries controller logic (#949)
* wip

* Update entries_controller.go and entries_provider.go

* Update entries_controller.go

* change entries provider into a struct + interface

* Update entries_provider.go

* Update entries_provider.go
2022-03-29 18:30:19 +03:00
Igor Gov
e867b7d0f1 Build ui-common part of CI (#914)
* Build ui-common always locally
2022-03-29 14:14:52 +03:00
lirazyehezkel
dcd8a64f43 Hotfix/remove token from community (#948) 2022-03-29 13:16:50 +03:00
Igor Gov
4a0294c61a Hotfix: [FE] Remove user token from websocket
Hotfix: Remove token from mizu community #patch
Merge pull request #947 from up9inc/hotfix/Remove_token_from_community
2022-03-29 13:01:53 +03:00
Liraz Yehezkel
9053c58146 Remove token from mizu community 2022-03-29 10:25:25 +03:00
lirazyehezkel
bf8d5ed069 Support multiple workspaces (TRA-4365) (#945)
* support multiple workspaces

* reopen by websocket url dep

* open websocket only when websocketURL is changed

* upgrade common version

Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-03-29 09:56:59 +03:00
Nimrod Gilboa Markevich
1f6e539590 Add commit message and committer to acceptance tests slack alert (#946)
* Add commit message and committer username to slack alerts
* Use name instead of username
* Use name and email
2022-03-29 09:15:42 +03:00
David Levanon
590fa08c81 EBPF error handling 2022-03-28 14:19:06 +03:00
Igor Gov
5d5b13fc86 Develop -> main (Release 30.0)
Merge pull request #942 from up9inc/develop #major
Develop -> main (Release 30.0)
2022-03-28 13:00:15 +03:00
Igor Gov
0a9be1884a Traffic viewer list item ebpf to lock icon (#939)
* Traffic viewer changing ebpf to lock icon
2022-03-28 11:10:54 +03:00
Igor Gov
40c745068e Traffic viewer changing ebpf to lock icon (#938) 2022-03-28 10:45:52 +03:00
AmitUp9
10dffd9331 TRA-4419 remove headers from OAS select (#936) 2022-03-28 10:01:57 +03:00
leon-up9
0a800e8d8a passed containerId in the object config (#935)
* passed containerId in the object config

* pkg version

* tmp: run acceptance tests, revert before merge

* revert

Co-authored-by: Leon <>
2022-03-27 19:41:51 +03:00
RamiBerm
068a4ff86e TRA-4422 line numbers fix (#934)
* fix line numbering in body syntax highlighter

* Packages
2022-03-27 16:17:34 +03:00
leon-up9
c45a869b75 Fix/regration-fixes-24.3 (#933)
* link open in a new tab

* fixed double toast

* added const

* javiers new icon

* epbg img fix

* pkg update

* changed import

* formated

Co-authored-by: Leon <>
2022-03-27 15:47:46 +03:00
leon-up9
0a793cd9e0 added props (#924) 2022-03-27 11:31:40 +03:00
Adrian Gąsior
8d19080c11 Updated Dockerfile and fixed typo in Makefile (#929)
* Updated Dockerfile and fixed typo in Makefile

* PR request

* added small hotfix to basenine
2022-03-27 01:39:50 +03:00
Nimrod Gilboa Markevich
319c3c7a8d Initialize tapper before ws (#932) 2022-03-26 21:25:18 +03:00
M. Mert Yıldıran
0e7704eb15 Bump the version to 0.6.6 and fix the index shift bug that occurs after database truncation (#931) 2022-03-26 18:25:38 +03:00
gadotroee
dbcb776139 no message (#930) 2022-03-26 13:19:04 +03:00
Igor Gov
a3de34f544 Turn OAS feature ON by default (#927)
* Turn OAS feature ON by default
2022-03-24 20:40:16 +02:00
Nimrod Gilboa Markevich
99667984d6 Update tap targets without reinitializing packet source manager (#925) 2022-03-24 15:39:20 +02:00
David Levanon
763b0e7362 quick tls update pods solution (#918)
Update TLS tappers when tapped pods are updated via WS.
2022-03-24 15:21:56 +02:00
AmitUp9
e07e04377f mizu and ui-common version update (#923) 2022-03-24 12:31:08 +02:00
AmitUp9
3c8f63ed92 Feature/UI/tra 4390 status bar banner demo env (#921)
* no message

* banner styling condition added and button text transform disabled

* added prps to container

* build fix
2022-03-24 12:14:31 +02:00
leon-up9
11a2246cb9 Fix/entry-title-responsive-comments (#922)
* Make `EntryTitle` component responsive

* useRequetTextByWidth added

* renamed

Co-authored-by: M. Mert Yildiran <mehmet@up9.com>
Co-authored-by: Leon <>
2022-03-24 12:09:28 +02:00
leon-up9
a2595afd5e changed icon (#920)
Co-authored-by: Leon <>
2022-03-24 10:43:35 +02:00
AmitUp9
0f4710918f TRA-4122 oas gui grooming (#915)
* oas styling
* oas button text change
* ui-common version update
* font fix
* version updating
Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-03-23 22:15:04 +02:00
RoyUP9
4bdda920d5 Fix for check command (#916)
* Fix for check command

* empty commit for checks

Co-authored-by: Roee Gadot <roee.gadot@up9.com>
2022-03-23 22:02:29 +02:00
Adrian Gąsior
59e6268ddd Refactored Dockerfile (#902)
* Refactored Dockerfile

* PR request
2022-03-23 16:16:37 +02:00
leon-up9
2513e9099f UI/tra 4406 add an information icon (#913)
* information icon added

* new icon and position

* icon changed

* new Ver

* space removed

Co-authored-by: Adam Kol <adam@up9.com>
Co-authored-by: Leon <>
2022-03-23 14:54:51 +02:00
Nimrod Gilboa Markevich
a5c35d7d90 Update tap targets over ws (#901)
Update tappers via websocket instead of by env var. This way the DaemonSet doesn't have to be applied just to notify the tappers that the tap targets changed. The number of tapper restarts is reduced. The DaemonSet still gets applied when there is a need to add/remove a tapper from a node.
2022-03-23 13:50:33 +02:00
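
A rough sketch of the mechanism described in #901, assuming tap targets are pushed to each tapper as a JSON message over an existing websocket; the message shape and helper names are assumptions, not the actual Mizu protocol:

package kubernetes

import (
	"github.com/gorilla/websocket"
)

// TapTargetsUpdate is an assumed message shape, not the actual Mizu protocol:
// the set of pods a tapper on a given node should tap.
type TapTargetsUpdate struct {
	MessageType string   `json:"messageType"`
	Pods        []string `json:"pods"`
}

// notifyTapper pushes updated tap targets over the tapper's existing websocket,
// so the DaemonSet does not have to be re-applied just to change targets.
func notifyTapper(conn *websocket.Conn, pods []string) error {
	return conn.WriteJSON(TapTargetsUpdate{MessageType: "tapTargets", Pods: pods})
}
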
Igor Gov
41a7587088 Remove redundant Google auth from test workflow (#911)
* Remove google auth for test workflow
2022-03-23 11:51:42 +02:00
David Levanon
12f46da5c6 Support TLS big buffers (#893) 2022-03-23 11:27:06 +02:00
Adam Kol
17f7879cff Fixing Tabs for Ent (#909)
Co-authored-by: gadotroee <55343099+gadotroee@users.noreply.github.com>
2022-03-22 16:04:04 +02:00
lirazyehezkel
bc7776cbd3 Fix ws errors and warnings (#908)
* Fix ws errors and warnings

* versioning
2022-03-22 15:43:05 +02:00
gadotroee
2a31739100 Fix acceptance test (body size and right side pane changes) (#907) 2022-03-22 09:56:34 +02:00
gadotroee
af61c69fb6 Merge pull request #889 from up9inc/develop
develop -> main (Release 29.0) #major
2022-03-13 10:30:12 +02:00
RamiBerm
94dfa68858 Revert "Support stopping oas/servicemesh/telemetry in flight (#867)" (#868)
This reverts commit 50c0062db4.
2022-03-02 09:48:42 +02:00
RamiBerm
50c0062db4 Support stopping oas/servicemesh/telemetry in flight (#867)
* Update oas_generator.go and servicemap.go

* Update oas_generator.go

* Update esClient.go

* Update servicemap.go
2022-03-01 14:41:23 +02:00
Igor Gov
720969bbe6 Merge pull request #861 from up9inc/develop
Develop -> main (28.0) #major
2022-02-27 11:27:05 +02:00
197 changed files with 11015 additions and 3574 deletions

View File

@@ -8,6 +8,9 @@ on:
branches:
- 'develop'
env:
MIZU_CI_IMAGE: mizu/ci:0.0
jobs:
run-acceptance-tests:
name: Run acceptance tests
@@ -21,6 +24,19 @@ jobs:
- name: Check out code into the Go module directory
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Build
uses: docker/build-push-action@v2
with:
context: .
push: false
load: true
tags: ${{ env.MIZU_CI_IMAGE }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Setup acceptance test
run: ./acceptanceTests/setup.sh
@@ -43,7 +59,7 @@ jobs:
with:
status: ${{ job.status }}
notification_title: 'Mizu {workflow} has {status_message}'
message_format: '{emoji} *{workflow}* {status_message} during <{run_url}|run>, after commit: <{commit_url}|{commit_sha}>'
message_format: '{emoji} *{workflow}* {status_message} during <{run_url}|run>, after commit <{commit_url}|{commit_sha}> by ${{ github.event.head_commit.author.name }} <${{ github.event.head_commit.author.email }}> ```${{ github.event.head_commit.message }}```'
footer: 'Linked Repo <{repo_url}|{repo}>'
notify_when: 'failure'
env:

View File

@@ -0,0 +1,54 @@
name: Acceptance tests on PR
on: push
env:
MIZU_CI_IMAGE: mizu/ci:0.0
concurrency:
group: acceptance-tests-on-pr-${{ github.ref }}
cancel-in-progress: true
jobs:
run-tests:
name: Run tests
runs-on: ubuntu-latest
if: ${{ contains(github.event.head_commit.message, '#run_acceptance_tests') }}
steps:
- name: Set up Go 1.17
uses: actions/setup-go@v2
with:
go-version: '^1.17'
- name: Check out code into the Go module directory
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Build
uses: docker/build-push-action@v2
with:
context: .
push: false
load: true
tags: ${{ env.MIZU_CI_IMAGE }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Setup acceptance test
run: ./acceptanceTests/setup.sh
- name: Create k8s users and change context
env:
USERNAME_UNRESTRICTED: user-with-clusterwide-access
USERNAME_RESTRICTED: user-with-restricted-access
run: |
./acceptanceTests/create_user.sh "${USERNAME_UNRESTRICTED}"
./acceptanceTests/create_user.sh "${USERNAME_RESTRICTED}"
kubectl apply -f cli/cmd/permissionFiles/permissions-all-namespaces-tap.yaml
kubectl config use-context ${USERNAME_UNRESTRICTED}
- name: Test
run: make acceptance-test

View File

@@ -45,7 +45,7 @@ jobs:
- name: Check modified files
id: modified_files
run: devops/check_modified_files.sh agent/ shared/ tap/ ui/ Dockerfile
run: devops/check_modified_files.sh agent/ shared/ tap/ ui/ ui-common/ Dockerfile
- name: Set up Docker Buildx
if: steps.modified_files.outputs.matched == 'true'

View File

@@ -13,10 +13,11 @@ jobs:
- uses: actions/stale@v3
with:
days-before-issue-stale: 30
days-before-issue-close: 14
days-before-issue-close: -1
exempt-issue-labels: "enhancement"
stale-issue-label: "stale"
stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
close-issue-message: ""
days-before-pr-stale: -1
days-before-pr-close: -1
repo-token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -15,6 +15,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 2
- uses: actions/setup-go@v2
with:
go-version: '^1.17'
@@ -24,67 +27,117 @@ jobs:
sudo apt update
sudo apt install -y libpcap-dev
- name: Check Agent modified files
id: agent_modified_files
run: devops/check_modified_files.sh agent/
- name: Go lint - agent
uses: golangci/golangci-lint-action@v2
if: steps.agent_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: agent
args: --timeout=3m
- name: Check shared modified files
id: shared_modified_files
run: devops/check_modified_files.sh shared/
- name: Go lint - shared
uses: golangci/golangci-lint-action@v2
if: steps.shared_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: shared
args: --timeout=3m
- name: Check tap modified files
id: tap_modified_files
run: devops/check_modified_files.sh tap/
- name: Go lint - tap
uses: golangci/golangci-lint-action@v2
if: steps.tap_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: tap
args: --timeout=3m
- name: Check cli modified files
id: cli_modified_files
run: devops/check_modified_files.sh cli/
- name: Go lint - CLI
uses: golangci/golangci-lint-action@v2
if: steps.cli_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: cli
args: --timeout=3m
- name: Check acceptanceTests modified files
id: acceptanceTests_modified_files
run: devops/check_modified_files.sh acceptanceTests/
- name: Go lint - acceptanceTests
uses: golangci/golangci-lint-action@v2
if: steps.acceptanceTests_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: acceptanceTests
args: --timeout=3m
- name: Check tap/api modified files
id: tap_api_modified_files
run: devops/check_modified_files.sh tap/api/
- name: Go lint - tap/api
uses: golangci/golangci-lint-action@v2
if: steps.tap_api_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: tap/api
- name: Check tap/extensions/amqp modified files
id: tap_amqp_modified_files
run: devops/check_modified_files.sh tap/extensions/amqp/
- name: Go lint - tap/extensions/amqp
uses: golangci/golangci-lint-action@v2
if: steps.tap_amqp_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: tap/extensions/amqp
- name: Check tap/extensions/http modified files
id: tap_http_modified_files
run: devops/check_modified_files.sh tap/extensions/http/
- name: Go lint - tap/extensions/http
uses: golangci/golangci-lint-action@v2
if: steps.tap_http_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: tap/extensions/http
- name: Check tap/extensions/kafka modified files
id: tap_kafka_modified_files
run: devops/check_modified_files.sh tap/extensions/kafka/
- name: Go lint - tap/extensions/kafka
uses: golangci/golangci-lint-action@v2
if: steps.tap_kafka_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: tap/extensions/kafka
- name: Check tap/extensions/redis modified files
id: tap_redis_modified_files
run: devops/check_modified_files.sh tap/extensions/redis/
- name: Go lint - tap/extensions/redis
uses: golangci/golangci-lint-action@v2
if: steps.tap_redis_modified_files.outputs.matched == 'true'
with:
version: latest
working-directory: tap/extensions/redis

View File

@@ -18,6 +18,7 @@ jobs:
run-unit-tests:
name: Unit Tests
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v2
@@ -34,14 +35,6 @@ jobs:
run: |
sudo apt-get install libpcap-dev
- id: 'auth'
uses: 'google-github-actions/auth@v0'
with:
credentials_json: '${{ secrets.GCR_JSON_KEY }}'
- name: 'Set up Cloud SDK'
uses: 'google-github-actions/setup-gcloud@v0'
- name: Check CLI modified files
id: cli_modified_files
run: devops/check_modified_files.sh cli/
@@ -63,7 +56,7 @@ jobs:
- name: Check extensions modified files
id: ext_modified_files
run: devops/check_modified_files.sh tap/extensions/
run: devops/check_modified_files.sh tap/extensions/ tap/api/
- name: Extensions Test
if: github.event_name == 'push' || steps.ext_modified_files.outputs.matched == 'true'
@@ -71,4 +64,3 @@ jobs:
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v2

View File

@@ -1,13 +1,23 @@
ARG BUILDARCH=amd64
ARG TARGETARCH=amd64
### Front-end common
FROM node:16 AS front-end-common
WORKDIR /app/ui-build
COPY ui-common/package.json .
COPY ui-common/package-lock.json .
RUN npm i
COPY ui-common .
RUN npm pack
### Front-end
FROM node:16 AS front-end
WORKDIR /app/ui-build
COPY ui/package.json .
COPY ui/package-lock.json .
COPY ui/package.json ui/package-lock.json ./
COPY --from=front-end-common ["/app/ui-build/up9-mizu-common-0.0.0.tgz", "."]
RUN npm i
COPY ui .
RUN npm run build
@@ -15,7 +25,7 @@ RUN npm run build
### Base builder image for native builds architecture
FROM golang:1.17-alpine AS builder-native-base
ENV CGO_ENABLED=1 GOOS=linux
RUN apk add libpcap-dev g++ perl-utils
RUN apk add --no-cache libpcap-dev g++ perl-utils
### Intermediate builder image for x86-64 to x86-64 native builds
@@ -77,17 +87,16 @@ RUN go build -ldflags="-extldflags=-static -s -w \
-X 'github.com/up9inc/mizu/agent/pkg/version.Ver=${VER}'" -o mizuagent .
# Download Basenine executable, verify the sha1sum
ADD https://github.com/up9inc/basenine/releases/download/v0.6.5/basenine_linux_${GOARCH} ./basenine_linux_${GOARCH}
ADD https://github.com/up9inc/basenine/releases/download/v0.6.5/basenine_linux_${GOARCH}.sha256 ./basenine_linux_${GOARCH}.sha256
RUN shasum -a 256 -c basenine_linux_${GOARCH}.sha256
RUN chmod +x ./basenine_linux_${GOARCH}
RUN mv ./basenine_linux_${GOARCH} ./basenine
ADD https://github.com/up9inc/basenine/releases/download/v0.7.3/basenine_linux_${GOARCH} ./basenine_linux_${GOARCH}
ADD https://github.com/up9inc/basenine/releases/download/v0.7.3/basenine_linux_${GOARCH}.sha256 ./basenine_linux_${GOARCH}.sha256
RUN shasum -a 256 -c basenine_linux_"${GOARCH}".sha256 && \
chmod +x ./basenine_linux_"${GOARCH}" && \
mv ./basenine_linux_"${GOARCH}" ./basenine
### The shipped image
ARG TARGETARCH=amd64
FROM ${TARGETARCH}/busybox:latest
# gin-gonic runs in debug mode without this
ENV GIN_MODE=release

View File

@@ -73,7 +73,7 @@ clean-agent: ## Clean agent.
clean-cli: ## Clean CLI.
@(cd cli; make clean ; echo "CLI cleanup done" )
clean-docker: ## Run clen docker
clean-docker: ## Run clean docker
@(echo "DOCKER cleanup - NOT IMPLEMENTED YET " )
test-lint: ## Run lint on all modules

View File

@@ -39,13 +39,13 @@ export function verifyMinimumEntries() {
});
}
export function leftTextCheck(entryNum, path, expectedText) {
cy.get(`#list #entry-${entryNum} ${path}`).invoke('text').should('eq', expectedText);
export function leftTextCheck(entryId, path, expectedText) {
cy.get(`#list #entry-${entryId} ${path}`).invoke('text').should('eq', expectedText);
}
export function leftOnHoverCheck(entryNum, path, filterName) {
cy.get(`#list #entry-${entryNum} ${path}`).trigger('mouseover');
cy.get(`#list #entry-${entryNum} [data-cy='QueryableTooltip']`).invoke('text').should('match', new RegExp(filterName));
export function leftOnHoverCheck(entryId, path, filterName) {
cy.get(`#list #entry-${entryId} ${path}`).trigger('mouseover');
cy.get(`#list #entry-${entryId} [data-cy='QueryableTooltip']`).invoke('text').should('match', new RegExp(filterName));
}
export function rightTextCheck(path, expectedText) {
@@ -89,13 +89,13 @@ export function checkFilterByMethod(funcDict) {
cy.get('#entries-length').invoke('text').then(len => {
resizeIfNeeded(len);
listElmWithIdAttr.forEach(entry => {
if (entry?.id && entry.id.match(RegExp(/entry-(\d{2}|\d{1})$/gm))) {
const entryNum = getEntryNumById(entry.id);
if (entry?.id && entry.id.match(RegExp(/entry-(\d{24})$/gm))) {
const entryId = getEntryId(entry.id);
leftTextCheck(entryNum, methodDict.pathLeft, methodDict.expectedText);
leftTextCheck(entryNum, protocolDict.pathLeft, protocolDict.expectedTextLeft);
leftTextCheck(entryId, methodDict.pathLeft, methodDict.expectedText);
leftTextCheck(entryId, protocolDict.pathLeft, protocolDict.expectedTextLeft);
if (summaryDict)
leftTextCheck(entryNum, summaryDict.pathLeft, summaryDict.expectedText);
leftTextCheck(entryId, summaryDict.pathLeft, summaryDict.expectedText);
if (!doneCheckOnFirst) {
deepCheck(funcDict, protocolDict, methodDict, entry);
@@ -111,6 +111,11 @@ export function checkFilterByMethod(funcDict) {
});
}
export function getEntryId(id) {
// take the second part from the string (entry-<ID>)
return id.split('-')[1];
}
function resizeIfNeeded(entriesLen) {
if (entriesLen > maxEntriesInDom){
Cypress.config().viewportHeight === Cypress.env('normalMizuHeight') ?
@@ -119,14 +124,14 @@ function resizeIfNeeded(entriesLen) {
}
function deepCheck(generalDict, protocolDict, methodDict, entry) {
const entryNum = getEntryNumById(entry.id);
const entryId = getEntryId(entry.id);
const {summary, value} = generalDict;
const summaryDict = getSummaryDict(summary);
leftOnHoverCheck(entryNum, methodDict.pathLeft, methodDict.expectedOnHover);
leftOnHoverCheck(entryNum, protocolDict.pathLeft, protocolDict.expectedOnHover);
leftOnHoverCheck(entryId, methodDict.pathLeft, methodDict.expectedOnHover);
leftOnHoverCheck(entryId, protocolDict.pathLeft, protocolDict.expectedOnHover);
if (summaryDict)
leftOnHoverCheck(entryNum, summaryDict.pathLeft, summaryDict.expectedOnHover);
leftOnHoverCheck(entryId, summaryDict.pathLeft, summaryDict.expectedOnHover);
cy.get(`#${entry.id}`).click();
@@ -142,7 +147,9 @@ function deepCheck(generalDict, protocolDict, methodDict, entry) {
if (value) {
if (value.tab === valueTabs.response)
cy.contains('Response').click();
// temporary fix, change to some "data-cy" attribute,
// this will fix the issue that happen because we have "response:" in the header of the right side
cy.get('#rightSideContainer > :nth-child(3)').contains('Response').click();
cy.get(Cypress.env('bodyJsonClass')).then(text => {
expect(text.text()).to.match(value.regex)
});
@@ -181,7 +188,3 @@ function getProtocolDict(protocol, protocolText) {
expectedOnHover: protocol.toLowerCase()
};
}
function getEntryNumById (id) {
return parseInt(id.split('-')[1]);
}

View File

@@ -15,15 +15,11 @@ function checkEntries() {
checkThatAllEntriesShown();
resizeToHugeMizu();
cy.get('#total-entries').then(number => {
const numOfEntries = parseInt(number.text());
[...Array(numOfEntries).keys()].map(checkEntry);
cy.get('#list [id^=entry]').each(entryElement => {
entryElement.click();
cy.get('#tbody-Headers').should('be.visible');
isValueExistsInElement(false, 'Ignored-User-Agent', '#tbody-Headers');
});
});
}
function checkEntry(entryIndex) {
cy.get(`#entry-${entryIndex}`).click();
cy.get('#tbody-Headers').should('be.visible');
isValueExistsInElement(false, 'Ignored-User-Agent', '#tbody-Headers');
}

View File

@@ -1,5 +1,6 @@
import {findLineAndCheck, getExpectedDetailsDict} from "../testHelpers/StatusBarHelper";
import {
getEntryId,
leftOnHoverCheck,
leftTextCheck,
resizeToHugeMizu,
@@ -40,32 +41,23 @@ it('filtering guide check', function () {
});
it('right side sanity test', function () {
cy.get('#entryDetailedTitleBodySize').then(sizeTopLine => {
const sizeOnTopLine = sizeTopLine.text().replace(' B', '');
cy.contains('Response').click();
cy.contains('Body Size (bytes)').parent().next().then(size => {
const bodySizeByDetails = size.text();
expect(sizeOnTopLine).to.equal(bodySizeByDetails, 'The body size in the top line should match the details in the response');
cy.get('#entryDetailedTitleElapsedTime').then(timeInMs => {
const time = timeInMs.text();
if (time < '0ms') {
throw new Error(`The time in the top line cannot be negative ${time}`);
}
});
if (parseInt(bodySizeByDetails) < 0) {
throw new Error(`The body size cannot be negative. got the size: ${bodySizeByDetails}`)
}
// temporary fix, change to some "data-cy" attribute,
// this will fix the issue that happen because we have "response:" in the header of the right side
cy.get('#rightSideContainer > :nth-child(3)').contains('Response').click();
cy.get('#entryDetailedTitleElapsedTime').then(timeInMs => {
const time = timeInMs.text();
if (time < '0ms') {
throw new Error(`The time in the top line cannot be negative ${time}`);
}
cy.get('#rightSideContainer [title="Status Code"]').then(status => {
const statusCode = status.text();
cy.contains('Status').parent().next().then(statusInDetails => {
const statusCodeInDetails = statusInDetails.text();
cy.get('#rightSideContainer [title="Status Code"]').then(status => {
const statusCode = status.text();
cy.contains('Status').parent().next().then(statusInDetails => {
const statusCodeInDetails = statusInDetails.text();
expect(statusCode).to.equal(statusCodeInDetails, 'The status code in the top line should match the status code in details');
});
});
});
expect(statusCode).to.equal(statusCodeInDetails, 'The status code in the top line should match the status code in details');
});
});
});
@@ -157,9 +149,6 @@ function checkFilterNoResults(filterName) {
// the DOM should show 0 entries
cy.get('#entries-length').should('have.text', '0');
// going through every potential entry and verifies that it doesn't exist
[...Array(parseInt(totalEntries)).keys()].map(shouldNotExist);
cy.get('[title="Fetch old records"]').click();
cy.get('#noMoreDataTop', {timeout: refreshWaitTimeout}).should('be.visible');
cy.get('#entries-length').should('have.text', '0'); // after loading all entries there should still be 0 entries
@@ -171,10 +160,6 @@ function checkFilterNoResults(filterName) {
});
}
function shouldNotExist(entryNum) {
cy.get(`entry-${entryNum}`).should('not.exist');
}
function checkIllegalFilter(illegalFilterName) {
it(`should show red search bar with the input: ${illegalFilterName}`, function () {
cy.reload();
@@ -194,30 +179,44 @@ function checkIllegalFilter(illegalFilterName) {
});
});
}
function checkFilter(filterDetails){
const {name, leftSidePath, rightSidePath, rightSideExpectedText, leftSideExpectedText, applyByEnter} = filterDetails;
function checkFilter(filterDetails) {
const {
name,
leftSidePath,
rightSidePath,
rightSideExpectedText,
leftSideExpectedText,
applyByEnter
} = filterDetails;
const entriesForDeeperCheck = 5;
it(`checking the filter: ${name}`, function () {
cy.get('#total-entries').should('not.have.text', '0').then(number => {
const totalEntries = number.text();
// checks the hover on the last entry (the only one in DOM at the beginning)
leftOnHoverCheck(totalEntries - 1, leftSidePath, name);
cy.get(`#list [id^=entry]`).last().then(elem => {
const element = elem[0];
const entryId = getEntryId(element.id);
// checks the hover on the last entry (the only one in DOM at the beginning)
leftOnHoverCheck(entryId, leftSidePath, name);
cy.get('.w-tc-editor-text').clear();
// applying the filter with alt+enter or with the button
cy.get('.w-tc-editor-text').type(`${name}${applyByEnter ? '{alt+enter}' : ''}`);
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
if (!applyByEnter)
cy.get('[type="submit"]').click();
cy.get('.w-tc-editor-text').clear();
// applying the filter with alt+enter or with the button
cy.get('.w-tc-editor-text').type(`${name}${applyByEnter ? '{alt+enter}' : ''}`);
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
if (!applyByEnter)
cy.get('[type="submit"]').click();
// only one entry in DOM after filtering, checking all checks on it
leftTextCheck(totalEntries - 1, leftSidePath, leftSideExpectedText);
leftOnHoverCheck(totalEntries - 1, leftSidePath, name);
rightTextCheck(rightSidePath, rightSideExpectedText);
rightOnHoverCheck(rightSidePath, name);
checkRightSideResponseBody();
// only one entry in DOM after filtering, checking all checks on it
leftTextCheck(entryId, leftSidePath, leftSideExpectedText);
leftOnHoverCheck(entryId, leftSidePath, name);
rightTextCheck(rightSidePath, rightSideExpectedText);
rightOnHoverCheck(rightSidePath, name);
checkRightSideResponseBody();
});
cy.get('[title="Fetch old records"]').click();
resizeToHugeMizu();
@@ -226,33 +225,40 @@ function checkFilter(filterDetails){
cy.get('#entries-length', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
// checking only 'leftTextCheck' on all entries because the rest of the checks require more time
[...Array(parseInt(totalEntries)).keys()].forEach(entryNum => {
leftTextCheck(entryNum, leftSidePath, leftSideExpectedText);
cy.get(`#list [id^=entry]`).each(elem => {
const element = elem[0];
let entryId = getEntryId(element.id);
leftTextCheck(entryId, leftSidePath, leftSideExpectedText);
});
// making the other 3 checks on the first X entries (longer time for each check)
deeperChcek(leftSidePath, rightSidePath, name, leftSideExpectedText, rightSideExpectedText, entriesForDeeperCheck);
deeperCheck(leftSidePath, rightSidePath, name, leftSideExpectedText, rightSideExpectedText, entriesForDeeperCheck);
// reloading then waiting for the entries number to load
resizeToNormalMizu();
cy.reload();
cy.get('#total-entries', {timeout: refreshWaitTimeout}).should('have.text', totalEntries);
});
})
});
}
function deeperChcek(leftSidePath, rightSidePath, filterName, leftSideExpectedText, rightSideExpectedText, entriesNumToCheck) {
[...Array(entriesNumToCheck).keys()].forEach(entryNum => {
leftOnHoverCheck(entryNum, leftSidePath, filterName);
function deeperCheck(leftSidePath, rightSidePath, filterName, leftSideExpectedText, rightSideExpectedText, entriesNumToCheck) {
cy.get(`#list [id^=entry]`).each((element, index) => {
if (index < entriesNumToCheck) {
const entryId = getEntryId(element[0].id);
leftOnHoverCheck(entryId, leftSidePath, filterName);
cy.get(`#list #entry-${entryNum}`).click();
rightTextCheck(rightSidePath, rightSideExpectedText);
rightOnHoverCheck(rightSidePath, filterName);
cy.get(`#list #entry-${entryId}`).click();
rightTextCheck(rightSidePath, rightSideExpectedText);
rightOnHoverCheck(rightSidePath, filterName);
}
});
}
function checkRightSideResponseBody() {
cy.contains('Response').click();
// temporary fix, change to some "data-cy" attribute,
// this will fix the issue that happen because we have "response:" in the header of the right side
cy.get('#rightSideContainer > :nth-child(3)').contains('Response').click();
clickCheckbox('Decode Base64');
cy.get(`${Cypress.env('bodyJsonClass')}`).then(value => {

View File

@@ -57,11 +57,16 @@ kubectl expose deployment rabbitmq --type=LoadBalancer --port=5672 -n mizu-tests
echo "Starting proxy"
kubectl proxy --port=8080 &
echo "Setting minikube docker env"
eval $(minikube docker-env)
if [[ -z "${CI}" ]]; then
echo "Setting env var of mizu ci image"
export MIZU_CI_IMAGE="mizu/ci:0.0"
echo "Build agent image"
docker build -t "${MIZU_CI_IMAGE}" .
else
echo "not building docker image in CI because it is created as separate step"
fi
echo "Build agent image"
docker build -t mizu/ci:0.0 .
minikube image load "${MIZU_CI_IMAGE}"
echo "Build cli"
cd cli && make build GIT_BRANCH=ci SUFFIX=ci

View File

@@ -212,9 +212,10 @@ func DeleteKubeFile(kubeContext string, namespace string, filename string) error
}
func getDefaultCommandArgs() []string {
agentImageValue := os.Getenv("MIZU_CI_IMAGE")
setFlag := "--set"
telemetry := "telemetry=false"
agentImage := "agent-image=mizu/ci:0.0"
agentImage := fmt.Sprintf("agent-image=%s", agentImageValue)
imagePullPolicy := "image-pull-policy=IfNotPresent"
headless := "headless=true"

View File

@@ -15,12 +15,13 @@ require (
github.com/go-playground/validator/v10 v10.10.0
github.com/google/uuid v1.3.0
github.com/gorilla/websocket v1.4.2
github.com/jinzhu/copier v0.3.5
github.com/nav-inc/datetime v0.1.3
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
github.com/orcaman/concurrent-map v1.0.0
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/stretchr/testify v1.7.0
github.com/up9inc/basenine/client/go v0.0.0-20220317230530-8472d80307f6
github.com/up9inc/basenine/client/go v0.0.0-20220419100955-e2ca51087607
github.com/up9inc/mizu/shared v0.0.0
github.com/up9inc/mizu/tap v0.0.0
github.com/up9inc/mizu/tap/api v0.0.0
@@ -37,53 +38,78 @@ require (
require (
cloud.google.com/go/compute v1.2.0 // indirect
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
github.com/Azure/go-autorest/autorest v0.11.24 // indirect
github.com/Azure/go-autorest/autorest/adal v0.9.18 // indirect
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
github.com/Azure/go-autorest/logger v0.2.1 // indirect
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
github.com/MakeNowJust/heredoc v1.0.0 // indirect
github.com/PuerkitoBio/purell v1.1.1 // indirect
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4 // indirect
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5 // indirect
github.com/chanced/dynamic v0.0.0-20211210164248-f8fadb1d735b // indirect
github.com/cilium/ebpf v0.8.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/evanphx/json-patch v5.6.0+incompatible // indirect
github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f // indirect
github.com/fatih/camelcase v1.0.0 // indirect
github.com/fsnotify/fsnotify v1.5.1 // indirect
github.com/fvbommel/sortorder v1.0.2 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-errors/errors v1.4.2 // indirect
github.com/go-logr/logr v1.2.2 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.19.6 // indirect
github.com/go-openapi/swag v0.21.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.2.0 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/btree v1.0.1 // indirect
github.com/google/go-cmp v0.5.7 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/gopacket v1.1.19 // indirect
github.com/google/martian v2.1.0+incompatible // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/googleapis/gnostic v0.5.5 // indirect
github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 // indirect
github.com/imdario/mergo v0.3.12 // indirect
github.com/inconshreveable/mousetrap v1.0.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.14.2 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.14 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/moby/spdystream v0.2.0 // indirect
github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect
github.com/ohler55/ojg v1.12.12 // indirect
github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/russross/blackfriday v1.6.0 // indirect
github.com/santhosh-tekuri/jsonschema/v5 v5.0.0 // indirect
github.com/segmentio/kafka-go v0.4.27 // indirect
github.com/spf13/cobra v1.3.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/tidwall/gjson v1.14.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
github.com/tidwall/sjson v1.2.4 // indirect
github.com/ugorji/go/codec v1.2.6 // indirect
github.com/vishvananda/netns v0.0.0-20211101163701-50045581ed74 // indirect
github.com/xlab/treeprint v1.1.0 // indirect
go.starlark.net v0.0.0-20220203230714-bb14e151c28f // indirect
golang.org/x/crypto v0.0.0-20220208050332-20e1d8d225ab // indirect
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect
@@ -96,10 +122,15 @@ require (
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
k8s.io/cli-runtime v0.23.3 // indirect
k8s.io/component-base v0.23.3 // indirect
k8s.io/klog/v2 v2.40.1 // indirect
k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf // indirect
k8s.io/kubectl v0.23.3 // indirect
k8s.io/utils v0.0.0-20220127004650-9b3446523e65 // indirect
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 // indirect
sigs.k8s.io/kustomize/api v0.11.1 // indirect
sigs.k8s.io/kustomize/kyaml v0.13.3 // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.2.1 // indirect
sigs.k8s.io/yaml v1.3.0 // indirect
)

View File

@@ -53,6 +53,7 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/Azure/go-ansiterm v0.0.0-20210608223527-2377c96fe795/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=
github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
@@ -74,10 +75,13 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/MakeNowJust/heredoc v0.0.0-20170808103936-bb23615498cd/go.mod h1:64YHyfSL2R96J44Nlwm39UHepQbyR5q10x7iYa1ks2E=
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@@ -104,13 +108,12 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4 h1:NJOOlc6ZJjix0A1rAU+nxruZtR8KboG1848yqpIUo4M=
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4/go.mod h1:DQPxZS994Ld1Y8uwnJT+dRL04XPD0cElP/pHH/zEBHM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5 h1:7aWHqerlJ41y6FOsEUvknqgXnGmJyJSbjhAWq5pO4F8=
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5/go.mod h1:/iP1qXHoty45bqomnu2LM+VVyAEdWN+vtSHGlQgyxbw=
github.com/chanced/cmpjson v0.0.0-20210415035445-da9262c1f20a h1:zG6t+4krPXcCKtLbjFvAh+fKN1d0qfD+RaCj+680OU8=
github.com/chanced/cmpjson v0.0.0-20210415035445-da9262c1f20a/go.mod h1:yhcmlFk1hxuZ+5XZbupzT/cEm/eE4ZvWbmsW1+Q/aZE=
@@ -147,6 +150,7 @@ github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfc
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=
github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@@ -163,6 +167,7 @@ github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/elastic/go-elasticsearch/v7 v7.17.0 h1:0fcSh4qeC/i1+7QU1KXpmq2iUAdMk4l0/vmbtW1+KJM=
github.com/elastic/go-elasticsearch/v7 v7.17.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
@@ -184,6 +189,7 @@ github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLi
github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww=
github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4=
github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4=
github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f h1:Wl78ApPPB2Wvf/TIe2xdyJxTlb6obmF18d8QdkxNDu4=
github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f/go.mod h1:OSYXu++VVOHnXeitef/D8n/6y4QV8uLHSFXX4NeXMGc=
github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=
github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc=
@@ -201,6 +207,7 @@ github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4
github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/getkin/kin-openapi v0.76.0/go.mod h1:660oXbgy5JFMKreazJaQTw7o+X00qeSyhcnluiMv+Xg=
github.com/getkin/kin-openapi v0.89.0 h1:p4nagHchUKGn85z/f+pse4aSh50nIBOYjOhMIku2hiA=
@@ -237,6 +244,7 @@ github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUe
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
@@ -303,6 +311,7 @@ github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW
github.com/golangplus/testing v0.0.0-20180327235837-af21d9c3145e/go.mod h1:0AA//k/eakGydO4jKRoRL2j92ZKSzTgj9tclaCrvXHk=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4=
github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@@ -344,6 +353,7 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -363,6 +373,7 @@ github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoA
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 h1:+ngKgrYPPJrOjhax5N+uePQ0Fh1Z7PheYoUI/0nzkPA=
github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
@@ -410,9 +421,13 @@ github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg=
github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
@@ -451,6 +466,7 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE=
github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc=
github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w=
@@ -486,6 +502,7 @@ github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrk
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
@@ -493,8 +510,10 @@ github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:F
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=
github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
github.com/moby/term v0.0.0-20210610120745-9d4ed1856297/go.mod h1:vgPCkQMyxTZ7IDy8SXRufE172gr8+K/JE/7hHFxHW3A=
github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc=
github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
@@ -503,10 +522,12 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0=
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
github.com/nav-inc/datetime v0.1.3 h1:PaybPUsScX+Cd3TEa1tYpfwU61deCEhMTlCO2hONm1c=
github.com/nav-inc/datetime v0.1.3/go.mod h1:gKGf5G+cW7qkTo5TC/sieNyz6lYdrA9cf1PNV+pXIOE=
@@ -538,6 +559,7 @@ github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTK
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM=
@@ -583,6 +605,7 @@ github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTE
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@@ -593,6 +616,7 @@ github.com/santhosh-tekuri/jsonschema/v5 v5.0.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/segmentio/kafka-go v0.4.27 h1:sIhEozeL/TLN2mZ5dkG462vcGEWYKS+u31sXPjKhAM4=
github.com/segmentio/kafka-go v0.4.27/go.mod h1:XzMcoMjSzDGHcIwpWUI7GB43iKZ2fTVmryPSGLf/MPg=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
@@ -611,6 +635,7 @@ github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkU
github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk=
github.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0=
github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
@@ -624,6 +649,7 @@ github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy
github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
@@ -655,8 +681,8 @@ github.com/ugorji/go v1.2.6/go.mod h1:anCg0y61KIhDlPZmnH+so+RQbysYVyDko0IMgJv0Nn
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
github.com/ugorji/go/codec v1.2.6 h1:7kbGefxLoDBuYXOms4yD7223OpNMMPNPZxXk5TvFcyQ=
github.com/ugorji/go/codec v1.2.6/go.mod h1:V6TCNZ4PHqoHGFZuSG1W8nrCzzdgA2DozYxWFFpvxTw=
github.com/up9inc/basenine/client/go v0.0.0-20220317230530-8472d80307f6 h1:c0aVbLKYeFDAg246+NDgie2y484bsc20NaKLo8ODV3E=
github.com/up9inc/basenine/client/go v0.0.0-20220317230530-8472d80307f6/go.mod h1:SvJGPoa/6erhUQV7kvHBwM/0x5LyO6XaG2lUaCaKiUI=
github.com/up9inc/basenine/client/go v0.0.0-20220419100955-e2ca51087607 h1:UqxUSkOYOmsLZWQtMSk02ttnhdRwBRLOLt2aDiS9tEk=
github.com/up9inc/basenine/client/go v0.0.0-20220419100955-e2ca51087607/go.mod h1:SvJGPoa/6erhUQV7kvHBwM/0x5LyO6XaG2lUaCaKiUI=
github.com/vishvananda/netns v0.0.0-20211101163701-50045581ed74 h1:gga7acRE695APm9hlsSMoOoE65U4/TcqNj90mc69Rlg=
github.com/vishvananda/netns v0.0.0-20211101163701-50045581ed74/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0=
github.com/wI2L/jsondiff v0.1.1 h1:r2TkoEet7E4JMO5+s1RCY2R0LrNPNHY6hbDeow2hRHw=
@@ -665,6 +691,7 @@ github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhe
github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/xlab/treeprint v1.1.0 h1:G/1DjNkPpfZCFt9CSh6b5/nY4VimlbHF3Rh4obvtzDk=
github.com/xlab/treeprint v1.1.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY=
@@ -701,6 +728,7 @@ go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4
go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5/go.mod h1:nmDLcffg48OtT/PSW0Hg7FvpRQsQh5OSqIylirxKC7o=
go.starlark.net v0.0.0-20220203230714-bb14e151c28f h1:aW4TkS39/naJa9wPSbIXtZUQOlvuUh8gxCsLRrJoByU=
go.starlark.net v0.0.0-20220203230714-bb14e151c28f/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
@@ -1205,6 +1233,7 @@ gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@@ -1217,10 +1246,12 @@ k8s.io/api v0.23.3 h1:KNrME8KHGr12Ozjf8ytOewKzZh6hl/hHUZeHddT3a38=
k8s.io/api v0.23.3/go.mod h1:w258XdGyvCmnBj/vGzQMj6kzdufJZVUwEM1U2fRJwSQ=
k8s.io/apimachinery v0.23.3 h1:7IW6jxNzrXTsP0c8yXz2E5Yx/WTzVPTsHIx/2Vm0cIk=
k8s.io/apimachinery v0.23.3/go.mod h1:BEuFMMBaIbcOqVIJqNZJXGFTP4W6AycEpb5+m/97hrM=
k8s.io/cli-runtime v0.23.3 h1:aJiediw+uUbxkfO6BNulcAMTUoU9Om43g3R7rIkYqcw=
k8s.io/cli-runtime v0.23.3/go.mod h1:yA00O5pDqnjkBh8fkuugBbfIfjB1nOpz+aYLotbnOfc=
k8s.io/client-go v0.23.3 h1:23QYUmCQ/W6hW78xIwm3XqZrrKZM+LWDqW2zfo+szJs=
k8s.io/client-go v0.23.3/go.mod h1:47oMd+YvAOqZM7pcQ6neJtBiFH7alOyfunYN48VsmwE=
k8s.io/code-generator v0.23.3/go.mod h1:S0Q1JVA+kSzTI1oUvbKAxZY/DYbA/ZUb4Uknog12ETk=
k8s.io/component-base v0.23.3 h1:q+epprVdylgecijVGVdf4MbizEL2feW4ssd7cdo6LVY=
k8s.io/component-base v0.23.3/go.mod h1:1Smc4C60rWG7d3HjSYpIwEbySQ3YWg0uzH5a2AtaTLg=
k8s.io/component-helpers v0.23.3/go.mod h1:SH+W/WPTaTenbWyDEeY7iytAQiMh45aqKxkvlqQ57cg=
k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
@@ -1234,6 +1265,7 @@ k8s.io/kube-openapi v0.0.0-20210421082810-95288971da7e/go.mod h1:vHXdDvt9+2spS2R
k8s.io/kube-openapi v0.0.0-20211115234752-e816edb12b65/go.mod h1:sX9MT8g7NVZM5lVL/j8QyCCJe8YSMW30QvGZWaCIDIk=
k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf h1:M9XBsiMslw2lb2ZzglC0TOkBPK5NQi0/noUrdnoFwUg=
k8s.io/kube-openapi v0.0.0-20220124234850-424119656bbf/go.mod h1:sX9MT8g7NVZM5lVL/j8QyCCJe8YSMW30QvGZWaCIDIk=
k8s.io/kubectl v0.23.3 h1:gJsF7cahkWDPYlNvYKK+OrBZLAJUBzCym+Zsi+dfi1E=
k8s.io/kubectl v0.23.3/go.mod h1:VBeeXNgLhSabu4/k0O7Q0YujgnA3+CLTUE0RcmF73yY=
k8s.io/metrics v0.23.3/go.mod h1:Ut8TvkbsO4oMVeUzaTArvPrcw9QRFLs2XNzUlORjdYE=
k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
@@ -1247,10 +1279,12 @@ sigs.k8s.io/json v0.0.0-20211020170558-c049b76a60c6/go.mod h1:p4QtZmO4uMYipTQNza
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 h1:kDi4JBNAsJWfz1aEXhO8Jg87JJaPNLh5tIzYHgStQ9Y=
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2/go.mod h1:B+TnT182UBxE84DiCz4CVE26eOSDAeYCpfDnC2kdKMY=
sigs.k8s.io/kustomize/api v0.10.1/go.mod h1:2FigT1QN6xKdcnGS2Ppp1uIWrtWN28Ms8A3OZUZhwr8=
sigs.k8s.io/kustomize/api v0.11.1 h1:/Vutu+gAqVo8skw1xCZrsZD39SN4Adg+z7FrSTw9pds=
sigs.k8s.io/kustomize/api v0.11.1/go.mod h1:GZuhith5YcqxIDe0GnRJNx5xxPTjlwaLTt/e+ChUtJA=
sigs.k8s.io/kustomize/cmd/config v0.10.2/go.mod h1:K2aW7nXJ0AaT+VA/eO0/dzFLxmpFcTzudmAgDwPY1HQ=
sigs.k8s.io/kustomize/kustomize/v4 v4.4.1/go.mod h1:qOKJMMz2mBP+vcS7vK+mNz4HBLjaQSWRY22EF6Tb7Io=
sigs.k8s.io/kustomize/kyaml v0.13.0/go.mod h1:FTJxEZ86ScK184NpGSAQcfEqee0nul8oLCK30D47m4E=
sigs.k8s.io/kustomize/kyaml v0.13.3 h1:tNNQIC+8cc+aXFTVg+RtQAOsjwUdYBZRAgYOVI3RBc4=
sigs.k8s.io/kustomize/kyaml v0.13.3/go.mod h1:/ya3Gk4diiQzlE4mBh7wykyLRFZNvqlbh+JnwQ9Vhrc=
sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/structured-merge-diff/v4 v4.2.1 h1:bKCqE9GvQ5tiVHn5rfn1r+yao3aLQEaLzkkmAkf+A6Y=

View File

@@ -18,6 +18,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/elastic"
"github.com/up9inc/mizu/agent/pkg/entries"
"github.com/up9inc/mizu/agent/pkg/middlewares"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/oas"
@@ -30,8 +31,6 @@ import (
"github.com/up9inc/mizu/agent/pkg/app"
"github.com/up9inc/mizu/agent/pkg/config"
v1 "k8s.io/api/core/v1"
"github.com/gorilla/websocket"
"github.com/op/go-logging"
"github.com/up9inc/mizu/shared"
@@ -155,11 +154,6 @@ func runInTapperMode() {
hostMode := os.Getenv(shared.HostModeEnvVar) == "1"
tapOpts := &tap.TapOpts{HostMode: hostMode}
tapTargets := getTapTargets()
if tapTargets != nil {
tapOpts.FilterAuthorities = tapTargets
logger.Log.Infof("Filtering for the following authorities: %v", tapOpts.FilterAuthorities)
}
filteredOutputItemsChannel := make(chan *tapApi.OutputChannelItem)
@@ -205,7 +199,7 @@ func runInHarReaderMode() {
func enableExpFeatureIfNeeded() {
if config.Config.OAS {
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGenerator)
oasGenerator.Start()
oasGenerator.Start(nil)
}
if config.Config.ServiceMap {
serviceMapGenerator := dependency.GetInstance(dependency.ServiceMapGeneratorDependency).(servicemap.ServiceMap)
@@ -257,28 +251,6 @@ func setUIFlags(uiIndexPath string) error {
return nil
}
func parseEnvVar(env string) map[string][]v1.Pod {
var mapOfList map[string][]v1.Pod
val, present := os.LookupEnv(env)
if !present {
return mapOfList
}
err := json.Unmarshal([]byte(val), &mapOfList)
if err != nil {
panic(fmt.Sprintf("env var %s's value of %v is invalid! must be map[string][]v1.Pod %v", env, mapOfList, err))
}
return mapOfList
}
func getTapTargets() []v1.Pod {
nodeName := os.Getenv(shared.NodeNameEnvVar)
tappedAddressesPerNodeDict := parseEnvVar(shared.TappedAddressesPerNodeDictEnvVar)
return tappedAddressesPerNodeDict[nodeName]
}
func getTrafficFilteringOptions() *tapApi.TrafficFilteringOptions {
filteringOptionsJson := os.Getenv(shared.MizuFilteringOptionsEnvVar)
if filteringOptionsJson == "" {
@@ -348,6 +320,7 @@ func dialSocketWithRetry(socketAddress string, retryAmount int, retryDelay time.
for i := 1; i < retryAmount; i++ {
socketConnection, _, err := dialer.Dial(socketAddress, nil)
if err != nil {
lastErr = err
if i < retryAmount {
logger.Log.Infof("socket connection to %s failed: %v, retrying %d out of %d in %d seconds...", socketAddress, err, i, retryAmount, retryDelay/time.Second)
time.Sleep(retryDelay)
@@ -381,6 +354,14 @@ func handleIncomingMessageAsTapper(socketConnection *websocket.Conn) {
} else {
tap.UpdateTapTargets(tapConfigMessage.TapTargets)
}
case shared.WebSocketMessageTypeUpdateTappedPods:
var tappedPodsMessage shared.WebSocketTappedPodsMessage
if err := json.Unmarshal(message, &tappedPodsMessage); err != nil {
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
return
}
nodeName := os.Getenv(shared.NodeNameEnvVar)
tap.UpdateTapTargets(tappedPodsMessage.NodeToTappedPodMap[nodeName])
default:
logger.Log.Warningf("Received socket message of type %s for which no handlers are defined", socketMessageBase.MessageType)
}
@@ -392,4 +373,7 @@ func handleIncomingMessageAsTapper(socketConnection *websocket.Conn) {
func initializeDependencies() {
dependency.RegisterGenerator(dependency.ServiceMapGeneratorDependency, func() interface{} { return servicemap.GetDefaultServiceMapInstance() })
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
dependency.RegisterGenerator(dependency.EntriesProvider, func() interface{} { return &entries.BasenineEntriesProvider{} })
dependency.RegisterGenerator(dependency.EntriesSocketStreamer, func() interface{} { return &api.BasenineEntryStreamer{} })
dependency.RegisterGenerator(dependency.EntryStreamerSocketConnector, func() interface{} { return &api.DefaultEntryStreamerSocketConnector{} })
}

View File

@@ -0,0 +1,57 @@
package api
import (
"fmt"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
)
type EntryStreamerSocketConnector interface {
SendEntry(socketId int, entry *tapApi.Entry, params *WebSocketParams)
SendMetadata(socketId int, metadata *basenine.Metadata)
SendToastError(socketId int, err error)
CleanupSocket(socketId int)
}
type DefaultEntryStreamerSocketConnector struct{}
func (e *DefaultEntryStreamerSocketConnector) SendEntry(socketId int, entry *tapApi.Entry, params *WebSocketParams) {
var message []byte
if params.EnableFullEntries {
message, _ = models.CreateFullEntryWebSocketMessage(entry)
} else {
extension := extensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
message, _ = models.CreateBaseEntryWebSocketMessage(base)
}
if err := SendToSocket(socketId, message); err != nil {
logger.Log.Error(err)
}
}
func (e *DefaultEntryStreamerSocketConnector) SendMetadata(socketId int, metadata *basenine.Metadata) {
metadataBytes, _ := models.CreateWebsocketQueryMetadataMessage(metadata)
if err := SendToSocket(socketId, metadataBytes); err != nil {
logger.Log.Error(err)
}
}
func (e *DefaultEntryStreamerSocketConnector) SendToastError(socketId int, err error) {
toastBytes, _ := models.CreateWebsocketToastMessage(&models.ToastMessage{
Type: "error",
AutoClose: 5000,
Text: fmt.Sprintf("Syntax error: %s", err.Error()),
})
if err := SendToSocket(socketId, toastBytes); err != nil {
logger.Log.Error(err)
}
}
func (e *DefaultEntryStreamerSocketConnector) CleanupSocket(socketId int) {
socketObj := connectedWebsockets[socketId]
socketCleanup(socketId, socketObj)
}
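
Because callers obtain the connector through the dependency container rather than constructing DefaultEntryStreamerSocketConnector directly, an alternative implementation can be swapped in, for example in tests. A minimal, hypothetical sketch (the no-op type below is illustrative only; the registration call mirrors initializeDependencies in the agent's main.go shown earlier):

    // Hypothetical no-op connector for tests; it only has to satisfy the
    // EntryStreamerSocketConnector interface defined above.
    type noopEntryStreamerSocketConnector struct{}

    func (noopEntryStreamerSocketConnector) SendEntry(int, *tapApi.Entry, *WebSocketParams) {}
    func (noopEntryStreamerSocketConnector) SendMetadata(int, *basenine.Metadata)           {}
    func (noopEntryStreamerSocketConnector) SendToastError(int, error)                      {}
    func (noopEntryStreamerSocketConnector) CleanupSocket(int)                              {}

    // Swapping it in (normally done in initializeDependencies, agent main.go):
    dependency.RegisterGenerator(dependency.EntryStreamerSocketConnector, func() interface{} {
        return noopEntryStreamerSocketConnector{}
    })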

View File

@@ -11,6 +11,8 @@ import (
"strings"
"time"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/elastic"
"github.com/up9inc/mizu/agent/pkg/har"
@@ -19,8 +21,6 @@ import (
"github.com/up9inc/mizu/agent/pkg/servicemap"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/oas"
"github.com/up9inc/mizu/agent/pkg/resolver"
"github.com/up9inc/mizu/agent/pkg/utils"
@@ -103,11 +103,19 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
panic("Channel of captured messages is nil")
}
BasenineReconnect:
connection, err := basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
if err != nil {
panic(err)
logger.Log.Errorf("Can't establish a new connection to Basenine server: %v", err)
time.Sleep(shared.BasenineReconnectInterval * time.Second)
goto BasenineReconnect
}
if err = connection.InsertMode(); err != nil {
logger.Log.Errorf("Insert mode call failed: %v", err)
connection.Close()
time.Sleep(shared.BasenineReconnectInterval * time.Second)
goto BasenineReconnect
}
connection.InsertMode()
disableOASValidation := false
ctx := context.Background()
@@ -120,19 +128,24 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
for item := range outputItems {
extension := extensionsMap[item.Protocol.Name]
resolvedSource, resolvedDestionation, namespace := resolveIP(item.ConnectionInfo)
if namespace == "" && item.Namespace != tapApi.UNKNOWN_NAMESPACE {
namespace = item.Namespace
}
mizuEntry := extension.Dissector.Analyze(item, resolvedSource, resolvedDestionation, namespace)
if extension.Protocol.Name == "http" {
if !disableOASValidation {
var httpPair tapApi.HTTPRequestResponsePair
if err := json.Unmarshal([]byte(mizuEntry.HTTPPair), &httpPair); err != nil {
logger.Log.Error(err)
} else {
contract := handleOAS(ctx, doc, router, httpPair.Request.Payload.RawRequest, httpPair.Response.Payload.RawResponse, contractContent)
mizuEntry.ContractStatus = contract.Status
mizuEntry.ContractRequestReason = contract.RequestReason
mizuEntry.ContractResponseReason = contract.ResponseReason
mizuEntry.ContractContent = contract.Content
}
contract := handleOAS(ctx, doc, router, httpPair.Request.Payload.RawRequest, httpPair.Response.Payload.RawResponse, contractContent)
mizuEntry.ContractStatus = contract.Status
mizuEntry.ContractRequestReason = contract.RequestReason
mizuEntry.ContractResponseReason = contract.ResponseReason
mizuEntry.ContractContent = contract.Content
}
harEntry, err := har.NewEntry(mizuEntry.Request, mizuEntry.Response, mizuEntry.StartTime, mizuEntry.ElapsedTime)
@@ -140,30 +153,22 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
rules, _, _ := models.RunValidationRulesState(*harEntry, mizuEntry.Destination.Name)
mizuEntry.Rules = rules
}
entryWSource := oas.EntryWithSource{
Entry: *harEntry,
Source: mizuEntry.Source.Name,
Destination: mizuEntry.Destination.Name,
Id: mizuEntry.Id,
}
if entryWSource.Destination == "" {
entryWSource.Destination = mizuEntry.Destination.IP + ":" + mizuEntry.Destination.Port
}
oasGenerator := dependency.GetInstance(dependency.OasGeneratorDependency).(oas.OasGeneratorSink)
oasGenerator.PushEntry(&entryWSource)
}
data, err := json.Marshal(mizuEntry)
if err != nil {
panic(err)
logger.Log.Errorf("Error while marshaling entry: %v", err)
continue
}
providers.EntryAdded(len(data))
connection.SendText(string(data))
if err = connection.SendText(string(data)); err != nil {
logger.Log.Errorf("An error occured while inserting a new record to database: %v", err)
connection.Close()
time.Sleep(shared.BasenineReconnectInterval * time.Second)
goto BasenineReconnect
}
serviceMapGenerator := dependency.GetInstance(dependency.ServiceMapGeneratorDependency).(servicemap.ServiceMapSink)
serviceMapGenerator.NewTCPEntry(mizuEntry.Source, mizuEntry.Destination, &item.Protocol)
@@ -183,6 +188,7 @@ func resolveIP(connectionInfo *tapApi.ConnectionInfo) (resolvedSource string, re
}
} else {
resolvedSource = resolvedSourceObject.FullAddress
namespace = resolvedSourceObject.Namespace
}
unresolvedDestination := fmt.Sprintf("%s:%s", connectionInfo.ServerIP, connectionInfo.ServerPort)
@@ -194,7 +200,11 @@ func resolveIP(connectionInfo *tapApi.ConnectionInfo) (resolvedSource string, re
}
} else {
resolvedDestination = resolvedDestinationObject.FullAddress
namespace = resolvedDestinationObject.Namespace
// Overwrite namespace (if it was set according to the source)
// Only overwrite if non-empty
if resolvedDestinationObject.Namespace != "" {
namespace = resolvedDestinationObject.Namespace
}
}
}
return resolvedSource, resolvedDestination, namespace

View File

@@ -0,0 +1,96 @@
package api
import (
"context"
"encoding/json"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
)
type EntryStreamer interface {
Get(ctx context.Context, socketId int, params *WebSocketParams) error
}
type BasenineEntryStreamer struct{}
func (e *BasenineEntryStreamer) Get(ctx context.Context, socketId int, params *WebSocketParams) error {
var connection *basenine.Connection
entryStreamerSocketConnector := dependency.GetInstance(dependency.EntryStreamerSocketConnector).(EntryStreamerSocketConnector)
connection, err := basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
if err != nil {
logger.Log.Errorf("Failed to establish a connection to Basenine: %v", err)
entryStreamerSocketConnector.CleanupSocket(socketId)
return err
}
data := make(chan []byte)
meta := make(chan []byte)
query := params.Query
err = basenine.Validate(shared.BasenineHost, shared.BaseninePort, query)
if err != nil {
entryStreamerSocketConnector.SendToastError(socketId, err)
}
handleDataChannel := func(c *basenine.Connection, data chan []byte) {
for {
bytes := <-data
if string(bytes) == basenine.CloseChannel {
return
}
var entry *tapApi.Entry
err = json.Unmarshal(bytes, &entry)
if err != nil {
logger.Log.Debugf("Error unmarshalling entry: %v", err.Error())
continue
}
entryStreamerSocketConnector.SendEntry(socketId, entry, params)
}
}
handleMetaChannel := func(c *basenine.Connection, meta chan []byte) {
for {
bytes := <-meta
if string(bytes) == basenine.CloseChannel {
return
}
var metadata *basenine.Metadata
err = json.Unmarshal(bytes, &metadata)
if err != nil {
logger.Log.Debugf("Error unmarshalling metadata: %v", err.Error())
continue
}
entryStreamerSocketConnector.SendMetadata(socketId, metadata)
}
}
go handleDataChannel(connection, data)
go handleMetaChannel(connection, meta)
if err = connection.Query(query, data, meta); err != nil {
logger.Log.Errorf("Query mode call failed: %v", err)
entryStreamerSocketConnector.CleanupSocket(socketId)
return err
}
go func() {
<-ctx.Done()
data <- []byte(basenine.CloseChannel)
meta <- []byte(basenine.CloseChannel)
connection.Close()
}()
return nil
}

View File

@@ -1,19 +1,15 @@
package api
import (
"encoding/json"
"fmt"
"net/http"
"sync"
"time"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/utils"
"github.com/gin-gonic/gin"
"github.com/gorilla/websocket"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/utils"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
)
@@ -25,9 +21,9 @@ func InitExtensionsMap(ref map[string]*tapApi.Extension) {
}
type EventHandlers interface {
WebSocketConnect(socketId int, isTapper bool)
WebSocketConnect(c *gin.Context, socketId int, isTapper bool)
WebSocketDisconnect(socketId int, isTapper bool)
WebSocketMessage(socketId int, message []byte)
WebSocketMessage(socketId int, isTapper bool, message []byte)
}
type SocketConnection struct {
@@ -62,11 +58,11 @@ func init() {
func WebSocketRoutes(app *gin.Engine, eventHandlers EventHandlers) {
SocketGetBrowserHandler = func(c *gin.Context) {
websocketHandler(c.Writer, c.Request, eventHandlers, false)
websocketHandler(c, eventHandlers, false)
}
SocketGetTapperHandler = func(c *gin.Context) {
websocketHandler(c.Writer, c.Request, eventHandlers, true)
websocketHandler(c, eventHandlers, true)
}
app.GET("/ws", func(c *gin.Context) {
@@ -78,10 +74,10 @@ func WebSocketRoutes(app *gin.Engine, eventHandlers EventHandlers) {
})
}
func websocketHandler(w http.ResponseWriter, r *http.Request, eventHandlers EventHandlers, isTapper bool) {
ws, err := websocketUpgrader.Upgrade(w, r, nil)
func websocketHandler(c *gin.Context, eventHandlers EventHandlers, isTapper bool) {
ws, err := websocketUpgrader.Upgrade(c.Writer, c.Request, nil)
if err != nil {
logger.Log.Errorf("Failed to set websocket upgrade: %v", err)
logger.Log.Errorf("failed to set websocket upgrade: %v", err)
return
}
@@ -93,30 +89,11 @@ func websocketHandler(w http.ResponseWriter, r *http.Request, eventHandlers Even
websocketIdsLock.Unlock()
var connection *basenine.Connection
var isQuerySet bool
// `!isTapper` means it's a connection from the web UI
if !isTapper {
connection, err = basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
if err != nil {
logger.Log.Errorf("Failed to establish a connection to Basenine: %v", err)
socketCleanup(socketId, connectedWebsockets[socketId])
return
}
}
data := make(chan []byte)
meta := make(chan []byte)
defer func() {
socketCleanup(socketId, connectedWebsockets[socketId])
data <- []byte(basenine.CloseChannel)
meta <- []byte(basenine.CloseChannel)
connection.Close()
}()
eventHandlers.WebSocketConnect(socketId, isTapper)
eventHandlers.WebSocketConnect(c, socketId, isTapper)
startTimeBytes, _ := models.CreateWebsocketStartTimeMessage(utils.StartTime)
@@ -124,108 +101,52 @@ func websocketHandler(w http.ResponseWriter, r *http.Request, eventHandlers Even
logger.Log.Error(err)
}
var params WebSocketParams
for {
_, msg, err := ws.ReadMessage()
if err != nil {
if _, ok := err.(*websocket.CloseError); ok {
logger.Log.Debugf("Received websocket close message, socket id: %d", socketId)
logger.Log.Debugf("received websocket close message, socket id: %d", socketId)
} else {
logger.Log.Errorf("Error reading message, socket id: %d, error: %v", socketId, err)
logger.Log.Errorf("error reading message, socket id: %d, error: %v", socketId, err)
}
break
}
if !isTapper && !isQuerySet {
if err := json.Unmarshal(msg, &params); err != nil {
logger.Log.Errorf("Error unmarshalling parameters: %v", socketId, err)
continue
}
query := params.Query
err = basenine.Validate(shared.BasenineHost, shared.BaseninePort, query)
if err != nil {
toastBytes, _ := models.CreateWebsocketToastMessage(&models.ToastMessage{
Type: "error",
AutoClose: 5000,
Text: fmt.Sprintf("Syntax error: %s", err.Error()),
})
if err := SendToSocket(socketId, toastBytes); err != nil {
logger.Log.Error(err)
}
break
}
isQuerySet = true
handleDataChannel := func(c *basenine.Connection, data chan []byte) {
for {
bytes := <-data
if string(bytes) == basenine.CloseChannel {
return
}
var entry *tapApi.Entry
err = json.Unmarshal(bytes, &entry)
if err != nil {
logger.Log.Debugf("Error unmarshalling entry: %v", err.Error())
continue
}
var message []byte
if params.EnableFullEntries {
message, _ = models.CreateFullEntryWebSocketMessage(entry)
} else {
extension := extensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
message, _ = models.CreateBaseEntryWebSocketMessage(base)
}
if err := SendToSocket(socketId, message); err != nil {
logger.Log.Error(err)
}
}
}
handleMetaChannel := func(c *basenine.Connection, meta chan []byte) {
for {
bytes := <-meta
if string(bytes) == basenine.CloseChannel {
return
}
var metadata *basenine.Metadata
err = json.Unmarshal(bytes, &metadata)
if err != nil {
logger.Log.Debugf("Error unmarshalling metadata: %v", err.Error())
continue
}
metadataBytes, _ := models.CreateWebsocketQueryMetadataMessage(metadata)
if err := SendToSocket(socketId, metadataBytes); err != nil {
logger.Log.Error(err)
}
}
}
go handleDataChannel(connection, data)
go handleMetaChannel(connection, meta)
connection.Query(query, data, meta)
} else {
eventHandlers.WebSocketMessage(socketId, msg)
}
eventHandlers.WebSocketMessage(socketId, isTapper, msg)
}
}
func SendToSocket(socketId int, message []byte) error {
socketObj := connectedWebsockets[socketId]
if socketObj == nil {
return fmt.Errorf("socket %v is disconnected", socketId)
}
socketObj.lock.Lock() // gorilla socket panics from concurrent writes to a single socket
defer socketObj.lock.Unlock()
if connectedWebsockets[socketId] == nil {
return fmt.Errorf("socket %v is disconnected", socketId)
}
if err := socketObj.connection.SetWriteDeadline(time.Now().Add(time.Second * 10)); err != nil {
socketCleanup(socketId, socketObj)
return fmt.Errorf("error setting timeout to socket %v, err: %v", socketId, err)
}
if err := socketObj.connection.WriteMessage(websocket.TextMessage, message); err != nil {
socketCleanup(socketId, socketObj)
return fmt.Errorf("failed to write message to socket %v, err: %v", socketId, err)
}
return nil
}
func socketCleanup(socketId int, socketConnection *SocketConnection) {
err := socketConnection.connection.Close()
if err != nil {
logger.Log.Errorf("Error closing socket connection for socket id %d: %v", socketId, err)
logger.Log.Errorf("error closing socket connection for socket id %d: %v", socketId, err)
}
websocketIdsLock.Lock()
@@ -234,28 +155,3 @@ func socketCleanup(socketId int, socketConnection *SocketConnection) {
socketConnection.eventHandlers.WebSocketDisconnect(socketId, socketConnection.isTapper)
}
func SendToSocket(socketId int, message []byte) error {
socketObj := connectedWebsockets[socketId]
if socketObj == nil {
return fmt.Errorf("Socket %v is disconnected", socketId)
}
var sent = false
time.AfterFunc(time.Second*5, func() {
if !sent {
logger.Log.Error("Socket timed out")
socketCleanup(socketId, socketObj)
}
})
socketObj.lock.Lock() // gorilla socket panics from concurrent writes to a single socket
err := socketObj.connection.WriteMessage(1, message)
socketObj.lock.Unlock()
sent = true
if err != nil {
return fmt.Errorf("Failed to write message to socket %v, err: %w", socketId, err)
}
return nil
}

View File

@@ -1,12 +1,14 @@
package api
import (
"context"
"encoding/json"
"fmt"
"sync"
"github.com/gin-gonic/gin"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/providers"
"github.com/up9inc/mizu/agent/pkg/providers/tappedPods"
"github.com/up9inc/mizu/agent/pkg/providers/tappers"
"github.com/up9inc/mizu/agent/pkg/up9"
@@ -16,7 +18,12 @@ import (
"github.com/up9inc/mizu/shared/logger"
)
var browserClientSocketUUIDs = make([]int, 0)
type BrowserClient struct {
dataStreamCancelFunc context.CancelFunc
}
var browserClients = make(map[int]*BrowserClient, 0)
var tapperClientSocketUUIDs = make([]int, 0)
var socketListLock = sync.Mutex{}
type RoutesEventHandlers struct {
@@ -28,15 +35,22 @@ func init() {
go up9.UpdateAnalyzeStatus(BroadcastToBrowserClients)
}
func (h *RoutesEventHandlers) WebSocketConnect(socketId int, isTapper bool) {
func (h *RoutesEventHandlers) WebSocketConnect(_ *gin.Context, socketId int, isTapper bool) {
if isTapper {
logger.Log.Infof("Websocket event - Tapper connected, socket ID: %d", socketId)
tappers.Connected()
socketListLock.Lock()
tapperClientSocketUUIDs = append(tapperClientSocketUUIDs, socketId)
socketListLock.Unlock()
nodeToTappedPodMap := tappedPods.GetNodeToTappedPodMap()
SendTappedPods(socketId, nodeToTappedPodMap)
} else {
logger.Log.Infof("Websocket event - Browser socket connected, socket ID: %d", socketId)
socketListLock.Lock()
browserClientSocketUUIDs = append(browserClientSocketUUIDs, socketId)
browserClients[socketId] = &BrowserClient{}
socketListLock.Unlock()
BroadcastTappedPodsStatus()
@@ -47,16 +61,23 @@ func (h *RoutesEventHandlers) WebSocketDisconnect(socketId int, isTapper bool) {
if isTapper {
logger.Log.Infof("Websocket event - Tapper disconnected, socket ID: %d", socketId)
tappers.Disconnected()
socketListLock.Lock()
removeSocketUUIDFromTapperSlice(socketId)
socketListLock.Unlock()
} else {
logger.Log.Infof("Websocket event - Browser socket disconnected, socket ID: %d", socketId)
socketListLock.Lock()
removeSocketUUIDFromBrowserSlice(socketId)
if browserClients[socketId] != nil && browserClients[socketId].dataStreamCancelFunc != nil {
browserClients[socketId].dataStreamCancelFunc()
}
delete(browserClients, socketId)
socketListLock.Unlock()
}
}
func BroadcastToBrowserClients(message []byte) {
for _, socketId := range browserClientSocketUUIDs {
for socketId := range browserClients {
go func(socketId int) {
if err := SendToSocket(socketId, message); err != nil {
logger.Log.Error(err)
@@ -65,7 +86,43 @@ func BroadcastToBrowserClients(message []byte) {
}
}
func (h *RoutesEventHandlers) WebSocketMessage(_ int, message []byte) {
func BroadcastToTapperClients(message []byte) {
for _, socketId := range tapperClientSocketUUIDs {
go func(socketId int) {
if err := SendToSocket(socketId, message); err != nil {
logger.Log.Error(err)
}
}(socketId)
}
}
func (h *RoutesEventHandlers) WebSocketMessage(socketId int, isTapper bool, message []byte) {
if isTapper {
HandleTapperIncomingMessage(message, h.SocketOutChannel, BroadcastToBrowserClients)
} else {
// We initiate the Basenine stream after the first websocket message we receive (it contains the entry query), then store a cancel func so the stream can be cancelled later
if browserClients[socketId] != nil && browserClients[socketId].dataStreamCancelFunc == nil {
var params WebSocketParams
if err := json.Unmarshal(message, &params); err != nil {
logger.Log.Errorf("Error: %v", socketId, err)
return
}
entriesStreamer := dependency.GetInstance(dependency.EntriesSocketStreamer).(EntryStreamer)
ctx, cancelFunc := context.WithCancel(context.Background())
err := entriesStreamer.Get(ctx, socketId, &params)
if err != nil {
logger.Log.Errorf("error initializing basenine stream for browser socket %d %+v", socketId, err)
cancelFunc()
} else {
browserClients[socketId].dataStreamCancelFunc = cancelFunc
}
}
}
}
func HandleTapperIncomingMessage(message []byte, socketOutChannel chan<- *tapApi.OutputChannelItem, broadcastMessageFunc func([]byte)) {
var socketMessageBase shared.WebSocketMessageMetadata
err := json.Unmarshal(message, &socketMessageBase)
if err != nil {
@@ -79,7 +136,7 @@ func (h *RoutesEventHandlers) WebSocketMessage(_ int, message []byte) {
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
} else {
// NOTE: This is where the message comes back from the intermediate WebSocket to code.
h.SocketOutChannel <- tappedEntryMessage.Data
socketOutChannel <- tappedEntryMessage.Data
}
case shared.WebSocketMessageTypeUpdateStatus:
var statusMessage shared.WebSocketStatusMessage
@@ -87,15 +144,7 @@ func (h *RoutesEventHandlers) WebSocketMessage(_ int, message []byte) {
if err != nil {
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
} else {
BroadcastToBrowserClients(message)
}
case shared.WebsocketMessageTypeOutboundLink:
var outboundLinkMessage models.WebsocketOutboundLinkMessage
err := json.Unmarshal(message, &outboundLinkMessage)
if err != nil {
logger.Log.Infof("Could not unmarshal message of message type %s %v", socketMessageBase.MessageType, err)
} else {
handleTLSLink(outboundLinkMessage)
broadcastMessageFunc(message)
}
default:
logger.Log.Infof("Received socket message of type %s for which no handlers are defined", socketMessageBase.MessageType)
@@ -103,35 +152,12 @@ func (h *RoutesEventHandlers) WebSocketMessage(_ int, message []byte) {
}
}
func handleTLSLink(outboundLinkMessage models.WebsocketOutboundLinkMessage) {
resolvedNameObject := k8sResolver.Resolve(outboundLinkMessage.Data.DstIP)
if resolvedNameObject != nil {
outboundLinkMessage.Data.DstIP = resolvedNameObject.FullAddress
} else if outboundLinkMessage.Data.SuggestedResolvedName != "" {
outboundLinkMessage.Data.DstIP = outboundLinkMessage.Data.SuggestedResolvedName
}
cacheKey := fmt.Sprintf("%s -> %s:%d", outboundLinkMessage.Data.Src, outboundLinkMessage.Data.DstIP, outboundLinkMessage.Data.DstPort)
_, isInCache := providers.RecentTLSLinks.Get(cacheKey)
if isInCache {
return
} else {
providers.RecentTLSLinks.SetDefault(cacheKey, outboundLinkMessage.Data)
}
marshaledMessage, err := json.Marshal(outboundLinkMessage)
if err != nil {
logger.Log.Errorf("Error marshaling outbound link message for broadcasting: %v", err)
} else {
logger.Log.Errorf("Broadcasting outboundlink message %s", string(marshaledMessage))
BroadcastToBrowserClients(marshaledMessage)
}
}
func removeSocketUUIDFromBrowserSlice(uuidToRemove int) {
newUUIDSlice := make([]int, 0, len(browserClientSocketUUIDs))
for _, uuid := range browserClientSocketUUIDs {
func removeSocketUUIDFromTapperSlice(uuidToRemove int) {
newUUIDSlice := make([]int, 0, len(tapperClientSocketUUIDs))
for _, uuid := range tapperClientSocketUUIDs {
if uuid != uuidToRemove {
newUUIDSlice = append(newUUIDSlice, uuid)
}
}
browserClientSocketUUIDs = newUUIDSlice
tapperClientSocketUUIDs = newUUIDSlice
}

View File

@@ -18,3 +18,23 @@ func BroadcastTappedPodsStatus() {
BroadcastToBrowserClients(jsonBytes)
}
}
func SendTappedPods(socketId int, nodeToTappedPodMap shared.NodeToPodsMap) {
message := shared.CreateWebSocketTappedPodsMessage(nodeToTappedPodMap)
if jsonBytes, err := json.Marshal(message); err != nil {
logger.Log.Errorf("Could not Marshal message %v", err)
} else {
if err := SendToSocket(socketId, jsonBytes); err != nil {
logger.Log.Error(err)
}
}
}
func BroadcastTappedPodsToTappers(nodeToTappedPodMap shared.NodeToPodsMap) {
message := shared.CreateWebSocketTappedPodsMessage(nodeToTappedPodMap)
if jsonBytes, err := json.Marshal(message); err != nil {
logger.Log.Errorf("Could not Marshal message %v", err)
} else {
BroadcastToTapperClients(jsonBytes)
}
}

View File

@@ -67,7 +67,7 @@ func ConfigureBasenineServer(host string, port string, dbSize int64, logLevel lo
wait.WithProto("tcp"),
wait.WithWait(200*time.Millisecond),
wait.WithBreak(50*time.Millisecond),
wait.WithDeadline(5*time.Second),
wait.WithDeadline(20*time.Second),
wait.WithDebug(logLevel == logging.DEBUG),
).Do([]string{fmt.Sprintf("%s:%s", host, port)}) {
logger.Log.Panicf("Basenine is not available!")

View File

@@ -1,25 +1,19 @@
package controllers
import (
"encoding/json"
"net/http"
"strconv"
"time"
"github.com/up9inc/mizu/agent/pkg/app"
"github.com/up9inc/mizu/agent/pkg/har"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/entries"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/agent/pkg/validation"
"github.com/gin-gonic/gin"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
)
func Error(c *gin.Context, err error) bool {
func HandleEntriesError(c *gin.Context, err error) bool {
if err != nil {
logger.Log.Errorf("Error getting entry: %v", err)
_ = c.Error(err)
@@ -49,45 +43,18 @@ func GetEntries(c *gin.Context) {
entriesRequest.TimeoutMs = 3000
}
data, meta, err := basenine.Fetch(shared.BasenineHost, shared.BaseninePort,
entriesRequest.LeftOff, entriesRequest.Direction, entriesRequest.Query,
entriesRequest.Limit, time.Duration(entriesRequest.TimeoutMs)*time.Millisecond)
if err != nil {
c.JSON(http.StatusInternalServerError, validationError)
}
response := &models.EntriesResponse{}
var dataSlice []interface{}
for _, row := range data {
var entry *tapApi.Entry
err = json.Unmarshal(row, &entry)
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{
"error": true,
"type": "error",
"autoClose": "5000",
"msg": string(row),
})
return // exit
entriesProvider := dependency.GetInstance(dependency.EntriesProvider).(entries.EntriesProvider)
entries, metadata, err := entriesProvider.GetEntries(entriesRequest)
if !HandleEntriesError(c, err) {
baseEntries := make([]interface{}, 0)
for _, entry := range entries {
baseEntries = append(baseEntries, entry.Base)
}
extension := app.ExtensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
dataSlice = append(dataSlice, base)
c.JSON(http.StatusOK, models.EntriesResponse{
Data: baseEntries,
Meta: metadata,
})
}
var metadata *basenine.Metadata
err = json.Unmarshal(meta, &metadata)
if err != nil {
logger.Log.Debugf("Error recieving metadata: %v", err.Error())
}
response.Data = dataSlice
response.Meta = metadata
c.JSON(http.StatusOK, response)
}
func GetEntry(c *gin.Context) {
@@ -101,55 +68,12 @@ func GetEntry(c *gin.Context) {
c.JSON(http.StatusBadRequest, validationError)
}
id, _ := strconv.Atoi(c.Param("id"))
var entry *tapApi.Entry
bytes, err := basenine.Single(shared.BasenineHost, shared.BaseninePort, id, singleEntryRequest.Query)
if Error(c, err) {
return // exit
}
err = json.Unmarshal(bytes, &entry)
if err != nil {
c.JSON(http.StatusNotFound, gin.H{
"error": true,
"type": "error",
"autoClose": "5000",
"msg": string(bytes),
})
return // exit
}
id := c.Param("id")
extension := app.ExtensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
var representation []byte
representation, err = extension.Dissector.Represent(entry.Request, entry.Response)
if err != nil {
c.JSON(http.StatusNotFound, gin.H{
"error": true,
"type": "error",
"autoClose": "5000",
"msg": err.Error(),
})
return // exit
}
entriesProvider := dependency.GetInstance(dependency.EntriesProvider).(entries.EntriesProvider)
entry, err := entriesProvider.GetEntry(singleEntryRequest, id)
var rules []map[string]interface{}
var isRulesEnabled bool
if entry.Protocol.Name == "http" {
harEntry, _ := har.NewEntry(entry.Request, entry.Response, entry.StartTime, entry.ElapsedTime)
_, rulesMatched, _isRulesEnabled := models.RunValidationRulesState(*harEntry, entry.Destination.Name)
isRulesEnabled = _isRulesEnabled
inrec, _ := json.Marshal(rulesMatched)
if err := json.Unmarshal(inrec, &rules); err != nil {
logger.Log.Error(err)
}
if !HandleEntriesError(c, err) {
c.JSON(http.StatusOK, entry)
}
c.JSON(http.StatusOK, tapApi.EntryWrapper{
Protocol: entry.Protocol,
Representation: string(representation),
Data: entry,
Base: base,
Rules: rules,
IsRulesEnabled: isRulesEnabled,
})
}

View File

@@ -1,8 +1,12 @@
package controllers
import (
"bytes"
basenine "github.com/up9inc/basenine/client/go"
"net"
"net/http/httptest"
"testing"
"time"
"github.com/up9inc/mizu/agent/pkg/dependency"
"github.com/up9inc/mizu/agent/pkg/oas"
@@ -11,39 +15,55 @@ import (
)
func TestGetOASServers(t *testing.T) {
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
recorder := httptest.NewRecorder()
c, _ := gin.CreateTestContext(recorder)
oas.GetDefaultOasGeneratorInstance().Start()
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
recorder, c := getRecorderAndContext()
GetOASServers(c)
t.Logf("Written body: %s", recorder.Body.String())
}
func TestGetOASAllSpecs(t *testing.T) {
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
recorder := httptest.NewRecorder()
c, _ := gin.CreateTestContext(recorder)
oas.GetDefaultOasGeneratorInstance().Start()
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
recorder, c := getRecorderAndContext()
GetOASAllSpecs(c)
t.Logf("Written body: %s", recorder.Body.String())
}
func TestGetOASSpec(t *testing.T) {
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} { return oas.GetDefaultOasGeneratorInstance() })
recorder := httptest.NewRecorder()
c, _ := gin.CreateTestContext(recorder)
oas.GetDefaultOasGeneratorInstance().Start()
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
recorder, c := getRecorderAndContext()
c.Params = []gin.Param{{Key: "id", Value: "some"}}
GetOASSpec(c)
t.Logf("Written body: %s", recorder.Body.String())
}
type fakeConn struct {
sendBuffer *bytes.Buffer
receiveBuffer *bytes.Buffer
}
func (f fakeConn) Read(p []byte) (int, error) { return f.sendBuffer.Read(p) }
func (f fakeConn) Write(p []byte) (int, error) { return f.receiveBuffer.Write(p) }
func (fakeConn) Close() error { return nil }
func (fakeConn) LocalAddr() net.Addr { return nil }
func (fakeConn) RemoteAddr() net.Addr { return nil }
func (fakeConn) SetDeadline(t time.Time) error { return nil }
func (fakeConn) SetReadDeadline(t time.Time) error { return nil }
func (fakeConn) SetWriteDeadline(t time.Time) error { return nil }
func getRecorderAndContext() (*httptest.ResponseRecorder, *gin.Context) {
dummyConn := new(basenine.Connection)
dummyConn.Conn = fakeConn{
sendBuffer: bytes.NewBufferString("\n"),
receiveBuffer: bytes.NewBufferString("\n"),
}
dependency.RegisterGenerator(dependency.OasGeneratorDependency, func() interface{} {
return oas.GetDefaultOasGeneratorInstance()
})
recorder := httptest.NewRecorder()
c, _ := gin.CreateTestContext(recorder)
oas.GetDefaultOasGeneratorInstance().Start(dummyConn)
oas.GetDefaultOasGeneratorInstance().GetServiceSpecs().Store("some", oas.NewGen("some"))
return recorder, c
}

View File

@@ -101,16 +101,18 @@ func (s *ServiceMapControllerSuite) TestGet() {
// response nodes
aNode := servicemap.ServiceMapNode{
Id: 1,
Name: TCPEntryA.Name,
Entry: TCPEntryA,
Count: 1,
Id: 1,
Name: TCPEntryA.Name,
Entry: TCPEntryA,
Resolved: true,
Count: 1,
}
bNode := servicemap.ServiceMapNode{
Id: 2,
Name: TCPEntryB.Name,
Entry: TCPEntryB,
Count: 1,
Id: 2,
Name: TCPEntryB.Name,
Entry: TCPEntryB,
Resolved: true,
Count: 1,
}
assert.Contains(response.Nodes, aNode)
assert.Contains(response.Nodes, bNode)

View File

@@ -3,6 +3,8 @@ package controllers
import (
"net/http"
core "k8s.io/api/core/v1"
"github.com/gin-gonic/gin"
"github.com/up9inc/mizu/agent/pkg/api"
"github.com/up9inc/mizu/agent/pkg/holder"
@@ -12,6 +14,7 @@ import (
"github.com/up9inc/mizu/agent/pkg/up9"
"github.com/up9inc/mizu/agent/pkg/validation"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/kubernetes"
"github.com/up9inc/mizu/shared/logger"
)
@@ -30,15 +33,21 @@ func HealthCheck(c *gin.Context) {
}
func PostTappedPods(c *gin.Context) {
var requestTappedPods []*shared.PodInfo
var requestTappedPods []core.Pod
if err := c.Bind(&requestTappedPods); err != nil {
c.JSON(http.StatusBadRequest, err)
return
}
podInfos := kubernetes.GetPodInfosForPods(requestTappedPods)
logger.Log.Infof("[Status] POST request: %d tapped pods", len(requestTappedPods))
tappedPods.Set(requestTappedPods)
tappedPods.Set(podInfos)
api.BroadcastTappedPodsStatus()
nodeToTappedPodMap := kubernetes.GetNodeHostToTappedPodsMap(requestTappedPods)
tappedPods.SetNodeToTappedPodMap(nodeToTappedPodMap)
api.BroadcastTappedPodsToTappers(nodeToTappedPodMap)
}
func PostTapperStatus(c *gin.Context) {
@@ -85,10 +94,6 @@ func GetGeneralStats(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetGeneralStats())
}
func GetRecentTLSLinks(c *gin.Context) {
c.JSON(http.StatusOK, providers.GetAllRecentTLSAddresses())
}
func GetCurrentResolvingInformation(c *gin.Context) {
c.JSON(http.StatusOK, holder.GetResolver().GetMap())
}

View File

@@ -5,4 +5,7 @@ type DependencyContainerType string
const (
ServiceMapGeneratorDependency = "ServiceMapGeneratorDependency"
OasGeneratorDependency = "OasGeneratorDependency"
EntriesProvider = "EntriesProvider"
EntriesSocketStreamer = "EntriesSocketStreamer"
EntryStreamerSocketConnector = "EntryStreamerSocketConnector"
)
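These constants are keys in the agent's dependency container. A sketch of wiring one of the new keys, modeled on the RegisterGenerator call visible in the controller test above; the import paths and the concrete provider used here are assumptions, and the lookup side is omitted because its helper is not shown in this diff:

package main

import (
	"github.com/up9inc/mizu/agent/pkg/dependency"
	"github.com/up9inc/mizu/agent/pkg/entries"
)

func main() {
	// Register a concrete provider under its container key so controllers
	// can later resolve it via the EntriesProvider constant.
	dependency.RegisterGenerator(dependency.EntriesProvider, func() interface{} {
		return &entries.BasenineEntriesProvider{}
	})
}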

View File

@@ -0,0 +1,99 @@
package entries
import (
"encoding/json"
"errors"
"time"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/app"
"github.com/up9inc/mizu/agent/pkg/har"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
)
type EntriesProvider interface {
GetEntries(entriesRequest *models.EntriesRequest) ([]*tapApi.EntryWrapper, *basenine.Metadata, error)
GetEntry(singleEntryRequest *models.SingleEntryRequest, entryId string) (*tapApi.EntryWrapper, error)
}
type BasenineEntriesProvider struct{}
func (e *BasenineEntriesProvider) GetEntries(entriesRequest *models.EntriesRequest) ([]*tapApi.EntryWrapper, *basenine.Metadata, error) {
data, meta, err := basenine.Fetch(shared.BasenineHost, shared.BaseninePort,
entriesRequest.LeftOff, entriesRequest.Direction, entriesRequest.Query,
entriesRequest.Limit, time.Duration(entriesRequest.TimeoutMs)*time.Millisecond)
if err != nil {
return nil, nil, err
}
var dataSlice []*tapApi.EntryWrapper
for _, row := range data {
var entry *tapApi.Entry
err = json.Unmarshal(row, &entry)
if err != nil {
return nil, nil, err
}
extension := app.ExtensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
dataSlice = append(dataSlice, &tapApi.EntryWrapper{
Protocol: entry.Protocol,
Data: entry,
Base: base,
})
}
var metadata *basenine.Metadata
err = json.Unmarshal(meta, &metadata)
if err != nil {
logger.Log.Debugf("Error recieving metadata: %v", err.Error())
}
return dataSlice, metadata, nil
}
func (e *BasenineEntriesProvider) GetEntry(singleEntryRequest *models.SingleEntryRequest, entryId string) (*tapApi.EntryWrapper, error) {
var entry *tapApi.Entry
bytes, err := basenine.Single(shared.BasenineHost, shared.BaseninePort, entryId, singleEntryRequest.Query)
if err != nil {
return nil, err
}
err = json.Unmarshal(bytes, &entry)
if err != nil {
return nil, errors.New(string(bytes))
}
extension := app.ExtensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
var representation []byte
representation, err = extension.Dissector.Represent(entry.Request, entry.Response)
if err != nil {
return nil, err
}
var rules []map[string]interface{}
var isRulesEnabled bool
if entry.Protocol.Name == "http" {
harEntry, _ := har.NewEntry(entry.Request, entry.Response, entry.StartTime, entry.ElapsedTime)
_, rulesMatched, _isRulesEnabled := models.RunValidationRulesState(*harEntry, entry.Destination.Name)
isRulesEnabled = _isRulesEnabled
inrec, _ := json.Marshal(rulesMatched)
if err := json.Unmarshal(inrec, &rules); err != nil {
logger.Log.Error(err)
}
}
return &tapApi.EntryWrapper{
Protocol: entry.Protocol,
Representation: string(representation),
Data: entry,
Base: base,
Rules: rules,
IsRulesEnabled: isRulesEnabled,
}, nil
}
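A minimal usage sketch of the provider defined above, assuming it talks to the Basenine instance configured via shared.BasenineHost and shared.BaseninePort; every field value below is illustrative rather than taken from the diff:

package main

import (
	"fmt"

	"github.com/up9inc/mizu/agent/pkg/entries"
	"github.com/up9inc/mizu/agent/pkg/models"
)

func main() {
	provider := &entries.BasenineEntriesProvider{}
	req := &models.EntriesRequest{
		LeftOff:   "latest", // left-off is now a string record ID (illustrative value)
		Direction: -1,       // scan backwards from the left-off position
		Query:     "http",
		Limit:     20,
		TimeoutMs: 3000,
	}
	wrappers, meta, err := provider.GetEntries(req)
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	fmt.Printf("got %d entries, metadata: %+v\n", len(wrappers), meta)
}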

View File

@@ -9,11 +9,10 @@ import (
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/tap"
)
type EntriesRequest struct {
LeftOff int `form:"leftOff" validate:"required,min=-1"`
LeftOff string `form:"leftOff" validate:"required"`
Direction int `form:"direction" validate:"required,oneof='1' '-1'"`
Query string `form:"query"`
Limit int `form:"limit" validate:"required,min=1"`
@@ -44,11 +43,6 @@ type WebSocketTappedEntryMessage struct {
Data *tapApi.OutputChannelItem
}
type WebsocketOutboundLinkMessage struct {
*shared.WebSocketMessageMetadata
Data *tap.OutboundLink
}
type AuthStatus struct {
Email string `json:"email"`
Model string `json:"model"`

View File

@@ -4,6 +4,7 @@ import (
"bufio"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/url"
@@ -67,23 +68,23 @@ func fileSize(fname string) int64 {
return fi.Size()
}
func feedEntries(fromFiles []string, isSync bool) (count int, err error) {
func feedEntries(fromFiles []string, isSync bool, gen *defaultOasGenerator) (count uint, err error) {
badFiles := make([]string, 0)
cnt := 0
cnt := uint(0)
for _, file := range fromFiles {
logger.Log.Info("Processing file: " + file)
ext := strings.ToLower(filepath.Ext(file))
eCnt := 0
eCnt := uint(0)
switch ext {
case ".har":
eCnt, err = feedFromHAR(file, isSync)
eCnt, err = feedFromHAR(file, isSync, gen)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
badFiles = append(badFiles, file)
continue
}
case ".ldjson":
eCnt, err = feedFromLDJSON(file, isSync)
eCnt, err = feedFromLDJSON(file, isSync, gen)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
badFiles = append(badFiles, file)
@@ -102,7 +103,7 @@ func feedEntries(fromFiles []string, isSync bool) (count int, err error) {
return cnt, nil
}
func feedFromHAR(file string, isSync bool) (int, error) {
func feedFromHAR(file string, isSync bool, gen *defaultOasGenerator) (uint, error) {
fd, err := os.Open(file)
if err != nil {
panic(err)
@@ -121,16 +122,16 @@ func feedFromHAR(file string, isSync bool) (int, error) {
return 0, err
}
cnt := 0
cnt := uint(0)
for _, entry := range harDoc.Log.Entries {
cnt += 1
feedEntry(&entry, "", isSync, file)
feedEntry(&entry, "", file, gen, fmt.Sprintf("%024d", cnt))
}
return cnt, nil
}
func feedEntry(entry *har.Entry, source string, isSync bool, file string) {
func feedEntry(entry *har.Entry, source string, file string, gen *defaultOasGenerator, cnt string) {
entry.Comment = file
if entry.Response.Status == 302 {
logger.Log.Debugf("Dropped traffic entry due to permanent redirect status: %s", entry.StartedDateTime)
@@ -145,15 +146,11 @@ func feedEntry(entry *har.Entry, source string, isSync bool, file string) {
logger.Log.Errorf("Failed to parse entry URL: %v, err: %v", entry.Request.URL, err)
}
ews := EntryWithSource{Entry: *entry, Source: source, Destination: u.Host, Id: uint(0)}
if isSync {
GetDefaultOasGeneratorInstance().entriesChan <- ews // blocking variant, right?
} else {
GetDefaultOasGeneratorInstance().PushEntry(&ews)
}
ews := EntryWithSource{Entry: *entry, Source: source, Destination: u.Host, Id: cnt}
gen.handleHARWithSource(&ews)
}
func feedFromLDJSON(file string, isSync bool) (int, error) {
func feedFromLDJSON(file string, isSync bool, gen *defaultOasGenerator) (uint, error) {
fd, err := os.Open(file)
if err != nil {
panic(err)
@@ -165,7 +162,7 @@ func feedFromLDJSON(file string, isSync bool) (int, error) {
var meta map[string]interface{}
buf := strings.Builder{}
cnt := 0
cnt := uint(0)
source := ""
for {
substr, isPrefix, err := reader.ReadLine()
@@ -196,7 +193,7 @@ func feedFromLDJSON(file string, isSync bool) (int, error) {
logger.Log.Warningf("Failed decoding entry: %s", line)
} else {
cnt += 1
feedEntry(&entry, source, isSync, file)
feedEntry(&entry, source, file, gen, fmt.Sprintf("%024d", cnt))
}
}
}
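The counter passed to feedEntry is rendered with %024d, which is what produces the zero-padded 24-character x-sample-entry IDs seen in the expected OAS artifacts below. A tiny worked example of that formatting:

package main

import "fmt"

func main() {
	// Entry counters become zero-padded, 24-character IDs;
	// 10 formats to "000000000000000000000010", matching the
	// x-sample-entry values in the test artifacts.
	fmt.Printf("%024d\n", uint(10))
}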

View File

@@ -0,0 +1,96 @@
{
"openapi": "3.1.0",
"info": {
"title": "http://carts",
"description": "Mizu observed 3 entries (0 failed), at 2.287 hits/s, average response time is 0.017 seconds",
"version": "1.0"
},
"servers": [
{
"url": "http://carts"
}
],
"paths": {
"/carts/{cartId}/items": {
"get": {
"summary": "/carts/{cartId}/items",
"description": "Mizu observed 3 entries (0 failed), at 2.287 hits/s, average response time is 0.017 seconds",
"operationId": "84c9b926-1f73-4ab4-b381-3c124528959f",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": [
{
"id": "60fe98fb86c0fc000869a90c",
"itemId": "3395a43e-2d88-40de-b95f-e00e1502085b",
"quantity": 1,
"unitPrice": 18
}
],
"x-sample-entry": "000000000000000000000010"
}
},
"x-sample-entry": "000000000000000000000010"
}
},
"x-counters-per-source": {
"some-source": {
"entries": 3,
"failures": 0,
"firstSeen": 1627298058.3798368,
"lastSeen": 1627298065.2397773,
"sumRT": 0.05,
"sumDuration": 6.859940528869629
}
},
"x-counters-total": {
"entries": 3,
"failures": 0,
"firstSeen": 1627298058.3798368,
"lastSeen": 1627298065.2397773,
"sumRT": 0.05,
"sumDuration": 6.859940528869629
},
"x-last-seen-ts": 1627298065.2397773,
"x-sample-entry": "000000000000000000000010"
},
"parameters": [
{
"name": "cartId",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "mHK0P7zTktmV1zv57iWAvCTd43FFMHap"
}
},
"x-sample-entry": "000000000000000000000010"
}
]
}
},
"x-counters-per-source": {
"some-source": {
"entries": 3,
"failures": 0,
"firstSeen": 1627298058.3798368,
"lastSeen": 1627298065.2397773,
"sumRT": 0.05,
"sumDuration": 6.859940528869629
}
},
"x-counters-total": {
"entries": 3,
"failures": 0,
"firstSeen": 1627298058.3798368,
"lastSeen": 1627298065.2397773,
"sumRT": 0.05,
"sumDuration": 6.859940528869629
}
}

View File

@@ -0,0 +1,485 @@
{
"openapi": "3.1.0",
"info": {
"title": "Preloaded",
"description": "Test file for loading pre-existing OAS",
"version": "0.1"
},
"paths": {
"/catalogue": {
"get": {
"tags": [
"catalogue"
],
"summary": "/catalogue",
"description": "Mizu observed 3 entries (0 failed), at 2.647 hits/s, average response time is 0.008 seconds",
"operationId": "dd6c3dbe-6b6b-4ddd-baed-757e237ddb8a",
"parameters": [
{
"name": "page",
"in": "query",
"required": false,
"style": "form",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "1"
}
},
"x-sample-entry": "000000000000000000000002"
},
{
"name": "size",
"in": "query",
"required": true,
"style": "form",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "6"
},
"example #1": {
"value": "3"
},
"example #2": {
"value": "5"
}
},
"x-sample-entry": "000000000000000000000011"
},
{
"name": "tags",
"in": "query",
"required": false,
"style": "form",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": ""
},
"example #1": {
"value": "blue"
}
},
"x-sample-entry": "000000000000000000000007"
},
{
"name": "sort",
"in": "query",
"required": false,
"style": "form",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "id"
}
},
"x-sample-entry": "000000000000000000000007"
}
],
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": [
{
"count": 1,
"description": "Socks fit for a Messiah. You too can experience walking in water with these special edition beauties. Each hole is lovingly proggled to leave smooth edges. The only sock approved by a higher power.",
"id": "03fef6ac-1896-4ce8-bd69-b798f85c6e0b",
"imageUrl": [
"/catalogue/images/holy_1.jpeg",
"/catalogue/images/holy_2.jpeg"
],
"name": "Holy",
"price": 99.99,
"tag": [
"action",
"magic"
]
},
{
"count": 438,
"description": "proident occaecat irure et excepteur labore minim nisi amet irure",
"id": "3395a43e-2d88-40de-b95f-e00e1502085b",
"imageUrl": [
"/catalogue/images/colourful_socks.jpg",
"/catalogue/images/colourful_socks.jpg"
],
"name": "Colourful",
"price": 18,
"tag": [
"brown",
"blue"
]
},
{
"count": 820,
"description": "Ready for action. Engineers: be ready to smash that next bug! Be ready, with these super-action-sport-masterpieces. This particular engineer was chased away from the office with a stick.",
"id": "510a0d7e-8e83-4193-b483-e27e09ddc34d",
"imageUrl": [
"/catalogue/images/puma_1.jpeg",
"/catalogue/images/puma_2.jpeg"
],
"name": "SuperSport XL",
"price": 15,
"tag": [
"sport",
"formal",
"black"
]
},
{
"count": 738,
"description": "A mature sock, crossed, with an air of nonchalance.",
"id": "808a2de1-1aaa-4c25-a9b9-6612e8f29a38",
"imageUrl": [
"/catalogue/images/cross_1.jpeg",
"/catalogue/images/cross_2.jpeg"
],
"name": "Crossed",
"price": 17.32,
"tag": [
"blue",
"action",
"red",
"formal"
]
},
{
"count": 808,
"description": "enim officia aliqua excepteur esse deserunt quis aliquip nostrud anim",
"id": "819e1fbf-8b7e-4f6d-811f-693534916a8b",
"imageUrl": [
"/catalogue/images/WAT.jpg",
"/catalogue/images/WAT2.jpg"
],
"name": "Figueroa",
"price": 14,
"tag": [
"green",
"formal",
"blue"
]
},
{
"count": 175,
"description": "consequat amet cupidatat minim laborum tempor elit ex consequat in",
"id": "837ab141-399e-4c1f-9abc-bace40296bac",
"imageUrl": [
"/catalogue/images/catsocks.jpg",
"/catalogue/images/catsocks2.jpg"
],
"name": "Cat socks",
"price": 15,
"tag": [
"brown",
"formal",
"green"
]
}
],
"x-sample-entry": "000000000000000000000011"
}
},
"x-sample-entry": "000000000000000000000011"
}
},
"x-counters-per-source": {
"some-source": {
"entries": 3,
"failures": 0,
"firstSeen": 1627298057.7849188,
"lastSeen": 1627298065.7258668,
"sumRT": 0.024999999999999998,
"sumDuration": 7.940948009490967
}
},
"x-counters-total": {
"entries": 3,
"failures": 0,
"firstSeen": 1627298057.7849188,
"lastSeen": 1627298065.7258668,
"sumRT": 0.024999999999999998,
"sumDuration": 7.940948009490967
},
"x-last-seen-ts": 1627298065.7258668,
"x-sample-entry": "000000000000000000000011"
}
},
"/catalogue/size": {
"get": {
"tags": [
"catalogue"
],
"summary": "/catalogue/size",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.013 seconds",
"operationId": "2315e69d-9d66-48cf-b3d3-fec9c30bd28b",
"parameters": [
{
"name": "tags",
"in": "query",
"required": true,
"style": "form",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": ""
}
},
"x-sample-entry": "000000000000000000000001"
},
{
"name": "x-some",
"in": "header",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "demo val"
}
},
"x-sample-entry": "000000000000000000000001"
}
],
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": {
"err": null,
"size": 9
},
"x-sample-entry": "000000000000000000000001"
}
},
"x-sample-entry": "000000000000000000000001"
}
},
"x-counters-per-source": {
"some-source": {
"entries": 1,
"failures": 0,
"firstSeen": 1627298057.7841518,
"lastSeen": 1627298057.7841518,
"sumRT": 0.013,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1627298057.7841518,
"lastSeen": 1627298057.7841518,
"sumRT": 0.013,
"sumDuration": 0
},
"x-last-seen-ts": 1627298057.7841518,
"x-sample-entry": "000000000000000000000001"
}
},
"/catalogue/{id}": {
"get": {
"tags": [
"catalogue"
],
"summary": "/catalogue/{id}",
"description": "Mizu observed 4 entries (0 failed), at 1.899 hits/s, average response time is 0.003 seconds",
"parameters": [
{
"name": "non-required-header",
"in": "header",
"required": false,
"style": "simple",
"schema": {
"type": "string"
},
"example": "some-uuid-maybe"
},
{
"name": "x-some",
"in": "header",
"required": false,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "demoval"
}
},
"x-sample-entry": "000000000000000000000004"
}
],
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": {
"count": 438,
"description": "proident occaecat irure et excepteur labore minim nisi amet irure",
"id": "3395a43e-2d88-40de-b95f-e00e1502085b",
"imageUrl": [
"/catalogue/images/colourful_socks.jpg",
"/catalogue/images/colourful_socks.jpg"
],
"name": "Colourful",
"price": 18,
"tag": [
"brown",
"blue"
]
},
"x-sample-entry": "000000000000000000000012"
}
},
"x-sample-entry": "000000000000000000000012"
}
},
"x-counters-per-source": {
"some-source": {
"entries": 4,
"failures": 0,
"firstSeen": 1627298058.1315014,
"lastSeen": 1627298065.7293031,
"sumRT": 0.013999999999999999,
"sumDuration": 7.597801685333252
}
},
"x-counters-total": {
"entries": 4,
"failures": 0,
"firstSeen": 1627298058.1315014,
"lastSeen": 1627298065.7293031,
"sumRT": 0.013999999999999999,
"sumDuration": 7.597801685333252
},
"x-last-seen-ts": 1627298065.7293031,
"x-sample-entry": "000000000000000000000012"
},
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "3395a43e-2d88-40de-b95f-e00e1502085b"
},
"example #1": {
"value": "808a2de1-1aaa-4c25-a9b9-6612e8f29a38"
}
},
"example": "some-uuid-maybe",
"x-sample-entry": "000000000000000000000012"
}
]
},
"/catalogue/{id}/details": {
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"example": "some-uuid-maybe"
}
]
},
"/tags": {
"get": {
"summary": "/tags",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.007 seconds",
"operationId": "c4d7d0ed-1a78-4370-a049-efe3abc631a6",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": {
"err": null,
"tags": [
"brown",
"geek",
"formal",
"blue",
"skin",
"red",
"action",
"sport",
"black",
"magic",
"green"
]
},
"x-sample-entry": "000000000000000000000003"
}
},
"x-sample-entry": "000000000000000000000003"
}
},
"x-counters-per-source": {
"some-source": {
"entries": 1,
"failures": 0,
"firstSeen": 1627298057.7841816,
"lastSeen": 1627298057.7841816,
"sumRT": 0.007,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1627298057.7841816,
"lastSeen": 1627298057.7841816,
"sumRT": 0.007,
"sumDuration": 0
},
"x-last-seen-ts": 1627298057.7841816,
"x-sample-entry": "000000000000000000000003"
}
}
},
"x-counters-per-source": {
"some-source": {
"entries": 9,
"failures": 0,
"firstSeen": 1627298057.7841518,
"lastSeen": 1627298065.7293031,
"sumRT": 0.05899999999999999,
"sumDuration": 15.538749694824219
}
},
"x-counters-total": {
"entries": 9,
"failures": 0,
"firstSeen": 1627298057.7841518,
"lastSeen": 1627298065.7293031,
"sumRT": 0.05899999999999999,
"sumDuration": 15.538749694824219
}
}

View File

@@ -0,0 +1,897 @@
{
"openapi": "3.1.0",
"info": {
"title": "https://httpbin.org",
"description": "Mizu observed 19 entries (0 failed), at 0.106 hits/s, average response time is 0.172 seconds",
"version": "1.0"
},
"servers": [
{
"url": "https://httpbin.org"
}
],
"paths": {
"/appears-once": {
"get": {
"summary": "/appears-once",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.630 seconds",
"operationId": "2d34623e-fde8-4720-8390-9a7439051755",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null,
"x-sample-entry": "000000000000000000000004"
}
},
"x-sample-entry": "000000000000000000000004"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750580.0471218,
"lastSeen": 1567750580.0471218,
"sumRT": 0.63,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750580.0471218,
"lastSeen": 1567750580.0471218,
"sumRT": 0.63,
"sumDuration": 0
},
"x-last-seen-ts": 1567750580.0471218,
"x-sample-entry": "000000000000000000000004"
}
},
"/appears-twice": {
"get": {
"summary": "/appears-twice",
"description": "Mizu observed 2 entries (0 failed), at 0.500 hits/s, average response time is 0.630 seconds",
"operationId": "9c5330f3-8062-468b-b5a3-df1ad82b4846",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null,
"x-sample-entry": "000000000000000000000006"
}
},
"x-sample-entry": "000000000000000000000006"
}
},
"x-counters-per-source": {
"": {
"entries": 2,
"failures": 0,
"firstSeen": 1567750580.7471218,
"lastSeen": 1567750581.7471218,
"sumRT": 1.26,
"sumDuration": 1
}
},
"x-counters-total": {
"entries": 2,
"failures": 0,
"firstSeen": 1567750580.7471218,
"lastSeen": 1567750581.7471218,
"sumRT": 1.26,
"sumDuration": 1
},
"x-last-seen-ts": 1567750581.7471218,
"x-sample-entry": "000000000000000000000006"
}
},
"/body-optional": {
"post": {
"summary": "/body-optional",
"description": "Mizu observed 3 entries (0 failed), at 0.003 hits/s, average response time is 0.001 seconds",
"operationId": "34f3d66c-b1f7-4dca-9cab-987fcc8ae472",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000012"
}
},
"x-sample-entry": "000000000000000000000012"
}
},
"x-counters-per-source": {
"": {
"entries": 3,
"failures": 0,
"firstSeen": 1567750581.7471218,
"lastSeen": 1567750581.757122,
"sumRT": 0.003,
"sumDuration": 0.010000228881835938
}
},
"x-counters-total": {
"entries": 3,
"failures": 0,
"firstSeen": 1567750581.7471218,
"lastSeen": 1567750581.757122,
"sumRT": 0.003,
"sumDuration": 0.010000228881835938
},
"x-last-seen-ts": 1567750581.757122,
"x-sample-entry": "000000000000000000000012",
"requestBody": {
"description": "Generic request body",
"content": {
"application/json": {
"example": "{\"key\", \"val\"}",
"x-sample-entry": "000000000000000000000011"
}
},
"x-sample-entry": "000000000000000000000012"
}
}
},
"/body-required": {
"post": {
"summary": "/body-required",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "ff6add53-ab1c-4d4e-b590-0835fa318276",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000013"
}
},
"x-sample-entry": "000000000000000000000013"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750581.757122,
"lastSeen": 1567750581.757122,
"sumRT": 0.001,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750581.757122,
"lastSeen": 1567750581.757122,
"sumRT": 0.001,
"sumDuration": 0
},
"x-last-seen-ts": 1567750581.757122,
"x-sample-entry": "000000000000000000000013",
"requestBody": {
"description": "Generic request body",
"content": {
"": {
"example": "body exists",
"x-sample-entry": "000000000000000000000013"
}
},
"required": true,
"x-sample-entry": "000000000000000000000013"
}
}
},
"/form-multipart": {
"post": {
"summary": "/form-multipart",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "153f0925-9fc7-4e9f-9d33-f1470f25f0f7",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"example": {},
"x-sample-entry": "000000000000000000000009"
}
},
"x-sample-entry": "000000000000000000000009"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.7471218,
"lastSeen": 1567750582.7471218,
"sumRT": 0.001,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.7471218,
"lastSeen": 1567750582.7471218,
"sumRT": 0.001,
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.7471218,
"x-sample-entry": "000000000000000000000009",
"requestBody": {
"description": "Generic request body",
"content": {
"multipart/form-data": {
"schema": {
"type": "object",
"required": [
"file",
"path"
],
"properties": {
"file": {
"type": "string",
"contentMediaType": "application/json",
"examples": [
"{\"functions\": 123}"
]
},
"path": {
"type": "string",
"examples": [
"/content/components"
]
}
}
},
"example": "--BOUNDARY\r\nContent-Disposition: form-data; name=\"file\"; filename=\"metadata.json\"\r\nContent-Type: application/json\r\n\r\n{\"functions\": 123}\r\n--BOUNDARY\r\nContent-Disposition: form-data; name=\"path\"\r\n\r\n/content/components\r\n--BOUNDARY--\r\n",
"x-sample-entry": "000000000000000000000009"
}
},
"required": true,
"x-sample-entry": "000000000000000000000009"
}
}
},
"/form-urlencoded": {
"post": {
"summary": "/form-urlencoded",
"description": "Mizu observed 2 entries (0 failed), at 0.500 hits/s, average response time is 0.001 seconds",
"operationId": "c92189f5-5636-46eb-ac71-92b17941a568",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000008"
}
},
"x-sample-entry": "000000000000000000000008"
}
},
"x-counters-per-source": {
"": {
"entries": 2,
"failures": 0,
"firstSeen": 1567750580.7471218,
"lastSeen": 1567750581.7471218,
"sumRT": 0.002,
"sumDuration": 1
}
},
"x-counters-total": {
"entries": 2,
"failures": 0,
"firstSeen": 1567750580.7471218,
"lastSeen": 1567750581.7471218,
"sumRT": 0.002,
"sumDuration": 1
},
"x-last-seen-ts": 1567750581.7471218,
"x-sample-entry": "000000000000000000000008",
"requestBody": {
"description": "Generic request body",
"content": {
"application/x-www-form-urlencoded": {
"schema": {
"type": "object",
"required": [
"agent-id",
"callback-url",
"token"
],
"properties": {
"agent-id": {
"type": "string",
"examples": [
"ade"
]
},
"callback-url": {
"type": "string",
"examples": [
""
]
},
"optional": {
"type": "string",
"examples": [
"another"
]
},
"token": {
"type": "string",
"examples": [
"sometoken",
"sometoken-second-val"
]
}
}
},
"example": "agent-id=ade\u0026callback-url=\u0026token=sometoken",
"x-sample-entry": "000000000000000000000008"
}
},
"required": true,
"x-sample-entry": "000000000000000000000008"
}
}
},
"/param-patterns/prefix-gibberish-fine/{prefixgibberishfineId}": {
"get": {
"tags": [
"param-patterns"
],
"summary": "/param-patterns/prefix-gibberish-fine/{prefixgibberishfineId}",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "85270437-7aae-4a5b-b988-3662092463d0",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000014"
}
},
"x-sample-entry": "000000000000000000000014"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582,
"lastSeen": 1567750582,
"sumRT": 0.001,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582,
"lastSeen": 1567750582,
"sumRT": 0.001,
"sumDuration": 0
},
"x-last-seen-ts": 1567750582,
"x-sample-entry": "000000000000000000000014"
},
"parameters": [
{
"name": "prefixgibberishfineId",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "234324"
}
},
"x-sample-entry": "000000000000000000000014"
}
]
},
"/param-patterns/{parampatternId}": {
"get": {
"tags": [
"param-patterns"
],
"summary": "/param-patterns/{parampatternId}",
"description": "Mizu observed 2 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "da597734-1cf5-4d3b-917b-6b02dacf7b7b",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000018"
}
},
"x-sample-entry": "000000000000000000000018"
}
},
"x-counters-per-source": {
"": {
"entries": 2,
"failures": 0,
"firstSeen": 1567750582.000003,
"lastSeen": 1567750582.000004,
"sumRT": 0.002,
"sumDuration": 9.5367431640625e-7
}
},
"x-counters-total": {
"entries": 2,
"failures": 0,
"firstSeen": 1567750582.000003,
"lastSeen": 1567750582.000004,
"sumRT": 0.002,
"sumDuration": 9.5367431640625e-7
},
"x-last-seen-ts": 1567750582.000004,
"x-sample-entry": "000000000000000000000018"
},
"parameters": [
{
"name": "parampatternId",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string",
"pattern": "^prefix-gibberish-.+"
},
"examples": {
"example #0": {
"value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
},
"example #1": {
"value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
},
"example #2": {
"value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
},
"example #3": {
"value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
},
"example #4": {
"value": "prefix-gibberish-afterwards"
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
"/param-patterns/{parampatternId}/1": {
"get": {
"tags": [
"param-patterns"
],
"summary": "/param-patterns/{parampatternId}/1",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "e965a245-9cfc-48ed-94e1-f765eadb3960",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000015"
}
},
"x-sample-entry": "000000000000000000000015"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.000001,
"lastSeen": 1567750582.000001,
"sumRT": 0.001,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.000001,
"lastSeen": 1567750582.000001,
"sumRT": 0.001,
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.000001,
"x-sample-entry": "000000000000000000000015"
},
"parameters": [
{
"name": "parampatternId",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string",
"pattern": "^prefix-gibberish-.+"
},
"examples": {
"example #0": {
"value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
},
"example #1": {
"value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
},
"example #2": {
"value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
},
"example #3": {
"value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
},
"example #4": {
"value": "prefix-gibberish-afterwards"
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
"/param-patterns/{parampatternId}/static": {
"get": {
"tags": [
"param-patterns"
],
"summary": "/param-patterns/{parampatternId}/static",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "7af420dc-f8b7-450f-8f6f-18b039aa3cde",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000016"
}
},
"x-sample-entry": "000000000000000000000016"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.000002,
"lastSeen": 1567750582.000002,
"sumRT": 0.001,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.000002,
"lastSeen": 1567750582.000002,
"sumRT": 0.001,
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.000002,
"x-sample-entry": "000000000000000000000016"
},
"parameters": [
{
"name": "parampatternId",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string",
"pattern": "^prefix-gibberish-.+"
},
"examples": {
"example #0": {
"value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
},
"example #1": {
"value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
},
"example #2": {
"value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
},
"example #3": {
"value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
},
"example #4": {
"value": "prefix-gibberish-afterwards"
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
"/param-patterns/{parampatternId}/{param1}": {
"get": {
"tags": [
"param-patterns"
],
"summary": "/param-patterns/{parampatternId}/{param1}",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.001 seconds",
"operationId": "02a1771d-2d50-4a8c-8be2-29c7e59b8435",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"": {
"x-sample-entry": "000000000000000000000019"
}
},
"x-sample-entry": "000000000000000000000019"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.000002,
"lastSeen": 1567750582.000002,
"sumRT": 0.001,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750582.000002,
"lastSeen": 1567750582.000002,
"sumRT": 0.001,
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.000002,
"x-sample-entry": "000000000000000000000019"
},
"parameters": [
{
"name": "param1",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "23421"
}
},
"x-sample-entry": "000000000000000000000019"
},
{
"name": "parampatternId",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string",
"pattern": "^prefix-gibberish-.+"
},
"examples": {
"example #0": {
"value": "prefix-gibberish-sfdlasdfkadf87sd93284q24r"
},
"example #1": {
"value": "prefix-gibberish-adslkfasdf89sa7dfasddafa8a98sd7kansdf"
},
"example #2": {
"value": "prefix-gibberish-4jk5l2345h2452l4352435jlk45"
},
"example #3": {
"value": "prefix-gibberish-84395h2j4k35hj243j5h2kl34h54k"
},
"example #4": {
"value": "prefix-gibberish-afterwards"
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
"/{Id}": {
"get": {
"summary": "/{Id}",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.630 seconds",
"operationId": "77ec4910-d47a-46a5-8234-fb80a11034b4",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null,
"x-sample-entry": "000000000000000000000003"
}
},
"x-sample-entry": "000000000000000000000003"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750579.7471218,
"lastSeen": 1567750579.7471218,
"sumRT": 0.63,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750579.7471218,
"lastSeen": 1567750579.7471218,
"sumRT": 0.63,
"sumDuration": 0
},
"x-last-seen-ts": 1567750579.7471218,
"x-sample-entry": "000000000000000000000003"
},
"parameters": [
{
"name": "Id",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "e21f7112-3d3b-4632-9da3-a4af2e0e9166"
},
"example #1": {
"value": "952bea17-3776-11ea-9341-42010a84012a"
}
},
"x-sample-entry": "000000000000000000000003"
}
]
},
"/{Id}/sub1": {
"get": {
"summary": "/{Id}/sub1",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.111 seconds",
"operationId": "198675eb-9faf-407b-83fa-0483a730bbbe",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"text/html": {
"x-sample-entry": "000000000000000000000001"
}
},
"x-sample-entry": "000000000000000000000001"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750483.864529,
"lastSeen": 1567750483.864529,
"sumRT": 0.111,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750483.864529,
"lastSeen": 1567750483.864529,
"sumRT": 0.111,
"sumDuration": 0
},
"x-last-seen-ts": 1567750483.864529,
"x-sample-entry": "000000000000000000000001"
},
"parameters": [
{
"name": "Id",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "e21f7112-3d3b-4632-9da3-a4af2e0e9166"
},
"example #1": {
"value": "952bea17-3776-11ea-9341-42010a84012a"
}
},
"x-sample-entry": "000000000000000000000003"
}
]
},
"/{Id}/sub2": {
"get": {
"summary": "/{Id}/sub2",
"description": "Mizu observed 1 entries (0 failed), at 0.000 hits/s, average response time is 0.630 seconds",
"operationId": "31d880f1-152f-4dd6-84a7-463e13b694a5",
"responses": {
"200": {
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null,
"x-sample-entry": "000000000000000000000002"
}
},
"x-sample-entry": "000000000000000000000002"
}
},
"x-counters-per-source": {
"": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750578.7471218,
"lastSeen": 1567750578.7471218,
"sumRT": 0.63,
"sumDuration": 0
}
},
"x-counters-total": {
"entries": 1,
"failures": 0,
"firstSeen": 1567750578.7471218,
"lastSeen": 1567750578.7471218,
"sumRT": 0.63,
"sumDuration": 0
},
"x-last-seen-ts": 1567750578.7471218,
"x-sample-entry": "000000000000000000000002"
},
"parameters": [
{
"name": "Id",
"in": "path",
"required": true,
"style": "simple",
"schema": {
"type": "string"
},
"examples": {
"example #0": {
"value": "e21f7112-3d3b-4632-9da3-a4af2e0e9166"
},
"example #1": {
"value": "952bea17-3776-11ea-9341-42010a84012a"
}
},
"x-sample-entry": "000000000000000000000003"
}
]
}
},
"x-counters-per-source": {
"": {
"entries": 19,
"failures": 0,
"firstSeen": 1567750483.864529,
"lastSeen": 1567750582.7471218,
"sumRT": 3.273999999999999,
"sumDuration": 2.0100011825561523
}
},
"x-counters-total": {
"entries": 19,
"failures": 0,
"firstSeen": 1567750483.864529,
"lastSeen": 1567750582.7471218,
"sumRT": 3.273999999999999,
"sumDuration": 2.0100011825561523
}
}

File diff suppressed because it is too large

View File

@@ -6,7 +6,11 @@ import (
"net/url"
"sync"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/har"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/tap/api"
"github.com/up9inc/mizu/shared/logger"
)
@@ -15,16 +19,12 @@ var (
instance *defaultOasGenerator
)
type OasGeneratorSink interface {
PushEntry(entryWithSource *EntryWithSource)
}
type OasGenerator interface {
Start()
Start(conn *basenine.Connection)
Stop()
IsStarted() bool
Reset()
GetServiceSpecs() *sync.Map
SetEntriesQuery(query string) bool
}
type defaultOasGenerator struct {
@@ -32,7 +32,9 @@ type defaultOasGenerator struct {
ctx context.Context
cancel context.CancelFunc
serviceSpecs *sync.Map
entriesChan chan EntryWithSource
dbConn *basenine.Connection
dbMutex sync.Mutex
entriesQuery string
}
func GetDefaultOasGeneratorInstance() *defaultOasGenerator {
@@ -43,16 +45,33 @@ func GetDefaultOasGeneratorInstance() *defaultOasGenerator {
return instance
}
func (g *defaultOasGenerator) Start() {
func (g *defaultOasGenerator) Start(conn *basenine.Connection) {
if g.started {
return
}
if g.dbConn == nil {
if conn == nil {
logger.Log.Infof("Creating new DB connection for OAS generator to address %s:%s", shared.BasenineHost, shared.BaseninePort)
newConn, err := basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
if err != nil {
logger.Log.Error("Error connecting to DB for OAS generator, err: %v", err)
return
}
conn = newConn
}
g.dbConn = conn
}
ctx, cancel := context.WithCancel(context.Background())
g.cancel = cancel
g.ctx = ctx
g.entriesChan = make(chan EntryWithSource, 100) // buffer up to 100 entries for OAS processing
g.serviceSpecs = &sync.Map{}
g.started = true
go g.runGenerator()
}
@@ -60,9 +79,17 @@ func (g *defaultOasGenerator) Stop() {
if !g.started {
return
}
g.cancel()
g.Reset()
g.started = false
g.cancel()
g.reset()
g.dbMutex.Lock()
defer g.dbMutex.Unlock()
if g.dbConn != nil {
g.dbConn.Close()
g.dbConn = nil
}
}
func (g *defaultOasGenerator) IsStarted() bool {
@@ -70,80 +97,131 @@ func (g *defaultOasGenerator) IsStarted() bool {
}
func (g *defaultOasGenerator) runGenerator() {
// Make []byte channels to receive the data and the meta
dataChan := make(chan []byte)
metaChan := make(chan []byte)
g.dbMutex.Lock()
defer g.dbMutex.Unlock()
logger.Log.Infof("Querying DB for OAS generator with query '%s'", g.entriesQuery)
if err := g.dbConn.Query(g.entriesQuery, dataChan, metaChan); err != nil {
logger.Log.Errorf("Query mode call failed: %v", err)
}
for {
select {
case <-g.ctx.Done():
logger.Log.Infof("OAS Generator was canceled")
close(dataChan)
close(metaChan)
return
case entryWithSource, ok := <-g.entriesChan:
case metaBytes, ok := <-metaChan:
if !ok {
logger.Log.Infof("OAS Generator - meta channel closed")
break
}
logger.Log.Debugf("Meta: %s", metaBytes)
case dataBytes, ok := <-dataChan:
if !ok {
logger.Log.Infof("OAS Generator - entries channel closed")
break
}
entry := entryWithSource.Entry
u, err := url.Parse(entry.Request.URL)
logger.Log.Debugf("Data: %s", dataBytes)
e := new(api.Entry)
err := json.Unmarshal(dataBytes, e)
if err != nil {
logger.Log.Errorf("Failed to parse entry URL: %v, err: %v", entry.Request.URL, err)
}
val, found := g.serviceSpecs.Load(entryWithSource.Destination)
var gen *SpecGen
if !found {
gen = NewGen(u.Scheme + "://" + entryWithSource.Destination)
g.serviceSpecs.Store(entryWithSource.Destination, gen)
} else {
gen = val.(*SpecGen)
}
opId, err := gen.feedEntry(entryWithSource)
if err != nil {
txt, suberr := json.Marshal(entry)
if suberr == nil {
logger.Log.Debugf("Problematic entry: %s", txt)
}
logger.Log.Warningf("Failed processing entry: %s", err)
continue
}
logger.Log.Debugf("Handled entry %s as opId: %s", entry.Request.URL, opId) // TODO: set opId back to entry?
g.handleEntry(e)
}
}
}
func (g *defaultOasGenerator) Reset() {
g.serviceSpecs = &sync.Map{}
func (g *defaultOasGenerator) handleEntry(mizuEntry *api.Entry) {
if mizuEntry.Protocol.Name == "http" {
dest := mizuEntry.Destination.Name
if dest == "" {
logger.Log.Debugf("OAS: Unresolved entry %d", mizuEntry.Id)
return
}
entry, err := har.NewEntry(mizuEntry.Request, mizuEntry.Response, mizuEntry.StartTime, mizuEntry.ElapsedTime)
if err != nil {
logger.Log.Warningf("Failed to turn MizuEntry %d into HAR Entry: %s", mizuEntry.Id, err)
return
}
entryWSource := &EntryWithSource{
Entry: *entry,
Source: mizuEntry.Source.Name,
Destination: dest,
Id: mizuEntry.Id,
}
g.handleHARWithSource(entryWSource)
} else {
logger.Log.Debugf("OAS: Unsupported protocol in entry %d: %s", mizuEntry.Id, mizuEntry.Protocol.Name)
}
}
func (g *defaultOasGenerator) PushEntry(entryWithSource *EntryWithSource) {
if !g.started {
func (g *defaultOasGenerator) handleHARWithSource(entryWSource *EntryWithSource) {
entry := entryWSource.Entry
gen := g.getGen(entryWSource.Destination, entry.Request.URL)
opId, err := gen.feedEntry(entryWSource)
if err != nil {
txt, suberr := json.Marshal(entry)
if suberr == nil {
logger.Log.Debugf("Problematic entry: %s", txt)
}
logger.Log.Warningf("Failed processing entry %d: %s", entryWSource.Id, err)
return
}
select {
case g.entriesChan <- *entryWithSource:
default:
logger.Log.Warningf("OAS Generator - entry wasn't sent to channel because the channel has no buffer or there is no receiver")
logger.Log.Debugf("Handled entry %s as opId: %s", entryWSource.Id, opId) // TODO: set opId back to entry?
}
func (g *defaultOasGenerator) getGen(dest string, urlStr string) *SpecGen {
u, err := url.Parse(urlStr)
if err != nil {
logger.Log.Errorf("Failed to parse entry URL: %v, err: %v", urlStr, err)
}
val, found := g.serviceSpecs.Load(dest)
var gen *SpecGen
if !found {
gen = NewGen(u.Scheme + "://" + dest)
g.serviceSpecs.Store(dest, gen)
} else {
gen = val.(*SpecGen)
}
return gen
}
func (g *defaultOasGenerator) reset() {
g.serviceSpecs = &sync.Map{}
}
func (g *defaultOasGenerator) GetServiceSpecs() *sync.Map {
return g.serviceSpecs
}
func (g *defaultOasGenerator) SetEntriesQuery(query string) bool {
changed := g.entriesQuery != query
g.entriesQuery = query
return changed
}
func NewDefaultOasGenerator() *defaultOasGenerator {
return &defaultOasGenerator{
started: false,
ctx: nil,
cancel: nil,
serviceSpecs: nil,
entriesChan: nil,
dbConn: nil,
}
}
type EntryWithSource struct {
Source string
Destination string
Entry har.Entry
Id uint
}
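With this change the generator no longer consumes an in-process channel; Start takes a *basenine.Connection and, when given nil, dials Basenine itself before streaming entries through runGenerator. A minimal startup sketch (the import path and the empty query are assumptions; tests inject a fake connection instead of nil):

package main

import "github.com/up9inc/mizu/agent/pkg/oas"

func main() {
	gen := oas.GetDefaultOasGeneratorInstance()
	// Assumption: an empty query streams every entry; the query semantics
	// are not spelled out in this diff.
	gen.SetEntriesQuery("")
	// nil makes the generator open its own connection to
	// shared.BasenineHost:shared.BaseninePort; tests pass a fake
	// *basenine.Connection here instead.
	gen.Start(nil)
}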

View File

@@ -0,0 +1,46 @@
package oas
import (
"encoding/json"
"github.com/up9inc/mizu/agent/pkg/har"
"testing"
"time"
)
func TestOASGen(t *testing.T) {
gen := new(defaultOasGenerator)
e := new(har.Entry)
err := json.Unmarshal([]byte(`{"startedDateTime": "20000101","request": {"url": "https://host/path", "method": "GET"}, "response": {"status": 200}}`), e)
if err != nil {
panic(err)
}
ews := &EntryWithSource{
Destination: "some",
Entry: *e,
}
dummyConn := GetFakeDBConn(`{"startedDateTime": "20000101","request": {"url": "https://host/path", "method": "GET"}, "response": {"status": 200}}`)
gen.Start(dummyConn)
gen.handleHARWithSource(ews)
g, ok := gen.serviceSpecs.Load("some")
if !ok {
panic("Failed")
}
sg := g.(*SpecGen)
spec, err := sg.GetSpec()
if err != nil {
panic(err)
}
specText, _ := json.Marshal(spec)
t.Log(string(specText))
if !gen.IsStarted() {
t.Errorf("Should be started")
}
time.Sleep(100 * time.Millisecond)
gen.Stop()
}

View File

@@ -3,10 +3,6 @@ package oas
import (
"encoding/json"
"errors"
"github.com/chanced/openapi"
"github.com/google/uuid"
"github.com/nav-inc/datetime"
"github.com/up9inc/mizu/shared/logger"
"io"
"io/ioutil"
"mime"
@@ -18,6 +14,11 @@ import (
"strings"
"sync"
"github.com/chanced/openapi"
"github.com/google/uuid"
"github.com/nav-inc/datetime"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/agent/pkg/har"
"time"
@@ -28,6 +29,13 @@ const CountersTotal = "x-counters-total"
const CountersPerSource = "x-counters-per-source"
const SampleId = "x-sample-entry"
type EntryWithSource struct {
Source string
Destination string
Entry har.Entry
Id string
}
type reqResp struct { // hello, generics in Go
Req *har.Request
Resp *har.Response
@@ -60,7 +68,7 @@ func (g *SpecGen) StartFromSpec(oas *openapi.OpenAPI) {
g.tree = new(Node)
for pathStr, pathObj := range oas.Paths.Items {
pathSplit := strings.Split(string(pathStr), "/")
g.tree.getOrSet(pathSplit, pathObj)
g.tree.getOrSet(pathSplit, pathObj, "")
// clean "last entry timestamp" markers from the past
for _, pathAndOp := range g.tree.listOps() {
@@ -69,11 +77,11 @@ func (g *SpecGen) StartFromSpec(oas *openapi.OpenAPI) {
}
}
func (g *SpecGen) feedEntry(entryWithSource EntryWithSource) (string, error) {
func (g *SpecGen) feedEntry(entryWithSource *EntryWithSource) (string, error) {
g.lock.Lock()
defer g.lock.Unlock()
opId, err := g.handlePathObj(&entryWithSource)
opId, err := g.handlePathObj(entryWithSource)
if err != nil {
return "", err
}
@@ -219,7 +227,7 @@ func (g *SpecGen) handlePathObj(entryWithSource *EntryWithSource) (string, error
} else {
split = strings.Split(urlParsed.Path, "/")
}
node := g.tree.getOrSet(split, new(openapi.PathObj))
node := g.tree.getOrSet(split, new(openapi.PathObj), entryWithSource.Id)
opObj, err := handleOpObj(entryWithSource, node.pathObj)
if opObj != nil {
@@ -242,12 +250,12 @@ func handleOpObj(entryWithSource *EntryWithSource, pathObj *openapi.PathObj) (*o
return nil, nil
}
err = handleRequest(&entry.Request, opObj, isSuccess)
err = handleRequest(&entry.Request, opObj, isSuccess, entryWithSource.Id)
if err != nil {
return nil, err
}
err = handleResponse(&entry.Response, opObj, isSuccess)
err = handleResponse(&entry.Response, opObj, isSuccess, entryWithSource.Id)
if err != nil {
return nil, err
}
@@ -257,6 +265,8 @@ func handleOpObj(entryWithSource *EntryWithSource, pathObj *openapi.PathObj) (*o
return nil, err
}
setSampleID(&opObj.Extensions, entryWithSource.Id)
return opObj, nil
}
@@ -329,15 +339,10 @@ func handleCounters(opObj *openapi.Operation, success bool, entryWithSource *Ent
return err
}
err = opObj.Extensions.SetExtension(SampleId, entryWithSource.Id)
if err != nil {
return err
}
return nil
}
func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) error {
func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool, sampleId string) error {
// TODO: we don't handle the situation when header/qstr param can be defined on pathObj level. Also the path param defined on opObj
urlParsed, err := url.Parse(req.URL)
if err != nil {
@@ -361,7 +366,7 @@ func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) e
IsIgnored: func(name string) bool { return false },
GeneralizeName: func(name string) string { return name },
}
handleNameVals(qstrGW, &opObj.Parameters, false)
handleNameVals(qstrGW, &opObj.Parameters, false, sampleId)
hdrGW := nvParams{
In: openapi.InHeader,
@@ -369,7 +374,7 @@ func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) e
IsIgnored: isHeaderIgnored,
GeneralizeName: strings.ToLower,
}
handleNameVals(hdrGW, &opObj.Parameters, true)
handleNameVals(hdrGW, &opObj.Parameters, true, sampleId)
if isSuccess {
reqBody, err := getRequestBody(req, opObj)
@@ -378,12 +383,14 @@ func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) e
}
if reqBody != nil {
setSampleID(&reqBody.Extensions, sampleId)
if req.PostData.Text == "" {
reqBody.Required = false
} else {
reqCtype, _ := getReqCtype(req)
reqMedia, err := fillContent(reqResp{Req: req}, reqBody.Content, reqCtype)
reqMedia, err := fillContent(reqResp{Req: req}, reqBody.Content, reqCtype, sampleId)
if err != nil {
return err
}
@@ -395,18 +402,20 @@ func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) e
return nil
}
func handleResponse(resp *har.Response, opObj *openapi.Operation, isSuccess bool) error {
func handleResponse(resp *har.Response, opObj *openapi.Operation, isSuccess bool, sampleId string) error {
// TODO: we don't support "default" response
respObj, err := getResponseObj(resp, opObj, isSuccess)
if err != nil {
return err
}
handleRespHeaders(resp.Headers, respObj)
setSampleID(&respObj.Extensions, sampleId)
handleRespHeaders(resp.Headers, respObj, sampleId)
respCtype := getRespCtype(resp)
respContent := respObj.Content
respMedia, err := fillContent(reqResp{Resp: resp}, respContent, respCtype)
respMedia, err := fillContent(reqResp{Resp: resp}, respContent, respCtype, sampleId)
if err != nil {
return err
}
@@ -414,7 +423,7 @@ func handleResponse(resp *har.Response, opObj *openapi.Operation, isSuccess bool
return nil
}
func handleRespHeaders(reqHeaders []har.Header, respObj *openapi.ResponseObj) {
func handleRespHeaders(reqHeaders []har.Header, respObj *openapi.ResponseObj, sampleId string) {
visited := map[string]*openapi.HeaderObj{}
for _, pair := range reqHeaders {
if isHeaderIgnored(pair.Name) {
@@ -436,6 +445,8 @@ func handleRespHeaders(reqHeaders []har.Header, respObj *openapi.ResponseObj) {
logger.Log.Warningf("Failed to add example to a parameter: %s", err)
}
visited[nameGeneral] = param
setSampleID(&param.Extensions, sampleId)
}
// maintain "required" flag
@@ -456,13 +467,15 @@ func handleRespHeaders(reqHeaders []har.Header, respObj *openapi.ResponseObj) {
}
}
func fillContent(reqResp reqResp, respContent openapi.Content, ctype string) (*openapi.MediaType, error) {
func fillContent(reqResp reqResp, respContent openapi.Content, ctype string, sampleId string) (*openapi.MediaType, error) {
content, found := respContent[ctype]
if !found {
respContent[ctype] = &openapi.MediaType{}
content = respContent[ctype]
}
setSampleID(&content.Extensions, sampleId)
var text string
var isBinary bool
if reqResp.Req != nil {
@@ -474,10 +487,10 @@ func fillContent(reqResp reqResp, respContent openapi.Content, ctype string) (*o
if !isBinary && text != "" {
var exampleMsg []byte
// try treating it as json
any, isJSON := anyJSON(text)
anyVal, isJSON := anyJSON(text)
if isJSON {
// re-marshal with forced indent
if msg, err := json.MarshalIndent(any, "", "\t"); err != nil {
if msg, err := json.MarshalIndent(anyVal, "", "\t"); err != nil {
panic("Failed to re-marshal value, super-strange")
} else {
exampleMsg = msg

View File

@@ -1,25 +1,37 @@
package oas
import (
"bytes"
"encoding/json"
"io/ioutil"
"net"
"os"
"regexp"
"strings"
"sync"
"testing"
"time"
"github.com/chanced/openapi"
"github.com/op/go-logging"
"github.com/up9inc/mizu/shared/logger"
"github.com/wI2L/jsondiff"
basenine "github.com/up9inc/basenine/client/go"
"github.com/up9inc/mizu/agent/pkg/har"
)
func GetFakeDBConn(send string) *basenine.Connection {
dummyConn := new(basenine.Connection)
dummyConn.Conn = FakeConn{
sendBuffer: bytes.NewBufferString(send),
receiveBuffer: bytes.NewBufferString(""),
}
return dummyConn
}
// if started via env, write file into subdir
func outputSpec(label string, spec *openapi.OpenAPI, t *testing.T) string {
content, err := json.MarshalIndent(spec, "", "\t")
content, err := json.MarshalIndent(spec, "", " ")
if err != nil {
panic(err)
}
@@ -42,20 +54,22 @@ func outputSpec(label string, spec *openapi.OpenAPI, t *testing.T) string {
}
func TestEntries(t *testing.T) {
logger.InitLoggerStd(logging.INFO)
// logger.InitLoggerStd(logging.INFO) is disabled here: it causes a race condition
files, err := getFiles("./test_artifacts/")
if err != nil {
t.Log(err)
t.FailNow()
}
GetDefaultOasGeneratorInstance().Start()
loadStartingOAS("test_artifacts/catalogue.json", "catalogue")
loadStartingOAS("test_artifacts/trcc.json", "trcc-api-service")
gen := NewDefaultOasGenerator()
gen.serviceSpecs = new(sync.Map)
loadStartingOAS("test_artifacts/catalogue.json", "catalogue", gen.serviceSpecs)
loadStartingOAS("test_artifacts/trcc.json", "trcc-api-service", gen.serviceSpecs)
go func() {
for {
time.Sleep(1 * time.Second)
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
gen.serviceSpecs.Range(func(key, val interface{}) bool {
svc := key.(string)
t.Logf("Getting spec for %s", svc)
gen := val.(*SpecGen)
@@ -68,16 +82,14 @@ func TestEntries(t *testing.T) {
}
}()
cnt, err := feedEntries(files, true)
cnt, err := feedEntries(files, true, gen)
if err != nil {
t.Log(err)
t.Fail()
}
waitQueueProcessed()
svcs := strings.Builder{}
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
gen.serviceSpecs.Range(func(key, val interface{}) bool {
gen := val.(*SpecGen)
svc := key.(string)
svcs.WriteString(svc + ",")
@@ -99,7 +111,7 @@ func TestEntries(t *testing.T) {
return true
})
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
gen.serviceSpecs.Range(func(key, val interface{}) bool {
svc := key.(string)
gen := val.(*SpecGen)
spec, err := gen.GetSpec()
@@ -123,20 +135,18 @@ func TestEntries(t *testing.T) {
}
func TestFileSingle(t *testing.T) {
GetDefaultOasGeneratorInstance().Start()
GetDefaultOasGeneratorInstance().Reset()
gen := NewDefaultOasGenerator()
gen.serviceSpecs = new(sync.Map)
// loadStartingOAS()
file := "test_artifacts/params.har"
files := []string{file}
cnt, err := feedEntries(files, true)
cnt, err := feedEntries(files, true, gen)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
t.Fail()
}
waitQueueProcessed()
GetDefaultOasGeneratorInstance().GetServiceSpecs().Range(func(key, val interface{}) bool {
gen.serviceSpecs.Range(func(key, val interface{}) bool {
svc := key.(string)
gen := val.(*SpecGen)
spec, err := gen.GetSpec()
@@ -189,18 +199,7 @@ func TestFileSingle(t *testing.T) {
logger.Log.Infof("Processed entries: %d", cnt)
}
func waitQueueProcessed() {
for {
time.Sleep(100 * time.Millisecond)
queue := len(GetDefaultOasGeneratorInstance().entriesChan)
logger.Log.Infof("Queue: %d", queue)
if queue < 1 {
break
}
}
}
func loadStartingOAS(file string, label string) {
func loadStartingOAS(file string, label string, specs *sync.Map) {
fd, err := os.Open(file)
if err != nil {
panic(err)
@@ -222,12 +221,14 @@ func loadStartingOAS(file string, label string) {
gen := NewGen(label)
gen.StartFromSpec(doc)
GetDefaultOasGeneratorInstance().GetServiceSpecs().Store(label, gen)
specs.Store(label, gen)
}
func TestEntriesNegative(t *testing.T) {
gen := NewDefaultOasGenerator()
gen.serviceSpecs = new(sync.Map)
files := []string{"invalid"}
_, err := feedEntries(files, false)
_, err := feedEntries(files, false, gen)
if err == nil {
t.Logf("Should have failed")
t.Fail()
@@ -235,8 +236,10 @@ func TestEntriesNegative(t *testing.T) {
}
func TestEntriesPositive(t *testing.T) {
gen := NewDefaultOasGenerator()
gen.serviceSpecs = new(sync.Map)
files := []string{"test_artifacts/params.har"}
_, err := feedEntries(files, false)
_, err := feedEntries(files, false, gen)
if err != nil {
t.Logf("Failed")
t.Fail()
@@ -275,3 +278,17 @@ func TestLoadValid3_1(t *testing.T) {
t.FailNow()
}
}
type FakeConn struct {
sendBuffer *bytes.Buffer
receiveBuffer *bytes.Buffer
}
func (f FakeConn) Read(p []byte) (int, error) { return f.sendBuffer.Read(p) }
func (f FakeConn) Write(p []byte) (int, error) { return f.receiveBuffer.Write(p) }
func (FakeConn) Close() error { return nil }
func (FakeConn) LocalAddr() net.Addr { return nil }
func (FakeConn) RemoteAddr() net.Addr { return nil }
func (FakeConn) SetDeadline(t time.Time) error { return nil }
func (FakeConn) SetReadDeadline(t time.Time) error { return nil }
func (FakeConn) SetWriteDeadline(t time.Time) error { return nil }

View File

@@ -21,9 +21,11 @@
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null
"example": null,
"x-sample-entry": "000000000000000000000004"
}
}
},
"x-sample-entry": "000000000000000000000004"
}
},
"x-counters-per-source": {
@@ -45,7 +47,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750580.04,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000004"
}
},
"/appears-twice": {
@@ -58,9 +60,11 @@
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null
"example": null,
"x-sample-entry": "000000000000000000000006"
}
}
},
"x-sample-entry": "000000000000000000000006"
}
},
"x-counters-per-source": {
@@ -82,7 +86,7 @@
"sumDuration": 1
},
"x-last-seen-ts": 1567750581.74,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000006"
}
},
"/body-optional": {
@@ -94,8 +98,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000012"
}
},
"x-sample-entry": "000000000000000000000012"
}
},
"x-counters-per-source": {
@@ -117,14 +124,16 @@
"sumDuration": 0.01
},
"x-last-seen-ts": 1567750581.75,
"x-sample-entry": 0,
"x-sample-entry": "000000000000000000000012",
"requestBody": {
"description": "Generic request body",
"content": {
"application/json": {
"example": "{\"key\", \"val\"}"
"example": "{\"key\", \"val\"}",
"x-sample-entry": "000000000000000000000011"
}
}
},
"x-sample-entry": "000000000000000000000012"
}
}
},
@@ -137,8 +146,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000013"
}
},
"x-sample-entry": "000000000000000000000013"
}
},
"x-counters-per-source": {
@@ -160,15 +172,17 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750581.75,
"x-sample-entry": 0,
"x-sample-entry": "000000000000000000000013",
"requestBody": {
"description": "Generic request body",
"content": {
"": {
"example": "body exists"
"example": "body exists",
"x-sample-entry": "000000000000000000000013"
}
},
"required": true
"required": true,
"x-sample-entry": "000000000000000000000013"
}
}
},
@@ -182,9 +196,11 @@
"description": "Successful call with status 200",
"content": {
"": {
"example": {}
"example": {},
"x-sample-entry": "000000000000000000000009"
}
}
},
"x-sample-entry": "000000000000000000000009"
}
},
"x-counters-per-source": {
@@ -206,7 +222,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.74,
"x-sample-entry": 0,
"x-sample-entry": "000000000000000000000009",
"requestBody": {
"description": "Generic request body",
"content": {
@@ -233,10 +249,12 @@
}
}
},
"example": "--BOUNDARY\r\nContent-Disposition: form-data; name=\"file\"; filename=\"metadata.json\"\r\nContent-Type: application/json\r\n\r\n{\"functions\": 123}\r\n--BOUNDARY\r\nContent-Disposition: form-data; name=\"path\"\r\n\r\n/content/components\r\n--BOUNDARY--\r\n"
"example": "--BOUNDARY\r\nContent-Disposition: form-data; name=\"file\"; filename=\"metadata.json\"\r\nContent-Type: application/json\r\n\r\n{\"functions\": 123}\r\n--BOUNDARY\r\nContent-Disposition: form-data; name=\"path\"\r\n\r\n/content/components\r\n--BOUNDARY--\r\n",
"x-sample-entry": "000000000000000000000009"
}
},
"required": true
"required": true,
"x-sample-entry": "000000000000000000000009"
}
}
},
@@ -249,8 +267,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000008"
}
},
"x-sample-entry": "000000000000000000000008"
}
},
"x-counters-per-source": {
@@ -272,7 +293,7 @@
"sumDuration": 1
},
"x-last-seen-ts": 1567750581.74,
"x-sample-entry": 0,
"x-sample-entry": "000000000000000000000008",
"requestBody": {
"description": "Generic request body",
"content": {
@@ -312,10 +333,12 @@
}
}
},
"example": "agent-id=ade\u0026callback-url=\u0026token=sometoken"
"example": "agent-id=ade\u0026callback-url=\u0026token=sometoken",
"x-sample-entry": "000000000000000000000008"
}
},
"required": true
"required": true,
"x-sample-entry": "000000000000000000000008"
}
}
},
@@ -331,8 +354,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000014"
}
},
"x-sample-entry": "000000000000000000000014"
}
},
"x-counters-per-source": {
@@ -354,7 +380,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750582,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000014"
},
"parameters": [
{
@@ -369,7 +395,8 @@
"example #0": {
"value": "234324"
}
}
},
"x-sample-entry": "000000000000000000000014"
}
]
},
@@ -385,8 +412,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000018"
}
},
"x-sample-entry": "000000000000000000000018"
}
},
"x-counters-per-source": {
@@ -408,7 +438,7 @@
"sumDuration": 9.53e-7
},
"x-last-seen-ts": 1567750582.00,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000018"
},
"parameters": [
{
@@ -436,7 +466,8 @@
"example #4": {
"value": "prefix-gibberish-afterwards"
}
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
@@ -452,8 +483,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000015"
}
},
"x-sample-entry": "000000000000000000000015"
}
},
"x-counters-per-source": {
@@ -475,7 +509,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.00,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000015"
},
"parameters": [
{
@@ -503,7 +537,8 @@
"example #4": {
"value": "prefix-gibberish-afterwards"
}
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
@@ -519,8 +554,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000016"
}
},
"x-sample-entry": "000000000000000000000016"
}
},
"x-counters-per-source": {
@@ -542,7 +580,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.00,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000016"
},
"parameters": [
{
@@ -570,7 +608,8 @@
"example #4": {
"value": "prefix-gibberish-afterwards"
}
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
@@ -586,8 +625,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"": {}
}
"": {
"x-sample-entry": "000000000000000000000019"
}
},
"x-sample-entry": "000000000000000000000019"
}
},
"x-counters-per-source": {
@@ -609,7 +651,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750582.00,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000019"
},
"parameters": [
{
@@ -624,7 +666,8 @@
"example #0": {
"value": "23421"
}
}
},
"x-sample-entry": "000000000000000000000019"
},
{
"name": "parampatternId",
@@ -651,7 +694,8 @@
"example #4": {
"value": "prefix-gibberish-afterwards"
}
}
},
"x-sample-entry": "000000000000000000000019"
}
]
},
@@ -665,9 +709,11 @@
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null
"example": null,
"x-sample-entry": "000000000000000000000003"
}
}
},
"x-sample-entry": "000000000000000000000003"
}
},
"x-counters-per-source": {
@@ -689,7 +735,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750579.74,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000003"
},
"parameters": [
{
@@ -707,7 +753,8 @@
"example #1": {
"value": "<UUID4>"
}
}
},
"x-sample-entry": "000000000000000000000003"
}
]
},
@@ -720,8 +767,11 @@
"200": {
"description": "Successful call with status 200",
"content": {
"text/html": {}
}
"text/html": {
"x-sample-entry": "000000000000000000000001"
}
},
"x-sample-entry": "000000000000000000000001"
}
},
"x-counters-per-source": {
@@ -743,7 +793,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750483.86,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000001"
},
"parameters": [
{
@@ -761,7 +811,8 @@
"example #1": {
"value": "<UUID4>"
}
}
},
"x-sample-entry": "000000000000000000000003"
}
]
},
@@ -775,9 +826,11 @@
"description": "Successful call with status 200",
"content": {
"application/json": {
"example": null
"example": null,
"x-sample-entry": "000000000000000000000002"
}
}
},
"x-sample-entry": "000000000000000000000002"
}
},
"x-counters-per-source": {
@@ -799,7 +852,7 @@
"sumDuration": 0
},
"x-last-seen-ts": 1567750578.74,
"x-sample-entry": 0
"x-sample-entry": "000000000000000000000002"
},
"parameters": [
{
@@ -817,7 +870,8 @@
"example #1": {
"value": "<UUID4>"
}
}
},
"x-sample-entry": "000000000000000000000003"
}
]
}

View File

@@ -2,12 +2,13 @@ package oas
import (
"encoding/json"
"github.com/chanced/openapi"
"github.com/up9inc/mizu/shared/logger"
"net/url"
"regexp"
"strconv"
"strings"
"github.com/chanced/openapi"
"github.com/up9inc/mizu/shared/logger"
)
type NodePath = []string
@@ -20,7 +21,7 @@ type Node struct {
children []*Node
}
func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj) (node *Node) {
func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj, sampleId string) (node *Node) {
if existingPathObj == nil {
panic("Invalid function call")
}
@@ -70,6 +71,10 @@ func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj) (node *
}
}
if node.pathParam != nil {
setSampleID(&node.pathParam.Extensions, sampleId)
}
// add example if it's a gibberish chunk
if node.pathParam != nil && !chunkIsParam {
exmp := &node.pathParam.Examples
@@ -85,7 +90,7 @@ func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj) (node *
// TODO: eat up trailing slash, in a smart way: node.pathObj!=nil && path[1]==""
if len(path) > 1 {
return node.getOrSet(path[1:], existingPathObj)
return node.getOrSet(path[1:], existingPathObj, sampleId)
} else if node.pathObj == nil {
node.pathObj = existingPathObj
}

View File

@@ -1,6 +1,7 @@
package oas
import (
"fmt"
"strings"
"testing"
@@ -20,10 +21,10 @@ func TestTree(t *testing.T) {
}
tree := new(Node)
for _, tc := range testCases {
for i, tc := range testCases {
split := strings.Split(tc.inp, "/")
pathObj := new(openapi.PathObj)
node := tree.getOrSet(split, pathObj)
node := tree.getOrSet(split, pathObj, fmt.Sprintf("%024d", i))
fillPathParams(node, pathObj)

View File

@@ -115,7 +115,7 @@ type nvParams struct {
GeneralizeName func(name string) string
}
func handleNameVals(gw nvParams, params **openapi.ParameterList, checkIgnore bool) {
func handleNameVals(gw nvParams, params **openapi.ParameterList, checkIgnore bool, sampleId string) {
visited := map[string]*openapi.ParameterObj{}
for _, pair := range gw.Pairs {
if (checkIgnore && gw.IsIgnored(pair.Name)) || pair.Name == "" {
@@ -137,6 +137,8 @@ func handleNameVals(gw nvParams, params **openapi.ParameterList, checkIgnore boo
logger.Log.Warningf("Failed to add example to a parameter: %s", err)
}
visited[nameGeneral] = param
setSampleID(&param.Extensions, sampleId)
}
// maintain "required" flag
@@ -474,3 +476,15 @@ func intersectSliceWithMap(required []string, names map[string]struct{}) []strin
}
return required
}
func setSampleID(extensions *openapi.Extensions, id string) {
if id != "" {
if *extensions == nil {
*extensions = openapi.Extensions{}
}
err := (extensions).SetExtension(SampleId, id)
if err != nil {
logger.Log.Warningf("Failed to set sample ID: %s", err)
}
}
}
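setSampleID is what produces the x-sample-entry extensions visible in the JSON expectations earlier in this diff (assuming the SampleId constant resolves to "x-sample-entry", which those fixtures suggest). A hypothetical call site:

	param := &openapi.ParameterObj{}
	setSampleID(&param.Extensions, "000000000000000000000004")
	// param.Extensions now carries the sample entry id; an empty id is a no-op,
	// so callers can pass sampleId through unconditionally.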

View File

@@ -29,7 +29,7 @@ func TestAnyJSON(t *testing.T) {
} else if tc.inp == "null" && any != nil {
t.Errorf("null has to parse as nil (but got %s)", any)
} else {
t.Logf("%s => %s", tc.inp, any)
t.Logf("%s => %v", tc.inp, any)
}
}
}

View File

@@ -4,19 +4,13 @@ import (
"encoding/json"
"fmt"
"os"
"time"
"github.com/patrickmn/go-cache"
"github.com/up9inc/mizu/agent/pkg/models"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/tap"
)
const tlsLinkRetainmentTime = time.Minute * 15
var (
authStatus *models.AuthStatus
RecentTLSLinks = cache.New(tlsLinkRetainmentTime, tlsLinkRetainmentTime)
authStatus *models.AuthStatus
)
func GetAuthStatus() (*models.AuthStatus, error) {
@@ -51,16 +45,3 @@ func GetAuthStatus() (*models.AuthStatus, error) {
return authStatus, nil
}
func GetAllRecentTLSAddresses() []string {
recentTLSLinks := make([]string, 0)
for _, outboundLinkItem := range RecentTLSLinks.Items() {
outboundLink, castOk := outboundLinkItem.Object.(*tap.OutboundLink)
if castOk {
recentTLSLinks = append(recentTLSLinks, outboundLink.DstIP)
}
}
return recentTLSLinks
}

View File

@@ -14,9 +14,10 @@ import (
const FilePath = shared.DataDirPath + "tapped-pods.json"
var (
lock = &sync.Mutex{}
syncOnce sync.Once
tappedPods []*shared.PodInfo
lock = &sync.Mutex{}
syncOnce sync.Once
tappedPods []*shared.PodInfo
nodeHostToTappedPodsMap shared.NodeToPodsMap
)
func Get() []*shared.PodInfo {
@@ -55,3 +56,14 @@ func GetTappedPodsStatus() []shared.TappedPodStatus {
return tappedPodsStatus
}
func SetNodeToTappedPodMap(nodeToTappedPodsMap shared.NodeToPodsMap) {
summary := nodeToTappedPodsMap.Summary()
logger.Log.Infof("Setting node to tapped pods map to %v", summary)
nodeHostToTappedPodsMap = nodeToTappedPodsMap
}
func GetNodeToTappedPodMap() shared.NodeToPodsMap {
return nodeHostToTappedPodsMap
}

View File

@@ -21,7 +21,5 @@ func StatusRoutes(ginApp *gin.Engine) {
routeGroup.GET("/general", controllers.GetGeneralStats) // get general stats about entries in DB
routeGroup.GET("/recentTLSLinks", controllers.GetRecentTLSLinks)
routeGroup.GET("/resolving", controllers.GetCurrentResolvingInformation)
}

View File

@@ -18,10 +18,11 @@ type ServiceMapResponse struct {
}
type ServiceMapNode struct {
Id int `json:"id"`
Name string `json:"name"`
Entry *tapApi.TCP `json:"entry"`
Count int `json:"count"`
Id int `json:"id"`
Name string `json:"name"`
Entry *tapApi.TCP `json:"entry"`
Count int `json:"count"`
Resolved bool `json:"resolved"`
}
type ServiceMapEdge struct {

View File

@@ -1,6 +1,7 @@
package servicemap
import (
"github.com/jinzhu/copier"
"sync"
"github.com/up9inc/mizu/shared/logger"
@@ -183,8 +184,12 @@ func (s *defaultServiceMap) NewTCPEntry(src *tapApi.TCP, dst *tapApi.TCP, p *tap
if len(src.Name) == 0 {
srcEntry = &entryData{
key: key(src.IP),
entry: src,
entry: &tapApi.TCP{},
}
if err := copier.Copy(srcEntry.entry, src); err != nil {
logger.Log.Errorf("Error while copying src entry into src entry data")
}
srcEntry.entry.Name = UnresolvedNodeName
} else {
srcEntry = &entryData{
@@ -196,8 +201,12 @@ func (s *defaultServiceMap) NewTCPEntry(src *tapApi.TCP, dst *tapApi.TCP, p *tap
if len(dst.Name) == 0 {
dstEntry = &entryData{
key: key(dst.IP),
entry: dst,
entry: &tapApi.TCP{},
}
if err := copier.Copy(dstEntry.entry, dst); err != nil {
logger.Log.Errorf("Error while copying dst entry into dst entry data")
}
dstEntry.entry.Name = UnresolvedNodeName
} else {
dstEntry = &entryData{
@@ -224,35 +233,41 @@ func (s *defaultServiceMap) GetStatus() ServiceMapStatus {
}
func (s *defaultServiceMap) GetNodes() []ServiceMapNode {
var nodes []ServiceMapNode
nodes := []ServiceMapNode{}
for i, n := range s.graph.Nodes {
nodes = append(nodes, ServiceMapNode{
Id: n.id,
Name: string(i),
Entry: n.entry,
Count: n.count,
Id: n.id,
Name: string(i),
Resolved: n.entry.Name != UnresolvedNodeName,
Entry: n.entry,
Count: n.count,
})
}
return nodes
}
func (s *defaultServiceMap) GetEdges() []ServiceMapEdge {
var edges []ServiceMapEdge
edges := []ServiceMapEdge{}
for u, m := range s.graph.Edges {
for v := range m {
for _, p := range s.graph.Edges[u][v].data {
edges = append(edges, ServiceMapEdge{
Source: ServiceMapNode{
Id: s.graph.Nodes[u].id,
Name: string(u),
Entry: s.graph.Nodes[u].entry,
Count: s.graph.Nodes[u].count,
Id: s.graph.Nodes[u].id,
Name: string(u),
Entry: s.graph.Nodes[u].entry,
Resolved: s.graph.Nodes[u].entry.Name != UnresolvedNodeName,
Count: s.graph.Nodes[u].count,
},
Destination: ServiceMapNode{
Id: s.graph.Nodes[v].id,
Name: string(v),
Entry: s.graph.Nodes[v].entry,
Count: s.graph.Nodes[v].count,
Id: s.graph.Nodes[v].id,
Name: string(v),
Entry: s.graph.Nodes[v].entry,
Resolved: s.graph.Nodes[v].entry.Name != UnresolvedNodeName,
Count: s.graph.Nodes[v].count,
},
Count: p.count,
Protocol: p.protocol,
@@ -260,6 +275,7 @@ func (s *defaultServiceMap) GetEdges() []ServiceMapEdge {
}
}
}
return edges
}
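The switch from var nodes []ServiceMapNode to nodes := []ServiceMapNode{} (and the matching assertions in the test below) comes down to JSON encoding: a nil slice marshals to null while an empty slice marshals to [], presumably so the API always returns an array. A small illustration using encoding/json:

	var nilNodes []ServiceMapNode
	emptyNodes := []ServiceMapNode{}
	a, _ := json.Marshal(nilNodes)   // "null"
	b, _ := json.Marshal(emptyNodes) // "[]"
	fmt.Println(string(a), string(b))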

View File

@@ -403,10 +403,10 @@ func (s *ServiceMapEnabledSuite) TestServiceMap() {
assert.Equal(0, status.EdgeCount)
// Nodes after reset
assert.Equal([]ServiceMapNode(nil), nodes)
assert.Equal([]ServiceMapNode{}, nodes)
// Edges after reset
assert.Equal([]ServiceMapEdge(nil), edges)
assert.Equal([]ServiceMapEdge{}, edges)
}
func TestServiceMapSuite(t *testing.T) {

View File

@@ -211,11 +211,15 @@ func syncEntriesImpl(token string, model string, envPrefix string, uploadInterva
logger.Log.Infof("Getting entries from the database")
BasenineReconnect:
var connection *basenine.Connection
var err error
connection, err = basenine.NewConnection(shared.BasenineHost, shared.BaseninePort)
if err != nil {
panic(err)
logger.Log.Errorf("Can't establish a new connection to Basenine server: %v", err)
connection.Close()
time.Sleep(shared.BasenineReconnectInterval * time.Second)
goto BasenineReconnect
}
data := make(chan []byte)
@@ -323,7 +327,12 @@ func syncEntriesImpl(token string, model string, envPrefix string, uploadInterva
go handleMetaChannel(&wg, connection, meta)
wg.Add(2)
connection.Query(query, data, meta)
if err = connection.Query(query, data, meta); err != nil {
logger.Log.Errorf("Query mode call failed: %v", err)
connection.Close()
time.Sleep(shared.BasenineReconnectInterval * time.Second)
goto BasenineReconnect
}
wg.Wait()
}

View File

@@ -36,16 +36,16 @@ build-base: ## Build mizu CLI binary (select platform via GOOS / GOARCH env vari
(cd bin && shasum -a 256 mizu_${SUFFIX} > mizu_${SUFFIX}.sha256)
build-all: ## Build for all supported platforms.
@echo "Compiling for every OS and Platform"
@mkdir -p bin && sed s/_VER_/$(VER)/g README.md.TEMPLATE > bin/README.md
@$(MAKE) build GOOS=linux GOARCH=amd64
@$(MAKE) build GOOS=linux GOARCH=arm64
@$(MAKE) build GOOS=darwin GOARCH=amd64
@$(MAKE) build GOOS=darwin GOARCH=arm64
@$(MAKE) build GOOS=windows GOARCH=amd64
@mv ./bin/mizu_windows_amd64 ./bin/mizu.exe
@echo "---------"
@find ./bin -ls
echo "Compiling for every OS and Platform" && \
mkdir -p bin && sed s/_VER_/$(VER)/g README.md.TEMPLATE > bin/README.md && \
$(MAKE) build GOOS=linux GOARCH=amd64 && \
$(MAKE) build GOOS=linux GOARCH=arm64 && \
$(MAKE) build GOOS=darwin GOARCH=amd64 && \
$(MAKE) build GOOS=darwin GOARCH=arm64 && \
$(MAKE) build GOOS=windows GOARCH=amd64 && \
mv ./bin/mizu_windows_amd64 ./bin/mizu.exe && \
echo "---------" && \
find ./bin -ls
clean: ## Clean all build artifacts.
go clean

View File

@@ -10,8 +10,6 @@ import (
"net/url"
"time"
"github.com/up9inc/mizu/shared/kubernetes"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
@@ -83,15 +81,13 @@ func (provider *Provider) ReportTapperStatus(tapperStatus shared.TapperStatus) e
func (provider *Provider) ReportTappedPods(pods []core.Pod) error {
tappedPodsUrl := fmt.Sprintf("%s/status/tappedPods", provider.url)
podInfos := kubernetes.GetPodInfosForPods(pods)
if jsonValue, err := json.Marshal(podInfos); err != nil {
if jsonValue, err := json.Marshal(pods); err != nil {
return fmt.Errorf("failed Marshal the tapped pods %w", err)
} else {
if _, err := utils.Post(tappedPodsUrl, "application/json", bytes.NewBuffer(jsonValue), provider.client); err != nil {
return fmt.Errorf("failed sending to API server the tapped pods %w", err)
} else {
logger.Log.Debugf("Reported to server API about %d taped pods successfully", len(podInfos))
logger.Log.Debugf("Reported to server API about %d taped pods successfully", len(pods))
return nil
}
}

View File

@@ -111,9 +111,9 @@ func checkRulesPermissions(ctx context.Context, kubernetesProvider *kubernetes.P
func checkPermissionExist(group string, resource string, verb string, namespace string, exist bool, err error) bool {
var groupAndNamespace string
if group != "" && namespace != "" {
groupAndNamespace = fmt.Sprintf("in group '%v' and namespace '%v'", group, namespace)
groupAndNamespace = fmt.Sprintf("in api group '%v' and namespace '%v'", group, namespace)
} else if group != "" {
groupAndNamespace = fmt.Sprintf("in group '%v'", group)
groupAndNamespace = fmt.Sprintf("in api group '%v'", group)
} else if namespace != "" {
groupAndNamespace = fmt.Sprintf("in namespace '%v'", namespace)
}

View File

@@ -27,13 +27,21 @@ func runMizuCheck() {
checkPassed = check.KubernetesVersion(kubernetesVersion)
}
if config.Config.Check.PreTap {
if checkPassed {
checkPassed = check.TapKubernetesPermissions(ctx, embedFS, kubernetesProvider)
if config.Config.Check.PreTap || config.Config.Check.PreInstall || config.Config.Check.ImagePull {
if config.Config.Check.PreTap {
if checkPassed {
checkPassed = check.TapKubernetesPermissions(ctx, embedFS, kubernetesProvider)
}
} else if config.Config.Check.PreInstall {
if checkPassed {
checkPassed = check.InstallKubernetesPermissions(ctx, kubernetesProvider)
}
}
} else if config.Config.Check.PreInstall {
if checkPassed {
checkPassed = check.InstallKubernetesPermissions(ctx, kubernetesProvider)
if config.Config.Check.ImagePull {
if checkPassed {
checkPassed = check.ImagePullInCluster(ctx, kubernetesProvider)
}
}
} else {
if checkPassed {
@@ -45,12 +53,6 @@ func runMizuCheck() {
}
}
if config.Config.Check.ImagePull {
if checkPassed {
checkPassed = check.ImagePullInCluster(ctx, kubernetesProvider)
}
}
if checkPassed {
logger.Log.Infof("\nStatus check results are %v", fmt.Sprintf(uiUtils.Green, "√"))
} else {

View File

@@ -40,7 +40,7 @@ type ConfigStruct struct {
HeadlessMode bool `yaml:"headless" default:"false"`
LogLevelStr string `yaml:"log-level,omitempty" default:"INFO" readonly:""`
ServiceMap bool `yaml:"service-map" default:"true"`
OAS bool `yaml:"oas,omitempty" default:"false" readonly:""`
OAS bool `yaml:"oas" default:"true"`
Elastic shared.ElasticConfig `yaml:"elastic"`
}

View File

@@ -11,7 +11,6 @@ import (
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/shared"
basenine "github.com/up9inc/basenine/server/lib"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/shared/units"
)
@@ -79,10 +78,6 @@ func (config *TapConfig) GetInsertionFilter() string {
}
}
}
_, err := basenine.Parse(insertionFilter)
if err != nil {
logger.Log.Warningf(uiUtils.Warning, fmt.Sprintf("Insertion filter syntax error: %v", err))
}
return insertionFilter
}

View File

@@ -11,7 +11,6 @@ require (
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
github.com/spf13/cobra v1.3.0
github.com/spf13/pflag v1.0.5
github.com/up9inc/basenine/server/lib v0.0.0-20220317230530-8472d80307f6
github.com/up9inc/mizu/shared v0.0.0
github.com/up9inc/mizu/tap/api v0.0.0
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8
@@ -33,11 +32,8 @@ require (
github.com/MakeNowJust/heredoc v1.0.0 // indirect
github.com/PuerkitoBio/purell v1.1.1 // indirect
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
github.com/alecthomas/participle/v2 v2.0.0-alpha7 // indirect
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5 // indirect
github.com/clbanning/mxj/v2 v2.5.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/docker/go-units v0.4.0 // indirect
github.com/evanphx/json-patch v5.6.0+incompatible // indirect
github.com/exponent-io/jsonpath v0.0.0-20210407135951-1de76d718b3f // indirect
@@ -72,7 +68,6 @@ require (
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect
github.com/ohler55/ojg v1.12.13 // indirect
github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect

View File

@@ -83,10 +83,6 @@ github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tN
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/alecthomas/participle/v2 v2.0.0-alpha7 h1:cK4vjj0VSgb3lN1nuKA5F7dw+1s1pWBe5bx7nNCnN+c=
github.com/alecthomas/participle/v2 v2.0.0-alpha7/go.mod h1:NumScqsC42o9x+dGj8/YqsIfhrIQjFEOFovxotbBirA=
github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1 h1:GDQdwm/gAcJcLAKQQZGOJ4knlw+7rfEQQcmwTbt4p5E=
github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -120,8 +116,6 @@ github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5P
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/clbanning/mxj/v2 v2.5.5 h1:oT81vUeEiQQ/DcHbzSytRngP6Ky9O+L+0Bw0zSJag9E=
github.com/clbanning/mxj/v2 v2.5.5/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -155,8 +149,6 @@ github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMS
github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
@@ -487,8 +479,6 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWb
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/ohler55/ojg v1.12.13 h1:FvfVpYzLgMraLcg3rrXiRXaihOP6fnzQNEU9YyZ/AmM=
github.com/ohler55/ojg v1.12.13/go.mod h1:LBbIVRAgoFbYBXQhRhuEpaJIqq+goSO63/FQ+nyJU88=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
@@ -600,8 +590,6 @@ github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/up9inc/basenine/server/lib v0.0.0-20220317230530-8472d80307f6 h1:+RZTD+HdfIW2SMbc65yWkruTY+g5/1Av074m62A74ls=
github.com/up9inc/basenine/server/lib v0.0.0-20220317230530-8472d80307f6/go.mod h1:ZIkxWiJm65jYQIso9k+OZKhR7gQ1we2jNyE2kQX9IQI=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/xlab/treeprint v1.1.0 h1:G/1DjNkPpfZCFt9CSh6b5/nY4VimlbHF3Rh4obvtzDk=

View File

@@ -1,18 +1,20 @@
FROM dockcross/linux-arm64-musl:latest AS builder-from-amd64-to-arm64v8
# Install Go
RUN curl https://go.dev/dl/go1.17.6.linux-amd64.tar.gz -Lo ./go.linux-amd64.tar.gz
RUN curl https://go.dev/dl/go1.17.6.linux-amd64.tar.gz.asc -Lo ./go.linux-amd64.tar.gz.asc
RUN curl https://dl.google.com/dl/linux/linux_signing_key.pub -Lo linux_signing_key.pub
RUN gpg --import linux_signing_key.pub && gpg --verify ./go.linux-amd64.tar.gz.asc ./go.linux-amd64.tar.gz
RUN rm -rf /usr/local/go && tar -C /usr/local -xzf go.linux-amd64.tar.gz
RUN curl https://go.dev/dl/go1.17.6.linux-amd64.tar.gz -Lo ./go.linux-amd64.tar.gz \
&& curl https://go.dev/dl/go1.17.6.linux-amd64.tar.gz.asc -Lo ./go.linux-amd64.tar.gz.asc \
&& curl https://dl.google.com/dl/linux/linux_signing_key.pub -Lo linux_signing_key.pub \
&& gpg --import linux_signing_key.pub && gpg --verify ./go.linux-amd64.tar.gz.asc ./go.linux-amd64.tar.gz \
&& rm -rf /usr/local/go && tar -C /usr/local -xzf go.linux-amd64.tar.gz
ENV PATH "$PATH:/usr/local/go/bin"
# Compile libpcap from source
RUN curl https://www.tcpdump.org/release/libpcap-1.10.1.tar.gz -Lo ./libpcap.tar.gz
RUN curl https://www.tcpdump.org/release/libpcap-1.10.1.tar.gz.sig -Lo ./libpcap.tar.gz.sig
RUN curl https://www.tcpdump.org/release/signing-key.asc -Lo ./signing-key.asc
RUN gpg --import signing-key.asc && gpg --verify libpcap.tar.gz.sig libpcap.tar.gz
RUN tar -xzf libpcap.tar.gz && mv ./libpcap-* ./libpcap
RUN cd ./libpcap && ./configure --host=arm && make
RUN cp /work/libpcap/libpcap.a /usr/xcc/aarch64-linux-musl-cross/lib/gcc/aarch64-linux-musl/*/
RUN curl https://www.tcpdump.org/release/libpcap-1.10.1.tar.gz -Lo ./libpcap.tar.gz \
&& curl https://www.tcpdump.org/release/libpcap-1.10.1.tar.gz.sig -Lo ./libpcap.tar.gz.sig \
&& curl https://www.tcpdump.org/release/signing-key.asc -Lo ./signing-key.asc \
&& gpg --import signing-key.asc && gpg --verify libpcap.tar.gz.sig libpcap.tar.gz \
&& tar -xzf libpcap.tar.gz && mv ./libpcap-* ./libpcap
WORKDIR /work/libpcap
RUN ./configure --host=arm && make \
&& cp /work/libpcap/libpcap.a /usr/xcc/aarch64-linux-musl-cross/lib/gcc/aarch64-linux-musl/*/

View File

@@ -1,20 +1,20 @@
package shared
const (
MizuFilteringOptionsEnvVar = "SENSITIVE_DATA_FILTERING_OPTIONS"
SyncEntriesConfigEnvVar = "SYNC_ENTRIES_CONFIG"
HostModeEnvVar = "HOST_MODE"
NodeNameEnvVar = "NODE_NAME"
TappedAddressesPerNodeDictEnvVar = "TAPPED_ADDRESSES_PER_HOST"
ConfigDirPath = "/app/config/"
DataDirPath = "/app/data/"
ValidationRulesFileName = "validation-rules.yaml"
ContractFileName = "contract-oas.yaml"
ConfigFileName = "mizu-config.json"
GoGCEnvVar = "GOGC"
DefaultApiServerPort = 8899
LogLevelEnvVar = "LOG_LEVEL"
MizuAgentImageRepo = "docker.io/up9inc/mizu"
BasenineHost = "127.0.0.1"
BaseninePort = "9099"
MizuFilteringOptionsEnvVar = "SENSITIVE_DATA_FILTERING_OPTIONS"
SyncEntriesConfigEnvVar = "SYNC_ENTRIES_CONFIG"
HostModeEnvVar = "HOST_MODE"
NodeNameEnvVar = "NODE_NAME"
ConfigDirPath = "/app/config/"
DataDirPath = "/app/data/"
ValidationRulesFileName = "validation-rules.yaml"
ContractFileName = "contract-oas.yaml"
ConfigFileName = "mizu-config.json"
GoGCEnvVar = "GOGC"
DefaultApiServerPort = 8899
LogLevelEnvVar = "LOG_LEVEL"
MizuAgentImageRepo = "docker.io/up9inc/mizu"
BasenineHost = "127.0.0.1"
BaseninePort = "9099"
BasenineReconnectInterval = 3
)

View File

@@ -31,7 +31,8 @@ type MizuTapperSyncer struct {
TapPodChangesOut chan TappedPodChangeEvent
TapperStatusChangedOut chan shared.TapperStatus
ErrorOut chan K8sTapManagerError
nodeToTappedPodMap map[string][]core.Pod
nodeToTappedPodMap shared.NodeToPodsMap
tappedNodes []string
}
type TapperSyncerConfig struct {
@@ -177,7 +178,7 @@ func (tapperSyncer *MizuTapperSyncer) watchPodsForTapping() {
podWatchHelper := NewPodWatchHelper(tapperSyncer.kubernetesProvider, &tapperSyncer.config.PodFilterRegex)
eventChan, errorChan := FilteredWatch(tapperSyncer.context, podWatchHelper, tapperSyncer.config.TargetNamespaces, podWatchHelper)
restartTappers := func() {
handleChangeInPods := func() {
err, changeFound := tapperSyncer.updateCurrentlyTappedPods()
if err != nil {
tapperSyncer.ErrorOut <- K8sTapManagerError{
@@ -197,7 +198,7 @@ func (tapperSyncer *MizuTapperSyncer) watchPodsForTapping() {
}
}
}
restartTappersDebouncer := debounce.NewDebouncer(updateTappersDelay, restartTappers)
restartTappersDebouncer := debounce.NewDebouncer(updateTappersDelay, handleChangeInPods)
for {
select {
@@ -295,6 +296,20 @@ func (tapperSyncer *MizuTapperSyncer) updateCurrentlyTappedPods() (err error, ch
}
func (tapperSyncer *MizuTapperSyncer) updateMizuTappers() error {
nodesToTap := make([]string, len(tapperSyncer.nodeToTappedPodMap))
i := 0
for node := range tapperSyncer.nodeToTappedPodMap {
nodesToTap[i] = node
i++
}
if shared.EqualStringSlices(nodesToTap, tapperSyncer.tappedNodes) {
logger.Log.Debug("Skipping apply, DaemonSet is up to date")
return nil
}
logger.Log.Debugf("Updating DaemonSet to run on nodes: %v", nodesToTap)
if len(tapperSyncer.nodeToTappedPodMap) > 0 {
var serviceAccountName string
if tapperSyncer.config.MizuServiceAccountExists {
@@ -303,14 +318,19 @@ func (tapperSyncer *MizuTapperSyncer) updateMizuTappers() error {
serviceAccountName = ""
}
nodeNames := make([]string, 0, len(tapperSyncer.nodeToTappedPodMap))
for nodeName := range tapperSyncer.nodeToTappedPodMap {
nodeNames = append(nodeNames, nodeName)
}
if err := tapperSyncer.kubernetesProvider.ApplyMizuTapperDaemonSet(
tapperSyncer.context,
tapperSyncer.config.MizuResourcesNamespace,
TapperDaemonSetName,
tapperSyncer.config.AgentImage,
TapperPodName,
fmt.Sprintf("%s.%s.svc.cluster.local", ApiServerPodName, tapperSyncer.config.MizuResourcesNamespace),
tapperSyncer.nodeToTappedPodMap,
fmt.Sprintf("%s.%s.svc", ApiServerPodName, tapperSyncer.config.MizuResourcesNamespace),
nodeNames,
serviceAccountName,
tapperSyncer.config.TapperResources,
tapperSyncer.config.ImagePullPolicy,
@@ -335,5 +355,7 @@ func (tapperSyncer *MizuTapperSyncer) updateMizuTappers() error {
logger.Log.Debugf("Successfully reset tapper daemon set")
}
tapperSyncer.tappedNodes = nodesToTap
return nil
}

View File

@@ -708,18 +708,13 @@ func (provider *Provider) CreateConfigMap(ctx context.Context, namespace string,
return nil
}
func (provider *Provider) ApplyMizuTapperDaemonSet(ctx context.Context, namespace string, daemonSetName string, podImage string, tapperPodName string, apiServerPodIp string, nodeToTappedPodMap map[string][]core.Pod, serviceAccountName string, resources shared.Resources, imagePullPolicy core.PullPolicy, mizuApiFilteringOptions api.TrafficFilteringOptions, logLevel logging.Level, serviceMesh bool, tls bool) error {
logger.Log.Debugf("Applying %d tapper daemon sets, ns: %s, daemonSetName: %s, podImage: %s, tapperPodName: %s", len(nodeToTappedPodMap), namespace, daemonSetName, podImage, tapperPodName)
func (provider *Provider) ApplyMizuTapperDaemonSet(ctx context.Context, namespace string, daemonSetName string, podImage string, tapperPodName string, apiServerPodIp string, nodeNames []string, serviceAccountName string, resources shared.Resources, imagePullPolicy core.PullPolicy, mizuApiFilteringOptions api.TrafficFilteringOptions, logLevel logging.Level, serviceMesh bool, tls bool) error {
logger.Log.Debugf("Applying %d tapper daemon sets, ns: %s, daemonSetName: %s, podImage: %s, tapperPodName: %s", len(nodeNames), namespace, daemonSetName, podImage, tapperPodName)
if len(nodeToTappedPodMap) == 0 {
if len(nodeNames) == 0 {
return fmt.Errorf("daemon set %s must tap at least 1 pod", daemonSetName)
}
nodeToTappedPodMapJsonStr, err := json.Marshal(nodeToTappedPodMap)
if err != nil {
return err
}
mizuApiFilteringOptionsJsonStr, err := json.Marshal(mizuApiFilteringOptions)
if err != nil {
return err
@@ -773,7 +768,6 @@ func (provider *Provider) ApplyMizuTapperDaemonSet(ctx context.Context, namespac
agentContainer.WithEnv(
applyconfcore.EnvVar().WithName(shared.LogLevelEnvVar).WithValue(logLevel.String()),
applyconfcore.EnvVar().WithName(shared.HostModeEnvVar).WithValue("1"),
applyconfcore.EnvVar().WithName(shared.TappedAddressesPerNodeDictEnvVar).WithValue(string(nodeToTappedPodMapJsonStr)),
applyconfcore.EnvVar().WithName(shared.GoGCEnvVar).WithValue("12800"),
applyconfcore.EnvVar().WithName(shared.MizuFilteringOptionsEnvVar).WithValue(string(mizuApiFilteringOptionsJsonStr)),
)
@@ -811,18 +805,20 @@ func (provider *Provider) ApplyMizuTapperDaemonSet(ctx context.Context, namespac
agentResources := applyconfcore.ResourceRequirements().WithRequests(agentResourceRequests).WithLimits(agentResourceLimits)
agentContainer.WithResources(agentResources)
nodeNames := make([]string, 0, len(nodeToTappedPodMap))
for nodeName := range nodeToTappedPodMap {
nodeNames = append(nodeNames, nodeName)
matchFields := make([]*applyconfcore.NodeSelectorTermApplyConfiguration, 0)
for _, nodeName := range nodeNames {
nodeSelectorRequirement := applyconfcore.NodeSelectorRequirement()
nodeSelectorRequirement.WithKey("metadata.name")
nodeSelectorRequirement.WithOperator(core.NodeSelectorOpIn)
nodeSelectorRequirement.WithValues(nodeName)
nodeSelectorTerm := applyconfcore.NodeSelectorTerm()
nodeSelectorTerm.WithMatchFields(nodeSelectorRequirement)
matchFields = append(matchFields, nodeSelectorTerm)
}
nodeSelectorRequirement := applyconfcore.NodeSelectorRequirement()
nodeSelectorRequirement.WithKey("kubernetes.io/hostname")
nodeSelectorRequirement.WithOperator(core.NodeSelectorOpIn)
nodeSelectorRequirement.WithValues(nodeNames...)
nodeSelectorTerm := applyconfcore.NodeSelectorTerm()
nodeSelectorTerm.WithMatchExpressions(nodeSelectorRequirement)
nodeSelector := applyconfcore.NodeSelector()
nodeSelector.WithNodeSelectorTerms(nodeSelectorTerm)
nodeSelector.WithNodeSelectorTerms(matchFields...)
nodeAffinity := applyconfcore.NodeAffinity()
nodeAffinity.WithRequiredDuringSchedulingIgnoredDuringExecution(nodeSelector)
affinity := applyconfcore.Affinity()

View File

@@ -5,12 +5,11 @@ import (
"github.com/up9inc/mizu/shared"
core "k8s.io/api/core/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func GetNodeHostToTappedPodsMap(tappedPods []core.Pod) map[string][]core.Pod {
nodeToTappedPodMap := make(map[string][]core.Pod)
func GetNodeHostToTappedPodsMap(tappedPods []core.Pod) shared.NodeToPodsMap {
nodeToTappedPodMap := make(shared.NodeToPodsMap)
for _, pod := range tappedPods {
minimizedPod := getMinimizedPod(pod)
@@ -27,20 +26,21 @@ func GetNodeHostToTappedPodsMap(tappedPods []core.Pod) map[string][]core.Pod {
func getMinimizedPod(fullPod core.Pod) core.Pod {
return core.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: fullPod.Name,
Name: fullPod.Name,
Namespace: fullPod.Namespace,
},
Status: v1.PodStatus{
Status: core.PodStatus{
PodIP: fullPod.Status.PodIP,
ContainerStatuses: getMinimizedContainerStatuses(fullPod),
},
}
}
func getMinimizedContainerStatuses(fullPod core.Pod) []v1.ContainerStatus {
result := make([]v1.ContainerStatus, len(fullPod.Status.ContainerStatuses))
func getMinimizedContainerStatuses(fullPod core.Pod) []core.ContainerStatus {
result := make([]core.ContainerStatus, len(fullPod.Status.ContainerStatuses))
for i, container := range fullPod.Status.ContainerStatuses {
result[i] = v1.ContainerStatus{
result[i] = core.ContainerStatus{
ContainerID: container.ContainerID,
}
}

View File

@@ -6,24 +6,24 @@ import (
"github.com/op/go-logging"
"github.com/up9inc/mizu/shared/logger"
v1 "k8s.io/api/core/v1"
"gopkg.in/yaml.v3"
v1 "k8s.io/api/core/v1"
)
type WebSocketMessageType string
const (
WebSocketMessageTypeEntry WebSocketMessageType = "entry"
WebSocketMessageTypeFullEntry WebSocketMessageType = "fullEntry"
WebSocketMessageTypeTappedEntry WebSocketMessageType = "tappedEntry"
WebSocketMessageTypeUpdateStatus WebSocketMessageType = "status"
WebSocketMessageTypeAnalyzeStatus WebSocketMessageType = "analyzeStatus"
WebsocketMessageTypeOutboundLink WebSocketMessageType = "outboundLink"
WebSocketMessageTypeToast WebSocketMessageType = "toast"
WebSocketMessageTypeQueryMetadata WebSocketMessageType = "queryMetadata"
WebSocketMessageTypeStartTime WebSocketMessageType = "startTime"
WebSocketMessageTypeTapConfig WebSocketMessageType = "tapConfig"
WebSocketMessageTypeEntry WebSocketMessageType = "entry"
WebSocketMessageTypeFullEntry WebSocketMessageType = "fullEntry"
WebSocketMessageTypeTappedEntry WebSocketMessageType = "tappedEntry"
WebSocketMessageTypeUpdateStatus WebSocketMessageType = "status"
WebSocketMessageTypeUpdateTappedPods WebSocketMessageType = "tappedPods"
WebSocketMessageTypeAnalyzeStatus WebSocketMessageType = "analyzeStatus"
WebSocketMessageTypeToast WebSocketMessageType = "toast"
WebSocketMessageTypeQueryMetadata WebSocketMessageType = "queryMetadata"
WebSocketMessageTypeStartTime WebSocketMessageType = "startTime"
WebSocketMessageTypeTapConfig WebSocketMessageType = "tapConfig"
)
type Resources struct {
@@ -75,11 +75,29 @@ type WebSocketStatusMessage struct {
TappingStatus []TappedPodStatus `json:"tappingStatus"`
}
type WebSocketTappedPodsMessage struct {
*WebSocketMessageMetadata
NodeToTappedPodMap NodeToPodsMap `json:"nodeToTappedPodMap"`
}
type WebSocketTapConfigMessage struct {
*WebSocketMessageMetadata
TapTargets []v1.Pod `json:"pods"`
}
type NodeToPodsMap map[string][]v1.Pod
func (np NodeToPodsMap) Summary() map[string][]string {
summary := make(map[string][]string)
for node, pods := range np {
for _, pod := range pods {
summary[node] = append(summary[node], pod.Namespace+"/"+pod.Name)
}
}
return summary
}
type TapperStatus struct {
TapperName string `json:"tapperName"`
NodeName string `json:"nodeName"`
@@ -121,6 +139,15 @@ func CreateWebSocketStatusMessage(tappedPodsStatus []TappedPodStatus) WebSocketS
}
}
func CreateWebSocketTappedPodsMessage(nodeToTappedPodMap NodeToPodsMap) WebSocketTappedPodsMessage {
return WebSocketTappedPodsMessage{
WebSocketMessageMetadata: &WebSocketMessageMetadata{
MessageType: WebSocketMessageTypeUpdateTappedPods,
},
NodeToTappedPodMap: nodeToTappedPodMap,
}
}
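A sketch of how the new tappedPods message might be produced (illustrative values; assumes imports of encoding/json, v1 "k8s.io/api/core/v1", metav1 "k8s.io/apimachinery/pkg/apis/meta/v1", and the logger used elsewhere in this diff):

	podMap := NodeToPodsMap{
		"node-a": []v1.Pod{
			{ObjectMeta: metav1.ObjectMeta{Name: "carts", Namespace: "sock-shop"}},
		},
	}
	logger.Log.Infof("Setting node to tapped pods map to %v", podMap.Summary()) // map[node-a:[sock-shop/carts]]
	msg := CreateWebSocketTappedPodsMessage(podMap)
	payload, _ := json.Marshal(msg)
	// payload embeds the message metadata and places the map under "nodeToTappedPodMap".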
func CreateWebSocketMessageTypeAnalyzeStatus(analyzeStatus AnalyzeStatus) WebSocketAnalyzeStatusMessage {
return WebSocketAnalyzeStatusMessage{
WebSocketMessageMetadata: &WebSocketMessageMetadata{

View File

@@ -32,3 +32,17 @@ func Unique(slice []string) []string {
return list
}
func EqualStringSlices(slice1 []string, slice2 []string) bool {
if len(slice1) != len(slice2) {
return false
}
for _, v := range slice1 {
if !Contains(slice2, v) {
return false
}
}
return true
}
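EqualStringSlices backs the "Skipping apply, DaemonSet is up to date" shortcut above. It compares membership rather than order (and, since it only checks Contains, repeated elements are not counted separately), which is enough for node-name lists. For example:

	EqualStringSlices([]string{"node-a", "node-b"}, []string{"node-b", "node-a"}) // true: order is ignored
	EqualStringSlices([]string{"node-a"}, []string{"node-a", "node-b"})           // false: lengths differ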

View File

@@ -18,6 +18,7 @@ import (
)
const mizuTestEnvVar = "MIZU_TEST"
const UNKNOWN_NAMESPACE = ""
var UnknownIp net.IP = net.IP{0, 0, 0, 0}
var UnknownPort uint16 = 0
@@ -92,14 +93,15 @@ type RequestResponsePair struct {
Response GenericMessage `json:"response"`
}
// `Protocol` is modified in the later stages of data propagation. Therefore it's not a pointer.
type OutputChannelItem struct {
// `Protocol` is modified in later stages of data propagation. Therefore, it's not a pointer.
Protocol Protocol
Capture Capture
Timestamp int64
ConnectionInfo *ConnectionInfo
Pair *RequestResponsePair
Summary *BaseEntry
Namespace string
}
type SuperTimer struct {
@@ -156,12 +158,12 @@ func (e *Emitting) Emit(item *OutputChannelItem) {
}
type Entry struct {
Id uint `json:"id"`
Id string `json:"id"`
Protocol Protocol `json:"proto"`
Capture Capture `json:"capture"`
Source *TCP `json:"src"`
Destination *TCP `json:"dst"`
Namespace string `json:"namespace,omitempty"`
Namespace string `json:"namespace"`
Outgoing bool `json:"outgoing"`
Timestamp int64 `json:"timestamp"`
StartTime time.Time `json:"startTime"`
@@ -188,7 +190,7 @@ type EntryWrapper struct {
}
type BaseEntry struct {
Id uint `json:"id"`
Id string `json:"id"`
Protocol Protocol `json:"proto,omitempty"`
Capture Capture `json:"capture"`
Summary string `json:"summary,omitempty"`

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect5/amqp/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect8/amqp/\* expect

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect5/http/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect8/http/\* expect

View File

@@ -53,7 +53,16 @@ func representMapSliceAsTable(mapSlice []interface{}, selectorPrefix string) (re
h := item.(map[string]interface{})
key := h["name"].(string)
value := h["value"]
switch reflect.TypeOf(value).Kind() {
var reflectKind reflect.Kind
reflectType := reflect.TypeOf(value)
if reflectType == nil {
reflectKind = reflect.Interface
} else {
reflectKind = reflect.TypeOf(value).Kind()
}
switch reflectKind {
case reflect.Slice:
fallthrough
case reflect.Array:
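The added nil check guards header values that decode to a nil interface (for example a JSON null): reflect.TypeOf returns nil for such a value, so calling Kind() directly on the result would panic. A tiny illustration:

	var value interface{} // e.g. a header whose JSON value is null
	if reflect.TypeOf(value) == nil {
		// fall back to reflect.Interface instead of panicking on a nil Type
	}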

View File

@@ -28,26 +28,6 @@ const protoMinorHTTP2 = 0
var maxHTTP2DataLen = 1 * 1024 * 1024 // 1MB
var grpcStatusCodes = []string{
"OK",
"CANCELLED",
"UNKNOWN",
"INVALID_ARGUMENT",
"DEADLINE_EXCEEDED",
"NOT_FOUND",
"ALREADY_EXISTS",
"PERMISSION_DENIED",
"RESOURCE_EXHAUSTED",
"FAILED_PRECONDITION",
"ABORTED",
"OUT_OF_RANGE",
"UNIMPLEMENTED",
"INTERNAL",
"UNAVAILABLE",
"DATA_LOSS",
"UNAUTHENTICATED",
}
type messageFragment struct {
headers []hpack.HeaderField
data []byte
@@ -142,18 +122,8 @@ func (ga *Http2Assembler) readMessage() (streamID uint32, messageHTTP1 interface
// gRPC detection
grpcStatus := headersHTTP1.Get("Grpc-Status")
if grpcStatus != "" {
if grpcStatus != "" || strings.Contains(headersHTTP1.Get("Content-Type"), "application/grpc") {
isGrpc = true
status = grpcStatus
}
if strings.Contains(headersHTTP1.Get("Content-Type"), "application/grpc") {
isGrpc = true
grpcPath := headersHTTP1.Get(":path")
pathSegments := strings.Split(grpcPath, "/")
if len(pathSegments) > 0 {
method = pathSegments[len(pathSegments)-1]
}
}
if method != "" {

View File

@@ -248,11 +248,6 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
reqDetails["_queryStringMerged"] = mapSliceMergeRepeatedKeys(reqDetails["_queryString"].([]interface{}))
reqDetails["queryString"] = mapSliceRebuildAsMap(reqDetails["_queryStringMerged"].([]interface{}))
statusCode := int(resDetails["status"].(float64))
if item.Protocol.Abbreviation == "gRPC" {
resDetails["statusText"] = grpcStatusCodes[statusCode]
}
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
if elapsedTime < 0 {
elapsedTime = 0

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect5/kafka/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect8/kafka/\* expect

View File

@@ -8,6 +8,7 @@ require (
github.com/segmentio/kafka-go v0.4.27
github.com/stretchr/testify v1.6.1
github.com/up9inc/mizu/tap/api v0.0.0
golang.org/x/text v0.3.0
)
require (

View File

@@ -40,6 +40,7 @@ golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@@ -3,6 +3,8 @@ package kafka
import (
"encoding/json"
"fmt"
"golang.org/x/text/cases"
"golang.org/x/text/language"
"reflect"
"sort"
"strconv"
@@ -891,8 +893,9 @@ func representMapAsTable(mapData map[string]interface{}, selectorPrefix string,
}
}
selector := fmt.Sprintf("%s[\"%s\"]", selectorPrefix, key)
caser := cases.Title(language.Und, cases.NoLower)
table = append(table, api.TableData{
Name: strings.Join(camelcase.Split(strings.Title(key)), " "),
Name: strings.Join(camelcase.Split(caser.String(key)), " "),
Value: value,
Selector: selector,
})
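strings.Title is deprecated; cases.Title with the NoLower option reproduces the old behaviour of capitalising only the first letter and leaving the rest of the key untouched, so camelcase.Split can still cut it into words. With an illustrative key name:

	caser := cases.Title(language.Und, cases.NoLower)
	caser.String("someKey")                                     // "SomeKey" (default casing would give "Somekey")
	strings.Join(camelcase.Split(caser.String("someKey")), " ") // "Some Key"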

View File

@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect5/redis/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect8/redis/\* expect

View File

@@ -3,7 +3,6 @@ module github.com/up9inc/mizu/tap
go 1.17
require (
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4
github.com/cilium/ebpf v0.8.0
github.com/go-errors/errors v1.4.2
github.com/google/gopacket v1.1.19

View File

@@ -91,8 +91,6 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4 h1:NJOOlc6ZJjix0A1rAU+nxruZtR8KboG1848yqpIUo4M=
github.com/bradleyfalzon/tlsx v0.0.0-20170624122154-28fd0e59bac4/go.mod h1:DQPxZS994Ld1Y8uwnJT+dRL04XPD0cElP/pHH/zEBHM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=

View File

@@ -29,21 +29,6 @@ func getLocalhostIPs() ([]string, error) {
return myIPs, nil
}
//lint:ignore U1000 will be used in the future
func isPrivateIP(ipStr string) bool {
ip := net.ParseIP(ipStr)
if ip.IsLoopback() || ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast() {
return true
}
for _, block := range privateIPBlocks {
if block.Contains(ip) {
return true
}
}
return false
}
func initPrivateIPBlocks() {
for _, cidr := range []string{
"127.0.0.0/8", // IPv4 loopback

View File

@@ -1,39 +0,0 @@
package tap
type OutboundLinkProtocol string
const (
TLSProtocol OutboundLinkProtocol = "tls"
)
type OutboundLink struct {
Src string
DstIP string
DstPort int
SuggestedResolvedName string
SuggestedProtocol OutboundLinkProtocol
}
func NewOutboundLinkWriter() *OutboundLinkWriter {
return &OutboundLinkWriter{
OutChan: make(chan *OutboundLink),
}
}
type OutboundLinkWriter struct {
OutChan chan *OutboundLink
}
func (olw *OutboundLinkWriter) WriteOutboundLink(src string, DstIP string, DstPort int, SuggestedResolvedName string, SuggestedProtocol OutboundLinkProtocol) {
olw.OutChan <- &OutboundLink{
Src: src,
DstIP: DstIP,
DstPort: DstPort,
SuggestedResolvedName: SuggestedResolvedName,
SuggestedProtocol: SuggestedProtocol,
}
}
func (olw *OutboundLinkWriter) Stop() {
close(olw.OutChan)
}

View File

@@ -27,9 +27,6 @@ import (
const cleanPeriod = time.Second * 10
//lint:ignore U1000 will be used in the future
var remoteOnlyOutboundPorts = []int{80, 443}
var maxcount = flag.Int64("c", -1, "Only grab this many packets, then exit")
var decoder = flag.String("decoder", "", "Name of the decoder to use (default: guess from capture)")
var statsevery = flag.Int("stats", 60, "Output statistics every N seconds")
@@ -58,8 +55,7 @@ var tls = flag.Bool("tls", false, "Enable TLS tapper")
var memprofile = flag.String("memprofile", "", "Write memory profile")
type TapOpts struct {
HostMode bool
FilterAuthorities []v1.Pod
HostMode bool
}
var extensions []*api.Extension // global
@@ -67,39 +63,16 @@ var filteringOptions *api.TrafficFilteringOptions // global
var tapTargets []v1.Pod // global
var packetSourceManager *source.PacketSourceManager // global
var mainPacketInputChan chan source.TcpPacketInfo // global
func inArrayInt(arr []int, valueToCheck int) bool {
for _, value := range arr {
if value == valueToCheck {
return true
}
}
return false
}
func inArrayString(arr []string, valueToCheck string) bool {
for _, value := range arr {
if value == valueToCheck {
return true
}
}
return false
}
var tlsTapperInstance *tlstapper.TlsTapper // global
func StartPassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem, extensionsRef []*api.Extension, options *api.TrafficFilteringOptions) {
extensions = extensionsRef
filteringOptions = options
if opts.FilterAuthorities == nil {
tapTargets = []v1.Pod{}
} else {
tapTargets = opts.FilterAuthorities
}
if *tls {
for _, e := range extensions {
if e.Protocol.Name == "http" {
startTlsTapper(e, outputItems, options)
tlsTapperInstance = startTlsTapper(e, outputItems, options)
break
}
}
@@ -109,24 +82,39 @@ func StartPassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem,
diagnose.StartMemoryProfiler(os.Getenv(MemoryProfilingDumpPath), os.Getenv(MemoryProfilingTimeIntervalSeconds))
}
go startPassiveTapper(opts, outputItems)
streamsMap, assembler := initializePassiveTapper(opts, outputItems)
go startPassiveTapper(streamsMap, assembler)
}
func UpdateTapTargets(newTapTargets []v1.Pod) {
success := true
tapTargets = newTapTargets
if err := initializePacketSources(); err != nil {
logger.Log.Fatal(err)
packetSourceManager.UpdatePods(tapTargets, !*nodefrag, mainPacketInputChan)
if tlsTapperInstance != nil {
if err := tlstapper.UpdateTapTargets(tlsTapperInstance, &tapTargets, *procfs); err != nil {
tlstapper.LogError(err)
success = false
}
}
printNewTapTargets()
printNewTapTargets(success)
}
func printNewTapTargets() {
func printNewTapTargets(success bool) {
printStr := ""
for _, tapTarget := range tapTargets {
printStr += fmt.Sprintf("%s (%s), ", tapTarget.Status.PodIP, tapTarget.Name)
}
printStr = strings.TrimRight(printStr, ", ")
logger.Log.Infof("Now tapping: %s", printStr)
if success {
logger.Log.Infof("Now tapping: %s", printStr)
} else {
logger.Log.Errorf("Failed to start tapping: %s", printStr)
}
}
func printPeriodicStats(cleaner *Cleaner) {
@@ -189,17 +177,12 @@ func initializePacketSources() error {
}
var err error
if packetSourceManager, err = source.NewPacketSourceManager(*procfs, *fname, *iface, *servicemesh, tapTargets, behaviour); err != nil {
return err
} else {
packetSourceManager.ReadPackets(!*nodefrag, mainPacketInputChan)
return nil
}
packetSourceManager, err = source.NewPacketSourceManager(*procfs, *fname, *iface, *servicemesh, tapTargets, behaviour, !*nodefrag, mainPacketInputChan)
return err
}
func startPassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem) {
func initializePassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem) (*tcpStreamMap, *tcpAssembler) {
streamsMap := NewTcpStreamMap()
go streamsMap.closeTimedoutTcpStreamChannels()
diagnose.InitializeErrorsMap(*debug, *verbose, *quiet)
diagnose.InitializeTapperInternalStats()
@@ -212,6 +195,12 @@ func startPassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem)
assembler := NewTcpAssembler(outputItems, streamsMap, opts)
return streamsMap, assembler
}
func startPassiveTapper(streamsMap *tcpStreamMap, assembler *tcpAssembler) {
go streamsMap.closeTimedoutTcpStreamChannels()
diagnose.AppStats.SetStartTime(time.Now())
staleConnectionTimeout := time.Second * time.Duration(*staleTimeoutSeconds)
@@ -243,13 +232,19 @@ func startPassiveTapper(opts *TapOpts, outputItems chan *api.OutputChannelItem)
logger.Log.Infof("AppStats: %v", diagnose.AppStats)
}
func startTlsTapper(extension *api.Extension, outputItems chan *api.OutputChannelItem, options *api.TrafficFilteringOptions) {
func startTlsTapper(extension *api.Extension, outputItems chan *api.OutputChannelItem, options *api.TrafficFilteringOptions) *tlstapper.TlsTapper {
tls := tlstapper.TlsTapper{}
tlsPerfBufferSize := os.Getpagesize() * 100
chunksBufferSize := os.Getpagesize() * 100
logBufferSize := os.Getpagesize()
if err := tls.Init(tlsPerfBufferSize, *procfs, extension); err != nil {
if err := tls.Init(chunksBufferSize, logBufferSize, *procfs, extension); err != nil {
tlstapper.LogError(err)
return
return nil
}
if err := tlstapper.UpdateTapTargets(&tls, &tapTargets, *procfs); err != nil {
tlstapper.LogError(err)
return nil
}
// A quick way to instrument libssl.so without PID filtering - used for debugging and troubleshooting
@@ -257,19 +252,17 @@ func startTlsTapper(extension *api.Extension, outputItems chan *api.OutputChanne
if os.Getenv("MIZU_GLOBAL_SSL_LIBRARY") != "" {
if err := tls.GlobalTap(os.Getenv("MIZU_GLOBAL_SSL_LIBRARY")); err != nil {
tlstapper.LogError(err)
return
return nil
}
}
if err := tlstapper.UpdateTapTargets(&tls, &tapTargets, *procfs); err != nil {
tlstapper.LogError(err)
return
}
var emitter api.Emitter = &api.Emitting{
AppStats: &diagnose.AppStats,
OutputChannel: outputItems,
}
go tls.PollForLogging()
go tls.Poll(emitter, options)
return &tls
}

View File

@@ -13,6 +13,7 @@ const (
MaxBufferedPagesTotalEnvVarName = "MAX_BUFFERED_PAGES_TOTAL"
MaxBufferedPagesPerConnectionEnvVarName = "MAX_BUFFERED_PAGES_PER_CONNECTION"
TcpStreamChannelTimeoutMsEnvVarName = "TCP_STREAM_CHANNEL_TIMEOUT_MS"
CloseTimedoutTcpChannelsIntervalMsEnvVar = "CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS"
MaxBufferedPagesTotalDefaultValue = 5000
MaxBufferedPagesPerConnectionDefaultValue = 5000
TcpStreamChannelTimeoutMsDefaultValue = 10000

View File

@@ -5,11 +5,12 @@ import (
"runtime"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/tap/api"
"github.com/vishvananda/netns"
)
func newNetnsPacketSource(procfs string, pid string,
interfaceName string, behaviour TcpPacketSourceBehaviour) (*tcpPacketSource, error) {
func newNetnsPacketSource(procfs string, pid string, interfaceName string,
behaviour TcpPacketSourceBehaviour, origin api.Capture) (*tcpPacketSource, error) {
nsh, err := netns.GetFromPath(fmt.Sprintf("%s/%s/ns/net", procfs, pid))
if err != nil {
@@ -17,7 +18,7 @@ func newNetnsPacketSource(procfs string, pid string,
return nil, err
}
src, err := newPacketSourceFromNetnsHandle(pid, nsh, interfaceName, behaviour)
src, err := newPacketSourceFromNetnsHandle(pid, nsh, interfaceName, behaviour, origin)
if err != nil {
logger.Log.Errorf("Error starting netns packet source for %s - %w", pid, err)
@@ -28,7 +29,7 @@ func newNetnsPacketSource(procfs string, pid string,
}
func newPacketSourceFromNetnsHandle(pid string, nsh netns.NsHandle, interfaceName string,
behaviour TcpPacketSourceBehaviour) (*tcpPacketSource, error) {
behaviour TcpPacketSourceBehaviour, origin api.Capture) (*tcpPacketSource, error) {
done := make(chan *tcpPacketSource)
errors := make(chan error)
@@ -57,7 +58,7 @@ func newPacketSourceFromNetnsHandle(pid string, nsh netns.NsHandle, interfaceNam
}
name := fmt.Sprintf("netns-%s-%s", pid, interfaceName)
src, err := newTcpPacketSource(name, "", interfaceName, behaviour)
src, err := newTcpPacketSource(name, "", interfaceName, behaviour, origin)
if err != nil {
logger.Log.Errorf("Error listening to PID %s - %w", pid, err)

View File

@@ -5,18 +5,27 @@ import (
"strings"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/tap/api"
v1 "k8s.io/api/core/v1"
)
const bpfFilterMaxPods = 150
const hostSourcePid = "0"
type PacketSourceManagerConfig struct {
mtls bool
procfs string
interfaceName string
behaviour TcpPacketSourceBehaviour
}
type PacketSourceManager struct {
sources map[string]*tcpPacketSource
config PacketSourceManagerConfig
}
func NewPacketSourceManager(procfs string, filename string, interfaceName string,
mtls bool, pods []v1.Pod, behaviour TcpPacketSourceBehaviour) (*PacketSourceManager, error) {
mtls bool, pods []v1.Pod, behaviour TcpPacketSourceBehaviour, ipdefrag bool, packets chan<- TcpPacketInfo) (*PacketSourceManager, error) {
hostSource, err := newHostPacketSource(filename, interfaceName, behaviour)
if err != nil {
return nil, err
@@ -28,7 +37,14 @@ func NewPacketSourceManager(procfs string, filename string, interfaceName string
},
}
sourceManager.UpdatePods(mtls, procfs, pods, interfaceName, behaviour)
sourceManager.config = PacketSourceManagerConfig{
mtls: mtls,
procfs: procfs,
interfaceName: interfaceName,
behaviour: behaviour,
}
go hostSource.readPackets(ipdefrag, packets)
return sourceManager, nil
}
@@ -41,7 +57,7 @@ func newHostPacketSource(filename string, interfaceName string,
name = fmt.Sprintf("file-%s", filename)
}
source, err := newTcpPacketSource(name, filename, interfaceName, behaviour)
source, err := newTcpPacketSource(name, filename, interfaceName, behaviour, api.Pcap)
if err != nil {
return nil, err
}
@@ -49,17 +65,16 @@ func newHostPacketSource(filename string, interfaceName string,
return source, nil
}
func (m *PacketSourceManager) UpdatePods(mtls bool, procfs string, pods []v1.Pod,
interfaceName string, behaviour TcpPacketSourceBehaviour) {
if mtls {
m.updateMtlsPods(procfs, pods, interfaceName, behaviour)
func (m *PacketSourceManager) UpdatePods(pods []v1.Pod, ipdefrag bool, packets chan<- TcpPacketInfo) {
if m.config.mtls {
m.updateMtlsPods(m.config.procfs, pods, m.config.interfaceName, m.config.behaviour, ipdefrag, packets)
}
m.setBPFFilter(pods)
}
func (m *PacketSourceManager) updateMtlsPods(procfs string, pods []v1.Pod,
interfaceName string, behaviour TcpPacketSourceBehaviour) {
interfaceName string, behaviour TcpPacketSourceBehaviour, ipdefrag bool, packets chan<- TcpPacketInfo) {
relevantPids := m.getRelevantPids(procfs, pods)
logger.Log.Infof("Updating mtls pods (new: %v) (current: %v)", relevantPids, m.sources)
@@ -71,26 +86,27 @@ func (m *PacketSourceManager) updateMtlsPods(procfs string, pods []v1.Pod,
}
}
for pid := range relevantPids {
for pid, origin := range relevantPids {
if _, ok := m.sources[pid]; !ok {
source, err := newNetnsPacketSource(procfs, pid, interfaceName, behaviour)
source, err := newNetnsPacketSource(procfs, pid, interfaceName, behaviour, origin)
if err == nil {
go source.readPackets(ipdefrag, packets)
m.sources[pid] = source
}
}
}
}
func (m *PacketSourceManager) getRelevantPids(procfs string, pods []v1.Pod) map[string]bool {
relevantPids := make(map[string]bool)
relevantPids[hostSourcePid] = true
func (m *PacketSourceManager) getRelevantPids(procfs string, pods []v1.Pod) map[string]api.Capture {
relevantPids := make(map[string]api.Capture)
relevantPids[hostSourcePid] = api.Pcap
if envoyPids, err := discoverRelevantEnvoyPids(procfs, pods); err != nil {
logger.Log.Warningf("Unable to discover envoy pids - %w", err)
} else {
for _, pid := range envoyPids {
relevantPids[pid] = true
relevantPids[pid] = api.Envoy
}
}
@@ -98,7 +114,7 @@ func (m *PacketSourceManager) getRelevantPids(procfs string, pods []v1.Pod) map[
logger.Log.Warningf("Unable to discover linkerd pids - %w", err)
} else {
for _, pid := range linkerdPids {
relevantPids[pid] = true
relevantPids[pid] = api.Linkerd
}
}
@@ -139,12 +155,6 @@ func (m *PacketSourceManager) setBPFFilter(pods []v1.Pod) {
}
}
func (m *PacketSourceManager) ReadPackets(ipdefrag bool, packets chan<- TcpPacketInfo) {
for _, src := range m.sources {
go src.readPackets(ipdefrag, packets)
}
}
func (m *PacketSourceManager) Close() {
for _, src := range m.sources {
src.close()

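With this change getRelevantPids no longer returns a plain presence set; each PID is tagged with an api.Capture origin that flows through newNetnsPacketSource into tcpPacketSource.Origin and, eventually, onto each tcpStream. A rough illustration of the returned shape (the PIDs are made up):
relevantPids := map[string]api.Capture{
	"0":    api.Pcap,    // hostSourcePid - the host network namespace, captured via plain pcap
	"1234": api.Envoy,   // an Envoy sidecar PID discovered via procfs
	"5678": api.Linkerd, // a Linkerd sidecar PID discovered via procfs
}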
View File

@@ -10,6 +10,7 @@ import (
"github.com/google/gopacket/layers"
"github.com/google/gopacket/pcap"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/tap/api"
"github.com/up9inc/mizu/tap/diagnose"
)
@@ -19,6 +20,7 @@ type tcpPacketSource struct {
defragger *ip4defrag.IPv4Defragmenter
Behaviour *TcpPacketSourceBehaviour
name string
Origin api.Capture
}
type TcpPacketSourceBehaviour struct {
@@ -36,13 +38,14 @@ type TcpPacketInfo struct {
}
func newTcpPacketSource(name, filename string, interfaceName string,
behaviour TcpPacketSourceBehaviour) (*tcpPacketSource, error) {
behaviour TcpPacketSourceBehaviour, origin api.Capture) (*tcpPacketSource, error) {
var err error
result := &tcpPacketSource{
name: name,
defragger: ip4defrag.NewIPv4Defragmenter(),
Behaviour: &behaviour,
Origin: origin,
}
if filename != "" {

View File

@@ -29,6 +29,7 @@ type tcpAssembler struct {
// The assembler context
type context struct {
CaptureInfo gopacket.CaptureInfo
Origin api.Capture
}
func (c *context) GetCaptureInfo() gopacket.CaptureInfo {
@@ -87,8 +88,10 @@ func (a *tcpAssembler) processPackets(dumpPacket bool, packets <-chan source.Tcp
logger.Log.Fatalf("Failed to set network layer for checksum: %s", err)
}
}
c := context{
CaptureInfo: packet.Metadata().CaptureInfo,
Origin: packetInfo.Source.Origin,
}
diagnose.InternalStats.Totalsz += len(tcp.Payload)
a.assemblerMutex.Lock()

View File

@@ -7,13 +7,10 @@ import (
"sync"
"time"
"github.com/bradleyfalzon/tlsx"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/tap/api"
)
const checkTLSPacketAmount = 100
type tcpReaderDataMsg struct {
bytes []byte
timestamp time.Time
@@ -33,22 +30,21 @@ type ConnectionInfo struct {
* Implements io.Reader interface (Read)
*/
type tcpReader struct {
ident string
tcpID *api.TcpID
isClosed bool
isClient bool
isOutgoing bool
msgQueue chan tcpReaderDataMsg // Channel of captured reassembled tcp payload
data []byte
progress *api.ReadProgress
superTimer *api.SuperTimer
parent *tcpStream
packetsSeen uint
outboundLinkWriter *OutboundLinkWriter
extension *api.Extension
emitter api.Emitter
counterPair *api.CounterPair
reqResMatcher api.RequestResponseMatcher
ident string
tcpID *api.TcpID
isClosed bool
isClient bool
isOutgoing bool
msgQueue chan tcpReaderDataMsg // Channel of captured reassembled tcp payload
data []byte
progress *api.ReadProgress
superTimer *api.SuperTimer
parent *tcpStream
packetsSeen uint
extension *api.Extension
emitter api.Emitter
counterPair *api.CounterPair
reqResMatcher api.RequestResponseMatcher
sync.Mutex
}
@@ -64,16 +60,6 @@ func (h *tcpReader) Read(p []byte) (int, error) {
if len(h.data) > 0 {
h.packetsSeen += 1
}
if h.packetsSeen < checkTLSPacketAmount && len(msg.bytes) > 5 { // packets with less than 5 bytes cause tlsx to panic
clientHello := tlsx.ClientHello{}
err := clientHello.Unmarshall(msg.bytes)
if err == nil {
logger.Log.Debugf("Detected TLS client hello with SNI %s", clientHello.SNI)
// TODO: Throws `panic: runtime error: invalid memory address or nil pointer dereference` error.
// numericPort, _ := strconv.Atoi(h.tcpID.DstPort)
// h.outboundLinkWriter.WriteOutboundLink(h.tcpID.SrcIP, h.tcpID.DstIP, numericPort, clientHello.SNI, TLSProtocol)
}
}
}
if !ok || len(h.data) == 0 {
return 0, io.EOF
@@ -98,8 +84,7 @@ func (h *tcpReader) Close() {
func (h *tcpReader) run(wg *sync.WaitGroup) {
defer wg.Done()
b := bufio.NewReader(h)
// TODO: Add api.Pcap, api.Envoy and api.Linkerd distinction by refactoring NewPacketSourceManager method
err := h.extension.Dissector.Dissect(b, h.progress, api.Pcap, h.isClient, h.tcpID, h.counterPair, h.superTimer, h.parent.superIdentifier, h.emitter, filteringOptions, h.reqResMatcher)
err := h.extension.Dissector.Dissect(b, h.progress, h.parent.origin, h.isClient, h.tcpID, h.counterPair, h.superTimer, h.parent.superIdentifier, h.emitter, filteringOptions, h.reqResMatcher)
if err != nil {
_, err = io.Copy(ioutil.Discard, b)
if err != nil {

View File

@@ -29,6 +29,7 @@ type tcpStream struct {
clients []tcpReader
servers []tcpReader
ident string
origin api.Capture
sync.Mutex
streamsMap *tcpStreamMap
}
@@ -70,6 +71,9 @@ func (t *tcpStream) Accept(tcp *layers.TCP, ci gopacket.CaptureInfo, dir reassem
if !accept {
diagnose.InternalStats.RejectOpt++
}
*start = true
return accept
}

View File

@@ -20,12 +20,11 @@ import (
* Generates a new tcp stream for each new tcp connection. Closes the stream when the connection closes.
*/
type tcpStreamFactory struct {
wg sync.WaitGroup
outboundLinkWriter *OutboundLinkWriter
Emitter api.Emitter
streamsMap *tcpStreamMap
ownIps []string
opts *TapOpts
wg sync.WaitGroup
Emitter api.Emitter
streamsMap *tcpStreamMap
ownIps []string
opts *TapOpts
}
type tcpStreamWrapper struct {
@@ -63,9 +62,6 @@ func (factory *tcpStreamFactory) New(net, transport gopacket.Flow, tcp *layers.T
srcPort := transport.Src().String()
dstPort := transport.Dst().String()
// if factory.shouldNotifyOnOutboundLink(dstIp, dstPort) {
// factory.outboundLinkWriter.WriteOutboundLink(net.Src().String(), dstIp, dstPort, "", "")
// }
props := factory.getStreamProps(srcIp, srcPort, dstIp, dstPort)
isTapTarget := props.isTapTarget
stream := &tcpStream{
@@ -78,6 +74,7 @@ func (factory *tcpStreamFactory) New(net, transport gopacket.Flow, tcp *layers.T
optchecker: reassembly.NewTCPOptionCheck(),
superIdentifier: &api.SuperIdentifier{},
streamsMap: factory.streamsMap,
origin: getPacketOrigin(ac),
}
if stream.isTapTarget {
stream.id = factory.streamsMap.nextId()
@@ -98,14 +95,13 @@ func (factory *tcpStreamFactory) New(net, transport gopacket.Flow, tcp *layers.T
SrcPort: srcPort,
DstPort: dstPort,
},
parent: stream,
isClient: true,
isOutgoing: props.isOutgoing,
outboundLinkWriter: factory.outboundLinkWriter,
extension: extension,
emitter: factory.Emitter,
counterPair: counterPair,
reqResMatcher: reqResMatcher,
parent: stream,
isClient: true,
isOutgoing: props.isOutgoing,
extension: extension,
emitter: factory.Emitter,
counterPair: counterPair,
reqResMatcher: reqResMatcher,
})
stream.servers = append(stream.servers, tcpReader{
msgQueue: make(chan tcpReaderDataMsg),
@@ -118,14 +114,13 @@ func (factory *tcpStreamFactory) New(net, transport gopacket.Flow, tcp *layers.T
SrcPort: transport.Dst().String(),
DstPort: transport.Src().String(),
},
parent: stream,
isClient: false,
isOutgoing: props.isOutgoing,
outboundLinkWriter: factory.outboundLinkWriter,
extension: extension,
emitter: factory.Emitter,
counterPair: counterPair,
reqResMatcher: reqResMatcher,
parent: stream,
isClient: false,
isOutgoing: props.isOutgoing,
extension: extension,
emitter: factory.Emitter,
counterPair: counterPair,
reqResMatcher: reqResMatcher,
})
factory.streamsMap.Store(stream.id, &tcpStreamWrapper{
@@ -173,13 +168,15 @@ func (factory *tcpStreamFactory) getStreamProps(srcIP string, srcPort string, ds
}
}
//lint:ignore U1000 will be used in the future
func (factory *tcpStreamFactory) shouldNotifyOnOutboundLink(dstIP string, dstPort int) bool {
if inArrayInt(remoteOnlyOutboundPorts, dstPort) {
isDirectedHere := inArrayString(factory.ownIps, dstIP)
return !isDirectedHere && !isPrivateIP(dstIP)
func getPacketOrigin(ac reassembly.AssemblerContext) api.Capture {
c, ok := ac.(*context)
if !ok {
// If ac is not our context, fallback to Pcap
return api.Pcap
}
return true
return c.Origin
}
type streamProps struct {

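The assembler context now carries the capture origin, and getPacketOrigin falls back to api.Pcap whenever the context is not the tapper's own type. A standalone sketch of that fallback pattern, using local demo types rather than the project's api package:
package main

import "fmt"

type capture string

type tapperContext struct{ origin capture }

// packetOrigin mirrors getPacketOrigin: a failed type assertion means the
// packet did not come from one of the tapper's own sources, so it is
// attributed to plain pcap capture.
func packetOrigin(ac interface{}) capture {
	if c, ok := ac.(*tapperContext); ok {
		return c.origin
	}
	return "pcap"
}

func main() {
	fmt.Println(packetOrigin(&tapperContext{origin: "envoy"})) // envoy
	fmt.Println(packetOrigin(42))                              // pcap
}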
View File

@@ -1,8 +1,10 @@
package tap
import (
"os"
"runtime"
_debug "runtime/debug"
"strconv"
"sync"
"time"
@@ -34,10 +36,35 @@ func (streamMap *tcpStreamMap) nextId() int64 {
return streamMap.streamId
}
func (streamMap *tcpStreamMap) getCloseTimedoutTcpChannelsInterval() time.Duration {
defaultDuration := 1000 * time.Millisecond
rangeMin := 10
rangeMax := 10000
closeTimedoutTcpChannelsIntervalMsStr := os.Getenv(CloseTimedoutTcpChannelsIntervalMsEnvVar)
if closeTimedoutTcpChannelsIntervalMsStr == "" {
return defaultDuration
} else {
closeTimedoutTcpChannelsIntervalMs, err := strconv.Atoi(closeTimedoutTcpChannelsIntervalMsStr)
if err != nil {
logger.Log.Warningf("Error parsing environment variable %s: %v\n", CloseTimedoutTcpChannelsIntervalMsEnvVar, err)
return defaultDuration
} else {
if closeTimedoutTcpChannelsIntervalMs < rangeMin || closeTimedoutTcpChannelsIntervalMs > rangeMax {
logger.Log.Warningf("The value of environment variable %s is not in acceptable range: %d - %d\n", CloseTimedoutTcpChannelsIntervalMsEnvVar, rangeMin, rangeMax)
return defaultDuration
} else {
return time.Duration(closeTimedoutTcpChannelsIntervalMs) * time.Millisecond
}
}
}
}
func (streamMap *tcpStreamMap) closeTimedoutTcpStreamChannels() {
tcpStreamChannelTimeout := GetTcpChannelTimeoutMs()
closeTimedoutTcpChannelsIntervalMs := streamMap.getCloseTimedoutTcpChannelsInterval()
logger.Log.Infof("Using %d ms as the close timedout TCP stream channels interval", closeTimedoutTcpChannelsIntervalMs/time.Millisecond)
for {
time.Sleep(10 * time.Millisecond)
time.Sleep(closeTimedoutTcpChannelsIntervalMs)
_debug.FreeOSMemory()
streamMap.streams.Range(func(key interface{}, value interface{}) bool {
streamWrapper := value.(*tcpStreamWrapper)
@@ -47,7 +74,7 @@ func (streamMap *tcpStreamMap) closeTimedoutTcpStreamChannels() {
stream.Close()
diagnose.AppStats.IncDroppedTcpStreams()
logger.Log.Debugf("Dropped an unidentified TCP stream because of timeout. Total dropped: %d Total Goroutines: %d Timeout (ms): %d",
diagnose.AppStats.DroppedTcpStreams, runtime.NumGoroutine(), tcpStreamChannelTimeout/1000000)
diagnose.AppStats.DroppedTcpStreams, runtime.NumGoroutine(), tcpStreamChannelTimeout/time.Millisecond)
}
} else {
if !stream.superIdentifier.IsClosedOthers {

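The close interval for timed-out TCP stream channels is now configurable through CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS, with unset, unparsable or out-of-range values (outside 10-10000 ms) falling back to the 1000 ms default. A simplified standalone sketch of that parse-and-clamp behaviour (not the project code, which also logs warnings):
package main

import (
	"fmt"
	"os"
	"strconv"
	"time"
)

// closeInterval mirrors getCloseTimedoutTcpChannelsInterval in simplified form.
func closeInterval() time.Duration {
	const defaultInterval = 1000 * time.Millisecond
	raw := os.Getenv("CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS")
	if raw == "" {
		return defaultInterval
	}
	ms, err := strconv.Atoi(raw)
	if err != nil || ms < 10 || ms > 10000 {
		return defaultInterval
	}
	return time.Duration(ms) * time.Millisecond
}

func main() {
	os.Setenv("CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS", "250")
	fmt.Println(closeInterval()) // 250ms
	os.Setenv("CLOSE_TIMEDOUT_TCP_STREAM_CHANNELS_INTERVAL_MS", "20000")
	fmt.Println(closeInterval()) // 1s (out of range, falls back to the default)
}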
View File

@@ -1,5 +1,7 @@
#!/bin/bash
pushd "$(dirname "$0")" || exit 1
MIZU_HOME=$(realpath ../../../)
docker build -t mizu-ebpf-builder . || exit 1
@@ -7,6 +9,7 @@ docker build -t mizu-ebpf-builder . || exit 1
docker run --rm \
--name mizu-ebpf-builder \
-v $MIZU_HOME:/mizu \
-v $(go env GOPATH):/root/go \
-it mizu-ebpf-builder \
sh -c "
go generate tap/tlstapper/tls_tapper.go
@@ -15,3 +18,5 @@ docker run --rm \
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpfel.go
chown $(id -u):$(id -g) tap/tlstapper/tlstapper_bpfel.o
" || exit 1
popd

View File

@@ -7,8 +7,12 @@ Copyright (C) UP9 Inc.
#include "include/headers.h"
#include "include/util.h"
#include "include/maps.h"
#include "include/log.h"
#include "include/logger_messages.h"
#include "include/pids.h"
#define IPV4_ADDR_LEN (16)
struct accept_info {
__u64* sockaddr;
__u32* addrlen;
@@ -41,9 +45,7 @@ void sys_enter_accept4(struct sys_enter_accept4_ctx *ctx) {
long err = bpf_map_update_elem(&accept_syscall_context, &id, &info, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting accept info (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return;
log_error(ctx, LOG_ERROR_PUTTING_ACCEPT_INFO, id, err, 0l);
}
}
@@ -69,7 +71,8 @@ void sys_exit_accept4(struct sys_exit_accept4_ctx *ctx) {
struct accept_info *infoPtr = bpf_map_lookup_elem(&accept_syscall_context, &id);
if (infoPtr == 0) {
if (infoPtr == NULL) {
log_error(ctx, LOG_ERROR_GETTING_ACCEPT_INFO, id, 0l, 0l);
return;
}
@@ -79,15 +82,14 @@ void sys_exit_accept4(struct sys_exit_accept4_ctx *ctx) {
bpf_map_delete_elem(&accept_syscall_context, &id);
if (err != 0) {
char msg[] = "Error reading accept info from accept syscall (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
log_error(ctx, LOG_ERROR_READING_ACCEPT_INFO, id, err, 0l);
return;
}
__u32 addrlen;
bpf_probe_read(&addrlen, sizeof(__u32), info.addrlen);
if (addrlen != 16) {
if (addrlen != IPV4_ADDR_LEN) {
// Currently only ipv4 is supported linux-src/include/linux/inet.h
return;
}
@@ -105,9 +107,7 @@ void sys_exit_accept4(struct sys_exit_accept4_ctx *ctx) {
err = bpf_map_update_elem(&file_descriptor_to_ipv4, &key, &fdinfo, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting fd to address mapping from accept (key: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), key, err);
return;
log_error(ctx, LOG_ERROR_PUTTING_FD_MAPPING, id, err, ORIGIN_SYS_EXIT_ACCEPT4_CODE);
}
}
@@ -145,9 +145,7 @@ void sys_enter_connect(struct sys_enter_connect_ctx *ctx) {
long err = bpf_map_update_elem(&connect_syscall_info, &id, &info, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting connect info (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return;
log_error(ctx, LOG_ERROR_PUTTING_CONNECT_INFO, id, err, 0l);
}
}
@@ -175,7 +173,8 @@ void sys_exit_connect(struct sys_exit_connect_ctx *ctx) {
struct connect_info *infoPtr = bpf_map_lookup_elem(&connect_syscall_info, &id);
if (infoPtr == 0) {
if (infoPtr == NULL) {
log_error(ctx, LOG_ERROR_GETTING_CONNECT_INFO, id, 0l, 0l);
return;
}
@@ -185,12 +184,11 @@ void sys_exit_connect(struct sys_exit_connect_ctx *ctx) {
bpf_map_delete_elem(&connect_syscall_info, &id);
if (err != 0) {
char msg[] = "Error reading connect info from connect syscall (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
log_error(ctx, LOG_ERROR_READING_CONNECT_INFO, id, err, 0l);
return;
}
if (info.addrlen != 16) {
if (info.addrlen != IPV4_ADDR_LEN) {
// Currently only ipv4 is supported linux-src/include/linux/inet.h
return;
}
@@ -208,8 +206,6 @@ void sys_exit_connect(struct sys_exit_connect_ctx *ctx) {
err = bpf_map_update_elem(&file_descriptor_to_ipv4, &key, &fdinfo, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting fd to address mapping from connect (key: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), key, err);
return;
log_error(ctx, LOG_ERROR_PUTTING_FD_MAPPING, id, err, ORIGIN_SYS_EXIT_CONNECT_CODE);
}
}
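Both the accept and connect exit probes store the peer address in file_descriptor_to_ipv4 under a 64-bit key that, as the SSL uprobes show, packs the PID into the high 32 bits and the file descriptor into the low 32 bits, so a later lookup can rebuild the same key from bpf_get_current_pid_tgid() and the saved fd. A tiny illustration of the packing (values are arbitrary):
package main

import "fmt"

// fdKey mirrors the `(__u64) pid << 32 | fd` expression used for the
// file_descriptor_to_ipv4 map key.
func fdKey(pid, fd uint32) uint64 {
	return uint64(pid)<<32 | uint64(fd)
}

func main() {
	fmt.Printf("%x\n", fdKey(4242, 7)) // 109200000007
}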

View File

@@ -7,6 +7,8 @@ Copyright (C) UP9 Inc.
#include "include/headers.h"
#include "include/util.h"
#include "include/maps.h"
#include "include/log.h"
#include "include/logger_messages.h"
#include "include/pids.h"
struct sys_enter_read_ctx {
@@ -28,7 +30,7 @@ void sys_enter_read(struct sys_enter_read_ctx *ctx) {
struct ssl_info *infoPtr = bpf_map_lookup_elem(&ssl_read_context, &id);
if (infoPtr == 0) {
if (infoPtr == NULL) {
return;
}
@@ -36,8 +38,7 @@ void sys_enter_read(struct sys_enter_read_ctx *ctx) {
long err = bpf_probe_read(&info, sizeof(struct ssl_info), infoPtr);
if (err != 0) {
char msg[] = "Error reading read info from read syscall (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
log_error(ctx, LOG_ERROR_READING_SSL_CONTEXT, id, err, ORIGIN_SYS_ENTER_READ_CODE);
return;
}
@@ -46,9 +47,7 @@ void sys_enter_read(struct sys_enter_read_ctx *ctx) {
err = bpf_map_update_elem(&ssl_read_context, &id, &info, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting file descriptor from read syscall (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return;
log_error(ctx, LOG_ERROR_PUTTING_FILE_DESCRIPTOR, id, err, ORIGIN_SYS_ENTER_READ_CODE);
}
}
@@ -71,7 +70,7 @@ void sys_enter_write(struct sys_enter_write_ctx *ctx) {
struct ssl_info *infoPtr = bpf_map_lookup_elem(&ssl_write_context, &id);
if (infoPtr == 0) {
if (infoPtr == NULL) {
return;
}
@@ -79,8 +78,7 @@ void sys_enter_write(struct sys_enter_write_ctx *ctx) {
long err = bpf_probe_read(&info, sizeof(struct ssl_info), infoPtr);
if (err != 0) {
char msg[] = "Error reading write context from write syscall (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
log_error(ctx, LOG_ERROR_READING_SSL_CONTEXT, id, err, ORIGIN_SYS_ENTER_WRITE_CODE);
return;
}
@@ -89,8 +87,6 @@ void sys_enter_write(struct sys_enter_write_ctx *ctx) {
err = bpf_map_update_elem(&ssl_write_context, &id, &info, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting file descriptor from write syscall (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return;
log_error(ctx, LOG_ERROR_PUTTING_FILE_DESCRIPTOR, id, err, ORIGIN_SYS_ENTER_WRITE_CODE);
}
}

View File

@@ -0,0 +1,79 @@
/*
Note: This file is licenced differently from the rest of the project
SPDX-License-Identifier: GPL-2.0
Copyright (C) UP9 Inc.
*/
#ifndef __LOG__
#define __LOG__
// The same consts defined in bpf_logger.go
//
#define LOG_LEVEL_ERROR (0)
#define LOG_LEVEL_INFO (1)
#define LOG_LEVEL_DEBUG (2)
// The same struct can be found in bpf_logger.go
//
// Be careful when editing; alignment and padding should be exactly the same in Go and C.
//
struct log_message {
__u32 level;
__u32 message_code;
__u64 arg1;
__u64 arg2;
__u64 arg3;
};
static __always_inline void log_error(void* ctx, __u16 message_code, __u64 arg1, __u64 arg2, __u64 arg3) {
struct log_message entry = {};
entry.level = LOG_LEVEL_ERROR;
entry.message_code = message_code;
entry.arg1 = arg1;
entry.arg2 = arg2;
entry.arg3 = arg3;
long err = bpf_perf_event_output(ctx, &log_buffer, BPF_F_CURRENT_CPU, &entry, sizeof(struct log_message));
if (err != 0) {
char msg[] = "Error writing log error to perf buffer - %ld";
bpf_trace_printk(msg, sizeof(msg), err);
}
}
static __always_inline void log_info(void* ctx, __u16 message_code, __u64 arg1, __u64 arg2, __u64 arg3) {
struct log_message entry = {};
entry.level = LOG_LEVEL_INFO;
entry.message_code = message_code;
entry.arg1 = arg1;
entry.arg2 = arg2;
entry.arg3 = arg3;
long err = bpf_perf_event_output(ctx, &log_buffer, BPF_F_CURRENT_CPU, &entry, sizeof(struct log_message));
if (err != 0) {
char msg[] = "Error writing log info to perf buffer - %ld";
bpf_trace_printk(msg, sizeof(msg), err);
}
}
static __always_inline void log_debug(void* ctx, __u16 message_code, __u64 arg1, __u64 arg2, __u64 arg3) {
struct log_message entry = {};
entry.level = LOG_LEVEL_DEBUG;
entry.message_code = message_code;
entry.arg1 = arg1;
entry.arg2 = arg2;
entry.arg3 = arg3;
long err = bpf_perf_event_output(ctx, &log_buffer, BPF_F_CURRENT_CPU, &entry, sizeof(struct log_message));
if (err != 0) {
char msg[] = "Error writing log debug to perf buffer - %ld";
bpf_trace_printk(msg, sizeof(msg), err);
}
}
#endif /* __LOG__ */

View File

@@ -0,0 +1,42 @@
/*
Note: This file is licenced differently from the rest of the project
SPDX-License-Identifier: GPL-2.0
Copyright (C) UP9 Inc.
*/
#ifndef __LOG_MESSAGES__
#define __LOG_MESSAGES__
// Must be synced with bpf_logger_messages.go
//
#define LOG_ERROR_READING_BYTES_COUNT (0)
#define LOG_ERROR_READING_FD_ADDRESS (1)
#define LOG_ERROR_READING_FROM_SSL_BUFFER (2)
#define LOG_ERROR_BUFFER_TOO_BIG (3)
#define LOG_ERROR_ALLOCATING_CHUNK (4)
#define LOG_ERROR_READING_SSL_CONTEXT (5)
#define LOG_ERROR_PUTTING_SSL_CONTEXT (6)
#define LOG_ERROR_GETTING_SSL_CONTEXT (7)
#define LOG_ERROR_MISSING_FILE_DESCRIPTOR (8)
#define LOG_ERROR_PUTTING_FILE_DESCRIPTOR (9)
#define LOG_ERROR_PUTTING_ACCEPT_INFO (10)
#define LOG_ERROR_GETTING_ACCEPT_INFO (11)
#define LOG_ERROR_READING_ACCEPT_INFO (12)
#define LOG_ERROR_PUTTING_FD_MAPPING (13)
#define LOG_ERROR_PUTTING_CONNECT_INFO (14)
#define LOG_ERROR_GETTING_CONNECT_INFO (15)
#define LOG_ERROR_READING_CONNECT_INFO (16)
// Sometimes the same error can happen from different locations.
// In order to be able to distinguish between them in the log, we add an
// extra number that identifies the location. The number can be anything,
// but do not give the same number to different origins.
//
#define ORIGIN_SSL_UPROBE_CODE (0l)
#define ORIGIN_SSL_URETPROBE_CODE (1l)
#define ORIGIN_SYS_ENTER_READ_CODE (2l)
#define ORIGIN_SYS_ENTER_WRITE_CODE (3l)
#define ORIGIN_SYS_EXIT_ACCEPT4_CODE (4l)
#define ORIGIN_SYS_EXIT_CONNECT_CODE (5l)
#endif /* __LOG_MESSAGES__ */

View File

@@ -10,6 +10,12 @@ Copyright (C) UP9 Inc.
#define FLAGS_IS_CLIENT_BIT (1 << 0)
#define FLAGS_IS_READ_BIT (1 << 1)
#define CHUNK_SIZE (1 << 12)
#define MAX_CHUNKS_PER_OPERATION (8)
// One minute in nanoseconds. Chosen by gut feeling.
#define SSL_INFO_MAX_TTL_NANO (1000000000l * 60l)
// The same struct can be found in chunk.go
//
// Be careful when editing; alignment and padding should be exactly the same in Go and C.
@@ -18,16 +24,18 @@ struct tlsChunk {
__u32 pid;
__u32 tgid;
__u32 len;
__u32 start;
__u32 recorded;
__u32 fd;
__u32 flags;
__u8 address[16];
__u8 data[4096]; // Must be a power of 2
__u8 data[CHUNK_SIZE]; // Must be a power of 2
};
struct ssl_info {
void* buffer;
__u32 fd;
__u64 created_at_nano;
// for ssl_write and ssl_read must be zero
// for ssl_write_ex and ssl_read_ex save the *written/*readbytes pointer.
@@ -53,11 +61,15 @@ struct fd_info {
#define BPF_PERF_OUTPUT(_name) \
BPF_MAP(_name, BPF_MAP_TYPE_PERF_EVENT_ARRAY, int, __u32, 1024)
#define BPF_LRU_HASH(_name, _key_type, _value_type) \
BPF_MAP(_name, BPF_MAP_TYPE_LRU_HASH, _key_type, _value_type, 16384)
BPF_HASH(pids_map, __u32, __u32);
BPF_HASH(ssl_write_context, __u64, struct ssl_info);
BPF_HASH(ssl_read_context, __u64, struct ssl_info);
BPF_LRU_HASH(ssl_write_context, __u64, struct ssl_info);
BPF_LRU_HASH(ssl_read_context, __u64, struct ssl_info);
BPF_HASH(file_descriptor_to_ipv4, __u64, struct fd_info);
BPF_PERF_OUTPUT(chunks_buffer);
BPF_PERF_OUTPUT(log_buffer);
#endif /* __MAPS__ */

View File

@@ -7,6 +7,8 @@ Copyright (C) UP9 Inc.
#include "include/headers.h"
#include "include/util.h"
#include "include/maps.h"
#include "include/log.h"
#include "include/logger_messages.h"
#include "include/pids.h"
// Heap-like area for eBPF programs - stack size limited to 512 bytes, we must use maps for bigger (chunk) objects.
@@ -18,181 +20,249 @@ struct {
__type(value, struct tlsChunk);
} heap SEC(".maps");
static __always_inline int ssl_uprobe(void* ssl, void* buffer, int num, struct bpf_map_def* map_fd, size_t *count_ptr) {
__u64 id = bpf_get_current_pid_tgid();
static __always_inline int get_count_bytes(struct pt_regs *ctx, struct ssl_info* info, __u64 id) {
int returnValue = PT_REGS_RC(ctx);
if (!should_tap(id >> 32)) {
if (info->count_ptr == NULL) {
// ssl_read and ssl_write return the number of bytes written/read
//
return returnValue;
}
// ssl_read_ex and ssl_write_ex return 1 for success
//
if (returnValue != 1) {
return 0;
}
// ssl_read_ex and ssl_write_ex write the number of bytes to an arg named *count
//
size_t countBytes;
long err = bpf_probe_read(&countBytes, sizeof(size_t), (void*) info->count_ptr);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_BYTES_COUNT, id, err, 0l);
return 0;
}
return countBytes;
}
static __always_inline void add_address_to_chunk(struct pt_regs *ctx, struct tlsChunk* chunk, __u64 id, __u32 fd) {
__u32 pid = id >> 32;
__u64 key = (__u64) pid << 32 | fd;
struct fd_info *fdinfo = bpf_map_lookup_elem(&file_descriptor_to_ipv4, &key);
if (fdinfo == NULL) {
return;
}
int err = bpf_probe_read(chunk->address, sizeof(chunk->address), fdinfo->ipv4_addr);
chunk->flags |= (fdinfo->flags & FLAGS_IS_CLIENT_BIT);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_FD_ADDRESS, id, err, 0l);
}
}
static __always_inline void send_chunk_part(struct pt_regs *ctx, __u8* buffer, __u64 id,
struct tlsChunk* chunk, int start, int end) {
size_t recorded = MIN(end - start, sizeof(chunk->data));
if (recorded <= 0) {
return;
}
chunk->recorded = recorded;
chunk->start = start;
// This ugly trick is for the ebpf verifier happiness
//
long err = 0;
if (chunk->recorded == sizeof(chunk->data)) {
err = bpf_probe_read(chunk->data, sizeof(chunk->data), buffer + start);
} else {
recorded &= (sizeof(chunk->data) - 1); // Buffer size must be a power of 2
err = bpf_probe_read(chunk->data, recorded, buffer + start);
}
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_FROM_SSL_BUFFER, id, err, 0l);
return;
}
bpf_perf_event_output(ctx, &chunks_buffer, BPF_F_CURRENT_CPU, chunk, sizeof(struct tlsChunk));
}
static __always_inline void send_chunk(struct pt_regs *ctx, __u8* buffer, __u64 id, struct tlsChunk* chunk) {
// ebpf loops must be bounded at compile time, we can't use (i < chunk->len / CHUNK_SIZE)
//
// https://lwn.net/Articles/794934/
//
// However, we want to run on kernels older than 5.3, hence we use "#pragma unroll" anyway
//
#pragma unroll
for (int i = 0; i < MAX_CHUNKS_PER_OPERATION; i++) {
if (chunk->len <= (CHUNK_SIZE * i)) {
break;
}
send_chunk_part(ctx, buffer, id, chunk, CHUNK_SIZE * i, chunk->len);
}
}
static __always_inline void output_ssl_chunk(struct pt_regs *ctx, struct ssl_info* info, __u64 id, __u32 flags) {
int countBytes = get_count_bytes(ctx, info, id);
if (countBytes <= 0) {
return;
}
if (countBytes > (CHUNK_SIZE * MAX_CHUNKS_PER_OPERATION)) {
log_error(ctx, LOG_ERROR_BUFFER_TOO_BIG, id, countBytes, 0l);
return;
}
struct tlsChunk* chunk;
int zero = 0;
// If another thread running on the same CPU gets to this point at the same time as us (context switch),
// the data will be corrupted - protection may be added in the future
//
chunk = bpf_map_lookup_elem(&heap, &zero);
if (!chunk) {
log_error(ctx, LOG_ERROR_ALLOCATING_CHUNK, id, 0l, 0l);
return;
}
chunk->flags = flags;
chunk->pid = id >> 32;
chunk->tgid = id;
chunk->len = countBytes;
chunk->fd = info->fd;
add_address_to_chunk(ctx, chunk, id, chunk->fd);
send_chunk(ctx, info->buffer, id, chunk);
}
static __always_inline void ssl_uprobe(struct pt_regs *ctx, void* ssl, void* buffer, int num, struct bpf_map_def* map_fd, size_t *count_ptr) {
__u64 id = bpf_get_current_pid_tgid();
if (!should_tap(id >> 32)) {
return;
}
struct ssl_info *infoPtr = bpf_map_lookup_elem(map_fd, &id);
struct ssl_info info = {};
info.fd = -1;
if (infoPtr == NULL) {
info.fd = -1;
info.created_at_nano = bpf_ktime_get_ns();
} else {
long err = bpf_probe_read(&info, sizeof(struct ssl_info), infoPtr);
if (err != 0) {
log_error(ctx, LOG_ERROR_READING_SSL_CONTEXT, id, err, ORIGIN_SSL_UPROBE_CODE);
}
if ((bpf_ktime_get_ns() - info.created_at_nano) > SSL_INFO_MAX_TTL_NANO) {
// If the ssl info is too old, we don't want to use its info because it may be incorrect.
//
info.fd = -1;
info.created_at_nano = bpf_ktime_get_ns();
}
}
info.count_ptr = count_ptr;
info.buffer = buffer;
long err = bpf_map_update_elem(map_fd, &id, &info, BPF_ANY);
if (err != 0) {
char msg[] = "Error putting ssl context (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return 0;
log_error(ctx, LOG_ERROR_PUTTING_SSL_CONTEXT, id, err, 0l);
}
return 0;
}
static __always_inline int ssl_uretprobe(struct pt_regs *ctx, struct bpf_map_def* map_fd, __u32 flags) {
static __always_inline void ssl_uretprobe(struct pt_regs *ctx, struct bpf_map_def* map_fd, __u32 flags) {
__u64 id = bpf_get_current_pid_tgid();
if (!should_tap(id >> 32)) {
return 0;
return;
}
struct ssl_info *infoPtr = bpf_map_lookup_elem(map_fd, &id);
if (infoPtr == 0) {
char msg[] = "Error getting ssl context info (id: %ld)";
bpf_trace_printk(msg, sizeof(msg), id);
return 0;
if (infoPtr == NULL) {
log_error(ctx, LOG_ERROR_GETTING_SSL_CONTEXT, id, 0l, 0l);
return;
}
struct ssl_info info;
long err = bpf_probe_read(&info, sizeof(struct ssl_info), infoPtr);
bpf_map_delete_elem(map_fd, &id);
// Do not clean the map on purpose; sometimes there are two calls to ssl_read in a row,
// where only the first call actually reads from the socket and gives us the chance
// to find the fd. The other call already has all the information and we don't
// get the chance to find the fd.
//
// There are two risks in keeping the map items:
// 1. It gets full - we solve it by using BPF_MAP_TYPE_LRU_HASH with a hard limit
// 2. We get stale info from an old call - we solve it by comparing the timestamp
// info before using it
//
// bpf_map_delete_elem(map_fd, &id);
if (err != 0) {
char msg[] = "Error reading ssl context (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return 0;
log_error(ctx, LOG_ERROR_READING_SSL_CONTEXT, id, err, ORIGIN_SSL_URETPROBE_CODE);
return;
}
if (info.fd == -1) {
char msg[] = "File descriptor is missing from ssl info (id: %ld)";
bpf_trace_printk(msg, sizeof(msg), id);
return 0;
log_error(ctx, LOG_ERROR_MISSING_FILE_DESCRIPTOR, id, 0l, 0l);
return;
}
int countBytes = PT_REGS_RC(ctx);
if (info.count_ptr != 0) {
// ssl_read_ex and ssl_write_ex return 1 for success
//
if (countBytes != 1) {
return 0;
}
size_t tempCount;
long err = bpf_probe_read(&tempCount, sizeof(size_t), (void*) info.count_ptr);
if (err != 0) {
char msg[] = "Error reading bytes count of _ex (id: %ld) (err: %ld)";
bpf_trace_printk(msg, sizeof(msg), id, err);
return 0;
}
countBytes = tempCount;
}
if (countBytes <= 0) {
return 0;
}
struct tlsChunk* c;
int zero = 0;
// If other thread, running on the same CPU get to this point at the same time like us
// the data will be corrupted - protection may be added in the future
//
c = bpf_map_lookup_elem(&heap, &zero);
if (!c) {
char msg[] = "Unable to allocate chunk (id: %ld)";
bpf_trace_printk(msg, sizeof(msg), id);
return 0;
}
size_t recorded = MIN(countBytes, sizeof(c->data));
c->flags = flags;
c->pid = id >> 32;
c->tgid = id;
c->len = countBytes;
c->recorded = recorded;
c->fd = info.fd;
// This ugly trick is for the ebpf verifier happiness
//
if (recorded == sizeof(c->data)) {
err = bpf_probe_read(c->data, sizeof(c->data), info.buffer);
} else {
recorded &= sizeof(c->data) - 1; // Buffer must be N^2
err = bpf_probe_read(c->data, recorded, info.buffer);
}
if (err != 0) {
char msg[] = "Error reading from ssl buffer %ld - %ld";
bpf_trace_printk(msg, sizeof(msg), id, err);
return 0;
}
__u32 pid = id >> 32;
__u32 fd = info.fd;
__u64 key = (__u64) pid << 32 | fd;
struct fd_info *fdinfo = bpf_map_lookup_elem(&file_descriptor_to_ipv4, &key);
if (fdinfo != 0) {
err = bpf_probe_read(c->address, sizeof(c->address), fdinfo->ipv4_addr);
c->flags |= (fdinfo->flags & FLAGS_IS_CLIENT_BIT);
if (err != 0) {
char msg[] = "Error reading from fd address %ld - %ld";
bpf_trace_printk(msg, sizeof(msg), id, err);
}
}
bpf_perf_event_output(ctx, &chunks_buffer, BPF_F_CURRENT_CPU, c, sizeof(struct tlsChunk));
return 0;
output_ssl_chunk(ctx, &info, id, flags);
}
SEC("uprobe/ssl_write")
int BPF_KPROBE(ssl_write, void* ssl, void* buffer, int num) {
return ssl_uprobe(ssl, buffer, num, &ssl_write_context, 0);
void BPF_KPROBE(ssl_write, void* ssl, void* buffer, int num) {
ssl_uprobe(ctx, ssl, buffer, num, &ssl_write_context, 0);
}
SEC("uretprobe/ssl_write")
int BPF_KPROBE(ssl_ret_write) {
return ssl_uretprobe(ctx, &ssl_write_context, 0);
void BPF_KPROBE(ssl_ret_write) {
ssl_uretprobe(ctx, &ssl_write_context, 0);
}
SEC("uprobe/ssl_read")
int BPF_KPROBE(ssl_read, void* ssl, void* buffer, int num) {
return ssl_uprobe(ssl, buffer, num, &ssl_read_context, 0);
void BPF_KPROBE(ssl_read, void* ssl, void* buffer, int num) {
ssl_uprobe(ctx, ssl, buffer, num, &ssl_read_context, 0);
}
SEC("uretprobe/ssl_read")
int BPF_KPROBE(ssl_ret_read) {
return ssl_uretprobe(ctx, &ssl_read_context, FLAGS_IS_READ_BIT);
void BPF_KPROBE(ssl_ret_read) {
ssl_uretprobe(ctx, &ssl_read_context, FLAGS_IS_READ_BIT);
}
SEC("uprobe/ssl_write_ex")
int BPF_KPROBE(ssl_write_ex, void* ssl, void* buffer, size_t num, size_t *written) {
return ssl_uprobe(ssl, buffer, num, &ssl_write_context, written);
void BPF_KPROBE(ssl_write_ex, void* ssl, void* buffer, size_t num, size_t *written) {
ssl_uprobe(ctx, ssl, buffer, num, &ssl_write_context, written);
}
SEC("uretprobe/ssl_write_ex")
int BPF_KPROBE(ssl_ret_write_ex) {
return ssl_uretprobe(ctx, &ssl_write_context, 0);
void BPF_KPROBE(ssl_ret_write_ex) {
ssl_uretprobe(ctx, &ssl_write_context, 0);
}
SEC("uprobe/ssl_read_ex")
int BPF_KPROBE(ssl_read_ex, void* ssl, void* buffer, size_t num, size_t *readbytes) {
return ssl_uprobe(ssl, buffer, num, &ssl_read_context, readbytes);
void BPF_KPROBE(ssl_read_ex, void* ssl, void* buffer, size_t num, size_t *readbytes) {
ssl_uprobe(ctx, ssl, buffer, num, &ssl_read_context, readbytes);
}
SEC("uretprobe/ssl_read_ex")
int BPF_KPROBE(ssl_ret_read_ex) {
return ssl_uretprobe(ctx, &ssl_read_context, FLAGS_IS_READ_BIT);
void BPF_KPROBE(ssl_ret_read_ex) {
ssl_uretprobe(ctx, &ssl_read_context, FLAGS_IS_READ_BIT);
}
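output_ssl_chunk caps a single SSL read or write at CHUNK_SIZE * MAX_CHUNKS_PER_OPERATION bytes (4 KiB * 8 = 32 KiB, anything larger is rejected with LOG_ERROR_BUFFER_TOO_BIG), and send_chunk emits one perf event per CHUNK_SIZE slice, with only the last slice partially filled. A standalone sketch of the slicing arithmetic for a hypothetical 10,000-byte write:
package main

import "fmt"

const (
	chunkSize = 1 << 12 // CHUNK_SIZE
	maxChunks = 8       // MAX_CHUNKS_PER_OPERATION
)

func main() {
	total := 10000 // hypothetical countBytes returned for an SSL_write
	for i := 0; i < maxChunks; i++ {
		start := chunkSize * i
		if total <= start {
			break
		}
		recorded := total - start
		if recorded > chunkSize {
			recorded = chunkSize
		}
		fmt.Printf("chunk %d: start=%d recorded=%d\n", i, start, recorded)
	}
	// chunk 0: start=0 recorded=4096
	// chunk 1: start=4096 recorded=4096
	// chunk 2: start=8192 recorded=1808
}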

View File

@@ -7,6 +7,8 @@ Copyright (C) UP9 Inc.
#include "include/headers.h"
#include "include/util.h"
#include "include/maps.h"
#include "include/log.h"
#include "include/logger_messages.h"
#include "include/pids.h"
// To avoid multiple .o files

tap/tlstapper/bpf_logger.go Normal file
View File

@@ -0,0 +1,116 @@
package tlstapper
import (
"bytes"
"encoding/binary"
"strings"
"github.com/cilium/ebpf/perf"
"github.com/go-errors/errors"
"github.com/up9inc/mizu/shared/logger"
)
const logPrefix = "[bpf] "
// The same consts defined in log.h
//
const logLevelError = 0
const logLevelInfo = 1
const logLevelDebug = 2
type logMessage struct {
Level uint32
MessageCode uint32
Arg1 uint64
Arg2 uint64
Arg3 uint64
}
type bpfLogger struct {
logReader *perf.Reader
}
func newBpfLogger() *bpfLogger {
return &bpfLogger{
logReader: nil,
}
}
func (p *bpfLogger) init(bpfObjects *tlsTapperObjects, bufferSize int) error {
var err error
p.logReader, err = perf.NewReader(bpfObjects.LogBuffer, bufferSize)
if err != nil {
return errors.Wrap(err, 0)
}
return nil
}
func (p *bpfLogger) close() error {
return p.logReader.Close()
}
func (p *bpfLogger) poll() {
logger.Log.Infof("Start polling for bpf logs")
for {
record, err := p.logReader.Read()
if err != nil {
if errors.Is(err, perf.ErrClosed) {
return
}
LogError(errors.Errorf("Error reading from bpf logger perf buffer, aborting logger! %w", err))
return
}
if record.LostSamples != 0 {
logger.Log.Infof("Log buffer is full, dropped %d logs", record.LostSamples)
continue
}
buffer := bytes.NewReader(record.RawSample)
var log logMessage
if err := binary.Read(buffer, binary.LittleEndian, &log); err != nil {
LogError(errors.Errorf("Error parsing log %v", err))
continue
}
p.log(&log)
}
}
func (p *bpfLogger) log(log *logMessage) {
if int(log.MessageCode) >= len(bpfLogMessages) {
logger.Log.Errorf("Unknown message code from bpf logger %d", log.MessageCode)
return
}
format := bpfLogMessages[log.MessageCode]
tokensCount := strings.Count(format, "%")
if tokensCount == 0 {
p.logLevel(log.Level, format)
} else if tokensCount == 1 {
p.logLevel(log.Level, format, log.Arg1)
} else if tokensCount == 2 {
p.logLevel(log.Level, format, log.Arg1, log.Arg2)
} else if tokensCount == 3 {
p.logLevel(log.Level, format, log.Arg1, log.Arg2, log.Arg3)
}
}
func (p *bpfLogger) logLevel(level uint32, format string, args ...interface{}) {
if level == logLevelError {
logger.Log.Errorf(logPrefix+format, args...)
} else if level == logLevelInfo {
logger.Log.Infof(logPrefix+format, args...)
} else if level == logLevelDebug {
logger.Log.Debugf(logPrefix+format, args...)
}
}
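The logMessage struct above is the Go mirror of struct log_message in log.h: two 32-bit fields followed by three 64-bit fields, 32 bytes with no padding, which is exactly what binary.Read consumes from each perf record. A quick sanity-check sketch (binary.Size reports the packed size, which matches the C layout here precisely because no padding is needed):
package main

import (
	"encoding/binary"
	"fmt"
)

// The wire format shared by log.h and bpf_logger.go.
type logMessage struct {
	Level       uint32
	MessageCode uint32
	Arg1        uint64
	Arg2        uint64
	Arg3        uint64
}

func main() {
	fmt.Println(binary.Size(logMessage{})) // 32
}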

Some files were not shown because too many files have changed in this diff.