Compare commits

...

7 Commits

Author SHA1 Message Date
M. Mert Yildiran
7e56d45c6b 🔖 Bump the Helm chart version to 51.0.14 2023-10-25 03:02:16 +03:00
M. Mert Yildiran
0e2bca9729 Revert "🔨 Decrease the default storage limit back to 200Mi"
This reverts commit b1a40df069.
2023-10-24 03:10:23 +03:00
M. Mert Yildiran
b1a40df069 🔨 Decrease the default storage limit back to 200Mi 2023-10-24 03:06:02 +03:00
M. Mert Yildiran
773cf371f3 🩹 Exit if couldn't set the secret and log server init error instead of panic 2023-10-20 20:55:16 +03:00
Alon Girmonsky
1527f43396 Update README.md
updated the docker pull link
2023-10-19 14:20:34 -07:00
Alon Girmonsky
c9a2b9eb44 Update README.md
updated the announcement text
2023-10-19 14:18:08 -07:00
Alon Girmonsky
2b92bb74c7 📝 Update README.md (#1439)
* Update README.md

* Update helm-chart/README.md

Co-authored-by: M. Mert Yildiran <me@mertyildiran.com>

* Update helm-chart/README.md

Co-authored-by: M. Mert Yildiran <me@mertyildiran.com>

* Update README.md

fixed storage limit and change a title auth+eks

---------

Co-authored-by: M. Mert Yildiran <me@mertyildiran.com>
2023-10-17 01:42:29 +03:00
5 changed files with 84 additions and 56 deletions

View File

@@ -7,7 +7,7 @@
<img alt="GitHub Latest Release" src="https://img.shields.io/github/v/release/kubeshark/kubeshark?logo=GitHub&style=flat-square">
</a>
<a href="https://hub.docker.com/r/kubeshark/worker">
<img alt="Docker pulls" src="https://img.shields.io/docker/pulls/kubeshark/kubeshark?color=%23099cec&logo=Docker&style=flat-square">
<img alt="Docker pulls" src="https://img.shields.io/docker/pulls/kubeshark/worker?color=%23099cec&logo=Docker&style=flat-square">
</a>
<a href="https://hub.docker.com/r/kubeshark/worker">
<img alt="Image size" src="https://img.shields.io/docker/image-size/kubeshark/kubeshark/latest?logo=Docker&style=flat-square">
@@ -23,8 +23,7 @@
<p align="center">
<b>
<span>NEW: </span>
<a href="https://kubeshark.co/traffic-recording">Traffic Recording and Offline Investigation</a>, and
<a href="https://kubeshark.co/self-hosting">Self-hosting with Ingress and Authentication</a>.
<a href="https://github.com/kubeshark/kubeshark/releases/latest">v51.0.0</a> is out, with significantly improved performance and optimized resource utilization.
</b>
</p>

View File

@@ -77,7 +77,6 @@ func updateLicense(licenseKey string) {
updated, err := kubernetes.SetSecret(kubernetesProvider, kubernetes.SECRET_LICENSE, config.Config.License)
if err != nil {
log.Error().Err(err).Send()
return
}
if updated {
@@ -125,7 +124,7 @@ func runLicenseRecieverServer() {
go func() {
if err := ginApp.Run(fmt.Sprintf(":%d", PRO_PORT)); err != nil {
panic(err)
log.Error().Err(err).Send()
}
}()

View File

@@ -1,6 +1,6 @@
apiVersion: v2
name: kubeshark
version: "51.0.0"
version: "51.0.14"
description: The API Traffic Analyzer for Kubernetes
home: https://kubeshark.co
keywords:

View File

@@ -51,19 +51,49 @@ kubectl port-forward service/kubeshark-front 8899:80
Visit [localhost:8899](http://localhost:8899)
## Installing with Ingress Enabled
## Installing with Ingress (EKS) and Enabling Auth
```shell
helm install kubeshark kubeshark/kubeshark \
--set tap.ingress.enabled=true \
--set tap.ingress.host=ks.svc.cluster.local \
--set-json='tap.ingress.approveddomains=["gmail.com"]' \
--set license=LICENSE_GOES_HERE \
--set-json 'tap.annotations={ "eks.amazonaws.com/role-arn" : "arn:aws:iam::7...0:role/s3-role" }'
helm install kubeshark kubeshark/kubeshark -f values.yaml
```
You can get your license [here](https://console.kubeshark.co/).
Set this `values.yaml`:
```shell
tap:
auth:
enabled: true
approvedemails:
- john.doe@example.com
approveddomains: []
ingress:
enabled: true
classname: "alb"
host: ks.example.com
tls: []
annotations:
alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-1:7..8:certificate/b...65c
alb.ingress.kubernetes.io/target-type: ip
alb.ingress.kubernetes.io/scheme: internet-facing
```
## Add a License
When it's necessary, you can use:
```shell
--set license=YOUR_LICENSE_GOES_HERE
```
Get your license from Kubeshark's [Admin Console](https://console.kubeshark.co/).
## Increase the Worker's Storage Limit
For example, change from the default 500Mi to 1Gi:
```shell
--set tap.storagelimit=1Gi
```
## Disabling IPV6
Not all clusters have IPv6 enabled; in that case it has to be disabled as follows:

View File

@@ -4,10 +4,10 @@ apiVersion: v1
kind: ServiceAccount
metadata:
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-service-account
@@ -21,10 +21,10 @@ metadata:
namespace: default
labels:
app.kubeshark.co/app: hub
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
stringData:
LICENSE: ''
@@ -36,10 +36,10 @@ metadata:
name: kubeshark-nginx-config-map
namespace: default
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
data:
default.conf: |
@@ -85,10 +85,10 @@ metadata:
namespace: default
labels:
app.kubeshark.co/app: hub
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
data:
POD_REGEX: '.*'
@@ -105,10 +105,10 @@ apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-cluster-role
@@ -133,10 +133,10 @@ apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-cluster-role-binding
@@ -155,10 +155,10 @@ apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-self-config-role
@@ -183,10 +183,10 @@ apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-self-config-role-binding
@@ -206,10 +206,10 @@ kind: Service
metadata:
labels:
app.kubeshark.co/app: hub
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-hub
@@ -228,10 +228,10 @@ apiVersion: v1
kind: Service
metadata:
labels:
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-front
@@ -252,10 +252,10 @@ metadata:
labels:
app.kubeshark.co/app: worker
sidecar.istio.io/inject: "false"
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-worker-daemon-set
@@ -264,19 +264,19 @@ spec:
selector:
matchLabels:
app.kubeshark.co/app: worker
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
template:
metadata:
labels:
app.kubeshark.co/app: worker
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
name: kubeshark-worker-daemon-set
namespace: kubeshark
@@ -291,7 +291,7 @@ spec:
- -servicemesh
- -procfs
- /hostproc
image: 'docker.io/kubeshark/worker:v51.0.0'
image: 'docker.io/kubeshark/worker:v51.0.14'
imagePullPolicy: Always
name: sniffer
env:
@@ -351,7 +351,7 @@ spec:
- ./tracer
- -procfs
- /hostproc
image: 'docker.io/kubeshark/worker:v51.0.0'
image: 'docker.io/kubeshark/worker:v51.0.14'
imagePullPolicy: Always
name: tracer
env:
@@ -421,10 +421,10 @@ kind: Deployment
metadata:
labels:
app.kubeshark.co/app: hub
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-hub
@@ -434,19 +434,19 @@ spec:
selector:
matchLabels:
app.kubeshark.co/app: hub
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
template:
metadata:
labels:
app.kubeshark.co/app: hub
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
spec:
dnsPolicy: ClusterFirstWithHostNet
@@ -469,7 +469,7 @@ spec:
name: kubeshark-config-map
- secretRef:
name: kubeshark-secret
image: 'docker.io/kubeshark/hub:v51.0.0'
image: 'docker.io/kubeshark/hub:v51.0.14'
imagePullPolicy: Always
readinessProbe:
periodSeconds: 1
@@ -499,10 +499,10 @@ kind: Deployment
metadata:
labels:
app.kubeshark.co/app: front
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
annotations:
name: kubeshark-front
@@ -512,19 +512,19 @@ spec:
selector:
matchLabels:
app.kubeshark.co/app: front
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
template:
metadata:
labels:
app.kubeshark.co/app: front
helm.sh/chart: kubeshark-51.0.0
helm.sh/chart: kubeshark-51.0.14
app.kubernetes.io/name: kubeshark
app.kubernetes.io/instance: kubeshark
app.kubernetes.io/version: "51.0.0"
app.kubernetes.io/version: "51.0.14"
app.kubernetes.io/managed-by: Helm
spec:
containers:
@@ -535,7 +535,7 @@ spec:
value: ' '
- name: REACT_APP_HUB_PORT
value: ':8899/api'
image: 'docker.io/kubeshark/front:v51.0.0'
image: 'docker.io/kubeshark/front:v51.0.14'
imagePullPolicy: Always
name: kubeshark-front
livenessProbe: