Mirror of https://github.com/kubeshark/kubeshark.git (synced 2026-02-19 20:40:17 +00:00)
Compare commits
10 Commits
| SHA1 |
|---|
| 0abd7c06ff |
| c2739a68c2 |
| d0ef6c9f97 |
| a4f7e61a6e |
| a5fef90781 |
| 70cef9dc4b |
| 0f3dd66d2d |
| 5536e5bb44 |
| 3bab83754f |
| d011478a74 |
@@ -6,13 +6,13 @@ require (
github.com/antelman107/net-wait-go v0.0.0-20210623112055-cf684aebda7b
github.com/chanced/openapi v0.0.6
github.com/djherbis/atime v1.0.0
github.com/elastic/go-elasticsearch/v7 v7.16.0
github.com/getkin/kin-openapi v0.76.0
github.com/gin-contrib/static v0.0.1
github.com/gin-gonic/gin v1.7.7
github.com/go-playground/locales v0.13.0
github.com/go-playground/universal-translator v0.17.0
github.com/go-playground/validator/v10 v10.5.0
github.com/google/martian v2.1.0+incompatible
github.com/google/uuid v1.1.2
github.com/gorilla/websocket v1.4.2
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7

@@ -126,6 +126,8 @@ github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/elastic/go-elasticsearch/v7 v7.16.0 h1:GHsxDFXIAlhSleXun4kwA89P7kQFADRChqvgOPeYP5A=
github.com/elastic/go-elasticsearch/v7 v7.16.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
@@ -9,6 +9,7 @@ import (
"mizuserver/pkg/api"
"mizuserver/pkg/config"
"mizuserver/pkg/controllers"
"mizuserver/pkg/elastic"
"mizuserver/pkg/middlewares"
"mizuserver/pkg/models"
"mizuserver/pkg/oas"

@@ -159,6 +160,7 @@ func enableExpFeatureIfNeeded() {
if config.Config.ServiceMap {
servicemap.GetInstance().SetConfig(config.Config)
}
elastic.GetInstance().Configure(config.Config.Elastic)
}

func configureBasenineServer(host string, port string) {

@@ -167,7 +169,7 @@ func configureBasenineServer(host string, port string) {
wait.WithWait(200*time.Millisecond),
wait.WithBreak(50*time.Millisecond),
wait.WithDeadline(5*time.Second),
wait.WithDebug(true),
wait.WithDebug(config.Config.LogLevel == logging.DEBUG),
).Do([]string{fmt.Sprintf("%s:%s", host, port)}) {
logger.Log.Panicf("Basenine is not available!")
}
@@ -5,6 +5,7 @@ import (
"context"
"encoding/json"
"fmt"
"mizuserver/pkg/elastic"
"mizuserver/pkg/har"
"mizuserver/pkg/holder"
"mizuserver/pkg/providers"

@@ -16,14 +17,15 @@ import (
"mizuserver/pkg/servicemap"

"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"
"mizuserver/pkg/models"
"mizuserver/pkg/oas"
"mizuserver/pkg/resolver"
"mizuserver/pkg/utils"

"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
tapApi "github.com/up9inc/mizu/tap/api"

basenine "github.com/up9inc/basenine/client/go"
)

@@ -150,6 +152,7 @@ func startReadingChannel(outputItems <-chan *tapApi.OutputChannelItem, extension
connection.SendText(string(data))

servicemap.GetInstance().NewTCPEntry(mizuEntry.Source, mizuEntry.Destination, &item.Protocol)
elastic.GetInstance().PushEntry(mizuEntry)
}
}
agent/pkg/elastic/esClient.go (Normal file, 120 lines)
@@ -0,0 +1,120 @@
package elastic

import (
"bytes"
"crypto/tls"
"encoding/json"
"github.com/elastic/go-elasticsearch/v7"
"github.com/up9inc/mizu/shared"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/tap/api"
"net/http"
"sync"
"time"
)

type client struct {
es *elasticsearch.Client
index string
insertedCount int
}

var instance *client
var once sync.Once

func GetInstance() *client {
once.Do(func() {
instance = newClient()
})
return instance
}

func (client *client) Configure(config shared.ElasticConfig) {
if config.Url == "" || config.User == "" || config.Password == "" {
logger.Log.Infof("No elastic configuration was supplied, elastic exporter disabled")
return
}
transport := http.DefaultTransport
tlsClientConfig := &tls.Config{InsecureSkipVerify: true}
transport.(*http.Transport).TLSClientConfig = tlsClientConfig
cfg := elasticsearch.Config{
Addresses: []string{config.Url},
Username: config.User,
Password: config.Password,
Transport: transport,
}

es, err := elasticsearch.NewClient(cfg)
if err != nil {
logger.Log.Fatalf("Failed to initialize elastic client %v", err)
}

// Have the client instance return a response
res, err := es.Info()
if err != nil {
logger.Log.Fatalf("Elastic client.Info() ERROR: %v", err)
} else {
client.es = es
client.index = "mizu_traffic_http_" + time.Now().Format("2006_01_02_15_04")
client.insertedCount = 0
logger.Log.Infof("Elastic client configured, index: %s, cluster info: %v", client.index, res)
}
defer res.Body.Close()
}

func newClient() *client {
return &client{
es: nil,
index: "",
}
}

type httpEntry struct {
Source *api.TCP `json:"src"`
Destination *api.TCP `json:"dst"`
Outgoing bool `json:"outgoing"`
CreatedAt time.Time `json:"createdAt"`
Request map[string]interface{} `json:"request"`
Response map[string]interface{} `json:"response"`
Summary string `json:"summary"`
Method string `json:"method"`
Status int `json:"status"`
ElapsedTime int64 `json:"elapsedTime"`
Path string `json:"path"`
}

func (client *client) PushEntry(entry *api.Entry) {
if client.es == nil {
return
}

if entry.Protocol.Name != "http" {
return
}

entryToPush := httpEntry{
Source: entry.Source,
Destination: entry.Destination,
Outgoing: entry.Outgoing,
CreatedAt: entry.StartTime,
Request: entry.Request,
Response: entry.Response,
Summary: entry.Summary,
Method: entry.Method,
Status: entry.Status,
ElapsedTime: entry.ElapsedTime,
Path: entry.Path,
}

entryJson, err := json.Marshal(entryToPush)
if err != nil {
logger.Log.Errorf("json.Marshal ERROR: %v", err)
return
}
var buffer bytes.Buffer
buffer.WriteString(string(entryJson))
res, _ := client.es.Index(client.index, &buffer)
if res.StatusCode == 201 {
client.insertedCount += 1
}
}
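For orientation, here is a minimal sketch (not part of the change set) of how this exporter is meant to be driven from the agent, based on the calls added in main.go and socket_server_handlers.go above: Configure is called once at startup and PushEntry once per captured entry. The function name, the entries channel, and the config values are placeholders.

```go
// Hypothetical wiring sketch, assuming the mizuserver/tap packages shown in this diff.
package main

import (
	"mizuserver/pkg/elastic"

	"github.com/up9inc/mizu/shared"
	tapApi "github.com/up9inc/mizu/tap/api"
)

func wireElastic(cfg shared.ElasticConfig, entries <-chan *tapApi.Entry) {
	// Configure is a no-op when URL/user/password are empty,
	// so it is safe to call unconditionally at startup.
	elastic.GetInstance().Configure(cfg)

	for entry := range entries {
		// Only entries with Protocol.Name == "http" are indexed; others return immediately.
		elastic.GetInstance().PushEntry(entry)
	}
}
```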
@@ -51,32 +51,42 @@ func fileSize(fname string) int64 {
return fi.Size()
}

func feedEntries(fromFiles []string) (err error) {
func feedEntries(fromFiles []string, isSync bool) (count int, err error) {
badFiles := make([]string, 0)
cnt := 0
for _, file := range fromFiles {
logger.Log.Info("Processing file: " + file)
ext := strings.ToLower(filepath.Ext(file))
eCnt := 0
switch ext {
case ".har":
err = feedFromHAR(file)
eCnt, err = feedFromHAR(file, isSync)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
badFiles = append(badFiles, file)
continue
}
case ".ldjson":
err = feedFromLDJSON(file)
eCnt, err = feedFromLDJSON(file, isSync)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
badFiles = append(badFiles, file)
continue
}
default:
return errors.New("Unsupported file extension: " + ext)
return 0, errors.New("Unsupported file extension: " + ext)
}
cnt += eCnt
}

return nil
for _, f := range badFiles {
logger.Log.Infof("Bad file: %s", f)
}

return cnt, nil
}

func feedFromHAR(file string) error {
func feedFromHAR(file string, isSync bool) (int, error) {
fd, err := os.Open(file)
if err != nil {
panic(err)

@@ -86,23 +96,43 @@ func feedFromHAR(file string) error {
data, err := ioutil.ReadAll(fd)
if err != nil {
return err
return 0, err
}

var harDoc har.HAR
err = json.Unmarshal(data, &harDoc)
if err != nil {
return err
return 0, err
}

cnt := 0
for _, entry := range harDoc.Log.Entries {
GetOasGeneratorInstance().PushEntry(&entry)
cnt += 1
feedEntry(&entry, isSync)
}

return nil
return cnt, nil
}

func feedFromLDJSON(file string) error {
func feedEntry(entry *har.Entry, isSync bool) {
if entry.Response.Status == 302 {
logger.Log.Debugf("Dropped traffic entry due to permanent redirect status: %s", entry.StartedDateTime)
}

if strings.Contains(entry.Request.URL, "taboola") {
logger.Log.Debugf("Interesting: %s", entry.Request.URL)
} else {
//return
}

if isSync {
GetOasGeneratorInstance().entriesChan <- *entry // blocking variant, right?
} else {
GetOasGeneratorInstance().PushEntry(entry)
}
}

func feedFromLDJSON(file string, isSync bool) (int, error) {
fd, err := os.Open(file)
if err != nil {
panic(err)

@@ -113,8 +143,8 @@ func feedFromLDJSON(file string) error {
reader := bufio.NewReader(fd)

var meta map[string]interface{}

buf := strings.Builder{}
cnt := 0
for {
substr, isPrefix, err := reader.ReadLine()
if err == io.EOF {

@@ -132,26 +162,28 @@
if meta == nil {
err := json.Unmarshal([]byte(line), &meta)
if err != nil {
return err
return 0, err
}
} else {
var entry har.Entry
err := json.Unmarshal([]byte(line), &entry)
if err != nil {
logger.Log.Warningf("Failed decoding entry: %s", line)
} else {
cnt += 1
feedEntry(&entry, isSync)
}
GetOasGeneratorInstance().PushEntry(&entry)
}
}

return nil
return cnt, nil
}

func TestFilesList(t *testing.T) {
res, err := getFiles("./test_artifacts/")
t.Log(len(res))
t.Log(res)
if err != nil || len(res) != 2 {
if err != nil || len(res) != 3 {
t.Logf("Should return 2 files but returned %d", len(res))
t.FailNow()
}
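A hedged usage sketch of the reworked feeder API shown above: feedEntries now reports how many HAR/LDJSON entries were pushed, and the isSync flag selects the blocking channel write in feedEntry instead of the asynchronous PushEntry. The file name below is a placeholder; this would live alongside the tests in package oas.

```go
// Sketch only: consuming the new (count, err) return values of feedEntries.
func feedSample() {
	files := []string{"test_artifacts/params.har"} // placeholder input
	cnt, err := feedEntries(files, true)           // isSync=true pushes via the blocking channel write
	if err != nil {
		logger.Log.Warning("Failed processing file: " + err.Error())
		return
	}
	logger.Log.Infof("Processed entries: %d", cnt)
}
```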
@@ -12,6 +12,7 @@ var (
patEmail = regexp.MustCompile(`^\w+([-+.']\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*$`)
patHexLower = regexp.MustCompile(`(0x)?[0-9a-f]{6,}`)
patHexUpper = regexp.MustCompile(`(0x)?[0-9A-F]{6,}`)
patLongNum = regexp.MustCompile(`\d{6,}`)
)

func IsGibberish(str string) bool {

@@ -27,7 +28,7 @@ func IsGibberish(str string) bool {
return true
}

if patHexLower.MatchString(str) || patHexUpper.MatchString(str) {
if patHexLower.MatchString(str) || patHexUpper.MatchString(str) || patLongNum.MatchString(str) {
return true
}
@@ -17,17 +17,19 @@ var ignoredHeaders = []string{
"x-att-deviceid", "x-correlation-id", "correlation-id", "x-client-data",
"x-http-method-override", "x-real-ip", "x-request-id", "x-request-start", "x-requested-with", "x-uidh",
"x-same-domain", "x-content-type-options", "x-frame-options", "x-xss-protection",
"x-wap-profile", "x-scheme",
"newrelic", "x-cloud-trace-context", "sentry-trace",
"expires", "set-cookie", "p3p", "location", "content-security-policy", "content-security-policy-report-only",
"last-modified", "content-language",
"x-wap-profile", "x-scheme", "status", "x-cache", "x-application-context", "retry-after",
"newrelic", "x-cloud-trace-context", "sentry-trace", "x-cache-hits", "x-served-by", "x-span-name",
"expires", "set-cookie", "p3p", "content-security-policy", "content-security-policy-report-only",
"last-modified", "content-language", "x-varnish", "true-client-ip", "akamai-origin-hop",
"keep-alive", "etag", "alt-svc", "x-csrf-token", "x-ua-compatible", "vary", "x-powered-by",
"age", "allow", "www-authenticate",
"age", "allow", "www-authenticate", "expect-ct", "timing-allow-origin", "referrer-policy",
"x-aspnet-version", "x-aspnetmvc-version", "x-timer", "x-abuse-info", "x-mod-pagespeed",
"duration_ms", // UP9 custom
}

var ignoredHeaderPrefixes = []string{
":", "accept-", "access-control-", "if-", "sec-", "grpc-",
"x-forwarded-", "x-original-",
"x-forwarded-", "x-original-", "cf-",
"x-up9-", "x-envoy-", "x-hasura-", "x-b3-", "x-datadog-", "x-envoy-", "x-amz-", "x-newrelic-", "x-prometheus-",
"x-akamai-", "x-spotim-", "x-amzn-", "x-ratelimit-",
}
@@ -30,7 +30,7 @@ func NewGen(server string) *SpecGen {
spec.Version = "3.1.0"

info := openapi.Info{Title: server}
info.Version = "0.0"
info.Version = "1.0"
spec.Info = &info
spec.Paths = &openapi.Paths{Items: map[openapi.PathValue]*openapi.PathObj{}}

@@ -175,11 +175,18 @@ func (g *SpecGen) handlePathObj(entry *har.Entry) (string, error) {
if isExtIgnored(urlParsed.Path) {
logger.Log.Debugf("Dropped traffic entry due to ignored extension: %s", urlParsed.Path)
return "", nil
}

if entry.Request.Method == "OPTIONS" {
logger.Log.Debugf("Dropped traffic entry due to its method: %s", urlParsed.Path)
return "", nil
}

ctype := getRespCtype(&entry.Response)
if isCtypeIgnored(ctype) {
logger.Log.Debugf("Dropped traffic entry due to ignored response ctype: %s", ctype)
return "", nil
}

if entry.Response.Status < 100 {

@@ -192,9 +199,19 @@ func (g *SpecGen) handlePathObj(entry *har.Entry) (string, error) {
return "", nil
}

split := strings.Split(urlParsed.Path, "/")
if entry.Response.Status == 502 || entry.Response.Status == 503 || entry.Response.Status == 504 {
logger.Log.Debugf("Dropped traffic entry due to temporary server error: %s", entry.StartedDateTime)
return "", nil
}

var split []string
if urlParsed.RawPath != "" {
split = strings.Split(urlParsed.RawPath, "/")
} else {
split = strings.Split(urlParsed.Path, "/")
}
node := g.tree.getOrSet(split, new(openapi.PathObj))
opObj, err := handleOpObj(entry, node.ops)
opObj, err := handleOpObj(entry, node.pathObj)

if opObj != nil {
return opObj.OperationID, err

@@ -232,20 +249,16 @@ func handleRequest(req *har.Request, opObj *openapi.Operation, isSuccess bool) e
// TODO: we don't handle the situation when header/qstr param can be defined on pathObj level. Also the path param defined on opObj

qstrGW := nvParams{
In: openapi.InQuery,
Pairs: func() []NVPair {
return qstrToNVP(req.QueryString)
},
In: openapi.InQuery,
Pairs: req.QueryString,
IsIgnored: func(name string) bool { return false },
GeneralizeName: func(name string) string { return name },
}
handleNameVals(qstrGW, &opObj.Parameters)

hdrGW := nvParams{
In: openapi.InHeader,
Pairs: func() []NVPair {
return hdrToNVP(req.Headers)
},
In: openapi.InHeader,
Pairs: req.Headers,
IsIgnored: isHeaderIgnored,
GeneralizeName: strings.ToLower,
}

@@ -348,7 +361,7 @@ func fillContent(reqResp reqResp, respContent openapi.Content, ctype string, err
isBinary, _, text = reqResp.Resp.Content.B64Decoded()
}

if !isBinary {
if !isBinary && text != "" {
var exampleMsg []byte
// try treating it as json
any, isJSON := anyJSON(text)
@@ -3,48 +3,56 @@ package oas
import (
"encoding/json"
"github.com/chanced/openapi"
"github.com/op/go-logging"
"github.com/up9inc/mizu/shared/logger"
"io/ioutil"
"mizuserver/pkg/har"
"os"
"strings"
"testing"
"time"
)

// if started via env, write file into subdir
func writeFiles(label string, spec *openapi.OpenAPI) {
func outputSpec(label string, spec *openapi.OpenAPI, t *testing.T) {
content, err := json.MarshalIndent(spec, "", "\t")
if err != nil {
panic(err)
}

if os.Getenv("MIZU_OAS_WRITE_FILES") != "" {
path := "./oas-samples"
err := os.MkdirAll(path, 0o755)
if err != nil {
panic(err)
}
content, err := json.MarshalIndent(spec, "", "\t")
if err != nil {
panic(err)
}
err = ioutil.WriteFile(path+"/"+label+".json", content, 0644)
if err != nil {
panic(err)
}
t.Logf("Written: %s", label)
} else {
t.Logf("%s", string(content))
}
}

func TestEntries(t *testing.T) {
logger.InitLoggerStderrOnly(logging.INFO)
files, err := getFiles("./test_artifacts/")
// files, err = getFiles("/media/bigdisk/UP9")
if err != nil {
t.Log(err)
t.FailNow()
}
GetOasGeneratorInstance().Start()
loadStartingOAS()

if err := feedEntries(files); err != nil {
cnt, err := feedEntries(files, true)
if err != nil {
t.Log(err)
t.Fail()
}

loadStartingOAS()
waitQueueProcessed()

svcs := strings.Builder{}
GetOasGeneratorInstance().ServiceSpecs.Range(func(key, val interface{}) bool {

@@ -78,33 +86,35 @@ func TestEntries(t *testing.T) {
t.FailNow()
}

specText, _ := json.MarshalIndent(spec, "", "\t")
t.Logf("%s", string(specText))
outputSpec(svc, spec, t)

err = spec.Validate()
if err != nil {
t.Log(err)
t.FailNow()
}
writeFiles(svc, spec)

return true
})

logger.Log.Infof("Total entries: %d", cnt)
}

func TestFileLDJSON(t *testing.T) {
func TestFileSingle(t *testing.T) {
GetOasGeneratorInstance().Start()
file := "test_artifacts/output_rdwtyeoyrj.har.ldjson"
err := feedFromLDJSON(file)
// loadStartingOAS()
file := "test_artifacts/params.har"
files := []string{file}
cnt, err := feedEntries(files, true)
if err != nil {
logger.Log.Warning("Failed processing file: " + err.Error())
t.Fail()
}

loadStartingOAS()
waitQueueProcessed()

GetOasGeneratorInstance().ServiceSpecs.Range(func(_, val interface{}) bool {
GetOasGeneratorInstance().ServiceSpecs.Range(func(key, val interface{}) bool {
svc := key.(string)
gen := val.(*SpecGen)
spec, err := gen.GetSpec()
if err != nil {

@@ -112,8 +122,7 @@ func TestFileLDJSON(t *testing.T) {
t.FailNow()
}

specText, _ := json.MarshalIndent(spec, "", "\t")
t.Logf("%s", string(specText))
outputSpec(svc, spec, t)

err = spec.Validate()
if err != nil {

@@ -123,6 +132,19 @@ func TestFileLDJSON(t *testing.T) {
return true
})

logger.Log.Infof("Processed entries: %d", cnt)
}

func waitQueueProcessed() {
for {
time.Sleep(100 * time.Millisecond)
queue := len(GetOasGeneratorInstance().entriesChan)
logger.Log.Infof("Queue: %d", queue)
if queue < 1 {
break
}
}
}

func loadStartingOAS() {

@@ -155,20 +177,29 @@ func loadStartingOAS() {
func TestEntriesNegative(t *testing.T) {
files := []string{"invalid"}
err := feedEntries(files)
_, err := feedEntries(files, false)
if err == nil {
t.Logf("Should have failed")
t.Fail()
}
}

func TestEntriesPositive(t *testing.T) {
files := []string{"test_artifacts/params.har"}
_, err := feedEntries(files, false)
if err != nil {
t.Logf("Failed")
t.Fail()
}
}

func TestLoadValidHAR(t *testing.T) {
inp := `{"startedDateTime": "2021-02-03T07:48:12.959000+00:00", "time": 1, "request": {"method": "GET", "url": "http://unresolved_target/1.0.0/health", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [], "queryString": [], "headersSize": -1, "bodySize": -1}, "response": {"status": 200, "statusText": "OK", "httpVersion": "HTTP/1.1", "cookies": [], "headers": [], "content": {"size": 2, "mimeType": "", "text": "OK"}, "redirectURL": "", "headersSize": -1, "bodySize": 2}, "cache": {}, "timings": {"send": -1, "wait": -1, "receive": 1}}`
var entry *har.Entry
var err = json.Unmarshal([]byte(inp), &entry)
if err != nil {
t.Logf("Failed to decode entry: %s", err)
// t.FailNow() demonstrates the problem of library
t.FailNow() // demonstrates the problem of `martian` HAR library
}
}
@@ -13,10 +13,14 @@
"time": 1022,
"request": {
"method": "GET",
"url": "http://trcc-api-service/proxies/",
"url": "http://trcc-api-service/proxies;matrixparam=example/",
"httpVersion": "",
"cookies": [],
"headers": [
{
"name": "X-Custom-Demo-Header",
"value": "demo"
},
{
"name": "Sec-Fetch-Dest",
"value": "empty"

@@ -124,6 +128,10 @@
"httpVersion": "",
"cookies": [],
"headers": [
{
"name": "X-Custom-Demo-Header2",
"value": "demo2"
},
{
"name": "Vary",
"value": "Origin"

@@ -24568,7 +24576,7 @@
"bodySize": -1
},
"response": {
"status": 200,
"status": 308,
"statusText": "OK",
"httpVersion": "",
"cookies": [],

@@ -24635,7 +24643,7 @@
"time": 1,
"request": {
"method": "GET",
"url": "http://trcc-api-service/models/aws_kong5/suites/all/runs",
"url": "http://trcc-api-service/models/aws_kong5/suites/all/runs.png",
"httpVersion": "",
"cookies": [],
"headers": [

@@ -24894,7 +24902,7 @@
"bodySize": -1
},
"response": {
"status": 200,
"status": 0,
"statusText": "OK",
"httpVersion": "",
"cookies": [],
agent/pkg/oas/test_artifacts/params.har (Normal file, 127 lines)
@@ -0,0 +1,127 @@
{
"log": {
"version": "1.2",
"creator": {
"name": "mitmproxy har_dump",
"version": "0.1",
"comment": "mitmproxy version mitmproxy 4.0.4"
},
"entries": [
{
"startedDateTime": "2019-09-06T06:14:43.864529+00:00",
"time": 111,
"request": {
"method": "GET",
"url": "https://httpbin.org/e21f7112-3d3b-4632-9da3-a4af2e0e9166/sub1",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [],
"headersSize": 1542,
"bodySize": 0,
"queryString": []
},
"response": {
"status": 200,
"statusText": "OK",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [
],
"content": {
"mimeType": "text/html",
"text": "",
"size": 0
},
"redirectURL": "",
"headersSize": 245,
"bodySize": 39
},
"cache": {},
"timings": {
"send": 22,
"receive": 2,
"wait": 87,
"connect": -1,
"ssl": -1
},
"serverIPAddress": "54.210.29.33"
},
{
"startedDateTime": "2019-09-06T06:16:18.747122+00:00",
"time": 630,
"request": {
"method": "GET",
"url": "https://httpbin.org/952bea17-3776-11ea-9341-42010a84012a/sub2",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [],
"queryString": [],
"headersSize": 1542,
"bodySize": 0
},
"response": {
"status": 200,
"statusText": "OK",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [],
"content": {
"size": 39,
"compression": -20,
"mimeType": "application/json",
"text": "null"
},
"redirectURL": "",
"headersSize": 248,
"bodySize": 39
},
"cache": {},
"timings": {
"send": 14,
"receive": 4,
"wait": 350,
"connect": 262,
"ssl": -1
}
},
{
"startedDateTime": "2019-09-06T06:16:19.747122+00:00",
"time": 630,
"request": {
"method": "GET",
"url": "https://httpbin.org/952bea17-3776-11ea-9341-42010a84012a;mparam=matrixparam",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [],
"queryString": [],
"headersSize": 1542,
"bodySize": 0
},
"response": {
"status": 200,
"statusText": "OK",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [],
"content": {
"size": 39,
"compression": -20,
"mimeType": "application/json",
"text": "null"
},
"redirectURL": "",
"headersSize": 248,
"bodySize": 39
},
"cache": {},
"timings": {
"send": 14,
"receive": 4,
"wait": 350,
"connect": 262,
"ssl": -1
}
}
]
}
}
@@ -3,6 +3,7 @@ package oas
import (
"github.com/chanced/openapi"
"github.com/up9inc/mizu/shared/logger"
"net/url"
"strconv"
"strings"
)

@@ -10,25 +11,37 @@ import (
type NodePath = []string

type Node struct {
constant *string
param *openapi.ParameterObj
ops *openapi.PathObj
parent *Node
children []*Node
constant *string
pathParam *openapi.ParameterObj
pathObj *openapi.PathObj
parent *Node
children []*Node
}

func (n *Node) getOrSet(path NodePath, pathObjToSet *openapi.PathObj) (node *Node) {
if pathObjToSet == nil {
func (n *Node) getOrSet(path NodePath, existingPathObj *openapi.PathObj) (node *Node) {
if existingPathObj == nil {
panic("Invalid function call")
}

pathChunk := path[0]
potentialMatrix := strings.SplitN(pathChunk, ";", 2)
if len(potentialMatrix) > 1 {
pathChunk = potentialMatrix[0]
logger.Log.Warningf("URI matrix params are not supported: %s", potentialMatrix[1])
}

chunkIsParam := strings.HasPrefix(pathChunk, "{") && strings.HasSuffix(pathChunk, "}")
pathChunk, err := url.PathUnescape(pathChunk)
if err != nil {
logger.Log.Warningf("URI segment is not correctly encoded: %s", pathChunk)
// any side effects on continuing?
}

chunkIsGibberish := IsGibberish(pathChunk) && !IsVersionString(pathChunk)

var paramObj *openapi.ParameterObj
if chunkIsParam && pathObjToSet != nil && pathObjToSet.Parameters != nil {
paramObj = findParamByName(pathObjToSet.Parameters, openapi.InPath, pathChunk[1:len(pathChunk)-1])
if chunkIsParam && existingPathObj != nil && existingPathObj.Parameters != nil {
_, paramObj = findParamByName(existingPathObj.Parameters, openapi.InPath, pathChunk[1:len(pathChunk)-1])
}

if paramObj == nil {

@@ -46,34 +59,30 @@ func (n *Node) getOrSet(path NodePath, pathObjToSet *openapi.PathObj) (node *Nod
n.children = append(n.children, node)

if paramObj != nil {
node.param = paramObj
node.pathParam = paramObj
} else if chunkIsGibberish {
initParams(&pathObjToSet.Parameters)

newParam := n.createParam()
node.param = newParam

appended := append(*pathObjToSet.Parameters, newParam)
pathObjToSet.Parameters = &appended
node.pathParam = newParam
} else {
node.constant = &pathChunk
}
}

// add example if it's a param
if node.param != nil && !chunkIsParam {
exmp := &node.param.Examples
// add example if it's a gibberish chunk
if node.pathParam != nil && !chunkIsParam {
exmp := &node.pathParam.Examples
err := fillParamExample(&exmp, pathChunk)
if err != nil {
logger.Log.Warningf("Failed to add example to a parameter: %s", err)
}
}

// TODO: eat up trailing slash, in a smart way: node.ops!=nil && path[1]==""
// TODO: eat up trailing slash, in a smart way: node.pathObj!=nil && path[1]==""
if len(path) > 1 {
return node.getOrSet(path[1:], pathObjToSet)
} else if node.ops == nil {
node.ops = pathObjToSet
return node.getOrSet(path[1:], existingPathObj)
} else if node.pathObj == nil {
node.pathObj = existingPathObj
}

return node

@@ -90,12 +99,16 @@ func (n *Node) createParam() *openapi.ParameterObj {
} else if strings.HasSuffix(*n.constant, "s") && len(*n.constant) > 3 {
name = *n.constant
name = name[:len(name)-1] + "Id"
} else if isAlpha(*n.constant) {
name = *n.constant + "Id"
}

name = cleanNonAlnum([]byte(name))
}

newParam := createSimpleParam(name, "path", "string")
x := n.countParentParams()
if x > 1 {
if x > 0 {
newParam.Name = newParam.Name + strconv.Itoa(x)
}

@@ -113,7 +126,7 @@ func (n *Node) searchInParams(paramObj *openapi.ParameterObj, chunkIsGibberish b
// TODO: check the regex pattern of param? for exceptions etc

if paramObj != nil {
// TODO: mergeParam(subnode.param, paramObj)
// TODO: mergeParam(subnode.pathParam, paramObj)
return subnode
} else {
return subnode

@@ -147,15 +160,16 @@ func (n *Node) listPaths() *openapi.Paths {
var strChunk string
if n.constant != nil {
strChunk = *n.constant
} else if n.param != nil {
strChunk = "{" + n.param.Name + "}"
} else if n.pathParam != nil {
strChunk = "{" + n.pathParam.Name + "}"
} else {
// this is the root node
}

// add self
if n.ops != nil {
paths.Items[openapi.PathValue(strChunk)] = n.ops
if n.pathObj != nil {
fillPathParams(n, n.pathObj)
paths.Items[openapi.PathValue(strChunk)] = n.pathObj
}

// recurse into children

@@ -175,6 +189,29 @@ func (n *Node) listPaths() *openapi.Paths {
return paths
}

func fillPathParams(n *Node, pathObj *openapi.PathObj) {
// collect all path parameters from parent hierarchy
node := n
for {
if node.pathParam != nil {
initParams(&pathObj.Parameters)

idx, paramObj := findParamByName(pathObj.Parameters, openapi.InPath, node.pathParam.Name)
if paramObj == nil {
appended := append(*pathObj.Parameters, node.pathParam)
pathObj.Parameters = &appended
} else {
(*pathObj.Parameters)[idx] = paramObj
}
}

node = node.parent
if node == nil {
break
}
}
}

type PathAndOp struct {
path string
op *openapi.Operation

@@ -194,7 +231,7 @@ func (n *Node) countParentParams() int {
res := 0
node := n
for {
if node.param != nil {
if node.pathParam != nil {
res++
}
@@ -8,19 +8,30 @@ import (
func TestTree(t *testing.T) {
testCases := []struct {
inp string
inp string
numParams int
label string
}{
{"/"},
{"/v1.0.0/config/launcher/sp_nKNHCzsN/f34efcae-6583-11eb-908a-00b0fcb9d4f6/vendor,init,conversation"},
{"/", 0, ""},
{"/v1.0.0/config/launcher/sp_nKNHCzsN/f34efcae-6583-11eb-908a-00b0fcb9d4f6/vendor,init,conversation", 1, "vendor,init,conversation"},
{"/v1.0.0/config/launcher/sp_nKNHCzsN/{f34efcae-6583-11eb-908a-00b0fcb9d4f6}/vendor,init,conversation", 0, "vendor,init,conversation"},
{"/getSvgs/size/small/brand/SFLY/layoutId/170943/layoutVersion/1/sizeId/742/surface/0/isLandscape/true/childSkus/%7B%7D", 1, ""},
}

tree := new(Node)
for _, tc := range testCases {
split := strings.Split(tc.inp, "/")
node := tree.getOrSet(split, new(openapi.PathObj))
pathObj := new(openapi.PathObj)
node := tree.getOrSet(split, pathObj)

fillPathParams(node, pathObj)

if node.constant == nil {
t.Errorf("nil constant: %s", tc.inp)
if node.constant != nil && *node.constant != tc.label {
t.Errorf("Constant does not match: %s != %s", *node.constant, tc.label)
}

if tc.numParams > 0 && (pathObj.Parameters == nil || len(*pathObj.Parameters) < tc.numParams) {
t.Errorf("Wrong num of params, expected: %d", tc.numParams)
}
}
}
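As a reading aid, not part of the change set: with the renamed Node fields, a path chunk that looks like gibberish becomes a pathParam on its node, and fillPathParams later copies the parameters collected along the parent chain onto the emitted PathObj; findParamByName now also returns the index so an existing entry can be replaced in place instead of appended a second time. A minimal sketch with a made-up input path, in the same package oas context as the test above:

```go
// Sketch only; the input path is illustrative.
func exampleTreeUsage() {
	tree := new(Node)
	split := strings.Split("/users/0a1b2c3d4e5f/orders;sort=asc", "/")
	pathObj := new(openapi.PathObj)
	node := tree.getOrSet(split, pathObj) // ";sort=asc" matrix part is stripped with a warning;
	// "0a1b2c3d4e5f" matches the hex pattern, counts as gibberish, and becomes node.pathParam
	fillPathParams(node, pathObj) // params from the parent chain land on pathObj.Parameters
	_ = node
}
```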
@@ -71,9 +71,10 @@ func createSimpleParam(name string, in openapi.In, ptype openapi.SchemaType) *op
return &newParam
}

func findParamByName(params *openapi.ParameterList, in openapi.In, name string) (pathParam *openapi.ParameterObj) {
func findParamByName(params *openapi.ParameterList, in openapi.In, name string) (idx int, pathParam *openapi.ParameterObj) {
caseInsensitive := in == openapi.InHeader
for _, param := range *params {
for i, param := range *params {
idx = i
paramObj, err := param.ResolveParameter(paramResolver)
if err != nil {
logger.Log.Warningf("Failed to resolve reference: %s", err)

@@ -89,7 +90,8 @@ func findParamByName(params *openapi.ParameterList, in openapi.In, name string)
break
}
}
return pathParam

return idx, pathParam
}

func findHeaderByName(headers *openapi.Headers, name string) *openapi.HeaderObj {

@@ -107,37 +109,16 @@ func findHeaderByName(headers *openapi.Headers, name string) *openapi.HeaderObj
return nil
}

type NVPair struct {
Name string
Value string
}

type nvParams struct {
In openapi.In
Pairs func() []NVPair
Pairs []har.NVP
IsIgnored func(name string) bool
GeneralizeName func(name string) string
}

func qstrToNVP(list []har.QueryString) []NVPair {
res := make([]NVPair, len(list))
for idx, val := range list {
res[idx] = NVPair{Name: val.Name, Value: val.Value}
}
return res
}

func hdrToNVP(list []har.Header) []NVPair {
res := make([]NVPair, len(list))
for idx, val := range list {
res[idx] = NVPair{Name: val.Name, Value: val.Value}
}
return res
}

func handleNameVals(gw nvParams, params **openapi.ParameterList) {
visited := map[string]*openapi.ParameterObj{}
for _, pair := range gw.Pairs() {
for _, pair := range gw.Pairs {
if gw.IsIgnored(pair.Name) {
continue
}

@@ -145,7 +126,7 @@ func handleNameVals(gw nvParams, params **openapi.ParameterList) {
nameGeneral := gw.GeneralizeName(pair.Name)

initParams(params)
param := findParamByName(*params, gw.In, pair.Name)
_, param := findParamByName(*params, gw.In, pair.Name)
if param == nil {
param = createSimpleParam(nameGeneral, gw.In, openapi.TypeString)
appended := append(**params, param)

@@ -342,3 +323,26 @@ func anyJSON(text string) (anyVal interface{}, isJSON bool) {
return nil, false
}

func cleanNonAlnum(s []byte) string {
j := 0
for _, b := range s {
if ('a' <= b && b <= 'z') ||
('A' <= b && b <= 'Z') ||
('0' <= b && b <= '9') ||
b == ' ' {
s[j] = b
j++
}
}
return string(s[:j])
}

func isAlpha(s string) bool {
for _, r := range s {
if (r < 'a' || r > 'z') && (r < 'A' || r > 'Z') {
return false
}
}
return true
}
@@ -23,8 +23,8 @@ type Provider struct {
client *http.Client
}

const DefaultRetries = 20
const DefaultTimeout = 5 * time.Second
const DefaultRetries = 3
const DefaultTimeout = 2 * time.Second

func NewProvider(url string, retries int, timeout time.Duration) *Provider {
return &Provider{

@@ -36,16 +36,6 @@ func NewProvider(url string, retries int, timeout time.Duration) *Provider {
}
}

func NewProviderWithoutRetries(url string, timeout time.Duration) *Provider {
return &Provider{
url: url,
retries: 1,
client: &http.Client{
Timeout: timeout,
},
}
}

func (provider *Provider) TestConnection() error {
retriesLeft := provider.retries
for retriesLeft > 0 {
@@ -3,13 +3,14 @@ package cmd
import (
"context"
"fmt"
"regexp"

"github.com/up9inc/mizu/cli/apiserver"
"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/shared/kubernetes"
"github.com/up9inc/mizu/shared/logger"
"github.com/up9inc/mizu/shared/semver"
"regexp"
)

func runMizuCheck() {

@@ -96,7 +97,7 @@ func checkServerConnection(kubernetesProvider *kubernetes.Provider) bool {
serverUrl := GetApiServerUrl()

apiServerProvider := apiserver.NewProviderWithoutRetries(serverUrl, apiserver.DefaultTimeout)
apiServerProvider := apiserver.NewProvider(serverUrl, 1, apiserver.DefaultTimeout)
if err := apiServerProvider.TestConnection(); err == nil {
logger.Log.Infof("%v found Mizu server tunnel available and connected successfully to API server", fmt.Sprintf(uiUtils.Green, "√"))
return true
@@ -169,32 +170,32 @@ func checkAllResourcesExist(ctx context.Context, kubernetesProvider *kubernetes.
allResourcesExist := checkResourceExist(config.Config.MizuResourcesNamespace, "namespace", exist, err)

exist, err = kubernetesProvider.DoesConfigMapExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ConfigMapName)
allResourcesExist = checkResourceExist(kubernetes.ConfigMapName, "config map", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ConfigMapName, "config map", exist, err) && allResourcesExist

exist, err = kubernetesProvider.DoesServiceAccountExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ServiceAccountName)
allResourcesExist = checkResourceExist(kubernetes.ServiceAccountName, "service account", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ServiceAccountName, "service account", exist, err) && allResourcesExist

if config.Config.IsNsRestrictedMode() {
exist, err = kubernetesProvider.DoesRoleExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.RoleName)
allResourcesExist = checkResourceExist(kubernetes.RoleName, "role", exist, err)
allResourcesExist = checkResourceExist(kubernetes.RoleName, "role", exist, err) && allResourcesExist

exist, err = kubernetesProvider.DoesRoleBindingExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.RoleBindingName)
allResourcesExist = checkResourceExist(kubernetes.RoleBindingName, "role binding", exist, err)
allResourcesExist = checkResourceExist(kubernetes.RoleBindingName, "role binding", exist, err) && allResourcesExist
} else {
exist, err = kubernetesProvider.DoesClusterRoleExist(ctx, kubernetes.ClusterRoleName)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleName, "cluster role", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleName, "cluster role", exist, err) && allResourcesExist

exist, err = kubernetesProvider.DoesClusterRoleBindingExist(ctx, kubernetes.ClusterRoleBindingName)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleBindingName, "cluster role binding", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ClusterRoleBindingName, "cluster role binding", exist, err) && allResourcesExist
}

exist, err = kubernetesProvider.DoesServiceExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ApiServerPodName)
allResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "service", exist, err)
allResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "service", exist, err) && allResourcesExist

if isInstallCommand {
allResourcesExist = checkInstallResourcesExist(ctx, kubernetesProvider)
allResourcesExist = checkInstallResourcesExist(ctx, kubernetesProvider) && allResourcesExist
} else {
allResourcesExist = checkTapResourcesExist(ctx, kubernetesProvider)
allResourcesExist = checkTapResourcesExist(ctx, kubernetesProvider) && allResourcesExist
}

return allResourcesExist

@@ -205,13 +206,13 @@ func checkInstallResourcesExist(ctx context.Context, kubernetesProvider *kuberne
installResourcesExist := checkResourceExist(kubernetes.DaemonRoleName, "role", exist, err)

exist, err = kubernetesProvider.DoesRoleBindingExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.DaemonRoleBindingName)
installResourcesExist = checkResourceExist(kubernetes.DaemonRoleBindingName, "role binding", exist, err)
installResourcesExist = checkResourceExist(kubernetes.DaemonRoleBindingName, "role binding", exist, err) && installResourcesExist

exist, err = kubernetesProvider.DoesPersistentVolumeClaimExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.PersistentVolumeClaimName)
installResourcesExist = checkResourceExist(kubernetes.PersistentVolumeClaimName, "persistent volume claim", exist, err)
installResourcesExist = checkResourceExist(kubernetes.PersistentVolumeClaimName, "persistent volume claim", exist, err) && installResourcesExist

exist, err = kubernetesProvider.DoesDeploymentExist(ctx, config.Config.MizuResourcesNamespace, kubernetes.ApiServerPodName)
installResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "deployment", exist, err)
installResourcesExist = checkResourceExist(kubernetes.ApiServerPodName, "deployment", exist, err) && installResourcesExist

return installResourcesExist
}
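The pattern behind the check changes above, shown in isolation: previously each assignment overwrote the accumulated flag, so an earlier missing resource could be masked by a later existing one, and appending `&& allResourcesExist` keeps the first failure sticky. A reduced, self-contained sketch (the check helper and resource names below are placeholders, not the real kubernetesProvider calls):

```go
// Reduced illustration of the accumulation fix.
package main

import "fmt"

func check(name string, exists bool) bool {
	fmt.Printf("%s exists: %v\n", name, exists)
	return exists
}

func main() {
	// Old pattern: each assignment overwrites the flag, so a later success hides an earlier failure.
	all := check("namespace", false)
	all = check("config map", true) // all is true again, masking the missing namespace

	// New pattern: AND-ing with the accumulator keeps any single failure visible at the end.
	all = check("namespace", false)
	all = check("config map", true) && all
	fmt.Println("all resources exist:", all) // prints false, as intended
}
```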
@@ -5,6 +5,10 @@ import (
"encoding/json"
"errors"
"fmt"
"path"
"regexp"
"time"

"github.com/up9inc/mizu/cli/apiserver"
"github.com/up9inc/mizu/cli/config/configStructs"
"github.com/up9inc/mizu/cli/errormessage"

@@ -13,9 +17,6 @@ import (
"github.com/up9inc/mizu/cli/resources"
"github.com/up9inc/mizu/cli/uiUtils"
"github.com/up9inc/mizu/shared"
"path"
"regexp"
"time"

"github.com/up9inc/mizu/cli/config"
"github.com/up9inc/mizu/shared/kubernetes"

@@ -35,10 +36,10 @@ func startProxyReportErrorIfAny(kubernetesProvider *kubernetes.Provider, ctx con
return
}

apiProvider = apiserver.NewProviderWithoutRetries(GetApiServerUrl(), time.Second) // short check for proxy
apiProvider = apiserver.NewProvider(GetApiServerUrl(), apiserver.DefaultRetries, apiserver.DefaultTimeout)
if err := apiProvider.TestConnection(); err != nil {
logger.Log.Debugf("Couldn't connect using proxy, stopping proxy and trying to create port-forward")
if err := httpServer.Shutdown(context.Background()); err != nil {
if err := httpServer.Shutdown(ctx); err != nil {
logger.Log.Debugf("Error occurred while stopping proxy %v", errormessage.FormatError(err))
}

@@ -50,7 +51,7 @@
return
}

apiProvider = apiserver.NewProvider(GetApiServerUrl(), apiserver.DefaultRetries, apiserver.DefaultTimeout) // long check for port-forward
apiProvider = apiserver.NewProvider(GetApiServerUrl(), apiserver.DefaultRetries, apiserver.DefaultTimeout)
if err := apiProvider.TestConnection(); err != nil {
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Couldn't connect to API server, for more info check logs at %s", fsUtils.GetLogFilePath()))
cancel()

@@ -74,6 +74,7 @@ func getInstallMizuAgentConfig(maxDBSizeBytes int64, tapperResources shared.Reso
StandaloneMode: true,
ServiceMap: config.Config.ServiceMap,
OAS: config.Config.OAS,
Elastic: config.Config.Elastic,
}

return &mizuAgentConfig

@@ -23,7 +23,7 @@ var logsCmd = &cobra.Command{
if err != nil {
return nil
}
ctx, _ := context.WithCancel(context.Background())
ctx := context.Background()

if validationErr := config.Config.Logs.Validate(); validationErr != nil {
return errormessage.FormatError(validationErr)

@@ -164,6 +164,7 @@ func getTapMizuAgentConfig() *shared.MizuAgentConfig {
AgentDatabasePath: shared.DataDirPath,
ServiceMap: config.Config.ServiceMap,
OAS: config.Config.OAS,
Elastic: config.Config.Elastic,
}

return &mizuAgentConfig

@@ -342,7 +343,7 @@ func watchApiServerPod(ctx context.Context, kubernetesProvider *kubernetes.Provi
if modifiedPod.Status.Phase == core.PodRunning && !isPodReady {
isPodReady = true
postApiServerStarted(ctx, kubernetesProvider, cancel, err)
postApiServerStarted(ctx, kubernetesProvider, cancel)
}
case kubernetes.EventBookmark:
break

@@ -405,7 +406,7 @@ func watchApiServerEvents(ctx context.Context, kubernetesProvider *kubernetes.Pr
case "FailedScheduling", "Failed":
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Mizu API Server status: %s - %s", event.Reason, event.Note))
cancel()
break

}
case err, ok := <-errorChan:
if !ok {

@@ -421,11 +422,11 @@ func watchApiServerEvents(ctx context.Context, kubernetesProvider *kubernetes.Pr
}
}

func postApiServerStarted(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc, err error) {
func postApiServerStarted(ctx context.Context, kubernetesProvider *kubernetes.Provider, cancel context.CancelFunc) {
startProxyReportErrorIfAny(kubernetesProvider, ctx, cancel)

options, _ := getMizuApiFilteringOptions()
if err = startTapperSyncer(ctx, cancel, kubernetesProvider, state.targetNamespaces, *options, state.startTime); err != nil {
if err := startTapperSyncer(ctx, cancel, kubernetesProvider, state.targetNamespaces, *options, state.startTime); err != nil {
logger.Log.Errorf(uiUtils.Error, fmt.Sprintf("Error starting mizu tapper syncer: %v", err))
cancel()
}
@@ -27,7 +27,7 @@ type ConfigStruct struct {
Logs configStructs.LogsConfig `yaml:"logs"`
Auth configStructs.AuthConfig `yaml:"auth"`
Config configStructs.ConfigConfig `yaml:"config,omitempty"`
AgentImage string `yaml:"agent-image,omitempty" readonly:""`
AgentImage string `yaml:"agent-image,omitempty"`
KratosImage string `yaml:"kratos-image,omitempty" readonly:""`
KetoImage string `yaml:"keto-image,omitempty" readonly:""`
ImagePullPolicyStr string `yaml:"image-pull-policy" default:"Always"`

@@ -40,6 +40,7 @@
LogLevelStr string `yaml:"log-level,omitempty" default:"INFO" readonly:""`
ServiceMap bool `yaml:"service-map,omitempty" default:"false" readonly:""`
OAS bool `yaml:"oas,omitempty" default:"false" readonly:""`
Elastic shared.ElasticConfig `yaml:"elastic"`
}

func (config *ConfigStruct) validate() error {

@@ -43,6 +43,13 @@ type MizuAgentConfig struct {
StandaloneMode bool `json:"standaloneMode"`
ServiceMap bool `json:"serviceMap"`
OAS bool `json:"oas"`
Elastic ElasticConfig `json:"elastic"`
}

type ElasticConfig struct {
User string `yaml:"user,omitempty" default:"" readonly:""`
Password string `yaml:"password,omitempty" default:"" readonly:""`
Url string `yaml:"url,omitempty" default:"" readonly:""`
}

type WebSocketMessageMetadata struct {
@@ -100,7 +100,6 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
http2Assembler = createHTTP2Assembler(b)
}

dissected := false
switchingProtocolsHTTP2 := false
for {
if switchingProtocolsHTTP2 {

@@ -121,7 +120,7 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
} else if err != nil {
continue
}
dissected = true
superIdentifier.Protocol = &http11protocol
} else if isClient {
var req *http.Request
switchingProtocolsHTTP2, req, err = handleHTTP1ClientStream(b, tcpID, counterPair, superTimer, emitter, options)

@@ -130,7 +129,7 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
} else if err != nil {
continue
}
dissected = true
superIdentifier.Protocol = &http11protocol

// In case of an HTTP2 upgrade, duplicate the HTTP1 request into HTTP2 with stream ID 1
if switchingProtocolsHTTP2 {

@@ -161,14 +160,14 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
} else if err != nil {
continue
}
dissected = true
superIdentifier.Protocol = &http11protocol
}
}

if !dissected {
if superIdentifier.Protocol == nil {
return err
}
superIdentifier.Protocol = &http11protocol

return nil
}
@@ -197,6 +197,12 @@ func (r *RedisInputStream) readLineBytes() ([]byte, error) {
line := make([]byte, N)
j := 0
for i := r.count; i <= N; i++ {
if i >= len(buf) {
return nil, errors.New("Redis buffer index mismatch.")
}
if i >= len(line) {
return nil, errors.New("Redis line index mismatch.")
}
line[j] = buf[i]
j++
}

@@ -295,27 +301,44 @@ func (p *RedisProtocol) Read() (packet *RedisPacket, err error) {
switch x.(type) {
case []interface{}:
array := x.([]interface{})
switch array[0].(type) {
case []uint8:
packet.Command = RedisCommand(strings.ToUpper(string(array[0].([]uint8))))
if len(array) > 1 {
packet.Key = string(array[1].([]uint8))
}
if len(array) > 2 {
packet.Value = string(array[2].([]uint8))
}
if len(array) > 3 {
packet.Value = fmt.Sprintf("[%s", packet.Value)
for _, item := range array[3:] {
packet.Value = fmt.Sprintf("%s, %s", packet.Value, item.([]uint8))
if len(array) > 0 {
switch array[0].(type) {
case []uint8:
packet.Command = RedisCommand(strings.ToUpper(string(array[0].([]uint8))))
if len(array) > 1 {
switch array[1].(type) {
case []uint8:
packet.Key = string(array[1].([]uint8))
case int64:
packet.Key = fmt.Sprintf("%d", array[1].(int64))
}
}
packet.Value = strings.TrimSuffix(packet.Value, ", ")
packet.Value = fmt.Sprintf("%s]", packet.Value)
if len(array) > 2 {
switch array[2].(type) {
case []uint8:
packet.Value = string(array[2].([]uint8))
case int64:
packet.Value = fmt.Sprintf("%d", array[2].(int64))
}
}
if len(array) > 3 {
packet.Value = fmt.Sprintf("[%s", packet.Value)
for _, item := range array[3:] {
switch item.(type) {
case []uint8:
packet.Value = fmt.Sprintf("%s, %s", packet.Value, item.([]uint8))
case int64:
packet.Value = fmt.Sprintf("%s, %d", packet.Value, item.(int64))
}
}
packet.Value = strings.TrimSuffix(packet.Value, ", ")
packet.Value = fmt.Sprintf("%s]", packet.Value)
}
default:
msg := fmt.Sprintf("Unrecognized element in Redis array: %v", reflect.TypeOf(array[0]))
err = errors.New(msg)
return
}
default:
msg := fmt.Sprintf("Unrecognized element in Redis array: %v", reflect.TypeOf(array[0]))
err = errors.New(msg)
return
}
case []uint8:
val := string(x.([]uint8))
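Context for the widened type switch above: a decoded RESP array can mix bulk strings ([]uint8) and integers (int64), for example `SETEX session 30 value` or `INCRBY counter 5`, and the previous unconditional []uint8 assertions would panic on the integer elements. A minimal, self-contained sketch of the safe pattern, using a made-up array rather than a real decoded packet:

```go
// Sketch only: formatting a mixed []interface{} the way the updated Read() does.
package main

import "fmt"

func main() {
	// Illustrative stand-in for a decoded RESP array.
	array := []interface{}{[]uint8("SETEX"), []uint8("session"), int64(30)}
	for _, item := range array {
		switch v := item.(type) {
		case []uint8:
			fmt.Printf("%s ", v) // bulk string element
		case int64:
			fmt.Printf("%d ", v) // integer element
		}
	}
	fmt.Println()
}
```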
@@ -1,2 +1 @@
REACT_APP_OVERRIDE_WS_URL="ws://localhost:8899/ws"
REACT_APP_OVERRIDE_API_URL="http://localhost:8899/"
REACT_APP_OVERRIDE_IS_ENTERPRISE="false"

@@ -1,3 +1 @@
REACT_APP_OVERRIDE_WS_URL="ws://localhost:8899/ws"
REACT_APP_OVERRIDE_API_URL="http://localhost:8899/"
REACT_APP_OVERRIDE_IS_ENTERPRISE="true"
@@ -1,15 +1,12 @@
import * as axios from "axios";

// When working locally cp `cp .env.example .env`
export const MizuWebsocketURL = process.env.REACT_APP_OVERRIDE_WS_URL ? process.env.REACT_APP_OVERRIDE_WS_URL :
window.location.protocol === 'https:' ? `wss://${window.location.host}/ws` : `ws://${window.location.host}/ws`;
export const MizuWebsocketURL = window.location.protocol === 'https:' ? `wss://${window.location.host}/ws` : `ws://${window.location.host}/ws`;

export const FormValidationErrorType = "formError";

const CancelToken = axios.CancelToken;

// When working locally cp `cp .env.example .env`
const apiURL = process.env.REACT_APP_OVERRIDE_API_URL ? process.env.REACT_APP_OVERRIDE_API_URL : `${window.location.origin}/`;
const apiURL =`${window.location.origin}/`;

export default class Api {
static instance;