diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml index f10d0bb848..8c0b30df61 100644 --- a/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml +++ b/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml @@ -66,28 +66,6 @@ processors: # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure. timeout: 2s - signozspanmetrics/cumulative: - metrics_exporter: clickhousemetricswrite - latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] - dimensions_cache_size: 100000 - dimensions: - - name: service.namespace - default: default - - name: deployment.environment - default: default - # This is added to ensure the uniqueness of the timeseries - # Otherwise, identical timeseries produced by multiple replicas of - # collectors result in incorrect APM metrics - - name: signoz.collector.id - - name: service.version - - name: browser.platform - - name: browser.mobile - - name: k8s.cluster.name - - name: k8s.node.name - - name: k8s.namespace.name - - name: host.name - - name: host.type - - name: container.name # memory_limiter: # # 80% of maximum memory up to 2G # limit_mib: 1500 @@ -138,6 +116,8 @@ exporters: enabled: true clickhousemetricswrite/prometheus: endpoint: tcp://clickhouse:9000/signoz_metrics + clickhousemetricswritev2: + dsn: tcp://clickhouse:9000/signoz_metrics # logging: {} clickhouselogsexporter: dsn: tcp://clickhouse:9000/signoz_logs @@ -161,20 +141,20 @@ service: pipelines: traces: receivers: [jaeger, otlp] - processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch] + processors: [signozspanmetrics/delta, batch] exporters: [clickhousetraces] metrics: receivers: [otlp] processors: [batch] - exporters: [clickhousemetricswrite] - metrics/generic: + exporters: 
[clickhousemetricswrite, clickhousemetricswritev2] + metrics/hostmetrics: receivers: [hostmetrics] processors: [resourcedetection, batch] - exporters: [clickhousemetricswrite] + exporters: [clickhousemetricswrite, clickhousemetricswritev2] metrics/prometheus: receivers: [prometheus] processors: [batch] - exporters: [clickhousemetricswrite/prometheus] + exporters: [clickhousemetricswrite/prometheus, clickhousemetricswritev2] logs: receivers: [otlp, tcplog/docker] processors: [batch] diff --git a/deploy/docker/clickhouse-setup/otel-collector-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-config.yaml index 8fef0af791..cba7756d8e 100644 --- a/deploy/docker/clickhouse-setup/otel-collector-config.yaml +++ b/deploy/docker/clickhouse-setup/otel-collector-config.yaml @@ -57,35 +57,11 @@ receivers: labels: job_name: otel-collector - processors: batch: send_batch_size: 10000 send_batch_max_size: 11000 timeout: 10s - signozspanmetrics/cumulative: - metrics_exporter: clickhousemetricswrite - metrics_flush_interval: 60s - latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] - dimensions_cache_size: 100000 - dimensions: - - name: service.namespace - default: default - - name: deployment.environment - default: default - # This is added to ensure the uniqueness of the timeseries - # Otherwise, identical timeseries produced by multiple replicas of - # collectors result in incorrect APM metrics - - name: signoz.collector.id - - name: service.version - - name: browser.platform - - name: browser.mobile - - name: k8s.cluster.name - - name: k8s.node.name - - name: k8s.namespace.name - - name: host.name - - name: host.type - - name: container.name # memory_limiter: # # 80% of maximum memory up to 2G # limit_mib: 1500 @@ -149,6 +125,8 @@ exporters: enabled: true clickhousemetricswrite/prometheus: endpoint: tcp://clickhouse:9000/signoz_metrics + clickhousemetricswritev2: + dsn: 
tcp://clickhouse:9000/signoz_metrics clickhouselogsexporter: dsn: tcp://clickhouse:9000/signoz_logs timeout: 10s @@ -168,20 +146,20 @@ service: pipelines: traces: receivers: [jaeger, otlp] - processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch] + processors: [signozspanmetrics/delta, batch] exporters: [clickhousetraces] metrics: receivers: [otlp] processors: [batch] - exporters: [clickhousemetricswrite] - metrics/generic: + exporters: [clickhousemetricswrite, clickhousemetricswritev2] + metrics/hostmetrics: receivers: [hostmetrics] processors: [resourcedetection, batch] - exporters: [clickhousemetricswrite] + exporters: [clickhousemetricswrite, clickhousemetricswritev2] metrics/prometheus: receivers: [prometheus] processors: [batch] - exporters: [clickhousemetricswrite/prometheus] + exporters: [clickhousemetricswrite/prometheus, clickhousemetricswritev2] logs: receivers: [otlp, tcplog/docker] processors: [batch] diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index 5b7ca7cbb7..2c1938cd92 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -41,6 +41,7 @@ type APIHandlerOptions struct { FluxInterval time.Duration UseLogsNewSchema bool UseTraceNewSchema bool + UseLicensesV3 bool } type APIHandler struct { @@ -67,6 +68,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) { FluxInterval: opts.FluxInterval, UseLogsNewSchema: opts.UseLogsNewSchema, UseTraceNewSchema: opts.UseTraceNewSchema, + UseLicensesV3: opts.UseLicensesV3, }) if err != nil { @@ -175,10 +177,25 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut) router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut) + // v2 router.HandleFunc("/api/v2/licenses", am.ViewAccess(ah.listLicensesV2)). 
Methods(http.MethodGet) + // v3 + router.HandleFunc("/api/v3/licenses", + am.ViewAccess(ah.listLicensesV3)). + Methods(http.MethodGet) + + router.HandleFunc("/api/v3/licenses", + am.AdminAccess(ah.applyLicenseV3)). + Methods(http.MethodPost) + + router.HandleFunc("/api/v3/licenses", + am.AdminAccess(ah.refreshLicensesV3)). + Methods(http.MethodPut) + + // v4 router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost) // Gateway diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go index 51cfddefb1..0cb7fa2bab 100644 --- a/ee/query-service/app/api/license.go +++ b/ee/query-service/app/api/license.go @@ -9,6 +9,7 @@ import ( "go.signoz.io/signoz/ee/query-service/constants" "go.signoz.io/signoz/ee/query-service/model" + "go.signoz.io/signoz/pkg/http/render" "go.uber.org/zap" ) @@ -59,6 +60,21 @@ type billingDetails struct { } `json:"data"` } +type ApplyLicenseRequest struct { + LicenseKey string `json:"key"` +} + +type ListLicenseResponse map[string]interface{} + +func convertLicenseV3ToListLicenseResponse(licensesV3 []*model.LicenseV3) []ListLicenseResponse { + listLicenses := []ListLicenseResponse{} + + for _, license := range licensesV3 { + listLicenses = append(listLicenses, license.Data) + } + return listLicenses +} + func (ah *APIHandler) listLicenses(w http.ResponseWriter, r *http.Request) { licenses, apiError := ah.LM().GetLicenses(context.Background()) if apiError != nil { @@ -88,6 +104,51 @@ func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) { ah.Respond(w, license) } +func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) { + licenses, apiError := ah.LM().GetLicensesV3(r.Context()) + + if apiError != nil { + RespondError(w, apiError, nil) + return + } + + ah.Respond(w, convertLicenseV3ToListLicenseResponse(licenses)) +} + +// this function is called by zeus when inserting licenses in the query-service +func (ah *APIHandler) applyLicenseV3(w 
http.ResponseWriter, r *http.Request) { + var licenseKey ApplyLicenseRequest + + if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + if licenseKey.LicenseKey == "" { + RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil) + return + } + + _, apiError := ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey) + if apiError != nil { + RespondError(w, apiError, nil) + return + } + + render.Success(w, http.StatusAccepted, nil) +} + +func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) { + + apiError := ah.LM().RefreshLicense(r.Context()) + if apiError != nil { + RespondError(w, apiError, nil) + return + } + + render.Success(w, http.StatusNoContent, nil) +} + func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) { type checkoutResponse struct { @@ -154,11 +215,45 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) { ah.Respond(w, billingResponse.Data) } +func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License { + licensesV2 := []model.License{} + for _, l := range licenses { + licenseV2 := model.License{ + Key: l.Key, + ActivationId: "", + PlanDetails: "", + FeatureSet: l.Features, + ValidationMessage: "", + IsCurrent: l.IsCurrent, + LicensePlan: model.LicensePlan{ + PlanKey: l.PlanName, + ValidFrom: l.ValidFrom, + ValidUntil: l.ValidUntil, + Status: l.Status}, + } + licensesV2 = append(licensesV2, licenseV2) + } + return licensesV2 +} + func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { - licenses, apiError := ah.LM().GetLicenses(context.Background()) - if apiError != nil { - RespondError(w, apiError, nil) + var licenses []model.License + + if ah.UseLicensesV3 { + licensesV3, err := ah.LM().GetLicensesV3(r.Context()) + if err != nil { + RespondError(w, err, nil) + return + } + licenses = convertLicenseV3ToLicenseV2(licensesV3) + } else { + _licenses, 
apiError := ah.LM().GetLicenses(r.Context()) + if apiError != nil { + RespondError(w, apiError, nil) + return + } + licenses = _licenses } resp := model.Licenses{ diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index e63a3c7893..feae157adf 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -31,7 +31,6 @@ import ( "go.signoz.io/signoz/ee/query-service/rules" baseauth "go.signoz.io/signoz/pkg/query-service/auth" "go.signoz.io/signoz/pkg/query-service/migrate" - "go.signoz.io/signoz/pkg/query-service/model" v3 "go.signoz.io/signoz/pkg/query-service/model/v3" licensepkg "go.signoz.io/signoz/ee/query-service/license" @@ -79,6 +78,7 @@ type ServerOptions struct { GatewayUrl string UseLogsNewSchema bool UseTraceNewSchema bool + UseLicensesV3 bool } // Server runs HTTP api service @@ -135,7 +135,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { } // initiate license manager - lm, err := licensepkg.StartManager("sqlite", localDB) + lm, err := licensepkg.StartManager("sqlite", localDB, serverOptions.UseLicensesV3) if err != nil { return nil, err } @@ -274,6 +274,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { Gateway: gatewayProxy, UseLogsNewSchema: serverOptions.UseLogsNewSchema, UseTraceNewSchema: serverOptions.UseTraceNewSchema, + UseLicensesV3: serverOptions.UseLicensesV3, } apiHandler, err := api.NewAPIHandler(apiOpts) @@ -352,7 +353,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e } if user.User.OrgId == "" { - return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims")) + return nil, basemodel.UnauthorizedError(errors.New("orgId is missing in the claims")) } return user, nil @@ -770,9 +771,11 @@ func makeRulesManager( Cache: cache, EvalDelay: baseconst.GetEvalDelay(), - PrepareTaskFunc: rules.PrepareTaskFunc, - UseLogsNewSchema: useLogsNewSchema, - UseTraceNewSchema: useTraceNewSchema, + PrepareTaskFunc: 
rules.PrepareTaskFunc, + UseLogsNewSchema: useLogsNewSchema, + UseTraceNewSchema: useTraceNewSchema, + PrepareTaskFunc: rules.PrepareTaskFunc, + PrepareTestRuleFunc: rules.TestNotification, } // create Manager diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go index c1baa6320b..0931fd01fc 100644 --- a/ee/query-service/constants/constants.go +++ b/ee/query-service/constants/constants.go @@ -13,6 +13,7 @@ var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "") var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "") var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false") var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL") +var ZeusURL = GetOrDefaultEnv("ZEUS_URL", "ZeusURL") func GetOrDefaultEnv(key string, fallback string) string { v := os.Getenv(key) diff --git a/ee/query-service/integrations/signozio/response.go b/ee/query-service/integrations/signozio/response.go index 67ad8aac88..f0b0132d1b 100644 --- a/ee/query-service/integrations/signozio/response.go +++ b/ee/query-service/integrations/signozio/response.go @@ -13,3 +13,8 @@ type ActivationResponse struct { ActivationId string `json:"ActivationId"` PlanDetails string `json:"PlanDetails"` } + +type ValidateLicenseResponse struct { + Status status `json:"status"` + Data map[string]interface{} `json:"data"` +} diff --git a/ee/query-service/integrations/signozio/signozio.go b/ee/query-service/integrations/signozio/signozio.go index c18cfb6572..6c0b937c80 100644 --- a/ee/query-service/integrations/signozio/signozio.go +++ b/ee/query-service/integrations/signozio/signozio.go @@ -7,6 +7,7 @@ import ( "fmt" "io" "net/http" + "time" "github.com/pkg/errors" "go.uber.org/zap" @@ -23,12 +24,14 @@ const ( ) type Client struct { - Prefix string + Prefix string + GatewayUrl string } func New() *Client { return &Client{ - Prefix: constants.LicenseSignozIo, + Prefix: constants.LicenseSignozIo, + GatewayUrl: constants.ZeusURL, } } 
@@ -116,6 +119,60 @@ func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) } +func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) { + + // Creating an HTTP client with a timeout for better control + client := &http.Client{ + Timeout: 10 * time.Second, + } + + req, err := http.NewRequest("GET", C.GatewayUrl+"/v2/licenses/me", nil) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to create request: %w", err))) + } + + // Setting the custom header + req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey) + + response, err := client.Do(req) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to make post request: %w", err))) + } + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read validation response from %v", C.GatewayUrl))) + } + + defer response.Body.Close() + + switch response.StatusCode { + case 200: + a := ValidateLicenseResponse{} + err = json.Unmarshal(body, &a) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response")) + } + + license, err := model.NewLicenseV3(a.Data) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, "failed to generate new license v3")) + } + + return license, nil + case 400: + return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)), + fmt.Sprintf("bad request error received from %v", C.GatewayUrl))) + case 401: + return nil, model.Unauthorized(errors.Wrap(fmt.Errorf(string(body)), + fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl))) + default: + return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)), + fmt.Sprintf("internal request error received from %v", C.GatewayUrl))) + } + +} + func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) { req, err := 
http.NewRequestWithContext(ctx, POST, url, body) if err != nil { diff --git a/ee/query-service/license/db.go b/ee/query-service/license/db.go index f6ccc88426..eae48e266d 100644 --- a/ee/query-service/license/db.go +++ b/ee/query-service/license/db.go @@ -3,10 +3,12 @@ package license import ( "context" "database/sql" + "encoding/json" "fmt" "time" "github.com/jmoiron/sqlx" + "github.com/mattn/go-sqlite3" "go.signoz.io/signoz/ee/query-service/license/sqlite" "go.signoz.io/signoz/ee/query-service/model" @@ -48,6 +50,34 @@ func (r *Repo) GetLicenses(ctx context.Context) ([]model.License, error) { return licenses, nil } +func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) { + licensesData := []model.LicenseDB{} + licenseV3Data := []*model.LicenseV3{} + + query := "SELECT id,key,data FROM licenses_v3" + + err := r.db.Select(&licensesData, query) + if err != nil { + return nil, fmt.Errorf("failed to get licenses from db: %v", err) + } + + for _, l := range licensesData { + var licenseData map[string]interface{} + err := json.Unmarshal([]byte(l.Data), &licenseData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err) + } + + license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData) + if err != nil { + return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err) + } + licenseV3Data = append(licenseV3Data, license) + } + + return licenseV3Data, nil +} + // GetActiveLicense fetches the latest active license from DB. // If the license is not present, expect a nil license and a nil error in the output. 
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) { @@ -79,6 +109,45 @@ func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel return active, nil } +func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) { + var err error + licenses := []model.LicenseDB{} + + query := "SELECT id,key,data FROM licenses_v3" + + err = r.db.Select(&licenses, query) + if err != nil { + return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err)) + } + + var active *model.LicenseV3 + for _, l := range licenses { + var licenseData map[string]interface{} + err := json.Unmarshal([]byte(l.Data), &licenseData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err) + } + + license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData) + if err != nil { + return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err) + } + + if active == nil && + (license.ValidFrom != 0) && + (license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) { + active = license + } + if active != nil && + license.ValidFrom > active.ValidFrom && + (license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) { + active = license + } + } + + return active, nil +} + // InsertLicense inserts a new license in db func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error { @@ -204,3 +273,59 @@ func (r *Repo) InitFeatures(req basemodel.FeatureSet) error { } return nil } + +// InsertLicenseV3 inserts a new license v3 in db +func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError { + + query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)` + + // licsense is the entity of zeus so putting the entire license here without defining schema + licenseData, err := json.Marshal(l.Data) + if err != nil { + return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err} + } + 
+ _, err = r.db.ExecContext(ctx, + query, + l.ID, + l.Key, + string(licenseData), + ) + + if err != nil { + if sqliteErr, ok := err.(sqlite3.Error); ok { + if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique { + zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr)) + return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr} + } + } + zap.L().Error("error in inserting license data: ", zap.Error(err)) + return &model.ApiError{Typ: basemodel.ErrorExec, Err: err} + } + + return nil +} + +// UpdateLicenseV3 updates a new license v3 in db +func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error { + + // the key and id for the license can't change so only update the data here! + query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;` + + license, err := json.Marshal(l.Data) + if err != nil { + return fmt.Errorf("insert license failed: license marshal error") + } + _, err = r.db.ExecContext(ctx, + query, + license, + l.ID, + ) + + if err != nil { + zap.L().Error("error in updating license data: ", zap.Error(err)) + return fmt.Errorf("failed to update license in db: %v", err) + } + + return nil +} diff --git a/ee/query-service/license/manager.go b/ee/query-service/license/manager.go index 800f4b7ff3..6dcc704e3a 100644 --- a/ee/query-service/license/manager.go +++ b/ee/query-service/license/manager.go @@ -7,6 +7,7 @@ import ( "time" "github.com/jmoiron/sqlx" + "github.com/pkg/errors" "sync" @@ -45,11 +46,12 @@ type Manager struct { failedAttempts uint64 // keep track of active license and features - activeLicense *model.License - activeFeatures basemodel.FeatureSet + activeLicense *model.License + activeLicenseV3 *model.LicenseV3 + activeFeatures basemodel.FeatureSet } -func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*Manager, error) { +func StartManager(dbType string, db *sqlx.DB, useLicensesV3 bool, features ...basemodel.Feature) (*Manager, error) { if LM != nil { return LM, nil } @@ -65,7 
+67,7 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M repo: &repo, } - if err := m.start(features...); err != nil { + if err := m.start(useLicensesV3, features...); err != nil { return m, err } LM = m @@ -73,8 +75,14 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M } // start loads active license in memory and initiates validator -func (lm *Manager) start(features ...basemodel.Feature) error { - err := lm.LoadActiveLicense(features...) +func (lm *Manager) start(useLicensesV3 bool, features ...basemodel.Feature) error { + + var err error + if useLicensesV3 { + err = lm.LoadActiveLicenseV3(features...) + } else { + err = lm.LoadActiveLicense(features...) + } return err } @@ -108,6 +116,31 @@ func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) { go lm.Validator(context.Background()) } +} +func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) { + lm.mutex.Lock() + defer lm.mutex.Unlock() + + if l == nil { + return + } + + lm.activeLicenseV3 = l + lm.activeFeatures = append(l.Features, features...) + // set default features + setDefaultFeatures(lm) + + err := lm.InitFeatures(lm.activeFeatures) + if err != nil { + zap.L().Panic("Couldn't activate features", zap.Error(err)) + } + if !lm.validatorRunning { + // we want to make sure only one validator runs, + // we already have lock() so good to go + lm.validatorRunning = true + go lm.ValidatorV3(context.Background()) + } + } func setDefaultFeatures(lm *Manager) { @@ -137,6 +170,28 @@ func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error { return nil } +func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error { + active, err := lm.repo.GetActiveLicenseV3(context.Background()) + if err != nil { + return err + } + if active != nil { + lm.SetActiveV3(active, features...) 
+ } else { + zap.L().Info("No active license found, defaulting to basic plan") + // if no active license is found, we default to basic(free) plan with all default features + lm.activeFeatures = model.BasicPlan + setDefaultFeatures(lm) + err := lm.InitFeatures(lm.activeFeatures) + if err != nil { + zap.L().Error("Couldn't initialize features", zap.Error(err)) + return err + } + } + + return nil +} + func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *model.ApiError) { licenses, err := lm.repo.GetLicenses(ctx) @@ -163,6 +218,23 @@ func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, a return } +func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) { + + licenses, err := lm.repo.GetLicensesV3(ctx) + if err != nil { + return nil, model.InternalError(err) + } + + for _, l := range licenses { + if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key { + l.IsCurrent = true + } + response = append(response, l) + } + + return response, nil +} + // Validator validates license after an epoch of time func (lm *Manager) Validator(ctx context.Context) { defer close(lm.terminated) @@ -187,6 +259,30 @@ func (lm *Manager) Validator(ctx context.Context) { } } +// Validator validates license after an epoch of time +func (lm *Manager) ValidatorV3(ctx context.Context) { + defer close(lm.terminated) + tick := time.NewTicker(validationFrequency) + defer tick.Stop() + + lm.ValidateV3(ctx) + + for { + select { + case <-lm.done: + return + default: + select { + case <-lm.done: + return + case <-tick.C: + lm.ValidateV3(ctx) + } + } + + } +} + // Validate validates the current active license func (lm *Manager) Validate(ctx context.Context) (reterr error) { zap.L().Info("License validation started") @@ -254,6 +350,54 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) { return nil } +// todo[vikrantgupta25]: check the comparison here between old and new 
license! +func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError { + + license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key) + if apiError != nil { + zap.L().Error("failed to validate license", zap.Error(apiError.Err)) + return apiError + } + + err := lm.repo.UpdateLicenseV3(ctx, license) + if err != nil { + return model.BadRequest(errors.Wrap(err, "failed to update the new license")) + } + lm.SetActiveV3(license) + + return nil +} + +func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) { + zap.L().Info("License validation started") + if lm.activeLicenseV3 == nil { + return nil + } + + defer func() { + lm.mutex.Lock() + + lm.lastValidated = time.Now().Unix() + if reterr != nil { + zap.L().Error("License validation completed with error", zap.Error(reterr)) + atomic.AddUint64(&lm.failedAttempts, 1) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED, + map[string]interface{}{"err": reterr.Error()}, "", true, false) + } else { + zap.L().Info("License validation completed with no errors") + } + + lm.mutex.Unlock() + }() + + err := lm.RefreshLicense(ctx) + + if err != nil { + return err + } + return nil +} + // Activate activates a license key with signoz server func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) { defer func() { @@ -298,6 +442,35 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m return l, nil } +func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) { + defer func() { + if errResponse != nil { + userEmail, err := auth.GetEmailFromJwt(ctx) + if err == nil { + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, + map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false) + } + } + }() + + license, apiError := validate.ValidateLicenseV3(licenseKey) + if apiError 
!= nil { + zap.L().Error("failed to get the license", zap.Error(apiError.Err)) + return nil, apiError + } + + // insert the new license to the sqlite db + err := lm.repo.InsertLicenseV3(ctx, license) + if err != nil { + zap.L().Error("failed to activate license", zap.Error(err)) + return nil, err + } + + // license is valid, activate it + lm.SetActiveV3(license) + return license, nil +} + // CheckFeature will be internally used by backend routines // for feature gating func (lm *Manager) CheckFeature(featureKey string) error { diff --git a/ee/query-service/license/sqlite/init.go b/ee/query-service/license/sqlite/init.go index c80bbd5a86..cd34081cc9 100644 --- a/ee/query-service/license/sqlite/init.go +++ b/ee/query-service/license/sqlite/init.go @@ -48,5 +48,16 @@ func InitDB(db *sqlx.DB) error { return fmt.Errorf("error in creating feature_status table: %s", err.Error()) } + table_schema = `CREATE TABLE IF NOT EXISTS licenses_v3 ( + id TEXT PRIMARY KEY, + key TEXT NOT NULL UNIQUE, + data TEXT + );` + + _, err = db.Exec(table_schema) + if err != nil { + return fmt.Errorf("error in creating licenses_v3 table: %s", err.Error()) + } + return nil } diff --git a/ee/query-service/main.go b/ee/query-service/main.go index dd52ab73a5..23824bd636 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -95,6 +95,7 @@ func main() { var useLogsNewSchema bool var useTraceNewSchema bool + var useLicensesV3 bool var cacheConfigPath, fluxInterval string var enableQueryServiceLogOTLPExport bool var preferSpanMetrics bool @@ -106,6 +107,7 @@ func main() { flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs") flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces") + flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses") flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") 
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") @@ -146,6 +148,7 @@ func main() { GatewayUrl: gatewayUrl, UseLogsNewSchema: useLogsNewSchema, UseTraceNewSchema: useTraceNewSchema, + UseLicensesV3: useLicensesV3, } // Read the jwt secret key diff --git a/ee/query-service/model/errors.go b/ee/query-service/model/errors.go index 7e7b8410e2..efc780be95 100644 --- a/ee/query-service/model/errors.go +++ b/ee/query-service/model/errors.go @@ -46,6 +46,13 @@ func BadRequest(err error) *ApiError { } } +func Unauthorized(err error) *ApiError { + return &ApiError{ + Typ: basemodel.ErrorUnauthorized, + Err: err, + } +} + // BadRequestStr returns a ApiError object of bad request for string input func BadRequestStr(s string) *ApiError { return &ApiError{ diff --git a/ee/query-service/model/license.go b/ee/query-service/model/license.go index 7ad349c9b7..2f9a0feeda 100644 --- a/ee/query-service/model/license.go +++ b/ee/query-service/model/license.go @@ -3,6 +3,8 @@ package model import ( "encoding/base64" "encoding/json" + "fmt" + "reflect" "time" "github.com/pkg/errors" @@ -104,3 +106,144 @@ type SubscriptionServerResp struct { Status string `json:"status"` Data Licenses `json:"data"` } + +type Plan struct { + Name string `json:"name"` +} + +type LicenseDB struct { + ID string `json:"id"` + Key string `json:"key"` + Data string `json:"data"` +} +type LicenseV3 struct { + ID string + Key string + Data map[string]interface{} + PlanName string + Features basemodel.FeatureSet + Status string + IsCurrent bool + ValidFrom int64 + ValidUntil int64 +} + +func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) { + var zeroValue T + if val, ok := data[key]; ok { + if value, ok := val.(T); ok { + return value, nil + } + return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue)) + } 
+ return zeroValue, fmt.Errorf("%s key is missing", key) +} + +func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) { + var features basemodel.FeatureSet + + // extract id from data + licenseID, err := extractKeyFromMapStringInterface[string](data, "id") + if err != nil { + return nil, err + } + delete(data, "id") + + // extract key from data + licenseKey, err := extractKeyFromMapStringInterface[string](data, "key") + if err != nil { + return nil, err + } + delete(data, "key") + + // extract status from data + status, err := extractKeyFromMapStringInterface[string](data, "status") + if err != nil { + return nil, err + } + + planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan") + if err != nil { + return nil, err + } + + planName, err := extractKeyFromMapStringInterface[string](planMap, "name") + if err != nil { + return nil, err + } + // if license status is inactive then default it to basic + if status == LicenseStatusInactive { + planName = PlanNameBasic + } + + featuresFromZeus := basemodel.FeatureSet{} + if _features, ok := data["features"]; ok { + featuresData, err := json.Marshal(_features) + if err != nil { + return nil, errors.Wrap(err, "failed to marshal features data") + } + + if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil { + return nil, errors.Wrap(err, "failed to unmarshal features data") + } + } + + switch planName { + case PlanNameTeams: + features = append(features, ProPlan...) + case PlanNameEnterprise: + features = append(features, EnterprisePlan...) + case PlanNameBasic: + features = append(features, BasicPlan...) + default: + features = append(features, BasicPlan...) 
+ } + + if len(featuresFromZeus) > 0 { + for _, feature := range featuresFromZeus { + exists := false + for i, existingFeature := range features { + if existingFeature.Name == feature.Name { + features[i] = feature // Replace existing feature + exists = true + break + } + } + if !exists { + features = append(features, feature) // Append if it doesn't exist + } + } + } + data["features"] = features + + _validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from") + if err != nil { + _validFrom = 0 + } + validFrom := int64(_validFrom) + + _validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until") + if err != nil { + _validUntil = 0 + } + validUntil := int64(_validUntil) + + return &LicenseV3{ + ID: licenseID, + Key: licenseKey, + Data: data, + PlanName: planName, + Features: features, + ValidFrom: validFrom, + ValidUntil: validUntil, + Status: status, + }, nil + +} + +func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) { + licenseDataWithIdAndKey := data + licenseDataWithIdAndKey["id"] = id + licenseDataWithIdAndKey["key"] = key + return NewLicenseV3(licenseDataWithIdAndKey) +} diff --git a/ee/query-service/model/license_test.go b/ee/query-service/model/license_test.go new file mode 100644 index 0000000000..1c6150c8ac --- /dev/null +++ b/ee/query-service/model/license_test.go @@ -0,0 +1,170 @@ +package model + +import ( + "encoding/json" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.signoz.io/signoz/pkg/query-service/model" +) + +func TestNewLicenseV3(t *testing.T) { + testCases := []struct { + name string + data []byte + pass bool + expected *LicenseV3 + error error + }{ + { + name: "Error for missing license id", + data: []byte(`{}`), + pass: false, + error: errors.New("id key is missing"), + }, + { + name: "Error for license id not being a valid string", + data: []byte(`{"id": 10}`), + pass: 
false, + error: errors.New("id key is not a valid string"), + }, + { + name: "Error for missing license key", + data: []byte(`{"id":"does-not-matter"}`), + pass: false, + error: errors.New("key key is missing"), + }, + { + name: "Error for invalid string license key", + data: []byte(`{"id":"does-not-matter","key":10}`), + pass: false, + error: errors.New("key key is not a valid string"), + }, + { + name: "Error for missing license status", + data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`), + pass: false, + error: errors.New("status key is missing"), + }, + { + name: "Error for invalid string license status", + data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`), + pass: false, + error: errors.New("status key is not a valid string"), + }, + { + name: "Error for missing license plan", + data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`), + pass: false, + error: errors.New("plan key is missing"), + }, + { + name: "Error for invalid json license plan", + data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`), + pass: false, + error: errors.New("plan key is not a valid map[string]interface {}"), + }, + { + name: "Error for invalid license plan", + data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`), + pass: false, + error: errors.New("name key is missing"), + }, + { + name: "Parse the entire license properly", + data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`), + pass: true, + expected: &LicenseV3{ + ID: "does-not-matter", + Key: "does-not-matter-key", + Data: map[string]interface{}{ + "plan": map[string]interface{}{ + "name": "TEAMS", + }, + "category": "FREE", + "status": "ACTIVE", + 
"valid_from": float64(1730899309), + "valid_until": float64(-1), + }, + PlanName: PlanNameTeams, + ValidFrom: 1730899309, + ValidUntil: -1, + Status: "ACTIVE", + IsCurrent: false, + Features: model.FeatureSet{}, + }, + }, + { + name: "Fallback to basic plan if license status is inactive", + data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`), + pass: true, + expected: &LicenseV3{ + ID: "does-not-matter", + Key: "does-not-matter-key", + Data: map[string]interface{}{ + "plan": map[string]interface{}{ + "name": "TEAMS", + }, + "category": "FREE", + "status": "INACTIVE", + "valid_from": float64(1730899309), + "valid_until": float64(-1), + }, + PlanName: PlanNameBasic, + ValidFrom: 1730899309, + ValidUntil: -1, + Status: "INACTIVE", + IsCurrent: false, + Features: model.FeatureSet{}, + }, + }, + { + name: "fallback states for validFrom and validUntil", + data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`), + pass: true, + expected: &LicenseV3{ + ID: "does-not-matter", + Key: "does-not-matter-key", + Data: map[string]interface{}{ + "plan": map[string]interface{}{ + "name": "TEAMS", + }, + "valid_from": 1234.456, + "valid_until": 5678.567, + "category": "FREE", + "status": "ACTIVE", + }, + PlanName: PlanNameTeams, + ValidFrom: 1234, + ValidUntil: 5678, + Status: "ACTIVE", + IsCurrent: false, + Features: model.FeatureSet{}, + }, + }, + } + + for _, tc := range testCases { + var licensePayload map[string]interface{} + err := json.Unmarshal(tc.data, &licensePayload) + require.NoError(t, err) + license, err := NewLicenseV3(licensePayload) + if license != nil { + license.Features = make(model.FeatureSet, 0) + delete(license.Data, "features") + } + + if tc.pass { + require.NoError(t, err) + require.NotNil(t, license) + assert.Equal(t, 
tc.expected, license) + } else { + require.Error(t, err) + assert.EqualError(t, err, tc.error.Error()) + require.Nil(t, license) + } + + } +} diff --git a/ee/query-service/model/plans.go b/ee/query-service/model/plans.go index c5272340a3..1ac9ac28d6 100644 --- a/ee/query-service/model/plans.go +++ b/ee/query-service/model/plans.go @@ -9,6 +9,17 @@ const SSO = "SSO" const Basic = "BASIC_PLAN" const Pro = "PRO_PLAN" const Enterprise = "ENTERPRISE_PLAN" + +var ( + PlanNameEnterprise = "ENTERPRISE" + PlanNameTeams = "TEAMS" + PlanNameBasic = "BASIC" +) + +var ( + LicenseStatusInactive = "INACTIVE" +) + const DisableUpsell = "DISABLE_UPSELL" const Onboarding = "ONBOARDING" const ChatSupport = "CHAT_SUPPORT" diff --git a/ee/query-service/rules/manager.go b/ee/query-service/rules/manager.go index d0365740c2..7214f338ca 100644 --- a/ee/query-service/rules/manager.go +++ b/ee/query-service/rules/manager.go @@ -1,10 +1,15 @@ package rules import ( + "context" "fmt" "time" + "github.com/google/uuid" + basemodel "go.signoz.io/signoz/pkg/query-service/model" baserules "go.signoz.io/signoz/pkg/query-service/rules" + "go.signoz.io/signoz/pkg/query-service/utils/labels" + "go.uber.org/zap" ) func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) { @@ -80,6 +85,106 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) return task, nil } +// TestNotification prepares a dummy rule for given rule parameters and +// sends a test notification. 
returns alert count and error (if any) +func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.ApiError) { + + ctx := context.Background() + + if opts.Rule == nil { + return 0, basemodel.BadRequest(fmt.Errorf("rule is required")) + } + + parsedRule := opts.Rule + var alertname = parsedRule.AlertName + if alertname == "" { + // alertname is not mandatory for testing, so picking + // a random string here + alertname = uuid.New().String() + } + + // append name to indicate this is test alert + parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, baserules.TestAlertPostFix) + + var rule baserules.Rule + var err error + + if parsedRule.RuleType == baserules.RuleTypeThreshold { + + // add special labels for test alerts + parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target) + parsedRule.Labels[labels.RuleSourceLabel] = "" + parsedRule.Labels[labels.AlertRuleIdLabel] = "" + + // create a threshold rule + rule, err = baserules.NewThresholdRule( + alertname, + parsedRule, + opts.FF, + opts.Reader, + opts.UseLogsNewSchema, + baserules.WithSendAlways(), + baserules.WithSendUnmatched(), + ) + + if err != nil { + zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err)) + return 0, basemodel.BadRequest(err) + } + + } else if parsedRule.RuleType == baserules.RuleTypeProm { + + // create promql rule + rule, err = baserules.NewPromRule( + alertname, + parsedRule, + opts.Logger, + opts.Reader, + opts.ManagerOpts.PqlEngine, + baserules.WithSendAlways(), + baserules.WithSendUnmatched(), + ) + + if err != nil { + zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err)) + return 0, basemodel.BadRequest(err) + } + } else if parsedRule.RuleType == baserules.RuleTypeAnomaly { + // create anomaly rule + rule, err = NewAnomalyRule( + 
alertname, + parsedRule, + opts.FF, + opts.Reader, + opts.Cache, + baserules.WithSendAlways(), + baserules.WithSendUnmatched(), + ) + if err != nil { + zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err)) + return 0, basemodel.BadRequest(err) + } + } else { + return 0, basemodel.BadRequest(fmt.Errorf("failed to derive ruletype with given information")) + } + + // set timestamp to current utc time + ts := time.Now().UTC() + + count, err := rule.Eval(ctx, ts) + if err != nil { + zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err)) + return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed")) + } + alertsFound, ok := count.(int) + if !ok { + return 0, basemodel.InternalError(fmt.Errorf("something went wrong")) + } + rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), opts.NotifyFunc) + + return alertsFound, nil +} + // newTask returns an appropriate group for // rule type func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task { diff --git a/frontend/package.json b/frontend/package.json index 82fda7af0a..320fa28f85 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -42,7 +42,7 @@ "@radix-ui/react-tooltip": "1.0.7", "@sentry/react": "7.102.1", "@sentry/webpack-plugin": "2.16.0", - "@signozhq/design-tokens": "0.0.8", + "@signozhq/design-tokens": "1.1.4", "@uiw/react-md-editor": "3.23.5", "@visx/group": "3.3.0", "@visx/shape": "3.5.0", @@ -87,6 +87,8 @@ "lodash-es": "^4.17.21", "lucide-react": "0.379.0", "mini-css-extract-plugin": "2.4.5", + "overlayscrollbars": "^2.8.1", + "overlayscrollbars-react": "^0.5.6", "papaparse": "5.4.1", "posthog-js": "1.160.3", "rc-tween-one": "3.0.6", @@ -107,11 +109,10 @@ "react-query": "3.39.3", "react-redux": "^7.2.2", "react-router-dom": "^5.2.0", + 
"react-router-dom-v5-compat": "6.27.0", "react-syntax-highlighter": "15.5.0", "react-use": "^17.3.2", "react-virtuoso": "4.0.3", - "overlayscrollbars-react": "^0.5.6", - "overlayscrollbars": "^2.8.1", "redux": "^4.0.5", "redux-thunk": "^2.3.0", "rehype-raw": "7.0.0", diff --git a/frontend/public/locales/en-GB/messagingQueues.json b/frontend/public/locales/en-GB/messagingQueues.json new file mode 100644 index 0000000000..5c446d98d8 --- /dev/null +++ b/frontend/public/locales/en-GB/messagingQueues.json @@ -0,0 +1,24 @@ +{ + "metricGraphCategory": { + "brokerMetrics": { + "title": "Broker Metrics", + "description": "The Kafka Broker metrics here inform you of data loss/delay through unclean leader elections and network throughputs, as well as request fails through request purgatories and timeouts metrics" + }, + "consumerMetrics": { + "title": "Consumer Metrics", + "description": "Kafka Consumer metrics provide insights into lag between message production and consumption, success rates and latency of message delivery, and the volume of data consumed." + }, + "producerMetrics": { + "title": "Producer Metrics", + "description": "Kafka Producers send messages to brokers for storage and distribution by topic. These metrics inform you of the volume and rate of data sent, and the success rate of message delivery." + }, + "brokerJVMMetrics": { + "title": "Broker JVM Metrics", + "description": "Kafka brokers are Java applications that expose JVM metrics to inform on the broker's system health. Garbage collection metrics like those below provide key insights into free memory, broker performance, and heap size. You need to enable new_gc_metrics for this section to populate." + }, + "partitionMetrics": { + "title": "Partition Metrics", + "description": "Kafka partitions are the unit of parallelism in Kafka. These metrics inform you of the number of partitions per topic, the current offset of each partition, the oldest offset, and the number of in-sync replicas." 
+ } + } +} diff --git a/frontend/public/locales/en-GB/messagingQueuesKafkaOverview.json b/frontend/public/locales/en-GB/messagingQueuesKafkaOverview.json index 5061a5ddcb..f298f664d1 100644 --- a/frontend/public/locales/en-GB/messagingQueuesKafkaOverview.json +++ b/frontend/public/locales/en-GB/messagingQueuesKafkaOverview.json @@ -1,30 +1,54 @@ { - "breadcrumb": "Messaging Queues", - "header": "Kafka / Overview", - "overview": { - "title": "Start sending data in as little as 20 minutes", - "subtitle": "Connect and Monitor Your Data Streams" - }, - "configureConsumer": { - "title": "Configure Consumer", - "description": "Add consumer data sources to gain insights and enhance monitoring.", - "button": "Get Started" - }, - "configureProducer": { - "title": "Configure Producer", - "description": "Add producer data sources to gain insights and enhance monitoring.", - "button": "Get Started" - }, - "monitorKafka": { - "title": "Monitor kafka", - "description": "Add your Kafka source to gain insights and enhance activity tracking.", - "button": "Get Started" - }, - "summarySection": { - "viewDetailsButton": "View Details" - }, - "confirmModal": { - "content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.", - "okText": "Proceed" - } -} \ No newline at end of file + "breadcrumb": "Messaging Queues", + "header": "Kafka / Overview", + "overview": { + "title": "Start sending data in as little as 20 minutes", + "subtitle": "Connect and Monitor Your Data Streams" + }, + "configureConsumer": { + "title": "Configure Consumer", + "description": "Add consumer data sources to gain insights and enhance monitoring.", + "button": "Get Started" + }, + "configureProducer": { + "title": "Configure Producer", + "description": "Add producer data sources to gain insights and enhance monitoring.", + "button": "Get Started" + }, + "monitorKafka": { + "title": "Monitor kafka", + "description": "Add your 
Kafka source to gain insights and enhance activity tracking.", + "button": "Get Started" + }, + "summarySection": { + "viewDetailsButton": "View Details", + "consumer": { + "title": "Consumer lag view", + "description": "Connect and Monitor Your Data Streams" + }, + "producer": { + "title": "Producer latency view", + "description": "Connect and Monitor Your Data Streams" + }, + "partition": { + "title": "Partition Latency view", + "description": "Connect and Monitor Your Data Streams" + }, + "dropRate": { + "title": "Drop Rate view", + "description": "Connect and Monitor Your Data Streams" + }, + "metricPage": { + "title": "Metric View", + "description": "Connect and Monitor Your Data Streams" + } + }, + "confirmModal": { + "content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.", + "okText": "Proceed" + }, + "overviewSummarySection": { + "title": "Monitor Your Data Streams", + "subtitle": "Monitor key Kafka metrics like consumer lag and latency to ensure efficient data flow and troubleshoot in real time." + } +} diff --git a/frontend/public/locales/en/messagingQueues.json b/frontend/public/locales/en/messagingQueues.json new file mode 100644 index 0000000000..5c446d98d8 --- /dev/null +++ b/frontend/public/locales/en/messagingQueues.json @@ -0,0 +1,24 @@ +{ + "metricGraphCategory": { + "brokerMetrics": { + "title": "Broker Metrics", + "description": "The Kafka Broker metrics here inform you of data loss/delay through unclean leader elections and network throughputs, as well as request fails through request purgatories and timeouts metrics" + }, + "consumerMetrics": { + "title": "Consumer Metrics", + "description": "Kafka Consumer metrics provide insights into lag between message production and consumption, success rates and latency of message delivery, and the volume of data consumed." 
+ }, + "producerMetrics": { + "title": "Producer Metrics", + "description": "Kafka Producers send messages to brokers for storage and distribution by topic. These metrics inform you of the volume and rate of data sent, and the success rate of message delivery." + }, + "brokerJVMMetrics": { + "title": "Broker JVM Metrics", + "description": "Kafka brokers are Java applications that expose JVM metrics to inform on the broker's system health. Garbage collection metrics like those below provide key insights into free memory, broker performance, and heap size. You need to enable new_gc_metrics for this section to populate." + }, + "partitionMetrics": { + "title": "Partition Metrics", + "description": "Kafka partitions are the unit of parallelism in Kafka. These metrics inform you of the number of partitions per topic, the current offset of each partition, the oldest offset, and the number of in-sync replicas." + } + } +} diff --git a/frontend/public/locales/en/messagingQueuesKafkaOverview.json b/frontend/public/locales/en/messagingQueuesKafkaOverview.json index 5061a5ddcb..f298f664d1 100644 --- a/frontend/public/locales/en/messagingQueuesKafkaOverview.json +++ b/frontend/public/locales/en/messagingQueuesKafkaOverview.json @@ -1,30 +1,54 @@ { - "breadcrumb": "Messaging Queues", - "header": "Kafka / Overview", - "overview": { - "title": "Start sending data in as little as 20 minutes", - "subtitle": "Connect and Monitor Your Data Streams" - }, - "configureConsumer": { - "title": "Configure Consumer", - "description": "Add consumer data sources to gain insights and enhance monitoring.", - "button": "Get Started" - }, - "configureProducer": { - "title": "Configure Producer", - "description": "Add producer data sources to gain insights and enhance monitoring.", - "button": "Get Started" - }, - "monitorKafka": { - "title": "Monitor kafka", - "description": "Add your Kafka source to gain insights and enhance activity tracking.", - "button": "Get Started" - }, - "summarySection": 
{ - "viewDetailsButton": "View Details" - }, - "confirmModal": { - "content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.", - "okText": "Proceed" - } -} \ No newline at end of file + "breadcrumb": "Messaging Queues", + "header": "Kafka / Overview", + "overview": { + "title": "Start sending data in as little as 20 minutes", + "subtitle": "Connect and Monitor Your Data Streams" + }, + "configureConsumer": { + "title": "Configure Consumer", + "description": "Add consumer data sources to gain insights and enhance monitoring.", + "button": "Get Started" + }, + "configureProducer": { + "title": "Configure Producer", + "description": "Add producer data sources to gain insights and enhance monitoring.", + "button": "Get Started" + }, + "monitorKafka": { + "title": "Monitor kafka", + "description": "Add your Kafka source to gain insights and enhance activity tracking.", + "button": "Get Started" + }, + "summarySection": { + "viewDetailsButton": "View Details", + "consumer": { + "title": "Consumer lag view", + "description": "Connect and Monitor Your Data Streams" + }, + "producer": { + "title": "Producer latency view", + "description": "Connect and Monitor Your Data Streams" + }, + "partition": { + "title": "Partition Latency view", + "description": "Connect and Monitor Your Data Streams" + }, + "dropRate": { + "title": "Drop Rate view", + "description": "Connect and Monitor Your Data Streams" + }, + "metricPage": { + "title": "Metric View", + "description": "Connect and Monitor Your Data Streams" + } + }, + "confirmModal": { + "content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.", + "okText": "Proceed" + }, + "overviewSummarySection": { + "title": "Monitor Your Data Streams", + "subtitle": "Monitor key Kafka metrics like consumer lag and latency to ensure efficient data flow and troubleshoot 
in real time." + } +} diff --git a/frontend/src/AppRoutes/index.tsx b/frontend/src/AppRoutes/index.tsx index 2d0f4231cc..c4cab69413 100644 --- a/frontend/src/AppRoutes/index.tsx +++ b/frontend/src/AppRoutes/index.tsx @@ -28,6 +28,7 @@ import { Suspense, useEffect, useState } from 'react'; import { useQuery } from 'react-query'; import { useDispatch, useSelector } from 'react-redux'; import { Route, Router, Switch } from 'react-router-dom'; +import { CompatRouter } from 'react-router-dom-v5-compat'; import { Dispatch } from 'redux'; import { AppState } from 'store/reducers'; import AppActions from 'types/actions'; @@ -292,36 +293,38 @@ function App(): JSX.Element { return ( - - - - - - - - - }> - - {routes.map(({ path, component, exact }) => ( - - ))} - - - - - - - - - - - - + + + + + + + + + + }> + + {routes.map(({ path, component, exact }) => ( + + ))} + + + + + + + + + + + + + ); diff --git a/frontend/src/api/messagingQueues/onboarding/getOnboardingStatus.ts b/frontend/src/api/messagingQueues/onboarding/getOnboardingStatus.ts index b5da83aa13..da82e70134 100644 --- a/frontend/src/api/messagingQueues/onboarding/getOnboardingStatus.ts +++ b/frontend/src/api/messagingQueues/onboarding/getOnboardingStatus.ts @@ -16,11 +16,13 @@ export interface OnboardingStatusResponse { const getOnboardingStatus = async (props: { start: number; end: number; + endpointService?: string; }): Promise | ErrorResponse> => { + const { endpointService, ...rest } = props; try { const response = await ApiBaseInstance.post( - '/messaging-queues/kafka/onboarding/consumers', - props, + `/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`, + rest, ); return { diff --git a/frontend/src/components/Logs/LogStateIndicator/utils.test.ts b/frontend/src/components/Logs/LogStateIndicator/utils.test.ts index 17c601ffb4..f940ee3046 100644 --- a/frontend/src/components/Logs/LogStateIndicator/utils.test.ts +++ b/frontend/src/components/Logs/LogStateIndicator/utils.test.ts @@ -17,6 +17,7 @@ 
describe('getLogIndicatorType', () => { body: 'Sample log Message', resources_string: {}, attributesString: {}, + scope_string: {}, attributes_string: {}, attributesInt: {}, attributesFloat: {}, @@ -40,6 +41,7 @@ describe('getLogIndicatorType', () => { body: 'Sample log Message', resources_string: {}, attributesString: {}, + scope_string: {}, attributes_string: {}, attributesInt: {}, attributesFloat: {}, @@ -62,6 +64,7 @@ describe('getLogIndicatorType', () => { body: 'Sample log Message', resources_string: {}, attributesString: {}, + scope_string: {}, attributes_string: {}, attributesInt: {}, attributesFloat: {}, @@ -83,6 +86,7 @@ describe('getLogIndicatorType', () => { body: 'Sample log', resources_string: {}, attributesString: {}, + scope_string: {}, attributes_string: { log_level: 'INFO' as never, }, @@ -112,6 +116,7 @@ describe('getLogIndicatorTypeForTable', () => { attributesString: {}, attributes_string: {}, attributesInt: {}, + scope_string: {}, attributesFloat: {}, severity_text: 'WARN', }; @@ -130,6 +135,7 @@ describe('getLogIndicatorTypeForTable', () => { severity_number: 0, body: 'Sample log message', resources_string: {}, + scope_string: {}, attributesString: {}, attributes_string: {}, attributesInt: {}, @@ -166,6 +172,7 @@ describe('logIndicatorBySeverityNumber', () => { body: 'Sample log Message', resources_string: {}, attributesString: {}, + scope_string: {}, attributes_string: {}, attributesInt: {}, attributesFloat: {}, diff --git a/frontend/src/constants/query.ts b/frontend/src/constants/query.ts index 7d3cb2603d..56fbd737b5 100644 --- a/frontend/src/constants/query.ts +++ b/frontend/src/constants/query.ts @@ -40,4 +40,5 @@ export enum QueryParams { configDetail = 'configDetail', getStartedSource = 'getStartedSource', getStartedSourceService = 'getStartedSourceService', + mqServiceView = 'mqServiceView', } diff --git a/frontend/src/constants/reactQueryKeys.ts b/frontend/src/constants/reactQueryKeys.ts index ec2353abbf..1dbacde963 100644 --- 
a/frontend/src/constants/reactQueryKeys.ts +++ b/frontend/src/constants/reactQueryKeys.ts @@ -18,4 +18,5 @@ export const REACT_QUERY_KEY = { GET_ALL_ALLERTS: 'GET_ALL_ALLERTS', REMOVE_ALERT_RULE: 'REMOVE_ALERT_RULE', DUPLICATE_ALERT_RULE: 'DUPLICATE_ALERT_RULE', + UPDATE_ALERT_RULE: 'UPDATE_ALERT_RULE', }; diff --git a/frontend/src/container/CreateAlertRule/defaults.ts b/frontend/src/container/CreateAlertRule/defaults.ts index 44dee01d31..bac7c90865 100644 --- a/frontend/src/container/CreateAlertRule/defaults.ts +++ b/frontend/src/container/CreateAlertRule/defaults.ts @@ -57,6 +57,7 @@ export const alertDefaults: AlertDef = { }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, + alert: '', }; export const anamolyAlertDefaults: AlertDef = { @@ -94,12 +95,14 @@ export const anamolyAlertDefaults: AlertDef = { matchType: defaultMatchType, algorithm: defaultAlgorithm, seasonality: defaultSeasonality, + target: 3, }, labels: { severity: 'warning', }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, + alert: '', }; export const logAlertDefaults: AlertDef = { @@ -131,6 +134,7 @@ export const logAlertDefaults: AlertDef = { }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, + alert: '', }; export const traceAlertDefaults: AlertDef = { @@ -162,6 +166,7 @@ export const traceAlertDefaults: AlertDef = { }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, + alert: '', }; export const exceptionAlertDefaults: AlertDef = { @@ -193,6 +198,7 @@ export const exceptionAlertDefaults: AlertDef = { }, annotations: defaultAnnotations, evalWindow: defaultEvalWindow, + alert: '', }; export const ALERTS_VALUES_MAP: Record = { diff --git a/frontend/src/container/ExplorerOptions/utils.ts b/frontend/src/container/ExplorerOptions/utils.ts index d94e64161e..2aa6c49b91 100644 --- a/frontend/src/container/ExplorerOptions/utils.ts +++ b/frontend/src/container/ExplorerOptions/utils.ts @@ -1,4 +1,4 @@ -import { Color } from 
'@signozhq/design-tokens'; +import { Color, ColorType } from '@signozhq/design-tokens'; import { showErrorNotification } from 'components/ExplorerCard/utils'; import { LOCALSTORAGE } from 'constants/localStorage'; import { QueryParams } from 'constants/query'; @@ -8,7 +8,7 @@ import { DataSource } from 'types/common/queryBuilder'; import { SaveNewViewHandlerProps } from './types'; -export const getRandomColor = (): Color => { +export const getRandomColor = (): ColorType => { const colorKeys = Object.keys(Color) as (keyof typeof Color)[]; const randomKey = colorKeys[Math.floor(Math.random() * colorKeys.length)]; return Color[randomKey]; diff --git a/frontend/src/container/FormAlertRules/index.tsx b/frontend/src/container/FormAlertRules/index.tsx index 05c4149d73..5572af8365 100644 --- a/frontend/src/container/FormAlertRules/index.tsx +++ b/frontend/src/container/FormAlertRules/index.tsx @@ -53,6 +53,7 @@ import { QueryFunctionProps, } from 'types/api/queryBuilder/queryBuilderData'; import { EQueryType } from 'types/common/dashboard'; +import { DataSource } from 'types/common/queryBuilder'; import { GlobalReducer } from 'types/reducer/globalTime'; import BasicInfo from './BasicInfo'; @@ -105,6 +106,11 @@ function FormAlertRules({ const location = useLocation(); const queryParams = new URLSearchParams(location.search); + const dataSource = useMemo( + () => urlQuery.get(QueryParams.alertType) as DataSource, + [urlQuery], + ); + // In case of alert the panel types should always be "Graph" only const panelType = PANEL_TYPES.TIME_SERIES; @@ -114,13 +120,12 @@ function FormAlertRules({ handleSetQueryData, handleRunQuery, handleSetConfig, - initialDataSource, redirectWithQueryBuilderData, } = useQueryBuilder(); useEffect(() => { - handleSetConfig(panelType || PANEL_TYPES.TIME_SERIES, initialDataSource); - }, [handleSetConfig, initialDataSource, panelType]); + handleSetConfig(panelType || PANEL_TYPES.TIME_SERIES, dataSource); + }, [handleSetConfig, dataSource, panelType]); 
// use query client const ruleCache = useQueryClient(); diff --git a/frontend/src/container/FormAlertRules/labels/index.tsx b/frontend/src/container/FormAlertRules/labels/index.tsx index 30583e12f9..250c48be8a 100644 --- a/frontend/src/container/FormAlertRules/labels/index.tsx +++ b/frontend/src/container/FormAlertRules/labels/index.tsx @@ -138,6 +138,9 @@ function LabelSelect({ if (e.key === 'Enter' || e.code === 'Enter' || e.key === ':') { send('NEXT'); } + if (state.value === 'Idle') { + send('NEXT'); + } }} bordered={false} value={currentVal as never} diff --git a/frontend/src/container/LogDetailedView/utils.tsx b/frontend/src/container/LogDetailedView/utils.tsx index 766bb8b5bf..da62f97f8e 100644 --- a/frontend/src/container/LogDetailedView/utils.tsx +++ b/frontend/src/container/LogDetailedView/utils.tsx @@ -157,6 +157,11 @@ export const getFieldAttributes = (field: string): IFieldAttributes => { const stringWithoutPrefix = field.slice('resources_'.length); const parts = splitOnce(stringWithoutPrefix, '.'); [dataType, newField] = parts; + } else if (field.startsWith('scope_string')) { + logType = MetricsType.Scope; + const stringWithoutPrefix = field.slice('scope_'.length); + const parts = splitOnce(stringWithoutPrefix, '.'); + [dataType, newField] = parts; } return { dataType, newField, logType }; @@ -187,6 +192,7 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => { traceId: logData.traceId, attributes: {}, resources: {}, + scope: {}, severity_text: logData.severity_text, severity_number: logData.severity_number, }; @@ -198,6 +204,9 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => { } else if (key.startsWith('resources_')) { outputJson.resources = outputJson.resources || {}; Object.assign(outputJson.resources, logData[key as keyof ILog]); + } else if (key.startsWith('scope_string')) { + outputJson.scope = outputJson.scope || {}; + Object.assign(outputJson.scope, logData[key as keyof ILog]); } 
else { // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore diff --git a/frontend/src/container/LogsExplorerViews/index.tsx b/frontend/src/container/LogsExplorerViews/index.tsx index 8dc46c5a5a..5ce5dbe2be 100644 --- a/frontend/src/container/LogsExplorerViews/index.tsx +++ b/frontend/src/container/LogsExplorerViews/index.tsx @@ -202,6 +202,7 @@ function LogsExplorerViews({ id: 'severity_text--string----true', }, ], + legend: '{{severity_text}}', }; const modifiedQuery: Query = { diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts index 91343b7b06..f3124f0ad1 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/DBCallQueries.ts @@ -58,12 +58,17 @@ export const databaseCallsRPS = ({ const legends = [legend]; const dataSource = DataSource.METRICS; + const timeAggregateOperators = [MetricAggregateOperator.RATE]; + const spaceAggregateOperators = [MetricAggregateOperator.SUM]; + return getQueryBuilderQueries({ autocompleteData, groupBy, legends, filterItems, dataSource, + timeAggregateOperators, + spaceAggregateOperators, }); }; diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts index a2c87f0874..6a7ab65906 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/ExternalQueries.ts @@ -213,12 +213,17 @@ export const externalCallRpsByAddress = ({ const legends = [legend]; const dataSource = DataSource.METRICS; + const timeAggregateOperators = [MetricAggregateOperator.RATE]; + const spaceAggregateOperators = [MetricAggregateOperator.SUM]; + return getQueryBuilderQueries({ autocompleteData, groupBy, legends, 
filterItems, dataSource, + timeAggregateOperators, + spaceAggregateOperators, }); }; diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts index 71a16fcc07..e8b0fcc807 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/MetricsPageQueriesFactory.ts @@ -25,6 +25,8 @@ export const getQueryBuilderQueries = ({ aggregateOperator, dataSource, queryNameAndExpression, + timeAggregateOperators, + spaceAggregateOperators, }: BuilderQueriesProps): QueryBuilderData => ({ queryFormulas: [], queryData: autocompleteData.map((item, index) => { @@ -50,6 +52,8 @@ export const getQueryBuilderQueries = ({ op: 'AND', }, reduceTo: 'avg', + spaceAggregation: spaceAggregateOperators[index], + timeAggregation: timeAggregateOperators[index], dataSource, }; diff --git a/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts b/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts index d27bfc01be..0d2c05a349 100644 --- a/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts +++ b/frontend/src/container/MetricsApplication/MetricsPageQueries/OverviewQueries.ts @@ -83,6 +83,17 @@ export const latency = ({ const dataSource = isSpanMetricEnable ? 
DataSource.METRICS : DataSource.TRACES; const queryNameAndExpression = QUERYNAME_AND_EXPRESSION; + const timeAggregateOperators = [ + MetricAggregateOperator.EMPTY, + MetricAggregateOperator.EMPTY, + MetricAggregateOperator.EMPTY, + ]; + const spaceAggregateOperators = [ + MetricAggregateOperator.P50, + MetricAggregateOperator.P90, + MetricAggregateOperator.P99, + ]; + return getQueryBuilderQueries({ autocompleteData, legends, @@ -90,6 +101,8 @@ export const latency = ({ aggregateOperator, dataSource, queryNameAndExpression, + timeAggregateOperators, + spaceAggregateOperators, }); }; @@ -510,11 +523,16 @@ export const operationPerSec = ({ const legends = OPERATION_LEGENDS; const dataSource = DataSource.METRICS; + const timeAggregateOperators = [MetricAggregateOperator.RATE]; + const spaceAggregateOperators = [MetricAggregateOperator.SUM]; + return getQueryBuilderQueries({ autocompleteData, legends, filterItems, dataSource, + timeAggregateOperators, + spaceAggregateOperators, }); }; diff --git a/frontend/src/container/MetricsApplication/Tabs/types.ts b/frontend/src/container/MetricsApplication/Tabs/types.ts index 9b45bd5492..4dcb3bc01e 100644 --- a/frontend/src/container/MetricsApplication/Tabs/types.ts +++ b/frontend/src/container/MetricsApplication/Tabs/types.ts @@ -29,6 +29,8 @@ export interface BuilderQueriesProps { aggregateOperator?: string[]; dataSource: DataSource; queryNameAndExpression?: string[]; + timeAggregateOperators: MetricAggregateOperator[]; + spaceAggregateOperators: MetricAggregateOperator[]; } export interface BuilderQuerieswithFormulaProps { diff --git a/frontend/src/container/MetricsApplication/constant.ts b/frontend/src/container/MetricsApplication/constant.ts index 9c039292bd..75853cc8ea 100644 --- a/frontend/src/container/MetricsApplication/constant.ts +++ b/frontend/src/container/MetricsApplication/constant.ts @@ -2,18 +2,27 @@ import { DownloadOptions } from 'container/Download/Download.types'; import { MenuItemKeys } from 
'container/GridCardLayout/WidgetHeader/contants'; +import { + MetricAggregateOperator, + TracesAggregatorOperator, +} from 'types/common/queryBuilder'; export const legend = { address: '{{address}}', }; export const QUERYNAME_AND_EXPRESSION = ['A', 'B', 'C']; -export const LATENCY_AGGREGATEOPERATOR = ['p50', 'p90', 'p99']; +export const LATENCY_AGGREGATEOPERATOR = [ + TracesAggregatorOperator.P50, + TracesAggregatorOperator.P90, + TracesAggregatorOperator.P99, +]; export const LATENCY_AGGREGATEOPERATOR_SPAN_METRICS = [ - 'hist_quantile_50', - 'hist_quantile_90', - 'hist_quantile_99', + MetricAggregateOperator.P50, + MetricAggregateOperator.P90, + MetricAggregateOperator.P99, ]; + export const OPERATION_LEGENDS = ['Operations']; export const MENU_ITEMS = [MenuItemKeys.View, MenuItemKeys.CreateAlerts]; @@ -21,8 +30,21 @@ export const MENU_ITEMS = [MenuItemKeys.View, MenuItemKeys.CreateAlerts]; export enum FORMULA { ERROR_PERCENTAGE = 'A*100/B', DATABASE_CALLS_AVG_DURATION = 'A/B', + // The apdex formula is (satisfied_count + 0.5 * tolerating_count + 0 * frustating_count) / total_count + // The satisfied_count is B, tolerating_count is C, total_count is A + // But why do we have (B+C)/2 instead of B + C/2? + // The way we issue the query is latency <= threshold, which means we over count i.e + // query B => durationNano <= 500ms + // query C => durationNano <= 2000ms + // Since <= 2000ms includes <= 500ms, we over count, to correct we subtract B/2 + // so the full expression would be (B + C/2) - B/2 = (B+C)/2 APDEX_TRACES = '((B + C)/2)/A', - APDEX_DELTA_SPAN_METRICS = '((B + C)/2)/A', + // Does the same not apply for delta span metrics? 
+ // No, because the delta metrics store the counts just for the current bucket + // so we don't need to subtract anything + APDEX_DELTA_SPAN_METRICS = '(B + C)/A', + // Cumulative span metrics store the counts for all buckets + // so we need to subtract B/2 to correct the over counting APDEX_CUMULATIVE_SPAN_METRICS = '((B + C)/2)/A', } @@ -53,6 +75,7 @@ export enum KeyOperationTableHeader { export enum MetricsType { Tag = 'tag', Resource = 'resource', + Scope = 'scope', } export enum WidgetKeys { diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-consumers.md b/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-consumers.md new file mode 100644 index 0000000000..1c521bcba4 --- /dev/null +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-consumers.md @@ -0,0 +1,32 @@ +  + +Once you are done instrumenting your Java application, you can run it using the below commands + +**Note:** +- Ensure you have Java and Maven installed. Compile your Java consumer applications: Ensure your consumer apps are compiled and ready to run. 
+ +**Run Consumer App with Java Agent:** + +```bash +java -javaagent:/path/to/opentelemetry-javaagent.jar \ + -Dotel.service.name=consumer-svc \ + -Dotel.traces.exporter=otlp \ + -Dotel.metrics.exporter=otlp \ + -Dotel.logs.exporter=otlp \ + -Dotel.instrumentation.kafka.producer-propagation.enabled=true \ + -Dotel.instrumentation.kafka.experimental-span-attributes=true \ + -Dotel.instrumentation.kafka.metric-reporter.enabled=true \ + -jar /path/to/your/consumer.jar +``` + + - update it to the path where you downloaded the Java JAR agent in previous step + - Jar file of your application + +  + +**Note:** +- In case you're dockerising your application, make sure to dockerise it along with OpenTelemetry instrumentation done in previous step. + +  + +If you encounter any difficulties, please consult the [troubleshooting section](https://signoz.io/docs/instrumentation/springboot/#troubleshooting-your-installation) for assistance. \ No newline at end of file diff --git a/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producer.md b/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producers.md similarity index 82% rename from frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producer.md rename to frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producers.md index 2c463dbf07..01a11a9ad7 100644 --- a/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producer.md +++ b/frontend/src/container/OnboardingContainer/Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producers.md @@ -1,9 +1,9 @@   -Once you are done intrumenting your Java application, you can run it 
using the below commands +Once you are done instrumenting your Java application, you can run it using the below commands **Note:** -- Ensure you have Java and Maven installed. Compile your Java producer applications: Ensure your producer and consumer apps are compiled and ready to run. +- Ensure you have Java and Maven installed. Compile your Java producer applications: Ensure your producer apps are compiled and ready to run. **Run Producer App with Java Agent:** diff --git a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx index 97238b6553..0fc81c0533 100644 --- a/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/ConnectionStatus/ConnectionStatus.tsx @@ -14,6 +14,7 @@ import { useQueryService } from 'hooks/useQueryService'; import useResourceAttribute from 'hooks/useResourceAttribute'; import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils'; import useUrlQuery from 'hooks/useUrlQuery'; +import MessagingQueueHealthCheck from 'pages/MessagingQueues/MessagingQueueHealthCheck/MessagingQueueHealthCheck'; import { getAttributeDataFromOnboardingStatus } from 'pages/MessagingQueues/MessagingQueuesUtils'; import { useEffect, useMemo, useState } from 'react'; import { useDispatch, useSelector } from 'react-redux'; @@ -33,6 +34,9 @@ export default function ConnectionStatus(): JSX.Element { const urlQuery = useUrlQuery(); const getStartedSource = urlQuery.get(QueryParams.getStartedSource); + const getStartedSourceService = urlQuery.get( + QueryParams.getStartedSourceService, + ); const { serviceName, @@ -74,10 +78,14 @@ export default function ConnectionStatus(): JSX.Element { data: onbData, error: onbErr, isFetching: onbFetching, - } = useOnboardingStatus({ - enabled: getStartedSource === 'kafka', - refetchInterval: 
pollInterval, - }); + } = useOnboardingStatus( + { + enabled: getStartedSource === 'kafka', + refetchInterval: pollInterval, + }, + getStartedSourceService || '', + 'query-key-onboarding-status', + ); const [ shouldRetryOnboardingCall, @@ -326,18 +334,30 @@ export default function ConnectionStatus(): JSX.Element {
{isQueryServiceLoading && } - {!isQueryServiceLoading && isReceivingData && ( - <> - - Success - - )} - {!isQueryServiceLoading && !isReceivingData && ( - <> - - Failed - - )} + {!isQueryServiceLoading && + isReceivingData && + (getStartedSource !== 'kafka' ? ( + <> + + Success + + ) : ( + + ))} + {!isQueryServiceLoading && + !isReceivingData && + (getStartedSource !== 'kafka' ? ( + <> + + Failed + + ) : ( + + ))}
diff --git a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx index f31fad1aa2..0936c4754d 100644 --- a/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/DataSource/DataSource.tsx @@ -9,7 +9,10 @@ import cx from 'classnames'; import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext'; -import { useCases } from 'container/OnboardingContainer/OnboardingContainer'; +import { + ModulesMap, + useCases, +} from 'container/OnboardingContainer/OnboardingContainer'; import { getDataSources, getSupportedFrameworks, @@ -49,6 +52,9 @@ export default function DataSource(): JSX.Element { updateSelectedFramework, } = useOnboardingContext(); + const isKafkaAPM = + getStartedSource === 'kafka' && selectedModule?.id === ModulesMap.APM; + const [supportedDataSources, setSupportedDataSources] = useState< DataSourceType[] >([]); @@ -155,14 +161,14 @@ export default function DataSource(): JSX.Element { className={cx( 'supported-language', selectedDataSource?.name === dataSource.name ? 'selected' : '', - getStartedSource === 'kafka' && + isKafkaAPM && !messagingQueueKakfaSupportedDataSources.includes(dataSource?.id || '') ? 
'disabled' : '', )} key={dataSource.name} onClick={(): void => { - if (getStartedSource !== 'kafka') { + if (!isKafkaAPM) { updateSelectedFramework(null); updateSelectedEnvironment(null); updateSelectedDataSource(dataSource); diff --git a/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts b/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts index df5e296722..b91d13d3bb 100644 --- a/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts +++ b/frontend/src/container/OnboardingContainer/constants/apmDocFilePaths.ts @@ -252,7 +252,8 @@ import APM_java_springBoot_docker_recommendedSteps_runApplication from '../Modul import APM_java_springBoot_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-installOtelCollector.md'; import APM_java_springBoot_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-instrumentApplication.md'; import APM_java_springBoot_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication.md'; -import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producer from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producer.md'; +import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_consumers from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-consumers.md'; +import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producers from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producers.md'; // SpringBoot-LinuxAMD64-quickstart import APM_java_springBoot_linuxAMD64_quickStart_instrumentApplication from 
'../Modules/APM/Java/md-docs/SpringBoot/LinuxAMD64/QuickStart/springBoot-linuxamd64-quickStart-instrumentApplication.md'; import APM_java_springBoot_linuxAMD64_quickStart_runApplication from '../Modules/APM/Java/md-docs/SpringBoot/LinuxAMD64/QuickStart/springBoot-linuxamd64-quickStart-runApplication.md'; @@ -1054,7 +1055,8 @@ export const ApmDocFilePaths = { APM_java_springBoot_kubernetes_recommendedSteps_setupOtelCollector, APM_java_springBoot_kubernetes_recommendedSteps_instrumentApplication, APM_java_springBoot_kubernetes_recommendedSteps_runApplication, - APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producer, + APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producers, + APM_java_springBoot_kubernetes_recommendedSteps_runApplication_consumers, // SpringBoot-LinuxAMD64-recommended APM_java_springBoot_linuxAMD64_recommendedSteps_setupOtelCollector, diff --git a/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx b/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx index ee7606ff3f..1c061803be 100644 --- a/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx +++ b/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx @@ -82,7 +82,7 @@ export function AboutSigNozQuestions({ otherInterestInSignoz, }); - logEvent('User Onboarding: About SigNoz Questions Answered', { + logEvent('Org Onboarding: Answered', { hearAboutSignoz, otherAboutSignoz, interestInSignoz, diff --git a/frontend/src/container/OnboardingQuestionaire/InviteTeamMembers/InviteTeamMembers.tsx b/frontend/src/container/OnboardingQuestionaire/InviteTeamMembers/InviteTeamMembers.tsx index fef689de3a..def1cf979d 100644 --- a/frontend/src/container/OnboardingQuestionaire/InviteTeamMembers/InviteTeamMembers.tsx +++ b/frontend/src/container/OnboardingQuestionaire/InviteTeamMembers/InviteTeamMembers.tsx @@ -161,6 +161,13 @@ 
function InviteTeamMembers({ setInviteUsersSuccessResponse(successfulInvites); + logEvent('Org Onboarding: Invite Team Members Success', { + teamMembers: teamMembersToInvite, + totalInvites: inviteUsersResponse.summary.total_invites, + successfulInvites: inviteUsersResponse.summary.successful_invites, + failedInvites: inviteUsersResponse.summary.failed_invites, + }); + setTimeout(() => { setDisableNextButton(false); onNext(); @@ -172,6 +179,13 @@ function InviteTeamMembers({ setInviteUsersSuccessResponse(successfulInvites); + logEvent('Org Onboarding: Invite Team Members Partial Success', { + teamMembers: teamMembersToInvite, + totalInvites: inviteUsersResponse.summary.total_invites, + successfulInvites: inviteUsersResponse.summary.successful_invites, + failedInvites: inviteUsersResponse.summary.failed_invites, + }); + if (inviteUsersResponse.failed_invites.length > 0) { setHasErrors(true); @@ -182,27 +196,21 @@ function InviteTeamMembers({ } }; - const { - mutate: sendInvites, - isLoading: isSendingInvites, - data: inviteUsersApiResponseData, - } = useMutation(inviteUsers, { - onSuccess: (response: SuccessResponse): void => { - logEvent('User Onboarding: Invite Team Members Sent', { - teamMembers: teamMembersToInvite, - }); - - handleInviteUsersSuccess(response); + const { mutate: sendInvites, isLoading: isSendingInvites } = useMutation( + inviteUsers, + { + onSuccess: (response: SuccessResponse): void => { + handleInviteUsersSuccess(response); + }, + onError: (error: AxiosError): void => { + logEvent('Org Onboarding: Invite Team Members Failed', { + teamMembers: teamMembersToInvite, + }); + + handleError(error); + }, }, - onError: (error: AxiosError): void => { - logEvent('User Onboarding: Invite Team Members Failed', { - teamMembers: teamMembersToInvite, - error, - }); - - handleError(error); - }, - }); + ); const handleNext = (): void => { if (validateAllUsers()) { @@ -254,9 +262,8 @@ function InviteTeamMembers({ }; const handleDoLater = (): void => { - 
logEvent('User Onboarding: Invite Team Members Skipped', { - teamMembers: teamMembersToInvite, - apiResponse: inviteUsersApiResponseData, + logEvent('Org Onboarding: Clicked Do Later', { + currentPageID: 4, }); onNext(); diff --git a/frontend/src/container/OnboardingQuestionaire/OptimiseSignozNeeds/OptimiseSignozNeeds.tsx b/frontend/src/container/OnboardingQuestionaire/OptimiseSignozNeeds/OptimiseSignozNeeds.tsx index f1be6fb8ee..dc499c9308 100644 --- a/frontend/src/container/OnboardingQuestionaire/OptimiseSignozNeeds/OptimiseSignozNeeds.tsx +++ b/frontend/src/container/OnboardingQuestionaire/OptimiseSignozNeeds/OptimiseSignozNeeds.tsx @@ -122,7 +122,7 @@ function OptimiseSignozNeeds({ }, [services, hostsPerDay, logsPerDay]); const handleOnNext = (): void => { - logEvent('User Onboarding: Optimise SigNoz Needs Answered', { + logEvent('Org Onboarding: Answered', { logsPerDay, hostsPerDay, services, @@ -144,10 +144,8 @@ function OptimiseSignozNeeds({ onWillDoLater(); - logEvent('User Onboarding: Optimise SigNoz Needs Skipped', { - logsPerDay: 0, - hostsPerDay: 0, - services: 0, + logEvent('Org Onboarding: Clicked Do Later', { + currentPageID: 3, }); }; diff --git a/frontend/src/container/OnboardingQuestionaire/OrgQuestions/OrgQuestions.tsx b/frontend/src/container/OnboardingQuestionaire/OrgQuestions/OrgQuestions.tsx index e0376a6559..7569e0fa81 100644 --- a/frontend/src/container/OnboardingQuestionaire/OrgQuestions/OrgQuestions.tsx +++ b/frontend/src/container/OnboardingQuestionaire/OrgQuestions/OrgQuestions.tsx @@ -94,6 +94,13 @@ function OrgQuestions({ organisationName === '' || orgDetails.organisationName === organisationName ) { + logEvent('Org Onboarding: Answered', { + usesObservability, + observabilityTool, + otherTool, + familiarity, + }); + onNext({ organisationName, usesObservability, @@ -121,10 +128,17 @@ function OrgQuestions({ }, }); - logEvent('User Onboarding: Org Name Updated', { + logEvent('Org Onboarding: Org Name Updated', { organisationName: 
orgDetails.organisationName, }); + logEvent('Org Onboarding: Answered', { + usesObservability, + observabilityTool, + otherTool, + familiarity, + }); + onNext({ organisationName, usesObservability, @@ -133,7 +147,7 @@ function OrgQuestions({ familiarity, }); } else { - logEvent('User Onboarding: Org Name Update Failed', { + logEvent('Org Onboarding: Org Name Update Failed', { organisationName: orgDetails.organisationName, }); diff --git a/frontend/src/container/OnboardingQuestionaire/index.tsx b/frontend/src/container/OnboardingQuestionaire/index.tsx index 3b3ed59354..390ac00212 100644 --- a/frontend/src/container/OnboardingQuestionaire/index.tsx +++ b/frontend/src/container/OnboardingQuestionaire/index.tsx @@ -1,6 +1,7 @@ import './OnboardingQuestionaire.styles.scss'; import { NotificationInstance } from 'antd/es/notification/interface'; +import logEvent from 'api/common/logEvent'; import updateProfileAPI from 'api/onboarding/updateProfile'; import getAllOrgPreferences from 'api/preferences/getAllOrgPreferences'; import updateOrgPreferenceAPI from 'api/preferences/updateOrgPreference'; @@ -61,6 +62,10 @@ const INITIAL_OPTIMISE_SIGNOZ_DETAILS: OptimiseSignozDetails = { services: 0, }; +const BACK_BUTTON_EVENT_NAME = 'Org Onboarding: Back Button Clicked'; +const NEXT_BUTTON_EVENT_NAME = 'Org Onboarding: Next Button Clicked'; +const ONBOARDING_COMPLETE_EVENT_NAME = 'Org Onboarding: Complete'; + function OnboardingQuestionaire(): JSX.Element { const { notifications } = useNotifications(); const { org } = useSelector((state) => state.app); @@ -98,6 +103,13 @@ function OnboardingQuestionaire(): JSX.Element { // eslint-disable-next-line react-hooks/exhaustive-deps }, [org]); + useEffect(() => { + logEvent('Org Onboarding: Started', { + org_id: org?.[0]?.id, + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const { refetch: refetchOrgPreferences } = useQuery({ queryFn: () => getAllOrgPreferences(), queryKey: ['getOrgPreferences'], @@ -120,6 
+132,8 @@ function OnboardingQuestionaire(): JSX.Element { setUpdatingOrgOnboardingStatus(false); + logEvent('Org Onboarding: Redirecting to Get Started', {}); + history.push(ROUTES.GET_STARTED); }, onError: () => { @@ -156,6 +170,11 @@ function OnboardingQuestionaire(): JSX.Element { }); const handleUpdateProfile = (): void => { + logEvent(NEXT_BUTTON_EVENT_NAME, { + currentPageID: 3, + nextPageID: 4, + }); + updateProfile({ familiarity_with_observability: orgDetails?.familiarity as string, has_existing_observability_tool: orgDetails?.usesObservability as boolean, @@ -180,6 +199,10 @@ function OnboardingQuestionaire(): JSX.Element { }; const handleOnboardingComplete = (): void => { + logEvent(ONBOARDING_COMPLETE_EVENT_NAME, { + currentPageID: 4, + }); + setUpdatingOrgOnboardingStatus(true); updateOrgPreference({ preferenceID: 'ORG_ONBOARDING', @@ -199,6 +222,11 @@ function OnboardingQuestionaire(): JSX.Element { currentOrgData={currentOrgData} orgDetails={orgDetails} onNext={(orgDetails: OrgDetails): void => { + logEvent(NEXT_BUTTON_EVENT_NAME, { + currentPageID: 1, + nextPageID: 2, + }); + setOrgDetails(orgDetails); setCurrentStep(2); }} @@ -209,8 +237,20 @@ function OnboardingQuestionaire(): JSX.Element { setCurrentStep(1)} - onNext={(): void => setCurrentStep(3)} + onBack={(): void => { + logEvent(BACK_BUTTON_EVENT_NAME, { + currentPageID: 2, + prevPageID: 1, + }); + setCurrentStep(1); + }} + onNext={(): void => { + logEvent(NEXT_BUTTON_EVENT_NAME, { + currentPageID: 2, + nextPageID: 3, + }); + setCurrentStep(3); + }} /> )} @@ -220,9 +260,15 @@ function OnboardingQuestionaire(): JSX.Element { isUpdatingProfile={isUpdatingProfile} optimiseSignozDetails={optimiseSignozDetails} setOptimiseSignozDetails={setOptimiseSignozDetails} - onBack={(): void => setCurrentStep(2)} + onBack={(): void => { + logEvent(BACK_BUTTON_EVENT_NAME, { + currentPageID: 3, + prevPageID: 2, + }); + setCurrentStep(2); + }} onNext={handleUpdateProfile} - onWillDoLater={(): void => 
setCurrentStep(4)} // This is temporary, only to skip gateway api call as it's not setup on staging yet + onWillDoLater={(): void => setCurrentStep(4)} /> )} @@ -231,7 +277,13 @@ function OnboardingQuestionaire(): JSX.Element { isLoading={updatingOrgOnboardingStatus} teamMembers={teamMembers} setTeamMembers={setTeamMembers} - onBack={(): void => setCurrentStep(3)} + onBack={(): void => { + logEvent(BACK_BUTTON_EVENT_NAME, { + currentPageID: 4, + prevPageID: 3, + }); + setCurrentStep(3); + }} onNext={handleOnboardingComplete} /> )} diff --git a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx index 36c54aa9a1..60905fa33b 100644 --- a/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx +++ b/frontend/src/container/QueryBuilder/filters/AggregatorFilter/AggregatorFilter.tsx @@ -81,8 +81,10 @@ export const AggregatorFilter = memo(function AggregatorFilter({ prefix: item.type || '', condition: !item.isColumn, }), + !item.isColumn && item.type ? item.type : '', )} dataType={item.dataType} + type={item.type || ''} /> ), value: `${item.key}${selectValueDivider}${createIdFromObjectFields( @@ -187,6 +189,9 @@ export const AggregatorFilter = memo(function AggregatorFilter({ prefix: query.aggregateAttribute.type || '', condition: !query.aggregateAttribute.isColumn, }), + !query.aggregateAttribute.isColumn && query.aggregateAttribute.type + ? 
query.aggregateAttribute.type + : '', ); return ( diff --git a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx index bed3870570..476bf71f21 100644 --- a/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx +++ b/frontend/src/container/QueryBuilder/filters/GroupByFilter/GroupByFilter.tsx @@ -75,8 +75,10 @@ export const GroupByFilter = memo(function GroupByFilter({ prefix: item.type || '', condition: !item.isColumn, }), + !item.isColumn && item.type ? item.type : '', )} dataType={item.dataType || ''} + type={item.type || ''} /> ), value: `${item.id}`, @@ -166,6 +168,7 @@ export const GroupByFilter = memo(function GroupByFilter({ prefix: item.type || '', condition: !item.isColumn, }), + !item.isColumn && item.type ? item.type : '', )}`, value: `${item.id}`, }), diff --git a/frontend/src/container/QueryBuilder/filters/GroupByFilter/utils.ts b/frontend/src/container/QueryBuilder/filters/GroupByFilter/utils.ts index 50dccec4d9..0fb85a7e30 100644 --- a/frontend/src/container/QueryBuilder/filters/GroupByFilter/utils.ts +++ b/frontend/src/container/QueryBuilder/filters/GroupByFilter/utils.ts @@ -1,8 +1,9 @@ import { MetricsType } from 'container/MetricsApplication/constant'; -export function removePrefix(str: string): string { +export function removePrefix(str: string, type: string): string { const tagPrefix = `${MetricsType.Tag}_`; const resourcePrefix = `${MetricsType.Resource}_`; + const scopePrefix = `${MetricsType.Scope}_`; if (str.startsWith(tagPrefix)) { return str.slice(tagPrefix.length); @@ -10,5 +11,9 @@ export function removePrefix(str: string): string { if (str.startsWith(resourcePrefix)) { return str.slice(resourcePrefix.length); } + if (str.startsWith(scopePrefix) && type === MetricsType.Scope) { + return str.slice(scopePrefix.length); + } + return str; } diff --git 
a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx index a7dcef96c3..a93041f5e8 100644 --- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer.tsx @@ -3,25 +3,23 @@ import './QueryBuilderSearch.styles.scss'; import { Tooltip } from 'antd'; import { TagContainer, TagLabel, TagValue } from './style'; -import { getOptionType } from './utils'; function OptionRenderer({ label, value, dataType, + type, }: OptionRendererProps): JSX.Element { - const optionType = getOptionType(label); - return ( - {optionType ? ( + {type ? (
{value}
Type: - {optionType} + {type} Data type: @@ -43,6 +41,7 @@ interface OptionRendererProps { label: string; value: string; dataType: string; + type: string; } export default OptionRenderer; diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx index c1f4b85a11..ba30d96d9c 100644 --- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearch/index.tsx @@ -410,6 +410,7 @@ function QueryBuilderSearch({ label={option.label} value={option.value} dataType={option.dataType || ''} + type={option.type || ''} /> {option.selected && } diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2.styles.scss b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2.styles.scss index 7aee4f9414..60eec0bdb6 100644 --- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2.styles.scss +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2.styles.scss @@ -260,6 +260,20 @@ background: rgba(189, 153, 121, 0.1); } } + + &.scope { + border: 1px solid rgba(113, 144, 249, 0.2); + + .ant-typography { + color: var(--bg-robin-400); + background: rgba(113, 144, 249, 0.1); + font-size: 14px; + } + + .ant-tag-close-icon { + background: rgba(113, 144, 249, 0.1); + } + } } } } diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/Suggestions.styles.scss b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/Suggestions.styles.scss index 1b434316e5..bff02fab3e 100644 --- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/Suggestions.styles.scss +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/Suggestions.styles.scss @@ -94,6 +94,25 @@ letter-spacing: -0.06px; } } + + &.scope { + border-radius: 50px; + 
background: rgba(113, 144, 249, 0.1) !important; + color: var(--bg-robin-400) !important; + + .dot { + background-color: var(--bg-robin-400); + } + .text { + color: var(--bg-robin-400); + font-family: Inter; + font-size: 12px; + font-style: normal; + font-weight: 400; + line-height: 18px; /* 150% */ + letter-spacing: -0.06px; + } + } } } .option-meta-data-container { diff --git a/frontend/src/container/QueryBuilder/type.ts b/frontend/src/container/QueryBuilder/type.ts index 183dd157f8..d20925e330 100644 --- a/frontend/src/container/QueryBuilder/type.ts +++ b/frontend/src/container/QueryBuilder/type.ts @@ -16,4 +16,5 @@ export type Option = { selected?: boolean; dataType?: string; isIndexed?: boolean; + type?: string; }; diff --git a/frontend/src/hooks/messagingQueue / onboarding/useOnboardingStatus.tsx b/frontend/src/hooks/messagingQueue / onboarding/useOnboardingStatus.tsx index 897b0d7e33..13ecd15b8b 100644 --- a/frontend/src/hooks/messagingQueue / onboarding/useOnboardingStatus.tsx +++ b/frontend/src/hooks/messagingQueue / onboarding/useOnboardingStatus.tsx @@ -8,15 +8,22 @@ type UseOnboardingStatus = ( options?: UseQueryOptions< SuccessResponse | ErrorResponse >, + endpointService?: string, + queryKey?: string, ) => UseQueryResult | ErrorResponse>; -export const useOnboardingStatus: UseOnboardingStatus = (options) => +export const useOnboardingStatus: UseOnboardingStatus = ( + options, + endpointService, + queryKey, +) => useQuery | ErrorResponse>({ - queryKey: ['onboardingStatus'], + queryKey: [queryKey || `onboardingStatus-${endpointService}`], queryFn: () => getOnboardingStatus({ start: (Date.now() - 15 * 60 * 1000) * 1_000_000, end: Date.now() * 1_000_000, + endpointService, }), ...options, }); diff --git a/frontend/src/hooks/queryBuilder/useOptions.ts b/frontend/src/hooks/queryBuilder/useOptions.ts index 2f24dd0d21..e990f789de 100644 --- a/frontend/src/hooks/queryBuilder/useOptions.ts +++ b/frontend/src/hooks/queryBuilder/useOptions.ts @@ -46,6 +46,7 @@ 
export const useOptions = ( value: item.key, dataType: item.dataType, isIndexed: item?.isIndexed, + type: item?.type || '', })), [getLabel], ); diff --git a/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/ActionButtons.tsx b/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/ActionButtons.tsx index 2f37c4fc9d..00987a0a66 100644 --- a/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/ActionButtons.tsx +++ b/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/ActionButtons.tsx @@ -2,82 +2,90 @@ import './ActionButtons.styles.scss'; import { Color } from '@signozhq/design-tokens'; import { Divider, Dropdown, MenuProps, Switch, Tooltip } from 'antd'; -import { QueryParams } from 'constants/query'; -import ROUTES from 'constants/routes'; import { useIsDarkMode } from 'hooks/useDarkMode'; -import useUrlQuery from 'hooks/useUrlQuery'; -import history from 'lib/history'; import { Copy, Ellipsis, PenLine, Trash2 } from 'lucide-react'; import { useAlertRuleDelete, useAlertRuleDuplicate, useAlertRuleStatusToggle, + useAlertRuleUpdate, } from 'pages/AlertDetails/hooks'; import CopyToClipboard from 'periscope/components/CopyToClipboard'; import { useAlertRule } from 'providers/Alert'; -import React, { useEffect, useState } from 'react'; +import { useCallback, useEffect, useState } from 'react'; import { CSSProperties } from 'styled-components'; import { AlertDef } from 'types/api/alerts/def'; import { AlertHeaderProps } from '../AlertHeader'; +import RenameModal from './RenameModal'; const menuItemStyle: CSSProperties = { fontSize: '14px', letterSpacing: '0.14px', }; + function AlertActionButtons({ ruleId, alertDetails, + setUpdatedName, }: { ruleId: string; alertDetails: AlertHeaderProps['alertDetails']; + setUpdatedName: (name: string) => void; }): JSX.Element { const { alertRuleState, setAlertRuleState } = useAlertRule(); - const { handleAlertStateToggle } = useAlertRuleStatusToggle({ ruleId }); + const [intermediateName, setIntermediateName] = 
useState( + alertDetails.alert, + ); + const [isRenameAlertOpen, setIsRenameAlertOpen] = useState(false); + const isDarkMode = useIsDarkMode(); + const { handleAlertStateToggle } = useAlertRuleStatusToggle({ ruleId }); const { handleAlertDuplicate } = useAlertRuleDuplicate({ alertDetails: (alertDetails as unknown) as AlertDef, }); const { handleAlertDelete } = useAlertRuleDelete({ ruleId: Number(ruleId) }); + const { handleAlertUpdate, isLoading } = useAlertRuleUpdate({ + alertDetails: (alertDetails as unknown) as AlertDef, + setUpdatedName, + intermediateName, + }); - const params = useUrlQuery(); - - const handleRename = React.useCallback(() => { - params.set(QueryParams.ruleId, String(ruleId)); - history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`); - }, [params, ruleId]); - - const menu: MenuProps['items'] = React.useMemo( - () => [ - { - key: 'rename-rule', - label: 'Rename', - icon: , - onClick: (): void => handleRename(), - style: menuItemStyle, - }, - { - key: 'duplicate-rule', - label: 'Duplicate', - icon: , - onClick: (): void => handleAlertDuplicate(), - style: menuItemStyle, - }, - { type: 'divider' }, - { - key: 'delete-rule', - label: 'Delete', - icon: , - onClick: (): void => handleAlertDelete(), - style: { - ...menuItemStyle, - color: Color.BG_CHERRY_400, - }, + const handleRename = useCallback(() => { + setIsRenameAlertOpen(true); + }, []); + + const onNameChangeHandler = useCallback(() => { + handleAlertUpdate(); + setIsRenameAlertOpen(false); + }, [handleAlertUpdate]); + + const menuItems: MenuProps['items'] = [ + { + key: 'rename-rule', + label: 'Rename', + icon: , + onClick: handleRename, + style: menuItemStyle, + }, + { + key: 'duplicate-rule', + label: 'Duplicate', + icon: , + onClick: handleAlertDuplicate, + style: menuItemStyle, + }, + { + key: 'delete-rule', + label: 'Delete', + icon: , + onClick: handleAlertDelete, + style: { + ...menuItemStyle, + color: Color.BG_CHERRY_400, }, - ], - [handleAlertDelete, handleAlertDuplicate, 
handleRename], - ); - const isDarkMode = useIsDarkMode(); + }, + ]; // state for immediate UI feedback rather than waiting for onSuccess of handleAlertStateTiggle to updating the alertRuleState const [isAlertRuleDisabled, setIsAlertRuleDisabled] = useState< @@ -95,35 +103,48 @@ function AlertActionButtons({ // eslint-disable-next-line react-hooks/exhaustive-deps useEffect(() => (): void => setAlertRuleState(undefined), []); + const toggleAlertRule = useCallback(() => { + setIsAlertRuleDisabled((prev) => !prev); + handleAlertStateToggle(); + }, [handleAlertStateToggle]); + return ( -
- - {isAlertRuleDisabled !== undefined && ( - { - setIsAlertRuleDisabled((prev) => !prev); - handleAlertStateToggle(); - }} - checked={!isAlertRuleDisabled} - /> - )} - - - - - - - - + <> +
+ + {isAlertRuleDisabled !== undefined && ( + + )} - -
+ + + + + + + + + +
+ + + ); } diff --git a/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/RenameModal.styles.scss b/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/RenameModal.styles.scss new file mode 100644 index 0000000000..d3552d8143 --- /dev/null +++ b/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/RenameModal.styles.scss @@ -0,0 +1,138 @@ +.rename-alert { + .ant-modal-content { + width: 384px; + flex-shrink: 0; + border-radius: 4px; + border: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2); + padding: 0px; + + .ant-modal-header { + height: 52px; + padding: 16px; + background: var(--bg-ink-400); + border-bottom: 1px solid var(--bg-slate-500); + margin-bottom: 0px; + .ant-modal-title { + color: var(--bg-vanilla-100); + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 20px; /* 142.857% */ + width: 349px; + height: 20px; + } + } + + .ant-modal-body { + padding: 16px; + + .alert-content { + display: flex; + flex-direction: column; + gap: 8px; + + .name-text { + color: var(--bg-vanilla-100); + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 500; + line-height: 20px; /* 142.857% */ + } + + .alert-name-input { + display: flex; + padding: 6px 6px 6px 8px; + align-items: center; + gap: 4px; + align-self: stretch; + border-radius: 0px 2px 2px 0px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + } + } + } + + .ant-modal-footer { + padding: 16px; + margin-top: 0px; + .alert-rename { + display: flex; + flex-direction: row-reverse; + gap: 12px; + + .cancel-btn { + display: flex; + padding: 4px 8px; + justify-content: center; + align-items: center; + gap: 4px; + border-radius: 2px; + background: var(--bg-slate-500); + + .ant-btn-icon { + margin-inline-end: 0px; + } + } + + .rename-btn { + display: flex; + align-items: center; + display: flex; + padding: 4px 8px; + justify-content: center; + 
align-items: center; + gap: 4px; + border-radius: 2px; + background: var(--bg-robin-500); + + .ant-btn-icon { + margin-inline-end: 0px; + } + } + } + } + } +} + +.lightMode { + .rename-alert { + .ant-modal-content { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + + .ant-modal-header { + background: var(--bg-vanilla-100); + border-bottom: 1px solid var(--bg-vanilla-300); + + .ant-modal-title { + color: var(--bg-ink-300); + } + } + + .ant-modal-body { + .alert-content { + .name-text { + color: var(--bg-ink-300); + } + + .alert-name-input { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + } + } + } + + .ant-modal-footer { + .alert-rename { + .cancel-btn { + background: var(--bg-vanilla-300); + } + } + } + } + } +} diff --git a/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/RenameModal.tsx b/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/RenameModal.tsx new file mode 100644 index 0000000000..ce73260fb3 --- /dev/null +++ b/frontend/src/pages/AlertDetails/AlertHeader/ActionButtons/RenameModal.tsx @@ -0,0 +1,95 @@ +import './RenameModal.styles.scss'; + +import { Button, Input, InputRef, Modal, Typography } from 'antd'; +import { Check, X } from 'lucide-react'; +import { useCallback, useEffect, useRef } from 'react'; + +type Props = { + isOpen: boolean; + setIsOpen: (isOpen: boolean) => void; + onNameChangeHandler: () => void; + isLoading: boolean; + intermediateName: string; + setIntermediateName: (name: string) => void; +}; + +function RenameModal({ + isOpen, + setIsOpen, + onNameChangeHandler, + isLoading, + intermediateName, + setIntermediateName, +}: Props): JSX.Element { + const inputRef = useRef(null); + + useEffect(() => { + if (isOpen && inputRef.current) { + inputRef.current.focus(); + } + }, [isOpen]); + + const handleClose = useCallback((): void => setIsOpen(false), [setIsOpen]); + + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent): void => { + if (isOpen) { + 
if (e.key === 'Enter') { + onNameChangeHandler(); + } else if (e.key === 'Escape') { + handleClose(); + } + } + }; + + document.addEventListener('keydown', handleKeyDown); + + return (): void => { + document.removeEventListener('keydown', handleKeyDown); + }; + }, [isOpen, onNameChangeHandler, handleClose]); + + return ( + + + +
+ } + > +
+ Enter a new name + setIntermediateName(e.target.value)} + /> +
+ + ); +} + +export default RenameModal; diff --git a/frontend/src/pages/AlertDetails/AlertHeader/AlertHeader.tsx b/frontend/src/pages/AlertDetails/AlertHeader/AlertHeader.tsx index 04edd6a8b0..f617a6d78e 100644 --- a/frontend/src/pages/AlertDetails/AlertHeader/AlertHeader.tsx +++ b/frontend/src/pages/AlertDetails/AlertHeader/AlertHeader.tsx @@ -2,7 +2,7 @@ import './AlertHeader.styles.scss'; import LineClampedText from 'periscope/components/LineClampedText/LineClampedText'; import { useAlertRule } from 'providers/Alert'; -import { useMemo } from 'react'; +import { useMemo, useState } from 'react'; import AlertActionButtons from './ActionButtons/ActionButtons'; import AlertLabels from './AlertLabels/AlertLabels'; @@ -19,7 +19,9 @@ export type AlertHeaderProps = { }; }; function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element { - const { state, alert, labels } = alertDetails; + const { state, alert: alertName, labels } = alertDetails; + const { alertRuleState } = useAlertRule(); + const [updatedName, setUpdatedName] = useState(alertName); const labelsWithoutSeverity = useMemo( () => @@ -29,8 +31,6 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element { [labels], ); - const { alertRuleState } = useAlertRule(); - return (
@@ -38,7 +38,7 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
- +
@@ -54,7 +54,11 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
- +
); diff --git a/frontend/src/pages/AlertDetails/hooks.tsx b/frontend/src/pages/AlertDetails/hooks.tsx index 8a630a6374..c159d2169b 100644 --- a/frontend/src/pages/AlertDetails/hooks.tsx +++ b/frontend/src/pages/AlertDetails/hooks.tsx @@ -57,8 +57,11 @@ export const useAlertHistoryQueryParams = (): { const startTime = params.get(QueryParams.startTime); const endTime = params.get(QueryParams.endTime); + const relativeTimeParam = params.get(QueryParams.relativeTime); + const relativeTime = - params.get(QueryParams.relativeTime) ?? RelativeTimeMap['6hr']; + (relativeTimeParam === 'null' ? null : relativeTimeParam) ?? + RelativeTimeMap['6hr']; const intStartTime = parseInt(startTime || '0', 10); const intEndTime = parseInt(endTime || '0', 10); @@ -464,6 +467,44 @@ export const useAlertRuleDuplicate = ({ return { handleAlertDuplicate }; }; +export const useAlertRuleUpdate = ({ + alertDetails, + setUpdatedName, + intermediateName, +}: { + alertDetails: AlertDef; + setUpdatedName: (name: string) => void; + intermediateName: string; +}): { + handleAlertUpdate: () => void; + isLoading: boolean; +} => { + const { notifications } = useNotifications(); + const handleError = useAxiosError(); + + const { mutate: updateAlertRule, isLoading } = useMutation( + [REACT_QUERY_KEY.UPDATE_ALERT_RULE, alertDetails.id], + save, + { + onMutate: () => setUpdatedName(intermediateName), + onSuccess: () => + notifications.success({ message: 'Alert renamed successfully' }), + onError: (error) => { + setUpdatedName(alertDetails.alert); + handleError(error); + }, + }, + ); + + const handleAlertUpdate = (): void => { + updateAlertRule({ + data: { ...alertDetails, alert: intermediateName }, + id: alertDetails.id, + }); + }; + + return { handleAlertUpdate, isLoading }; +}; export const useAlertRuleDelete = ({ ruleId, diff --git a/frontend/src/pages/MessagingQueues/MQDetailPage/MQDetailPage.tsx b/frontend/src/pages/MessagingQueues/MQDetailPage/MQDetailPage.tsx index 931502b8e1..5793d40b7b 100644 --- 
a/frontend/src/pages/MessagingQueues/MQDetailPage/MQDetailPage.tsx +++ b/frontend/src/pages/MessagingQueues/MQDetailPage/MQDetailPage.tsx @@ -1,9 +1,12 @@ +/* eslint-disable no-nested-ternary */ import '../MessagingQueues.styles.scss'; import { Select, Typography } from 'antd'; import logEvent from 'api/common/logEvent'; +import { QueryParams } from 'constants/query'; import ROUTES from 'constants/routes'; import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2'; +import useUrlQuery from 'hooks/useUrlQuery'; import { ListMinus } from 'lucide-react'; import { useEffect, useState } from 'react'; import { useHistory } from 'react-router-dom'; @@ -13,8 +16,9 @@ import { MessagingQueuesViewTypeOptions, ProducerLatencyOptions, } from '../MessagingQueuesUtils'; -import { SelectLabelWithComingSoon } from '../MQCommon/MQCommon'; +import DropRateView from '../MQDetails/DropRateView/DropRateView'; import MessagingQueueOverview from '../MQDetails/MessagingQueueOverview'; +import MetricPage from '../MQDetails/MetricPage/MetricPage'; import MessagingQueuesDetails from '../MQDetails/MQDetails'; import MessagingQueuesConfigOptions from '../MQGraph/MQConfigOptions'; import MessagingQueuesGraph from '../MQGraph/MQGraph'; @@ -33,10 +37,34 @@ function MQDetailPage(): JSX.Element { setproducerLatencyOption, ] = useState(ProducerLatencyOptions.Producers); + const mqServiceView = useUrlQuery().get( + QueryParams.mqServiceView, + ) as MessagingQueuesViewTypeOptions; + useEffect(() => { logEvent('Messaging Queues: Detail page visited', {}); }, []); + useEffect(() => { + if (mqServiceView) { + setSelectedView(mqServiceView); + } + }, [mqServiceView]); + + const updateUrlQuery = (query: Record): void => { + const searchParams = new URLSearchParams(history.location.search); + Object.keys(query).forEach((key) => { + searchParams.set(key, query[key].toString()); + }); + history.push({ + search: searchParams.toString(), + }); + }; + + const showMessagingQueueDetails = + 
selectedView !== MessagingQueuesViewType.dropRate.value && + selectedView !== MessagingQueuesViewType.metricPage.value; + return (
@@ -55,7 +83,11 @@ function MQDetailPage(): JSX.Element { className="messaging-queue-options" defaultValue={MessagingQueuesViewType.consumerLag.value} popupClassName="messaging-queue-options-popup" - onChange={(value): void => setSelectedView(value)} + onChange={(value): void => { + setSelectedView(value); + updateUrlQuery({ [QueryParams.mqServiceView]: value }); + }} + value={selectedView} options={[ { label: MessagingQueuesViewType.consumerLag.label, @@ -70,37 +102,42 @@ function MQDetailPage(): JSX.Element { value: MessagingQueuesViewType.producerLatency.value, }, { - label: ( - - ), - value: MessagingQueuesViewType.consumerLatency.value, - disabled: true, + label: MessagingQueuesViewType.dropRate.label, + value: MessagingQueuesViewType.dropRate.value, + }, + { + label: MessagingQueuesViewType.metricPage.label, + value: MessagingQueuesViewType.metricPage.value, }, ]} />
-
- - {selectedView === MessagingQueuesViewType.consumerLag.value ? ( + {selectedView === MessagingQueuesViewType.consumerLag.value ? ( +
+ - ) : ( - - )} -
-
- + ) : selectedView === MessagingQueuesViewType.dropRate.value ? ( + + ) : selectedView === MessagingQueuesViewType.metricPage.value ? ( + + ) : ( + -
+ )} + {showMessagingQueueDetails && ( +
+ +
+ )}
); } diff --git a/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/DropRateView.styles.scss b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/DropRateView.styles.scss new file mode 100644 index 0000000000..39481dbd7c --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/DropRateView.styles.scss @@ -0,0 +1,43 @@ +.evaluation-time-selector { + display: flex; + align-items: center; + gap: 8px; + + .eval-title { + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 500; + line-height: 28px; + color: var(--bg-vanilla-200); + } + + .ant-selector { + background-color: var(--bg-ink-400); + border-radius: 4px; + border: 1px solid var(--bg-slate-400); + box-shadow: none; + } +} + +.select-dropdown-render { + padding: 8px; + display: flex; + justify-content: center; + align-items: center; + width: 200px; + margin: 6px; +} + +.lightMode { + .evaluation-time-selector { + .eval-title { + color: var(--bg-ink-400); + } + + .ant-selector { + background-color: var(--bg-vanilla-200); + border: 1px solid var(--bg-ink-400); + } + } +} diff --git a/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/DropRateView.tsx b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/DropRateView.tsx new file mode 100644 index 0000000000..d7eb3c2562 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/DropRateView.tsx @@ -0,0 +1,249 @@ +/* eslint-disable sonarjs/no-duplicate-string */ +import '../MQDetails.style.scss'; + +import { Table, Typography } from 'antd'; +import axios from 'axios'; +import cx from 'classnames'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import ROUTES from 'constants/routes'; +import { useNotifications } from 'hooks/useNotifications'; +import { isNumber } from 'lodash-es'; +import { + convertToTitleCase, + MessagingQueuesViewType, + RowData, +} from 'pages/MessagingQueues/MessagingQueuesUtils'; +import { useEffect, useMemo, useState } from 'react'; +import { 
useMutation } from 'react-query'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import { MessagingQueueServicePayload } from '../MQTables/getConsumerLagDetails'; +import { getKafkaSpanEval } from '../MQTables/getKafkaSpanEval'; +import { + convertToMilliseconds, + DropRateAPIResponse, + DropRateResponse, +} from './dropRateViewUtils'; +import EvaluationTimeSelector from './EvaluationTimeSelector'; + +export function getTableData(data: DropRateResponse[]): RowData[] { + if (data?.length === 0) { + return []; + } + + const tableData: RowData[] = + data?.map( + (row: DropRateResponse, index: number): RowData => ({ + ...(row.data as any), // todo-sagar + key: index, + }), + ) || []; + + return tableData; +} + +// eslint-disable-next-line sonarjs/cognitive-complexity +export function getColumns( + data: DropRateResponse[], + visibleCounts: Record, + handleShowMore: (index: number) => void, +): any[] { + if (data?.length === 0) { + return []; + } + + const columnsOrder = [ + 'producer_service', + 'consumer_service', + 'breach_percentage', + 'top_traceIDs', + 'breached_spans', + 'total_spans', + ]; + + const columns: { + title: string; + dataIndex: string; + key: string; + }[] = columnsOrder.map((column) => ({ + title: convertToTitleCase(column), + dataIndex: column, + key: column, + render: ( + text: string | string[], + _record: any, + index: number, + ): JSX.Element => { + if (Array.isArray(text)) { + const visibleCount = visibleCounts[index] || 4; + const visibleItems = text.slice(0, visibleCount); + const remainingCount = (text || []).length - visibleCount; + + return ( +
+
+ {visibleItems.map((item, idx) => { + const shouldShowMore = remainingCount > 0 && idx === visibleCount - 1; + return ( +
+ { + window.open(`${ROUTES.TRACE}/${item}`, '_blank'); + }} + > + {item} + + {shouldShowMore && ( + handleShowMore(index)} + className="remaing-count" + > + + {remainingCount} more + + )} +
+ ); + })} +
+
+ ); + } + + if (column === 'consumer_service' || column === 'producer_service') { + return ( + { + e.preventDefault(); + e.stopPropagation(); + window.open(`/services/${encodeURIComponent(text)}`, '_blank'); + }} + > + {text} + + ); + } + + if (column === 'breach_percentage' && text) { + if (!isNumber(text)) + return {text.toString()}; + return ( + + {(typeof text === 'string' ? parseFloat(text) : text).toFixed(2)} % + + ); + } + + return {text}; + }, + })); + + return columns; +} + +const showPaginationItem = (total: number, range: number[]): JSX.Element => ( + <> + + {range[0]} — {range[1]} + + of {total} + +); + +function DropRateView(): JSX.Element { + const [columns, setColumns] = useState([]); + const [tableData, setTableData] = useState([]); + const { notifications } = useNotifications(); + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); + const [data, setData] = useState< + DropRateAPIResponse['data']['result'][0]['list'] + >([]); + const [interval, setInterval] = useState(''); + + const [visibleCounts, setVisibleCounts] = useState>({}); + + const paginationConfig = useMemo( + () => + tableData?.length > 10 && { + pageSize: 10, + showTotal: showPaginationItem, + showSizeChanger: false, + hideOnSinglePage: true, + }, + [tableData], + ); + + const evaluationTime = useMemo(() => convertToMilliseconds(interval), [ + interval, + ]); + const tableApiPayload: MessagingQueueServicePayload = useMemo( + () => ({ + start: minTime, + end: maxTime, + evalTime: evaluationTime * 1e6, + }), + [evaluationTime, maxTime, minTime], + ); + + const handleOnError = (error: Error): void => { + notifications.error({ + message: axios.isAxiosError(error) ? 
error?.message : SOMETHING_WENT_WRONG, + }); + }; + + const handleShowMore = (index: number): void => { + setVisibleCounts((prevCounts) => ({ + ...prevCounts, + [index]: (prevCounts[index] || 4) + 4, + })); + }; + + const { mutate: getViewDetails, isLoading } = useMutation(getKafkaSpanEval, { + onSuccess: (data) => { + if (data.payload) { + setData(data.payload.result[0].list); + } + }, + onError: handleOnError, + }); + + useEffect(() => { + if (data?.length > 0) { + setColumns(getColumns(data, visibleCounts, handleShowMore)); + setTableData(getTableData(data)); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [data, visibleCounts]); + + useEffect(() => { + if (evaluationTime) { + getViewDetails(tableApiPayload); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [minTime, maxTime, evaluationTime]); + + return ( +
+
+ {MessagingQueuesViewType.dropRate.label} + +
+ + + ); +} + +export default DropRateView; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/EvaluationTimeSelector.tsx b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/EvaluationTimeSelector.tsx new file mode 100644 index 0000000000..2ca2e9c301 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/EvaluationTimeSelector.tsx @@ -0,0 +1,111 @@ +import './DropRateView.styles.scss'; + +import { Input, Select, Typography } from 'antd'; +import { Dispatch, SetStateAction, useEffect, useState } from 'react'; + +const { Option } = Select; + +interface SelectDropdownRenderProps { + menu: React.ReactNode; + inputValue: string; + handleInputChange: (e: React.ChangeEvent) => void; + handleKeyDown: (e: React.KeyboardEvent) => void; + handleAddCustomValue: () => void; +} + +function SelectDropdownRender({ + menu, + inputValue, + handleInputChange, + handleAddCustomValue, + handleKeyDown, +}: SelectDropdownRenderProps): JSX.Element { + return ( + <> + {menu} + + + ); +} + +function EvaluationTimeSelector({ + setInterval, +}: { + setInterval: Dispatch>; +}): JSX.Element { + const [inputValue, setInputValue] = useState(''); + const [selectedInterval, setSelectedInterval] = useState('5ms'); + const [dropdownOpen, setDropdownOpen] = useState(false); + + const handleInputChange = (e: React.ChangeEvent): void => { + setInputValue(e.target.value); + }; + + const handleSelectChange = (value: string): void => { + setSelectedInterval(value); + setInputValue(''); + setDropdownOpen(false); + }; + + const handleAddCustomValue = (): void => { + setSelectedInterval(inputValue); + setInputValue(inputValue); + setDropdownOpen(false); + }; + + const handleKeyDown = (e: React.KeyboardEvent): void => { + if (e.key === 'Enter') { + e.preventDefault(); + e.stopPropagation(); + handleAddCustomValue(); + } + }; + + const renderDropdown = (menu: React.ReactNode): JSX.Element => ( + + ); + + useEffect(() => { + if (selectedInterval) { + 
setInterval(() => selectedInterval); + } + }, [selectedInterval, setInterval]); + + return ( +
+ + Evaluation Interval: + + +
+ ); +} + +export default EvaluationTimeSelector; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/dropRateViewUtils.ts b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/dropRateViewUtils.ts new file mode 100644 index 0000000000..49d751e722 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/DropRateView/dropRateViewUtils.ts @@ -0,0 +1,46 @@ +export function convertToMilliseconds(timeInput: string): number { + if (!timeInput.trim()) { + return 0; + } + + const match = timeInput.match(/^(\d+)(ms|s|ns)?$/); // Match number and optional unit + if (!match) { + throw new Error(`Invalid time format: ${timeInput}`); + } + + const value = parseInt(match[1], 10); + const unit = match[2] || 'ms'; // Default to 'ms' if no unit is provided + + switch (unit) { + case 's': + return value * 1e3; + case 'ms': + return value; + case 'ns': + return value / 1e6; + default: + throw new Error('Invalid time format'); + } +} + +export interface DropRateResponse { + timestamp: string; + data: { + breach_percentage: number; + breached_spans: number; + consumer_service: string; + producer_service: string; + top_traceIDs: string[]; + total_spans: number; + }; +} +export interface DropRateAPIResponse { + status: string; + data: { + resultType: string; + result: { + queryName: string; + list: DropRateResponse[]; + }[]; + }; +} diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.style.scss b/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.style.scss index c4995a1812..0e78feabf4 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.style.scss +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.style.scss @@ -17,6 +17,11 @@ background: var(--bg-ink-500); .mq-overview-title { + display: flex; + justify-content: space-between; + align-items: center; + width: 100%; + color: var(--bg-vanilla-200); font-family: Inter; @@ -43,3 +48,133 @@ } } } + +.droprate-view { + .mq-table { + width: 100%; + + 
.ant-table-content { + border-radius: 6px; + border: 1px solid var(--bg-slate-500); + box-shadow: 0px 4px 12px 0px rgba(0, 0, 0, 0.1); + } + + .ant-table-tbody { + .ant-table-cell { + max-width: 250px; + border-bottom: none; + } + } + + .ant-table-thead { + .ant-table-cell { + background-color: var(--bg-ink-500); + border-bottom: 1px solid var(--bg-slate-500); + } + } + } + + .trace-id-list { + display: flex; + flex-direction: column; + gap: 4px; + width: max-content; + + .traceid-style { + display: flex; + gap: 8px; + align-items: center; + + .traceid-text { + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-slate-400); + padding: 2px; + cursor: pointer; + } + + .remaing-count { + cursor: pointer; + color: var(--bg-vanilla-100); + font-family: Inter; + font-size: 12px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: -0.06px; + } + } + } +} + +.pagination-left { + &.mq-table { + .ant-pagination { + justify-content: flex-start; + } + } +} + +.lightMode { + .mq-overview-container { + background: var(--bg-vanilla-200); + border: 1px solid var(--bg-vanilla-300); + + .mq-overview-title { + color: var(--bg-ink-400); + } + + .mq-details-options { + .ant-radio-button-wrapper { + border-color: var(--bg-vanilla-300); + color: var(--bg-slate-200); + } + .ant-radio-button-wrapper-checked { + color: var(--bg-slate-200); + background: var(--bg-vanilla-300); + } + .ant-radio-button-wrapper-disabled { + background: var(--bg-vanilla-100); + color: var(--bg-vanilla-400); + } + } + } + + .droprate-view { + .mq-table { + .ant-table-content { + border: 1px solid var(--bg-vanilla-300); + } + + .ant-table-tbody { + .ant-table-cell { + background-color: var(--bg-vanilla-100); + } + } + + .ant-table-thead { + .ant-table-cell { + background-color: var(--bg-vanilla-100); + border-bottom: 1px solid var(--bg-vanilla-300); + } + } + } + + .no-data-style { + border: 1px solid var(--bg-vanilla-300); + } + } + + .trace-id-list { 
+ .traceid-style { + .traceid-text { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-300); + } + + .remaing-count { + color: var(--bg-ink-400); + } + } + } +} diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.tsx index 6ec3d45f28..9f30fbbd00 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.tsx +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQDetails.tsx @@ -18,7 +18,6 @@ import { ProducerLatencyOptions, SelectedTimelineQuery, } from '../MessagingQueuesUtils'; -import { ComingSoon } from '../MQCommon/MQCommon'; import MessagingQueuesTable from './MQTables/MQTables'; const MQServiceDetailTypePerView = ( @@ -28,7 +27,6 @@ const MQServiceDetailTypePerView = ( MessagingQueueServiceDetailType.ConsumerDetails, MessagingQueueServiceDetailType.ProducerDetails, MessagingQueueServiceDetailType.NetworkLatency, - MessagingQueueServiceDetailType.PartitionHostMetrics, ], [MessagingQueuesViewType.partitionLatency.value]: [ MessagingQueueServiceDetailType.ConsumerDetails, @@ -62,22 +60,8 @@ function MessagingQueuesOptions({ const detailTypes = MQServiceDetailTypePerView(producerLatencyOption)[selectedView] || []; return detailTypes.map((detailType) => ( - + {ConsumerLagDetailTitle[detailType]} - {detailType === MessagingQueueServiceDetailType.PartitionHostMetrics && ( - - )} )); }; @@ -116,12 +100,7 @@ const checkValidityOfDetailConfigs = ( return false; } - if (currentTab === MessagingQueueServiceDetailType.ConsumerDetails) { - return Boolean(configDetails?.topic && configDetails?.partition); - } - return Boolean( - configDetails?.group && configDetails?.topic && configDetails?.partition, - ); + return Boolean(configDetails?.topic && configDetails?.partition); } if (selectedView === MessagingQueuesViewType.producerLatency.value) { @@ -139,7 +118,7 @@ const checkValidityOfDetailConfigs = ( return Boolean(configDetails?.topic && 
configDetails?.service_name); } - return false; + return selectedView === MessagingQueuesViewType.dropRate.value; }; function MessagingQueuesDetails({ @@ -213,14 +192,14 @@ function MessagingQueuesDetails({ ); diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/MQTables.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/MQTables.tsx index 52c01fce45..73fd1b2f41 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/MQTables.tsx +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/MQTables.tsx @@ -1,3 +1,4 @@ +/* eslint-disable no-nested-ternary */ /* eslint-disable react/require-default-props */ import './MQTables.styles.scss'; @@ -32,6 +33,8 @@ import { MessagingQueuesPayloadProps, } from './getConsumerLagDetails'; +const INITIAL_PAGE_SIZE = 10; + // eslint-disable-next-line sonarjs/cognitive-complexity export function getColumns( data: MessagingQueuesPayloadProps['payload'], @@ -154,8 +157,8 @@ function MessagingQueuesTable({ const paginationConfig = useMemo( () => - tableData?.length > 20 && { - pageSize: 20, + tableData?.length > INITIAL_PAGE_SIZE && { + pageSize: INITIAL_PAGE_SIZE, showTotal: showPaginationItem, showSizeChanger: false, hideOnSinglePage: true, @@ -169,15 +172,18 @@ function MessagingQueuesTable({ }); }; - const { mutate: getViewDetails, isLoading } = useMutation(tableApi, { - onSuccess: (data) => { - if (data.payload) { - setColumns(getColumns(data?.payload, history)); - setTableData(getTableData(data?.payload)); - } + const { mutate: getViewDetails, isLoading, error, isError } = useMutation( + tableApi, + { + onSuccess: (data) => { + if (data.payload) { + setColumns(getColumns(data?.payload, history)); + setTableData(getTableData(data?.payload)); + } + }, + onError: handleConsumerDetailsOnError, }, - onError: handleConsumerDetailsOnError, - }); + ); useEffect( () => { @@ -230,6 +236,10 @@ function MessagingQueuesTable({ + ) : isError ? ( +
+ {error?.message || SOMETHING_WENT_WRONG} +
) : ( <> {currentTab && ( @@ -241,7 +251,7 @@ function MessagingQueuesTable({
| ErrorResponse > => { const { detailType, ...restProps } = props; - try { - const response = await axios.post( - `/messaging-queues/kafka/consumer-lag/${props.detailType}`, - { - ...restProps, - }, - ); + const response = await axios.post( + `/messaging-queues/kafka/consumer-lag/${props.detailType}`, + { + ...restProps, + }, + ); - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG); - } + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; }; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getKafkaSpanEval.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getKafkaSpanEval.tsx new file mode 100644 index 0000000000..1f77faff2d --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getKafkaSpanEval.tsx @@ -0,0 +1,23 @@ +import axios from 'api'; +import { ErrorResponse, SuccessResponse } from 'types/api'; + +import { DropRateAPIResponse } from '../DropRateView/dropRateViewUtils'; +import { MessagingQueueServicePayload } from './getConsumerLagDetails'; + +export const getKafkaSpanEval = async ( + props: Omit, +): Promise | ErrorResponse> => { + const { start, end, evalTime } = props; + const response = await axios.post(`messaging-queues/kafka/span/evaluation`, { + start, + end, + eval_time: evalTime, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; +}; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyDetails.ts b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyDetails.ts index 8c0b26f594..1897609aa0 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyDetails.ts +++ 
b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyDetails.ts @@ -1,7 +1,4 @@ import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; import { MessagingQueueServiceDetailType } from 'pages/MessagingQueues/MessagingQueuesUtils'; import { ErrorResponse, SuccessResponse } from 'types/api'; @@ -22,18 +19,15 @@ export const getPartitionLatencyDetails = async ( } else { endpoint = `/messaging-queues/kafka/consumer-lag/producer-details`; } - try { - const response = await axios.post(endpoint, { - ...rest, - }); - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG); - } + const response = await axios.post(endpoint, { + ...rest, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; }; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyOverview.ts b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyOverview.ts index 49c8eed757..cdc7fb0cb8 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyOverview.ts +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getPartitionLatencyOverview.ts @@ -1,7 +1,4 @@ import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { @@ -14,21 +11,17 @@ export const getPartitionLatencyOverview = async ( ): Promise< SuccessResponse | ErrorResponse > => { - try { - const response = await axios.post( - `/messaging-queues/kafka/partition-latency/overview`, - { - ...props, - }, - ); + const response = 
await axios.post( + `/messaging-queues/kafka/partition-latency/overview`, + { + ...props, + }, + ); - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG); - } + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; }; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputDetails.ts b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputDetails.ts index 3a995ef590..fb5817abd0 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputDetails.ts +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputDetails.ts @@ -1,7 +1,4 @@ import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { @@ -16,18 +13,14 @@ export const getTopicThroughputDetails = async ( > => { const { detailType, ...rest } = props; const endpoint = `/messaging-queues/kafka/topic-throughput/${detailType}`; - try { - const response = await axios.post(endpoint, { - ...rest, - }); + const response = await axios.post(endpoint, { + ...rest, + }); - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG); - } + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; }; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputOverview.ts b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputOverview.ts index 7ed896d8ca..ac955e8405 100644 --- 
a/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputOverview.ts +++ b/frontend/src/pages/MessagingQueues/MQDetails/MQTables/getTopicThroughputOverview.ts @@ -1,7 +1,4 @@ import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { @@ -15,23 +12,18 @@ export const getTopicThroughputOverview = async ( SuccessResponse | ErrorResponse > => { const { detailType, start, end } = props; - console.log(detailType); - try { - const response = await axios.post( - `messaging-queues/kafka/topic-throughput/${detailType}`, - { - start, - end, - }, - ); + const response = await axios.post( + `messaging-queues/kafka/topic-throughput/${detailType}`, + { + start, + end, + }, + ); - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG); - } + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; }; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MessagingQueueOverview.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MessagingQueueOverview.tsx index 104b7ec237..ea55384ac3 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MessagingQueueOverview.tsx +++ b/frontend/src/pages/MessagingQueues/MQDetails/MessagingQueueOverview.tsx @@ -12,6 +12,7 @@ import { ProducerLatencyOptions, } from '../MessagingQueuesUtils'; import { MessagingQueueServicePayload } from './MQTables/getConsumerLagDetails'; +import { getKafkaSpanEval } from './MQTables/getKafkaSpanEval'; import { getPartitionLatencyOverview } from './MQTables/getPartitionLatencyOverview'; import { getTopicThroughputOverview } from './MQTables/getTopicThroughputOverview'; import MessagingQueuesTable from 
'./MQTables/MQTables'; @@ -51,6 +52,9 @@ const getTableApi = (selectedView: MessagingQueuesViewTypeOptions): any => { if (selectedView === MessagingQueuesViewType.producerLatency.value) { return getTopicThroughputOverview; } + if (selectedView === MessagingQueuesViewType.dropRate.value) { + return getKafkaSpanEval; + } return getPartitionLatencyOverview; }; @@ -78,6 +82,10 @@ function MessagingQueueOverview({ ? 'producer' : 'consumer' : undefined, + evalTime: + selectedView === MessagingQueuesViewType.dropRate.value + ? 2363404 + : undefined, }; return ( diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricColumnGraphs.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricColumnGraphs.tsx new file mode 100644 index 0000000000..a88db1efc7 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricColumnGraphs.tsx @@ -0,0 +1,115 @@ +import { Typography } from 'antd'; +import { CardContainer } from 'container/GridCardLayout/styles'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useTranslation } from 'react-i18next'; +import { Widgets } from 'types/api/dashboard/getAll'; + +import MetricPageGridGraph from './MetricPageGraph'; +import { + averageRequestLatencyWidgetData, + brokerCountWidgetData, + brokerNetworkThroughputWidgetData, + bytesConsumedWidgetData, + consumerFetchRateWidgetData, + consumerGroupMemberWidgetData, + consumerLagByGroupWidgetData, + consumerOffsetWidgetData, + ioWaitTimeWidgetData, + kafkaProducerByteRateWidgetData, + messagesConsumedWidgetData, + producerFetchRequestPurgatoryWidgetData, + requestResponseWidgetData, + requestTimesWidgetData, +} from './MetricPageUtil'; + +interface MetricSectionProps { + title: string; + description: string; + graphCount: Widgets[]; +} + +function MetricSection({ + title, + description, + graphCount, +}: MetricSectionProps): JSX.Element { + const isDarkMode = useIsDarkMode(); + + return ( +
+ +
+ {title} +
+
+ + {description} + +
+ {graphCount.map((widgetData) => ( + + ))} +
+
+ ); +} + +function MetricColumnGraphs(): JSX.Element { + const { t } = useTranslation('messagingQueues'); + + const metricsData = [ + { + title: t('metricGraphCategory.brokerMetrics.title'), + description: t('metricGraphCategory.brokerMetrics.description'), + graphCount: [ + brokerCountWidgetData, + requestTimesWidgetData, + producerFetchRequestPurgatoryWidgetData, + brokerNetworkThroughputWidgetData, + ], + id: 'broker-metrics', + }, + { + title: t('metricGraphCategory.producerMetrics.title'), + description: t('metricGraphCategory.producerMetrics.description'), + graphCount: [ + ioWaitTimeWidgetData, + requestResponseWidgetData, + averageRequestLatencyWidgetData, + kafkaProducerByteRateWidgetData, + bytesConsumedWidgetData, + ], + id: 'producer-metrics', + }, + { + title: t('metricGraphCategory.consumerMetrics.title'), + description: t('metricGraphCategory.consumerMetrics.description'), + graphCount: [ + consumerOffsetWidgetData, + consumerGroupMemberWidgetData, + consumerLagByGroupWidgetData, + consumerFetchRateWidgetData, + messagesConsumedWidgetData, + ], + id: 'consumer-metrics', + }, + ]; + + return ( +
+ {metricsData.map((metric) => ( + + ))} +
+ ); +} + +export default MetricColumnGraphs; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPage.styles.scss b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPage.styles.scss new file mode 100644 index 0000000000..cc955c0739 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPage.styles.scss @@ -0,0 +1,128 @@ +.metric-page { + padding: 20px; + display: flex; + flex-direction: column; + gap: 32px; + + .metric-page-container { + display: flex; + flex-direction: column; + + .row-panel { + padding-left: 10px; + } + + .metric-page-grid { + display: grid; + grid-template-columns: repeat(3, 1fr); + align-items: flex-start; + gap: 10px; + + .metric-graph { + height: 320px; + padding: 10px; + width: 100%; + box-sizing: border-box; + } + } + + @media (max-width: 768px) { + .metric-page-grid { + grid-template-columns: 1fr; + } + } + + .graph-description { + padding: 16px 10px 16px 10px; + } + } + + .row-panel { + border-radius: 4px; + background: rgba(18, 19, 23, 0.4); + padding: 8px; + display: flex; + gap: 6px; + align-items: center; + height: 48px !important; + + .ant-typography { + font-size: 14px; + font-weight: 500; + } + + .row-panel-section { + display: flex; + gap: 6px; + align-items: center; + + .row-icon { + color: var(--bg-vanilla-400); + cursor: pointer; + } + + .section-title { + color: var(--bg-vanilla-400); + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 20px; + letter-spacing: -0.07px; + } + } + } + + .metric-column-graph-container { + display: grid; + grid-template-columns: 1fr 1fr 1fr; + gap: 10px; + + .metric-column-graph { + display: flex; + flex-direction: column; + gap: 10px; + + .row-panel { + justify-content: center; + } + + .metric-page-grid { + display: flex; + flex-direction: column; + flex: 1; + min-width: 0; + gap: 10px; + + .metric-graph { + height: 320px; + padding: 10px; + width: 100%; + box-sizing: border-box; + } + } + 
} + } + + @media (max-width: 768px) { + .metric-column-graph-container { + grid-template-columns: 1fr; + } + } +} + +.lightMode { + .metric-page { + .row-panel { + .row-panel-section { + .row-icon { + color: var(--bg-ink-300); + } + + .section-title { + color: var(--bg-ink-300); + } + } + } + } +} diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPage.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPage.tsx new file mode 100644 index 0000000000..3c997da459 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPage.tsx @@ -0,0 +1,134 @@ +import './MetricPage.styles.scss'; + +import { Typography } from 'antd'; +import cx from 'classnames'; +import { CardContainer } from 'container/GridCardLayout/styles'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { ChevronDown, ChevronUp } from 'lucide-react'; +import { useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Widgets } from 'types/api/dashboard/getAll'; + +import MetricColumnGraphs from './MetricColumnGraphs'; +import MetricPageGridGraph from './MetricPageGraph'; +import { + cpuRecentUtilizationWidgetData, + currentOffsetPartitionWidgetData, + insyncReplicasWidgetData, + jvmGcCollectionsElapsedWidgetData, + jvmGCCountWidgetData, + jvmMemoryHeapWidgetData, + oldestOffsetWidgetData, + partitionCountPerTopicWidgetData, +} from './MetricPageUtil'; + +interface CollapsibleMetricSectionProps { + title: string; + description: string; + graphCount: Widgets[]; + isCollapsed: boolean; + onToggle: () => void; +} + +function CollapsibleMetricSection({ + title, + description, + graphCount, + isCollapsed, + onToggle, +}: CollapsibleMetricSectionProps): JSX.Element { + const isDarkMode = useIsDarkMode(); + + return ( +
+ +
+
+ {title} + {isCollapsed ? ( + + ) : ( + + )} +
+
+
+ {!isCollapsed && ( + <> + + {description} + +
+ {graphCount.map((widgetData) => ( + + ))} +
+ + )} +
+ ); +} + +function MetricPage(): JSX.Element { + const [collapsedSections, setCollapsedSections] = useState<{ + [key: string]: boolean; + }>({ + producerMetrics: false, + consumerMetrics: false, + }); + + const toggleCollapse = (key: string): void => { + setCollapsedSections((prev) => ({ + ...prev, + [key]: !prev[key], + })); + }; + + const { t } = useTranslation('messagingQueues'); + + const metricSections = [ + { + key: 'bokerJVMMetrics', + title: t('metricGraphCategory.brokerJVMMetrics.title'), + description: t('metricGraphCategory.brokerJVMMetrics.description'), + graphCount: [ + jvmGCCountWidgetData, + jvmGcCollectionsElapsedWidgetData, + cpuRecentUtilizationWidgetData, + jvmMemoryHeapWidgetData, + ], + }, + { + key: 'partitionMetrics', + title: t('metricGraphCategory.partitionMetrics.title'), + description: t('metricGraphCategory.partitionMetrics.description'), + graphCount: [ + partitionCountPerTopicWidgetData, + currentOffsetPartitionWidgetData, + oldestOffsetWidgetData, + insyncReplicasWidgetData, + ], + }, + ]; + + return ( +
+ + {metricSections.map(({ key, title, description, graphCount }) => ( + toggleCollapse(key)} + /> + ))} +
+ ); +} + +export default MetricPage; diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageGraph.tsx b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageGraph.tsx new file mode 100644 index 0000000000..248dc35178 --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageGraph.tsx @@ -0,0 +1,59 @@ +import './MetricPage.styles.scss'; + +import { QueryParams } from 'constants/query'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { ViewMenuAction } from 'container/GridCardLayout/config'; +import GridCard from 'container/GridCardLayout/GridCard'; +import { Card } from 'container/GridCardLayout/styles'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import useUrlQuery from 'hooks/useUrlQuery'; +import { useCallback } from 'react'; +import { useDispatch } from 'react-redux'; +import { useHistory, useLocation } from 'react-router-dom'; +import { UpdateTimeInterval } from 'store/actions'; +import { Widgets } from 'types/api/dashboard/getAll'; + +function MetricPageGridGraph({ + widgetData, +}: { + widgetData: Widgets; +}): JSX.Element { + const history = useHistory(); + const { pathname } = useLocation(); + const dispatch = useDispatch(); + const urlQuery = useUrlQuery(); + const isDarkMode = useIsDarkMode(); + + const onDragSelect = useCallback( + (start: number, end: number) => { + const startTimestamp = Math.trunc(start); + const endTimestamp = Math.trunc(end); + + urlQuery.set(QueryParams.startTime, startTimestamp.toString()); + urlQuery.set(QueryParams.endTime, endTimestamp.toString()); + const generatedUrl = `${pathname}?${urlQuery.toString()}`; + history.push(generatedUrl); + + if (startTimestamp !== endTimestamp) { + dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp])); + } + }, + [dispatch, history, pathname, urlQuery], + ); + + return ( + + + + ); +} + +export default MetricPageGridGraph; diff --git 
a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts new file mode 100644 index 0000000000..144b573c5f --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts @@ -0,0 +1,1092 @@ +/* eslint-disable sonarjs/no-duplicate-string */ +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { GetWidgetQueryBuilderProps } from 'container/MetricsApplication/types'; +import { Widgets } from 'types/api/dashboard/getAll'; +import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; +import { EQueryType } from 'types/common/dashboard'; +import { DataSource } from 'types/common/queryBuilder'; +import { v4 as uuid } from 'uuid'; + +interface GetWidgetQueryProps { + title: string; + description: string; + queryData: IBuilderQuery[]; +} + +interface GetWidgetQueryPropsReturn extends GetWidgetQueryBuilderProps { + description?: string; + nullZeroValues: string; +} + +export const getWidgetQueryBuilder = ({ + query, + title = '', + panelTypes, + yAxisUnit = '', + fillSpans = false, + id, + nullZeroValues, + description, +}: GetWidgetQueryPropsReturn): Widgets => ({ + description: description || '', + id: id || uuid(), + isStacked: false, + nullZeroValues: nullZeroValues || '', + opacity: '1', + panelTypes, + query, + timePreferance: 'GLOBAL_TIME', + title, + yAxisUnit, + softMax: null, + softMin: null, + selectedLogFields: [], + selectedTracesFields: [], + fillSpans, +}); + +export function getWidgetQuery( + props: GetWidgetQueryProps, +): GetWidgetQueryPropsReturn { + const { title, description } = props; + return { + title, + yAxisUnit: 'none', + panelTypes: PANEL_TYPES.TIME_SERIES, + fillSpans: false, + description, + nullZeroValues: 'zero', + query: { + queryType: EQueryType.QUERY_BUILDER, + promql: [], + builder: { + queryData: props.queryData, + 
queryFormulas: [], + }, + clickhouse_sql: [], + id: uuid(), + }, + }; +} + +export const requestTimesWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_request_time_avg--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_request_time_avg', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Request Times', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Request Times', + description: + 'This metric is used to measure the average latency experienced by requests across the Kafka broker.', + }), +); + +export const brokerCountWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_brokers--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_brokers', + type: 'Gauge', + }, + aggregateOperator: 'sum', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Broker count', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'sum', + }, + ], + title: 'Broker Count', + description: 'Total number of active brokers in the Kafka cluster.\n', + }), +); + +export const producerFetchRequestPurgatoryWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_purgatory_size--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_purgatory_size', + type: 'Gauge', + }, + aggregateOperator: 'avg', + 
dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Producer and Fetch Request Purgatory', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Producer and Fetch Request Purgatory', + description: + 'Measures the number of requests that Kafka brokers have received but cannot immediately fulfill', + }), +); + +export const brokerNetworkThroughputWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: + 'kafka_server_brokertopicmetrics_bytesoutpersec_oneminuterate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_server_brokertopicmetrics_bytesoutpersec_oneminuterate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Broker Network Throughput', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Broker Network Throughput', + description: + 'Helps gauge the data throughput from the Kafka broker to consumer clients, focusing on the network usage associated with serving messages to consumers.', + }), +); + +export const ioWaitTimeWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_producer_io_waittime_total--float64--Sum--true', + isColumn: true, + isJSON: false, + key: 'kafka_producer_io_waittime_total', + type: 'Sum', + }, + aggregateOperator: 'rate', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: 
[], + groupBy: [], + having: [], + legend: 'I/O Wait Time', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'sum', + stepInterval: 60, + timeAggregation: 'rate', + }, + ], + title: 'I/O Wait Time', + description: + 'This metric measures the total time that producers are in an I/O wait state, indicating potential bottlenecks in data transmission from producers to Kafka brokers.', + }), +); + +export const requestResponseWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_producer_request_rate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_producer_request_rate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Request Rate', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_producer_response_rate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_producer_response_rate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'B', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Response Rate', + limit: null, + orderBy: [], + queryName: 'B', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Request and Response Rate', + description: + "Indicates how many requests the producer is sending per second, reflecting the intensity of the producer's interaction with the Kafka cluster. 
Also, helps Kafka administrators gauge the responsiveness of brokers to producer requests.", + }), +); + +export const averageRequestLatencyWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_producer_request_latency_avg--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_producer_request_latency_avg', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Average Request Latency', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Average Request Latency', + description: + 'Helps Kafka administrators and developers understand the average latency experienced by producer requests.', + }), +); + +export const kafkaProducerByteRateWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_producer_byte_rate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_producer_byte_rate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'kafka_producer_byte_rate', + description: + 'Helps measure the data output rate from the producer, indicating the load a producer is placing on Kafka brokers.', + }), +); + +export const 
bytesConsumedWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_consumer_bytes_consumed_rate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_consumer_bytes_consumed_rate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Bytes Consumed', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Bytes Consumed', + description: + 'Helps Kafka administrators monitor the data consumption rate of a consumer group, showing how much data (in bytes) is being read from the Kafka cluster over time.', + }), +); + +export const consumerOffsetWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_consumer_group_offset--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_consumer_group_offset', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'group--string--tag--false', + isColumn: false, + isJSON: false, + key: 'group', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'partition--string--tag--false', + isColumn: false, + isJSON: false, + key: 'partition', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], 
+ title: 'Consumer Offset', + description: 'Current offset of each consumer group for each topic partition', + }), +); + +export const consumerGroupMemberWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_consumer_group_members--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_consumer_group_members', + type: 'Gauge', + }, + aggregateOperator: 'sum', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'group--string--tag--false', + isColumn: false, + isJSON: false, + key: 'group', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'sum', + stepInterval: 60, + timeAggregation: 'sum', + }, + ], + title: 'Consumer Group Members', + description: 'Number of active members in each group', + }), +); + +export const consumerLagByGroupWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_consumer_group_lag--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_consumer_group_lag', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'group--string--tag--false', + isColumn: false, + isJSON: false, + key: 'group', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'partition--string--tag--false', + isColumn: false, + isJSON: false, + key: 'partition', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: 
null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Consumer Lag by Group', + description: + 'Helps Kafka administrators assess whether consumer groups are keeping up with the incoming data stream or falling behind', + }), +); + +export const consumerFetchRateWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_consumer_fetch_rate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_consumer_fetch_rate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'service_name--string--tag--false', + isColumn: false, + isJSON: false, + key: 'service_name', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Consumer Fetch Rate', + description: + 'Metric measures the rate at which fetch requests are made by a Kafka consumer to the broker, typically in requests per second.', + }), +); + +export const messagesConsumedWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_consumer_records_consumed_rate--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_consumer_records_consumed_rate', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'Messages Consumed', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + 
stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Messages Consumed', + description: + 'Measures the rate at which a Kafka consumer is consuming records (messages) per second from Kafka brokers.', + }), +); + +export const jvmGCCountWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'jvm_gc_collections_count--float64--Sum--true', + isColumn: true, + isJSON: false, + key: 'jvm_gc_collections_count', + type: 'Sum', + }, + aggregateOperator: 'rate', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'JVM GC Count', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'sum', + stepInterval: 60, + timeAggregation: 'rate', + }, + ], + title: 'JVM GC Count', + description: + 'Tracks the total number of garbage collection (GC) events that have occurred in the Java Virtual Machine (JVM).', + }), +); + +export const jvmGcCollectionsElapsedWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'jvm_gc_collections_elapsed--float64--Sum--true', + isColumn: true, + isJSON: false, + key: 'jvm_gc_collections_elapsed', + type: 'Sum', + }, + aggregateOperator: 'rate', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'garbagecollector', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'sum', + stepInterval: 60, + timeAggregation: 'rate', + }, + ], + title: 'jvm_gc_collections_elapsed', + description: + 'Measures the total time (usually in milliseconds) spent on garbage collection (GC) events in the Java Virtual Machine (JVM).', + }), +); + +export const cpuRecentUtilizationWidgetData = 
getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'jvm_cpu_recent_utilization--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'jvm_cpu_recent_utilization', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'CPU utilization', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'CPU Recent Utilization', + description: + 'This metric measures the recent CPU usage by the Java Virtual Machine (JVM), typically expressed as a percentage.', + }), +); + +export const jvmMemoryHeapWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'jvm_memory_heap_max--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'jvm_memory_heap_max', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [], + having: [], + legend: 'JVM memory heap', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'JVM memory heap', + description: + 'The metric represents the maximum amount of heap memory available to the Java Virtual Machine (JVM)', + }), +); + +export const partitionCountPerTopicWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_topic_partitions--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_topic_partitions', + type: 'Gauge', + }, + aggregateOperator: 'sum', + dataSource: 
DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'sum', + stepInterval: 60, + timeAggregation: 'sum', + }, + ], + title: 'Partition Count per Topic', + description: 'Number of partitions for each topic', + }), +); + +export const currentOffsetPartitionWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_partition_current_offset--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_partition_current_offset', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'partition--string--tag--false', + isColumn: false, + isJSON: false, + key: 'partition', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Current Offset ( Partition )', + description: + 'Current offset of each partition, showing the latest position in each partition', + }), +); + +export const oldestOffsetWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_partition_oldest_offset--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_partition_oldest_offset', + type: 'Gauge', + }, + 
aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'partition--string--tag--false', + isColumn: false, + isJSON: false, + key: 'partition', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'Oldest Offset (Partition)', + description: + 'Oldest offset of each partition to identify log retention and offset range.', + }), +); + +export const insyncReplicasWidgetData = getWidgetQueryBuilder( + getWidgetQuery({ + queryData: [ + { + aggregateAttribute: { + dataType: DataTypes.Float64, + id: 'kafka_partition_replicas_in_sync--float64--Gauge--true', + isColumn: true, + isJSON: false, + key: 'kafka_partition_replicas_in_sync', + type: 'Gauge', + }, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + disabled: false, + expression: 'A', + filters: { + items: [], + op: 'AND', + }, + functions: [], + groupBy: [ + { + dataType: DataTypes.String, + id: 'topic--string--tag--false', + isColumn: false, + isJSON: false, + key: 'topic', + type: 'tag', + }, + { + dataType: DataTypes.String, + id: 'partition--string--tag--false', + isColumn: false, + isJSON: false, + key: 'partition', + type: 'tag', + }, + ], + having: [], + legend: '', + limit: null, + orderBy: [], + queryName: 'A', + reduceTo: 'avg', + spaceAggregation: 'avg', + stepInterval: 60, + timeAggregation: 'avg', + }, + ], + title: 'In-Sync Replicas (ISR)', + description: + 'Count of in-sync replicas for each partition to ensure data availability.', + }), +); diff --git a/frontend/src/pages/MessagingQueues/MessagingQueueHealthCheck/AttributeCheckList.tsx 
b/frontend/src/pages/MessagingQueues/MessagingQueueHealthCheck/AttributeCheckList.tsx new file mode 100644 index 0000000000..08b2ce6cfa --- /dev/null +++ b/frontend/src/pages/MessagingQueues/MessagingQueueHealthCheck/AttributeCheckList.tsx @@ -0,0 +1,270 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ +import './MessagingQueueHealthCheck.styles.scss'; + +import { CaretDownOutlined, LoadingOutlined } from '@ant-design/icons'; +import { + Modal, + Select, + Spin, + Tooltip, + Tree, + TreeDataNode, + Typography, +} from 'antd'; +import { OnboardingStatusResponse } from 'api/messagingQueues/onboarding/getOnboardingStatus'; +import { QueryParams } from 'constants/query'; +import ROUTES from 'constants/routes'; +import { History } from 'history'; +import { Bolt, Check, OctagonAlert, X } from 'lucide-react'; +import { ReactNode, useEffect, useState } from 'react'; +import { useHistory } from 'react-router-dom'; +import { isCloudUser } from 'utils/app'; +import { v4 as uuid } from 'uuid'; + +import { + KAFKA_SETUP_DOC_LINK, + MessagingQueueHealthCheckService, +} from '../MessagingQueuesUtils'; + +interface AttributeCheckListProps { + visible: boolean; + onClose: () => void; + onboardingStatusResponses: { + title: string; + data: OnboardingStatusResponse['data']; + errorMsg?: string; + }[]; + loading: boolean; +} + +export enum AttributesFilters { + ALL = 'all', + SUCCESS = 'success', + ERROR = 'error', +} + +function ErrorTitleAndKey({ + title, + parentTitle, + history, + isCloudUserVal, + errorMsg, + isLeaf, +}: { + title: string; + parentTitle: string; + isCloudUserVal: boolean; + history: History; + errorMsg?: string; + isLeaf?: boolean; +}): TreeDataNode { + const handleRedirection = (): void => { + let link = ''; + + switch (parentTitle) { + case 'Consumers': + link = 
`${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Consumers}`; + break; + case 'Producers': + link = `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Producers}`; + break; + case 'Kafka': + link = `${ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Kafka}`; + break; + default: + link = ''; + } + + if (isCloudUserVal && !!link) { + history.push(link); + } else { + window.open(KAFKA_SETUP_DOC_LINK, '_blank'); + } + }; + return { + key: `${title}-key-${uuid()}`, + title: ( +
+ + {title} + + +
{ + e.preventDefault(); + handleRedirection(); + }} + > + + Fix +
+
+
+ ), + isLeaf, + }; +} + +function AttributeLabels({ title }: { title: ReactNode }): JSX.Element { + return ( +
+ + {title} +
+ ); +} + +function treeTitleAndKey({ + title, + isLeaf, +}: { + title: string; + isLeaf?: boolean; +}): TreeDataNode { + return { + key: `${title}-key-${uuid()}`, + title: ( +
+ + {title} + + {isLeaf && ( +
+ + + +
+ )} +
+ ), + isLeaf, + }; +} + +function generateTreeDataNodes( + response: OnboardingStatusResponse['data'], + parentTitle: string, + isCloudUserVal: boolean, + history: History, +): TreeDataNode[] { + return response + .map((item) => { + if (item.attribute) { + if (item.status === '1') { + return treeTitleAndKey({ title: item.attribute, isLeaf: true }); + } + if (item.status === '0') { + return ErrorTitleAndKey({ + title: item.attribute, + errorMsg: item.error_message || '', + parentTitle, + history, + isCloudUserVal, + }); + } + } + return null; + }) + .filter(Boolean) as TreeDataNode[]; +} + +function AttributeCheckList({ + visible, + onClose, + onboardingStatusResponses, + loading, +}: AttributeCheckListProps): JSX.Element { + const [filter, setFilter] = useState(AttributesFilters.ALL); + const [treeData, setTreeData] = useState([]); + + const handleFilterChange = (value: AttributesFilters): void => { + setFilter(value); + }; + const isCloudUserVal = isCloudUser(); + const history = useHistory(); + + useEffect(() => { + const filteredData = onboardingStatusResponses.map((response) => { + if (response.errorMsg) { + return ErrorTitleAndKey({ + title: response.title, + errorMsg: response.errorMsg, + isLeaf: true, + parentTitle: response.title, + history, + isCloudUserVal, + }); + } + let filteredData = response.data; + + if (filter === AttributesFilters.SUCCESS) { + filteredData = response.data.filter((item) => item.status === '1'); + } else if (filter === AttributesFilters.ERROR) { + filteredData = response.data.filter((item) => item.status === '0'); + } + + return { + ...treeTitleAndKey({ title: response.title }), + children: generateTreeDataNodes( + filteredData, + response.title, + isCloudUserVal, + history, + ), + }; + }); + + setTreeData(filteredData); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [filter, onboardingStatusResponses]); + + return ( + } + > + {loading ? ( +
+ } size="large" /> +
+ ) : ( +
+