diff --git a/CHANGELOG.md b/CHANGELOG.md index c3c8ed1b4d7..3a1992488e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -61,6 +61,7 @@ To learn more about active deprecations, we recommend checking [GitHub Discussio - TODO ([#XXX](https://github.com/kedacore/keda/issues/XXX)) - **General**: Add --ca-dir flag to KEDA operator to specify directories with CA certificates for scalers to authenticate TLS connections (defaults to /custom/ca) ([#5860](https://github.com/kedacore/keda/issues/5860)) - **General**: Declarative parsing of scaler config ([#5037](https://github.com/kedacore/keda/issues/5037)|[#5797](https://github.com/kedacore/keda/issues/5797)) +- **General**: Introduce new Splunk Scaler ([#5904](https://github.com/kedacore/keda/issues/5904)) - **General**: Remove deprecated Kustomize commonLabels ([#5888](https://github.com/kedacore/keda/pull/5888)) - **General**: Support for Kubernetes v1.30 ([#5828](https://github.com/kedacore/keda/issues/5828)) diff --git a/pkg/scalers/splunk/splunk.go b/pkg/scalers/splunk/splunk.go new file mode 100644 index 00000000000..0bc61c5e1fe --- /dev/null +++ b/pkg/scalers/splunk/splunk.go @@ -0,0 +1,120 @@ +package splunk + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "github.com/kedacore/keda/v2/pkg/scalers/scalersconfig" + kedautil "github.com/kedacore/keda/v2/pkg/util" +) + +const ( + savedSearchPathTemplateStr = "/servicesNS/%s/search/search/jobs/export" +) + +// Config contains the information required to authenticate with a Splunk instance. +type Config struct { + Host string + Username string + Password string + APIToken string + HTTPTimeout time.Duration + UnsafeSsl bool +} + +// Client contains Splunk config information as well as an http client for requests. +type Client struct { + *Config + *http.Client +} + +// SearchResponse is used for unmarshalling search results. 
+type SearchResponse struct { + Result map[string]string `json:"result"` +} + +// NewClient returns a new Splunk client. +func NewClient(c *Config, sc *scalersconfig.ScalerConfig) (*Client, error) { + if c.Username == "" { + return nil, errors.New("username was not set") + } + + if c.APIToken != "" && c.Password != "" { + return nil, errors.New("API token and Password were all set. If APIToken is set, username and password must not be used") + } + + httpClient := kedautil.CreateHTTPClient(sc.GlobalHTTPTimeout, c.UnsafeSsl) + + client := &Client{ + c, + httpClient, + } + + return client, nil +} + +// SavedSearch fetches the results of a saved search/report in Splunk. +func (c *Client) SavedSearch(name string) (*SearchResponse, error) { + savedSearchAPIPath := fmt.Sprintf(savedSearchPathTemplateStr, c.Username) + endpoint := fmt.Sprintf("%s%s", c.Host, savedSearchAPIPath) + + body := strings.NewReader(fmt.Sprintf("search=savedsearch %s", name)) + req, err := http.NewRequest(http.MethodPost, endpoint, body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + if c.APIToken != "" { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", c.APIToken)) + } else { + req.SetBasicAuth(c.Username, c.Password) + } + + req.URL.RawQuery = url.Values{ + "output_mode": {"json"}, + }.Encode() + + resp, err := c.Client.Do(req) + if err != nil { + return nil, err + } + + defer resp.Body.Close() + + if resp.StatusCode > 399 { + bodyText, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + return nil, errors.New(string(bodyText)) + } + + result := &SearchResponse{} + + err = json.NewDecoder(resp.Body).Decode(&result) + + return result, err +} + +// ToMetric converts a search response to a consumable metric value. 
+func (s *SearchResponse) ToMetric(valueField string) (float64, error) {
+	metricValueStr, ok := s.Result[valueField]
+	if !ok {
+		return 0, fmt.Errorf("field: %s not found in search results", valueField)
+	}
+
+	metricValueInt, err := strconv.ParseFloat(metricValueStr, 64)
+	if err != nil {
+		return 0, fmt.Errorf("value: %s is not a float value", metricValueStr)
+	}
+
+	return metricValueInt, nil
+}
diff --git a/pkg/scalers/splunk/splunk_test.go b/pkg/scalers/splunk/splunk_test.go
new file mode 100644
index 00000000000..6201311b05a
--- /dev/null
+++ b/pkg/scalers/splunk/splunk_test.go
@@ -0,0 +1,303 @@
+package splunk
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"testing"
+
+	"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
+)
+
+func TestNewClient(t *testing.T) {
+	tests := []struct {
+		name      string
+		config    *Config
+		expectErr bool
+	}{
+		{
+			name: "Valid Basic Auth Config",
+			config: &Config{
+				Username: "fake",
+				Password: "fake",
+			},
+		},
+		{
+			name: "Valid Bearer + Username Auth Config",
+			config: &Config{
+				APIToken: "fake",
+				Username: "fake",
+			},
+		},
+		{
+			name:      "Missing username",
+			config:    &Config{},
+			expectErr: true,
+		},
+		{
+			name: "Invalid Bearer + Password Auth Config",
+			config: &Config{
+				APIToken: "fake",
+				Password: "fake",
+			},
+			expectErr: true,
+		},
+		{
+			name: "UnsafeSsl config",
+			config: &Config{
+				APIToken:  "fake",
+				Username:  "fake",
+				UnsafeSsl: false,
+			},
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			client, err := NewClient(test.config, &scalersconfig.ScalerConfig{})
+
+			if test.expectErr && err != nil {
+				return
+			}
+
+			if test.expectErr && err == nil {
+				t.Error("Expected error, got nil")
+			}
+
+			if test.config.UnsafeSsl && client.Client.Transport == nil {
+				t.Error("Expected SSL client config to be set, but was nil")
+			}
+		})
+	}
+}
+
+func TestSavedSearch(t *testing.T) {
+	tests := []struct {
+		name      string
+		config    *Config
+		expectErr bool
+		
metricValue string + valueField string + response SearchResponse + savedSearchName string + statusCode int + }{ + { + name: "Count - 1", + config: &Config{ + Username: "admin", + Password: "password", + }, + metricValue: "1", + valueField: "count", + response: SearchResponse{Result: map[string]string{"count": "1"}}, + savedSearchName: "testsearch1", + statusCode: http.StatusOK, + }, + { + name: "Count - 100", + config: &Config{ + Username: "admin2", + Password: "password2", + }, + metricValue: "100", + valueField: "count", + response: SearchResponse{Result: map[string]string{"count": "100"}}, + savedSearchName: "testsearch2", + statusCode: http.StatusOK, + }, + { + name: "StatusBadRequest", + config: &Config{ + Username: "admin", + Password: "password", + }, + expectErr: true, + response: SearchResponse{Result: map[string]string{}}, + savedSearchName: "testsearch4", + statusCode: http.StatusBadRequest, + }, + { + name: "StatusForbidden", + config: &Config{ + Username: "admin", + Password: "password", + }, + expectErr: true, + response: SearchResponse{Result: map[string]string{}}, + savedSearchName: "testsearch5", + statusCode: http.StatusForbidden, + }, + { + name: "Validate Bearer Token", + config: &Config{ + APIToken: "sometoken", + Username: "fake", + }, + expectErr: true, + response: SearchResponse{Result: map[string]string{}}, + savedSearchName: "testsearch5", + statusCode: http.StatusForbidden, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + expectedReqPath := fmt.Sprintf(savedSearchPathTemplateStr, test.config.Username) + if r.URL.Path != fmt.Sprintf(savedSearchPathTemplateStr, test.config.Username) { + t.Errorf("Expected request path '%s', got: %s", expectedReqPath, r.URL.Path) + } + + err := r.ParseForm() + if err != nil { + t.Errorf("Expected no error parsing form data, but got '%s'", err.Error()) + } + + searchFormData := 
r.FormValue("search")
+			if searchFormData != fmt.Sprintf("savedsearch %s", test.savedSearchName) {
+				t.Errorf("Expected form data to be 'savedsearch %s' '%s'", test.savedSearchName, searchFormData)
+			}
+
+			q, err := url.ParseQuery(r.URL.RawQuery)
+			if err != nil {
+				t.Errorf("Expected query parsing err to be nil, got %s", err.Error())
+			}
+
+			outputMode := q.Get("output_mode")
+			if outputMode != "json" {
+				t.Errorf("Expected output_mode query string to be '%s', got: %s", "json", outputMode)
+			}
+
+			// Bearer token auth
+			if test.config.APIToken != "" {
+				actual := r.Header.Get("Authorization")
+				expected := fmt.Sprintf("Bearer %s", test.config.APIToken)
+				if actual != expected {
+					t.Errorf("APIToken is set. Expected Authorization header to be '%s', got: %s", expected, actual)
+				}
+			} else {
+				// Basic auth
+				reqUsername, reqPassword, ok := r.BasicAuth()
+				if !ok {
+					t.Error("Expected basic auth to be set, but was not")
+				}
+				if test.config.Username != reqUsername {
+					t.Errorf("Expected request username to be '%s', got: %s", test.config.Username, reqUsername)
+				}
+				if test.config.Password != reqPassword {
+					t.Errorf("Expected request password to be '%s', got: %s", test.config.Password, reqPassword)
+				}
+			}
+
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(test.statusCode)
+			err = json.NewEncoder(w).Encode(test.response)
+			if err != nil {
+				http.Error(w, fmt.Sprintf("error building the response, %v", err), http.StatusInternalServerError)
+				return
+			}
+		}))
+		defer server.Close()
+
+		test.config.Host = server.URL
+		s, err := NewClient(test.config, &scalersconfig.ScalerConfig{})
+		if err != nil {
+			t.Errorf("Expected err to be nil, got %s", err.Error())
+		}
+
+		splunkResponse, err := s.SavedSearch(test.savedSearchName)
+
+		if test.expectErr && err != nil {
+			return
+		}
+
+		if test.expectErr && err == nil {
+			t.Error("Expected error, got nil")
+		}
+
+		if err != nil {
+			t.Errorf("Expected err to be nil, got %s", err.Error())
+		}
+
+		v, ok := 
splunkResponse.Result[test.valueField] + if !ok { + t.Errorf("Expected value field to be %s to exist but did not", test.valueField) + } + + if v != test.metricValue { + t.Errorf("Expected metric value to be %s, got %s", test.metricValue, v) + } + }) + } +} + +func TestToMetric(t *testing.T) { + tests := []struct { + name string + expectErr bool + expectedMetricValue float64 + response *SearchResponse + valueField string + }{ + { + name: "Successful metric conversion - 1", + expectedMetricValue: 1.000000, + response: &SearchResponse{ + Result: map[string]string{ + "count": "1", + }, + }, + valueField: "count", + }, + { + name: "Successful metric conversion - 100", + expectedMetricValue: 100.000000, + response: &SearchResponse{ + Result: map[string]string{ + "count": "100", + }, + }, + valueField: "count", + }, + { + name: "Failed metric type conversion", + expectErr: true, + response: &SearchResponse{ + Result: map[string]string{ + "count": "A", + }, + }, + valueField: "count", + }, + { + name: "Value field not found", + expectErr: true, + response: &SearchResponse{ + Result: map[string]string{ + "fake": "1", + }, + }, + valueField: "count", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + metric, err := test.response.ToMetric(test.valueField) + if test.expectErr && err != nil { + return + } + + if test.expectErr && err == nil { + t.Error("Expected error, got nil") + } + + if test.expectedMetricValue != metric { + t.Errorf("Expected metric value '%f', got: %f", test.expectedMetricValue, metric) + } + }) + } +} diff --git a/pkg/scalers/splunk_scaler.go b/pkg/scalers/splunk_scaler.go new file mode 100644 index 00000000000..d11586df88d --- /dev/null +++ b/pkg/scalers/splunk_scaler.go @@ -0,0 +1,118 @@ +package scalers + +import ( + "context" + "errors" + "fmt" + "net/url" + + "github.com/go-logr/logr" + v2 "k8s.io/api/autoscaling/v2" + "k8s.io/metrics/pkg/apis/external_metrics" + + 
"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig" + "github.com/kedacore/keda/v2/pkg/scalers/splunk" + kedautil "github.com/kedacore/keda/v2/pkg/util" +) + +// SplunkScaler assigns struct data pointer to metadata variable +type SplunkScaler struct { + client *splunk.Client + metricType v2.MetricTargetType + metadata SplunkMetadata + logger logr.Logger +} + +// SplunkMetadata Metadata used by KEDA to search Splunk events and scale +type SplunkMetadata struct { + APIToken string `keda:"name=apiToken, order=authParams, optional"` + Password string `keda:"name=password, order=authParams, optional"` + Username string `keda:"name=username, order=authParams"` + Host string `keda:"name=host, order=triggerMetadata"` + UnsafeSsl bool `keda:"name=unsafeSsl, order=triggerMetadata, optional"` + TargetValue int `keda:"name=targetValue, order=triggerMetadata"` + ActivationValue int `keda:"name=activationValue, order=triggerMetadata"` + SavedSearchName string `keda:"name=savedSearchName, order=triggerMetadata"` + ValueField string `keda:"name=valueField, order=triggerMetadata"` + triggerIndex int +} + +func NewSplunkScaler(config *scalersconfig.ScalerConfig) (Scaler, error) { + metricType, err := GetMetricTargetType(config) + if err != nil { + return nil, fmt.Errorf("error getting scaler metric type: %w", err) + } + + meta, err := parseSplunkMetadata(config) + if err != nil { + return nil, fmt.Errorf("error parsing Splunk metadata: %w", err) + } + + client, err := splunk.NewClient(&splunk.Config{ + APIToken: meta.APIToken, + Password: meta.Password, + Username: meta.Username, + Host: meta.Host, + UnsafeSsl: meta.UnsafeSsl, + }, config) + if err != nil { + return nil, err + } + + return &SplunkScaler{ + client: client, + metricType: metricType, + logger: InitializeLogger(config, "splunk_scaler"), + metadata: *meta, + }, nil +} + +func (s *SplunkScaler) Close(context.Context) error { + return nil +} + +func parseSplunkMetadata(config *scalersconfig.ScalerConfig) 
(*SplunkMetadata, error) { + meta := &SplunkMetadata{} + meta.triggerIndex = config.TriggerIndex + if err := config.TypedConfig(meta); err != nil { + return nil, fmt.Errorf("error parsing splunk metadata: %w", err) + } + + _, err := url.ParseRequestURI(meta.Host) + if err != nil { + return meta, errors.New("invalid value for host. Must be a valid URL such as https://localhost:8089") + } + + return meta, nil +} + +// GetMetricSpecForScaling returns the MetricSpec for the Horizontal Pod Autoscaler +func (s *SplunkScaler) GetMetricSpecForScaling(context.Context) []v2.MetricSpec { + externalMetric := &v2.ExternalMetricSource{ + Metric: v2.MetricIdentifier{ + Name: GenerateMetricNameWithIndex(s.metadata.triggerIndex, kedautil.NormalizeString(fmt.Sprintf("splunk-%s", s.metadata.SavedSearchName))), + }, + Target: GetMetricTarget(s.metricType, int64(s.metadata.TargetValue)), + } + metricSpec := v2.MetricSpec{ + External: externalMetric, + Type: externalMetricType, + } + return []v2.MetricSpec{metricSpec} +} + +// GetMetricsAndActivity returns value for a supported metric and an error if there is a problem getting the metric +func (s *SplunkScaler) GetMetricsAndActivity(_ context.Context, metricName string) ([]external_metrics.ExternalMetricValue, bool, error) { + response, err := s.client.SavedSearch(s.metadata.SavedSearchName) + if err != nil { + return []external_metrics.ExternalMetricValue{}, false, fmt.Errorf("error fetching saved search data from splunk: %w", err) + } + + metricValue, err := response.ToMetric(s.metadata.ValueField) + if err != nil { + return []external_metrics.ExternalMetricValue{}, false, fmt.Errorf("error finding metric value field: %w", err) + } + + metric := GenerateMetricInMili(metricName, metricValue) + return []external_metrics.ExternalMetricValue{metric}, int(metricValue) > s.metadata.ActivationValue, nil +} diff --git a/pkg/scalers/splunk_scaler_test.go b/pkg/scalers/splunk_scaler_test.go new file mode 100644 index 00000000000..17e2409cc3c 
--- /dev/null +++ b/pkg/scalers/splunk_scaler_test.go @@ -0,0 +1,95 @@ +package scalers + +import ( + "context" + "testing" + + "github.com/kedacore/keda/v2/pkg/scalers/scalersconfig" +) + +type parseSplunkMetadataTestData struct { + metadata map[string]string + authParams map[string]string + isError bool +} + +type SplunkMetricIdentifier struct { + metadataTestData *parseSplunkMetadataTestData + triggerIndex int + name string +} + +var validSplunkAuthParams = map[string]string{ + "username": "fake", +} + +var validSplunkMetadata = map[string]string{ + "host": "https://localhost:8089", + "unsafeSsl": "false", + "targetValue": "1", + "activationValue": "5", + "savedSearchName": "fakeSavedSearchName", + "valueField": "count", +} + +var testSplunkMetadata = []parseSplunkMetadataTestData{ + // Valid metadata for api token auth, pass. + {validSplunkMetadata, map[string]string{"username": "fake", "apiToken": "fake"}, false}, + // Valid metadata for basic auth, pass. + {validSplunkMetadata, map[string]string{"username": "fake", "password": "fake"}, false}, + // No params, missing username, fail. + {map[string]string{}, map[string]string{}, true}, + // No params, missing host, fail. + {map[string]string{}, validSplunkAuthParams, true}, + // Invalid host, fail. + {map[string]string{"host": "missinghttpURIScheme:8089"}, validSplunkAuthParams, true}, + // Invalid unsafeSsl value, fail. + {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "invalid"}, validSplunkAuthParams, true}, + // Missing targetValue, fail. + {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "false"}, validSplunkAuthParams, true}, + // Invalid targetValue, fail. + {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "false", "targetValue": "invalid"}, validSplunkAuthParams, true}, + // Missing activationValue, fail. 
+ {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "false", "targetValue": "1"}, validSplunkAuthParams, true}, + // Invalid activationValue, fail. + {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "false", "targetValue": "1", "activationValue": "invalid"}, validSplunkAuthParams, true}, + // Missing savedSearchName, fail. + {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "false", "targetValue": "1", "activationValue": "5"}, validSplunkAuthParams, true}, + // Missing valueField, fail. + {map[string]string{"host": "https://localhost:8089", "unsafeSsl": "false", "targetValue": "1", "activationValue": "5", "savedSearchName": "fakeSavedSearchName"}, validSplunkAuthParams, true}, +} + +var SplunkMetricIdentifiers = []SplunkMetricIdentifier{ + {&testSplunkMetadata[0], 0, "s0-splunk-fakeSavedSearchName"}, + {&testSplunkMetadata[0], 1, "s1-splunk-fakeSavedSearchName"}, +} + +func TestSplunkParseMetadata(t *testing.T) { + for _, testData := range testSplunkMetadata { + _, err := parseSplunkMetadata(&scalersconfig.ScalerConfig{TriggerMetadata: testData.metadata, AuthParams: testData.authParams}) + if err != nil && !testData.isError { + t.Error("Expected success but got error", err) + } else if testData.isError && err == nil { + t.Error("Expected error but got success") + } + } +} + +func TestSplunkGetMetricSpecForScaling(t *testing.T) { + for _, testData := range SplunkMetricIdentifiers { + ctx := context.Background() + meta, err := parseSplunkMetadata(&scalersconfig.ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, AuthParams: validSplunkAuthParams, TriggerIndex: testData.triggerIndex}) + if err != nil { + t.Fatal("Could not parse metadata:", err) + } + mockSplunkScaler := SplunkScaler{ + metadata: *meta, + } + + metricSpec := mockSplunkScaler.GetMetricSpecForScaling(ctx) + metricName := metricSpec[0].External.Metric.Name + if metricName != testData.name { + t.Error("Wrong External metric source name:", 
metricName) + } + } +} diff --git a/pkg/scaling/scalers_builder.go b/pkg/scaling/scalers_builder.go index 889871fce88..7f6071d54f9 100644 --- a/pkg/scaling/scalers_builder.go +++ b/pkg/scaling/scalers_builder.go @@ -245,6 +245,8 @@ func buildScaler(ctx context.Context, client client.Client, triggerType string, return scalers.NewSolaceScaler(config) case "solr": return scalers.NewSolrScaler(config) + case "splunk": + return scalers.NewSplunkScaler(config) case "stan": return scalers.NewStanScaler(config) default: diff --git a/tests/scalers/splunk/splunk_test.go b/tests/scalers/splunk/splunk_test.go new file mode 100644 index 00000000000..5fd2d3bf417 --- /dev/null +++ b/tests/scalers/splunk/splunk_test.go @@ -0,0 +1,317 @@ +//go:build e2e +// +build e2e + +package splunk_test + +import ( + "encoding/base64" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "k8s.io/client-go/kubernetes" + + . "github.com/kedacore/keda/v2/tests/helper" +) + +const ( + testName = "splunk-test" +) + +var ( + testNamespace = fmt.Sprintf("%s-ns", testName) + configMapName = fmt.Sprintf("%s-configmap", testName) + deploymentName = fmt.Sprintf("%s-deployment", testName) + scaledObjectName = fmt.Sprintf("%s-so", testName) + secretName = fmt.Sprintf("%s-secret", testName) + username = "admin" + password = "password" + savedSearchName = "e2eSavedSearch" + apiPort = 8089 + maxReplicaCount = 2 + minReplicaCount = 0 + scaleInTargetValue = "10" + scaleInActivationValue = "15" +) + +type templateData struct { + TestNamespace string + ConfigMapName string + DeploymentName string + ScaledObjectName string + SecretName string + SplunkUsername string + SplunkUsernameBase64 string + SplunkPassword string + SplunkPasswordBase64 string + SavedSearchName string + APIPort int + MinReplicaCount string + MaxReplicaCount string + // Preconfigured saved search returns a static value of 10 + // so we need to change the scaled object values at different phases to test scale in + out + TargetValue 
string + ActivationValue string +} + +const ( + secretTemplate = ` +apiVersion: v1 +kind: Secret +metadata: + name: {{.SecretName}} + namespace: {{.TestNamespace}} +data: + username: {{.SplunkUsernameBase64}} + password: {{.SplunkPasswordBase64}} +` + + configMapTemplate = ` +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{.ConfigMapName}} + namespace: {{.TestNamespace}} +data: + default.yml: | + splunk: + conf: + - key: savedsearches + value: + directory: /opt/splunk/etc/users/admin/search/local + content: + {{.SavedSearchName}}: + action.email.useNSSubject: 1 + action.webhook.enable_allowlist: 0 + alert.track: 0 + cron_schedule: '*/1 * * * *' + dispatch.earliest_time: -15m + dispatch.latest_time: now + display.general.type: statistics + display.page.search.tab: statistics + display.visualizations.show: 0 + enableSched: 1 + request.ui_dispatch_app: search + request.ui_dispatch_view: search + search: index=_internal | tail | stats count +` + + triggerAuthenticationTemplate = ` +apiVersion: keda.sh/v1alpha1 +kind: TriggerAuthentication +metadata: + name: keda-trigger-auth-splunk-secret + namespace: {{.TestNamespace}} +spec: + secretTargetRef: + - parameter: username + name: {{.SecretName}} + key: username + - parameter: password + name: {{.SecretName}} + key: password +` + + deploymentTemplate = ` +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{.DeploymentName}} + namespace: {{.TestNamespace}} + labels: + app: {{.DeploymentName}} +spec: + replicas: 0 + selector: + matchLabels: + app: {{.DeploymentName}} + template: + metadata: + labels: + app: {{.DeploymentName}} + spec: + containers: + - name: nginx + image: nginxinc/nginx-unprivileged + ports: + - containerPort: 80 +` + splunkDeploymentTemplate = ` +apiVersion: apps/v1 +kind: Deployment +metadata: + name: splunk + namespace: {{.TestNamespace}} +spec: + replicas: 1 + selector: + matchLabels: + name: splunk + template: + metadata: + labels: + name: splunk + spec: + containers: + - name: splunk + 
image: splunk/splunk:9.2 + imagePullPolicy: IfNotPresent + env: + - name: SPLUNK_START_ARGS + value: --accept-license + - name: SPLUNK_PASSWORD + value: {{.SplunkPassword}} + ports: + - containerPort: {{.APIPort}} + name: api + protocol: TCP + volumeMounts: + - name: splunkconf-volume + mountPath: /tmp/defaults + volumes: + - name: splunkconf-volume + configMap: + name: {{.ConfigMapName}} +` + + serviceTemplate = `apiVersion: v1 +kind: Service +metadata: + name: {{.DeploymentName}} + namespace: {{.TestNamespace}} +spec: + type: ClusterIP + ports: + - name: api + port: {{.APIPort}} + targetPort: {{.APIPort}} + protocol: TCP + selector: + name: splunk +` + + scaledObjectTemplate = ` +apiVersion: keda.sh/v1alpha1 +kind: ScaledObject +metadata: + name: {{.ScaledObjectName}} + namespace: {{.TestNamespace}} + labels: + app: {{.DeploymentName}} +spec: + scaleTargetRef: + name: {{.DeploymentName}} + minReplicaCount: {{.MinReplicaCount}} + maxReplicaCount: {{.MaxReplicaCount}} + pollingInterval: 3 + cooldownPeriod: 1 + triggers: + - type: splunk + metadata: + host: "https://{{.DeploymentName}}.{{.TestNamespace}}.svc:{{.APIPort}}" + username: {{.SplunkUsername}} + unsafeSsl: "true" + targetValue: "{{.TargetValue}}" + activationValue: "{{.ActivationValue}}" + savedSearchName: {{.SavedSearchName}} + valueField: count + authenticationRef: + name: keda-trigger-auth-splunk-secret +` +) + +func TestSplunkScaler(t *testing.T) { + kc := GetKubernetesClient(t) + data, templates := getTemplateData() + t.Cleanup(func() { + DeleteKubernetesResources(t, testNamespace, data, templates) + }) + + // Create kubernetes resources + CreateKubernetesResources(t, kc, testNamespace, data, templates) + + // Wait for splunk to start + assert.True(t, WaitForDeploymentReplicaReadyCount(t, kc, "splunk", testNamespace, 1, 180, 3), + "replica count should be %d after 3 minutes", 1) + + // Ensure nginx deployment is at min replica count + assert.True(t, WaitForDeploymentReplicaReadyCount(t, kc, 
deploymentName, testNamespace, minReplicaCount, 180, 3), + "replica count should be %d after 3 minutes", minReplicaCount) + + // test scaling + testActivation(t, kc) + testScaleOut(t, kc) + testScaleIn(t, kc) +} + +func testActivation(t *testing.T, kc *kubernetes.Clientset) { + t.Log("--- testing activation ---") + + AssertReplicaCountNotChangeDuringTimePeriod(t, kc, deploymentName, testNamespace, minReplicaCount, 60) +} + +func testScaleOut(t *testing.T, kc *kubernetes.Clientset) { + t.Log("--- testing scale out ---") + + // Saved Search returns 10, let's change the scaled object resource to force scaling out + data := getScaledObjectTemplateData("1", "9") + KubectlApplyWithTemplate(t, data, "scaledObjectTemplateToScaleOut", scaledObjectTemplate) + + assert.True(t, WaitForDeploymentReplicaReadyCount(t, kc, deploymentName, testNamespace, maxReplicaCount, 60, 3), + "replica count should be %d after 3 minutes", maxReplicaCount) +} + +func testScaleIn(t *testing.T, kc *kubernetes.Clientset) { + t.Log("--- testing scale in ---") + + // Saved Search returns 10, let's change the scaled object resource to force scaling in + data := getScaledObjectTemplateData(scaleInTargetValue, scaleInActivationValue) + KubectlApplyWithTemplate(t, data, "scaledObjectTemplateToScaleIn", scaledObjectTemplate) + + assert.True(t, WaitForDeploymentReplicaReadyCount(t, kc, deploymentName, testNamespace, minReplicaCount, 60, 3), + "replica count should be %d after 3 minutes", minReplicaCount) +} + +func getTemplateData() (templateData, []Template) { + return templateData{ + TestNamespace: testNamespace, + ConfigMapName: configMapName, + DeploymentName: deploymentName, + ScaledObjectName: scaledObjectName, + SecretName: secretName, + SplunkUsername: username, + SplunkUsernameBase64: base64.StdEncoding.EncodeToString([]byte(username)), + SplunkPassword: password, + SplunkPasswordBase64: base64.StdEncoding.EncodeToString([]byte(password)), + SavedSearchName: savedSearchName, + APIPort: apiPort, + 
MinReplicaCount: fmt.Sprintf("%v", minReplicaCount), + MaxReplicaCount: fmt.Sprintf("%v", maxReplicaCount), + // Ensure no scaling out since saved search returns 10 by default + TargetValue: scaleInTargetValue, + ActivationValue: scaleInActivationValue, + }, []Template{ + {Name: "secretTemplate", Config: secretTemplate}, + {Name: "configMapTemplate", Config: configMapTemplate}, + {Name: "triggerAuthenticationTemplate", Config: triggerAuthenticationTemplate}, + {Name: "serviceTemplate", Config: serviceTemplate}, + {Name: "splunkDeploymentTemplate", Config: splunkDeploymentTemplate}, + {Name: "deploymentTemplate", Config: deploymentTemplate}, + {Name: "scaledObjectTemplate", Config: scaledObjectTemplate}, + } +} + +func getScaledObjectTemplateData(targetValue, activationValue string) templateData { + return templateData{ + TestNamespace: testNamespace, + DeploymentName: deploymentName, + ScaledObjectName: scaledObjectName, + SplunkUsername: username, + SavedSearchName: savedSearchName, + APIPort: apiPort, + MinReplicaCount: fmt.Sprintf("%v", minReplicaCount), + MaxReplicaCount: fmt.Sprintf("%v", maxReplicaCount), + TargetValue: targetValue, + ActivationValue: activationValue, + } +}