From 93a39f9dbddad2f9f19dd5acb64a8d34256cc6aa Mon Sep 17 00:00:00 2001 From: Viktor Date: Sat, 10 Feb 2018 12:43:59 +0100 Subject: [PATCH] [release]Added jobs scrape interval and timeout.Fixes #30 --- docs/usage.md | 4 +++- prometheus/config.go | 8 ++++---- prometheus/config_test.go | 32 ++++++++++++++++++++++++++------ prometheus/run_test.go | 1 - prometheus/types.go | 10 ++++++---- server/server.go | 1 + server/server_test.go | 4 +++- 7 files changed, 43 insertions(+), 17 deletions(-) diff --git a/docs/usage.md b/docs/usage.md index e808b6c..f9d8e25 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -16,11 +16,13 @@ Query parameters that follow should be added to the base address **[MONITOR_IP]: |Query |Description |Required| |---------------|------------------------------------------------------------------------------------------|--------| |metricsPath |The path of the metrics endpoint. Defaults to `/metrics`. |No | +|scrapeInterval |How frequently to scrape targets from this job. |No | +|scrapeTimeout |Per-scrape timeout when scraping this job. |No | |scrapePort |The port through which metrics are exposed. |Yes | |serviceName |The name of the service that exports metrics. |Yes | |scrapeType |A set of targets and parameters describing how to scrape metrics. |No | -You can find more about scrapeType's on [Scrape Config](https://prometheus.io/docs/operating/configuration/#scrape_config). +You can find more about scrapeType's on [Scrape Config](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config). 
### Alert Parameters diff --git a/prometheus/config.go b/prometheus/config.go index b53062b..20eedf1 100644 --- a/prometheus/config.go +++ b/prometheus/config.go @@ -105,8 +105,6 @@ func (c *Config) InsertScrapes(scrapes map[string]Scrape) { Targets: []string{fmt.Sprintf("%s:%d", s.ServiceName, s.ScrapePort)}, }}, }, - JobName: s.ServiceName, - MetricsPath: metricsPath, } } else { newScrape = &ScrapeConfig{ @@ -117,10 +115,12 @@ func (c *Config) InsertScrapes(scrapes map[string]Scrape) { Type: "A", }}, }, - JobName: s.ServiceName, - MetricsPath: metricsPath, } } + newScrape.JobName = s.ServiceName + newScrape.MetricsPath = metricsPath + newScrape.ScrapeInterval = s.ScrapeInterval + newScrape.ScrapeTimeout = s.ScrapeTimeout c.ScrapeConfigs = append(c.ScrapeConfigs, newScrape) } } diff --git a/prometheus/config_test.go b/prometheus/config_test.go index cdf5a70..8aa5eb3 100644 --- a/prometheus/config_test.go +++ b/prometheus/config_test.go @@ -367,8 +367,20 @@ func (s *ConfigTestSuite) Test_InsertScrape_ConfigWithData() { scrapes := map[string]Scrape{ "service-1": {ServiceName: "service-1", ScrapePort: 1234}, - "service-2": {ServiceName: "service-2", ScrapePort: 5678}, - "service-3": {ServiceName: "service-3", ScrapePort: 4321, ScrapeType: "static_configs", MetricsPath: "/something"}, + "service-2": { + ServiceName: "service-2", + ScrapePort: 5678, + ScrapeInterval: "32s", + ScrapeTimeout: "11s", + }, + "service-3": { + ServiceName: "service-3", + ScrapeInterval: "23s", + ScrapeTimeout: "21s", + ScrapePort: 4321, + ScrapeType: "static_configs", + MetricsPath: "/something", + }, } c := &Config{} @@ -382,13 +394,21 @@ func (s *ConfigTestSuite) Test_InsertScrape_ConfigWithData() { s.Equal(expectedC.ScrapePort, sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Port) s.Equal("A", sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Type) s.Equal("/metrics", sc.MetricsPath) - s.Equal(fmt.Sprintf("tasks.%s", expectedC.ServiceName), - sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Names[0]) + 
s.Equal( + fmt.Sprintf("tasks.%s", expectedC.ServiceName), + sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Names[0], + ) + s.Equal(expectedC.ScrapeInterval, sc.ScrapeInterval) + s.Equal(expectedC.ScrapeTimeout, sc.ScrapeTimeout) } else { s.Equal(expectedC.ServiceName, sc.JobName) s.Equal(expectedC.MetricsPath, sc.MetricsPath) - s.Equal(fmt.Sprintf("%s:%d", expectedC.ServiceName, expectedC.ScrapePort), - sc.ServiceDiscoveryConfig.StaticConfigs[0].Targets[0]) + s.Equal( + fmt.Sprintf("%s:%d", expectedC.ServiceName, expectedC.ScrapePort), + sc.ServiceDiscoveryConfig.StaticConfigs[0].Targets[0], + ) + s.Equal(expectedC.ScrapeInterval, sc.ScrapeInterval) + s.Equal(expectedC.ScrapeTimeout, sc.ScrapeTimeout) } } } diff --git a/prometheus/run_test.go b/prometheus/run_test.go index 8153cf9..aa914c7 100644 --- a/prometheus/run_test.go +++ b/prometheus/run_test.go @@ -57,7 +57,6 @@ func (s *RunTestSuite) Test_Run_AddsArguments() { return nil } - println("000") Run() s.Equal([]string{"/bin/sh", "-c", "prometheus --config.file=\"/etc/prometheus/prometheus.yml\" --storage.tsdb.path=\"/prometheus\" --web.console.libraries=\"/usr/share/prometheus/console_libraries\" --web.console.templates=\"/usr/share/prometheus/consoles\" --web.route-prefix=\"/something\""}, actualArgs) diff --git a/prometheus/types.go b/prometheus/types.go index b9d1749..26a928a 100644 --- a/prometheus/types.go +++ b/prometheus/types.go @@ -216,8 +216,10 @@ type Alert struct { // Scrape defines data used to create scraping configuration snippet type Scrape struct { - MetricsPath string `json:"metricsPath,string,omitempty"` - ScrapePort int `json:"scrapePort,string,omitempty"` - ServiceName string `json:"serviceName"` - ScrapeType string `json:"scrapeType"` + MetricsPath string `json:"metricsPath,string,omitempty"` + ScrapeInterval string `json:"scrapeInterval,string,omitempty"` + ScrapeTimeout string `json:"scrapeTimeout,string,omitempty"` + ScrapePort int `json:"scrapePort,string,omitempty"` + ServiceName string 
`json:"serviceName"` + ScrapeType string `json:"scrapeType"` } diff --git a/server/server.go index 59dfdb2..4cd39e7 100644 --- a/server/server.go +++ b/server/server.go @@ -173,6 +173,8 @@ func (s *serve) getScrapeFromMap(data map[string]string) (prometheus.Scrape, err if port, err := strconv.Atoi(data["scrapePort"]); err == nil { scrape.ScrapePort = port } + scrape.ScrapeInterval = data["scrapeInterval"] + scrape.ScrapeTimeout = data["scrapeTimeout"] scrape.ServiceName = data["serviceName"] scrape.ScrapeType = data["scrapeType"] diff --git a/server/server_test.go index fbe8164..4613b77 100644 --- a/server/server_test.go +++ b/server/server_test.go @@ -565,6 +565,8 @@ rule_files: - alert.rules scrape_configs: - job_name: my-service + scrape_interval: 15s + scrape_timeout: 11s metrics_path: /metrics dns_sd_configs: - names: @@ -573,7 +575,7 @@ scrape_configs: port: 1234 ` rwMock := ResponseWriterMock{} - addr := "/v1/docker-flow-monitor?serviceName=my-service&scrapePort=1234&alertName=my-alert&alertIf=my-if&alertFor=my-for" + addr := "/v1/docker-flow-monitor?serviceName=my-service&scrapePort=1234&scrapeInterval=15s&scrapeTimeout=11s&alertName=my-alert&alertIf=my-if&alertFor=my-for" req, _ := http.NewRequest("GET", addr, nil) fsOrig := prometheus.FS defer func() { prometheus.FS = fsOrig }()