[release] Added jobs scrape interval and timeout. Fixes #30
vfarcic committed Feb 10, 2018
1 parent 751d12d commit 93a39f9
Showing 7 changed files with 43 additions and 17 deletions.
4 changes: 3 additions & 1 deletion docs/usage.md
@@ -16,11 +16,13 @@ Query parameters that follow should be added to the base address **[MONITOR_IP]:
|Query |Description |Required|
|---------------|------------------------------------------------------------------------------------------|--------|
|metricsPath |The path of the metrics endpoint. Defaults to `/metrics`. |No |
|scrapeInterval |How frequently to scrape targets from this job. |No |
|scrapeTimeout |Per-scrape timeout when scraping this job. |No |
|scrapePort |The port through which metrics are exposed. |Yes |
|serviceName |The name of the service that exports metrics. |Yes |
|scrapeType |A set of targets and parameters describing how to scrape metrics. |No |

You can find more about scrape types in [Scrape Config](https://prometheus.io/docs/operating/configuration/#scrape_config).
You can find more about scrape types in [Scrape Config](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config).
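For illustration, here is a minimal Go client that registers a service using the new `scrapeInterval` and `scrapeTimeout` query parameters. The `/v1/docker-flow-monitor` path and the sample values mirror the tests changed in this commit; the `monitor:8080` address is a placeholder, not something defined by the project.

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Build the query string with the scrape parameters described above.
	q := url.Values{}
	q.Set("serviceName", "my-service")
	q.Set("scrapePort", "1234")
	q.Set("scrapeInterval", "15s") // how frequently to scrape this job
	q.Set("scrapeTimeout", "11s")  // per-scrape timeout for this job

	// Placeholder address; replace with your monitor's address and port.
	addr := fmt.Sprintf("http://monitor:8080/v1/docker-flow-monitor?%s", q.Encode())

	resp, err := http.Get(addr)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```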

### Alert Parameters

8 changes: 4 additions & 4 deletions prometheus/config.go
@@ -105,8 +105,6 @@ func (c *Config) InsertScrapes(scrapes map[string]Scrape) {
Targets: []string{fmt.Sprintf("%s:%d", s.ServiceName, s.ScrapePort)},
}},
},
JobName: s.ServiceName,
MetricsPath: metricsPath,
}
} else {
newScrape = &ScrapeConfig{
@@ -117,10 +115,12 @@ func (c *Config) InsertScrapes(scrapes map[string]Scrape) {
Type: "A",
}},
},
JobName: s.ServiceName,
MetricsPath: metricsPath,
}
}
newScrape.JobName = s.ServiceName
newScrape.MetricsPath = metricsPath
newScrape.ScrapeInterval = s.ScrapeInterval
newScrape.ScrapeTimeout = s.ScrapeTimeout
c.ScrapeConfigs = append(c.ScrapeConfigs, newScrape)
}
}
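For context, a short sketch of how the refactored `InsertScrapes` now behaves: `JobName`, `MetricsPath`, `ScrapeInterval`, and `ScrapeTimeout` are assigned once, after the static/DNS branch, instead of in each branch separately. `Config`, `Scrape`, and `InsertScrapes` are the names visible in the diff above; the wrapper function itself is hypothetical.

```go
package prometheus // illustrative placement; the types below come from this package

// exampleInsertScrapes is a hypothetical caller showing the effect of the
// refactor: the interval and timeout are applied to every generated scrape
// config, whether it uses static targets or DNS service discovery.
func exampleInsertScrapes() *Config {
	c := &Config{}
	c.InsertScrapes(map[string]Scrape{
		"my-service": {
			ServiceName:    "my-service",
			ScrapePort:     1234,
			ScrapeInterval: "15s", // copied verbatim into the job's scrape_interval
			ScrapeTimeout:  "11s", // copied verbatim into the job's scrape_timeout
		},
	})
	return c
}
```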
32 changes: 26 additions & 6 deletions prometheus/config_test.go
@@ -367,8 +367,20 @@ func (s *ConfigTestSuite) Test_InsertScrape_ConfigWithData() {

scrapes := map[string]Scrape{
"service-1": {ServiceName: "service-1", ScrapePort: 1234},
"service-2": {ServiceName: "service-2", ScrapePort: 5678},
"service-3": {ServiceName: "service-3", ScrapePort: 4321, ScrapeType: "static_configs", MetricsPath: "/something"},
"service-2": {
ServiceName: "service-2",
ScrapePort: 5678,
ScrapeInterval: "32s",
ScrapeTimeout: "11s",
},
"service-3": {
ServiceName: "service-3",
ScrapeInterval: "23s",
ScrapeTimeout: "21s",
ScrapePort: 4321,
ScrapeType: "static_configs",
MetricsPath: "/something",
},
}

c := &Config{}
@@ -382,13 +394,21 @@ func (s *ConfigTestSuite) Test_InsertScrape_ConfigWithData() {
s.Equal(expectedC.ScrapePort, sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Port)
s.Equal("A", sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Type)
s.Equal("/metrics", sc.MetricsPath)
s.Equal(fmt.Sprintf("tasks.%s", expectedC.ServiceName),
sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Names[0])
s.Equal(
fmt.Sprintf("tasks.%s", expectedC.ServiceName),
sc.ServiceDiscoveryConfig.DNSSDConfigs[0].Names[0],
)
s.Equal(expectedC.ScrapeInterval, sc.ScrapeInterval)
s.Equal(expectedC.ScrapeTimeout, sc.ScrapeTimeout)
} else {
s.Equal(expectedC.ServiceName, sc.JobName)
s.Equal(expectedC.MetricsPath, sc.MetricsPath)
s.Equal(fmt.Sprintf("%s:%d", expectedC.ServiceName, expectedC.ScrapePort),
sc.ServiceDiscoveryConfig.StaticConfigs[0].Targets[0])
s.Equal(
fmt.Sprintf("%s:%d", expectedC.ServiceName, expectedC.ScrapePort),
sc.ServiceDiscoveryConfig.StaticConfigs[0].Targets[0],
)
s.Equal(expectedC.ScrapeInterval, sc.ScrapeInterval)
s.Equal(expectedC.ScrapeTimeout, sc.ScrapeTimeout)
}
}
}
1 change: 0 additions & 1 deletion prometheus/run_test.go
@@ -57,7 +57,6 @@ func (s *RunTestSuite) Test_Run_AddsArguments() {
return nil
}

println("000")
Run()

s.Equal([]string{"/bin/sh", "-c", "prometheus --config.file=\"/etc/prometheus/prometheus.yml\" --storage.tsdb.path=\"/prometheus\" --web.console.libraries=\"/usr/share/prometheus/console_libraries\" --web.console.templates=\"/usr/share/prometheus/consoles\" --web.route-prefix=\"/something\""}, actualArgs)
10 changes: 6 additions & 4 deletions prometheus/types.go
@@ -216,8 +216,10 @@ type Alert struct {

// Scrape defines data used to create scraping configuration snippet
type Scrape struct {
MetricsPath string `json:"metricsPath,string,omitempty"`
ScrapePort int `json:"scrapePort,string,omitempty"`
ServiceName string `json:"serviceName"`
ScrapeType string `json:"scrapeType"`
MetricsPath string `json:"metricsPath,string,omitempty"`
ScrapeInterval string `json:"scrapeInterval,string,omitempty"`
ScrapeTimeout string `json:"scrapeTimeout,string,omitempty"`
ScrapePort int `json:"scrapePort,string,omitempty"`
ServiceName string `json:"serviceName"`
ScrapeType string `json:"scrapeType"`
}
1 change: 1 addition & 0 deletions server/server.go
@@ -173,6 +173,7 @@ func (s *serve) getScrapeFromMap(data map[string]string) (prometheus.Scrape, err
if port, err := strconv.Atoi(data["scrapePort"]); err == nil {
scrape.ScrapePort = port
}
scrape.ScrapeInterval = data["scrapeInterval"]
scrape.ServiceName = data["serviceName"]
scrape.ScrapeType = data["scrapeType"]

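As a hedged sketch, this is one way the query parameters from a reconfigure request could end up in the `data` map that `getScrapeFromMap` reads above; the helper and its placement are hypothetical and not part of this commit.

```go
package server // illustrative placement; not part of this commit

import "net/url"

// queryToMap is a hypothetical helper that flattens the request's query
// string into the map[string]string consumed by getScrapeFromMap.
func queryToMap(query url.Values) map[string]string {
	data := map[string]string{}
	for key := range query {
		data[key] = query.Get(key) // e.g. data["scrapeInterval"] = "15s"
	}
	return data
}
```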
4 changes: 3 additions & 1 deletion server/server_test.go
@@ -565,6 +565,8 @@ rule_files:
- alert.rules
scrape_configs:
- job_name: my-service
scrape_interval: 15s
scrape_timeout: 11s
metrics_path: /metrics
dns_sd_configs:
- names:
@@ -573,7 +575,7 @@ scrape_configs:
port: 1234
`
rwMock := ResponseWriterMock{}
addr := "/v1/docker-flow-monitor?serviceName=my-service&scrapePort=1234&alertName=my-alert&alertIf=my-if&alertFor=my-for"
addr := "/v1/docker-flow-monitor?serviceName=my-service&scrapePort=1234&scrapeInterval=15s&scrapeTimeout=11s&alertName=my-alert&alertIf=my-if&alertFor=my-for"
req, _ := http.NewRequest("GET", addr, nil)
fsOrig := prometheus.FS
defer func() { prometheus.FS = fsOrig }()
