Skip to content

Commit

Permalink
Merge pull request #13 from monitoring-tools/gathering_metrics_improvements
Browse files Browse the repository at this point in the history

Gathering metrics improvements.
  • Loading branch information
vlamug authored Jul 24, 2018
2 parents 14b13f8 + fff3124 commit f05999e
Show file tree
Hide file tree
Showing 5 changed files with 37 additions and 59 deletions.
13 changes: 1 addition & 12 deletions Gopkg.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ For linux:
$ make build-linux
>> installing application dependencies
...
$ ./linux_amd64/nginx-plus-exporter listen-address="localhost:9005" --metrics-path="/metrics" --namespace="nginxplus" --nginx-stats-urls="localhost:9002/status" --nginx-stats-urls="localhost:9003/status" --nginx-plus-stats-urls="localhost:9004/status"
$ ./linux_amd64/nginx-plus-exporter --listen-address="localhost:9005" --metrics-path="/metrics" --namespace="nginxplus" --nginx-stats-urls="localhost:9002/status" --nginx-stats-urls="localhost:9003/status" --nginx-plus-stats-urls="localhost:9004/status"
```

For darwin:
Expand All @@ -21,7 +21,7 @@ For darwin:
$ make build-darwin
>> installing application dependencies
...
$ ./darwin_amd64/nginx-plus-exporter listen-address="localhost:9005" --metrics-path="/metrics" --namespace="nginxplus" --nginx-stats-urls="localhost:9002/status" --nginx-stats-urls="localhost:9003/status" --nginx-plus-stats-urls="localhost:9004/status"
$ ./darwin_amd64/nginx-plus-exporter --listen-address="localhost:9005" --metrics-path="/metrics" --namespace="nginxplus" --nginx-stats-urls="localhost:9002/status" --nginx-stats-urls="localhost:9003/status" --nginx-plus-stats-urls="localhost:9004/status"
```

### Other useful make commands:
Expand Down
68 changes: 32 additions & 36 deletions exporter/exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ type nginxPlusExporter struct {

duration prometheus.Summary
totalScrapes prometheus.Counter
metrics map[string]*prometheus.GaugeVec

sync.RWMutex
}
Expand Down Expand Up @@ -70,16 +69,11 @@ func NewNginxPlusExporter(
nginxPlusScraper: nginxPlusScraper,
duration: duration,
totalScrapes: totalScrapes,
metrics: map[string]*prometheus.GaugeVec{},
}
}

// Describe describes nginx and nginx plus metrics
func (exp *nginxPlusExporter) Describe(ch chan<- *prometheus.Desc) {
for _, item := range exp.metrics {
item.Describe(ch)
}

ch <- exp.duration.Desc()
ch <- exp.totalScrapes.Desc()
}
Expand All @@ -89,8 +83,7 @@ func (exp *nginxPlusExporter) Collect(ch chan<- prometheus.Metric) {
exp.Lock()
defer exp.Unlock()

exp.save(exp.scrape())
exp.expose(ch)
exp.expose(ch, exp.collect(exp.scrape()))
}

// scrape scrapes nginx or nginx plus stats for the passed urls
Expand All @@ -112,50 +105,53 @@ func (exp *nginxPlusExporter) scrape() chan metric.Metric {
return metrics
}

// save saves metrics in the internal struct
func (exp *nginxPlusExporter) save(metrics <-chan metric.Metric) {
// expose writes the exporter's own telemetry (scrape duration summary and
// total-scrapes counter) followed by every gathered gauge vector to the
// Prometheus collection channel ch.
func (exp *nginxPlusExporter) expose(ch chan<- prometheus.Metric, metrics map[string]*prometheus.GaugeVec) {

	// Exporter self-metrics go out first.
	ch <- exp.duration
	ch <- exp.totalScrapes

	// Each GaugeVec flushes all of its child metrics into the channel.
	for _, m := range metrics {
		m.Collect(ch)
	}
}

// collect collects all metrics to map
func (exp *nginxPlusExporter) collect(metrics <-chan metric.Metric) map[string]*prometheus.GaugeVec {
m := map[string]*prometheus.GaugeVec{}

for item := range metrics {
metricKey := exp.namespace + "_" + item.Name

if _, ok := exp.metrics[metricKey]; !ok {
gaugeOpt := prometheus.GaugeOpts{
Namespace: exp.namespace,
Name: item.Name,
}

labelNames := make([]string, 0, len(item.Labels))
for labelName := range item.Labels {
labelNames = append(labelNames, labelName)
}
gaugeOpt := prometheus.GaugeOpts{
Namespace: exp.namespace,
Name: item.Name,
}

exp.metrics[metricKey] = prometheus.NewGaugeVec(gaugeOpt, labelNames)
labelNames := make([]string, 0, len(item.Labels))
for labelName := range item.Labels {
labelNames = append(labelNames, labelName)
}

m[metricKey] = prometheus.NewGaugeVec(gaugeOpt, labelNames)

if val, err := common.ConvertValueToFloat64(item.Value); err != nil {
log.Errorf("Convert error for metric '%s': %s", item.Name, err)
log.Errorf("convert error for metric '%s': %s", item.Name, err)
continue
} else {
exp.metrics[metricKey].With(item.Labels).Set(val)
m[metricKey].With(item.Labels).Set(val)
}
}
}

// expose returns metrics to base metric channel
func (exp *nginxPlusExporter) expose(ch chan<- prometheus.Metric) {
ch <- exp.duration
ch <- exp.totalScrapes

for _, m := range exp.metrics {
m.Collect(ch)
}
return m
}

// scrapeModule scrapes stats for module(nginx or nginx plus)
func (exp *nginxPlusExporter) scrapeModule(module string, urls []string, metrics chan<- metric.Metric) {
for _, u := range urls {
addr, err := url.Parse(u)
if err != nil {
log.Fatalf("Unable to parse address '%s': %s", u, err)
log.Fatalf("unable to parse address '%s': %s", u, err)
}

labels := map[string]string{
Expand All @@ -174,7 +170,7 @@ func (exp *nginxPlusExporter) scrapeModule(module string, urls []string, metrics
func (exp *nginxPlusExporter) scrapeURL(module string, addr *url.URL, metrics chan<- metric.Metric, labels map[string]string) error {
resp, err := exp.client.Get(addr.String())
if err != nil {
return fmt.Errorf("Error making HTTP request to '%s': %s", addr.String(), err)
return fmt.Errorf("error making HTTP request to '%s': %s", addr.String(), err)
}
defer resp.Body.Close()

Expand All @@ -187,14 +183,14 @@ func (exp *nginxPlusExporter) scrapeURL(module string, addr *url.URL, metrics ch
if module == nginxModule {
err = exp.nginxScraper.Scrape(resp.Body, metrics, labels)
if err != nil {
return fmt.Errorf("Error scraping nginx stats using address '%s': %s", addr.String(), err)
return fmt.Errorf("error scraping nginx stats using address '%s': %s", addr.String(), err)
}

return nil
} else if module == nginxPlusModule && contentType == "application/json" {
err = exp.nginxPlusScraper.Scrape(resp.Body, metrics, labels)
if err != nil {
return fmt.Errorf("Error scraping nginx plus stats using address '%s': %s", addr.String(), err)
return fmt.Errorf("error scraping nginx plus stats using address '%s': %s", addr.String(), err)
}

return nil
Expand Down
7 changes: 1 addition & 6 deletions scraper/nginx_plus.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"io"
"strconv"

"github.com/monitoring-tools/prom-nginx-exporter/metric"
)
Expand All @@ -24,7 +23,7 @@ func (scr *NginxPlusScraper) Scrape(body io.Reader, metrics chan<- metric.Metric

status := &Status{}
if err := dec.Decode(status); err != nil {
return fmt.Errorf("Error while decoding JSON response")
return fmt.Errorf("error while decoding JSON response")
}

scr.scrapeProcesses(status, metrics, labels)
Expand Down Expand Up @@ -125,9 +124,6 @@ func (scr *NginxPlusScraper) scrapeUpstream(status *Status, metrics chan<- metri
peerLabels[k] = v
}
peerLabels["serverAddress"] = peer.Server
if peer.ID != nil {
peerLabels["id"] = strconv.Itoa(*peer.ID)
}

metrics <- metric.NewMetric("upstream_peer_backup", peer.Backup, peerLabels)
metrics <- metric.NewMetric("upstream_peer_weight", peer.Weight, peerLabels)
Expand Down Expand Up @@ -245,7 +241,6 @@ func (scr *NginxPlusScraper) scrapeStream(status *Status, metrics chan<- metric.
peerLables[k] = v
}
peerLables["serverAddress"] = peer.Server
peerLables["id"] = strconv.Itoa(peer.ID)

metrics <- metric.NewMetric("stream_upstream_peer_backup", peer.Backup, peerLables)
metrics <- metric.NewMetric("stream_upstream_peer_weight", peer.Weight, peerLables)
Expand Down
4 changes: 1 addition & 3 deletions scraper/nginx_plus_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -385,7 +385,6 @@ func (s NginxPlusScraperSuite) TestScrape_Success(c *C) {
for l, v := range upstreamLabels {
peerLabels[l] = v
}
peerLabels["id"] = "0"
peerLabels["serverAddress"] = "1.2.3.123:80"

m = <-metrics
Expand Down Expand Up @@ -701,7 +700,6 @@ func (s NginxPlusScraperSuite) TestScrape_Success(c *C) {
streamPeerLabels[l] = v
}
streamPeerLabels["serverAddress"] = "5.4.3.2:2345"
streamPeerLabels["id"] = "1"

m = <-metrics
c.Assert(m.Name, Equals, "stream_upstream_peer_backup", Commentf("incorrect metrics name of 'stream_upstream_peer_backup' field"))
Expand Down Expand Up @@ -808,5 +806,5 @@ func (s NginxPlusScraperSuite) TestScrape_Fail(c *C) {

err := nginxPlusScraper.Scrape(reader, metrics, labels)
c.Assert(err, NotNil, Commentf("error should be occurred"))
c.Assert(err.Error(), Equals, "Error while decoding JSON response", Commentf("incorrect error massage of parsing json"))
c.Assert(err.Error(), Equals, "error while decoding JSON response", Commentf("incorrect error massage of parsing json"))
}

0 comments on commit f05999e

Please sign in to comment.