Commit d4bc92b2 by bergquist

feat(tsdb): default tsdb httpclient

parent 9b28bf25
@@ -2,7 +2,6 @@ package graphite
import (
"context"
"crypto/tls"
"encoding/json"
"fmt"
"io/ioutil"
@@ -10,7 +9,6 @@ import (
"net/url"
"path"
"strings"
"time"
"golang.org/x/net/context/ctxhttp"
@@ -36,14 +34,7 @@ func init() {
glog = log.New("tsdb.graphite")
tsdb.RegisterExecutor("graphite", NewGraphiteExecutor)
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
HttpClient = &http.Client{
Timeout: time.Duration(15 * time.Second),
Transport: tr,
}
HttpClient = tsdb.GetDefaultClient()
}
func (e *GraphiteExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult {
......
package tsdb
import (
"crypto/tls"
"net"
"net/http"
"time"
)
func GetDefaultClient() *http.Client {
tr := &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
}).DialContext,
MaxIdleConns: 100,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
return &http.Client{
Timeout: time.Duration(30 * time.Second),
Transport: tr,
}
}
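
For context, a minimal sketch of how an executor can use the shared client returned by GetDefaultClient together with ctxhttp, mirroring the graphite/influxdb/opentsdb changes in this diff. The package name, HttpClient variable, and doQuery helper below are hypothetical illustrations, not part of the commit.

```go
// Hypothetical sketch (not from this commit): issuing a context-aware
// request with the shared default client, as the executors above do.
package example

import (
	"context"
	"net/http"

	"golang.org/x/net/context/ctxhttp"
)

// HttpClient would be assigned once in init(),
// e.g. HttpClient = tsdb.GetDefaultClient().
var HttpClient *http.Client

// doQuery sends req with the shared client; the request is aborted when
// ctx is cancelled or its deadline expires.
func doQuery(ctx context.Context, req *http.Request) (*http.Response, error) {
	return ctxhttp.Do(ctx, HttpClient, req)
}
```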
@@ -2,13 +2,11 @@ package influxdb
import (
"context"
"crypto/tls"
"encoding/json"
"fmt"
"net/http"
"net/url"
"path"
"time"
"golang.org/x/net/context/ctxhttp"
@@ -41,14 +39,7 @@ func init() {
glog = log.New("tsdb.influxdb")
tsdb.RegisterExecutor("influxdb", NewInfluxDBExecutor)
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
HttpClient = &http.Client{
Timeout: time.Duration(15 * time.Second),
Transport: tr,
}
HttpClient = tsdb.GetDefaultClient()
}
func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult {
......
@@ -2,19 +2,17 @@ package opentsdb
import (
"context"
"crypto/tls"
"fmt"
"path"
"strconv"
"strings"
"time"
"golang.org/x/net/context/ctxhttp"
"encoding/json"
"io/ioutil"
"net/http"
"net/url"
"encoding/json"
"gopkg.in/guregu/null.v3"
@@ -40,14 +38,7 @@ func init() {
plog = log.New("tsdb.opentsdb")
tsdb.RegisterExecutor("opentsdb", NewOpenTsdbExecutor)
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
HttpClient = &http.Client{
Timeout: time.Duration(15 * time.Second),
Transport: tr,
}
HttpClient = tsdb.GetDefaultClient()
}
func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult {
@@ -58,9 +49,9 @@ func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch()
tsdbQuery.End = queryContext.TimeRange.GetToAsMsEpoch()
for _ , query := range queries {
metric := e.buildMetric(query)
tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
for _, query := range queries {
metric := e.buildMetric(query)
tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
}
if setting.Env == setting.DEV {
@@ -104,7 +95,7 @@ func (e *OpenTsdbExecutor) createRequest(data OpenTsdbQuery) (*http.Request, err
if e.BasicAuth {
req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword)
}
return req, err
}
@@ -152,61 +143,61 @@ func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response
return queryResults, nil
}
func (e *OpenTsdbExecutor) buildMetric(query *tsdb.Query) (map[string]interface{}) {
func (e *OpenTsdbExecutor) buildMetric(query *tsdb.Query) map[string]interface{} {
metric := make(map[string]interface{})
// Setting metric and aggregator
metric["metric"] = query.Model.Get("metric").MustString()
metric["aggregator"] = query.Model.Get("aggregator").MustString()
// Setting metric and aggregator
metric["metric"] = query.Model.Get("metric").MustString()
metric["aggregator"] = query.Model.Get("aggregator").MustString()
// Setting downsampling options
disableDownsampling := query.Model.Get("disableDownsampling").MustBool()
if !disableDownsampling {
downsampleInterval := query.Model.Get("downsampleInterval").MustString()
if downsampleInterval == "" {
downsampleInterval = "1m" //default value for blank
}
downsample := downsampleInterval + "-" + query.Model.Get("downsampleAggregator").MustString()
if query.Model.Get("downsampleFillPolicy").MustString() != "none" {
metric["downsample"] = downsample + "-" + query.Model.Get("downsampleFillPolicy").MustString()
} else {
metric["downsample"] = downsample
}
// Setting downsampling options
disableDownsampling := query.Model.Get("disableDownsampling").MustBool()
if !disableDownsampling {
downsampleInterval := query.Model.Get("downsampleInterval").MustString()
if downsampleInterval == "" {
downsampleInterval = "1m" //default value for blank
}
downsample := downsampleInterval + "-" + query.Model.Get("downsampleAggregator").MustString()
if query.Model.Get("downsampleFillPolicy").MustString() != "none" {
metric["downsample"] = downsample + "-" + query.Model.Get("downsampleFillPolicy").MustString()
} else {
metric["downsample"] = downsample
}
}
// Setting rate options
if query.Model.Get("shouldComputeRate").MustBool() {
metric["rate"] = true
rateOptions := make(map[string]interface{})
rateOptions["counter"] = query.Model.Get("isCounter").MustBool()
// Setting rate options
if query.Model.Get("shouldComputeRate").MustBool() {
counterMax, counterMaxCheck := query.Model.CheckGet("counterMax")
if counterMaxCheck {
rateOptions["counterMax"] = counterMax.MustFloat64()
}
resetValue, resetValueCheck := query.Model.CheckGet("counterResetValue")
if resetValueCheck {
rateOptions["resetValue"] = resetValue.MustFloat64()
}
metric["rate"] = true
rateOptions := make(map[string]interface{})
rateOptions["counter"] = query.Model.Get("isCounter").MustBool()
metric["rateOptions"] = rateOptions
counterMax, counterMaxCheck := query.Model.CheckGet("counterMax")
if counterMaxCheck {
rateOptions["counterMax"] = counterMax.MustFloat64()
}
// Setting tags
tags, tagsCheck := query.Model.CheckGet("tags")
if tagsCheck && len(tags.MustMap()) > 0 {
metric["tags"] = tags.MustMap()
resetValue, resetValueCheck := query.Model.CheckGet("counterResetValue")
if resetValueCheck {
rateOptions["resetValue"] = resetValue.MustFloat64()
}
// Setting filters
filters, filtersCheck := query.Model.CheckGet("filters")
if filtersCheck && len(filters.MustArray()) > 0 {
metric["filters"] = filters.MustArray()
}
metric["rateOptions"] = rateOptions
}
// Setting tags
tags, tagsCheck := query.Model.CheckGet("tags")
if tagsCheck && len(tags.MustMap()) > 0 {
metric["tags"] = tags.MustMap()
}
// Setting filters
filters, filtersCheck := query.Model.CheckGet("filters")
if filtersCheck && len(filters.MustArray()) > 0 {
metric["filters"] = filters.MustArray()
}
return metric
return metric
}
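
To illustrate what buildMetric assembles, here is a hedged sketch of the map it could return for one query with downsampling and rate computation enabled. All field values are made up for illustration; only the key names and the downsample/rateOptions structure follow the code above.

```go
// Hypothetical example (not from this commit): the kind of map
// buildMetric returns for a query with downsampling and rate enabled.
// All values are illustrative only.
package example

func exampleMetric() map[string]interface{} {
	return map[string]interface{}{
		"metric":     "sys.cpu.user",
		"aggregator": "avg",
		// "1m" is the default interval when downsampleInterval is blank;
		// a fill policy other than "none" is appended after the aggregator.
		"downsample": "1m-avg-null",
		"rate":       true,
		"rateOptions": map[string]interface{}{
			"counter":    true,
			"counterMax": 100.0,
			"resetValue": 0.0,
		},
		"tags": map[string]interface{}{"host": "web01"},
	}
}
```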
@@ -3,7 +3,6 @@ package prometheus
import (
"context"
"fmt"
"net/http"
"regexp"
"strings"
"time"
@@ -25,8 +24,7 @@ func NewPrometheusExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
}
var (
plog log.Logger
HttpClient http.Client
plog log.Logger
)
func init() {
......