Commit 4840adff by Marcus Efraimsson

elasticsearch: refactor query handling and use new es simple client

Removes moment dependency.
Adds response parser tests (based on frontend tests).
Adds time series query tests (based on frontend tests).
Fixes various issues related to response parsing and building search request queries.
Adds support for extended stats metrics and geo hash grid aggregations.
parent e171ed89
......@@ -310,12 +310,6 @@
[[projects]]
branch = "master"
name = "github.com/leibowitz/moment"
packages = ["."]
revision = "8548108dcca204a1110b99e5fec966817499fe84"
[[projects]]
branch = "master"
name = "github.com/lib/pq"
packages = [
".",
......@@ -667,6 +661,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "4039f122ac5dd045948e003eb7a74c8864df1759b25147f1b2e2e8ad7a8414d6"
inputs-digest = "bd54a1a836599d90b36d4ac1af56d716ef9ca5be4865e217bddd49e3d32a1997"
solver-name = "gps-cdcl"
solver-version = 1
......@@ -199,7 +199,3 @@ ignored = [
[[constraint]]
name = "github.com/denisenkom/go-mssqldb"
revision = "270bc3860bb94dd3a3ffd047377d746c5e276726"
[[constraint]]
branch = "master"
name = "github.com/leibowitz/moment"
......@@ -3,17 +3,14 @@ package elasticsearch
import (
"context"
"fmt"
"net/http"
"net/url"
"path"
"strings"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)
// ElasticsearchExecutor represents a handler for handling elasticsearch datasource request
type ElasticsearchExecutor struct{}
var (
......@@ -21,43 +18,28 @@ var (
intervalCalculator tsdb.IntervalCalculator
)
// NewElasticsearchExecutor creates a new elasticsearch executor.
// The dsInfo parameter is unused here; the signature matches the factory
// shape required by tsdb.RegisterTsdbQueryEndpoint.
func NewElasticsearchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
	return &ElasticsearchExecutor{}, nil
}
func init() {
glog = log.New("tsdb.elasticsearch")
intervalCalculator = tsdb.NewIntervalCalculator(nil)
tsdb.RegisterTsdbQueryEndpoint("elasticsearch", NewElasticsearchExecutor)
intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Millisecond * 1})
}
// Query handles an elasticsearch datasource request
func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
if len(tsdbQuery.Queries) == 0 {
return nil, fmt.Errorf("query contains no queries")
}
return e.executeTimeSeriesQuery(ctx, dsInfo, tsdbQuery)
}
func (e *ElasticsearchExecutor) createRequest(dsInfo *models.DataSource, query string) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "_msearch")
req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(query))
client, err := es.NewClient(ctx, dsInfo, tsdbQuery.TimeRange)
if err != nil {
return nil, err
}
req.Header.Set("User-Agent", "Grafana")
req.Header.Set("Content-Type", "application/json")
if dsInfo.BasicAuth {
req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
}
if !dsInfo.BasicAuth && dsInfo.User != "" {
req.SetBasicAuth(dsInfo.User, dsInfo.Password)
}
glog.Debug("Elasticsearch request", "url", req.URL.String())
glog.Debug("Elasticsearch request", "body", query)
return req, nil
query := newTimeSeriesQuery(client, tsdbQuery, intervalCalculator)
return query.execute()
}
package elasticsearch
import (
"github.com/grafana/grafana/pkg/components/simplejson"
"time"
)
var avgWithMovingAvg = Query{
TimeField: "timestamp",
RawQuery: "(test:query) AND (name:sample)",
Interval: time.Millisecond,
BucketAggs: []*BucketAgg{{
Field: "timestamp",
ID: "2",
Type: "date_histogram",
Settings: simplejson.NewFromAny(map[string]interface{}{
"interval": "auto",
"min_doc_count": 0,
"trimEdges": 0,
}),
}},
Metrics: []*Metric{{
Field: "value",
ID: "1",
Type: "avg",
Settings: simplejson.NewFromAny(map[string]interface{}{
"script": map[string]string{
"inline": "_value * 2",
},
}),
}, {
Field: "1",
ID: "3",
Type: "moving_avg",
PipelineAggregate: "1",
Settings: simplejson.NewFromAny(map[string]interface{}{
"minimize": false,
"model": "simple",
"window": 5,
}),
}},
}
var wildcardsAndQuotes = Query{
TimeField: "timestamp",
RawQuery: "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"",
Interval: time.Millisecond,
BucketAggs: []*BucketAgg{{
Field: "timestamp",
ID: "2",
Type: "date_histogram",
Settings: simplejson.NewFromAny(map[string]interface{}{}),
}},
Metrics: []*Metric{{
Field: "value",
ID: "1",
Type: "sum",
Settings: simplejson.NewFromAny(map[string]interface{}{}),
}},
}
var termAggs = Query{
TimeField: "timestamp",
RawQuery: "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)",
Interval: time.Millisecond,
BucketAggs: []*BucketAgg{{
Field: "name_raw",
ID: "4",
Type: "terms",
Settings: simplejson.NewFromAny(map[string]interface{}{
"order": "desc",
"orderBy": "_term",
"size": "10",
}),
}, {
Field: "timestamp",
ID: "2",
Type: "date_histogram",
Settings: simplejson.NewFromAny(map[string]interface{}{
"interval": "auto",
"min_doc_count": 0,
"trimEdges": 0,
}),
}},
Metrics: []*Metric{{
Field: "value",
ID: "1",
Type: "sum",
Settings: simplejson.NewFromAny(map[string]interface{}{}),
}},
}
var filtersAggs = Query{
TimeField: "time",
RawQuery: "*",
Interval: time.Millisecond,
BucketAggs: []*BucketAgg{{
ID: "3",
Type: "filters",
Settings: simplejson.NewFromAny(map[string]interface{}{
"filters": []interface{}{
map[string]interface{}{"label": "hello", "query": "host:\"67.65.185.232\""},
},
}),
}, {
Field: "timestamp",
ID: "2",
Type: "date_histogram",
Settings: simplejson.NewFromAny(map[string]interface{}{
"interval": "auto",
"min_doc_count": 0,
"trimEdges": 0,
}),
}},
Metrics: []*Metric{{
Field: "bytesSent",
ID: "1",
Type: "count",
PipelineAggregate: "select metric",
Settings: simplejson.NewFromAny(map[string]interface{}{}),
}},
}
package elasticsearch
import (
"bytes"
"encoding/json"
"fmt"
"github.com/grafana/grafana/pkg/components/simplejson"
)
// Query represents the time series query model of the datasource
type Query struct {
TimeField string `json:"timeField"`
RawQuery string `json:"query"`
BucketAggs []*BucketAgg `json:"bucketAggs"`
Metrics []*MetricAgg `json:"metrics"`
Alias string `json:"alias"`
Interval string
RefID string
}
// BucketAgg represents a bucket aggregation of the time series query model of the datasource
type BucketAgg struct {
Field string `json:"field"`
ID string `json:"id"`
......@@ -14,120 +23,55 @@ type BucketAgg struct {
Type string `jsons:"type"`
}
type Metric struct {
// MetricAgg represents a metric aggregation of the time series query model of the datasource
type MetricAgg struct {
	Field             string           `json:"field"`       // field the aggregation operates on
	Hide              bool             `json:"hide"`        // whether the series is hidden in the panel
	ID                string           `json:"id"`          // unique id referenced by pipeline aggs
	PipelineAggregate string           `json:"pipelineAgg"` // id of the metric a pipeline agg operates on
	Settings          *simplejson.Json `json:"settings"`    // type-specific settings (script, window, ...)
	Meta              *simplejson.Json `json:"meta"`        // extra metadata, e.g. selected extended stats
	Type              string           `json:"type"`        // aggregation type (avg, sum, moving_avg, ...)
}
type QueryHeader struct {
SearchType string `json:"search_type"`
IgnoreUnavailable bool `json:"ignore_unavailable"`
Index interface{} `json:"index"`
MaxConcurrentShardRequests int `json:"max_concurrent_shard_requests,omitempty"`
}
// String returns the header serialized as JSON, suitable for use as the
// header line of an Elasticsearch _msearch payload.
func (q *QueryHeader) String() string {
	// Marshaling a flat struct of string/bool/int fields cannot fail,
	// so the error is deliberately ignored.
	r, _ := json.Marshal(q)
	return string(r)
}
type Request struct {
Query map[string]interface{} `json:"query"`
Aggs Aggs `json:"aggs"`
Size int `json:"size"`
}
type Aggs map[string]interface{}
type HistogramAgg struct {
Interval string `json:"interval,omitempty"`
Field string `json:"field"`
MinDocCount int `json:"min_doc_count"`
Missing string `json:"missing,omitempty"`
}
type DateHistogramAgg struct {
HistogramAgg
ExtendedBounds ExtendedBounds `json:"extended_bounds"`
Format string `json:"format"`
var metricAggType = map[string]string{
"count": "Count",
"avg": "Average",
"sum": "Sum",
"max": "Max",
"min": "Min",
"extended_stats": "Extended Stats",
"percentiles": "Percentiles",
"cardinality": "Unique Count",
"moving_avg": "Moving Average",
"derivative": "Derivative",
"raw_document": "Raw Document",
}
type FiltersAgg struct {
Filters map[string]interface{} `json:"filters"`
var extendedStats = map[string]string{
"avg": "Avg",
"min": "Min",
"max": "Max",
"sum": "Sum",
"count": "Count",
"std_deviation": "Std Dev",
"std_deviation_bounds_upper": "Std Dev Upper",
"std_deviation_bounds_lower": "Std Dev Lower",
}
type TermsAgg struct {
Field string `json:"field"`
Size int `json:"size"`
Order map[string]interface{} `json:"order"`
Missing string `json:"missing,omitempty"`
var pipelineAggType = map[string]string{
"moving_avg": "moving_avg",
"derivative": "derivative",
}
type TermsAggWrap struct {
Terms TermsAgg `json:"terms"`
Aggs Aggs `json:"aggs"`
}
type ExtendedBounds struct {
Min string `json:"min"`
Max string `json:"max"`
}
type RangeFilter struct {
Range map[string]RangeFilterSetting `json:"range"`
}
type RangeFilterSetting struct {
Gte string `json:"gte"`
Lte string `json:"lte"`
Format string `json:"format"`
}
// newRangeFilter wraps the given range settings in a RangeFilter keyed
// by the field name.
func newRangeFilter(field string, rangeFilterSetting RangeFilterSetting) *RangeFilter {
	settings := map[string]RangeFilterSetting{
		field: rangeFilterSetting,
	}
	return &RangeFilter{Range: settings}
}
type QueryStringFilter struct {
QueryString QueryStringFilterSetting `json:"query_string"`
}
type QueryStringFilterSetting struct {
AnalyzeWildcard bool `json:"analyze_wildcard"`
Query string `json:"query"`
}
// newQueryStringFilter builds a query_string filter for the given Lucene
// query text.
func newQueryStringFilter(analyzeWildcard bool, query string) *QueryStringFilter {
	setting := QueryStringFilterSetting{
		AnalyzeWildcard: analyzeWildcard,
		Query:           query,
	}
	return &QueryStringFilter{QueryString: setting}
}
type BoolQuery struct {
Filter []interface{} `json:"filter"`
}
type Responses struct {
Responses []Response `json:"responses"`
}
type Response struct {
Status int `json:"status"`
Err map[string]interface{} `json:"error"`
Aggregations map[string]interface{} `json:"aggregations"`
}
func (r *Response) getErrMsg() string {
var msg bytes.Buffer
errJson := simplejson.NewFromAny(r.Err)
errType, err := errJson.Get("type").String()
if err == nil {
msg.WriteString(fmt.Sprintf("type:%s", errType))
// isPipelineAgg reports whether the given metric type is a pipeline
// aggregation, i.e. one that operates on the output of another aggregation.
func isPipelineAgg(metricType string) bool {
	// comma-ok lookup replaces the verbose if/return true/return false form
	_, ok := pipelineAggType[metricType]
	return ok
}
reason, err := errJson.Get("type").String()
if err == nil {
msg.WriteString(fmt.Sprintf("reason:%s", reason))
}
return msg.String()
// describeMetric returns a human readable description of a metric,
// e.g. "Average value" for an avg aggregation on the "value" field.
func describeMetric(metricType, field string) string {
	return metricAggType[metricType] + " " + field
}
package elasticsearch
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/leibowitz/moment"
)
var rangeFilterSetting = RangeFilterSetting{Gte: "$timeFrom",
Lte: "$timeTo",
Format: "epoch_millis"}
type Query struct {
TimeField string `json:"timeField"`
RawQuery string `json:"query"`
BucketAggs []*BucketAgg `json:"bucketAggs"`
Metrics []*Metric `json:"metrics"`
Alias string `json:"alias"`
Interval time.Duration
}
// Build renders the query as a two-line _msearch payload (header line plus
// request body) and substitutes time range/interval placeholder variables.
// Raw document queries are rejected since alerting cannot evaluate them.
func (q *Query) Build(queryContext *tsdb.TsdbQuery, dsInfo *models.DataSource) (string, error) {
	var req Request
	req.Size = 0
	q.renderReqQuery(&req)

	// handle document query
	if q.isRawDocumentQuery() {
		return "", errors.New("alert not support Raw_Document")
	}

	err := q.parseAggs(&req)
	if err != nil {
		return "", err
	}

	reqBytes, err := json.Marshal(req)
	if err != nil {
		// previously this error was silently dropped
		return "", err
	}

	reqHeader := getRequestHeader(queryContext.TimeRange, dsInfo)
	payload := bytes.Buffer{}
	payload.WriteString(reqHeader.String() + "\n")
	payload.WriteString(string(reqBytes) + "\n")
	return q.renderTemplate(payload.String(), queryContext)
}
// isRawDocumentQuery reports whether this query requests raw documents:
// no bucket aggregations and a first metric of type raw_document.
func (q *Query) isRawDocumentQuery() bool {
	if len(q.BucketAggs) > 0 || len(q.Metrics) == 0 {
		return false
	}
	firstMetric := simplejson.NewFromAny(q.Metrics[0])
	return firstMetric.Get("type").MustString("") == "raw_document"
}
// renderReqQuery populates the request's query section with a bool filter
// combining the time range filter and the raw Lucene query string.
func (q *Query) renderReqQuery(req *Request) {
	filters := BoolQuery{
		Filter: []interface{}{
			newRangeFilter(q.TimeField, rangeFilterSetting),
			newQueryStringFilter(true, q.RawQuery),
		},
	}
	req.Query = map[string]interface{}{"bool": filters}
}
// parseAggs translates the query's bucket and metric aggregations into the
// nested "aggs" structure of an Elasticsearch search request body. Bucket
// aggregations are nested inside each other in declaration order; metric
// aggregations are attached to the innermost bucket aggregation. Returns an
// error for aggregation types not supported by alerting (geohash_grid).
func (q *Query) parseAggs(req *Request) error {
	aggs := make(Aggs)
	nestedAggs := aggs
	for _, agg := range q.BucketAggs {
		esAggs := make(Aggs)
		switch agg.Type {
		case "date_histogram":
			esAggs["date_histogram"] = q.getDateHistogramAgg(agg)
		case "histogram":
			esAggs["histogram"] = q.getHistogramAgg(agg)
		case "filters":
			esAggs["filters"] = q.getFilters(agg)
		case "terms":
			terms := q.getTerms(agg)
			esAggs["terms"] = terms.Terms
			esAggs["aggs"] = terms.Aggs
		case "geohash_grid":
			// geo hash grid results cannot be evaluated by alerting
			return errors.New("alert not support Geo_Hash_Grid")
		}
		if _, ok := nestedAggs["aggs"]; !ok {
			nestedAggs["aggs"] = make(Aggs)
		}
		if aggs, ok := (nestedAggs["aggs"]).(Aggs); ok {
			aggs[agg.ID] = esAggs
		}
		// descend one level: the next bucket agg nests inside this one
		nestedAggs = esAggs
	}
	// metrics are attached under the innermost bucket aggregation
	nestedAggs["aggs"] = make(Aggs)
	for _, metric := range q.Metrics {
		subAgg := make(Aggs)
		// no aggregation is emitted for count metrics
		if metric.Type == "count" {
			continue
		}
		settings := metric.Settings.MustMap(make(map[string]interface{}))
		if isPipelineAgg(metric.Type) {
			// pipeline aggs reference another metric by id via buckets_path;
			// skip when the referenced id is not numeric (e.g. the
			// "select metric" placeholder)
			if _, err := strconv.Atoi(metric.PipelineAggregate); err == nil {
				settings["buckets_path"] = metric.PipelineAggregate
			} else {
				continue
			}
		} else {
			settings["field"] = metric.Field
		}
		subAgg[metric.Type] = settings
		nestedAggs["aggs"].(Aggs)[metric.ID] = subAgg
	}
	req.Aggs = aggs["aggs"].(Aggs)
	return nil
}
// getDateHistogramAgg builds a date_histogram aggregation over the query's
// time field, with bounds expressed as time range placeholders.
func (q *Query) getDateHistogramAgg(target *BucketAgg) *DateHistogramAgg {
	agg := &DateHistogramAgg{
		ExtendedBounds: ExtendedBounds{Min: "$timeFrom", Max: "$timeTo"},
		Format:         "epoch_millis",
	}
	agg.Field = q.TimeField
	agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0)

	if interval, err := target.Settings.Get("interval").String(); err == nil {
		agg.Interval = interval
	}
	// "auto" defers interval selection to the interval calculator
	if agg.Interval == "auto" {
		agg.Interval = "$__interval"
	}
	if missing, err := target.Settings.Get("missing").String(); err == nil {
		agg.Missing = missing
	}
	return agg
}
// getHistogramAgg builds a numeric histogram aggregation from the bucket
// aggregation settings.
func (q *Query) getHistogramAgg(target *BucketAgg) *HistogramAgg {
	agg := &HistogramAgg{
		MinDocCount: target.Settings.Get("min_doc_count").MustInt(0),
	}
	if interval, err := target.Settings.Get("interval").String(); err == nil {
		agg.Interval = interval
	}
	if target.Field != "" {
		agg.Field = target.Field
	}
	if missing, err := target.Settings.Get("missing").String(); err == nil {
		agg.Missing = missing
	}
	return agg
}
// getFilters builds a filters aggregation with one query_string filter per
// configured filter; the label falls back to the query text when empty.
func (q *Query) getFilters(target *BucketAgg) *FiltersAgg {
	agg := &FiltersAgg{Filters: map[string]interface{}{}}
	for _, raw := range target.Settings.Get("filters").MustArray() {
		spec := simplejson.NewFromAny(raw)
		query := spec.Get("query").MustString("")
		label := spec.Get("label").MustString("")
		if label == "" {
			label = query
		}
		agg.Filters[label] = newQueryStringFilter(true, query)
	}
	return agg
}
// getTerms builds a terms bucket aggregation. The size defaults to 500 when
// the setting is not a valid number. When an ordering is configured, a
// matching metric sub aggregation may be added so Elasticsearch can order
// buckets by that metric's value.
func (q *Query) getTerms(target *BucketAgg) *TermsAggWrap {
	agg := &TermsAggWrap{Aggs: make(Aggs)}
	agg.Terms.Field = target.Field
	if len(target.Settings.MustMap()) == 0 {
		return agg
	}
	sizeStr := target.Settings.Get("size").MustString("")
	size, err := strconv.Atoi(sizeStr)
	if err != nil {
		size = 500
	}
	agg.Terms.Size = size
	orderBy, err := target.Settings.Get("orderBy").String()
	if err == nil {
		agg.Terms.Order = make(map[string]interface{})
		agg.Terms.Order[orderBy] = target.Settings.Get("order").MustString("")
		// NOTE(review): this branch only runs when orderBy is NOT numeric
		// ("_term"/"_count"), yet it then searches for a metric whose id
		// equals orderBy — and metric ids appear to be numeric in this
		// codebase, so the lookup can seemingly never match. The condition
		// looks inverted; confirm against the frontend query builder.
		if _, err := strconv.Atoi(orderBy); err != nil {
			for _, metricI := range q.Metrics {
				// NOTE(review): metricI is a *Metric struct, not a map;
				// simplejson.Get on it presumably yields empty strings —
				// verify simplejson's behavior for struct values.
				metric := simplejson.NewFromAny(metricI)
				metricId := metric.Get("id").MustString()
				if metricId == orderBy {
					subAggs := make(Aggs)
					metricField := metric.Get("field").MustString()
					metricType := metric.Get("type").MustString()
					subAggs[metricType] = map[string]string{"field": metricField}
					agg.Aggs = make(Aggs)
					agg.Aggs[metricId] = subAggs
					break
				}
			}
		}
	}
	missing, err := target.Settings.Get("missing").String()
	if err == nil {
		agg.Terms.Missing = missing
	}
	return agg
}
// renderTemplate substitutes the time range and interval placeholder
// variables in the rendered payload with concrete values.
func (q *Query) renderTemplate(payload string, queryContext *tsdb.TsdbQuery) (string, error) {
	timeRange := queryContext.TimeRange
	interval := intervalCalculator.Calculate(timeRange, q.Interval)
	payload = strings.Replace(payload, "$timeFrom", fmt.Sprintf("%d", timeRange.GetFromAsMsEpoch()), -1)
	payload = strings.Replace(payload, "$timeTo", fmt.Sprintf("%d", timeRange.GetToAsMsEpoch()), -1)
	payload = strings.Replace(payload, "$interval", interval.Text, -1)
	// $__interval_ms must be replaced before $__interval, since the latter
	// is a prefix of the former
	payload = strings.Replace(payload, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1)
	payload = strings.Replace(payload, "$__interval", interval.Text, -1)
	return payload, nil
}
// getRequestHeader builds the _msearch header line for a query, selecting
// the search type and index list from the datasource settings.
func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *QueryHeader {
	var header QueryHeader
	esVersion := dsInfo.JsonData.Get("esVersion").MustInt()
	searchType := "query_then_fetch"
	if esVersion < 5 {
		// older ES versions use the (since removed) "count" search type
		searchType = "count"
	}
	header.SearchType = searchType
	header.IgnoreUnavailable = true
	header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(), timeRange)
	// NOTE(review): the comparison with 56 alongside "< 5" above suggests
	// esVersion encodes major*10+minor (56 == 5.6) — confirm against the
	// datasource settings code before relying on it.
	if esVersion >= 56 {
		header.MaxConcurrentShardRequests = dsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt()
	}
	return &header
}
// getIndexList expands a date-patterned index name (e.g. "[logstash-]YYYY.MM.DD")
// into a comma separated list of concrete index names covering the time range.
// Patterns without an interval, or without a "]" separator, are returned as-is.
func getIndexList(pattern string, interval string, timeRange *tsdb.TimeRange) string {
	if interval == "" {
		return pattern
	}
	var indexes []string
	// split "[base]dateformat" into the base name and the moment.js date format
	indexParts := strings.Split(strings.TrimLeft(pattern, "["), "]")
	indexBase := indexParts[0]
	if len(indexParts) <= 1 {
		return pattern
	}
	indexDateFormat := indexParts[1]
	start := moment.NewMoment(timeRange.MustGetFrom())
	end := moment.NewMoment(timeRange.MustGetTo())
	indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat)))
	// step forward one interval at a time until the range is covered.
	// NOTE(review): an interval value other than the five cases below would
	// never advance start and loop forever — presumably callers only pass
	// these values; confirm.
	for start.IsBefore(*end) {
		switch interval {
		case "Hourly":
			start = start.AddHours(1)
		case "Daily":
			start = start.AddDay()
		case "Weekly":
			start = start.AddWeeks(1)
		case "Monthly":
			start = start.AddMonths(1)
		case "Yearly":
			start = start.AddYears(1)
		}
		indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat)))
	}
	return strings.Join(indexes, ",")
}
package elasticsearch
// metricAggType maps metric aggregation types to their human readable names.
var metricAggType = map[string]string{
	"count":          "Count",
	"avg":            "Average",
	"sum":            "Sum",
	"max":            "Max",
	"min":            "Min",
	"extended_stats": "Extended Stats",
	"percentiles":    "Percentiles",
	"cardinality":    "Unique Count",
	"moving_avg":     "Moving Average",
	"derivative":     "Derivative",
	"raw_document":   "Raw Document",
}

// extendedStats maps extended stats properties to their human readable names.
var extendedStats = map[string]string{
	"avg":                        "Avg",
	"min":                        "Min",
	"max":                        "Max",
	"sum":                        "Sum",
	"count":                      "Count",
	"std_deviation":              "Std Dev",
	"std_deviation_bounds_upper": "Std Dev Upper",
	"std_deviation_bounds_lower": "Std Dev Lower",
}

// pipelineOptions lists the metric types that are pipeline aggregations.
var pipelineOptions = map[string]string{
	"moving_avg": "moving_avg",
	"derivative": "derivative",
}

// isPipelineAgg reports whether the given metric type is a pipeline
// aggregation, i.e. one that operates on the output of another aggregation.
func isPipelineAgg(metricType string) bool {
	// comma-ok lookup replaces the verbose if/return true/return false form
	_, ok := pipelineOptions[metricType]
	return ok
}

// describeMetric returns a human readable description of a metric,
// e.g. "Average value" for an avg aggregation on the "value" field.
func describeMetric(metricType, field string) string {
	text := metricAggType[metricType]
	return text + " " + field
}
package elasticsearch
import (
"encoding/json"
"fmt"
"reflect"
"strconv"
"strings"
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
// testElasticSearchResponse builds the search request for the given query
// against a fixed ES v2 test datasource and asserts that the request body
// (the second line of the _msearch payload) is structurally equal to the
// expected JSON, after substituting the <FROM_TIMESTAMP>/<TO_TIMESTAMP>
// placeholders with the concrete time range.
func testElasticSearchResponse(query Query, expectedElasticSearchRequestJSON string) {
	var queryExpectedJSONInterface, queryJSONInterface interface{}
	jsonDate, _ := simplejson.NewJson([]byte(`{"esVersion":2}`))
	dsInfo := &models.DataSource{
		Database: "grafana-test",
		JsonData: jsonDate,
	}
	testTimeRange := tsdb.NewTimeRange("5m", "now")
	s, err := query.Build(&tsdb.TsdbQuery{TimeRange: testTimeRange}, dsInfo)
	So(err, ShouldBeNil)
	// the payload is "<header>\n<body>\n"; the request body is the second line
	queryJSON := strings.Split(s, "\n")[1]
	err = json.Unmarshal([]byte(queryJSON), &queryJSONInterface)
	So(err, ShouldBeNil)
	expectedElasticSearchRequestJSON = strings.Replace(
		expectedElasticSearchRequestJSON,
		"<FROM_TIMESTAMP>",
		strconv.FormatInt(testTimeRange.GetFromAsMsEpoch(), 10),
		-1,
	)
	expectedElasticSearchRequestJSON = strings.Replace(
		expectedElasticSearchRequestJSON,
		"<TO_TIMESTAMP>",
		strconv.FormatInt(testTimeRange.GetToAsMsEpoch(), 10),
		-1,
	)
	err = json.Unmarshal([]byte(expectedElasticSearchRequestJSON), &queryExpectedJSONInterface)
	So(err, ShouldBeNil)
	// compare parsed structures so formatting and key order don't matter
	result := reflect.DeepEqual(queryExpectedJSONInterface, queryJSONInterface)
	if !result {
		fmt.Printf("ERROR: %s \n != \n %s", expectedElasticSearchRequestJSON, queryJSON)
	}
	So(result, ShouldBeTrue)
}
func TestElasticSearchQueryBuilder(t *testing.T) {
Convey("Elasticsearch QueryBuilder query testing", t, func() {
Convey("Build test average metric with moving average", func() {
var expectedElasticsearchQueryJSON = `
{
"size": 0,
"query": {
"bool": {
"filter": [
{
"range": {
"timestamp": {
"gte": "<FROM_TIMESTAMP>",
"lte": "<TO_TIMESTAMP>",
"format": "epoch_millis"
}
}
},
{
"query_string": {
"analyze_wildcard": true,
"query": "(test:query) AND (name:sample)"
}
}
]
}
},
"aggs": {
"2": {
"date_histogram": {
"interval": "200ms",
"field": "timestamp",
"min_doc_count": 0,
"extended_bounds": {
"min": "<FROM_TIMESTAMP>",
"max": "<TO_TIMESTAMP>"
},
"format": "epoch_millis"
},
"aggs": {
"1": {
"avg": {
"field": "value",
"script": {
"inline": "_value * 2"
}
}
},
"3": {
"moving_avg": {
"buckets_path": "1",
"window": 5,
"model": "simple",
"minimize": false
}
}
}
}
}
}`
testElasticSearchResponse(avgWithMovingAvg, expectedElasticsearchQueryJSON)
})
Convey("Test Wildcards and Quotes", func() {
expectedElasticsearchQueryJSON := `
{
"size": 0,
"query": {
"bool": {
"filter": [
{
"range": {
"timestamp": {
"gte": "<FROM_TIMESTAMP>",
"lte": "<TO_TIMESTAMP>",
"format": "epoch_millis"
}
}
},
{
"query_string": {
"analyze_wildcard": true,
"query": "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\""
}
}
]
}
},
"aggs": {
"2": {
"aggs": {
"1": {
"sum": {
"field": "value"
}
}
},
"date_histogram": {
"extended_bounds": {
"max": "<TO_TIMESTAMP>",
"min": "<FROM_TIMESTAMP>"
},
"field": "timestamp",
"format": "epoch_millis",
"min_doc_count": 0
}
}
}
}`
testElasticSearchResponse(wildcardsAndQuotes, expectedElasticsearchQueryJSON)
})
Convey("Test Term Aggregates", func() {
expectedElasticsearchQueryJSON := `
{
"size": 0,
"query": {
"bool": {
"filter": [
{
"range": {
"timestamp": {
"gte": "<FROM_TIMESTAMP>",
"lte": "<TO_TIMESTAMP>",
"format": "epoch_millis"
}
}
},
{
"query_string": {
"analyze_wildcard": true,
"query": "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)"
}
}
]
}
},
"aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"<TO_TIMESTAMP>","min":"<FROM_TIMESTAMP>"},"field":"timestamp","format":"epoch_millis","interval":"200ms","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}}
}`
testElasticSearchResponse(termAggs, expectedElasticsearchQueryJSON)
})
Convey("Test Filters Aggregates", func() {
expectedElasticsearchQueryJSON := `{
"size": 0,
"query": {
"bool": {
"filter": [
{
"range": {
"time": {
"gte": "<FROM_TIMESTAMP>",
"lte": "<TO_TIMESTAMP>",
"format": "epoch_millis"
}
}
},
{
"query_string": {
"analyze_wildcard": true,
"query": "*"
}
}
]
}
},
"aggs": {
"3": {
"filters": {
"filters": {
"hello": {
"query_string": {
"query": "host:\"67.65.185.232\"",
"analyze_wildcard": true
}
}
}
},
"aggs": {
"2": {
"date_histogram": {
"interval": "200ms",
"field": "time",
"min_doc_count": 0,
"extended_bounds": {
"min": "<FROM_TIMESTAMP>",
"max": "<TO_TIMESTAMP>"
},
"format": "epoch_millis"
},
"aggs": {}
}
}
}
}
}
`
testElasticSearchResponse(filtersAggs, expectedElasticsearchQueryJSON)
})
})
}
// makeTime returns the millisecond-epoch string for the given number of
// hours after unixtime 1500000000 (2017-07-14T02:40:00+00:00).
func makeTime(hour int) string {
	const baseUnixSeconds = 1500000000
	seconds := baseUnixSeconds + hour*3600
	return strconv.Itoa(seconds * 1000)
}
// getIndexListByTime resolves the index list for a time range spanning the
// given number of hours from the fixed test epoch.
func getIndexListByTime(pattern string, interval string, hour int) string {
	timeRange := &tsdb.TimeRange{From: makeTime(0), To: makeTime(hour)}
	return getIndexList(pattern, interval, timeRange)
}
// TestElasticsearchGetIndexList covers expansion of date-patterned index
// names for every supported interval, plus the pattern-as-is fallback.
func TestElasticsearchGetIndexList(t *testing.T) {
	Convey("Test Elasticsearch getIndex ", t, func() {
		Convey("Parse Interval Formats", func() {
			// a 48h range starting 2017-07-14 spans three daily indices
			So(getIndexListByTime("[logstash-]YYYY.MM.DD", "Daily", 48),
				ShouldEqual, "logstash-2017.07.14,logstash-2017.07.15,logstash-2017.07.16")
			// 3 hours → 4 hourly indices (both endpoints included)
			So(len(strings.Split(getIndexListByTime("[logstash-]YYYY.MM.DD.HH", "Hourly", 3), ",")),
				ShouldEqual, 4)
			So(getIndexListByTime("[logstash-]YYYY.W", "Weekly", 100),
				ShouldEqual, "logstash-2017.28,logstash-2017.29")
			So(getIndexListByTime("[logstash-]YYYY.MM", "Monthly", 700),
				ShouldEqual, "logstash-2017.07,logstash-2017.08")
			So(getIndexListByTime("[logstash-]YYYY", "Yearly", 10000),
				ShouldEqual, "logstash-2017,logstash-2018,logstash-2019")
		})
		Convey("No Interval", func() {
			// without an interval the pattern is returned unchanged
			index := getIndexListByTime("logstash-test", "", 1)
			So(index, ShouldEqual, "logstash-test")
		})
	})
}
package moment
import (
"fmt"
"math"
"time"
)
// @todo In months/years requires the old and new to calculate correctly, right?
// @todo decide how to handle rounding (i.e. always floor?)
type Diff struct {
duration time.Duration
}
func (d *Diff) InSeconds() int {
return int(d.duration.Seconds())
}
func (d *Diff) InMinutes() int {
return int(d.duration.Minutes())
}
func (d *Diff) InHours() int {
return int(d.duration.Hours())
}
func (d *Diff) InDays() int {
return int(math.Floor(float64(d.InSeconds()) / 86400))
}
// This depends on where the weeks fall?
func (d *Diff) InWeeks() int {
return int(math.Floor(float64(d.InDays() / 7)))
}
func (d *Diff) InMonths() int {
return 0
}
func (d *Diff) InYears() int {
return 0
}
// Humanize renders the duration as approximate relative text, mirroring
// http://momentjs.com/docs/#/durations/humanize/
func (d *Diff) Humanize() string {
	switch seconds := d.InSeconds(); {
	case seconds <= 45:
		return fmt.Sprintf("%d seconds ago", seconds)
	case seconds <= 90:
		return "a minute ago"
	}

	switch minutes := d.InMinutes(); {
	case minutes <= 45:
		return fmt.Sprintf("%d minutes ago", minutes)
	case minutes <= 90:
		return "an hour ago"
	}

	switch hours := d.InHours(); {
	case hours <= 22:
		return fmt.Sprintf("%d hours ago", hours)
	case hours <= 36:
		return "a day ago"
	}

	return "diff is in days"
}
// In Months
// In years
package moment
import (
"regexp"
"strings"
)
type MomentParser struct{}
var (
date_pattern = regexp.MustCompile("(LT|LL?L?L?|l{1,4}|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|Q)")
)
/*
+ <stdOrdinal> S (makes any number before it ordinal)
+ stdDayOfYear 1,2,365
+ stdDayOfYearZero 001, 002, 365
+ stdDayOfWeek w 0, 1, 2 numeric day of the week (0 = sunday)
+ stdDayOfWeekISO N 1 = Monday
+ stdWeekOfYear W Iso week number of year
+ stdUnix U
+ stdQuarter
*/
// Thanks to https://github.com/fightbulc/moment.php for replacement keys and regex
var moment_replacements = map[string]string{
"M": "1", // stdNumMonth 1 2 ... 11 12
"Mo": "1<stdOrdinal>", // stdNumMonth 1st 2nd ... 11th 12th
"MM": "01", // stdZeroMonth 01 02 ... 11 12
"MMM": "Jan", // stdMonth Jan Feb ... Nov Dec
"MMMM": "January", // stdLongMonth January February ... November December
"D": "2", // stdDay 1 2 ... 30 30
"Do": "2<stdOrdinal>", // stdDay 1st 2nd ... 30th 31st @todo support st nd th etch
"DD": "02", // stdZeroDay 01 02 ... 30 31
"DDD": "<stdDayOfYear>", // Day of the year 1 2 ... 364 365
"DDDo": "<stdDayOfYear><stdOrdinal>", // Day of the year 1st 2nd ... 364th 365th
"DDDD": "<stdDayOfYearZero>", // Day of the year 001 002 ... 364 365 @todo****
"d": "<stdDayOfWeek>", // Numeric representation of day of the week 0 1 ... 5 6
"do": "<stdDayOfWeek><stdOrdinal>", // 0th 1st ... 5th 6th
"dd": "Mon", // ***Su Mo ... Fr Sa @todo
"ddd": "Mon", // Sun Mon ... Fri Sat
"dddd": "Monday", // stdLongWeekDay Sunday Monday ... Friday Saturday
"e": "<stdDayOfWeek>", // Numeric representation of day of the week 0 1 ... 5 6 @todo
"E": "<stdDayOfWeekISO>", // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
"w": "<stdWeekOfYear>", // 1 2 ... 52 53
"wo": "<stdWeekOfYear><stdOrdinal>", // 1st 2nd ... 52nd 53rd
"ww": "<stdWeekOfYear>", // ***01 02 ... 52 53 @todo
"W": "<stdWeekOfYear>", // 1 2 ... 52 53
"Wo": "<stdWeekOfYear><stdOrdinal>", // 1st 2nd ... 52nd 53rd
"WW": "<stdWeekOfYear>", // ***01 02 ... 52 53 @todo
"YY": "06", // stdYear 70 71 ... 29 30
"YYYY": "2006", // stdLongYear 1970 1971 ... 2029 2030
// "gg" : "o", // ISO-8601 year number 70 71 ... 29 30 @todo
// "gggg" : "o", // ***1970 1971 ... 2029 2030 @todo
// "GG" : "o", //70 71 ... 29 30 @todo
// "GGGG" : "o", // ***1970 1971 ... 2029 2030 @todo
"Q": "<stdQuarter>",
"A": "PM", // stdPM AM PM
"a": "pm", // stdpm am pm
"H": "<stdHourNoZero>", // stdHour 0 1 ... 22 23
"HH": "15", // 00 01 ... 22 23
"h": "3", // stdHour12 1 2 ... 11 12
"hh": "03", // stdZeroHour12 01 02 ... 11 12
"m": "4", // stdZeroMinute 0 1 ... 58 59
"mm": "04", // stdZeroMinute 00 01 ... 58 59
"s": "5", // stdSecond 0 1 ... 58 59
"ss": "05", // stdZeroSecond ***00 01 ... 58 59
// "S" : "", //0 1 ... 8 9
// "SS" : "", //0 1 ... 98 99
// "SSS" : "", //0 1 ... 998 999
"z": "MST", //EST CST ... MST PST
"zz": "MST", //EST CST ... MST PST
"Z": "Z07:00", // stdNumColonTZ -07:00 -06:00 ... +06:00 +07:00
"ZZ": "-0700", // stdNumTZ -0700 -0600 ... +0600 +0700
"X": "<stdUnix>", // Seconds since unix epoch 1360013296
"LT": "3:04 PM", // 8:30 PM
"L": "01/02/2006", //09/04/1986
"l": "1/2/2006", //9/4/1986
"LL": "January 2<stdOrdinal> 2006", //September 4th 1986 the php s flag isn't supported
"ll": "Jan 2 2006", //Sep 4 1986
"LLL": "January 2<stdOrdinal> 2006 3:04 PM", //September 4th 1986 8:30 PM @todo the php s flag isn't supported
"lll": "Jan 2 2006 3:04 PM", //Sep 4 1986 8:30 PM
"LLLL": "Monday, January 2<stdOrdinal> 2006 3:04 PM", //Thursday, September 4th 1986 8:30 PM the php s flag isn't supported
"llll": "Mon, Jan 2 2006 3:04 PM", //Thu, Sep 4 1986 8:30 PM
}
// Convert translates a moment.js format string into a Go reference-time
// layout using the moment_replacements table. Tokens without a mapping are
// left untouched; a layout with no recognized tokens is returned as-is.
func (p *MomentParser) Convert(layout string) string {
	var match [][]string
	if match = date_pattern.FindAllStringSubmatch(layout, -1); match == nil {
		return layout
	}
	for i := range match {
		// NOTE(review): each matched token is replaced at its first
		// occurrence only (count 1), and a replacement's output could itself
		// contain text matching a later token — presumably fine for common
		// layouts, but confirm for pathological inputs.
		if replace, ok := moment_replacements[match[i][0]]; ok {
			layout = strings.Replace(layout, match[i][0], replace, 1)
		}
	}
	return layout
}
package moment
import (
"fmt"
"strings"
"time"
)
var (
days = []time.Weekday{
time.Sunday,
time.Monday,
time.Tuesday,
time.Wednesday,
time.Thursday,
time.Friday,
time.Saturday,
}
)
func ParseWeekDay(day string) (time.Weekday, error) {
day = strings.ToLower(day)
for _, d := range days {
if day == strings.ToLower(d.String()) {
return d, nil
}
}
return -1, fmt.Errorf("Unable to parse %s as week day", day)
}
package moment
import (
"regexp"
"strings"
)
type StrftimeParser struct{}
var (
replacements_pattern = regexp.MustCompile("%[mbhBedjwuaAVgyGYpPkHlIMSZzsTrRTDFXx]")
)
// Not implemented
// U
// C
var strftime_replacements = map[string]string{
"%m": "01", // stdZeroMonth 01 02 ... 11 12
"%b": "Jan", // stdMonth Jan Feb ... Nov Dec
"%h": "Jan",
"%B": "January", // stdLongMonth January February ... November December
"%e": "2", // stdDay 1 2 ... 30 30
"%d": "02", // stdZeroDay 01 02 ... 30 31
"%j": "<stdDayOfYear>", // Day of the year ***001 002 ... 364 365 @todo****
"%w": "<stdDayOfWeek>", // Numeric representation of day of the week 0 1 ... 5 6
"%u": "<stdDayOfWeekISO>", // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
"%a": "Mon", // Sun Mon ... Fri Sat
"%A": "Monday", // stdLongWeekDay Sunday Monday ... Friday Saturday
"%V": "<stdWeekOfYear>", // ***01 02 ... 52 53 @todo begin with zeros
"%g": "06", // stdYear 70 71 ... 29 30
"%y": "06",
"%G": "2006", // stdLongYear 1970 1971 ... 2029 2030
"%Y": "2006",
"%p": "PM", // stdPM AM PM
"%P": "pm", // stdpm am pm
"%k": "15", // stdHour 0 1 ... 22 23
"%H": "15", // 00 01 ... 22 23
"%l": "3", // stdHour12 1 2 ... 11 12
"%I": "03", // stdZeroHour12 01 02 ... 11 12
"%M": "04", // stdZeroMinute 00 01 ... 58 59
"%S": "05", // stdZeroSecond ***00 01 ... 58 59
"%Z": "MST", //EST CST ... MST PST
"%z": "-0700", // stdNumTZ -0700 -0600 ... +0600 +0700
"%s": "<stdUnix>", // Seconds since unix epoch 1360013296
"%r": "03:04:05 PM",
"%R": "15:04",
"%T": "15:04:05",
"%D": "01/02/06",
"%F": "2006-01-02",
"%X": "15:04:05",
"%x": "01/02/06",
}
// Convert translates a strftime format string into a Go reference-time
// layout using the strftime_replacements table. Tokens without a mapping are
// left untouched; a layout with no recognized tokens is returned as-is.
func (p *StrftimeParser) Convert(layout string) string {
	var match [][]string
	if match = replacements_pattern.FindAllStringSubmatch(layout, -1); match == nil {
		return layout
	}
	for i := range match {
		// each matched token is replaced at its first occurrence only
		if replace, ok := strftime_replacements[match[i][0]]; ok {
			layout = strings.Replace(layout, match[i][0], replace, 1)
		}
	}
	return layout
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment