[prometheusremotewriteexporter] Translate resource to the target info metric #8493

Merged
7 commits merged on Apr 6, 2022

1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@
- `filestorageextension`: Change bbolt DB settings for better performance (#9004)
- `jaegerremotesamplingextension`: Add local and remote sampling stores (#8818)
- `attributesprocessor`: Add support to filter on log body (#8996)
- `prometheusremotewriteexporter`: Translate resource attributes to the target info metric (#8493)

### 🛑 Breaking changes 🛑

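To make the changelog entry above concrete: for every resource that carries at least one attribute, the exporter now also emits a single `info`-type series whose labels are the sanitized resource attributes not already mapped to `job` and `instance`. As a purely illustrative example (the attribute values are invented, the `job` value assumes the usual `service.namespace`/`service.name` convention, and `host.name` is assumed to sanitize to `host_name`), a resource with `service.namespace=shop`, `service.name=checkout`, `service.instance.id=pod-1`, and `host.name=node-3` would produce roughly `target{job="shop/checkout", instance="pod-1", host_name="node-3"} 1`. The series name comes from `targetMetricName` in `helper.go` below and is prefixed with the configured namespace when one is set.
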
2 changes: 1 addition & 1 deletion exporter/prometheusremotewriteexporter/exporter_test.go
@@ -441,7 +441,7 @@ func Test_PushMetrics(t *testing.T) {
"intSum_case",
&intSumBatch,
checkFunc,
2,
3,
http.StatusAccepted,
false,
false,
3 changes: 3 additions & 0 deletions pkg/translator/prometheusremotewrite/go.mod
@@ -15,8 +15,11 @@ require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/kr/pretty v0.3.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.48.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
go.uber.org/atomic v1.9.0 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)

replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal => ../../../internal/coreinternal
2 changes: 2 additions & 0 deletions pkg/translator/prometheusremotewrite/go.sum

Some generated files are not rendered by default.

133 changes: 101 additions & 32 deletions pkg/translator/prometheusremotewrite/helper.go
@@ -48,8 +48,10 @@ const (
maxExemplarRunes = 128
// Trace and Span id keys are defined as part of the spec:
// https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification%2Fmetrics%2Fdatamodel.md#exemplars-2
traceIDKey = "trace_id"
spanIDKey = "span_id"
traceIDKey = "trace_id"
spanIDKey = "span_id"
infoType = "info"
targetMetricName = "target"
)

type bucketBoundsData struct {
@@ -75,13 +77,13 @@ func (a ByLabelName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
// creates a new TimeSeries in the map if not found and returns the time series signature.
// tsMap will be unmodified if either labels or sample is nil, but can still be modified if the exemplar is nil.
func addSample(tsMap map[string]*prompb.TimeSeries, sample *prompb.Sample, labels []prompb.Label,
metric pdata.Metric) string {
datatype string) string {

if sample == nil || labels == nil || tsMap == nil {
return ""
}

sig := timeSeriesSignature(metric, &labels)
sig := timeSeriesSignature(datatype, &labels)
ts, ok := tsMap[sig]

if ok {
@@ -137,9 +139,9 @@ func addExemplar(tsMap map[string]*prompb.TimeSeries, bucketBounds []bucketBound
// TYPE-label1-value1- ... -labelN-valueN
// the label slice should not contain duplicate label names; this method sorts the slice by label name before creating
// the signature.
func timeSeriesSignature(metric pdata.Metric, labels *[]prompb.Label) string {
func timeSeriesSignature(datatype string, labels *[]prompb.Label) string {
b := strings.Builder{}
b.WriteString(metric.DataType().String())
b.WriteString(datatype)

sort.Sort(ByLabelName(*labels))

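A small, hypothetical illustration of the signature layout described in the doc comment above (the separator follows the documented "TYPE-label1-value1-…" format; the label values are invented):

```go
labels := []prompb.Label{
	{Name: "method", Value: "GET"},
	{Name: "code", Value: "200"},
}
// timeSeriesSignature sorts the slice by label name in place, so the
// resulting signature would read roughly "Gauge-code-200-method-GET".
sig := timeSeriesSignature(pdata.MetricDataTypeGauge.String(), &labels)
_ = sig
```
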
@@ -160,6 +162,23 @@ func createAttributes(resource pdata.Resource, attributes pdata.Map, externalLab
// map ensures no duplicate label name
l := map[string]prompb.Label{}

// Ensure attributes are sorted by key for consistent merging of keys which
// collide when sanitized.
attributes.Sort()
attributes.Range(func(key string, value pdata.Value) bool {
if existingLabel, alreadyExists := l[sanitize(key)]; alreadyExists {
existingLabel.Value = existingLabel.Value + ";" + value.AsString()
l[sanitize(key)] = existingLabel
} else {
l[sanitize(key)] = prompb.Label{
Name: sanitize(key),
Value: value.AsString(),
}
}

return true
})

// Map service.name + service.namespace to job
if serviceName, ok := resource.Attributes().Get(conventions.AttributeServiceName); ok {
val := serviceName.AsString()
@@ -178,24 +197,6 @@
Value: instance.AsString(),
}
}

// Ensure attributes are sorted by key for consistent merging of keys which
// collide when sanitized.
attributes.Sort()
attributes.Range(func(key string, value pdata.Value) bool {
if existingLabel, alreadyExists := l[sanitize(key)]; alreadyExists {
existingLabel.Value = existingLabel.Value + ";" + value.AsString()
l[sanitize(key)] = existingLabel
} else {
l[sanitize(key)] = prompb.Label{
Name: sanitize(key),
Value: value.AsString(),
}
}

return true
})

for key, value := range externalLabels {
// External labels have already been sanitized
if _, alreadyExists := l[key]; alreadyExists {
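Relocating this attribute loop ahead of the `job`/`instance` assignment means the resource-derived `job` and `instance` labels are written afterwards and therefore take precedence over any data point attribute that sanitizes to the same name. Below is a minimal sketch of the collision handling, assuming `sanitize` replaces characters such as `.` and `/` with `_` (its implementation is not part of this excerpt):

```go
attrs := pdata.NewMap()
attrs.InsertString("foo.bar", "a")
attrs.InsertString("foo/bar", "b")
// Both keys sanitize to "foo_bar". Because the map is sorted before Range,
// the merge is deterministic and the resulting label is foo_bar="a;b".
```
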
@@ -280,7 +281,7 @@ func addSingleNumberDataPoint(pt pdata.NumberDataPoint, resource pdata.Resource,
if pt.Flags().HasFlag(pdata.MetricDataPointFlagNoRecordedValue) {
sample.Value = math.Float64frombits(value.StaleNaN)
}
addSample(tsMap, sample, labels, metric)
addSample(tsMap, sample, labels, metric.DataType().String())
}

// addSingleHistogramDataPoint converts pt to 2 + min(len(ExplicitBounds), len(BucketCount)) + 1 samples. It
@@ -299,7 +300,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res
}

sumlabels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName+sumStr)
addSample(tsMap, sum, sumlabels, metric)
addSample(tsMap, sum, sumlabels, metric.DataType().String())

// treat count as a sample in an individual TimeSeries
count := &prompb.Sample{
@@ -311,7 +312,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res
}

countlabels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName+countStr)
addSample(tsMap, count, countlabels, metric)
addSample(tsMap, count, countlabels, metric.DataType().String())

// cumulative count for conversion to cumulative histogram
var cumulativeCount uint64
@@ -335,7 +336,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res
}
boundStr := strconv.FormatFloat(bound, 'f', -1, 64)
labels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName+bucketStr, leStr, boundStr)
sig := addSample(tsMap, bucket, labels, metric)
sig := addSample(tsMap, bucket, labels, metric.DataType().String())

bucketBounds = append(bucketBounds, bucketBoundsData{sig: sig, bound: bound})
}
@@ -350,7 +351,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res
infBucket.Value = float64(cumulativeCount)
}
infLabels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName+bucketStr, leStr, pInfStr)
sig := addSample(tsMap, infBucket, infLabels, metric)
sig := addSample(tsMap, infBucket, infLabels, metric.DataType().String())

bucketBounds = append(bucketBounds, bucketBoundsData{sig: sig, bound: math.Inf(1)})
addExemplars(tsMap, promExemplars, bucketBounds)
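For reference, the sample-count formula in this function's doc comment works out as follows for a hypothetical data point with explicit bounds `[0.1, 1, 10]`: one `_sum` sample, one `_count` sample, one `_bucket` sample per finite bound, and one `_bucket` sample for `le="+Inf"`, i.e. 2 + 3 + 1 = 6 samples in total.
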
@@ -411,6 +412,42 @@ func getPromExemplars(pt pdata.HistogramDataPoint) []prompb.Exemplar {
return promExemplars
}

// mostRecentTimestampInMetric returns the latest timestamp across all data points of a metric
func mostRecentTimestampInMetric(metric pdata.Metric) pdata.Timestamp {
var ts pdata.Timestamp
// handle individual metric based on type
switch metric.DataType() {
case pdata.MetricDataTypeGauge:
dataPoints := metric.Gauge().DataPoints()
for x := 0; x < dataPoints.Len(); x++ {
ts = maxTimestamp(ts, dataPoints.At(x).Timestamp())
}
case pdata.MetricDataTypeSum:
dataPoints := metric.Sum().DataPoints()
for x := 0; x < dataPoints.Len(); x++ {
ts = maxTimestamp(ts, dataPoints.At(x).Timestamp())
}
case pdata.MetricDataTypeHistogram:
dataPoints := metric.Histogram().DataPoints()
for x := 0; x < dataPoints.Len(); x++ {
ts = maxTimestamp(ts, dataPoints.At(x).Timestamp())
}
case pdata.MetricDataTypeSummary:
dataPoints := metric.Summary().DataPoints()
for x := 0; x < dataPoints.Len(); x++ {
ts = maxTimestamp(ts, dataPoints.At(x).Timestamp())
}
}
return ts
}

func maxTimestamp(a, b pdata.Timestamp) pdata.Timestamp {
if a > b {
return a
}
return b
}

// addSingleSummaryDataPoint converts pt to len(QuantileValues) + 2 samples.
func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resource, metric pdata.Metric, settings Settings,
tsMap map[string]*prompb.TimeSeries) {
@@ -426,7 +463,7 @@ func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resourc
sum.Value = math.Float64frombits(value.StaleNaN)
}
sumlabels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName+sumStr)
addSample(tsMap, sum, sumlabels, metric)
addSample(tsMap, sum, sumlabels, metric.DataType().String())

// treat count as a sample in an individual TimeSeries
count := &prompb.Sample{
@@ -437,7 +474,7 @@ func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resourc
count.Value = math.Float64frombits(value.StaleNaN)
}
countlabels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName+countStr)
addSample(tsMap, count, countlabels, metric)
addSample(tsMap, count, countlabels, metric.DataType().String())

// process each percentile/quantile
for i := 0; i < pt.QuantileValues().Len(); i++ {
@@ -451,8 +488,40 @@ func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resourc
}
percentileStr := strconv.FormatFloat(qt.Quantile(), 'f', -1, 64)
qtlabels := createAttributes(resource, pt.Attributes(), settings.ExternalLabels, nameStr, baseName, quantileStr, percentileStr)
addSample(tsMap, quantile, qtlabels, metric)
addSample(tsMap, quantile, qtlabels, metric.DataType().String())
}
}
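Likewise, a summary data point carrying the 0.5, 0.9, and 0.99 quantiles yields `len(QuantileValues) + 2 = 5` samples per the doc comment: three quantile series plus `_sum` and `_count`.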

// addResourceTargetInfo converts the resource to the target info metric
func addResourceTargetInfo(resource pdata.Resource, settings Settings, timestamp pdata.Timestamp, tsMap map[string]*prompb.TimeSeries) {
if resource.Attributes().Len() == 0 {
return
}
// create parameters for addSample
name := targetMetricName
if len(settings.Namespace) > 0 {
name = settings.Namespace + "_" + name
}
// Use resource attributes (other than those used for job+instance) as the
// metric labels for the target info metric
attributes := pdata.NewMap()
resource.Attributes().CopyTo(attributes)
attributes.RemoveIf(func(k string, _ pdata.Value) bool {
switch k {
case conventions.AttributeServiceName, conventions.AttributeServiceNamespace, conventions.AttributeServiceInstanceID:
// Remove resource attributes used for job + instance
return true
default:
return false
}
})
labels := createAttributes(resource, attributes, settings.ExternalLabels, nameStr, name)
sample := &prompb.Sample{
Value: float64(1),
// convert ns to ms
Timestamp: convertTimeStamp(timestamp),
}
addSample(tsMap, sample, labels, infoType)
}
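The call site for `addResourceTargetInfo` is not part of this excerpt; a minimal caller-side sketch (the wrapper function below is hypothetical, only the two helpers from this diff are assumed to exist) could combine it with `mostRecentTimestampInMetric` like this:

```go
// Hypothetical wrapper, not part of this PR: emit one target info sample per
// resource, timestamped with the newest data point seen for that resource.
func addTargetInfo(resource pdata.Resource, metrics pdata.MetricSlice, settings Settings, tsMap map[string]*prompb.TimeSeries) {
	var mostRecent pdata.Timestamp
	for i := 0; i < metrics.Len(); i++ {
		mostRecent = maxTimestamp(mostRecent, mostRecentTimestampInMetric(metrics.At(i)))
	}
	// Value 1, labels = remaining resource attributes (plus job/instance).
	addResourceTargetInfo(resource, settings, mostRecent, tsMap)
}
```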

// copied from prometheus-go-metric-exporter