
Include all string attributes unless skipped
Do not limit attribute generation from string fields to the ones nested in
a map or slice. This removes the need for the `attr` tag, and the `parent`
field in the traversal context is no longer necessary.

Exclude verbose fields, such as the connection string, from attributes.
masih committed Jun 19, 2023
1 parent 36f80d1 commit a9be8d8
Showing 5 changed files with 46 additions and 42 deletions.
README.md: 2 changes (0 additions, 2 deletions)
@@ -9,8 +9,6 @@
* Periodically fetches and caches FoundationDB status.
* Exposes a REST endpoint at `/status` to serve the raw JSON for further debugging.
* Supports custom struct tags to customize metric reporting using the key `fdbmeter`; see [`model`](model.go).
* `fdbmeter:"attr"` explicitly includes a `string` field as an attribute.
* Note that strings in array or map are automatically added as Prometheus metric attributes.
* `fdbmeter:"skip"` excludes a field from metric reporting or an attribute.
* `fdbmeter:"key=<custom-key>"` customizes the `key` attribute of `map` fields.
## Metrics
metrics.go: 49 changes (18 additions, 31 deletions)
@@ -87,7 +87,7 @@ func (m *Metrics) notifyGetStatusLatency(ctx context.Context, latency time.Durat
func (m *Metrics) notifyStatus(ctx context.Context, status Status) {
observables := make(map[string][]observable)
populateObservables(ctx, traversalContext{
f: reflect.ValueOf(status),
field: reflect.ValueOf(status),
}, observables, m.commonAttrs)

m.observablesLock.Lock()
@@ -96,61 +96,50 @@ func (m *Metrics) notifyStatus(ctx context.Context, status Status) {
}

type traversalContext struct {
parent *reflect.Value
f reflect.Value
field reflect.Value
metricName string
attrs []attribute.KeyValue
tag reflect.StructTag
}

func populateObservables(ctx context.Context, tctx traversalContext, observables map[string][]observable, commonAttrs []attribute.KeyValue) {
switch tctx.f.Kind() {
switch tctx.field.Kind() {
case reflect.Struct:
if tctx.metricName != "" {
tctx.metricName = tctx.metricName + "_"
}
FieldAttrLoop:
for i := 0; i < tctx.f.Type().NumField(); i++ {
for i := 0; i < tctx.field.Type().NumField(); i++ {
select {
case <-ctx.Done():
return
default:
field := tctx.f.Type().Field(i)
field := tctx.field.Type().Field(i)
for _, tagItem := range strings.Split(field.Tag.Get("fdbmeter"), ",") {
if tagItem == "skip" {
continue FieldAttrLoop
}
}
if field.Type.Kind() == reflect.String {
v := tctx.f.Field(i).String()
if tctx.parent != nil {
switch tctx.parent.Kind() {
case reflect.Slice, reflect.Map:
tctx.attrs = append(tctx.attrs, attribute.String(field.Tag.Get("json"), v))
continue
}
}
if strings.Contains(field.Tag.Get("fdbmeter"), "attr") {
tctx.attrs = append(tctx.attrs, attribute.String(field.Tag.Get("json"), v))
}
v := tctx.field.Field(i).String()
tctx.attrs = append(tctx.attrs, attribute.String(field.Tag.Get("json"), v))
}
}
}
FieldObserveLoop:
for i := 0; i < tctx.f.Type().NumField(); i++ {
for i := 0; i < tctx.field.Type().NumField(); i++ {
select {
case <-ctx.Done():
return
default:
field := tctx.f.Type().Field(i)
field := tctx.field.Type().Field(i)
for _, tagItem := range strings.Split(field.Tag.Get("fdbmeter"), ",") {
if tagItem == "skip" {
continue FieldObserveLoop
}
}
populateObservables(ctx, traversalContext{
parent: tctx.parent,
f: tctx.f.Field(i),
field: tctx.field.Field(i),
metricName: tctx.metricName + field.Tag.Get("json"),
attrs: tctx.attrs,
tag: field.Tag,
@@ -163,16 +152,15 @@ func populateObservables(ctx context.Context, tctx traversalContext, observables
return
}
}
elems, ok := tctx.f.Interface().([]any)
elems, ok := tctx.field.Interface().([]any)
if ok {
for _, elem := range elems {
select {
case <-ctx.Done():
return
default:
populateObservables(ctx, traversalContext{
parent: &tctx.f,
f: reflect.ValueOf(elem),
field: reflect.ValueOf(elem),
metricName: tctx.metricName,
attrs: tctx.attrs,
tag: tctx.tag,
@@ -191,7 +179,7 @@ func populateObservables(ctx context.Context, tctx traversalContext, observables
attrKey = strings.TrimPrefix(tagItem, "key=")
}
}
for r := tctx.f.MapRange(); r.Next(); {
for r := tctx.field.MapRange(); r.Next(); {
select {
case <-ctx.Done():
return
@@ -201,8 +189,7 @@ func populateObservables(ctx context.Context, tctx traversalContext, observables
tctx.attrs = append(tctx.attrs, attribute.String(attrKey, r.Key().String()))
}
populateObservables(ctx, traversalContext{
parent: &tctx.f,
f: r.Value(),
field: r.Value(),
metricName: tctx.metricName,
attrs: tctx.attrs,
tag: tctx.tag,
@@ -211,7 +198,7 @@ func populateObservables(ctx context.Context, tctx traversalContext, observables
}
case reflect.Bool:
var v int64
if tctx.f.Bool() {
if tctx.field.Bool() {
v = 1
}
observables[tctx.metricName] = append(observables[tctx.metricName], observable{
@@ -220,17 +207,17 @@ func populateObservables(ctx context.Context, tctx traversalContext, observables
})
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
observables[tctx.metricName] = append(observables[tctx.metricName], observable{
int64Value: tctx.f.Int(),
int64Value: tctx.field.Int(),
attrs: attribute.NewSet(append(commonAttrs, tctx.attrs...)...),
})
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
observables[tctx.metricName] = append(observables[tctx.metricName], observable{
int64Value: int64(tctx.f.Uint()),
int64Value: int64(tctx.field.Uint()),
attrs: attribute.NewSet(append(commonAttrs, tctx.attrs...)...),
})
case reflect.Float32, reflect.Float64:
observables[tctx.metricName] = append(observables[tctx.metricName], observable{
float64Value: tctx.f.Float(),
float64Value: tctx.field.Float(),
attrs: attribute.NewSet(append(commonAttrs, tctx.attrs...)...),
})
}
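To make the traversal change concrete, here is a minimal, self-contained sketch of the idea (simplified and not the project's code: it only handles nested structs and strings, and collects attributes into a plain map instead of OpenTelemetry key-values):

```go
package main

import (
	"fmt"
	"reflect"
	"strings"
)

// collectStringAttrs walks a struct value and records every string field as
// an attribute keyed by its json tag, unless the field carries
// fdbmeter:"skip". This mirrors the new behaviour: no explicit "attr" tag
// and no parent tracking are needed any more.
func collectStringAttrs(v reflect.Value, attrs map[string]string) {
	if v.Kind() != reflect.Struct {
		return
	}
	for i := 0; i < v.Type().NumField(); i++ {
		field := v.Type().Field(i)
		skipped := false
		for _, tagItem := range strings.Split(field.Tag.Get("fdbmeter"), ",") {
			if tagItem == "skip" {
				skipped = true
			}
		}
		if skipped {
			continue
		}
		switch field.Type.Kind() {
		case reflect.String:
			attrs[field.Tag.Get("json")] = v.Field(i).String()
		case reflect.Struct:
			collectStringAttrs(v.Field(i), attrs)
		}
	}
}

func main() {
	status := struct {
		Name             string `json:"name"`
		ConnectionString string `json:"connection_string" fdbmeter:"skip"`
	}{Name: "healing", ConnectionString: "example:cluster@127.0.0.1:4500"}

	attrs := map[string]string{}
	collectStringAttrs(reflect.ValueOf(status), attrs)
	fmt.Println(attrs) // map[name:healing]
}
```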
metrics_test.go: 25 changes (22 additions, 3 deletions)
@@ -25,7 +25,20 @@ func TestMetricsPopulatesValues(t *testing.T) {
},
},
"cluster_data_state_healthy": {
{int64Value: 0, attrs: attribute.NewSet(attribute.String("description", "Only one replica remains of some data"), attribute.String("name", "healing"))},
{int64Value: 0, attrs: attribute.NewSet(
attribute.String("active_primary_dc", "abc"),
attribute.String("description", "Only one replica remains of some data"),
attribute.String("name", "healing"),
attribute.String("protocol_version", "fdb00b071010000"),
)},
},
"cluster_qos_performance_limited_by_reason_id": {
{int64Value: 1, attrs: attribute.NewSet(
attribute.String("active_primary_dc", "abc"),
attribute.String("description", "Storage server performance (storage queue)."),
attribute.String("name", "storage_server_write_queue_size"),
attribute.String("protocol_version", "fdb00b071010000"),
)},
},
},
},
@@ -35,13 +48,19 @@
"cluster_workload_operations_writes_hz": {
{
float64Value: 342887,
attrs: attribute.NewSet(),
attrs: attribute.NewSet(
attribute.String("active_primary_dc", ""),
attribute.String("protocol_version", "fdb00b071010000"),
),
},
},
"cluster_latency_probe_immediate_priority_transaction_start_seconds": {
{
float64Value: 0.6852229999999999,
attrs: attribute.NewSet(),
attrs: attribute.NewSet(
attribute.String("active_primary_dc", ""),
attribute.String("protocol_version", "fdb00b071010000"),
),
},
},
},
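The updated expectations reflect that top-level string fields such as `active_primary_dc` and `protocol_version` now appear in every metric's attribute set. As a hedged illustration of how traversal-collected attributes are merged with the common attributes into an `attribute.Set` (the `commonAttrs` contents and values below are made up):

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/otel/attribute"
)

func main() {
	// Attributes shared by every observation (contents are hypothetical).
	commonAttrs := []attribute.KeyValue{
		attribute.String("fdbmeter_instance", "example"),
	}
	// Attributes collected while traversing the status document.
	fieldAttrs := []attribute.KeyValue{
		attribute.String("active_primary_dc", "abc"),
		attribute.String("protocol_version", "fdb00b071010000"),
		attribute.String("name", "healing"),
	}
	// Mirrors attribute.NewSet(append(commonAttrs, tctx.attrs...)...) in metrics.go.
	set := attribute.NewSet(append(commonAttrs, fieldAttrs...)...)
	fmt.Println(set.Encoded(attribute.DefaultEncoder()))
}
```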
model.go: 10 changes (5 additions, 5 deletions)
@@ -3,7 +3,7 @@ package fdbmeter
type Status struct {
Client struct {
ClusterFile struct {
Path string `json:"path"`
Path string `json:"path" fdbmeter:"skip"`
UpToDate bool `json:"up_to_date"`
} `json:"cluster_file"`
Coordinators struct {
@@ -67,7 +67,7 @@ type Status struct {
TenantMode string `json:"tenant_mode"`
UsableRegions int `json:"usable_regions"`
} `json:"configuration"`
ConnectionString string `json:"connection_string"`
ConnectionString string `json:"connection_string" fdbmeter:"skip"`
Data struct {
AveragePartitionSizeBytes int `json:"average_partition_size_bytes"`
LeastOperatingSpaceBytesLogServer int64 `json:"least_operating_space_bytes_log_server"`
@@ -80,10 +80,10 @@
} `json:"moving_data"`
PartitionsCount int `json:"partitions_count"`
State struct {
Description string `json:"description" fdbmeter:"attr"`
Description string `json:"description"`
Healthy bool `json:"healthy"`
MinReplicasRemaining int `json:"min_replicas_remaining"`
Name string `json:"name" fdbmeter:"attr"`
Name string `json:"name"`
} `json:"state"`
SystemKvSizeBytes int `json:"system_kv_size_bytes"`
TeamTrackers []struct {
@@ -123,7 +123,7 @@ type Status struct {
TransactionStartSeconds float64 `json:"transaction_start_seconds"`
} `json:"latency_probe"`
Layers struct {
Error string `json:"_error" fdbmeter:"attr"`
Error string `json:"_error"`
Valid bool `json:"_valid"`
} `json:"layers"`
Logs []struct {
testdata/status1.json: 2 changes (1 addition, 1 deletion)
@@ -33,7 +33,7 @@
"timestamp": 1686137027
},
"cluster": {
"active_primary_dc": "",
"active_primary_dc": "abc",
"active_tss_count": 0,
"bounce_impact": {
"can_clean_bounce": true
