[receiver/azuremonitorreceiver] feat: Allow to not split result by dimension

Signed-off-by: Célian Garcia <[email protected]>
celian-garcia committed Nov 21, 2024
1 parent 8daf962 commit 1d2a929
Showing 4 changed files with 108 additions and 34 deletions.
27 changes: 27 additions & 0 deletions .chloggen/split-dimensions-optout.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: receiver/azuremonitorreceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: "Add dimensions.enabled and dimensions.overrides which allows to opt out from automatically split by all the dimensions of the resource type"

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [36240]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
16 changes: 16 additions & 0 deletions receiver/azuremonitorreceiver/README.md
@@ -31,6 +31,8 @@ The following settings are optional:
- `maximum_number_of_records_per_resource` (default = 10): Maximum number of records to fetch per resource.
- `initial_delay` (default = `1s`): defines how long this receiver waits before starting.
- `cloud` (default = `AzureCloud`): defines which Azure cloud to use. Valid values: `AzureCloud`, `AzureUSGovernment`, `AzureChinaCloud`.
- `dimensions.enabled` (default = `true`): allows you to opt out of automatically splitting by all the dimensions of the resource type (see the examples below).
- `dimensions.overrides` (default = `{}`): when dimensions are enabled, lets you specify the set of dimensions to use for a particular metric. This is a two-level map whose first key is the resource type and second key is the metric name.

Authenticating using a service principal requires the following additional settings:

@@ -101,6 +103,20 @@ receivers:
auth: "default_credentials"
```
Overriding dimensions for a particular metric:
```yaml
receivers:
azuremonitor:
dimensions:
enabled: true
overrides:
"Microsoft.Network/azureFirewalls":
# Real example of an Azure limitation here:
# Dimensions exposed are Reason, Status, Protocol,
# but when selecting Protocol in the filters, it returns nothing.
"Network rules hit count": [Reason, Status]
```
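
Opting out of splitting by dimensions entirely (a minimal sketch using the `dimensions.enabled` setting described above; the rest of the receiver configuration is omitted):

```yaml
receivers:
  azuremonitor:
    dimensions:
      enabled: false
```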
## Metrics
6 changes: 6 additions & 0 deletions receiver/azuremonitorreceiver/config.go
@@ -228,6 +228,11 @@ var (
}
)

type DimensionsConfig struct {
Enabled *bool `mapstructure:"enabled"`
Overrides map[string]map[string][]string `mapstructure:"overrides"`
}

// Config defines the configuration for the various elements of the receiver agent.
type Config struct {
scraperhelper.ControllerConfig `mapstructure:",squash"`
@@ -246,6 +251,7 @@ type Config struct {
MaximumNumberOfMetricsInACall int `mapstructure:"maximum_number_of_metrics_in_a_call"`
MaximumNumberOfRecordsPerResource int32 `mapstructure:"maximum_number_of_records_per_resource"`
AppendTagsAsAttributes bool `mapstructure:"append_tags_as_attributes"`
Dimensions DimensionsConfig `mapstructure:"dimensions"`
}

const (
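
For illustration only (not part of the commit): a minimal, self-contained Go sketch of how the `dimensions` block from the README example above maps onto the new `DimensionsConfig` struct. The wrapper program and printed output are assumptions made for the sake of a runnable example.

```go
package main

import "fmt"

// Trimmed copy of the struct added in config.go, so this sketch compiles on its own
// (mapstructure tags omitted, since no unmarshalling happens here).
type DimensionsConfig struct {
	Enabled   *bool
	Overrides map[string]map[string][]string
}

func main() {
	enabled := true
	cfg := DimensionsConfig{
		Enabled: &enabled,
		Overrides: map[string]map[string][]string{
			// Resource type -> metric name -> dimensions to split by.
			"Microsoft.Network/azureFirewalls": {
				"Network rules hit count": {"Reason", "Status"},
			},
		},
	}
	fmt.Println(cfg.Overrides["Microsoft.Network/azureFirewalls"]["Network rules hit count"])
	// Prints: [Reason Status]
}
```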
93 changes: 59 additions & 34 deletions receiver/azuremonitorreceiver/scraper.go
@@ -64,6 +64,7 @@ type azureResource struct {
metricsByCompositeKey map[metricsCompositeKey]*azureResourceMetrics
metricsDefinitionsUpdated time.Time
tags map[string]*string
resourceType *string
}

type metricsCompositeKey struct {
@@ -281,8 +282,9 @@ func (s *azureScraper) getResources(ctx context.Context) {
attributes[attributeLocation] = resource.Location
}
s.resources[*resource.ID] = &azureResource{
attributes: attributes,
tags: resource.Tags,
attributes: attributes,
tags: resource.Tags,
resourceType: resource.Type,
}
}
delete(existingResources, *resource.ID)
@@ -338,25 +340,64 @@ func (s *azureScraper) getResourceMetricsDefinitions(ctx context.Context, resour

for _, v := range nextResult.Value {
timeGrain := *v.MetricAvailabilities[0].TimeGrain
name := *v.Name.Value
compositeKey := metricsCompositeKey{timeGrain: timeGrain}

if len(v.Dimensions) > 0 {
var dimensionsSlice []string
for _, dimension := range v.Dimensions {
if len(strings.TrimSpace(*dimension.Value)) > 0 {
dimensionsSlice = append(dimensionsSlice, *dimension.Value)
}
}
sort.Strings(dimensionsSlice)
compositeKey.dimensions = strings.Join(dimensionsSlice, ",")
metricName := *v.Name.Value
dimensions := filterDimensions(v.Dimensions, s.cfg.Dimensions, *s.resources[resourceID].resourceType, metricName)
compositeKey := metricsCompositeKey{
timeGrain: timeGrain,
dimensions: serializeDimensions(dimensions),
}
s.storeMetricsDefinition(resourceID, name, compositeKey)
s.storeMetricsDefinition(resourceID, metricName, compositeKey)
}
}
s.resources[resourceID].metricsDefinitionsUpdated = time.Now()
}

func filterDimensions(dimensions []*armmonitor.LocalizableString, cfg DimensionsConfig, resourceType, metricName string) []string {
// Only skip if explicitly disabled. Enabled by default.
if cfg.Enabled != nil && *cfg.Enabled == false {
return nil
}

// If dimensions are overridden for that resource type and metric name, we take it
if _, resourceTypeFound := cfg.Overrides[resourceType]; resourceTypeFound {
if newDimensions, metricNameFound := cfg.Overrides[resourceType][metricName]; metricNameFound {
return newDimensions
}
}
// Otherwise we get all dimensions
var result []string
for _, dimension := range dimensions {
result = append(result, *dimension.Value)
}
return result
}
func serializeDimensions(dimensions []string) string {
var dimensionsSlice []string
for _, dimension := range dimensions {
if len(strings.TrimSpace(dimension)) > 0 {
dimensionsSlice = append(dimensionsSlice, dimension)
}
}
sort.Strings(dimensionsSlice)
return strings.Join(dimensionsSlice, ",")
}
func buildDimensionsFilter(dimensionsStr string) *string {
if len(dimensionsStr) == 0 {
return nil
}
var dimensionsFilter bytes.Buffer
dimensions := strings.Split(dimensionsStr, ",")
for i, dimension := range dimensions {
dimensionsFilter.WriteString(dimension)
dimensionsFilter.WriteString(" eq '*' ")
if i < len(dimensions)-1 {
dimensionsFilter.WriteString(" and ")
}
}
result := dimensionsFilter.String()
return &result
}

func (s *azureScraper) storeMetricsDefinition(resourceID, name string, compositeKey metricsCompositeKey) {
if _, ok := s.resources[resourceID].metricsByCompositeKey[compositeKey]; ok {
s.resources[resourceID].metricsByCompositeKey[compositeKey].metrics = append(
@@ -439,30 +480,14 @@ func getResourceMetricsValuesRequestOptions(
end int,
top int32,
) armmonitor.MetricsClientListOptions {
resType := strings.Join(metrics[start:end], ",")
filter := armmonitor.MetricsClientListOptions{
Metricnames: &resType,
return armmonitor.MetricsClientListOptions{
Metricnames: to.Ptr(strings.Join(metrics[start:end], ",")),
Interval: to.Ptr(timeGrain),
Timespan: to.Ptr(timeGrain),
Aggregation: to.Ptr(strings.Join(aggregations, ",")),
Top: to.Ptr(top),
Filter: buildDimensionsFilter(dimensionsStr),
}

if len(dimensionsStr) > 0 {
var dimensionsFilter bytes.Buffer
dimensions := strings.Split(dimensionsStr, ",")
for i, dimension := range dimensions {
dimensionsFilter.WriteString(dimension)
dimensionsFilter.WriteString(" eq '*' ")
if i < len(dimensions)-1 {
dimensionsFilter.WriteString(" and ")
}
}
dimensionFilterString := dimensionsFilter.String()
filter.Filter = &dimensionFilterString
}

return filter
}

func (s *azureScraper) processTimeseriesData(
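
For context (not part of the commit): a standalone sketch of the filter string the helpers above produce. It condenses `serializeDimensions` and `buildDimensionsFilter` into one function; the dimension names are the ones from the README firewall example.

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// buildFilter mirrors serializeDimensions followed by buildDimensionsFilter:
// blank entries are dropped, the rest are sorted, then each dimension becomes
// a "<name> eq '*'" clause joined with "and".
func buildFilter(dimensions []string) string {
	var kept []string
	for _, d := range dimensions {
		if strings.TrimSpace(d) != "" {
			kept = append(kept, d)
		}
	}
	sort.Strings(kept)
	var b strings.Builder
	for i, d := range kept {
		b.WriteString(d + " eq '*' ")
		if i < len(kept)-1 {
			b.WriteString(" and ")
		}
	}
	return b.String()
}

func main() {
	// Dimensions are sorted before serialization, so config order does not matter.
	// Prints: Reason eq '*'  and Status eq '*'
	fmt.Println(buildFilter([]string{"Status", "Reason"}))
}
```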
