Allow matching on rule state #1079

Merged 3 commits on Aug 23, 2024
cmd/pint/scan.go (150 changes: 24 additions, 126 deletions)
@@ -2,32 +2,19 @@ package main

import (
"context"
"errors"
"fmt"
"log/slog"
"sync"
"time"

"go.uber.org/atomic"

"github.com/cloudflare/pint/internal/checks"
"github.com/cloudflare/pint/internal/comments"
"github.com/cloudflare/pint/internal/config"
"github.com/cloudflare/pint/internal/discovery"
"github.com/cloudflare/pint/internal/parser"
"github.com/cloudflare/pint/internal/promapi"
"github.com/cloudflare/pint/internal/reporter"
)

const (
yamlParseReporter = "yaml/parse"
ignoreFileReporter = "ignore/file"
pintCommentReporter = "pint/comment"

yamlDetails = `This Prometheus rule is not valid.
This usually means that it's missing some required fields.`
)

func checkRules(ctx context.Context, workers int, isOffline bool, gen *config.PrometheusGenerator, cfg config.Config, entries []discovery.Entry) (summary reporter.Summary, err error) {
if isOffline {
slog.Info("Offline mode, skipping Prometheus discovery")
@@ -84,7 +71,7 @@ func checkRules(ctx context.Context, workers int, isOffline bool, gen *config.Pr
continue
case entry.Rule.Error.Err != nil && entry.State == discovery.Removed:
continue
case entry.PathError == nil && entry.Rule.Error.Err == nil:
default:
if entry.Rule.RecordingRule != nil {
rulesParsedTotal.WithLabelValues(config.RecordingRuleType).Inc()
slog.Debug("Found recording rule",
@@ -101,9 +88,16 @@ func checkRules(ctx context.Context, workers int, isOffline bool, gen *config.Pr
slog.String("lines", entry.Rule.Lines.String()),
)
}
if entry.Rule.Error.Err != nil {
slog.Debug("Found invalid rule",
slog.String("path", entry.Path.Name),
slog.String("lines", entry.Rule.Lines.String()),
)
rulesParsedTotal.WithLabelValues(config.InvalidRuleType).Inc()
}

checkedEntriesCount.Inc()
checkList := cfg.GetChecksForRule(ctx, gen, entry, entry.DisabledChecks)
checkList := cfg.GetChecksForEntry(ctx, gen, entry)
for _, check := range checkList {
checkIterationChecks.Inc()
if check.Meta().IsOnline {
@@ -113,15 +107,6 @@ func checkRules(ctx context.Context, workers int, isOffline bool, gen *config.Pr
}
jobs <- scanJob{entry: entry, allEntries: entries, check: check}
}
default:
if entry.Rule.Error.Err != nil {
slog.Debug("Found invalid rule",
slog.String("path", entry.Path.Name),
slog.String("lines", entry.Rule.Lines.String()),
)
rulesParsedTotal.WithLabelValues(config.InvalidRuleType).Inc()
}
jobs <- scanJob{entry: entry, allEntries: entries, check: nil}
}
}
defer close(jobs)
@@ -154,115 +139,28 @@ func scanWorker(ctx context.Context, jobs <-chan scanJob, results chan<- reporte
case <-ctx.Done():
return
default:
var commentErr comments.CommentError
var ignoreErr discovery.FileIgnoreError
switch {
case errors.As(job.entry.PathError, &ignoreErr):
results <- reporter.Report{
Path: discovery.Path{
Name: job.entry.Path.Name,
SymlinkTarget: job.entry.Path.SymlinkTarget,
},
ModifiedLines: job.entry.ModifiedLines,
Problem: checks.Problem{
Lines: parser.LineRange{
First: ignoreErr.Line,
Last: ignoreErr.Line,
},
Reporter: ignoreFileReporter,
Text: ignoreErr.Error(),
Severity: checks.Information,
},
Owner: job.entry.Owner,
}
case errors.As(job.entry.PathError, &commentErr):
results <- reporter.Report{
Path: discovery.Path{
Name: job.entry.Path.Name,
SymlinkTarget: job.entry.Path.SymlinkTarget,
},
ModifiedLines: job.entry.ModifiedLines,
Problem: checks.Problem{
Lines: parser.LineRange{
First: commentErr.Line,
Last: commentErr.Line,
},
Reporter: pintCommentReporter,
Text: "This comment is not a valid pint control comment: " + commentErr.Error(),
Severity: checks.Warning,
},
Owner: job.entry.Owner,
}
case job.entry.PathError != nil:
results <- reporter.Report{
Path: discovery.Path{
Name: job.entry.Path.Name,
SymlinkTarget: job.entry.Path.SymlinkTarget,
},
ModifiedLines: job.entry.ModifiedLines,
Problem: checks.Problem{
Lines: parser.LineRange{
First: 1,
Last: 1,
},
Reporter: yamlParseReporter,
Text: fmt.Sprintf("YAML parser returned an error when reading this file: `%s`.", job.entry.PathError),
Details: `pint cannot read this file because YAML parser returned an error.
This usually means that you have an indention error or the file doesn't have the YAML structure required by Prometheus for [recording](https://prometheus.io/docs/prometheus/latest/configuration/recording_rules/) and [alerting](https://prometheus.io/docs/prometheus/latest/configuration/alerting_rules/) rules.
If this file is a template that will be rendered into valid YAML then you can instruct pint to ignore some lines using comments, see [pint docs](https://cloudflare.github.io/pint/ignoring.html).
`,
Severity: checks.Fatal,
},
Owner: job.entry.Owner,
}
case job.entry.Rule.Error.Err != nil:
details := yamlDetails
if job.entry.Rule.Error.Details != "" {
details = job.entry.Rule.Error.Details
}
if job.entry.State == discovery.Unknown {
slog.Warn(
"Bug: unknown rule state",
slog.String("path", job.entry.Path.String()),
slog.Int("line", job.entry.Rule.Lines.First),
slog.String("name", job.entry.Rule.Name()),
)
}

start := time.Now()
problems := job.check.Check(ctx, job.entry.Path, job.entry.Rule, job.allEntries)
checkDuration.WithLabelValues(job.check.Reporter()).Observe(time.Since(start).Seconds())
for _, problem := range problems {
results <- reporter.Report{
Path: discovery.Path{
Name: job.entry.Path.Name,
SymlinkTarget: job.entry.Path.SymlinkTarget,
},
ModifiedLines: job.entry.ModifiedLines,
Rule: job.entry.Rule,
Problem: checks.Problem{
Lines: parser.LineRange{
First: job.entry.Rule.Error.Line,
Last: job.entry.Rule.Error.Line,
},
Reporter: yamlParseReporter,
Text: fmt.Sprintf("This rule is not a valid Prometheus rule: `%s`.", job.entry.Rule.Error.Err.Error()),
Details: details,
Severity: checks.Fatal,
},
Owner: job.entry.Owner,
}
default:
if job.entry.State == discovery.Unknown {
slog.Warn(
"Bug: unknown rule state",
slog.String("path", job.entry.Path.String()),
slog.Int("line", job.entry.Rule.Lines.First),
slog.String("name", job.entry.Rule.Name()),
)
}

start := time.Now()
problems := job.check.Check(ctx, job.entry.Path, job.entry.Rule, job.allEntries)
checkDuration.WithLabelValues(job.check.Reporter()).Observe(time.Since(start).Seconds())
for _, problem := range problems {
results <- reporter.Report{
Path: discovery.Path{
Name: job.entry.Path.Name,
SymlinkTarget: job.entry.Path.SymlinkTarget,
},
ModifiedLines: job.entry.ModifiedLines,
Rule: job.entry.Rule,
Problem: problem,
Owner: job.entry.Owner,
}
Problem: problem,
Owner: job.entry.Owner,
}
}
}
cmd/pint/tests/0083_github_action.txt (1 change: 1 addition, 0 deletions)
@@ -61,3 +61,4 @@ groups:

-- src/.pint.hcl --
repository {}

cmd/pint/tests/0185_state_empty.txt (45 changes: 45 additions, 0 deletions)
@@ -0,0 +1,45 @@
mkdir testrepo
cd testrepo
exec git init --initial-branch=main .

cp ../src/rules.yml rules.yml
cp ../src/.pint.hcl .
env GIT_AUTHOR_NAME=pint
env GIT_AUTHOR_EMAIL=pint@example.com
env GIT_COMMITTER_NAME=pint
env GIT_COMMITTER_EMAIL=pint@example.com
exec git add .
exec git commit -am 'import rules and config'

exec git checkout -b v2
exec touch .keep
exec git add .keep
exec git commit -am 'v2'

pint.ok --no-color ci
! stdout .
cmp stderr ../stderr.txt

-- stderr.txt --
level=INFO msg="Loading configuration file" path=.pint.hcl
level=INFO msg="Finding all rules to check on current git branch" base=main
-- src/rules.yml --
- record: rule1
expr: sum(foo) by(job)
- record: rule2
expr: sum(foo)

-- src/.pint.hcl --
ci {
baseBranch = "main"
}
parser {
relaxed = [".*"]
include = ["rules.yml"]
}
rule {
aggregate ".+" {
keep = [ "job" ]
severity = "bug"
}
}
cmd/pint/tests/0186_ci_state_any.txt (54 changes: 54 additions, 0 deletions)
@@ -0,0 +1,54 @@
mkdir testrepo
cd testrepo
exec git init --initial-branch=main .

cp ../src/rules.yml rules.yml
cp ../src/.pint.hcl .
env GIT_AUTHOR_NAME=pint
env GIT_AUTHOR_EMAIL=pint@example.com
env GIT_COMMITTER_NAME=pint
env GIT_COMMITTER_EMAIL=pint@example.com
exec git add .
exec git commit -am 'import rules and config'

exec git checkout -b v2
exec touch .keep
exec git add .keep
exec git commit -am 'v2'

pint.error --no-color ci
! stdout .
cmp stderr ../stderr.txt

-- stderr.txt --
level=INFO msg="Loading configuration file" path=.pint.hcl
level=INFO msg="Finding all rules to check on current git branch" base=main
level=INFO msg="Problems found" Bug=1
rules.yml:4 Bug: `job` label is required and should be preserved when aggregating `^.+$` rules, use `by(job, ...)`. (promql/aggregate)
4 | expr: sum(foo)

level=ERROR msg="Fatal error" err="problems found"
-- src/rules.yml --
- record: rule1
expr: sum(foo) by(job)
- record: rule2
expr: sum(foo)

-- src/.pint.hcl --
ci {
baseBranch = "main"
}
parser {
relaxed = [".*"]
include = ["rules.yml"]
}
rule {
match {
kind = "recording"
state = ["any"]
}
aggregate ".+" {
keep = [ "job" ]
severity = "bug"
}
}
docs/changelog.md (1 change: 1 addition, 0 deletions)
@@ -9,6 +9,7 @@
- [promql/range_query](checks/promql/range_query.md) now allows to configure a custom maximum
duration for range queries - #1064.
- Added `--enabled` flag to the pint command. Passing this flag will only run selected check(s).
- Added `state` option to the rule `match` block. See [configuration](configuration.md) docs for details.

### Fixed

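For reference, below is a minimal .pint.hcl sketch of the new `state` option described in the changelog entry above, modeled on the 0186_ci_state_any.txt test. The tests in this PR only exercise state = ["any"]; the full set of accepted state values is documented in configuration.md, so the commented alternative here is an assumption rather than a confirmed list.

rule {
  match {
    kind  = "recording"
    # "any" applies this match block to every rule, including rules that
    # were not modified on the current branch (this is what the 0186 test
    # verifies: an unchanged rule still gets flagged by promql/aggregate).
    state = ["any"]
    # Hypothetical alternative: limiting the match to changed rules only,
    # e.g. state = ["added", "modified"]; verify the exact state names
    # against the configuration docs.
  }
  aggregate ".+" {
    keep     = ["job"]
    severity = "bug"
  }
}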