Skip to content

Commit

Permalink
Add support for journal matches (only conjunctions)
Browse files Browse the repository at this point in the history
  • Loading branch information
carlospeon committed Aug 12, 2022
1 parent 3170edf commit 32b82f8
Show file tree
Hide file tree
Showing 5 changed files with 62 additions and 3 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
##### Enhancements
* [6395](https://github.com/grafana/loki/pull/6395) **DylanGuedes**: Add encoding support
* [6828](https://github.com/grafana/loki/pull/6828) **alexandre1984rj**: Add the BotScore and BotScoreSrc fields once the Cloudflare API returns those two fields on the list of all available log fields.
* [6656](https://github.com/grafana/loki/pull/6656) **carlospeon**: Allow promtail to add matches to the journal reader

##### Fixes
* [6766](https://github.com/grafana/loki/pull/6766) **kavirajk**: fix(logql): Make `LabelSampleExtractor` ignore processing the line if it doesn't contain that specific label. Fixes unwrap behavior explained in the issue https://github.com/grafana/loki/issues/6713
Expand Down
4 changes: 4 additions & 0 deletions clients/pkg/promtail/scrapeconfig/scrapeconfig.go
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,10 @@ type JournalTargetConfig struct {
// Path to a directory to read journal entries from. Defaults to system path
// if empty.
Path string `yaml:"path"`

// Journal matches to filter. Character (+) is not supported, only logical AND
// matches will be added.
Matches string `yaml:"matches"`
}

// SyslogTargetConfig describes a scrape config that listens for log lines over syslog.
Expand Down
20 changes: 18 additions & 2 deletions clients/pkg/promtail/targets/journal/journaltarget.go
Original file line number Diff line number Diff line change
Expand Up @@ -174,12 +174,26 @@ func journalTargetWithReader(
return nil, errors.Wrap(err, "parsing journal reader 'max_age' config value")
}

cfg := t.generateJournalConfig(journalConfigBuilder{
cb := journalConfigBuilder{
JournalPath: targetConfig.Path,
Position: position,
MaxAge: maxAge,
EntryFunc: entryFunc,
})
}

matches := strings.Fields(targetConfig.Matches)
for _, m := range matches {
fv := strings.Split(m, "=")
if len(fv) != 2 {
return nil, errors.New("Error parsing journal reader 'matches' config value")
}
cb.Matches = append(cb.Matches, sdjournal.Match{
Field: fv[0],
Value: fv[1],
})
}

cfg := t.generateJournalConfig(cb)
t.r, err = readerFunc(cfg)
if err != nil {
return nil, errors.Wrap(err, "creating journal reader")
Expand Down Expand Up @@ -208,6 +222,7 @@ func journalTargetWithReader(
// journalConfigBuilder collects the parameters used to build the
// sdjournal.JournalReaderConfig for a JournalTarget.
type journalConfigBuilder struct {
	JournalPath string            // path to the journal directory; empty means the system default path
	Position    string            // journal cursor to resume reading from, as restored from the positions file
	Matches     []sdjournal.Match // field=value filters; combined as a logical AND by the journal reader
	MaxAge      time.Duration     // entries older than this are skipped ("entry too old" protection)
	EntryFunc   journalEntryFunc  // callback invoked for every journal entry read
}
Expand All @@ -221,6 +236,7 @@ func (t *JournalTarget) generateJournalConfig(

cfg := sdjournal.JournalReaderConfig{
Path: cb.JournalPath,
Matches: cb.Matches,
Formatter: t.formatter,
}

Expand Down
35 changes: 35 additions & 0 deletions clients/pkg/promtail/targets/journal/journaltarget_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -385,3 +385,38 @@ func Test_MakeJournalFields(t *testing.T) {
}
assert.Equal(t, expectedFields, receivedFields)
}

// TestJournalTarget_Matches verifies that the scrape config 'matches'
// string is parsed into the journal reader's sdjournal.Match filters,
// preserving order and the FIELD=value split.
func TestJournalTarget_Matches(t *testing.T) {
	w := log.NewSyncWriter(os.Stderr)
	logger := log.NewLogfmtLogger(w)

	testutils.InitRandom()
	dirName := "/tmp/" + testutils.RandName()
	positionsFileName := dirName + "/positions.yml"

	// Set the sync period to a really long value, to guarantee the sync timer
	// never runs, this way we know everything saved was done through channel
	// notifications when target.stop() was called.
	ps, err := positions.New(logger, positions.Config{
		SyncPeriod:    10 * time.Second,
		PositionsFile: positionsFileName,
	})
	if err != nil {
		t.Fatal(err)
	}

	client := fake.New(func() {})

	cfg := scrapeconfig.JournalTargetConfig{
		Matches: "UNIT=foo.service PRIORITY=1",
	}

	jt, err := journalTargetWithReader(NewMetrics(prometheus.NewRegistry()), logger, client, ps, "test", nil,
		&cfg, newMockJournalReader, newMockJournalEntry(nil))
	require.NoError(t, err)

	r := jt.r.(*mockJournalReader)
	// require.Equal takes (expected, actual) in that order; keep the
	// literal expectation first so failure output labels values correctly.
	expected := []sdjournal.Match{{Field: "UNIT", Value: "foo.service"}, {Field: "PRIORITY", Value: "1"}}
	require.Equal(t, expected, r.config.Matches)

	// Stop the target before the client so the reader and positions
	// goroutine shut down cleanly (see the sync-period comment above).
	require.NoError(t, jt.Stop())
	client.Stop()
}
5 changes: 4 additions & 1 deletion docs/sources/clients/promtail/scraping.md
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ scrape_configs:
json: false
max_age: 12h
path: /var/log/journal
matches: _TRANSPORT=kernel
labels:
job: systemd-journal
relabel_configs:
Expand All @@ -109,7 +110,9 @@ here for reference. The `max_age` field ensures that no older entry than the
time specified will be sent to Loki; this circumvents "entry too old" errors.
The `path` field tells Promtail where to read journal entries from. The labels
map defines a constant list of labels to add to every journal entry that Promtail
reads.
reads. The `matches` field adds journal filters. If multiple filters are specified
matching different fields, log entries must satisfy all of them; if two filters
apply to the same field, they are automatically matched as alternatives.

When the `json` field is set to `true`, messages from the journal will be
passed through the pipeline as JSON, keeping all of the original fields from the
Expand Down

0 comments on commit 32b82f8

Please sign in to comment.