Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 62 additions & 20 deletions pkg/aggregation/aggregator.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package aggregation

import (
"context"
"errors"
"fmt"
html "html/template"
"log/slog"
Expand All @@ -22,13 +23,15 @@ import (

"github.com/synyx/tuwat/pkg/config"
"github.com/synyx/tuwat/pkg/connectors"
"github.com/synyx/tuwat/pkg/ruleengine"
)

type Aggregate struct {
CheckTime time.Time
Alerts []Alert
GroupedAlerts []AlertGroup
Blocked []BlockedAlert
Downtimes []BlockedAlert
}

type Alert struct {
Expand Down Expand Up @@ -75,10 +78,11 @@ type Aggregator struct {
}

type result struct {
connector connectors.Connector
tag string
alerts []connectors.Alert
downtimes []connectors.Downtime
error error
connector connectors.Connector
}

var (
Expand Down Expand Up @@ -216,24 +220,40 @@ func (a *Aggregator) collect(ctx context.Context, collect chan<- result) {
wg.Add(1)
go func(c connectors.Connector) {
defer wg.Done()

alerts, err := c.Collect(ctx)

// Be graceful on errors accessing the handed-in channel
defer func() {
if e := recover(); e != nil {
err = fmt.Errorf("error delivering result %w", e.(error))
err := fmt.Errorf("error delivering result %w", e.(error))
slog.InfoContext(ctx, "Collection cycle error",
slog.String("collector", c.String()),
slog.String("tag", c.Tag()),
slog.Any("error", err))
}
slog.InfoContext(ctx, "Collected alerts",
}()

alerts, err := c.Collect(ctx)
slog.InfoContext(ctx, "Collected alerts",
slog.String("collector", c.String()),
slog.String("tag", c.Tag()),
slog.Int("count", len(alerts)),
slog.Any("error", err))

var downtimes []connectors.Downtime
if dc, ok := c.(connectors.DowntimeCollector); ok {
var err2 error
downtimes, err2 = dc.CollectDowntimes(ctx)
slog.InfoContext(ctx, "Collected downtimes",
slog.String("collector", c.String()),
slog.String("tag", c.Tag()),
slog.Int("count", len(alerts)),
slog.Any("error", err))
}()
slog.Int("count", len(downtimes)),
slog.Any("error", err2))
err = errors.Join(err, err2)
}

r := result{
tag: c.Tag(),
alerts: alerts,
downtimes: downtimes,
error: err,
connector: c,
}
Expand Down Expand Up @@ -263,6 +283,7 @@ func (a *Aggregator) aggregate(ctx context.Context, dashboard *config.Dashboard,

var alerts []Alert
var blockedAlerts []BlockedAlert
var downtimedAlerts []BlockedAlert

for _, r := range results {
if r.error != nil {
Expand All @@ -277,6 +298,8 @@ func (a *Aggregator) aggregate(ctx context.Context, dashboard *config.Dashboard,
alerts = append(alerts, alert)
}

downtimeRules := a.downtimeRules(r.downtimes)

for _, al := range r.alerts {
labels := make(map[string]string)
for k, v := range al.Labels {
Expand Down Expand Up @@ -310,7 +333,15 @@ func (a *Aggregator) aggregate(ctx context.Context, dashboard *config.Dashboard,
html.HTML(`<form class="txtform" action="/alerts/`+alert.Id+`/silence" method="post"><button class="txtbtn" value="silence" type="submit">🔇</button></form>`))
}

if reason := a.allow(dashboard, alert); reason == "" {
if reason, downtimeIdx, ok := a.downtimed(alert, downtimeRules); ok {
downtimedAlerts = append(blockedAlerts, BlockedAlert{Alert: alert, Reason: reason})

downtime := r.downtimes[downtimeIdx]
alert.Labels["DowntimeStart"] = strconv.FormatInt(downtime.StartTime.Unix(), 10)
alert.Labels["DowntimeEnd"] = strconv.FormatInt(downtime.EndTime.Unix(), 10)
alert.Labels["DowntimeAuthor"] = downtime.Author
alert.Links = append(alert.Links, a.downtimeLinks(downtime.Comment)...)
} else if reason := a.allow(alert, dashboard); reason == "" {
alerts = append(alerts, alert)
} else {
blockedAlerts = append(blockedAlerts, BlockedAlert{Alert: alert, Reason: reason})
Expand All @@ -332,13 +363,18 @@ func (a *Aggregator) aggregate(ctx context.Context, dashboard *config.Dashboard,
return blockedAlerts[i].When < blockedAlerts[j].When
})

sort.Slice(downtimedAlerts, func(i, j int) bool {
return downtimedAlerts[i].When < downtimedAlerts[j].When
})

a.amu.Lock()
a.CheckTime = a.clock.Now()
a.current[dashboard.Name] = Aggregate{
CheckTime: a.CheckTime,
Alerts: alerts,
GroupedAlerts: alertGroups,
Blocked: blockedAlerts,
Downtimes: downtimedAlerts,
}
a.amu.Unlock()

Expand Down Expand Up @@ -449,29 +485,29 @@ func (a *Aggregator) Reconfigure(cfg *config.Config) {
}

// allow will match rules against the ruleset.
func (a *Aggregator) allow(dashboard *config.Dashboard, alert Alert) string {
reason := a.matchAlertWithReason(dashboard, alert)
func (a *Aggregator) allow(alert Alert, dashboard *config.Dashboard) string {
rule, _, matched := a.matchAlert(alert, dashboard.Filter)

switch dashboard.Mode {
case config.Including:
// Revert logic when the dashboard configuration is in `including` mode.
if reason == "" {
if !matched {
return "Unmatched"
} else {
return ""
}
case config.Excluding:
return reason
return rule.Description
}
panic("unknown mode: " + dashboard.Mode.String())
}

// matchAlertWithReason will match anything which does match against any of the
// matchAlert will match anything which does match against any of the
// configured rules.
func (a *Aggregator) matchAlertWithReason(dashboard *config.Dashboard, alert Alert) string {
func (a *Aggregator) matchAlert(alert Alert, rules []ruleengine.Rule) (ruleengine.Rule, int, bool) {
nextRule:
for _, rule := range dashboard.Filter {
matchers := make(map[string]config.RuleMatcher)
for idx, rule := range rules {
matchers := make(map[string]ruleengine.RuleMatcher)

// if it's a rule working on top level concepts:
if rule.What != nil && rule.What.MatchString(alert.What) {
Expand Down Expand Up @@ -508,11 +544,17 @@ nextRule:
}
}
if matchCount > 0 && matchCount == len(matchers) {
return rule.Description
return rule, idx, true
}
}

return ""
return ruleengine.Rule{}, -1, false
}

// downtimed reports whether alert matches any of the downtime-derived
// rules. On a match it returns the matched rule's description (shown to
// the user as the downtime reason) and the index of the matching rule,
// which corresponds to the originating downtime.
func (a *Aggregator) downtimed(alert Alert, rules []ruleengine.Rule) (string, int, bool) {
	rule, idx, ok := a.matchAlert(alert, rules)
	return rule.Description, idx, ok
}

func (a *Aggregator) Silence(ctx context.Context, alertId, user string) {
Expand Down
25 changes: 13 additions & 12 deletions pkg/aggregation/aggregator_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,14 @@ import (
"github.com/synyx/tuwat/pkg/config"
"github.com/synyx/tuwat/pkg/connectors"
"github.com/synyx/tuwat/pkg/log"
"github.com/synyx/tuwat/pkg/ruleengine"
)

func TestAggregation(t *testing.T) {
filter := config.Rule{
filter := ruleengine.Rule{
Description: "Ignore MRs",
Labels: map[string]config.RuleMatcher{
"Hostname": config.ParseRuleMatcher("~= gitlab"),
Labels: map[string]ruleengine.RuleMatcher{
"Hostname": ruleengine.ParseRuleMatcher("~= gitlab"),
},
}

Expand All @@ -28,10 +29,10 @@ func TestAggregation(t *testing.T) {
}

func TestGroupedAggregation(t *testing.T) {
filter := config.Rule{
filter := ruleengine.Rule{
Description: "Ignore MRs",
Labels: map[string]config.RuleMatcher{
"Hostname": config.ParseRuleMatcher("~= gitlab"),
Labels: map[string]ruleengine.RuleMatcher{
"Hostname": ruleengine.ParseRuleMatcher("~= gitlab"),
},
}

Expand All @@ -43,12 +44,12 @@ func TestGroupedAggregation(t *testing.T) {
}

func TestWhen(t *testing.T) {
filter := config.Rule{
filter := ruleengine.Rule{
Description: "Non-Escalated",
When: config.ParseRuleMatcher("< 86400"), // < 2d
What: config.ParseRuleMatcher(": Update"),
Labels: map[string]config.RuleMatcher{
"Type": config.ParseRuleMatcher("PullRequest"),
When: ruleengine.ParseRuleMatcher("< 86400"), // < 2d
What: ruleengine.ParseRuleMatcher(": Update"),
Labels: map[string]ruleengine.RuleMatcher{
"Type": ruleengine.ParseRuleMatcher("PullRequest"),
},
}

Expand All @@ -62,7 +63,7 @@ func TestWhen(t *testing.T) {
}
}

func aggregator(mode config.DashboardMode, groupAlerts bool, filters ...config.Rule) *Aggregator {
func aggregator(mode config.DashboardMode, groupAlerts bool, filters ...ruleengine.Rule) *Aggregator {
cfg, _ := config.NewConfiguration()
log.Initialize(cfg)

Expand Down
61 changes: 61 additions & 0 deletions pkg/aggregation/downtime.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
package aggregation

import (
"fmt"
html "html/template"
"strings"
"time"

"github.com/synyx/tuwat/pkg/connectors"
"github.com/synyx/tuwat/pkg/ruleengine"
)

// downtimeLinks renders every URL found in a downtime comment as a small
// anchor element, so the links can be shown next to the downtimed alert.
func (a *Aggregator) downtimeLinks(comment string) []html.HTML {
	var links []html.HTML
	for _, rawURL := range extractUrls(comment) {
		anchor := `<a href="` + rawURL + `">🔗</a>`
		links = append(links, html.HTML(anchor))
	}
	return links
}

// downtimeRules converts connector downtimes into rule-engine rules so the
// generic matcher can decide which alerts fall under an active downtime.
// The rule at index i corresponds to downtimes[i].
func (a *Aggregator) downtimeRules(downtimes []connectors.Downtime) []ruleengine.Rule {
	rules := make([]ruleengine.Rule, 0, len(downtimes))
	for _, downtime := range downtimes {
		rules = append(rules, ruleengine.Rule{
			// The description becomes the user-visible block reason.
			Description: a.downtimeDescription(downtime),
			Labels:      downtime.Matchers,
		})
	}

	return rules
}

// downtimeDescription builds the human-readable reason string for a
// downtime: "Downtimed <when>: <comment>", capped at 100 characters.
// URLs are stripped from the comment because downtimeLinks extracts and
// renders them separately.
func (a *Aggregator) downtimeDescription(dt connectors.Downtime) string {
	// Strip all URLs as those will be extracted and shown elsewhere
	comment := dt.Comment
	for _, u := range extractUrls(comment) {
		comment = strings.Replace(comment, u, "", 1)
	}

	description := fmt.Sprintf("Downtimed %s: %s", a.niceDate(dt.EndTime), comment)
	// Truncate on rune boundaries: slicing the byte string directly
	// (description[:99]) could cut a multi-byte UTF-8 character in half
	// and render as garbage in the UI.
	if runes := []rune(description); len(runes) > 100 {
		description = string(runes[:99]) + "…"
	}
	return description
}

// niceDate renders a human-friendly description of the instant t relative
// to the aggregator's clock: an absolute date beyond two days, otherwise a
// coarse elapsed duration, and an absolute "ended" timestamp as fallback.
//
// NOTE(review): when t lies in the future (elapsed < 0, e.g. the end time
// of a still-active downtime) this prints "ended …", which looks inverted
// against the "until"/"for" wording of the other branches — confirm intent.
func (a *Aggregator) niceDate(t time.Time) string {
	elapsed := a.clock.Now().Sub(t)
	switch {
	case elapsed > 48*time.Hour: // same bound as 2*time.Hour*24
		return t.Format("until 2006-01-02")
	case elapsed > 2*time.Hour:
		return fmt.Sprintf("for %.0fh", elapsed.Hours())
	case elapsed > 2*time.Minute:
		return fmt.Sprintf("for %.0fm", elapsed.Minutes())
	case elapsed > 0:
		return fmt.Sprintf("for %.0fs", elapsed.Seconds())
	default:
		return t.Format("ended 2006-01-02 15:04")
	}
}
71 changes: 71 additions & 0 deletions pkg/aggregation/urlextraction.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
package aggregation

/*
Regular expression for extracting URLs from https://github.com/mvdan/xurls

Copyright (c) 2015, Daniel Martí. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import (
"net/url"
"regexp"
)

const allowedUcsChar = "¡-ᙿᚁ-\u1fff\u200b-‧\u202a-\u202e‰-⁞\u2060-\u2fff、-\ud7ff豈-﷏ﷰ-\uffef𐀀-\U0001fffd𠀀-\U0002fffd𰀀-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e1000-\U000efffd"
const allowedUcsCharMinusPunctuation = "¢-¦¨-µ¸-¾À-ͽͿ-ΆΈ-ՙՠ-ֈ֊-ֿׁ-ׂׄ-ׇׅ-ײ\u05f5-؈؋؎-ؚ\u061cؠ-٩ٮ-ۓە-ۿ\u070e-߶ߺ-\u082f\u083f-\u085d\u085f-ॣ०-९ॱ-ৼ৾-ੵ\u0a77-૯૱-\u0c76౸-ಃಅ-ෳ\u0df5-๎๐-๙\u0e5c-༃༓༕-྄྆-࿏࿕-࿘\u0fdb-၉ၐ-ჺჼ-፟፩-᙭ᙯ-ᙿᚁ-ᛪᛮ-᜴\u1737-៓ៗ៛-\u17ff᠆᠋-\u1943᥆-\u1a1dᨠ-\u1a9fᪧ\u1aae-᭙᭡-᭼\u1b7f-\u1bfbᰀ-\u1c3a᱀-ᱽᲀ-Ჿ\u1cc8-᳔᳒-\u1fff\u200b-―‘-‟\u202a-\u202e‹-›‿-⁀⁄-⁆⁒⁔\u2060-\u2cf8⳽ⴀ-ⵯ\u2d71-ⷿ⸂-⸅⸉-⸊⸌-⸍⸗⸚⸜-⸝⸠-⸩ⸯ⸺-⸻⹀⹂⹐-⹑⹕-\u2fff〄-〼〾-ヺー-ꓽꔀ-ꘌꘐ-꙲ꙴ-꙽ꙿ-꛱\ua6f8-ꡳ\ua878-\ua8cd꣐-ꣷꣻꣽ-꤭ꤰ-\ua95eꥠ-꧀\ua9ce-\ua9ddꧠ-\uaa5bꩠ-ꫝꫠ-ꫯꫲ-ꯪ꯬-\ud7ff豈-﷏ﷰ-️︗-︘\ufe1a-︯︱-﹄﹇-﹈﹍-﹏\ufe53﹘-﹞﹢-\ufe67﹩\ufe6c-\uff00$(-)+-0-9<->A-[]-⦆「-」ヲ-\uffef𐀀-\U000100ff\U00010103-\U0001039e𐎠-𐏏𐏑-\U0001056e𐕰-\U00010856𐡘-\U0001091e𐤠-\U0001093e\U00010940-\U00010a4f\U00010a59-𐩾𐪀-𐫯\U00010af7-\U00010b38𐭀-\U00010b98\U00010b9d-𐽔\U00010f5a-𐾅\U00010f8a-𑁆\U0001104e-𑂺\U000110bd𑃂-𑄿𑅄-𑅳𑅶-𑇄𑇉-𑇌𑇎-𑇚𑇜\U000111e0-𑈷𑈾-𑊨\U000112aa-𑑊𑑐-𑑙\U0001145c𑑞-𑓅𑓇-𑗀𑗘-𑙀𑙄-\U0001165f\U0001166d-𑚸\U000116ba-𑜻𑜿-𑠺\U0001183c-𑥃\U00011947-𑧡𑧣-𑨾𑩇-𑪙𑪝\U00011aa3-\U00011aff\U00011b0a-𑱀\U00011c46-\U00011c6f𑱲-𑻶\U00011ef9-𑽂𑽐-\U00011ffe𒀀-\U0001246f\U00012475-𒿰\U00012ff3-\U00016a6d𖩰-𖫴\U00016af6-𖬶𖬼-𖭃𖭅-𖺖\U00016e9b-𖿡𖿣-𛲞\U0001bca0-𝪆\U0001da8c-\U0001e95d\U0001e960-\U0001fffd𠀀-\U0002fffd𰀀-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e1000-\U000efffd"

const (
unreservedChar = `a-zA-Z0-9\-._~`
endUnreservedChar = `a-zA-Z0-9\-_~`
midSubDelimChar = `!$&'*+,;=`
endSubDelimChar = `$&+=`
midIPathSegmentChar = unreservedChar + `%` + midSubDelimChar + `:@` + allowedUcsChar
endIPathSegmentChar = endUnreservedChar + `%` + endSubDelimChar + allowedUcsCharMinusPunctuation
iPrivateChar = `\x{E000}-\x{F8FF}\x{F0000}-\x{FFFFD}\x{100000}-\x{10FFFD}`
midIChar = `/?#\\` + midIPathSegmentChar + iPrivateChar
endIChar = `/#` + endIPathSegmentChar + iPrivateChar
wellParen = `\((?:[` + midIChar + `]|\([` + midIChar + `]*\))*\)`
wellBracket = `\[(?:[` + midIChar + `]|\[[` + midIChar + `]*\])*\]`
wellBrace = `\{(?:[` + midIChar + `]|\{[` + midIChar + `]*\})*\}`
wellAll = wellParen + `|` + wellBracket + `|` + wellBrace
pathCont = `(?:[` + midIChar + `]*(?:` + wellAll + `|[` + endIChar + `]))+`
schemes = `(?:(?i)(?:http|https)://)`
)

// urlRe matches http/https URLs. It is compiled once at package init:
// the pattern is enormous (it embeds the UCS character classes above), so
// recompiling it on every extractUrls call would be needlessly expensive.
// Longest() makes leftmost-longest matching apply, as the per-call code did.
var urlRe = func() *regexp.Regexp {
	re := regexp.MustCompile(schemes + pathCont)
	re.Longest()
	return re
}()

// extractUrls returns all http/https URLs found in text, in order of
// appearance. Matches that url.Parse rejects are silently dropped.
func extractUrls(text string) []string {
	var urls []string
	for _, match := range urlRe.FindAllString(text, -1) {
		if _, err := url.Parse(match); err == nil {
			urls = append(urls, match)
		}
	}
	return urls
}
Loading