Refactoring: migrate translations to singleton

It was using maps in each context, which would be merged between
contexts, then injected each time we needed a message to display.

It had a limitation on complicated operator setups: historical
information would be overridden by newer associations.
(e.g., that IP was for node0 yesterday, but now it's node1, so the
associations were overwritten and incorrect)

It also introduced complexity: it forced closures to be defined too
many times, maps had to be merged, it was harder to debug, and every
file started from an empty translation map.
Moreover, map iteration order is unspecified (effectively random), so it
could create hard-to-debug output variations on complex cases.

Now it is a singleton in the translate package, still using maps, but it
now associates an array of "units" storing the timestamp with each piece
of information.
It is protected by an RWMutex, because maps are not thread-safe. (there's
no parallel processing for now)

No regressions, and it passes "operator_ambiguous_ips_list_all_no_color"
where the old system failed.
It can now also be used as an easy-to-read source of information in
itself.
This commit is contained in:
Yoann La Cancellera
2023-10-26 11:36:34 +02:00
committed by Sveta Smirnova
parent e56fc45a05
commit 2091c1a1f0
36 changed files with 92907 additions and 1679 deletions

View File

@@ -27,7 +27,7 @@ func (c *conflicts) Run() error {
return err
}
ctxs := timeline.GetLatestUpdatedContextsByNodes()
ctxs := timeline.GetLatestContextsByNodes()
for _, ctx := range ctxs {
if len(ctx.Conflicts) == 0 {
continue

View File

@@ -5,7 +5,7 @@ import (
"fmt"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/regex"
"github.com/pkg/errors"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
)
type ctx struct {
@@ -18,15 +18,13 @@ func (c *ctx) Help() string {
func (c *ctx) Run() error {
if len(c.Paths) != 1 {
return errors.New("can only use 1 path at a time for ctx subcommand")
}
timeline, err := timelineFromPaths(c.Paths, regex.AllRegexes())
if err != nil {
return err
}
fmt.Println(translate.DBToJson())
for _, t := range timeline {
out, err := json.MarshalIndent(t[len(t)-1].Ctx, "", "\t")
if err != nil {

View File

@@ -22,9 +22,9 @@ func TimelineCLI(timeline types.Timeline, verbosity types.Verbosity) {
// to hold the current context for each node
// "keys" is needed, because iterating over a map must give a different order each time
// a slice keeps its order
keys, currentContext := initKeysContext(timeline) // currentcontext to follow when important thing changed
latestContext := timeline.GetLatestUpdatedContextsByNodes() // so that we have fully updated context when we print
lastContext := map[string]types.LogCtx{} // just to follow when important thing changed
keys, currentContext := initKeysContext(timeline) // currentcontext to follow when important thing changed
latestContext := timeline.GetLatestContextsByNodes() // so that we have fully updated context when we print
lastContext := map[string]types.LogCtx{} // just to follow when important thing changed
w := tabwriter.NewWriter(os.Stdout, 8, 8, 3, ' ', tabwriter.DiscardEmptyColumns)
defer w.Flush()

View File

@@ -6,6 +6,7 @@ import (
"os/exec"
"runtime"
"strings"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/regex"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
@@ -169,9 +170,9 @@ func sanitizeLine(s string) string {
func iterateOnGrepResults(path string, regexes types.RegexMap, grepStdout <-chan string) types.LocalTimeline {
var (
lt types.LocalTimeline
recentEnough bool
displayer types.LogDisplayer
lt types.LocalTimeline
displayer types.LogDisplayer
timestamp time.Time
)
ctx := types.NewLogCtx()
ctx.FilePath = path
@@ -182,18 +183,23 @@ func iterateOnGrepResults(path string, regexes types.RegexMap, grepStdout <-chan
var date *types.Date
t, layout, ok := regex.SearchDateFromLog(line)
if ok {
// diff between date and timestamp:
// timestamp is an internal usage to handle translations, it must be non-empty
// date is something that will be displayed ultimately, it can empty
date = types.NewDate(t, layout)
}
timestamp = t
} // else, keep the previous timestamp
// If it's recentEnough, it means we already validated a log: every next logs necessarily happened later
// this is useful because not every logs have a date attached, and some without date are very useful
if !recentEnough && CLI.Since != nil && (date == nil || (date != nil && CLI.Since.After(date.Time))) {
//if !recentEnough && CLI.Since != nil && (!foundDate || (foundDate && CLI.Since.After(date.Time))) {
if CLI.Since != nil && CLI.Since.After(timestamp) {
continue
}
if CLI.Until != nil && date != nil && CLI.Until.Before(date.Time) {
if CLI.Until != nil && CLI.Until.Before(timestamp) {
return lt
}
recentEnough = true
// recentEnough = true
filetype := regex.FileType(line, CLI.PxcOperator)
ctx.FileType = filetype
@@ -204,7 +210,7 @@ func iterateOnGrepResults(path string, regexes types.RegexMap, grepStdout <-chan
if !regex.Regex.MatchString(line) || utils.SliceContains(CLI.ExcludeRegexes, key) {
continue
}
ctx, displayer = regex.Handle(ctx, line)
ctx, displayer = regex.Handle(ctx, line, timestamp)
li := types.NewLogInfo(date, displayer, line, regex, key, ctx, filetype)
lt = lt.Add(li)
}

View File

@@ -5,6 +5,7 @@ import (
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/display"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/regex"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/pkg/errors"
)
@@ -48,6 +49,10 @@ func (l *list) Run() error {
return errors.Wrap(err, "could not list events")
}
if CLI.Verbosity == types.Debug {
fmt.Println(translate.DBToJson())
}
display.TimelineCLI(timeline, CLI.Verbosity)
return nil

View File

@@ -6,6 +6,7 @@ import (
"time"
"github.com/alecthomas/kong"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
"github.com/rs/zerolog"
@@ -66,6 +67,8 @@ func main() {
}
utils.SkipColor = CLI.NoColor
translate.AssumeIPStable = !CLI.PxcOperator
err := ctx.Run()
ctx.FatalIfErrorf(err)
}

View File

@@ -91,8 +91,15 @@ func TestMain(t *testing.T) {
cmd: []string{"list", "--all", "--pxc-operator", "--no-color"},
path: "tests/logs/operator_concurrent_ssts/*",
},
{
name: "operator_ambiguous_ips_list_all_no_color",
cmd: []string{"list", "--all", "--pxc-operator", "--no-color"},
path: "tests/logs/operator_ambiguous_ips/*",
},
}
TESTS:
for _, test := range tests {
filepaths, err := filepath.Glob(test.path)
if err != nil {
@@ -114,6 +121,7 @@ func TestMain(t *testing.T) {
if !cmp.Equal(out, expected) {
t.Errorf("%s: test %s failed: %s\nout: %s", toolname, test.name, strings.Join(test.cmd, " "), cmp.Diff(string(expected), string(out)))
continue TESTS
}
}
}

View File

@@ -2,6 +2,7 @@ package regex
import (
"regexp"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
@@ -16,7 +17,7 @@ var ApplicativeMap = types.RegexMap{
"RegexDesync": &types.LogRegex{
Regex: regexp.MustCompile("desyncs itself from group"),
InternalRegex: regexp.MustCompile("\\(" + regexNodeName + "\\) desyncs"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.Desynced = true
node := submatches[groupNodeName]
@@ -32,7 +33,7 @@ var ApplicativeMap = types.RegexMap{
"RegexResync": &types.LogRegex{
Regex: regexp.MustCompile("resyncs itself to group"),
InternalRegex: regexp.MustCompile("\\(" + regexNodeName + "\\) resyncs"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.Desynced = false
node := submatches[groupNodeName]
return ctx, func(ctx types.LogCtx) string {
@@ -47,7 +48,7 @@ var ApplicativeMap = types.RegexMap{
"RegexInconsistencyVoteInit": &types.LogRegex{
Regex: regexp.MustCompile("initiates vote on"),
InternalRegex: regexp.MustCompile("Member " + regexIdx + "\\(" + regexNodeName + "\\) initiates vote on " + regexUUID + ":" + regexSeqno + "," + regexErrorMD5 + ": (?P<error>.*), Error_code:"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
node := submatches[groupNodeName]
seqno := submatches[groupSeqno]
@@ -76,7 +77,7 @@ var ApplicativeMap = types.RegexMap{
"RegexInconsistencyVoteRespond": &types.LogRegex{
Regex: regexp.MustCompile("responds to vote on "),
InternalRegex: regexp.MustCompile("Member " + regexIdx + "\\(" + regexNodeName + "\\) responds to vote on " + regexUUID + ":" + regexSeqno + "," + regexErrorMD5 + ": (?P<error>.*)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
node := submatches[groupNodeName]
seqno := submatches[groupSeqno]
@@ -105,27 +106,31 @@ var ApplicativeMap = types.RegexMap{
},
},
//2023-10-20T03:33:48.155825Z 16 [ERROR] [MY-000000] [Galera] Vote 0 (success) on 7b1a6710-18da-11ed-b777-42b15728f657:8847713 is inconsistent with group. Leaving cluster.
// This one does not need to be variabilized
// percona-xtradb-cluster-galera/galera/src/replicator_smm.cpp:2405
// case 1: /* majority disagrees */
// msg << "Vote 0 (success) on " << gtid
// << " is inconsistent with group. Leaving cluster.";
// goto fail;
"RegexInconsistencyVoteInconsistentWithGroup": &types.LogRegex{
Regex: regexp.MustCompile("is inconsistent with group. Leaving cluster"),
InternalRegex: regexp.MustCompile("Vote [0-9] \\((?P<error>.*)\\) on " + regexUUID + ":" + regexSeqno + " is inconsistent with group. Leaving cluster"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
InternalRegex: regexp.MustCompile("Vote [0-9] \\(success\\) on " + regexUUID + ":" + regexSeqno + " is inconsistent with group. Leaving cluster"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
seqno := submatches[groupSeqno]
errorstring := submatches["error"]
latestConflict := ctx.Conflicts.ConflictWithSeqno(seqno)
if latestConflict == nil {
return ctx, nil
}
if len(ctx.OwnNames) > 0 {
latestConflict.VotePerNode[ctx.OwnNames[len(ctx.OwnNames)-1]] = types.ConflictVote{Error: errorstring}
latestConflict.VotePerNode[ctx.OwnNames[len(ctx.OwnNames)-1]] = types.ConflictVote{Error: "Success", MD5: "0000000000000000"}
}
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "vote inconsistent, leaving cluster"))
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "vote (success) inconsistent, leaving cluster"))
},
},
"RegexInconsistencyVoted": &types.LogRegex{
Regex: regexp.MustCompile("Inconsistency detected: Inconsistent by consensus"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "found inconsistent by vote"))
},
},
@@ -133,7 +138,7 @@ var ApplicativeMap = types.RegexMap{
"RegexInconsistencyWinner": &types.LogRegex{
Regex: regexp.MustCompile("Winner: "),
InternalRegex: regexp.MustCompile("Winner: " + regexErrorMD5),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
errormd5 := submatches[groupErrorMD5]
@@ -175,7 +180,7 @@ var ApplicativeMap = types.RegexMap{
"RegexInconsistencyRecovery": &types.LogRegex{
Regex: regexp.MustCompile("Recovering vote result from history"),
InternalRegex: regexp.MustCompile("Recovering vote result from history: " + regexUUID + ":" + regexSeqno + "," + regexErrorMD5),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
if len(ctx.OwnNames) == 0 {
return ctx, nil
}

View File

@@ -0,0 +1,165 @@
package regex
import (
"testing"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
// TestApplicativeRegex is a table-driven test for ApplicativeMap.
// Each case selects one regex handler by `key` and runs it against `log`,
// optionally seeding the starting context via `input`. It then asserts the
// mutated context (`expected`) and the rendered human-readable message
// (`expectedOut`). Cases with `displayerExpectedNil` expect the handler to
// return no displayer at all. The shared driver is iterateRegexTest.
func TestApplicativeRegex(t *testing.T) {
	tests := []regexTest{
		// RegexDesync: a member desyncing flips ctx.Desynced to true.
		{
			log: "2001-01-01 1:01:01 0 [Note] WSREP: Member 0.0 (node) desyncs itself from group",
			expected: regexTestState{
				Ctx: types.LogCtx{Desynced: true},
			},
			expectedOut: "node desyncs itself from group",
			key:         "RegexDesync",
		},
		// RegexResync: resync clears a previously-set Desynced flag.
		{
			log: "2001-01-01 1:01:01 0 [Note] WSREP: Member 0.0 (node) resyncs itself to group",
			expected: regexTestState{
				Ctx: types.LogCtx{Desynced: false},
			},
			input: regexTestState{
				Ctx: types.LogCtx{Desynced: true},
			},
			expectedOut: "node resyncs itself to group",
			key:         "RegexResync",
		},
		// RegexInconsistencyVoteInit, observed from a third node: a new
		// Conflict is recorded with the initiator's vote.
		{
			log:         "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Member 1(node1) initiates vote on 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,bdb2b9234ae75cb3: some error, Error_code: 123;\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			expectedOut: "inconsistency vote started by node1(seqno:20)",
			expected: regexTestState{
				Ctx: types.LogCtx{Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			key: "RegexInconsistencyVoteInit",
		},
		// Same init log, but seen from the initiating node itself
		// (OwnNames matches), so the message omits the node name.
		{
			log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Member 1(node1) initiates vote on 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,bdb2b9234ae75cb3: some error, Error_code: 123;\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expectedOut: "inconsistency vote started(seqno:20)",
			key:         "RegexInconsistencyVoteInit",
		},
		// RegexInconsistencyVoteRespond: node2 votes Success (all-zero MD5)
		// on the existing seqno-20 conflict.
		{
			log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Member 2(node2) responds to vote on 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,0000000000000000: Success\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "0000000000000000", Error: "Success"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): voted Success",
			key:         "RegexInconsistencyVoteRespond",
		},
		// Respond with the same MD5 as the initiator: "voted same error".
		{
			log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Member 2(node2) responds to vote on 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,bdb2b9234ae75cb3: some error\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): voted same error",
			key:         "RegexInconsistencyVoteRespond",
		},
		{
			// could not actually find a "responds to" with any error for now
			log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Member 2(node2) responds to vote on 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,ed9774a3cad44656: some different error\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "ed9774a3cad44656", Error: "some different error"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): voted different error",
			key:         "RegexInconsistencyVoteRespond",
		},
		// RegexInconsistencyVoted: plain message, no context mutation asserted.
		{
			log:         "{\"log\":\"2001-01-01T01:01:01.000000Z 1 [ERROR] [MY-000000] [Galera] Inconsistency detected: Inconsistent by consensus on 8c9b5610-e020-11ed-a5ea-e253cc5f629d:127\n\t at galera/src/replicator_smm.cpp:process_apply_error():1469\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			expectedOut: "found inconsistent by vote",
			key:         "RegexInconsistencyVoted",
		},
		// RegexInconsistencyWinner: winning MD5 matches our own vote -> "won".
		{
			log: "Winner: bdb2b9234ae75cb3",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): won",
			key:         "RegexInconsistencyWinner",
		},
		// Winning MD5 differs from our own vote -> "lost".
		{
			log: "Winner: 0000000000000000",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "0000000000000000", Error: "Success"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "0000000000000000", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "0000000000000000", Error: "Success"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): lost",
			key:         "RegexInconsistencyWinner",
		},
		{
			name: "already voted conflict, should not print anything",
			log:  "Winner: 0000000000000000",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node1"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			displayerExpectedNil: true,
			key:                  "RegexInconsistencyWinner",
		},
		// RegexInconsistencyRecovery: the recovered vote (node2) is stored
		// with its MD5 only — no Error string in the ConflictVote.
		{
			log: "{\"log\":\"2001-01-01T01:01:01.000000Z 1 [ERROR] [MY-000000] [Galera] Recovering vote result from history: 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,bdb2b9234ae75cb3\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "bdb2b9234ae75cb3"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): voted same error",
			key:         "RegexInconsistencyRecovery",
		},
		// Recovery with the all-zero MD5 reads as a Success vote.
		{
			log: "{\"log\":\"2001-01-01T01:01:01.000000Z 1 [ERROR] [MY-000000] [Galera] Recovering vote result from history: 8c9b5610-e020-11ed-a5ea-e253cc5f629d:20,0000000000000000\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{MD5: "0000000000000000"}}}}},
			},
			expectedOut: "consistency vote(seqno:20): voted Success",
			key:         "RegexInconsistencyRecovery",
		},
		// RegexInconsistencyVoteInconsistentWithGroup: the node records its
		// own implicit Success vote before leaving the cluster.
		{
			log: "2001-01-01T01:01:01.000000Z 16 [ERROR] [MY-000000] [Galera] Vote 0 (success) on 7b1a6710-18da-11ed-b777-42b15728f657:20 is inconsistent with group. Leaving cluster.",
			input: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}}}}},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{OwnNames: []string{"node2"}, Conflicts: types.Conflicts{&types.Conflict{InitiatedBy: []string{"node1"}, Winner: "bdb2b9234ae75cb3", Seqno: "20", VotePerNode: map[string]types.ConflictVote{"node1": types.ConflictVote{MD5: "bdb2b9234ae75cb3", Error: "some error"}, "node2": types.ConflictVote{Error: "Success", MD5: "0000000000000000"}}}}},
			},
			expectedOut: "vote (success) inconsistent, leaving cluster",
			key:         "RegexInconsistencyVoteInconsistentWithGroup",
		},
	}

	iterateRegexTest(t, ApplicativeMap, tests)
}

View File

@@ -3,6 +3,7 @@ package regex
import (
"regexp"
"strings"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
@@ -16,7 +17,7 @@ var EventsMap = types.RegexMap{
"RegexStarting": &types.LogRegex{
Regex: regexp.MustCompile("starting as process"),
InternalRegex: regexp.MustCompile("\\(mysqld " + regexVersion + ".*\\)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.Version = submatches[groupVersion]
msg := "starting(" + ctx.Version
@@ -31,7 +32,7 @@ var EventsMap = types.RegexMap{
},
"RegexShutdownComplete": &types.LogRegex{
Regex: regexp.MustCompile("mysqld: Shutdown complete"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "shutdown complete"))
@@ -39,7 +40,7 @@ var EventsMap = types.RegexMap{
},
"RegexTerminated": &types.LogRegex{
Regex: regexp.MustCompile("mysqld: Terminated"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "terminated"))
@@ -47,21 +48,21 @@ var EventsMap = types.RegexMap{
},
"RegexGotSignal6": &types.LogRegex{
Regex: regexp.MustCompile("mysqld got signal 6"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "crash: got signal 6"))
},
},
"RegexGotSignal11": &types.LogRegex{
Regex: regexp.MustCompile("mysqld got signal 11"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "crash: got signal 11"))
},
},
"RegexShutdownSignal": &types.LogRegex{
Regex: regexp.MustCompile("Normal|Received shutdown"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "received shutdown"))
@@ -72,7 +73,7 @@ var EventsMap = types.RegexMap{
"RegexAborting": &types.LogRegex{
Regex: regexp.MustCompile("Aborting"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "ABORTING"))
@@ -81,7 +82,7 @@ var EventsMap = types.RegexMap{
"RegexWsrepLoad": &types.LogRegex{
Regex: regexp.MustCompile("wsrep_load\\(\\): loading provider library"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("OPEN")
if regexWsrepLoadNone.MatchString(log) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.GreenText, "started(standalone)"))
@@ -92,7 +93,7 @@ var EventsMap = types.RegexMap{
"RegexWsrepRecovery": &types.LogRegex{
// INFO: WSREP: Recovered position 00000000-0000-0000-0000-000000000000:-1
Regex: regexp.MustCompile("Recovered position"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
msg := "wsrep recovery"
// if state is joiner, it can be due to sst
@@ -108,7 +109,7 @@ var EventsMap = types.RegexMap{
"RegexUnknownConf": &types.LogRegex{
Regex: regexp.MustCompile("unknown variable"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
split := strings.Split(log, "'")
v := "?"
if len(split) > 0 {
@@ -123,7 +124,7 @@ var EventsMap = types.RegexMap{
"RegexAssertionFailure": &types.LogRegex{
Regex: regexp.MustCompile("Assertion failure"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "ASSERTION FAILURE"))
@@ -131,7 +132,7 @@ var EventsMap = types.RegexMap{
},
"RegexBindAddressAlreadyUsed": &types.LogRegex{
Regex: regexp.MustCompile("asio error .bind: Address already in use"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "bind address already used"))
@@ -139,7 +140,7 @@ var EventsMap = types.RegexMap{
},
"RegexTooManyConnections": &types.LogRegex{
Regex: regexp.MustCompile("Too many connections"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "too many connections"))
},
},
@@ -147,7 +148,7 @@ var EventsMap = types.RegexMap{
"RegexReversingHistory": &types.LogRegex{
Regex: regexp.MustCompile("Reversing history"),
InternalRegex: regexp.MustCompile("Reversing history: " + regexSeqno + " -> [0-9]*, this member has applied (?P<diff>[0-9]*) more events than the primary component"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.BrightRedText, "having "+submatches["diff"]+" more events than the other nodes, data loss possible"))
},

View File

@@ -0,0 +1,292 @@
package regex
import (
"testing"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
// TestEventsRegex exercises every entry of EventsMap through the shared
// table-driven harness (iterateRegexTest, defined elsewhere in this package —
// presumably it matches `log` against the regex named by `key`, runs its
// Handler, and compares resulting context/state and displayer output).
// Each fixture pins: the raw log line, optional pre-existing state (`input`),
// the expected post-handler state (`expected`), and the rendered message
// (`expectedOut`). `displayerExpectedNil` marks lines the regex must reject.
func TestEventsRegex(t *testing.T) {
tests := []regexTest{
// RegexStarting: version extraction from the mysqld startup banner,
// across PXC 8.0.x, 5.7.x, MariaDB, enterprise, and operator (JSON) formats.
{
name: "8.0.30-22",
log: "2001-01-01T01:01:01.000000Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.0.30-22) starting as process 1",
expected: regexTestState{
Ctx: types.LogCtx{Version: "8.0.30"},
State: "OPEN",
},
expectedOut: "starting(8.0.30)",
key: "RegexStarting",
},
{
name: "8.0.2-22",
log: "2001-01-01T01:01:01.000000Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.0.2-22) starting as process 1",
expected: regexTestState{
Ctx: types.LogCtx{Version: "8.0.2"},
State: "OPEN",
},
expectedOut: "starting(8.0.2)",
key: "RegexStarting",
},
{
name: "5.7.31-34-log",
log: "2001-01-01T01:01:01.000000Z 0 [Note] /usr/sbin/mysqld (mysqld 5.7.31-34-log) starting as process 2 ...",
expected: regexTestState{
Ctx: types.LogCtx{Version: "5.7.31"},
State: "OPEN",
},
expectedOut: "starting(5.7.31)",
key: "RegexStarting",
},
{
name: "10.4.25-MariaDB-log",
log: "2001-01-01 01:01:01 0 [Note] /usr/sbin/mysqld (mysqld 10.4.25-MariaDB-log) starting as process 2 ...",
expected: regexTestState{
Ctx: types.LogCtx{Version: "10.4.25"},
State: "OPEN",
},
expectedOut: "starting(10.4.25)",
key: "RegexStarting",
},
{
name: "10.2.31-MariaDB-1:10.2.31+maria~bionic-log",
log: "2001-01-01 01:01:01 0 [Note] /usr/sbin/mysqld (mysqld 10.2.31-MariaDB-1:10.2.31+maria~bionic-log) starting as process 2 ...",
expected: regexTestState{
Ctx: types.LogCtx{Version: "10.2.31"},
State: "OPEN",
},
expectedOut: "starting(10.2.31)",
key: "RegexStarting",
},
{
name: "5.7.28-enterprise-commercial-advanced-log",
log: "2001-01-01T01:01:01.000000Z 0 [Note] /usr/sbin/mysqld (mysqld 5.7.28-enterprise-commercial-advanced-log) starting as process 2 ...",
expected: regexTestState{
Ctx: types.LogCtx{Version: "5.7.28"},
State: "OPEN",
},
expectedOut: "starting(5.7.28)",
key: "RegexStarting",
},
{
// operator logs are wrapped in a JSON envelope by the k8s log collector
name: "8.0.30 operator",
log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.0.30-22.1) starting as process 1\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
expected: regexTestState{
Ctx: types.LogCtx{Version: "8.0.30"},
State: "OPEN",
},
expectedOut: "starting(8.0.30)",
key: "RegexStarting",
},
// implausible versions must NOT match (displayer stays nil)
{
name: "wrong version 7.0.0",
log: "2001-01-01T01:01:01.000000Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 7.0.0-22) starting as process 1",
displayerExpectedNil: true,
key: "RegexStarting",
},
{
name: "wrong version 8.12.0",
log: "2001-01-01T01:01:01.000000Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.12.0-22) starting as process 1",
displayerExpectedNil: true,
key: "RegexStarting",
},
{
// starting while already OPEN: previous shutdown was never observed,
// so the displayer flags it
name: "could not catch how it stopped",
log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.0.30-22.1) starting as process 1\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
expected: regexTestState{
Ctx: types.LogCtx{Version: "8.0.30"},
State: "OPEN",
},
input: regexTestState{
State: "OPEN",
},
expectedOut: "starting(8.0.30, could not catch how/when it stopped)",
key: "RegexStarting",
},
// shutdown / termination / crash events all transition to CLOSED
{
log: "2001-01-01T01:01:01.000000Z 0 [System] [MY-010910] [Server] /usr/sbin/mysqld: Shutdown complete (mysqld 8.0.23-14.1) Percona XtraDB Cluster (GPL), Release rel14, Revision d3b9a1d, WSREP version 26.4.3.",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "shutdown complete",
key: "RegexShutdownComplete",
},
{
log: "2001-01-01 01:01:01 140430087788288 [Note] WSREP: /opt/rh-mariadb102/root/usr/libexec/mysqld: Terminated.",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "terminated",
key: "RegexTerminated",
},
{
log: "2001-01-01T01:01:01.000000Z 8 [Note] WSREP: /usr/sbin/mysqld: Terminated.",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "terminated",
key: "RegexTerminated",
},
{
log: "01:01:01 UTC - mysqld got signal 6 ;",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "crash: got signal 6",
key: "RegexGotSignal6",
},
{
log: "01:01:01 UTC - mysqld got signal 11 ;",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "crash: got signal 11",
key: "RegexGotSignal11",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [WSREP] Received shutdown signal. Will sleep for 10 secs before initiating shutdown. pxc_maint_mode switched to SHUTDOWN",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "received shutdown",
key: "RegexShutdownSignal",
},
{
log: "2001-01-01 01:01:01 139688443508480 [Note] /opt/rh-mariadb102/root/usr/libexec/mysqld (unknown): Normal shutdown",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "received shutdown",
key: "RegexShutdownSignal",
},
{
log: "2001-01-01 1:01:01 0 [Note] /usr/sbin/mariadbd (initiated by: unknown): Normal shutdown",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "received shutdown",
key: "RegexShutdownSignal",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-010119] [Server] Aborting",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "ABORTING",
key: "RegexAborting",
},
// wsrep provider load: distinguishes cluster mode from standalone ('none')
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] wsrep_load(): loading provider library '/usr/lib64/galera4/libgalera_smm.so'",
expected: regexTestState{
State: "OPEN",
},
expectedOut: "started(cluster)",
key: "RegexWsrepLoad",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] wsrep_load(): loading provider library 'none'",
expected: regexTestState{
State: "OPEN",
},
expectedOut: "started(standalone)",
key: "RegexWsrepLoad",
},
{
log: "2001-01-01 01:01:01 140557650536640 [Note] WSREP: wsrep_load(): loading provider library '/opt/rh-mariadb102/root/usr/lib64/galera/libgalera_smm.so'",
expected: regexTestState{
State: "OPEN",
},
expectedOut: "started(cluster)",
key: "RegexWsrepLoad",
},
// wsrep position recovery transitions to the dedicated RECOVERY state
{
log: "2001-01-01T01:01:01.000000Z 3 [Note] [MY-000000] [Galera] Recovered position from storage: 7780bb61-87cf-11eb-b53b-6a7c64b0fee3:23506640",
expected: regexTestState{
State: "RECOVERY",
},
expectedOut: "wsrep recovery",
key: "RegexWsrepRecovery",
},
{
log: " INFO: WSREP: Recovered position 9a4db4a5-5cf1-11ec-940d-6ba8c5905c02:30",
expected: regexTestState{
State: "RECOVERY",
},
expectedOut: "wsrep recovery",
key: "RegexWsrepRecovery",
},
{
log: " INFO: WSREP: Recovered position 00000000-0000-0000-0000-000000000000:-1",
expected: regexTestState{
State: "RECOVERY",
},
expectedOut: "wsrep recovery",
key: "RegexWsrepRecovery",
},
{
// coming from OPEN: no extra annotation expected
name: "not unknown",
log: " INFO: WSREP: Recovered position 00000000-0000-0000-0000-000000000000:-1",
expected: regexTestState{
State: "RECOVERY",
},
input: regexTestState{
State: "OPEN",
},
expectedOut: "wsrep recovery",
key: "RegexWsrepRecovery",
},
{
// coming from SYNCED: the stop was never observed, displayer says so
name: "could not catch how it stopped",
log: " INFO: WSREP: Recovered position 00000000-0000-0000-0000-000000000000:-1",
expected: regexTestState{
State: "RECOVERY",
},
input: regexTestState{
State: "SYNCED",
},
expectedOut: "wsrep recovery(could not catch how/when it stopped)",
key: "RegexWsrepRecovery",
},
{
// long variable names are truncated in the output message
log: "2001-01-01T01:01:01.045425-05:00 0 [ERROR] unknown variable 'validate_password_length=8'",
expectedOut: "unknown variable: validate_password_le...",
key: "RegexUnknownConf",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-013183] [InnoDB] Assertion failure: btr0cur.cc:296:btr_page_get_prev(get_block->frame, mtr) == page_get_page_no(page) thread 139538894652992",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "ASSERTION FAILURE",
key: "RegexAssertionFailure",
},
{
log: "2001-01-01 5:06:12 47285568576576 [ERROR] WSREP: failed to open gcomm backend connection: 98: error while trying to listen 'tcp://0.0.0.0:4567?socket.non_blocking=1', asio error 'bind: Address already in use': 98 (Address already in use)",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "bind address already used",
key: "RegexBindAddressAlreadyUsed",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-000000] [Galera] gcs/src/gcs_group.cpp:group_post_state_exchange():431: Reversing history: 150 -> 10, this member has applied 140 more events than the primary component.Data loss is possible. Must abort.",
expectedOut: "having 140 more events than the other nodes, data loss possible",
key: "RegexReversingHistory",
},
}
iterateRegexTest(t, EventsMap, tests)
}

View File

@@ -3,7 +3,9 @@ package regex
import (
"regexp"
"strconv"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
)
@@ -18,10 +20,10 @@ var IdentsMap = types.RegexMap{
"RegexSourceNode": &types.LogRegex{
Regex: regexp.MustCompile("(local endpoint for a connection, blacklisting address)|(points to own listening address, blacklisting)"),
InternalRegex: regexp.MustCompile("\\(" + regexNodeHash + ", '.+'\\).+" + regexNodeIPMethod),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ip := submatches[groupNodeIP]
ctx.AddOwnIP(ip)
ctx.AddOwnIP(ip, date)
return ctx, types.SimpleDisplayer(ip + " is local")
},
Verbosity: types.DebugMySQL,
@@ -31,10 +33,10 @@ var IdentsMap = types.RegexMap{
"RegexBaseHost": &types.LogRegex{
Regex: regexp.MustCompile("base_host"),
InternalRegex: regexp.MustCompile("base_host = " + regexNodeIP),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ip := submatches[groupNodeIP]
ctx.AddOwnIP(ip)
ctx.AddOwnIP(ip, date)
return ctx, types.SimpleDisplayer(ctx.OwnIPs[len(ctx.OwnIPs)-1] + " is local")
},
Verbosity: types.DebugMySQL,
@@ -46,25 +48,24 @@ var IdentsMap = types.RegexMap{
"RegexMemberAssociations": &types.LogRegex{
Regex: regexp.MustCompile("[0-9]: [a-z0-9]+-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]+, [a-zA-Z0-9-_]+"),
InternalRegex: regexp.MustCompile(regexIdx + ": " + regexUUID + ", " + regexNodeName),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
idx := submatches[groupIdx]
hash := submatches[groupUUID]
hash := utils.UUIDToShortUUID(submatches[groupUUID])
nodename := utils.ShortNodeName(submatches[groupNodeName])
// nodenames are truncated after 32 characters ...
if len(nodename) == 31 {
return ctx, nil
}
shorthash := utils.UUIDToShortUUID(hash)
ctx.HashToNodeName[shorthash] = nodename
translate.AddHashToNodeName(hash, nodename, date)
if ctx.MyIdx == idx && (ctx.IsPrimary() || ctx.MemberCount == 1) {
ctx.AddOwnHash(shorthash)
ctx.AddOwnName(nodename)
ctx.AddOwnHash(hash, date)
ctx.AddOwnName(nodename, date)
}
return ctx, types.SimpleDisplayer(shorthash + " is " + nodename)
return ctx, types.SimpleDisplayer(hash + " is " + nodename)
},
Verbosity: types.DebugMySQL,
},
@@ -72,7 +73,7 @@ var IdentsMap = types.RegexMap{
"RegexMemberCount": &types.LogRegex{
Regex: regexp.MustCompile("members.[0-9]+.:"),
InternalRegex: regexp.MustCompile(regexMembers),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
members := submatches[groupMembers]
@@ -91,13 +92,13 @@ var IdentsMap = types.RegexMap{
"RegexOwnUUID": &types.LogRegex{
Regex: regexp.MustCompile("My UUID"),
InternalRegex: regexp.MustCompile("My UUID: " + regexUUID),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
shorthash := utils.UUIDToShortUUID(submatches[groupUUID])
hash := utils.UUIDToShortUUID(submatches[groupUUID])
ctx.AddOwnHash(shorthash)
ctx.AddOwnHash(hash, date)
return ctx, types.SimpleDisplayer(shorthash + " is local")
return ctx, types.SimpleDisplayer(hash + " is local")
},
Verbosity: types.DebugMySQL,
},
@@ -106,10 +107,10 @@ var IdentsMap = types.RegexMap{
"RegexOwnUUIDFromMessageRelay": &types.LogRegex{
Regex: regexp.MustCompile("turning message relay requesting"),
InternalRegex: regexp.MustCompile("\\(" + regexNodeHash + ", '" + regexNodeIPMethod + "'\\)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
hash := submatches[groupNodeHash]
ctx.AddOwnHash(hash)
ctx.AddOwnHash(hash, date)
return ctx, types.SimpleDisplayer(hash + " is local")
},
@@ -120,7 +121,7 @@ var IdentsMap = types.RegexMap{
"RegexMyIDXFromComponent": &types.LogRegex{
Regex: regexp.MustCompile("New COMPONENT:"),
InternalRegex: regexp.MustCompile("New COMPONENT:.*my_idx = " + regexIdx),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
idx := submatches[groupIdx]
ctx.MyIdx = idx
@@ -153,7 +154,7 @@ var IdentsMap = types.RegexMap{
"RegexOwnNameFromStateExchange": &types.LogRegex{
Regex: regexp.MustCompile("STATE EXCHANGE: got state msg"),
InternalRegex: regexp.MustCompile("STATE EXCHANGE:.* from " + regexIdx + " \\(" + regexNodeName + "\\)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
r, err := internalRegexSubmatch(internalRegex, log)
if err != nil {
return ctx, nil
@@ -182,8 +183,8 @@ func init_add_regexes() {
IdentsMap["RegexOwnUUIDFromEstablished"] = &types.LogRegex{
Regex: regexp.MustCompile("connection established to"),
InternalRegex: IdentsMap["RegexOwnUUIDFromMessageRelay"].InternalRegex,
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
return IdentsMap["RegexOwnUUIDFromMessageRelay"].Handler(submatches, ctx, log)
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return IdentsMap["RegexOwnUUIDFromMessageRelay"].Handler(submatches, ctx, log, date)
},
Verbosity: types.DebugMySQL,
}
@@ -191,8 +192,8 @@ func init_add_regexes() {
IdentsMap["RegexOwnIndexFromView"] = &types.LogRegex{
Regex: regexp.MustCompile("own_index:"),
InternalRegex: regexp.MustCompile("own_index: " + regexIdx),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
return IdentsMap["RegexMyIDXFromComponent"].Handler(submatches, ctx, log)
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return IdentsMap["RegexMyIDXFromComponent"].Handler(submatches, ctx, log, date)
},
Verbosity: types.DebugMySQL,
}
@@ -203,7 +204,7 @@ func init_add_regexes() {
IdentsMap["RegexMyIDXFromClusterView"] = &types.LogRegex{
Regex: regexp.MustCompile("New cluster view:"),
InternalRegex: regexp.MustCompile("New cluster view:.*my index: " + regexIdx + ","),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return IdentsMap["RegexMyIDXFromComponent"].Handler(internalRegex, ctx, log)
},
Verbosity: types.DebugMySQL,

View File

@@ -0,0 +1,270 @@
package regex
import (
"testing"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
// TestIdentsRegex exercises IdentsMap — the regexes that learn a node's own
// identity (IP, UUID/hash, name, cluster index) — through the shared
// table-driven harness (iterateRegexTest, defined elsewhere in this package).
// Besides the usual Ctx expectations, fixtures also pin HashToNodeNames, which
// checks the hash→name associations recorded in the translate singleton
// (presumably compared by the harness; verify against regexTestState's definition).
func TestIdentsRegex(t *testing.T) {
tests := []regexTest{
// own IP discovered from gcomm blacklisting of the local endpoint
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] (90002222-1111, 'ssl://0.0.0.0:4567') Found matching local endpoint for a connection, blacklisting address ssl://127.0.0.1:4567",
expected: regexTestState{
Ctx: types.LogCtx{OwnIPs: []string{"127.0.0.1"}},
},
expectedOut: "127.0.0.1 is local",
key: "RegexSourceNode",
},
// own IP discovered from the GCS config dump (base_host)
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Passing config to GCS: base_dir = /var/lib/mysql/; base_host = 127.0.0.1; base_port = 4567; cert.log_conflicts = no; cert.optimistic_pa = no; debug = no; evs.auto_evict = 0; evs.delay_margin = PT1S; evs.delayed_keep_period = PT30S; evs.inactive_check_period = PT0.5S; evs.inactive_timeout = PT15S; evs.join_retrans_period = PT1S; evs.max_install_timeouts = 3; evs.send_window = 10; evs.stats_report_period = PT1M; evs.suspect_timeout = PT5S; evs.user_send_window = 4; evs.view_forget_timeout = PT24H; gcache.dir = /data/mysql/; gcache.freeze_purge_at_seqno = -1; gcache.keep_pages_count = 0; gcache.keep_pages_size = 0; gcache.mem_size = 0; gcache.name = galera.cache; gcache.page_size = 128M; gcache.recover = yes; gcache.size = 128M; gcomm.thread_prio = ; gcs.fc_debug = 0; gcs.fc_factor = 1.0; gcs.fc_limit = 100; gcs.fc_master_slave = no; gcs.max_packet_size = 64500; gcs.max_throttle = 0.25; gcs.recv_q_hard_limit = 9223372036854775807; gcs.recv_q_soft_limit = 0.25; gcs.sync_donor = no; gmcast.segment = 0; gmcast.version = 0; pc.announce_timeout = PT3S; pc.checksum = false; pc.ignore_quorum = false; pc.ignore_sb = false; pc.npvo = false; pc.recovery = true; pc.version = 0; pc.wait_prim = true; pc.wait_prim_timeout = PT30S; pc.weight = 1; protonet.backend = asio; protonet.version = 0; repl.causal_read_timeout = PT30S; repl.commit_order = 3; repl.key_format = FLAT8; repl.max_ws_size = 2147483647; repl.proto_max = 10; socket.checksum = 2; socket.recv_buf_size = auto; socket.send_buf_size = auto; socket.ssl_ca = ca.pem; socket.ssl_cert = server-cert.pem; socket.ssl_cipher = ; socket.ssl_compression = YES; socket.ssl_key = server-key.pem;",
expected: regexTestState{
Ctx: types.LogCtx{OwnIPs: []string{"127.0.0.1"}},
},
expectedOut: "127.0.0.1 is local",
key: "RegexBaseHost",
},
// member association lines: when MyIdx matches the listed index and the
// node is PRIMARY (or alone), the hash/name are claimed as our own
{
log: " 0: 015702fc-32f5-11ed-a4ca-267f97316394, node1",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 1,
OwnHashes: []string{},
OwnNames: []string{},
},
HashToNodeNames: map[string]string{},
State: "PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 1,
OwnHashes: []string{"015702fc-a4ca"},
OwnNames: []string{"node1"},
},
HashToNodeNames: map[string]string{"015702fc-a4ca": "node1"},
State: "PRIMARY",
},
expectedOut: "015702fc-a4ca is node1",
key: "RegexMemberAssociations",
},
{
// single-member NON-PRIMARY: still claimed because MemberCount == 1
log: " 0: 015702fc-32f5-11ed-a4ca-267f97316394, node1",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 1,
OwnHashes: []string{},
OwnNames: []string{},
},
HashToNodeNames: map[string]string{},
State: "NON-PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 1,
OwnHashes: []string{"015702fc-a4ca"},
OwnNames: []string{"node1"},
},
HashToNodeNames: map[string]string{"015702fc-a4ca": "node1"},
State: "NON-PRIMARY",
},
expectedOut: "015702fc-a4ca is node1",
key: "RegexMemberAssociations",
},
{
// multi-member NON-PRIMARY: association recorded but NOT claimed as own
log: " 0: 015702fc-32f5-11ed-a4ca-267f97316394, node1",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 2,
},
HashToNodeNames: map[string]string{},
State: "NON-PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 2,
},
HashToNodeNames: map[string]string{"015702fc-a4ca": "node1"},
State: "NON-PRIMARY",
},
expectedOut: "015702fc-a4ca is node1",
key: "RegexMemberAssociations",
},
{
log: " 1: 015702fc-32f5-11ed-a4ca-267f97316394, node1",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
OwnHashes: []string{},
OwnNames: []string{},
},
HashToNodeNames: map[string]string{},
State: "PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
OwnHashes: []string{"015702fc-a4ca"},
OwnNames: []string{"node1"},
},
HashToNodeNames: map[string]string{"015702fc-a4ca": "node1"},
State: "PRIMARY",
},
expectedOut: "015702fc-a4ca is node1",
key: "RegexMemberAssociations",
},
{
// index mismatch (MyIdx=1, line idx=0): association only, not claimed
log: " 0: 015702fc-32f5-11ed-a4ca-267f97316394, node1",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
},
HashToNodeNames: map[string]string{},
State: "PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
},
HashToNodeNames: map[string]string{"015702fc-a4ca": "node1"},
State: "PRIMARY",
},
expectedOut: "015702fc-a4ca is node1",
key: "RegexMemberAssociations",
},
{
// FQDN is shortened to its first label
log: " 0: 015702fc-32f5-11ed-a4ca-267f97316394, node1.with.complete.fqdn",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
},
HashToNodeNames: map[string]string{},
State: "PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
},
HashToNodeNames: map[string]string{"015702fc-a4ca": "node1"},
State: "PRIMARY",
},
expectedOut: "015702fc-a4ca is node1",
key: "RegexMemberAssociations",
},
{
// 31-char names are server-side truncations: handler must skip them
name: "name too long and truncated",
log: " 0: 015702fc-32f5-11ed-a4ca-267f97316394, name_so_long_it_will_get_trunca",
input: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
},
State: "PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
MemberCount: 1,
},
State: "PRIMARY",
},
expectedOut: "",
displayerExpectedNil: true,
key: "RegexMemberAssociations",
},
{
log: " members(1):",
expectedOut: "view member count: 1",
expected: regexTestState{
Ctx: types.LogCtx{MemberCount: 1},
},
key: "RegexMemberCount",
},
// own UUID/hash discovery from various log shapes
{
log: "2001-01-01T01:01:01.000000Z 1 [Note] [MY-000000] [Galera] ####### My UUID: 60205de0-5cf6-11ec-8884-3a01908be11a",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{
OwnHashes: []string{"60205de0-8884"},
},
},
expectedOut: "60205de0-8884 is local",
key: "RegexOwnUUID",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: (9509c194, 'tcp://0.0.0.0:4567') turning message relay requesting on, nonlive peers:",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{
OwnHashes: []string{"9509c194"},
},
},
expectedOut: "9509c194 is local",
key: "RegexOwnUUIDFromMessageRelay",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: New COMPONENT: primary = yes, bootstrap = no, my_idx = 0, memb_num = 2",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
},
},
expectedOut: "my_idx=0",
key: "RegexMyIDXFromComponent",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: (9509c194, 'tcp://0.0.0.0:4567') connection established to 838ebd6d tcp://172.17.0.2:4567",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{
OwnHashes: []string{"9509c194"},
},
},
expectedOut: "9509c194 is local",
key: "RegexOwnUUIDFromEstablished",
},
{
log: " own_index: 1",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "1",
},
},
expectedOut: "my_idx=1",
key: "RegexOwnIndexFromView",
},
}
iterateRegexTest(t, IdentsMap, tests)
}

View File

@@ -3,6 +3,7 @@ package regex
import (
"regexp"
"strings"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
@@ -19,11 +20,11 @@ var PXCOperatorMap = types.RegexMap{
"RegexNodeNameFromEnv": &types.LogRegex{
Regex: regexp.MustCompile(". NODE_NAME="),
InternalRegex: regexp.MustCompile("NODE_NAME=" + regexNodeName),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
nodename := submatches[groupNodeName]
nodename, _, _ = strings.Cut(nodename, ".")
ctx.AddOwnName(nodename)
ctx.AddOwnName(nodename, date)
return ctx, types.SimpleDisplayer("local name:" + nodename)
},
Verbosity: types.DebugMySQL,
@@ -32,10 +33,10 @@ var PXCOperatorMap = types.RegexMap{
"RegexNodeIPFromEnv": &types.LogRegex{
Regex: regexp.MustCompile(". NODE_IP="),
InternalRegex: regexp.MustCompile("NODE_IP=" + regexNodeIP),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ip := submatches[groupNodeIP]
ctx.AddOwnIP(ip)
ctx.AddOwnIP(ip, date)
return ctx, types.SimpleDisplayer("local ip:" + ip)
},
Verbosity: types.DebugMySQL,
@@ -48,7 +49,7 @@ var PXCOperatorMap = types.RegexMap{
// those "operators" regexes do not have the log prefix added implicitely. It's not strictly needed, but
// it will help to avoid catching random piece of log out of order
Regex: regexp.MustCompile(k8sprefix + ".*GCache::RingBuffer initial scan"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer("recovering gcache")
},
},
@@ -59,7 +60,7 @@ var PXCOperatorMap = types.RegexMap{
"RegexOperatorMemberAssociations": &types.LogRegex{
Regex: regexp.MustCompile("================================================.*View:"),
InternalRegex: regexp.MustCompile("own_index: " + regexIdx + ".*(?P<memberlog>" + IdentsMap["RegexMemberCount"].Regex.String() + ")(?P<compiledAssocations>(....-?[0-9]{1,2}(\\.-?[0-9])?: [a-z0-9]+-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]+, [a-zA-Z0-9-_\\.]+)+)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.MyIdx = submatches[groupIdx]
@@ -68,7 +69,7 @@ var PXCOperatorMap = types.RegexMap{
msg string
)
ctx, displayer = IdentsMap["RegexMemberCount"].Handle(ctx, submatches["memberlog"])
ctx, displayer = IdentsMap["RegexMemberCount"].Handle(ctx, submatches["memberlog"], date)
msg += displayer(ctx) + "; "
subAssociations := strings.Split(submatches["compiledAssocations"], "\\n\\t")
@@ -77,7 +78,7 @@ var PXCOperatorMap = types.RegexMap{
}
for _, subAssocation := range subAssociations[1:] {
// better to reuse the idents regex
ctx, displayer = IdentsMap["RegexMemberAssociations"].Handle(ctx, subAssocation)
ctx, displayer = IdentsMap["RegexMemberAssociations"].Handle(ctx, subAssocation, date)
msg += displayer(ctx) + "; "
}
return ctx, types.SimpleDisplayer(msg)

View File

@@ -0,0 +1,62 @@
package regex
import (
"testing"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
// TestPXCOperatorRegex exercises PXCOperatorMap — the regexes specific to
// PXC-operator (Kubernetes) logs, where mysqld output is wrapped in a JSON
// envelope and entrypoint-script lines expose NODE_NAME/NODE_IP — through the
// shared table-driven harness (iterateRegexTest, defined elsewhere in this
// package).
func TestPXCOperatorRegex(t *testing.T) {
tests := []regexTest{
{
// full cluster view in one JSON-wrapped line: member count plus every
// hash→name association, with own index 0 claimed as local
log: "{\"log\":\"2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] ================================================\\nView:\\n id: 9f191762-2542-11ee-89be-13bdb1218f0e:9375811\\n status: primary\\n protocol_version: 4\\n capabilities: MULTI-MASTER, CERTIFICATION, PARALLEL_APPLYING, REPLAY, ISOLATION, PAUSE, CAUSAL_READ, INCREMENTAL_WS, UNORDERED, PREORDERED, STREAMING, NBO\\n final: no\\n own_index: 0\\n members(3):\\n\\t0: 45406e8d-2de0-11ee-95fc-f29a5fdf1ee0, cluster1-0\\n\\t1: 5bf18376-2de0-11ee-8333-6e755a3456ca, cluster1-2\\n\\t2: 66e2b7bf-2de0-11ee-8000-f7d68b5cf6f6, cluster1-1\\n=================================================\\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
input: regexTestState{
Ctx: types.LogCtx{
OwnHashes: []string{},
OwnNames: []string{},
},
HashToNodeNames: map[string]string{},
State: "PRIMARY",
},
expected: regexTestState{
Ctx: types.LogCtx{
MyIdx: "0",
MemberCount: 3,
OwnHashes: []string{"45406e8d-95fc"},
OwnNames: []string{"cluster1-0"},
},
HashToNodeNames: map[string]string{"45406e8d-95fc": "cluster1-0", "5bf18376-8333": "cluster1-2", "66e2b7bf-8000": "cluster1-1"},
State: "PRIMARY",
},
expectedOut: "view member count: 3; 45406e8d-95fc is cluster1-0; 5bf18376-8333 is cluster1-2; 66e2b7bf-8000 is cluster1-1; ",
key: "RegexOperatorMemberAssociations",
},
{
// NODE_NAME env line: name is cut at the first dot
log: "+ NODE_NAME=cluster1-pxc-0.cluster1-pxc.test-percona.svc.cluster.local",
expected: regexTestState{
Ctx: types.LogCtx{OwnNames: []string{"cluster1-pxc-0"}},
},
expectedOut: "local name:cluster1-pxc-0",
key: "RegexNodeNameFromEnv",
},
{
log: "+ NODE_IP=172.17.0.2",
expected: regexTestState{
Ctx: types.LogCtx{OwnIPs: []string{"172.17.0.2"}},
},
expectedOut: "local ip:172.17.0.2",
key: "RegexNodeIPFromEnv",
},
{
log: "{\"log\":\"2023-07-05T08:17:23.447015Z 0 [Note] [MY-000000] [Galera] GCache::RingBuffer initial scan... 0.0% ( 0/1073741848 bytes) complete.\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"}",
expectedOut: "recovering gcache",
key: "RegexGcacheScan",
},
}
iterateRegexTest(t, PXCOperatorMap, tests)
}

File diff suppressed because it is too large Load Diff

View File

@@ -2,6 +2,7 @@ package regex
import (
"regexp"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
@@ -16,7 +17,7 @@ var SSTMap = types.RegexMap{
"RegexSSTRequestSuccess": &types.LogRegex{
Regex: regexp.MustCompile("requested state transfer.*Selected"),
InternalRegex: regexp.MustCompile("Member " + regexIdx + " \\(" + regexNodeName + "\\) requested state transfer.*Selected " + regexIdx + " \\(" + regexNodeName2 + "\\)\\("),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
joiner := utils.ShortNodeName(submatches[groupNodeName])
donor := utils.ShortNodeName(submatches[groupNodeName2])
@@ -51,7 +52,7 @@ var SSTMap = types.RegexMap{
"RegexSSTResourceUnavailable": &types.LogRegex{
Regex: regexp.MustCompile("requested state transfer.*Resource temporarily unavailable"),
InternalRegex: regexp.MustCompile("Member .* \\(" + regexNodeName + "\\) requested state transfer"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
joiner := submatches[groupNodeName]
if utils.SliceContains(ctx.OwnNames, joiner) {
@@ -67,7 +68,7 @@ var SSTMap = types.RegexMap{
"RegexSSTComplete": &types.LogRegex{
Regex: regexp.MustCompile("State transfer to.*complete"),
InternalRegex: regexp.MustCompile(regexIdx + " \\(" + regexNodeName + "\\): State transfer to " + regexIdx + " \\(" + regexNodeName2 + "\\) complete"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
donor := utils.ShortNodeName(submatches[groupNodeName])
joiner := utils.ShortNodeName(submatches[groupNodeName2])
@@ -95,7 +96,7 @@ var SSTMap = types.RegexMap{
"RegexSSTCompleteUnknown": &types.LogRegex{
Regex: regexp.MustCompile("State transfer to.*left the group.*complete"),
InternalRegex: regexp.MustCompile(regexIdx + " \\(" + regexNodeName + "\\): State transfer.*\\(left the group\\) complete"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
donor := utils.ShortNodeName(submatches[groupNodeName])
delete(ctx.SSTs, donor)
@@ -106,7 +107,7 @@ var SSTMap = types.RegexMap{
"RegexSSTFailedUnknown": &types.LogRegex{
Regex: regexp.MustCompile("State transfer to.*left the group.*failed"),
InternalRegex: regexp.MustCompile(regexIdx + " \\(" + regexNodeName + "\\): State transfer.*\\(left the group\\) failed"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
donor := utils.ShortNodeName(submatches[groupNodeName])
delete(ctx.SSTs, donor)
@@ -117,7 +118,7 @@ var SSTMap = types.RegexMap{
"RegexSSTStateTransferFailed": &types.LogRegex{
Regex: regexp.MustCompile("State transfer to.*failed:"),
InternalRegex: regexp.MustCompile(regexIdx + " \\(" + regexNodeName + "\\): State transfer to " + regexIdx + " \\(" + regexNodeName2 + "\\) failed"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
donor := utils.ShortNodeName(submatches[groupNodeName])
joiner := utils.ShortNodeName(submatches[groupNodeName2])
@@ -128,7 +129,7 @@ var SSTMap = types.RegexMap{
"RegexSSTError": &types.LogRegex{
Regex: regexp.MustCompile("Process completed with error: wsrep_sst"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "SST error"))
},
@@ -137,7 +138,7 @@ var SSTMap = types.RegexMap{
"RegexSSTInitiating": &types.LogRegex{
Regex: regexp.MustCompile("Initiating SST.IST transfer on DONOR side"),
InternalRegex: regexp.MustCompile("DONOR side \\((?P<scriptname>[a-zA-Z0-9-_]*) --role 'donor' --address '" + regexNodeIP + ":(?P<sstport>[0-9]*)\\)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer("init sst using " + submatches["scriptname"])
},
@@ -145,7 +146,7 @@ var SSTMap = types.RegexMap{
"RegexSSTCancellation": &types.LogRegex{
Regex: regexp.MustCompile("Initiating SST cancellation"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "former SST cancelled"))
},
@@ -153,7 +154,7 @@ var SSTMap = types.RegexMap{
"RegexSSTProceeding": &types.LogRegex{
Regex: regexp.MustCompile("Proceeding with SST"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("JOINER")
ctx.SetSSTTypeMaybe("SST")
@@ -164,14 +165,12 @@ var SSTMap = types.RegexMap{
"RegexSSTStreamingTo": &types.LogRegex{
Regex: regexp.MustCompile("Streaming the backup to"),
InternalRegex: regexp.MustCompile("Streaming the backup to joiner at " + regexNodeIP),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("DONOR")
joiner := submatches[groupNodeIP]
return ctx, func(ctx types.LogCtx) string {
return utils.Paint(utils.YellowText, "SST to ") + types.DisplayNodeSimplestForm(ctx, joiner)
}
return ctx, types.FormatByIPDisplayer(utils.Paint(utils.YellowText, "SST to ")+"%s", joiner, date)
},
},
@@ -180,7 +179,7 @@ var SSTMap = types.RegexMap{
// the UUID here is not from a node, it's a cluster state UUID, this is only used to ensure it's correctly parsed
InternalRegex: regexp.MustCompile("IST received: " + regexUUID + ":" + regexSeqno),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
seqno := submatches[groupSeqno]
return ctx, types.SimpleDisplayer(utils.Paint(utils.GreenText, "IST received") + "(seqno:" + seqno + ")")
@@ -192,16 +191,14 @@ var SSTMap = types.RegexMap{
// TODO: sometimes, it's a hostname here
InternalRegex: regexp.MustCompile("IST sender starting to serve " + regexNodeIPMethod + " sending [0-9]+-" + regexSeqno),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("DONOR")
ctx.SetSSTTypeMaybe("IST")
seqno := submatches[groupSeqno]
node := submatches[groupNodeIP]
joiner := submatches[groupNodeIP]
return ctx, func(ctx types.LogCtx) string {
return utils.Paint(utils.YellowText, "IST to ") + types.DisplayNodeSimplestForm(ctx, node) + "(seqno:" + seqno + ")"
}
return ctx, types.FormatByIPDisplayer(utils.Paint(utils.YellowText, "IST to ")+"%s(seqno:"+seqno+")", joiner, date)
},
},
@@ -209,7 +206,7 @@ var SSTMap = types.RegexMap{
Regex: regexp.MustCompile("Prepared IST receiver"),
InternalRegex: regexp.MustCompile("Prepared IST receiver( for (?P<startingseqno>[0-9]+)-" + regexSeqno + ")?"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("JOINER")
seqno := submatches[groupSeqno]
@@ -236,7 +233,7 @@ var SSTMap = types.RegexMap{
"RegexFailedToPrepareIST": &types.LogRegex{
Regex: regexp.MustCompile("Failed to prepare for incremental state transfer"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetSSTTypeMaybe("SST")
return ctx, types.SimpleDisplayer("IST is not applicable")
},
@@ -244,7 +241,7 @@ var SSTMap = types.RegexMap{
"RegexXtrabackupISTReceived": &types.LogRegex{
Regex: regexp.MustCompile("xtrabackup_ist received from donor"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetSSTTypeMaybe("IST")
return ctx, types.SimpleDisplayer("IST running")
},
@@ -254,7 +251,7 @@ var SSTMap = types.RegexMap{
// could not find production examples yet, but it did exist in older version there also was "Bypassing state dump"
"RegexBypassSST": &types.LogRegex{
Regex: regexp.MustCompile("Bypassing SST"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetSSTTypeMaybe("IST")
return ctx, types.SimpleDisplayer("IST will be used")
},
@@ -262,7 +259,7 @@ var SSTMap = types.RegexMap{
"RegexSocatConnRefused": &types.LogRegex{
Regex: regexp.MustCompile("E connect.*Connection refused"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "socat: connection refused"))
},
},
@@ -270,21 +267,21 @@ var SSTMap = types.RegexMap{
// 2023-05-12T02:52:33.767132Z 0 [Note] [MY-000000] [WSREP-SST] Preparing the backup at /var/lib/mysql/sst-xb-tmpdir
"RegexPreparingBackup": &types.LogRegex{
Regex: regexp.MustCompile("Preparing the backup at"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer("preparing SST backup")
},
},
"RegexTimeoutReceivingFirstData": &types.LogRegex{
Regex: regexp.MustCompile("Possible timeout in receving first data from donor in gtid/keyring stage"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "timeout from donor in gtid/keyring stage"))
},
},
"RegexWillNeverReceive": &types.LogRegex{
Regex: regexp.MustCompile("Will never receive state. Need to abort"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "will never receive SST, aborting"))
},
},
@@ -292,14 +289,12 @@ var SSTMap = types.RegexMap{
"RegexISTFailed": &types.LogRegex{
Regex: regexp.MustCompile("async IST sender failed to serve"),
InternalRegex: regexp.MustCompile("IST sender failed to serve " + regexNodeIPMethod + ":.*asio error '.*: [0-9]+ \\((?P<error>[\\w\\s]+)\\)"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
node := submatches[groupNodeIP]
joiner := submatches[groupNodeIP]
istError := submatches["error"]
return ctx, func(ctx types.LogCtx) string {
return "IST to " + types.DisplayNodeSimplestForm(ctx, node) + utils.Paint(utils.RedText, " failed: ") + istError
}
return ctx, types.FormatByIPDisplayer("IST to %s"+utils.Paint(utils.RedText, " failed: ")+istError, joiner, date)
},
},
}

View File

@@ -0,0 +1,482 @@
package regex
import (
"testing"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
func TestSSTRegex(t *testing.T) {
tests := []regexTest{
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Member 2.0 (node2) requested state transfer from '*any*'. Selected 0.0 (node1)(SYNCED) as donor.",
input: regexTestState{
Ctx: types.LogCtx{SSTs: map[string]types.SST{}},
},
expected: regexTestState{
Ctx: types.LogCtx{SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", SelectionTimestamp: timeMustParse("2001-01-01T01:01:01.000000Z")}}},
},
expectedOut: "node1 will resync node2",
key: "RegexSSTRequestSuccess",
},
{
name: "with fqdn",
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Member 2.0 (node2.host.com) requested state transfer from '*any*'. Selected 0.0 (node1.host.com)(SYNCED) as donor.",
input: regexTestState{
Ctx: types.LogCtx{SSTs: map[string]types.SST{}},
},
expected: regexTestState{
Ctx: types.LogCtx{SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", SelectionTimestamp: timeMustParse("2001-01-01T01:01:01.000000Z")}}},
},
expectedOut: "node1 will resync node2",
key: "RegexSSTRequestSuccess",
},
{
name: "joining",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Member 2.0 (node2) requested state transfer from '*any*'. Selected 0.0 (node1)(SYNCED) as donor.",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node2"},
SSTs: map[string]types.SST{},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node2"},
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", SelectionTimestamp: timeMustParse("2001-01-01T01:01:01.000000Z")}},
},
},
expectedOut: "node1 will resync local node",
key: "RegexSSTRequestSuccess",
},
{
name: "donor",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Member 2.0 (node2) requested state transfer from '*any*'. Selected 0.0 (node1)(SYNCED) as donor.",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node1"},
SSTs: map[string]types.SST{},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node1"},
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", SelectionTimestamp: timeMustParse("2001-01-01T01:01:01.000000Z")}},
},
},
expectedOut: "local node will resync node2",
key: "RegexSSTRequestSuccess",
},
{
log: "2001-01-01 01:01:01.164 WARN: Member 1.0 (node2) requested state transfer from 'node1', but it is impossible to select State Transfer donor: Resource temporarily unavailable",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{},
},
expectedOut: "node2 cannot find donor",
key: "RegexSSTResourceUnavailable",
},
{
name: "local",
log: "2001-01-01 01:01:01.164 WARN: Member 1.0 (node2) requested state transfer from 'node1', but it is impossible to select State Transfer donor: Resource temporarily unavailable",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node2"},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node2"},
},
},
expectedOut: "cannot find donor",
key: "RegexSSTResourceUnavailable",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: 0.0 (node1): State transfer to 2.0 (node2) complete.",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{},
},
},
expectedOut: "node1 synced node2",
key: "RegexSSTComplete",
},
{
name: "joiner",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: 0.0 (node1): State transfer to 2.0 (node2) complete.",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node2"},
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{},
OwnNames: []string{"node2"},
},
},
expectedOut: "got SST from node1",
key: "RegexSSTComplete",
},
{
name: "joiner ist",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: 0.0 (node1): State transfer to 2.0 (node2) complete.",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node2"},
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{},
OwnNames: []string{"node2"},
},
},
expectedOut: "got IST from node1",
key: "RegexSSTComplete",
},
{
name: "donor",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: 0.0 (node1): State transfer to 2.0 (node2) complete.",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node1"},
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{},
OwnNames: []string{"node1"},
},
},
expectedOut: "finished sending SST to node2",
key: "RegexSSTComplete",
},
{
name: "donor ist",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: 0.0 (node1): State transfer to 2.0 (node2) complete.",
input: regexTestState{
Ctx: types.LogCtx{
OwnNames: []string{"node1"},
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{},
OwnNames: []string{"node1"},
},
},
expectedOut: "finished sending IST to node2",
key: "RegexSSTComplete",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: 0.0 (node1): State transfer to -1.-1 (left the group) complete.",
input: regexTestState{
Ctx: types.LogCtx{},
},
expected: regexTestState{
Ctx: types.LogCtx{},
},
expectedOut: "node1 synced ??(node left)",
key: "RegexSSTCompleteUnknown",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-000000] [WSREP] Process completed with error: wsrep_sst_xtrabackup-v2 --role 'donor' --address '172.17.0.2:4444/xtrabackup_sst//1' --socket '/var/lib/mysql/mysql.sock' --datadir '/var/lib/mysql/' --basedir '/usr/' --plugindir '/usr/lib64/mysql/plugin/' --defaults-file '/etc/my.cnf' --defaults-group-suffix '' --mysqld-version '8.0.28-19.1' '' --gtid '9db0bcdf-b31a-11ed-a398-2a4cfdd82049:1' : 22 (Invalid argument)",
expectedOut: "SST error",
key: "RegexSSTError",
},
{
log: "2001-01-01T01:01:01.000000Z 1328586 [Note] [MY-000000] [WSREP] Initiating SST cancellation",
expectedOut: "former SST cancelled",
key: "RegexSSTCancellation",
},
{
log: "2001-01-01T01:01:01.000000Z WSREP_SST: [INFO] Proceeding with SST.........",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "SST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "receiving SST",
key: "RegexSSTProceeding",
},
{
log: "2001-01-01T01:01:01.000000Z WSREP_SST: [INFO] Streaming the backup to joiner at 172.17.0.2 4444",
expected: regexTestState{
State: "DONOR",
},
expectedOut: "SST to 172.17.0.2",
key: "RegexSSTStreamingTo",
},
{
log: "2001-01-01 01:01:01 140446376740608 [Note] WSREP: IST received: e00c4fff-c4b0-11e9-96a8-0f9789de42ad:69472531",
expectedOut: "IST received(seqno:69472531)",
key: "RegexISTReceived",
},
{
log: "2001-01-01 1:01:01 140433613571840 [Note] WSREP: async IST sender starting to serve tcp://172.17.0.2:4568 sending 2-116",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node1"},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
OwnNames: []string{"node1"},
},
State: "DONOR",
},
expectedOut: "IST to 172.17.0.2(seqno:116)",
key: "RegexISTSender",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Prepared IST receiver for 114-116, listening at: ssl://172.17.0.2:4568",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "will receive IST(seqno:116)",
key: "RegexISTReceiver",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] Prepared IST receiver for 0-116, listening at: ssl://172.17.0.2:4568",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "SST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "will receive SST",
key: "RegexISTReceiver",
},
{
name: "mdb variant",
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Prepared IST receiver, listening at: ssl://172.17.0.2:4568",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "will receive IST",
key: "RegexISTReceiver",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Warning] [MY-000000] [Galera] 0.1 (node): State transfer to -1.-1 (left the group) failed: -111 (Connection refused)",
expectedOut: "node failed to sync ??(node left)",
key: "RegexSSTFailedUnknown",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Warning] [MY-000000] [Galera] 0.1 (node): State transfer to 0.2 (node2) failed: -111 (Connection refused)",
expectedOut: "node failed to sync node2",
key: "RegexSSTStateTransferFailed",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Warning] [MY-000000] [Galera] 0.1 (node): State transfer to -1.-1 (left the group) failed: -111 (Connection refused)",
displayerExpectedNil: true,
key: "RegexSSTStateTransferFailed",
},
{
log: "2001-01-01T01:01:01.000000Z 1 [Note] WSREP: Failed to prepare for incremental state transfer: Local state UUID (00000000-0000-0000-0000-000000000000) does not match group state UUID (ed16c932-84b3-11ed-998c-8e3ae5bc328f): 1 (Operation not permitted)",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "SST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "IST is not applicable",
key: "RegexFailedToPrepareIST",
},
{
log: "2001-01-01T01:01:01.000000Z 1 [Warning] WSREP: Failed to prepare for incremental state transfer: Local state seqno is undefined: 1 (Operation not permitted)",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "SST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "IST is not applicable",
key: "RegexFailedToPrepareIST",
},
{
log: "2001-01-01T01:01:01.000000Z WSREP_SST: [INFO] Bypassing SST. Can work it through IST",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "IST will be used",
key: "RegexBypassSST",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [WSREP-SST] xtrabackup_ist received from donor: Running IST",
input: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expected: regexTestState{
Ctx: types.LogCtx{
SSTs: map[string]types.SST{"node1": types.SST{Donor: "node1", Joiner: "node2", Type: "IST"}},
OwnNames: []string{"node2"},
},
State: "JOINER",
},
expectedOut: "IST running",
key: "RegexXtrabackupISTReceived",
},
{
log: "2001/01/01 01:01:01 socat[23579] E connect(62, AF=2 172.17.0.20:4444, 16): Connection refused",
expectedOut: "socat: connection refused",
key: "RegexSocatConnRefused",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [WSREP-SST] Preparing the backup at /var/lib/mysql/sst-xb-tmpdir",
expectedOut: "preparing SST backup",
key: "RegexPreparingBackup",
},
{
log: "2001-01-01T01:01:01.000000Z WSREP_SST: [ERROR] Possible timeout in receving first data from donor in gtid/keyring stage",
expectedOut: "timeout from donor in gtid/keyring stage",
key: "RegexTimeoutReceivingFirstData",
},
{
log: "2001-01-01 01:01:01 140666176771840 [ERROR] WSREP: gcs/src/gcs_group.cpp:gcs_group_handle_join_msg():736: Will never receive state. Need to abort.",
expectedOut: "will never receive SST, aborting",
key: "RegexWillNeverReceive",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [WSREP-SST] Preparing the backup at /var/lib/mysql/sst-xb-tmpdir",
expectedOut: "preparing SST backup",
key: "RegexPreparingBackup",
},
{
log: "2001-01-01T01:01:01.000000Z WSREP_SST: [ERROR] Possible timeout in receving first data from donor in gtid/keyring stage",
expectedOut: "timeout from donor in gtid/keyring stage",
key: "RegexTimeoutReceivingFirstData",
},
{
log: "2001-01-01 01:01:01 140666176771840 [ERROR] WSREP: gcs/src/gcs_group.cpp:gcs_group_handle_join_msg():736: Will never receive state. Need to abort.",
expectedOut: "will never receive SST, aborting",
key: "RegexWillNeverReceive",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [ERROR] WSREP: async IST sender failed to serve tcp://172.17.0.2:4568: ist send failed: asio.system:32', asio error 'write: Broken pipe': 32 (Broken pipe)",
expectedOut: "IST to 172.17.0.2 failed: Broken pipe",
key: "RegexISTFailed",
},
{
log: "2001-01-01 01:10:01 28949 [ERROR] WSREP: async IST sender failed to serve tcp://172.17.0.2:4568: ist send failed: asio.system:104', asio error 'write: Connection reset by peer': 104 (Connection reset by peer)",
expectedOut: "IST to 172.17.0.2 failed: Connection reset by peer",
key: "RegexISTFailed",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-000000] [Galera] async IST sender failed to serve ssl://172.17.0.2:4568: ist send failed: ', asio error 'Got unexpected return from write: eof: 71 (Protocol error)",
expectedOut: "IST to 172.17.0.2 failed: Protocol error",
key: "RegexISTFailed",
},
{
log: `{\"log\":\"2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-000000] [Galera] async IST sender failed to serve ssl://172.17.0.2:4568: ist send failed: ', asio error 'Got unexpected return from write: eof: 71 (Protocol error)\n\t at galerautils/src/gu_asio_stream_react.cpp:write():195': 71 (Protocol error)\n\t at galera/src/ist.cpp:send():856\n\",\"file\":\"/var/lib/mysql/mysqld-error.log\"
`,
expectedOut: "IST to 172.17.0.2 failed: Protocol error",
key: "RegexISTFailed",
},
}
iterateRegexTest(t, SSTMap, tests)
}

View File

@@ -2,6 +2,7 @@ package regex
import (
"regexp"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
@@ -12,7 +13,7 @@ func init() {
}
var (
shiftFunc = func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
shiftFunc = func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
newState := submatches["state2"]
ctx.SetState(newState)
@@ -41,9 +42,9 @@ var StatesMap = types.RegexMap{
"RegexRestoredState": &types.LogRegex{
Regex: regexp.MustCompile("Restored state"),
InternalRegex: shiftRegex,
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
var displayer types.LogDisplayer
ctx, displayer = shiftFunc(submatches, ctx, log)
ctx, displayer = shiftFunc(submatches, ctx, log, date)
return ctx, types.SimpleDisplayer("(restored)" + displayer(ctx))
},

View File

@@ -0,0 +1,46 @@
package regex
import (
"testing"
)
func TestStatesRegex(t *testing.T) {
tests := []regexTest{
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Shifting OPEN -> CLOSED (TO: 1922878)",
expected: regexTestState{
State: "CLOSED",
},
expectedOut: "OPEN -> CLOSED",
key: "RegexShift",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Shifting SYNCED -> DONOR/DESYNCED (TO: 21582507)",
expected: regexTestState{
State: "DONOR",
},
expectedOut: "SYNCED -> DONOR",
key: "RegexShift",
},
{
log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Shifting DONOR/DESYNCED -> JOINED (TO: 21582507)",
expected: regexTestState{
State: "JOINED",
},
expectedOut: "DESYNCED -> JOINED",
key: "RegexShift",
},
{
log: "2001-01-01 01:01:01 140446385440512 [Note] WSREP: Restored state OPEN -> SYNCED (72438094)",
expected: regexTestState{
State: "SYNCED",
},
expectedOut: "(restored)OPEN -> SYNCED",
key: "RegexRestoredState",
},
}
iterateRegexTest(t, StatesMap, tests)
}

View File

@@ -4,7 +4,9 @@ import (
"regexp"
"strconv"
"strings"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
)
@@ -18,14 +20,15 @@ var ViewsMap = types.RegexMap{
"RegexNodeEstablished": &types.LogRegex{
Regex: regexp.MustCompile("connection established"),
InternalRegex: regexp.MustCompile("established to " + regexNodeHash + " " + regexNodeIPMethod),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ip := submatches[groupNodeIP]
ctx.HashToIP[submatches[groupNodeHash]] = ip
hash := submatches[groupNodeHash]
translate.AddHashToIP(hash, ip, date)
if utils.SliceContains(ctx.OwnIPs, ip) {
return ctx, nil
}
return ctx, func(ctx types.LogCtx) string { return types.DisplayNodeSimplestForm(ctx, ip) + " established" }
return ctx, types.FormatByHashDisplayer("%s established", hash, date)
},
Verbosity: types.DebugMySQL,
},
@@ -33,26 +36,26 @@ var ViewsMap = types.RegexMap{
"RegexNodeJoined": &types.LogRegex{
Regex: regexp.MustCompile("declaring .* stable"),
InternalRegex: regexp.MustCompile("declaring " + regexNodeHash + " at " + regexNodeIPMethod),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ip := submatches[groupNodeIP]
ctx.HashToIP[submatches[groupNodeHash]] = ip
ctx.IPToMethod[ip] = submatches[groupMethod]
return ctx, func(ctx types.LogCtx) string {
return types.DisplayNodeSimplestForm(ctx, ip) + utils.Paint(utils.GreenText, " joined")
}
hash := submatches[groupNodeHash]
translate.AddHashToIP(hash, ip, date)
translate.AddIPToMethod(ip, submatches[groupMethod], date)
return ctx, types.FormatByHashDisplayer("%s"+utils.Paint(utils.GreenText, " joined"), hash, date)
},
},
"RegexNodeLeft": &types.LogRegex{
Regex: regexp.MustCompile("forgetting"),
InternalRegex: regexp.MustCompile("forgetting " + regexNodeHash + " \\(" + regexNodeIPMethod),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ip := submatches[groupNodeIP]
return ctx, func(ctx types.LogCtx) string {
return types.DisplayNodeSimplestForm(ctx, ip) + utils.Paint(utils.RedText, " left")
}
hash := submatches[groupNodeHash]
translate.AddHashToIP(hash, ip, date)
translate.AddIPToMethod(ip, submatches[groupMethod], date)
return ctx, types.FormatByHashDisplayer("%s"+utils.Paint(utils.RedText, " left"), hash, date)
},
},
@@ -60,7 +63,7 @@ var ViewsMap = types.RegexMap{
"RegexNewComponent": &types.LogRegex{
Regex: regexp.MustCompile("New COMPONENT:"),
InternalRegex: regexp.MustCompile("New COMPONENT: primary = (?P<primary>.+), bootstrap = (?P<bootstrap>.*), my_idx = .*, memb_num = (?P<memb_num>[0-9]{1,2})"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
primary := submatches["primary"] == "yes"
membNum := submatches["memb_num"]
@@ -92,46 +95,31 @@ var ViewsMap = types.RegexMap{
"RegexNodeSuspect": &types.LogRegex{
Regex: regexp.MustCompile("suspecting node"),
InternalRegex: regexp.MustCompile("suspecting node: " + regexNodeHash),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
hash := submatches[groupNodeHash]
ip, ok := ctx.HashToIP[hash]
if ok {
return ctx, func(ctx types.LogCtx) string {
return types.DisplayNodeSimplestForm(ctx, ip) + utils.Paint(utils.YellowText, " suspected to be down")
}
}
return ctx, types.SimpleDisplayer(hash + utils.Paint(utils.YellowText, " suspected to be down"))
return ctx, types.FormatByHashDisplayer("%s"+utils.Paint(utils.YellowText, " suspected to be down"), hash, date)
},
},
"RegexNodeChangedIdentity": &types.LogRegex{
Regex: regexp.MustCompile("remote endpoint.*changed identity"),
InternalRegex: regexp.MustCompile("remote endpoint " + regexNodeIPMethod + " changed identity " + regexNodeHash + " -> " + strings.Replace(regexNodeHash, groupNodeHash, groupNodeHash+"2", -1)),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
hash := submatches[groupNodeHash]
hash2 := submatches[groupNodeHash+"2"]
ip, ok := ctx.HashToIP[hash]
if !ok && IsNodeUUID(hash) {
ip, ok = ctx.HashToIP[utils.UUIDToShortUUID(hash)]
// there could have additional corner case to discover yet
if !ok {
return ctx, types.SimpleDisplayer(hash + utils.Paint(utils.YellowText, " changed identity"))
}
hash2 = utils.UUIDToShortUUID(hash2)
}
ctx.HashToIP[hash2] = ip
return ctx, func(ctx types.LogCtx) string {
return types.DisplayNodeSimplestForm(ctx, ip) + utils.Paint(utils.YellowText, " changed identity")
hash := utils.UUIDToShortUUID(submatches[groupNodeHash])
hash2 := utils.UUIDToShortUUID(submatches[groupNodeHash+"2"])
if ip := translate.GetIPFromHash(hash); ip != "" {
translate.AddHashToIP(hash2, ip, date)
}
return ctx, types.FormatByHashDisplayer("%s"+utils.Paint(utils.YellowText, " changed identity"), hash, date)
},
},
"RegexWsrepUnsafeBootstrap": &types.LogRegex{
Regex: regexp.MustCompile("ERROR.*not be safe to bootstrap"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "not safe to bootstrap"))
@@ -139,7 +127,7 @@ var ViewsMap = types.RegexMap{
},
"RegexWsrepConsistenctyCompromised": &types.LogRegex{
Regex: regexp.MustCompile(".ode consistency compromi.ed"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
ctx.SetState("CLOSED")
return ctx, types.SimpleDisplayer(utils.Paint(utils.RedText, "consistency compromised"))
@@ -147,33 +135,33 @@ var ViewsMap = types.RegexMap{
},
"RegexWsrepNonPrimary": &types.LogRegex{
Regex: regexp.MustCompile("failed to reach primary view"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer("received " + utils.Paint(utils.RedText, "non primary"))
},
},
"RegexBootstrap": &types.LogRegex{
Regex: regexp.MustCompile("gcomm: bootstrapping new group"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.YellowText, "bootstrapping"))
},
},
"RegexSafeToBoostrapSet": &types.LogRegex{
Regex: regexp.MustCompile("safe_to_bootstrap: 1"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.YellowText, "safe_to_bootstrap: 1"))
},
},
"RegexNoGrastate": &types.LogRegex{
Regex: regexp.MustCompile("Could not open state file for reading.*grastate.dat"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.YellowText, "no grastate.dat file"))
},
},
"RegexBootstrapingDefaultState": &types.LogRegex{
Regex: regexp.MustCompile("Bootstraping with default state"),
Handler: func(submatches map[string]string, ctx types.LogCtx, log string) (types.LogCtx, types.LogDisplayer) {
Handler: func(submatches map[string]string, ctx types.LogCtx, log string, date time.Time) (types.LogCtx, types.LogDisplayer) {
return ctx, types.SimpleDisplayer(utils.Paint(utils.YellowText, "bootstrapping(empty grastate)"))
},
},
@@ -251,4 +239,7 @@ d4397932 at tcp://ip:4567
[Warning] WSREP: FLOW message from member -12921743687968 in non-primary configuration. Ignored.
0 [Note] [MY-000000] [Galera] (<hash>, 'tcp://0.0.0.0:4567') reconnecting to <hash (tcp://<ip>:4567), attempt 0
*/

View File

@@ -0,0 +1,226 @@
package regex
import (
"testing"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
)
// TestViewsRegex exercises every regex of ViewsMap (membership changes,
// cluster component views, identity changes, and bootstrap-related events)
// against sample mysqld/mariadbd log lines.
//
// Each case feeds the raw "log" line to the regex named by "key" and checks:
//   - the translation state before/after handling ("input"/"expected"):
//     hash=>ip and ip=>method associations discovered from the line
//   - the rendered message ("expectedOut"), or that no displayer is
//     produced at all ("displayerExpectedNil")
//   - that the line must NOT match ("expectedErr")
func TestViewsRegex(t *testing.T) {
	tests := []regexTest{
		// RegexNodeEstablished: "connection established to <hash> <method>://<ip>"
		{
			log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] (60205de0-8884, 'ssl://0.0.0.0:4567') connection established to 5873acd0-baa8 ssl://172.17.0.2:4567",
			input: regexTestState{
				HashToIP: map[string]string{},
			},
			expected: regexTestState{
				HashToIP: map[string]string{"5873acd0-baa8": "172.17.0.2"},
			},
			expectedOut: "172.17.0.2 established",
			key:         "RegexNodeEstablished",
		},
		// same line, but the remote IP is the node's own: the association is
		// still recorded, yet nothing should be displayed
		{
			name: "established to node's own ip",
			log:  "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] (60205de0-8884, 'ssl://0.0.0.0:4567') connection established to 5873acd0-baa8 ssl://172.17.0.2:4567",
			input: regexTestState{
				Ctx: types.LogCtx{
					OwnIPs: []string{"172.17.0.2"},
				},
				HashToIP: map[string]string{},
			},
			expected: regexTestState{
				Ctx: types.LogCtx{
					OwnIPs: []string{"172.17.0.2"},
				},
				HashToIP: map[string]string{"5873acd0-baa8": "172.17.0.2"},
			},
			expectedOut:          "",
			displayerExpectedNil: true,
			key:                  "RegexNodeEstablished",
		},
		// RegexNodeJoined: "declaring <hash> at <method>://<ip> stable"
		// also records the connection method (ssl/tcp) for the IP
		{
			log: "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] declaring 5873acd0-baa8 at ssl://172.17.0.2:4567 stable",
			input: regexTestState{
				HashToIP:    map[string]string{},
				IPToMethods: map[string]string{},
			},
			expected: regexTestState{
				HashToIP:    map[string]string{"5873acd0-baa8": "172.17.0.2"},
				IPToMethods: map[string]string{"172.17.0.2": "ssl"},
			},
			expectedOut: "172.17.0.2 joined",
			key:         "RegexNodeJoined",
		},
		{
			name: "mariadb variation",
			log:  "2001-01-01 1:01:30 0 [Note] WSREP: declaring 5873acd0-baa8 at tcp://172.17.0.2:4567 stable",
			input: regexTestState{
				HashToIP:    map[string]string{},
				IPToMethods: map[string]string{},
			},
			expected: regexTestState{
				HashToIP:    map[string]string{"5873acd0-baa8": "172.17.0.2"},
				IPToMethods: map[string]string{"172.17.0.2": "tcp"},
			},
			expectedOut: "172.17.0.2 joined",
			key:         "RegexNodeJoined",
		},
		// RegexNodeLeft: "forgetting <hash> (<method>://<ip>)"
		{
			log:         "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] forgetting 871c35de-99ae (ssl://172.17.0.2:4567)",
			expectedOut: "172.17.0.2 left",
			key:         "RegexNodeLeft",
		},
		// RegexNewComponent: cluster view changes; state is only updated when
		// the node is not already in a more precise state (e.g. JOINER)
		{
			log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: New COMPONENT: primary = yes, bootstrap = no, my_idx = 0, memb_num = 2",
			expected: regexTestState{
				Ctx:   types.LogCtx{MemberCount: 2},
				State: "PRIMARY",
			},
			expectedOut: "PRIMARY(n=2)",
			key:         "RegexNewComponent",
		},
		{
			name: "bootstrap",
			log:  "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: New COMPONENT: primary = yes, bootstrap = yes, my_idx = 0, memb_num = 2",
			expected: regexTestState{
				Ctx:   types.LogCtx{MemberCount: 2},
				State: "PRIMARY",
			},
			expectedOut: "PRIMARY(n=2),bootstrap",
			key:         "RegexNewComponent",
		},
		{
			name: "don't set primary",
			log:  "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: New COMPONENT: primary = yes, bootstrap = no, my_idx = 0, memb_num = 2",
			input: regexTestState{
				State: "JOINER",
			},
			expected: regexTestState{
				Ctx:   types.LogCtx{MemberCount: 2},
				State: "JOINER",
			},
			expectedOut: "PRIMARY(n=2)",
			key:         "RegexNewComponent",
		},
		{
			name: "non-primary",
			log:  "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: New COMPONENT: primary = no, bootstrap = no, my_idx = 0, memb_num = 2",
			expected: regexTestState{
				Ctx:   types.LogCtx{MemberCount: 2},
				State: "NON-PRIMARY",
			},
			expectedOut: "NON-PRIMARY(n=2)",
			key:         "RegexNewComponent",
		},
		// RegexNodeSuspect: falls back to the raw hash when no IP is known yet
		{
			log: "2001-01-01T01:01:01.000000Z 84580 [Note] [MY-000000] [Galera] evs::proto(9a826787-9e98, LEAVING, view_id(REG,4971d113-87b0,22)) suspecting node: 4971d113-87b0",
			input: regexTestState{
				HashToIP: map[string]string{},
			},
			expected: regexTestState{
				HashToIP: map[string]string{},
			},
			expectedOut: "4971d113-87b0 suspected to be down",
			key:         "RegexNodeSuspect",
		},
		{
			name: "with known ip",
			log:  "2001-01-01T01:01:01.000000Z 84580 [Note] [MY-000000] [Galera] evs::proto(9a826787-9e98, LEAVING, view_id(REG,4971d113-87b0,22)) suspecting node: 4971d113-87b0",
			input: regexTestState{
				HashToIP: map[string]string{"4971d113-87b0": "172.17.0.2"},
			},
			expected: regexTestState{
				HashToIP: map[string]string{"4971d113-87b0": "172.17.0.2"},
			},
			expectedOut: "172.17.0.2 suspected to be down",
			key:         "RegexNodeSuspect",
		},
		// RegexNodeChangedIdentity: the new hash inherits the old hash's IP
		{
			log: "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: remote endpoint tcp://172.17.0.2:4567 changed identity 84953af9 -> 5a478da2",
			input: regexTestState{
				HashToIP: map[string]string{"84953af9": "172.17.0.2"},
			},
			expected: regexTestState{
				HashToIP: map[string]string{"84953af9": "172.17.0.2", "5a478da2": "172.17.0.2"},
			},
			expectedOut: "172.17.0.2 changed identity",
			key:         "RegexNodeChangedIdentity",
		},
		// full-length UUIDs are shortened before the lookup
		{
			name: "with complete uuid",
			log:  "2001-01-01T01:01:01.000000Z 0 [Note] [MY-000000] [Galera] remote endpoint ssl://172.17.0.2:4567 changed identity 595812bc-9c79-11ec-ad3f-3a7953bcc2fc -> 595812bc-9c79-11ec-ad40-3a7953bcc2fc",
			input: regexTestState{
				HashToIP: map[string]string{"595812bc-ad3f": "172.17.0.2"},
			},
			expected: regexTestState{
				HashToIP: map[string]string{"595812bc-ad3f": "172.17.0.2", "595812bc-ad40": "172.17.0.2"},
			},
			expectedOut: "172.17.0.2 changed identity",
			key:         "RegexNodeChangedIdentity",
		},
		// bootstrap-related events
		{
			log: "2001-01-01T01:01:01.000000Z 0 [ERROR] [MY-000000] [Galera] It may not be safe to bootstrap the cluster from this node. It was not the last one to leave the cluster and may not contain all the updates. To force cluster bootstrap with this node, edit the grastate.dat file manually and set safe_to_bootstrap to 1 .",
			expected: regexTestState{
				State: "CLOSED",
			},
			expectedOut: "not safe to bootstrap",
			key:         "RegexWsrepUnsafeBootstrap",
		},
		{
			log: "2001-01-01T01:01:01.481967+09:00 4 [ERROR] WSREP: Node consistency compromised, aborting...",
			expected: regexTestState{
				State: "CLOSED",
			},
			expectedOut: "consistency compromised",
			key:         "RegexWsrepConsistenctyCompromised",
		},
		// same event with the alternative "compromized" spelling found in some versions
		{
			log: "2001-01-01T01:01:01.000000Z 86 [ERROR] WSREP: Node consistency compromized, aborting...",
			expected: regexTestState{
				State: "CLOSED",
			},
			expectedOut: "consistency compromised",
			key:         "RegexWsrepConsistenctyCompromised",
		},
		{
			log:         "2001-01-01 5:06:12 47285568576576 [Note] WSREP: gcomm: bootstrapping new group 'cluster'",
			expectedOut: "bootstrapping",
			key:         "RegexBootstrap",
		},
		{
			log:         "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Found saved state: 8e862473-455e-11e8-a0ca-3fcd8faf3209:-1, safe_to_bootstrap: 1",
			expectedOut: "safe_to_bootstrap: 1",
			key:         "RegexSafeToBoostrapSet",
		},
		// safe_to_bootstrap: 0 is the common case and must not be reported
		{
			name:        "should not match",
			log:         "2001-01-01T01:01:01.000000Z 0 [Note] WSREP: Found saved state: 8e862473-455e-11e8-a0ca-3fcd8faf3209:-1, safe_to_bootstrap: 0",
			expectedErr: true,
			key:         "RegexSafeToBoostrapSet",
		},
		{
			log:         "2001-01-01T01:01:01.000000Z 0 [Warning] [MY-000000] [Galera] Could not open state file for reading: '/var/lib/mysql//grastate.dat'",
			expectedOut: "no grastate.dat file",
			key:         "RegexNoGrastate",
		},
		{
			log:         "2001-01-01T01:01:01.000000Z 0 [Warning] [MY-000000] [Galera] No persistent state found. Bootstraping with default state",
			expectedOut: "bootstrapping(empty grastate)",
			key:         "RegexBootstrapingDefaultState",
		},
	}

	iterateRegexTest(t, ViewsMap, tests)
}

View File

@@ -34,7 +34,7 @@ func (s *sed) Run() error {
if err != nil {
return errors.Wrap(err, "found nothing worth replacing")
}
ctxs := timeline.GetLatestUpdatedContextsByNodes()
ctxs := timeline.GetLatestContextsByNodes()
args := []string{}
for key, ctx := range ctxs {

View File

@@ -658,6 +658,14 @@ mysql version 8.0.28
2023-03-17T04:46:57.990506+02:00 PRIMARY(n=1) | |
2023-03-17T04:46:57.990658+02:00 (restored)OPEN -> JOINED | |
2023-03-17T04:46:57.990686+02:00 JOINED -> SYNCED | |
tests/logs/merge_rotated_daily/node2.20230316.log
(file path)
 V 
tests/logs/merge_rotated_daily/node2.20230317.log
tests/logs/merge_rotated_daily/node3.20230316.log
(file path)
 V 
tests/logs/merge_rotated_daily/node3.20230317.log
tests/logs/merge_rotated_daily/node1.20230317.log
(file path)
 V 
@@ -668,11 +676,11 @@ mysql version 8.0.28
2023-03-18T21:18:12.740972+02:00 starting(8.0.28) | |
2023-03-18T21:18:12.746454+02:00 started(cluster) | |
2023-03-18T21:18:12.749007+02:00 safe_to_bootstrap: 1 | |
tests/logs/merge_rotated_daily/node2.20230316.log
tests/logs/merge_rotated_daily/node2.20230317.log
(file path)
 V 
tests/logs/merge_rotated_daily/node2.20230318.log
tests/logs/merge_rotated_daily/node3.20230316.log
tests/logs/merge_rotated_daily/node3.20230317.log
(file path)
 V 
tests/logs/merge_rotated_daily/node3.20230318.log

View File

@@ -661,21 +661,21 @@ mysql version 8.0.31 8.
2023-05-29T07:16:49.685402Z cluster1-2 joined | |
2023-05-29T07:16:49.685497Z | NON-PRIMARY(n=3) |
2023-05-29T07:16:49.686149Z NON-PRIMARY(n=3) | |
2023-05-29T07:16:49.686256Z | | cluster1-0 joined
2023-05-29T07:16:49.686256Z | | unspecified joined
2023-05-29T07:16:49.686310Z | | cluster1-1 joined
2023-05-29T07:16:49.687051Z | | NON-PRIMARY(n=3)
2023-05-29T07:17:06.846743Z | | received shutdown
2023-05-29T07:17:15.812255Z received shutdown | |
2023-05-29T07:17:16.848791Z | cluster1-0 joined |
2023-05-29T07:17:16.848924Z | cluster1-2 left |
2023-05-29T07:17:16.849374Z cluster1-1 joined | |
2023-05-29T07:17:16.849499Z cluster1-2 left | |
2023-05-29T07:17:16.848791Z | unspecified joined |
2023-05-29T07:17:16.848924Z | unspecified left |
2023-05-29T07:17:16.849374Z unspecified joined | |
2023-05-29T07:17:16.849499Z unspecified left | |
2023-05-29T07:17:16.850012Z | NON-PRIMARY(n=2) |
2023-05-29T07:17:16.850477Z | | NON-PRIMARY(n=1)
2023-05-29T07:17:16.850489Z NON-PRIMARY(n=2) | |
2023-05-29T07:17:16.850794Z | | OPEN -> CLOSED
2023-05-29T07:17:17.387918Z | received shutdown |
2023-05-29T07:17:26.815037Z | cluster1-0 left |
2023-05-29T07:17:26.815037Z | unspecified left |
2023-05-29T07:17:26.815352Z | NON-PRIMARY(n=1) |
2023-05-29T07:17:26.816357Z NON-PRIMARY(n=1) | |
2023-05-29T07:17:26.816671Z OPEN -> CLOSED | |

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,202 @@
package translate
import (
"encoding/json"
"sort"
"sync"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
)
// translationUnit is one timestamped piece of identity information (an IP, a
// node name, or a connection method). Keeping the timestamp allows lookups to
// return the value that was valid at a given point in time instead of only
// the most recently seen one.
type translationUnit struct {
	Value     string
	Timestamp time.Time
}

// translationsDB is the singleton store of every identity association found
// in the logs. Histories are stored as ordered slices of units so that older
// associations are never overwritten by newer ones.
type translationsDB struct {
	// 1 hash: only 1 IP. wsrep_node_address is not dynamic
	// if there's a restart, the hash will change as well anyway
	HashToIP map[string]translationUnit

	// wsrep_node_name is dynamic
	HashToNodeNames map[string][]translationUnit
	IPToNodeNames   map[string][]translationUnit

	// in case methods changed in the middle, tls=>ssl
	IPToMethods map[string][]translationUnit

	// maps are not threadsafe; guards every access to the maps above
	rwlock *sync.RWMutex
}
// AssumeIPStable controls how IPs are resolved to node names: when true
// (on-premise default), every hash that ever shared an IP is trusted to
// belong to the same node; when false (k8s-like setups where IPs are
// reused), only the hash=>ip pair valid at the queried time is trusted.
var AssumeIPStable bool = true

// db is the package-level singleton; see the commit rationale: a single
// shared store replaces the per-context maps that used to be merged around.
var db = translationsDB{}

func init() {
	initTranslationsDB()
}
func initTranslationsDB() {
db = translationsDB{
HashToIP: map[string]translationUnit{},
HashToNodeNames: map[string][]translationUnit{},
IPToMethods: map[string][]translationUnit{},
IPToNodeNames: map[string][]translationUnit{},
rwlock: &sync.RWMutex{},
}
}
// ResetDB wipes every stored translation by rebuilding the singleton.
// only useful for tests
func ResetDB() {
	initTranslationsDB()
}
// DBToJson dumps the whole translation database as indented JSON, mainly to
// serve as a human-readable source of information in itself.
// On a marshaling failure it returns an empty string (same as the original
// best-effort behavior, since string(nil) == "").
func DBToJson() string {
	out, err := json.MarshalIndent(db, "", "\t")
	if err != nil {
		return ""
	}
	return string(out)
}
func GetDB() translationsDB {
return db
}
// AddHashToIP records the single IP behind a node hash (a hash maps to
// exactly one IP: wsrep_node_address is not dynamic, and a restart produces
// a new hash anyway).
func AddHashToIP(hash, ip string, ts time.Time) {
	db.rwlock.Lock()
	db.HashToIP[hash] = translationUnit{Value: ip, Timestamp: ts}
	db.rwlock.Unlock()
}
func AddHashToNodeName(hash, name string, ts time.Time) {
db.rwlock.Lock()
defer db.rwlock.Unlock()
name = utils.ShortNodeName(name)
if len(db.HashToNodeNames[hash]) > 0 && db.HashToNodeNames[hash][len(db.HashToNodeNames[hash])-1].Value == name {
return
}
db.HashToNodeNames[hash] = append(db.HashToNodeNames[hash], translationUnit{Value: name, Timestamp: ts})
}
func AddIPToNodeName(ip, name string, ts time.Time) {
db.rwlock.Lock()
defer db.rwlock.Unlock()
name = utils.ShortNodeName(name)
if len(db.IPToNodeNames[ip]) > 0 && db.IPToNodeNames[ip][len(db.IPToNodeNames[ip])-1].Value == name {
return
}
db.IPToNodeNames[ip] = append(db.IPToNodeNames[ip], translationUnit{Value: name, Timestamp: ts})
}
func AddIPToMethod(ip, method string, ts time.Time) {
db.rwlock.Lock()
defer db.rwlock.Unlock()
if len(db.IPToMethods[ip]) > 0 && db.IPToMethods[ip][len(db.IPToMethods[ip])-1].Value == method {
return
}
db.IPToMethods[ip] = append(db.IPToMethods[ip], translationUnit{Value: method, Timestamp: ts})
}
// GetIPFromHash returns the IP associated with a hash, or "" when unknown
// (a missing key yields the zero translationUnit, whose Value is "").
func GetIPFromHash(hash string) string {
	db.rwlock.RLock()
	unit := db.HashToIP[hash]
	db.rwlock.RUnlock()
	return unit.Value
}
// mostAppropriateValueFromTS picks the value that was in effect at ts.
// The very first unit always serves as the base value, which lets information
// discovered later in the logs be used retroactively; any unit that is both
// newer than the current best and not after ts then takes precedence.
func mostAppropriateValueFromTS(units []translationUnit, ts time.Time) string {
	if len(units) == 0 {
		return ""
	}
	best := units[0]
	for _, candidate := range units[1:] {
		if candidate.Timestamp.After(best.Timestamp) && !candidate.Timestamp.After(ts) {
			best = candidate
		}
	}
	return best.Value
}
// GetNodeNameFromHash returns the node name a hash had at time ts, or "" if unknown.
func GetNodeNameFromHash(hash string, ts time.Time) string {
	db.rwlock.RLock()
	history := db.HashToNodeNames[hash]
	db.rwlock.RUnlock()

	return mostAppropriateValueFromTS(history, ts)
}
// GetNodeNameFromIP returns the node name an IP had at time ts, or "" if unknown.
func GetNodeNameFromIP(ip string, ts time.Time) string {
	db.rwlock.RLock()
	history := db.IPToNodeNames[ip]
	db.rwlock.RUnlock()

	return mostAppropriateValueFromTS(history, ts)
}
// GetMethodFromIP returns the connection method an IP used at time ts, or "" if unknown.
func GetMethodFromIP(ip string, ts time.Time) string {
	db.rwlock.RLock()
	history := db.IPToMethods[ip]
	db.rwlock.RUnlock()

	return mostAppropriateValueFromTS(history, ts)
}
// getHashSliceFromIP collects every hash that was ever resolved to the given
// IP. The result is sorted by discovery time (oldest first) because map
// iteration order is random and the output must stay deterministic.
func (db *translationsDB) getHashSliceFromIP(ip string) []translationUnit {
	db.rwlock.RLock()
	defer db.rwlock.RUnlock()

	units := []translationUnit{}
	for hash, unit := range db.HashToIP {
		if unit.Value != ip {
			continue
		}
		units = append(units, translationUnit{Value: hash, Timestamp: unit.Timestamp})
	}

	sort.Slice(units, func(i, j int) bool { return units[i].Timestamp.Before(units[j].Timestamp) })
	return units
}
// getHashFromIP returns the hash that pointed to ip at time ts, or "" if none.
func (db *translationsDB) getHashFromIP(ip string, ts time.Time) string {
	return mostAppropriateValueFromTS(db.getHashSliceFromIP(ip), ts)
}
// SimplestInfoFromIP is useful to get the most easily readable string for a
// given IP. This only has impacts on display.
// In order of preference: wsrep_node_name (or galera "node" name), hostname, ip.
func SimplestInfoFromIP(ip string, date time.Time) string {
	if name := GetNodeNameFromIP(ip, date); name != "" {
		return name
	}

	if !AssumeIPStable {
		// on k8s setups we cannot assume IPs are stable: they are reused
		// between nodes, so we strictly use the ip=>hash pair that was valid
		// at this specific timeframe
		hash := db.getHashFromIP(ip, date)
		if hash == "" {
			return ip
		}
		if name := GetNodeNameFromHash(hash, date); name != "" {
			return name
		}
		return ip
	}

	// AssumeIPStable: trust that every hash/name that ever shared this IP
	// ultimately belongs to the same node. On on-premise setups this is safe
	// to assume.
	for _, unit := range db.getHashSliceFromIP(ip) {
		if name := GetNodeNameFromHash(unit.Value, date); name != "" {
			return name
		}
	}
	return ip
}
// SimplestInfoFromHash resolves a hash to its most readable identity:
// the node name first, then whatever its IP resolves to, and finally the raw
// hash itself when nothing better is known.
func SimplestInfoFromHash(hash string, date time.Time) string {
	if name := GetNodeNameFromHash(hash, date); name != "" {
		return name
	}
	ip := GetIPFromHash(hash)
	if ip == "" {
		return hash
	}
	return SimplestInfoFromIP(ip, date)
}

View File

@@ -4,6 +4,7 @@ import (
"encoding/json"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
)
@@ -33,13 +34,6 @@ type LogCtx struct {
Desynced bool
minVerbosity Verbosity
Conflicts Conflicts
// translation maps, the only elements that can be shared and copied by pointer
HashToIP map[string]string
HashToNodeName map[string]string
IPToHostname map[string]string
IPToMethod map[string]string
IPToNodeName map[string]string
}
func NewLogCtx() LogCtx {
@@ -50,11 +44,6 @@ func NewLogCtx() LogCtx {
func (ctx *LogCtx) InitMaps() {
ctx.SSTs = map[string]SST{}
ctx.HashToIP = map[string]string{}
ctx.IPToHostname = map[string]string{}
ctx.IPToMethod = map[string]string{}
ctx.IPToNodeName = map[string]string{}
ctx.HashToNodeName = map[string]string{}
}
// State will return the wsrep state of the current file type
@@ -109,67 +98,8 @@ func (ctx *LogCtx) IsPrimary() bool {
return utils.SliceContains([]string{"SYNCED", "DONOR", "DESYNCED", "JOINER", "PRIMARY"}, ctx.State())
}
func (ctx *LogCtx) OwnHostname() string {
for _, ip := range ctx.OwnIPs {
if hn, ok := ctx.IPToHostname[ip]; ok {
return hn
}
}
for _, hash := range ctx.OwnHashes {
if hn, ok := ctx.IPToHostname[ctx.HashToIP[hash]]; ok {
return hn
}
}
return ""
}
func (ctx *LogCtx) HashesFromIP(ip string) []string {
hashes := []string{}
for hash, ip2 := range ctx.HashToIP {
if ip == ip2 {
hashes = append(hashes, hash)
}
}
return hashes
}
func (ctx *LogCtx) HashesFromNodeName(nodename string) []string {
hashes := []string{}
for hash, nodename2 := range ctx.HashToNodeName {
if nodename == nodename2 {
hashes = append(hashes, hash)
}
}
return hashes
}
func (ctx *LogCtx) IPsFromNodeName(nodename string) []string {
ips := []string{}
for ip, nodename2 := range ctx.IPToNodeName {
if nodename == nodename2 {
ips = append(ips, ip)
}
}
return ips
}
func (ctx *LogCtx) AllNodeNames() []string {
nodenames := ctx.OwnNames
for _, nn := range ctx.HashToNodeName {
if !utils.SliceContains(nodenames, nn) {
nodenames = append(nodenames, nn)
}
}
for _, nn := range ctx.IPToNodeName {
if !utils.SliceContains(nodenames, nn) {
nodenames = append(nodenames, nn)
}
}
return nodenames
}
// AddOwnName propagates a name into the translation maps using the trusted node's known own hashes and ips
func (ctx *LogCtx) AddOwnName(name string) {
func (ctx *LogCtx) AddOwnName(name string, date time.Time) {
// used to be a simple "if utils.SliceContains", changed to "is it the last known name?"
// because somes names/ips come back and forth, we should keep track of that
name = utils.ShortNodeName(name)
@@ -178,61 +108,37 @@ func (ctx *LogCtx) AddOwnName(name string) {
}
ctx.OwnNames = append(ctx.OwnNames, name)
for _, hash := range ctx.OwnHashes {
ctx.HashToNodeName[hash] = name
translate.AddHashToNodeName(hash, name, date)
}
for _, ip := range ctx.OwnIPs {
ctx.IPToNodeName[ip] = name
translate.AddIPToNodeName(ip, name, date)
}
}
// AddOwnHash propagates a hash into the translation maps
func (ctx *LogCtx) AddOwnHash(hash string) {
func (ctx *LogCtx) AddOwnHash(hash string, date time.Time) {
if utils.SliceContains(ctx.OwnHashes, hash) {
return
}
ctx.OwnHashes = append(ctx.OwnHashes, hash)
for _, ip := range ctx.OwnIPs {
ctx.HashToIP[hash] = ip
translate.AddHashToIP(hash, ip, date)
}
for _, name := range ctx.OwnNames {
ctx.HashToNodeName[hash] = name
translate.AddHashToNodeName(hash, name, date)
}
}
// AddOwnIP propagates a ip into the translation maps
func (ctx *LogCtx) AddOwnIP(ip string) {
func (ctx *LogCtx) AddOwnIP(ip string, date time.Time) {
// see AddOwnName comment
if len(ctx.OwnIPs) > 0 && ctx.OwnIPs[len(ctx.OwnIPs)-1] == ip {
return
}
ctx.OwnIPs = append(ctx.OwnIPs, ip)
for _, name := range ctx.OwnNames {
ctx.IPToNodeName[ip] = name
}
}
// MergeMapsWith will take a slice of contexts and merge every translation maps
// into the base context. It won't touch "local" infos such as "ownNames"
func (base *LogCtx) MergeMapsWith(ctxs []LogCtx) {
for _, ctx := range ctxs {
for hash, ip := range ctx.HashToIP {
base.HashToIP[hash] = ip
}
for hash, nodename := range ctx.HashToNodeName {
base.HashToNodeName[hash] = nodename
}
for ip, hostname := range ctx.IPToHostname {
base.IPToHostname[ip] = hostname
}
for ip, nodename := range ctx.IPToNodeName {
base.IPToNodeName[ip] = nodename
}
for ip, method := range ctx.IPToMethod {
base.IPToMethod[ip] = method
}
translate.AddIPToNodeName(ip, name, date)
}
}
@@ -248,7 +154,6 @@ func (base *LogCtx) Inherit(ctx LogCtx) {
base.Version = ctx.Version
}
base.Conflicts = append(ctx.Conflicts, base.Conflicts...)
base.MergeMapsWith([]LogCtx{ctx})
}
func (ctx *LogCtx) SetSSTTypeMaybe(ssttype string) {
@@ -268,10 +173,10 @@ func (ctx *LogCtx) ConfirmSSTMetadata(shiftTimestamp time.Time) {
for key, sst := range ctx.SSTs {
if sst.MustHaveHappenedLocally(shiftTimestamp) {
if ctx.State() == "DONOR" {
ctx.AddOwnName(key)
ctx.AddOwnName(key, shiftTimestamp)
}
if ctx.State() == "JOINER" {
ctx.AddOwnName(sst.Joiner)
ctx.AddOwnName(sst.Joiner, shiftTimestamp)
}
}
}
@@ -295,11 +200,6 @@ func (l LogCtx) MarshalJSON() ([]byte, error) {
MyIdx string
MemberCount int
Desynced bool
HashToIP map[string]string
HashToNodeName map[string]string
IPToHostname map[string]string
IPToMethod map[string]string
IPToNodeName map[string]string
MinVerbosity Verbosity
Conflicts Conflicts
}{
@@ -316,11 +216,6 @@ func (l LogCtx) MarshalJSON() ([]byte, error) {
MyIdx: l.MyIdx,
MemberCount: l.MemberCount,
Desynced: l.Desynced,
HashToIP: l.HashToIP,
HashToNodeName: l.HashToNodeName,
IPToHostname: l.IPToHostname,
IPToMethod: l.IPToMethod,
IPToNodeName: l.IPToNodeName,
MinVerbosity: l.minVerbosity,
Conflicts: l.Conflicts,
})

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
)
@@ -93,3 +94,15 @@ type LogDisplayer func(LogCtx) string
// SimpleDisplayer returns a LogDisplayer that always prints the fixed string
// s, ignoring the (possibly updated) context it receives at display time.
func SimpleDisplayer(s string) LogDisplayer {
	return func(_ LogCtx) string { return s }
}
// FormatByIPDisplayer returns a LogDisplayer that fills layout (a fmt format
// with a single %s verb) with the most readable identity known for ip.
// Resolution is deferred to display time so that identity information
// discovered later in the logs is taken into account.
func FormatByIPDisplayer(layout, ip string, date time.Time) LogDisplayer {
	return func(LogCtx) string {
		identity := translate.SimplestInfoFromIP(ip, date)
		return fmt.Sprintf(layout, identity)
	}
}
// FormatByHashDisplayer returns a LogDisplayer that fills layout (a fmt
// format with a single %s verb) with the most readable identity known for
// the node hash. Resolution is deferred to display time so that identity
// information discovered later in the logs is taken into account.
func FormatByHashDisplayer(layout, hash string, date time.Time) LogDisplayer {
	return func(LogCtx) string {
		identity := translate.SimplestInfoFromHash(hash, date)
		return fmt.Sprintf(layout, identity)
	}
}

View File

@@ -3,6 +3,7 @@ package types
import (
"encoding/json"
"regexp"
"time"
)
// LogRegex is the work struct to work on lines that were sent by "grep"
@@ -15,17 +16,17 @@ type LogRegex struct {
// Taking into arguments the current context and log line, returning an updated context and a closure to get the msg to display
// Why a closure: to later inject an updated context instead of the current partial context
// This ensure every hash/ip/nodenames are already known when crafting the message
Handler func(map[string]string, LogCtx, string) (LogCtx, LogDisplayer)
Handler func(map[string]string, LogCtx, string, time.Time) (LogCtx, LogDisplayer)
Verbosity Verbosity // To be able to hide details from summaries
}
func (l *LogRegex) Handle(ctx LogCtx, line string) (LogCtx, LogDisplayer) {
func (l *LogRegex) Handle(ctx LogCtx, line string, date time.Time) (LogCtx, LogDisplayer) {
if ctx.minVerbosity > l.Verbosity {
ctx.minVerbosity = l.Verbosity
}
mergedResults := map[string]string{}
if l.InternalRegex == nil {
return l.Handler(mergedResults, ctx, line)
return l.Handler(mergedResults, ctx, line, date)
}
slice := l.InternalRegex.FindStringSubmatch(line)
if len(slice) == 0 {
@@ -37,7 +38,7 @@ func (l *LogRegex) Handle(ctx LogCtx, line string) (LogCtx, LogDisplayer) {
}
mergedResults[subexpname] = slice[l.InternalRegex.SubexpIndex(subexpname)]
}
return l.Handler(mergedResults, ctx, line)
return l.Handler(mergedResults, ctx, line, date)
}
func (l *LogRegex) MarshalJSON() ([]byte, error) {

View File

@@ -31,7 +31,7 @@ func (timeline Timeline) MergeByIdentifier(lt LocalTimeline) {
// identify the node with the easiest to read information
// this is critical part to aggregate logs: this is what enable to merge logs
// ultimately the "identifier" will be used for columns header
node := Identifier(lt[len(lt)-1].Ctx)
node := Identifier(lt[len(lt)-1].Ctx, getlasttime(lt))
if lt2, ok := timeline[node]; ok {
lt = MergeTimeline(lt2, lt)
}
@@ -147,24 +147,14 @@ func CutTimelineAt(t LocalTimeline, at time.Time) LocalTimeline {
return t[i:]
}
func (t *Timeline) GetLatestUpdatedContextsByNodes() map[string]LogCtx {
updatedCtxs := map[string]LogCtx{}
latestctxs := []LogCtx{}
func (t *Timeline) GetLatestContextsByNodes() map[string]LogCtx {
latestctxs := map[string]LogCtx{}
for key, localtimeline := range *t {
if len(localtimeline) == 0 {
updatedCtxs[key] = NewLogCtx()
continue
}
latestctx := localtimeline[len(localtimeline)-1].Ctx
latestctxs = append(latestctxs, latestctx)
updatedCtxs[key] = latestctx
latestctxs[key] = localtimeline[len(localtimeline)-1].Ctx
}
for _, ctx := range updatedCtxs {
ctx.MergeMapsWith(latestctxs)
}
return updatedCtxs
return latestctxs
}
// iterateNode is used to search the source node(s) that contains the next chronological events

View File

@@ -1,8 +1,9 @@
package types
import (
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
"github.com/rs/zerolog/log"
"time"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/translate"
)
// Identifier is used to identify a node timeline.
@@ -10,47 +11,17 @@ import (
// It will also impacts how logs are merged if we have multiple logs per nodes
//
// In order of preference: wsrep_node_name (or galera "node" name), hostname, ip, filepath
func Identifier(ctx LogCtx) string {
func Identifier(ctx LogCtx, date time.Time) string {
if len(ctx.OwnNames) > 0 {
return ctx.OwnNames[len(ctx.OwnNames)-1]
}
if len(ctx.OwnIPs) > 0 {
return DisplayNodeSimplestForm(ctx, ctx.OwnIPs[len(ctx.OwnIPs)-1])
return translate.SimplestInfoFromIP(ctx.OwnIPs[len(ctx.OwnIPs)-1], date)
}
if len(ctx.OwnHashes) > 0 {
if name, ok := ctx.HashToNodeName[ctx.OwnHashes[0]]; ok {
return name
}
if ip, ok := ctx.HashToIP[ctx.OwnHashes[0]]; ok {
return DisplayNodeSimplestForm(ctx, ip)
for _, hash := range ctx.OwnHashes {
if out := translate.SimplestInfoFromHash(hash, date); out != hash {
return out
}
}
return ctx.FilePath
}
// DisplayNodeSimplestForm is useful to get the most easily to read string for a given IP
// This only has impacts on display
// In order of preference: wsrep_node_name (or galera "node" name), hostname, ip
//
// NOTE(review): this is the pre-refactor function shown as deleted in this
// diff; its role is taken over by the translate package (see
// translate.SimplestInfoFromIP used by Identifier). Kept here verbatim.
func DisplayNodeSimplestForm(ctx LogCtx, ip string) string {
// direct IP -> node name mapping is the best case
if nodename, ok := ctx.IPToNodeName[ip]; ok {
s := utils.ShortNodeName(nodename)
log.Debug().Str("ip", ip).Str("simplestform", s).Str("from", "IPToNodeName").Msg("nodeSimplestForm")
return s
}
// otherwise walk hash->IP associations to reach a node name; Go map
// iteration order is random, so with multiple matching hashes the one
// found first is arbitrary (a known source of output variation)
for hash, storedip := range ctx.HashToIP {
if ip == storedip {
if nodename, ok := ctx.HashToNodeName[hash]; ok {
s := utils.ShortNodeName(nodename)
log.Debug().Str("ip", ip).Str("simplestform", s).Str("from", "HashToNodeName").Msg("nodeSimplestForm")
return s
}
}
}
// no node name available: fall back to hostname if known
if hostname, ok := ctx.IPToHostname[ip]; ok {
log.Debug().Str("ip", ip).Str("simplestform", hostname).Str("from", "IPToHostname").Msg("nodeSimplestForm")
return hostname
}
// nothing better known: return the IP itself
log.Debug().Str("ip", ip).Str("simplestform", ip).Str("from", "default").Msg("nodeSimplestForm")
return ip
}

View File

@@ -6,7 +6,6 @@ import (
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/regex"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/types"
"github.com/percona/percona-toolkit/src/go/pt-galera-log-explainer/utils"
"github.com/pkg/errors"
)
@@ -28,7 +27,7 @@ func (w *whois) Run() error {
if err != nil {
return errors.Wrap(err, "found nothing to translate")
}
ctxs := timeline.GetLatestUpdatedContextsByNodes()
ctxs := timeline.GetLatestContextsByNodes()
ni := whoIs(ctxs, CLI.Whois.Search)
@@ -41,62 +40,64 @@ func (w *whois) Run() error {
}
func whoIs(ctxs map[string]types.LogCtx, search string) types.NodeInfo {
ni := types.NodeInfo{Input: search}
if regex.IsNodeUUID(search) {
search = utils.UUIDToShortUUID(search)
}
var (
ips []string
hashes []string
nodenames []string
)
for _, ctx := range ctxs {
if utils.SliceContains(ctx.OwnNames, search) || utils.SliceContains(ctx.OwnHashes, search) || utils.SliceContains(ctx.OwnIPs, search) {
ni.NodeNames = ctx.OwnNames
ni.NodeUUIDs = ctx.OwnHashes
ni.IPs = ctx.OwnIPs
ni.Hostname = ctx.OwnHostname()
/*
ni := types.NodeInfo{Input: search}
if regex.IsNodeUUID(search) {
search = utils.UUIDToShortUUID(search)
}
var (
ips []string
hashes []string
nodenames []string
)
for _, ctx := range ctxs {
if utils.SliceContains(ctx.OwnNames, search) || utils.SliceContains(ctx.OwnHashes, search) || utils.SliceContains(ctx.OwnIPs, search) {
ni.NodeNames = ctx.OwnNames
ni.NodeUUIDs = ctx.OwnHashes
ni.IPs = ctx.OwnIPs
ni.Hostname = ctx.OwnHostname()
}
if nodename, ok := ctx.HashToNodeName[search]; ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
hashes = utils.SliceMergeDeduplicate(hashes, []string{search})
}
if nodename, ok := ctx.HashToNodeName[search]; ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
hashes = utils.SliceMergeDeduplicate(hashes, []string{search})
}
if ip, ok := ctx.HashToIP[search]; ok {
ips = utils.SliceMergeDeduplicate(ips, []string{ip})
hashes = utils.SliceMergeDeduplicate(hashes, []string{search})
if ip, ok := ctx.HashToIP[search]; ok {
ips = utils.SliceMergeDeduplicate(ips, []string{ip})
hashes = utils.SliceMergeDeduplicate(hashes, []string{search})
} else if nodename, ok := ctx.IPToNodeName[search]; ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
ips = utils.SliceMergeDeduplicate(ips, []string{search})
} else if nodename, ok := ctx.IPToNodeName[search]; ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
ips = utils.SliceMergeDeduplicate(ips, []string{search})
} else if utils.SliceContains(ctx.AllNodeNames(), search) {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{search})
}
} else if utils.SliceContains(ctx.AllNodeNames(), search) {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{search})
}
for _, nodename := range nodenames {
hashes = utils.SliceMergeDeduplicate(hashes, ctx.HashesFromNodeName(nodename))
ips = utils.SliceMergeDeduplicate(ips, ctx.IPsFromNodeName(nodename))
}
for _, nodename := range nodenames {
hashes = utils.SliceMergeDeduplicate(hashes, ctx.HashesFromNodeName(nodename))
ips = utils.SliceMergeDeduplicate(ips, ctx.IPsFromNodeName(nodename))
}
for _, ip := range ips {
hashes = utils.SliceMergeDeduplicate(hashes, ctx.HashesFromIP(ip))
nodename, ok := ctx.IPToNodeName[ip]
if ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
for _, ip := range ips {
hashes = utils.SliceMergeDeduplicate(hashes, ctx.HashesFromIP(ip))
nodename, ok := ctx.IPToNodeName[ip]
if ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
}
}
for _, hash := range hashes {
nodename, ok := ctx.HashToNodeName[hash]
if ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
}
}
}
}
for _, hash := range hashes {
nodename, ok := ctx.HashToNodeName[hash]
if ok {
nodenames = utils.SliceMergeDeduplicate(nodenames, []string{nodename})
}
}
}
ni.NodeNames = nodenames
ni.NodeUUIDs = hashes
ni.IPs = ips
return ni
ni.NodeNames = nodenames
ni.NodeUUIDs = hashes
ni.IPs = ips
return ni
*/
return types.NodeInfo{}
}