PT-2101 pt-mongodb-query-digest does not work on standalone server (#630)

* PT-2101 - pt-mongodb-query-digest doesn't work on standalone server

Restored the test case that had been disabled pending a new sandbox which was never created.
Added debugging output to find out why the tool does not behave as expected.
Most of the changes in main.go will be removed once the fix is complete.

* PT-2101 - pt-mongodb-query-digest doesn't work on standalone server

- Changed the code so it works with a standalone server (see the sketch after this list)
- Updated main_test.go so it works with MongoDB 5.0
- Removed eval.js and group.js, because these commands were removed in MongoDB 4.2
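
For reference, the standalone case boils down to asking the target database for its current profiler level with the "profile: -1" command and reading the returned "was" field, which needs no replica-set metadata. A minimal, self-contained sketch (the connection string, database name, and status struct are placeholders, not the tool's own types):

package main

import (
	"context"
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson/primitive"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func main() {
	ctx := context.Background()
	client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://127.0.0.1:27017"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Disconnect(ctx)

	// {profile: -1} only reports the current level; "was" is 0 (off), 1 (slow ops) or 2 (all ops).
	var status struct {
		Was int `bson:"was"`
	}
	if err := client.Database("test").RunCommand(ctx, primitive.M{"profile": -1}).Decode(&status); err != nil {
		log.Fatal(err)
	}
	fmt.Println("profiler level:", status.Was)
}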

* PT-2101 - pt-mongodb-query-digest doesn't work on standalone server

Updated go.mod and go.sum from the 3.x branch
Sveta Smirnova
2023-06-14 15:04:57 +03:00
committed by GitHub
parent b566350b64
commit e998bd5b55
7 changed files with 534 additions and 500 deletions

View File

@@ -25,18 +25,18 @@ import (
 )
 
 // Enable enabled the mongo profiler
-func Enable(ctx context.Context, client *mongo.Client) error {
-	res := client.Database("admin").RunCommand(ctx, primitive.M{"profile": 2})
+func Enable(ctx context.Context, client *mongo.Client, database string) error {
+	res := client.Database(database).RunCommand(ctx, primitive.M{"profile": 2})
 	return res.Err()
 }
 
 // Disable disables the mongo profiler
-func Disable(ctx context.Context, client *mongo.Client) error {
-	res := client.Database("admin").RunCommand(ctx, primitive.M{"profile": 0})
+func Disable(ctx context.Context, client *mongo.Client, database string) error {
+	res := client.Database(database).RunCommand(ctx, primitive.M{"profile": 0})
 	return res.Err()
 }
 
 // Drop drops the system.profile collection for clean up
-func Drop(ctx context.Context, client *mongo.Client) error {
-	return client.Database("").Collection("system.profile").Drop(ctx)
+func Drop(ctx context.Context, client *mongo.Client, database string) error {
+	return client.Database(database).Collection("system.profile").Drop(ctx)
 }
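
A minimal usage sketch of the updated helpers, assuming the import path that the new main_test.go in this commit uses (github.com/percona/percona-toolkit/src/go/lib/profiling); the DSN and the "test" database are placeholders:

package main

import (
	"context"
	"log"

	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"

	"github.com/percona/percona-toolkit/src/go/lib/profiling"
)

func main() {
	ctx := context.Background()
	client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://127.0.0.1:27017"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Disconnect(ctx)

	// Start from a clean slate, then enable profiling for the "test" database
	// only, instead of the previously hard-coded "admin" database.
	_ = profiling.Disable(ctx, client, "test")
	_ = profiling.Drop(ctx, client, "test")
	if err := profiling.Enable(ctx, client, "test"); err != nil {
		log.Fatal(err)
	}
	defer profiling.Disable(ctx, client, "test")

	// ... run the workload whose queries should end up in test.system.profile ...
}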

View File

@@ -156,6 +156,10 @@ func (f *Fingerprinter) Fingerprint(doc proto.SystemProfile) (Fingerprint, error
 			op = "eval"
 			collection = ""
 			retKeys = []string{}
+		case "drop":
+			retKeys = []string{}
+		case "createIndexes":
+			retKeys = []string{}
 		}
 	default:
 		op = doc.Op
@@ -177,7 +181,6 @@ func (f *Fingerprinter) Fingerprint(doc proto.SystemProfile) (Fingerprint, error
 	if keys != "" {
 		parts = append(parts, keys)
 	}
-
 	ns = []string{}
 	if database != "" {
 		ns = append(ns, database)

View File

@@ -138,7 +138,7 @@ func (p *Profile) getDocs(ctx context.Context) {
 		for _, filter := range p.filters {
 			if !filter(doc) {
 				valid = false
-				return
+				break
 			}
 		}
 		if !valid {
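
The switch from return to break matters because the filter loop runs inside the loop that walks the system.profile cursor: return would abandon all remaining documents after the first rejected one, while break only stops evaluating filters for the current document. A self-contained toy version of the pattern (data and filters are made up):

package main

import "fmt"

func main() {
	filters := []func(int) bool{func(n int) bool { return n%2 == 0 }}
	for _, doc := range []int{1, 2, 3, 4} {
		valid := true
		for _, filter := range filters {
			if !filter(doc) {
				valid = false
				break // skip only this document; "return" here would drop docs 2-4 as well
			}
		}
		if !valid {
			continue
		}
		fmt.Println("processing", doc) // prints 2 and 4
	}
}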

View File

@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"os"
 	"sort"
@@ -133,7 +134,7 @@ func main() {
 		log.Fatalf("Cannot connect to MongoDB: %s", err)
 	}
 
-	isProfilerEnabled, err := isProfilerEnabled(ctx, clientOptions)
+	isProfilerEnabled, err := isProfilerEnabled(ctx, clientOptions, opts.Database)
 	if err != nil {
 		log.Errorf("Cannot get profiler status: %s", err.Error())
 		os.Exit(4)
@@ -523,19 +524,37 @@ func sortQueries(queries []stats.QueryStats, orderby []string) []stats.QueryStat
 	return queries
 }
 
-func isProfilerEnabled(ctx context.Context, clientOptions *options.ClientOptions) (bool, error) {
+func isProfilerEnabled(ctx context.Context, clientOptions *options.ClientOptions, dbname string) (bool, error) {
 	var ps proto.ProfilerStatus
 	replicaMembers, err := util.GetReplicasetMembers(ctx, clientOptions)
-	if err != nil {
+	if err != nil && !errors.Is(err, util.ShardingNotEnabledError) {
 		return false, err
 	}
 
+	if len(replicaMembers) == 0 {
+		client, err := mongo.NewClient(clientOptions)
+		if err != nil {
+			return false, err
+		}
+		if err = client.Connect(ctx); err != nil {
+			return false, err
+		}
+		client.Database(dbname).RunCommand(ctx, primitive.M{"profile": -1}).Decode(&ps)
+		if ps.Was == 0 {
+			return false, nil
+		}
+	}
+
 	for _, member := range replicaMembers {
+		// Stand alone instances return state = REPLICA_SET_MEMBER_STARTUP
 		client, err := util.GetClientForHost(clientOptions, member.Name)
 		if err != nil {
 			continue
 		}
+		if err := client.Connect(ctx); err != nil {
+			log.Fatalf("Cannot connect to MongoDB: %s", err)
+		}
 
 		isReplicaEnabled := isReplicasetEnabled(ctx, client)
@@ -546,7 +565,7 @@ func isProfilerEnabled(ctx context.Context, clientOptions *options.ClientOptions
 		if isReplicaEnabled && member.State != proto.REPLICA_SET_MEMBER_PRIMARY {
 			continue
 		}
-		if err := client.Database("admin").RunCommand(ctx, primitive.M{"profile": -1}).Decode(&ps); err != nil {
+		if err := client.Database(dbname).RunCommand(ctx, primitive.M{"profile": -1}).Decode(&ps); err != nil {
 			continue
 		}
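
The new errors.Is guard is the usual sentinel-error pattern: on a standalone server util.GetReplicasetMembers can fail with util.ShardingNotEnabledError, and only other errors should abort the check. A minimal sketch of that pattern with a stand-in sentinel (the names below are illustrative, not the toolkit's definitions):

package main

import (
	"errors"
	"fmt"
)

// Stand-in for a sentinel such as util.ShardingNotEnabledError.
var errShardingNotEnabled = errors.New("sharding not enabled")

func getReplicasetMembers() ([]string, error) {
	// A standalone mongod reports neither shards nor replica-set members.
	return nil, fmt.Errorf("listing members: %w", errShardingNotEnabled)
}

func main() {
	members, err := getReplicasetMembers()
	// errors.Is unwraps wrapped errors, so only unexpected failures are fatal.
	if err != nil && !errors.Is(err, errShardingNotEnabled) {
		fmt.Println("fatal:", err)
		return
	}
	if len(members) == 0 {
		fmt.Println("standalone server: query the profiler level directly")
	}
}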

View File

@@ -1,475 +1,502 @@
package main package main
// TODO: Rewrite tests to use the new sandbox import (
"bufio"
"bytes"
"context"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"reflect"
"regexp"
"runtime"
"sort"
"strings"
"testing"
"text/template"
"time"
// const ( "go.mongodb.org/mongo-driver/mongo"
// samples = "/src/go/tests/" "go.mongodb.org/mongo-driver/mongo/options"
// )
// "github.com/pborman/getopt"
// type testVars struct { "github.com/percona/percona-toolkit/src/go/lib/profiling"
// RootPath string "github.com/percona/percona-toolkit/src/go/lib/tutil"
// } "github.com/percona/percona-toolkit/src/go/mongolib/stats"
// )
// var vars testVars
// var Server dbtest.DBServer const (
// samples = "/src/go/tests/"
// func TestMain(m *testing.M) { )
// var err error
// if vars.RootPath, err = tutil.RootPath(); err != nil { type testVars struct {
// log.Printf("cannot get root path: %s", err.Error()) RootPath string
// os.Exit(1) }
// }
// os.Exit(m.Run()) type Data struct {
// bin string
// // The tempdir is created so MongoDB has a location to store its files. url string
// // Contents are wiped once the server stops db string
// os.Setenv("CHECK_SESSIONS", "0") }
// tempDir, _ := ioutil.TempDir("", "testing")
// Server.SetPath(tempDir) var vars testVars
// var client *mongo.Client
// retCode := m.Run()
// func TestMain(m *testing.M) {
// Server.Session().Close() var err error
// Server.Session().DB("samples").DropDatabase() if vars.RootPath, err = tutil.RootPath(); err != nil {
// log.Printf("cannot get root path: %s", err.Error())
// // Stop shuts down the temporary server and removes data on disk. os.Exit(1)
// Server.Stop() }
//
// // call with result of m.Run() client, err = mongo.Connect(context.TODO(), options.Client().ApplyURI(os.Getenv("PT_TEST_MONGODB_DSN")))
// os.Exit(retCode) if err != nil {
// } log.Printf("Cannot connect: %s", err.Error())
// os.Exit(1)
// func TestIsProfilerEnabled(t *testing.T) { }
// mongoDSN := os.Getenv("PT_TEST_MONGODB_DSN")
// if mongoDSN == "" { err = profiling.Disable(context.TODO(), client, "test")
// t.Skip("Skippping TestIsProfilerEnabled. It runs only in integration tests") if err != nil {
// } log.Printf("Cannot disable profile: %s", err.Error())
// os.Exit(1)
// dialer := pmgo.NewDialer() }
// di, _ := pmgo.ParseURL(mongoDSN) err = profiling.Drop(context.TODO(), client, "test")
// if err != nil {
// enabled, err := isProfilerEnabled(dialer, di) log.Printf("Cannot drop profile database: %s", err.Error())
// os.Exit(1)
// if err != nil { }
// t.Errorf("Cannot check if profiler is enabled: %s", err.Error()) err = profiling.Enable(context.TODO(), client, "test")
// } if err != nil {
// if enabled != true { log.Printf("Cannot enable profile: %s", err.Error())
// t.Error("Profiler must be enabled") os.Exit(1)
// } }
//
// } retCode := m.Run()
//
// func TestParseArgs(t *testing.T) { err = profiling.Disable(context.TODO(), client, "test")
// tests := []struct { if err != nil {
// args []string log.Printf("Cannot disable profile: %s", err.Error())
// want *options os.Exit(1)
// }{ }
// {
// args: []string{TOOLNAME}, // arg[0] is the command itself os.Exit(retCode)
// want: &options{ }
// Host: DEFAULT_HOST,
// LogLevel: DEFAULT_LOGLEVEL, func TestIsProfilerEnabled(t *testing.T) {
// OrderBy: strings.Split(DEFAULT_ORDERBY, ","), mongoDSN := os.Getenv("PT_TEST_MONGODB_DSN")
// SkipCollections: strings.Split(DEFAULT_SKIPCOLLECTIONS, ","), if mongoDSN == "" {
// AuthDB: DEFAULT_AUTHDB, t.Skip("Skippping TestIsProfilerEnabled. It runs only in integration tests")
// OutputFormat: "text", }
// },
// }, enabled, err := isProfilerEnabled(context.TODO(), options.Client().ApplyURI(mongoDSN), "test")
// { //
// args: []string{TOOLNAME, "zapp.brannigan.net:27018/samples", "--help"}, if err != nil {
// want: nil, t.Errorf("Cannot check if profiler is enabled: %s", err.Error())
// }, }
// { if enabled != true {
// args: []string{TOOLNAME, "zapp.brannigan.net:27018/samples"}, t.Error("Profiler must be enabled")
// want: &options{ }
// Host: "zapp.brannigan.net:27018/samples", }
// LogLevel: DEFAULT_LOGLEVEL,
// OrderBy: strings.Split(DEFAULT_ORDERBY, ","), func TestParseArgs(t *testing.T) {
// SkipCollections: strings.Split(DEFAULT_SKIPCOLLECTIONS, ","), tests := []struct {
// AuthDB: DEFAULT_AUTHDB, args []string
// Help: false, want *cliOptions
// OutputFormat: "text", }{
// }, {
// }, args: []string{TOOLNAME}, // arg[0] is the command itself
// } want: &cliOptions{
// for i, test := range tests { Host: "mongodb://" + DEFAULT_HOST,
// getopt.Reset() LogLevel: DEFAULT_LOGLEVEL,
// os.Args = test.args OrderBy: strings.Split(DEFAULT_ORDERBY, ","),
// got, err := getOptions() SkipCollections: strings.Split(DEFAULT_SKIPCOLLECTIONS, ","),
// if err != nil { AuthDB: DEFAULT_AUTHDB,
// t.Errorf("error parsing command line arguments: %s", err.Error()) OutputFormat: "text",
// } },
// if !reflect.DeepEqual(got, test.want) { },
// t.Errorf("invalid command line options test %d\ngot %+v\nwant %+v\n", i, got, test.want) {
// } args: []string{TOOLNAME, "zapp.brannigan.net:27018/samples", "--help"},
// } want: nil,
// },
// } {
// args: []string{TOOLNAME, "zapp.brannigan.net:27018/samples"},
// type Data struct { want: &cliOptions{
// bin string Host: "mongodb://zapp.brannigan.net:27018/samples",
// url string LogLevel: DEFAULT_LOGLEVEL,
// } OrderBy: strings.Split(DEFAULT_ORDERBY, ","),
// SkipCollections: strings.Split(DEFAULT_SKIPCOLLECTIONS, ","),
// func TestPTMongoDBQueryDigest(t *testing.T) { AuthDB: DEFAULT_AUTHDB,
// var err error Help: false,
// OutputFormat: "text",
// binDir, err := ioutil.TempDir("/tmp", "pmm-client-test-bindir-") },
// if err != nil { },
// t.Error(err) }
// } for i, test := range tests {
// defer func() { getopt.Reset()
// err := os.RemoveAll(binDir) os.Args = test.args
// if err != nil { //disabling Stdout to avoid printing help message to the screen
// t.Error(err) sout := os.Stdout
// } os.Stdout = nil
// }() got, err := getOptions()
// os.Stdout = sout
// bin := binDir + "/pt-mongodb-query-digest" if err != nil {
// xVariables := map[string]string{ t.Errorf("error parsing command line arguments: %s", err.Error())
// "main.Build": "<Build>", }
// "main.Version": "<Version>", if !reflect.DeepEqual(got, test.want) {
// "main.GoVersion": "<GoVersion>", t.Errorf("invalid command line options test %d\ngot %+v\nwant %+v\n", i, got, test.want)
// } }
// var ldflags []string }
// for x, value := range xVariables { }
// ldflags = append(ldflags, fmt.Sprintf("-X %s=%s", x, value))
// } func TestPTMongoDBQueryDigest(t *testing.T) {
// cmd := exec.Command( var err error
// "go", //
// "build", binDir, err := ioutil.TempDir("/tmp", "pt-test-bindir")
// "-o", if err != nil {
// bin, t.Error(err)
// "-ldflags", }
// strings.Join(ldflags, " "), defer func() {
// ) err := os.RemoveAll(binDir)
// cmd.Stdout = os.Stdout if err != nil {
// cmd.Stderr = os.Stderr t.Error(err)
// err = cmd.Run() }
// if err != nil { }()
// t.Error(err) //
// } bin := binDir + "/pt-mongodb-query-digest"
// xVariables := map[string]string{
// data := Data{ "main.Build": "<Build>",
// bin: bin, "main.Version": "<Version>",
// } "main.GoVersion": "<GoVersion>",
// tests := []func(*testing.T, Data){ "main.Commit": "<Commit>",
// testVersion, }
// testEmptySystemProfile, var ldflags []string
// testAllOperationsTemplate, for x, value := range xVariables {
// } ldflags = append(ldflags, fmt.Sprintf("-X %s=%s", x, value))
// t.Run("pmm-admin", func(t *testing.T) { }
// for _, f := range tests { cmd := exec.Command(
// f := f // capture range variable "go",
// fName := runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name() "build",
// t.Run(fName, func(t *testing.T) { "-o",
// // Clean up system.profile bin,
// var err error "-ldflags",
// data.url = "127.0.0.1/test" strings.Join(ldflags, " "),
// err = profiling.Disable(data.url) )
// if err != nil { cmd.Stdout = os.Stdout
// t.Error(err) cmd.Stderr = os.Stderr
// } err = cmd.Run()
// profiling.Drop(data.url) if err != nil {
// err = profiling.Enable(data.url) t.Error(err)
// if err != nil { }
// t.Error(err) //
// } data := Data{
// defer profiling.Disable(data.url) bin: bin,
// url: os.Getenv("PT_TEST_MONGODB_DSN"),
// // t.Parallel() db: "test",
// f(t, data) }
// }) tests := []func(*testing.T, Data){
// } testVersion,
// }) testEmptySystemProfile,
// testAllOperationsTemplate,
// } }
//
// func testVersion(t *testing.T, data Data) { t.Run("pt-mongodb-query-digest", func(t *testing.T) {
// cmd := exec.Command( for _, f := range tests {
// data.bin, f := f // capture range variable
// "--version", fName := runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()
// ) t.Run(fName, func(t *testing.T) {
// output, err := cmd.CombinedOutput() // Clean up system.profile
// if err != nil { var err error
// t.Error(err) err = profiling.Disable(context.TODO(), client, data.db)
// } if err != nil {
// expected := `pt-mongodb-query-digest t.Error(err)
// Version <Version> }
// Build: <Build> using <GoVersion>` profiling.Drop(context.TODO(), client, data.db)
// err = profiling.Enable(context.TODO(), client, data.db)
// assertRegexpLines(t, expected, string(output)) if err != nil {
// } t.Error(err)
// }
// func testEmptySystemProfile(t *testing.T, data Data) { defer profiling.Disable(context.TODO(), client, data.db)
// cmd := exec.Command( //
// data.bin, // t.Parallel()
// data.url, f(t, data)
// ) })
// output, err := cmd.CombinedOutput() }
// if err != nil { })
// t.Error(err) }
// }
// func testVersion(t *testing.T, data Data) {
// expected := "No queries found in profiler information for database \\\"test\\\"" cmd := exec.Command(
// if !strings.Contains(string(output), expected) { data.bin,
// t.Errorf("Empty system.profile.\nGot:\n%s\nWant:\n%s\n", string(output), expected) "--version",
// } )
// } output, err := cmd.CombinedOutput()
// if err != nil {
// func testAllOperationsTemplate(t *testing.T, data Data) { t.Error(err)
// dir := vars.RootPath + samples + "/doc/script/profile/" }
// files, err := ioutil.ReadDir(dir) expected := `pt-mongodb-query-digest
// if err != nil { Version <Version>
// t.Fatalf("cannot list samples: %s", err) Build: <Build> using <GoVersion>
// } Commit: <Commit>`
// //
// fs := []string{} assertRegexpLines(t, expected, string(output))
// for _, file := range files { }
// fs = append(fs, dir+file.Name())
// } func testEmptySystemProfile(t *testing.T, data Data) {
// sort.Strings(fs) cmd := exec.Command(
// err = run(fs...) data.bin,
// if err != nil { data.url,
// t.Fatalf("cannot execute queries: %s", err) "--database="+data.db,
// } )
// output, err := cmd.CombinedOutput()
// // disable profiling so pt-mongodb-query digest reads rows from `system.profile` if err != nil {
// profiling.Disable(data.url) t.Error(err)
// }
// // run profiler //
// cmd := exec.Command( expected := "No queries found in profiler information for database \\\"" + data.db + "\\\""
// data.bin, if !strings.Contains(string(output), expected) {
// data.url, t.Errorf("Empty system.profile.\nGot:\n%s\nWant:\n%s\n", string(output), expected)
// ) }
// }
// output, err := cmd.CombinedOutput()
// if err != nil { func testAllOperationsTemplate(t *testing.T, data Data) {
// t.Error(err) dir := vars.RootPath + samples + "/doc/script/profile/"
// } files, err := ioutil.ReadDir(dir)
// if err != nil {
// queries := []stats.QueryStats{ t.Fatalf("cannot list samples: %s", err)
// { }
// ID: "e357abe482dcc0cd03ab742741bf1c86", //
// Namespace: "test.coll", fs := []string{}
// Operation: "INSERT", for _, file := range files {
// Fingerprint: "INSERT coll", fs = append(fs, dir+file.Name())
// }, }
// { sort.Strings(fs)
// ID: "c9b40ce564762834d12b0390a292645c", fs = append([]string{os.Getenv("PT_TEST_MONGODB_DSN")}, fs...)
// Namespace: "test.coll", err = run(fs...)
// Operation: "DROP", if err != nil {
// Fingerprint: "DROP coll drop", t.Fatalf("cannot execute queries: %s", err)
// }, }
// { //
// ID: "db759bfd83441deecc71382323041ce6", // disable profiling so pt-mongodb-query digest reads rows from `system.profile`
// Namespace: "test.coll",
// Operation: "GETMORE", profiling.Disable(context.TODO(), client, data.db)
// Fingerprint: "GETMORE coll", //
// }, // run profiler
// { cmd := exec.Command(
// ID: "e72ad41302045bd6c2bcad76511f915a", data.bin,
// Namespace: "test.coll", data.url,
// Operation: "REMOVE", "--database="+data.db,
// Fingerprint: "REMOVE coll a,b", )
// }, //
// { output, err := cmd.CombinedOutput()
// ID: "30dbfbc89efd8cfd40774dff0266a28f", if err != nil {
// Namespace: "test.coll", t.Error(err)
// Operation: "AGGREGATE", }
// Fingerprint: "AGGREGATE coll a", //
// }, queries := []stats.QueryStats{
// { {
// ID: "a6782ae38ef891d5506341a4b0ab2747", ID: "e357abe482dcc0cd03ab742741bf1c86",
// Namespace: "test", Namespace: "test.coll",
// Operation: "EVAL", Operation: "INSERT",
// Fingerprint: "EVAL", Fingerprint: "INSERT coll",
// }, },
// { {
// ID: "76d7662df07b44135ac3e07e44a6eb39", ID: "22eda5c05290c1af6dbffd8c38aceff6",
// Namespace: "", Namespace: "test.coll",
// Operation: "EXPLAIN", Operation: "DROP",
// Fingerprint: "EXPLAIN", Fingerprint: "DROP coll",
// }, },
// { {
// ID: "e8a3f05a4bd3f0bfa7d38eb2372258b1", ID: "ba1d8c1620d1aaf36c1010c809ec462b",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "FINDANDMODIFY", Operation: "CREATEINDEXES",
// Fingerprint: "FINDANDMODIFY coll a", Fingerprint: "CREATEINDEXES coll",
// }, },
// { {
// ID: "2a639e77efe3e68399ef9482575b3421", ID: "db759bfd83441deecc71382323041ce6",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "FIND", Operation: "GETMORE",
// Fingerprint: "FIND coll", Fingerprint: "GETMORE coll",
// }, },
// { {
// ID: "fe0bf975a044fe47fd32b835ceba612d", ID: "e72ad41302045bd6c2bcad76511f915a",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "FIND", Operation: "REMOVE",
// Fingerprint: "FIND coll a", Fingerprint: "REMOVE coll a,b",
// }, },
// { {
// ID: "20fe80188ec82c9d3c3dcf3f4817f8f9", ID: "2a639e77efe3e68399ef9482575b3421",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "FIND", Operation: "FIND",
// Fingerprint: "FIND coll b,c", Fingerprint: "FIND coll",
// }, },
// { {
// ID: "02104210d67fe680273784d833f86831", ID: "76d7662df07b44135ac3e07e44a6eb39",
// Namespace: "test.coll", Namespace: "",
// Operation: "FIND", Operation: "EXPLAIN",
// Fingerprint: "FIND coll c,k,pad", Fingerprint: "EXPLAIN",
// }, },
// { {
// ID: "5efe4738d807c74b3980de76c37a0870", ID: "e8a3f05a4bd3f0bfa7d38eb2372258b1",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "FIND", Operation: "FINDANDMODIFY",
// Fingerprint: "FIND coll k", Fingerprint: "FINDANDMODIFY coll a",
// }, },
// { {
// ID: "798d7c1cd25b63cb6a307126a25910d6", ID: "30dbfbc89efd8cfd40774dff0266a28f",
// Namespace: "test.system.js", Namespace: "test.coll",
// Operation: "FIND", Operation: "AGGREGATE",
// Fingerprint: "FIND system.js", Fingerprint: "AGGREGATE coll a",
// }, },
// { {
// ID: "c70403cbd55ffbb07f08c0cb77a24b19", ID: "fe0bf975a044fe47fd32b835ceba612d",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "GEONEAR", Operation: "FIND",
// Fingerprint: "GEONEAR coll", Fingerprint: "FIND coll a",
// }, },
// { {
// ID: "e4122a58c99ab0a4020ce7d195c5a8cb", ID: "20fe80188ec82c9d3c3dcf3f4817f8f9",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "DISTINCT", Operation: "FIND",
// Fingerprint: "DISTINCT coll a,b", Fingerprint: "FIND coll b,c",
// }, },
// { {
// ID: "ca8bb19386488570447f5753741fb494", ID: "02104210d67fe680273784d833f86831",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "GROUP", Operation: "FIND",
// Fingerprint: "GROUP coll a,b", Fingerprint: "FIND coll c,k,pad",
// }, },
// { {
// ID: "10b8f47b366fbfd1fb01f8d17d75b1a2", ID: "5efe4738d807c74b3980de76c37a0870",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "COUNT", Operation: "FIND",
// Fingerprint: "COUNT coll a", Fingerprint: "FIND coll k",
// }, },
// { {
// ID: "cc3cb3824eea4094eb042f5ca76bd385", ID: "e4122a58c99ab0a4020ce7d195c5a8cb",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "MAPREDUCE", Operation: "DISTINCT",
// Fingerprint: "MAPREDUCE coll a", Fingerprint: "DISTINCT coll a,b",
// }, },
// { {
// ID: "cba2dff0740762c6e5769f0e300df676", ID: "10b8f47b366fbfd1fb01f8d17d75b1a2",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "COUNT", Operation: "COUNT",
// Fingerprint: "COUNT coll", Fingerprint: "COUNT coll a",
// }, },
// { {
// ID: "f74a5120ac22d02120ccbf6d478b0dbc", ID: "cc3cb3824eea4094eb042f5ca76bd385",
// Namespace: "test.coll", Namespace: "test.coll",
// Operation: "UPDATE", Operation: "MAPREDUCE",
// Fingerprint: "UPDATE coll a", Fingerprint: "MAPREDUCE coll a",
// }, },
// } {
// ID: "cba2dff0740762c6e5769f0e300df676",
// expected := `Profiler is disabled for the "test" database but there are \s*[0-9]+ documents in the system.profile collection. Namespace: "test.coll",
// Using those documents for the stats Operation: "COUNT",
// Fingerprint: "COUNT coll",
// # Totals },
// # Ratio [0-9\.]+ \(docs scanned/returned\) {
// # Attribute pct total min max avg 95% stddev median ID: "f74a5120ac22d02120ccbf6d478b0dbc",
// # ================== === ======== ======== ======== ======== ======== ======= ======== Namespace: "test.coll",
// # Count \(docs\) (\s*[0-9]+)\s Operation: "UPDATE",
// # Exec Time ms (\s*[0-9]+){8}\s Fingerprint: "UPDATE coll a",
// # Docs Scanned (\s*[0-9\.]+){8}\s },
// # Docs Returned (\s*[0-9\.]+){8}\s }
// # Bytes sent (\s*[0-9\.K]+){8}(K|\s) //
// #\s expected := `Profiler is disabled for the "test" database but there are \s*[0-9]+ documents in the system.profile collection.
// ` Using those documents for the stats
//
// queryTpl := ` # Totals
// # Query [0-9]+: [0-9\.]+ QPS, ID {{.ID}} # Ratio [0-9\.]+ \(docs scanned/returned\)
// # Ratio [0-9\.]+ \(docs scanned/returned\) # Attribute pct total min max avg 95% stddev median
// # Time range: .* to .* # ================== === ======== ======== ======== ======== ======== ======= ========
// # Attribute pct total min max avg 95% stddev median # Count \(docs\) (\s*[0-9]+)\s
// # ================== === ======== ======== ======== ======== ======== ======= ======== # Exec Time ms (\s*[0-9]+){8}\s
// # Count \(docs\) (\s*[0-9]+)\s # Docs Scanned (\s*[0-9\.]+){8}\s
// # Exec Time ms (\s*[0-9]+){8}\s # Docs Returned (\s*[0-9\.]+){8}\s
// # Docs Scanned (\s*[0-9\.]+){8}\s # Bytes sent (\s*[0-9\.K]+){8}(K|\s)
// # Docs Returned (\s*[0-9\.]+){8}\s #\s
// # Bytes sent (\s*[0-9\.K]+){8}(K|\s) `
// # String: //
// # Namespace {{.Namespace}} queryTpl := `
// # Operation {{.Operation}} # Query [0-9]+: [0-9\.]+ QPS, ID {{.ID}}
// # Fingerprint {{.Fingerprint}} # Ratio [0-9\.]+ \(docs scanned/returned\)
// # Query .* # Time range: .* to .*
// # Attribute pct total min max avg 95% stddev median
// ` # ================== === ======== ======== ======== ======== ======== ======= ========
// # Count \(docs\) (\s*[0-9]+)\s
// tpl, _ := template.New("query").Parse(queryTpl) # Exec Time ms (\s*[0-9]+){8}\s
// for _, query := range queries { # Docs Scanned (\s*[0-9\.]+){8}\s
// buf := bytes.Buffer{} # Docs Returned (\s*[0-9\.]+){8}\s
// err := tpl.Execute(&buf, query) # Bytes sent (\s*[0-9\.K]+){8}(K|\s)
// if err != nil { # String:
// t.Error(err) # Namespace {{.Namespace}}
// } # Operation {{.Operation}}
// # Fingerprint {{.Fingerprint}}
// expected += buf.String() # Query .*
// } `
// expected += "\n" // Looks like we expect additional line //
// tpl, _ := template.New("query").Parse(queryTpl)
// assertRegexpLines(t, expected, string(output)) for _, query := range queries {
// } buf := bytes.Buffer{}
// err := tpl.Execute(&buf, query)
// // assertRegexpLines matches regexp line by line to corresponding line of text if err != nil {
// func assertRegexpLines(t *testing.T, rx string, str string, msgAndArgs ...interface{}) bool { t.Error(err)
// expectedScanner := bufio.NewScanner(strings.NewReader(rx)) }
// defer func() { //
// if err := expectedScanner.Err(); err != nil { expected += buf.String()
// t.Fatal(err) }
// } expected += "\n" // Looks like we expect additional line
// }() //
// assertRegexpLines(t, expected, string(output))
// actualScanner := bufio.NewScanner(strings.NewReader(str)) }
// defer func() {
// if err := actualScanner.Err(); err != nil { // assertRegexpLines matches regexp line by line to corresponding line of text
// t.Fatal(err) func assertRegexpLines(t *testing.T, rx string, str string, msgAndArgs ...interface{}) bool {
// } expectedScanner := bufio.NewScanner(strings.NewReader(rx))
// }() defer func() {
// if err := expectedScanner.Err(); err != nil {
// ok := true t.Fatal(err)
// for { }
// asOk := actualScanner.Scan() }()
// esOk := expectedScanner.Scan() //
// actualScanner := bufio.NewScanner(strings.NewReader(str))
// switch { defer func() {
// case asOk && esOk: if err := actualScanner.Err(); err != nil {
// ok, err := regexp.MatchString("^"+expectedScanner.Text()+"$", actualScanner.Text()) t.Fatal(err)
// if err != nil { }
// t.Error(err) }()
// } //
// if !ok { ok := true
// t.Errorf("regexp '%s' doesn't match '%s'", expectedScanner.Text(), actualScanner.Text()) for {
// } asOk := actualScanner.Scan()
// case asOk: esOk := expectedScanner.Scan()
// t.Errorf("didn't expect more lines but got: %s", actualScanner.Text()) //
// ok = false switch {
// case esOk: case asOk && esOk:
// t.Errorf("didn't got line but expected it to match against: %s", expectedScanner.Text()) ok, err := regexp.MatchString("^"+expectedScanner.Text()+"$", actualScanner.Text())
// ok = false if err != nil {
// default: t.Error(err)
// return ok }
// } if !ok {
// } t.Errorf("regexp '%s' doesn't match '%s'", expectedScanner.Text(), actualScanner.Text())
// } }
// case asOk:
// func run(arg ...string) error { t.Errorf("didn't expect more lines but got: %s", actualScanner.Text())
// ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) ok = false
// defer cancel() case esOk:
// return exec.CommandContext(ctx, "mongo", arg...).Run() t.Errorf("didn't got line but expected it to match against: %s", expectedScanner.Text())
// } ok = false
default:
return ok
}
}
}
func run(arg ...string) error {
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
return exec.CommandContext(ctx, "mongo", arg...).Run()
}

View File

@@ -1 +0,0 @@
-db.eval("1");

View File

@@ -1,14 +0,0 @@
-var coll = db.coll;
-coll.drop();
-for (var i = 0; i < 10; ++i) {
-    coll.insert({a: i, b: i % 5});
-}
-coll.createIndex({b: -1});
-coll.group({
-    key: {a: 1, b: 1},
-    cond: {b: 3},
-    reduce: function() {},
-    initial: {}
-});
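
Because the group command (like eval) was removed in MongoDB 4.2, the closest replacement for the deleted group.js sample is an aggregation pipeline. A hedged sketch with the Go driver, mirroring the script's key {a: 1, b: 1} and cond {b: 3} (connection string and database name are placeholders):

package main

import (
	"context"
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func main() {
	ctx := context.Background()
	client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://127.0.0.1:27017"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Disconnect(ctx)

	coll := client.Database("test").Collection("coll")
	// group({key: {a: 1, b: 1}, cond: {b: 3}, ...}) maps to $match + $group.
	pipeline := mongo.Pipeline{
		{{Key: "$match", Value: bson.D{{Key: "b", Value: 3}}}},
		{{Key: "$group", Value: bson.D{{Key: "_id", Value: bson.D{{Key: "a", Value: "$a"}, {Key: "b", Value: "$b"}}}}}},
	}
	cur, err := coll.Aggregate(ctx, pipeline)
	if err != nil {
		log.Fatal(err)
	}
	defer cur.Close(ctx)

	var groups []bson.M
	if err := cur.All(ctx, &groups); err != nil {
		log.Fatal(err)
	}
	fmt.Println(groups)
}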