From 8509bcd8cdd6ae8ab5f647a1f62e6a6af0b2ebe6 Mon Sep 17 00:00:00 2001 From: tharun0064 Date: Thu, 16 Jan 2025 17:02:25 +0530 Subject: [PATCH] resolved: review comments (#44) * resolved: review comments --- src/main.go | 3 +- .../{common-helpers.go => common_helpers.go} | 1 + ...helpers_test.go => common_helpers_test.go} | 4 +- .../common-utils/ingestion-helpers.go | 20 +- ...elper_test.go => ingestion_helper_test.go} | 18 +- ...etch-helpers.go => query_fetch_helpers.go} | 2 +- ...rs_test.go => query_fetch_helpers_test.go} | 0 .../global-variables/global_variables.go | 27 ++ .../performance-metrics/blocking_sessions.go | 19 +- .../blocking_sessions_test.go | 25 +- .../execution_plan_metrics.go | 12 +- .../execution_plan_metrics_test.go | 6 +- .../individual_query_metrics.go | 25 +- .../individual_query_metrics_test.go | 29 +- .../performance-metrics/slow_query_metrics.go | 16 +- .../slow_query_metrics_test.go | 26 +- .../performance-metrics/wait_event_metrics.go | 14 +- .../wait_event_metrics_test.go | 28 +- .../queries/queries.go | 392 +++++++++--------- .../query_performance_main.go | 15 +- 20 files changed, 364 insertions(+), 318 deletions(-) rename src/query-performance-monitoring/common-utils/{common-helpers.go => common_helpers.go} (92%) rename src/query-performance-monitoring/common-utils/{commmon-helpers_test.go => common_helpers_test.go} (95%) rename src/query-performance-monitoring/common-utils/{ingestion-helper_test.go => ingestion_helper_test.go} (81%) rename src/query-performance-monitoring/common-utils/{query-fetch-helpers.go => query_fetch_helpers.go} (96%) rename src/query-performance-monitoring/common-utils/{query-fetch-helpers_test.go => query_fetch_helpers_test.go} (100%) create mode 100644 src/query-performance-monitoring/global-variables/global_variables.go diff --git a/src/main.go b/src/main.go index bc755eb2..63fd9481 100644 --- a/src/main.go +++ b/src/main.go @@ -1,3 +1,4 @@ +//go:generate goversioninfo package main import ( @@ -91,7 +92,7 
@@ func main() { log.Error(err.Error()) } - if args.EnableQueryMonitoring { + if args.EnableQueryMonitoring && args.HasMetrics() { queryperformancemonitoring.QueryPerformanceMain(args, pgIntegration, collectionList) } diff --git a/src/query-performance-monitoring/common-utils/common-helpers.go b/src/query-performance-monitoring/common-utils/common_helpers.go similarity index 92% rename from src/query-performance-monitoring/common-utils/common-helpers.go rename to src/query-performance-monitoring/common-utils/common_helpers.go index b16bb7bf..af4607ca 100644 --- a/src/query-performance-monitoring/common-utils/common-helpers.go +++ b/src/query-performance-monitoring/common-utils/common_helpers.go @@ -11,6 +11,7 @@ import ( "github.com/newrelic/nri-postgresql/src/collection" ) +// re is a regular expression that matches single-quoted strings, numbers, or double-quoted strings var re = regexp.MustCompile(`'[^']*'|\d+|".*?"`) func GetQuotedStringFromArray(array []string) string { diff --git a/src/query-performance-monitoring/common-utils/commmon-helpers_test.go b/src/query-performance-monitoring/common-utils/common_helpers_test.go similarity index 95% rename from src/query-performance-monitoring/common-utils/commmon-helpers_test.go rename to src/query-performance-monitoring/common-utils/common_helpers_test.go index 6576128c..3268dc83 100644 --- a/src/query-performance-monitoring/common-utils/commmon-helpers_test.go +++ b/src/query-performance-monitoring/common-utils/common_helpers_test.go @@ -18,13 +18,13 @@ func TestGetQuotedStringFromArray(t *testing.T) { } func TestGetDatabaseListInString(t *testing.T) { - dbListKeys := []string{"db1", "db2"} + dbListKeys := []string{"db1"} sort.Strings(dbListKeys) // Sort the keys to ensure consistent order dbList := collection.DatabaseList{} for _, key := range dbListKeys { dbList[key] = collection.SchemaList{} } - expected := "'db1','db2'" + expected := "'db1'" result := commonutils.GetDatabaseListInString(dbList) 
assert.Equal(t, expected, result) diff --git a/src/query-performance-monitoring/common-utils/ingestion-helpers.go b/src/query-performance-monitoring/common-utils/ingestion-helpers.go index 6cf68d05..f360e7ee 100644 --- a/src/query-performance-monitoring/common-utils/ingestion-helpers.go +++ b/src/query-performance-monitoring/common-utils/ingestion-helpers.go @@ -4,10 +4,11 @@ import ( "fmt" "reflect" + globalvariables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" + "github.com/newrelic/infra-integrations-sdk/v3/data/metric" "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/infra-integrations-sdk/v3/log" - "github.com/newrelic/nri-postgresql/src/args" ) func SetMetric(metricSet *metric.Set, name string, value interface{}, sourceType string) { @@ -33,8 +34,9 @@ } } -func IngestMetric(metricList []interface{}, eventName string, pgIntegration *integration.Integration, args args.ArgumentList) { - instanceEntity, err := CreateEntity(pgIntegration, args) +// IngestMetric publishes the given metric list in batches; batching avoids publishing all the data in one go, which would exceed New Relic's payload size limit. 
+func IngestMetric(metricList []interface{}, eventName string, pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) { + instanceEntity, err := CreateEntity(pgIntegration, gv) if err != nil { log.Error("Error creating entity: %v", err) return @@ -58,22 +60,22 @@ func IngestMetric(metricList []interface{}, eventName string, pgIntegration *int if metricCount == PublishThreshold || metricCount == lenOfMetricList { metricCount = 0 - if err := PublishMetrics(pgIntegration, &instanceEntity, args); err != nil { + if err := PublishMetrics(pgIntegration, &instanceEntity, gv); err != nil { log.Error("Error publishing metrics: %v", err) return } } } if metricCount > 0 { - if err := PublishMetrics(pgIntegration, &instanceEntity, args); err != nil { + if err := PublishMetrics(pgIntegration, &instanceEntity, gv); err != nil { log.Error("Error publishing metrics: %v", err) return } } } -func CreateEntity(pgIntegration *integration.Integration, args args.ArgumentList) (*integration.Entity, error) { - return pgIntegration.Entity(fmt.Sprintf("%s:%s", args.Hostname, args.Port), "pg-instance") +func CreateEntity(pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) (*integration.Entity, error) { + return pgIntegration.Entity(fmt.Sprintf("%s:%s", gv.Hostname, gv.Port), "pg-instance") } func ProcessModel(model interface{}, metricSet *metric.Set) error { @@ -108,11 +110,11 @@ func ProcessModel(model interface{}, metricSet *metric.Set) error { return nil } -func PublishMetrics(pgIntegration *integration.Integration, instanceEntity **integration.Entity, args args.ArgumentList) error { +func PublishMetrics(pgIntegration *integration.Integration, instanceEntity **integration.Entity, gv *globalvariables.GlobalVariables) error { if err := pgIntegration.Publish(); err != nil { return err } var err error - *instanceEntity, err = pgIntegration.Entity(fmt.Sprintf("%s:%s", args.Hostname, args.Port), "pg-instance") + *instanceEntity, err = 
CreateEntity(pgIntegration, gv) return err } diff --git a/src/query-performance-monitoring/common-utils/ingestion-helper_test.go b/src/query-performance-monitoring/common-utils/ingestion_helper_test.go similarity index 81% rename from src/query-performance-monitoring/common-utils/ingestion-helper_test.go rename to src/query-performance-monitoring/common-utils/ingestion_helper_test.go index c10bd72f..e30078f0 100644 --- a/src/query-performance-monitoring/common-utils/ingestion-helper_test.go +++ b/src/query-performance-monitoring/common-utils/ingestion_helper_test.go @@ -3,6 +3,8 @@ package commonutils_test import ( "testing" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" + "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/nri-postgresql/src/args" commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" @@ -12,15 +14,11 @@ import ( func TestSetMetric(t *testing.T) { pgIntegration, _ := integration.New("test", "1.0.0") entity, _ := pgIntegration.Entity("test-entity", "test-type") - metricSet := entity.NewMetricSet("test-event") - commonutils.SetMetric(metricSet, "testGauge", 123.0, "gauge") assert.Equal(t, 123.0, metricSet.Metrics["testGauge"]) - commonutils.SetMetric(metricSet, "testAttribute", "value", "attribute") assert.Equal(t, "value", metricSet.Metrics["testAttribute"]) - commonutils.SetMetric(metricSet, "testDefault", 456.0, "unknown") assert.Equal(t, 456.0, metricSet.Metrics["testDefault"]) } @@ -31,13 +29,13 @@ func TestIngestMetric(t *testing.T) { Hostname: "localhost", Port: "5432", } + gv := global_variables.SetGlobalVariables(args, uint64(14), "testdb") metricList := []interface{}{ struct { TestField int `metric_name:"testField" source_type:"gauge"` }{TestField: 123}, } - - commonutils.IngestMetric(metricList, "testEvent", pgIntegration, args) + commonutils.IngestMetric(metricList, "testEvent", 
pgIntegration, gv) assert.NotEmpty(t, pgIntegration.Entities) } @@ -47,8 +45,9 @@ func TestCreateEntity(t *testing.T) { Hostname: "localhost", Port: "5432", } + gv := global_variables.SetGlobalVariables(args, uint64(14), "testdb") - entity, err := commonutils.CreateEntity(pgIntegration, args) + entity, err := commonutils.CreateEntity(pgIntegration, gv) assert.NoError(t, err) assert.NotNil(t, entity) assert.Equal(t, "localhost:5432", entity.Metadata.Name) @@ -75,9 +74,10 @@ func TestPublishMetrics(t *testing.T) { Hostname: "localhost", Port: "5432", } - entity, _ := commonutils.CreateEntity(pgIntegration, args) + gv := global_variables.SetGlobalVariables(args, uint64(14), "testdb") + entity, _ := commonutils.CreateEntity(pgIntegration, gv) - err := commonutils.PublishMetrics(pgIntegration, &entity, args) + err := commonutils.PublishMetrics(pgIntegration, &entity, gv) assert.NoError(t, err) assert.NotNil(t, entity) } diff --git a/src/query-performance-monitoring/common-utils/query-fetch-helpers.go b/src/query-performance-monitoring/common-utils/query_fetch_helpers.go similarity index 96% rename from src/query-performance-monitoring/common-utils/query-fetch-helpers.go rename to src/query-performance-monitoring/common-utils/query_fetch_helpers.go index 972be3ae..b2409eb1 100644 --- a/src/query-performance-monitoring/common-utils/query-fetch-helpers.go +++ b/src/query-performance-monitoring/common-utils/query_fetch_helpers.go @@ -30,7 +30,7 @@ func FetchVersionSpecificIndividualQueries(version uint64) (string, error) { switch { case version == PostgresVersion12: return queries.IndividualQuerySearchV12, nil - case version >= PostgresVersion12: + case version > PostgresVersion12: return queries.IndividualQuerySearchV13AndAbove, nil default: return "", ErrUnsupportedVersion diff --git a/src/query-performance-monitoring/common-utils/query-fetch-helpers_test.go b/src/query-performance-monitoring/common-utils/query_fetch_helpers_test.go similarity index 100% rename from 
src/query-performance-monitoring/common-utils/query-fetch-helpers_test.go rename to src/query-performance-monitoring/common-utils/query_fetch_helpers_test.go diff --git a/src/query-performance-monitoring/global-variables/global_variables.go b/src/query-performance-monitoring/global-variables/global_variables.go new file mode 100644 index 00000000..6cd3bd33 --- /dev/null +++ b/src/query-performance-monitoring/global-variables/global_variables.go @@ -0,0 +1,27 @@ +package globalvariables + +import ( + "github.com/newrelic/nri-postgresql/src/args" +) + +type GlobalVariables struct { + QueryResponseTimeThreshold int + QueryCountThreshold int + Version uint64 + DatabaseString string + Hostname string + Port string + Arguments args.ArgumentList +} + +func SetGlobalVariables(args args.ArgumentList, version uint64, databaseString string) *GlobalVariables { + return &GlobalVariables{ + QueryResponseTimeThreshold: args.QueryResponseTimeThreshold, + QueryCountThreshold: args.QueryCountThreshold, + Version: version, + DatabaseString: databaseString, + Hostname: args.Hostname, + Port: args.Port, + Arguments: args, + } +} diff --git a/src/query-performance-monitoring/performance-metrics/blocking_sessions.go b/src/query-performance-monitoring/performance-metrics/blocking_sessions.go index ebe0c398..97c7f7a9 100644 --- a/src/query-performance-monitoring/performance-metrics/blocking_sessions.go +++ b/src/query-performance-monitoring/performance-metrics/blocking_sessions.go @@ -3,18 +3,19 @@ package performancemetrics import ( "fmt" + globalvariables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" + "github.com/newrelic/infra-integrations-sdk/v3/integration" commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations" "github.com/newrelic/infra-integrations-sdk/v3/log" - 
"github.com/newrelic/nri-postgresql/src/args" performancedbconnection "github.com/newrelic/nri-postgresql/src/connection" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" ) -func PopulateBlockingMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, args args.ArgumentList, databaseName string, version uint64) error { - isEligible, enableCheckError := validations.CheckBlockingSessionMetricsFetchEligibility(conn, version) +func PopulateBlockingMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) error { + isEligible, enableCheckError := validations.CheckBlockingSessionMetricsFetchEligibility(conn, gv.Version) if enableCheckError != nil { log.Debug("Error executing query: %v in PopulateBlockingMetrics", enableCheckError) return commonutils.ErrUnExpectedError @@ -23,7 +24,7 @@ func PopulateBlockingMetrics(conn *performancedbconnection.PGSQLConnection, pgIn log.Debug("Extension 'pg_stat_statements' is not enabled or unsupported version.") return commonutils.ErrNotEligible } - blockingQueriesMetricsList, blockQueryFetchErr := GetBlockingMetrics(conn, args, databaseName, version) + blockingQueriesMetricsList, blockQueryFetchErr := GetBlockingMetrics(conn, gv) if blockQueryFetchErr != nil { log.Error("Error fetching Blocking queries: %v", blockQueryFetchErr) return commonutils.ErrUnExpectedError @@ -32,18 +33,18 @@ func PopulateBlockingMetrics(conn *performancedbconnection.PGSQLConnection, pgIn log.Debug("No Blocking queries found.") return nil } - commonutils.IngestMetric(blockingQueriesMetricsList, "PostgresBlockingSessions", pgIntegration, args) + commonutils.IngestMetric(blockingQueriesMetricsList, "PostgresBlockingSessions", pgIntegration, gv) return nil } -func GetBlockingMetrics(conn *performancedbconnection.PGSQLConnection, args args.ArgumentList, databaseName string, version uint64) 
([]interface{}, error) { +func GetBlockingMetrics(conn *performancedbconnection.PGSQLConnection, gv *globalvariables.GlobalVariables) ([]interface{}, error) { var blockingQueriesMetricsList []interface{} - versionSpecificBlockingQuery, err := commonutils.FetchVersionSpecificBlockingQueries(version) + versionSpecificBlockingQuery, err := commonutils.FetchVersionSpecificBlockingQueries(gv.Version) if err != nil { log.Error("Unsupported postgres version: %v", err) return nil, err } - var query = fmt.Sprintf(versionSpecificBlockingQuery, databaseName, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) + var query = fmt.Sprintf(versionSpecificBlockingQuery, gv.DatabaseString, min(gv.QueryCountThreshold, commonutils.MaxQueryThreshold)) rows, err := conn.Queryx(query) if err != nil { log.Error("Failed to execute query: %v", err) @@ -54,7 +55,7 @@ func GetBlockingMetrics(conn *performancedbconnection.PGSQLConnection, args args if scanError := rows.StructScan(&blockingQueryMetric); scanError != nil { return nil, scanError } - if version == commonutils.PostgresVersion13 || version == commonutils.PostgresVersion12 { + if gv.Version == commonutils.PostgresVersion13 || gv.Version == commonutils.PostgresVersion12 { *blockingQueryMetric.BlockedQuery = commonutils.AnonymizeQueryText(*blockingQueryMetric.BlockedQuery) *blockingQueryMetric.BlockingQuery = commonutils.AnonymizeQueryText(*blockingQueryMetric.BlockingQuery) } diff --git a/src/query-performance-monitoring/performance-metrics/blocking_sessions_test.go b/src/query-performance-monitoring/performance-metrics/blocking_sessions_test.go index 62847f9d..005c5611 100644 --- a/src/query-performance-monitoring/performance-metrics/blocking_sessions_test.go +++ b/src/query-performance-monitoring/performance-metrics/blocking_sessions_test.go @@ -5,15 +5,15 @@ import ( "regexp" "testing" - commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" - 
"github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" - "gopkg.in/DATA-DOG/go-sqlmock.v1" - "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/nri-postgresql/src/args" "github.com/newrelic/nri-postgresql/src/connection" + commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" performancemetrics "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/performance-metrics" + "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" "github.com/stretchr/testify/assert" + "gopkg.in/DATA-DOG/go-sqlmock.v1" ) func TestPopulateBlockingMetrics(t *testing.T) { @@ -22,6 +22,7 @@ func TestPopulateBlockingMetrics(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(14) + gv := global_variables.SetGlobalVariables(args, version, databaseName) validationQueryStatStatements := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_stat_statements") mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1)) expectedQuery := queries.BlockingQueriesForV14AndAbove @@ -34,7 +35,7 @@ func TestPopulateBlockingMetrics(t *testing.T) { 456, "SELECT 2", 4566, "2023-01-01 00:00:00", )) - err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, args, databaseName, version) + err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, gv) assert.NoError(t, err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -45,6 +46,7 @@ func TestPopulateBlockingMetricsSupportedVersionExtensionNotRequired(t *testing. 
args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(12) + gv := global_variables.SetGlobalVariables(args, version, databaseName) expectedQuery := queries.BlockingQueriesForV12AndV13 query := fmt.Sprintf(expectedQuery, databaseName, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ @@ -54,7 +56,7 @@ func TestPopulateBlockingMetricsSupportedVersionExtensionNotRequired(t *testing. "newrelic_value", 123, "SELECT 1", 1233444, "2023-01-01 00:00:00", "testdb", 456, "SELECT 2", 4566, "2023-01-01 00:00:00", )) - err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, args, databaseName, version) + err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, gv) assert.NoError(t, err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -65,7 +67,8 @@ func TestPopulateBlockingMetricsUnSupportedVersion(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(11) - err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, args, databaseName, version) + gv := global_variables.SetGlobalVariables(args, version, databaseName) + err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, gv) assert.EqualError(t, err, commonutils.ErrNotEligible.Error()) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -76,9 +79,10 @@ func TestPopulateBlockingMetricsExtensionsNotEnabled(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(14) + gv := global_variables.SetGlobalVariables(args, version, databaseName) validationQueryStatStatements := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_stat_statements") mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0)) - err := 
performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, args, databaseName, version) + err := performancemetrics.PopulateBlockingMetrics(conn, pgIntegration, gv) assert.EqualError(t, err, commonutils.ErrNotEligible.Error()) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -88,6 +92,8 @@ func TestGetBlockingMetrics(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) + gv := global_variables.SetGlobalVariables(args, version, databaseName) + expectedQuery := queries.BlockingQueriesForV12AndV13 query := fmt.Sprintf(expectedQuery, databaseName, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ @@ -97,8 +103,7 @@ func TestGetBlockingMetrics(t *testing.T) { "newrelic_value", 123, "SELECT 1", 1233444, "2023-01-01 00:00:00", "testdb", 456, "SELECT 2", 4566, "2023-01-01 00:00:00", )) - blockingQueriesMetricsList, err := performancemetrics.GetBlockingMetrics(conn, args, databaseName, version) - + blockingQueriesMetricsList, err := performancemetrics.GetBlockingMetrics(conn, gv) assert.NoError(t, err) assert.Len(t, blockingQueriesMetricsList, 1) assert.NoError(t, mock.ExpectationsWereMet()) diff --git a/src/query-performance-monitoring/performance-metrics/execution_plan_metrics.go b/src/query-performance-monitoring/performance-metrics/execution_plan_metrics.go index 5423bd61..57309eab 100644 --- a/src/query-performance-monitoring/performance-metrics/execution_plan_metrics.go +++ b/src/query-performance-monitoring/performance-metrics/execution_plan_metrics.go @@ -6,26 +6,26 @@ import ( "github.com/go-viper/mapstructure/v2" "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/infra-integrations-sdk/v3/log" - "github.com/newrelic/nri-postgresql/src/args" performancedbconnection "github.com/newrelic/nri-postgresql/src/connection" commonutils 
"github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" + globalvariables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" ) -func PopulateExecutionPlanMetrics(results []datamodels.IndividualQueryMetrics, pgIntegration *integration.Integration, args args.ArgumentList) { +func PopulateExecutionPlanMetrics(results []datamodels.IndividualQueryMetrics, pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) { if len(results) == 0 { log.Debug("No individual queries found.") return } - executionDetailsList := GetExecutionPlanMetrics(results, args) - commonutils.IngestMetric(executionDetailsList, "PostgresExecutionPlanMetrics", pgIntegration, args) + executionDetailsList := GetExecutionPlanMetrics(results, gv) + commonutils.IngestMetric(executionDetailsList, "PostgresExecutionPlanMetrics", pgIntegration, gv) } -func GetExecutionPlanMetrics(results []datamodels.IndividualQueryMetrics, args args.ArgumentList) []interface{} { +func GetExecutionPlanMetrics(results []datamodels.IndividualQueryMetrics, gv *globalvariables.GlobalVariables) []interface{} { var executionPlanMetricsList []interface{} var groupIndividualQueriesByDatabase = GroupQueriesByDatabase(results) for dbName, individualQueriesList := range groupIndividualQueriesByDatabase { - connectionInfo := performancedbconnection.DefaultConnectionInfo(&args) + connectionInfo := performancedbconnection.DefaultConnectionInfo(&gv.Arguments) dbConn, err := connectionInfo.NewConnection(dbName) if err != nil { log.Error("Error opening database connection: %v", err) diff --git a/src/query-performance-monitoring/performance-metrics/execution_plan_metrics_test.go b/src/query-performance-monitoring/performance-metrics/execution_plan_metrics_test.go index 3d18fbe3..525494e2 100644 --- 
a/src/query-performance-monitoring/performance-metrics/execution_plan_metrics_test.go +++ b/src/query-performance-monitoring/performance-metrics/execution_plan_metrics_test.go @@ -3,6 +3,8 @@ package performancemetrics_test import ( "testing" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" + "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/nri-postgresql/src/args" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" @@ -14,7 +16,9 @@ func TestPopulateExecutionPlanMetrics(t *testing.T) { pgIntegration, _ := integration.New("test", "1.0.0") args := args.ArgumentList{} results := []datamodels.IndividualQueryMetrics{} - performancemetrics.PopulateExecutionPlanMetrics(results, pgIntegration, args) + gv := global_variables.SetGlobalVariables(args, uint64(13), "testdb") + + performancemetrics.PopulateExecutionPlanMetrics(results, pgIntegration, gv) assert.Empty(t, pgIntegration.Entities) } diff --git a/src/query-performance-monitoring/performance-metrics/individual_query_metrics.go b/src/query-performance-monitoring/performance-metrics/individual_query_metrics.go index e87747ce..796b601f 100644 --- a/src/query-performance-monitoring/performance-metrics/individual_query_metrics.go +++ b/src/query-performance-monitoring/performance-metrics/individual_query_metrics.go @@ -5,15 +5,15 @@ import ( "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/infra-integrations-sdk/v3/log" - "github.com/newrelic/nri-postgresql/src/args" performancedbconnection "github.com/newrelic/nri-postgresql/src/connection" commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" + globalvariables 
"github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations" ) -func PopulateIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, slowRunningQueries []datamodels.SlowRunningQueryMetrics, pgIntegration *integration.Integration, args args.ArgumentList, databaseNames string, version uint64) []datamodels.IndividualQueryMetrics { - isEligible, err := validations.CheckIndividualQueryMetricsFetchEligibility(conn, version) +func PopulateIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, slowRunningQueries []datamodels.SlowRunningQueryMetrics, pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) []datamodels.IndividualQueryMetrics { + isEligible, err := validations.CheckIndividualQueryMetricsFetchEligibility(conn, gv.Version) if err != nil { log.Error("Error executing query: %v", err) return nil @@ -23,21 +23,16 @@ func PopulateIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnectio return nil } log.Debug("Extension 'pg_stat_monitor' enabled.") - individualQueryMetricsInterface, individualQueriesForExecPlan := GetIndividualQueryMetrics(conn, slowRunningQueries, args, databaseNames, version) + individualQueryMetricsInterface, individualQueriesForExecPlan := GetIndividualQueryMetrics(conn, slowRunningQueries, gv) if len(individualQueryMetricsInterface) == 0 { log.Debug("No individual queries found.") return nil } - commonutils.IngestMetric(individualQueryMetricsInterface, "PostgresIndividualQueries", pgIntegration, args) + commonutils.IngestMetric(individualQueryMetricsInterface, "PostgresIndividualQueries", pgIntegration, gv) return individualQueriesForExecPlan } -func ConstructIndividualQuery(slowRunningQueries datamodels.SlowRunningQueryMetrics, args args.ArgumentList, databaseNames string, versionSpecificQuery string) string { - query := fmt.Sprintf(versionSpecificQuery, 
*slowRunningQueries.QueryID, databaseNames, args.QueryResponseTimeThreshold, min(args.QueryCountThreshold, commonutils.MaxIndividualQueryThreshold)) - return query -} - -func GetIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, slowRunningQueries []datamodels.SlowRunningQueryMetrics, args args.ArgumentList, databaseNames string, version uint64) ([]interface{}, []datamodels.IndividualQueryMetrics) { +func GetIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, slowRunningQueries []datamodels.SlowRunningQueryMetrics, gv *globalvariables.GlobalVariables) ([]interface{}, []datamodels.IndividualQueryMetrics) { if len(slowRunningQueries) == 0 { log.Debug("No slow running queries found.") return nil, nil @@ -45,7 +40,7 @@ func GetIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, sl var individualQueryMetricsForExecPlanList []datamodels.IndividualQueryMetrics var individualQueryMetricsListInterface []interface{} anonymizedQueriesByDB := processForAnonymizeQueryMap(slowRunningQueries) - versionSpecificIndividualQuery, err := commonutils.FetchVersionSpecificIndividualQueries(version) + versionSpecificIndividualQuery, err := commonutils.FetchVersionSpecificIndividualQueries(gv.Version) if err != nil { log.Error("Unsupported postgres version: %v", err) return nil, nil @@ -55,13 +50,13 @@ func GetIndividualQueryMetrics(conn *performancedbconnection.PGSQLConnection, sl if slowRunningMetric.QueryID == nil { continue } - getIndividualQueriesSamples(conn, slowRunningMetric, args, databaseNames, anonymizedQueriesByDB, &individualQueryMetricsForExecPlanList, &individualQueryMetricsListInterface, versionSpecificIndividualQuery) + getIndividualQueriesSamples(conn, slowRunningMetric, gv, anonymizedQueriesByDB, &individualQueryMetricsForExecPlanList, &individualQueryMetricsListInterface, versionSpecificIndividualQuery) } return individualQueryMetricsListInterface, individualQueryMetricsForExecPlanList } -func 
getIndividualQueriesSamples(conn *performancedbconnection.PGSQLConnection, slowRunningQueries datamodels.SlowRunningQueryMetrics, args args.ArgumentList, databaseNames string, anonymizedQueriesByDB map[string]map[string]string, individualQueryMetricsForExecPlanList *[]datamodels.IndividualQueryMetrics, individualQueryMetricsListInterface *[]interface{}, versionSpecificIndividualQuery string) { - query := ConstructIndividualQuery(slowRunningQueries, args, databaseNames, versionSpecificIndividualQuery) +func getIndividualQueriesSamples(conn *performancedbconnection.PGSQLConnection, slowRunningQueries datamodels.SlowRunningQueryMetrics, gv *globalvariables.GlobalVariables, anonymizedQueriesByDB map[string]map[string]string, individualQueryMetricsForExecPlanList *[]datamodels.IndividualQueryMetrics, individualQueryMetricsListInterface *[]interface{}, versionSpecificIndividualQuery string) { + query := fmt.Sprintf(versionSpecificIndividualQuery, *slowRunningQueries.QueryID, gv.DatabaseString, gv.QueryResponseTimeThreshold, min(gv.QueryCountThreshold, commonutils.MaxIndividualQueryThreshold)) if query == "" { log.Debug("Error constructing individual query") return diff --git a/src/query-performance-monitoring/performance-metrics/individual_query_metrics_test.go b/src/query-performance-monitoring/performance-metrics/individual_query_metrics_test.go index d31bef21..1835ead7 100644 --- a/src/query-performance-monitoring/performance-metrics/individual_query_metrics_test.go +++ b/src/query-performance-monitoring/performance-metrics/individual_query_metrics_test.go @@ -5,16 +5,16 @@ import ( "regexp" "testing" - "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" - "gopkg.in/DATA-DOG/go-sqlmock.v1" - "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/nri-postgresql/src/args" "github.com/newrelic/nri-postgresql/src/connection" commonutils 
"github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" performancemetrics "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/performance-metrics" + "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" "github.com/stretchr/testify/assert" + "gopkg.in/DATA-DOG/go-sqlmock.v1" ) func TestPopulateIndividualQueryMetrics(t *testing.T) { @@ -23,11 +23,10 @@ func TestPopulateIndividualQueryMetrics(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) - // Mock the extension check + gv := global_variables.SetGlobalVariables(args, version, databaseName) mock.ExpectQuery(regexp.QuoteMeta("SELECT count(*) FROM pg_extension WHERE extname = 'pg_stat_monitor'")).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1)) mockQueryID := "-123" mockQueryText := "SELECT 1" - // Mock the individual query query := fmt.Sprintf(queries.IndividualQuerySearchV13AndAbove, mockQueryID, databaseName, args.QueryResponseTimeThreshold, min(args.QueryCountThreshold, commonutils.MaxIndividualQueryThreshold)) mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ "newrelic", "query", "queryid", "datname", "planid", "avg_cpu_time_ms", "avg_exec_time_ms", @@ -43,27 +42,12 @@ func TestPopulateIndividualQueryMetrics(t *testing.T) { }, } - individualQueryMetrics := performancemetrics.PopulateIndividualQueryMetrics(conn, slowRunningQueries, pgIntegration, args, databaseName, version) + individualQueryMetrics := performancemetrics.PopulateIndividualQueryMetrics(conn, slowRunningQueries, pgIntegration, gv) assert.Len(t, individualQueryMetrics, 1) assert.NoError(t, mock.ExpectationsWereMet()) } -func 
TestConstructIndividualQuery(t *testing.T) { - mockQueryID := "-123" - slowRunningQuery := datamodels.SlowRunningQueryMetrics{ - QueryID: &mockQueryID, - } - args := args.ArgumentList{QueryResponseTimeThreshold: 100} - databaseName := "testdb" - versionSpecificQuery := queries.IndividualQuerySearchV13AndAbove - - actual := performancemetrics.ConstructIndividualQuery(slowRunningQuery, args, databaseName, versionSpecificQuery) - expected := fmt.Sprintf(versionSpecificQuery, *slowRunningQuery.QueryID, databaseName, args.QueryResponseTimeThreshold, min(args.QueryCountThreshold, commonutils.MaxIndividualQueryThreshold)) - - assert.Equal(t, actual, expected) -} - func TestGetIndividualQueryMetrics(t *testing.T) { conn, mock := connection.CreateMockSQL(t) args := args.ArgumentList{QueryCountThreshold: 10} @@ -71,6 +55,7 @@ func TestGetIndividualQueryMetrics(t *testing.T) { version := uint64(13) mockQueryID := "-123" mockQueryText := "SELECT 1" + gv := global_variables.SetGlobalVariables(args, version, databaseName) // Mock the individual query query := fmt.Sprintf(queries.IndividualQuerySearchV13AndAbove, mockQueryID, databaseName, args.QueryResponseTimeThreshold, min(args.QueryCountThreshold, commonutils.MaxIndividualQueryThreshold)) @@ -88,7 +73,7 @@ func TestGetIndividualQueryMetrics(t *testing.T) { }, } - individualQueryMetricsInterface, individualQueryMetrics := performancemetrics.GetIndividualQueryMetrics(conn, slowRunningQueries, args, databaseName, version) + individualQueryMetricsInterface, individualQueryMetrics := performancemetrics.GetIndividualQueryMetrics(conn, slowRunningQueries, gv) assert.Len(t, individualQueryMetricsInterface, 1) assert.Len(t, individualQueryMetrics, 1) diff --git a/src/query-performance-monitoring/performance-metrics/slow_query_metrics.go b/src/query-performance-monitoring/performance-metrics/slow_query_metrics.go index 10c6af56..92d93151 100644 --- a/src/query-performance-monitoring/performance-metrics/slow_query_metrics.go +++ 
b/src/query-performance-monitoring/performance-metrics/slow_query_metrics.go @@ -5,22 +5,22 @@ import ( "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/infra-integrations-sdk/v3/log" - "github.com/newrelic/nri-postgresql/src/args" performancedbconnection "github.com/newrelic/nri-postgresql/src/connection" commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" + globalvariables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations" ) -func GetSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, args args.ArgumentList, databaseNames string, version uint64) ([]datamodels.SlowRunningQueryMetrics, []interface{}, error) { +func GetSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, gv *globalvariables.GlobalVariables) ([]datamodels.SlowRunningQueryMetrics, []interface{}, error) { var slowQueryMetricsList []datamodels.SlowRunningQueryMetrics var slowQueryMetricsListInterface []interface{} - versionSpecificSlowQuery, err := commonutils.FetchVersionSpecificSlowQueries(version) + versionSpecificSlowQuery, err := commonutils.FetchVersionSpecificSlowQueries(gv.Version) if err != nil { log.Error("Unsupported postgres version: %v", err) return nil, nil, err } - var query = fmt.Sprintf(versionSpecificSlowQuery, databaseNames, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) + var query = fmt.Sprintf(versionSpecificSlowQuery, gv.DatabaseString, min(gv.QueryCountThreshold, commonutils.MaxQueryThreshold)) rows, err := conn.Queryx(query) if err != nil { return nil, nil, err @@ -40,8 +40,8 @@ func GetSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, args a return slowQueryMetricsList, 
slowQueryMetricsListInterface, nil } -func PopulateSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, args args.ArgumentList, databaseNames string, version uint64) []datamodels.SlowRunningQueryMetrics { - isEligible, err := validations.CheckSlowQueryMetricsFetchEligibility(conn, version) +func PopulateSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) []datamodels.SlowRunningQueryMetrics { + isEligible, err := validations.CheckSlowQueryMetricsFetchEligibility(conn, gv.Version) if err != nil { log.Error("Error executing query: %v", err) return nil @@ -51,7 +51,7 @@ func PopulateSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, p return nil } - slowQueryMetricsList, slowQueryMetricsListInterface, err := GetSlowRunningMetrics(conn, args, databaseNames, version) + slowQueryMetricsList, slowQueryMetricsListInterface, err := GetSlowRunningMetrics(conn, gv) if err != nil { log.Error("Error fetching slow-running queries: %v", err) return nil @@ -61,6 +61,6 @@ func PopulateSlowRunningMetrics(conn *performancedbconnection.PGSQLConnection, p log.Debug("No slow-running queries found.") return nil } - commonutils.IngestMetric(slowQueryMetricsListInterface, "PostgresSlowQueries", pgIntegration, args) + commonutils.IngestMetric(slowQueryMetricsListInterface, "PostgresSlowQueries", pgIntegration, gv) return slowQueryMetricsList } diff --git a/src/query-performance-monitoring/performance-metrics/slow_query_metrics_test.go b/src/query-performance-monitoring/performance-metrics/slow_query_metrics_test.go index c25c05a1..69bceac4 100644 --- a/src/query-performance-monitoring/performance-metrics/slow_query_metrics_test.go +++ b/src/query-performance-monitoring/performance-metrics/slow_query_metrics_test.go @@ -5,15 +5,15 @@ import ( "regexp" "testing" - commonutils 
"github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" - "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" - "gopkg.in/DATA-DOG/go-sqlmock.v1" - "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/nri-postgresql/src/args" "github.com/newrelic/nri-postgresql/src/connection" + commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" performancemetrics "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/performance-metrics" + "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" "github.com/stretchr/testify/assert" + "gopkg.in/DATA-DOG/go-sqlmock.v1" ) func TestPopulateSlowMetrics(t *testing.T) { @@ -22,6 +22,8 @@ func TestPopulateSlowMetrics(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) + gv := global_variables.SetGlobalVariables(args, version, databaseName) + validationQuery := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_stat_statements") mock.ExpectQuery(regexp.QuoteMeta(validationQuery)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1)) expectedQuery := queries.SlowQueriesForV13AndAbove @@ -34,7 +36,7 @@ func TestPopulateSlowMetrics(t *testing.T) { 15.0, 5, 2, "SELECT", "2023-01-01T00:00:00Z", )) - performancemetrics.PopulateSlowRunningMetrics(conn, pgIntegration, args, databaseName, version) + performancemetrics.PopulateSlowRunningMetrics(conn, pgIntegration, gv) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -45,9 +47,11 @@ func TestPopulateSlowMetricsInEligibility(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) + gv := 
global_variables.SetGlobalVariables(args, version, databaseName) + validationQuery := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_stat_statements") mock.ExpectQuery(regexp.QuoteMeta(validationQuery)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0)) - slowqueryList := performancemetrics.PopulateSlowRunningMetrics(conn, pgIntegration, args, databaseName, version) + slowqueryList := performancemetrics.PopulateSlowRunningMetrics(conn, pgIntegration, gv) assert.Len(t, slowqueryList, 0) assert.NoError(t, mock.ExpectationsWereMet()) @@ -57,6 +61,7 @@ func runSlowQueryTest(t *testing.T, query string, version uint64, expectedLength conn, mock := connection.CreateMockSQL(t) args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" + gv := global_variables.SetGlobalVariables(args, version, databaseName) query = fmt.Sprintf(query, "testdb", min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ @@ -66,8 +71,7 @@ func runSlowQueryTest(t *testing.T, query string, version uint64, expectedLength "newrelic_value", "queryid1", "SELECT 1", "testdb", "public", 10, 15.0, 5, 2, "SELECT", "2023-01-01T00:00:00Z", )) - slowQueryList, _, err := performancemetrics.GetSlowRunningMetrics(conn, args, databaseName, version) - + slowQueryList, _, err := performancemetrics.GetSlowRunningMetrics(conn, gv) assert.NoError(t, err) assert.Len(t, slowQueryList, expectedLength) assert.NoError(t, mock.ExpectationsWereMet()) @@ -86,13 +90,14 @@ func TestGetSlowRunningEmptyMetrics(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) + gv := global_variables.SetGlobalVariables(args, version, databaseName) expectedQuery := queries.SlowQueriesForV13AndAbove query := fmt.Sprintf(expectedQuery, "testdb", min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) 
mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ "newrelic", "query_id", "query_text", "database_name", "schema_name", "execution_count", "avg_elapsed_time_ms", "avg_disk_reads", "avg_disk_writes", "statement_type", "collection_timestamp", })) - slowQueryList, _, err := performancemetrics.GetSlowRunningMetrics(conn, args, databaseName, version) + slowQueryList, _, err := performancemetrics.GetSlowRunningMetrics(conn, gv) assert.NoError(t, err) assert.Len(t, slowQueryList, 0) @@ -104,7 +109,8 @@ func TestGetSlowRunningMetricsUnsupportedVersion(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(11) - slowQueryList, _, err := performancemetrics.GetSlowRunningMetrics(conn, args, databaseName, version) + gv := global_variables.SetGlobalVariables(args, version, databaseName) + slowQueryList, _, err := performancemetrics.GetSlowRunningMetrics(conn, gv) assert.EqualError(t, err, commonutils.ErrUnsupportedVersion.Error()) assert.Len(t, slowQueryList, 0) assert.NoError(t, mock.ExpectationsWereMet()) diff --git a/src/query-performance-monitoring/performance-metrics/wait_event_metrics.go b/src/query-performance-monitoring/performance-metrics/wait_event_metrics.go index 84bbb15a..4db4f087 100644 --- a/src/query-performance-monitoring/performance-metrics/wait_event_metrics.go +++ b/src/query-performance-monitoring/performance-metrics/wait_event_metrics.go @@ -5,16 +5,16 @@ import ( "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/infra-integrations-sdk/v3/log" - "github.com/newrelic/nri-postgresql/src/args" performancedbconnection "github.com/newrelic/nri-postgresql/src/connection" commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/datamodels" + globalvariables 
"github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/validations" ) -func PopulateWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, args args.ArgumentList, databaseNames string, version uint64) error { - isEligible, err := validations.CheckWaitEventMetricsFetchEligibility(conn, version) +func PopulateWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, pgIntegration *integration.Integration, gv *globalvariables.GlobalVariables) error { + isEligible, err := validations.CheckWaitEventMetricsFetchEligibility(conn, gv.Version) if err != nil { log.Error("Error executing query: %v", err) return commonutils.ErrUnExpectedError @@ -23,7 +23,7 @@ func PopulateWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, pgI log.Debug("Extension 'pg_wait_sampling' or 'pg_stat_statement' is not enabled or unsupported version.") return commonutils.ErrNotEligible } - waitEventMetricsList, err := GetWaitEventMetrics(conn, args, databaseNames) + waitEventMetricsList, err := GetWaitEventMetrics(conn, gv) if err != nil { log.Error("Error fetching wait event queries: %v", err) return commonutils.ErrUnExpectedError @@ -33,13 +33,13 @@ func PopulateWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, pgI log.Debug("No wait event queries found.") return nil } - commonutils.IngestMetric(waitEventMetricsList, "PostgresWaitEvents", pgIntegration, args) + commonutils.IngestMetric(waitEventMetricsList, "PostgresWaitEvents", pgIntegration, gv) return nil } -func GetWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, args args.ArgumentList, databaseNames string) ([]interface{}, error) { +func GetWaitEventMetrics(conn *performancedbconnection.PGSQLConnection, gv *globalvariables.GlobalVariables) 
([]interface{}, error) { var waitEventMetricsList []interface{} - var query = fmt.Sprintf(queries.WaitEvents, databaseNames, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) + var query = fmt.Sprintf(queries.WaitEvents, gv.DatabaseString, min(gv.QueryCountThreshold, commonutils.MaxQueryThreshold)) rows, err := conn.Queryx(query) if err != nil { return nil, err diff --git a/src/query-performance-monitoring/performance-metrics/wait_event_metrics_test.go b/src/query-performance-monitoring/performance-metrics/wait_event_metrics_test.go index eeea9a36..251053e2 100644 --- a/src/query-performance-monitoring/performance-metrics/wait_event_metrics_test.go +++ b/src/query-performance-monitoring/performance-metrics/wait_event_metrics_test.go @@ -5,15 +5,15 @@ import ( "regexp" "testing" - commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" - "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" - "gopkg.in/DATA-DOG/go-sqlmock.v1" - "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/nri-postgresql/src/args" "github.com/newrelic/nri-postgresql/src/connection" + commonutils "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/common-utils" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" performancemetrics "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/performance-metrics" + "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/queries" "github.com/stretchr/testify/assert" + "gopkg.in/DATA-DOG/go-sqlmock.v1" ) func TestPopulateWaitEventMetrics(t *testing.T) { @@ -22,6 +22,8 @@ func TestPopulateWaitEventMetrics(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) + gv := global_variables.SetGlobalVariables(args, version, 
databaseName) + validationQuery := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_wait_sampling") mock.ExpectQuery(regexp.QuoteMeta(validationQuery)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1)) validationQueryStatStatements := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_stat_statements") @@ -33,7 +35,7 @@ func TestPopulateWaitEventMetrics(t *testing.T) { "Locks:Lock", "Locks", 1000.0, "2023-01-01T00:00:00Z", "queryid1", "SELECT 1", "testdb", )) - err := performancemetrics.PopulateWaitEventMetrics(conn, pgIntegration, args, databaseName, version) + err := performancemetrics.PopulateWaitEventMetrics(conn, pgIntegration, gv) assert.NoError(t, err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -44,7 +46,9 @@ func TestPopulateWaitEventMetricsInEligibility(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(11) - err := performancemetrics.PopulateWaitEventMetrics(conn, pgIntegration, args, databaseName, version) + gv := global_variables.SetGlobalVariables(args, version, databaseName) + + err := performancemetrics.PopulateWaitEventMetrics(conn, pgIntegration, gv) assert.EqualError(t, err, commonutils.ErrNotEligible.Error()) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -55,12 +59,14 @@ func TestPopulateWaitEventMetricsExtensionsNotEnable(t *testing.T) { args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" version := uint64(13) + gv := global_variables.SetGlobalVariables(args, version, databaseName) + validationQuery := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_wait_sampling") mock.ExpectQuery(regexp.QuoteMeta(validationQuery)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0)) validationQueryStatStatements := fmt.Sprintf("SELECT count(*) FROM pg_extension WHERE extname = '%s'", "pg_stat_statements") 
mock.ExpectQuery(regexp.QuoteMeta(validationQueryStatStatements)).WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0)) - err := performancemetrics.PopulateWaitEventMetrics(conn, pgIntegration, args, databaseName, version) + err := performancemetrics.PopulateWaitEventMetrics(conn, pgIntegration, gv) assert.EqualError(t, err, commonutils.ErrNotEligible.Error()) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -69,13 +75,15 @@ func TestGetWaitEventMetrics(t *testing.T) { conn, mock := connection.CreateMockSQL(t) args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" + gv := global_variables.SetGlobalVariables(args, uint64(14), databaseName) + var query = fmt.Sprintf(queries.WaitEvents, databaseName, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ "wait_event_name", "wait_category", "total_wait_time_ms", "collection_timestamp", "query_id", "query_text", "database_name", }).AddRow( "Locks:Lock", "Locks", 1000.0, "2023-01-01T00:00:00Z", "queryid1", "SELECT 1", "testdb", )) - waitEventsList, err := performancemetrics.GetWaitEventMetrics(conn, args, databaseName) + waitEventsList, err := performancemetrics.GetWaitEventMetrics(conn, gv) assert.NoError(t, err) assert.Len(t, waitEventsList, 1) @@ -86,11 +94,13 @@ func TestGetWaitEventEmptyMetrics(t *testing.T) { conn, mock := connection.CreateMockSQL(t) args := args.ArgumentList{QueryCountThreshold: 10} databaseName := "testdb" + gv := global_variables.SetGlobalVariables(args, uint64(14), databaseName) + var query = fmt.Sprintf(queries.WaitEvents, databaseName, min(args.QueryCountThreshold, commonutils.MaxQueryThreshold)) mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(sqlmock.NewRows([]string{ "wait_event_name", "wait_category", "total_wait_time_ms", "collection_timestamp", "query_id", "query_text", "database_name", })) - waitEventsList, err := performancemetrics.GetWaitEventMetrics(conn, 
args, databaseName) + waitEventsList, err := performancemetrics.GetWaitEventMetrics(conn, gv) assert.NoError(t, err) assert.Len(t, waitEventsList, 0) assert.NoError(t, mock.ExpectationsWereMet()) diff --git a/src/query-performance-monitoring/queries/queries.go b/src/query-performance-monitoring/queries/queries.go index c427fcc4..22b22e26 100644 --- a/src/query-performance-monitoring/queries/queries.go +++ b/src/query-performance-monitoring/queries/queries.go @@ -2,208 +2,216 @@ package queries const ( - SlowQueriesForV13AndAbove = `SELECT 'newrelic' as newrelic, - pss.queryid AS query_id, - LEFT(pss.query, 4095) AS query_text, - pd.datname AS database_name, - current_schema() AS schema_name, - pss.calls AS execution_count, - ROUND((pss.total_exec_time / pss.calls)::numeric, 3) AS avg_elapsed_time_ms, - pss.shared_blks_read / pss.calls AS avg_disk_reads, - pss.shared_blks_written / pss.calls AS avg_disk_writes, - CASE - WHEN pss.query ILIKE 'SELECT%%' THEN 'SELECT' - WHEN pss.query ILIKE 'INSERT%%' THEN 'INSERT' - WHEN pss.query ILIKE 'UPDATE%%' THEN 'UPDATE' - WHEN pss.query ILIKE 'DELETE%%' THEN 'DELETE' - ELSE 'OTHER' - END AS statement_type, - to_char(NOW() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS collection_timestamp - FROM - pg_stat_statements pss - JOIN - pg_database pd ON pss.dbid = pd.oid - WHERE - pd.datname in (%s) - AND pss.query NOT ILIKE 'EXPLAIN (FORMAT JSON) %%' - AND pss.query NOT ILIKE 'SELECT $1 as newrelic%%' - AND pss.query NOT ILIKE 'WITH wait_history AS%%' - AND pss.query NOT ILIKE 'select -- BLOATQUERY%%' - AND pss.query NOT ILIKE 'select -- INDEXQUERY%%' - AND pss.query NOT ILIKE 'SELECT -- TABLEQUERY%%' - AND pss.query NOT ILIKE 'SELECT table_schema%%' - ORDER BY - avg_elapsed_time_ms DESC -- Order by the average elapsed time in descending order - LIMIT %d;` + SlowQueriesForV13AndAbove = `SELECT 'newrelic' as newrelic, -- Common value to filter with like operator in slow query metrics + pss.queryid AS query_id, -- Unique 
identifier for the query + LEFT(pss.query, 4095) AS query_text, -- Query text truncated to 4095 characters + pd.datname AS database_name, -- Name of the database + current_schema() AS schema_name, -- Name of the current schema + pss.calls AS execution_count, -- Number of times the query was executed + ROUND((pss.total_exec_time / pss.calls)::numeric, 3) AS avg_elapsed_time_ms, -- Average execution time in milliseconds + pss.shared_blks_read / pss.calls AS avg_disk_reads, -- Average number of disk reads per execution + pss.shared_blks_written / pss.calls AS avg_disk_writes, -- Average number of disk writes per execution + CASE + WHEN pss.query ILIKE 'SELECT%%' THEN 'SELECT' -- Query type is SELECT + WHEN pss.query ILIKE 'INSERT%%' THEN 'INSERT' -- Query type is INSERT + WHEN pss.query ILIKE 'UPDATE%%' THEN 'UPDATE' -- Query type is UPDATE + WHEN pss.query ILIKE 'DELETE%%' THEN 'DELETE' -- Query type is DELETE + ELSE 'OTHER' -- Query type is OTHER + END AS statement_type, -- Type of SQL statement + to_char(NOW() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS collection_timestamp -- Timestamp of data collection + FROM + pg_stat_statements pss + JOIN + pg_database pd ON pss.dbid = pd.oid + WHERE + pd.datname in (%s) -- List of database names + AND pss.query NOT ILIKE 'EXPLAIN (FORMAT JSON) %%' -- Exclude EXPLAIN queries + AND pss.query NOT ILIKE 'SELECT $1 as newrelic%%' -- Exclude specific New Relic queries + AND pss.query NOT ILIKE 'WITH wait_history AS%%' -- Exclude specific WITH queries + AND pss.query NOT ILIKE 'select -- BLOATQUERY%%' -- Exclude BLOATQUERY + AND pss.query NOT ILIKE 'select -- INDEXQUERY%%' -- Exclude INDEXQUERY + AND pss.query NOT ILIKE 'SELECT -- TABLEQUERY%%' -- Exclude TABLEQUERY + AND pss.query NOT ILIKE 'SELECT table_schema%%' -- Exclude table_schema queries + ORDER BY + avg_elapsed_time_ms DESC -- Order by the average elapsed time in descending order + LIMIT %d;` - SlowQueriesForV12 = `SELECT 'newrelic' as newrelic, - pss.queryid AS 
query_id, - LEFT(pss.query, 4095) AS query_text, - pd.datname AS database_name, - current_schema() AS schema_name, - pss.calls AS execution_count, - ROUND((pss.total_time / pss.calls)::numeric, 3) AS avg_elapsed_time_ms, - pss.shared_blks_read / pss.calls AS avg_disk_reads, - pss.shared_blks_written / pss.calls AS avg_disk_writes, - CASE - WHEN pss.query ILIKE 'SELECT%%' THEN 'SELECT' - WHEN pss.query ILIKE 'INSERT%%' THEN 'INSERT' - WHEN pss.query ILIKE 'UPDATE%%' THEN 'UPDATE' - WHEN pss.query ILIKE 'DELETE%%' THEN 'DELETE' - ELSE 'OTHER' - END AS statement_type, - to_char(NOW() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS collection_timestamp - FROM - pg_stat_statements pss - JOIN - pg_database pd ON pss.dbid = pd.oid - WHERE - pd.datname in (%s) - AND pss.query NOT ILIKE 'EXPLAIN (FORMAT JSON) %%' - AND pss.query NOT ILIKE 'SELECT $1 as newrelic%%' - AND pss.query NOT ILIKE 'WITH wait_history AS%%' - AND pss.query NOT ILIKE 'select -- BLOATQUERY%%' - AND pss.query NOT ILIKE 'select -- INDEXQUERY%%' - AND pss.query NOT ILIKE 'SELECT -- TABLEQUERY%%' - AND pss.query NOT ILIKE 'SELECT table_schema%%' - AND pss.query NOT ILIKE 'SELECT D.datname%%' - ORDER BY - avg_elapsed_time_ms DESC -- Order by the average elapsed time in descending order - LIMIT - %d;` + // SlowQueriesForV12 retrieves slow queries and their statistics for PostgreSQL version 12 + SlowQueriesForV12 = `SELECT 'newrelic' as newrelic, -- Common value to filter with like operator in slow query metrics + pss.queryid AS query_id, -- Unique identifier for the query + LEFT(pss.query, 4095) AS query_text, -- Query text truncated to 4095 characters + pd.datname AS database_name, -- Name of the database + current_schema() AS schema_name, -- Name of the current schema + pss.calls AS execution_count, -- Number of times the query was executed + ROUND((pss.total_time / pss.calls)::numeric, 3) AS avg_elapsed_time_ms, -- Average execution time in milliseconds + pss.shared_blks_read / pss.calls AS 
avg_disk_reads, -- Average number of disk reads per execution + pss.shared_blks_written / pss.calls AS avg_disk_writes, -- Average number of disk writes per execution + CASE + WHEN pss.query ILIKE 'SELECT%%' THEN 'SELECT' -- Query type is SELECT + WHEN pss.query ILIKE 'INSERT%%' THEN 'INSERT' -- Query type is INSERT + WHEN pss.query ILIKE 'UPDATE%%' THEN 'UPDATE' -- Query type is UPDATE + WHEN pss.query ILIKE 'DELETE%%' THEN 'DELETE' -- Query type is DELETE + ELSE 'OTHER' -- Query type is OTHER + END AS statement_type, -- Type of SQL statement + to_char(NOW() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS collection_timestamp -- Timestamp of data collection + FROM + pg_stat_statements pss + JOIN + pg_database pd ON pss.dbid = pd.oid + WHERE + pd.datname in (%s) -- List of database names + AND pss.query NOT ILIKE 'EXPLAIN (FORMAT JSON) %%' -- Exclude EXPLAIN queries + AND pss.query NOT ILIKE 'SELECT $1 as newrelic%%' -- Exclude specific New Relic queries + AND pss.query NOT ILIKE 'WITH wait_history AS%%' -- Exclude specific WITH queries + AND pss.query NOT ILIKE 'select -- BLOATQUERY%%' -- Exclude BLOATQUERY + AND pss.query NOT ILIKE 'select -- INDEXQUERY%%' -- Exclude INDEXQUERY + AND pss.query NOT ILIKE 'SELECT -- TABLEQUERY%%' -- Exclude TABLEQUERY + AND pss.query NOT ILIKE 'SELECT table_schema%%' -- Exclude table_schema queries + AND pss.query NOT ILIKE 'SELECT D.datname%%' -- Exclude specific datname queries + ORDER BY + avg_elapsed_time_ms DESC -- Order by the average elapsed time in descending order + LIMIT + %d; -- Limit the number of results` + // WaitEvents retrieves wait events and their statistics WaitEvents = `WITH wait_history AS ( - SELECT - wh.pid, - wh.event_type, - wh.event, - wh.ts, - pg_database.datname AS database_name, - LEAD(wh.ts) OVER (PARTITION BY wh.pid ORDER BY wh.ts) - wh.ts AS duration, - LEFT(sa.query, 4095) AS query_text, - sa.queryid AS query_id - FROM - pg_wait_sampling_history wh - LEFT JOIN - pg_stat_statements sa ON 
wh.queryid = sa.queryid - LEFT JOIN - pg_database ON pg_database.oid = sa.dbid - WHERE pg_database.datname in (%s) - ) - SELECT - event_type || ':' || event AS wait_event_name, - CASE - WHEN event_type IN ('LWLock', 'Lock') THEN 'Locks' - WHEN event_type = 'IO' THEN 'Disk IO' - WHEN event_type = 'CPU' THEN 'CPU' - ELSE 'Other' - END AS wait_category, - EXTRACT(EPOCH FROM SUM(duration)) * 1000 AS total_wait_time_ms, -- Convert duration to milliseconds - to_char(NOW() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS collection_timestamp, - query_id, - query_text, - database_name - FROM wait_history - WHERE query_text NOT LIKE 'EXPLAIN (FORMAT JSON) %%' AND query_id IS NOT NULL AND event_type IS NOT NULL - GROUP BY event_type, event, query_id, query_text, database_name - ORDER BY total_wait_time_ms DESC - LIMIT %d;` + SELECT + wh.pid, -- Process ID + wh.event_type, -- Type of the wait event + wh.event, -- Wait event + wh.ts, -- Timestamp of the wait event + pg_database.datname AS database_name, -- Name of the database + LEAD(wh.ts) OVER (PARTITION BY wh.pid ORDER BY wh.ts) - wh.ts AS duration, -- Duration of the wait event + LEFT(sa.query, 4095) AS query_text, -- Query text truncated to 4095 characters + sa.queryid AS query_id -- Unique identifier for the query + FROM + pg_wait_sampling_history wh + LEFT JOIN + pg_stat_statements sa ON wh.queryid = sa.queryid + LEFT JOIN + pg_database ON pg_database.oid = sa.dbid + WHERE pg_database.datname in (%s) -- List of database names + ) + SELECT + event_type || ':' || event AS wait_event_name, -- Concatenated wait event name + CASE + WHEN event_type IN ('LWLock', 'Lock') THEN 'Locks' -- Wait category is Locks + WHEN event_type = 'IO' THEN 'Disk IO' -- Wait category is Disk IO + WHEN event_type = 'CPU' THEN 'CPU' -- Wait category is CPU + ELSE 'Other' -- Wait category is Other + END AS wait_category, -- Category of the wait event + EXTRACT(EPOCH FROM SUM(duration)) * 1000 AS total_wait_time_ms, -- Convert duration to 
milliseconds + to_char(NOW() AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS collection_timestamp, -- Timestamp of data collection + query_id, -- Unique identifier for the query + query_text, -- Query text + database_name -- Name of the database + FROM wait_history + WHERE query_text NOT LIKE 'EXPLAIN (FORMAT JSON) %%' AND query_id IS NOT NULL AND event_type IS NOT NULL + GROUP BY event_type, event, query_id, query_text, database_name + ORDER BY total_wait_time_ms DESC -- Order by the total wait time in descending order + LIMIT %d; -- Limit the number of results` + + // BlockingQueriesForV14AndAbove retrieves information about blocking and blocked queries for PostgreSQL version 14 and above + BlockingQueriesForV14AndAbove = `SELECT 'newrelic' as newrelic, -- Common value to filter with like operator in slow query metrics + blocked_activity.pid AS blocked_pid, -- Process ID of the blocked query + LEFT(blocked_statements.query, 4095) AS blocked_query, -- Blocked query text truncated to 4095 characters + blocked_statements.queryid AS blocked_query_id, -- Unique identifier for the blocked query + blocked_activity.query_start AS blocked_query_start, -- Start time of the blocked query + blocked_activity.datname AS database_name, -- Name of the database + blocking_activity.pid AS blocking_pid, -- Process ID of the blocking query + LEFT(blocking_statements.query, 4095) AS blocking_query, -- Blocking query text truncated to 4095 characters + blocking_statements.queryid AS blocking_query_id, -- Unique identifier for the blocking query + blocking_activity.query_start AS blocking_query_start -- Start time of the blocking query + FROM pg_stat_activity AS blocked_activity + JOIN pg_stat_statements AS blocked_statements ON blocked_activity.query_id = blocked_statements.queryid + JOIN pg_locks blocked_locks ON blocked_activity.pid = blocked_locks.pid + JOIN pg_locks blocking_locks ON blocked_locks.locktype = blocking_locks.locktype + AND blocked_locks.database IS NOT DISTINCT 
FROM blocking_locks.database + AND blocked_locks.relation IS NOT DISTINCT FROM blocking_locks.relation + AND blocked_locks.page IS NOT DISTINCT FROM blocking_locks.page + AND blocked_locks.tuple IS NOT DISTINCT FROM blocking_locks.tuple + AND blocked_locks.transactionid IS NOT DISTINCT FROM blocking_locks.transactionid + AND blocked_locks.classid IS NOT DISTINCT FROM blocking_locks.classid + AND blocked_locks.objid IS NOT DISTINCT FROM blocking_locks.objid + AND blocked_locks.objsubid IS NOT DISTINCT FROM blocking_locks.objsubid + AND blocked_locks.pid <> blocking_locks.pid + JOIN pg_stat_activity AS blocking_activity ON blocking_locks.pid = blocking_activity.pid + JOIN pg_stat_statements AS blocking_statements ON blocking_activity.query_id = blocking_statements.queryid + WHERE NOT blocked_locks.granted + AND blocked_activity.datname IN (%s) -- List of database names + AND blocked_statements.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' -- Exclude EXPLAIN queries + AND blocking_statements.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' -- Exclude EXPLAIN queries + ORDER BY blocked_activity.query_start ASC -- Order by the start time of the blocked query in ascending order + LIMIT %d; -- Limit the number of results` - BlockingQueriesForV14AndAbove = `SELECT 'newrelic' as newrelic, - blocked_activity.pid AS blocked_pid, - LEFT(blocked_statements.query,4095) AS blocked_query, - blocked_statements.queryid AS blocked_query_id, - blocked_activity.query_start AS blocked_query_start, - blocked_activity.datname AS database_name, - blocking_activity.pid AS blocking_pid, - LEFT(blocking_statements.query,4095) AS blocking_query, - blocking_statements.queryid AS blocking_query_id, - blocking_activity.query_start AS blocking_query_start - FROM pg_stat_activity AS blocked_activity - JOIN pg_stat_statements as blocked_statements on blocked_activity.query_id = blocked_statements.queryid - JOIN pg_locks blocked_locks ON blocked_activity.pid = blocked_locks.pid - JOIN pg_locks blocking_locks ON 
blocked_locks.locktype = blocking_locks.locktype - AND blocked_locks.database IS NOT DISTINCT FROM blocking_locks.database - AND blocked_locks.relation IS NOT DISTINCT FROM blocking_locks.relation - AND blocked_locks.page IS NOT DISTINCT FROM blocking_locks.page - AND blocked_locks.tuple IS NOT DISTINCT FROM blocking_locks.tuple - AND blocked_locks.transactionid IS NOT DISTINCT FROM blocking_locks.transactionid - AND blocked_locks.classid IS NOT DISTINCT FROM blocking_locks.classid - AND blocked_locks.objid IS NOT DISTINCT FROM blocking_locks.objid - AND blocked_locks.objsubid IS NOT DISTINCT FROM blocking_locks.objsubid - AND blocked_locks.pid <> blocking_locks.pid - JOIN pg_stat_activity AS blocking_activity ON blocking_locks.pid = blocking_activity.pid - JOIN pg_stat_statements as blocking_statements on blocking_activity.query_id = blocking_statements.queryid - WHERE NOT blocked_locks.granted - AND blocked_activity.datname IN (%s) - AND blocked_statements.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' - AND blocking_statements.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' - ORDER BY blocked_activity.query_start ASC - LIMIT %d; -` - BlockingQueriesForV12AndV13 = `SELECT 'newrelic' as newrelic, - blocked_activity.pid AS blocked_pid, - LEFT(blocked_activity.query, 4095) AS blocked_query, - blocked_activity.query_start AS blocked_query_start, - blocked_activity.datname AS database_name, - blocking_activity.pid AS blocking_pid, - LEFT(blocking_activity.query, 4095) AS blocking_query, - blocking_activity.query_start AS blocking_query_start - FROM pg_stat_activity AS blocked_activity - JOIN pg_locks blocked_locks ON blocked_activity.pid = blocked_locks.pid - JOIN pg_locks blocking_locks ON blocked_locks.locktype = blocking_locks.locktype - AND blocked_locks.database IS NOT DISTINCT FROM blocking_locks.database - AND blocked_locks.relation IS NOT DISTINCT FROM blocking_locks.relation - AND blocked_locks.page IS NOT DISTINCT FROM blocking_locks.page - AND blocked_locks.tuple IS 
NOT DISTINCT FROM blocking_locks.tuple - AND blocked_locks.transactionid IS NOT DISTINCT FROM blocking_locks.transactionid - AND blocked_locks.classid IS NOT DISTINCT FROM blocking_locks.classid - AND blocked_locks.objid IS NOT DISTINCT FROM blocking_locks.objid - AND blocked_locks.objsubid IS NOT DISTINCT FROM blocking_locks.objsubid - AND blocked_locks.pid <> blocking_locks.pid - JOIN pg_stat_activity AS blocking_activity ON blocking_locks.pid = blocking_activity.pid - WHERE NOT blocked_locks.granted - AND blocked_activity.datname IN (%s) - AND blocked_activity.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' - AND blocking_activity.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' - ORDER BY blocked_activity.query_start ASC - LIMIT %d;` + // BlockingQueriesForV12AndV13 retrieves information about blocking and blocked queries for PostgreSQL versions 12 and 13 + BlockingQueriesForV12AndV13 = `SELECT 'newrelic' as newrelic, -- Common value to filter with like operator in slow query metrics + blocked_activity.pid AS blocked_pid, -- Process ID of the blocked query + LEFT(blocked_activity.query, 4095) AS blocked_query, -- Blocked query text truncated to 4095 characters + blocked_activity.query_start AS blocked_query_start, -- Start time of the blocked query + blocked_activity.datname AS database_name, -- Name of the database + blocking_activity.pid AS blocking_pid, -- Process ID of the blocking query + LEFT(blocking_activity.query, 4095) AS blocking_query, -- Blocking query text truncated to 4095 characters + blocking_activity.query_start AS blocking_query_start -- Start time of the blocking query + FROM pg_stat_activity AS blocked_activity + JOIN pg_locks blocked_locks ON blocked_activity.pid = blocked_locks.pid + JOIN pg_locks blocking_locks ON blocked_locks.locktype = blocking_locks.locktype + AND blocked_locks.database IS NOT DISTINCT FROM blocking_locks.database + AND blocked_locks.relation IS NOT DISTINCT FROM blocking_locks.relation + AND blocked_locks.page IS NOT DISTINCT 
FROM blocking_locks.page + AND blocked_locks.tuple IS NOT DISTINCT FROM blocking_locks.tuple + AND blocked_locks.transactionid IS NOT DISTINCT FROM blocking_locks.transactionid + AND blocked_locks.classid IS NOT DISTINCT FROM blocking_locks.classid + AND blocked_locks.objid IS NOT DISTINCT FROM blocking_locks.objid + AND blocked_locks.objsubid IS NOT DISTINCT FROM blocking_locks.objsubid + AND blocked_locks.pid <> blocking_locks.pid + JOIN pg_stat_activity AS blocking_activity ON blocking_locks.pid = blocking_activity.pid + WHERE NOT blocked_locks.granted + AND blocked_activity.datname IN (%s) -- List of database names + AND blocked_activity.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' -- Exclude EXPLAIN queries + AND blocking_activity.query NOT LIKE 'EXPLAIN (FORMAT JSON) %%' -- Exclude EXPLAIN queries + ORDER BY blocked_activity.query_start ASC -- Order by the start time of the blocked query in ascending order + LIMIT %d; -- Limit the number of results` - IndividualQuerySearchV13AndAbove = `SELECT 'newrelic' as newrelic, - LEFT(query,4095) as query, - queryid, - datname, - planid, - ROUND(((cpu_user_time + cpu_sys_time) / NULLIF(calls, 0))::numeric, 3) AS avg_cpu_time_ms, - total_exec_time / NULLIF(calls, 0) AS avg_exec_time_ms + // IndividualQuerySearchV13AndAbove retrieves individual query statistics for PostgreSQL version 13 and above + IndividualQuerySearchV13AndAbove = `SELECT 'newrelic' as newrelic, -- Common value to filter with like operator in slow query metrics + LEFT(query, 4095) as query, -- Query text truncated to 4095 characters + queryid, -- Unique identifier for the query + datname, -- Name of the database + planid, -- Plan identifier + ROUND(((cpu_user_time + cpu_sys_time) / NULLIF(calls, 0))::numeric, 3) AS avg_cpu_time_ms, -- Average CPU time in milliseconds + total_exec_time / NULLIF(calls, 0) AS avg_exec_time_ms -- Average execution time in milliseconds FROM - pg_stat_monitor - WHERE - queryid = %s - AND datname IN (%s) - AND (total_exec_time / 
NULLIF(calls, 0)) > %d - AND bucket_start_time >= NOW() - INTERVAL '60 seconds' + pg_stat_monitor + WHERE + queryid = %s -- Query identifier + AND datname IN (%s) -- List of database names + AND (total_exec_time / NULLIF(calls, 0)) > %d -- Minimum average execution time + AND bucket_start_time >= NOW() - INTERVAL '60 seconds' -- Time interval GROUP BY - query, queryid, datname, planid, cpu_user_time, cpu_sys_time, calls, total_exec_time + query, queryid, datname, planid, cpu_user_time, cpu_sys_time, calls, total_exec_time ORDER BY - avg_exec_time_ms DESC LIMIT %d;` + avg_exec_time_ms DESC -- Order by average execution time in descending order + LIMIT %d; -- Limit the number of results` - IndividualQuerySearchV12 = `SELECT 'newrelic' as newrelic, - LEFT(query,4095) as query, - queryid, - datname, - planid, - ROUND(((cpu_user_time + cpu_sys_time) / NULLIF(calls, 0))::numeric, 3) AS avg_cpu_time_ms, - total_time / NULLIF(calls, 0) AS avg_exec_time_ms + // IndividualQuerySearchV12 retrieves individual query statistics for PostgreSQL version 12 + IndividualQuerySearchV12 = `SELECT 'newrelic' as newrelic, -- Common value to filter with like operator in slow query metrics + LEFT(query, 4095) as query, -- Query text truncated to 4095 characters + queryid, -- Unique identifier for the query + datname, -- Name of the database + planid, -- Plan identifier + ROUND(((cpu_user_time + cpu_sys_time) / NULLIF(calls, 0))::numeric, 3) AS avg_cpu_time_ms, -- Average CPU time in milliseconds + total_time / NULLIF(calls, 0) AS avg_exec_time_ms -- Average execution time in milliseconds FROM - pg_stat_monitor - WHERE - queryid = %s - AND datname IN (%s) - AND (total_time / NULLIF(calls, 0)) > %d - AND bucket_start_time >= NOW() - INTERVAL '60 seconds' + pg_stat_monitor + WHERE + queryid = %s -- Query identifier + AND datname IN (%s) -- List of database names + AND (total_time / NULLIF(calls, 0)) > %d -- Minimum average execution time + AND bucket_start_time >= NOW() - INTERVAL '60 
seconds' -- Time interval GROUP BY - query, queryid, datname, planid, cpu_user_time, cpu_sys_time, calls, total_time + query, queryid, datname, planid, cpu_user_time, cpu_sys_time, calls, total_time ORDER BY - avg_exec_time_ms DESC LIMIT %d;` + avg_exec_time_ms DESC -- Order by average execution time in descending order + LIMIT %d; -- Limit the number of results` ) diff --git a/src/query-performance-monitoring/query_performance_main.go b/src/query-performance-monitoring/query_performance_main.go index 2783b6cd..64a1f847 100644 --- a/src/query-performance-monitoring/query_performance_main.go +++ b/src/query-performance-monitoring/query_performance_main.go @@ -4,6 +4,8 @@ package queryperformancemonitoring import ( "time" + global_variables "github.com/newrelic/nri-postgresql/src/query-performance-monitoring/global-variables" + "github.com/newrelic/infra-integrations-sdk/v3/integration" "github.com/newrelic/infra-integrations-sdk/v3/log" "github.com/newrelic/nri-postgresql/src/args" @@ -16,7 +18,6 @@ import ( func QueryPerformanceMain(args args.ArgumentList, pgIntegration *integration.Integration, databaseList collection.DatabaseList) { connectionInfo := performancedbconnection.DefaultConnectionInfo(&args) - databaseStringList := commonutils.GetDatabaseListInString(databaseList) if len(databaseList) == 0 { log.Debug("No databases found") return @@ -34,30 +35,30 @@ func QueryPerformanceMain(args args.ArgumentList, pgIntegration *integration.Int log.Debug("Error fetching version: ", versionErr) return } - + gv := global_variables.SetGlobalVariables(args, versionInt, commonutils.GetDatabaseListInString(databaseList)) start := time.Now() log.Debug("Starting PopulateSlowRunningMetrics at ", start) - slowRunningQueries := performancemetrics.PopulateSlowRunningMetrics(newConnection, pgIntegration, args, databaseStringList, versionInt) + slowRunningQueries := performancemetrics.PopulateSlowRunningMetrics(newConnection, pgIntegration, gv) 
log.Debug("PopulateSlowRunningMetrics completed in ", time.Since(start)) start = time.Now() log.Debug("Starting PopulateWaitEventMetrics at ", start) - _ = performancemetrics.PopulateWaitEventMetrics(newConnection, pgIntegration, args, databaseStringList, versionInt) + _ = performancemetrics.PopulateWaitEventMetrics(newConnection, pgIntegration, gv) log.Debug("PopulateWaitEventMetrics completed in ", time.Since(start)) start = time.Now() log.Debug("Starting PopulateBlockingMetrics at ", start) - _ = performancemetrics.PopulateBlockingMetrics(newConnection, pgIntegration, args, databaseStringList, versionInt) + _ = performancemetrics.PopulateBlockingMetrics(newConnection, pgIntegration, gv) log.Debug("PopulateBlockingMetrics completed in ", time.Since(start)) start = time.Now() log.Debug("Starting PopulateIndividualQueryMetrics at ", start) - individualQueries := performancemetrics.PopulateIndividualQueryMetrics(newConnection, slowRunningQueries, pgIntegration, args, databaseStringList, versionInt) + individualQueries := performancemetrics.PopulateIndividualQueryMetrics(newConnection, slowRunningQueries, pgIntegration, gv) log.Debug("PopulateIndividualQueryMetrics completed in ", time.Since(start)) start = time.Now() log.Debug("Starting PopulateExecutionPlanMetrics at ", start) - performancemetrics.PopulateExecutionPlanMetrics(individualQueries, pgIntegration, args) + performancemetrics.PopulateExecutionPlanMetrics(individualQueries, pgIntegration, gv) log.Debug("PopulateExecutionPlanMetrics completed in ", time.Since(start)) log.Debug("Query analysis completed.")