
Commit c68de8e

Merge pull request #11 from form3tech-oss/alessio-upstream-sync
chore: sync with upstream v0.15.0
2 parents: 30fdb25 + 738b7fb

18 files changed: +372 -115 lines

.github/workflows/ci.yaml

+2 -2

@@ -16,7 +16,7 @@ jobs:
       - name: Checkout Code
         uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # v3.5.0
       - name: Setup Golang
-        uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0
+        uses: actions/setup-go@v5
         with:
           go-version-file: go.mod
       - name: Test
@@ -28,7 +28,7 @@ jobs:
       - name: Checkout Code
         uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # v3.5.0
       - name: Setup Golang
-        uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0
+        uses: actions/setup-go@v5
         with:
           go-version-file: go.mod
       - name: Build

.github/workflows/golangci-lint.yml

+2 -2

@@ -18,11 +18,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
       - name: install Go
         uses: actions/setup-go@6edd4406fa81c3da01a34fa6f6343087c207a568 # v3.5.0
         with:
-          go-version: 1.20.x
+          go-version: 1.21.x
       - name: Install snmp_exporter/generator dependencies
         run: sudo apt-get update && sudo apt-get -y install libsnmp-dev
         if: github.repository == 'prometheus/snmp_exporter'

.promu.yml

+1 -2

@@ -1,13 +1,12 @@
 go:
     # This must match .circle/config.yml.
-    version: 1.20
+    version: 1.21
 repository:
     path: github.com/form3tech-oss/postgres_exporter
 build:
     binaries:
         - name: postgres_exporter
           path: ./cmd/postgres_exporter
-    flags: -a -tags 'netgo static_build'
     ldflags: |
         -X github.com/prometheus/common/version.Version={{.Version}}
         -X github.com/prometheus/common/version.Revision={{.Revision}}

CHANGELOG.md

+7

@@ -1,3 +1,10 @@
+## 0.15.0 / 2023-10-27
+
+* [ENHANCEMENT] Add 1kB and 2kB units #915
+* [BUGFIX] Add error log when probe collector creation fails #918
+* [BUGFIX] Fix test build failures on 32-bit arch #919
+* [BUGFIX] Adjust collector to use separate connection per scrape #936
+
 ## 0.14.0 / 2023-09-11
 
 * [CHANGE] Add `state` label to pg_process_idle_seconds #862

README.md

+1 -1

@@ -7,7 +7,7 @@
 
 Prometheus exporter for PostgreSQL server metrics.
 
-CI Tested PostgreSQL versions: `10`, `11`, `12`, `13`, `14`, `15`
+CI Tested PostgreSQL versions: `11`, `12`, `13`, `14`, `15`, `16`
 
 ## Quick Start
 This package is available for Docker:

VERSION

+1

@@ -0,0 +1 @@
+0.15.0

cmd/postgres_exporter/main.go

+1 -1

@@ -93,7 +93,7 @@ func main() {
 	}
 
 	excludedDatabases := strings.Split(*excludeDatabases, ",")
-	logger.Log("msg", "Excluded databases", "databases", fmt.Sprintf("%v", excludedDatabases))
+	level.Info(logger).Log("msg", "Excluded databases", "databases", fmt.Sprintf("%v", excludedDatabases))
 
 	if *queriesPath != "" {
 		level.Warn(logger).Log("msg", "The extended queries.yaml config is DEPRECATED", "file", *queriesPath)
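
The switch from a bare logger.Log call to level.Info(logger).Log attaches an explicit level key, so the line participates in level-based filtering (for example the exporter's --log.level flag). A minimal, illustrative sketch using go-kit/log directly — the messages and values here are made up:

package main

import (
	"os"

	"github.com/go-kit/log"
	"github.com/go-kit/log/level"
)

func main() {
	logger := log.NewLogfmtLogger(os.Stderr)

	// Bare call: no level key is attached, so a level filter cannot classify this line.
	logger.Log("msg", "Excluded databases", "databases", "[db1 db2]")

	// Leveled call, as in the updated main.go: emits level=info alongside the message.
	level.Info(logger).Log("msg", "Excluded databases", "databases", "[db1 db2]")

	// With a warn-level filter, the leveled info line is dropped entirely.
	filtered := level.NewFilter(logger, level.AllowWarn())
	level.Info(filtered).Log("msg", "suppressed when only warn and above are allowed")
}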

cmd/postgres_exporter/pg_setting_test.go

+11 -11

@@ -40,7 +40,7 @@ var fixtures = []fixture{
 			unit: "seconds",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_seconds_fixture_metric_seconds", help: "Server Parameter: seconds_fixture_metric [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_seconds_fixture_metric_seconds", help: "Server Parameter: seconds_fixture_metric [Units converted to seconds.]", constLabels: {}, variableLabels: {}}`,
 		v: 5,
 	},
 	{
@@ -56,7 +56,7 @@ var fixtures = []fixture{
 			unit: "seconds",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_milliseconds_fixture_metric_seconds", help: "Server Parameter: milliseconds_fixture_metric [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_milliseconds_fixture_metric_seconds", help: "Server Parameter: milliseconds_fixture_metric [Units converted to seconds.]", constLabels: {}, variableLabels: {}}`,
 		v: 5,
 	},
 	{
@@ -72,7 +72,7 @@ var fixtures = []fixture{
 			unit: "bytes",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_eight_kb_fixture_metric_bytes", help: "Server Parameter: eight_kb_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_eight_kb_fixture_metric_bytes", help: "Server Parameter: eight_kb_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: {}}`,
 		v: 139264,
 	},
 	{
@@ -88,7 +88,7 @@ var fixtures = []fixture{
 			unit: "bytes",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_16_kb_real_fixture_metric_bytes", help: "Server Parameter: 16_kb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_16_kb_real_fixture_metric_bytes", help: "Server Parameter: 16_kb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: {}}`,
 		v: 49152,
 	},
 	{
@@ -104,7 +104,7 @@ var fixtures = []fixture{
 			unit: "bytes",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_16_mb_real_fixture_metric_bytes", help: "Server Parameter: 16_mb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_16_mb_real_fixture_metric_bytes", help: "Server Parameter: 16_mb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: {}}`,
 		v: 5.0331648e+07,
 	},
 	{
@@ -120,7 +120,7 @@ var fixtures = []fixture{
 			unit: "bytes",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_32_mb_real_fixture_metric_bytes", help: "Server Parameter: 32_mb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_32_mb_real_fixture_metric_bytes", help: "Server Parameter: 32_mb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: {}}`,
 		v: 1.00663296e+08,
 	},
 	{
@@ -136,7 +136,7 @@ var fixtures = []fixture{
 			unit: "bytes",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_64_mb_real_fixture_metric_bytes", help: "Server Parameter: 64_mb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_64_mb_real_fixture_metric_bytes", help: "Server Parameter: 64_mb_real_fixture_metric [Units converted to bytes.]", constLabels: {}, variableLabels: {}}`,
 		v: 2.01326592e+08,
 	},
 	{
@@ -152,7 +152,7 @@ var fixtures = []fixture{
 			unit: "",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_bool_on_fixture_metric", help: "Server Parameter: bool_on_fixture_metric", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_bool_on_fixture_metric", help: "Server Parameter: bool_on_fixture_metric", constLabels: {}, variableLabels: {}}`,
 		v: 1,
 	},
 	{
@@ -168,7 +168,7 @@ var fixtures = []fixture{
 			unit: "",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_bool_off_fixture_metric", help: "Server Parameter: bool_off_fixture_metric", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_bool_off_fixture_metric", help: "Server Parameter: bool_off_fixture_metric", constLabels: {}, variableLabels: {}}`,
 		v: 0,
 	},
 	{
@@ -184,7 +184,7 @@ var fixtures = []fixture{
 			unit: "seconds",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_special_minus_one_value_seconds", help: "Server Parameter: special_minus_one_value [Units converted to seconds.]", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_special_minus_one_value_seconds", help: "Server Parameter: special_minus_one_value [Units converted to seconds.]", constLabels: {}, variableLabels: {}}`,
 		v: -1,
 	},
 	{
@@ -200,7 +200,7 @@ var fixtures = []fixture{
 			unit: "",
 			err:  "",
 		},
-		d: `Desc{fqName: "pg_settings_rds_rds_superuser_reserved_connections", help: "Server Parameter: rds.rds_superuser_reserved_connections", constLabels: {}, variableLabels: []}`,
+		d: `Desc{fqName: "pg_settings_rds_rds_superuser_reserved_connections", help: "Server Parameter: rds.rds_superuser_reserved_connections", constLabels: {}, variableLabels: {}}`,
 		v: 2,
 	},
 	{
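
All eleven changes in this file are the same fixture update: the expected Desc string now renders variable labels as {} rather than [], matching how newer prometheus/client_golang releases format Desc.String(). A quick, hypothetical way to check the rendering of whichever client_golang version is vendored (the metric name below is made up to mirror the fixtures):

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	d := prometheus.NewDesc(
		"pg_settings_example_metric",
		"Server Parameter: example_metric",
		nil, nil,
	)
	// Newer client_golang releases print `variableLabels: {}` here;
	// older releases printed `variableLabels: []`, which is what these
	// fixtures previously asserted.
	fmt.Println(d)
}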

collector/pg_database.go

+26 -5

@@ -53,12 +53,21 @@ var (
 		"Disk space used by the database",
 		[]string{"datname"}, nil,
 	)
+	pgDatabaseConnectionLimitsDesc = prometheus.NewDesc(
+		prometheus.BuildFQName(
+			namespace,
+			databaseSubsystem,
+			"connection_limit",
+		),
+		"Connection limit set for the database",
+		[]string{"datname"}, nil,
+	)
 
-	pgDatabaseQuery     = "SELECT pg_database.datname FROM pg_database;"
+	pgDatabaseQuery     = "SELECT pg_database.datname, pg_database.datconnlimit FROM pg_database;"
 	pgDatabaseSizeQuery = "SELECT pg_database_size($1)"
 )
 
-// Update implements Collector and exposes database size.
+// Update implements Collector and exposes database size and connection limits.
 // It is called by the Prometheus registry when collecting metrics.
 // The list of databases is retrieved from pg_database and filtered
 // by the excludeDatabase config parameter. The tradeoff here is that
@@ -81,21 +90,32 @@ func (c PGDatabaseCollector) Update(ctx context.Context, instance *instance, ch
 
 	for rows.Next() {
 		var datname sql.NullString
-		if err := rows.Scan(&datname); err != nil {
+		var connLimit sql.NullInt64
+		if err := rows.Scan(&datname, &connLimit); err != nil {
 			return err
 		}
 
 		if !datname.Valid {
 			continue
 		}
+		database := datname.String
 		// Ignore excluded databases
 		// Filtering is done here instead of in the query to avoid
 		// a complicated NOT IN query with a variable number of parameters
-		if sliceContains(c.excludedDatabases, datname.String) {
+		if sliceContains(c.excludedDatabases, database) {
 			continue
 		}
 
-		databases = append(databases, datname.String)
+		databases = append(databases, database)
+
+		connLimitMetric := 0.0
+		if connLimit.Valid {
+			connLimitMetric = float64(connLimit.Int64)
+		}
+		ch <- prometheus.MustNewConstMetric(
+			pgDatabaseConnectionLimitsDesc,
+			prometheus.GaugeValue, connLimitMetric, database,
+		)
 	}
 
 	// Query the size of the databases
@@ -114,6 +134,7 @@ func (c PGDatabaseCollector) Update(ctx context.Context, instance *instance, ch
 			pgDatabaseSizeDesc,
 			prometheus.GaugeValue, sizeMetric, datname,
 		)
+
 	}
 	return rows.Err()
 }
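
Net effect of these changes: each scrape now emits one extra gauge per non-excluded database, exposed as pg_database_connection_limit{datname="..."} assuming the package's namespace and databaseSubsystem constants resolve to "pg" and "database" as in the other metrics here. The value is pg_database.datconnlimit passed through unchanged, so PostgreSQL's -1 ("no limit") is exported as -1 and a NULL limit falls back to 0. A self-contained sketch of that emission path, with the fully-qualified name rebuilt by hand:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
)

func main() {
	// Hand-built equivalent of pgDatabaseConnectionLimitsDesc above.
	desc := prometheus.NewDesc(
		prometheus.BuildFQName("pg", "database", "connection_limit"),
		"Connection limit set for the database",
		[]string{"datname"}, nil,
	)

	// Mirrors Update(): one const gauge per database row, with datconnlimit
	// carried through as the value (-1 meaning "no limit" in PostgreSQL).
	m := prometheus.MustNewConstMetric(desc, prometheus.GaugeValue, -1, "postgres")

	var out dto.Metric
	if err := m.Write(&out); err != nil {
		panic(err)
	}
	fmt.Printf("datname=%q value=%v\n", out.GetLabel()[0].GetValue(), out.GetGauge().GetValue())
}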

collector/pg_database_test.go

+6 -4

@@ -31,8 +31,8 @@ func TestPGDatabaseCollector(t *testing.T) {
 
 	inst := &instance{db: db}
 
-	mock.ExpectQuery(sanitizeQuery(pgDatabaseQuery)).WillReturnRows(sqlmock.NewRows([]string{"datname"}).
-		AddRow("postgres"))
+	mock.ExpectQuery(sanitizeQuery(pgDatabaseQuery)).WillReturnRows(sqlmock.NewRows([]string{"datname", "datconnlimit"}).
+		AddRow("postgres", 15))
 
 	mock.ExpectQuery(sanitizeQuery(pgDatabaseSizeQuery)).WithArgs("postgres").WillReturnRows(sqlmock.NewRows([]string{"pg_database_size"}).
 		AddRow(1024))
@@ -47,6 +47,7 @@ func TestPGDatabaseCollector(t *testing.T) {
 	}()
 
 	expected := []MetricResult{
+		{labels: labelMap{"datname": "postgres"}, value: 15, metricType: dto.MetricType_GAUGE},
 		{labels: labelMap{"datname": "postgres"}, value: 1024, metricType: dto.MetricType_GAUGE},
 	}
 	convey.Convey("Metrics comparison", t, func() {
@@ -71,8 +72,8 @@ func TestPGDatabaseCollectorNullMetric(t *testing.T) {
 
 	inst := &instance{db: db}
 
-	mock.ExpectQuery(sanitizeQuery(pgDatabaseQuery)).WillReturnRows(sqlmock.NewRows([]string{"datname"}).
-		AddRow("postgres"))
+	mock.ExpectQuery(sanitizeQuery(pgDatabaseQuery)).WillReturnRows(sqlmock.NewRows([]string{"datname", "datconnlimit"}).
+		AddRow("postgres", nil))
 
 	mock.ExpectQuery(sanitizeQuery(pgDatabaseSizeQuery)).WithArgs("postgres").WillReturnRows(sqlmock.NewRows([]string{"pg_database_size"}).
 		AddRow(nil))
@@ -88,6 +89,7 @@ func TestPGDatabaseCollectorNullMetric(t *testing.T) {
 
 	expected := []MetricResult{
 		{labels: labelMap{"datname": "postgres"}, value: 0, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"datname": "postgres"}, value: 0, metricType: dto.MetricType_GAUGE},
 	}
 	convey.Convey("Metrics comparison", t, func() {
 		for _, expect := range expected {
