Commit c382d94

Add validation to prevent setting pgbackrest log-path via global.
Add validation tests for log path and additional volumes.
1 parent 7e288b9 commit c382d94

File tree: 3 files changed, 182 additions, 0 deletions

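For orientation, a sketch of the spec shapes this change affects (field names and values taken from the new tests; illustrative only). Setting log-path through the free-form global options is now rejected at admission; log destinations are instead declared through the structured log.path fields, pointing into a declared additional volume:

    # Illustrative only: rejected by the new rule
    spec:
      backups:
        pgbackrest:
          global:
            log-path: /anything

    # Illustrative only: supported shape, log.path under /volumes/<additional volume name>
    spec:
      backups:
        pgbackrest:
          repoHost:
            log:
              path: /volumes/logging/logs
            volumes:
              additional:
              - name: logging
                claimName: required-1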

config/crd/bases/postgres-operator.crunchydata.com_postgresclusters.yaml

Lines changed: 3 additions & 0 deletions
@@ -4624,6 +4624,9 @@ spec:
           volume
         rule: '!has(self.jobs) || !has(self.jobs.log) || !has(self.jobs.log.path)
           || self.jobs.volumes.additional.exists(x, self.jobs.log.path.startsWith("/volumes/"+x.name))'
+      - message: pgbackrest log-path must be set via the various log.path
+          fields in the spec
+        rule: '!has(self.global) || !(''log-path'' in self.global)'
       snapshots:
         description: VolumeSnapshot configuration
         properties:
Lines changed: 178 additions & 0 deletions
// Copyright 2021 - 2025 Crunchy Data Solutions, Inc.
//
// SPDX-License-Identifier: Apache-2.0

package validation

import (
	"context"
	"testing"

	"gotest.tools/v3/assert"
	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"sigs.k8s.io/controller-runtime/pkg/client"

	"github.com/crunchydata/postgres-operator/internal/testing/require"
	v1 "github.com/crunchydata/postgres-operator/pkg/apis/postgres-operator.crunchydata.com/v1"
)

func TestV1PGBackRestLogging(t *testing.T) {
	ctx := context.Background()
	cc := require.Kubernetes(t)
	t.Parallel()

	namespace := require.Namespace(t, cc)

	base := v1.NewPostgresCluster()
	base.Namespace = namespace.Name
	base.Name = "pgbackrest-logging"
	// required fields
	require.UnmarshalInto(t, &base.Spec, `{
		postgresVersion: 16,
		instances: [{
			dataVolumeClaimSpec: {
				accessModes: [ReadWriteOnce],
				resources: { requests: { storage: 1Mi } },
			},
		}],
		backups: {
			pgbackrest: {
				repos: [{
					name: repo1,
				}]
			},
		},
	}`)

	assert.NilError(t, cc.Create(ctx, base.DeepCopy(), client.DryRunAll),
		"expected this base to be valid")

	t.Run("Cannot set log-path via global", func(t *testing.T) {
		tmp := base.DeepCopy()

		require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
			global: {
				log-path: "/anything"
			}
		}`)
		err := cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll)
		assert.Assert(t, apierrors.IsInvalid(err))
		assert.ErrorContains(t, err, "pgbackrest log-path must be set via the various log.path fields in the spec")
	})

	t.Run("Cannot set pgbackrest sidecar's log.path without correct subdir", func(t *testing.T) {
		tmp := base.DeepCopy()

		require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
			log: {
				path: "/something/wrong"
			}
		}`)

		err := cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll)
		assert.Assert(t, apierrors.IsInvalid(err))
		assert.ErrorContains(t, err, "pgbackrest sidecar log path is restricted to an existing additional volume")

		require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
			log: {
				path: "/volumes/this/should/pass"
			}
		}`)

		assert.NilError(t, cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll), "expected log.path to be valid")
	})

	t.Run("Cannot set logging on volumes that don't exist", func(t *testing.T) {
		t.Run("Repo Host", func(t *testing.T) {
			tmp := base.DeepCopy()

			require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
				repoHost: {
					log: {
						path: "/volumes/wrong"
					},
					volumes: {
						additional: [
						{
							name: logging,
							claimName: required-1
						}]
					}
				}
			}`)

			err := cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll)
			assert.Assert(t, apierrors.IsInvalid(err))
			assert.ErrorContains(t, err, "repo host log path is restricted to an existing additional volume")
		})

		t.Run("Backup Jobs", func(t *testing.T) {
			tmp := base.DeepCopy()

			require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
				jobs: {
					log: {
						path: "/volumes/wrong"
					},
					volumes: {
						additional: [
						{
							name: logging,
							claimName: required-1
						}]
					}
				}
			}`)

			err := cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll)
			assert.Assert(t, apierrors.IsInvalid(err))
			assert.ErrorContains(t, err, "backup jobs log path is restricted to an existing additional volume")
		})
	})

	t.Run("Can set logging on volumes that do exist", func(t *testing.T) {
		t.Run("Repo Host", func(t *testing.T) {
			tmp := base.DeepCopy()

			require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
				repoHost: {
					log: {
						path: "/volumes/logging/logs"
					},
					volumes: {
						additional: [
						{
							name: logging,
							claimName: required-1
						}]
					}
				}
			}`)

			assert.NilError(t, cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll),
				"expected this configuration to be valid")
		})

		t.Run("Backup Jobs", func(t *testing.T) {
			tmp := base.DeepCopy()

			require.UnmarshalInto(t, &tmp.Spec.Backups.PGBackRest, `{
				jobs: {
					log: {
						path: "/volumes/logging/logs"
					},
					volumes: {
						additional: [
						{
							name: logging,
							claimName: required-1
						}]
					}
				}
			}`)

			assert.NilError(t, cc.Create(ctx, tmp.DeepCopy(), client.DryRunAll),
				"expected this configuration to be valid")
		})
	})
}

pkg/apis/postgres-operator.crunchydata.com/v1/pgbackrest_types.go

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@ import (
 // +kubebuilder:validation:XValidation:rule=`!has(self.log) || !has(self.log.path) || self.log.path.startsWith("/volumes/")`,message=`pgbackrest sidecar log path is restricted to an existing additional volume`
 // +kubebuilder:validation:XValidation:rule=`!has(self.repoHost) || !has(self.repoHost.log) || !has(self.repoHost.log.path) || self.repoHost.volumes.additional.exists(x, self.repoHost.log.path.startsWith("/volumes/"+x.name))`,message=`repo host log path is restricted to an existing additional volume`
 // +kubebuilder:validation:XValidation:rule=`!has(self.jobs) || !has(self.jobs.log) || !has(self.jobs.log.path) || self.jobs.volumes.additional.exists(x, self.jobs.log.path.startsWith("/volumes/"+x.name))`,message=`backup jobs log path is restricted to an existing additional volume`
+// +kubebuilder:validation:XValidation:rule=`!has(self.global) || !('log-path' in self.global)`,message=`pgbackrest log-path must be set via the various log.path fields in the spec`
 type PGBackRestArchive struct {
 	v1beta1.PGBackRestArchive `json:",inline"`
 }
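To make the rules above concrete: the repo host and backup jobs rules use exists() over volumes.additional, so a configured log.path must begin with "/volumes/" followed by the name of one of the declared additional volumes, while the sidecar rule only requires the "/volumes/" prefix. A sketch for the jobs case, using the same field names the tests exercise (values illustrative only):

    spec:
      backups:
        pgbackrest:
          jobs:
            log:
              path: /volumes/logging/logs   # prefix matches the additional volume named "logging"
            volumes:
              additional:
              - name: logging
                claimName: required-1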
