Preflight docs and template subcommands (#1847)

* Added docs and template subcommands with test files

* uses helm templating preflight yaml files

* merge doc requirements for multiple inputs

* Helm aware rendering and markdown output

* v1beta3 yaml structure better mirrors beta2

* Update sample-preflight-templated.yaml

* Added docs and template subcommands with test files

* uses helm templating preflight yaml files

* merge doc requirements for multiple inputs

* Helm aware rendering and markdown output

* v1beta3 yaml structure better mirrors beta2

* Update sample-preflight-templated.yaml

* Added/updated documentation on subcommands

* Update docs.go

* commit to trigger actions
This commit is contained in:
Noah Campbell
2025-09-16 14:12:09 -05:00
committed by GitHub
parent 8027e273e4
commit acc1aad843
12 changed files with 1132 additions and 3 deletions

1
.gitignore vendored
View File

@@ -48,3 +48,4 @@ sbom/
# Ignore generated support bundles
*.tar.gz
!testdata/supportbundle/*.tar.gz
preflight

385
cmd/preflight/cli/docs.go Normal file
View File

@@ -0,0 +1,385 @@
package cli
import (
"bytes"
"fmt"
"os"
"strings"
"text/template"
"github.com/Masterminds/sprig/v3"
"github.com/pkg/errors"
"github.com/replicatedhq/troubleshoot/pkg/preflight"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"gopkg.in/yaml.v2"
"helm.sh/helm/v3/pkg/strvals"
)
// DocsCmd builds the "docs" subcommand, which renders one or more preflight
// specs with the supplied values and emits the docString documentation (as
// Markdown) for the requirements that end up enabled.
func DocsCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "docs [preflight-file...]",
		Short: "Extract and display documentation from a preflight spec",
		Long: `Extract all docString fields from enabled requirements in one or more preflight YAML files.
This command processes templated preflight specs, evaluates conditionals, and outputs
only the documentation for requirements that would be included based on the provided values.
Examples:
# Extract docs with default values
preflight docs ml-platform-preflight.yaml
# Extract docs from multiple specs with values from files
preflight docs spec1.yaml spec2.yaml --values base-values.yaml --values prod-values.yaml
# Extract docs with inline values
preflight docs ml-platform-preflight.yaml --set jupyter.enabled=true --set monitoring.enabled=false
# Extract docs and save to file
preflight docs ml-platform-preflight.yaml --output requirements.md`,
		Args: cobra.MinimumNArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			v := viper.GetViper()
			return extractDocs(
				args,                        // template files, in CLI order
				v.GetStringSlice("values"),  // --values files, merged left-to-right
				v.GetStringSlice("set"),     // --set overrides, applied last
				v.GetString("output"),       // "" means stdout
			)
		},
	}

	cmd.Flags().StringSlice("values", []string{}, "Path to YAML files containing template values (can be used multiple times)")
	cmd.Flags().StringSlice("set", []string{}, "Set template values on the command line (can be used multiple times)")
	cmd.Flags().StringP("output", "o", "", "Output file (default: stdout)")

	// Expose the flags through the global viper instance; RunE reads them
	// back from there. NOTE(review): these keys are global — any other
	// command binding "values"/"set"/"output" would collide; confirm this
	// is intended.
	viper.BindPFlag("values", cmd.Flags().Lookup("values"))
	viper.BindPFlag("set", cmd.Flags().Lookup("set"))
	viper.BindPFlag("output", cmd.Flags().Lookup("output"))

	return cmd
}
// PreflightDoc is the minimal YAML shape the docs command needs. It decodes
// both the v1beta3 layout (spec.analyzers, each analyzer optionally carrying
// a docString) and the legacy draft layout (top-level requirements).
type PreflightDoc struct {
	APIVersion string                 `yaml:"apiVersion"`
	Kind       string                 `yaml:"kind"`
	Metadata   map[string]interface{} `yaml:"metadata"`
	// Spec holds the v1beta3 analyzer list; entries stay as generic maps
	// because only the "docString" key is inspected here.
	Spec struct {
		Analyzers []map[string]interface{} `yaml:"analyzers"`
	} `yaml:"spec"`
	// Legacy (pre-beta3 drafts) location for documented requirements.
	Requirements []Requirement `yaml:"requirements"`
}
// Requirement is a legacy preflight requirement entry: a named block whose
// docString is surfaced by the docs command. Checks are carried as generic
// maps and are not interpreted here.
type Requirement struct {
	Name      string                   `yaml:"name"`
	DocString string                   `yaml:"docString"`
	Checks    []map[string]interface{} `yaml:"checks,omitempty"`
}
// extractDocs renders each template file with the merged values and writes
// the concatenated docString markdown to outputFile, or stdout when empty.
//
// Values are deep-merged left-to-right across valuesFiles, normalized to
// string-keyed maps (yaml.v2 produces interface{}-keyed nested maps), then
// overridden by Helm-style --set expressions.
func extractDocs(templateFiles []string, valuesFiles []string, setValues []string, outputFile string) error {
	// Prepare the values map (merge all files, then apply sets).
	values := make(map[string]interface{})
	for _, valuesFile := range valuesFiles {
		fileValues, err := loadValuesFile(valuesFile)
		if err != nil {
			return errors.Wrapf(err, "failed to load values file %s", valuesFile)
		}
		values = mergeMaps(values, fileValues)
	}
	// Normalize maps so Helm strvals can merge --set overrides into them.
	values = normalizeStringMaps(values)
	for _, setValue := range setValues {
		if err := applySetValue(values, setValue); err != nil {
			return errors.Wrapf(err, "failed to apply set value: %s", setValue)
		}
	}

	var combinedDocs strings.Builder
	for _, templateFile := range templateFiles {
		raw, err := os.ReadFile(templateFile)
		if err != nil {
			return errors.Wrapf(err, "failed to read template file %s", templateFile)
		}
		content := string(raw)

		var rendered string
		if shouldUseHelmEngine(content) {
			rendered, err = preflight.RenderWithHelmTemplate(content, values)
			if err != nil {
				// Fall back to the legacy renderer, but keep the Helm error
				// so a double failure reports both causes instead of
				// silently dropping the original one.
				helmErr := err
				rendered, err = renderTemplate(content, legacyContext(values))
				if err != nil {
					return errors.Wrapf(err, "failed to render template (helm error: %v)", helmErr)
				}
			}
		} else {
			rendered, err = renderTemplate(content, legacyContext(values))
			if err != nil {
				return errors.Wrap(err, "failed to render template")
			}
		}

		docs, err := extractDocStrings(rendered)
		if err != nil {
			return errors.Wrap(err, "failed to extract documentation")
		}
		if strings.TrimSpace(docs) == "" {
			continue
		}
		// Separate docs from different input files with a blank line.
		if combinedDocs.Len() > 0 {
			combinedDocs.WriteString("\n\n")
		}
		combinedDocs.WriteString(docs)
	}

	if outputFile == "" {
		fmt.Print(combinedDocs.String())
		return nil
	}
	if err := os.WriteFile(outputFile, []byte(combinedDocs.String()), 0644); err != nil {
		return errors.Wrapf(err, "failed to write output file %s", outputFile)
	}
	fmt.Printf("Documentation extracted successfully to %s\n", outputFile)
	return nil
}
// shouldUseHelmEngine reports whether the template references Helm-style
// values (".Values"), in which case the Helm engine is preferred over the
// legacy Go text/template renderer.
func shouldUseHelmEngine(content string) bool {
	return strings.Contains(content, ".Values")
}
// legacyContext exposes values both at the template root and under "Values",
// so legacy Go templates can write either {{ .foo }} or {{ .Values.foo }}.
func legacyContext(values map[string]interface{}) map[string]interface{} {
	ctx := make(map[string]interface{}, len(values)+1)
	for key, val := range values {
		ctx[key] = val
	}
	// Set last so a user-supplied "Values" key cannot shadow the map itself.
	ctx["Values"] = values
	return ctx
}
// normalizeStringMaps coerces v into a string-keyed map, recursively
// converting yaml.v2's map[interface{}]interface{} form. Nil or non-map
// input yields an empty map (normalizeMap may return non-map types, so the
// assertion below is guarded rather than unconditional).
func normalizeStringMaps(v interface{}) map[string]interface{} {
	if v == nil {
		return map[string]interface{}{}
	}
	if m, ok := normalizeMap(v).(map[string]interface{}); ok {
		return m
	}
	return map[string]interface{}{}
}

// normalizeMap walks maps and slices, stringifying interface{} map keys via
// fmt and leaving scalar values untouched.
func normalizeMap(v interface{}) interface{} {
	switch t := v.(type) {
	case map[string]interface{}:
		out := make(map[string]interface{}, len(t))
		for key, val := range t {
			out[key] = normalizeMap(val)
		}
		return out
	case map[interface{}]interface{}:
		out := make(map[string]interface{}, len(t))
		for key, val := range t {
			out[fmt.Sprintf("%v", key)] = normalizeMap(val)
		}
		return out
	case []interface{}:
		out := make([]interface{}, len(t))
		for i := range t {
			out[i] = normalizeMap(t[i])
		}
		return out
	}
	return v
}
// extractDocStrings parses rendered preflight YAML and returns the
// concatenated Markdown for every docString found. v1beta3 analyzers
// (spec.analyzers[*].docString) take precedence; the legacy top-level
// requirements list is only consulted when no analyzers are present.
func extractDocStrings(yamlContent string) (string, error) {
	var doc PreflightDoc
	if err := yaml.Unmarshal([]byte(yamlContent), &doc); err != nil {
		return "", errors.Wrap(err, "failed to parse YAML")
	}

	var out strings.Builder

	// Prefer beta3 analyzers' docStrings.
	if len(doc.Spec.Analyzers) > 0 {
		wrote := false
		for _, analyzer := range doc.Spec.Analyzers {
			raw, present := analyzer["docString"]
			if !present {
				continue
			}
			text, _ := raw.(string) // non-string values become "" and are skipped
			text = strings.TrimSpace(text)
			if text == "" {
				continue
			}
			if wrote {
				out.WriteString("\n\n")
			}
			wrote = true
			writeMarkdownSection(&out, text, "")
		}
		return out.String(), nil
	}

	// Fallback: legacy requirements carrying a docString.
	wrote := false
	for _, req := range doc.Requirements {
		if strings.TrimSpace(req.DocString) == "" {
			continue
		}
		if wrote {
			out.WriteString("\n\n")
		}
		wrote = true
		writeMarkdownSection(&out, req.DocString, req.Name)
	}
	return out.String(), nil
}
// writeMarkdownSection prints a heading from Title: or name, then the rest
func writeMarkdownSection(b *strings.Builder, docString string, fallbackName string) {
lines := strings.Split(docString, "\n")
title := strings.TrimSpace(fallbackName)
contentStart := 0
for i, line := range lines {
trim := strings.TrimSpace(line)
if strings.HasPrefix(trim, "Title:") {
parts := strings.SplitN(trim, ":", 2)
if len(parts) == 2 {
t := strings.TrimSpace(parts[1])
if t != "" {
title = t
}
}
contentStart = i + 1
break
}
}
if title != "" {
b.WriteString("### ")
b.WriteString(title)
b.WriteString("\n\n")
}
remaining := strings.Join(lines[contentStart:], "\n")
remaining = strings.TrimSpace(remaining)
if remaining != "" {
b.WriteString(remaining)
b.WriteString("\n")
}
}
// loadValuesFile reads filename and unmarshals it as a YAML mapping of
// template values. Read errors are returned as-is; parse errors are wrapped.
func loadValuesFile(filename string) (map[string]interface{}, error) {
	raw, err := os.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	var values map[string]interface{}
	if err := yaml.Unmarshal(raw, &values); err != nil {
		return nil, errors.Wrap(err, "failed to parse values file as YAML")
	}
	return values, nil
}
// applySetValue merges one Helm-style --set expression into values. An
// optional leading "Values." on the key is stripped first, so both
// --set foo.bar=x and --set Values.foo.bar=x behave identically.
func applySetValue(values map[string]interface{}, setValue string) error {
	if key, val, found := strings.Cut(setValue, "="); found && key != "" {
		if trimmed := strings.TrimPrefix(key, "Values."); trimmed != key {
			setValue = trimmed + "=" + val
		}
	}
	if err := strvals.ParseInto(setValue, values); err != nil {
		return fmt.Errorf("parsing --set: %w", err)
	}
	return nil
}
// setNestedValue stores value under the path given by keys, creating
// intermediate maps as needed; any non-map value sitting on the path is
// replaced by a fresh map. NOTE(review): nothing visible in this file calls
// this helper (--set goes through strvals) — confirm whether it can be
// removed.
func setNestedValue(m map[string]interface{}, keys []string, value interface{}) {
	switch len(keys) {
	case 0:
		return
	case 1:
		m[keys[0]] = value
		return
	}
	child, ok := m[keys[0]].(map[string]interface{})
	if !ok {
		child = make(map[string]interface{})
		m[keys[0]] = child
	}
	setNestedValue(child, keys[1:], value)
}
// mergeMaps deep-merges overlay onto base and returns a new map; neither
// input's top level is mutated. When both sides hold a map under the same
// key — including yaml.v2's map[interface{}]interface{} form, which the old
// code failed to recognize and replaced wholesale — the maps are merged
// recursively (string-keyed in the result). Otherwise the overlay value wins.
func mergeMaps(base, overlay map[string]interface{}) map[string]interface{} {
	result := make(map[string]interface{}, len(base)+len(overlay))
	for k, v := range base {
		result[k] = v
	}
	for k, v := range overlay {
		if existing, ok := result[k]; ok {
			baseMap, baseOK := asStringMap(existing)
			overlayMap, overlayOK := asStringMap(v)
			if baseOK && overlayOK {
				result[k] = mergeMaps(baseMap, overlayMap)
				continue
			}
		}
		result[k] = v
	}
	return result
}

// asStringMap reports whether v is a map and, if so, returns it keyed by
// strings (converting yaml.v2's interface{}-keyed form via fmt).
func asStringMap(v interface{}) (map[string]interface{}, bool) {
	switch t := v.(type) {
	case map[string]interface{}:
		return t, true
	case map[interface{}]interface{}:
		m := make(map[string]interface{}, len(t))
		for k, val := range t {
			m[fmt.Sprintf("%v", k)] = val
		}
		return m, true
	default:
		return nil, false
	}
}
// renderTemplate executes templateContent as a Go text/template with the
// sprig function library, passing values as the root context, and returns
// the cleaned YAML output.
func renderTemplate(templateContent string, values map[string]interface{}) (string, error) {
	tmpl, err := template.New("preflight").Funcs(sprig.FuncMap()).Parse(templateContent)
	if err != nil {
		return "", errors.Wrap(err, "failed to parse template")
	}
	var out bytes.Buffer
	if err := tmpl.Execute(&out, values); err != nil {
		return "", errors.Wrap(err, "failed to execute template")
	}
	return cleanRenderedYAML(out.String()), nil
}
// cleanRenderedYAML tidies template output: trailing whitespace is stripped
// from every line, runs of blank lines collapse to one, trailing blank
// lines are dropped, and the result always ends with a single newline.
func cleanRenderedYAML(content string) string {
	var out []string
	prevBlank := false
	for _, raw := range strings.Split(content, "\n") {
		line := strings.TrimRight(raw, " \t")
		if line == "" {
			if prevBlank {
				continue // collapse consecutive blanks
			}
			prevBlank = true
			out = append(out, "")
			continue
		}
		prevBlank = false
		out = append(out, line)
	}
	for len(out) > 0 && out[len(out)-1] == "" {
		out = out[:len(out)-1]
	}
	return strings.Join(out, "\n") + "\n"
}

View File

@@ -86,6 +86,8 @@ that a cluster meets the requirements to run an application.`,
cmd.AddCommand(util.VersionCmd())
cmd.AddCommand(OciFetchCmd())
cmd.AddCommand(TemplateCmd())
cmd.AddCommand(DocsCmd())
preflight.AddFlags(cmd.PersistentFlags())
// Dry run flag should be in cmd.PersistentFlags() flags made available to all subcommands

View File

@@ -0,0 +1,42 @@
package cli
import (
"github.com/replicatedhq/troubleshoot/pkg/preflight"
"github.com/spf13/cobra"
)
// TemplateCmd builds the "template" subcommand, which renders a templated
// preflight spec to fully-resolved YAML using the supplied values.
func TemplateCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "template [template-file]",
		Short: "Render a templated preflight spec with values",
		Long: `Process a templated preflight YAML file, substituting variables and removing conditional sections based on provided values.
Examples:
# Render template with default values
preflight template sample-preflight-templated.yaml
# Render template with values from files
preflight template sample-preflight-templated.yaml --values values-base.yaml --values values-prod.yaml
# Render template with inline values
preflight template sample-preflight-templated.yaml --set postgres.enabled=true --set cluster.minNodes=5
# Render template and save to file
preflight template sample-preflight-templated.yaml --output rendered.yaml`,
		Args: cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			flags := cmd.Flags()
			// Lookup errors are ignored: the flags are declared below with
			// matching names and types, so they cannot fail here.
			valuesFiles, _ := flags.GetStringSlice("values")
			setValues, _ := flags.GetStringSlice("set")
			outputFile, _ := flags.GetString("output")
			return preflight.RunTemplate(args[0], valuesFiles, setValues, outputFile)
		},
	}

	cmd.Flags().StringSlice("values", []string{}, "Path to YAML files containing template values (can be used multiple times)")
	cmd.Flags().StringSlice("set", []string{}, "Set template values on the command line (can be used multiple times)")
	cmd.Flags().StringP("output", "o", "", "Output file (default: stdout)")

	return cmd
}

View File

@@ -1,4 +1,4 @@
## preflight
## preflight
Run and retrieve preflight checks in a cluster
@@ -53,7 +53,9 @@ preflight [url] [flags]
### SEE ALSO
* [preflight oci-fetch](preflight_oci-fetch.md) - Fetch a preflight from an OCI registry and print it to standard out
* [preflight version](preflight_version.md) - Print the current version and exit
* [preflight oci-fetch](preflight_oci-fetch.md) - Fetch a preflight from an OCI registry and print it to standard out
* [preflight template](preflight_template.md) - Render a templated preflight spec with values
* [preflight docs](preflight_docs.md) - Extract and display documentation from a preflight spec
* [preflight version](preflight_version.md) - Print the current version and exit
###### Auto generated by spf13/cobra on 15-Sep-2025

60
docs/preflight_docs.md Normal file
View File

@@ -0,0 +1,60 @@
## preflight docs
Extract and display documentation from a preflight spec
### Synopsis
Extract all `docString` fields from enabled analyzers in one or more preflight YAML files. Templating is evaluated first using the provided values, so only documentation for analyzers that are enabled is emitted. The output is Markdown.
```
preflight docs [preflight-file...] [flags]
```
### Examples
```
# Extract docs with defaults
preflight docs ml-platform-preflight.yaml
# Multiple specs with values files (later values override earlier ones)
preflight docs spec1.yaml spec2.yaml \
--values values-base.yaml --values values-prod.yaml
# Inline overrides (Helm-style --set)
preflight docs ml-platform-preflight.yaml \
--set monitoring.enabled=true --set ingress.enabled=false
# Save to file
preflight docs ml-platform-preflight.yaml -o requirements.md
```
### Options
```
--values stringArray Path to YAML files containing template values (can be used multiple times)
--set stringArray Set template values on the command line (can be used multiple times)
-o, --output string Output file (default: stdout)
```
### Behavior
- Accepts one or more preflight specs; all are rendered, and their docStrings are concatenated in input order.
- Values merge: deep-merged left-to-right across `--values` files. `--set` overrides win last.
- Rendering engine:
- If a spec references `.Values`, it is rendered with the Helm engine; otherwise Go text/template is used. A fallback to the legacy engine is applied for mixed templates.
- Map normalization: values maps are normalized to `map[string]interface{}` before applying `--set` to avoid type errors.
- Markdown formatting:
- The first line starting with `Title:` in a `docString` becomes a Markdown heading.
- If no `Title:` is present, the analyzer (or requirement) name is used.
- Sections are separated by blank lines.
### v1beta3 docString extraction
- v1beta3 layout uses `spec.analyzers: [...]`.
- Each analyzer may include a sibling `docString` string.
- The docs command extracts `spec.analyzers[*].docString` after rendering.
- Backward compatibility: legacy `requirements` blocks are still supported and extracted when present.
### SEE ALSO
* [preflight](preflight.md) - Run and retrieve preflight checks in a cluster

View File

@@ -0,0 +1,56 @@
## preflight template
Render a templated preflight spec with values
### Synopsis
Process a templated preflight YAML file, substituting variables and removing conditional sections based on provided values. Supports multiple values files and inline overrides. Outputs the fully-resolved YAML (no conditional logic remains).
```
preflight template [template-file] [flags]
```
### Examples
```
# Render with defaults only
preflight template sample-preflight-templated.yaml
# Render with multiple values files (later files override earlier ones)
preflight template sample-preflight-templated.yaml \
--values values-base.yaml --values values-prod.yaml
# Inline overrides (Helm-style --set)
preflight template sample-preflight-templated.yaml \
--set kubernetes.minVersion=v1.24.0 --set storage.enabled=true
# Save to file
preflight template sample-preflight-templated.yaml -o rendered.yaml
```
### Options
```
--values stringArray Path to YAML files containing template values (can be used multiple times)
--set stringArray Set template values on the command line (can be used multiple times)
-o, --output string Output file (default: stdout)
```
### Behavior
- Values merge: deep-merged left-to-right across multiple `--values` files. `--set` overrides win last.
- Rendering engine:
- v1beta3 specs (Helm-style templates using `.Values.*`) are rendered with the Helm engine.
- Legacy templates are rendered with Go text/template; mixed templates are supported.
- Map normalization: values files are normalized to `map[string]interface{}` before applying `--set` (avoids type errors when merging Helm `strvals`).
### v1beta3 spec decisions
- Layout aligns with v1beta2: `spec.analyzers: [...]`.
- Each analyzer accepts an optional `docString` used by `preflight docs`.
- Templating style is Helm-oriented (`.Values.*`).
- Modularity via conditional analyzers is supported, e.g. `{{- if .Values.ingress.enabled }}`.
### SEE ALSO
* [preflight](preflight.md) - Run and retrieve preflight checks in a cluster

View File

@@ -0,0 +1,65 @@
package preflight
import (
"fmt"
"sort"
"helm.sh/helm/v3/pkg/chart"
"helm.sh/helm/v3/pkg/chartutil"
"helm.sh/helm/v3/pkg/engine"
)
// keepHelmImports ensures Helm modules are retained by the linker until we wire them in.
var _ any = func() any {
_ = engine.Engine{}
_ = chart.Chart{}
_ = chartutil.Values{}
return nil
}()
// RenderWithHelmTemplate renders a single YAML template string using Helm's engine
// with the provided values (corresponding to .Values in Helm templates).
func RenderWithHelmTemplate(templateContent string, values map[string]interface{}) (string, error) {
ch := &chart.Chart{
Metadata: &chart.Metadata{
Name: "preflight-templating",
APIVersion: chart.APIVersionV2,
Type: "application",
},
Templates: []*chart.File{
{
Name: "templates/preflight.yaml",
Data: []byte(templateContent),
},
},
}
releaseOpts := chartutil.ReleaseOptions{
Name: "preflight",
Namespace: "default",
IsInstall: true,
IsUpgrade: false,
Revision: 1,
}
caps := chartutil.DefaultCapabilities
renderVals, err := chartutil.ToRenderValues(ch, chartutil.Values(values), releaseOpts, caps)
if err != nil {
return "", fmt.Errorf("build render values: %w", err)
}
eng := engine.Engine{}
out, err := eng.Render(ch, renderVals)
if err != nil {
return "", fmt.Errorf("helm render: %w", err)
}
if len(out) == 0 {
return "", nil
}
keys := make([]string, 0, len(out))
for k := range out {
keys = append(keys, k)
}
sort.Strings(keys)
return out[keys[0]], nil
}

186
pkg/preflight/template.go Normal file
View File

@@ -0,0 +1,186 @@
package preflight
import (
"bytes"
"fmt"
"os"
"strings"
"text/template"
"github.com/Masterminds/sprig/v3"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
"helm.sh/helm/v3/pkg/strvals"
)
// RunTemplate renders a templated preflight spec file with the provided
// values and writes the result to outputFile, or stdout when empty.
//
// Values are deep-merged left-to-right across valuesFiles, normalized to
// string-keyed maps, then overridden by Helm-style --set expressions.
// v1beta3 specs are rendered with the Helm engine; older specs fall back to
// the legacy Go text/template renderer.
func RunTemplate(templateFile string, valuesFiles []string, setValues []string, outputFile string) error {
	// Read the template file.
	templateContent, err := os.ReadFile(templateFile)
	if err != nil {
		return errors.Wrapf(err, "failed to read template file %s", templateFile)
	}

	// Load and merge values from files, skipping empty path arguments.
	values := make(map[string]interface{})
	for _, valuesFile := range valuesFiles {
		if valuesFile == "" {
			continue
		}
		fileValues, err := loadValuesFile(valuesFile)
		if err != nil {
			return errors.Wrapf(err, "failed to load values file %s", valuesFile)
		}
		values = mergeMaps(values, fileValues)
	}

	// yaml.v2 decodes nested mappings as map[interface{}]interface{}; both
	// strvals.ParseInto and the Helm engine need string-keyed maps, so
	// normalize before applying --set overrides (previously missing, which
	// made --set fail against nested values loaded from files).
	if normalized, ok := toStringKeyedMaps(values).(map[string]interface{}); ok {
		values = normalized
	}

	// Apply --set values (Helm semantics).
	for _, setValue := range setValues {
		if err := applySetValue(values, setValue); err != nil {
			return errors.Wrapf(err, "failed to apply set value: %s", setValue)
		}
	}

	// Choose the rendering engine based on the declared apiVersion.
	apiVersion := detectAPIVersion(string(templateContent))
	var rendered string
	if strings.HasSuffix(apiVersion, "/v1beta3") || apiVersion == "v1beta3" {
		rendered, err = RenderWithHelmTemplate(string(templateContent), values)
		if err != nil {
			return errors.Wrap(err, "failed to render template using Helm")
		}
	} else {
		rendered, err = renderLegacyTemplate(string(templateContent), values)
		if err != nil {
			return errors.Wrap(err, "failed to render template using legacy renderer")
		}
	}

	// Output the result.
	if outputFile == "" {
		fmt.Print(rendered)
		return nil
	}
	if err := os.WriteFile(outputFile, []byte(rendered), 0644); err != nil {
		return errors.Wrapf(err, "failed to write output file %s", outputFile)
	}
	fmt.Printf("Template rendered successfully to %s\n", outputFile)
	return nil
}

// toStringKeyedMaps recursively converts map[interface{}]interface{} (the
// yaml.v2 default for nested mappings) into map[string]interface{},
// descending into slices; non-container values are returned unchanged.
func toStringKeyedMaps(v interface{}) interface{} {
	switch t := v.(type) {
	case map[string]interface{}:
		out := make(map[string]interface{}, len(t))
		for k, val := range t {
			out[k] = toStringKeyedMaps(val)
		}
		return out
	case map[interface{}]interface{}:
		out := make(map[string]interface{}, len(t))
		for k, val := range t {
			out[fmt.Sprintf("%v", k)] = toStringKeyedMaps(val)
		}
		return out
	case []interface{}:
		out := make([]interface{}, len(t))
		for i, val := range t {
			out[i] = toStringKeyedMaps(val)
		}
		return out
	default:
		return v
	}
}
// loadValuesFile reads filename and unmarshals it as a YAML mapping of
// template values. Read errors are returned as-is; parse errors are wrapped.
func loadValuesFile(filename string) (map[string]interface{}, error) {
	raw, err := os.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	var values map[string]interface{}
	if err := yaml.Unmarshal(raw, &values); err != nil {
		return nil, errors.Wrap(err, "failed to parse values file as YAML")
	}
	return values, nil
}
// applySetValue merges one Helm-style --set expression into values using
// strvals semantics. An optional leading "Values." on the key is stripped
// first, so both --set test.enabled and --set Values.test.enabled work.
func applySetValue(values map[string]interface{}, setValue string) error {
	if key, val, found := strings.Cut(setValue, "="); found && key != "" {
		if trimmed := strings.TrimPrefix(key, "Values."); trimmed != key {
			setValue = trimmed + "=" + val
		}
	}
	if err := strvals.ParseInto(setValue, values); err != nil {
		return fmt.Errorf("parsing --set: %w", err)
	}
	return nil
}
// detectAPIVersion scans the raw (pre-render) YAML for an "apiVersion:"
// line and returns its trimmed value. Scanning stops at the first "kind:"
// or "metadata:" line so only the document header is considered; an empty
// string means no apiVersion was found before that point.
func detectAPIVersion(content string) string {
	for _, raw := range strings.Split(content, "\n") {
		line := strings.TrimSpace(raw)
		if strings.HasPrefix(line, "apiVersion:") {
			return strings.TrimSpace(strings.TrimPrefix(line, "apiVersion:"))
		}
		if strings.HasPrefix(line, "kind:") || strings.HasPrefix(line, "metadata:") {
			break
		}
	}
	return ""
}
// renderLegacyTemplate executes templateContent as a Go text/template with
// the sprig function library, passing values at the root of the context,
// and returns the cleaned YAML output.
func renderLegacyTemplate(templateContent string, values map[string]interface{}) (string, error) {
	tmpl, err := template.New("preflight").Funcs(sprig.FuncMap()).Parse(templateContent)
	if err != nil {
		return "", errors.Wrap(err, "failed to parse template")
	}
	var out bytes.Buffer
	if err := tmpl.Execute(&out, values); err != nil {
		return "", errors.Wrap(err, "failed to execute template")
	}
	return cleanRenderedYAML(out.String()), nil
}
// cleanRenderedYAML tidies template output: trailing whitespace is stripped
// from each line, runs of blank lines collapse to one, trailing blank lines
// are removed, and the result always ends with a single newline.
func cleanRenderedYAML(content string) string {
	var out []string
	prevBlank := false
	for _, raw := range strings.Split(content, "\n") {
		line := strings.TrimRight(raw, " \t")
		if line == "" {
			if prevBlank {
				continue // collapse consecutive blanks
			}
			prevBlank = true
			out = append(out, "")
			continue
		}
		prevBlank = false
		out = append(out, line)
	}
	for len(out) > 0 && out[len(out)-1] == "" {
		out = out[:len(out)-1]
	}
	return strings.Join(out, "\n") + "\n"
}
// mergeMaps deep-merges overlay onto base and returns a new map; neither
// input's top level is mutated. When both sides hold a map under the same
// key — including yaml.v2's map[interface{}]interface{} form, which the old
// code failed to recognize and replaced wholesale — the maps are merged
// recursively (string-keyed in the result). Otherwise the overlay value wins.
func mergeMaps(base, overlay map[string]interface{}) map[string]interface{} {
	result := make(map[string]interface{}, len(base)+len(overlay))
	// Copy base map.
	for k, v := range base {
		result[k] = v
	}
	// Overlay values.
	for k, v := range overlay {
		if existing, ok := result[k]; ok {
			baseMap, baseOK := asStringMap(existing)
			overlayMap, overlayOK := asStringMap(v)
			// If both are maps, merge recursively.
			if baseOK && overlayOK {
				result[k] = mergeMaps(baseMap, overlayMap)
				continue
			}
		}
		result[k] = v
	}
	return result
}

// asStringMap reports whether v is a map and, if so, returns it keyed by
// strings (converting yaml.v2's interface{}-keyed form via fmt).
func asStringMap(v interface{}) (map[string]interface{}, bool) {
	switch t := v.(type) {
	case map[string]interface{}:
		return t, true
	case map[interface{}]interface{}:
		m := make(map[string]interface{}, len(t))
		for k, val := range t {
			m[fmt.Sprintf("%v", k)] = val
		}
		return m, true
	default:
		return nil, false
	}
}

View File

@@ -0,0 +1,227 @@
apiVersion: troubleshoot.sh/v1beta3
kind: Preflight
metadata:
name: templated-requirements-example
spec:
analyzers:
- docString: |
Title: Kubernetes Control Plane Requirements
Requirement:
- Version:
- Minimum: {{ .Values.kubernetes.minVersion | default "v1.22.0" }}
- Supported: v1.22.x v1.29.x (stable releases only)
- APIs required (must be enabled, GA):
- admissionregistration.k8s.io/v1
- apiextensions.k8s.io/v1
- apps/v1
- batch/v1
- networking.k8s.io/v1
- policy/v1
- rbac.authorization.k8s.io/v1
- storage.k8s.io/v1
clusterVersion:
checkName: Kubernetes version
outcomes:
- fail:
when: '< {{ .Values.kubernetes.minVersion | default "1.22.0" }}'
message: Requires Kubernetes >= {{ .Values.kubernetes.minVersion | default "1.22.0" }}
- pass:
when: '>= {{ .Values.kubernetes.minVersion | default "1.22.0" }}'
message: Kubernetes version is supported
- docString: |
Title: Container Runtime Requirements
Requirement:
- Runtime: containerd (CRI) version ≥ 1.5
- Kubelet cgroup driver: systemd
- CRI socket path: /run/containerd/containerd.sock
- Security hardening:
- Seccomp: enabled (default profiles permitted)
- AppArmor: enabled where supported
containerRuntime:
outcomes:
- pass:
when: '== containerd'
message: containerd runtime detected
- fail:
message: Unsupported container runtime; containerd required
{{- if .Values.storage.enabled }}
- docString: |
Title: Default StorageClass Requirements
Requirement:
- A StorageClass named "{{ .Values.storage.className | default "default" }}" must exist and be annotated as cluster default
- AccessMode: ReadWriteOnce (RWO) required (RWX optional)
- VolumeBindingMode: WaitForFirstConsumer preferred
- allowVolumeExpansion: true recommended
- Baseline performance per volume:
- Minimum: {{ .Values.storage.minIOPS | default "1000" }} write IOPS, {{ .Values.storage.minReadIOPS | default "3000" }} read IOPS
- Recommended: 3000+ write IOPS, 6000+ read IOPS, 250+ MB/s throughput
- Encryption at rest: {{ if .Values.storage.encryption }}enabled{{ else }}optional{{ end }}
storageClass:
checkName: Default StorageClass
storageClassName: '{{ .Values.storage.className | default "default" }}'
outcomes:
- fail:
message: Default StorageClass not found
- pass:
message: Default StorageClass present
{{- end }}
- docString: |
Title: Cluster Size and Aggregate Capacity
Requirement:
- Node count: Minimum {{ .Values.cluster.minNodes | default "3" }} nodes (HA baseline), Recommended {{ .Values.cluster.recommendedNodes | default "5" }} nodes
- Total CPU: Minimum {{ .Values.cluster.minCPU | default "4" }} vCPU, Recommended 8+ vCPU
- Total Memory: Minimum {{ .Values.cluster.minMemory | default "16" }} GiB, Recommended 32+ GiB
- Control plane sizing:
- Managed control planes supported (EKS/GKE/AKS)
- Self-managed: 3 control-plane nodes recommended
nodeResources:
checkName: Cluster capacity
outcomes:
- fail:
when: 'count() < {{ .Values.cluster.minNodes | default "3" }}'
message: Requires at least {{ .Values.cluster.minNodes | default "3" }} nodes
- warn:
when: 'count() < {{ .Values.cluster.recommendedNodes | default "5" }}'
message: {{ .Values.cluster.recommendedNodes | default "5" }} nodes recommended for headroom
- pass:
message: Node count is sufficient
nodeResources:
checkName: Cluster CPU total
outcomes:
- fail:
when: 'sum(cpuCapacity) < {{ .Values.cluster.minCPU | default "4" }}'
message: Requires at least {{ .Values.cluster.minCPU | default "4" }} vCPU total
- pass:
message: CPU total is sufficient
nodeResources:
checkName: Cluster memory total
outcomes:
- fail:
when: 'sum(memoryCapacity) < {{ .Values.cluster.minMemory | default "16" }}Gi'
message: Requires at least {{ .Values.cluster.minMemory | default "16" }} GiB total memory
- pass:
message: Memory total is sufficient
{{- if .Values.postgres.enabled }}
- docString: |
Title: Postgres Platform Requirements
Requirement:
- Database: PostgreSQL {{ .Values.postgres.version | default "14+" }}
- Connection: {{ .Values.postgres.uri | default "postgresql://postgres@postgres:5432/postgres" }}
- StorageClass: {{ .Values.postgres.storageClass | default "default" }} with:
- Latency p99 ≤ 5 ms
- ≥ 3000 read IOPS, ≥ 1000 write IOPS
- allowVolumeExpansion: true
- Memory per node: Minimum {{ .Values.postgres.minMemory | default "8" }} GiB; Recommended 32 GiB
- CPU per node: Minimum {{ .Values.postgres.minCPU | default "2" }} vCPU; Recommended 4+ vCPU
storageClass:
checkName: Postgres storage class
storageClassName: '{{ .Values.postgres.storageClass | default "default" }}'
outcomes:
- fail:
message: Postgres StorageClass not found
- pass:
message: Postgres StorageClass present
nodeResources:
checkName: Postgres memory guidance
outcomes:
- fail:
when: 'min(memoryCapacity) < {{ .Values.postgres.minMemory | default "8" }}Gi'
message: All nodes must have at least {{ .Values.postgres.minMemory | default "8" }} GiB of memory for Postgres
- warn:
when: 'min(memoryCapacity) < 32Gi'
message: Nodes are recommended to have at least 32 GiB of memory for Postgres
- pass:
message: Nodes have sufficient memory for Postgres
{{- end }}
{{- if .Values.redis.enabled }}
- docString: |
Title: Redis Platform Requirements
Requirement:
- Database: Redis {{ .Values.redis.version | default "6.2+" }}
- Connection: {{ .Values.redis.uri | default "redis://default:@redis:6379" }}
- Ephemeral storage per node: Minimum 40 GiB; Recommended 100 GiB
- If persistence enabled: SSD-backed StorageClass with low-latency reads/writes
- Memory per node: Baseline {{ .Values.redis.minMemory | default "4" }} GiB; Recommended sized to dataset with 30% headroom
nodeResources:
checkName: Redis ephemeral storage
outcomes:
- fail:
when: 'min(ephemeralStorageCapacity) < 40Gi'
message: Each node must have at least 40 GiB ephemeral storage for Redis
- warn:
when: 'min(ephemeralStorageCapacity) < 100Gi'
message: 100 GiB per node recommended for Redis
- pass:
message: Nodes have sufficient ephemeral storage for Redis
{{- end }}
{{- if .Values.ingress.enabled }}
- docString: |
Title: Required CRDs and Ingress Capabilities
Requirement:
- Ingress Controller: {{ .Values.ingress.type | default "Contour" }}
{{- if eq (.Values.ingress.type | default "Contour") "Contour" }}
- CRD must be present:
- Group: heptio.com
- Kind: IngressRoute
- Version: v1beta1 or later served version
{{- end }}
- Ingress capability:
- Layer-7 HTTP/HTTPS routing with TLS termination supported
- Wildcard certificates permitted (optional)
{{- if .Values.ingress.customDomain }}
- Custom domain: {{ .Values.ingress.customDomain }}
{{- end }}
{{- if eq (.Values.ingress.type | default "Contour") "Contour" }}
customResourceDefinition:
checkName: Contour IngressRoute CRD
customResourceDefinitionName: ingressroutes.contour.heptio.com
outcomes:
- fail:
message: Contour IngressRoute CRD not found; required for ingress routing
- pass:
message: Contour IngressRoute CRD present
{{- end }}
{{- end }}
{{- if .Values.monitoring.enabled }}
- docString: |
Title: Monitoring and Observability Requirements
Requirement:
- Monitoring: {{ .Values.monitoring.type | default "Prometheus" }}
- Metrics retention: {{ .Values.monitoring.retention | default "15 days" }}
- Storage required: {{ .Values.monitoring.storageSize | default "50Gi" }}
- Components:
{{- if .Values.monitoring.prometheus }}
- Prometheus for metrics collection
{{- end }}
{{- if .Values.monitoring.grafana }}
- Grafana for visualization
{{- end }}
{{- if .Values.monitoring.alertmanager }}
- AlertManager for alerting
{{- end }}
storageClass:
checkName: Monitoring storage
storageClassName: '{{ .Values.monitoring.storageClass | default "default" }}'
outcomes:
- fail:
message: Monitoring StorageClass not found
- pass:
message: Monitoring StorageClass present
{{- end }}
- docString: |
Title: OS and Kernel Requirements
Requirement:
- Nodes: Linux x86_64 (amd64) or arm64 on supported distributions
- Supported OS: {{ range $i, $v := .Values.os.supported }}{{ if $i }}, {{ end }}{{ $v }}{{ end }}
- Kernel: ≥ {{ .Values.os.minKernel | default "5.4" }} with cgroups v1 or v2 (v2 preferred)
- Time sync: chrony or systemd-timesyncd active; clock drift < 500 ms
- Filesystems: ext4 or xfs for container layers and volumes
- SELinux/AppArmor: enforcing/permissive accepted

63
values-sample-full.yaml Normal file
View File

@@ -0,0 +1,63 @@
# Full Configuration for sample-preflight-templated.yaml
# All features enabled with production-grade settings
# Kubernetes cluster requirements
kubernetes:
minVersion: "v1.27.0"
# Storage configuration
storage:
enabled: true
className: "fast-ssd"
minIOPS: 5000
minReadIOPS: 10000
encryption: true
# Cluster sizing
cluster:
minNodes: 5
recommendedNodes: 7
minCPU: 8
minMemory: 32
# PostgreSQL database
postgres:
enabled: true
version: "15+"
uri: "postgresql://postgres@postgres-primary.database.svc.cluster.local:5432/production"
storageClass: "fast-ssd"
minMemory: 16
minCPU: 4
# Redis cache
redis:
enabled: true
version: "7.2+"
uri: "redis://default:@redis-sentinel.cache.svc.cluster.local:26379"
minMemory: 8
# Ingress configuration
ingress:
enabled: true
type: "Contour"
customDomain: "*.apps.production.example.com"
# Monitoring stack
monitoring:
enabled: true
type: "Prometheus"
retention: "30 days"
storageSize: "100Gi"
storageClass: "fast-ssd"
prometheus: true
grafana: true
alertmanager: true
# Operating system
os:
minKernel: "5.15"
supported:
- "Ubuntu 22.04 LTS"
- "RHEL 9"
- "Rocky Linux 9"
- "Amazon Linux 2023"

View File

@@ -0,0 +1,40 @@
# Minimal Configuration for sample-preflight-templated.yaml
# Only essential features enabled
# Kubernetes cluster requirements
kubernetes:
minVersion: "v1.25.0"
# Storage disabled
storage:
enabled: false
# Minimal cluster sizing
cluster:
minNodes: 3
recommendedNodes: 3
minCPU: 4
minMemory: 16
# PostgreSQL disabled
postgres:
enabled: false
# Redis disabled
redis:
enabled: false
# Ingress disabled
ingress:
enabled: false
# Monitoring disabled
monitoring:
enabled: false
# Operating system
os:
minKernel: "5.4"
supported:
- "Ubuntu 20.04+"
- "RHEL 8+"