refactor: refactor packages and create verification options (#129)

Signed-off-by: Asra Ali <asraa@google.com>
This commit is contained in:
asraa
2022-07-13 11:52:43 -05:00
committed by GitHub
parent 3b17f9c76f
commit 66533faf78
9 changed files with 1033 additions and 983 deletions

49
main.go
View File

@@ -29,9 +29,7 @@ var (
var defaultRekorAddr = "https://rekor.sigstore.dev"
func verify(ctx context.Context,
provenance []byte, artifactHash, source, branch string,
tag, versiontag *string,
) ([]byte, error) {
provenance []byte, artifactHash, source string, provenanceOpts *pkg.ProvenanceOpts) ([]byte, error) {
rClient, err := rekor.NewClient(defaultRekorAddr)
if err != nil {
return nil, err
@@ -56,36 +54,17 @@ func verify(ctx context.Context,
}
fmt.Fprintf(os.Stderr, "Signing certificate information:\n %s\n", b)
/* Verify properties of the SLSA provenance. */
// Verify the workflow identity.
if err := pkg.VerifyWorkflowIdentity(workflowInfo, source); err != nil {
return nil, err
}
/* Verify properties of the SLSA provenance. */
// Unpack and verify info in the provenance, including the Subject Digest.
if err := pkg.VerifyProvenance(env, artifactHash); err != nil {
if err := pkg.VerifyProvenance(env, provenanceOpts); err != nil {
return nil, err
}
// Verify the branch.
if err := pkg.VerifyBranch(env, branch); err != nil {
return nil, err
}
// Verify the tag.
if tag != nil {
if err := pkg.VerifyTag(env, *tag); err != nil {
return nil, err
}
}
// Verify the versioned tag.
if versiontag != nil {
if err := pkg.VerifyVersionedTag(env, *versiontag); err != nil {
return nil, err
}
}
// Return verified provenance.
return base64.StdEncoding.DecodeString(env.Payload)
}
@@ -117,13 +96,12 @@ func main() {
pversiontag = &versiontag
}
if pversiontag != nil && ptag != nil {
if ptag != nil && pversiontag != nil {
fmt.Fprintf(os.Stderr, "'version' and 'tag' options cannot be used together\n")
os.Exit(1)
}
verifiedProvenance, err := runVerify(artifactPath, provenancePath, source, branch,
ptag, pversiontag)
verifiedProvenance, err := runVerify(artifactPath, provenancePath, source, branch, ptag, pversiontag)
if err != nil {
fmt.Fprintf(os.Stderr, "FAILED: SLSA verification failed: %v\n", err)
os.Exit(2)
@@ -146,9 +124,7 @@ func isFlagPassed(name string) bool {
return found
}
func runVerify(artifactPath, provenancePath, source, branch string,
ptag, pversiontag *string,
) ([]byte, error) {
func runVerify(artifactPath, provenancePath, source, branch string, ptag, pversiontag *string) ([]byte, error) {
f, err := os.Open(artifactPath)
if err != nil {
log.Fatal(err)
@@ -164,10 +140,17 @@ func runVerify(artifactPath, provenancePath, source, branch string,
if _, err := io.Copy(h, f); err != nil {
log.Panic(err)
}
artifactHash := hex.EncodeToString(h.Sum(nil))
provenanceOpts := &pkg.ProvenanceOpts{
ExpectedBranch: branch,
ExpectedDigest: artifactHash,
ExpectedVersionedTag: pversiontag,
ExpectedTag: ptag,
}
ctx := context.Background()
return verify(ctx, provenance,
hex.EncodeToString(h.Sum(nil)),
source, branch,
ptag, pversiontag)
artifactHash,
source, provenanceOpts)
}

118
pkg/builder.go Normal file
View File

@@ -0,0 +1,118 @@
package pkg
import (
"crypto/x509"
"errors"
"fmt"
"strings"
"golang.org/x/mod/semver"
)
var (
	// trustedBuilderRepository is the repository hosting the trusted reusable
	// builder workflows.
	trustedBuilderRepository = "slsa-framework/slsa-github-generator"
	// e2eTestRepository is the repository used by end-to-end tests; it is
	// granted the same ref exemptions as the builder repository.
	e2eTestRepository = "slsa-framework/example-package"
	// certOidcIssuer is the expected OIDC issuer of the Fulcio signing
	// certificate (GitHub Actions).
	certOidcIssuer = "https://token.actions.githubusercontent.com"
)

// trustedReusableWorkflows is the allow-list of reusable workflow paths whose
// provenance this verifier accepts.
var trustedReusableWorkflows = map[string]bool{
	trustedBuilderRepository + "/.github/workflows/generator_generic_slsa3.yml": true,
	trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml":        true,
}
// VerifyWorkflowIdentity verifies the signing certificate information
// against the expected source repository: the reusable workflow must be one
// of the trusted builders, pinned to an allowed ref, issued by the GitHub
// Actions OIDC issuer, and triggered from the given source repository.
func VerifyWorkflowIdentity(id *WorkflowIdentity, source string) error {
	// The certificate URI path has the form /org/repo/path/to/workflow@ref.
	parts := strings.SplitN(id.JobWobWorkflowRef, "@", 2)
	if len(parts) < 2 {
		return fmt.Errorf("%w: %s", errorMalformedWorkflowURI, id.JobWobWorkflowRef)
	}

	// The workflow must be one of the trusted reusable workflows.
	workflowName := strings.Trim(parts[0], "/")
	if _, trusted := trustedReusableWorkflows[workflowName]; !trusted {
		return fmt.Errorf("%w: %s", ErrorUntrustedReusableWorkflow, workflowName)
	}

	// The builder must be pinned to an allowed ref.
	workflowRef := strings.Trim(parts[1], "/")
	if err := verifyTrustedBuilderRef(id, workflowRef); err != nil {
		return err
	}

	// The token must come from the GitHub Actions OIDC issuer.
	if !strings.EqualFold(id.Issuer, certOidcIssuer) {
		return fmt.Errorf("untrusted token issuer: %s", id.Issuer)
	}

	// The caller repository in the x509 extension is not fully qualified;
	// it only contains {org}/{repository}, so strip any github.com/ prefix
	// from the expected source before comparing.
	expectedSource := strings.TrimPrefix(source, "github.com/")
	if !strings.EqualFold(id.CallerRepository, expectedSource) {
		return fmt.Errorf("%w: expected source '%s', got '%s'", ErrorMismatchRepository,
			expectedSource, id.CallerRepository)
	}

	return nil
}
// verifyTrustedBuilderRef checks that the builder workflow ref is allowed.
// Only `refs/heads/main` is accepted for the builder repository and the e2e
// test repository, which need to work at HEAD (this lets us use the
// pre-built builder binary generated during release — releases happen at
// main). All other callers must pin to a semantic-version release tag.
func verifyTrustedBuilderRef(id *WorkflowIdentity, ref string) error {
	// Trusted repositories may run the builder at main.
	trustedRepo := id.CallerRepository == trustedBuilderRepository ||
		id.CallerRepository == e2eTestRepository
	if trustedRepo && strings.EqualFold("refs/heads/main", ref) {
		return nil
	}

	// Everything else must be a tag ref.
	pin := strings.TrimPrefix(ref, "refs/tags/")
	if pin == ref {
		return fmt.Errorf("%w: %s: not of the form 'refs/tags/name'", errorInvalidRef, ref)
	}

	// Require a full semver of the form vX.Y.Z with no prerelease or build
	// metadata (i.e. it maps to an actual release).
	validPin := semver.IsValid(pin) &&
		len(strings.Split(pin, ".")) == 3 &&
		semver.Prerelease(pin) == "" &&
		semver.Build(pin) == ""
	if !validPin {
		return fmt.Errorf("%w: %s: not of the form vX.Y.Z", errorInvalidRef, pin)
	}
	return nil
}
func getExtension(cert *x509.Certificate, oid string) string {
for _, ext := range cert.Extensions {
if strings.Contains(ext.Id.String(), oid) {
return string(ext.Value)
}
}
return ""
}
// WorkflowIdentity describes the origin of a workflow run, as extracted
// from the Fulcio signing certificate by GetWorkflowInfoFromCertificate.
type WorkflowIdentity struct {
	// CallerRepository is the repository ({org}/{repo}) that invoked the workflow.
	CallerRepository string `json:"caller"`
	// CallerHash is the commit SHA at which the workflow was triggered.
	CallerHash string `json:"commit"`
	// JobWobWorkflowRef is the current (reusable) workflow ref,
	// of the form org/repo/path/to/workflow@ref.
	JobWobWorkflowRef string `json:"job_workflow_ref"`
	// Trigger is the event that triggered the workflow (e.g. workflow_dispatch).
	Trigger string `json:"trigger"`
	// Issuer is the OIDC token issuer.
	Issuer string `json:"issuer"`
}
// GetWorkflowInfoFromCertificate gets the workflow identity from the Fulcio
// authenticated content: the caller repository, commit SHA, trigger, and
// issuer come from the certificate's x509 extensions, and the job workflow
// ref from the path of the certificate's URI SAN.
func GetWorkflowInfoFromCertificate(cert *x509.Certificate) (*WorkflowIdentity, error) {
	if len(cert.URIs) == 0 {
		return nil, errors.New("missing URI information from certificate")
	}
	return &WorkflowIdentity{
		// Sigstore/Fulcio extension OIDs: .1.1 issuer, .1.2 trigger,
		// .1.3 commit SHA, .1.5 caller repository.
		CallerRepository:  getExtension(cert, "1.3.6.1.4.1.57264.1.5"),
		Issuer:            getExtension(cert, "1.3.6.1.4.1.57264.1.1"),
		Trigger:           getExtension(cert, "1.3.6.1.4.1.57264.1.2"),
		CallerHash:        getExtension(cert, "1.3.6.1.4.1.57264.1.3"),
		JobWobWorkflowRef: cert.URIs[0].Path,
	}, nil
}

330
pkg/builder_test.go Normal file
View File

@@ -0,0 +1,330 @@
package pkg
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
)
// Test_VerifyWorkflowIdentity covers malformed/untrusted workflow refs,
// ref pinning, issuer validation, and caller-repository matching
// (including the github.com/ prefix stripping).
func Test_VerifyWorkflowIdentity(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name     string
		workflow *WorkflowIdentity
		source   string
		err      error // expected error (nil means success)
	}{
		{
			name: "invalid job workflow ref",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: "random/workflow/ref",
				Trigger:           "workflow_dispatch",
				Issuer:            "https://token.actions.githubusercontent.com",
			},
			source: "asraa/slsa-on-github-test",
			err:    errorMalformedWorkflowURI,
		},
		{
			name: "untrusted job workflow ref",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: "/malicious/slsa-go/.github/workflows/builder.yml@refs/heads/main",
				Trigger:           "workflow_dispatch",
				Issuer:            "https://token.actions.githubusercontent.com",
			},
			source: "asraa/slsa-on-github-test",
			err:    ErrorUntrustedReusableWorkflow,
		},
		{
			// The ref check fails first (main is only allowed for trusted
			// repos), so the bad issuer is never reached.
			name: "untrusted job workflow ref for general repos",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
				Trigger:           "workflow_dispatch",
				Issuer:            "https://bad.issuer.com",
			},
			source: "asraa/slsa-on-github-test",
			err:    errorInvalidRef,
		},
		{
			name: "valid main ref for trusted builder",
			workflow: &WorkflowIdentity{
				CallerRepository:  trustedBuilderRepository,
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
				Trigger:           "workflow_dispatch",
				Issuer:            "https://token.actions.githubusercontent.com",
			},
			source: trustedBuilderRepository,
		},
		{
			name: "valid main ref for e2e test",
			workflow: &WorkflowIdentity{
				CallerRepository:  e2eTestRepository,
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: e2eTestRepository,
		},
		{
			name: "unexpected source for e2e test",
			workflow: &WorkflowIdentity{
				CallerRepository:  e2eTestRepository,
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "malicious/source",
			err:    ErrorMismatchRepository,
		},
		{
			name: "valid main ref for builder",
			workflow: &WorkflowIdentity{
				CallerRepository:  trustedBuilderRepository,
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "malicious/source",
			err:    ErrorMismatchRepository,
		},
		{
			name: "unexpected source",
			workflow: &WorkflowIdentity{
				CallerRepository:  "malicious/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "asraa/slsa-on-github-test",
			err:    ErrorMismatchRepository,
		},
		{
			name: "valid workflow identity",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "asraa/slsa-on-github-test",
		},
		{
			name: "invalid workflow identity with prerelease",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3-alpha",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "asraa/slsa-on-github-test",
			err:    errorInvalidRef,
		},
		{
			name: "invalid workflow identity with build",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3+123",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "asraa/slsa-on-github-test",
			err:    errorInvalidRef,
		},
		{
			name: "invalid workflow identity with metadata",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3-alpha+123",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "asraa/slsa-on-github-test",
			err:    errorInvalidRef,
		},
		{
			// The github.com/ prefix on the expected source is stripped
			// before comparison.
			name: "valid workflow identity with fully qualified source",
			workflow: &WorkflowIdentity{
				CallerRepository:  "asraa/slsa-on-github-test",
				CallerHash:        "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
				JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3",
				Trigger:           "workflow_dispatch",
				Issuer:            certOidcIssuer,
			},
			source: "github.com/asraa/slsa-on-github-test",
		},
	}
	for _, tt := range tests {
		tt := tt // Re-initializing variable so it is not changed while executing the closure below
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			err := VerifyWorkflowIdentity(tt.workflow, tt.source)
			if !errCmp(err, tt.err) {
				t.Errorf(cmp.Diff(err, tt.err, cmpopts.EquateErrors()))
			}
		})
	}
}
// Test_verifyTrustedBuilderRef checks the ref-pinning policy for three
// caller categories: the trusted builder repo, the e2e test repo (both may
// use refs/heads/main), and all other repos (full vX.Y.Z tags only).
func Test_verifyTrustedBuilderRef(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name       string
		callerRepo string
		builderRef string
		expected   error // expected error (nil means the ref is allowed)
	}{
		// Trusted repo.
		{
			name:       "main allowed for builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/heads/main",
		},
		{
			name:       "full semver for builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/tags/v1.2.3",
		},
		{
			name:       "no patch semver for other builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/tags/v1.2",
			expected:   errorInvalidRef,
		},
		{
			name:       "no min semver for builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/tags/v1",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with prerelease for builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/tags/v1.2.3-alpha",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with build for builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/tags/v1.2.3+123",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with build/prerelease for builder",
			callerRepo: trustedBuilderRepository,
			builderRef: "refs/tags/v1.2.3-alpha+123",
			expected:   errorInvalidRef,
		},
		// E2e tests repo.
		{
			name:       "main allowed for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/heads/main",
		},
		{
			name:       "full semver for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/tags/v1.2.3",
		},
		{
			name:       "no patch semver for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/tags/v1.2",
			expected:   errorInvalidRef,
		},
		{
			name:       "no min semver for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/tags/v1",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with prerelease for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/tags/v1.2.3-alpha",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with build for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/tags/v1.2.3+123",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with build/prerelease for test repo",
			callerRepo: e2eTestRepository,
			builderRef: "refs/tags/v1.2.3-alpha+123",
			expected:   errorInvalidRef,
		},
		// Other repos.
		{
			name:       "main not allowed for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/heads/main",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/tags/v1.2.3",
		},
		{
			name:       "no patch semver for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/tags/v1.2",
			expected:   errorInvalidRef,
		},
		{
			name:       "no min semver for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/tags/v1",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with prerelease for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/tags/v1.2.3-alpha",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with build for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/tags/v1.2.3+123",
			expected:   errorInvalidRef,
		},
		{
			name:       "full semver with build/prerelease for other repos",
			callerRepo: "some/repo",
			builderRef: "refs/tags/v1.2.3-alpha+123",
			expected:   errorInvalidRef,
		},
	}
	for _, tt := range tests {
		tt := tt // Re-initializing variable so it is not changed while executing the closure below
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			// Only the caller repository influences verifyTrustedBuilderRef.
			wf := WorkflowIdentity{
				CallerRepository: tt.callerRepo,
			}
			err := verifyTrustedBuilderRef(&wf, tt.builderRef)
			if !errCmp(err, tt.expected) {
				t.Errorf(cmp.Diff(err, tt.expected, cmpopts.EquateErrors()))
			}
		})
	}
}

18
pkg/errors.go Normal file
View File

@@ -0,0 +1,18 @@
package pkg
import "errors"
var (
ErrorInvalidDssePayload = errors.New("invalid DSSE envelope payload")
ErrorMismatchBranch = errors.New("branch used to generate the binary does not match provenance")
ErrorMismatchRepository = errors.New("repository used to generate the binary does not match provenance")
ErrorMismatchTag = errors.New("tag used to generate the binary does not match provenance")
ErrorMismatchVersionedTag = errors.New("tag used to generate the binary does not match provenance")
ErrorInvalidSemver = errors.New("invalid semantic version")
ErrorRekorSearch = errors.New("error searching rekor entries")
errorMismatchHash = errors.New("binary artifact hash does not match provenance subject")
errorInvalidRef = errors.New("invalid ref")
errorMalformedWorkflowURI = errors.New("malformed URI for workflow")
ErrorUntrustedReusableWorkflow = errors.New("untrusted reusable workflow")
ErrorNoValidRekorEntries = errors.New("could not find a matching valid signature entry")
)

17
pkg/options.go Normal file
View File

@@ -0,0 +1,17 @@
package pkg
// ProvenanceOpts are the options for checking provenance information.
type ProvenanceOpts struct {
	// ExpectedDigest is the expected artifact sha included in the
	// provenance subject.
	ExpectedDigest string
	// ExpectedBranch is the expected branch (github_ref or github_base_ref) in
	// the invocation parameters.
	ExpectedBranch string
	// ExpectedTag is the expected tag, github_ref, in the invocation
	// parameters. A nil pointer appears to disable the tag check
	// (mirrors the previous optional *string CLI flag) — confirm against
	// VerifyProvenance.
	ExpectedTag *string
	// ExpectedVersionedTag is the expected versioned tag. A nil pointer
	// appears to disable the check — confirm against VerifyProvenance.
	ExpectedVersionedTag *string
}

View File

@@ -1,78 +1,19 @@
package pkg
import (
"bytes"
"context"
"crypto"
"crypto/ecdsa"
"crypto/x509"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"os"
"strings"
"time"
merkleproof "github.com/transparency-dev/merkle/proof"
"golang.org/x/mod/semver"
cjson "github.com/docker/go/canonical/json"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
intoto "github.com/in-toto/in-toto-golang/in_toto"
dsselib "github.com/secure-systems-lab/go-securesystemslib/dsse"
"github.com/sigstore/sigstore/pkg/signature"
"github.com/sigstore/sigstore/pkg/signature/dsse"
"github.com/transparency-dev/merkle/rfc6962"
"github.com/sigstore/cosign/cmd/cosign/cli/fulcio"
"github.com/sigstore/cosign/pkg/cosign"
"github.com/sigstore/cosign/pkg/cosign/bundle"
"github.com/sigstore/rekor/pkg/generated/client"
"github.com/sigstore/rekor/pkg/generated/client/entries"
"github.com/sigstore/rekor/pkg/generated/client/index"
"github.com/sigstore/rekor/pkg/generated/client/tlog"
"github.com/sigstore/rekor/pkg/generated/models"
"github.com/sigstore/rekor/pkg/sharding"
"github.com/sigstore/rekor/pkg/types"
intotod "github.com/sigstore/rekor/pkg/types/intoto/v0.0.1"
"github.com/sigstore/rekor/pkg/util"
"github.com/sigstore/sigstore/pkg/cryptoutils"
"github.com/slsa-framework/slsa-github-generator/signing/envelope"
)
const (
defaultRekorAddr = "https://rekor.sigstore.dev"
certOidcIssuer = "https://token.actions.githubusercontent.com"
)
var (
trustedBuilderRepository = "slsa-framework/slsa-github-generator"
e2eTestRepository = "slsa-framework/example-package"
)
var trustedReusableWorkflows = map[string]bool{
trustedBuilderRepository + "/.github/workflows/generator_generic_slsa3.yml": true,
trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml": true,
}
var (
ErrorInvalidDssePayload = errors.New("invalid DSSE envelope payload")
ErrorMismatchBranch = errors.New("branch used to generate the binary does not match provenance")
ErrorMismatchRepository = errors.New("repository used to generate the binary does not match provenance")
ErrorMismatchTag = errors.New("tag used to generate the binary does not match provenance")
ErrorMismatchVersionedTag = errors.New("tag used to generate the binary does not match provenance")
ErrorInvalidSemver = errors.New("invalid semantic version")
ErrorRekorSearch = errors.New("error searching rekor entries")
errorMismatchHash = errors.New("binary artifact hash does not match provenance subject")
errorInvalidRef = errors.New("invalid ref")
errorMalformedWorkflowURI = errors.New("malformed URI for workflow")
ErrorUntrustedReusableWorkflow = errors.New("untrusted reusable workflow")
ErrorNoValidRekorEntries = errors.New("could not find a matching valid signature entry")
)
func EnvelopeFromBytes(payload []byte) (env *dsselib.Envelope, err error) {
@@ -81,29 +22,20 @@ func EnvelopeFromBytes(payload []byte) (env *dsselib.Envelope, err error) {
return
}
func intotoEntry(certPem []byte, provenance []byte) (*intotod.V001Entry, error) {
cert := strfmt.Base64(certPem)
return &intotod.V001Entry{
IntotoObj: models.IntotoV001Schema{
Content: &models.IntotoV001SchemaContent{
Envelope: string(provenance),
},
PublicKey: &cert,
},
}, nil
// provenanceFromEnv decodes the base64-encoded DSSE payload and unmarshals
// it into an in-toto provenance statement. Both decode and unmarshal
// failures are reported as ErrorInvalidDssePayload.
func provenanceFromEnv(env *dsselib.Envelope) (*intoto.ProvenanceStatement, error) {
	raw, err := base64.StdEncoding.DecodeString(env.Payload)
	if err != nil {
		return nil, fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "decoding payload")
	}

	statement := &intoto.ProvenanceStatement{}
	if err := json.Unmarshal(raw, statement); err != nil {
		return nil, fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "unmarshalling json")
	}
	return statement, nil
}
// Verify SHA256 Subject Digest from the provenance statement.
func verifySha256Digest(env *dsselib.Envelope, expectedHash string) error {
pyld, err := base64.StdEncoding.DecodeString(env.Payload)
if err != nil {
return fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "decoding payload")
}
prov := &intoto.ProvenanceStatement{}
if err := json.Unmarshal(pyld, prov); err != nil {
return fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "unmarshalling json")
}
func verifySha256Digest(prov *intoto.ProvenanceStatement, expectedHash string) error {
if len(prov.Subject) == 0 {
return fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "no subjects")
}
@@ -123,280 +55,6 @@ func verifySha256Digest(env *dsselib.Envelope, expectedHash string) error {
return fmt.Errorf("expected hash '%s' not found: %w", expectedHash, errorMismatchHash)
}
// GetRekorEntries finds all entry UUIDs by the digest of the artifact binary.
// It queries the rekor search index for entries whose subject digest matches
// the given sha256 hash; search failures and empty results are both reported
// as ErrorRekorSearch.
func GetRekorEntries(rClient *client.Rekor, artifactHash string) ([]string, error) {
	// Query the search index by subject digest.
	params := index.NewSearchIndexParams()
	params.Query = &models.SearchIndex{
		Hash: fmt.Sprintf("sha256:%v", artifactHash),
	}

	resp, err := rClient.Index.SearchIndex(params)
	if err != nil {
		return nil, fmt.Errorf("%w: %s", ErrorRekorSearch, err.Error())
	}

	uuids := resp.GetPayload()
	if len(uuids) == 0 {
		return nil, fmt.Errorf("%w: no matching entries found", ErrorRekorSearch)
	}
	return uuids, nil
}
// GetRekorEntriesWithCert finds all entry UUIDs with the full intoto attestation.
// The attestation generated by the slsa-github-generator libraries contain a signing certificate.
// It builds an intoto proposed entry from the provenance, queries rekor for
// the matching log entry, verifies each returned tlog entry, and returns the
// parsed DSSE envelope plus the signing certificate from the envelope.
func GetRekorEntriesWithCert(rClient *client.Rekor, provenance []byte) (*dsselib.Envelope, *x509.Certificate, error) {
	// Use intoto attestation to find rekor entry UUIDs.
	params := entries.NewSearchLogQueryParams()
	searchLogQuery := models.SearchLogQuery{}

	// The signing certificate is embedded in the DSSE envelope.
	certPem, err := envelope.GetCertFromEnvelope(provenance)
	if err != nil {
		return nil, nil, fmt.Errorf("error getting certificate from provenance: %w", err)
	}

	// Build the exact proposed intoto entry to search for.
	e, err := intotoEntry(certPem, provenance)
	if err != nil {
		return nil, nil, fmt.Errorf("error creating intoto entry: %w", err)
	}
	entry := models.Intoto{
		APIVersion: swag.String(e.APIVersion()),
		Spec:       e.IntotoObj,
	}
	entries := []models.ProposedEntry{&entry}
	searchLogQuery.SetEntries(entries)
	params.SetEntry(&searchLogQuery)

	resp, err := rClient.Entries.SearchLogQuery(params)
	if err != nil {
		return nil, nil, fmt.Errorf("%w: %s", ErrorRekorSearch, err.Error())
	}
	// Exactly one matching log entry is expected for the attestation.
	if len(resp.GetPayload()) != 1 {
		return nil, nil, fmt.Errorf("%w: %s", ErrorRekorSearch, "no matching rekor entries")
	}
	logEntry := resp.Payload[0]
	for uuid, e := range logEntry {
		// Verify inclusion proof and signed entry timestamp for each entry.
		if _, err := verifyTlogEntry(context.Background(), rClient, uuid, e); err != nil {
			return nil, nil, fmt.Errorf("error verifying tlog entry: %w", err)
		}
		url := fmt.Sprintf("%v/%v/%v", defaultRekorAddr, "api/v1/log/entries", uuid)
		fmt.Fprintf(os.Stderr, "Verified signature against tlog entry index %d at URL: %s\n", *e.LogIndex, url)
	}

	env, err := EnvelopeFromBytes(provenance)
	if err != nil {
		return nil, nil, err
	}

	// Exactly one PEM certificate is expected in the envelope.
	certs, err := cryptoutils.UnmarshalCertificatesFromPEM(certPem)
	if err != nil {
		return nil, nil, err
	}
	if len(certs) != 1 {
		return nil, nil, fmt.Errorf("error unmarshaling certificate from pem")
	}
	return env, certs[0], nil
}
// verifyRootHash verifies the root hash of an inclusion proof against
// rekor's current signed tree head (STH). The STH signature is checked with
// the given rekor public key; when the proof was produced at a smaller tree
// size, a consistency proof from the proof's root to the current STH is
// fetched and verified.
func verifyRootHash(ctx context.Context, rekorClient *client.Rekor, proof *models.InclusionProof, pub *ecdsa.PublicKey) error {
	// Fetch the current log info (includes the signed tree head).
	infoParams := tlog.NewGetLogInfoParamsWithContext(ctx)
	result, err := rekorClient.Tlog.GetLogInfo(infoParams)
	if err != nil {
		return err
	}
	logInfo := result.GetPayload()

	sth := util.SignedCheckpoint{}
	if err := sth.UnmarshalText([]byte(*logInfo.SignedTreeHead)); err != nil {
		return err
	}

	// Verify rekor's signature over the checkpoint.
	verifier, err := signature.LoadVerifier(pub, crypto.SHA256)
	if err != nil {
		return err
	}
	if !sth.Verify(verifier) {
		return errors.New("signature on tree head did not verify")
	}

	rootHash, err := hex.DecodeString(*proof.RootHash)
	if err != nil {
		return errors.New("error decoding root hash in inclusion proof")
	}
	if *proof.TreeSize == int64(sth.Size) {
		// Same tree size: root hashes must match exactly.
		if !bytes.Equal(rootHash, sth.Hash) {
			return errors.New("root hash returned from server does not match inclusion proof hash")
		}
	} else if *proof.TreeSize < int64(sth.Size) {
		// Proof predates the current STH: verify a consistency proof
		// between the two tree sizes.
		consistencyParams := tlog.NewGetLogProofParamsWithContext(ctx)
		consistencyParams.FirstSize = proof.TreeSize // Root hash at the time the proof was returned
		consistencyParams.LastSize = int64(sth.Size) // Root hash verified with rekor pubkey
		consistencyProof, err := rekorClient.Tlog.GetLogProof(consistencyParams)
		if err != nil {
			return err
		}
		var hashes [][]byte
		for _, h := range consistencyProof.Payload.Hashes {
			b, err := hex.DecodeString(h)
			if err != nil {
				return errors.New("error decoding consistency proof hashes")
			}
			hashes = append(hashes, b)
		}
		if err := merkleproof.VerifyConsistency(rfc6962.DefaultHasher,
			uint64(*proof.TreeSize), sth.Size, hashes, rootHash, sth.Hash); err != nil {
			return err
		}
	} else if *proof.TreeSize > int64(sth.Size) {
		// A proof claiming a tree larger than the verified STH is invalid.
		return errors.New("inclusion proof returned a tree size larger than the verified tree size")
	}
	return nil
}
// verifyTlogEntryByUUID fetches the rekor log entry for the given entry
// UUID (which may carry a tree-ID prefix) and verifies its inclusion proof
// and signed entry timestamp via verifyTlogEntry.
func verifyTlogEntryByUUID(ctx context.Context, rekorClient *client.Rekor, entryUUID string) (*models.LogEntryAnon, error) {
	params := entries.NewGetLogEntryByUUIDParamsWithContext(ctx)
	params.EntryUUID = entryUUID

	lep, err := rekorClient.Entries.GetLogEntryByUUID(params)
	if err != nil {
		return nil, err
	}
	// Exactly one entry is expected for a single UUID lookup.
	if len(lep.Payload) != 1 {
		return nil, errors.New("UUID value can not be extracted")
	}

	// Strip any tree-ID prefix to obtain the bare UUID, which the response
	// payload is keyed by (enforced below).
	uuid, err := sharding.GetUUIDFromIDString(params.EntryUUID)
	if err != nil {
		return nil, err
	}
	var e models.LogEntryAnon
	for k, entry := range lep.Payload {
		if k != uuid {
			return nil, errors.New("expected matching UUID")
		}
		e = entry
	}
	return verifyTlogEntry(ctx, rekorClient, uuid, e)
}
// verifyTlogEntry verifies a rekor log entry: the root hash of its
// inclusion proof is checked against the signed tree head, the entry's
// inclusion in the merkle tree is verified, and rekor's signature over the
// signed entry timestamp (SET) is checked. The entry is returned on
// success; the SET verification error (if any) is returned alongside it.
func verifyTlogEntry(ctx context.Context, rekorClient *client.Rekor, uuid string, e models.LogEntryAnon) (*models.LogEntryAnon, error) {
	if e.Verification == nil || e.Verification.InclusionProof == nil {
		return nil, errors.New("inclusion proof not provided")
	}

	// Decode the hex-encoded proof hashes, root hash, and leaf hash (UUID).
	var hashes [][]byte
	for _, h := range e.Verification.InclusionProof.Hashes {
		hb, err := hex.DecodeString(h)
		if err != nil {
			return nil, errors.New("error decoding inclusion proof hashes")
		}
		hashes = append(hashes, hb)
	}
	rootHash, err := hex.DecodeString(*e.Verification.InclusionProof.RootHash)
	if err != nil {
		return nil, errors.New("error decoding hex encoded root hash")
	}
	leafHash, err := hex.DecodeString(uuid)
	if err != nil {
		return nil, errors.New("error decoding hex encoded leaf hash")
	}

	// Verify the root hash against the current Signed Entry Tree Head,
	// accepting any of the trusted rekor public keys.
	pubs, err := cosign.GetRekorPubs(ctx, nil)
	if err != nil {
		return nil, fmt.Errorf("%w: %s", err, "unable to fetch Rekor public keys from TUF repository")
	}
	var entryVerError error
	for _, pubKey := range pubs {
		// Verify inclusion against the signed tree head.
		entryVerError = verifyRootHash(ctx, rekorClient, e.Verification.InclusionProof, pubKey.PubKey)
		if entryVerError == nil {
			break
		}
	}
	if entryVerError != nil {
		// BUGFIX: wrap the actual verification failure. The previous code
		// wrapped `err`, which is always nil here (GetRekorPubs succeeded),
		// producing a useless error chain.
		return nil, fmt.Errorf("%w: %s", entryVerError, "error verifying root hash")
	}

	// Verify the entry's inclusion in the merkle tree.
	if err := merkleproof.VerifyInclusion(rfc6962.DefaultHasher,
		uint64(*e.Verification.InclusionProof.LogIndex),
		uint64(*e.Verification.InclusionProof.TreeSize), leafHash, hashes, rootHash); err != nil {
		return nil, fmt.Errorf("%w: %s", err, "verifying inclusion proof")
	}

	// Verify rekor's signature over the SET.
	payload := bundle.RekorPayload{
		Body:           e.Body,
		IntegratedTime: *e.IntegratedTime,
		LogIndex:       *e.LogIndex,
		LogID:          *e.LogID,
	}
	var setVerError error
	for _, pubKey := range pubs {
		setVerError = cosign.VerifySET(payload, e.Verification.SignedEntryTimestamp, pubKey.PubKey)
		// Return once the SET is verified successfully.
		if setVerError == nil {
			break
		}
	}
	return &e, setVerError
}
// extractCert extracts the signing certificate from the body of a rekor log
// entry. Only intoto v0.0.1 entries are supported, and the entry's public
// key field must contain exactly one PEM-encoded certificate.
func extractCert(e *models.LogEntryAnon) (*x509.Certificate, error) {
	// The entry body is a base64-encoded JSON proposed entry.
	b, err := base64.StdEncoding.DecodeString(e.Body.(string))
	if err != nil {
		return nil, err
	}
	pe, err := models.UnmarshalProposedEntry(bytes.NewReader(b), runtime.JSONConsumer())
	if err != nil {
		return nil, err
	}
	eimpl, err := types.NewEntry(pe)
	if err != nil {
		return nil, err
	}

	// Only intoto v0.0.1 entries carry the signing certificate we need.
	var publicKeyB64 []byte
	switch e := eimpl.(type) {
	case *intotod.V001Entry:
		publicKeyB64, err = e.IntotoObj.PublicKey.MarshalText()
		if err != nil {
			return nil, err
		}
	default:
		return nil, errors.New("unexpected tlog entry type")
	}

	// The public key field is itself base64-encoded PEM.
	publicKey, err := base64.StdEncoding.DecodeString(string(publicKeyB64))
	if err != nil {
		return nil, err
	}

	certs, err := cryptoutils.UnmarshalCertificatesFromPEM(publicKey)
	if err != nil {
		return nil, err
	}
	if len(certs) != 1 {
		return nil, errors.New("unexpected number of cert pem tlog entry")
	}
	return certs[0], err
}
// VerifyProvenanceSignature returns the verified DSSE envelope containing the provenance
// and the signing certificate given the provenance and artifact hash.
func VerifyProvenanceSignature(ctx context.Context, rClient *client.Rekor, provenance []byte, artifactHash string) (*dsselib.Envelope, *x509.Certificate, error) {
@@ -425,167 +83,41 @@ func VerifyProvenanceSignature(ctx context.Context, rClient *client.Rekor, prove
return env, cert, nil
}
// FindSigningCertificate finds and verifies a matching signing certificate from a list of Rekor entry UUIDs.
func FindSigningCertificate(ctx context.Context, uuids []string, dssePayload dsselib.Envelope, rClient *client.Rekor) (*x509.Certificate, error) {
attBytes, err := cjson.MarshalCanonical(dssePayload)
func VerifyProvenance(env *dsselib.Envelope, opts *ProvenanceOpts) error {
prov, err := provenanceFromEnv(env)
if err != nil {
return nil, err
}
// Iterate through each matching UUID and perform:
// * Verify TLOG entry (inclusion and signed entry timestamp against Rekor pubkey).
// * Verify the signing certificate against the Fulcio root CA.
// * Verify dsse envelope signature against signing certificate.
// * Check signature expiration against IntegratedTime in entry.
// * If all succeed, return the signing certificate.
for _, uuid := range uuids {
entry, err := verifyTlogEntryByUUID(ctx, rClient, uuid)
if err != nil {
continue
}
cert, err := extractCert(entry)
if err != nil {
continue
}
roots, err := fulcio.GetRoots()
if err != nil {
continue
}
co := &cosign.CheckOpts{
RootCerts: roots,
CertOidcIssuer: certOidcIssuer,
}
verifier, err := cosign.ValidateAndUnpackCert(cert, co)
if err != nil {
continue
}
verifier = dsse.WrapVerifier(verifier)
if err := verifier.VerifySignature(bytes.NewReader(attBytes), bytes.NewReader(attBytes)); err != nil {
continue
}
it := time.Unix(*entry.IntegratedTime, 0)
if err := cosign.CheckExpiry(cert, it); err != nil {
continue
}
uuid, err := cosign.ComputeLeafHash(entry)
if err != nil {
fmt.Fprintf(os.Stderr, "Error computing leaf hash for tlog entry at index: %d\n", *entry.LogIndex)
continue
}
// success!
url := fmt.Sprintf("%v/%v/%v", defaultRekorAddr, "api/v1/log/entries", hex.EncodeToString(uuid))
fmt.Fprintf(os.Stderr, "Verified signature against tlog entry index %d at URL: %s\n", *entry.LogIndex, url)
return cert, nil
}
return nil, ErrorNoValidRekorEntries
}
func getExtension(cert *x509.Certificate, oid string) string {
for _, ext := range cert.Extensions {
if strings.Contains(ext.Id.String(), oid) {
return string(ext.Value)
}
}
return ""
}
// WorkflowIdentity collects the identity claims extracted from the Fulcio
// signing certificate: the custom x509 extensions plus the SAN URI path.
// NOTE(review): the field name "JobWobWorkflowRef" has a typo ("Wob" for
// "Job"); it is kept because callers and the JSON tag depend on it.
type WorkflowIdentity struct {
	// The caller repository, as {org}/{repository} (not fully qualified).
	CallerRepository string `json:"caller"`
	// The commit SHA where the workflow was triggered.
	CallerHash string `json:"commit"`
	// Current workflow (reusable workflow) ref, e.g. /org/repo/path/to/workflow@ref.
	JobWobWorkflowRef string `json:"job_workflow_ref"`
	// The event that triggered the workflow (e.g. "workflow_dispatch").
	Trigger string `json:"trigger"`
	// The OIDC token issuer; compared against certOidcIssuer during verification.
	Issuer string `json:"issuer"`
}
// GetWorkflowInfoFromCertificate gets the workflow identity from the Fulcio
// authenticated content: the custom x509 extensions carry the caller
// repository, issuer, trigger and commit hash, and the first SAN URI path
// carries the reusable workflow ref.
func GetWorkflowInfoFromCertificate(cert *x509.Certificate) (*WorkflowIdentity, error) {
	// Without a SAN URI there is no workflow ref to extract.
	if len(cert.URIs) == 0 {
		return nil, errors.New("missing URI information from certificate")
	}

	id := WorkflowIdentity{
		CallerRepository:  getExtension(cert, "1.3.6.1.4.1.57264.1.5"),
		Issuer:            getExtension(cert, "1.3.6.1.4.1.57264.1.1"),
		Trigger:           getExtension(cert, "1.3.6.1.4.1.57264.1.2"),
		CallerHash:        getExtension(cert, "1.3.6.1.4.1.57264.1.3"),
		JobWobWorkflowRef: cert.URIs[0].Path,
	}
	return &id, nil
}
// VerifyWorkflowIdentity verifies the signing certificate information
func VerifyWorkflowIdentity(id *WorkflowIdentity, source string) error {
// cert URI path is /org/repo/path/to/workflow@ref
workflowPath := strings.SplitN(id.JobWobWorkflowRef, "@", 2)
if len(workflowPath) < 2 {
return fmt.Errorf("%w: %s", errorMalformedWorkflowURI, id.JobWobWorkflowRef)
}
// Trusted workflow verification by name.
reusableWorkflowName := strings.Trim(workflowPath[0], "/")
if _, ok := trustedReusableWorkflows[reusableWorkflowName]; !ok {
return fmt.Errorf("%w: %s", ErrorUntrustedReusableWorkflow, reusableWorkflowName)
}
// Verify the ref.
if err := verifyTrustedBuilderRef(id, strings.Trim(workflowPath[1], "/")); err != nil {
return err
}
// Issue verification.
if !strings.EqualFold(id.Issuer, certOidcIssuer) {
return fmt.Errorf("untrusted token issuer: %s", id.Issuer)
// Verify subject digest.
if err := verifySha256Digest(prov, opts.ExpectedDigest); err != nil {
return err
}
// The caller repository in the x509 extension is not fully qualified. It only contains
// {org}/{repository}.
expectedSource := strings.TrimPrefix(source, "github.com/")
if !strings.EqualFold(id.CallerRepository, expectedSource) {
return fmt.Errorf("%w: expected source '%s', got '%s'", ErrorMismatchRepository,
expectedSource, id.CallerRepository)
// Verify the branch.
if err := VerifyBranch(prov, opts.ExpectedBranch); err != nil {
return err
}
// Verify the tag.
if opts.ExpectedTag != nil {
if err := VerifyTag(prov, *opts.ExpectedTag); err != nil {
return err
}
}
// Verify the versioned tag.
if opts.ExpectedVersionedTag != nil {
if err := VerifyVersionedTag(prov, *opts.ExpectedVersionedTag); err != nil {
return err
}
}
return nil
}
// Only allow `@refs/heads/main` for the builder and the e2e tests that need to work at HEAD.
// This lets us use the pre-build builder binary generated during release (release happen at main).
// For other projects, we only allow semantic versions that map to a release.
func verifyTrustedBuilderRef(id *WorkflowIdentity, ref string) error {
	// The builder repo and the e2e test repo may run the builder at HEAD.
	fromTrustedRepo := id.CallerRepository == trustedBuilderRepository ||
		id.CallerRepository == e2eTestRepository
	if fromTrustedRepo && strings.EqualFold("refs/heads/main", ref) {
		return nil
	}

	// Everyone else must pin the builder to a release tag.
	if !strings.HasPrefix(ref, "refs/tags/") {
		return fmt.Errorf("%w: %s: not of the form 'refs/tags/name'", errorInvalidRef, ref)
	}

	// The tag must be a full semantic version vX.Y.Z with no prerelease
	// and no build metadata.
	pin := strings.TrimPrefix(ref, "refs/tags/")
	isFullSemver := semver.IsValid(pin) &&
		len(strings.Split(pin, ".")) == 3 &&
		semver.Prerelease(pin) == "" &&
		semver.Build(pin) == ""
	if !isFullSemver {
		return fmt.Errorf("%w: %s: not of the form vX.Y.Z", errorInvalidRef, pin)
	}
	return nil
}
// VerifyProvenance verifies that the provenance in the DSSE envelope
// attests to an artifact with the expected sha256 digest, by delegating
// to verifySha256Digest.
func VerifyProvenance(env *dsselib.Envelope, expectedHash string) error {
	return verifySha256Digest(env, expectedHash)
}
func VerifyBranch(env *dsselib.Envelope, expectedBranch string) error {
branch, err := getBranch(env)
func VerifyBranch(prov *intoto.ProvenanceStatement, expectedBranch string) error {
branch, err := getBranch(prov)
if err != nil {
return err
}
@@ -598,8 +130,8 @@ func VerifyBranch(env *dsselib.Envelope, expectedBranch string) error {
return nil
}
func VerifyTag(env *dsselib.Envelope, expectedTag string) error {
tag, err := getTag(env)
func VerifyTag(prov *intoto.ProvenanceStatement, expectedTag string) error {
tag, err := getTag(prov)
if err != nil {
return err
}
@@ -612,7 +144,7 @@ func VerifyTag(env *dsselib.Envelope, expectedTag string) error {
return nil
}
func VerifyVersionedTag(env *dsselib.Envelope, expectedTag string) error {
func VerifyVersionedTag(prov *intoto.ProvenanceStatement, expectedTag string) error {
// Validate and canonicalize the provenance tag.
if !semver.IsValid(expectedTag) {
return fmt.Errorf("%s: %w", expectedTag, ErrorInvalidSemver)
@@ -622,7 +154,7 @@ func VerifyVersionedTag(env *dsselib.Envelope, expectedTag string) error {
// Note: prerelease is validated as part of patch validation
// and must be equal. Build is discarded as per https://semver.org/:
// "Build metadata MUST be ignored when determining version precedence",
tag, err := getTag(env)
tag, err := getTag(prov)
if err != nil {
return err
}
@@ -796,17 +328,7 @@ func getBranchForTag(environment map[string]interface{}) (string, error) {
}
// Get tag from the provenance invocation parameters.
func getTag(env *dsselib.Envelope) (string, error) {
pyld, err := base64.StdEncoding.DecodeString(env.Payload)
if err != nil {
return "", fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "decoding payload")
}
var prov intoto.ProvenanceStatement
if err := json.Unmarshal(pyld, &prov); err != nil {
return "", fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "unmarshalling json")
}
func getTag(prov *intoto.ProvenanceStatement) (string, error) {
environment, ok := prov.Predicate.Invocation.Environment.(map[string]interface{})
if !ok {
return "", fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "parameters type")
@@ -829,17 +351,7 @@ func getTag(env *dsselib.Envelope) (string, error) {
}
// Get branch from the provenance invocation parameters.
func getBranch(env *dsselib.Envelope) (string, error) {
pyld, err := base64.StdEncoding.DecodeString(env.Payload)
if err != nil {
return "", fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "decoding payload")
}
var prov intoto.ProvenanceStatement
if err := json.Unmarshal(pyld, &prov); err != nil {
return "", fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "unmarshalling json")
}
func getBranch(prov *intoto.ProvenanceStatement) (string, error) {
environment, ok := prov.Predicate.Invocation.Environment.(map[string]interface{})
if !ok {
return "", fmt.Errorf("%w: %s", ErrorInvalidDssePayload, "parameters type")

View File

@@ -1,103 +1,23 @@
package pkg
import (
"encoding/json"
"errors"
"fmt"
"os"
"testing"
"github.com/go-openapi/runtime"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
dsselib "github.com/secure-systems-lab/go-securesystemslib/dsse"
"github.com/sigstore/rekor/pkg/generated/client"
"github.com/sigstore/rekor/pkg/generated/client/index"
intoto "github.com/in-toto/in-toto-golang/in_toto"
)
// searchResult bundles the canned Rekor search-index response and error
// that the mock client should return.
type searchResult struct {
	resp *index.SearchIndexOK
	err  error
}
// envelopeFromBytes decodes a JSON-serialized DSSE envelope from raw bytes.
// The (possibly partially populated) envelope is returned alongside any
// unmarshalling error, matching json.Unmarshal's behavior.
func envelopeFromBytes(payload []byte) (*dsselib.Envelope, error) {
	decoded := &dsselib.Envelope{}
	err := json.Unmarshal(payload, decoded)
	return decoded, err
}
// MockIndexClient is a test double for the Rekor index client that always
// returns the pre-configured searchResult.
type MockIndexClient struct {
	result searchResult
}

// SearchIndex ignores its parameters and returns the canned response/error.
func (m *MockIndexClient) SearchIndex(params *index.SearchIndexParams,
	opts ...index.ClientOption) (*index.SearchIndexOK, error) {
	return m.result.resp, m.result.err
}

// SetTransport is a no-op, present only to satisfy the client interface.
func (m *MockIndexClient) SetTransport(transport runtime.ClientTransport) {
}
func errCmp(e1, e2 error) bool {
return errors.Is(e1, e2) || errors.Is(e2, e1)
}
func Test_GetRekorEntries(t *testing.T) {
t.Parallel()
tests := []struct {
name string
artifactHash string
res searchResult
expected error
}{
{
name: "rekor search result error",
artifactHash: "0ae7e4fa71686538440012ee36a2634dbaa19df2dd16a466f52411fb348bbc4e",
res: searchResult{
err: index.NewSearchIndexDefault(500),
},
expected: ErrorRekorSearch,
},
{
name: "no rekor entries found",
artifactHash: "0ae7e4fa71686538440012ee36a2634dbaa19df2dd16a466f52411fb348bbc4e",
res: searchResult{
err: nil,
resp: &index.SearchIndexOK{
Payload: []string{},
},
},
expected: ErrorRekorSearch,
},
{
name: "valid rekor entries found",
artifactHash: "0ae7e4fa71686538440012ee36a2634dbaa19df2dd16a466f52411fb348bbc4e",
res: searchResult{
err: nil,
resp: &index.SearchIndexOK{
Payload: []string{"39d5109436c43dad92897d50f3b271aa456382875a922b28fedef9038b8f683a"},
},
},
expected: nil,
},
}
for _, tt := range tests {
tt := tt // Re-initializing variable so it is not changed while executing the closure below
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
var mClient client.Rekor
mClient.Index = &MockIndexClient{result: tt.res}
_, err := GetRekorEntries(&mClient, tt.artifactHash)
if !errCmp(err, tt.expected) {
t.Errorf(cmp.Diff(err, tt.expected))
}
})
// provenanceFromBytes parses an in-toto provenance statement from raw
// DSSE envelope bytes: it first decodes the envelope, then converts it
// with provenanceFromEnv.
func provenanceFromBytes(payload []byte) (*intoto.ProvenanceStatement, error) {
	env, err := EnvelopeFromBytes(payload)
	if err != nil {
		return nil, err
	}
	return provenanceFromEnv(env)
}
func Test_VerifyProvenance(t *testing.T) {
func Test_VerifySha256Subject(t *testing.T) {
t.Parallel()
tests := []struct {
name string
@@ -163,12 +83,12 @@ func Test_VerifyProvenance(t *testing.T) {
if err != nil {
panic(fmt.Errorf("os.ReadFile: %w", err))
}
env, err := envelopeFromBytes(content)
prov, err := provenanceFromBytes(content)
if err != nil {
panic(fmt.Errorf("envelopeFromBytes: %w", err))
panic(fmt.Errorf("provenanceFromBytes: %w", err))
}
err = VerifyProvenance(env, tt.artifactHash)
err = verifySha256Digest(prov, tt.artifactHash)
if !errCmp(err, tt.expected) {
t.Errorf(cmp.Diff(err, tt.expected))
}
@@ -176,178 +96,6 @@ func Test_VerifyProvenance(t *testing.T) {
}
}
func Test_VerifyWorkflowIdentity(t *testing.T) {
t.Parallel()
tests := []struct {
name string
workflow *WorkflowIdentity
source string
err error
}{
{
name: "invalid job workflow ref",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: "random/workflow/ref",
Trigger: "workflow_dispatch",
Issuer: "https://token.actions.githubusercontent.com",
},
source: "asraa/slsa-on-github-test",
err: errorMalformedWorkflowURI,
},
{
name: "untrusted job workflow ref",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: "/malicious/slsa-go/.github/workflows/builder.yml@refs/heads/main",
Trigger: "workflow_dispatch",
Issuer: "https://token.actions.githubusercontent.com",
},
source: "asraa/slsa-on-github-test",
err: ErrorUntrustedReusableWorkflow,
},
{
name: "untrusted job workflow ref for general repos",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
Trigger: "workflow_dispatch",
Issuer: "https://bad.issuer.com",
},
source: "asraa/slsa-on-github-test",
err: errorInvalidRef,
},
{
name: "valid main ref for trusted builder",
workflow: &WorkflowIdentity{
CallerRepository: trustedBuilderRepository,
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
Trigger: "workflow_dispatch",
Issuer: "https://token.actions.githubusercontent.com",
},
source: trustedBuilderRepository,
},
{
name: "valid main ref for e2e test",
workflow: &WorkflowIdentity{
CallerRepository: e2eTestRepository,
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: e2eTestRepository,
},
{
name: "unexpected source for e2e test",
workflow: &WorkflowIdentity{
CallerRepository: e2eTestRepository,
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "malicious/source",
err: ErrorMismatchRepository,
},
{
name: "valid main ref for builder",
workflow: &WorkflowIdentity{
CallerRepository: trustedBuilderRepository,
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/heads/main",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "malicious/source",
err: ErrorMismatchRepository,
},
{
name: "unexpected source",
workflow: &WorkflowIdentity{
CallerRepository: "malicious/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "asraa/slsa-on-github-test",
err: ErrorMismatchRepository,
},
{
name: "valid workflow identity",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "asraa/slsa-on-github-test",
},
{
name: "invalid workflow identity with prerelease",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3-alpha",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "asraa/slsa-on-github-test",
err: errorInvalidRef,
},
{
name: "invalid workflow identity with build",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3+123",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "asraa/slsa-on-github-test",
err: errorInvalidRef,
},
{
name: "invalid workflow identity with metadata",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3-alpha+123",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "asraa/slsa-on-github-test",
err: errorInvalidRef,
},
{
name: "valid workflow identity with fully qualified source",
workflow: &WorkflowIdentity{
CallerRepository: "asraa/slsa-on-github-test",
CallerHash: "0dfcd24824432c4ce587f79c918eef8fc2c44d7b",
JobWobWorkflowRef: trustedBuilderRepository + "/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.3",
Trigger: "workflow_dispatch",
Issuer: certOidcIssuer,
},
source: "github.com/asraa/slsa-on-github-test",
},
}
for _, tt := range tests {
tt := tt // Re-initializing variable so it is not changed while executing the closure below
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
err := VerifyWorkflowIdentity(tt.workflow, tt.source)
if !errCmp(err, tt.err) {
t.Errorf(cmp.Diff(err, tt.err, cmpopts.EquateErrors()))
}
})
}
}
func Test_VerifyBranch(t *testing.T) {
t.Parallel()
tests := []struct {
@@ -391,12 +139,12 @@ func Test_VerifyBranch(t *testing.T) {
if err != nil {
panic(fmt.Errorf("os.ReadFile: %w", err))
}
env, err := envelopeFromBytes(content)
prov, err := provenanceFromBytes(content)
if err != nil {
panic(fmt.Errorf("envelopeFromBytes: %w", err))
panic(fmt.Errorf("provenanceFromBytes: %w", err))
}
err = VerifyBranch(env, tt.branch)
err = VerifyBranch(prov, tt.branch)
if !errCmp(err, tt.expected) {
t.Errorf(cmp.Diff(err, tt.expected))
}
@@ -442,12 +190,12 @@ func Test_VerifyTag(t *testing.T) {
if err != nil {
panic(fmt.Errorf("os.ReadFile: %w", err))
}
env, err := envelopeFromBytes(content)
prov, err := provenanceFromBytes(content)
if err != nil {
panic(fmt.Errorf("envelopeFromBytes: %w", err))
panic(fmt.Errorf("provenanceFromBytes: %w", err))
}
err = VerifyTag(env, tt.tag)
err = VerifyTag(prov, tt.tag)
if !errCmp(err, tt.expected) {
t.Errorf(cmp.Diff(err, tt.expected))
}
@@ -455,156 +203,6 @@ func Test_VerifyTag(t *testing.T) {
}
}
func Test_verifyTrustedBuilderRef(t *testing.T) {
t.Parallel()
tests := []struct {
name string
callerRepo string
builderRef string
expected error
}{
// Trusted repo.
{
name: "main allowed for builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/heads/main",
},
{
name: "full semver for builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/tags/v1.2.3",
},
{
name: "no patch semver for other builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/tags/v1.2",
expected: errorInvalidRef,
},
{
name: "no min semver for builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/tags/v1",
expected: errorInvalidRef,
},
{
name: "full semver with prerelease for builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/tags/v1.2.3-alpha",
expected: errorInvalidRef,
},
{
name: "full semver with build for builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/tags/v1.2.3+123",
expected: errorInvalidRef,
},
{
name: "full semver with build/prerelease for builder",
callerRepo: trustedBuilderRepository,
builderRef: "refs/tags/v1.2.3-alpha+123",
expected: errorInvalidRef,
},
// E2e tests repo.
{
name: "main allowed for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/heads/main",
},
{
name: "full semver for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/tags/v1.2.3",
},
{
name: "no patch semver for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/tags/v1.2",
expected: errorInvalidRef,
},
{
name: "no min semver for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/tags/v1",
expected: errorInvalidRef,
},
{
name: "full semver with prerelease for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/tags/v1.2.3-alpha",
expected: errorInvalidRef,
},
{
name: "full semver with build for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/tags/v1.2.3+123",
expected: errorInvalidRef,
},
{
name: "full semver with build/prerelease for test repo",
callerRepo: e2eTestRepository,
builderRef: "refs/tags/v1.2.3-alpha+123",
expected: errorInvalidRef,
},
// Other repos.
{
name: "main not allowed for other repos",
callerRepo: "some/repo",
builderRef: "refs/heads/main",
expected: errorInvalidRef,
},
{
name: "full semver for other repos",
callerRepo: "some/repo",
builderRef: "refs/tags/v1.2.3",
},
{
name: "no patch semver for other repos",
callerRepo: "some/repo",
builderRef: "refs/tags/v1.2",
expected: errorInvalidRef,
},
{
name: "no min semver for other repos",
callerRepo: "some/repo",
builderRef: "refs/tags/v1",
expected: errorInvalidRef,
},
{
name: "full semver with prerelease for other repos",
callerRepo: "some/repo",
builderRef: "refs/tags/v1.2.3-alpha",
expected: errorInvalidRef,
},
{
name: "full semver with build for other repos",
callerRepo: "some/repo",
builderRef: "refs/tags/v1.2.3+123",
expected: errorInvalidRef,
},
{
name: "full semver with build/prerelease for other repos",
callerRepo: "some/repo",
builderRef: "refs/tags/v1.2.3-alpha+123",
expected: errorInvalidRef,
},
}
for _, tt := range tests {
tt := tt // Re-initializing variable so it is not changed while executing the closure below
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
wf := WorkflowIdentity{
CallerRepository: tt.callerRepo,
}
err := verifyTrustedBuilderRef(&wf, tt.builderRef)
if !errCmp(err, tt.expected) {
t.Errorf(cmp.Diff(err, tt.expected, cmpopts.EquateErrors()))
}
})
}
}
func Test_VerifyVersionedTag(t *testing.T) {
t.Parallel()
tests := []struct {
@@ -922,12 +520,12 @@ func Test_VerifyVersionedTag(t *testing.T) {
if err != nil {
panic(fmt.Errorf("os.ReadFile: %w", err))
}
env, err := envelopeFromBytes(content)
prov, err := provenanceFromBytes(content)
if err != nil {
panic(fmt.Errorf("envelopeFromBytes: %w", err))
panic(fmt.Errorf("provenanceFromBytes: %w", err))
}
err = VerifyVersionedTag(env, tt.tag)
err = VerifyVersionedTag(prov, tt.tag)
if !errCmp(err, tt.expected) {
t.Errorf(cmp.Diff(err, tt.expected))
}

387
pkg/rekor.go Normal file
View File

@@ -0,0 +1,387 @@
package pkg
import (
"bytes"
"context"
"crypto"
"crypto/ecdsa"
"crypto/x509"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"os"
"time"
cjson "github.com/docker/go/canonical/json"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/google/trillian/merkle/rfc6962"
dsselib "github.com/secure-systems-lab/go-securesystemslib/dsse"
"github.com/sigstore/cosign/cmd/cosign/cli/fulcio"
"github.com/sigstore/cosign/pkg/cosign"
"github.com/sigstore/cosign/pkg/cosign/bundle"
"github.com/sigstore/rekor/pkg/generated/client"
"github.com/sigstore/rekor/pkg/generated/client/entries"
"github.com/sigstore/rekor/pkg/generated/client/index"
"github.com/sigstore/rekor/pkg/generated/client/tlog"
"github.com/sigstore/rekor/pkg/generated/models"
"github.com/sigstore/rekor/pkg/sharding"
"github.com/sigstore/rekor/pkg/types"
intotod "github.com/sigstore/rekor/pkg/types/intoto/v0.0.1"
"github.com/sigstore/rekor/pkg/util"
"github.com/sigstore/sigstore/pkg/cryptoutils"
"github.com/sigstore/sigstore/pkg/signature"
"github.com/sigstore/sigstore/pkg/signature/dsse"
"github.com/slsa-framework/slsa-github-generator/signing/envelope"
"github.com/transparency-dev/merkle/proof"
)
const (
defaultRekorAddr = "https://rekor.sigstore.dev"
)
// verifyRootHash checks the root hash of an inclusion proof against the
// Rekor log's current Signed Tree Head (STH). The STH signature is first
// verified with the given Rekor public key. If the proof's tree size is
// smaller than the STH's, a consistency proof between the two tree sizes
// is fetched from the log and verified instead of a direct comparison.
func verifyRootHash(ctx context.Context, rekorClient *client.Rekor, eproof *models.InclusionProof, pub *ecdsa.PublicKey) error {
	// Fetch the current log info, which carries the signed tree head.
	infoParams := tlog.NewGetLogInfoParamsWithContext(ctx)
	result, err := rekorClient.Tlog.GetLogInfo(infoParams)
	if err != nil {
		return err
	}
	logInfo := result.GetPayload()
	sth := util.SignedCheckpoint{}
	if err := sth.UnmarshalText([]byte(*logInfo.SignedTreeHead)); err != nil {
		return err
	}
	// Verify the checkpoint signature before trusting its size/hash.
	verifier, err := signature.LoadVerifier(pub, crypto.SHA256)
	if err != nil {
		return err
	}
	if !sth.Verify(verifier) {
		return errors.New("signature on tree head did not verify")
	}
	rootHash, err := hex.DecodeString(*eproof.RootHash)
	if err != nil {
		return errors.New("error decoding root hash in inclusion proof")
	}
	if *eproof.TreeSize == int64(sth.Size) {
		// Same tree size: the root hashes must match exactly.
		if !bytes.Equal(rootHash, sth.Hash) {
			return errors.New("root hash returned from server does not match inclusion proof hash")
		}
	} else if *eproof.TreeSize < int64(sth.Size) {
		// Proof predates the current STH: verify log consistency between
		// the two tree sizes.
		consistencyParams := tlog.NewGetLogProofParamsWithContext(ctx)
		consistencyParams.FirstSize = eproof.TreeSize // Root hash at the time the proof was returned
		consistencyParams.LastSize = int64(sth.Size)  // Root hash verified with rekor pubkey
		consistencyProof, err := rekorClient.Tlog.GetLogProof(consistencyParams)
		if err != nil {
			return err
		}
		var hashes [][]byte
		for _, h := range consistencyProof.Payload.Hashes {
			b, err := hex.DecodeString(h)
			if err != nil {
				return errors.New("error decoding consistency proof hashes")
			}
			hashes = append(hashes, b)
		}
		if err := proof.VerifyConsistency(rfc6962.DefaultHasher,
			uint64(*eproof.TreeSize), sth.Size, hashes, rootHash, sth.Hash); err != nil {
			return err
		}
	} else if *eproof.TreeSize > int64(sth.Size) {
		// A proof claiming a larger tree than the verified STH is bogus.
		return errors.New("inclusion proof returned a tree size larger than the verified tree size")
	}
	return nil
}
// verifyTlogEntryByUUID fetches the transparency-log entry with the given
// UUID from Rekor and hands it to verifyTlogEntry for inclusion-proof and
// signed-entry-timestamp verification.
func verifyTlogEntryByUUID(ctx context.Context, rekorClient *client.Rekor, entryUUID string) (*models.LogEntryAnon, error) {
	getParams := entries.NewGetLogEntryByUUIDParamsWithContext(ctx)
	getParams.EntryUUID = entryUUID

	resp, err := rekorClient.Entries.GetLogEntryByUUID(getParams)
	if err != nil {
		return nil, err
	}
	if len(resp.Payload) != 1 {
		return nil, errors.New("UUID value can not be extracted")
	}

	// Strip any tree-ID sharding prefix to get the bare entry UUID.
	uuid, err := sharding.GetUUIDFromIDString(getParams.EntryUUID)
	if err != nil {
		return nil, err
	}

	// The response map is keyed by entry UUID; reject anything that does
	// not match the UUID we asked for.
	var anon models.LogEntryAnon
	for key, entry := range resp.Payload {
		if key != uuid {
			return nil, errors.New("expected matching UUID")
		}
		anon = entry
	}

	return verifyTlogEntry(ctx, rekorClient, uuid, anon)
}
// verifyTlogEntry verifies a Rekor log entry end to end: the inclusion
// proof's root hash against the signed tree head (trying each Rekor
// public key from TUF), the Merkle inclusion proof itself, and finally
// Rekor's signature over the Signed Entry Timestamp (SET).
// The uuid parameter is the hex-encoded leaf hash of the entry.
func verifyTlogEntry(ctx context.Context, rekorClient *client.Rekor, uuid string, e models.LogEntryAnon) (*models.LogEntryAnon, error) {
	if e.Verification == nil || e.Verification.InclusionProof == nil {
		return nil, errors.New("inclusion proof not provided")
	}

	// Decode the hex-encoded proof hashes, root hash, and leaf hash.
	var hashes [][]byte
	for _, h := range e.Verification.InclusionProof.Hashes {
		hb, err := hex.DecodeString(h)
		if err != nil {
			return nil, errors.New("error decoding inclusion proof hashes")
		}
		hashes = append(hashes, hb)
	}

	rootHash, err := hex.DecodeString(*e.Verification.InclusionProof.RootHash)
	if err != nil {
		return nil, errors.New("error decoding hex encoded root hash")
	}

	leafHash, err := hex.DecodeString(uuid)
	if err != nil {
		return nil, errors.New("error decoding hex encoded leaf hash")
	}

	// Verify the root hash against the current Signed Entry Tree Head
	pubs, err := cosign.GetRekorPubs(ctx, nil)
	if err != nil {
		return nil, fmt.Errorf("%w: %s", err, "unable to fetch Rekor public keys from TUF repository")
	}

	var entryVerError error
	for _, pubKey := range pubs {
		// Verify inclusion against the signed tree head
		entryVerError = verifyRootHash(ctx, rekorClient, e.Verification.InclusionProof, pubKey.PubKey)
		if entryVerError == nil {
			break
		}
	}
	if entryVerError != nil {
		// Bug fix: wrap the actual root-hash verification failure.
		// Previously this wrapped `err`, which is guaranteed nil here
		// (checked right after GetRekorPubs), dropping the real cause.
		return nil, fmt.Errorf("%w: %s", entryVerError, "error verifying root hash")
	}

	// Verify the entry's inclusion
	if err := proof.VerifyInclusion(rfc6962.DefaultHasher,
		uint64(*e.Verification.InclusionProof.LogIndex),
		uint64(*e.Verification.InclusionProof.TreeSize), leafHash, hashes, rootHash); err != nil {
		return nil, fmt.Errorf("%w: %s", err, "verifying inclusion proof")
	}

	// Verify rekor's signature over the SET.
	payload := bundle.RekorPayload{
		Body:           e.Body,
		IntegratedTime: *e.IntegratedTime,
		LogIndex:       *e.LogIndex,
		LogID:          *e.LogID,
	}

	var setVerError error
	for _, pubKey := range pubs {
		setVerError = cosign.VerifySET(payload, e.Verification.SignedEntryTimestamp, pubKey.PubKey)
		// Return once the SET is verified successfully.
		if setVerError == nil {
			break
		}
	}

	return &e, setVerError
}
// extractCert extracts the single signing certificate from an intoto
// v0.0.1 tlog entry's public-key field. It errors on any other entry
// type or if the PEM block does not contain exactly one certificate.
func extractCert(e *models.LogEntryAnon) (*x509.Certificate, error) {
	body, err := base64.StdEncoding.DecodeString(e.Body.(string))
	if err != nil {
		return nil, err
	}

	proposed, err := models.UnmarshalProposedEntry(bytes.NewReader(body), runtime.JSONConsumer())
	if err != nil {
		return nil, err
	}

	impl, err := types.NewEntry(proposed)
	if err != nil {
		return nil, err
	}

	// Only intoto v0.0.1 entries carry the certificate we are after.
	var publicKeyB64 []byte
	switch entry := impl.(type) {
	case *intotod.V001Entry:
		publicKeyB64, err = entry.IntotoObj.PublicKey.MarshalText()
		if err != nil {
			return nil, err
		}
	default:
		return nil, errors.New("unexpected tlog entry type")
	}

	pemBytes, err := base64.StdEncoding.DecodeString(string(publicKeyB64))
	if err != nil {
		return nil, err
	}

	certs, err := cryptoutils.UnmarshalCertificatesFromPEM(pemBytes)
	if err != nil {
		return nil, err
	}
	if len(certs) != 1 {
		return nil, errors.New("unexpected number of cert pem tlog entry")
	}
	return certs[0], nil
}
// intotoEntry builds a Rekor intoto v0.0.1 proposed entry from the raw
// DSSE provenance and the PEM-encoded signing certificate.
// It currently cannot fail; the error return is kept for API symmetry.
func intotoEntry(certPem []byte, provenance []byte) (*intotod.V001Entry, error) {
	cert := strfmt.Base64(certPem)
	return &intotod.V001Entry{
		IntotoObj: models.IntotoV001Schema{
			Content: &models.IntotoV001SchemaContent{
				Envelope: string(provenance),
			},
			PublicKey: &cert,
		},
	}, nil
}
// GetRekorEntries finds all entry UUIDs by the digest of the artifact binary.
func GetRekorEntries(rClient *client.Rekor, artifactHash string) ([]string, error) {
	// Use search index to find rekor entry UUIDs that match Subject Digest.
	query := &models.SearchIndex{Hash: "sha256:" + artifactHash}
	params := index.NewSearchIndexParams()
	params.Query = query

	resp, err := rClient.Index.SearchIndex(params)
	if err != nil {
		return nil, fmt.Errorf("%w: %s", ErrorRekorSearch, err.Error())
	}
	if len(resp.Payload) == 0 {
		return nil, fmt.Errorf("%w: no matching entries found", ErrorRekorSearch)
	}

	return resp.GetPayload(), nil
}
// GetRekorEntriesWithCert finds all entry UUIDs with the full intoto attestation.
// The attestation generated by the slsa-github-generator libraries contain a signing certificate.
// It queries Rekor with the reconstructed intoto entry, verifies every
// returned tlog entry, and returns the decoded DSSE envelope together
// with the (single) signing certificate embedded in the provenance.
func GetRekorEntriesWithCert(rClient *client.Rekor, provenance []byte) (*dsselib.Envelope, *x509.Certificate, error) {
	// Use intoto attestation to find rekor entry UUIDs.
	params := entries.NewSearchLogQueryParams()
	searchLogQuery := models.SearchLogQuery{}
	// The certificate is carried inside the DSSE envelope itself.
	certPem, err := envelope.GetCertFromEnvelope(provenance)
	if err != nil {
		return nil, nil, fmt.Errorf("error getting certificate from provenance: %w", err)
	}
	e, err := intotoEntry(certPem, provenance)
	if err != nil {
		return nil, nil, fmt.Errorf("error creating intoto entry: %w", err)
	}
	entry := models.Intoto{
		APIVersion: swag.String(e.APIVersion()),
		Spec:       e.IntotoObj,
	}
	entries := []models.ProposedEntry{&entry}
	searchLogQuery.SetEntries(entries)
	params.SetEntry(&searchLogQuery)
	resp, err := rClient.Entries.SearchLogQuery(params)
	if err != nil {
		return nil, nil, fmt.Errorf("%w: %s", ErrorRekorSearch, err.Error())
	}
	// Exactly one matching log entry is expected for this attestation.
	if len(resp.GetPayload()) != 1 {
		return nil, nil, fmt.Errorf("%w: %s", ErrorRekorSearch, "no matching rekor entries")
	}
	logEntry := resp.Payload[0]
	// Verify every (uuid, entry) pair before trusting the result.
	for uuid, e := range logEntry {
		if _, err := verifyTlogEntry(context.Background(), rClient, uuid, e); err != nil {
			return nil, nil, fmt.Errorf("error verifying tlog entry: %w", err)
		}
		url := fmt.Sprintf("%v/%v/%v", defaultRekorAddr, "api/v1/log/entries", uuid)
		fmt.Fprintf(os.Stderr, "Verified signature against tlog entry index %d at URL: %s\n", *e.LogIndex, url)
	}
	env, err := EnvelopeFromBytes(provenance)
	if err != nil {
		return nil, nil, err
	}
	certs, err := cryptoutils.UnmarshalCertificatesFromPEM(certPem)
	if err != nil {
		return nil, nil, err
	}
	if len(certs) != 1 {
		return nil, nil, fmt.Errorf("error unmarshaling certificate from pem")
	}
	return env, certs[0], nil
}
// FindSigningCertificate finds and verifies a matching signing certificate from a list of Rekor entry UUIDs.
// Each candidate UUID is tried in turn; any verification failure moves on
// to the next candidate, and the first certificate that passes every
// check is returned. ErrorNoValidRekorEntries is returned if none pass.
func FindSigningCertificate(ctx context.Context, uuids []string, dssePayload dsselib.Envelope, rClient *client.Rekor) (*x509.Certificate, error) {
	// Canonicalize the envelope so the signature check is byte-stable.
	attBytes, err := cjson.MarshalCanonical(dssePayload)
	if err != nil {
		return nil, err
	}
	// Iterate through each matching UUID and perform:
	// * Verify TLOG entry (inclusion and signed entry timestamp against Rekor pubkey).
	// * Verify the signing certificate against the Fulcio root CA.
	// * Verify dsse envelope signature against signing certificate.
	// * Check signature expiration against IntegratedTime in entry.
	// * If all succeed, return the signing certificate.
	for _, uuid := range uuids {
		entry, err := verifyTlogEntryByUUID(ctx, rClient, uuid)
		if err != nil {
			continue
		}
		cert, err := extractCert(entry)
		if err != nil {
			continue
		}
		roots, err := fulcio.GetRoots()
		if err != nil {
			continue
		}
		co := &cosign.CheckOpts{
			RootCerts:      roots,
			CertOidcIssuer: certOidcIssuer,
		}
		verifier, err := cosign.ValidateAndUnpackCert(cert, co)
		if err != nil {
			continue
		}
		verifier = dsse.WrapVerifier(verifier)
		if err := verifier.VerifySignature(bytes.NewReader(attBytes), bytes.NewReader(attBytes)); err != nil {
			continue
		}
		// The cert must have been valid at the time the entry was integrated.
		it := time.Unix(*entry.IntegratedTime, 0)
		if err := cosign.CheckExpiry(cert, it); err != nil {
			continue
		}
		// Note: this shadows the loop variable with the computed leaf
		// hash, which is what the entry URL below is built from.
		uuid, err := cosign.ComputeLeafHash(entry)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error computing leaf hash for tlog entry at index: %d\n", *entry.LogIndex)
			continue
		}
		// success!
		url := fmt.Sprintf("%v/%v/%v", defaultRekorAddr, "api/v1/log/entries", hex.EncodeToString(uuid))
		fmt.Fprintf(os.Stderr, "Verified signature against tlog entry index %d at URL: %s\n", *entry.LogIndex, url)
		return cert, nil
	}
	return nil, ErrorNoValidRekorEntries
}

87
pkg/rekor_test.go Normal file
View File

@@ -0,0 +1,87 @@
package pkg
import (
"errors"
"testing"
"github.com/go-openapi/runtime"
"github.com/google/go-cmp/cmp"
"github.com/sigstore/rekor/pkg/generated/client"
"github.com/sigstore/rekor/pkg/generated/client/index"
)
// searchResult bundles the canned response and error that a mocked Rekor
// index search returns, mirroring the (resp, err) pair of SearchIndex.
type searchResult struct {
resp *index.SearchIndexOK
err error
}
// MockIndexClient is a test double for the Rekor index client; its
// SearchIndex method returns the pre-configured result unconditionally.
type MockIndexClient struct {
result searchResult
}
// SearchIndex returns the canned response and error configured on the
// mock, ignoring the search parameters and client options entirely.
func (m *MockIndexClient) SearchIndex(params *index.SearchIndexParams,
	opts ...index.ClientOption) (*index.SearchIndexOK, error) {
	res := m.result
	return res.resp, res.err
}
// SetTransport is a no-op; it exists only so MockIndexClient satisfies
// the interface expected by the generated Rekor client.
func (m *MockIndexClient) SetTransport(transport runtime.ClientTransport) {
}
func errCmp(e1, e2 error) bool {
return errors.Is(e1, e2) || errors.Is(e2, e1)
}
// Test_GetRekorEntries checks that GetRekorEntries surfaces Rekor search
// failures and empty result sets as ErrorRekorSearch, and succeeds when
// matching entry UUIDs are found for the artifact hash.
func Test_GetRekorEntries(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name         string
		artifactHash string
		res          searchResult
		expected     error
	}{
		{
			name:         "rekor search result error",
			artifactHash: "0ae7e4fa71686538440012ee36a2634dbaa19df2dd16a466f52411fb348bbc4e",
			res: searchResult{
				err: index.NewSearchIndexDefault(500),
			},
			expected: ErrorRekorSearch,
		},
		{
			name:         "no rekor entries found",
			artifactHash: "0ae7e4fa71686538440012ee36a2634dbaa19df2dd16a466f52411fb348bbc4e",
			res: searchResult{
				err: nil,
				resp: &index.SearchIndexOK{
					Payload: []string{},
				},
			},
			expected: ErrorRekorSearch,
		},
		{
			name:         "valid rekor entries found",
			artifactHash: "0ae7e4fa71686538440012ee36a2634dbaa19df2dd16a466f52411fb348bbc4e",
			res: searchResult{
				err: nil,
				resp: &index.SearchIndexOK{
					Payload: []string{"39d5109436c43dad92897d50f3b271aa456382875a922b28fedef9038b8f683a"},
				},
			},
			expected: nil,
		},
	}
	for _, tt := range tests {
		tt := tt // Re-initializing variable so it is not changed while executing the closure below
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			var mClient client.Rekor
			mClient.Index = &MockIndexClient{result: tt.res}
			_, err := GetRekorEntries(&mClient, tt.artifactHash)
			if !errCmp(err, tt.expected) {
				// Use a constant format string: passing cmp.Diff output
				// directly as the format breaks `go vet` and misformats
				// any '%' the diff may contain.
				t.Errorf("unexpected error (-got +want):\n%s", cmp.Diff(err, tt.expected))
			}
		})
	}
}