diff --git a/pkg/artifacts/signable.go b/pkg/artifacts/signable.go index 6e337459d2..f089243cf7 100644 --- a/pkg/artifacts/signable.go +++ b/pkg/artifacts/signable.go @@ -251,7 +251,7 @@ func ExtractSignableTargetFromResults(ctx context.Context, obj objects.TektonObj if s == nil || s.Digest == "" || s.URI == "" { continue } - if err := checkDigest(s.Digest); err != nil { + if _, _, err := ParseDigest(s.Digest); err != nil { logger.Errorf("error getting digest %s: %v", s.Digest, err) continue } @@ -310,13 +310,11 @@ func RetrieveMaterialsFromStructuredResults(ctx context.Context, obj objects.Tek mats := []common.ProvenanceMaterial{} ssts := ExtractStructuredTargetFromResults(ctx, obj, ArtifactsInputsResultName) for _, s := range ssts { - if err := checkDigest(s.Digest); err != nil { + alg, digest, err := ParseDigest(s.Digest) + if err != nil { logger.Debugf("Digest for %s not in the right format: %s, %v", s.URI, s.Digest, err) continue } - splits := strings.Split(s.Digest, ":") - alg := splits[0] - digest := splits[1] mats = append(mats, common.ProvenanceMaterial{ URI: s.URI, Digest: map[string]string{alg: digest}, @@ -370,16 +368,16 @@ func isStructuredResult(res objects.Result, categoryMarker string) (bool, error) if res.Value.ObjectVal["digest"] == "" { return false, fmt.Errorf("%s should have digest field: %v", res.Name, res.Value.ObjectVal) } - if err := checkDigest(res.Value.ObjectVal["digest"]); err != nil { + if _, _, err := ParseDigest(res.Value.ObjectVal["digest"]); err != nil { return false, fmt.Errorf("error getting digest %s: %v", res.Value.ObjectVal["digest"], err) } return true, nil } -func checkDigest(dig string) error { +func ParseDigest(dig string) (string, string, error) { parts := strings.Split(dig, ":") if len(parts) != 2 { - return fmt.Errorf("digest string %s, not in the format of <algorithm>:<digest>", dig) + return "", "", fmt.Errorf("digest string %s, not in the format of <algorithm>:<digest>", dig) } algo_string := strings.ToLower(strings.TrimSpace(parts[0])) algo := digest.Algorithm(algo_string) @@ -388,19 +386,19 @@ func checkDigest(dig string) error { switch { case algo.Available(): if err := algo.Validate(hex); err != nil { - return err + return "", "", err } case algo_string == "sha1": // Version 1.0.0, which is the released version, of go_digest does not support SHA1, // hence this has to be handled differently.
if !Sha1Regexp.MatchString(hex) { - return fmt.Errorf("sha1 digest %s does not match regexp %s", dig, Sha1Regexp.String()) + return "", "", fmt.Errorf("sha1 digest %s does not match regexp %s", dig, Sha1Regexp.String()) } default: - return fmt.Errorf("unsupported digest algorithm: %s", dig) + return "", "", fmt.Errorf("unsupported digest algorithm: %s", dig) } - return nil + return algo_string, hex, nil } // split allows IMAGES to be separated either by commas (for backwards compatibility) diff --git a/pkg/chains/formats/slsa/internal/material/material.go b/pkg/chains/formats/slsa/internal/material/material.go index b673280083..ebd879739c 100644 --- a/pkg/chains/formats/slsa/internal/material/material.go +++ b/pkg/chains/formats/slsa/internal/material/material.go @@ -33,6 +33,9 @@ import ( const ( uriSeparator = "@" digestSeparator = ":" + DigestAlgorithm = "digestAlgorithm" + DigestValue = "digestValue" + URI = "uri" ) // AddStepImagesToMaterials adds step images to predicate.materials @@ -60,40 +63,45 @@ func AddImageIDToMaterials(imageID string, mats *[]common.ProvenanceMaterial) er m := common.ProvenanceMaterial{ Digest: common.DigestSet{}, } - uriDigest := strings.Split(imageID, uriSeparator) - if len(uriDigest) == 2 { - digest := strings.Split(uriDigest[1], digestSeparator) - if len(digest) == 2 { - // no point in partially populating the material - // do it if both conditions are valid. - uri := strings.TrimPrefix(uriDigest[0], "docker-pullable://") - m.URI = artifacts.OCIScheme + uri - m.Digest[digest[0]] = digest[1] - *mats = append(*mats, m) - } else { - return fmt.Errorf("expected imageID %s to be separable by @ and :", imageID) - } - } else { - return fmt.Errorf("expected imageID %s to be separable by @", imageID) + uriDigest, err := extractUriDigestFromImageID(imageID) + if err != nil { + return err } + m.URI = uriDigest[URI] + m.Digest[uriDigest[DigestAlgorithm]] = uriDigest[DigestValue] + *mats = append(*mats, m) return nil } +// extractUriDigestFromImageID extracts uri and digest from an imageID with format <uri>@sha256:<digest> +func extractUriDigestFromImageID(imageID string) (map[string]string, error) { + uriDigest := strings.Split(imageID, uriSeparator) + if len(uriDigest) != 2 { + return map[string]string{}, fmt.Errorf("expected imageID %s to be separable by @", imageID) + } + digest := strings.Split(uriDigest[1], digestSeparator) + if len(digest) != 2 { + return map[string]string{}, fmt.Errorf("expected imageID %s to be separable by @ and :", imageID) + } + uri := strings.TrimPrefix(uriDigest[0], "docker-pullable://") + return map[string]string{URI: artifacts.OCIScheme + uri, DigestAlgorithm: digest[0], DigestValue: digest[1]}, nil +} + // Materials constructs `predicate.materials` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images.
func Materials(ctx context.Context, tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { var mats []common.ProvenanceMaterial // add step images if err := AddStepImagesToMaterials(tro.Status.Steps, &mats); err != nil { - return mats, nil + return nil, err } // add sidecar images if err := AddSidecarImagesToMaterials(tro.Status.Sidecars, &mats); err != nil { - return mats, nil + return nil, err } - gitCommit, gitURL := gitInfo(tro) + gitCommit, gitURL := GitInfo(tro) // Store git rev as Materials and Recipe.Material if gitCommit != "" && gitURL != "" { @@ -107,10 +115,22 @@ func Materials(ctx context.Context, tro *objects.TaskRunObject) ([]common.Proven sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, tro, artifacts.ArtifactsInputsResultName) mats = append(mats, sms...) - if tro.Spec.Resources != nil { + // add task resources + mats = AddTaskResourcesToMaterials(ctx, tro, mats) + + // remove duplicate materials + mats, err := RemoveDuplicateMaterials(mats) + if err != nil { + return mats, err + } + return mats, nil +} + +func AddTaskResourcesToMaterials(ctx context.Context, tro *objects.TaskRunObject, mats []common.ProvenanceMaterial) []common.ProvenanceMaterial { + if tro.Spec.Resources != nil { //nolint:all //incompatible with pipelines v0.45 // check for a Git PipelineResource - for _, input := range tro.Spec.Resources.Inputs { - if input.ResourceSpec == nil || input.ResourceSpec.Type != backport.PipelineResourceTypeGit { + for _, input := range tro.Spec.Resources.Inputs { //nolint:all //incompatible with pipelines v0.45 + if input.ResourceSpec == nil || input.ResourceSpec.Type != backport.PipelineResourceTypeGit { //nolint:all //incompatible with pipelines v0.45 continue } @@ -143,18 +163,12 @@ func Materials(ctx context.Context, tro *objects.TaskRunObject) ([]common.Proven mats = append(mats, m) } } - - // remove duplicate materials - mats, err := RemoveDuplicateMaterials(mats) - if err != nil { - return mats, err - } - return mats, nil + return mats } -// gitInfo scans over the input parameters and looks for parameters +// GitInfo scans over the input parameters and looks for parameters // with specified names. -func gitInfo(tro *objects.TaskRunObject) (commit string, url string) { +func GitInfo(tro *objects.TaskRunObject) (commit string, url string) { // Scan for git params to use for materials if tro.Status.TaskSpec != nil { for _, p := range tro.Status.TaskSpec.Params { @@ -215,3 +229,55 @@ func RemoveDuplicateMaterials(mats []common.ProvenanceMaterial) ([]common.Proven } return out, nil } + +// AddMaterialsFromPipelineParamsAndResults extracts type hinted params and results and adds the url and digest to materials. +func AddMaterialsFromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunObject, mats []common.ProvenanceMaterial) []common.ProvenanceMaterial { + sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, pro, artifacts.ArtifactsInputsResultName) + mats = append(mats, sms...) 
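+ // The scans below read pipelineSpec param defaults first, then pipelineRun spec params, then pipelineRun results; a value found later overwrites an earlier one, and a material is only appended when both url and commit end up non-empty.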
+ + var commit, url string + // search status.PipelineSpec.params + if pro.Status.PipelineSpec != nil { + for _, p := range pro.Status.PipelineSpec.Params { + if p.Default == nil { + continue + } + if p.Name == attest.CommitParam { + commit = p.Default.StringVal + continue + } + if p.Name == attest.URLParam { + url = p.Default.StringVal + } + } + } + + // search pipelineRunSpec.params + for _, p := range pro.Spec.Params { + if p.Name == attest.CommitParam { + commit = p.Value.StringVal + continue + } + if p.Name == attest.URLParam { + url = p.Value.StringVal + } + } + + // search status.PipelineRunResults + for _, r := range pro.Status.PipelineResults { + if r.Name == attest.CommitParam { + commit = r.Value.StringVal + } + if r.Name == attest.URLParam { + url = r.Value.StringVal + } + } + if len(commit) > 0 && len(url) > 0 { + url = attest.SPDXGit(url, "") + mats = append(mats, common.ProvenanceMaterial{ + URI: url, + Digest: map[string]string{"sha1": commit}, + }) + } + return mats +} diff --git a/pkg/chains/formats/slsa/internal/material/material_test.go b/pkg/chains/formats/slsa/internal/material/material_test.go index 1701fa0ce8..db9d09f3a6 100644 --- a/pkg/chains/formats/slsa/internal/material/material_test.go +++ b/pkg/chains/formats/slsa/internal/material/material_test.go @@ -97,6 +97,7 @@ func TestMaterials(t *testing.T) { ResourceSpec: &v1alpha1.PipelineResourceSpec{ Params: []v1alpha1.ResourceParam{ {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, + {Name: "revision", Value: "my-revision"}, }, Type: backport.PipelineResourceTypeGit, }, @@ -138,14 +139,14 @@ func TestMaterials(t *testing.T) { }, }, { - URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git", + URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless@my-revision", Digest: common.DigestSet{ "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", }, }, }, }, { - name: "materials from git results", + name: "materials from git results in task run spec", taskRun: &v1beta1.TaskRun{ Spec: v1beta1.TaskRunSpec{ Params: []v1beta1.Param{{ @@ -165,6 +166,67 @@ func TestMaterials(t *testing.T) { }, }, }, + }, { + name: "materials from git results in task spec", + taskRun: &v1beta1.TaskRun{ + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskSpec: &v1beta1.TaskSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }, + }, + }, { + name: "materials from git results in task spec and taskrun spec", + taskRun: &v1beta1.TaskRun{ + Spec: v1beta1.TaskRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_URL", + Value: v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskSpec: &v1beta1.TaskSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_URL", + }, { + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }}, + }, + }, + }, + }, + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, }, { name: 
"materials from step images", taskRun: &v1beta1.TaskRun{ @@ -565,3 +627,122 @@ func TestRemoveDuplicates(t *testing.T) { }) } } + +func TestAddMaterialsFromPipelineParamsAndResults(t *testing.T) { + tests := []struct { + name string + pipelineRun *v1beta1.PipelineRun + want []common.ProvenanceMaterial + }{{ + name: "from results", + pipelineRun: &v1beta1.PipelineRun{ + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineResults: []v1beta1.PipelineRunResult{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1beta1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1beta1.NewStructuredValues("github.com/something"), + }}, + }, + }, + }, + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "from pipelinespec", + pipelineRun: &v1beta1.PipelineRun{ + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineSpec: &v1beta1.PipelineSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + }, + }, + }, + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "from pipelineRunSpec", + pipelineRun: &v1beta1.PipelineRun{ + Spec: v1beta1.PipelineRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Value: v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + }, + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "from completeChain", + pipelineRun: &v1beta1.PipelineRun{ + Spec: v1beta1.PipelineRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_URL", + Value: v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineSpec: &v1beta1.PipelineSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_URL", + }}, + }, + PipelineResults: []v1beta1.PipelineRunResult{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1beta1.NewStructuredValues("my-commit"), + }}, + }, + }, + }, + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + got := AddMaterialsFromPipelineParamsAndResults(ctx, objects.NewPipelineRunObject(tc.pipelineRun), []common.ProvenanceMaterial{}) + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("AddMaterialsFromPipelineParamsAndResults(): -want +got: %s", diff) + } + }) + } +} diff --git a/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go b/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go index b63a1dc494..2fad37e306 100644 --- a/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go +++ b/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go @@ -20,7 +20,6 @@ import ( intoto "github.com/in-toto/in-toto-golang/in_toto" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa 
"github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" - "github.com/tektoncd/chains/pkg/artifacts" "github.com/tektoncd/chains/pkg/chains/formats/slsa/attest" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" @@ -231,53 +230,8 @@ func materials(ctx context.Context, pro *objects.PipelineRunObject) ([]common.Pr } } } - var commit, url string - // search spec.params - for _, p := range pro.Spec.Params { - if p.Name == attest.CommitParam { - commit = p.Value.StringVal - continue - } - if p.Name == attest.URLParam { - url = p.Value.StringVal - } - } - - sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, pro, artifacts.ArtifactsInputsResultName) - mats = append(mats, sms...) - // search status.PipelineSpec.params - if pro.Status.PipelineSpec != nil { - for _, p := range pro.Status.PipelineSpec.Params { - if p.Default == nil { - continue - } - if p.Name == attest.CommitParam { - commit = p.Default.StringVal - continue - } - if p.Name == attest.URLParam { - url = p.Default.StringVal - } - } - } - - // search status.PipelineRunResults - for _, r := range pro.Status.PipelineResults { - if r.Name == attest.CommitParam { - commit = r.Value.StringVal - } - if r.Name == attest.URLParam { - url = r.Value.StringVal - } - } - if len(commit) > 0 && len(url) > 0 { - url = attest.SPDXGit(url, "") - mats = append(mats, common.ProvenanceMaterial{ - URI: url, - Digest: map[string]string{"sha1": commit}, - }) - } + mats = material.AddMaterialsFromPipelineParamsAndResults(ctx, pro, mats) // remove duplicate materials mats, err := material.RemoveDuplicateMaterials(mats) diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go new file mode 100644 index 0000000000..82998bbc5d --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go @@ -0,0 +1,227 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package resolveddependencies + +import ( + "context" + "encoding/json" + + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + v1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/pkg/artifacts" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" + "github.com/tektoncd/chains/pkg/chains/objects" + "go.uber.org/zap" + "knative.dev/pkg/logging" +) + +const ( + // pipelineConfigName is the name of the resolved dependency of the pipelineRef. + pipelineConfigName = "pipeline" + // taskConfigName is the name of the resolved dependency of the top level taskRef. + taskConfigName = "task" + // pipelineTaskConfigName is the name of the resolved dependency of the pipeline task. 
+ pipelineTaskConfigName = "pipelineTask" + // inputResultName is the name of the resolved dependency generated from Type hinted ARTIFACT_INPUTS results + inputResultName = "inputs/result" + // pipelineResourceName is the name of the resolved dependency of pipeline resource. + pipelineResourceName = "pipelineResource" +) + +// TaskRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. +func TaskRun(ctx context.Context, tro *objects.TaskRunObject) ([]*v1.ResourceDescriptor, error) { + var resolvedDependencies []*v1.ResourceDescriptor + var err error + + // add top level task config + if p := tro.Status.Provenance; p != nil && p.RefSource != nil { + rd := v1.ResourceDescriptor{ + Name: taskConfigName, + URI: p.RefSource.URI, + Digest: p.RefSource.Digest, + } + resolvedDependencies = append(resolvedDependencies, &rd) + } + + mats := []common.ProvenanceMaterial{} + + // add step images + if err := material.AddStepImagesToMaterials(tro.Status.Steps, &mats); err != nil { + return nil, err + } + + // add sidecar images + if err := material.AddSidecarImagesToMaterials(tro.Status.Sidecars, &mats); err != nil { + return nil, err + } + + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, "")...) + + // add input artifacts + mats = artifacts.RetrieveMaterialsFromStructuredResults(ctx, tro, artifacts.ArtifactsInputsResultName) + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) + + // add task resources + mats = material.AddTaskResourcesToMaterials(ctx, tro, []common.ProvenanceMaterial{}) + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, pipelineResourceName)...) + + // Store git rev as ResolvedDependencies + gitCommit, gitURL := material.GitInfo(tro) + + if gitCommit != "" && gitURL != "" { + resolvedDependencies = append(resolvedDependencies, &v1.ResourceDescriptor{ + Name: inputResultName, + URI: gitURL, + Digest: map[string]string{"sha1": gitCommit}, + }) + } + + // remove duplicate resolved dependencies + resolvedDependencies, err = removeDuplicateResolvedDependencies(resolvedDependencies) + if err != nil { + return nil, err + } + return resolvedDependencies, nil +} + +// PipelineRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. 
+func PipelineRun(ctx context.Context, pro *objects.PipelineRunObject) ([]*v1.ResourceDescriptor, error) { + var err error + var resolvedDependencies []*v1.ResourceDescriptor + logger := logging.FromContext(ctx) + + // add pipeline config to resolved dependencies + if p := pro.Status.Provenance; p != nil && p.RefSource != nil { + rd := v1.ResourceDescriptor{ + Name: pipelineConfigName, + URI: p.RefSource.URI, + Digest: p.RefSource.Digest, + } + resolvedDependencies = append(resolvedDependencies, &rd) + } + + // add resolved dependencies from pipeline tasks + resolvedDependencies, err = addPipelineTask(logger, pro, resolvedDependencies) + if err != nil { + return nil, err + } + + // add resolved dependencies from pipeline results + mats := material.AddMaterialsFromPipelineParamsAndResults(ctx, pro, []common.ProvenanceMaterial{}) + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) + + // remove duplicate resolved dependencies + resolvedDependencies, err = removeDuplicateResolvedDependencies(resolvedDependencies) + if err != nil { + return nil, err + } + return resolvedDependencies, nil +} + +// convertMaterialsToResolvedDependencies converts SLSAv0.2 Materials to resolved dependencies +func convertMaterialsToResolvedDependencies(mats []common.ProvenanceMaterial, name string) []*v1.ResourceDescriptor { + rds := []*v1.ResourceDescriptor{} + for _, mat := range mats { + rd := v1.ResourceDescriptor{} + rd.URI = mat.URI + rd.Digest = mat.Digest + if len(name) > 0 { + rd.Name = name + } + rds = append(rds, &rd) + } + return rds +} + +// removeDuplicateResolvedDependencies removes duplicate resolved dependencies from the slice of resolved dependencies. +// Original order of resolved dependencies is retained. +func removeDuplicateResolvedDependencies(resolvedDependencies []*v1.ResourceDescriptor) ([]*v1.ResourceDescriptor, error) { + out := make([]*v1.ResourceDescriptor, 0, len(resolvedDependencies)) + + // make map to store seen resolved dependencies + seen := map[string]bool{} + for _, resolvedDependency := range resolvedDependencies { + rDep := v1.ResourceDescriptor{} + rDep.URI = resolvedDependency.URI + rDep.Digest = resolvedDependency.Digest + // This allows us to ignore dependencies that have the same uri and digest. + rd, err := json.Marshal(rDep) + if err != nil { + return nil, err + } + if seen[string(rd)] { + // We don't want to remove the top level pipeline/task config from the resolved dependencies + // because it's critical to provide that information in the provenance. In SLSAv0.2 spec, + // we would put this in invocation.ConfigSource. In order to ensure that it is present in + // the resolved dependencies, we don't want to skip it if another resolved dependency from the same + // uri+digest pair was already included before. + if resolvedDependency.Name == taskConfigName || resolvedDependency.Name == pipelineConfigName { + out = append(out, resolvedDependency) + } + continue + } + seen[string(rd)] = true + out = append(out, resolvedDependency) + } + return out, nil +} + +// addPipelineTask adds the resolved dependencies from pipeline tasks. +func addPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObject, resolvedDependencies []*v1.ResourceDescriptor) ([]*v1.ResourceDescriptor, error) { + pSpec := pro.Status.PipelineSpec + if pSpec != nil { + pipelineTasks := append(pSpec.Tasks, pSpec.Finally...)
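+ // finally tasks are resolved alongside the ordinary pipeline tasks so that their remote refs and step/sidecar images also land in the provenance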
+ for _, t := range pipelineTasks { + tr := pro.GetTaskRunFromTask(t.Name) + // Ignore Tasks that did not execute during the PipelineRun. + if tr == nil || tr.Status.CompletionTime == nil { + logger.Infof("taskrun status not found for task %s", t.Name) + continue + } + // add remote task configsource information in materials + if tr.Status.Provenance != nil && tr.Status.Provenance.RefSource != nil { + rd := v1.ResourceDescriptor{ + Name: pipelineTaskConfigName, + URI: tr.Status.Provenance.RefSource.URI, + Digest: tr.Status.Provenance.RefSource.Digest, + } + resolvedDependencies = append(resolvedDependencies, &rd) + } + + mats := []common.ProvenanceMaterial{} + + // add step images + if err := material.AddStepImagesToMaterials(tr.Status.Steps, &mats); err != nil { + return nil, err + } + + // add sidecar images + if err := material.AddSidecarImagesToMaterials(tr.Status.Sidecars, &mats); err != nil { + return nil, err + } + + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, "")...) + } + } + return resolvedDependencies, nil +} diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go new file mode 100644 index 0000000000..0676dc4b17 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go @@ -0,0 +1,511 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package resolveddependencies + +import ( + "reflect" + "strings" + "testing" + + "github.com/ghodss/yaml" + "github.com/google/go-cmp/cmp" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + v1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/internal/backport" + "github.com/tektoncd/chains/pkg/artifacts" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" + logtesting "knative.dev/pkg/logging/testing" +) + +const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" + +var pro *objects.PipelineRunObject +var proStructuredResults *objects.PipelineRunObject + +func init() { + pro = createPro("../../../testdata/pipelinerun1.json") + proStructuredResults = createPro("../../../testdata/pipelinerun_structured_results.json") +} + +func createPro(path string) *objects.PipelineRunObject { + var err error + pr, err := objectloader.PipelineRunFromFile(path) + if err != nil { + panic(err) + } + tr1, err := objectloader.TaskRunFromFile("../../../testdata/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := objectloader.TaskRunFromFile("../../../testdata/taskrun2.json") + if err != nil { + panic(err) + } + p := objects.NewPipelineRunObject(pr) + p.AppendTaskRun(tr1) + p.AppendTaskRun(tr2) + return p +} + +func TestTaskRunWithTaskRunResults(t *testing.T) { + // make sure this works with Git resources + taskrun := `apiVersion: tekton.dev/v1beta1 +kind: TaskRun +spec: + taskSpec: + resources: + inputs: + - name: repo + type: git +status: + taskResults: + - name: CHAINS-GIT_COMMIT + value: 50c56a48cfb3a5a80fa36ed91c739bdac8381cbe + - name: CHAINS-GIT_URL + value: https://github.com/GoogleContainerTools/distroless` + var taskRun *v1beta1.TaskRun + if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { + t.Fatal(err) + } + + want := []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + Name: "inputs/result", + URI: "git+https://github.com/GoogleContainerTools/distroless.git", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + } + + ctx := logtesting.TestContextWithLogger(t) + got, err := TaskRun(ctx, objects.NewTaskRunObject(taskRun)) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if !reflect.DeepEqual(got, want) { + t.Fatalf("want %v got %v", want, got) + } +} + +func TestTaskRun(t *testing.T) { + tests := []struct { + name string + taskRun *v1beta1.TaskRun + want []*v1.ResourceDescriptor + }{{ + name: "resolvedDependencies from pipeline resources", + taskRun: &v1beta1.TaskRun{ + Spec: v1beta1.TaskRunSpec{ + Resources: &v1beta1.TaskRunResources{ //nolint:all //incompatible with pipelines v0.45 + Inputs: []v1beta1.TaskResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + Name: "nil-resource-spec", + }, + }, { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + Name: "repo", + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:all //incompatible with pipelines v0.45 + Params: []v1alpha1.ResourceParam{ //nolint:all //incompatible with pipelines v0.45 + {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, + }, + Type: 
backport.PipelineResourceTypeGit, + }, + }, + }, + }, + }, + }, + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1beta1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest, + }), + }, + }, + ResourcesResult: []v1beta1.PipelineResourceResult{ + { + ResourceName: "repo", + Key: "commit", + Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, { + ResourceName: "repo", + Key: "url", + Value: "https://github.com/GoogleContainerTools/distroless", + }, + }, + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + Name: "inputs/result", + URI: "gcr.io/foo/bar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, + }, + &v1.ResourceDescriptor{ + Name: "pipelineResource", + URI: "git+https://github.com/GoogleContainerTools/distroless.git", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + }, + }, { + name: "resolvedDependencies from remote task", + taskRun: &v1beta1.TaskRun{ + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Provenance: &v1beta1.Provenance{ + RefSource: &v1beta1.RefSource{ + URI: "git+github.com/something.git", + Digest: map[string]string{ + "sha1": "abcd1234", + }, + }, + }, + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + Name: "task", + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "abcd1234", + }, + }, + }, + }, { + name: "resolvedDependencies from git results", + taskRun: &v1beta1.TaskRun{ + Spec: v1beta1.TaskRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1beta1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1beta1.NewStructuredValues("github.com/something"), + }}, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + Name: "inputs/result", + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }, + }, + }, { + name: "resolvedDependencies from step images", + taskRun: &v1beta1.TaskRun{ + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Steps: []v1beta1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, + }, + }, { + name: "resolvedDependencies from step and sidecar images", + taskRun: &v1beta1.TaskRun{ + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: 
v1beta1.TaskRunStatusFields{ + Steps: []v1beta1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + Sidecars: []v1beta1.SidecarState{{ + Name: "sidecar-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }}, + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + rd, err := TaskRun(ctx, objects.NewTaskRunObject(tc.taskRun)) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if diff := cmp.Diff(tc.want, rd); diff != "" { + t.Errorf("ResolvedDependencies(): -want +got: %s", diff) + } + }) + } +} + +func TestRemoveDuplicates(t *testing.T) { + tests := []struct { + name string + rds []*v1.ResourceDescriptor + want []*v1.ResourceDescriptor + }{{ + name: "no duplicate resolvedDependencies", + rds: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + }, { + name: 
"same uri and digest", + rds: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + }, { + name: "same uri but different digest", + rds: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, + }, + }, { + name: "same uri but different digest, swap order", + rds: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + want: []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, &v1.ResourceDescriptor{ + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + rds, err := removeDuplicateResolvedDependencies(tc.rds) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if diff := cmp.Diff(tc.want, rds); diff != "" { + t.Errorf("resolvedDependencies(): -want +got: %s", diff) + } + }) + } +} + +func TestPipelineRun(t *testing.T) { + expected := []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{Name: "pipeline", URI: "github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + &v1.ResourceDescriptor{Name: "pipelineTask", URI: "github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}}, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/test1/test1", + Digest: 
common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + &v1.ResourceDescriptor{Name: "pipelineTask", URI: "github.com/test", Digest: common.DigestSet{"sha1": "ab123"}}, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + &v1.ResourceDescriptor{Name: "inputs/result", URI: "abc", Digest: common.DigestSet{"sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}}, + &v1.ResourceDescriptor{Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "abcd"}}, + } + ctx := logtesting.TestContextWithLogger(t) + got, err := PipelineRun(ctx, pro) + if err != nil { + t.Error(err) + } + if diff := cmp.Diff(expected, got); diff != "" { + t.Errorf("PipelineRunResolvedDependencies(): -want +got: %s", diff) + } +} + +func TestPipelineRunStructuredResult(t *testing.T) { + want := []*v1.ResourceDescriptor{ + &v1.ResourceDescriptor{Name: "pipeline", URI: "github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + &v1.ResourceDescriptor{Name: "pipelineTask", URI: "github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}}, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + &v1.ResourceDescriptor{Name: "pipelineTask", URI: "github.com/test", Digest: common.DigestSet{"sha1": "ab123"}}, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + &v1.ResourceDescriptor{ + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + &v1.ResourceDescriptor{ + Name: "inputs/result", + URI: "abcd", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + } + ctx := logtesting.TestContextWithLogger(t) + got, err := PipelineRun(ctx, proStructuredResults) + if err != nil { + t.Errorf("error while extracting resolvedDependencies: %v", err) + } + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("resolvedDependencies(): -want +got: %s", diff) + } +}