diff --git a/.golangci.yml b/.golangci.yml
index 144013072..fb3d977c4 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -488,6 +488,8 @@ issues:
       linters: [ goheader ]
     - path: "^registry/app/remote/adapter/native/adapter.go"
       linters: [ goheader ]
+    - path: "^registry/app/storage/blobStore.go"
+      linters: [ gosec ]
     #Registry Specific ends
     - text: "mnd: Magic number: \\d"
       linters:
diff --git a/registry/app/pkg/context.go b/registry/app/pkg/context.go
index d4e246c8c..b09f902dd 100644
--- a/registry/app/pkg/context.go
+++ b/registry/app/pkg/context.go
@@ -40,6 +40,15 @@ type RegistryInfo struct {
 	Path string
 }
 
+type FileInfo struct {
+	Size     int64
+	Sha1     string
+	Sha256   string
+	Sha512   string
+	MD5      string
+	Filename string
+}
+
 func (r *RegistryInfo) SetReference(ref string) {
 	r.Reference = ref
 }
diff --git a/registry/app/storage/blobStore.go b/registry/app/storage/blobStore.go
new file mode 100644
index 000000000..27cd1518c
--- /dev/null
+++ b/registry/app/storage/blobStore.go
@@ -0,0 +1,129 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package storage
+
+import (
+	"context"
+	"crypto/md5"
+	"crypto/sha1"
+	"crypto/sha256"
+	"crypto/sha512"
+	"fmt"
+	"io"
+	"mime/multipart"
+
+	"github.com/harness/gitness/registry/app/dist_temp/dcontext"
+	"github.com/harness/gitness/registry/app/driver"
+	"github.com/harness/gitness/registry/app/pkg"
+
+	"github.com/rs/zerolog/log"
+)
+
+type genericBlobStore struct {
+	repoKey       string
+	driver        driver.StorageDriver
+	rootParentRef string
+}
+
+func (bs *genericBlobStore) Info() string {
+	return bs.rootParentRef + " " + bs.repoKey
+}
+
+func (bs *genericBlobStore) Get(ctx context.Context, filePath string, size int64) (*FileReader, error) {
+	dcontext.GetLogger(ctx, log.Ctx(ctx).Debug()).Msg("(*genericBlobStore).Get")
+
+	br, err := NewFileReader(ctx, bs.driver, filePath, size)
+	if err != nil {
+		return nil, err
+	}
+	return br, nil
+}
+
+var _ GenericBlobStore = &genericBlobStore{}
+
+// Create begins a blob write session, returning a handle.
+func (bs *genericBlobStore) Create(ctx context.Context, filePath string) (driver.FileWriter, error) {
+	dcontext.GetLogger(ctx, log.Ctx(ctx).Debug()).Msg("(*genericBlobStore).Create")
+
+	path, err := pathFor(
+		uploadFilePathSpec{
+			path: filePath,
+		},
+	)
+	if err != nil {
+		return nil, err
+	}
+
+	return bs.newBlobUpload(ctx, path, false)
+}
+
+func (bs *genericBlobStore) newBlobUpload(
+	ctx context.Context,
+	path string, a bool,
+) (driver.FileWriter, error) {
+	fw, err := bs.driver.Writer(ctx, path, a)
+	if err != nil {
+		return nil, err
+	}
+	return fw, nil
+}
+
+// Write takes a file writer and a multipart form file, streams the file to the writer, and calculates hashes.
+func (bs *genericBlobStore) Write(ctx context.Context, w driver.FileWriter, file multipart.File) (pkg.FileInfo, error) {
+	// Create new hash.Hash instances for SHA1, SHA256, SHA512 and MD5
+	sha1Hasher := sha1.New()
+	sha256Hasher := sha256.New()
+	sha512Hasher := sha512.New()
+	md5Hasher := md5.New()
+
+	// Create a MultiWriter to write to all the hashers and the storage writer simultaneously
+	mw := io.MultiWriter(sha1Hasher, sha256Hasher, sha512Hasher, md5Hasher, w)
+	// Copy the data from the uploaded multipart file to the MultiWriter
+	totalBytesWritten, err := io.Copy(mw, file)
+	if err != nil {
+		return pkg.FileInfo{}, fmt.Errorf("failed to copy file to s3: %w", err)
+	}
+
+	err = w.Commit(ctx)
+	if err != nil {
+		return pkg.FileInfo{}, err
+	}
+
+	return pkg.FileInfo{
+		Sha1:   fmt.Sprintf("%x", sha1Hasher.Sum(nil)),
+		Sha256: fmt.Sprintf("%x", sha256Hasher.Sum(nil)),
+		Sha512: fmt.Sprintf("%x", sha512Hasher.Sum(nil)),
+		MD5:    fmt.Sprintf("%x", md5Hasher.Sum(nil)),
+		Size:   totalBytesWritten,
+	}, nil
+}
+
+func (bs *genericBlobStore) Move(ctx context.Context, srcPath string, dstPath string) error {
+	dcontext.GetLogger(ctx, log.Ctx(ctx).Debug()).Msg("(*genericBlobStore).Move")
+	err := bs.driver.Move(ctx, srcPath, dstPath)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+func (bs *genericBlobStore) Delete(ctx context.Context, filePath string) error {
+	dcontext.GetLogger(ctx, log.Ctx(ctx).Debug()).Msg("(*genericBlobStore).Delete")
+	err := bs.driver.Delete(ctx, filePath)
+	if err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/registry/app/storage/blobs.go b/registry/app/storage/blobs.go
index 53e5275a1..01ec1a5e1 100644
--- a/registry/app/storage/blobs.go
+++ b/registry/app/storage/blobs.go
@@ -25,6 +25,7 @@ import (
 
 	"github.com/harness/gitness/registry/app/driver"
 	"github.com/harness/gitness/registry/app/manifest"
+	"github.com/harness/gitness/registry/app/pkg"
 
 	"github.com/distribution/reference"
 	"github.com/opencontainers/go-digest"
@@ -170,7 +171,9 @@ type GenericBlobStore interface {
 	// returned handle can be written to and later resumed using an opaque
 	// identifier. With this approach, one can Close and Resume a BlobWriter
 	// multiple times until the BlobWriter is committed or cancelled.
-	Create(ctx context.Context) (driver.FileWriter, error)
+	Create(ctx context.Context, filePath string) (driver.FileWriter, error)
-	Write(w driver.FileWriter, file multipart.File) (int, error)
+	Write(ctx context.Context, w driver.FileWriter, file multipart.File) (pkg.FileInfo, error)
+	Move(ctx context.Context, srcPath string, dstPath string) error
+	Delete(ctx context.Context, filePath string) error
 }
 
diff --git a/registry/app/storage/paths.go b/registry/app/storage/paths.go
index c50462d8f..18c523667 100644
--- a/registry/app/storage/paths.go
+++ b/registry/app/storage/paths.go
@@ -71,6 +71,8 @@ func pathFor(spec pathSpec) (string, error) {
 		), nil
 	case repositoriesRootPathSpec:
 		return path.Join(rootPrefix...), nil
+	case uploadFilePathSpec:
+		return path.Join(append(rootPrefix, v.path)...), nil
 	default:
 		return "", fmt.Errorf("unknown path spec: %#v", v)
 	}
@@ -124,6 +126,14 @@ type uploadDataPathSpec struct {
 
 func (uploadDataPathSpec) pathSpec() {}
 
+// uploadFilePathSpec defines the path parameters of the file for
+// uploads.
+type uploadFilePathSpec struct {
+	path string
+}
+
+func (uploadFilePathSpec) pathSpec() {}
+
 // uploadHashStatePathSpec defines the path parameters for the file that stores
 // the hash function state of an upload at a specific byte offset. If `list` is
 // set, then the path mapper will generate a list prefix for all hash state