diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..7d3ce32 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,36 @@ +name: Data Client CI + +on: + pull_request: + push: + workflow_dispatch: + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + lint-and-test: + name: Lint and Unit Tests + runs-on: ubuntu-latest + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version-file: go.mod + cache: true + + - name: Download dependencies + run: GOTOOLCHAIN=auto go mod download + + - name: Run go vet + run: GOTOOLCHAIN=auto go vet ./... + + - name: Run unit tests + run: GOTOOLCHAIN=auto go test -v ./... diff --git a/.gitignore b/.gitignore index 6aa2b55..453ca8f 100644 --- a/.gitignore +++ b/.gitignore @@ -28,4 +28,8 @@ # Build artifacts /build/ /bin/ -checksums.txt \ No newline at end of file +checksums.txt +# Local caches and binaries +/.gocache/ +/.tmp/ +/data-client diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..177c5c6 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "ga4gh/data-repository-service-schemas"] + path = ga4gh/data-repository-service-schemas + url = https://github.com/kellrott/data-repository-service-schemas.git + branch = feature/get-by-checksum diff --git a/Makefile b/Makefile index c936cd2..ebf8087 100644 --- a/Makefile +++ b/Makefile @@ -14,6 +14,9 @@ BIN_DIR := ./bin COVERAGE_THRESHOLD := 30 PACKAGE_COVERAGE_THRESHOLD := 20 +# OpenAPI generation now lives in syfon. +SYFON_DIR ?= ../syfon + # --- Targets --- .PHONY: all build test test-coverage coverage-html coverage-check generate tidy clean help @@ -55,6 +58,26 @@ generate: @echo "--> Running code generation (go generate)..." @go generate ./... +## gen: Generates Go models from OpenAPI specs +gen: + @set -euo pipefail; \ + if [[ ! 
-d "$(SYFON_DIR)" ]]; then \ + echo "ERROR: syfon repo not found at $(SYFON_DIR)"; \ + exit 1; \ + fi; \ + echo "--> OpenAPI generation is centralized in syfon"; \ + $(MAKE) -C "$(SYFON_DIR)" gen + +.PHONY: gen-internal +gen-internal: + @set -euo pipefail; \ + if [[ ! -d "$(SYFON_DIR)" ]]; then \ + echo "ERROR: syfon repo not found at $(SYFON_DIR)"; \ + exit 1; \ + fi; \ + echo "--> Internal model generation is centralized in syfon"; \ + $(MAKE) -C "$(SYFON_DIR)" gen-internal + ## tidy: Cleans up module dependencies and formats go files tidy: @echo "--> Tidying go.mod and formatting files..." @@ -66,4 +89,4 @@ clean: @echo "--> Cleaning up..." @rm -f $(BIN_DIR)/$(TARGET_NAME) @rm -f coverage.out coverage.html - + @rm -rf .tmp diff --git a/cmd/auth.go b/cmd/auth.go index 6e0398a..156030d 100644 --- a/cmd/auth.go +++ b/cmd/auth.go @@ -32,14 +32,14 @@ func init() { log.Fatalf("Fatal NewGen3Interface error: %s\n", err) } - resourceAccess, err := g3i.Fence().CheckPrivileges(context.Background()) + resourceAccess, err := g3i.FenceClient().CheckPrivileges(context.Background()) if err != nil { g3i.Logger().Fatalf("Fatal authentication error: %s\n", err) } else { if len(resourceAccess) == 0 { - g3i.Logger().Printf("\nYou don't currently have access to any resources at %s\n", g3i.GetCredential().APIEndpoint) + g3i.Logger().Printf("\nYou don't currently have access to any resources at %s\n", g3i.Credentials().Current().APIEndpoint) } else { - g3i.Logger().Printf("\nYou have access to the following resource(s) at %s:\n", g3i.GetCredential().APIEndpoint) + g3i.Logger().Printf("\nYou have access to the following resource(s) at %s:\n", g3i.Credentials().Current().APIEndpoint) // Sort by resource name resources := make([]string, 0, len(resourceAccess)) diff --git a/cmd/collaborator.go b/cmd/collaborator.go index 7fc1528..c2b2bf9 100644 --- a/cmd/collaborator.go +++ b/cmd/collaborator.go @@ -14,26 +14,13 @@ import ( "gopkg.in/yaml.v3" ) -var collaboratorCmd = &cobra.Command{ - Use: 
"collaborator", +var collaboratorsCmd = &cobra.Command{ + Use: "collaborators", Short: "Manage collaborators and access requests", } var emailRegex = regexp.MustCompile(`^[a-z0-9._%+\-]+@[a-z0-9.\-]+\.[a-z]{2,}$`) -func validateProjectAndUser(projectID, username string) error { - if !emailRegex.MatchString(strings.ToLower(username)) { - return fmt.Errorf("invalid username '%s': must be a valid email address", username) - } - - parts := strings.Split(projectID, "-") - if len(parts) != 2 || parts[0] == "" || parts[1] == "" { - return fmt.Errorf("invalid project_id '%s': must be in the form 'program-project'", projectID) - } - - return nil -} - func printRequest(r requestor.Request) { b, err := yaml.Marshal(r) if err != nil { @@ -43,35 +30,37 @@ func printRequest(r requestor.Request) { fmt.Println(string(b)) } -func getRequestorClient() (requestor.RequestorInterface, func()) { - if profile == "" { - fmt.Println("Error: profile is required. Please specify a profile using the --profile flag.") +func getRequestorClient(localProfile string) (requestor.RequestorInterface, func()) { + if localProfile == "" { + fmt.Println("Error: profile is required.") os.Exit(1) } // Initialize logger - logger, logCloser := logs.New(profile) + logger, logCloser := logs.New(localProfile) - // Initialize Gen3Interface handles selective initialization - g3i, err := g3client.NewGen3Interface(profile, logger, g3client.WithClients(g3client.RequestorClient)) + // Initialize base Gen3 interface and build requestor client from it. 
+ g3i, err := g3client.NewGen3Interface(localProfile, logger) if err != nil { fmt.Printf("Error accessing Gen3: %v\n", err) logCloser() os.Exit(1) } - return g3i.Requestor(), logCloser + return requestor.NewRequestorClient(g3i, g3i.Credentials().Current()), logCloser } var collaboratorListCmd = &cobra.Command{ - Use: "ls", + Use: "ls [profile]", Short: "List requests", + Args: cobra.ExactArgs(1), Run: func(cmd *cobra.Command, args []string) { + p := args[0] mine, _ := cmd.Flags().GetBool("mine") active, _ := cmd.Flags().GetBool("active") username, _ := cmd.Flags().GetString("username") - client, closer := getRequestorClient() + client, closer := getRequestorClient(p) defer closer() requests, err := client.ListRequests(cmd.Context(), mine, active, username) @@ -87,10 +76,12 @@ var collaboratorListCmd = &cobra.Command{ } var collaboratorPendingCmd = &cobra.Command{ - Use: "pending", + Use: "pending [profile]", Short: "List pending requests", + Args: cobra.ExactArgs(1), Run: func(cmd *cobra.Command, args []string) { - client, closer := getRequestorClient() + p := args[0] + client, closer := getRequestorClient(p) defer closer() // Fetch all requests @@ -110,22 +101,26 @@ var collaboratorPendingCmd = &cobra.Command{ } var collaboratorAddUserCmd = &cobra.Command{ - Use: "add [project_id] [username]", + Use: "add [profile] [email] [program] [project]", Short: "Add a user to a project", - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(2)(cmd, args); err != nil { - return err - } - return validateProjectAndUser(args[0], args[1]) - }, + Args: cobra.ExactArgs(4), Run: func(cmd *cobra.Command, args []string) { - projectID := args[0] + p := args[0] username := args[1] + program := args[2] + project := args[3] + projectID := fmt.Sprintf("%s-%s", program, project) + + if !emailRegex.MatchString(strings.ToLower(username)) { + fmt.Printf("Error: invalid email address '%s'\n", username) + os.Exit(1) + } + write, _ := cmd.Flags().GetBool("write") 
guppy, _ := cmd.Flags().GetBool("guppy") approve, _ := cmd.Flags().GetBool("approve") - client, closer := getRequestorClient() + client, closer := getRequestorClient(p) defer closer() reqs, err := client.AddUser(cmd.Context(), projectID, username, write, guppy) @@ -156,20 +151,24 @@ var collaboratorAddUserCmd = &cobra.Command{ } var collaboratorRemoveUserCmd = &cobra.Command{ - Use: "rm [project_id] [username]", + Use: "rm [profile] [email] [program] [project]", Short: "Remove a user from a project", - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(2)(cmd, args); err != nil { - return err - } - return validateProjectAndUser(args[0], args[1]) - }, + Args: cobra.ExactArgs(4), Run: func(cmd *cobra.Command, args []string) { - projectID := args[0] + p := args[0] username := args[1] + program := args[2] + project := args[3] + projectID := fmt.Sprintf("%s-%s", program, project) + + if !emailRegex.MatchString(strings.ToLower(username)) { + fmt.Printf("Error: invalid email address '%s'\n", username) + os.Exit(1) + } + approve, _ := cmd.Flags().GetBool("approve") - client, closer := getRequestorClient() + client, closer := getRequestorClient(p) defer closer() reqs, err := client.RemoveUser(cmd.Context(), projectID, username) @@ -199,13 +198,14 @@ var collaboratorRemoveUserCmd = &cobra.Command{ } var collaboratorApproveCmd = &cobra.Command{ - Use: "approve [request_id]", + Use: "approve [profile] [request_id]", Short: "Approve a request (sign it)", - Args: cobra.ExactArgs(1), + Args: cobra.ExactArgs(2), Run: func(cmd *cobra.Command, args []string) { - requestID := args[0] + p := args[0] + requestID := args[1] - client, closer := getRequestorClient() + client, closer := getRequestorClient(p) defer closer() req, err := client.UpdateRequest(cmd.Context(), requestID, "SIGNED") @@ -220,15 +220,16 @@ var collaboratorApproveCmd = &cobra.Command{ } var collaboratorUpdateCmd = &cobra.Command{ - Use: "update [request_id] [status]", + Use: "update 
[profile] [request_id] [status]", Short: "Update a request status", Hidden: true, - Args: cobra.ExactArgs(2), + Args: cobra.ExactArgs(3), Run: func(cmd *cobra.Command, args []string) { - requestID := args[0] - status := args[1] + p := args[0] + requestID := args[1] + status := args[2] - client, closer := getRequestorClient() + client, closer := getRequestorClient(p) defer closer() req, err := client.UpdateRequest(cmd.Context(), requestID, status) @@ -242,13 +243,13 @@ var collaboratorUpdateCmd = &cobra.Command{ } func init() { - RootCmd.AddCommand(collaboratorCmd) - collaboratorCmd.AddCommand(collaboratorListCmd) - collaboratorCmd.AddCommand(collaboratorPendingCmd) - collaboratorCmd.AddCommand(collaboratorAddUserCmd) - collaboratorCmd.AddCommand(collaboratorRemoveUserCmd) - collaboratorCmd.AddCommand(collaboratorApproveCmd) - collaboratorCmd.AddCommand(collaboratorUpdateCmd) + RootCmd.AddCommand(collaboratorsCmd) + collaboratorsCmd.AddCommand(collaboratorListCmd) + collaboratorsCmd.AddCommand(collaboratorPendingCmd) + collaboratorsCmd.AddCommand(collaboratorAddUserCmd) + collaboratorsCmd.AddCommand(collaboratorRemoveUserCmd) + collaboratorsCmd.AddCommand(collaboratorApproveCmd) + collaboratorsCmd.AddCommand(collaboratorUpdateCmd) collaboratorListCmd.Flags().Bool("mine", false, "List my requests") collaboratorListCmd.Flags().Bool("active", false, "List only active requests") @@ -259,6 +260,4 @@ func init() { collaboratorAddUserCmd.Flags().BoolP("approve", "a", false, "Automatically approve the requests") collaboratorRemoveUserCmd.Flags().BoolP("approve", "a", false, "Automatically approve the revoke requests") - - collaboratorCmd.PersistentFlags().StringVar(&profile, "profile", "", "Specify profile to use") } diff --git a/cmd/configure.go b/cmd/configure.go index b6eb564..6305696 100644 --- a/cmd/configure.go +++ b/cmd/configure.go @@ -51,7 +51,7 @@ func init() { } g3i := g3client.NewGen3InterfaceFromCredential(cred, logger, g3client.WithClients()) - err := 
g3i.ExportCredential(context.Background(), cred) + err := g3i.Credentials().Export(context.Background(), cred) if err != nil { logger.Println(err.Error()) } diff --git a/cmd/delete.go b/cmd/delete.go deleted file mode 100644 index 4589577..0000000 --- a/cmd/delete.go +++ /dev/null @@ -1,42 +0,0 @@ -package cmd - -import ( - "context" - - "github.com/calypr/data-client/g3client" - "github.com/calypr/data-client/logs" - "github.com/spf13/cobra" -) - -//Not support yet, place holder only - -func init() { - var guid string - var deleteCmd = &cobra.Command{ // nolint:deadcode,unused,varcheck - Use: "delete", - Short: "Send DELETE HTTP Request for given URI", - Long: `Deletes a given URI from the database. -If no profile is specified, "default" profile is used for authentication.`, - Example: `./data-client delete --uri=v0/submission/bpa/test/entities/example_id - ./data-client delete --profile=user1 --uri=v0/submission/bpa/test/entities/1af1d0ab-efec-4049-98f0-ae0f4bb1bc64`, - Run: func(cmd *cobra.Command, args []string) { - - logger, logCloser := logs.New(profile, logs.WithConsole()) - defer logCloser() - - g3i, err := g3client.NewGen3Interface(profile, logger) - if err != nil { - logger.Fatalf("Fatal NewGen3Interface error: %s\n", err) - } - - msg, err := g3i.Fence().DeleteRecord(context.Background(), guid) - if err != nil { - logger.Fatal(err) - } - logger.Println(msg) - }, - } - - deleteCmd.Flags().StringVar(&profile, "guid", "", "Specify the profile to check your access privileges") - RootCmd.AddCommand(deleteCmd) -} diff --git a/cmd/download-multiple.go b/cmd/download-multiple.go index fa91c15..7f390dc 100644 --- a/cmd/download-multiple.go +++ b/cmd/download-multiple.go @@ -8,9 +8,13 @@ import ( "os" "github.com/calypr/data-client/common" - "github.com/calypr/data-client/download" + "github.com/calypr/data-client/conf" "github.com/calypr/data-client/g3client" "github.com/calypr/data-client/logs" + sydrs "github.com/calypr/syfon/client/drs" + sylogs 
"github.com/calypr/syfon/client/pkg/logs" + syrequest "github.com/calypr/syfon/client/pkg/request" + sydownload "github.com/calypr/syfon/client/xfer/download" "github.com/vbauerster/mpb/v8" "github.com/vbauerster/mpb/v8/decor" @@ -20,12 +24,8 @@ import ( func init() { var manifestPath string var downloadPath string - var filenameFormat string - var rename bool - var noPrompt bool - var protocol string var numParallel int - var skipCompleted bool + var profile string var downloadMultipleCmd = &cobra.Command{ Use: "download-multiple", @@ -33,27 +33,41 @@ func init() { Long: `Get presigned URLs for multiple of files specified in a manifest file and then download all of them.`, Example: `./data-client download-multiple --profile --manifest --download-path `, Run: func(cmd *cobra.Command, args []string) { - // don't initialize transmission logs for non-uploading related commands - logger, logCloser := logs.New(profile, logs.WithConsole(), logs.WithFailedLog(), logs.WithScoreboard(), logs.WithSucceededLog()) defer logCloser() - g3i, err := g3client.NewGen3Interface(profile, logger) - if err != nil { - log.Fatalf("Failed to parse config on profile %s, %v", profile, err) + var dc sydrs.Client + if backendType == "drs" { + config := conf.NewConfigure(logger.Logger) + cred, err := config.Load(profile) + if err != nil { + log.Fatalf("Failed to parse config on profile %s, %v", profile, err) + } + req := syrequest.NewRequestInterface( + sylogs.NewGen3Logger(logger.Logger, "", ""), + cred, + config, + ) + dc = sydrs.NewLocalDrsClient(req, cred.APIEndpoint, sylogs.NewGen3Logger(logger.Logger, "", "")) + } else { + g3i, err := g3client.NewGen3Interface(profile, logger) + if err != nil { + log.Fatalf("Failed to parse config on profile %s, %v", profile, err) + } + dc = g3i.DRSClient() } manifestPath, _ = common.GetAbsolutePath(manifestPath) manifestFile, err := os.Open(manifestPath) if err != nil { - g3i.Logger().Fatalf("Failed to open manifest file %s, %v\n", manifestPath, err) + 
logger.Fatalf("Failed to open manifest file %s, %v\n", manifestPath, err) } defer manifestFile.Close() manifestFileStat, err := manifestFile.Stat() if err != nil { - g3i.Logger().Fatalf("Failed to get manifest file stats %s, %v\n", manifestPath, err) + logger.Fatalf("Failed to get manifest file stats %s, %v\n", manifestPath, err) } - g3i.Logger().Println("Reading manifest...") + logger.Println("Reading manifest...") manifestFileSize := manifestFileStat.Size() manifestProgress := mpb.New(mpb.WithOutput(os.Stdout)) manifestFileBar := manifestProgress.AddBar(manifestFileSize, @@ -68,30 +82,31 @@ func init() { manifestBytes, err := io.ReadAll(manifestFileReader) if err != nil { - g3i.Logger().Fatalf("Failed reading manifest %s, %v\n", manifestPath, err) + logger.Fatalf("Failed reading manifest %s, %v\n", manifestPath, err) } manifestProgress.Wait() var objects []common.ManifestObject err = json.Unmarshal(manifestBytes, &objects) if err != nil { - g3i.Logger().Fatalf("Error has occurred during unmarshalling manifest object: %v\n", err) + logger.Fatalf("Error has occurred during unmarshalling manifest object: %v\n", err) } - err = download.DownloadMultiple( + err = sydownload.DownloadMultiple( context.Background(), - g3i, + dc, + dc, objects, downloadPath, - filenameFormat, - rename, - noPrompt, - protocol, + "original", + true, + false, + "", numParallel, - skipCompleted, + false, ) if err != nil { - g3i.Logger().Fatal(err.Error()) + logger.Fatal(err.Error()) } }, } @@ -101,11 +116,6 @@ func init() { downloadMultipleCmd.Flags().StringVar(&manifestPath, "manifest", "", "The manifest file to read from. 
A valid manifest can be acquired by using the \"Download Manifest\" button in Data Explorer from a data common's portal") downloadMultipleCmd.MarkFlagRequired("manifest") //nolint:errcheck downloadMultipleCmd.Flags().StringVar(&downloadPath, "download-path", ".", "The directory in which to store the downloaded files") - downloadMultipleCmd.Flags().StringVar(&filenameFormat, "filename-format", "original", "The format of filename to be used, including \"original\", \"guid\" and \"combined\"") - downloadMultipleCmd.Flags().BoolVar(&rename, "rename", false, "Only useful when \"--filename-format=original\", will rename file by appending a counter value to its filename if set to true, otherwise the same filename will be used") - downloadMultipleCmd.Flags().BoolVar(&noPrompt, "no-prompt", false, "If set to true, will not display user prompt message for confirmation") - downloadMultipleCmd.Flags().StringVar(&protocol, "protocol", "", "Specify the preferred protocol with --protocol=s3") downloadMultipleCmd.Flags().IntVar(&numParallel, "numparallel", 1, "Number of downloads to run in parallel") - downloadMultipleCmd.Flags().BoolVar(&skipCompleted, "skip-completed", false, "If set to true, will check for filename and size before download and skip any files in \"download-path\" that matches both") RootCmd.AddCommand(downloadMultipleCmd) } diff --git a/cmd/download-single.go b/cmd/download-single.go index 6d1c5db..1c9b485 100644 --- a/cmd/download-single.go +++ b/cmd/download-single.go @@ -5,20 +5,19 @@ import ( "log" "github.com/calypr/data-client/common" - "github.com/calypr/data-client/download" + "github.com/calypr/data-client/conf" "github.com/calypr/data-client/g3client" "github.com/calypr/data-client/logs" + sydrs "github.com/calypr/syfon/client/drs" + sylogs "github.com/calypr/syfon/client/pkg/logs" + syrequest "github.com/calypr/syfon/client/pkg/request" + sydownload "github.com/calypr/syfon/client/xfer/download" "github.com/spf13/cobra" ) func init() { var guid string 
var downloadPath string - var protocol string - var filenameFormat string - var rename bool - var noPrompt bool - var skipCompleted bool var profile string var downloadSingleCmd = &cobra.Command{ @@ -27,35 +26,50 @@ func init() { Long: `Gets a presigned URL for a file from a GUID and then downloads the specified file.`, Example: `./data-client download-single --profile= --guid=206dfaa6-bcf1-4bc9-b2d0-77179f0f48fc`, Run: func(cmd *cobra.Command, args []string) { - // don't initialize transmission logs for non-uploading related commands - logger, logCloser := logs.New(profile, logs.WithConsole(), logs.WithFailedLog(), logs.WithSucceededLog(), logs.WithScoreboard()) defer logCloser() - g3I, err := g3client.NewGen3Interface(profile, logger) - if err != nil { - log.Fatalf("Failed to parse config on profile %s, %v", profile, err) + var dc sydrs.Client + if backendType == "drs" { + config := conf.NewConfigure(logger.Logger) + cred, err := config.Load(profile) + if err != nil { + log.Fatalf("Failed to parse config on profile %s, %v", profile, err) + } + req := syrequest.NewRequestInterface( + sylogs.NewGen3Logger(logger.Logger, "", ""), + cred, + config, + ) + dc = sydrs.NewLocalDrsClient(req, cred.APIEndpoint, sylogs.NewGen3Logger(logger.Logger, "", "")) + } else { + g3I, err := g3client.NewGen3Interface(profile, logger) + if err != nil { + log.Fatalf("Failed to parse config on profile %s, %v", profile, err) + } + dc = g3I.DRSClient() } objects := []common.ManifestObject{ - common.ManifestObject{ + { GUID: guid, }, } - err = download.DownloadMultiple( + err := sydownload.DownloadMultiple( context.Background(), - g3I, + dc, + dc, objects, downloadPath, - filenameFormat, - rename, - noPrompt, - protocol, + "original", + true, + false, + "", 1, - skipCompleted, + false, ) if err != nil { - g3I.Logger().Println(err.Error()) + logger.Println(err.Error()) } }, } @@ -65,10 +79,5 @@ func init() { downloadSingleCmd.Flags().StringVar(&guid, "guid", "", "Specify the guid for the 
data you would like to work with") downloadSingleCmd.MarkFlagRequired("guid") //nolint:errcheck downloadSingleCmd.Flags().StringVar(&downloadPath, "download-path", ".", "The directory in which to store the downloaded files") - downloadSingleCmd.Flags().StringVar(&filenameFormat, "filename-format", "original", "The format of filename to be used, including \"original\", \"guid\" and \"combined\"") - downloadSingleCmd.Flags().BoolVar(&rename, "rename", false, "Only useful when \"--filename-format=original\", will rename file by appending a counter value to its filename if set to true, otherwise the same filename will be used") - downloadSingleCmd.Flags().BoolVar(&noPrompt, "no-prompt", false, "If set to true, will not display user prompt message for confirmation") - downloadSingleCmd.Flags().StringVar(&protocol, "protocol", "", "Specify the preferred protocol with --protocol=gs") - downloadSingleCmd.Flags().BoolVar(&skipCompleted, "skip-completed", false, "If set to true, will check for filename and size before download and skip any files in \"download-path\" that matches both") RootCmd.AddCommand(downloadSingleCmd) } diff --git a/cmd/generate-tsv.go b/cmd/generate-tsv.go deleted file mode 100644 index 47d92c4..0000000 --- a/cmd/generate-tsv.go +++ /dev/null @@ -1,17 +0,0 @@ -package cmd - -import ( - "github.com/spf13/cobra" -) - -func init() { - var generateTSVCmd = &cobra.Command{ - Use: "generate-tsv", - Short: "Generate a file upload tsv from a template", - Long: `Fills in a Gen3 data file template with information from a directory of files.`, - Deprecated: "please use an older version of data-client", - Run: func(cmd *cobra.Command, args []string) {}, - } - - RootCmd.AddCommand(generateTSVCmd) -} diff --git a/cmd/retry-upload.go b/cmd/retry-upload.go index 69de60d..672e9ce 100644 --- a/cmd/retry-upload.go +++ b/cmd/retry-upload.go @@ -6,7 +6,9 @@ import ( "github.com/calypr/data-client/common" "github.com/calypr/data-client/g3client" 
"github.com/calypr/data-client/logs" - "github.com/calypr/data-client/upload" + sylogs "github.com/calypr/syfon/client/pkg/logs" + sytransfer "github.com/calypr/syfon/client/transfer" + syupload "github.com/calypr/syfon/client/xfer/upload" "github.com/spf13/cobra" ) @@ -32,11 +34,16 @@ func init() { if err != nil { Logger.Fatalf("Failed to initialize client: %v", err) } + bk := g3.DRSClient() + uploader, ok := bk.(sytransfer.Uploader) + if !ok { + Logger.Fatalf("DRS client does not implement transfer.Uploader") + } logger := g3.Logger() // Create scoreboard with our logger injected - sb := logs.NewSB(common.MaxRetryCount, logger) + sb := logs.NewSB(common.MaxRetryCount, logger.Logger) // Load failed log failedMap, err := common.LoadFailedLog(failedLogPath) @@ -44,7 +51,8 @@ func init() { logger.Fatalf("Cannot read failed log: %v", err) } - upload.RetryFailedUploads(context.Background(), g3, failedMap) + // Unified DRS client serves as both logical resolver and technical movement writer across S3, GCS, and Azure. 
+ syupload.RetryFailedUploads(context.Background(), uploader, sylogs.NewGen3Logger(Logger.Logger, "", ""), failedMap) sb.PrintSB() }, } diff --git a/cmd/root.go b/cmd/root.go index a2ec2f8..2a676e4 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -7,6 +7,7 @@ import ( ) var profile string +var backendType string // RootCmd represents the base command when called without any subcommands var RootCmd = &cobra.Command{ @@ -27,5 +28,6 @@ func Execute() { func init() { RootCmd.PersistentFlags().StringVar(&profile, "profile", "", "Specify profile to use") + RootCmd.PersistentFlags().StringVar(&backendType, "backend", "gen3", "Specify backend to use (gen3 or drs)") _ = RootCmd.MarkFlagRequired("profile") } diff --git a/cmd/upload-multipart.go b/cmd/upload-multipart.go index 5f020e5..d65a927 100644 --- a/cmd/upload-multipart.go +++ b/cmd/upload-multipart.go @@ -8,7 +8,8 @@ import ( "github.com/calypr/data-client/common" "github.com/calypr/data-client/g3client" "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/upload" + sytransfer "github.com/calypr/syfon/client/transfer" + syupload "github.com/calypr/syfon/client/xfer/upload" "github.com/spf13/cobra" ) @@ -22,9 +23,9 @@ func init() { var uploadMultipartCmd = &cobra.Command{ Use: "upload-multipart", - Short: "Upload a single file using multipart upload", - Long: `Uploads a large file to object storage using multipart upload. 
-This method is resilient to network interruptions and supports resume capability.`, + Short: "Upload a single file using managed multipart upload", + Long: `Uploads a file to object storage using managed multipart upload +(init -> presigned part URLs -> complete).`, Example: `./data-client upload-multipart --profile=myprofile --file-path=./large.bam ./data-client upload-multipart --profile=myprofile --file-path=./data.bam --guid=existing-guid`, Run: func(cmd *cobra.Command, args []string) { @@ -43,6 +44,11 @@ This method is resilient to network interruptions and supports resume capability if err != nil { logger.Fatalf("failed to initialize Gen3 interface: %v", err) } + bk := g3.DRSClient() + uploader, ok := bk.(sytransfer.Uploader) + if !ok { + logger.Fatal("DRS client does not implement transfer.Uploader") + } absPath, err := common.GetAbsolutePath(filePath) if err != nil { @@ -56,13 +62,20 @@ This method is resilient to network interruptions and supports resume capability FileMetadata: common.FileMetadata{}, } - file, err := os.Open(absPath) + if fileInfo.Bucket == "" { + fileInfo.Bucket = bucketName + } + if fileInfo.Bucket == "" { + fileInfo.Bucket = bk.GetBucketName() + } + + // Force multipart path by using direct multipart entrypoint. 
+ file, err := os.Open(fileInfo.SourcePath) if err != nil { - logger.Fatalf("cannot open file %s: %v", absPath, err) + logger.Fatal(err) } defer file.Close() - - err = upload.MultipartUpload(context.Background(), g3, fileInfo, file, true) + err = syupload.MultipartUpload(context.Background(), uploader, fileInfo, file, true) if err != nil { logger.Fatal(err) } diff --git a/cmd/upload-multiple.go b/cmd/upload-multiple.go index 99e58ff..93e74c6 100644 --- a/cmd/upload-multiple.go +++ b/cmd/upload-multiple.go @@ -12,7 +12,9 @@ import ( "github.com/calypr/data-client/common" "github.com/calypr/data-client/g3client" "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/upload" + sylogs "github.com/calypr/syfon/client/pkg/logs" + sytransfer "github.com/calypr/syfon/client/transfer" + syupload "github.com/calypr/syfon/client/xfer/upload" "github.com/spf13/cobra" ) @@ -45,9 +47,14 @@ Options to run multipart uploads for large files and parallel batch uploading ar if err != nil { logger.Fatalf("Failed to parse config on profile %s: %v", profile, err) } + bk := g3i.DRSClient() + uploader, ok := bk.(sytransfer.Uploader) + if !ok { + logger.Fatalf("DRS client does not implement transfer.Uploader") + } // Basic config validation - profileConfig := g3i.GetCredential() + profileConfig := g3i.Credentials().Current() if profileConfig.APIEndpoint == "" { logger.Fatal("No APIEndpoint found in configuration. 
Run \"./data-client configure\" first.") } @@ -79,7 +86,7 @@ Options to run multipart uploads for large files and parallel batch uploading ar for _, obj := range objects { localFilePath := filepath.Join(absUploadPath, obj.Title) - fur, err := upload.ProcessFilename(logger, absUploadPath, localFilePath, obj.GUID, includeSubDirName, false) + fur, err := syupload.ProcessFilename(sylogs.NewGen3Logger(logger.Logger, "", ""), absUploadPath, localFilePath, obj.GUID, includeSubDirName, false) if err != nil { logger.Printf("Skipping %s: %v\n", localFilePath, err) logger.Failed(localFilePath, filepath.Base(localFilePath), common.FileMetadata{}, obj.GUID, 0, false) @@ -99,50 +106,23 @@ Options to run multipart uploads for large files and parallel batch uploading ar return } - // Classify single vs multipart - single, multi := upload.SeparateSingleAndMultipartUploads(g3i, requests) - - // Upload single-part files if batch { - workers, respCh, errCh, batchFURObjects := upload.InitBatchUploadChannels(numParallel, len(single)) - for i, furObject := range single { - // FileInfo processing and path normalization are already done, so we use the object directly - if len(batchFURObjects) < workers { - batchFURObjects = append(batchFURObjects, furObject) - } else { - upload.BatchUpload(ctx, g3i, batchFURObjects, workers, respCh, errCh, bucketName) - batchFURObjects = []common.FileUploadRequestObject{furObject} - } - if i == len(single)-1 && len(batchFURObjects) > 0 { - upload.BatchUpload(ctx, g3i, batchFURObjects, workers, respCh, errCh, bucketName) - } - } + workers, respCh, errCh, _ := syupload.InitBatchUploadChannels(numParallel, len(requests)) + syupload.BatchUpload(ctx, uploader, sylogs.NewGen3Logger(logger.Logger, "", ""), requests, workers, respCh, errCh, bucketName) } else { - for _, req := range single { - upload.UploadSingle(ctx, g3i, req, true) - } - } - - // Upload multipart files - for _, req := range multi { - - file, err := os.Open(req.SourcePath) - if err != nil { - 
g3i.Logger().Printf("Error opening file %s : %v", req.SourcePath, err) - continue - } - - err = upload.MultipartUpload(ctx, g3i, req, file, true) - if err != nil { - logger.Println("Multipart upload failed:", err) + for _, req := range requests { + err = syupload.Upload(ctx, uploader, req, true) + if err != nil { + logger.Println("Upload failed:", err) + } } } - // Retry logic (only if nothing succeeded initially) + // Retry logic if len(logger.GetSucceededLogMap()) == 0 { failed := logger.GetFailedLogMap() if len(failed) > 0 { - upload.RetryFailedUploads(ctx, g3i, failed) + syupload.RetryFailedUploads(ctx, uploader, sylogs.NewGen3Logger(logger.Logger, "", ""), failed) } } diff --git a/cmd/upload-single.go b/cmd/upload-single.go index 34eb9ba..6d9600d 100644 --- a/cmd/upload-single.go +++ b/cmd/upload-single.go @@ -9,7 +9,8 @@ import ( "github.com/calypr/data-client/common" "github.com/calypr/data-client/g3client" "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/upload" + sytransfer "github.com/calypr/syfon/client/transfer" + syupload "github.com/calypr/syfon/client/xfer/upload" "github.com/spf13/cobra" ) @@ -31,14 +32,20 @@ func init() { if err != nil { log.Fatalf("Failed to parse config on profile %s: %v", profile, err) } + bk := g3i.DRSClient() + uploader, ok := bk.(sytransfer.Uploader) + if !ok { + log.Fatalln("DRS client does not implement transfer.Uploader") + } - req := common.FileUploadRequestObject{ + fur := common.FileUploadRequestObject{ SourcePath: filePath, ObjectKey: filepath.Base(filePath), Bucket: bucketName, GUID: guid, } - err = upload.UploadSingle(context.Background(), g3i, req, true) + // Unified DRS client serves as its own transport writer across S3, GCS, and Azure. 
+ err = syupload.Upload(context.Background(), uploader, fur, true) if err != nil { log.Fatalln(err.Error()) } diff --git a/cmd/upload.go b/cmd/upload.go index a99fdc0..a86583d 100644 --- a/cmd/upload.go +++ b/cmd/upload.go @@ -9,7 +9,9 @@ import ( "github.com/calypr/data-client/common" "github.com/calypr/data-client/g3client" "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/upload" + sylogs "github.com/calypr/syfon/client/pkg/logs" + sytransfer "github.com/calypr/syfon/client/transfer" + syupload "github.com/calypr/syfon/client/xfer/upload" "github.com/spf13/cobra" ) @@ -44,7 +46,7 @@ func init() { logger := g3i.Logger() if hasMetadata { - hasShepherd, err := g3i.Fence().CheckForShepherdAPI(ctx) + hasShepherd, err := g3i.FenceClient().CheckForShepherdAPI(ctx) if err != nil { logger.Printf("WARNING: Error when checking for Shepherd API: %v", err) } else { @@ -63,20 +65,16 @@ func init() { logger.Println("\nThe following file(s) has been found in path \"" + uploadPath + "\" and will be uploaded:") for _, filePath := range filePaths { - // Use ProcessFilename to create the unified object (GUID is empty here, as this command requests a new GUID) - // ProcessFilename signature: (uploadPath, filePath, objectId, includeSubDirName, includeMetadata) - furObject, err := upload.ProcessFilename(g3i.Logger(), uploadPath, filePath, "", includeSubDirName, hasMetadata) + syLogger := sylogs.NewGen3Logger(g3i.Logger().Logger, "", "") + furObject, err := syupload.ProcessFilename(syLogger, uploadPath, filePath, "", includeSubDirName, hasMetadata) furObject.Bucket = bucketName - // Handle case where ProcessFilename fails (e.g., metadata parsing error) if err != nil { - // Use the data available for logging the failure g3i.Logger().Failed(filePath, filepath.Base(filePath), common.FileMetadata{}, "", 0, false) logger.Println("Error processing file path or metadata: " + err.Error()) continue } - // Optional: Display file path before proceeding file, _ := 
os.Open(filePath) if fi, _ := file.Stat(); !fi.IsDir() { logger.Println("\t" + filePath) @@ -85,83 +83,29 @@ func init() { uploadRequestObjects = append(uploadRequestObjects, furObject) } - // fmt.Fprintln(os.Stderr) logger.Println() - if len(uploadRequestObjects) == 0 { - logger.Println("No valid file upload requests were created.") - return + // Unified DRS client serves as both logical resolver and technical movement writer across S3, GCS, and Azure. + drsClient := g3i.DRSClient() + uploader, ok := drsClient.(sytransfer.Uploader) + if !ok { + logger.Fatal("DRS client does not implement transfer.Uploader") } - singlePartObjects, multipartObjects := upload.SeparateSingleAndMultipartUploads(g3i, uploadRequestObjects) - if batch { - workers, respCh, errCh, batchFURObjects := upload.InitBatchUploadChannels(numParallel, len(singlePartObjects)) - - for _, furObject := range singlePartObjects { - if len(batchFURObjects) < workers { - batchFURObjects = append(batchFURObjects, furObject) - } else { - upload.BatchUpload(ctx, g3i, batchFURObjects, workers, respCh, errCh, bucketName) - batchFURObjects = []common.FileUploadRequestObject{furObject} - } - } - if len(batchFURObjects) > 0 { - upload.BatchUpload(ctx, g3i, batchFURObjects, workers, respCh, errCh, bucketName) - } - - if len(errCh) > 0 { - close(errCh) - for err := range errCh { - if err != nil { - logger.Printf("Error occurred during uploading: %s\n", err.Error()) - } - } - } + workers, respCh, errCh, _ := syupload.InitBatchUploadChannels(numParallel, len(uploadRequestObjects)) + syupload.BatchUpload(ctx, uploader, sylogs.NewGen3Logger(logger.Logger, "", ""), uploadRequestObjects, workers, respCh, errCh, bucketName) } else { - for _, furObject := range singlePartObjects { - file, err := os.Open(furObject.SourcePath) + for _, furObject := range uploadRequestObjects { + err := syupload.Upload(ctx, uploader, furObject, true) if err != nil { - logger.Failed(furObject.SourcePath, furObject.ObjectKey, 
furObject.FileMetadata, furObject.GUID, 0, false) - logger.Println("File open error: " + err.Error()) - continue + logger.Error("Upload failed", "path", furObject.SourcePath, "error", err) } - defer file.Close() - fi, err := file.Stat() - if err != nil { - logger.Failed(furObject.SourcePath, furObject.ObjectKey, furObject.FileMetadata, furObject.GUID, 0, false) - logger.Println("File stat error for file" + fi.Name() + ", file may be missing or unreadable because of permissions.\n") - continue - } - upload.UploadSingle(ctx, g3i, furObject, true) } } - if len(multipartObjects) > 0 { - cred := g3i.GetCredential() - if cred.UseShepherd == "true" || - cred.UseShepherd == "" && common.DefaultUseShepherd == true { - logger.Printf("error: Shepherd currently does not support multipart uploads. For the moment, please disable Shepherd with\n $ data-client configure --profile=%v --use-shepherd=false\nand try again", cred.Profile) - return - } - g3i.Logger().Println("Multipart uploading...") - for _, furObject := range multipartObjects { - file, err := os.Open(furObject.SourcePath) - if err != nil { - logger.Failed(furObject.SourcePath, furObject.ObjectKey, furObject.FileMetadata, furObject.GUID, 0, false) - logger.Println("File open error: " + err.Error()) - continue - } - err = upload.MultipartUpload(ctx, g3i, furObject, file, true) - if err != nil { - g3i.Logger().Println(err.Error()) - } else { - g3i.Logger().Scoreboard().IncrementSB(0) - } - } - } if len(g3i.Logger().GetSucceededLogMap()) == 0 { - upload.RetryFailedUploads(ctx, g3i, g3i.Logger().GetFailedLogMap()) + syupload.RetryFailedUploads(ctx, uploader, sylogs.NewGen3Logger(logger.Logger, "", ""), g3i.Logger().GetFailedLogMap()) } g3i.Logger().Scoreboard().PrintSB() }, diff --git a/common/common.go b/common/common.go index 716625f..711cf75 100644 --- a/common/common.go +++ b/common/common.go @@ -139,3 +139,10 @@ func CanDownloadFile(signedURL string) error { return fmt.Errorf("failed to access file, HTTP status: %d", 
resp.StatusCode) } +func IsCloudPresignedURL(url string) bool { + return strings.Contains(url, "X-Amz-Signature") || + strings.Contains(url, "X-Goog-Signature") || + strings.Contains(url, "Signature=") || + strings.Contains(url, "AWSAccessKeyId=") || + strings.Contains(url, "Expires=") +} diff --git a/common/constants.go b/common/constants.go index 1191795..6abde97 100644 --- a/common/constants.go +++ b/common/constants.go @@ -7,15 +7,15 @@ import ( const ( // B is bytes - B int64 = iota + B int64 = 1 // KB is kilobytes - KB int64 = 1 << (10 * iota) + KB int64 = 1024 * B // MB is megabytes - MB + MB int64 = 1024 * KB // GB is gigabytes - GB + GB int64 = 1024 * MB // TB is terabytes - TB + TB int64 = 1024 * GB ) const ( // DefaultUseShepherd sets whether gen3client will attempt to use the Shepherd / Object Management API @@ -36,31 +36,31 @@ const ( ShepherdVersionEndpoint = "/mds/version" // IndexdIndexEndpoint is the endpoint postfix for INDEXD index - IndexdIndexEndpoint = "/index/index" + IndexdIndexEndpoint = "/index" // FenceUserEndpoint is the endpoint postfix for FENCE user FenceUserEndpoint = "/user/user" - // FenceDataEndpoint is the endpoint postfix for FENCE data - FenceDataEndpoint = "/user/data" + // FenceDataEndpoint is the canonical endpoint prefix for upload/delete flows + FenceDataEndpoint = "/data/upload" // FenceAccessTokenEndpoint is the endpoint postfix for FENCE access token FenceAccessTokenEndpoint = "/user/credentials/api/access_token" - // FenceDataUploadEndpoint is the endpoint postfix for FENCE data upload - FenceDataUploadEndpoint = FenceDataEndpoint + "/upload" + // FenceDataUploadEndpoint is the endpoint postfix for upload init/presigned-url + FenceDataUploadEndpoint = FenceDataEndpoint - // FenceDataDownloadEndpoint is the endpoint postfix for FENCE data download - FenceDataDownloadEndpoint = FenceDataEndpoint + "/download" + // FenceDataDownloadEndpoint is the endpoint postfix for download presigned-url + FenceDataDownloadEndpoint = 
"/data/download" - // FenceDataMultipartInitEndpoint is the endpoint postfix for FENCE multipart init - FenceDataMultipartInitEndpoint = FenceDataEndpoint + "/multipart/init" + // FenceDataMultipartInitEndpoint is the endpoint postfix for multipart init + FenceDataMultipartInitEndpoint = "/data/multipart/init" - // FenceDataMultipartUploadEndpoint is the endpoint postfix for FENCE multipart upload - FenceDataMultipartUploadEndpoint = FenceDataEndpoint + "/multipart/upload" + // FenceDataMultipartUploadEndpoint is the endpoint postfix for multipart upload + FenceDataMultipartUploadEndpoint = "/data/multipart/upload" - // FenceDataMultipartCompleteEndpoint is the endpoint postfix for FENCE multipart complete - FenceDataMultipartCompleteEndpoint = FenceDataEndpoint + "/multipart/complete" + // FenceDataMultipartCompleteEndpoint is the endpoint postfix for multipart complete + FenceDataMultipartCompleteEndpoint = "/data/multipart/complete" // PathSeparator is os dependent path separator char PathSeparator = string(os.PathSeparator) diff --git a/common/resource.go b/common/resource.go index 9e0d011..86d0828 100644 --- a/common/resource.go +++ b/common/resource.go @@ -6,8 +6,11 @@ import ( ) func ProjectToResource(project string) (string, error) { + if project == "" { + return "", fmt.Errorf("error: project ID is empty") + } if !strings.Contains(project, "-") { - return "", fmt.Errorf("error: invalid project ID %s, ID should look like -", project) + return "/programs/default/projects/" + project, nil } projectIdArr := strings.SplitN(project, "-", 2) return "/programs/" + projectIdArr[0] + "/projects/" + projectIdArr[1], nil diff --git a/common/types.go b/common/types.go index 4626c44..64b6e6e 100644 --- a/common/types.go +++ b/common/types.go @@ -1,59 +1,17 @@ package common -import ( - "io" - "net/http" -) +import sycommon "github.com/calypr/syfon/client/pkg/common" -type AccessTokenStruct struct { - AccessToken string `json:"access_token"` -} - -// 
FileUploadRequestObject defines a object for file upload -type FileUploadRequestObject struct { - SourcePath string - ObjectKey string - FileMetadata FileMetadata - GUID string - PresignedURL string - Bucket string `json:"bucket,omitempty"` -} - -// FileDownloadResponseObject defines a object for file download -type FileDownloadResponseObject struct { - DownloadPath string - Filename string - GUID string - PresignedURL string - Range int64 - Overwrite bool - Skip bool - Response *http.Response - Writer io.Writer -} - -// FileMetadata defines the metadata accepted by the new object management API, Shepherd -type FileMetadata struct { - Authz []string `json:"authz"` - Aliases []string `json:"aliases"` - // Metadata is an encoded JSON string of any arbitrary metadata the user wishes to upload. - Metadata map[string]any `json:"metadata"` -} - -// RetryObject defines a object for retry upload -type RetryObject struct { - SourcePath string - ObjectKey string - FileMetadata FileMetadata - GUID string - RetryCount int - Multipart bool - Bucket string -} - -type ManifestObject struct { - GUID string `json:"object_id"` - SubjectID string `json:"subject_id"` - Title string `json:"title"` - Size int64 `json:"size"` -} +type AccessTokenStruct = sycommon.AccessTokenStruct +type FileUploadRequestObject = sycommon.FileUploadRequestObject +type FileDownloadResponseObject = sycommon.FileDownloadResponseObject +type FileMetadata = sycommon.FileMetadata +type RetryObject = sycommon.RetryObject +type MultipartUploadInit = sycommon.MultipartUploadInit +type MultipartUploadPart = sycommon.MultipartUploadPart +type ManifestObject = sycommon.ManifestObject +type ShepherdInitRequestObject = sycommon.ShepherdInitRequestObject +type ShepherdAuthz = sycommon.ShepherdAuthz +type PresignedURLResponse = sycommon.PresignedURLResponse +type UploadURLResolveRequest = sycommon.UploadURLResolveRequest +type UploadURLResolveResponse = sycommon.UploadURLResolveResponse diff --git a/conf/config.go 
b/conf/config.go index 6c40967..ba5f700 100644 --- a/conf/config.go +++ b/conf/config.go @@ -1,6 +1,6 @@ package conf -//go:generate mockgen -destination=../mocks/mock_configure.go -package=mocks github.com/calypr/data-client/conf ManagerInterface +//go:generate go run go.uber.org/mock/mockgen@v0.6.0 -destination=../mocks/mock_configure.go -package=mocks github.com/calypr/data-client/conf ManagerInterface import ( "encoding/json" @@ -12,20 +12,13 @@ import ( "strings" "github.com/calypr/data-client/common" + syconf "github.com/calypr/syfon/client/conf" "gopkg.in/ini.v1" ) var ErrProfileNotFound = errors.New("profile not found in config file") -type Credential struct { - Profile string - KeyID string - APIKey string - AccessToken string - APIEndpoint string - UseShepherd string - MinShepherdVersion string -} +type Credential = syconf.Credential type Manager struct { Logger *slog.Logger @@ -127,6 +120,8 @@ func (man *Manager) Load(profile string) (*Credential, error) { APIEndpoint: sec.Key("api_endpoint").String(), UseShepherd: sec.Key("use_shepherd").String(), MinShepherdVersion: sec.Key("min_shepherd_version").String(), + Bucket: sec.Key("bucket").String(), + ProjectID: sec.Key("project_id").String(), } if profileConfig.KeyID == "" && profileConfig.APIKey == "" && profileConfig.AccessToken == "" { @@ -178,6 +173,8 @@ func (man *Manager) Save(profileConfig *Credential) error { section.Key("use_shepherd").SetValue(profileConfig.UseShepherd) section.Key("min_shepherd_version").SetValue(profileConfig.MinShepherdVersion) + section.Key("bucket").SetValue(profileConfig.Bucket) + section.Key("project_id").SetValue(profileConfig.ProjectID) err = cfg.SaveTo(configPath) if err != nil { errs := fmt.Errorf("error occurred when saving config file: %s", err.Error()) diff --git a/download/batch.go b/download/batch.go deleted file mode 100644 index 967f16a..0000000 --- a/download/batch.go +++ /dev/null @@ -1,189 +0,0 @@ -package download - -import ( - "context" - "fmt" - "io" - 
"os" - "path/filepath" - "sync" - "sync/atomic" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/g3client" - "github.com/calypr/data-client/logs" - "github.com/hashicorp/go-multierror" - "github.com/vbauerster/mpb/v8" - "github.com/vbauerster/mpb/v8/decor" - "golang.org/x/sync/errgroup" -) - -// downloadFiles performs bounded parallel downloads and collects ALL errors -func downloadFiles( - ctx context.Context, - g3i g3client.Gen3Interface, - files []common.FileDownloadResponseObject, - numParallel int, - protocol string, -) (int, error) { - if len(files) == 0 { - return 0, nil - } - - logger := g3i.Logger() - - protocolText := "" - if protocol != "" { - protocolText = "?protocol=" + protocol - } - - // Scoreboard: maxRetries = 0 for now (no retry logic yet) - sb := logs.NewSB(0, logger) - - progress := common.GetProgress(ctx) - useProgressBars := (progress == nil) - - var p *mpb.Progress - if useProgressBars { - p = mpb.New(mpb.WithOutput(os.Stdout)) - } - - var eg errgroup.Group - eg.SetLimit(numParallel) - - var success atomic.Int64 - var mu sync.Mutex - var allErrors []*multierror.Error - - for i := range files { - fdr := &files[i] // capture loop variable - - eg.Go(func() error { - var err error - - defer func() { - if err != nil { - // Final failure bucket - sb.IncrementSB(len(sb.Counts) - 1) - - mu.Lock() - allErrors = append(allErrors, multierror.Append(nil, err)) - mu.Unlock() - } else { - success.Add(1) - sb.IncrementSB(0) // success, no retries - } - }() - - // Get presigned URL - if err = GetDownloadResponse(ctx, g3i, fdr, protocolText); err != nil { - err = fmt.Errorf("get URL for %s (GUID: %s): %w", fdr.Filename, fdr.GUID, err) - return err - } - - // Prepare directories - fullPath := filepath.Join(fdr.DownloadPath, fdr.Filename) - if dir := filepath.Dir(fullPath); dir != "." 
{ - if err = os.MkdirAll(dir, 0766); err != nil { - _ = fdr.Response.Body.Close() - err = fmt.Errorf("mkdir for %s: %w", fullPath, err) - return err - } - } - - flags := os.O_CREATE | os.O_WRONLY - if fdr.Range > 0 { - flags |= os.O_APPEND - } else if fdr.Overwrite { - flags |= os.O_TRUNC - } - - file, err := os.OpenFile(fullPath, flags, 0666) - if err != nil { - _ = fdr.Response.Body.Close() - err = fmt.Errorf("open local file %s: %w", fullPath, err) - return err - } - - // Progress bar for this file - total := fdr.Response.ContentLength + fdr.Range - var writer io.Writer = file - var bar *mpb.Bar - var tracker *progressWriter - - if useProgressBars { - bar = p.AddBar(total, - mpb.PrependDecorators( - decor.Name(truncateFilename(fdr.Filename, 40)+" "), - decor.CountersKibiByte("% .1f / % .1f"), - ), - mpb.AppendDecorators( - decor.Percentage(), - decor.AverageSpeed(decor.SizeB1024(0), "% .1f"), - ), - ) - - if fdr.Range > 0 { - bar.SetCurrent(fdr.Range) - } - - writer = bar.ProxyWriter(file) - } else if progress != nil { - tracker = newProgressWriter(file, progress, fdr.GUID, total) - writer = tracker - } - - _, copyErr := io.Copy(writer, fdr.Response.Body) - _ = fdr.Response.Body.Close() - _ = file.Close() - - if tracker != nil { - if finalizeErr := tracker.Finalize(); finalizeErr != nil && copyErr == nil { - copyErr = finalizeErr - } - } - - if copyErr != nil { - if bar != nil { - bar.Abort(true) - } - err = fmt.Errorf("download failed for %s: %w", fdr.Filename, copyErr) - return err - } - - return nil - }) - } - - // Wait for all downloads - _ = eg.Wait() - if p != nil { - p.Wait() - } - - // Combine errors - var combinedError error - mu.Lock() - if len(allErrors) > 0 { - multiErr := multierror.Append(nil, nil) - for _, e := range allErrors { - multiErr = multierror.Append(multiErr, e.Errors...) 
- } - combinedError = multiErr.ErrorOrNil() - } - mu.Unlock() - - downloaded := int(success.Load()) - - // Print scoreboard summary - sb.PrintSB() - - if combinedError != nil { - logger.Printf("%d files downloaded, but %d failed:\n", downloaded, len(allErrors)) - logger.Println(combinedError.Error()) - } else { - logger.Printf("%d files downloaded successfully.\n", downloaded) - } - - return downloaded, combinedError -} diff --git a/download/downloader.go b/download/downloader.go deleted file mode 100644 index 044eeb8..0000000 --- a/download/downloader.go +++ /dev/null @@ -1,168 +0,0 @@ -package download - -import ( - "context" - "fmt" - "log/slog" - "os" - "strings" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/g3client" - "github.com/vbauerster/mpb/v8" - "github.com/vbauerster/mpb/v8/decor" -) - -// DownloadMultiple is the public entry point called from g3cmd -func DownloadMultiple( - ctx context.Context, - g3i g3client.Gen3Interface, - objects []common.ManifestObject, - downloadPath string, - filenameFormat string, - rename bool, - noPrompt bool, - protocol string, - numParallel int, - skipCompleted bool, -) error { - logger := g3i.Logger() - - // === Input validation === - if numParallel < 1 { - return fmt.Errorf("numparallel must be a positive integer") - } - - var err error - downloadPath, err = common.ParseRootPath(downloadPath) - if err != nil { - return fmt.Errorf("invalid download path: %w", err) - } - if !strings.HasSuffix(downloadPath, "/") { - downloadPath += "/" - } - - filenameFormat = strings.ToLower(strings.TrimSpace(filenameFormat)) - if filenameFormat != "original" && filenameFormat != "guid" && filenameFormat != "combined" { - return fmt.Errorf("filename-format must be one of: original, guid, combined") - } - if (filenameFormat == "guid" || filenameFormat == "combined") && rename { - logger.WarnContext(ctx, "NOTICE: rename flag is ignored in guid/combined mode") - rename = false - } - - // === Warnings and user 
confirmation === - if err := handleWarningsAndConfirmation(ctx, logger.Logger, downloadPath, filenameFormat, rename, noPrompt); err != nil { - return err // aborted by user - } - - // === Create download directory === - if err := os.MkdirAll(downloadPath, 0766); err != nil { - return fmt.Errorf("cannot create directory %s: %w", downloadPath, err) - } - - // === Prepare files (metadata + local validation) === - toDownload, skipped, renamed, err := prepareFiles(ctx, g3i, objects, downloadPath, filenameFormat, rename, skipCompleted, protocol) - if err != nil { - return err - } - - logger.InfoContext(ctx, "Summary", - "Total objects", len(objects), - "To download", len(toDownload), - "Skipped", len(skipped)) - - // === Download phase === - downloaded, downloadErr := downloadFiles(ctx, g3i, toDownload, numParallel, protocol) - - // === Final summary === - logger.InfoContext(ctx, fmt.Sprintf("%d files downloaded successfully.", downloaded)) - printRenamed(ctx, logger.Logger, renamed) - printSkipped(ctx, logger.Logger, skipped) - - if downloadErr != nil { - logger.WarnContext(ctx, "Some downloads failed. 
See errors above.") - } - - return nil // we log failures but don't fail the whole command unless critical -} - -// handleWarningsAndConfirmation prints warnings and asks for confirmation if needed -func handleWarningsAndConfirmation(ctx context.Context, logger *slog.Logger, downloadPath, filenameFormat string, rename, noPrompt bool) error { - if filenameFormat == "guid" || filenameFormat == "combined" { - logger.WarnContext(ctx, fmt.Sprintf("WARNING: in %q mode, duplicate files in %q will be overwritten", filenameFormat, downloadPath)) - } else if !rename { - logger.WarnContext(ctx, fmt.Sprintf("WARNING: rename=false in original mode – duplicates in %q will be overwritten", downloadPath)) - } else { - logger.InfoContext(ctx, fmt.Sprintf("NOTICE: rename=true in original mode – duplicates in %q will be renamed with a counter", downloadPath)) - } - - if noPrompt { - return nil - } - if !AskForConfirmation(logger, "Proceed? (y/N)") { - return fmt.Errorf("aborted by user") - } - return nil -} - -// prepareFiles gathers metadata, checks local files, collects skips/renames -func prepareFiles( - ctx context.Context, - g3i g3client.Gen3Interface, - objects []common.ManifestObject, - downloadPath, filenameFormat string, - rename, skipCompleted bool, - protocol string, -) ([]common.FileDownloadResponseObject, []RenamedOrSkippedFileInfo, []RenamedOrSkippedFileInfo, error) { - logger := g3i.Logger() - renamed := make([]RenamedOrSkippedFileInfo, 0) - skipped := make([]RenamedOrSkippedFileInfo, 0) - toDownload := make([]common.FileDownloadResponseObject, 0, len(objects)) - - p := mpb.New(mpb.WithOutput(os.Stdout)) - bar := p.AddBar(int64(len(objects)), - mpb.PrependDecorators(decor.Name("Preparing "), decor.CountersNoUnit("%d / %d")), - mpb.AppendDecorators(decor.Percentage()), - ) - - for _, obj := range objects { - if obj.GUID == "" { - logger.WarnContext(ctx, "Empty GUID, skipping entry") - bar.Increment() - continue - } - - info := &IndexdResponse{Name: obj.Title, Size: 
obj.Size} - var err error - if info.Name == "" || info.Size == 0 { - // Very strict object id checking - info, err = AskGen3ForFileInfo(ctx, g3i, obj.GUID, protocol, downloadPath, filenameFormat, rename, &renamed) - if err != nil { - return nil, nil, nil, err - } - } - - fdr := common.FileDownloadResponseObject{ - DownloadPath: downloadPath, - Filename: info.Name, - GUID: obj.GUID, - } - - if !rename { - validateLocalFileStat(logger, &fdr, int64(info.Size), skipCompleted) - } - - if fdr.Skip { - logger.InfoContext(ctx, fmt.Sprintf("Skipping %q (GUID: %s) – complete local copy exists", fdr.Filename, fdr.GUID)) - skipped = append(skipped, RenamedOrSkippedFileInfo{GUID: fdr.GUID, OldFilename: fdr.Filename}) - } else { - toDownload = append(toDownload, fdr) - } - - bar.Increment() - } - p.Wait() - logger.InfoContext(ctx, "Preparation complete") - return toDownload, skipped, renamed, nil -} diff --git a/download/file_info.go b/download/file_info.go deleted file mode 100644 index 8fb8134..0000000 --- a/download/file_info.go +++ /dev/null @@ -1,135 +0,0 @@ -package download - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/g3client" - "github.com/calypr/data-client/request" -) - -func AskGen3ForFileInfo( - ctx context.Context, - g3i g3client.Gen3Interface, - guid, protocol, downloadPath, filenameFormat string, - rename bool, - renamedFiles *[]RenamedOrSkippedFileInfo, -) (*IndexdResponse, error) { - hasShepherd, err := g3i.Fence().CheckForShepherdAPI(ctx) - if err != nil { - g3i.Logger().Println("Error checking Shepherd API: " + err.Error()) - g3i.Logger().Println("Falling back to Indexd...") - hasShepherd = false - } - - if hasShepherd { - info, err := fetchFromShepherd(ctx, g3i, guid, downloadPath, filenameFormat, renamedFiles) - if err == nil { - return info, nil - } - g3i.Logger().Printf("Shepherd fetch failed for %s: %v. 
Falling back to Indexd...\n", guid, err) - } - info, err := fetchFromIndexd(ctx, g3i, http.MethodGet, guid, protocol, downloadPath, filenameFormat, rename, renamedFiles) - if err != nil { - g3i.Logger().Printf("All meta-data lookups failed for %s: %v. Using GUID as default filename.\n", guid, err) - *renamedFiles = append(*renamedFiles, RenamedOrSkippedFileInfo{GUID: guid, OldFilename: guid, NewFilename: guid}) - return &IndexdResponse{guid, 0}, nil - } - return info, nil -} - -func fetchFromShepherd( - ctx context.Context, - g3i g3client.Gen3Interface, - guid, downloadPath, filenameFormat string, - renamedFiles *[]RenamedOrSkippedFileInfo, -) (*IndexdResponse, error) { - cred := g3i.GetCredential() - res, err := g3i.Fence().Do(ctx, - &request.RequestBuilder{ - Url: cred.APIEndpoint + "/" + cred.AccessToken + common.ShepherdEndpoint + "/objects/" + guid, - Method: http.MethodGet, - Token: cred.AccessToken, - }) - if err != nil { - return nil, err - } - defer res.Body.Close() - - var decoded struct { - Record struct { - FileName string `json:"file_name"` - Size int64 `json:"size"` - } `json:"record"` - } - if err := json.NewDecoder(res.Body).Decode(&decoded); err != nil { - return nil, err - } - - return &IndexdResponse{applyFilenameFormat(decoded.Record.FileName, guid, downloadPath, filenameFormat, false, renamedFiles), decoded.Record.Size}, nil -} - -func fetchFromIndexd( - ctx context.Context, - g3i g3client.Gen3Interface, method, - guid, protocol, downloadPath, filenameFormat string, - rename bool, - renamedFiles *[]RenamedOrSkippedFileInfo, -) (*IndexdResponse, error) { - - cred := g3i.GetCredential() - resp, err := g3i.Fence().Do( - ctx, - &request.RequestBuilder{ - Url: cred.APIEndpoint + common.IndexdIndexEndpoint + "/" + guid, - Method: method, - Token: cred.AccessToken, - }, - ) - if err != nil { - return nil, fmt.Errorf("error in fetch FromIndexd: %s", err) - } - - defer resp.Body.Close() - msg, err := g3i.Fence().ParseFenceURLResponse(resp) - if err != 
nil { - return nil, err - } - - if filenameFormat == "guid" { - return &IndexdResponse{guid, msg.Size}, nil - } - - if msg.FileName == "" { - return nil, fmt.Errorf("FileName is a required field in Indexd to download the file, but upload record %#v does not contain it", msg) - } - - return &IndexdResponse{applyFilenameFormat(msg.FileName, guid, downloadPath, filenameFormat, rename, renamedFiles), msg.Size}, nil -} - -func applyFilenameFormat(baseName, guid, downloadPath, format string, rename bool, renamedFiles *[]RenamedOrSkippedFileInfo) string { - switch format { - case "guid": - return guid - case "combined": - return guid + "_" + baseName - case "original": - if !rename { - return baseName - } - newName := processOriginalFilename(downloadPath, baseName) - if newName != baseName { - *renamedFiles = append(*renamedFiles, RenamedOrSkippedFileInfo{ - GUID: guid, - OldFilename: baseName, - NewFilename: newName, - }) - } - return newName - default: - return baseName - } -} diff --git a/download/progress_writer.go b/download/progress_writer.go deleted file mode 100644 index 3917234..0000000 --- a/download/progress_writer.go +++ /dev/null @@ -1,74 +0,0 @@ -package download - -import ( - "fmt" - "io" - - "github.com/calypr/data-client/common" -) - -type progressWriter struct { - writer io.Writer - onProgress common.ProgressCallback - hash string - total int64 - bytesSoFar int64 - bytesSinceReport int64 -} - -func newProgressWriter(writer io.Writer, onProgress common.ProgressCallback, hash string, total int64) *progressWriter { - return &progressWriter{ - writer: writer, - onProgress: onProgress, - hash: hash, - total: total, - } -} - -func (pw *progressWriter) Write(p []byte) (int, error) { - n, err := pw.writer.Write(p) - if n > 0 && pw.onProgress != nil { - delta := int64(n) - pw.bytesSoFar += delta - pw.bytesSinceReport += delta - - if pw.bytesSinceReport >= common.OnProgressThreshold { - if progressErr := pw.onProgress(common.ProgressEvent{ - Event: "progress", - 
Oid: pw.hash, - BytesSoFar: pw.bytesSoFar, - BytesSinceLast: pw.bytesSinceReport, - }); progressErr != nil { - return n, progressErr - } - pw.bytesSinceReport = 0 - } - } - return n, err -} - -func (pw *progressWriter) Finalize() error { - if pw.onProgress != nil && pw.bytesSinceReport > 0 { - _ = pw.onProgress(common.ProgressEvent{ - Event: "progress", - Oid: pw.hash, - BytesSoFar: pw.bytesSoFar, - BytesSinceLast: pw.bytesSinceReport, - }) - pw.bytesSinceReport = 0 - } - if pw.total > 0 && pw.bytesSoFar < pw.total { - delta := pw.total - pw.bytesSoFar - pw.bytesSoFar = pw.total - if pw.onProgress != nil { - _ = pw.onProgress(common.ProgressEvent{ - Event: "progress", - Oid: pw.hash, - BytesSoFar: pw.bytesSoFar, - BytesSinceLast: delta, - }) - } - return fmt.Errorf("download incomplete: %d/%d bytes", pw.bytesSoFar-delta, pw.total) - } - return nil -} diff --git a/download/progress_writer_test.go b/download/progress_writer_test.go deleted file mode 100644 index b11af3d..0000000 --- a/download/progress_writer_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package download - -import ( - "bytes" - "io" - "testing" - - "github.com/calypr/data-client/common" -) - -func TestProgressWriterFinalizes(t *testing.T) { - payload := bytes.Repeat([]byte("b"), 20) - var events []common.ProgressEvent - - writer := newProgressWriter(io.Discard, func(event common.ProgressEvent) error { - events = append(events, event) - return nil - }, "oid-456", int64(len(payload))) - - if _, err := writer.Write(payload); err != nil { - t.Fatalf("write failed: %v", err) - } - if err := writer.Finalize(); err != nil { - t.Fatalf("finalize failed: %v", err) - } - - if len(events) == 0 { - t.Fatal("expected progress events, got none") - } - - var total int64 - for _, event := range events { - if event.Event != "progress" { - t.Fatalf("unexpected event type: %s", event.Event) - } - total += event.BytesSinceLast - } - - last := events[len(events)-1] - if last.BytesSoFar != int64(len(payload)) { - 
t.Fatalf("expected final bytesSoFar %d, got %d", len(payload), last.BytesSoFar) - } - if total != int64(len(payload)) { - t.Fatalf("expected bytesSinceLast sum %d, got %d", len(payload), total) - } -} diff --git a/download/transfer.go b/download/transfer.go deleted file mode 100644 index d171313..0000000 --- a/download/transfer.go +++ /dev/null @@ -1,148 +0,0 @@ -package download - -import ( - "context" - "fmt" - "io" - "os" - "path/filepath" - "strings" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/g3client" -) - -// DownloadSingleWithProgress downloads a single object while emitting progress events. -func DownloadSingleWithProgress( - ctx context.Context, - g3i g3client.Gen3Interface, - guid string, - downloadPath string, - protocol string, -) error { - progress := common.GetProgress(ctx) - var err error - downloadPath, err = common.ParseRootPath(downloadPath) - if err != nil { - return fmt.Errorf("invalid download path: %w", err) - } - if !strings.HasSuffix(downloadPath, "/") { - downloadPath += "/" - } - - renamed := make([]RenamedOrSkippedFileInfo, 0) - info, err := AskGen3ForFileInfo(ctx, g3i, guid, protocol, downloadPath, "original", false, &renamed) - if err != nil { - return err - } - - fdr := common.FileDownloadResponseObject{ - DownloadPath: downloadPath, - Filename: info.Name, - GUID: guid, - } - - protocolText := "" - if protocol != "" { - protocolText = "?protocol=" + protocol - } - if err := GetDownloadResponse(ctx, g3i, &fdr, protocolText); err != nil { - return err - } - - fullPath := filepath.Join(fdr.DownloadPath, fdr.Filename) - if dir := filepath.Dir(fullPath); dir != "." 
{ - if err = os.MkdirAll(dir, 0766); err != nil { - _ = fdr.Response.Body.Close() - return fmt.Errorf("mkdir for %s: %w", fullPath, err) - } - } - - flags := os.O_CREATE | os.O_WRONLY - if fdr.Range > 0 { - flags |= os.O_APPEND - } else if fdr.Overwrite { - flags |= os.O_TRUNC - } - - file, err := os.OpenFile(fullPath, flags, 0666) - if err != nil { - _ = fdr.Response.Body.Close() - return fmt.Errorf("open local file %s: %w", fullPath, err) - } - - total := info.Size - var writer io.Writer = file - var tracker *progressWriter - if progress != nil { - tracker = newProgressWriter(file, progress, guid, total) - writer = tracker - } - - _, copyErr := io.Copy(writer, fdr.Response.Body) - _ = fdr.Response.Body.Close() - _ = file.Close() - if tracker != nil { - if finalizeErr := tracker.Finalize(); finalizeErr != nil && copyErr == nil { - copyErr = finalizeErr - } - } - if copyErr != nil { - return fmt.Errorf("download failed for %s: %w", fdr.Filename, copyErr) - } - return nil -} - -// DownloadToPath downloads a single object by GUID to a specific destination file path. -// It bypasses the name lookup from Gen3 and uses the provided dstPath directly. -func DownloadToPath( - ctx context.Context, - g3i g3client.Gen3Interface, - guid string, - dstPath string, -) error { - progress := common.GetProgress(ctx) - hash := common.GetOid(ctx) - logger := g3i.Logger() - // logger.Printf("Downloading %s to %s\n", guid, dstPath) - - fdr := common.FileDownloadResponseObject{ - GUID: guid, - } - - if err := GetDownloadResponse(ctx, g3i, &fdr, ""); err != nil { - logger.FailedContext(ctx, dstPath, filepath.Base(dstPath), common.FileMetadata{}, guid, 0, false) - return err - } - defer fdr.Response.Body.Close() - - if dir := filepath.Dir(dstPath); dir != "." 
{ - if err := os.MkdirAll(dir, 0766); err != nil { - logger.FailedContext(ctx, dstPath, filepath.Base(dstPath), common.FileMetadata{}, guid, 0, false) - return fmt.Errorf("mkdir for %s: %w", dstPath, err) - } - } - - file, err := os.Create(dstPath) - if err != nil { - logger.FailedContext(ctx, dstPath, filepath.Base(dstPath), common.FileMetadata{}, guid, 0, false) - return fmt.Errorf("create local file %s: %w", dstPath, err) - } - defer file.Close() - - var writer io.Writer = file - if progress != nil { - total := fdr.Response.ContentLength - tracker := newProgressWriter(file, progress, hash, total) - writer = tracker - defer tracker.Finalize() - } - - if _, err := io.Copy(writer, fdr.Response.Body); err != nil { - logger.FailedContext(ctx, dstPath, filepath.Base(dstPath), common.FileMetadata{}, guid, 0, false) - return fmt.Errorf("copy to %s: %w", dstPath, err) - } - - logger.SucceededContext(ctx, dstPath, guid) - return nil -} diff --git a/download/transfer_test.go b/download/transfer_test.go deleted file mode 100644 index d811afe..0000000 --- a/download/transfer_test.go +++ /dev/null @@ -1,218 +0,0 @@ -package download - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "io" - "net/http" - "net/url" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/conf" - "github.com/calypr/data-client/drs" - "github.com/calypr/data-client/fence" - "github.com/calypr/data-client/indexd" - "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/request" - "github.com/calypr/data-client/requestor" - "github.com/calypr/data-client/sower" -) - -type fakeGen3Download struct { - cred *conf.Credential - logger *logs.Gen3Logger - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeGen3Download) GetCredential() *conf.Credential { return f.cred } -func (f *fakeGen3Download) Logger() *logs.Gen3Logger { return f.logger } -func (f 
*fakeGen3Download) ExportCredential(ctx context.Context, cred *conf.Credential) error { - return nil -} -func (f *fakeGen3Download) Fence() fence.FenceInterface { return &fakeFence{doFunc: f.doFunc} } -func (f *fakeGen3Download) Indexd() indexd.IndexdInterface { return &fakeIndexd{doFunc: f.doFunc} } -func (f *fakeGen3Download) Sower() sower.SowerInterface { return nil } -func (f *fakeGen3Download) Requestor() requestor.RequestorInterface { return nil } - -type fakeFence struct { - fence.FenceInterface - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeFence) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - return f.doFunc(ctx, req) -} -func (f *fakeFence) New(method, url string) *request.RequestBuilder { - return &request.RequestBuilder{Method: method, Url: url, Headers: make(map[string]string)} -} -func (f *fakeFence) CheckForShepherdAPI(ctx context.Context) (bool, error) { return false, nil } -func (f *fakeFence) ResolveOID(ctx context.Context, oid string) (fence.FenceResponse, error) { - return fence.FenceResponse{}, nil -} -func (f *fakeFence) GetDownloadPresignedUrl(ctx context.Context, guid, protocol string) (string, error) { - if guid == "test-fallback" { - return "", errors.New("fence fallback") - } - return "https://download.example.com/object", nil -} -func (f *fakeFence) ParseFenceURLResponse(resp *http.Response) (fence.FenceResponse, error) { - var msg fence.FenceResponse - if resp != nil && resp.Body != nil { - json.NewDecoder(resp.Body).Decode(&msg) - } - return msg, nil -} - -type fakeIndexd struct { - indexd.IndexdInterface - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeIndexd) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - return f.doFunc(ctx, req) -} - -func (f *fakeIndexd) New(method, url string) *request.RequestBuilder { - return &request.RequestBuilder{Method: method, Url: url, 
Headers: make(map[string]string)} -} - -func (f *fakeIndexd) GetDownloadURL(ctx context.Context, did string, accessType string) (*drs.AccessURL, error) { - return &drs.AccessURL{URL: "https://download.example.com/object"}, nil -} - -func TestDownloadSingleWithProgressEmitsEvents(t *testing.T) { - payload := bytes.Repeat([]byte("d"), 64) - downloadDir := t.TempDir() - downloadPath := downloadDir + string(os.PathSeparator) - - var events []common.ProgressEvent - progress := func(event common.ProgressEvent) error { - events = append(events, event) - return nil - } - - fake := &fakeGen3Download{ - cred: &conf.Credential{APIEndpoint: "https://example.com", AccessToken: "token"}, - logger: logs.NewGen3Logger(nil, "", ""), - doFunc: func(_ context.Context, req *request.RequestBuilder) (*http.Response, error) { - switch { - case strings.Contains(req.Url, common.IndexdIndexEndpoint): - return newDownloadJSONResponse(req.Url, `{"file_name":"payload.bin","size":64}`), nil - case strings.HasPrefix(req.Url, "https://download.example.com/"): - return newDownloadResponse(req.Url, payload, http.StatusOK), nil - default: - return nil, errors.New("unexpected request url: " + req.Url) - } - }, - } - - ctx := common.WithProgress(context.Background(), progress) - err := DownloadSingleWithProgress(ctx, fake, "guid-123", downloadPath, "") - if err != nil { - t.Fatalf("download failed: %v", err) - } - - if len(events) == 0 { - t.Fatal("expected progress events") - } - for i := 1; i < len(events); i++ { - if events[i].BytesSoFar < events[i-1].BytesSoFar { - t.Fatalf("bytesSoFar not monotonic: %d then %d", events[i-1].BytesSoFar, events[i].BytesSoFar) - } - } - last := events[len(events)-1] - if last.BytesSoFar != int64(len(payload)) { - t.Fatalf("expected final bytesSoFar %d, got %d", len(payload), last.BytesSoFar) - } - fullPath := filepath.Join(downloadPath, "payload.bin") - if _, err := os.Stat(fullPath); err != nil { - t.Fatalf("expected file to exist: %v", err) - } -} - -func 
TestDownloadSingleWithProgressFinalizeOnError(t *testing.T) { - downloadDir := t.TempDir() - downloadPath := downloadDir + string(os.PathSeparator) - - var events []common.ProgressEvent - progress := func(event common.ProgressEvent) error { - events = append(events, event) - return nil - } - - fake := &fakeGen3Download{ - cred: &conf.Credential{APIEndpoint: "https://example.com", AccessToken: "token"}, - logger: logs.NewGen3Logger(nil, "", ""), - doFunc: func(_ context.Context, req *request.RequestBuilder) (*http.Response, error) { - switch { - case strings.Contains(req.Url, common.IndexdIndexEndpoint): - return newDownloadJSONResponse(req.Url, `{"file_name":"payload.bin","size":64}`), nil - case strings.HasPrefix(req.Url, "https://download.example.com/"): - return newDownloadResponse(req.Url, []byte("short"), http.StatusOK), nil - default: - return nil, errors.New("unexpected request url: " + req.Url) - } - }, - } - - ctx := common.WithProgress(context.Background(), progress) - err := DownloadSingleWithProgress(ctx, fake, "guid-123", downloadPath, "") - if err == nil { - t.Fatal("expected download error") - } - - if len(events) == 0 { - t.Fatal("expected progress events") - } - last := events[len(events)-1] - if last.BytesSoFar != 64 { - t.Fatalf("expected finalize bytesSoFar 64, got %d", last.BytesSoFar) - } -} - -func newDownloadJSONResponse(rawURL, body string) *http.Response { - parsedURL, err := url.Parse(rawURL) - if err != nil { - parsedURL = &url.URL{} - } - return &http.Response{ - StatusCode: http.StatusOK, - Body: io.NopCloser(strings.NewReader(body)), - Request: &http.Request{URL: parsedURL}, - Header: make(http.Header), - } -} - -func newDownloadResponse(rawURL string, payload []byte, status int) *http.Response { - parsedURL, err := url.Parse(rawURL) - if err != nil { - parsedURL = &url.URL{} - } - return &http.Response{ - StatusCode: status, - Body: io.NopCloser(bytes.NewReader(payload)), - ContentLength: int64(len(payload)), - Request: 
&http.Request{URL: parsedURL}, - Header: make(http.Header), - } -} - -// fakeRequestor implements requestor.RequestorInterface using the same doFunc. -type fakeRequestor struct { - requestor.RequestorInterface - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeRequestor) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - return f.doFunc(ctx, req) -} - -func (f *fakeRequestor) New(method, url string) *request.RequestBuilder { - return &request.RequestBuilder{Method: method, Url: url, Headers: make(map[string]string)} -} diff --git a/download/types.go b/download/types.go deleted file mode 100644 index c910b67..0000000 --- a/download/types.go +++ /dev/null @@ -1,60 +0,0 @@ -package download - -import ( - "os" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/logs" -) - -type IndexdResponse struct { - Name string - Size int64 -} -type RenamedOrSkippedFileInfo struct { - GUID string - OldFilename string - NewFilename string -} - -func validateLocalFileStat( - logger *logs.Gen3Logger, - fdr *common.FileDownloadResponseObject, - filesize int64, - skipCompleted bool, -) { - fullPath := fdr.DownloadPath + fdr.Filename - - fi, err := os.Stat(fullPath) - if err != nil { - if os.IsNotExist(err) { - // No local file → full download, nothing special - return - } - logger.Printf("Error statting local file \"%s\": %s\n", fullPath, err.Error()) - logger.Println("Will attempt full download anyway") - return - } - - localSize := fi.Size() - - // User doesn't want to skip completed files → force full overwrite - if !skipCompleted { - fdr.Overwrite = true - return - } - - // Exact match → skip entirely - if localSize == filesize { - fdr.Skip = true - return - } - - // Local file larger than expected → overwrite fully (corrupted or different file) - if localSize > filesize { - fdr.Overwrite = true - return - } - - fdr.Range = localSize -} diff --git a/download/url_resolution.go 
b/download/url_resolution.go deleted file mode 100644 index d7427c3..0000000 --- a/download/url_resolution.go +++ /dev/null @@ -1,87 +0,0 @@ -package download - -import ( - "context" - "errors" - "fmt" - "io" - "net/http" - "strconv" - "strings" - - "github.com/calypr/data-client/common" - client "github.com/calypr/data-client/g3client" -) - -// GetDownloadResponse gets presigned URL and prepares HTTP response -func GetDownloadResponse(ctx context.Context, g3 client.Gen3Interface, fdr *common.FileDownloadResponseObject, protocolText string) error { - // 1. Try Fence first - url, err := g3.Fence().GetDownloadPresignedUrl(ctx, fdr.GUID, protocolText) - if err == nil && url != "" { - fdr.PresignedURL = url - } else { - // 2. Fallback to IndexD DRS endpoint - accessType := "s3" - if strings.HasPrefix(protocolText, "?protocol=") { - accessType = strings.TrimPrefix(protocolText, "?protocol=") - } else if protocolText == "?protocol=gs" { - accessType = "gs" - } - - accessURL, errIdx := g3.Indexd().GetDownloadURL(ctx, fdr.GUID, accessType) - if errIdx == nil && accessURL != nil && accessURL.URL != "" { - fdr.PresignedURL = accessURL.URL - // Some DRS providers might return required headers - // This is not currently used by makeDownloadRequest but good to have for future - } else { - if err != nil { - return err - } - if errIdx != nil { - return errIdx - } - return fmt.Errorf("failed to resolve download URL for %s", fdr.GUID) - } - } - - return makeDownloadRequest(ctx, g3, fdr) -} - -func isCloudPresignedURL(url string) bool { - return strings.Contains(url, "X-Amz-Signature") || - strings.Contains(url, "X-Goog-Signature") || - strings.Contains(url, "Signature=") || - strings.Contains(url, "AWSAccessKeyId=") || - strings.Contains(url, "Expires=") -} - -func makeDownloadRequest(ctx context.Context, g3 client.Gen3Interface, fdr *common.FileDownloadResponseObject) error { - skipAuth := isCloudPresignedURL(fdr.PresignedURL) - rb := g3.Fence().New(http.MethodGet, 
fdr.PresignedURL).WithSkipAuth(skipAuth) - - if fdr.Range > 0 { - rb.WithHeader("Range", "bytes="+strconv.FormatInt(fdr.Range, 10)+"-") - } - - resp, err := g3.Fence().Do(ctx, rb) - - if err != nil { - return errors.New("Request failed: " + strings.ReplaceAll(err.Error(), fdr.PresignedURL, "")) - } - - // Check for non-success status codes - if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusPartialContent { - defer resp.Body.Close() // Ensure the body is closed - - bodyBytes, err := io.ReadAll(resp.Body) - bodyString := "" - if err == nil { - bodyString = string(bodyBytes) - } - - return fmt.Errorf("non-OK response: %d, body: %s", resp.StatusCode, bodyString) - } - - fdr.Response = resp - return nil -} diff --git a/download/utils.go b/download/utils.go deleted file mode 100644 index 7209c44..0000000 --- a/download/utils.go +++ /dev/null @@ -1,77 +0,0 @@ -package download - -import ( - "bufio" - "os" - "path/filepath" - "strconv" - "strings" - - "context" - "fmt" - "log/slog" -) - -// AskForConfirmation asks user for confirmation before proceed, will wait if user entered garbage -func AskForConfirmation(logger *slog.Logger, s string) bool { - reader := bufio.NewReader(os.Stdin) - - for { - logger.Info(fmt.Sprintf("%s [y/n]: ", s)) - - response, err := reader.ReadString('\n') - if err != nil { - logger.Error("Error occurred during parsing user's confirmation: " + err.Error()) - os.Exit(1) - } - - switch strings.ToLower(strings.TrimSpace(response)) { - case "y", "yes": - return true - case "n", "no": - return false - default: - return false // Example of defaulting to false - } - } -} - -func processOriginalFilename(downloadPath string, actualFilename string) string { - _, err := os.Stat(downloadPath + actualFilename) - if os.IsNotExist(err) { - return actualFilename - } - extension := filepath.Ext(actualFilename) - filename := strings.TrimSuffix(actualFilename, extension) - counter := 2 - for { - newFilename := filename + "_" + 
strconv.Itoa(counter) + extension - _, err := os.Stat(downloadPath + newFilename) - if os.IsNotExist(err) { - return newFilename - } - counter++ - } -} - -// truncateFilename shortens long filenames for progress bar display -func truncateFilename(name string, max int) string { - if len(name) <= max { - return name - } - return "..." + name[len(name)-max+3:] -} - -// printRenamed shows renamed files in final summary -func printRenamed(ctx context.Context, logger *slog.Logger, renamed []RenamedOrSkippedFileInfo) { - for _, r := range renamed { - logger.InfoContext(ctx, fmt.Sprintf("Renamed %q to %q (GUID: %s)", r.OldFilename, r.NewFilename, r.GUID)) - } -} - -// printSkipped shows skipped files in final summary -func printSkipped(ctx context.Context, logger *slog.Logger, skipped []RenamedOrSkippedFileInfo) { - for _, s := range skipped { - logger.InfoContext(ctx, fmt.Sprintf("Skipped %q (GUID: %s)", s.OldFilename, s.GUID)) - } -} diff --git a/drs/drs.go b/drs/drs.go deleted file mode 100644 index 55feb1a..0000000 --- a/drs/drs.go +++ /dev/null @@ -1,87 +0,0 @@ -package drs - -import ( - "fmt" - "strings" - - "github.com/calypr/data-client/hash" - "github.com/google/uuid" -) - -// NAMESPACE is the UUID namespace used for generating DRS UUIDs -var NAMESPACE = uuid.NewMD5(uuid.NameSpaceURL, []byte("calypr.org")) - -func ProjectToResource(project string) (string, error) { - if !strings.Contains(project, "-") { - return "", fmt.Errorf("error: invalid project ID %s, ID should look like -", project) - } - projectIdArr := strings.SplitN(project, "-", 2) - return "/programs/" + projectIdArr[0] + "/projects/" + projectIdArr[1], nil -} - -// From git-drs/drsmap/drs_map.go - -func DrsUUID(projectId string, hash string) string { - // create UUID based on project ID and hash - hashStr := fmt.Sprintf("%s:%s", projectId, hash) - return uuid.NewSHA1(NAMESPACE, []byte(hashStr)).String() -} - -func FindMatchingRecord(records []DRSObject, projectId string) (*DRSObject, error) { - if 
len(records) == 0 { - return nil, nil - } - - // Convert project ID to resource path format for comparison - expectedAuthz, err := ProjectToResource(projectId) - if err != nil { - return nil, fmt.Errorf("error converting project ID to resource format: %v", err) - } - - for _, record := range records { - for _, access := range record.AccessMethods { - if access.Authorizations != nil && access.Authorizations.Value == expectedAuthz { - return &record, nil - } - } - } - - return nil, nil -} - -// DRS UUID generation using SHA1 (compatible with git-drs) -func GenerateDrsID(projectId, hash string) string { - return DrsUUID(projectId, hash) -} - -func BuildDrsObj(fileName string, checksum string, size int64, drsId string, bucketName string, projectId string) (*DRSObject, error) { - if bucketName == "" { - return nil, fmt.Errorf("error: bucket name is empty") - } - - fileURL := fmt.Sprintf("s3://%s/%s/%s", bucketName, drsId, checksum) - - authzStr, err := ProjectToResource(projectId) - if err != nil { - return nil, err - } - authorizations := Authorizations{ - Value: authzStr, - } - - drsObj := DRSObject{ - Id: drsId, - Name: fileName, - AccessMethods: []AccessMethod{{ - Type: "s3", - AccessURL: AccessURL{ - URL: fileURL, - }, - Authorizations: &authorizations, - }}, - Checksums: hash.HashInfo{SHA256: checksum}, - Size: size, - } - - return &drsObj, nil -} diff --git a/drs/object_builder.go b/drs/object_builder.go deleted file mode 100644 index 61fec11..0000000 --- a/drs/object_builder.go +++ /dev/null @@ -1,56 +0,0 @@ -package drs - -import ( - "fmt" - "path/filepath" - - "github.com/calypr/data-client/hash" -) - -type ObjectBuilder struct { - Bucket string - ProjectID string - AccessType string -} - -func NewObjectBuilder(bucket, projectID string) ObjectBuilder { - return ObjectBuilder{ - Bucket: bucket, - ProjectID: projectID, - AccessType: "s3", - } -} - -func (b ObjectBuilder) Build(fileName string, checksum string, size int64, drsID string) (*DRSObject, error) { - if 
b.Bucket == "" { - return nil, fmt.Errorf("error: bucket name is empty in config file") - } - accessType := b.AccessType - if accessType == "" { - accessType = "s3" - } - - fileURL := fmt.Sprintf("s3://%s", filepath.Join(b.Bucket, drsID, checksum)) - - authzStr, err := ProjectToResource(b.ProjectID) - if err != nil { - return nil, err - } - authorizations := Authorizations{ - Value: authzStr, - } - - drsObj := DRSObject{ - Id: drsID, - Name: fileName, - AccessMethods: []AccessMethod{{ - Type: accessType, - AccessURL: AccessURL{URL: fileURL}, - Authorizations: &authorizations, - }}, - Checksums: hash.HashInfo{SHA256: checksum}, - Size: size, - } - - return &drsObj, nil -} diff --git a/drs/object_builder_test.go b/drs/object_builder_test.go deleted file mode 100644 index e196e00..0000000 --- a/drs/object_builder_test.go +++ /dev/null @@ -1,51 +0,0 @@ -package drs - -import ( - "path/filepath" - "strings" - "testing" -) - -func TestObjectBuilderBuildSuccess(t *testing.T) { - builder := ObjectBuilder{ - ProjectID: "test-project", - Bucket: "bucket", - } - - obj, err := builder.Build("file.txt", "sha-256", 12, "did-1") - if err != nil { - t.Fatalf("Build error: %v", err) - } - if obj.Id != "did-1" { - t.Fatalf("unexpected Id: %s", obj.Id) - } - if obj.Name != "file.txt" { - t.Fatalf("unexpected Name: %s", obj.Name) - } - if obj.Checksums.SHA256 != "sha-256" { - t.Fatalf("unexpected checksum: %v", obj.Checksums) - } - if obj.Size != 12 { - t.Fatalf("unexpected size: %d", obj.Size) - } - if len(obj.AccessMethods) != 1 { - t.Fatalf("expected 1 access method, got %d", len(obj.AccessMethods)) - } - if !strings.Contains(obj.AccessMethods[0].AccessURL.URL, filepath.Join("bucket", "did-1", "sha-256")) { - t.Fatalf("unexpected access URL: %s", obj.AccessMethods[0].AccessURL.URL) - } - if obj.AccessMethods[0].Type != "s3" { - t.Fatalf("unexpected access method type: %s", obj.AccessMethods[0].Type) - } -} - -func TestObjectBuilderBuildEmptyBucket(t *testing.T) { - builder := 
ObjectBuilder{ - ProjectID: "test-project", - Bucket: "", - } - - if _, err := builder.Build("file.txt", "sha-256", 12, "did-1"); err == nil { - t.Fatalf("expected error when Bucket is empty") - } -} diff --git a/drs/types.go b/drs/types.go deleted file mode 100644 index ff203cc..0000000 --- a/drs/types.go +++ /dev/null @@ -1,54 +0,0 @@ -package drs - -import "github.com/calypr/data-client/hash" - -type ChecksumType = hash.ChecksumType -type Checksum = hash.Checksum -type HashInfo = hash.HashInfo - -type AccessURL struct { - URL string `json:"url"` - Headers []string `json:"headers"` -} - -type Authorizations struct { - Value string `json:"value"` -} - -type AccessMethod struct { - Type string `json:"type"` - AccessURL AccessURL `json:"access_url"` - AccessID string `json:"access_id,omitempty"` - Cloud string `json:"cloud,omitempty"` - Region string `json:"region,omitempty"` - Available string `json:"available,omitempty"` - Authorizations *Authorizations `json:"Authorizations,omitempty"` -} - -type Contents struct { -} - -type DRSPage struct { - DRSObjects []DRSObject `json:"drs_objects"` -} - -type DRSObjectResult struct { - Object *DRSObject - Error error -} - -type DRSObject struct { - Id string `json:"id"` - Name string `json:"name"` - SelfURI string `json:"self_uri,omitempty"` - Size int64 `json:"size"` - CreatedTime string `json:"created_time,omitempty"` - UpdatedTime string `json:"updated_time,omitempty"` - Version string `json:"version,omitempty"` - MimeType string `json:"mime_type,omitempty"` - Checksums hash.HashInfo `json:"checksums"` - AccessMethods []AccessMethod `json:"access_methods"` - Contents []Contents `json:"contents,omitempty"` - Description string `json:"description,omitempty"` - Aliases []string `json:"aliases,omitempty"` -} diff --git a/fence/client.go b/fence/client.go index 4a5cacf..fc6b73d 100644 --- a/fence/client.go +++ b/fence/client.go @@ -21,9 +21,9 @@ import ( ) // FenceBucketEndpoint is the endpoint postfix for FENCE bucket list 
-const FenceBucketEndpoint = "/user/data/buckets" +const FenceBucketEndpoint = "/data/buckets" -//go:generate mockgen -destination=../mocks/mock_fence.go -package=mocks github.com/calypr/data-client/fence FenceInterface +//go:generate go run go.uber.org/mock/mockgen@v0.6.0 -destination=../mocks/mock_fence.go -package=mocks github.com/calypr/data-client/fence FenceInterface // FenceInterface defines the interface for Fence client type FenceInterface interface { @@ -311,7 +311,7 @@ func (f *FenceClient) resolveFromFence(ctx context.Context, guid, protocolText s } func (f *FenceClient) GetBucketDetails(ctx context.Context, bucket string) (*S3Bucket, error) { - url := f.cred.APIEndpoint + "/user/data/buckets" + url := f.cred.APIEndpoint + "/data/buckets" resp, err := f.Do(ctx, &request.RequestBuilder{ Method: http.MethodGet, Url: url, @@ -506,31 +506,36 @@ func (f *FenceClient) ParseFenceURLResponse(resp *http.Response) (FenceResponse, return msg, fmt.Errorf("failed to read response body: %w", err) } bodyStr := string(bodyBytes) - - if len(bodyBytes) > 0 { - err = json.Unmarshal(bodyBytes, &msg) - if err != nil { - return msg, fmt.Errorf("failed to decode JSON: %w (Raw body: %s)", err, bodyStr) - } + strURL := "" + if resp.Request != nil && resp.Request.URL != nil { + strURL = resp.Request.URL.String() } + // Handle HTTP error statuses first so plain-text error bodies (for example: + // "Unauthorized") are reported accurately instead of as JSON decode failures. 
if !(resp.StatusCode == 200 || resp.StatusCode == 201 || resp.StatusCode == 204) { - strUrl := resp.Request.URL.String() switch resp.StatusCode { case http.StatusUnauthorized: - return msg, fmt.Errorf("401 Unauthorized: %s (URL: %s)", bodyStr, strUrl) + return msg, fmt.Errorf("401 Unauthorized: %s (URL: %s)", bodyStr, strURL) case http.StatusForbidden: - return msg, fmt.Errorf("403 Forbidden: %s (URL: %s)", bodyStr, strUrl) + return msg, fmt.Errorf("403 Forbidden: %s (URL: %s)", bodyStr, strURL) case http.StatusNotFound: - return msg, fmt.Errorf("404 Not Found: %s (URL: %s)", bodyStr, strUrl) + return msg, fmt.Errorf("404 Not Found: %s (URL: %s)", bodyStr, strURL) case http.StatusInternalServerError: - return msg, fmt.Errorf("500 Internal Server Error: %s (URL: %s)", bodyStr, strUrl) + return msg, fmt.Errorf("500 Internal Server Error: %s (URL: %s)", bodyStr, strURL) case http.StatusServiceUnavailable: - return msg, fmt.Errorf("503 Service Unavailable: %s (URL: %s)", bodyStr, strUrl) + return msg, fmt.Errorf("503 Service Unavailable: %s (URL: %s)", bodyStr, strURL) case http.StatusBadGateway: - return msg, fmt.Errorf("502 Bad Gateway: %s (URL: %s)", bodyStr, strUrl) + return msg, fmt.Errorf("502 Bad Gateway: %s (URL: %s)", bodyStr, strURL) default: - return msg, fmt.Errorf("unexpected error (%d): %s (URL: %s)", resp.StatusCode, bodyStr, strUrl) + return msg, fmt.Errorf("unexpected error (%d): %s (URL: %s)", resp.StatusCode, bodyStr, strURL) + } + } + + if len(bodyBytes) > 0 { + err = json.Unmarshal(bodyBytes, &msg) + if err != nil { + return msg, fmt.Errorf("failed to decode JSON response (status=%d, url=%s): %w (raw body: %s)", resp.StatusCode, strURL, err, bodyStr) } } diff --git a/fence/client_test.go b/fence/client_test.go index 6a85de3..1baa09e 100644 --- a/fence/client_test.go +++ b/fence/client_test.go @@ -55,12 +55,13 @@ func (m *mockFenceServer) handler(t *testing.T) http.HandlerFunc { w.WriteHeader(http.StatusOK) _ = 
json.NewEncoder(w).Encode(FenceResponse{URL: "https://download.url"}) return - case r.Method == http.MethodGet && path == "/user/data/buckets": + case r.Method == http.MethodGet && path == "/data/buckets": w.WriteHeader(http.StatusOK) _ = json.NewEncoder(w).Encode(S3BucketsResponse{ S3Buckets: map[string]*S3Bucket{ "test-bucket": { EndpointURL: "https://s3.amazonaws.com", + Provider: "s3", Region: "us-east-1", }, }, @@ -173,6 +174,9 @@ func TestFenceClient_GetBucketDetails(t *testing.T) { if info.Region != "us-east-1" { t.Errorf("expected region us-east-1, got %s", info.Region) } + if info.Provider != "s3" { + t.Errorf("expected provider s3, got %s", info.Provider) + } info, err = client.GetBucketDetails(context.Background(), "unknown-bucket") if err != nil { @@ -244,7 +248,7 @@ func TestFenceClient_UserPing(t *testing.T) { } if resp.BucketPrograms["test-bucket"] != "" { - // Our mock for /user/data/buckets returns a bucket but no programs by default unless we update it + // Our mock for /data/buckets returns a bucket but no programs by default unless we update it // In my update to types.go, I added Programs to S3Bucket. 
} } diff --git a/fence/types.go b/fence/types.go index 2352dbb..ef4956b 100644 --- a/fence/types.go +++ b/fence/types.go @@ -43,6 +43,7 @@ type MultipartCompleteRequestObject struct { type S3Bucket struct { EndpointURL string `json:"endpoint_url"` + Provider string `json:"provider,omitempty"` Programs []string `json:"programs,omitempty"` Region string `json:"region"` } diff --git a/g3client/client.go b/g3client/client.go index 2741aa1..4a95d36 100644 --- a/g3client/client.go +++ b/g3client/client.go @@ -8,24 +8,27 @@ import ( "github.com/calypr/data-client/conf" "github.com/calypr/data-client/fence" - "github.com/calypr/data-client/indexd" "github.com/calypr/data-client/logs" "github.com/calypr/data-client/request" "github.com/calypr/data-client/requestor" "github.com/calypr/data-client/sower" + "github.com/calypr/syfon/client/credentials" + "github.com/calypr/syfon/client/drs" + sylogs "github.com/calypr/syfon/client/pkg/logs" + syrequest "github.com/calypr/syfon/client/pkg/request" version "github.com/hashicorp/go-version" ) -//go:generate mockgen -destination=../mocks/mock_gen3interface.go -package=mocks github.com/calypr/data-client/g3client Gen3Interface +//go:generate go run go.uber.org/mock/mockgen@v0.6.0 -destination=../mocks/mock_gen3interface.go -package=mocks github.com/calypr/data-client/g3client Gen3Interface type Gen3Interface interface { - GetCredential() *conf.Credential + request.RequestInterface Logger() *logs.Gen3Logger - ExportCredential(ctx context.Context, cred *conf.Credential) error - Fence() fence.FenceInterface - Indexd() indexd.IndexdInterface - Sower() sower.SowerInterface - Requestor() requestor.RequestorInterface + Credentials() credentials.Manager + DRSClient() drs.Client + FenceClient() fence.FenceInterface + RequestorClient() requestor.RequestorInterface + SowerClient() sower.SowerInterface } func NewGen3InterfaceFromCredential(cred *conf.Credential, logger *logs.Gen3Logger, opts ...Option) Gen3Interface { @@ -64,8 +67,13 @@ func (g 
*Gen3Client) initializeClients() { if shouldInit(FenceClient) { g.fence = fence.NewFenceClient(g.RequestInterface, g.credential, g.logger.Logger) } - if shouldInit(IndexdClient) { - g.indexd = indexd.NewIndexdClient(g.RequestInterface, g.credential, g.logger.Logger) + if shouldInit(SyfonClient) { + syReq := syrequest.NewRequestInterface( + sylogs.NewGen3Logger(g.logger.Logger, "", ""), + g.credential, + g.config, + ) + g.syfon = drs.NewDrsClient(syReq, g.credential, sylogs.NewGen3Logger(g.logger.Logger, "", "")) } if shouldInit(SowerClient) { g.sower = sower.NewSowerClient(g.RequestInterface, g.credential.APIEndpoint) @@ -78,13 +86,14 @@ func (g *Gen3Client) initializeClients() { type Gen3Client struct { Ctx context.Context fence fence.FenceInterface - indexd indexd.IndexdInterface + syfon drs.Client sower sower.SowerInterface requestor requestor.RequestorInterface config conf.ManagerInterface request.RequestInterface credential *conf.Credential + creds credentials.Manager logger *logs.Gen3Logger requestedClients []ClientType @@ -94,7 +103,7 @@ type ClientType string const ( FenceClient ClientType = "fence" - IndexdClient ClientType = "indexd" + SyfonClient ClientType = "syfon" SowerClient ClientType = "sower" RequestorClient ClientType = "requestor" ) @@ -107,31 +116,31 @@ func WithClients(clients ...ClientType) Option { } } -func (g *Gen3Client) Fence() fence.FenceInterface { - return g.fence -} - -func (g *Gen3Client) Indexd() indexd.IndexdInterface { - return g.indexd +func (g *Gen3Client) DRSClient() drs.Client { + if g.syfon == nil { + syReq := syrequest.NewRequestInterface( + sylogs.NewGen3Logger(g.logger.Logger, "", ""), + g.credential, + g.config, + ) + g.syfon = drs.NewDrsClient(syReq, g.credential, sylogs.NewGen3Logger(g.logger.Logger, "", "")) + } + return g.syfon } -func (g *Gen3Client) Sower() sower.SowerInterface { - return g.sower +func (g *Gen3Client) FenceClient() fence.FenceInterface { + return g.fence } -func (g *Gen3Client) Requestor() 
requestor.RequestorInterface { +func (g *Gen3Client) RequestorClient() requestor.RequestorInterface { return g.requestor } -func (g *Gen3Client) Logger() *logs.Gen3Logger { - return g.logger -} - -func (g *Gen3Client) GetCredential() *conf.Credential { - return g.credential +func (g *Gen3Client) SowerClient() sower.SowerInterface { + return g.sower } -func (g *Gen3Client) ExportCredential(ctx context.Context, cred *conf.Credential) error { +func (g *Gen3Client) exportCredential(ctx context.Context, cred *conf.Credential) error { if cred.Profile == "" { return fmt.Errorf("profile name is required") } @@ -187,6 +196,25 @@ func (g *Gen3Client) ExportCredential(ctx context.Context, cred *conf.Credential return nil } +type gen3Credentials struct { + client *Gen3Client +} + +func (c *gen3Credentials) Current() *conf.Credential { + return c.client.credential +} + +func (c *gen3Credentials) Export(ctx context.Context, cred *conf.Credential) error { + return c.client.exportCredential(ctx, cred) +} + +func (g *Gen3Client) Credentials() credentials.Manager { + if g.creds == nil { + g.creds = &gen3Credentials{client: g} + } + return g.creds +} + // EnsureValidCredential checks if the credential is valid and refreshes it if the access token is expired but the API key is valid. // It accepts an optional fClient; if nil, it will initialize one internally if needed for refresh. 
func EnsureValidCredential(ctx context.Context, cred *conf.Credential, config conf.ManagerInterface, logger *logs.Gen3Logger, fClient fence.FenceInterface) error { @@ -244,3 +272,4 @@ func NewGen3Interface(profile string, logger *logs.Gen3Logger, opts ...Option) ( return client, nil } +func (g *Gen3Client) Logger() *logs.Gen3Logger { return g.logger } diff --git a/go.mod b/go.mod index c39b763..793a137 100644 --- a/go.mod +++ b/go.mod @@ -1,50 +1,110 @@ module github.com/calypr/data-client -go 1.24.2 +go 1.26.1 require ( - github.com/aws/aws-sdk-go-v2 v1.41.1 - github.com/aws/aws-sdk-go-v2/config v1.32.7 - github.com/aws/aws-sdk-go-v2/credentials v1.19.7 - github.com/aws/aws-sdk-go-v2/service/s3 v1.95.1 - github.com/golang-jwt/jwt/v5 v5.3.0 - github.com/google/uuid v1.6.0 + github.com/calypr/syfon/client v0.1.2 + github.com/golang-jwt/jwt/v5 v5.3.1 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-retryablehttp v0.7.8 - github.com/hashicorp/go-version v1.8.0 + github.com/hashicorp/go-version v1.9.0 github.com/spf13/cobra v1.10.2 - github.com/vbauerster/mpb/v8 v8.11.2 + github.com/vbauerster/mpb/v8 v8.12.0 go.uber.org/mock v0.6.0 - golang.org/x/sync v0.19.0 - gopkg.in/ini.v1 v1.67.0 + gopkg.in/ini.v1 v1.67.1 gopkg.in/yaml.v3 v3.0.1 ) +require github.com/gorilla/mux v1.8.0 // indirect + require ( + cel.dev/expr v0.25.1 // indirect + cloud.google.com/go v0.123.0 // indirect + cloud.google.com/go/auth v0.17.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.9.0 // indirect + cloud.google.com/go/iam v1.5.3 // indirect + cloud.google.com/go/monitoring v1.24.3 // indirect + cloud.google.com/go/storage v1.57.2 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.3 // 
indirect + github.com/Azure/go-autorest v14.2.0+incompatible // indirect + github.com/Azure/go-autorest/autorest/to v0.4.1 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.54.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect github.com/VividCortex/ewma v1.2.0 // indirect github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect + github.com/aws/aws-sdk-go-v2 v1.41.5 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 // indirect + github.com/aws/aws-sdk-go-v2/config v1.32.7 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.19.7 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 // indirect + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.12 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.21 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.21 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 // indirect github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 // indirect github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.95.1 // indirect github.com/aws/aws-sdk-go-v2/service/signin v1.0.5 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.30.9 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 // 
indirect github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 // indirect - github.com/aws/smithy-go v1.24.0 // indirect - github.com/clipperhouse/stringish v0.1.1 // indirect - github.com/clipperhouse/uax29/v2 v2.3.0 // indirect + github.com/aws/smithy-go v1.24.2 // indirect + github.com/calypr/syfon/apigen v0.1.2 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/clipperhouse/uax29/v2 v2.7.0 // indirect + github.com/cncf/xds/go v0.0.0-20251110193048-8bfbf64dc13e // indirect + github.com/envoyproxy/go-control-plane/envoy v1.36.0 // indirect + github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-jose/go-jose/v4 v4.1.3 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/google/wire v0.7.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect + github.com/googleapis/gax-go/v2 v2.15.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/mattn/go-runewidth v0.0.19 // indirect + github.com/kylelemons/godebug v1.1.0 // indirect + github.com/mattn/go-runewidth v0.0.20 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/spf13/pflag v1.0.10 // indirect - github.com/stretchr/testify v1.11.1 // indirect - golang.org/x/sys v0.39.0 // indirect + github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0 // indirect + 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect + go.opentelemetry.io/otel v1.40.0 // indirect + go.opentelemetry.io/otel/metric v1.40.0 // indirect + go.opentelemetry.io/otel/sdk v1.40.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.40.0 // indirect + go.opentelemetry.io/otel/trace v1.40.0 // indirect + gocloud.dev v0.45.0 // indirect + golang.org/x/crypto v0.45.0 // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/oauth2 v0.33.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.31.0 // indirect + golang.org/x/time v0.14.0 // indirect + golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect + google.golang.org/api v0.256.0 // indirect + google.golang.org/genproto v0.0.0-20251124214823-79d6a2a48846 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846 // indirect + google.golang.org/grpc v1.77.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/validator.v2 v2.0.1 // indirect ) diff --git a/go.sum b/go.sum index d4cffb0..6e197fa 100644 --- a/go.sum +++ b/go.sum @@ -1,9 +1,59 @@ +cel.dev/expr v0.25.1 h1:1KrZg61W6TWSxuNZ37Xy49ps13NUovb66QLprthtwi4= +cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= +cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE= +cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU= +cloud.google.com/go/auth v0.17.0 h1:74yCm7hCj2rUyyAocqnFzsAYXgJhrG26XCFimrc/Kz4= +cloud.google.com/go/auth v0.17.0/go.mod h1:6wv/t5/6rOPAX4fJiRjKkJCvswLwdet7G8+UGXt7nCQ= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.9.0 
h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs= +cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= +cloud.google.com/go/iam v1.5.3 h1:+vMINPiDF2ognBJ97ABAYYwRgsaqxPbQDlMnbHMjolc= +cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU= +cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY= +cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw= +cloud.google.com/go/longrunning v0.7.0 h1:FV0+SYF1RIj59gyoWDRi45GiYUMM3K1qO51qoboQT1E= +cloud.google.com/go/longrunning v0.7.0/go.mod h1:ySn2yXmjbK9Ba0zsQqunhDkYi0+9rlXIwnoAf+h+TPY= +cloud.google.com/go/monitoring v1.24.3 h1:dde+gMNc0UhPZD1Azu6at2e79bfdztVDS5lvhOdsgaE= +cloud.google.com/go/monitoring v1.24.3/go.mod h1:nYP6W0tm3N9H/bOw8am7t62YTzZY+zUeQ+Bi6+2eonI= +cloud.google.com/go/storage v1.57.2 h1:sVlym3cHGYhrp6XZKkKb+92I1V42ks2qKKpB0CF5Mb4= +cloud.google.com/go/storage v1.57.2/go.mod h1:n5ijg4yiRXXpCu0sJTD6k+eMf7GRrJmPyr9YxLXGHOk= +cloud.google.com/go/trace v1.11.7 h1:kDNDX8JkaAG3R2nq1lIdkb7FCSi1rCmsEtKVsty7p+U= +cloud.google.com/go/trace v1.11.7/go.mod h1:TNn9d5V3fQVf6s4SCveVMIBS2LJUqo73GACmq/Tky0s= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 h1:JXg2dwJUmPB9JmtVmdEB16APJ7jurfbY5jnfXpJoRMc= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0/go.mod h1:YD5h/ldMsG0XiIw7PdyNhLxaM317eFh5yNLccNfGdyw= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 h1:Hk5QBxZQC1jb2Fwj6mpzme37xbCDdNTxU7O9eb5+LB4= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1/go.mod h1:IYus9qsFobWIc2YVwe/WPjcnyCkPKtnHAqUYeebc8z0= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 h1:9iefClla7iYpfYWdzPCRDozdmndjTm8DXdpCzPajMgA= 
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2/go.mod h1:XtLgD3ZD34DAaVIIAyG3objl5DynM3CQ/vMcbBNJZGI= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.8.1 h1:/Zt+cDPnpC3OVDm/JKLOs7M2DKmLRIIp3XIx9pHHiig= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.8.1/go.mod h1:Ng3urmn6dYe8gnbCMoHHVl5APYz2txho3koEkV2o2HA= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.3 h1:ZJJNFaQ86GVKQ9ehwqyAFE6pIfyicpuJ8IkVaPBc6/4= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.3/go.mod h1:URuDvhmATVKqHBH9/0nOiNKk0+YcwfQ3WkK5PqHKxc8= +github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest/to v0.4.1 h1:CxNHBqdzTr7rLtdrtb5CMjJcDut+WNGCVv7OmS5+lTc= +github.com/Azure/go-autorest/autorest/to v0.4.1/go.mod h1:EtaofgU4zmtvn1zT2ARsjRFdq9vXx0YWtmElwL+GZ9M= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE= +github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 h1:XRzhVemXdgvJqCH0sFfrBUTnUJSBrBf7++ypk+twtRs= +github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0/go.mod h1:HKpQxkWaGLJ+D/5H8QRpyQXA1eKjxkFlOMwck5+33Jk= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 h1:sBEjpZlNHzK1voKq9695PJSX2o5NEXl7/OL3coiIY0c= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.54.0 h1:lhhYARPUu3LmHysQ/igznQphfzynnqI3D75oUyw1HXk= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.54.0/go.mod 
h1:l9rva3ApbBpEJxSNYnwT9N4CDLrWgtq3u8736C5hyJw= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.54.0 h1:xfK3bbi6F2RDtaZFtUdKO3osOBIhNb+xTs8lFW6yx9o= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.54.0/go.mod h1:vB2GH9GAYYJTO3mEn8oYwzEdhlayZIdQz6zdzgUIRvA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 h1:s0WlVbf9qpvkh1c/uDAPElam0WrL7fHRIidgZJ7UqZI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0/go.mod h1:Mf6O40IAyB9zR/1J8nGDDPirZQQPbYJni8Yisy7NTMc= github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow= github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= -github.com/aws/aws-sdk-go-v2 v1.41.1 h1:ABlyEARCDLN034NhxlRUSZr4l71mh+T5KAeGh6cerhU= -github.com/aws/aws-sdk-go-v2 v1.41.1/go.mod h1:MayyLB8y+buD9hZqkCW3kX1AKq07Y5pXxtgB+rRFhz0= +github.com/aws/aws-sdk-go-v2 v1.41.5 h1:dj5kopbwUsVUVFgO4Fi5BIT3t4WyqIDjGKCangnV/yY= +github.com/aws/aws-sdk-go-v2 v1.41.5/go.mod h1:mwsPRE8ceUUpiTgF7QmQIJ7lgsKUPQOUl3o72QBrE1o= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 h1:489krEF9xIGkOaaX3CE/Be2uWjiXrkCH6gUX+bZA/BU= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4/go.mod h1:IOAPF6oT9KCsceNTvvYMNHy0+kMF8akOjeDvPENWxp4= github.com/aws/aws-sdk-go-v2/config v1.32.7 h1:vxUyWGUwmkQ2g19n7JY/9YL8MfAIl7bTesIUykECXmY= @@ -12,10 +62,12 @@ github.com/aws/aws-sdk-go-v2/credentials v1.19.7 h1:tHK47VqqtJxOymRrNtUXN5SP/zUT github.com/aws/aws-sdk-go-v2/credentials v1.19.7/go.mod h1:qOZk8sPDrxhf+4Wf4oT2urYJrYt3RejHSzgAquYeppw= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17 
h1:I0GyV8wiYrP8XpA70g1HBcQO1JlQxCMTW9npl5UbDHY= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17/go.mod h1:tyw7BOl5bBe/oqvoIeECFJjMdzXoa/dfVz3QQ5lgHGA= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 h1:xOLELNKGp2vsiteLsvLPwxC+mYmO6OZ8PYgiuPJzF8U= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17/go.mod h1:5M5CI3D12dNOtH3/mk6minaRwI2/37ifCURZISxA/IQ= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 h1:WWLqlh79iO48yLkj1v3ISRNiv+3KdQoZ6JWyfcsyQik= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17/go.mod h1:EhG22vHRrvF8oXSTYStZhJc1aUgKtnJe+aOiFEV90cM= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.12 h1:Zy6Tme1AA13kX8x3CnkHx5cqdGWGaj/anwOiWGnA0Xo= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.12/go.mod h1:ql4uXYKoTM9WUAUSmthY4AtPVrlTBZOvnBJTiCUdPxI= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.21 h1:Rgg6wvjjtX8bNHcvi9OnXWwcE0a2vGpbwmtICOsvcf4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.21/go.mod h1:A/kJFst/nm//cyqonihbdpQZwiUhhzpqTsdbhDdRF9c= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.21 h1:PEgGVtPoB6NTpPrBgqSE5hE/o47Ij9qk/SEZFbUOe9A= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.21/go.mod h1:p+hz+PRAYlY3zcpJhPwXlLC4C+kqn70WIHwnzAfs6ps= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 h1:JqcdRG//czea7Ppjb+g/n4o8i/R50aTBHkA7vu0lK+k= @@ -38,21 +90,66 @@ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 h1:gd84Omyu9JLriJVCbGApcLz github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13/go.mod h1:sTGThjphYE4Ohw8vJiRStAcu3rbjtXRsdNB0TvZ5wwo= github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 h1:5fFjR/ToSOzB2OQ/XqWpZBmNvmP/pJ1jOWYlFDJTjRQ= github.com/aws/aws-sdk-go-v2/service/sts v1.41.6/go.mod h1:qgFDZQSD/Kys7nJnVqYlWKnh0SSdMjAi0uSwON4wgYQ= 
-github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= -github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= -github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs= -github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA= -github.com/clipperhouse/uax29/v2 v2.3.0 h1:SNdx9DVUqMoBuBoW3iLOj4FQv3dN5mDtuqwuhIGpJy4= -github.com/clipperhouse/uax29/v2 v2.3.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= +github.com/aws/smithy-go v1.24.2 h1:FzA3bu/nt/vDvmnkg+R8Xl46gmzEDam6mZ1hzmwXFng= +github.com/aws/smithy-go v1.24.2/go.mod h1:YE2RhdIuDbA5E5bTdciG9KrW3+TiEONeUWCqxX9i1Fc= +github.com/calypr/syfon/apigen v0.1.2 h1:/9IJcrS655DW4W/cPW0QoqBOhZrgTnRfUAlYOZ3q1e8= +github.com/calypr/syfon/apigen v0.1.2/go.mod h1:2oqNkseovHqbU8kYu4nU+w5VrwhzAj6KfPL7YTBQEGk= +github.com/calypr/syfon/client v0.1.2 h1:MdfV/bLBf0ZkLREuKOVfEKMRiDfpFgLuYSW8GiYOxdk= +github.com/calypr/syfon/client v0.1.2/go.mod h1:xh7ndhABQz7UlM/tq80ZfYKDs2vBuwgfyrAhs3ghg7Y= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/clipperhouse/uax29/v2 v2.7.0 h1:+gs4oBZ2gPfVrKPthwbMzWZDaAFPGYK72F0NJv2v7Vk= +github.com/clipperhouse/uax29/v2 v2.7.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM= +github.com/cncf/xds/go v0.0.0-20251110193048-8bfbf64dc13e h1:gt7U1Igw0xbJdyaCM5H2CnlAlPSkzrhsebQB6WQWjLA= +github.com/cncf/xds/go v0.0.0-20251110193048-8bfbf64dc13e/go.mod h1:KdCmV+x/BuvyMxRnYBlmVaq4OLiKW6iRQfvC62cvdkI= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329 h1:K+fnvUM0VZ7ZFJf0n4L/BRlnsb9pL/GuDG6FqaH+PwM= +github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329/go.mod h1:Alz8LEClvR7xKsrq3qzoc4N0guvVNSS8KmSChGYr9hs= +github.com/envoyproxy/go-control-plane/envoy v1.36.0 h1:yg/JjO5E7ubRyKX3m07GF3reDNEnfOboJ0QySbH736g= +github.com/envoyproxy/go-control-plane/envoy v1.36.0/go.mod h1:ty89S1YCCVruQAm9OtKeEkQLTb+Lkz0k8v9W0Oxsv98= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= -github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= -github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs= +github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 
h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY= +github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-replayers/grpcreplay v1.3.0 h1:1Keyy0m1sIpqstQmgz307zhiJ1pV4uIlFds5weTmxbo= +github.com/google/go-replayers/grpcreplay v1.3.0/go.mod h1:v6NgKtkijC0d3e3RW8il6Sy5sqRVUwoQa4mHOGEy8DI= +github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= +github.com/google/go-replayers/httpreplay v1.2.0/go.mod h1:WahEFFZZ7a1P4VM1qEeHy+tME4bwyqPcwWbNlUI1Mcg= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4= +github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18= +github.com/googleapis/enterprise-certificate-proxy v0.3.7 
h1:zrn2Ee/nWmHulBx5sAVrGgAa0f2/R35S4DJwfFaUPFQ= +github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= +github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -64,38 +161,115 @@ github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+l github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48= github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw= -github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4= -github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.9.0 h1:CeOIz6k+LoN3qX9Z0tyQrPtiB1DFYRPfCIBtaXPSCnA= +github.com/hashicorp/go-version v1.9.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/keybase/go-keychain v0.0.1 h1:way+bWYa6lDppZoZcgMbYsvC7GxljxrskdNInRtuthU= +github.com/keybase/go-keychain v0.0.1/go.mod h1:PdEILRW3i9D8JcdM+FmY6RwkHGnhHxXwkPPMeUgOK1k= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= 
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= -github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/mattn/go-runewidth v0.0.20 h1:WcT52H91ZUAwy8+HUkdM3THM6gXqXuLJi9O3rjcQQaQ= +github.com/mattn/go-runewidth v0.0.20/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo= +github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/vbauerster/mpb/v8 v8.11.2 h1:OqLoHznUVU7SKS/WV+1dB5/hm20YLheYupiHhL5+M1Y= -github.com/vbauerster/mpb/v8 v8.11.2/go.mod h1:mEB/M353al1a7wMUNtiymmPsEkGlJgeJmtlbY5adCJ8= +github.com/vbauerster/mpb/v8 v8.12.0 h1:+gneY3ifzc88tKDzOtfG8k8gfngCx615S2ZmFM4liWg= 
+github.com/vbauerster/mpb/v8 v8.12.0/go.mod h1:V02YIuMVo301Y1VE9VtZlD8s84OMsk+EKN6mwvf/588= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/detectors/gcp v1.38.0 h1:ZoYbqX7OaA/TAikspPl3ozPI6iY6LiIY9I8cUfm+pJs= +go.opentelemetry.io/contrib/detectors/gcp v1.38.0/go.mod h1:SU+iU7nu5ud4oCb3LQOhIZ3nRLj6FNVrKgtflbaf2ts= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0 h1:YH4g8lQroajqUwWbq/tr2QX1JFmEXaDLgG+ew9bLMWo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0/go.mod h1:fvPi2qXDqFs8M4B4fmJhE92TyQs9Ydjlg3RvfUp+NbQ= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg= +go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms= +go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0 h1:6VjV6Et+1Hd2iLZEPtdV7vie80Yyqf7oikJLjQ/myi0= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0/go.mod h1:u8hcp8ji5gaM/RfcOo8z9NMnf1pVLfVY7lBY2VOGuUU= +go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g= +go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc= +go.opentelemetry.io/otel/sdk v1.40.0 h1:KHW/jUzgo6wsPh9At46+h4upjtccTmuZCFAc9OJ71f8= +go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE= +go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw= +go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg= +go.opentelemetry.io/otel/trace v1.40.0 
h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw= +go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA= go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +gocloud.dev v0.45.0 h1:WknIK8IbRdmynDvara3Q7G6wQhmEiOGwpgJufbM39sY= +gocloud.dev v0.45.0/go.mod h1:0kXKmkCLG6d31N7NyLZWzt7jDSQura9zD/mWgiB6THI= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo= +golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= -golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= -golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= +golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= +golang.org/x/xerrors 
v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/api v0.256.0 h1:u6Khm8+F9sxbCTYNoBHg6/Hwv0N/i+V94MvkOSor6oI= +google.golang.org/api v0.256.0/go.mod h1:KIgPhksXADEKJlnEoRa9qAII4rXcy40vfI8HRqcU964= +google.golang.org/genproto v0.0.0-20251124214823-79d6a2a48846 h1:dDbsTLIK7EzwUq36kCSAsk0slouq/S0tWHeeGi97cD8= +google.golang.org/genproto v0.0.0-20251124214823-79d6a2a48846/go.mod h1:PP0g88Dz3C7hRAfbQCQggeWAXjuqGsNPLE4s7jh0RGU= +google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846 h1:ZdyUkS9po3H7G0tuh955QVyyotWvOD4W0aEapeGeUYk= +google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846/go.mod h1:Fk4kyraUvqD7i5H6S43sj2W98fbZa75lpZz/eUyhfO0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846 h1:Wgl1rcDNThT+Zn47YyCXOXyX/COgMTIdhJ717F0l4xk= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= +google.golang.org/grpc v1.77.0 h1:wVVY6/8cGA6vvffn+wWK5ToddbgdU3d8MNENr4evgXM= +google.golang.org/grpc v1.77.0/go.mod h1:z0BY1iVj0q8E1uSQCjL9cppRj+gnZjzDnzV0dHhrNig= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 
v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/ini.v1 v1.67.1 h1:tVBILHy0R6e4wkYOn3XmiITt/hEVH4TFMYvAX2Ytz6k= +gopkg.in/ini.v1 v1.67.1/go.mod h1:x/cyOwCgZqOkJoDIJ3c1KNHMo10+nLGAhh+kn3Zizss= +gopkg.in/validator.v2 v2.0.1 h1:xF0KWyGWXm/LM2G1TrEjqOu4pa6coO9AlWSf3msVfDY= +gopkg.in/validator.v2 v2.0.1/go.mod h1:lIUZBlB3Im4s/eYp39Ry/wkR02yOPhZ9IwIRBjuPuG8= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/hash/hash.go b/hash/hash.go deleted file mode 100644 index 11ee3a1..0000000 --- a/hash/hash.go +++ /dev/null @@ -1,144 +0,0 @@ -package hash - -import ( - "encoding/json" - "fmt" -) - -// ChecksumType represents the digest method used to create the checksum -type ChecksumType string - -// IANA Named Information Hash Algorithm Registry values and other common types -const ( - ChecksumTypeSHA1 ChecksumType = "sha1" - ChecksumTypeSHA256 ChecksumType = "sha256" - ChecksumTypeSHA512 ChecksumType = "sha512" - ChecksumTypeMD5 ChecksumType = "md5" - ChecksumTypeETag ChecksumType = "etag" - ChecksumTypeCRC32C ChecksumType = "crc32c" - ChecksumTypeTrunc512 ChecksumType = "trunc512" -) - -// IsValid checks if the checksum type is a known/recommended value -func (ct ChecksumType) IsValid() bool { - switch ct { - case ChecksumTypeSHA256, ChecksumTypeSHA512, ChecksumTypeSHA1, ChecksumTypeMD5, - ChecksumTypeETag, ChecksumTypeCRC32C, ChecksumTypeTrunc512: - return true - default: - return false - } -} - -// String returns the string representation of the checksum type -func (ct ChecksumType) String() string { - return string(ct) -} - -var SupportedChecksums = map[string]bool{ - string(ChecksumTypeSHA1): true, - string(ChecksumTypeSHA256): true, - string(ChecksumTypeSHA512): true, - string(ChecksumTypeMD5): true, - 
string(ChecksumTypeETag): true, - string(ChecksumTypeCRC32C): true, - string(ChecksumTypeTrunc512): true, -} - -type Checksum struct { - Checksum string `json:"checksum"` - Type ChecksumType `json:"type"` -} - -type HashInfo struct { - MD5 string `json:"md5,omitempty"` - SHA string `json:"sha,omitempty"` - SHA256 string `json:"sha256,omitempty"` - SHA512 string `json:"sha512,omitempty"` - CRC string `json:"crc,omitempty"` - ETag string `json:"etag,omitempty"` -} - -// UnmarshalJSON accepts both the DRS map-based schema and the array-of-checksums schema. -func (h *HashInfo) UnmarshalJSON(data []byte) error { - if string(data) == "null" { - *h = HashInfo{} - return nil - } - - var mapPayload map[string]string - if err := json.Unmarshal(data, &mapPayload); err == nil { - *h = ConvertStringMapToHashInfo(mapPayload) - return nil - } - - var checksumPayload []Checksum - if err := json.Unmarshal(data, &checksumPayload); err == nil { - *h = ConvertChecksumsToHashInfo(checksumPayload) - return nil - } - - return fmt.Errorf("unsupported HashInfo payload: %s", string(data)) -} - -func ConvertStringMapToHashInfo(inputHashes map[string]string) HashInfo { - hashInfo := HashInfo{} - - for key, value := range inputHashes { - if !SupportedChecksums[key] { - continue // Disregard unsupported types - } - switch key { - case string(ChecksumTypeMD5): - hashInfo.MD5 = value - case string(ChecksumTypeSHA1): - hashInfo.SHA = value - case string(ChecksumTypeSHA256): - hashInfo.SHA256 = value - case string(ChecksumTypeSHA512): - hashInfo.SHA512 = value - case string(ChecksumTypeCRC32C): - hashInfo.CRC = value - case string(ChecksumTypeETag): - hashInfo.ETag = value - } - } - - return hashInfo -} - -func ConvertHashInfoToMap(hashes HashInfo) map[string]string { - result := make(map[string]string) - if hashes.MD5 != "" { - result["md5"] = hashes.MD5 - } - if hashes.SHA != "" { - result["sha"] = hashes.SHA - } - if hashes.SHA256 != "" { - result["sha256"] = hashes.SHA256 - } - if hashes.SHA512 
!= "" { - result["sha512"] = hashes.SHA512 - } - if hashes.CRC != "" { - result["crc"] = hashes.CRC - } - if hashes.ETag != "" { - result["etag"] = hashes.ETag - } - return result -} - -func ConvertChecksumsToMap(checksums []Checksum) map[string]string { - result := make(map[string]string, len(checksums)) - for _, c := range checksums { - result[string(c.Type)] = c.Checksum - } - return result -} - -func ConvertChecksumsToHashInfo(checksums []Checksum) HashInfo { - checksumMap := ConvertChecksumsToMap(checksums) - return ConvertStringMapToHashInfo(checksumMap) -} diff --git a/hash/hash_test.go b/hash/hash_test.go deleted file mode 100644 index f08c7ea..0000000 --- a/hash/hash_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package hash - -import ( - "encoding/json" - "testing" -) - -func TestChecksumType_IsValid(t *testing.T) { - tests := []struct { - name string - ct ChecksumType - want bool - }{ - {"valid sha256", ChecksumTypeSHA256, true}, - {"valid md5", ChecksumTypeMD5, true}, - {"invalid type", "invalid", false}, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := tt.ct.IsValid(); got != tt.want { - t.Errorf("ChecksumType.IsValid() = %v, want %v", got, tt.want) - } - }) - } -} - -func TestHashInfo_UnmarshalJSON_Map(t *testing.T) { - jsonMap := `{"sha256": "hash-val", "md5": "md5-val"}` - var h HashInfo - if err := json.Unmarshal([]byte(jsonMap), &h); err != nil { - t.Fatalf("UnmarshalJSON failed: %v", err) - } - if h.SHA256 != "hash-val" { - t.Errorf("expected SHA256 hash-val, got %s", h.SHA256) - } - if h.MD5 != "md5-val" { - t.Errorf("expected MD5 md5-val, got %s", h.MD5) - } -} - -func TestHashInfo_UnmarshalJSON_List(t *testing.T) { - jsonList := `[{"type": "sha256", "checksum": "hash-val"}, {"type": "md5", "checksum": "md5-val"}]` - var h HashInfo - if err := json.Unmarshal([]byte(jsonList), &h); err != nil { - t.Fatalf("UnmarshalJSON failed: %v", err) - } - if h.SHA256 != "hash-val" { - t.Errorf("expected SHA256 hash-val, got %s", 
h.SHA256) - } - if h.MD5 != "md5-val" { - t.Errorf("expected MD5 md5-val, got %s", h.MD5) - } -} diff --git a/indexd/client.go b/indexd/client.go deleted file mode 100644 index 989e69e..0000000 --- a/indexd/client.go +++ /dev/null @@ -1,512 +0,0 @@ -package indexd - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "log/slog" - "net/http" - - "github.com/calypr/data-client/conf" - "github.com/calypr/data-client/drs" - "github.com/calypr/data-client/request" -) - -//go:generate mockgen -destination=../mocks/mock_indexd.go -package=mocks github.com/calypr/data-client/indexd IndexdInterface - -// IndexdInterface defines the interface for Indexd client -type IndexdInterface interface { - request.RequestInterface - - GetObject(ctx context.Context, id string) (*drs.DRSObject, error) - RegisterIndexdRecord(ctx context.Context, indexdObj *IndexdRecord) (*drs.DRSObject, error) - DeleteIndexdRecord(ctx context.Context, did string) error - GetObjectByHash(ctx context.Context, hashType, hashValue string) ([]drs.DRSObject, error) - GetDownloadURL(ctx context.Context, did string, accessType string) (*drs.AccessURL, error) - ListObjectsByProject(ctx context.Context, projectId string) (chan drs.DRSObjectResult, error) - UpdateRecord(ctx context.Context, updateInfo *drs.DRSObject, did string) (*drs.DRSObject, error) - - ListObjects(ctx context.Context) (chan drs.DRSObjectResult, error) - GetProjectSample(ctx context.Context, projectId string, limit int) ([]drs.DRSObject, error) - DeleteRecordsByProject(ctx context.Context, projectId string) error - DeleteRecordByHash(ctx context.Context, hashValue string, projectId string) error - RegisterRecord(ctx context.Context, record *drs.DRSObject) (*drs.DRSObject, error) - UpsertIndexdRecord(ctx context.Context, url string, sha256 string, fileSize int64, projectId string) (*drs.DRSObject, error) -} - -// IndexdClient implements IndexdInterface -type IndexdClient struct { - request.RequestInterface - cred *conf.Credential - 
logger *slog.Logger -} - -// NewIndexdClient creates a new IndexdClient -func NewIndexdClient(req request.RequestInterface, cred *conf.Credential, logger *slog.Logger) IndexdInterface { - return &IndexdClient{ - RequestInterface: req, - cred: cred, - logger: logger, - } -} - -func (c *IndexdClient) GetObject(ctx context.Context, id string) (*drs.DRSObject, error) { - url := fmt.Sprintf("%s/ga4gh/drs/v1/objects/%s", c.cred.APIEndpoint, id) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodGet, - Url: url, - Token: c.cred.AccessToken, - }) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode == http.StatusNotFound { - return nil, fmt.Errorf("object %s not found", id) - } - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("failed to get object %s: %s (status: %d)", id, string(body), resp.StatusCode) - } - - var out OutputObject - if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { - return nil, err - } - return ConvertOutputObjectToDRSObject(&out), nil -} - -func (c *IndexdClient) RegisterIndexdRecord(ctx context.Context, indexdObj *IndexdRecord) (*drs.DRSObject, error) { - indexdObjForm := IndexdRecordForm{ - IndexdRecord: *indexdObj, - Form: "object", - } - - jsonBytes, err := json.Marshal(indexdObjForm) - if err != nil { - return nil, err - } - - url := fmt.Sprintf("%s/index/index", c.cred.APIEndpoint) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodPost, - Url: url, - Body: bytes.NewBuffer(jsonBytes), - Headers: map[string]string{ - "Content-Type": "application/json", - "Accept": "application/json", - }, - Token: c.cred.AccessToken, - }) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("failed to register record %s: %s (status: %d)", indexdObj.Did, string(body), 
resp.StatusCode) - } - - return IndexdRecordToDrsObject(indexdObj) -} - -func (c *IndexdClient) DeleteIndexdRecord(ctx context.Context, did string) error { - // First get the record to get the revision (rev) - record, err := c.getIndexdRecordByDID(ctx, did) - if err != nil { - return err - } - - url := fmt.Sprintf("%s/index/index/%s?rev=%s", c.cred.APIEndpoint, did, record.Rev) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodDelete, - Url: url, - Headers: map[string]string{ - "Accept": "application/json", - }, - Token: c.cred.AccessToken, - }) - if err != nil { - return err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent { - body, _ := io.ReadAll(resp.Body) - return fmt.Errorf("failed to delete record %s: %s (status: %d)", did, string(body), resp.StatusCode) - } - - return nil -} - -func (c *IndexdClient) getIndexdRecordByDID(ctx context.Context, did string) (*OutputInfo, error) { - url := fmt.Sprintf("%s/index/index/%s", c.cred.APIEndpoint, did) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodGet, - Url: url, - Token: c.cred.AccessToken, - }) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("failed to get indexd record %s: %s (status: %d)", did, string(body), resp.StatusCode) - } - - var info OutputInfo - if err := json.NewDecoder(resp.Body).Decode(&info); err != nil { - return nil, err - } - return &info, nil -} - -func (c *IndexdClient) GetObjectByHash(ctx context.Context, hashType, hashValue string) ([]drs.DRSObject, error) { - url := fmt.Sprintf("%s/index/index?hash=%s:%s", c.cred.APIEndpoint, hashType, hashValue) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodGet, - Url: url, - Headers: map[string]string{ - "Accept": "application/json", - }, - Token: c.cred.AccessToken, - }) - if err != nil { - return nil, err 
- } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("failed to query by hash %s:%s: %s (status: %d)", hashType, hashValue, string(body), resp.StatusCode) - } - - var records ListRecords - if err := json.NewDecoder(resp.Body).Decode(&records); err != nil { - return nil, err - } - - out := make([]drs.DRSObject, 0, len(records.Records)) - for _, r := range records.Records { - drsObj, err := IndexdRecordToDrsObject(r.ToIndexdRecord()) - if err != nil { - return nil, err - } - out = append(out, *drsObj) - } - return out, nil -} - -func (c *IndexdClient) GetDownloadURL(ctx context.Context, did string, accessType string) (*drs.AccessURL, error) { - url := fmt.Sprintf("%s/ga4gh/drs/v1/objects/%s/access/%s", c.cred.APIEndpoint, did, accessType) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodGet, - Url: url, - Token: c.cred.AccessToken, - }) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("failed to get download URL for %s: %s (status: %d)", did, string(body), resp.StatusCode) - } - - var accessURL drs.AccessURL - if err := json.NewDecoder(resp.Body).Decode(&accessURL); err != nil { - return nil, err - } - return &accessURL, nil -} - -func (c *IndexdClient) ListObjectsByProject(ctx context.Context, projectId string) (chan drs.DRSObjectResult, error) { - const PAGESIZE = 50 - - resourcePath, err := drs.ProjectToResource(projectId) - if err != nil { - return nil, err - } - - out := make(chan drs.DRSObjectResult, PAGESIZE) - - go func() { - defer close(out) - pageNum := 0 - active := true - - for active { - url := fmt.Sprintf("%s/index/index?authz=%s&limit=%d&page=%d", - c.cred.APIEndpoint, resourcePath, PAGESIZE, pageNum) - - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodGet, - Url: url, - Headers: map[string]string{ - "Accept": 
"application/json", - }, - Token: c.cred.AccessToken, - }) - - if err != nil { - out <- drs.DRSObjectResult{Error: err} - break - } - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - resp.Body.Close() - out <- drs.DRSObjectResult{Error: fmt.Errorf("api error %d: %s", resp.StatusCode, string(body))} - break - } - - var page ListRecords - err = json.NewDecoder(resp.Body).Decode(&page) - resp.Body.Close() - - if err != nil { - out <- drs.DRSObjectResult{Error: err} - break - } - - if len(page.Records) == 0 { - active = false - break - } - - for _, elem := range page.Records { - drsObj, err := elem.ToIndexdRecord().ToDrsObject() - if err != nil { - out <- drs.DRSObjectResult{Error: err} - continue - } - out <- drs.DRSObjectResult{Object: drsObj} - } - pageNum++ - } - }() - - return out, nil -} - -func (c *IndexdClient) UpdateRecord(ctx context.Context, updateInfo *drs.DRSObject, did string) (*drs.DRSObject, error) { - // Get current revision from existing record - record, err := c.getIndexdRecordByDID(ctx, did) - if err != nil { - return nil, fmt.Errorf("could not retrieve existing record for DID %s: %v", did, err) - } - - // Build update payload starting with existing record values - updatePayload := UpdateInputInfo{ - URLs: record.URLs, - FileName: record.FileName, - Version: record.Version, - Authz: record.Authz, - ACL: record.ACL, - Metadata: record.Metadata, - } - - // Apply updates from updateInfo - if len(updateInfo.AccessMethods) > 0 { - newURLs := make([]string, 0, len(updateInfo.AccessMethods)) - for _, a := range updateInfo.AccessMethods { - newURLs = append(newURLs, a.AccessURL.URL) - } - updatePayload.URLs = appendUnique(updatePayload.URLs, newURLs) - - authz := IndexdAuthzFromDrsAccessMethods(updateInfo.AccessMethods) - updatePayload.Authz = appendUnique(updatePayload.Authz, authz) - } - - if updateInfo.Name != "" { - updatePayload.FileName = updateInfo.Name - } - - if updateInfo.Version != "" { - updatePayload.Version = 
updateInfo.Version - } - - if updateInfo.Description != "" { - if updatePayload.Metadata == nil { - updatePayload.Metadata = make(map[string]any) - } - updatePayload.Metadata["description"] = updateInfo.Description - } - - jsonBytes, err := json.Marshal(updatePayload) - if err != nil { - return nil, fmt.Errorf("error marshaling indexd update payload: %v", err) - } - - url := fmt.Sprintf("%s/index/index/%s?rev=%s", c.cred.APIEndpoint, did, record.Rev) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodPut, - Url: url, - Body: bytes.NewBuffer(jsonBytes), - Headers: map[string]string{ - "Content-Type": "application/json", - "Accept": "application/json", - }, - Token: c.cred.AccessToken, - }) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - return nil, fmt.Errorf("failed to update record %s: %s (status: %d)", did, string(body), resp.StatusCode) - } - - return c.GetObject(ctx, did) -} - -func (c *IndexdClient) ListObjects(ctx context.Context) (chan drs.DRSObjectResult, error) { - url := fmt.Sprintf("%s/ga4gh/drs/v1/objects", c.cred.APIEndpoint) - const PAGESIZE = 50 - out := make(chan drs.DRSObjectResult, 10) - - go func() { - defer close(out) - pageNum := 0 - active := true - for active { - fullURL := fmt.Sprintf("%s?limit=%d&page=%d", url, PAGESIZE, pageNum) - resp, err := c.Do(ctx, &request.RequestBuilder{ - Method: http.MethodGet, - Url: fullURL, - Token: c.cred.AccessToken, - }) - - if err != nil { - out <- drs.DRSObjectResult{Error: err} - return - } - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - resp.Body.Close() - out <- drs.DRSObjectResult{Error: fmt.Errorf("api error %d: %s", resp.StatusCode, string(body))} - return - } - - var page drs.DRSPage - err = json.NewDecoder(resp.Body).Decode(&page) - resp.Body.Close() - - if err != nil { - out <- drs.DRSObjectResult{Error: err} - return - } - - if len(page.DRSObjects) == 
0 { - active = false - break - } - - for _, elem := range page.DRSObjects { - out <- drs.DRSObjectResult{Object: &elem} - } - pageNum++ - } - }() - return out, nil -} - -func (c *IndexdClient) GetProjectSample(ctx context.Context, projectId string, limit int) ([]drs.DRSObject, error) { - if limit <= 0 { - limit = 1 - } - - objChan, err := c.ListObjectsByProject(ctx, projectId) - if err != nil { - return nil, err - } - - result := make([]drs.DRSObject, 0, limit) - for objResult := range objChan { - if objResult.Error != nil { - return nil, objResult.Error - } - result = append(result, *objResult.Object) - - if len(result) >= limit { - go func() { - for range objChan { - } - }() - break - } - } - - return result, nil -} - -func (c *IndexdClient) DeleteRecordsByProject(ctx context.Context, projectId string) error { - recs, err := c.ListObjectsByProject(ctx, projectId) - if err != nil { - return err - } - for rec := range recs { - if rec.Error != nil { - return rec.Error - } - err := c.DeleteIndexdRecord(ctx, rec.Object.Id) - if err != nil { - c.logger.Error(fmt.Sprintf("DeleteRecordsByProject Error for %s: %v", rec.Object.Id, err)) - continue - } - } - return nil -} - -func (c *IndexdClient) DeleteRecordByHash(ctx context.Context, hashValue string, projectId string) error { - records, err := c.GetObjectByHash(ctx, "sha256", hashValue) - if err != nil { - return fmt.Errorf("error getting records for hash %s: %v", hashValue, err) - } - if len(records) == 0 { - return fmt.Errorf("no records found for hash %s", hashValue) - } - - matchingRecord, err := drs.FindMatchingRecord(records, projectId) - if err != nil { - return fmt.Errorf("error finding matching record for project %s: %v", projectId, err) - } - if matchingRecord == nil { - return fmt.Errorf("no matching record found for project %s", projectId) - } - - return c.DeleteIndexdRecord(ctx, matchingRecord.Id) -} - -func (c *IndexdClient) RegisterRecord(ctx context.Context, record *drs.DRSObject) (*drs.DRSObject, error) 
{ - indexdRecord, err := IndexdRecordFromDrsObject(record) - if err != nil { - return nil, fmt.Errorf("error converting DRS object to indexd record: %v", err) - } - - return c.RegisterIndexdRecord(ctx, indexdRecord) -} - -func appendUnique(existing []string, toAdd []string) []string { - seen := make(map[string]bool) - for _, v := range existing { - seen[v] = true - } - for _, v := range toAdd { - if !seen[v] { - existing = append(existing, v) - seen[v] = true - } - } - return existing -} diff --git a/indexd/client_test.go b/indexd/client_test.go deleted file mode 100644 index 818e498..0000000 --- a/indexd/client_test.go +++ /dev/null @@ -1,266 +0,0 @@ -package indexd - -import ( - "context" - "encoding/json" - "io" - "net/http" - "net/http/httptest" - "strings" - "sync" - "testing" - - "github.com/calypr/data-client/conf" - drs "github.com/calypr/data-client/drs" - "github.com/calypr/data-client/hash" - "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/request" -) - -type mockIndexdServer struct { - mu sync.Mutex - listProjectPages int - listObjectsPages int - lastUpdatePayload UpdateInputInfo -} - -func (m *mockIndexdServer) handler(t *testing.T) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - path := r.URL.Path - switch { - case r.Method == http.MethodGet && path == "/index/index": - if hashQuery := r.URL.Query().Get("hash"); hashQuery != "" { - record := sampleOutputInfo() - page := ListRecords{Records: []OutputInfo{record}} - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(page) - return - } - if r.URL.Query().Get("authz") != "" { - m.mu.Lock() - page := m.listProjectPages - m.listProjectPages++ - m.mu.Unlock() - w.WriteHeader(http.StatusOK) - if page == 0 { - _ = json.NewEncoder(w).Encode(ListRecords{Records: []OutputInfo{sampleOutputInfo()}}) - } else { - _ = json.NewEncoder(w).Encode(ListRecords{Records: []OutputInfo{}}) - } - return - } - - case r.Method == http.MethodPost && path == 
"/index/index": - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(`{"did":"did-1"}`)) - return - case r.Method == http.MethodGet && strings.HasPrefix(path, "/ga4gh/drs/v1/objects"): - if path == "/ga4gh/drs/v1/objects" { - m.mu.Lock() - page := m.listObjectsPages - m.listObjectsPages++ - m.mu.Unlock() - w.WriteHeader(http.StatusOK) - if page == 0 { - _ = json.NewEncoder(w).Encode(drs.DRSPage{DRSObjects: []drs.DRSObject{sampleDRSObject()}}) - } else { - _ = json.NewEncoder(w).Encode(drs.DRSPage{DRSObjects: []drs.DRSObject{}}) - } - return - } - obj := sampleOutputObject() - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(obj) - return - case r.Method == http.MethodGet && strings.HasPrefix(path, "/index/index/"): - record := sampleOutputInfo() - record.Rev = "rev-1" - w.WriteHeader(http.StatusOK) - _ = json.NewEncoder(w).Encode(record) - return - case r.Method == http.MethodPut && strings.HasPrefix(path, "/index/index/"): - body, _ := io.ReadAll(r.Body) - payload := UpdateInputInfo{} - _ = json.Unmarshal(body, &payload) - m.mu.Lock() - m.lastUpdatePayload = payload - m.mu.Unlock() - w.WriteHeader(http.StatusOK) - return - case r.Method == http.MethodDelete && strings.HasPrefix(path, "/index/index/"): - w.WriteHeader(http.StatusNoContent) - return - } - w.WriteHeader(http.StatusNotFound) - } -} - -func sampleOutputInfo() OutputInfo { - return OutputInfo{ - Did: "did-1", - FileName: "file.txt", - URLs: []string{"s3://bucket/key"}, - Authz: []string{"/programs/test/projects/proj"}, - Hashes: hash.HashInfo{SHA256: "sha-256"}, - Size: 123, - } -} - -func sampleDRSObject() drs.DRSObject { - return drs.DRSObject{ - Id: "did-1", - Name: "file.txt", - Size: 123, - Checksums: hash.HashInfo{ - SHA256: "sha-256", - }, - AccessMethods: []drs.AccessMethod{ - { - Type: "s3", - AccessURL: drs.AccessURL{URL: "s3://bucket/key"}, - Authorizations: &drs.Authorizations{Value: "/programs/test/projects/proj"}, - }, - }, - } -} - -func sampleOutputObject() OutputObject { - 
return OutputObject{ - Id: "did-1", - Name: "file.txt", - Size: 123, - Checksums: []hash.Checksum{ - {Checksum: "sha-256", Type: hash.ChecksumTypeSHA256}, - }, - } -} - -func newTestClient(server *httptest.Server) IndexdInterface { - cred := &conf.Credential{APIEndpoint: server.URL, Profile: "test", AccessToken: "test-token"} - logger, _ := logs.New("test") - config := conf.NewConfigure(logger.Logger) - req := request.NewRequestInterface(logger, cred, config) - return NewIndexdClient(req, cred, logger.Logger) -} - -func TestIndexdClient_ListAndQueryDirect(t *testing.T) { - mock := &mockIndexdServer{} - server := httptest.NewServer(mock.handler(t)) - defer server.Close() - - client := newTestClient(server) - - records, err := client.GetObjectByHash(context.Background(), "sha256", "sha-256") - if err != nil { - t.Fatalf("GetObjectByHash error: %v", err) - } - if len(records) != 1 || records[0].Id != "did-1" { - t.Fatalf("unexpected records: %+v", records) - } - - objChan, err := client.ListObjectsByProject(context.Background(), "test-proj") - if err != nil { - t.Fatalf("ListObjectsByProject error: %v", err) - } - var found bool - for res := range objChan { - if res.Error != nil { - t.Fatalf("ListObjectsByProject result error: %v", res.Error) - } - if res.Object != nil && res.Object.Id == "did-1" { - found = true - } - } - if !found { - t.Fatalf("expected object from ListObjectsByProject") - } - - listChan, err := client.ListObjects(context.Background()) - if err != nil { - t.Fatalf("ListObjects error: %v", err) - } - var listCount int - for res := range listChan { - if res.Error != nil { - t.Fatalf("ListObjects result error: %v", res.Error) - } - if res.Object != nil { - listCount++ - } - } - if listCount != 1 { - t.Fatalf("expected 1 object from ListObjects, got %d", listCount) - } -} - -func TestIndexdClient_RegisterAndUpdateDirect(t *testing.T) { - mock := &mockIndexdServer{} - server := httptest.NewServer(mock.handler(t)) - defer server.Close() - - client := 
newTestClient(server) - - drsObj := &drs.DRSObject{ - Id: "did-1", - Name: "file.txt", - Size: 123, - Checksums: hash.HashInfo{SHA256: "sha-256"}, - AccessMethods: []drs.AccessMethod{ - { - Type: "s3", - AccessURL: drs.AccessURL{URL: "s3://bucket/key"}, - Authorizations: &drs.Authorizations{Value: "/programs/test/projects/proj"}, - }, - }, - } - - obj, err := client.RegisterRecord(context.Background(), drsObj) - if err != nil { - t.Fatalf("RegisterRecord error: %v", err) - } - if obj.Id != "did-1" { - t.Fatalf("unexpected DRS object: %+v", obj) - } - - update := &drs.DRSObject{ - Name: "file-updated.txt", - Version: "v2", - Description: "updated", - AccessMethods: []drs.AccessMethod{ - { - Type: "s3", - AccessURL: drs.AccessURL{URL: "s3://bucket/other"}, - Authorizations: &drs.Authorizations{Value: "/programs/test/projects/proj"}, - }, - }, - } - - _, err = client.UpdateRecord(context.Background(), update, "did-1") - if err != nil { - t.Fatalf("UpdateRecord error: %v", err) - } - - mock.mu.Lock() - payload := mock.lastUpdatePayload - mock.mu.Unlock() - - if len(payload.URLs) != 2 { - t.Fatalf("expected URLs to include appended entries, got %+v", payload.URLs) - } -} - -func TestIndexdClient_GetObjectDirect(t *testing.T) { - mock := &mockIndexdServer{} - server := httptest.NewServer(mock.handler(t)) - defer server.Close() - - client := newTestClient(server) - - record, err := client.GetObject(context.Background(), "did-1") - if err != nil { - t.Fatalf("GetObject error: %v", err) - } - if record.Id != "did-1" { - t.Fatalf("unexpected record: %+v", record) - } -} diff --git a/indexd/convert.go b/indexd/convert.go deleted file mode 100644 index 117cac9..0000000 --- a/indexd/convert.go +++ /dev/null @@ -1,99 +0,0 @@ -package indexd - -// Conversion functions between drs.DRSObject and IndexdRecord - -import ( - "fmt" - "net/url" - - "github.com/calypr/data-client/drs" -) - -// IndexdRecordFromDrsObject represents a simplified version of an indexd record for conversion 
purposes -func IndexdRecordFromDrsObject(drsObj *drs.DRSObject) (*IndexdRecord, error) { - indexdObj := &IndexdRecord{ - Did: drsObj.Id, - Size: drsObj.Size, - FileName: drsObj.Name, - URLs: IndexdURLFromDrsAccessURLs(drsObj.AccessMethods), - Authz: IndexdAuthzFromDrsAccessMethods(drsObj.AccessMethods), - Hashes: drsObj.Checksums, - } - return indexdObj, nil -} - -func IndexdRecordToDrsObject(indexdObj *IndexdRecord) (*drs.DRSObject, error) { - accessMethods, err := DRSAccessMethodsFromIndexdURLs(indexdObj.URLs, indexdObj.Authz) - if err != nil { - return nil, err - } - for _, am := range accessMethods { - if am.Authorizations == nil || am.Authorizations.Value == "" { - return nil, fmt.Errorf("access method missing authorization %v, %v", indexdObj, indexdObj.Authz) - } - } - - return &drs.DRSObject{ - Id: indexdObj.Did, - Size: indexdObj.Size, - Name: indexdObj.FileName, - AccessMethods: accessMethods, - Checksums: indexdObj.Hashes, - }, nil -} - -func DRSAccessMethodsFromIndexdURLs(urls []string, authz []string) ([]drs.AccessMethod, error) { - var accessMethods []drs.AccessMethod - for _, urlString := range urls { - var method drs.AccessMethod - method.AccessURL = drs.AccessURL{URL: urlString} - - parsed, err := url.Parse(urlString) - if err != nil { - return nil, fmt.Errorf("failed to parse url %q: %v", urlString, err) - } - if parsed.Scheme == "" { - // default to https if no scheme or parse error - method.Type = "https" - } else { - method.Type = parsed.Scheme - } - - // check if authz is null or 0-length, then error - if authz == nil { - return nil, fmt.Errorf("authz is required") - } - - // NOTE: a record can only have 1 authz entry atm - method.Authorizations = &drs.Authorizations{Value: authz[0]} - accessMethods = append(accessMethods, method) - } - return accessMethods, nil -} - -// IndexdAuthzFromDrsAccessMethods extracts authz values from DRS access methods -func IndexdAuthzFromDrsAccessMethods(accessMethods []drs.AccessMethod) []string { - var authz 
[]string - for _, drsURL := range accessMethods { - if drsURL.Authorizations != nil { - authz = append(authz, drsURL.Authorizations.Value) - } - } - return authz -} - -func IndexdURLFromDrsAccessURLs(accessMethods []drs.AccessMethod) []string { - var urls []string - for _, drsURL := range accessMethods { - urls = append(urls, drsURL.AccessURL.URL) - } - return urls -} - -func (inr *IndexdRecord) ToDrsObject() (*drs.DRSObject, error) { - o, err := IndexdRecordToDrsObject(inr) - if err != nil { - return nil, err - } - return o, nil -} diff --git a/indexd/records.go b/indexd/records.go deleted file mode 100644 index 7f03613..0000000 --- a/indexd/records.go +++ /dev/null @@ -1,97 +0,0 @@ -package indexd - -// https://github.com/uc-cdis/indexd/blob/master/openapis/swagger.yaml - -import ( - "github.com/calypr/data-client/hash" -) - -// subset of the OpenAPI spec for the InputInfo object in indexd -// TODO: make another object based on VersionInputInfo that has content_created_date and so can handle a POST of dates via indexd/ -type IndexdRecord struct { - // Unique identifier for the record (UUID) - Did string `json:"did"` - - // Human-readable file name - FileName string `json:"file_name,omitempty"` - - // List of URLs where the file can be accessed - URLs []string `json:"urls"` - - // Hashes of the file (e.g., md5, sha256) - Size int64 `json:"size"` - - // List of access control lists (ACLs) - ACL []string `json:"acl,omitempty"` - - // List of authorization policies - Authz []string `json:"authz,omitempty"` - - Hashes hash.HashInfo `json:"hashes,omitzero"` - - // Additional metadata as key-value pairs - Metadata map[string]string `json:"metadata,omitempty"` - - // Version of the record (optional) - Version string `json:"version,omitempty"` -} - -// create indexd record struct used for POSTs that is IndexdRecord with form field -type IndexdRecordForm struct { - IndexdRecord - Form string `json:"form"` - Rev string `json:"rev,omitempty"` -} - -type ListRecordsResult 
struct { - Record *OutputInfo - Error error -} - -type ListRecords struct { - IDs []string `json:"ids"` - Records []OutputInfo `json:"records"` - Size int64 `json:"size"` - Start int64 `json:"start"` - Limit int64 `json:"limit"` - FileName string `json:"file_name"` - URLs []string `json:"urls"` - ACL []string `json:"acl"` - Authz []string `json:"authz"` - Hashes hash.HashInfo `json:"hashes"` - Metadata map[string]any `json:"metadata"` - Version string `json:"version"` -} - -type OutputInfo struct { - Did string `json:"did"` - BaseID string `json:"baseid"` - Rev string `json:"rev"` - Form string `json:"form"` - Size int64 `json:"size"` - FileName string `json:"file_name"` - Version string `json:"version"` - Uploader string `json:"uploader"` - URLs []string `json:"urls"` - ACL []string `json:"acl"` - Authz []string `json:"authz"` - Hashes hash.HashInfo `json:"hashes"` - UpdatedDate string `json:"updated_date"` - CreatedDate string `json:"created_date"` - Metadata map[string]any `json:"metadata"` - URLsMetadata map[string]any `json:"urls_metadata"` -} - -func (outputInfo *OutputInfo) ToIndexdRecord() *IndexdRecord { - return &IndexdRecord{ - Did: outputInfo.Did, - Size: outputInfo.Size, - FileName: outputInfo.FileName, - URLs: outputInfo.URLs, - ACL: outputInfo.ACL, - Authz: outputInfo.Authz, - Hashes: outputInfo.Hashes, - //Metadata: outputInfo.Metadata, //TODO: re-enable metadata. 
One is map[string]string, the other is map[string]interface{} - Version: outputInfo.Version, - } -} diff --git a/indexd/types.go b/indexd/types.go deleted file mode 100644 index 54c601a..0000000 --- a/indexd/types.go +++ /dev/null @@ -1,70 +0,0 @@ -package indexd - -import ( - "github.com/calypr/data-client/drs" - "github.com/calypr/data-client/hash" -) - -type OutputObject struct { - Id string `json:"id"` - Name string `json:"name"` - SelfURI string `json:"self_uri,omitempty"` - Size int64 `json:"size"` - CreatedTime string `json:"created_time,omitempty"` - UpdatedTime string `json:"updated_time,omitempty"` - Version string `json:"version,omitempty"` - MimeType string `json:"mime_type,omitempty"` - Checksums []hash.Checksum `json:"checksums"` - AccessMethods []drs.AccessMethod `json:"access_methods"` - Contents []drs.Contents `json:"contents,omitempty"` - Description string `json:"description,omitempty"` - Aliases []string `json:"aliases,omitempty"` -} - -func ConvertOutputObjectToDRSObject(in *OutputObject) *drs.DRSObject { - if in == nil { - return nil - } - - hashInfo := hash.ConvertChecksumsToHashInfo(in.Checksums) - - return &drs.DRSObject{ - Id: in.Id, - Name: in.Name, - SelfURI: in.SelfURI, - Size: in.Size, - CreatedTime: in.CreatedTime, - UpdatedTime: in.UpdatedTime, - Version: in.Version, - MimeType: in.MimeType, - Checksums: hashInfo, - AccessMethods: in.AccessMethods, - Contents: in.Contents, - Description: in.Description, - Aliases: in.Aliases, - } -} - -// UpdateInputInfo is the put object for index records -type UpdateInputInfo struct { - // Human-readable file name - FileName string `json:"file_name,omitempty"` - - // Additional metadata as key-value pairs - Metadata map[string]any `json:"metadata,omitempty"` - - // URL-specific metadata as key-value pairs - URLsMetadata map[string]any `json:"urls_metadata,omitempty"` - - // Version of the record - Version string `json:"version,omitempty"` - - // List of URLs where the file can be accessed - URLs 
[]string `json:"urls,omitempty"` - - // List of access control lists (ACLs) - ACL []string `json:"acl,omitempty"` - - // List of authorization policies - Authz []string `json:"authz,omitempty"` -} diff --git a/indexd/types_test.go b/indexd/types_test.go deleted file mode 100644 index c81536c..0000000 --- a/indexd/types_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package indexd - -import ( - "testing" - - "github.com/calypr/data-client/drs" - "github.com/calypr/data-client/hash" -) - -func TestConvertOutputObjectToDRSObject(t *testing.T) { - out := &OutputObject{ - Id: "did-1", - Name: "file.txt", - SelfURI: "drs://server/did-1", - Size: 12345, - CreatedTime: "2023-01-01T00:00:00Z", - UpdatedTime: "2023-01-02T00:00:00Z", - Version: "v1", - MimeType: "text/plain", - Checksums: []hash.Checksum{ - {Type: hash.ChecksumTypeSHA256, Checksum: "sha256-hash"}, - {Type: hash.ChecksumTypeMD5, Checksum: "md5-hash"}, - }, - AccessMethods: []drs.AccessMethod{ - { - Type: "s3", - AccessURL: drs.AccessURL{ - URL: "s3://bucket/key", - }, - }, - }, - Description: "A test file", - Aliases: []string{"alias1"}, - } - - drsObj := ConvertOutputObjectToDRSObject(out) - - if drsObj.Id != out.Id { - t.Errorf("expected Id %s, got %s", out.Id, drsObj.Id) - } - if drsObj.Name != out.Name { - t.Errorf("expected Name %s, got %s", out.Name, drsObj.Name) - } - if drsObj.Size != out.Size { - t.Errorf("expected Size %d, got %d", out.Size, drsObj.Size) - } - // Verify Checksums conversion (slice to HashInfo) - if drsObj.Checksums.SHA256 != "sha256-hash" { - t.Errorf("expected SHA256 %s, got %s", "sha256-hash", drsObj.Checksums.SHA256) - } - if drsObj.Checksums.MD5 != "md5-hash" { - t.Errorf("expected MD5 %s, got %s", "md5-hash", drsObj.Checksums.MD5) - } - if len(drsObj.AccessMethods) != 1 { - t.Errorf("expected 1 access method, got %d", len(drsObj.AccessMethods)) - } - if drsObj.AccessMethods[0].AccessURL.URL != "s3://bucket/key" { - t.Errorf("expected access URL s3://bucket/key, got %s", 
drsObj.AccessMethods[0].AccessURL.URL) - } -} diff --git a/indexd/upsert.go b/indexd/upsert.go deleted file mode 100644 index 31f7411..0000000 --- a/indexd/upsert.go +++ /dev/null @@ -1,54 +0,0 @@ -package indexd - -import ( - "context" - "fmt" - "slices" - - "github.com/calypr/data-client/drs" - "github.com/calypr/data-client/s3utils" -) - -// UpsertIndexdRecord creates or updates an indexd record with a new URL. -func (c *IndexdClient) UpsertIndexdRecord(ctx context.Context, url string, sha256 string, fileSize int64, projectId string) (*drs.DRSObject, error) { - uuid := drs.DrsUUID(projectId, sha256) - - records, err := c.GetObjectByHash(ctx, "sha256", sha256) - if err != nil { - return nil, fmt.Errorf("error querying indexd server: %v", err) - } - - var matchingRecord *drs.DRSObject - for i := range records { - if records[i].Id == uuid { - matchingRecord = &records[i] - break - } - } - - if matchingRecord != nil { - existingURLs := IndexdURLFromDrsAccessURLs(matchingRecord.AccessMethods) - if slices.Contains(existingURLs, url) { - c.logger.Debug("Nothing to do: file already registered") - return matchingRecord, nil - } - - c.logger.Debug("updating existing record with new url") - updatedRecord := drs.DRSObject{AccessMethods: []drs.AccessMethod{{AccessURL: drs.AccessURL{URL: url}}}} - return c.UpdateRecord(ctx, &updatedRecord, matchingRecord.Id) - } - - // If no record exists, create one - c.logger.Debug("creating new record") - _, key, err := s3utils.ParseS3URL(url) - if err != nil { - return nil, err - } - - drsObj, err := drs.BuildDrsObj(key, sha256, fileSize, uuid, "placeholder-bucket", projectId) - if err != nil { - return nil, err - } - - return c.RegisterRecord(ctx, drsObj) -} diff --git a/logs/factory.go b/logs/factory.go index 5a428f5..ac44b54 100644 --- a/logs/factory.go +++ b/logs/factory.go @@ -58,7 +58,7 @@ func New(profile string, opts ...Option) (*Gen3Logger, func()) { t := NewGen3Logger(sl, logDir, profile) if cfg.enableScoreboard { - t.scoreboard 
= NewSB(5, t) + t.scoreboard = NewSB(5, t.Logger) } if cfg.failedLog { diff --git a/logs/logger_test.go b/logs/logger_test.go index 7e689f8..382d2b5 100644 --- a/logs/logger_test.go +++ b/logs/logger_test.go @@ -68,7 +68,7 @@ func TestNew_WithScoreboardOption(t *testing.T) { t.Fatal("Expected non-nil logger") } - if logger.scoreboard == nil { + if logger.Scoreboard() == nil { t.Error("Expected non-nil scoreboard when WithScoreboard option is used") } } @@ -82,9 +82,8 @@ func TestNew_WithFailedLogOption(t *testing.T) { t.Fatal("Expected non-nil logger") } - if logger.failedPath == "" { - t.Error("Expected non-empty failed path when WithFailedLog option is used") - } + // Ensure failed-log helpers remain callable with syfon-backed logger. + _ = logger.GetFailedLogMap() } func TestNew_WithSucceededLogOption(t *testing.T) { @@ -96,9 +95,8 @@ func TestNew_WithSucceededLogOption(t *testing.T) { t.Fatal("Expected non-nil logger") } - if logger.succeededPath == "" { - t.Error("Expected non-empty succeeded path when WithSucceededLog option is used") - } + // Ensure succeeded-log helpers remain callable with syfon-backed logger. 
+ _ = logger.GetSucceededLogMap() } func TestNew_WithBaseLogger(t *testing.T) { @@ -136,7 +134,7 @@ func TestNew_WithMultipleOptions(t *testing.T) { t.Error("Expected non-nil embedded slog logger") } - if logger.scoreboard == nil { + if logger.Scoreboard() == nil { t.Error("Expected non-nil scoreboard") } diff --git a/logs/scoreboard.go b/logs/scoreboard.go index bf43083..738a47a 100644 --- a/logs/scoreboard.go +++ b/logs/scoreboard.go @@ -2,6 +2,9 @@ package logs import ( "fmt" + "io" + "log/slog" + "os" "sync" "text/tabwriter" ) @@ -10,19 +13,21 @@ import ( type Scoreboard struct { mu sync.Mutex Counts []int // index 0 = success on first try, 1 = after 1 retry, ..., last = failed - log *Gen3Logger + logger *slog.Logger + writer io.Writer } -// New creates a new scoreboard +// NewSB creates a new scoreboard // maxRetryCount = how many retries you allow before giving up -func NewSB(maxRetryCount int, log *Gen3Logger) *Scoreboard { +func NewSB(maxRetryCount int, logger *slog.Logger) *Scoreboard { return &Scoreboard{ Counts: make([]int, maxRetryCount+2), // +2: one for success-on-first, one for final failure - log: log, + logger: logger, + writer: os.Stderr, } } -// Increment records a result after `retryCount` attempts +// IncrementSB records a result after `retryCount` attempts // retryCount == 0 → succeeded on first try // retryCount == max → final failure func (s *Scoreboard) IncrementSB(retryCount int) { @@ -38,7 +43,7 @@ func (s *Scoreboard) IncrementSB(retryCount int) { s.Counts[retryCount]++ } -// Print the beautiful table at the end +// PrintSB prints the beautiful table at the end func (s *Scoreboard) PrintSB() { s.mu.Lock() defer s.mu.Unlock() @@ -51,8 +56,8 @@ func (s *Scoreboard) PrintSB() { return } - s.log.Println("\n\nSubmission Results") - w := tabwriter.NewWriter(s.log.Writer(), 0, 0, 2, ' ', 0) + s.logger.Info("Submission Results") + w := tabwriter.NewWriter(s.writer, 0, 0, 2, ' ', 0) for i, count := range s.Counts { if i == 0 { diff --git 
a/logs/tee_logger.go b/logs/tee_logger.go index 228e06b..9861187 100644 --- a/logs/tee_logger.go +++ b/logs/tee_logger.go @@ -210,7 +210,7 @@ func (t *Gen3Logger) SucceededContext(ctx context.Context, filePath, guid string func (t *Gen3Logger) succeededHelper(ctx context.Context, filePath, guid string, skip int) { msg := fmt.Sprintf("Succeeded: %s (GUID: %s)", filePath, guid) - t.logWithSkip(ctx, slog.LevelInfo, skip, msg) + t.logWithSkip(ctx, slog.LevelDebug, skip, msg) if t.succeededPath != "" { t.writeSucceededSync(filePath, guid) } diff --git a/mocks/mock_configure.go b/mocks/mock_configure.go index 48aa6bc..dac723e 100644 --- a/mocks/mock_configure.go +++ b/mocks/mock_configure.go @@ -69,19 +69,34 @@ func (mr *MockManagerInterfaceMockRecorder) Import(filePath, fenceToken any) *go return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*MockManagerInterface)(nil).Import), filePath, fenceToken) } -// IsValid mocks base method. -func (m *MockManagerInterface) IsValid(arg0 *conf.Credential) (bool, error) { +// IsCredentialValid mocks base method. +func (m *MockManagerInterface) IsCredentialValid(arg0 *conf.Credential) (bool, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "IsValid", arg0) + ret := m.ctrl.Call(m, "IsCredentialValid", arg0) ret0, _ := ret[0].(bool) ret1, _ := ret[1].(error) return ret0, ret1 } -// IsValid indicates an expected call of IsValid. -func (mr *MockManagerInterfaceMockRecorder) IsValid(arg0 any) *gomock.Call { +// IsCredentialValid indicates an expected call of IsCredentialValid. +func (mr *MockManagerInterfaceMockRecorder) IsCredentialValid(arg0 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsValid", reflect.TypeOf((*MockManagerInterface)(nil).IsValid), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsCredentialValid", reflect.TypeOf((*MockManagerInterface)(nil).IsCredentialValid), arg0) +} + +// IsTokenValid mocks base method. 
+func (m *MockManagerInterface) IsTokenValid(arg0 string) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsTokenValid", arg0) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsTokenValid indicates an expected call of IsTokenValid. +func (mr *MockManagerInterfaceMockRecorder) IsTokenValid(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsTokenValid", reflect.TypeOf((*MockManagerInterface)(nil).IsTokenValid), arg0) } // Load mocks base method. diff --git a/mocks/mock_functions.go b/mocks/mock_functions.go deleted file mode 100644 index 9f905fd..0000000 --- a/mocks/mock_functions.go +++ /dev/null @@ -1,161 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/calypr/data-client/api (interfaces: FunctionInterface) -// -// Generated by this command: -// -// mockgen -destination=../mocks/mock_functions.go -package=mocks github.com/calypr/data-client/api FunctionInterface -// - -// Package mocks is a generated GoMock package. -package mocks - -import ( - context "context" - http "net/http" - reflect "reflect" - - conf "github.com/calypr/data-client/conf" - request "github.com/calypr/data-client/request" - gomock "go.uber.org/mock/gomock" -) - -// MockFunctionInterface is a mock of FunctionInterface interface. -type MockFunctionInterface struct { - ctrl *gomock.Controller - recorder *MockFunctionInterfaceMockRecorder - isgomock struct{} -} - -// MockFunctionInterfaceMockRecorder is the mock recorder for MockFunctionInterface. -type MockFunctionInterfaceMockRecorder struct { - mock *MockFunctionInterface -} - -// NewMockFunctionInterface creates a new mock instance. -func NewMockFunctionInterface(ctrl *gomock.Controller) *MockFunctionInterface { - mock := &MockFunctionInterface{ctrl: ctrl} - mock.recorder = &MockFunctionInterfaceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. 
-func (m *MockFunctionInterface) EXPECT() *MockFunctionInterfaceMockRecorder { - return m.recorder -} - -// CheckForShepherdAPI mocks base method. -func (m *MockFunctionInterface) CheckForShepherdAPI(ctx context.Context) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CheckForShepherdAPI", ctx) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CheckForShepherdAPI indicates an expected call of CheckForShepherdAPI. -func (mr *MockFunctionInterfaceMockRecorder) CheckForShepherdAPI(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckForShepherdAPI", reflect.TypeOf((*MockFunctionInterface)(nil).CheckForShepherdAPI), ctx) -} - -// CheckPrivileges mocks base method. -func (m *MockFunctionInterface) CheckPrivileges(ctx context.Context) (map[string]any, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CheckPrivileges", ctx) - ret0, _ := ret[0].(map[string]any) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CheckPrivileges indicates an expected call of CheckPrivileges. -func (mr *MockFunctionInterfaceMockRecorder) CheckPrivileges(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckPrivileges", reflect.TypeOf((*MockFunctionInterface)(nil).CheckPrivileges), ctx) -} - -// DeleteRecord mocks base method. -func (m *MockFunctionInterface) DeleteRecord(ctx context.Context, guid string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteRecord", ctx, guid) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// DeleteRecord indicates an expected call of DeleteRecord. -func (mr *MockFunctionInterfaceMockRecorder) DeleteRecord(ctx, guid any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteRecord", reflect.TypeOf((*MockFunctionInterface)(nil).DeleteRecord), ctx, guid) -} - -// Do mocks base method. 
-func (m *MockFunctionInterface) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Do", ctx, req) - ret0, _ := ret[0].(*http.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Do indicates an expected call of Do. -func (mr *MockFunctionInterfaceMockRecorder) Do(ctx, req any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Do", reflect.TypeOf((*MockFunctionInterface)(nil).Do), ctx, req) -} - -// ExportCredential mocks base method. -func (m *MockFunctionInterface) ExportCredential(ctx context.Context, cred *conf.Credential) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ExportCredential", ctx, cred) - ret0, _ := ret[0].(error) - return ret0 -} - -// ExportCredential indicates an expected call of ExportCredential. -func (mr *MockFunctionInterfaceMockRecorder) ExportCredential(ctx, cred any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExportCredential", reflect.TypeOf((*MockFunctionInterface)(nil).ExportCredential), ctx, cred) -} - -// GetDownloadPresignedUrl mocks base method. -func (m *MockFunctionInterface) GetDownloadPresignedUrl(ctx context.Context, guid, protocolText string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDownloadPresignedUrl", ctx, guid, protocolText) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDownloadPresignedUrl indicates an expected call of GetDownloadPresignedUrl. -func (mr *MockFunctionInterfaceMockRecorder) GetDownloadPresignedUrl(ctx, guid, protocolText any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDownloadPresignedUrl", reflect.TypeOf((*MockFunctionInterface)(nil).GetDownloadPresignedUrl), ctx, guid, protocolText) -} - -// New mocks base method. 
-func (m *MockFunctionInterface) New(method, url string) *request.RequestBuilder { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "New", method, url) - ret0, _ := ret[0].(*request.RequestBuilder) - return ret0 -} - -// New indicates an expected call of New. -func (mr *MockFunctionInterfaceMockRecorder) New(method, url any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "New", reflect.TypeOf((*MockFunctionInterface)(nil).New), method, url) -} - -// NewAccessToken mocks base method. -func (m *MockFunctionInterface) NewAccessToken(ctx context.Context) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "NewAccessToken", ctx) - ret0, _ := ret[0].(error) - return ret0 -} - -// NewAccessToken indicates an expected call of NewAccessToken. -func (mr *MockFunctionInterfaceMockRecorder) NewAccessToken(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewAccessToken", reflect.TypeOf((*MockFunctionInterface)(nil).NewAccessToken), ctx) -} diff --git a/mocks/mock_gen3interface.go b/mocks/mock_gen3interface.go index 7524b7c..2d7cd69 100644 --- a/mocks/mock_gen3interface.go +++ b/mocks/mock_gen3interface.go @@ -11,14 +11,16 @@ package mocks import ( context "context" + http "net/http" reflect "reflect" - conf "github.com/calypr/data-client/conf" fence "github.com/calypr/data-client/fence" - indexd "github.com/calypr/data-client/indexd" logs "github.com/calypr/data-client/logs" + request "github.com/calypr/data-client/request" requestor "github.com/calypr/data-client/requestor" sower "github.com/calypr/data-client/sower" + credentials "github.com/calypr/syfon/client/credentials" + drs "github.com/calypr/syfon/client/drs" gomock "go.uber.org/mock/gomock" ) @@ -46,60 +48,61 @@ func (m *MockGen3Interface) EXPECT() *MockGen3InterfaceMockRecorder { return m.recorder } -// ExportCredential mocks base method. 
-func (m *MockGen3Interface) ExportCredential(ctx context.Context, cred *conf.Credential) error { +// Credentials mocks base method. +func (m *MockGen3Interface) Credentials() credentials.Manager { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ExportCredential", ctx, cred) - ret0, _ := ret[0].(error) + ret := m.ctrl.Call(m, "Credentials") + ret0, _ := ret[0].(credentials.Manager) return ret0 } -// ExportCredential indicates an expected call of ExportCredential. -func (mr *MockGen3InterfaceMockRecorder) ExportCredential(ctx, cred any) *gomock.Call { +// Credentials indicates an expected call of Credentials. +func (mr *MockGen3InterfaceMockRecorder) Credentials() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExportCredential", reflect.TypeOf((*MockGen3Interface)(nil).ExportCredential), ctx, cred) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Credentials", reflect.TypeOf((*MockGen3Interface)(nil).Credentials)) } -// Fence mocks base method. -func (m *MockGen3Interface) Fence() fence.FenceInterface { +// DRSClient mocks base method. +func (m *MockGen3Interface) DRSClient() drs.Client { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Fence") - ret0, _ := ret[0].(fence.FenceInterface) + ret := m.ctrl.Call(m, "DRSClient") + ret0, _ := ret[0].(drs.Client) return ret0 } -// Fence indicates an expected call of Fence. -func (mr *MockGen3InterfaceMockRecorder) Fence() *gomock.Call { +// DRSClient indicates an expected call of DRSClient. +func (mr *MockGen3InterfaceMockRecorder) DRSClient() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Fence", reflect.TypeOf((*MockGen3Interface)(nil).Fence)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DRSClient", reflect.TypeOf((*MockGen3Interface)(nil).DRSClient)) } -// GetCredential mocks base method. -func (m *MockGen3Interface) GetCredential() *conf.Credential { +// Do mocks base method. 
+func (m *MockGen3Interface) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetCredential") - ret0, _ := ret[0].(*conf.Credential) - return ret0 + ret := m.ctrl.Call(m, "Do", ctx, req) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 } -// GetCredential indicates an expected call of GetCredential. -func (mr *MockGen3InterfaceMockRecorder) GetCredential() *gomock.Call { +// Do indicates an expected call of Do. +func (mr *MockGen3InterfaceMockRecorder) Do(ctx, req any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCredential", reflect.TypeOf((*MockGen3Interface)(nil).GetCredential)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Do", reflect.TypeOf((*MockGen3Interface)(nil).Do), ctx, req) } -// Indexd mocks base method. -func (m *MockGen3Interface) Indexd() indexd.IndexdInterface { +// FenceClient mocks base method. +func (m *MockGen3Interface) FenceClient() fence.FenceInterface { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Indexd") - ret0, _ := ret[0].(indexd.IndexdInterface) + ret := m.ctrl.Call(m, "FenceClient") + ret0, _ := ret[0].(fence.FenceInterface) return ret0 } -// Indexd indicates an expected call of Indexd. -func (mr *MockGen3InterfaceMockRecorder) Indexd() *gomock.Call { +// FenceClient indicates an expected call of FenceClient. +func (mr *MockGen3InterfaceMockRecorder) FenceClient() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Indexd", reflect.TypeOf((*MockGen3Interface)(nil).Indexd)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FenceClient", reflect.TypeOf((*MockGen3Interface)(nil).FenceClient)) } // Logger mocks base method. 
@@ -116,30 +119,44 @@ func (mr *MockGen3InterfaceMockRecorder) Logger() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Logger", reflect.TypeOf((*MockGen3Interface)(nil).Logger)) } -// Requestor mocks base method. -func (m *MockGen3Interface) Requestor() requestor.RequestorInterface { +// New mocks base method. +func (m *MockGen3Interface) New(method, url string) *request.RequestBuilder { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "New", method, url) + ret0, _ := ret[0].(*request.RequestBuilder) + return ret0 +} + +// New indicates an expected call of New. +func (mr *MockGen3InterfaceMockRecorder) New(method, url any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "New", reflect.TypeOf((*MockGen3Interface)(nil).New), method, url) +} + +// RequestorClient mocks base method. +func (m *MockGen3Interface) RequestorClient() requestor.RequestorInterface { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Requestor") + ret := m.ctrl.Call(m, "RequestorClient") ret0, _ := ret[0].(requestor.RequestorInterface) return ret0 } -// Requestor indicates an expected call of Requestor. -func (mr *MockGen3InterfaceMockRecorder) Requestor() *gomock.Call { +// RequestorClient indicates an expected call of RequestorClient. +func (mr *MockGen3InterfaceMockRecorder) RequestorClient() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Requestor", reflect.TypeOf((*MockGen3Interface)(nil).Requestor)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RequestorClient", reflect.TypeOf((*MockGen3Interface)(nil).RequestorClient)) } -// Sower mocks base method. -func (m *MockGen3Interface) Sower() sower.SowerInterface { +// SowerClient mocks base method. 
+func (m *MockGen3Interface) SowerClient() sower.SowerInterface { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Sower") + ret := m.ctrl.Call(m, "SowerClient") ret0, _ := ret[0].(sower.SowerInterface) return ret0 } -// Sower indicates an expected call of Sower. -func (mr *MockGen3InterfaceMockRecorder) Sower() *gomock.Call { +// SowerClient indicates an expected call of SowerClient. +func (mr *MockGen3InterfaceMockRecorder) SowerClient() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Sower", reflect.TypeOf((*MockGen3Interface)(nil).Sower)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SowerClient", reflect.TypeOf((*MockGen3Interface)(nil).SowerClient)) } diff --git a/mocks/mock_indexd.go b/mocks/mock_indexd.go deleted file mode 100644 index 6a4f217..0000000 --- a/mocks/mock_indexd.go +++ /dev/null @@ -1,251 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/calypr/data-client/indexd (interfaces: IndexdInterface) -// -// Generated by this command: -// -// mockgen -destination=../mocks/mock_indexd.go -package=mocks github.com/calypr/data-client/indexd IndexdInterface -// - -// Package mocks is a generated GoMock package. -package mocks - -import ( - context "context" - http "net/http" - reflect "reflect" - - drs "github.com/calypr/data-client/drs" - indexd "github.com/calypr/data-client/indexd" - request "github.com/calypr/data-client/request" - gomock "go.uber.org/mock/gomock" -) - -// MockIndexdInterface is a mock of IndexdInterface interface. -type MockIndexdInterface struct { - ctrl *gomock.Controller - recorder *MockIndexdInterfaceMockRecorder - isgomock struct{} -} - -// MockIndexdInterfaceMockRecorder is the mock recorder for MockIndexdInterface. -type MockIndexdInterfaceMockRecorder struct { - mock *MockIndexdInterface -} - -// NewMockIndexdInterface creates a new mock instance. 
-func NewMockIndexdInterface(ctrl *gomock.Controller) *MockIndexdInterface { - mock := &MockIndexdInterface{ctrl: ctrl} - mock.recorder = &MockIndexdInterfaceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockIndexdInterface) EXPECT() *MockIndexdInterfaceMockRecorder { - return m.recorder -} - -// DeleteIndexdRecord mocks base method. -func (m *MockIndexdInterface) DeleteIndexdRecord(ctx context.Context, did string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteIndexdRecord", ctx, did) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteIndexdRecord indicates an expected call of DeleteIndexdRecord. -func (mr *MockIndexdInterfaceMockRecorder) DeleteIndexdRecord(ctx, did any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteIndexdRecord", reflect.TypeOf((*MockIndexdInterface)(nil).DeleteIndexdRecord), ctx, did) -} - -// DeleteRecordByHash mocks base method. -func (m *MockIndexdInterface) DeleteRecordByHash(ctx context.Context, hashValue, projectId string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteRecordByHash", ctx, hashValue, projectId) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteRecordByHash indicates an expected call of DeleteRecordByHash. -func (mr *MockIndexdInterfaceMockRecorder) DeleteRecordByHash(ctx, hashValue, projectId any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteRecordByHash", reflect.TypeOf((*MockIndexdInterface)(nil).DeleteRecordByHash), ctx, hashValue, projectId) -} - -// DeleteRecordsByProject mocks base method. 
-func (m *MockIndexdInterface) DeleteRecordsByProject(ctx context.Context, projectId string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteRecordsByProject", ctx, projectId) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteRecordsByProject indicates an expected call of DeleteRecordsByProject. -func (mr *MockIndexdInterfaceMockRecorder) DeleteRecordsByProject(ctx, projectId any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteRecordsByProject", reflect.TypeOf((*MockIndexdInterface)(nil).DeleteRecordsByProject), ctx, projectId) -} - -// Do mocks base method. -func (m *MockIndexdInterface) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Do", ctx, req) - ret0, _ := ret[0].(*http.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Do indicates an expected call of Do. -func (mr *MockIndexdInterfaceMockRecorder) Do(ctx, req any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Do", reflect.TypeOf((*MockIndexdInterface)(nil).Do), ctx, req) -} - -// GetDownloadURL mocks base method. -func (m *MockIndexdInterface) GetDownloadURL(ctx context.Context, did, accessType string) (*drs.AccessURL, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDownloadURL", ctx, did, accessType) - ret0, _ := ret[0].(*drs.AccessURL) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDownloadURL indicates an expected call of GetDownloadURL. -func (mr *MockIndexdInterfaceMockRecorder) GetDownloadURL(ctx, did, accessType any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDownloadURL", reflect.TypeOf((*MockIndexdInterface)(nil).GetDownloadURL), ctx, did, accessType) -} - -// GetObject mocks base method. 
-func (m *MockIndexdInterface) GetObject(ctx context.Context, id string) (*drs.DRSObject, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetObject", ctx, id) - ret0, _ := ret[0].(*drs.DRSObject) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetObject indicates an expected call of GetObject. -func (mr *MockIndexdInterfaceMockRecorder) GetObject(ctx, id any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetObject", reflect.TypeOf((*MockIndexdInterface)(nil).GetObject), ctx, id) -} - -// GetObjectByHash mocks base method. -func (m *MockIndexdInterface) GetObjectByHash(ctx context.Context, hashType, hashValue string) ([]drs.DRSObject, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetObjectByHash", ctx, hashType, hashValue) - ret0, _ := ret[0].([]drs.DRSObject) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetObjectByHash indicates an expected call of GetObjectByHash. -func (mr *MockIndexdInterfaceMockRecorder) GetObjectByHash(ctx, hashType, hashValue any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetObjectByHash", reflect.TypeOf((*MockIndexdInterface)(nil).GetObjectByHash), ctx, hashType, hashValue) -} - -// GetProjectSample mocks base method. -func (m *MockIndexdInterface) GetProjectSample(ctx context.Context, projectId string, limit int) ([]drs.DRSObject, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProjectSample", ctx, projectId, limit) - ret0, _ := ret[0].([]drs.DRSObject) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetProjectSample indicates an expected call of GetProjectSample. 
-func (mr *MockIndexdInterfaceMockRecorder) GetProjectSample(ctx, projectId, limit any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectSample", reflect.TypeOf((*MockIndexdInterface)(nil).GetProjectSample), ctx, projectId, limit) -} - -// ListObjects mocks base method. -func (m *MockIndexdInterface) ListObjects(ctx context.Context) (chan drs.DRSObjectResult, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListObjects", ctx) - ret0, _ := ret[0].(chan drs.DRSObjectResult) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListObjects indicates an expected call of ListObjects. -func (mr *MockIndexdInterfaceMockRecorder) ListObjects(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListObjects", reflect.TypeOf((*MockIndexdInterface)(nil).ListObjects), ctx) -} - -// ListObjectsByProject mocks base method. -func (m *MockIndexdInterface) ListObjectsByProject(ctx context.Context, projectId string) (chan drs.DRSObjectResult, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListObjectsByProject", ctx, projectId) - ret0, _ := ret[0].(chan drs.DRSObjectResult) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListObjectsByProject indicates an expected call of ListObjectsByProject. -func (mr *MockIndexdInterfaceMockRecorder) ListObjectsByProject(ctx, projectId any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListObjectsByProject", reflect.TypeOf((*MockIndexdInterface)(nil).ListObjectsByProject), ctx, projectId) -} - -// New mocks base method. -func (m *MockIndexdInterface) New(method, url string) *request.RequestBuilder { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "New", method, url) - ret0, _ := ret[0].(*request.RequestBuilder) - return ret0 -} - -// New indicates an expected call of New. 
-func (mr *MockIndexdInterfaceMockRecorder) New(method, url any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "New", reflect.TypeOf((*MockIndexdInterface)(nil).New), method, url) -} - -// RegisterIndexdRecord mocks base method. -func (m *MockIndexdInterface) RegisterIndexdRecord(ctx context.Context, indexdObj *indexd.IndexdRecord) (*drs.DRSObject, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RegisterIndexdRecord", ctx, indexdObj) - ret0, _ := ret[0].(*drs.DRSObject) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// RegisterIndexdRecord indicates an expected call of RegisterIndexdRecord. -func (mr *MockIndexdInterfaceMockRecorder) RegisterIndexdRecord(ctx, indexdObj any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegisterIndexdRecord", reflect.TypeOf((*MockIndexdInterface)(nil).RegisterIndexdRecord), ctx, indexdObj) -} - -// RegisterRecord mocks base method. -func (m *MockIndexdInterface) RegisterRecord(ctx context.Context, record *drs.DRSObject) (*drs.DRSObject, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RegisterRecord", ctx, record) - ret0, _ := ret[0].(*drs.DRSObject) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// RegisterRecord indicates an expected call of RegisterRecord. -func (mr *MockIndexdInterfaceMockRecorder) RegisterRecord(ctx, record any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegisterRecord", reflect.TypeOf((*MockIndexdInterface)(nil).RegisterRecord), ctx, record) -} - -// UpdateRecord mocks base method. 
-func (m *MockIndexdInterface) UpdateRecord(ctx context.Context, updateInfo *drs.DRSObject, did string) (*drs.DRSObject, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateRecord", ctx, updateInfo, did) - ret0, _ := ret[0].(*drs.DRSObject) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// UpdateRecord indicates an expected call of UpdateRecord. -func (mr *MockIndexdInterfaceMockRecorder) UpdateRecord(ctx, updateInfo, did any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateRecord", reflect.TypeOf((*MockIndexdInterface)(nil).UpdateRecord), ctx, updateInfo, did) -} diff --git a/mocks/mock_request.go b/mocks/mock_request.go index 8ccd2a0..0d2be55 100644 --- a/mocks/mock_request.go +++ b/mocks/mock_request.go @@ -3,7 +3,7 @@ // // Generated by this command: // -// mockgen -destination=../mocks/mock_request.go -package=mocks github.com/calypr/data-client/request RequestInterface +// mockgen -destination=./mocks/mock_request.go -package=mocks github.com/calypr/data-client/request RequestInterface // // Package mocks is a generated GoMock package. 
@@ -14,7 +14,7 @@ import ( http "net/http" reflect "reflect" - request "github.com/calypr/data-client/request" + request "github.com/calypr/syfon/client/pkg/request" gomock "go.uber.org/mock/gomock" ) diff --git a/request/auth.go b/request/auth.go index cc93723..1379a55 100644 --- a/request/auth.go +++ b/request/auth.go @@ -76,7 +76,9 @@ func (t *AuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { t.mu.RUnlock() // Just add the header and pass it down - req.Header.Set("Authorization", "Bearer "+token) + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } return t.Base.RoundTrip(req) } diff --git a/request/builder.go b/request/builder.go index e12e923..59b9192 100644 --- a/request/builder.go +++ b/request/builder.go @@ -58,3 +58,8 @@ func (ar *RequestBuilder) WithSkipAuth(skip bool) *RequestBuilder { ar.SkipAuth = skip return ar } + +func (ar *RequestBuilder) WithPartSize(size int64) *RequestBuilder { + ar.PartSize = size + return ar +} diff --git a/request/request.go b/request/request.go index 82711ba..102f3f0 100644 --- a/request/request.go +++ b/request/request.go @@ -1,6 +1,6 @@ package request -//go:generate mockgen -destination=../mocks/mock_request.go -package=mocks github.com/calypr/data-client/request RequestInterface +//go:generate mockgen -destination=../mocks/mock_request.go -package=mocks github.com/calypr/syfon/client/pkg/request RequestInterface import ( "context" @@ -41,8 +41,8 @@ func NewRequestInterface( }).DialContext, MaxIdleConns: 100, MaxIdleConnsPerHost: 100, - TLSHandshakeTimeout: 5 * time.Second, - ResponseHeaderTimeout: 10 * time.Second, + TLSHandshakeTimeout: 30 * time.Second, + ResponseHeaderTimeout: 60 * time.Second, } authTransport := &AuthTransport{ diff --git a/s3utils/s3_utils.go b/s3utils/s3_utils.go deleted file mode 100644 index 9e47805..0000000 --- a/s3utils/s3_utils.go +++ /dev/null @@ -1,129 +0,0 @@ -package s3utils - -import ( - "context" - "fmt" - "log/slog" - "strings" - "time" - - 
"github.com/aws/aws-sdk-go-v2/aws" - awsConfig "github.com/aws/aws-sdk-go-v2/config" - "github.com/aws/aws-sdk-go-v2/credentials" - "github.com/aws/aws-sdk-go-v2/service/s3" - "github.com/calypr/data-client/fence" -) - -// ParseS3URL parses a URL like s3://bucket/key and returns (bucket, key, error). -func ParseS3URL(s3url string) (string, string, error) { - s3Prefix := "s3://" - if !strings.HasPrefix(s3url, s3Prefix) { - return "", "", fmt.Errorf("S3 URL requires prefix 's3://': %s", s3url) - } - trimmed := strings.TrimPrefix(s3url, s3Prefix) - slashIndex := strings.Index(trimmed, "/") - if slashIndex == -1 || slashIndex == len(trimmed)-1 { - return "", "", fmt.Errorf("invalid S3 file URL: %s", s3url) - } - return trimmed[:slashIndex], trimmed[slashIndex+1:], nil -} - -// ValidateInputs checks if S3 URL and SHA256 hash are valid. -func ValidateInputs(s3URL, sha256 string) error { - if s3URL == "" { - return fmt.Errorf("S3 URL is required") - } - if sha256 == "" { - return fmt.Errorf("SHA256 hash is required") - } - if !strings.HasPrefix(s3URL, "s3://") { - return fmt.Errorf("invalid S3 URL: must start with s3://") - } - if len(sha256) != 64 { - return fmt.Errorf("invalid SHA256 hash: must be 64 characters") - } - return nil -} - -// FetchS3MetadataWithBucketDetails fetches S3 metadata (size and modified date) for a given S3 URL. 
-func FetchS3MetadataWithBucketDetails( - ctx context.Context, - s3URL string, - awsAccessKey string, - awsSecretKey string, - region string, - endpoint string, - bucketDetails *fence.S3Bucket, - s3Client *s3.Client, - logger *slog.Logger, -) (int64, string, error) { - bucket, key, err := ParseS3URL(s3URL) - if err != nil { - return 0, "", err - } - - if s3Client == nil { - var configOptions []func(*awsConfig.LoadOptions) error - if awsAccessKey != "" && awsSecretKey != "" { - configOptions = append(configOptions, - awsConfig.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(awsAccessKey, awsSecretKey, "")), - ) - } - - regionToUse := "" - if region != "" { - regionToUse = region - } else if bucketDetails != nil && bucketDetails.Region != "" { - regionToUse = bucketDetails.Region - } - if regionToUse != "" { - configOptions = append(configOptions, awsConfig.WithRegion(regionToUse)) - } - - cfg, err := awsConfig.LoadDefaultConfig(ctx, configOptions...) - if err != nil { - return 0, "", fmt.Errorf("unable to load AWS SDK config: %w", err) - } - - endpointToUse := "" - if endpoint != "" { - endpointToUse = endpoint - } else if bucketDetails != nil && bucketDetails.EndpointURL != "" { - endpointToUse = bucketDetails.EndpointURL - } - - s3Client = s3.NewFromConfig(cfg, func(o *s3.Options) { - if endpointToUse != "" { - o.BaseEndpoint = aws.String(endpointToUse) - } - o.UsePathStyle = true - }) - } - - input := &s3.HeadObjectInput{ - Bucket: &bucket, - Key: aws.String(key), - } - - resp, err := s3Client.HeadObject(ctx, input) - if err != nil { - return 0, "", fmt.Errorf("failed to head object: %w", err) - } - - var contentLength int64 - if resp.ContentLength != nil { - contentLength = *resp.ContentLength - } - - var lastModified string - if resp.LastModified != nil { - lastModified = resp.LastModified.Format(time.RFC3339) - } - - return contentLength, lastModified, nil -} - -type S3Meta struct { - Size int64 - LastModified string -} diff --git 
a/tests/download-multiple_test.go b/tests/download-multiple_test.go index 84169b7..ccfc3e5 100644 --- a/tests/download-multiple_test.go +++ b/tests/download-multiple_test.go @@ -3,17 +3,12 @@ package tests import ( "context" "fmt" - "io" - "net/http" - "strings" "testing" - "github.com/calypr/data-client/conf" - "github.com/calypr/data-client/download" - "github.com/calypr/data-client/fence" - "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/mocks" - req "github.com/calypr/data-client/request" + "github.com/calypr/syfon/client/drs" + "github.com/calypr/syfon/client/mocks" + sylogs "github.com/calypr/syfon/client/pkg/logs" + "github.com/calypr/syfon/client/xfer/download" "go.uber.org/mock/gomock" ) @@ -25,46 +20,16 @@ func Test_askGen3ForFileInfo_withShepherd(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - // Expect credential access - mockGen3.EXPECT().GetCredential().Return(&conf.Credential{}).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - // Shepherd is available - mockFence.EXPECT(). - CheckForShepherdAPI(gomock.Any()). - Return(true, nil) - - // Mock successful Shepherd response - testBody := `{ - "record": { - "file_name": "test-file", - "size": 120, - "did": "000000-0000000-0000000-000000" - } - }` - resp := &http.Response{ - StatusCode: 200, - Body: io.NopCloser(strings.NewReader(testBody)), - } + mockIndexd := mocks.NewMockDrsClient(mockCtrl) - // Expect request to Shepherd - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). - DoAndReturn(func(ctx any, rb *req.RequestBuilder) (*http.Response, error) { - if !strings.HasSuffix(rb.Url, "/objects/"+testGUID) { - t.Errorf("Expected request to Shepherd objects endpoint, got %s", rb.Url) - } - return resp, nil - }) + mockIndexd.EXPECT(). + GetObject(gomock.Any(), testGUID). 
+ Return(&drs.DRSObject{Id: testGUID, Name: testFileName, Size: testFileSize}, nil) - // Optional: logger - mockGen3.EXPECT().Logger().Return(logs.NewGen3Logger(nil, "", "test")).AnyTimes() + logger := sylogs.NewGen3Logger(nil, "", "test") skipped := []download.RenamedOrSkippedFileInfo{} - info, err := download.AskGen3ForFileInfo(context.Background(), mockGen3, testGUID, "", "", "original", true, &skipped) + info, err := download.GetFileInfo(context.Background(), mockIndexd, logger, testGUID, "", "", "original", true, &skipped) if err != nil { t.Error(err) } @@ -86,39 +51,17 @@ func Test_askGen3ForFileInfo_withShepherd_shepherdError(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - dummyCred := &conf.Credential{} - mockGen3.EXPECT().GetCredential().Return(dummyCred).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - // 1. Shepherd is available - mockFence.EXPECT(). - CheckForShepherdAPI(gomock.Any()). - Return(true, nil). - Times(1) - - // 2. Shepherd request fails → triggers fallback to Indexd - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). - Return(nil, fmt.Errorf("Shepherd error")). - Times(1) // only the Shepherd call + mockIndexd := mocks.NewMockDrsClient(mockCtrl) - // 3. Fallback: Indexd request also fails - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). + mockIndexd.EXPECT(). + GetObject(gomock.Any(), testGUID). Return(nil, fmt.Errorf("Indexd error")). - Times(1) + Times(2) - // Logger - mockGen3.EXPECT(). - Logger(). - Return(logs.NewGen3Logger(nil, "", "test")). 
- AnyTimes() + logger := sylogs.NewGen3Logger(nil, "", "test") skipped := []download.RenamedOrSkippedFileInfo{} - info, err := download.AskGen3ForFileInfo(context.Background(), mockGen3, testGUID, "", "", "original", true, &skipped) + info, err := download.GetFileInfo(context.Background(), mockIndexd, logger, testGUID, "", "", "original", true, &skipped) if err != nil { t.Fatal(err) } @@ -146,29 +89,16 @@ func Test_askGen3ForFileInfo_noShepherd(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - mockGen3.EXPECT().GetCredential().Return(&conf.Credential{}).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - // No Shepherd - mockFence.EXPECT().CheckForShepherdAPI(gomock.Any()).Return(false, nil) - - // Indexd returns parsed FenceResponse - mockFence.EXPECT(). - ParseFenceURLResponse(gomock.Any()). - Return(fence.FenceResponse{FileName: testFileName, Size: testFileSize}, nil) + mockIndexd := mocks.NewMockDrsClient(mockCtrl) - // Do called for indexd - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). - Return(&http.Response{StatusCode: 200, Body: io.NopCloser(strings.NewReader("{}"))}, nil) + mockIndexd.EXPECT(). + GetObject(gomock.Any(), testGUID). 
+ Return(&drs.DRSObject{Id: testGUID, Name: testFileName, Size: testFileSize}, nil) - mockGen3.EXPECT().Logger().Return(logs.NewGen3Logger(nil, "", "test")).AnyTimes() + logger := sylogs.NewGen3Logger(nil, "", "test") skipped := []download.RenamedOrSkippedFileInfo{} - info, err := download.AskGen3ForFileInfo(context.Background(), mockGen3, testGUID, "", "", "original", true, &skipped) + info, err := download.GetFileInfo(context.Background(), mockIndexd, logger, testGUID, "", "", "original", true, &skipped) if err != nil { t.Fatal(err) } diff --git a/tests/utils_test.go b/tests/utils_test.go index fa330d6..289a74a 100644 --- a/tests/utils_test.go +++ b/tests/utils_test.go @@ -2,110 +2,85 @@ package tests import ( "context" - "fmt" "io" "net/http" "strings" "testing" "github.com/calypr/data-client/common" - "github.com/calypr/data-client/conf" - "github.com/calypr/data-client/download" - "github.com/calypr/data-client/fence" - "github.com/calypr/data-client/mocks" - "github.com/calypr/data-client/request" - "github.com/calypr/data-client/upload" - "go.uber.org/mock/gomock" + sylogs "github.com/calypr/syfon/client/pkg/logs" + "github.com/calypr/syfon/client/xfer/download" + "github.com/calypr/syfon/client/xfer/upload" ) -func TestGetDownloadResponse_withShepherd(t *testing.T) { - testGUID := "000000-0000000-0000000-000000" - testFilename := "test-file" - mockDownloadURL := "https://example.com/example.pfb" - - mockCtrl := gomock.NewController(t) - defer mockCtrl.Finish() - - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - // Mock credential - mockGen3.EXPECT().GetCredential().Return(&conf.Credential{}).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - mockFence.EXPECT(). - GetDownloadPresignedUrl(gomock.Any(), testGUID, ""). - Return(mockDownloadURL, nil) - - mockFence.EXPECT(). - New(http.MethodGet, mockDownloadURL). 
- Return(&request.RequestBuilder{ - Method: http.MethodGet, - Url: mockDownloadURL, - Headers: make(map[string]string), - }). - AnyTimes() +type fakeDownloader struct { + resolveFn func(ctx context.Context, guid, accessID string) (string, error) + downloadFn func(ctx context.Context, fdr *common.FileDownloadResponseObject) (*http.Response, error) +} - // Mock successful response from the presigned URL - mockResp := &http.Response{ - StatusCode: 200, - Body: io.NopCloser(strings.NewReader("content")), - } - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). - Return(mockResp, nil) +func (f *fakeDownloader) Name() string { return "fake-downloader" } +func (f *fakeDownloader) Logger() *sylogs.Gen3Logger { return sylogs.NewGen3Logger(nil, "", "test") } +func (f *fakeDownloader) ResolveDownloadURL(ctx context.Context, guid, accessID string) (string, error) { + return f.resolveFn(ctx, guid, accessID) +} +func (f *fakeDownloader) Download(ctx context.Context, fdr *common.FileDownloadResponseObject) (*http.Response, error) { + return f.downloadFn(ctx, fdr) +} - mockFDRObj := common.FileDownloadResponseObject{ - Filename: testFilename, - GUID: testGUID, - Range: 0, - } +type fakeUploader struct { + resolveFn func(ctx context.Context, guid, filename string, metadata common.FileMetadata, bucket string) (string, error) +} - err := download.GetDownloadResponse(context.Background(), mockGen3, &mockFDRObj, "") - if err != nil { - t.Fatalf("Unexpected error: %v", err) - } +func (f *fakeUploader) Name() string { return "fake-uploader" } +func (f *fakeUploader) Logger() *sylogs.Gen3Logger { return sylogs.NewGen3Logger(nil, "", "test") } - if mockFDRObj.PresignedURL != mockDownloadURL { - t.Errorf("Wanted URL %s, got %s", mockDownloadURL, mockFDRObj.PresignedURL) - } +func (f *fakeUploader) ResolveUploadURL(ctx context.Context, guid, filename string, metadata common.FileMetadata, bucket string) (string, error) { + return f.resolveFn(ctx, guid, filename, metadata, bucket) +} +func 
(f *fakeUploader) ResolveUploadURLs(ctx context.Context, requests []common.UploadURLResolveRequest) ([]common.UploadURLResolveResponse, error) { + return nil, nil +} +func (f *fakeUploader) InitMultipartUpload(ctx context.Context, guid string, filename string, bucket string) (*common.MultipartUploadInit, error) { + return nil, nil +} +func (f *fakeUploader) GetMultipartUploadURL(ctx context.Context, key string, uploadID string, partNumber int32, bucket string) (string, error) { + return "", nil +} +func (f *fakeUploader) CompleteMultipartUpload(ctx context.Context, key string, uploadID string, parts []common.MultipartUploadPart, bucket string) error { + return nil +} +func (f *fakeUploader) Upload(ctx context.Context, url string, body io.Reader, size int64) error { + return nil +} +func (f *fakeUploader) UploadPart(ctx context.Context, url string, body io.Reader, size int64) (string, error) { + return "", nil +} +func (f *fakeUploader) DeleteFile(ctx context.Context, guid string) (string, error) { + return "", nil } -func TestGetDownloadResponse_noShepherd(t *testing.T) { +func TestGetDownloadResponse(t *testing.T) { testGUID := "000000-0000000-0000000-000000" testFilename := "test-file" mockDownloadURL := "https://example.com/example.pfb" - mockCtrl := gomock.NewController(t) - defer mockCtrl.Finish() - - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - mockGen3.EXPECT().GetCredential().Return(&conf.Credential{}).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - mockFence.EXPECT(). - GetDownloadPresignedUrl(gomock.Any(), testGUID, ""). - Return(mockDownloadURL, nil) - - mockFence.EXPECT(). - New(http.MethodGet, mockDownloadURL). - Return(&request.RequestBuilder{ - Method: http.MethodGet, - Url: mockDownloadURL, - Headers: make(map[string]string), - }). 
- AnyTimes() - - // Mock successful response - mockResp := &http.Response{ - StatusCode: 200, - Body: io.NopCloser(strings.NewReader("content")), + bk := &fakeDownloader{ + resolveFn: func(ctx context.Context, guid, accessID string) (string, error) { + if guid != testGUID { + t.Fatalf("unexpected guid: %s", guid) + } + return mockDownloadURL, nil + }, + downloadFn: func(ctx context.Context, fdr *common.FileDownloadResponseObject) (*http.Response, error) { + if fdr.PresignedURL != mockDownloadURL { + t.Fatalf("expected URL %s, got %s", mockDownloadURL, fdr.PresignedURL) + } + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(strings.NewReader("content")), + }, nil + }, } - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). - Return(mockResp, nil) mockFDRObj := common.FileDownloadResponseObject{ Filename: testFilename, @@ -113,103 +88,40 @@ func TestGetDownloadResponse_noShepherd(t *testing.T) { Range: 0, } - err := download.GetDownloadResponse(context.Background(), mockGen3, &mockFDRObj, "") + err := download.GetDownloadResponse(context.Background(), bk, &mockFDRObj, "") if err != nil { - t.Fatalf("Unexpected error: %v", err) + t.Fatalf("unexpected error: %v", err) } - if mockFDRObj.PresignedURL != mockDownloadURL { - t.Errorf("Wanted URL %s, got %s", mockDownloadURL, mockFDRObj.PresignedURL) - } -} - -func TestGeneratePresignedUploadURL_noShepherd(t *testing.T) { - testFilename := "test-file" - testBucketname := "test-bucket" - mockPresignedURL := "https://example.com/example.pfb" - mockGUID := "000000-0000000-0000000-000000" - - mockCtrl := gomock.NewController(t) - defer mockCtrl.Finish() - - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - mockGen3.EXPECT().GetCredential().Return(&conf.Credential{}).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - // No Shepherd - mockFence.EXPECT(). - CheckForShepherdAPI(gomock.Any()). - Return(false, nil) - - mockFence.EXPECT(). 
- InitUpload(gomock.Any(), testFilename, testBucketname, ""). - Return(fence.FenceResponse{ - URL: mockPresignedURL, - GUID: mockGUID, - }, nil) - - resp, err := upload.GeneratePresignedUploadURL(context.Background(), mockGen3, testFilename, common.FileMetadata{}, testBucketname) - if err != nil { - t.Fatalf("Unexpected error: %v", err) - } - - if resp.URL != mockPresignedURL { - t.Errorf("Wanted URL %s, got %s", mockPresignedURL, resp.URL) - } - if resp.GUID != mockGUID { - t.Errorf("Wanted GUID %s, got %s", mockGUID, resp.GUID) + t.Errorf("wanted URL %s, got %s", mockDownloadURL, mockFDRObj.PresignedURL) } } -func TestGeneratePresignedUploadURL_withShepherd(t *testing.T) { +func TestGeneratePresignedUploadURL(t *testing.T) { testFilename := "test-file" - testBucketname := "test-bucket" - mockPresignedURL := "https://example.com/example.pfb" - mockGUID := "000000-0000000-0000000-000000" - - testMetadata := common.FileMetadata{ - Aliases: []string{"test-alias-1", "test-alias-2"}, - Authz: []string{"authz-resource-1", "authz-resource-2"}, - Metadata: map[string]any{"arbitrary": "metadata"}, - } - - mockCtrl := gomock.NewController(t) - defer mockCtrl.Finish() - - mockGen3 := mocks.NewMockGen3Interface(mockCtrl) - mockFence := mocks.NewMockFenceInterface(mockCtrl) - - mockGen3.EXPECT().GetCredential().Return(&conf.Credential{AccessToken: "token"}).AnyTimes() - mockGen3.EXPECT().Fence().Return(mockFence).AnyTimes() - - // Shepherd is deployed - mockFence.EXPECT(). - CheckForShepherdAPI(gomock.Any()). - Return(true, nil) - - // Shepherd returns GUID and upload_url - shepherdResp := &http.Response{ - StatusCode: 201, - Body: io.NopCloser(strings.NewReader(fmt.Sprintf( - `{"guid": "%s", "upload_url": "%s"}`, mockGUID, mockPresignedURL, - ))), - } - - mockFence.EXPECT(). - Do(gomock.Any(), gomock.Any()). 
- Return(shepherdResp, nil) - - respObj, err := upload.GeneratePresignedUploadURL(context.Background(), mockGen3, testFilename, testMetadata, testBucketname) + testBucket := "test-bucket" + mockUploadURL := "https://example.com/upload" + + bk := &fakeUploader{ + resolveFn: func(ctx context.Context, guid, filename string, metadata common.FileMetadata, bucket string) (string, error) { + if filename != testFilename { + t.Fatalf("unexpected filename: %s", filename) + } + if bucket != testBucket { + t.Fatalf("unexpected bucket: %s", bucket) + } + return mockUploadURL, nil + }, + } + + resp, err := upload.GeneratePresignedUploadURL(context.Background(), bk, testFilename, common.FileMetadata{}, testBucket) if err != nil { - t.Fatalf("Unexpected error: %v", err) + t.Fatalf("unexpected error: %v", err) } - - if respObj.URL != mockPresignedURL { - t.Errorf("Wanted URL %s, got %s", mockPresignedURL, respObj.URL) + if resp.URL != mockUploadURL { + t.Errorf("wanted URL %s, got %s", mockUploadURL, resp.URL) } - if respObj.GUID != mockGUID { - t.Errorf("Wanted GUID %s, got %s", mockGUID, respObj.GUID) + if resp.GUID != "" { + t.Errorf("wanted empty GUID, got %s", resp.GUID) } } diff --git a/upload/batch.go b/upload/batch.go deleted file mode 100644 index 41aea65..0000000 --- a/upload/batch.go +++ /dev/null @@ -1,161 +0,0 @@ -package upload - -import ( - "context" - "fmt" - "io" - "net/http" - "os" - "sync" - - "github.com/calypr/data-client/common" - client "github.com/calypr/data-client/g3client" - "github.com/calypr/data-client/request" - "github.com/vbauerster/mpb/v8" - "github.com/vbauerster/mpb/v8/decor" -) - -func InitBatchUploadChannels(numParallel int, inputSliceLen int) (int, chan *http.Response, chan error, []common.FileUploadRequestObject) { - workers := numParallel - if workers < 1 || workers > inputSliceLen { - workers = inputSliceLen - } - if workers < 1 { - workers = 1 - } - - respCh := make(chan *http.Response, inputSliceLen) - errCh := make(chan error, 
inputSliceLen) - batchSlice := make([]common.FileUploadRequestObject, 0, workers) - - return workers, respCh, errCh, batchSlice -} - -func BatchUpload( - ctx context.Context, - g3i client.Gen3Interface, - furObjects []common.FileUploadRequestObject, - workers int, - respCh chan *http.Response, - errCh chan error, - bucketName string, -) { - if len(furObjects) == 0 { - return - } - - // Ensure bucket is set - for i := range furObjects { - if furObjects[i].Bucket == "" { - furObjects[i].Bucket = bucketName - } - } - - progress := mpb.New(mpb.WithOutput(os.Stdout)) - - workCh := make(chan common.FileUploadRequestObject, len(furObjects)) - - var wg sync.WaitGroup - for i := 0; i < workers; i++ { - wg.Add(1) - go func() { - defer wg.Done() - for fur := range workCh { - // --- Ensure presigned URL --- - if fur.PresignedURL == "" { - resp, err := GeneratePresignedUploadURL(ctx, g3i, fur.ObjectKey, fur.FileMetadata, fur.Bucket) - if err != nil { - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, "", 0, false) - errCh <- err - continue - } - fur.PresignedURL = resp.URL - fur.GUID = resp.GUID - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, resp.GUID, 0, false) // update log - } - - // --- Open file --- - file, err := os.Open(fur.SourcePath) - if err != nil { - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, fur.GUID, 0, false) - errCh <- fmt.Errorf("file open error: %w", err) - continue - } - - fi, err := file.Stat() - if err != nil { - file.Close() - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, fur.GUID, 0, false) - errCh <- fmt.Errorf("file stat error: %w", err) - continue - } - - if fi.Size() > common.FileSizeLimit { - file.Close() - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, fur.GUID, 0, false) - errCh <- fmt.Errorf("file size exceeds limit: %s", fur.ObjectKey) - continue - } - - // --- Progress bar --- - bar := progress.AddBar(fi.Size(), - 
mpb.PrependDecorators( - decor.Name(fur.ObjectKey+" "), - decor.CountersKibiByte("% .1f / % .1f"), - ), - mpb.AppendDecorators( - decor.Percentage(), - decor.AverageSpeed(decor.SizeB1024(0), " % .1f"), - ), - ) - - proxyReader := bar.ProxyReader(file) - - // --- Upload using DoAuthenticatedRequest (no manual http.Request!) --- - resp, err := g3i.Fence().Do( - ctx, - &request.RequestBuilder{ - Method: http.MethodPut, - Url: fur.PresignedURL, - Body: proxyReader, - }, - ) - - // Cleanup - file.Close() - bar.Abort(false) - - if err != nil { - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, fur.GUID, 0, false) - errCh <- err - continue - } - - if resp.StatusCode != http.StatusOK { - bodyBytes, _ := io.ReadAll(resp.Body) - resp.Body.Close() - errMsg := fmt.Errorf("upload failed with status %d: %s", resp.StatusCode, string(bodyBytes)) - g3i.Logger().Failed(fur.SourcePath, fur.ObjectKey, fur.FileMetadata, fur.GUID, 0, false) - errCh <- errMsg - continue - } - - resp.Body.Close() - - // Success - respCh <- resp - g3i.Logger().DeleteFromFailedLog(fur.SourcePath) - g3i.Logger().Succeeded(fur.SourcePath, fur.GUID) - g3i.Logger().Scoreboard().IncrementSB(0) - } - }() - } - - for _, obj := range furObjects { - workCh <- obj - } - close(workCh) - - wg.Wait() - progress.Wait() -} diff --git a/upload/multipart.go b/upload/multipart.go deleted file mode 100644 index b0e3cce..0000000 --- a/upload/multipart.go +++ /dev/null @@ -1,232 +0,0 @@ -package upload - -import ( - "context" - "errors" - "fmt" - "io" - "net/http" - "os" - "sort" - "strings" - "sync" - "sync/atomic" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/fence" - client "github.com/calypr/data-client/g3client" - "github.com/vbauerster/mpb/v8" - "github.com/vbauerster/mpb/v8/decor" -) - -func MultipartUpload(ctx context.Context, g3 client.Gen3Interface, req common.FileUploadRequestObject, file *os.File, showProgress bool) error { - g3.Logger().InfoContext(ctx, "File Upload 
Request", "request", req) - - stat, err := file.Stat() - if err != nil { - return fmt.Errorf("cannot stat file: %w", err) - } - - fileSize := stat.Size() - if fileSize == 0 { - return fmt.Errorf("file is empty: %s", req.ObjectKey) - } - - var p *mpb.Progress - var bar *mpb.Bar - if showProgress { - p = mpb.New(mpb.WithOutput(os.Stdout)) - bar = p.AddBar(fileSize, - mpb.PrependDecorators( - decor.Name(req.ObjectKey+" "), - decor.CountersKibiByte("%.1f / %.1f"), - ), - mpb.AppendDecorators( - decor.Percentage(), - decor.AverageSpeed(decor.SizeB1024(0), " % .1f"), - ), - ) - } - - // 1. Initialize multipart upload - uploadID, finalGUID, err := initMultipartUpload(ctx, g3, req, req.Bucket) - if err != nil { - return fmt.Errorf("failed to initiate multipart upload: %w", err) - } - - // 2. Construct the S3 Key correctly - // Ensure finalGUID is not empty to avoid a leading slash - key := fmt.Sprintf("%s/%s", finalGUID, req.ObjectKey) - g3.Logger().InfoContext(ctx, "Initialized Upload", "id", uploadID, "key", key) - - chunkSize := OptimalChunkSize(fileSize) - - numChunks := int((fileSize + chunkSize - 1) / chunkSize) - - chunks := make(chan int, numChunks) - for i := 1; i <= numChunks; i++ { - chunks <- i - } - close(chunks) - - var ( - wg sync.WaitGroup - mu sync.Mutex - parts []fence.MultipartPart - uploadErrors []error - totalBytes int64 // Atomic counter for monotonically increasing BytesSoFar - ) - - progressCallback := common.GetProgress(ctx) - oid := common.GetOid(ctx) - if oid == "" { - oid = resolveUploadOID(req) - } - - // 3. Worker logic - worker := func() { - defer wg.Done() - - for partNum := range chunks { - - offset := int64(partNum-1) * chunkSize - size := chunkSize - if offset+size > fileSize { - size = fileSize - offset - } - - // SectionReader implements io.Reader, io.ReaderAt, and io.Seeker - // It allows each worker to read its own segment without a shared buffer. 
- section := io.NewSectionReader(file, offset, size) - - url, err := generateMultipartPresignedURL(ctx, g3, key, uploadID, partNum, req.Bucket) - if err != nil { - mu.Lock() - uploadErrors = append(uploadErrors, fmt.Errorf("URL generation failed part %d: %w", partNum, err)) - mu.Unlock() - return - } - - // Perform the upload using the section directly - etag, err := uploadPart(ctx, url, section, size) - if err != nil { - mu.Lock() - uploadErrors = append(uploadErrors, fmt.Errorf("upload failed part %d: %w", partNum, err)) - mu.Unlock() - return - } - - mu.Lock() - parts = append(parts, fence.MultipartPart{ - PartNumber: partNum, - ETag: etag, - }) - if bar != nil { - bar.IncrInt64(size) - } - if progressCallback != nil { - currentTotal := atomic.AddInt64(&totalBytes, size) - _ = progressCallback(common.ProgressEvent{ - Event: "progress", - Oid: oid, - BytesSinceLast: size, - BytesSoFar: currentTotal, - }) - } - mu.Unlock() - } - } - - // Launch workers - for range common.MaxConcurrentUploads { - wg.Add(1) - go worker() - } - wg.Wait() - - if p != nil { - p.Wait() - } - - if len(uploadErrors) > 0 { - return fmt.Errorf("multipart upload failed with %d errors: %v", len(uploadErrors), uploadErrors) - } - - // 5. 
Finalize the upload - sort.Slice(parts, func(i, j int) bool { - return parts[i].PartNumber < parts[j].PartNumber - }) - - if err := CompleteMultipartUpload(ctx, g3, key, uploadID, parts, req.Bucket); err != nil { - return fmt.Errorf("failed to complete multipart upload: %w", err) - } - - g3.Logger().InfoContext(ctx, "Successfully uploaded", "file", req.ObjectKey, "key", key) - g3.Logger().SucceededContext(ctx, req.SourcePath, req.GUID) - return nil -} - -func initMultipartUpload(ctx context.Context, g3 client.Gen3Interface, furObject common.FileUploadRequestObject, bucketName string) (string, string, error) { - msg, err := g3.Fence().InitMultipartUpload(ctx, furObject.ObjectKey, bucketName, furObject.GUID) - - if err != nil { - if strings.Contains(err.Error(), "404") { - return "", "", errors.New(err.Error() + "\nPlease check to ensure FENCE version is at 2.8.0 or beyond") - } - return "", "", errors.New("Error has occurred during multipart upload initialization, detailed error message: " + err.Error()) - } - - if msg.UploadID == "" || msg.GUID == "" { - return "", "", errors.New("unknown error has occurred during multipart upload initialization. Please check logs from Gen3 services") - } - return msg.UploadID, msg.GUID, nil -} - -func generateMultipartPresignedURL(ctx context.Context, g3 client.Gen3Interface, key string, uploadID string, partNumber int, bucketName string) (string, error) { - url, err := g3.Fence().GenerateMultipartPresignedURL(ctx, key, uploadID, partNumber, bucketName) - if err != nil { - return "", errors.New("Error has occurred during multipart upload presigned url generation, detailed error message: " + err.Error()) - } - - if url == "" { - return "", errors.New("unknown error has occurred during multipart upload presigned url generation. 
Please check logs from Gen3 services") - } - return url, nil -} - -func CompleteMultipartUpload(ctx context.Context, g3 client.Gen3Interface, key string, uploadID string, parts []fence.MultipartPart, bucketName string) error { - err := g3.Fence().CompleteMultipartUpload(ctx, key, uploadID, parts, bucketName) - if err != nil { - return errors.New("Error has occurred during completing multipart upload, detailed error message: " + err.Error()) - } - return nil -} - -// uploadPart now returns the ETag and error directly. -// It accepts a Context to allow for cancellation (e.g., if another part fails). -func uploadPart(ctx context.Context, url string, data io.Reader, partSize int64) (string, error) { - req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, data) - if err != nil { - return "", err - } - - req.ContentLength = partSize - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return "", err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024)) - return "", fmt.Errorf("upload failed (%d): %s", resp.StatusCode, body) - } - - etag := resp.Header.Get("ETag") - if etag == "" { - return "", errors.New("no ETag returned") - } - - return strings.Trim(etag, `"`), nil -} diff --git a/upload/multipart_test.go b/upload/multipart_test.go deleted file mode 100644 index b7ded8d..0000000 --- a/upload/multipart_test.go +++ /dev/null @@ -1,188 +0,0 @@ -package upload - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/http/httptest" - "net/url" - "os" - "strings" - "sync" - "testing" - - "github.com/calypr/data-client/common" - "github.com/calypr/data-client/conf" - "github.com/calypr/data-client/fence" - "github.com/calypr/data-client/indexd" - "github.com/calypr/data-client/logs" - "github.com/calypr/data-client/request" - "github.com/calypr/data-client/requestor" - "github.com/calypr/data-client/sower" -) - -type fakeGen3Upload struct { - cred 
*conf.Credential - logger *logs.Gen3Logger - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeGen3Upload) GetCredential() *conf.Credential { return f.cred } -func (f *fakeGen3Upload) Logger() *logs.Gen3Logger { return f.logger } -func (f *fakeGen3Upload) ExportCredential(ctx context.Context, cred *conf.Credential) error { - return nil -} -func (f *fakeGen3Upload) Fence() fence.FenceInterface { return &fakeFence{doFunc: f.doFunc} } -func (f *fakeGen3Upload) Indexd() indexd.IndexdInterface { return &fakeIndexd{doFunc: f.doFunc} } -func (f *fakeGen3Upload) Sower() sower.SowerInterface { return nil } -func (f *fakeGen3Upload) Requestor() requestor.RequestorInterface { return nil } - -type fakeFence struct { - fence.FenceInterface - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeFence) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - return f.doFunc(ctx, req) -} -func (f *fakeFence) InitMultipartUpload(ctx context.Context, filename string, bucket string, guid string) (fence.FenceResponse, error) { - resp, err := f.Do(ctx, &request.RequestBuilder{Url: common.FenceDataMultipartInitEndpoint}) - if err != nil { - return fence.FenceResponse{}, err - } - return f.ParseFenceURLResponse(resp) -} -func (f *fakeFence) GenerateMultipartPresignedURL(ctx context.Context, key string, uploadID string, partNumber int, bucket string) (string, error) { - resp, err := f.Do(ctx, &request.RequestBuilder{Url: common.FenceDataMultipartUploadEndpoint}) - if err != nil { - return "", err - } - msg, err := f.ParseFenceURLResponse(resp) - return msg.PresignedURL, err -} -func (f *fakeFence) CompleteMultipartUpload(ctx context.Context, key string, uploadID string, parts []fence.MultipartPart, bucket string) error { - _, err := f.Do(ctx, &request.RequestBuilder{Url: common.FenceDataMultipartCompleteEndpoint}) - return err -} -func (f *fakeFence) 
ParseFenceURLResponse(resp *http.Response) (fence.FenceResponse, error) { - var msg fence.FenceResponse - if resp != nil && resp.Body != nil { - json.NewDecoder(resp.Body).Decode(&msg) - } - return msg, nil -} - -type fakeIndexd struct { - indexd.IndexdInterface - doFunc func(context.Context, *request.RequestBuilder) (*http.Response, error) -} - -func (f *fakeIndexd) Do(ctx context.Context, req *request.RequestBuilder) (*http.Response, error) { - return f.doFunc(ctx, req) -} - -func TestMultipartUploadProgressIntegration(t *testing.T) { - ctx := context.Background() - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPut { - w.WriteHeader(http.StatusMethodNotAllowed) - return - } - _, _ = io.Copy(io.Discard, r.Body) - _ = r.Body.Close() - w.Header().Set("ETag", "etag-123") - w.WriteHeader(http.StatusOK) - })) - defer server.Close() - - file, err := os.CreateTemp(t.TempDir(), "multipart-*.bin") - if err != nil { - t.Fatalf("create temp file: %v", err) - } - defer file.Close() - - fileSize := int64(101 * common.MB) - if err := file.Truncate(fileSize); err != nil { - t.Fatalf("truncate file: %v", err) - } - if _, err := file.Seek(0, io.SeekStart); err != nil { - t.Fatalf("seek file: %v", err) - } - - var ( - events []common.ProgressEvent - mu sync.Mutex - ) - progress := func(event common.ProgressEvent) error { - mu.Lock() - defer mu.Unlock() - events = append(events, event) - return nil - } - - logger := logs.NewGen3Logger(nil, "", "") - fake := &fakeGen3Upload{ - cred: &conf.Credential{ - APIEndpoint: "https://example.com", - AccessToken: "token", - }, - logger: logger, - doFunc: func(_ context.Context, req *request.RequestBuilder) (*http.Response, error) { - switch { - case strings.Contains(req.Url, common.FenceDataMultipartInitEndpoint): - return newJSONResponse(req.Url, `{"uploadId":"upload-123","guid":"guid-123"}`), nil - case strings.Contains(req.Url, 
common.FenceDataMultipartUploadEndpoint): - return newJSONResponse(req.Url, fmt.Sprintf(`{"presigned_url":"%s"}`, server.URL)), nil - case strings.Contains(req.Url, common.FenceDataMultipartCompleteEndpoint): - return newJSONResponse(req.Url, `{}`), nil - default: - return nil, fmt.Errorf("unexpected request url: %s", req.Url) - } - }, - } - - requestObject := common.FileUploadRequestObject{ - SourcePath: file.Name(), - ObjectKey: "multipart.bin", - GUID: "guid-123", - Bucket: "bucket", - } - - ctx = common.WithProgress(ctx, progress) - ctx = common.WithOid(ctx, "guid-123") - - if err := MultipartUpload(ctx, fake, requestObject, file, false); err != nil { - t.Fatalf("multipart upload failed: %v", err) - } - - mu.Lock() - defer mu.Unlock() - if len(events) == 0 { - t.Fatal("expected progress events") - } - for i := 1; i < len(events); i++ { - if events[i].BytesSoFar < events[i-1].BytesSoFar { - t.Fatalf("bytesSoFar not monotonic: %d then %d", events[i-1].BytesSoFar, events[i].BytesSoFar) - } - } - last := events[len(events)-1] - if last.BytesSoFar != fileSize { - t.Fatalf("expected final bytesSoFar %d, got %d", fileSize, last.BytesSoFar) - } -} - -func newJSONResponse(rawURL, body string) *http.Response { - parsedURL, err := url.Parse(rawURL) - if err != nil { - parsedURL = &url.URL{} - } - return &http.Response{ - StatusCode: http.StatusOK, - Body: io.NopCloser(bytes.NewBufferString(body)), - Request: &http.Request{URL: parsedURL}, - Header: make(http.Header), - } -} diff --git a/upload/progress_reader.go b/upload/progress_reader.go deleted file mode 100644 index da12f7d..0000000 --- a/upload/progress_reader.go +++ /dev/null @@ -1,81 +0,0 @@ -package upload - -import ( - "fmt" - "io" - - "github.com/calypr/data-client/common" -) - -type progressReader struct { - reader io.Reader - onProgress common.ProgressCallback - hash string - total int64 - bytesSoFar int64 - bytesSinceReport int64 -} - -func newProgressReader(reader io.Reader, onProgress 
common.ProgressCallback, hash string, total int64) *progressReader { - return &progressReader{ - reader: reader, - onProgress: onProgress, - hash: hash, - total: total, - } -} - -func resolveUploadOID(req common.FileUploadRequestObject) string { - if req.ObjectKey != "" { - return req.ObjectKey - } - return req.GUID -} - -func (pr *progressReader) Read(p []byte) (int, error) { - n, err := pr.reader.Read(p) - if n > 0 && pr.onProgress != nil { - delta := int64(n) - pr.bytesSoFar += delta - pr.bytesSinceReport += delta - - if pr.bytesSinceReport >= common.OnProgressThreshold { - if progressErr := pr.onProgress(common.ProgressEvent{ - Event: "progress", - Oid: pr.hash, - BytesSoFar: pr.bytesSoFar, - BytesSinceLast: pr.bytesSinceReport, - }); progressErr != nil { - return n, progressErr - } - pr.bytesSinceReport = 0 - } - } - return n, err -} - -func (pr *progressReader) Finalize() error { - if pr.onProgress != nil && pr.bytesSinceReport > 0 { - _ = pr.onProgress(common.ProgressEvent{ - Event: "progress", - Oid: pr.hash, - BytesSoFar: pr.bytesSoFar, - BytesSinceLast: pr.bytesSinceReport, - }) - pr.bytesSinceReport = 0 - } - if pr.total > 0 && pr.bytesSoFar < pr.total { - delta := pr.total - pr.bytesSoFar - pr.bytesSoFar = pr.total - if pr.onProgress != nil { - _ = pr.onProgress(common.ProgressEvent{ - Event: "progress", - Oid: pr.hash, - BytesSoFar: pr.bytesSoFar, - BytesSinceLast: delta, - }) - } - return fmt.Errorf("upload incomplete: %d/%d bytes", pr.bytesSoFar-delta, pr.total) - } - return nil -} diff --git a/upload/progress_reader_test.go b/upload/progress_reader_test.go deleted file mode 100644 index 789afa0..0000000 --- a/upload/progress_reader_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package upload - -import ( - "bytes" - "io" - "testing" - - "github.com/calypr/data-client/common" -) - -func TestProgressReaderFinalizes(t *testing.T) { - payload := bytes.Repeat([]byte("a"), 16) - var events []common.ProgressEvent - - reader := 
newProgressReader(bytes.NewReader(payload), func(event common.ProgressEvent) error { - events = append(events, event) - return nil - }, "oid-123", int64(len(payload))) - - if _, err := io.Copy(io.Discard, reader); err != nil { - t.Fatalf("copy failed: %v", err) - } - if err := reader.Finalize(); err != nil { - t.Fatalf("finalize failed: %v", err) - } - - if len(events) == 0 { - t.Fatal("expected progress events, got none") - } - - var total int64 - for _, event := range events { - if event.Event != "progress" { - t.Fatalf("unexpected event type: %s", event.Event) - } - total += event.BytesSinceLast - } - - last := events[len(events)-1] - if last.BytesSoFar != int64(len(payload)) { - t.Fatalf("expected final bytesSoFar %d, got %d", len(payload), last.BytesSoFar) - } - if total != int64(len(payload)) { - t.Fatalf("expected bytesSinceLast sum %d, got %d", len(payload), total) - } -} diff --git a/upload/request.go b/upload/request.go deleted file mode 100644 index 6036fed..0000000 --- a/upload/request.go +++ /dev/null @@ -1,86 +0,0 @@ -package upload - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/http" - "os" - "strings" - - "github.com/calypr/data-client/common" - client "github.com/calypr/data-client/g3client" - req "github.com/calypr/data-client/request" - "github.com/vbauerster/mpb/v8" -) - -// GeneratePresignedURL handles both Shepherd and Fence fallback -func GeneratePresignedUploadURL(ctx context.Context, g3 client.Gen3Interface, filename string, metadata common.FileMetadata, bucket string) (*PresignedURLResponse, error) { - hasShepherd, err := g3.Fence().CheckForShepherdAPI(ctx) - if err != nil || !hasShepherd { - msg, err := g3.Fence().InitUpload(ctx, filename, bucket, "") - if err != nil { - return nil, err - } - return &PresignedURLResponse{URL: msg.URL, GUID: msg.GUID}, nil - } - - shepherdPayload := ShepherdInitRequestObject{ - Filename: filename, - Authz: ShepherdAuthz{ - Version: "0", ResourcePaths: metadata.Authz, - }, - Aliases: 
metadata.Aliases, - Metadata: metadata.Metadata, - } - - reader, err := common.ToJSONReader(shepherdPayload) - if err != nil { - return nil, err - } - - cred := g3.GetCredential() - r, err := g3.Fence().Do( - ctx, - &req.RequestBuilder{ - Url: cred.APIEndpoint + common.ShepherdEndpoint + "/objects", - Method: http.MethodPost, - Body: reader, - Token: cred.AccessToken, - }) - if err != nil || r.StatusCode != http.StatusCreated { - return nil, fmt.Errorf("shepherd upload init failed") - } - - var res PresignedURLResponse - if err := json.NewDecoder(r.Body).Decode(&res); err != nil { - return nil, err - } - return &res, nil -} - -// GenerateUploadRequest helps preparing the HTTP request for upload and the progress bar for single part upload -func generateUploadRequest(ctx context.Context, g3 client.Gen3Interface, furObject common.FileUploadRequestObject, file *os.File, progress *mpb.Progress) (common.FileUploadRequestObject, error) { - if furObject.PresignedURL == "" { - msg, err := g3.Fence().GetUploadPresignedUrl(ctx, furObject.GUID, furObject.ObjectKey, furObject.Bucket) - if err != nil && !strings.Contains(err.Error(), "No GUID found") { - return furObject, fmt.Errorf("Upload error: %w", err) - } - if msg.URL == "" { - return furObject, errors.New("Upload error: error in generating presigned URL for " + furObject.ObjectKey) - } - furObject.PresignedURL = msg.URL - } - - fi, err := file.Stat() - if err != nil { - return furObject, errors.New("File stat error for file" + furObject.ObjectKey + ", file may be missing or unreadable because of permissions.\n") - } - - if fi.Size() > common.FileSizeLimit { - return furObject, errors.New("The file size of file " + furObject.ObjectKey + " exceeds the limit allowed and cannot be uploaded. 
The maximum allowed file size is " + FormatSize(common.FileSizeLimit) + ".\n") - } - - return furObject, err -} diff --git a/upload/retry.go b/upload/retry.go deleted file mode 100644 index 679a93d..0000000 --- a/upload/retry.go +++ /dev/null @@ -1,171 +0,0 @@ -package upload - -import ( - "context" - "os" - "path/filepath" - "time" - - "github.com/calypr/data-client/common" - client "github.com/calypr/data-client/g3client" -) - -// GetWaitTime calculates exponential backoff with cap -func GetWaitTime(retryCount int) time.Duration { - exp := 1 << retryCount // 2^retryCount - seconds := int64(exp) - if seconds > common.MaxWaitTime { - seconds = common.MaxWaitTime - } - return time.Duration(seconds) * time.Second -} - -// RetryFailedUploads re-uploads previously failed files with exponential backoff -func RetryFailedUploads(ctx context.Context, g3 client.Gen3Interface, failedMap map[string]common.RetryObject) { - logger := g3.Logger() - if len(failedMap) == 0 { - logger.Println("No failed files to retry.") - return - } - - sb := logger.Scoreboard() - - logger.Printf("Starting retry-upload for %d failed Uploads", len(failedMap)) - retryChan := make(chan common.RetryObject, len(failedMap)) - - // Queue only non-already-succeeded files - for _, ro := range failedMap { - retryChan <- ro - } - - if len(retryChan) == 0 { - logger.Println("All previously failed files have since succeeded.") - return - } - - for ro := range retryChan { - ro.RetryCount++ - logger.Printf("#%d retry — %s\n", ro.RetryCount, ro.SourcePath) - wait := GetWaitTime(ro.RetryCount) - logger.Printf("Waiting %.0f seconds before retry...\n", wait.Seconds()) - time.Sleep(wait) - - // Clean up old record if exists - if ro.GUID != "" { - if msg, err := g3.Fence().DeleteRecord( - ctx, - ro.GUID, - ); err == nil { - logger.Println(msg) - } - } - - file, err := os.Open(ro.SourcePath) - if err != nil { - continue - } - - // Ensure filename is set - if ro.ObjectKey == "" { - absPath, _ := 
common.GetAbsolutePath(ro.SourcePath) - ro.ObjectKey = filepath.Base(absPath) - } - - if ro.Multipart { - // Retry multipart - req := common.FileUploadRequestObject{ - SourcePath: ro.SourcePath, - ObjectKey: ro.ObjectKey, - GUID: ro.GUID, - FileMetadata: ro.FileMetadata, - Bucket: ro.Bucket, - } - err = MultipartUpload(ctx, g3, req, file, true) - if err == nil { - logger.Succeeded(ro.SourcePath, req.GUID) - if sb != nil { - sb.IncrementSB(ro.RetryCount - 1) - } - continue - } - } else { - // Retry single-part - respObj, err := GeneratePresignedUploadURL(ctx, g3, ro.ObjectKey, ro.FileMetadata, ro.Bucket) - if err != nil { - handleRetryFailure(ctx, g3, ro, retryChan, err) - continue - } - - file, err := os.Open(ro.SourcePath) - if err != nil { - handleRetryFailure(ctx, g3, ro, retryChan, err) - continue - } - stat, _ := file.Stat() - file.Close() - - if stat.Size() > common.FileSizeLimit { - ro.Multipart = true - retryChan <- ro - continue - } - - fur := common.FileUploadRequestObject{ - SourcePath: ro.SourcePath, - ObjectKey: ro.ObjectKey, - FileMetadata: ro.FileMetadata, - GUID: respObj.GUID, - PresignedURL: respObj.URL, - } - - fur, err = generateUploadRequest(ctx, g3, fur, nil, nil) - if err != nil { - handleRetryFailure(ctx, g3, ro, retryChan, err) - continue - } - - err = UploadSingle(ctx, g3, fur, true) - if err == nil { - logger.Succeeded(ro.SourcePath, fur.GUID) - if sb != nil { - sb.IncrementSB(ro.RetryCount - 1) - } - continue - } - } - - // On failure, requeue if retries remain - handleRetryFailure(ctx, g3, ro, retryChan, err) - } -} - -// handleRetryFailure logs failure and requeues if retries remain -func handleRetryFailure(ctx context.Context, g3 client.Gen3Interface, ro common.RetryObject, retryChan chan common.RetryObject, err error) { - logger := g3.Logger() - logger.Failed(ro.SourcePath, ro.ObjectKey, ro.FileMetadata, ro.GUID, ro.RetryCount, ro.Multipart) - if err != nil { - logger.Println("Retry error:", err) - } - - if ro.RetryCount < 
common.MaxRetryCount { - retryChan <- ro - return - } - - // Max retries reached — final cleanup - if ro.GUID != "" { - if msg, err := g3.Fence().DeleteRecord(ctx, ro.GUID); err == nil { - logger.Println("Cleaned up failed record:", msg) - } else { - logger.Println("Cleanup failed:", err) - } - } - - if sb := logger.Scoreboard(); sb != nil { - sb.IncrementSB(common.MaxRetryCount + 1) - } - - if len(retryChan) == 0 { - close(retryChan) - } -} diff --git a/upload/singleFile.go b/upload/singleFile.go deleted file mode 100644 index 962a468..0000000 --- a/upload/singleFile.go +++ /dev/null @@ -1,96 +0,0 @@ -package upload - -import ( - "context" - "fmt" - "io" - "os" - - "github.com/calypr/data-client/common" - client "github.com/calypr/data-client/g3client" -) - -func UploadSingle(ctx context.Context, g3Client client.Gen3Interface, req common.FileUploadRequestObject, showProgress bool) error { - - // We use the provided client interface - g3i := g3Client - - g3i.Logger().InfoContext(ctx, "File Upload Request", "request", req) - - // Helper to handle * in path if it was passed, though optimally caller handles this. - // We will trust the SourcePath in the request object mostly, but for safety we can check existence. - // But commonly parsing happens before creating the object usually. - // Let's assume req.SourcePath is a single valid file path for now as per design. 
- - file, err := os.Open(req.SourcePath) - if err != nil { - if showProgress { - sb := g3i.Logger().Scoreboard() - if sb != nil { - sb.IncrementSB(len(sb.Counts)) - sb.PrintSB() - } - } - g3i.Logger().Failed(req.SourcePath, req.ObjectKey, common.FileMetadata{}, "", 0, false) - g3i.Logger().ErrorContext(ctx, "File open error", "file", req.SourcePath, "error", err) - return fmt.Errorf("[ERROR] when opening file path %s, an error occurred: %s\n", req.SourcePath, err.Error()) - } - defer file.Close() - - fi, err := file.Stat() - if err != nil { - return fmt.Errorf("failed to stat file: %w", err) - } - fileSize := fi.Size() - - furObject, err := generateUploadRequest(ctx, g3i, req, file, nil) - if err != nil { - if showProgress { - sb := g3i.Logger().Scoreboard() - if sb != nil { - sb.IncrementSB(len(sb.Counts)) - sb.PrintSB() - } - } - g3i.Logger().Failed(req.SourcePath, req.ObjectKey, common.FileMetadata{}, req.GUID, 0, false) - g3i.Logger().ErrorContext(ctx, "Error occurred during request generation", "file", req.SourcePath, "error", err) - return fmt.Errorf("[ERROR] Error occurred during request generation for file %s: %s\n", req.SourcePath, err.Error()) - } - - progressCallback := common.GetProgress(ctx) - oid := common.GetOid(ctx) - if oid == "" { - oid = resolveUploadOID(furObject) - } - - var reader io.Reader = file - var progressTracker *progressReader - if progressCallback != nil { - progressTracker = newProgressReader(file, progressCallback, oid, fileSize) - reader = progressTracker - } - - _, err = uploadPart(ctx, furObject.PresignedURL, reader, fileSize) - if progressTracker != nil { - if finalizeErr := progressTracker.Finalize(); finalizeErr != nil && err == nil { - err = finalizeErr - } - } - - if err != nil { - g3i.Logger().ErrorContext(ctx, "Upload failed", "error", err) - return err - } - - g3i.Logger().InfoContext(ctx, "Successfully uploaded", "file", req.ObjectKey) - g3i.Logger().Succeeded(req.SourcePath, req.GUID) - - if showProgress { - sb := 
g3i.Logger().Scoreboard() - if sb != nil { - sb.IncrementSB(0) - sb.PrintSB() - } - } - return nil -} diff --git a/upload/types.go b/upload/types.go deleted file mode 100644 index 8c69ce1..0000000 --- a/upload/types.go +++ /dev/null @@ -1,46 +0,0 @@ -package upload - -import "github.com/calypr/data-client/common" - -type PresignedURLResponse struct { - GUID string `json:"guid"` - URL string `json:"upload_url"` -} - -type UploadConfig struct { - BucketName string - NumParallel int - ForceMultipart bool - IncludeSubDirName bool - HasMetadata bool - ShowProgress bool -} - -// ShepherdInitRequestObject represents the payload that sends to Shepherd for getting a singlepart upload presignedURL or init a multipart upload for new object file -type ShepherdInitRequestObject struct { - Filename string `json:"file_name"` - Authz ShepherdAuthz `json:"authz"` - Aliases []string `json:"aliases"` - // Metadata is an encoded JSON string of any arbitrary metadata the user wishes to upload. - Metadata map[string]any `json:"metadata"` -} - -type ShepherdAuthz struct { - Version string `json:"version"` - ResourcePaths []string `json:"resource_paths"` -} - -// FileInfo is a helper struct for including subdirname as filename -type FileInfo struct { - FilePath string - Filename string - FileMetadata common.FileMetadata - ObjectId string -} - -// RenamedOrSkippedFileInfo is a helper struct for recording renamed or skipped files -type RenamedOrSkippedFileInfo struct { - GUID string - OldFilename string - NewFilename string -} diff --git a/upload/upload.go b/upload/upload.go deleted file mode 100644 index aec8d66..0000000 --- a/upload/upload.go +++ /dev/null @@ -1,208 +0,0 @@ -package upload - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/calypr/data-client/common" - drs "github.com/calypr/data-client/drs" // Imported for DRSObject - client "github.com/calypr/data-client/g3client" - "github.com/vbauerster/mpb/v8" -) - -// Upload is a unified catch-all 
function that automatically chooses between -// single-part and multipart upload based on file size. -func Upload(ctx context.Context, g3 client.Gen3Interface, req common.FileUploadRequestObject, showProgress bool) error { - g3.Logger().Printf("Processing Upload Request for: %s\n", req.SourcePath) - - file, err := os.Open(req.SourcePath) - if err != nil { - return fmt.Errorf("cannot open file %s: %w", req.SourcePath, err) - } - defer file.Close() - - stat, err := file.Stat() - if err != nil { - return fmt.Errorf("cannot stat file: %w", err) - } - - fileSize := stat.Size() - if fileSize == 0 { - return fmt.Errorf("file is empty: %s", req.ObjectKey) - } - - // Use Single-Part if file is smaller than 5GB (or your defined limit) - if fileSize < 5*common.GB { - g3.Logger().Printf("File size %d bytes (< 5GB), performing single-part upload\n", fileSize) - return UploadSingle(ctx, g3, req, true) - } - g3.Logger().Printf("File size %d bytes (>= 5GB), performing multipart upload\n", fileSize) - return MultipartUpload(ctx, g3, req, file, showProgress) -} - -// UploadSingleFile handles single-part upload with progress -func UploadSingleFile(ctx context.Context, g3 client.Gen3Interface, req common.FileUploadRequestObject, showProgress bool) error { - file, err := os.Open(req.SourcePath) - if err != nil { - return err - } - defer file.Close() - - fi, _ := file.Stat() - if fi.Size() > common.FileSizeLimit { - return fmt.Errorf("file exceeds 5GB limit") - } - - if fi.Size() > common.FileSizeLimit { - return fmt.Errorf("file exceeds 5GB limit") - } - - // Generate request with progress bar - var p *mpb.Progress - if showProgress { - p = mpb.New(mpb.WithOutput(os.Stdout)) - } - - // Populate PresignedURL and GUID if missing - fur, err := generateUploadRequest(ctx, g3, req, file, p) - if err != nil { - return err - } - - return MultipartUpload(ctx, g3, fur, file, showProgress) -} - -// RegisterAndUploadFile orchestrates registration with Indexd and uploading via Fence. 
-// It handles checking for existing records, upsert logic, checking if file is already downloadable, and performing the upload. -func RegisterAndUploadFile(ctx context.Context, g3 client.Gen3Interface, drsObject *drs.DRSObject, filePath string, bucketName string, upsert bool) (*drs.DRSObject, error) { - // 1. Register with Indexd - // Note: The caller is responsible for converting local DRS object to data-client DRS object if needed. - - res, err := g3.Indexd().RegisterRecord(ctx, drsObject) - if err != nil { - if strings.Contains(err.Error(), "already exists") { - if !upsert { - g3.Logger().Printf("indexd record already exists, proceeding for %s\n", drsObject.Id) - } else { - g3.Logger().Printf("indexd record already exists, deleting and re-adding for %s\n", drsObject.Id) - err = g3.Indexd().DeleteIndexdRecord(ctx, drsObject.Id) - if err != nil { - return nil, fmt.Errorf("failed to delete existing record: %w", err) - } - res, err = g3.Indexd().RegisterRecord(ctx, drsObject) - if err != nil { - return nil, fmt.Errorf("failed to re-register record: %w", err) - } - } - } else { - return nil, fmt.Errorf("error registering indexd record: %w", err) - } - } else { - // If registration succeeded, use the returned object which might have updated fields (e.g. created time) - // although we typically reuse the ID for upload. - } - - // If we didn't get a new object (upsert=false case), we should fetch the existing one to be sure about its state? - // But we have the ID in drsObject.Id. - - // 2. Check if file is downloadable - downloadable, err := isFileDownloadable(ctx, g3, drsObject.Id) - if err != nil { - return nil, fmt.Errorf("failed to check if file is downloadable: %w", err) - } - - if downloadable { - g3.Logger().Printf("File %s is already downloadable, skipping upload.\n", drsObject.Id) - // Return the registered object (or the one passed in if we didn't re-register) - // If we re-registered, res is populated. If not, we might want to return the fetched object? 
- // For consistency, let's return res if set, or fetch it. - if res != nil { - return res, nil - } - return g3.Indexd().GetObject(ctx, drsObject.Id) - } - - // 3. Upload File - uploadFilename := filepath.Base(filePath) - - // Attempt to determine the correct upload filename from the registered object's URL. - // git-drs registers s3://bucket/GUID/SHA, so we want to upload to "SHA", not "filename.ext". - if res != nil && len(res.AccessMethods) > 0 { - for _, am := range res.AccessMethods { - if am.Type == "s3" && am.AccessURL.URL != "" { - // Parse s3://bucket/guid/sha -> sha - parts := strings.Split(am.AccessURL.URL, "/") - if len(parts) > 0 { - candidate := parts[len(parts)-1] - if candidate != "" { - uploadFilename = candidate - } - } - break - } - } - } else if len(drsObject.AccessMethods) > 0 { - // Fallback to checking the input object if res didn't have methods (unlikely for upsert=false) - for _, am := range drsObject.AccessMethods { - if am.Type == "s3" && am.AccessURL.URL != "" { - parts := strings.Split(am.AccessURL.URL, "/") - if len(parts) > 0 { - candidate := parts[len(parts)-1] - if candidate != "" { - uploadFilename = candidate - } - } - break - } - } - } - - req := common.FileUploadRequestObject{ - SourcePath: filePath, - ObjectKey: uploadFilename, - GUID: drsObject.Id, - Bucket: bucketName, - } - - // Use Upload function which handles single/multipart selection - err = Upload(ctx, g3, req, false) - if err != nil { - return nil, fmt.Errorf("failed to upload file: %w", err) - } - - // Return the object - if res != nil { - return res, nil - } - return g3.Indexd().GetObject(ctx, drsObject.Id) -} - -func isFileDownloadable(ctx context.Context, g3 client.Gen3Interface, did string) (bool, error) { - // Get the object to find access methods - obj, err := g3.Indexd().GetObject(ctx, did) - if err != nil { - return false, err - } - - if len(obj.AccessMethods) == 0 { - return false, nil - } - - accessType := obj.AccessMethods[0].Type - res, err := 
g3.Indexd().GetDownloadURL(ctx, did, accessType) - if err != nil { - // If we can't get a download URL, it's not downloadable - return false, nil - } - - if res.URL == "" { - return false, nil - } - - // Check if the URL is accessible - err = common.CanDownloadFile(res.URL) - return err == nil, nil -} diff --git a/upload/utils.go b/upload/utils.go deleted file mode 100644 index 54cf836..0000000 --- a/upload/utils.go +++ /dev/null @@ -1,189 +0,0 @@ -package upload - -import ( - "encoding/json" - "errors" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/calypr/data-client/common" - client "github.com/calypr/data-client/g3client" - "github.com/calypr/data-client/logs" -) - -func SeparateSingleAndMultipartUploads(g3i client.Gen3Interface, objects []common.FileUploadRequestObject) ([]common.FileUploadRequestObject, []common.FileUploadRequestObject) { - fileSizeLimit := common.FileSizeLimit - - var singlepartObjects []common.FileUploadRequestObject - var multipartObjects []common.FileUploadRequestObject - - for _, object := range objects { - fi, err := os.Stat(object.SourcePath) - if err != nil { - if os.IsNotExist(err) { - g3i.Logger().Printf("The file you specified \"%s\" does not exist locally\n", object.SourcePath) - } else { - g3i.Logger().Println("File stat error: " + err.Error()) - } - g3i.Logger().Failed(object.SourcePath, object.ObjectKey, object.FileMetadata, object.GUID, 0, false) - continue - } - if fi.IsDir() { - continue - } - if _, ok := g3i.Logger().GetSucceededLogMap()[object.SourcePath]; ok { - g3i.Logger().Println("File \"" + object.SourcePath + "\" found in history. 
Skipping.") - continue - } - if fi.Size() > common.MultipartFileSizeLimit { - g3i.Logger().Printf("File %s exceeds max limit\n", fi.Name()) - continue - } - if fi.Size() > int64(fileSizeLimit) { - multipartObjects = append(multipartObjects, object) - } else { - singlepartObjects = append(singlepartObjects, object) - } - } - return singlepartObjects, multipartObjects -} - -// ProcessFilename returns an FileInfo object which has the information about the path and name to be used for upload of a file -func ProcessFilename(logger *logs.Gen3Logger, uploadPath string, filePath string, objectId string, includeSubDirName bool, includeMetadata bool) (common.FileUploadRequestObject, error) { - var err error - filePath, err = common.GetAbsolutePath(filePath) - if err != nil { - return common.FileUploadRequestObject{}, err - } - - filename := filepath.Base(filePath) // Default to base filename - - var metadata common.FileMetadata - if includeSubDirName { - absUploadPath, err := common.GetAbsolutePath(uploadPath) - if err != nil { - return common.FileUploadRequestObject{}, err - } - - // Ensure absUploadPath is a directory path for relative calculation - // Trim the optional wildcard if present - uploadDir := strings.TrimSuffix(absUploadPath, common.PathSeparator+"*") - fileInfo, err := os.Stat(uploadDir) - if err != nil { - return common.FileUploadRequestObject{}, err - } - if fileInfo.IsDir() { - // Calculate the path of the file relative to the upload directory - relPath, err := filepath.Rel(uploadDir, filePath) - if err != nil { - return common.FileUploadRequestObject{}, err - } - filename = relPath - } - } - - if includeMetadata { - // The metadata path is the file name plus '_metadata.json' - metadataFilePath := strings.TrimSuffix(filePath, filepath.Ext(filePath)) + "_metadata.json" - var metadataFileBytes []byte - if _, err := os.Stat(metadataFilePath); err == nil { - metadataFileBytes, err = os.ReadFile(metadataFilePath) - if err != nil { - return 
common.FileUploadRequestObject{}, errors.New("Error reading metadata file " + metadataFilePath + ": " + err.Error()) - } - err := json.Unmarshal(metadataFileBytes, &metadata) - if err != nil { - return common.FileUploadRequestObject{}, errors.New("Error parsing metadata file " + metadataFilePath + ": " + err.Error()) - } - } else { - // No metadata file was found for this file -- proceed, but warn the user. - logger.Printf("WARNING: File metadata is enabled, but could not find the metadata file %v for file %v. Execute `data-client upload --help` for more info on file metadata.\n", metadataFilePath, filePath) - } - } - return common.FileUploadRequestObject{SourcePath: filePath, ObjectKey: filename, FileMetadata: metadata, GUID: objectId}, nil -} - -// FormatSize helps to parse a int64 size into string -func FormatSize(size int64) string { - var unitSize int64 - switch { - case size >= common.TB: - unitSize = common.TB - case size >= common.GB: - unitSize = common.GB - case size >= common.MB: - unitSize = common.MB - case size >= common.KB: - unitSize = common.KB - default: - unitSize = common.B - } - - var unitMap = map[int64]string{ - common.B: "B", - common.KB: "KB", - common.MB: "MB", - common.GB: "GB", - common.TB: "TB", - } - - return fmt.Sprintf("%.1f"+unitMap[unitSize], float64(size)/float64(unitSize)) -} - -// OptimalChunkSize returns a recommended chunk size for the given fileSize (in bytes). 
-// - <= 100 MB: return fileSize (use single PUT) -// - >100 MB and <= 1 GB: 10 MB -// - >1 GB and <= 10 GB: scaled between 25 MB and 128 MB -// - >10 GB and <= 100 GB: 256 MB -// - >100 GB: scaled between 512 MB and 1024 MB (1 GB) -// See: -// https://cloud.switch.ch/-/documentation/s3/multipart-uploads/#best-practices -func OptimalChunkSize(fileSize int64) int64 { - if fileSize <= 0 { - return 1 * common.MB - } - - switch { - case fileSize <= 100*common.MB: - // Single PUT: return whole file size - return fileSize - - case fileSize <= 1*common.GB: - return 10 * common.MB - - case fileSize <= 10*common.GB: - return scaleLinear(fileSize, 1*common.GB, 10*common.GB, 25*common.MB, 128*common.MB) - - case fileSize <= 100*common.GB: - return 256 * common.MB - - default: - // Scale for very large files; cap scaling at 1 TB for ratio purposes - return scaleLinear(fileSize, 100*common.GB, 1000*common.GB, 512*common.MB, 1024*common.MB) - } -} - -// scaleLinear scales size in [minSize, maxSize] to chunk in [minChunk, maxChunk] (linear). -// Result is rounded down to nearest MB and clamped to [minChunk, maxChunk]. 
-func scaleLinear(size, minSize, maxSize, minChunk, maxChunk int64) int64 { - if size <= minSize { - return minChunk - } - if size >= maxSize { - return maxChunk - } - ratio := float64(size-minSize) / float64(maxSize-minSize) - chunkF := float64(minChunk) + ratio*(float64(maxChunk-minChunk)) - // round down to nearest MB - mb := int64(common.MB) - chunk := int64(chunkF) / mb * mb - if chunk < minChunk { - return minChunk - } - if chunk > maxChunk { - return maxChunk - } - return chunk -} diff --git a/upload/utils_test.go b/upload/utils_test.go deleted file mode 100644 index 6abe45e..0000000 --- a/upload/utils_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package upload - -import ( - "testing" - - "github.com/calypr/data-client/common" -) - -func TestOptimalChunkSize(t *testing.T) { - tests := []struct { - name string - fileSize int64 - wantChunkSize int64 - wantParts int64 - }{ - { - name: "0 bytes", - fileSize: 0, - wantChunkSize: 1 * common.MB, - wantParts: 0, - }, - { - name: "1MB", - fileSize: 1 * common.MB, - wantChunkSize: 1 * common.MB, - wantParts: 1, - }, - { - name: "100MB", - fileSize: 100 * common.MB, - wantChunkSize: 100 * common.MB, - wantParts: 1, - }, - { - name: "100MB+1B", - fileSize: 100*common.MB + 1, - wantChunkSize: 10 * common.MB, - wantParts: 11, - }, - { - name: "500MB", - fileSize: 500 * common.MB, - wantChunkSize: 10 * common.MB, - wantParts: 50, - }, - { - name: "1GB", - fileSize: 1 * common.GB, - wantChunkSize: 10 * common.MB, - wantParts: 103, - }, - { - name: "1GB+1B", - fileSize: 1*common.GB + 1, - wantChunkSize: 25 * common.MB, - wantParts: 41, - }, - { - name: "5GB", - fileSize: 5 * common.GB, - wantChunkSize: 70 * common.MB, - wantParts: 74, - }, - { - name: "10GB", - fileSize: 10 * common.GB, - wantChunkSize: 128 * common.MB, - wantParts: 80, - }, - { - name: "10GB+1B", - fileSize: 10*common.GB + 1, - wantChunkSize: 256 * common.MB, - wantParts: 41, - }, - { - name: "50GB", - fileSize: 50 * common.GB, - wantChunkSize: 256 * common.MB, 
- wantParts: 200, - }, - { - name: "100GB", - fileSize: 100 * common.GB, - wantChunkSize: 256 * common.MB, - wantParts: 400, - }, - { - name: "100GB+1B", - fileSize: 100*common.GB + 1, - wantChunkSize: 512 * common.MB, - wantParts: 201, - }, - { - name: "500GB", - fileSize: 500 * common.GB, - wantChunkSize: 739 * common.MB, - wantParts: 693, - }, - { - name: "1TB", - fileSize: 1 * common.TB, - wantChunkSize: 1 * common.GB, - wantParts: 1024, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - chunkSize := OptimalChunkSize(tt.fileSize) - if chunkSize != tt.wantChunkSize { - t.Fatalf("chunk size = %d, want %d", chunkSize, tt.wantChunkSize) - } - - parts := int64(0) - if tt.fileSize > 0 && chunkSize > 0 { - parts = (tt.fileSize + chunkSize - 1) / chunkSize - } - if parts != tt.wantParts { - t.Fatalf("parts = %d, want %d", parts, tt.wantParts) - } - }) - } -}