Skip to content

Commit

Permalink
Increase file copy timeout (#548)
Browse files Browse the repository at this point in the history
  • Loading branch information
mjh1 authored Mar 29, 2023
1 parent 11ebfb4 commit 32e1200
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 20 deletions.
2 changes: 1 addition & 1 deletion clients/arweave_ipfs_s3.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ func CopyDStorageToS3(url, s3URL string, requestID string) error {
return err
}

err = UploadToOSURL(s3URL, "", content, MAX_COPY_FILE_DURATION)
err = UploadToOSURL(s3URL, "", content, MaxCopyFileDuration)
if err != nil {
return err
}
Expand Down
21 changes: 19 additions & 2 deletions clients/input_copy.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,15 @@ import (
"time"

"github.com/cenkalti/backoff/v4"
"github.com/hashicorp/go-retryablehttp"
"github.com/livepeer/catalyst-api/config"
xerrors "github.com/livepeer/catalyst-api/errors"
"github.com/livepeer/catalyst-api/log"
"github.com/livepeer/catalyst-api/video"
"github.com/livepeer/go-tools/drivers"
)

const MAX_COPY_FILE_DURATION = 30 * time.Minute
const MaxCopyFileDuration = 2 * time.Hour
const PresignDuration = 24 * time.Hour

type InputCopier interface {
Expand Down Expand Up @@ -98,7 +99,7 @@ func CopyFile(ctx context.Context, sourceURL, destOSBaseURL, filename, requestID

content := io.TeeReader(c, &byteAccWriter)

err = UploadToOSURL(destOSBaseURL, filename, content, MAX_COPY_FILE_DURATION)
err = UploadToOSURL(destOSBaseURL, filename, content, MaxCopyFileDuration)
if err != nil {
log.Log(requestID, "Copy attempt failed", "source", sourceURL, "dest", path.Join(destOSBaseURL, filename), "err", err)
}
Expand All @@ -118,6 +119,22 @@ func getFile(ctx context.Context, url, requestID string) (io.ReadCloser, error)
}
}

// retryableHttpClient is the package-wide HTTP client used for fetching input
// files; it transparently retries transient failures with exponential backoff.
var retryableHttpClient = newRetryableHttpClient()

// newRetryableHttpClient wraps go-retryablehttp in a standard *http.Client so
// existing call sites get automatic retries without any signature changes.
func newRetryableHttpClient() *http.Client {
	rc := retryablehttp.NewClient()

	// Up to 5 retries after the first attempt, i.e. 6 attempts in total.
	rc.RetryMax = 5
	// Exponential backoff between attempts, bounded below and above.
	rc.RetryWaitMin = 200 * time.Millisecond
	rc.RetryWaitMax = 5 * time.Second
	// Cap each individual request at MaxCopyFileDuration: if a transfer takes
	// this long, the file is probably too big to process locally, or the
	// request is hanging and should be abandoned.
	rc.HTTPClient = &http.Client{Timeout: MaxCopyFileDuration}

	return rc.StandardClient()
}

func getFileHTTP(ctx context.Context, url string) (io.ReadCloser, error) {
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
if err != nil {
Expand Down
17 changes: 0 additions & 17 deletions clients/mediaconvert.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
"path"
"strings"
Expand All @@ -15,7 +14,6 @@ import (
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/mediaconvert"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/hashicorp/go-retryablehttp"
"github.com/livepeer/catalyst-api/log"
"github.com/livepeer/catalyst-api/video"
"golang.org/x/sync/errgroup"
Expand All @@ -24,7 +22,6 @@ import (
const MAX_COPY_DIR_DURATION = 2 * time.Hour

var pollDelay = 10 * time.Second
var retryableHttpClient = newRetryableHttpClient()

const (
rateLimitedPollDelay = 15 * time.Second
Expand Down Expand Up @@ -493,17 +490,3 @@ func contains[T comparable](v T, list []T) bool {
}
return false
}

func newRetryableHttpClient() *http.Client {
client := retryablehttp.NewClient()
client.RetryMax = 5 // Retry a maximum of this+1 times
client.RetryWaitMin = 200 * time.Millisecond // Wait at least this long between retries
client.RetryWaitMax = 5 * time.Second // Wait at most this long between retries (exponential backoff)
client.HTTPClient = &http.Client{
// Give up on requests that take more than this long - the file is probably too big for us to process locally if it takes this long
// or something else has gone wrong and the request is hanging
Timeout: MAX_COPY_FILE_DURATION,
}

return client.StandardClient()
}

0 comments on commit 32e1200

Please sign in to comment.