Compare commits

1 commit

Author:  Nick Craig-Wood
SHA1:    ee28856f1d
Date:    2020-06-25 22:13:33 +01:00
Message: gcs: Allow compressed files to be downloaded - fixes #2658

Before this change, the Go HTTP transport would automatically decompress
compressed objects, leading to length mismatches.

After this change, rclone downloads the compressed object as stored, so its
length and checksum match the object's metadata.
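
The mismatch comes from net/http: when a request carries no explicit
Accept-Encoding header, the Go Transport asks for gzip itself and
transparently decompresses the response, so the bytes read no longer match
the stored object's size or MD5. Setting Accept-Encoding on the request
switches that automatic decompression off. Below is a minimal sketch of
that behaviour (not rclone code; the object URL is hypothetical):

package main

import (
	"fmt"
	"io"
	"net/http"
)

// fetch downloads url and returns how many body bytes were read.
// With rawGzip set it asks for the stored (compressed) representation,
// which also disables the Transport's automatic decompression.
func fetch(url string, rawGzip bool) (int64, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return 0, err
	}
	if rawGzip {
		// Setting Accept-Encoding ourselves tells net/http not to
		// decompress the response body for us.
		req.Header.Set("Accept-Encoding", "gzip")
	}
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return 0, err
	}
	defer res.Body.Close()
	return io.Copy(io.Discard, res.Body)
}

func main() {
	// Hypothetical object stored with Content-Encoding: gzip.
	const url = "https://storage.googleapis.com/bucket/object.gz"

	decompressed, err := fetch(url, false) // transparently decompressed by the Transport
	if err != nil {
		panic(err)
	}
	compressed, err := fetch(url, true) // the object exactly as stored
	if err != nil {
		panic(err)
	}
	fmt.Printf("decompressed read: %d bytes, raw read: %d bytes\n", decompressed, compressed)
}

The second fetch is the mechanism the patch below relies on for objects
whose metadata reports Content-Encoding: gzip.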

@@ -298,6 +298,7 @@ type Object struct {
     bytes    int64     // Bytes in the object
     modTime  time.Time // Modified time of the object
     mimeType string
+    gzipped  bool // set if object has Content-Encoding: gzip
 }

 // ------------------------------------------------------------
@@ -899,6 +900,7 @@ func (o *Object) setMetaData(info *storage.Object) {
     o.url = info.MediaLink
     o.bytes = int64(info.Size)
     o.mimeType = info.ContentType
+    o.gzipped = info.ContentEncoding == "gzip"

     // Read md5sum
     md5sumData, err := base64.StdEncoding.DecodeString(info.Md5Hash)
@@ -1026,6 +1028,15 @@ func (o *Object) Open(ctx context.Context, options ...fs.OpenOption) (in io.ReadCloser, err error) {
     }
     req = req.WithContext(ctx) // go1.13 can use NewRequestWithContext
     fs.FixRangeOption(options, o.bytes)
+    if o.gzipped {
+        // Allow files which are stored on the cloud storage system
+        // compressed to be downloaded without being decompressed. Note
+        // that setting this here overrides the automatic decompression
+        // in the Transport.
+        //
+        // See: https://cloud.google.com/storage/docs/transcoding
+        req.Header.Set("Accept-Encoding", "gzip")
+    }
     fs.OpenOptionAddHTTPHeaders(req.Header, options)
     var res *http.Response
     err = o.fs.pacer.Call(func() (bool, error) {
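
Because the object now arrives exactly as it is stored, its length and MD5
can be checked against the bucket metadata, and a caller that wants the
plain content can still decompress it locally. A minimal sketch of that
(not part of the commit; the file name object.gz is hypothetical):

package main

import (
	"bytes"
	"compress/gzip"
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

func main() {
	// Raw download produced by the change above: the object exactly as
	// stored, still gzip-compressed.
	raw, err := os.ReadFile("object.gz")
	if err != nil {
		panic(err)
	}

	// Length and MD5 now correspond to the stored object, so they can be
	// compared against the metadata (info.Size, and info.Md5Hash once
	// base64-decoded, as setMetaData does above).
	sum := md5.Sum(raw)
	fmt.Printf("stored length: %d, md5: %s\n", len(raw), hex.EncodeToString(sum[:]))

	// The plain content is still recoverable locally when needed.
	zr, err := gzip.NewReader(bytes.NewReader(raw))
	if err != nil {
		panic(err)
	}
	plain, err := io.ReadAll(zr)
	if err != nil {
		panic(err)
	}
	fmt.Printf("decompressed length: %d\n", len(plain))
}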