fs/erasure: Rename meta 'md5Sum' as 'etag'. (#4319)

This PR also bumps the backend format version from 1.0.0 to 1.0.1.
Backward-compatible code is kept so the legacy 'md5Sum' key can still
be read, but all new objects store the same value under 'etag'.

Fixes #4312
Harshavardhana 2017-05-14 12:05:51 -07:00 committed by GitHub
parent c63afabc9b
commit 155a90403a
33 changed files with 274 additions and 187 deletions
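
To make the compatibility story concrete before reading the diff: readers accept both meta versions (and, further down, both metadata keys), while writers emit only version 1.0.1 with 'etag'. The following is a minimal standalone sketch, not part of the diff; the constant names, values, and the isFSMetaValid helper are taken from the fs-v1-metadata.go hunk below, and the main function is purely illustrative.

package main

import "fmt"

// FS backend meta versions and format, as introduced by this PR.
const (
	fsMetaVersion100 = "1.0.0" // legacy meta version, still readable
	fsMetaVersion    = "1.0.1" // new meta version written from now on
	fsMetaFormat     = "fs"
)

// isFSMetaValid mirrors the helper added in the diff: both the old and
// the new version strings are accepted, so existing fs.json files stay
// readable after the upgrade.
func isFSMetaValid(version, format string) bool {
	return (version == fsMetaVersion || version == fsMetaVersion100) &&
		format == fsMetaFormat
}

func main() {
	fmt.Println(isFSMetaValid("1.0.0", "fs")) // true - pre-upgrade metadata
	fmt.Println(isFSMetaValid("1.0.1", "fs")) // true - newly written metadata
	fmt.Println(isFSMetaValid("2.0.0", "fs")) // false - treated as corrupted format
}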

View file

@ -62,8 +62,8 @@ func setObjectHeaders(w http.ResponseWriter, objInfo ObjectInfo, contentRange *h
w.Header().Set("Last-Modified", lastModified)
// Set Etag if available.
if objInfo.MD5Sum != "" {
w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
if objInfo.ETag != "" {
w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
}
// Set all other user defined metadata.

View file

@ -321,8 +321,8 @@ func generateListObjectsV1Response(bucket, prefix, marker, delimiter string, max
}
content.Key = object.Name
content.LastModified = object.ModTime.UTC().Format(timeFormatAMZLong)
if object.MD5Sum != "" {
content.ETag = "\"" + object.MD5Sum + "\""
if object.ETag != "" {
content.ETag = "\"" + object.ETag + "\""
}
content.Size = object.Size
content.StorageClass = globalMinioDefaultStorageClass
@ -370,8 +370,8 @@ func generateListObjectsV2Response(bucket, prefix, token, startAfter, delimiter
}
content.Key = object.Name
content.LastModified = object.ModTime.UTC().Format(timeFormatAMZLong)
if object.MD5Sum != "" {
content.ETag = "\"" + object.MD5Sum + "\""
if object.ETag != "" {
content.ETag = "\"" + object.ETag + "\""
}
content.Size = object.Size
content.StorageClass = globalMinioDefaultStorageClass

View file

@ -49,7 +49,7 @@ func runPutObjectBenchmark(b *testing.B, obj ObjectLayer, objSize int) {
// generate md5sum for the generated data.
// md5sum of the data to written is required as input for PutObject.
metadata := make(map[string]string)
metadata["md5Sum"] = getMD5Hash(textData)
metadata["etag"] = getMD5Hash(textData)
sha256sum := ""
// benchmark utility which helps obtain number of allocations and bytes allocated per ops.
b.ReportAllocs()
@ -61,8 +61,8 @@ func runPutObjectBenchmark(b *testing.B, obj ObjectLayer, objSize int) {
if err != nil {
b.Fatal(err)
}
if objInfo.MD5Sum != metadata["md5Sum"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.MD5Sum, metadata["md5Sum"])
if objInfo.ETag != metadata["etag"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.ETag, metadata["etag"])
}
}
// Benchmark ends here. Stop timer.
@ -85,15 +85,15 @@ func runPutObjectPartBenchmark(b *testing.B, obj ObjectLayer, partSize int) {
objSize := 128 * humanize.MiByte
// PutObjectPart returns md5Sum of the object inserted.
// md5Sum variable is assigned with that value.
var md5Sum, uploadID string
// PutObjectPart returns etag of the object inserted.
// etag variable is assigned with that value.
var etag, uploadID string
// get text data generated for number of bytes equal to object size.
textData := generateBytesData(objSize)
// generate md5sum for the generated data.
// md5sum of the data to written is required as input for NewMultipartUpload.
metadata := make(map[string]string)
metadata["md5Sum"] = getMD5Hash(textData)
metadata["etag"] = getMD5Hash(textData)
sha256sum := ""
uploadID, err = obj.NewMultipartUpload(bucket, object, metadata)
if err != nil {
@ -115,14 +115,14 @@ func runPutObjectPartBenchmark(b *testing.B, obj ObjectLayer, partSize int) {
textPartData = textData[j*partSize:]
}
metadata := make(map[string]string)
metadata["md5Sum"] = getMD5Hash([]byte(textPartData))
metadata["etag"] = getMD5Hash([]byte(textPartData))
var partInfo PartInfo
partInfo, err = obj.PutObjectPart(bucket, object, uploadID, j, int64(len(textPartData)), bytes.NewBuffer(textPartData), metadata["md5Sum"], sha256sum)
partInfo, err = obj.PutObjectPart(bucket, object, uploadID, j, int64(len(textPartData)), bytes.NewBuffer(textPartData), metadata["etag"], sha256sum)
if err != nil {
b.Fatal(err)
}
if partInfo.ETag != metadata["md5Sum"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, md5Sum, metadata["md5Sum"])
if partInfo.ETag != metadata["etag"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, etag, metadata["etag"])
}
}
}
@ -208,19 +208,19 @@ func runGetObjectBenchmark(b *testing.B, obj ObjectLayer, objSize int) {
for i := 0; i < 10; i++ {
// get text data generated for number of bytes equal to object size.
textData := generateBytesData(objSize)
// generate md5sum for the generated data.
// md5sum of the data to written is required as input for PutObject.
// generate etag for the generated data.
// etag of the data to written is required as input for PutObject.
// PutObject is the functions which writes the data onto the FS/XL backend.
metadata := make(map[string]string)
metadata["md5Sum"] = getMD5Hash(textData)
metadata["etag"] = getMD5Hash(textData)
// insert the object.
var objInfo ObjectInfo
objInfo, err = obj.PutObject(bucket, "object"+strconv.Itoa(i), int64(len(textData)), bytes.NewBuffer(textData), metadata, sha256sum)
if err != nil {
b.Fatal(err)
}
if objInfo.MD5Sum != metadata["md5Sum"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.MD5Sum, metadata["md5Sum"])
if objInfo.ETag != metadata["etag"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.ETag, metadata["etag"])
}
}
@ -317,7 +317,7 @@ func runPutObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
// generate md5sum for the generated data.
// md5sum of the data to written is required as input for PutObject.
metadata := make(map[string]string)
metadata["md5Sum"] = getMD5Hash([]byte(textData))
metadata["etag"] = getMD5Hash([]byte(textData))
sha256sum := ""
// benchmark utility which helps obtain number of allocations and bytes allocated per ops.
b.ReportAllocs()
@ -332,8 +332,8 @@ func runPutObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
if err != nil {
b.Fatal(err)
}
if objInfo.MD5Sum != metadata["md5Sum"] {
b.Fatalf("Write no: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", objInfo.MD5Sum, metadata["md5Sum"])
if objInfo.ETag != metadata["etag"] {
b.Fatalf("Write no: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", objInfo.ETag, metadata["etag"])
}
i++
}
@ -367,7 +367,7 @@ func runGetObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
// md5sum of the data to written is required as input for PutObject.
// PutObject is the functions which writes the data onto the FS/XL backend.
metadata := make(map[string]string)
metadata["md5Sum"] = getMD5Hash([]byte(textData))
metadata["etag"] = getMD5Hash([]byte(textData))
sha256sum := ""
// insert the object.
var objInfo ObjectInfo
@ -375,8 +375,8 @@ func runGetObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
if err != nil {
b.Fatal(err)
}
if objInfo.MD5Sum != metadata["md5Sum"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.MD5Sum, metadata["md5Sum"])
if objInfo.ETag != metadata["etag"] {
b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.ETag, metadata["etag"])
}
}

View file

@ -535,7 +535,7 @@ func (api objectAPIHandlers) PostPolicyBucketHandler(w http.ResponseWriter, r *h
return
}
w.Header().Set("ETag", `"`+objInfo.MD5Sum+`"`)
w.Header().Set("ETag", `"`+objInfo.ETag+`"`)
w.Header().Set("Location", getObjectLocation(bucket, object))
// Get host and port from Request.RemoteAddr.
@ -568,7 +568,7 @@ func (api objectAPIHandlers) PostPolicyBucketHandler(w http.ResponseWriter, r *h
resp := encodeResponse(PostResponse{
Bucket: objInfo.Bucket,
Key: objInfo.Name,
ETag: `"` + objInfo.MD5Sum + `"`,
ETag: `"` + objInfo.ETag + `"`,
Location: getObjectLocation(objInfo.Bucket, objInfo.Name),
})
writeResponse(w, http.StatusCreated, resp, "application/xml")

View file

@ -172,7 +172,7 @@ func newNotificationEvent(event eventData) NotificationEvent {
// For all other events we should set ETag and Size.
nEvent.S3.Object = objectMeta{
Key: escapedObj,
ETag: event.ObjInfo.MD5Sum,
ETag: event.ObjInfo.ETag,
Size: event.ObjInfo.Size,
ContentType: event.ObjInfo.ContentType,
UserDefined: event.ObjInfo.UserDefined,

View file

@ -33,11 +33,31 @@ import (
"github.com/tidwall/gjson"
)
// FS format, and object metadata.
const (
fsMetaJSONFile = "fs.json"
// fs.json object metadata.
fsMetaJSONFile = "fs.json"
// format.json FS format metadata.
fsFormatJSONFile = "format.json"
)
// FS metadata constants.
const (
// FS backend meta 1.0.0 version.
fsMetaVersion100 = "1.0.0"
// FS backend meta 1.0.1 version.
fsMetaVersion = "1.0.1"
// FS backend meta format.
fsMetaFormat = "fs"
// FS backend format version.
fsFormatVersion = fsFormatV2
// Add more constants here.
)
// A fsMetaV1 represents a metadata header mapping keys to sets of values.
type fsMetaV1 struct {
Version string `json:"version"`
@ -50,6 +70,19 @@ type fsMetaV1 struct {
Parts []objectPartInfo `json:"parts,omitempty"`
}
// IsValid - tells if the format is sane by validating the version
// string and format style.
func (m fsMetaV1) IsValid() bool {
return isFSMetaValid(m.Version, m.Format)
}
// Verifies if the backend format metadata is sane by validating
// the version string and format style.
func isFSMetaValid(version, format string) bool {
return ((version == fsMetaVersion || version == fsMetaVersion100) &&
format == fsMetaFormat)
}
// Converts metadata to object info.
func (m fsMetaV1) ToObjectInfo(bucket, object string, fi os.FileInfo) ObjectInfo {
if len(m.Meta) == 0 {
@ -78,17 +111,15 @@ func (m fsMetaV1) ToObjectInfo(bucket, object string, fi os.FileInfo) ObjectInfo
objInfo.IsDir = fi.IsDir()
}
objInfo.MD5Sum = m.Meta["md5Sum"]
// Extract etag from metadata.
objInfo.ETag = extractETag(m.Meta)
objInfo.ContentType = m.Meta["content-type"]
objInfo.ContentEncoding = m.Meta["content-encoding"]
// md5Sum has already been extracted into objInfo.MD5Sum. We
// need to remove it from m.Meta to avoid it from appearing as
// part of response headers. e.g, X-Minio-* or X-Amz-*.
delete(m.Meta, "md5Sum")
// Save all the other userdefined API.
objInfo.UserDefined = m.Meta
// etag/md5Sum has already been extracted. We need to
// remove to avoid it from appearing as part of
// response headers. e.g, X-Minio-* or X-Amz-*.
objInfo.UserDefined = cleanMetaETag(m.Meta)
// Success..
return objInfo
@ -207,6 +238,12 @@ func (m *fsMetaV1) ReadFrom(lk *lock.LockedFile) (n int64, err error) {
// obtain format.
m.Format = parseFSFormat(fsMetaBuf)
// Verify if the format is valid, return corrupted format
// for unrecognized formats.
if !isFSMetaValid(m.Version, m.Format) {
return 0, traceError(errCorruptedFormat)
}
// obtain metadata.
m.Meta = parseFSMetaMap(fsMetaBuf)
@ -220,20 +257,6 @@ func (m *fsMetaV1) ReadFrom(lk *lock.LockedFile) (n int64, err error) {
return int64(len(fsMetaBuf)), nil
}
// FS metadata constants.
const (
// FS backend meta version.
fsMetaVersion = "1.0.0"
// FS backend meta format.
fsMetaFormat = "fs"
// FS backend format version.
fsFormatVersion = fsFormatV2
// Add more constants here.
)
// FS format version strings.
const (
fsFormatV1 = "1" // Previous format.

View file

@ -110,10 +110,10 @@ func TestWriteFSMetadata(t *testing.T) {
if err != nil {
t.Fatal("Unexpected error ", err)
}
if fsMeta.Version != "1.0.0" {
if fsMeta.Version != fsMetaVersion {
t.Fatalf("Unexpected version %s", fsMeta.Version)
}
if fsMeta.Format != "fs" {
if fsMeta.Format != fsMetaFormat {
t.Fatalf("Unexpected format %s", fsMeta.Format)
}
}

View file

@ -871,7 +871,7 @@ func (fs fsObjects) CompleteMultipartUpload(bucket string, object string, upload
if len(fsMeta.Meta) == 0 {
fsMeta.Meta = make(map[string]string)
}
fsMeta.Meta["md5Sum"] = s3MD5
fsMeta.Meta["etag"] = s3MD5
// Write all the set metadata.
if _, err = fsMeta.WriteTo(metaFile); err != nil {

View file

@ -712,12 +712,12 @@ func (fs fsObjects) PutObject(bucket string, object string, size int64, data io.
newMD5Hex := hex.EncodeToString(md5Writer.Sum(nil))
// Update the md5sum if not set with the newly calculated one.
if len(metadata["md5Sum"]) == 0 {
metadata["md5Sum"] = newMD5Hex
if len(metadata["etag"]) == 0 {
metadata["etag"] = newMD5Hex
}
// md5Hex representation.
md5Hex := metadata["md5Sum"]
md5Hex := metadata["etag"]
if md5Hex != "" {
if newMD5Hex != md5Hex {
// Returns md5 mismatch.
@ -849,8 +849,12 @@ func (fs fsObjects) getObjectETag(bucket, entry string) (string, error) {
}
}
fsMetaMap := parseFSMetaMap(fsMetaBuf)
return fsMetaMap["md5Sum"], nil
// Check if FS metadata is valid, if not return error.
if !isFSMetaValid(parseFSVersion(fsMetaBuf), parseFSFormat(fsMetaBuf)) {
return "", toObjectErr(traceError(errCorruptedFormat), bucket, entry)
}
return extractETag(parseFSMetaMap(fsMetaBuf)), nil
}
// ListObjects - list all objects at prefix upto maxKeys., optionally delimited by '/'. Maintains the list pool
@ -901,8 +905,8 @@ func (fs fsObjects) ListObjects(bucket, prefix, marker, delimiter string, maxKey
// Protect reading `fs.json`.
objectLock := globalNSMutex.NewNSLock(bucket, entry)
objectLock.RLock()
var md5Sum string
md5Sum, err = fs.getObjectETag(bucket, entry)
var etag string
etag, err = fs.getObjectETag(bucket, entry)
objectLock.RUnlock()
if err != nil {
return ObjectInfo{}, err
@ -922,7 +926,7 @@ func (fs fsObjects) ListObjects(bucket, prefix, marker, delimiter string, maxKey
Size: fi.Size(),
ModTime: fi.ModTime(),
IsDir: fi.IsDir(),
MD5Sum: md5Sum,
ETag: etag,
}, nil
}

View file

@ -244,7 +244,7 @@ func TestFSMigrateObjectWithObjects(t *testing.T) {
fsPath1 := pathJoin(bucketMetaPrefix, "testvolume1", "my-object1", fsMetaJSONFile)
fsPath1 = pathJoin(disk, minioMetaBucket, fsPath1)
fsMetaJSON := `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"md5Sum":"467886be95c8ecfd71a2900e3f461b4f"}`
fsMetaJSON := `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"etag":"467886be95c8ecfd71a2900e3f461b4f"}`
if _, err = fsCreateFile(fsPath1, bytes.NewReader([]byte(fsMetaJSON)), nil, 0); err != nil {
t.Fatal(err)
}
@ -253,7 +253,7 @@ func TestFSMigrateObjectWithObjects(t *testing.T) {
fsPath2 := pathJoin(bucketMetaPrefix, "testvolume2", "my-object2", fsMetaJSONFile)
fsPath2 = pathJoin(disk, minioMetaBucket, fsPath2)
fsMetaJSON = `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"md5Sum":"467886be95c8ecfd71a2900eff461b4d"}`
fsMetaJSON = `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"etag":"467886be95c8ecfd71a2900eff461b4d"}`
if _, err = fsCreateFile(fsPath2, bytes.NewReader([]byte(fsMetaJSON)), nil, 0); err != nil {
t.Fatal(err)
}

View file

@ -127,7 +127,7 @@ func (a AzureObjects) AnonGetObjectInfo(bucket, object string) (objInfo ObjectIn
objInfo.UserDefined["Content-Encoding"] = resp.Header.Get("Content-Encoding")
}
objInfo.UserDefined["Content-Type"] = resp.Header.Get("Content-Type")
objInfo.MD5Sum = resp.Header.Get("Etag")
objInfo.ETag = resp.Header.Get("Etag")
objInfo.ModTime = t
objInfo.Name = object
objInfo.Size = contentLength
@ -182,7 +182,7 @@ func (a AzureObjects) AnonListObjects(bucket, prefix, marker, delimiter string,
Name: object.Name,
ModTime: t,
Size: object.Properties.ContentLength,
MD5Sum: object.Properties.Etag,
ETag: object.Properties.Etag,
ContentType: object.Properties.ContentType,
ContentEncoding: object.Properties.ContentEncoding,
})

View file

@ -235,7 +235,7 @@ func (a AzureObjects) ListObjects(bucket, prefix, marker, delimiter string, maxK
Name: object.Name,
ModTime: t,
Size: object.Properties.ContentLength,
MD5Sum: canonicalizeETag(object.Properties.Etag),
ETag: canonicalizeETag(object.Properties.Etag),
ContentType: object.Properties.ContentType,
ContentEncoding: object.Properties.ContentEncoding,
})
@ -285,7 +285,7 @@ func (a AzureObjects) GetObjectInfo(bucket, object string) (objInfo ObjectInfo,
objInfo = ObjectInfo{
Bucket: bucket,
UserDefined: make(map[string]string),
MD5Sum: canonicalizeETag(prop.Etag),
ETag: canonicalizeETag(prop.Etag),
ModTime: t,
Name: object,
Size: prop.ContentLength,
@ -319,7 +319,7 @@ func (a AzureObjects) PutObject(bucket, object string, size int64, data io.Reade
teeReader = io.TeeReader(data, sha256Writer)
}
delete(metadata, "md5Sum")
delete(metadata, "etag")
err = a.client.CreateBlockBlobFromReader(bucket, object, uint64(size), teeReader, canonicalMetadata(metadata))
if err != nil {

View file

@ -234,7 +234,7 @@ func (api gatewayAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Re
}
// Make sure we hex encode md5sum here.
metadata["md5Sum"] = hex.EncodeToString(md5Bytes)
metadata["etag"] = hex.EncodeToString(md5Bytes)
sha256sum := ""
@ -282,7 +282,7 @@ func (api gatewayAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Re
return
}
w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
writeSuccessResponseHeadersOnly(w)
}

View file

@ -36,13 +36,13 @@ func (l *s3Gateway) AnonPutObject(bucket string, object string, size int64, data
}
var md5sumBytes []byte
md5sum := metadata["md5Sum"]
md5sum := metadata["etag"]
if md5sum != "" {
md5sumBytes, err = hex.DecodeString(md5sum)
if err != nil {
return ObjectInfo{}, s3ToObjectError(traceError(err), bucket, object)
}
delete(metadata, "md5Sum")
delete(metadata, "etag")
}
oi, err := l.anonClient.PutObject(bucket, object, size, data, md5sumBytes, sha256sumBytes, toMinioClientMetadata(metadata))

View file

@ -295,7 +295,7 @@ func fromMinioClientObjectInfo(bucket string, oi minio.ObjectInfo) ObjectInfo {
Name: oi.Key,
ModTime: oi.LastModified,
Size: oi.Size,
MD5Sum: oi.ETag,
ETag: oi.ETag,
UserDefined: userDefined,
ContentType: oi.ContentType,
ContentEncoding: oi.Metadata.Get("Content-Encoding"),
@ -326,13 +326,13 @@ func (l *s3Gateway) PutObject(bucket string, object string, size int64, data io.
}
var md5sumBytes []byte
md5sum := metadata["md5Sum"]
md5sum := metadata["etag"]
if md5sum != "" {
md5sumBytes, err = hex.DecodeString(md5sum)
if err != nil {
return ObjectInfo{}, s3ToObjectError(traceError(err), bucket, object)
}
delete(metadata, "md5Sum")
delete(metadata, "etag")
}
oi, err := l.Client.PutObject(bucket, object, size, data, md5sumBytes, sha256sumBytes, toMinioClientMetadata(metadata))

View file

@ -134,7 +134,7 @@ func getRedirectPostRawQuery(objInfo ObjectInfo) string {
redirectValues := make(url.Values)
redirectValues.Set("bucket", objInfo.Bucket)
redirectValues.Set("key", objInfo.Name)
redirectValues.Set("etag", "\""+objInfo.MD5Sum+"\"")
redirectValues.Set("etag", "\""+objInfo.ETag+"\"")
return redirectValues.Encode()
}

View file

@ -35,8 +35,8 @@ const (
// Objects meta prefix.
objectMetaPrefix = "objects"
// Md5Sum of empty string.
emptyStrMd5Sum = "d41d8cd98f00b204e9800998ecf8427e"
// ETag (hex encoded md5sum) of empty string.
emptyETag = "d41d8cd98f00b204e9800998ecf8427e"
)
// Global object layer mutex, used for safely updating object layer.
@ -68,10 +68,10 @@ func dirObjectInfo(bucket, object string, size int64, metadata map[string]string
// This is a special case with size as '0' and object ends with
// a slash separator, we treat it like a valid operation and
// return success.
md5Sum := metadata["md5Sum"]
delete(metadata, "md5Sum")
if md5Sum == "" {
md5Sum = emptyStrMd5Sum
etag := metadata["etag"]
delete(metadata, "etag")
if etag == "" {
etag = emptyETag
}
return ObjectInfo{
@ -81,7 +81,7 @@ func dirObjectInfo(bucket, object string, size int64, metadata map[string]string
ContentType: "application/octet-stream",
IsDir: true,
Size: size,
MD5Sum: md5Sum,
ETag: etag,
UserDefined: metadata,
}
}

View file

@ -101,8 +101,8 @@ type ObjectInfo struct {
// IsDir indicates if the object is prefix.
IsDir bool
// Hex encoded md5 checksum of the object.
MD5Sum string
// Hex encoded unique entity tag of the object.
ETag string
// A standard MIME type describing the format of the object.
ContentType string

View file

@ -549,7 +549,7 @@ func testListObjects(obj ObjectLayer, instanceType string, t TestErrHandler) {
if testCase.result.Objects[j].Name != result.Objects[j].Name {
t.Errorf("Test %d: %s: Expected object name to be \"%s\", but found \"%s\" instead", i+1, instanceType, testCase.result.Objects[j].Name, result.Objects[j].Name)
}
if result.Objects[j].MD5Sum == "" {
if result.Objects[j].ETag == "" {
t.Errorf("Test %d: %s: Expected md5sum to be not empty, but found empty instead", i+1, instanceType)
}

View file

@ -1930,7 +1930,7 @@ func testObjectCompleteMultipartUpload(obj ObjectLayer, instanceType string, t T
if actualErr == nil && testCase.shouldPass {
// Asserting IsTruncated.
if actualResult.MD5Sum != testCase.expectedS3MD5 {
if actualResult.ETag != testCase.expectedS3MD5 {
t.Errorf("Test %d: %s: Expected the result to be \"%v\", but found it to \"%v\"", i+1, instanceType, testCase.expectedS3MD5, actualResult)
}
}

View file

@ -29,7 +29,7 @@ import (
)
func md5Header(data []byte) map[string]string {
return map[string]string{"md5Sum": getMD5Hash([]byte(data))}
return map[string]string{"etag": getMD5Hash([]byte(data))}
}
// Wrapper for calling PutObject tests for both XL multiple disks and single node setup.
@ -94,29 +94,29 @@ func testObjectAPIPutObject(obj ObjectLayer, instanceType string, t TestErrHandl
// Test case - 7.
// Input to replicate Md5 mismatch.
{bucket, object, []byte(""), map[string]string{"md5Sum": "a35"}, "", 0, "",
{bucket, object, []byte(""), map[string]string{"etag": "a35"}, "", 0, "",
BadDigest{ExpectedMD5: "a35", CalculatedMD5: "d41d8cd98f00b204e9800998ecf8427e"}},
// Test case - 8.
// With incorrect sha256.
{bucket, object, []byte("abcd"), map[string]string{"md5Sum": "e2fc714c4727ee9395f324cd2e7f331f"}, "incorrect-sha256", int64(len("abcd")), "", SHA256Mismatch{}},
{bucket, object, []byte("abcd"), map[string]string{"etag": "e2fc714c4727ee9395f324cd2e7f331f"}, "incorrect-sha256", int64(len("abcd")), "", SHA256Mismatch{}},
// Test case - 9.
// Input with size more than the size of actual data inside the reader.
{bucket, object, []byte("abcd"), map[string]string{"md5Sum": "a35"}, "", int64(len("abcd") + 1), "",
{bucket, object, []byte("abcd"), map[string]string{"etag": "a35"}, "", int64(len("abcd") + 1), "",
IncompleteBody{}},
// Test case - 10.
// Input with size less than the size of actual data inside the reader.
{bucket, object, []byte("abcd"), map[string]string{"md5Sum": "a35"}, "", int64(len("abcd") - 1), "",
{bucket, object, []byte("abcd"), map[string]string{"etag": "a35"}, "", int64(len("abcd") - 1), "",
BadDigest{ExpectedMD5: "a35", CalculatedMD5: "900150983cd24fb0d6963f7d28e17f72"}},
// Test case - 11-14.
// Validating for success cases.
{bucket, object, []byte("abcd"), map[string]string{"md5Sum": "e2fc714c4727ee9395f324cd2e7f331f"}, "", int64(len("abcd")), "", nil},
{bucket, object, []byte("efgh"), map[string]string{"md5Sum": "1f7690ebdd9b4caf8fab49ca1757bf27"}, "", int64(len("efgh")), "", nil},
{bucket, object, []byte("ijkl"), map[string]string{"md5Sum": "09a0877d04abf8759f99adec02baf579"}, "", int64(len("ijkl")), "", nil},
{bucket, object, []byte("mnop"), map[string]string{"md5Sum": "e132e96a5ddad6da8b07bba6f6131fef"}, "", int64(len("mnop")), "", nil},
{bucket, object, []byte("abcd"), map[string]string{"etag": "e2fc714c4727ee9395f324cd2e7f331f"}, "", int64(len("abcd")), "", nil},
{bucket, object, []byte("efgh"), map[string]string{"etag": "1f7690ebdd9b4caf8fab49ca1757bf27"}, "", int64(len("efgh")), "", nil},
{bucket, object, []byte("ijkl"), map[string]string{"etag": "09a0877d04abf8759f99adec02baf579"}, "", int64(len("ijkl")), "", nil},
{bucket, object, []byte("mnop"), map[string]string{"etag": "e132e96a5ddad6da8b07bba6f6131fef"}, "", int64(len("mnop")), "", nil},
// Test case 15-17.
// With no metadata
@ -169,8 +169,8 @@ func testObjectAPIPutObject(obj ObjectLayer, instanceType string, t TestErrHandl
// Test passes as expected, but the output values are verified for correctness here.
if actualErr == nil {
// Asserting whether the md5 output is correct.
if expectedMD5, ok := testCase.inputMeta["md5Sum"]; ok && expectedMD5 != objInfo.MD5Sum {
t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.MD5Sum)
if expectedMD5, ok := testCase.inputMeta["etag"]; ok && expectedMD5 != objInfo.ETag {
t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.ETag)
}
}
}
@ -220,10 +220,10 @@ func testObjectAPIPutObjectDiskNotFound(obj ObjectLayer, instanceType string, di
expectedError error
}{
// Validating for success cases.
{bucket, object, []byte("abcd"), map[string]string{"md5Sum": "e2fc714c4727ee9395f324cd2e7f331f"}, int64(len("abcd")), true, "", nil},
{bucket, object, []byte("efgh"), map[string]string{"md5Sum": "1f7690ebdd9b4caf8fab49ca1757bf27"}, int64(len("efgh")), true, "", nil},
{bucket, object, []byte("ijkl"), map[string]string{"md5Sum": "09a0877d04abf8759f99adec02baf579"}, int64(len("ijkl")), true, "", nil},
{bucket, object, []byte("mnop"), map[string]string{"md5Sum": "e132e96a5ddad6da8b07bba6f6131fef"}, int64(len("mnop")), true, "", nil},
{bucket, object, []byte("abcd"), map[string]string{"etag": "e2fc714c4727ee9395f324cd2e7f331f"}, int64(len("abcd")), true, "", nil},
{bucket, object, []byte("efgh"), map[string]string{"etag": "1f7690ebdd9b4caf8fab49ca1757bf27"}, int64(len("efgh")), true, "", nil},
{bucket, object, []byte("ijkl"), map[string]string{"etag": "09a0877d04abf8759f99adec02baf579"}, int64(len("ijkl")), true, "", nil},
{bucket, object, []byte("mnop"), map[string]string{"etag": "e132e96a5ddad6da8b07bba6f6131fef"}, int64(len("mnop")), true, "", nil},
}
sha256sum := ""
@ -246,8 +246,8 @@ func testObjectAPIPutObjectDiskNotFound(obj ObjectLayer, instanceType string, di
// Test passes as expected, but the output values are verified for correctness here.
if actualErr == nil && testCase.shouldPass {
// Asserting whether the md5 output is correct.
if testCase.inputMeta["md5Sum"] != objInfo.MD5Sum {
t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.MD5Sum)
if testCase.inputMeta["etag"] != objInfo.ETag {
t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.ETag)
}
}
}
@ -271,7 +271,7 @@ func testObjectAPIPutObjectDiskNotFound(obj ObjectLayer, instanceType string, di
bucket,
object,
[]byte("mnop"),
map[string]string{"md5Sum": "e132e96a5ddad6da8b07bba6f6131fef"},
map[string]string{"etag": "e132e96a5ddad6da8b07bba6f6131fef"},
int64(len("mnop")),
false,
"",

View file

@ -187,6 +187,35 @@ func getCompleteMultipartMD5(parts []completePart) (string, error) {
return s3MD5, nil
}
// Clean meta etag keys 'md5Sum', 'etag'.
func cleanMetaETag(metadata map[string]string) map[string]string {
return cleanMetadata(metadata, "md5Sum", "etag")
}
// Clean metadata takes keys to be filtered
// and returns a new map with the keys filtered.
func cleanMetadata(metadata map[string]string, keyNames ...string) map[string]string {
var newMeta = make(map[string]string)
for k, v := range metadata {
if contains(keyNames, k) {
continue
}
newMeta[k] = v
}
return newMeta
}
// Extracts etag value from the metadata.
func extractETag(metadata map[string]string) string {
// md5Sum tag is kept for backward compatibility.
etag, ok := metadata["md5Sum"]
if !ok {
etag = metadata["etag"]
}
// Success.
return etag
}
// Prefix matcher string matches prefix in a platform specific way.
// For example on windows since its case insensitive we are supposed
// to do case insensitive checks.
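
As a quick, self-contained illustration of how the two helpers in the hunk above behave: extractETag recovers the value from either key, and cleanMetaETag strips both keys so they cannot leak back out as X-Minio-*/X-Amz-* response headers. The function bodies below are copied from this hunk, except that the contains helper is replaced by an inline check so the sketch compiles on its own; the sample metadata values are invented.

package main

import "fmt"

// extractETag prefers the legacy 'md5Sum' key and falls back to 'etag'.
func extractETag(metadata map[string]string) string {
	etag, ok := metadata["md5Sum"]
	if !ok {
		etag = metadata["etag"]
	}
	return etag
}

// cleanMetaETag drops both etag keys from the user-defined metadata.
func cleanMetaETag(metadata map[string]string) map[string]string {
	newMeta := make(map[string]string)
	for k, v := range metadata {
		if k == "md5Sum" || k == "etag" {
			continue
		}
		newMeta[k] = v
	}
	return newMeta
}

func main() {
	oldMeta := map[string]string{ // object written before this PR
		"md5Sum":       "467886be95c8ecfd71a2900e3f461b4f",
		"content-type": "text/plain",
	}
	newMeta := map[string]string{ // object written after this PR
		"etag":         "467886be95c8ecfd71a2900e3f461b4f",
		"content-type": "text/plain",
	}
	fmt.Println(extractETag(oldMeta))   // legacy key still honored
	fmt.Println(extractETag(newMeta))   // new key used going forward
	fmt.Println(cleanMetaETag(oldMeta)) // map[content-type:text/plain]
}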

View file

@ -59,8 +59,8 @@ func checkCopyObjectPreconditions(w http.ResponseWriter, r *http.Request, objInf
// set object-related metadata headers
w.Header().Set("Last-Modified", objInfo.ModTime.UTC().Format(http.TimeFormat))
if objInfo.MD5Sum != "" {
w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
if objInfo.ETag != "" {
w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
}
}
// x-amz-copy-source-if-modified-since: Return the object only if it has been modified
@ -95,7 +95,7 @@ func checkCopyObjectPreconditions(w http.ResponseWriter, r *http.Request, objInf
// same as the one specified; otherwise return a 412 (precondition failed).
ifMatchETagHeader := r.Header.Get("x-amz-copy-source-if-match")
if ifMatchETagHeader != "" {
if objInfo.MD5Sum != "" && !isETagEqual(objInfo.MD5Sum, ifMatchETagHeader) {
if objInfo.ETag != "" && !isETagEqual(objInfo.ETag, ifMatchETagHeader) {
// If the object ETag does not match with the specified ETag.
writeHeaders()
writeErrorResponse(w, ErrPreconditionFailed, r.URL)
@ -107,7 +107,7 @@ func checkCopyObjectPreconditions(w http.ResponseWriter, r *http.Request, objInf
// one specified otherwise, return a 304 (not modified).
ifNoneMatchETagHeader := r.Header.Get("x-amz-copy-source-if-none-match")
if ifNoneMatchETagHeader != "" {
if objInfo.MD5Sum != "" && isETagEqual(objInfo.MD5Sum, ifNoneMatchETagHeader) {
if objInfo.ETag != "" && isETagEqual(objInfo.ETag, ifNoneMatchETagHeader) {
// If the object ETag matches with the specified ETag.
writeHeaders()
writeErrorResponse(w, ErrPreconditionFailed, r.URL)
@ -144,8 +144,8 @@ func checkPreconditions(w http.ResponseWriter, r *http.Request, objInfo ObjectIn
// set object-related metadata headers
w.Header().Set("Last-Modified", objInfo.ModTime.UTC().Format(http.TimeFormat))
if objInfo.MD5Sum != "" {
w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
if objInfo.ETag != "" {
w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
}
}
// If-Modified-Since : Return the object only if it has been modified since the specified time,
@ -180,7 +180,7 @@ func checkPreconditions(w http.ResponseWriter, r *http.Request, objInfo ObjectIn
// otherwise return a 412 (precondition failed).
ifMatchETagHeader := r.Header.Get("If-Match")
if ifMatchETagHeader != "" {
if !isETagEqual(objInfo.MD5Sum, ifMatchETagHeader) {
if !isETagEqual(objInfo.ETag, ifMatchETagHeader) {
// If the object ETag does not match with the specified ETag.
writeHeaders()
writeErrorResponse(w, ErrPreconditionFailed, r.URL)
@ -192,7 +192,7 @@ func checkPreconditions(w http.ResponseWriter, r *http.Request, objInfo ObjectIn
// one specified otherwise, return a 304 (not modified).
ifNoneMatchETagHeader := r.Header.Get("If-None-Match")
if ifNoneMatchETagHeader != "" {
if isETagEqual(objInfo.MD5Sum, ifNoneMatchETagHeader) {
if isETagEqual(objInfo.ETag, ifNoneMatchETagHeader) {
// If the object ETag matches with the specified ETag.
writeHeaders()
w.WriteHeader(http.StatusNotModified)

View file

@ -360,10 +360,8 @@ func (api objectAPIHandlers) CopyObjectHandler(w http.ResponseWriter, r *http.Re
defaultMeta := objInfo.UserDefined
// Make sure to remove saved md5sum, object might have been uploaded
// as multipart which doesn't have a standard md5sum, we just let
// CopyObject calculate a new one.
delete(defaultMeta, "md5Sum")
// Make sure to remove saved etag, CopyObject calculates a new one.
delete(defaultMeta, "etag")
newMetadata := getCpObjMetadataFromHeader(r.Header, defaultMeta)
// Check if x-amz-metadata-directive was not set to REPLACE and source,
@ -383,8 +381,7 @@ func (api objectAPIHandlers) CopyObjectHandler(w http.ResponseWriter, r *http.Re
return
}
md5Sum := objInfo.MD5Sum
response := generateCopyObjectResponse(md5Sum, objInfo.ModTime)
response := generateCopyObjectResponse(objInfo.ETag, objInfo.ModTime)
encodedSuccessResponse := encodeResponse(response)
// Write success response.
@ -482,7 +479,7 @@ func (api objectAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Req
}
// Make sure we hex encode md5sum here.
metadata["md5Sum"] = hex.EncodeToString(md5Bytes)
metadata["etag"] = hex.EncodeToString(md5Bytes)
sha256sum := ""
@ -540,7 +537,7 @@ func (api objectAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Req
writeErrorResponse(w, toAPIErrorCode(err), r.URL)
return
}
w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
writeSuccessResponseHeadersOnly(w)
// Get host and port from Request.RemoteAddr.
@ -965,7 +962,7 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
// Get object location.
location := getLocation(r)
// Generate complete multipart response.
response := generateCompleteMultpartUploadResponse(bucket, object, location, objInfo.MD5Sum)
response := generateCompleteMultpartUploadResponse(bucket, object, location, objInfo.ETag)
encodedSuccessResponse := encodeResponse(response)
if err != nil {
errorIf(err, "Unable to parse CompleteMultipartUpload response")
@ -974,7 +971,7 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
}
// Set etag.
w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
// Write success response.
writeSuccessResponseXML(w, encodedSuccessResponse)

View file

@ -104,14 +104,14 @@ func testMultipartObjectCreation(obj ObjectLayer, instanceType string, c TestErr
data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
completedParts := completeMultipartUpload{}
for i := 1; i <= 10; i++ {
expectedMD5Sumhex := getMD5Hash(data)
expectedETaghex := getMD5Hash(data)
var calcPartInfo PartInfo
calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(data)), bytes.NewBuffer(data), expectedMD5Sumhex, "")
calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(data)), bytes.NewBuffer(data), expectedETaghex, "")
if err != nil {
c.Errorf("%s: <ERROR> %s", instanceType, err)
}
if calcPartInfo.ETag != expectedMD5Sumhex {
if calcPartInfo.ETag != expectedETaghex {
c.Errorf("MD5 Mismatch")
}
completedParts.Parts = append(completedParts.Parts, completePart{
@ -123,7 +123,7 @@ func testMultipartObjectCreation(obj ObjectLayer, instanceType string, c TestErr
if err != nil {
c.Fatalf("%s: <ERROR> %s", instanceType, err)
}
if objInfo.MD5Sum != "7d364cb728ce42a74a96d22949beefb2-10" {
if objInfo.ETag != "7d364cb728ce42a74a96d22949beefb2-10" {
c.Errorf("Md5 mismtch")
}
}
@ -153,18 +153,18 @@ func testMultipartObjectAbort(obj ObjectLayer, instanceType string, c TestErrHan
randomString = randomString + strconv.Itoa(num)
}
expectedMD5Sumhex := getMD5Hash([]byte(randomString))
expectedETaghex := getMD5Hash([]byte(randomString))
metadata["md5"] = expectedMD5Sumhex
metadata["md5"] = expectedETaghex
var calcPartInfo PartInfo
calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(randomString)), bytes.NewBufferString(randomString), expectedMD5Sumhex, "")
calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(randomString)), bytes.NewBufferString(randomString), expectedETaghex, "")
if err != nil {
c.Fatalf("%s: <ERROR> %s", instanceType, err)
}
if calcPartInfo.ETag != expectedMD5Sumhex {
if calcPartInfo.ETag != expectedETaghex {
c.Errorf("Md5 Mismatch")
}
parts[i] = expectedMD5Sumhex
parts[i] = expectedETaghex
}
err = obj.AbortMultipartUpload("bucket", "key", uploadID)
if err != nil {
@ -191,18 +191,18 @@ func testMultipleObjectCreation(obj ObjectLayer, instanceType string, c TestErrH
randomString = randomString + strconv.Itoa(num)
}
expectedMD5Sumhex := getMD5Hash([]byte(randomString))
expectedETaghex := getMD5Hash([]byte(randomString))
key := "obj" + strconv.Itoa(i)
objects[key] = []byte(randomString)
metadata := make(map[string]string)
metadata["md5Sum"] = expectedMD5Sumhex
metadata["etag"] = expectedETaghex
var objInfo ObjectInfo
objInfo, err = obj.PutObject("bucket", key, int64(len(randomString)), bytes.NewBufferString(randomString), metadata, "")
if err != nil {
c.Fatalf("%s: <ERROR> %s", instanceType, err)
}
if objInfo.MD5Sum != expectedMD5Sumhex {
if objInfo.ETag != expectedETaghex {
c.Errorf("Md5 Mismatch")
}
}

View file

@ -1151,7 +1151,7 @@ func (s *TestSuiteCommon) TestPutObject(c *C) {
c.Assert(err, IsNil)
c.Assert(response.StatusCode, Equals, http.StatusOK)
// The response Etag header should contain Md5sum of an empty string.
c.Assert(response.Header.Get("Etag"), Equals, "\""+emptyStrMd5Sum+"\"")
c.Assert(response.Header.Get("Etag"), Equals, "\""+emptyETag+"\"")
}
// TestListBuckets - Make request for listing of all buckets.
@ -1841,7 +1841,7 @@ func (s *TestSuiteCommon) TestGetObjectLarge11MiB(c *C) {
getContent, err := ioutil.ReadAll(response.Body)
c.Assert(err, IsNil)
// Get md5Sum of the response content.
// Get etag of the response content.
getMD5 := getMD5Hash(getContent)
// Compare putContent and getContent.
@ -2505,8 +2505,8 @@ func (s *TestSuiteCommon) TestObjectValidMD5(c *C) {
// Create a byte array of 5MB.
// content for the object to be uploaded.
data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
// calculate md5Sum of the data.
md5SumBase64 := getMD5HashBase64(data)
// calculate etag of the data.
etagBase64 := getMD5HashBase64(data)
buffer1 := bytes.NewReader(data)
objectName := "test-1-object"
@ -2515,7 +2515,7 @@ func (s *TestSuiteCommon) TestObjectValidMD5(c *C) {
int64(buffer1.Len()), buffer1, s.accessKey, s.secretKey, s.signer)
c.Assert(err, IsNil)
// set the Content-Md5 to be the hash to content.
request.Header.Set("Content-Md5", md5SumBase64)
request.Header.Set("Content-Md5", etagBase64)
client = http.Client{Transport: s.transport}
response, err = client.Do(request)
c.Assert(err, IsNil)
@ -2578,14 +2578,14 @@ func (s *TestSuiteCommon) TestObjectMultipart(c *C) {
// content for the part to be uploaded.
// Create a byte array of 5MB.
data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
// calculate md5Sum of the data.
// calculate etag of the data.
md5SumBase64 := getMD5HashBase64(data)
buffer1 := bytes.NewReader(data)
// HTTP request for the part to be uploaded.
request, err = newTestSignedRequest("PUT", getPartUploadURL(s.endPoint, bucketName, objectName, uploadID, "1"),
int64(buffer1.Len()), buffer1, s.accessKey, s.secretKey, s.signer)
// set the Content-Md5 header to the base64 encoding the md5Sum of the content.
// set the Content-Md5 header to the base64 encoding the etag of the content.
request.Header.Set("Content-Md5", md5SumBase64)
c.Assert(err, IsNil)
@ -2599,14 +2599,14 @@ func (s *TestSuiteCommon) TestObjectMultipart(c *C) {
// Create a byte array of 1 byte.
data = []byte("0")
// calculate md5Sum of the data.
// calculate etag of the data.
md5SumBase64 = getMD5HashBase64(data)
buffer2 := bytes.NewReader(data)
// HTTP request for the second part to be uploaded.
request, err = newTestSignedRequest("PUT", getPartUploadURL(s.endPoint, bucketName, objectName, uploadID, "2"),
int64(buffer2.Len()), buffer2, s.accessKey, s.secretKey, s.signer)
// set the Content-Md5 header to the base64 encoding the md5Sum of the content.
// set the Content-Md5 header to the base64 encoding the etag of the content.
request.Header.Set("Content-Md5", md5SumBase64)
c.Assert(err, IsNil)

View file

@ -370,7 +370,7 @@ func testListObjectsWebHandler(obj ObjectLayer, instanceType string, t TestErrHa
data := bytes.Repeat([]byte("a"), objectSize)
_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
if err != nil {
t.Fatalf("Was not able to upload an object, %v", err)
@ -465,14 +465,14 @@ func testRemoveObjectWebHandler(obj ObjectLayer, instanceType string, t TestErrH
data := bytes.Repeat([]byte("a"), objectSize)
_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data),
map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
if err != nil {
t.Fatalf("Was not able to upload an object, %v", err)
}
objectName = "a/object"
_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data),
map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
if err != nil {
t.Fatalf("Was not able to upload an object, %v", err)
}
@ -788,7 +788,7 @@ func testDownloadWebHandler(obj ObjectLayer, instanceType string, t TestErrHandl
}
content := []byte("temporary file's content")
_, err = obj.PutObject(bucketName, objectName, int64(len(content)), bytes.NewReader(content), map[string]string{"md5Sum": "01ce59706106fe5e02e7f55fffda7f34"}, "")
_, err = obj.PutObject(bucketName, objectName, int64(len(content)), bytes.NewReader(content), map[string]string{"etag": "01ce59706106fe5e02e7f55fffda7f34"}, "")
if err != nil {
t.Fatalf("Was not able to upload an object, %v", err)
}
@ -940,7 +940,7 @@ func testWebPresignedGetHandler(obj ObjectLayer, instanceType string, t TestErrH
}
data := bytes.Repeat([]byte("a"), objectSize)
_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
if err != nil {
t.Fatalf("Was not able to upload an object, %v", err)
}

View file

@ -146,7 +146,10 @@ type xlMetaV1 struct {
// XL metadata constants.
const (
// XL meta version.
xlMetaVersion = "1.0.0"
xlMetaVersion = "1.0.1"
// XL meta version.
xlMetaVersion100 = "1.0.0"
// XL meta format string.
xlMetaFormat = "xl"
@ -173,7 +176,38 @@ func newXLMetaV1(object string, dataBlocks, parityBlocks int) (xlMeta xlMetaV1)
// IsValid - tells if the format is sane by validating the version
// string and format style.
func (m xlMetaV1) IsValid() bool {
return m.Version == xlMetaVersion && m.Format == xlMetaFormat
return isXLMetaValid(m.Version, m.Format)
}
// Verifies if the backend format metadata is sane by validating
// the version string and format style.
func isXLMetaValid(version, format string) bool {
return ((version == xlMetaVersion || version == xlMetaVersion100) &&
format == xlMetaFormat)
}
// Converts metadata to object info.
func (m xlMetaV1) ToObjectInfo(bucket, object string) ObjectInfo {
objInfo := ObjectInfo{
IsDir: false,
Bucket: bucket,
Name: object,
Size: m.Stat.Size,
ModTime: m.Stat.ModTime,
ContentType: m.Meta["content-type"],
ContentEncoding: m.Meta["content-encoding"],
}
// Extract etag from metadata.
objInfo.ETag = extractETag(m.Meta)
// etag/md5Sum has already been extracted. We need to
// remove to avoid it from appearing as part of
// response headers. e.g, X-Minio-* or X-Amz-*.
objInfo.UserDefined = cleanMetaETag(m.Meta)
// Success.
return objInfo
}
// objectPartIndex - returns the index of matching object part number.

View file

@ -973,7 +973,7 @@ func (xl xlObjects) CompleteMultipartUpload(bucket string, object string, upload
xlMeta.Stat.ModTime = UTCNow()
// Save successfully calculated md5sum.
xlMeta.Meta["md5Sum"] = s3MD5
xlMeta.Meta["etag"] = s3MD5
uploadIDPath = path.Join(bucket, object, uploadID)
tempUploadIDPath := uploadID
@ -1061,7 +1061,7 @@ func (xl xlObjects) CompleteMultipartUpload(bucket string, object string, upload
Name: object,
Size: xlMeta.Stat.Size,
ModTime: xlMeta.Stat.ModTime,
MD5Sum: xlMeta.Meta["md5Sum"],
ETag: xlMeta.Meta["etag"],
ContentType: xlMeta.Meta["content-type"],
ContentEncoding: xlMeta.Meta["content-encoding"],
UserDefined: xlMeta.Meta,

View file

@ -101,23 +101,7 @@ func (xl xlObjects) CopyObject(srcBucket, srcObject, dstBucket, dstObject string
if err = renameXLMetadata(onlineDisks, minioMetaTmpBucket, tempObj, srcBucket, srcObject, xl.writeQuorum); err != nil {
return ObjectInfo{}, toObjectErr(err, srcBucket, srcObject)
}
objInfo := ObjectInfo{
IsDir: false,
Bucket: srcBucket,
Name: srcObject,
Size: xlMeta.Stat.Size,
ModTime: xlMeta.Stat.ModTime,
MD5Sum: xlMeta.Meta["md5Sum"],
ContentType: xlMeta.Meta["content-type"],
ContentEncoding: xlMeta.Meta["content-encoding"],
}
// md5Sum has already been extracted into objInfo.MD5Sum. We
// need to remove it from xlMetaMap to avoid it from appearing as
// part of response headers. e.g, X-Minio-* or X-Amz-*.
delete(xlMeta.Meta, "md5Sum")
objInfo.UserDefined = xlMeta.Meta
return objInfo, nil
return xlMeta.ToObjectInfo(srcBucket, srcObject), nil
}
// Initialize pipe.
@ -333,10 +317,9 @@ func (xl xlObjects) GetObjectInfo(bucket, object string) (ObjectInfo, error) {
// getObjectInfo - wrapper for reading object metadata and constructs ObjectInfo.
func (xl xlObjects) getObjectInfo(bucket, object string) (objInfo ObjectInfo, err error) {
// returns xl meta map and stat info.
// Extracts xlStat and xlMetaMap.
xlStat, xlMetaMap, err := xl.readXLMetaStat(bucket, object)
if err != nil {
// Return error.
return ObjectInfo{}, err
}
@ -346,17 +329,19 @@ func (xl xlObjects) getObjectInfo(bucket, object string) (objInfo ObjectInfo, er
Name: object,
Size: xlStat.Size,
ModTime: xlStat.ModTime,
MD5Sum: xlMetaMap["md5Sum"],
ContentType: xlMetaMap["content-type"],
ContentEncoding: xlMetaMap["content-encoding"],
}
// md5Sum has already been extracted into objInfo.MD5Sum. We
// need to remove it from xlMetaMap to avoid it from appearing as
// part of response headers. e.g, X-Minio-* or X-Amz-*.
// Extract etag.
objInfo.ETag = extractETag(xlMetaMap)
delete(xlMetaMap, "md5Sum")
objInfo.UserDefined = xlMetaMap
// etag/md5Sum has already been extracted. We need to
// remove to avoid it from appearing as part of
// response headers. e.g, X-Minio-* or X-Amz-*.
objInfo.UserDefined = cleanMetaETag(xlMetaMap)
// Success.
return objInfo, nil
}
@ -650,8 +635,8 @@ func (xl xlObjects) PutObject(bucket string, object string, size int64, data io.
newMD5Hex := hex.EncodeToString(md5Writer.Sum(nil))
// Update the md5sum if not set with the newly calculated one.
if len(metadata["md5Sum"]) == 0 {
metadata["md5Sum"] = newMD5Hex
if len(metadata["etag"]) == 0 {
metadata["etag"] = newMD5Hex
}
// Guess content-type from the extension if possible.
@ -664,7 +649,7 @@ func (xl xlObjects) PutObject(bucket string, object string, size int64, data io.
}
// md5Hex representation.
md5Hex := metadata["md5Sum"]
md5Hex := metadata["etag"]
if md5Hex != "" {
if newMD5Hex != md5Hex {
// Returns md5 mismatch.
@ -730,7 +715,7 @@ func (xl xlObjects) PutObject(bucket string, object string, size int64, data io.
Name: object,
Size: xlMeta.Stat.Size,
ModTime: xlMeta.Stat.ModTime,
MD5Sum: xlMeta.Meta["md5Sum"],
ETag: xlMeta.Meta["etag"],
ContentType: xlMeta.Meta["content-type"],
ContentEncoding: xlMeta.Meta["content-encoding"],
UserDefined: xlMeta.Meta,

View file

@ -253,6 +253,19 @@ func readXLMetaStat(disk StorageAPI, bucket string, object string) (statInfo, ma
if err != nil {
return statInfo{}, nil, traceError(err)
}
// obtain version.
xlVersion := parseXLVersion(xlMetaBuf)
// obtain format.
xlFormat := parseXLFormat(xlMetaBuf)
// Validate if the xl.json we read is sane, return corrupted format.
if !isXLMetaValid(xlVersion, xlFormat) {
// For version mismatchs and unrecognized format, return corrupted format.
return statInfo{}, nil, traceError(errCorruptedFormat)
}
// obtain xlMetaV1{}.Meta using `github.com/tidwall/gjson`.
xlMetaMap := parseXLMetaMap(xlMetaBuf)
@ -261,6 +274,7 @@ func readXLMetaStat(disk StorageAPI, bucket string, object string) (statInfo, ma
if err != nil {
return statInfo{}, nil, traceError(err)
}
// Return structured `xl.json`.
return xlStat, xlMetaMap, nil
}

View file

@ -5,6 +5,7 @@
"release": "DEVELOPMENT.GOGET"
},
"meta": {
"etag": "97586a5290d4f5a41328062d6a7da593-3",
"content-type": "binary/octet-stream",
"content-encoding": "gzip"
},

View file

@ -14,7 +14,7 @@
}
],
"meta": {
"md5Sum": "97586a5290d4f5a41328062d6a7da593-3",
"etag": "97586a5290d4f5a41328062d6a7da593-3",
"content-type": "application\/octet-stream",
"content-encoding": "gzip"
},