Diffstat (limited to 'backup')
-rw-r--r--  backup/backup_test.go    17
-rw-r--r--  backup/common_test.go    18
-rw-r--r--  backup/restore_test.go   18

3 files changed, 41 insertions, 12 deletions
diff --git a/backup/backup_test.go b/backup/backup_test.go
index 714d814..67fbb6a 100644
--- a/backup/backup_test.go
+++ b/backup/backup_test.go
@@ -29,25 +29,18 @@ func wantObject(
 func TestWriteLargeFile(t *testing.T) {
 	s := storage.NewMemoryStorage()
-	id, err := WriteFile(s, bytes.NewReader(make([]byte, 2*BlobChunkSize+100)))
+	id, err := WriteFile(s, bytes.NewReader(content_largefile))
 	if err != nil {
 		t.Fatalf("Unexpected error when writing file: %s", err)
 	}
-	if id.String() != "sha3-256:ab7907ee6b45b343422a0354de500bcf99f5ff69fe8125be84e43d421803c34e" {
+	if !id.Equals(objid_largefile) {
 		t.Errorf("Unexpected file id: %s", id)
 	}
-	want_large_blob := append([]byte("blob 16777216\n"), make([]byte, BlobChunkSize)...)
-	want_small_blob := append([]byte("blob 100\n"), make([]byte, 100)...)
-	want_file := []byte("file 274\n" +
-		"blob=sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3&size=16777216\n" +
-		"blob=sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3&size=16777216\n" +
-		"blob=sha3-256:ddf124464f7b80e95f4a9c704f79e7037ff5d731648ba6b40c769893b428128c&size=100\n")
-
-	wantObject(t, s, objects.MustParseObjectId("sha3-256:ab7907ee6b45b343422a0354de500bcf99f5ff69fe8125be84e43d421803c34e"), want_file)
-	wantObject(t, s, objects.MustParseObjectId("sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3"), want_large_blob)
-	wantObject(t, s, objects.MustParseObjectId("sha3-256:ddf124464f7b80e95f4a9c704f79e7037ff5d731648ba6b40c769893b428128c"), want_small_blob)
+	wantObject(t, s, objid_largefile, obj_largefile)
+	wantObject(t, s, objid_largefile_blob0, obj_largefile_blob0)
+	wantObject(t, s, objid_largefile_blob1, obj_largefile_blob1)
 }
 
 func mkfile(t *testing.T, d fs.Dir, name string, exec bool, content []byte) {
diff --git a/backup/common_test.go b/backup/common_test.go
index f06d737..a438c47 100644
--- a/backup/common_test.go
+++ b/backup/common_test.go
@@ -4,6 +4,7 @@ import (
 	"code.laria.me/petrific/objects"
 )
 
+// Test tree
 var (
 	objid_emptyfile = objects.MustParseObjectId("sha3-256:4a10682307d5b5dc072d1b862497296640176109347b149aad38cd640000491b")
 	obj_emptyfile   = []byte("file 0\n")
@@ -28,3 +29,20 @@ var (
 		"acl=u::rwx,g::r-x,o::r-x&name=bar&ref=sha3-256:4a10682307d5b5dc072d1b862497296640176109347b149aad38cd640000491b&type=file\n" +
 		"acl=u::rwx,g::r-x,o::r-x&name=sub&ref=sha3-256:f1716a1b0cad23b6faab9712243402b8f8e7919c377fc5d5d87bd465cef056d7&type=dir\n")
 )
+
+// Large file
+var (
+	content_largefile = make([]byte, 2*BlobChunkSize+100)
+
+	objid_largefile_blob0 = objects.MustParseObjectId("sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3")
+	obj_largefile_blob0   = append([]byte("blob 16777216\n"), make([]byte, BlobChunkSize)...)
+
+	objid_largefile_blob1 = objects.MustParseObjectId("sha3-256:ddf124464f7b80e95f4a9c704f79e7037ff5d731648ba6b40c769893b428128c")
+	obj_largefile_blob1   = append([]byte("blob 100\n"), make([]byte, 100)...)
+
+	objid_largefile = objects.MustParseObjectId("sha3-256:ab7907ee6b45b343422a0354de500bcf99f5ff69fe8125be84e43d421803c34e")
+	obj_largefile   = []byte("file 274\n" +
+		"blob=sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3&size=16777216\n" +
+		"blob=sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3&size=16777216\n" +
+		"blob=sha3-256:ddf124464f7b80e95f4a9c704f79e7037ff5d731648ba6b40c769893b428128c&size=100\n")
+)
diff --git a/backup/restore_test.go b/backup/restore_test.go
index 1a88e5f..9e6e525 100644
--- a/backup/restore_test.go
+++ b/backup/restore_test.go
@@ -107,3 +107,21 @@ func TestRestoreDir(t *testing.T) {
 		}))
 	})(t, root)
 }
+
+func TestRestoreLargeFile(t *testing.T) {
+	s := storage.NewMemoryStorage()
+	s.Set(objid_largefile_blob0, objects.OTBlob, obj_largefile_blob0)
+	s.Set(objid_largefile_blob1, objects.OTBlob, obj_largefile_blob1)
+	s.Set(objid_largefile, objects.OTFile, obj_largefile)
+
+	buf := new(bytes.Buffer)
+
+	if err := RestoreFile(s, objid_largefile, buf); err != nil {
+		t.Fatalf("Unexpected error while restoring file: %s", err)
+	}
+
+	have := buf.Bytes()
+	if !bytes.Equal(have, content_largefile) {
+		t.Errorf("Unexpected restoration result: %s", have)
+	}
+}
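Side note on the fixture sizes: the 2*BlobChunkSize+100 byte test file splits into two full 16 MiB chunks (all zero bytes, so both deduplicate to the same blob id) plus a 100-byte tail, and the file object's payload carries one blob=<id>&size=<n> line per chunk. The following is a minimal sketch, not part of this patch; it only assumes the serialization visible in the fixtures above and reproduces the "file 274" header length by hand.

package main

import "fmt"

func main() {
	// Index lines copied verbatim from the obj_largefile fixture above:
	// one "blob=<id>&size=<n>" entry per stored chunk.
	fullChunk := "blob=sha3-256:7287cbb09bdd8a0d96a6f6297413cd9d09a2763814636245a5a44120e6351be3&size=16777216\n"
	tailChunk := "blob=sha3-256:ddf124464f7b80e95f4a9c704f79e7037ff5d731648ba6b40c769893b428128c&size=100\n"

	// Two identical full chunks plus the 100-byte tail chunk.
	payload := fullChunk + fullChunk + tailChunk

	fmt.Println(len(payload)) // prints 274, matching the "file 274\n" header
}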
