From cb5a10c6a32a7aba4c1abe81be4981ceba64f0c8 Mon Sep 17 00:00:00 2001 From: Chris Lu Date: Sun, 18 Nov 2018 17:05:36 -0800 Subject: [PATCH] adjust the tests --- weed/filer2/filechunks_test.go | 62 +++++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 20 deletions(-) diff --git a/weed/filer2/filechunks_test.go b/weed/filer2/filechunks_test.go index 292b7fc25..7e3a20f61 100644 --- a/weed/filer2/filechunks_test.go +++ b/weed/filer2/filechunks_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/chrislusf/seaweedfs/weed/pb/filer_pb" + "fmt" ) func TestCompactFileChunks(t *testing.T) { @@ -15,20 +16,50 @@ func TestCompactFileChunks(t *testing.T) { {Offset: 110, Size: 200, FileId: "jkl", Mtime: 300}, } - compacted, garbarge := CompactFileChunks(chunks) - - log.Printf("Compacted: %+v", compacted) - log.Printf("Garbage : %+v", garbarge) + compacted, garbage := CompactFileChunks(chunks) if len(compacted) != 3 { t.Fatalf("unexpected compacted: %d", len(compacted)) } - if len(garbarge) != 1 { - t.Fatalf("unexpected garbarge: %d", len(garbarge)) + if len(garbage) != 1 { + t.Fatalf("unexpected garbage: %d", len(garbage)) } } + +func TestCompactFileChunks2(t *testing.T) { + + chunks := []*filer_pb.FileChunk{ + {Offset: 0, Size: 100, FileId: "abc", Mtime: 50}, + {Offset: 100, Size: 100, FileId: "def", Mtime: 100}, + {Offset: 200, Size: 100, FileId: "ghi", Mtime: 200}, + {Offset: 0, Size: 100, FileId: "abcf", Mtime: 300}, + {Offset: 50, Size: 100, FileId: "fhfh", Mtime: 400}, + {Offset: 100, Size: 100, FileId: "yuyu", Mtime: 500}, + } + + k := 3 + + for n := 0; n < k; n++ { + chunks = append(chunks, &filer_pb.FileChunk{ + Offset: int64(n * 100), Size: 100, FileId: fmt.Sprintf("fileId%d",n), Mtime: int64(n), + }) + chunks = append(chunks, &filer_pb.FileChunk{ + Offset: int64(n * 50), Size: 100, FileId: fmt.Sprintf("fileId%d",n+k), Mtime: int64(n + k), + }) + } + + compacted, garbage := CompactFileChunks(chunks) + + if len(compacted) != 3 { + t.Fatalf("unexpected compacted: %d", len(compacted)) + } + if len(garbage) != 9 { + t.Fatalf("unexpected garbage: %d", len(garbage)) + } +} + func TestIntervalMerging(t *testing.T) { testcases := []struct { @@ -318,31 +349,22 @@ func TestChunksReading(t *testing.T) { } -func BenchmarkNonOverlappingVisibleIntervals(b *testing.B) { +func BenchmarkCompactFileChunks(b *testing.B) { - chunks := []*filer_pb.FileChunk{ - {Offset: 0, Size: 2097152, FileId: "7,0294cbb9892b", Mtime: 123}, - {Offset: 0, Size: 3145728, FileId: "3,029565bf3092", Mtime: 130}, - {Offset: 2097152, Size: 3145728, FileId: "6,029632f47ae2", Mtime: 140}, - {Offset: 5242880, Size: 3145728, FileId: "2,029734c5aa10", Mtime: 150}, - {Offset: 8388608, Size: 3145728, FileId: "5,02982f80de50", Mtime: 160}, - {Offset: 11534336, Size: 2842193, FileId: "7,0299ad723803", Mtime: 170}, - } + var chunks []*filer_pb.FileChunk k := 1024 for n := 0; n < k; n++ { chunks = append(chunks, &filer_pb.FileChunk{ - Offset: int64(n*100), Size: 100, FileId: "7,0294cbb9892b", Mtime: int64(n), + Offset: int64(n * 100), Size: 100, FileId: fmt.Sprintf("fileId%d",n), Mtime: int64(n), }) chunks = append(chunks, &filer_pb.FileChunk{ - Offset: int64(n*50), Size: 100, FileId: "7,0294cbb9892b", Mtime: int64(n+k), + Offset: int64(n * 50), Size: 100, FileId: fmt.Sprintf("fileId%d",n+k), Mtime: int64(n + k), }) } - // run the Fib function b.N times for n := 0; n < b.N; n++ { - intervals := nonOverlappingVisibleIntervals(chunks) - cleanupIntervals(intervals) + CompactFileChunks(chunks) } }