package org.eclipse.jgit.internal.storage.dfs;

import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.eclipse.jgit.junit.TestRng;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

public class DfsBlockCacheTest {
	@Rule
	public TestName testName = new TestName();
	private TestRng rng;
	private DfsBlockCache cache;

	@Before
	public void setUp() {
		rng = new TestRng(testName.getMethodName());
		resetCache();
	}

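	/**
	 * Blocks cached while the pack is written through r1 should be reused
	 * when a second repository instance sharing the same
	 * DfsRepositoryDescription reads that pack: the read completes with no
	 * cache misses and the cache size does not grow.
	 */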
	@SuppressWarnings("resource")
	@Test
	public void streamKeyReusesBlocks() throws Exception {
		DfsRepositoryDescription repo = new DfsRepositoryDescription("test");
		InMemoryRepository r1 = new InMemoryRepository(repo);
		byte[] content = rng.nextBytes(424242);
		ObjectId id;
		try (ObjectInserter ins = r1.newObjectInserter()) {
			id = ins.insert(OBJ_BLOB, content);
			ins.flush();
		}

		long oldSize = cache.getCurrentSize();
		assertTrue(oldSize > 2000);
		assertEquals(0, cache.getHitCount());

		List<DfsPackDescription> packs = r1.getObjectDatabase().listPacks();
		InMemoryRepository r2 = new InMemoryRepository(repo);
		r2.getObjectDatabase().commitPack(packs, Collections.emptyList());
		try (ObjectReader rdr = r2.newObjectReader()) {
			byte[] actual = rdr.open(id, OBJ_BLOB).getBytes();
			assertTrue(Arrays.equals(content, actual));
		}
		assertEquals(0, cache.getMissCount());
		assertEquals(oldSize, cache.getCurrentSize());
	}

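	/**
	 * Reads through a ReadableChannel whose block size (500) does not align
	 * with the cache block size (512) must still return the objects intact,
	 * both for a tiny blob and for one spanning many blocks.
	 */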
	@SuppressWarnings("resource")
	@Test
	public void weirdBlockSize() throws Exception {
		DfsRepositoryDescription repo = new DfsRepositoryDescription("test");
		InMemoryRepository r1 = new InMemoryRepository(repo);

		byte[] content1 = rng.nextBytes(4);
		byte[] content2 = rng.nextBytes(424242);
		ObjectId id1;
		ObjectId id2;
		try (ObjectInserter ins = r1.newObjectInserter()) {
			id1 = ins.insert(OBJ_BLOB, content1);
			id2 = ins.insert(OBJ_BLOB, content2);
			ins.flush();
		}

		resetCache();
		List<DfsPackDescription> packs = r1.getObjectDatabase().listPacks();

		InMemoryRepository r2 = new InMemoryRepository(repo);
		r2.getObjectDatabase().setReadableChannelBlockSizeForTest(500);
		r2.getObjectDatabase().commitPack(packs, Collections.emptyList());
		try (ObjectReader rdr = r2.newObjectReader()) {
			byte[] actual = rdr.open(id1, OBJ_BLOB).getBytes();
			assertTrue(Arrays.equals(content1, actual));
		}

		InMemoryRepository r3 = new InMemoryRepository(repo);
		r3.getObjectDatabase().setReadableChannelBlockSizeForTest(500);
		r3.getObjectDatabase().commitPack(packs, Collections.emptyList());
		try (ObjectReader rdr = r3.newObjectReader()) {
			byte[] actual = rdr.open(id2, OBJ_BLOB).getBytes();
			assertTrue(Arrays.equals(content2, actual));
		}
	}

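	/** Installs a fresh cache with 512-byte blocks and a 1 MiB limit. */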
	private void resetCache() {
		DfsBlockCache.reconfigure(new DfsBlockCacheConfig()
				.setBlockSize(512)
				.setBlockLimit(1 << 20));
		cache = DfsBlockCache.getInstance();
	}
}