dfs: Switch InMemoryRepository to DfsReftableDatabase
This ensures DfsReftableDatabase is tested by the same test suites that
use/test InMemoryRepository. It also simplifies the logic of
InMemoryRepository and brings its compatibility story closer to any
other DFS repository that uses reftables for its reference storage.
Change-Id: I881469fd77ed11a9239b477633510b8c482a19ca
diff --git a/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF b/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF
index 421fa8a..cf190a6 100644
--- a/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF
@@ -29,6 +29,7 @@
org.eclipse.jgit.internal;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.dfs;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.file;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.storage.reftable;version="[4.9.0,4.10.0)",
org.eclipse.jgit.junit;version="[4.9.0,4.10.0)",
org.eclipse.jgit.junit.http;version="[4.9.0,4.10.0)",
org.eclipse.jgit.lib;version="[4.9.0,4.10.0)",
diff --git a/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/RefsUnreadableInMemoryRepository.java b/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/RefsUnreadableInMemoryRepository.java
index a1e41d1..21787fe 100644
--- a/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/RefsUnreadableInMemoryRepository.java
+++ b/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/RefsUnreadableInMemoryRepository.java
@@ -46,6 +46,7 @@
import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
+import org.eclipse.jgit.internal.storage.reftable.Reftable;
import org.eclipse.jgit.lib.RefDatabase;
/**
@@ -80,14 +81,12 @@
}
private class RefsUnreadableRefDatabase extends MemRefDatabase {
-
@Override
- protected RefCache scanAllRefs() throws IOException {
+ protected Reftable read() throws IOException {
if (failing) {
throw new IOException("disk failed, no refs found");
- } else {
- return super.scanAllRefs();
}
+ return super.read();
}
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
index bb6017c..fd727ae 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
@@ -14,7 +14,6 @@
import static org.junit.Assert.fail;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
@@ -714,38 +713,6 @@
}
@Test
- public void leavesNonGcReftablesIfNotConfigured() throws Exception {
- String master = "refs/heads/master";
- RevCommit commit0 = commit().message("0").create();
- RevCommit commit1 = commit().message("1").parent(commit0).create();
- git.update(master, commit1);
-
- DfsPackDescription t1 = odb.newPack(INSERT);
- try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
- out.write("ignored".getBytes(StandardCharsets.UTF_8));
- t1.addFileExt(REFTABLE);
- }
- odb.commitPack(Collections.singleton(t1), null);
-
- DfsGarbageCollector gc = new DfsGarbageCollector(repo);
- gc.setReftableConfig(null);
- run(gc);
-
- // Single GC pack present with all objects.
- assertEquals(1, odb.getPacks().length);
- DfsPackFile pack = odb.getPacks()[0];
- DfsPackDescription desc = pack.getPackDescription();
- assertEquals(GC, desc.getPackSource());
- assertTrue("commit0 in pack", isObjectInPack(commit0, pack));
- assertTrue("commit1 in pack", isObjectInPack(commit1, pack));
-
- // Only INSERT REFTABLE above is present.
- DfsReftable[] tables = odb.getReftables();
- assertEquals(1, tables.length);
- assertEquals(t1, tables[0].getPackDescription());
- }
-
- @Test
public void prunesNonGcReftables() throws Exception {
String master = "refs/heads/master";
RevCommit commit0 = commit().message("0").create();
@@ -754,7 +721,7 @@
DfsPackDescription t1 = odb.newPack(INSERT);
try (DfsOutputStream out = odb.writeFile(t1, REFTABLE)) {
- out.write("ignored".getBytes(StandardCharsets.UTF_8));
+ new ReftableWriter().begin(out).finish();
t1.addFileExt(REFTABLE);
}
odb.commitPack(Collections.singleton(t1), null);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
index 6e9d7e0..d568d72 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
@@ -446,7 +446,7 @@
// add, as the pack was already committed via commitPack().
// If this is the case return without changing the list.
for (DfsPackFile p : o.packs) {
- if (p == newPack)
+ if (p.key.equals(newPack.key))
return;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
index 383ed3d..0bb4e30 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
@@ -6,30 +6,12 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.storage.pack.PackExt;
-import org.eclipse.jgit.lib.BatchRefUpdate;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectIdRef;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.lib.Ref;
-import org.eclipse.jgit.lib.Ref.Storage;
import org.eclipse.jgit.lib.RefDatabase;
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.revwalk.RevTag;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.transport.ReceiveCommand;
-import org.eclipse.jgit.util.RefList;
/**
* Git repository stored entirely in the local process memory.
@@ -54,9 +36,8 @@
static final AtomicInteger packId = new AtomicInteger();
private final MemObjDatabase objdb;
- private final RefDatabase refdb;
+ private final MemRefDatabase refdb;
private String gitwebDescription;
- private boolean performsAtomicTransactions = true;
/**
* Initialize a new in-memory repository.
@@ -92,7 +73,7 @@
* @param atomic
*/
public void setPerformsAtomicTransactions(boolean atomic) {
- performsAtomicTransactions = atomic;
+ refdb.performsAtomicTransactions = atomic;
}
@Override
@@ -148,6 +129,7 @@
if (replace != null)
n.removeAll(replace);
packs = n;
+ clearCache();
}
@Override
@@ -159,37 +141,43 @@
protected ReadableChannel openFile(DfsPackDescription desc, PackExt ext)
throws FileNotFoundException, IOException {
MemPack memPack = (MemPack) desc;
- byte[] file = memPack.fileMap.get(ext);
+ byte[] file = memPack.get(ext);
if (file == null)
throw new FileNotFoundException(desc.getFileName(ext));
return new ByteArrayReadableChannel(file, blockSize);
}
@Override
- protected DfsOutputStream writeFile(
- DfsPackDescription desc, final PackExt ext) throws IOException {
- final MemPack memPack = (MemPack) desc;
+ protected DfsOutputStream writeFile(DfsPackDescription desc,
+ PackExt ext) throws IOException {
+ MemPack memPack = (MemPack) desc;
return new Out() {
@Override
public void flush() {
- memPack.fileMap.put(ext, getData());
+ memPack.put(ext, getData());
}
};
}
}
private static class MemPack extends DfsPackDescription {
- final Map<PackExt, byte[]>
- fileMap = new HashMap<>();
+ final byte[][] fileMap = new byte[PackExt.values().length][];
MemPack(String name, DfsRepositoryDescription repoDesc) {
super(repoDesc, name);
}
+
+ void put(PackExt ext, byte[] data) {
+ fileMap[ext.getPosition()] = data;
+ }
+
+ byte[] get(PackExt ext) {
+ return fileMap[ext.getPosition()];
+ }
}
private abstract static class Out extends DfsOutputStream {
private final ByteArrayOutputStream dst = new ByteArrayOutputStream();
-
private byte[] data;
@Override
@@ -221,7 +209,6 @@
public void close() {
flush();
}
-
}
private static class ByteArrayReadableChannel implements ReadableChannel {
@@ -281,19 +268,11 @@
}
}
- /**
- * A ref database storing all refs in-memory.
- * <p>
- * This class is protected (and not private) to facilitate testing using
- * subclasses of InMemoryRepository.
- */
- protected class MemRefDatabase extends DfsRefDatabase {
- private final ConcurrentMap<String, Ref> refs = new ConcurrentHashMap<>();
- private final ReadWriteLock lock = new ReentrantReadWriteLock(true /* fair */);
+ /** DfsRefDatabase used by InMemoryRepository. */
+ protected class MemRefDatabase extends DfsReftableDatabase {
+ boolean performsAtomicTransactions = true;
- /**
- * Initialize a new in-memory ref database.
- */
+ /** Initialize a new in-memory ref database. */
protected MemRefDatabase() {
super(InMemoryRepository.this);
}
@@ -302,172 +281,5 @@
public boolean performsAtomicTransactions() {
return performsAtomicTransactions;
}
-
- @Override
- public BatchRefUpdate newBatchUpdate() {
- return new BatchRefUpdate(this) {
- @Override
- public void execute(RevWalk walk, ProgressMonitor monitor)
- throws IOException {
- if (performsAtomicTransactions() && isAtomic()) {
- try {
- lock.writeLock().lock();
- batch(getCommands());
- } finally {
- lock.writeLock().unlock();
- }
- } else {
- super.execute(walk, monitor);
- }
- }
- };
- }
-
- @Override
- protected RefCache scanAllRefs() throws IOException {
- RefList.Builder<Ref> ids = new RefList.Builder<>();
- RefList.Builder<Ref> sym = new RefList.Builder<>();
- try {
- lock.readLock().lock();
- for (Ref ref : refs.values()) {
- if (ref.isSymbolic())
- sym.add(ref);
- ids.add(ref);
- }
- } finally {
- lock.readLock().unlock();
- }
- ids.sort();
- sym.sort();
- objdb.getCurrentPackList().markDirty();
- return new RefCache(ids.toRefList(), sym.toRefList());
- }
-
- private void batch(List<ReceiveCommand> cmds) {
- // Validate that the target exists in a new RevWalk, as the RevWalk
- // from the RefUpdate might be reading back unflushed objects.
- Map<ObjectId, ObjectId> peeled = new HashMap<>();
- try (RevWalk rw = new RevWalk(getRepository())) {
- for (ReceiveCommand c : cmds) {
- if (c.getResult() != ReceiveCommand.Result.NOT_ATTEMPTED) {
- ReceiveCommand.abort(cmds);
- return;
- }
-
- if (!ObjectId.zeroId().equals(c.getNewId())) {
- try {
- RevObject o = rw.parseAny(c.getNewId());
- if (o instanceof RevTag) {
- peeled.put(o, rw.peel(o).copy());
- }
- } catch (IOException e) {
- c.setResult(ReceiveCommand.Result.REJECTED_MISSING_OBJECT);
- ReceiveCommand.abort(cmds);
- return;
- }
- }
- }
- }
-
- // Check all references conform to expected old value.
- for (ReceiveCommand c : cmds) {
- Ref r = refs.get(c.getRefName());
- if (r == null) {
- if (c.getType() != ReceiveCommand.Type.CREATE) {
- c.setResult(ReceiveCommand.Result.LOCK_FAILURE);
- ReceiveCommand.abort(cmds);
- return;
- }
- } else {
- ObjectId objectId = r.getObjectId();
- if (r.isSymbolic() || objectId == null
- || !objectId.equals(c.getOldId())) {
- c.setResult(ReceiveCommand.Result.LOCK_FAILURE);
- ReceiveCommand.abort(cmds);
- return;
- }
- }
- }
-
- // Write references.
- for (ReceiveCommand c : cmds) {
- if (c.getType() == ReceiveCommand.Type.DELETE) {
- refs.remove(c.getRefName());
- c.setResult(ReceiveCommand.Result.OK);
- continue;
- }
-
- ObjectId p = peeled.get(c.getNewId());
- Ref r;
- if (p != null) {
- r = new ObjectIdRef.PeeledTag(Storage.PACKED,
- c.getRefName(), c.getNewId(), p);
- } else {
- r = new ObjectIdRef.PeeledNonTag(Storage.PACKED,
- c.getRefName(), c.getNewId());
- }
- refs.put(r.getName(), r);
- c.setResult(ReceiveCommand.Result.OK);
- }
- clearCache();
- }
-
- @Override
- protected boolean compareAndPut(Ref oldRef, Ref newRef)
- throws IOException {
- try {
- lock.writeLock().lock();
- ObjectId id = newRef.getObjectId();
- if (id != null) {
- try (RevWalk rw = new RevWalk(getRepository())) {
- // Validate that the target exists in a new RevWalk, as the RevWalk
- // from the RefUpdate might be reading back unflushed objects.
- rw.parseAny(id);
- }
- }
- String name = newRef.getName();
- if (oldRef == null)
- return refs.putIfAbsent(name, newRef) == null;
-
- Ref cur = refs.get(name);
- if (cur != null) {
- if (eq(cur, oldRef))
- return refs.replace(name, cur, newRef);
- }
-
- if (oldRef.getStorage() == Storage.NEW)
- return refs.putIfAbsent(name, newRef) == null;
-
- return false;
- } finally {
- lock.writeLock().unlock();
- }
- }
-
- @Override
- protected boolean compareAndRemove(Ref oldRef) throws IOException {
- try {
- lock.writeLock().lock();
- String name = oldRef.getName();
- Ref cur = refs.get(name);
- if (cur != null && eq(cur, oldRef))
- return refs.remove(name, cur);
- else
- return false;
- } finally {
- lock.writeLock().unlock();
- }
- }
-
- private boolean eq(Ref a, Ref b) {
- if (!Objects.equals(a.getName(), b.getName()))
- return false;
- if (a.isSymbolic() != b.isSymbolic())
- return false;
- if (a.isSymbolic())
- return Objects.equals(a.getTarget().getName(), b.getTarget().getName());
- else
- return Objects.equals(a.getObjectId(), b.getObjectId());
- }
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java
index fa2e8a9..8731aab 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableBatchRefUpdate.java
@@ -117,6 +117,10 @@
Config cfg = refdb.getRepository().getConfig();
reftableConfig = new ReftableConfig();
+ if (refdb instanceof InMemoryRepository.MemRefDatabase) {
+ reftableConfig.setAlignBlocks(false);
+ reftableConfig.setIndexObjects(false);
+ }
reftableConfig.fromConfig(cfg);
}