Merge "git ignore .DS_Store"
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
index 5a40907..34f6c71 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
@@ -43,6 +43,8 @@
 
 package org.eclipse.jgit.internal.storage.file;
 
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+import static java.util.concurrent.TimeUnit.SECONDS;
 import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.LOCK_FAILURE;
 import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.OK;
 import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.REJECTED_MISSING_OBJECT;
@@ -54,9 +56,11 @@
 import static org.eclipse.jgit.transport.ReceiveCommand.Type.UPDATE;
 import static org.eclipse.jgit.transport.ReceiveCommand.Type.UPDATE_NONFASTFORWARD;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -66,6 +70,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
 import java.util.function.Predicate;
 
 import org.eclipse.jgit.junit.LocalDiskRepositoryTestCase;
@@ -96,6 +101,7 @@
 import org.junit.runners.Parameterized.Parameter;
 import org.junit.runners.Parameterized.Parameters;
 
+@SuppressWarnings("boxing")
 @RunWith(Parameterized.class)
 public class BatchRefUpdateTest extends LocalDiskRepositoryTestCase {
 	@Parameter
@@ -125,6 +131,7 @@
 		cfg.save();
 
 		refdir = (RefDirectory) diskRepo.getRefDatabase();
+		refdir.setRetrySleepMs(Arrays.asList(0, 0));
 
 		repo = new TestRepository<>(diskRepo);
 		A = repo.commit().create();
@@ -584,6 +591,136 @@
 				getLastReflog("refs/heads/branch"));
 	}
 
+	@Test
+	public void packedRefsLockFailure() throws Exception {
+		writeLooseRef("refs/heads/master", A);
+
+		List<ReceiveCommand> cmds = Arrays.asList(
+				new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+				new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+
+		LockFile myLock = refdir.lockPackedRefs();
+		try {
+			execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+			assertFalse(getLockFile("refs/heads/master").exists());
+			assertFalse(getLockFile("refs/heads/branch").exists());
+
+			if (atomic) {
+				assertResults(cmds, LOCK_FAILURE, TRANSACTION_ABORTED);
+				assertRefs("refs/heads/master", A);
+			} else {
+				// Only operates on loose refs, doesn't care that packed-refs is locked.
+				assertResults(cmds, OK, OK);
+				assertRefs(
+						"refs/heads/master", B,
+						"refs/heads/branch", B);
+			}
+		} finally {
+			myLock.unlock();
+		}
+	}
+
+	@Test
+	public void oneRefLockFailure() throws Exception {
+		writeLooseRef("refs/heads/master", A);
+
+		List<ReceiveCommand> cmds = Arrays.asList(
+				new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE),
+				new ReceiveCommand(A, B, "refs/heads/master", UPDATE));
+
+		LockFile myLock = new LockFile(refdir.fileFor("refs/heads/master"));
+		assertTrue(myLock.lock());
+		try {
+			execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+			assertFalse(LockFile.getLockFile(refdir.packedRefsFile).exists());
+			assertFalse(getLockFile("refs/heads/branch").exists());
+
+			if (atomic) {
+				assertResults(cmds, TRANSACTION_ABORTED, LOCK_FAILURE);
+				assertRefs("refs/heads/master", A);
+			} else {
+				assertResults(cmds, OK, LOCK_FAILURE);
+				assertRefs(
+						"refs/heads/branch", B,
+						"refs/heads/master", A);
+			}
+		} finally {
+			myLock.unlock();
+		}
+	}
+
+	@Test
+	public void singleRefUpdateDoesNotRequirePackedRefsLock() throws Exception {
+		writeLooseRef("refs/heads/master", A);
+
+		List<ReceiveCommand> cmds = Arrays.asList(
+				new ReceiveCommand(A, B, "refs/heads/master", UPDATE));
+
+		LockFile myLock = refdir.lockPackedRefs();
+		try {
+			execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+			assertFalse(getLockFile("refs/heads/master").exists());
+			assertResults(cmds, OK);
+			assertRefs("refs/heads/master", B);
+		} finally {
+			myLock.unlock();
+		}
+	}
+
+	@Test
+	public void atomicUpdateRespectsInProcessLock() throws Exception {
+		assumeTrue(atomic);
+
+		writeLooseRef("refs/heads/master", A);
+
+		List<ReceiveCommand> cmds = Arrays.asList(
+				new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+				new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+
+		Thread t = new Thread(() -> {
+			try {
+				execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+			} catch (Exception e) {
+				throw new RuntimeException(e);
+			}
+		});
+
+		ReentrantLock l = refdir.inProcessPackedRefsLock;
+		l.lock();
+		try {
+			t.start();
+			long timeoutSecs = 10;
+			long startNanos = System.nanoTime();
+
+			// Hold onto the lock until we observe the worker thread has attempted to
+			// acquire it.
+			while (l.getQueueLength() == 0) {
+				long elapsedNanos = System.nanoTime() - startNanos;
+				assertTrue(
+						"timed out waiting for worker thread to attempt to acquire lock",
+						NANOSECONDS.toSeconds(elapsedNanos) < timeoutSecs);
+				Thread.sleep(3);
+			}
+
+			// Once we unlock, the worker thread should finish the update promptly.
+			l.unlock();
+			t.join(SECONDS.toMillis(timeoutSecs));
+			assertFalse(t.isAlive());
+		} finally {
+			if (l.isHeldByCurrentThread()) {
+				l.unlock();
+			}
+		}
+
+		assertResults(cmds, OK, OK);
+		assertRefs(
+				"refs/heads/master", B,
+				"refs/heads/branch", B);
+	}
+
 	private void writeLooseRef(String name, AnyObjectId id) throws IOException {
 		write(new File(diskRepo.getDirectory(), name), id.name() + "\n");
 	}
@@ -712,6 +849,10 @@
 		return r.getLastEntry();
 	}
 
+	private File getLockFile(String refName) {
+		return LockFile.getLockFile(refdir.fileFor(refName));
+	}
+
 	private void assertReflogUnchanged(
 			Map<String, ReflogEntry> old, String name) throws IOException {
 		assertReflogEquals(old.get(name), getLastReflog(name), true);
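
The new atomicUpdateRespectsInProcessLock test above coordinates its two threads purely through ReentrantLock.getQueueLength(). A minimal standalone sketch of that wait-for-contention pattern, outside JGit (the class name and the 10-second timeout are illustrative):

```java
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;

import java.util.concurrent.locks.ReentrantLock;

public class LockQueueWaitSketch {
	public static void main(String[] args) throws InterruptedException {
		ReentrantLock lock = new ReentrantLock(true); // fair, like inProcessPackedRefsLock
		Thread worker = new Thread(() -> {
			lock.lock(); // blocks until the main thread releases the lock
			try {
				System.out.println("worker acquired lock");
			} finally {
				lock.unlock();
			}
		});

		lock.lock();
		try {
			worker.start();
			long start = System.nanoTime();
			// Wait until the worker is observably queued on the lock, bounded by a timeout.
			while (lock.getQueueLength() == 0) {
				if (NANOSECONDS.toSeconds(System.nanoTime() - start) >= 10) {
					throw new IllegalStateException("worker never contended for the lock");
				}
				Thread.sleep(3);
			}
		} finally {
			lock.unlock();
		}
		worker.join(SECONDS.toMillis(10)); // should return promptly once the lock is released
	}
}
```
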
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
index 145fed6..fefccf3 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
@@ -60,10 +60,12 @@
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.eclipse.jgit.errors.LockFailedException;
 import org.eclipse.jgit.events.ListenerHandle;
 import org.eclipse.jgit.events.RefsChangedEvent;
 import org.eclipse.jgit.events.RefsChangedListener;
@@ -79,6 +81,7 @@
 import org.junit.Before;
 import org.junit.Test;
 
+@SuppressWarnings("boxing")
 public class RefDirectoryTest extends LocalDiskRepositoryTestCase {
 	private Repository diskRepo;
 
@@ -1284,6 +1287,23 @@
 		assertEquals(1, changeCount.get());
 	}
 
+	@Test
+	public void testPackedRefsLockFailure() throws Exception {
+		writeLooseRef("refs/heads/master", A);
+		refdir.setRetrySleepMs(Arrays.asList(0, 0));
+		LockFile myLock = refdir.lockPackedRefs();
+		try {
+			refdir.pack(Arrays.asList("refs/heads/master"));
+			fail("expected LockFailedException");
+		} catch (LockFailedException e) {
+			assertEquals(refdir.packedRefsFile.getPath(), e.getFile().getPath());
+		} finally {
+			myLock.unlock();
+		}
+		Ref ref = refdir.getRef("refs/heads/master");
+		assertEquals(Storage.LOOSE, ref.getStorage());
+	}
+
 	private void writeLooseRef(String name, AnyObjectId id) throws IOException {
 		writeLooseRef(name, id.name() + "\n");
 	}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java
index 6529d9e..30a9626 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java
@@ -86,7 +86,6 @@
 		final ObjectId f = i.toObjectId();
 		assertNotNull(f);
 		assertEquals(ObjectId.fromString(s), f);
-		assertEquals(f.hashCode(), i.hashCode());
 	}
 
 	@Test
@@ -101,7 +100,6 @@
 		final ObjectId f = i.toObjectId();
 		assertNotNull(f);
 		assertEquals(ObjectId.fromString(s), f);
-		assertEquals(f.hashCode(), i.hashCode());
 	}
 
 	@Test
@@ -215,7 +213,7 @@
 	}
 
 	@Test
-	public void testEquals_Short() {
+	public void testEquals_Short8() {
 		final String s = "7b6e8067";
 		final AbbreviatedObjectId a = AbbreviatedObjectId.fromString(s);
 		final AbbreviatedObjectId b = AbbreviatedObjectId.fromString(s);
@@ -226,6 +224,18 @@
 	}
 
 	@Test
+	public void testEquals_Short4() {
+		final String s = "7b6e";
+		final AbbreviatedObjectId a = AbbreviatedObjectId.fromString(s);
+		final AbbreviatedObjectId b = AbbreviatedObjectId.fromString(s);
+		assertNotSame(a, b);
+		assertTrue(a.hashCode() != 0);
+		assertTrue(a.hashCode() == b.hashCode());
+		assertEquals(b, a);
+		assertEquals(a, b);
+	}
+
+	@Test
 	public void testEquals_Full() {
 		final String s = "7b6e8067ec96acef9a4184b43210d583b6d2f99a";
 		final AbbreviatedObjectId a = AbbreviatedObjectId.fromString(s);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java
index c6eca9d..d6ea8c6 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java
@@ -44,6 +44,7 @@
 package org.eclipse.jgit.util;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -186,6 +187,16 @@
 	}
 
 	@Test
+	public void testContains() {
+		IntList i = new IntList();
+		i.add(1);
+		i.add(4);
+		assertTrue(i.contains(1));
+		assertTrue(i.contains(4));
+		assertFalse(i.contains(2));
+	}
+
+	@Test
 	public void testToString() {
 		final IntList i = new IntList();
 		i.add(1);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java
index 5939714..2e9cbb5 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java
@@ -43,6 +43,7 @@
 
 package org.eclipse.jgit.util;
 
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
@@ -55,52 +56,51 @@
 	public void testEmpty() {
 		final IntList map = RawParseUtils.lineMap(new byte[] {}, 0, 0);
 		assertNotNull(map);
-		assertEquals(2, map.size());
-		assertEquals(Integer.MIN_VALUE, map.get(0));
-		assertEquals(0, map.get(1));
+		assertArrayEquals(new int[]{Integer.MIN_VALUE, 0}, asInts(map));
 	}
 
 	@Test
 	public void testOneBlankLine() {
 		final IntList map = RawParseUtils.lineMap(new byte[] { '\n' }, 0, 1);
-		assertEquals(3, map.size());
-		assertEquals(Integer.MIN_VALUE, map.get(0));
-		assertEquals(0, map.get(1));
-		assertEquals(1, map.get(2));
+		assertArrayEquals(new int[]{Integer.MIN_VALUE, 0, 1}, asInts(map));
 	}
 
 	@Test
 	public void testTwoLineFooBar() throws UnsupportedEncodingException {
 		final byte[] buf = "foo\nbar\n".getBytes("ISO-8859-1");
 		final IntList map = RawParseUtils.lineMap(buf, 0, buf.length);
-		assertEquals(4, map.size());
-		assertEquals(Integer.MIN_VALUE, map.get(0));
-		assertEquals(0, map.get(1));
-		assertEquals(4, map.get(2));
-		assertEquals(buf.length, map.get(3));
+		assertArrayEquals(new int[]{Integer.MIN_VALUE, 0, 4, buf.length}, asInts(map));
 	}
 
 	@Test
 	public void testTwoLineNoLF() throws UnsupportedEncodingException {
 		final byte[] buf = "foo\nbar".getBytes("ISO-8859-1");
 		final IntList map = RawParseUtils.lineMap(buf, 0, buf.length);
-		assertEquals(4, map.size());
-		assertEquals(Integer.MIN_VALUE, map.get(0));
-		assertEquals(0, map.get(1));
-		assertEquals(4, map.get(2));
-		assertEquals(buf.length, map.get(3));
+		assertArrayEquals(new int[]{Integer.MIN_VALUE, 0, 4, buf.length}, asInts(map));
+	}
+
+	@Test
+	public void testBinary() throws UnsupportedEncodingException {
+		final byte[] buf = "xxxfoo\nb\0ar".getBytes("ISO-8859-1");
+		final IntList map = RawParseUtils.lineMap(buf, 3, buf.length);
+		assertArrayEquals(new int[]{Integer.MIN_VALUE, 3, buf.length}, asInts(map));
 	}
 
 	@Test
 	public void testFourLineBlanks() throws UnsupportedEncodingException {
 		final byte[] buf = "foo\n\n\nbar\n".getBytes("ISO-8859-1");
 		final IntList map = RawParseUtils.lineMap(buf, 0, buf.length);
-		assertEquals(6, map.size());
-		assertEquals(Integer.MIN_VALUE, map.get(0));
-		assertEquals(0, map.get(1));
-		assertEquals(4, map.get(2));
-		assertEquals(5, map.get(3));
-		assertEquals(6, map.get(4));
-		assertEquals(buf.length, map.get(5));
+
+		assertArrayEquals(new int[]{
+				Integer.MIN_VALUE, 0, 4, 5, 6, buf.length
+		}, asInts(map));
+	}
+
+	private int[] asInts(IntList l) {
+		int[] result = new int[l.size()];
+		for (int i = 0; i < l.size(); i++) {
+			result[i] = l.get(i);
+		}
+		return result;
 	}
 }
diff --git a/org.eclipse.jgit/pom.xml b/org.eclipse.jgit/pom.xml
index 1f341e6..8306eb5 100644
--- a/org.eclipse.jgit/pom.xml
+++ b/org.eclipse.jgit/pom.xml
@@ -206,8 +206,8 @@
     <pluginManagement>
       <plugins>
         <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>findbugs-maven-plugin</artifactId>
+          <groupId>com.github.hazendaz.spotbugs</groupId>
+          <artifactId>spotbugs-maven-plugin</artifactId>
           <configuration>
             <excludeFilterFile>findBugs/FindBugsExcludeFilter.xml</excludeFilterFile>
           </configuration>
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
index 06c31f2..b661ae7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
@@ -49,6 +49,7 @@
 import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_NONFASTFORWARD;
 
 import java.io.IOException;
+import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -57,7 +58,10 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.LockFailedException;
 import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
 import org.eclipse.jgit.internal.storage.file.RefDirectory.PackedRefList;
 import org.eclipse.jgit.lib.BatchRefUpdate;
 import org.eclipse.jgit.lib.ObjectId;
@@ -164,12 +168,19 @@
 
 		// Pack refs normally, so we can create lock files even in the case where
 		// refs/x is deleted and refs/x/y is created in this batch.
-		refdb.pack(
-				pending.stream().map(ReceiveCommand::getRefName).collect(toList()));
-
-		Map<String, LockFile> locks = new HashMap<>();
 		try {
-			if (!lockLooseRefs(pending, locks)) {
+			refdb.pack(
+					pending.stream().map(ReceiveCommand::getRefName).collect(toList()));
+		} catch (LockFailedException e) {
+			lockFailure(pending.get(0), pending);
+			return;
+		}
+
+		Map<String, LockFile> locks = null;
+		refdb.inProcessPackedRefsLock.lock();
+		try {
+			locks = lockLooseRefs(pending);
+			if (locks == null) {
 				return;
 			}
 			PackedRefList oldPackedList = refdb.pack(locks);
@@ -177,15 +188,19 @@
 			if (newRefs == null) {
 				return;
 			}
-			LockFile packedRefsLock = new LockFile(refdb.packedRefsFile);
-			try {
-				packedRefsLock.lock();
-				refdb.commitPackedRefs(packedRefsLock, newRefs, oldPackedList);
-			} finally {
-				packedRefsLock.unlock();
+			LockFile packedRefsLock = refdb.lockPackedRefs();
+			if (packedRefsLock == null) {
+				lockFailure(pending.get(0), pending);
+				return;
 			}
+			// commitPackedRefs removes the lock file (by renaming it over the real file).
+			refdb.commitPackedRefs(packedRefsLock, newRefs, oldPackedList);
 		} finally {
-			locks.values().forEach(LockFile::unlock);
+			try {
+				unlockAll(locks);
+			} finally {
+				refdb.inProcessPackedRefsLock.unlock();
+			}
 		}
 
 		refdb.fireRefsChanged();
@@ -271,17 +286,54 @@
 		return true;
 	}
 
-	private boolean lockLooseRefs(List<ReceiveCommand> commands,
-			Map<String, LockFile> locks) throws IOException {
-		for (ReceiveCommand c : commands) {
-			LockFile lock = new LockFile(refdb.fileFor(c.getRefName()));
-			if (!lock.lock()) {
-				lockFailure(c, commands);
-				return false;
+	/**
+	 * Lock loose refs corresponding to a list of commands.
+	 *
+	 * @param commands
+	 *            commands that we intend to execute.
+	 * @return map from ref name to lock file covering every ref in the input
+	 *         commands, with all locks held, or null if any lock could not be
+	 *         acquired. When null is returned, no locks are held and all
+	 *         commands that were still pending are set to fail with
+	 *         {@code LOCK_FAILURE}.
+	 * @throws IOException
+	 *             if an error other than a lock-acquisition failure occurred; no
+	 *             locks are held if this exception is thrown.
+	 */
+	@Nullable
+	private Map<String, LockFile> lockLooseRefs(List<ReceiveCommand> commands)
+			throws IOException {
+		ReceiveCommand failed = null;
+		Map<String, LockFile> locks = new HashMap<>();
+		try {
+			RETRY: for (int ms : refdb.getRetrySleepMs()) {
+				failed = null;
+				// Release all locks before trying again, to prevent deadlock.
+				unlockAll(locks);
+				locks.clear();
+				RefDirectory.sleep(ms);
+
+				for (ReceiveCommand c : commands) {
+					String name = c.getRefName();
+					LockFile lock = new LockFile(refdb.fileFor(name));
+					if (locks.put(name, lock) != null) {
+						throw new IOException(
+								MessageFormat.format(JGitText.get().duplicateRef, name));
+					}
+					if (!lock.lock()) {
+						failed = c;
+						continue RETRY;
+					}
+				}
+				Map<String, LockFile> result = locks;
+				locks = null;
+				return result;
 			}
-			locks.put(c.getRefName(), lock);
+		} finally {
+			unlockAll(locks);
 		}
-		return true;
+		lockFailure(failed != null ? failed : commands.get(0), commands);
+		return null;
 	}
 
 	private static RefList<Ref> applyUpdates(RevWalk walk, RefList<Ref> refs,
@@ -444,6 +496,12 @@
 				Ref.Storage.PACKED, cmd.getRefName(), newId);
 	}
 
+	private static void unlockAll(@Nullable Map<?, LockFile> locks) {
+		if (locks != null) {
+			locks.values().forEach(LockFile::unlock);
+		}
+	}
+
 	private static void lockFailure(ReceiveCommand cmd,
 			List<ReceiveCommand> commands) {
 		reject(cmd, LOCK_FAILURE, commands);
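
The labeled retry loop in the new lockLooseRefs follows an all-or-nothing idiom: every lock already held is released before sleeping and retrying, so two batches contending on overlapping ref sets cannot deadlock. A hedged, generic sketch of that idiom; TryLock, acquireAll, and the factory parameter are illustrative stand-ins, not JGit API:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

// Assumed stand-in for LockFile: tryAcquire() is non-blocking, release() is safe to call
// even if the lock was never acquired.
interface TryLock {
	boolean tryAcquire();
	void release();
}

class AllOrNothingLocker {
	/** Returns all locks held, or null if the retry schedule is exhausted. */
	static Map<String, TryLock> acquireAll(List<String> names,
			Function<String, TryLock> factory, Iterable<Integer> retrySleepMs)
			throws InterruptedException {
		Map<String, TryLock> held = new HashMap<>();
		RETRY: for (int ms : retrySleepMs) {
			// Drop any partially acquired set before sleeping, to avoid deadlock.
			held.values().forEach(TryLock::release);
			held.clear();
			if (ms > 0) {
				Thread.sleep(ms);
			}
			for (String name : names) {
				TryLock lock = factory.apply(name);
				held.put(name, lock);
				if (!lock.tryAcquire()) {
					continue RETRY;
				}
			}
			return held; // every lock acquired
		}
		held.values().forEach(TryLock::release);
		return null; // caller reports LOCK_FAILURE, as lockLooseRefs does
	}
}
```
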
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index eb56974..ecf7ef9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -63,6 +63,7 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.io.InterruptedIOException;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.text.MessageFormat;
@@ -74,8 +75,10 @@
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.eclipse.jgit.annotations.NonNull;
+import org.eclipse.jgit.annotations.Nullable;
 import org.eclipse.jgit.errors.InvalidObjectIdException;
 import org.eclipse.jgit.errors.LockFailedException;
 import org.eclipse.jgit.errors.MissingObjectException;
@@ -137,6 +140,10 @@
 			Constants.MERGE_HEAD, Constants.FETCH_HEAD, Constants.ORIG_HEAD,
 			Constants.CHERRY_PICK_HEAD };
 
+	@SuppressWarnings("boxing")
+	private static final List<Integer> RETRY_SLEEP_MS =
+			Collections.unmodifiableList(Arrays.asList(0, 100, 200, 400, 800, 1600));
+
 	private final FileRepository parent;
 
 	private final File gitDir;
@@ -161,6 +168,22 @@
 	final AtomicReference<PackedRefList> packedRefs = new AtomicReference<>();
 
 	/**
+	 * Lock for coordinating operations within a single process that may contend
+	 * on the {@code packed-refs} file.
+	 * <p>
+	 * All operations that write {@code packed-refs} must still acquire a
+	 * {@link LockFile} on {@link #packedRefsFile}, even after they have acquired
+	 * this lock, since there may be multiple {@link RefDirectory} instances or
+	 * other processes operating on the same repo on disk.
+	 * <p>
+	 * This lock exists so multiple threads in the same process can wait in a fair
+	 * queue without trying, failing, and retrying to acquire the on-disk lock. If
+	 * {@code RepositoryCache} is used, this lock instance will be used by all
+	 * threads.
+	 */
+	final ReentrantLock inProcessPackedRefsLock = new ReentrantLock(true);
+
+	/**
 	 * Number of modifications made to this database.
 	 * <p>
 	 * This counter is incremented when a change is made, or detected from the
@@ -176,6 +199,8 @@
 	 */
 	private final AtomicInteger lastNotifiedModCnt = new AtomicInteger();
 
+	private List<Integer> retrySleepMs = RETRY_SLEEP_MS;
+
 	RefDirectory(final FileRepository db) {
 		final FS fs = db.getFS();
 		parent = db;
@@ -602,16 +627,19 @@
 		// we don't miss an edit made externally.
 		final PackedRefList packed = getPackedRefs();
 		if (packed.contains(name)) {
-			LockFile lck = new LockFile(packedRefsFile);
-			if (!lck.lock())
-				throw new LockFailedException(packedRefsFile);
+			inProcessPackedRefsLock.lock();
 			try {
-				PackedRefList cur = readPackedRefs();
-				int idx = cur.find(name);
-				if (0 <= idx)
-					commitPackedRefs(lck, cur.remove(idx), packed);
+				LockFile lck = lockPackedRefsOrThrow();
+				try {
+					PackedRefList cur = readPackedRefs();
+					int idx = cur.find(name);
+					if (0 <= idx)
+						commitPackedRefs(lck, cur.remove(idx), packed);
+				} finally {
+					lck.unlock();
+				}
 			} finally {
-				lck.unlock();
+				inProcessPackedRefsLock.unlock();
 			}
 		}
 
@@ -665,106 +693,126 @@
 		FS fs = parent.getFS();
 
 		// Lock the packed refs file and read the content
-		LockFile lck = new LockFile(packedRefsFile);
-		if (!lck.lock()) {
-			throw new IOException(MessageFormat.format(
-					JGitText.get().cannotLock, packedRefsFile));
-		}
-
+		inProcessPackedRefsLock.lock();
 		try {
-			final PackedRefList packed = getPackedRefs();
-			RefList<Ref> cur = readPackedRefs();
+			LockFile lck = lockPackedRefsOrThrow();
+			try {
+				final PackedRefList packed = getPackedRefs();
+				RefList<Ref> cur = readPackedRefs();
 
-			// Iterate over all refs to be packed
-			boolean dirty = false;
-			for (String refName : refs) {
-				Ref oldRef = readRef(refName, cur);
-				if (oldRef == null) {
-					continue; // A non-existent ref is already correctly packed.
-				}
-				if (oldRef.isSymbolic()) {
-					continue; // can't pack symbolic refs
-				}
-				// Add/Update it to packed-refs
-				Ref newRef = peeledPackedRef(oldRef);
-				if (newRef == oldRef) {
-					// No-op; peeledPackedRef returns the input ref only if it's already
-					// packed, and readRef returns a packed ref only if there is no loose
-					// ref.
-					continue;
-				}
-
-				dirty = true;
-				int idx = cur.find(refName);
-				if (idx >= 0) {
-					cur = cur.set(idx, newRef);
-				} else {
-					cur = cur.add(idx, newRef);
-				}
-			}
-			if (!dirty) {
-				// All requested refs were already packed accurately
-				return packed;
-			}
-
-			// The new content for packed-refs is collected. Persist it.
-			PackedRefList result = commitPackedRefs(lck, cur, packed);
-
-			// Now delete the loose refs which are now packed
-			for (String refName : refs) {
-				// Lock the loose ref
-				File refFile = fileFor(refName);
-				if (!fs.exists(refFile)) {
-					continue;
-				}
-
-				LockFile rLck = heldLocks.get(refName);
-				boolean shouldUnlock;
-				if (rLck == null) {
-					rLck = new LockFile(refFile);
-					if (!rLck.lock()) {
+				// Iterate over all refs to be packed
+				boolean dirty = false;
+				for (String refName : refs) {
+					Ref oldRef = readRef(refName, cur);
+					if (oldRef == null) {
+						continue; // A non-existent ref is already correctly packed.
+					}
+					if (oldRef.isSymbolic()) {
+						continue; // can't pack symbolic refs
+					}
+					// Add/Update it to packed-refs
+					Ref newRef = peeledPackedRef(oldRef);
+					if (newRef == oldRef) {
+						// No-op; peeledPackedRef returns the input ref only if it's already
+						// packed, and readRef returns a packed ref only if there is no
+						// loose ref.
 						continue;
 					}
-					shouldUnlock = true;
-				} else {
-					shouldUnlock = false;
+
+					dirty = true;
+					int idx = cur.find(refName);
+					if (idx >= 0) {
+						cur = cur.set(idx, newRef);
+					} else {
+						cur = cur.add(idx, newRef);
+					}
+				}
+				if (!dirty) {
+					// All requested refs were already packed accurately
+					return packed;
 				}
 
-				try {
-					LooseRef currentLooseRef = scanRef(null, refName);
-					if (currentLooseRef == null || currentLooseRef.isSymbolic()) {
+				// The new content for packed-refs is collected. Persist it.
+				PackedRefList result = commitPackedRefs(lck, cur, packed);
+
+				// Now delete the loose refs which are now packed
+				for (String refName : refs) {
+					// Lock the loose ref
+					File refFile = fileFor(refName);
+					if (!fs.exists(refFile)) {
 						continue;
 					}
-					Ref packedRef = cur.get(refName);
-					ObjectId clr_oid = currentLooseRef.getObjectId();
-					if (clr_oid != null
-							&& clr_oid.equals(packedRef.getObjectId())) {
-						RefList<LooseRef> curLoose, newLoose;
-						do {
-							curLoose = looseRefs.get();
-							int idx = curLoose.find(refName);
-							if (idx < 0) {
-								break;
-							}
-							newLoose = curLoose.remove(idx);
-						} while (!looseRefs.compareAndSet(curLoose, newLoose));
-						int levels = levelsIn(refName) - 2;
-						delete(refFile, levels, rLck);
+
+					LockFile rLck = heldLocks.get(refName);
+					boolean shouldUnlock;
+					if (rLck == null) {
+						rLck = new LockFile(refFile);
+						if (!rLck.lock()) {
+							continue;
+						}
+						shouldUnlock = true;
+					} else {
+						shouldUnlock = false;
 					}
-				} finally {
-					if (shouldUnlock) {
-						rLck.unlock();
+
+					try {
+						LooseRef currentLooseRef = scanRef(null, refName);
+						if (currentLooseRef == null || currentLooseRef.isSymbolic()) {
+							continue;
+						}
+						Ref packedRef = cur.get(refName);
+						ObjectId clr_oid = currentLooseRef.getObjectId();
+						if (clr_oid != null
+								&& clr_oid.equals(packedRef.getObjectId())) {
+							RefList<LooseRef> curLoose, newLoose;
+							do {
+								curLoose = looseRefs.get();
+								int idx = curLoose.find(refName);
+								if (idx < 0) {
+									break;
+								}
+								newLoose = curLoose.remove(idx);
+							} while (!looseRefs.compareAndSet(curLoose, newLoose));
+							int levels = levelsIn(refName) - 2;
+							delete(refFile, levels, rLck);
+						}
+					} finally {
+						if (shouldUnlock) {
+							rLck.unlock();
+						}
 					}
 				}
+				// Don't fire refsChanged. The refs have not changed, only their
+				// storage.
+				return result;
+			} finally {
+				lck.unlock();
 			}
-			// Don't fire refsChanged. The refs have not change, only their
-			// storage.
-			return result;
 		} finally {
-			lck.unlock();
+			inProcessPackedRefsLock.unlock();
 		}
 	}
 
+	@Nullable
+	LockFile lockPackedRefs() throws IOException {
+		LockFile lck = new LockFile(packedRefsFile);
+		for (int ms : getRetrySleepMs()) {
+			sleep(ms);
+			if (lck.lock()) {
+				return lck;
+			}
+		}
+		return null;
+	}
+
+	private LockFile lockPackedRefsOrThrow() throws IOException {
+		LockFile lck = lockPackedRefs();
+		if (lck == null) {
+			throw new LockFailedException(packedRefsFile);
+		}
+		return lck;
+	}
+
 	/**
 	 * Make sure a ref is peeled and has the Storage PACKED. If the given ref
 	 * has this attributes simply return it. Otherwise create a new peeled
@@ -1175,6 +1223,63 @@
 		}
 	}
 
+	/**
+	 * Get times to sleep while retrying a possibly contentious operation.
+	 * <p>
+	 * For retrying an operation that might have high contention, such as locking
+	 * the {@code packed-refs} file, the caller may implement a retry loop using
+	 * the returned values:
+	 *
+	 * <pre>
+	 * for (int toSleepMs : getRetrySleepMs()) {
+	 *   sleep(toSleepMs);
+	 *   if (isSuccessful(doSomething())) {
+	 *     return success;
+	 *   }
+	 * }
+	 * return failure;
+	 * </pre>
+	 *
+	 * The first value in the returned iterable is 0, and the caller should treat
+	 * a fully-consumed iterator as a timeout.
+	 *
+	 * @return iterable of times, in milliseconds, that the caller should sleep
+	 *         before attempting an operation.
+	 */
+	Iterable<Integer> getRetrySleepMs() {
+		return retrySleepMs;
+	}
+
+	void setRetrySleepMs(List<Integer> retrySleepMs) {
+		if (retrySleepMs == null || retrySleepMs.isEmpty()
+				|| retrySleepMs.get(0).intValue() != 0) {
+			throw new IllegalArgumentException();
+		}
+		this.retrySleepMs = retrySleepMs;
+	}
+
+	/**
+	 * Sleep with {@link Thread#sleep(long)}, converting {@link
+	 * InterruptedException} to {@link InterruptedIOException}.
+	 *
+	 * @param ms
+	 *            time to sleep, in milliseconds; zero or negative is a no-op.
+	 * @throws InterruptedIOException
+	 *             if sleeping was interrupted.
+	 */
+	static void sleep(long ms) throws InterruptedIOException {
+		if (ms <= 0) {
+			return;
+		}
+		try {
+			Thread.sleep(ms);
+		} catch (InterruptedException e) {
+			InterruptedIOException ie = new InterruptedIOException();
+			ie.initCause(e);
+			throw ie;
+		}
+	}
+
 	static class PackedRefList extends RefList<Ref> {
 
 		private final FileSnapshot snapshot;
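
Packed-refs writers now take two locks: the fair inProcessPackedRefsLock serializes threads that share a RefDirectory, while the on-disk LockFile, retried on the getRetrySleepMs() backoff schedule, still excludes other processes and other RefDirectory instances. A hedged sketch of how the two levels compose; tryLockOnDisk and doWrite are placeholders, not JGit methods:

```java
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.BooleanSupplier;

public class TwoLevelLockSketch {
	// Fair, so waiting threads are served in order, as in the patch.
	private final ReentrantLock inProcessLock = new ReentrantLock(true);

	// Same backoff schedule as RefDirectory.RETRY_SLEEP_MS; the first attempt is immediate.
	private static final List<Integer> RETRY_SLEEP_MS =
			Arrays.asList(0, 100, 200, 400, 800, 1600);

	/**
	 * @return true if the on-disk lock was obtained and doWrite ran; false if the
	 *         schedule was exhausted (the patch surfaces that as LockFailedException).
	 */
	public boolean withPackedRefsLock(BooleanSupplier tryLockOnDisk, Runnable doWrite)
			throws InterruptedException {
		inProcessLock.lock(); // threads in this process queue here instead of spinning on disk
		try {
			for (int ms : RETRY_SLEEP_MS) {
				if (ms > 0) {
					Thread.sleep(ms);
				}
				if (tryLockOnDisk.getAsBoolean()) { // stand-in for LockFile.lock()
					doWrite.run(); // stand-in for commitPackedRefs(...), which releases the lock
					return true;
				}
			}
			return false;
		} finally {
			inProcessLock.unlock();
		}
	}
}
```
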
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
index 29a379e..0567051 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
@@ -336,7 +336,7 @@
 
 	@Override
 	public int hashCode() {
-		return w2;
+		return w1;
 	}
 
 	@Override
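
Returning w1 instead of w2 matters for short prefixes: an abbreviation of eight hex digits or fewer leaves the second word (w2) at zero, so the old hashCode() mapped every such prefix to 0, which is what the new testEquals_Short4 guards against. A quick check against the public API (class name illustrative):

```java
import org.eclipse.jgit.lib.AbbreviatedObjectId;

public class ShortAbbrevHashSketch {
	public static void main(String[] args) {
		AbbreviatedObjectId a = AbbreviatedObjectId.fromString("7b6e");
		AbbreviatedObjectId b = AbbreviatedObjectId.fromString("deaf");
		// Before the fix both of these printed 0; now distinct short prefixes
		// generally hash to distinct, non-zero values.
		System.out.println(a.hashCode());
		System.out.println(b.hashCode());
	}
}
```
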
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
index 658dd06..0a3c846 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
@@ -71,6 +71,21 @@
 	}
 
 	/**
+	 * Check if an entry appears in this collection.
+	 *
+	 * @param value
+	 *            the value to search for.
+	 * @return true if {@code value} appears in this list.
+	 * @since 4.9
+	 */
+	public boolean contains(int value) {
+		for (int i = 0; i < count; i++)
+			if (entries[i] == value)
+				return true;
+		return false;
+	}
+
+	/**
 	 * @param i
 	 *            index to read, must be in the range [0, {@link #size()}).
 	 * @return the number at the specified index
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
index 86777b9..ad138bb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
@@ -618,6 +618,10 @@
 	 * <p>
 	 * The last element (index <code>map.size()-1</code>) always contains
 	 * <code>end</code>.
+	 * <p>
+	 * If the data contains a '\0' anywhere, the whole region is considered binary
+	 * and a LineMap corresponding to a single line is returned.
+	 * </p>
 	 *
 	 * @param buf
 	 *            buffer to scan.
@@ -629,14 +633,29 @@
 	 * @return a line map indexing the start position of each line.
 	 */
 	public static final IntList lineMap(final byte[] buf, int ptr, int end) {
+		int start = ptr;
+
 		// Experimentally derived from multiple source repositories
 		// the average number of bytes/line is 36. Its a rough guess
 		// to initially size our map close to the target.
-		//
-		final IntList map = new IntList((end - ptr) / 36);
-		map.fillTo(1, Integer.MIN_VALUE);
-		for (; ptr < end; ptr = nextLF(buf, ptr))
-			map.add(ptr);
+		IntList map = new IntList((end - ptr) / 36);
+		map.add(Integer.MIN_VALUE);
+		boolean foundLF = true;
+		for (; ptr < end; ptr++) {
+			if (foundLF) {
+				map.add(ptr);
+			}
+
+			if (buf[ptr] == '\0') {
+				// binary data.
+				map = new IntList(3);
+				map.add(Integer.MIN_VALUE);
+				map.add(start);
+				break;
+			}
+
+			foundLF = (buf[ptr] == '\n');
+		}
 		map.add(end);
 		return map;
 	}
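
A short standalone illustration of the layout the updated lineMap javadoc describes, reusing the fixtures from the tests above (class name illustrative):

```java
import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.util.IntList;
import org.eclipse.jgit.util.RawParseUtils;

public class LineMapSketch {
	public static void main(String[] args) {
		byte[] text = "foo\nbar\n".getBytes(StandardCharsets.ISO_8859_1);
		// Element 0 is Integer.MIN_VALUE padding, then one start offset per line,
		// then `end`: [MIN_VALUE, 0, 4, 8].
		IntList textMap = RawParseUtils.lineMap(text, 0, text.length);

		byte[] binary = "b\0ar".getBytes(StandardCharsets.ISO_8859_1);
		// A NUL byte anywhere collapses the map to a single "line" spanning the
		// whole region: [MIN_VALUE, 0, 4].
		IntList binaryMap = RawParseUtils.lineMap(binary, 0, binary.length);

		System.out.println(textMap);
		System.out.println(binaryMap);
	}
}
```
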
diff --git a/pom.xml b/pom.xml
index 19d0225..3e6c55b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -212,7 +212,7 @@
     <maven-javadoc-plugin-version>2.10.4</maven-javadoc-plugin-version>
     <tycho-extras-version>1.0.0</tycho-extras-version>
     <gson-version>2.2.4</gson-version>
-    <findbugs-maven-plugin-version>3.0.4</findbugs-maven-plugin-version>
+    <spotbugs-maven-plugin-version>3.0.6</spotbugs-maven-plugin-version>
     <maven-surefire-report-plugin-version>2.20</maven-surefire-report-plugin-version>
 
     <!-- Properties to enable jacoco code coverage analysis -->
@@ -371,9 +371,9 @@
         </plugin>
 
         <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>findbugs-maven-plugin</artifactId>
-          <version>${findbugs-maven-plugin-version}</version>
+          <groupId>com.github.hazendaz.spotbugs</groupId>
+          <artifactId>spotbugs-maven-plugin</artifactId>
+          <version>${spotbugs-maven-plugin-version}</version>
           <configuration>
             <findbugsXmlOutput>true</findbugsXmlOutput>
             <failOnError>false</failOnError>
@@ -579,9 +579,9 @@
         <version>2.5</version>
       </plugin>
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-        <version>${findbugs-maven-plugin-version}</version>
+        <groupId>com.github.hazendaz.spotbugs</groupId>
+        <artifactId>spotbugs-maven-plugin</artifactId>
+        <version>${spotbugs-maven-plugin-version}</version>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -751,8 +751,8 @@
       <build>
         <plugins>
           <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>findbugs-maven-plugin</artifactId>
+            <groupId>com.github.hazendaz.spotbugs</groupId>
+            <artifactId>spotbugs-maven-plugin</artifactId>
           </plugin>
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>