1   /*
2    * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
3    * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  package org.eclipse.jgit.internal.storage.file;
45  
46  import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
47  import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
48  
49  import java.io.File;
50  import java.io.FileOutputStream;
51  import java.io.IOException;
52  import java.io.OutputStream;
53  import java.io.PrintWriter;
54  import java.io.StringWriter;
55  import java.nio.channels.Channels;
56  import java.nio.channels.FileChannel;
57  import java.nio.file.DirectoryNotEmptyException;
58  import java.nio.file.DirectoryStream;
59  import java.nio.file.Files;
60  import java.nio.file.Path;
61  import java.nio.file.Paths;
62  import java.nio.file.StandardCopyOption;
63  import java.text.MessageFormat;
64  import java.text.ParseException;
65  import java.time.Instant;
66  import java.time.temporal.ChronoUnit;
67  import java.util.ArrayList;
68  import java.util.Collection;
69  import java.util.Collections;
70  import java.util.Comparator;
71  import java.util.Date;
72  import java.util.HashMap;
73  import java.util.HashSet;
74  import java.util.Iterator;
75  import java.util.LinkedList;
76  import java.util.List;
77  import java.util.Map;
78  import java.util.Objects;
79  import java.util.Set;
80  import java.util.TreeMap;
81  import java.util.concurrent.Callable;
82  import java.util.concurrent.ExecutorService;
83  import java.util.regex.Pattern;
84  import java.util.stream.Collectors;
85  import java.util.stream.Stream;
86  
87  import org.eclipse.jgit.annotations.NonNull;
88  import org.eclipse.jgit.dircache.DirCacheIterator;
89  import org.eclipse.jgit.errors.CancelledException;
90  import org.eclipse.jgit.errors.CorruptObjectException;
91  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
92  import org.eclipse.jgit.errors.MissingObjectException;
93  import org.eclipse.jgit.errors.NoWorkTreeException;
94  import org.eclipse.jgit.internal.JGitText;
95  import org.eclipse.jgit.internal.storage.pack.PackExt;
96  import org.eclipse.jgit.internal.storage.pack.PackWriter;
97  import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
98  import org.eclipse.jgit.lib.ConfigConstants;
99  import org.eclipse.jgit.lib.Constants;
100 import org.eclipse.jgit.lib.FileMode;
101 import org.eclipse.jgit.lib.NullProgressMonitor;
102 import org.eclipse.jgit.lib.ObjectId;
103 import org.eclipse.jgit.lib.ObjectIdSet;
104 import org.eclipse.jgit.lib.ObjectLoader;
105 import org.eclipse.jgit.lib.ObjectReader;
106 import org.eclipse.jgit.lib.ProgressMonitor;
107 import org.eclipse.jgit.lib.Ref;
108 import org.eclipse.jgit.lib.Ref.Storage;
109 import org.eclipse.jgit.lib.RefDatabase;
110 import org.eclipse.jgit.lib.ReflogEntry;
111 import org.eclipse.jgit.lib.ReflogReader;
112 import org.eclipse.jgit.lib.internal.WorkQueue;
113 import org.eclipse.jgit.revwalk.ObjectWalk;
114 import org.eclipse.jgit.revwalk.RevObject;
115 import org.eclipse.jgit.revwalk.RevWalk;
116 import org.eclipse.jgit.storage.pack.PackConfig;
117 import org.eclipse.jgit.treewalk.TreeWalk;
118 import org.eclipse.jgit.treewalk.filter.TreeFilter;
119 import org.eclipse.jgit.util.FileUtils;
120 import org.eclipse.jgit.util.GitDateParser;
121 import org.eclipse.jgit.util.SystemReader;
122 import org.slf4j.Logger;
123 import org.slf4j.LoggerFactory;
124 
125 /**
126  * A garbage collector for git {@link FileRepository}. Instances of this class
127  * are not thread-safe. Don't use the same instance from multiple threads.
128  *
129  * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
130  * adapted to FileRepositories.
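 * <p>
 * A minimal usage sketch (assuming {@code repository} is an already opened
 * {@link FileRepository}; error handling is omitted):
 *
 * <pre>
 * GC gc = new GC(repository);
 * gc.setProgressMonitor(NullProgressMonitor.INSTANCE);
 * Collection&lt;PackFile&gt; newPacks = gc.gc();
 * </pre>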
131  */
132 public class GC {
133 	private final static Logger LOG = LoggerFactory
134 			.getLogger(GC.class);
135 
136 	private static final String PRUNE_EXPIRE_DEFAULT = "2.weeks.ago"; //$NON-NLS-1$
137 
138 	private static final String PRUNE_PACK_EXPIRE_DEFAULT = "1.hour.ago"; //$NON-NLS-1$
139 
140 	private static final Pattern PATTERN_LOOSE_OBJECT = Pattern
141 			.compile("[0-9a-fA-F]{38}"); //$NON-NLS-1$
142 
143 	private static final String PACK_EXT = "." + PackExt.PACK.getExtension();//$NON-NLS-1$
144 
145 	private static final String BITMAP_EXT = "." //$NON-NLS-1$
146 			+ PackExt.BITMAP_INDEX.getExtension();
147 
148 	private static final String INDEX_EXT = "." + PackExt.INDEX.getExtension(); //$NON-NLS-1$
149 
150 	private static final int DEFAULT_AUTOPACKLIMIT = 50;
151 
152 	private static final int DEFAULT_AUTOLIMIT = 6700;
153 
154 	private static volatile ExecutorService executor;
155 
156 	/**
157 	 * Set the executor for running auto-gc in the background. If no executor
158 	 * is set, JGit's own WorkQueue will be used.
159 	 *
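	 * <p>
	 * A possible setup (any {@link ExecutorService} works; a single-threaded
	 * executor is shown only as an illustration):
	 *
	 * <pre>
	 * GC.setExecutor(java.util.concurrent.Executors.newSingleThreadExecutor());
	 * </pre>
	 *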
160 	 * @param e
161 	 *            the executor to be used for running auto-gc
162 	 * @since 4.8
163 	 */
164 	public static void setExecutor(ExecutorService e) {
165 		executor = e;
166 	}
167 
168 	private final FileRepository repo;
169 
170 	private ProgressMonitor pm;
171 
172 	private long expireAgeMillis = -1;
173 
174 	private Date expire;
175 
176 	private long packExpireAgeMillis = -1;
177 
178 	private Date packExpire;
179 
180 	private PackConfig pconfig = null;
181 
182 	/**
183 	 * the refs which existed during the last call to {@link #repack()}. This is
184 	 * needed during {@link #prune(Set)} where we can optimize by looking at the
185 	 * difference between the current refs and the refs which existed during
186 	 * last {@link #repack()}.
187 	 */
188 	private Collection<Ref> lastPackedRefs;
189 
190 	/**
191 	 * Holds the starting time of the last repack() execution. This is needed in
192 	 * prune() to inspect only those reflog entries which have been added since
193 	 * last repack().
194 	 */
195 	private long lastRepackTime;
196 
197 	/**
198 	 * Whether gc should do automatic housekeeping
199 	 */
200 	private boolean automatic;
201 
202 	/**
203 	 * Whether to run gc in a background thread
204 	 */
205 	private boolean background;
206 
207 	/**
208 	 * Creates a new garbage collector with default values. An expiration time
209 	 * of two weeks and a {@link NullProgressMonitor} will be used.
210 	 *
211 	 * @param repo
212 	 *            the repo to work on
213 	 */
214 	public GC(FileRepository repo) {
215 		this.repo = repo;
216 		this.pm = NullProgressMonitor.INSTANCE;
217 	}
218 
219 	/**
220 	 * Runs a garbage collector on a {@link FileRepository}. It will
221 	 * <ul>
222 	 * <li>pack loose references into packed-refs</li>
223 	 * <li>repack all reachable objects into new pack files and delete the old
224 	 * pack files</li>
225 	 * <li>prune loose objects which are now contained in the pack files</li>
226 	 * </ul>
227 	 *
228 	 * If {@link #setAuto(boolean)} was set to {@code true} {@code gc} will
229 	 * first check whether any housekeeping is required; if not, it exits
230 	 * without performing any work.
231 	 *
232 	 * If {@link #setBackground(boolean)} was set to {@code true}
233 	 * {@code collectGarbage} will start the gc in the background, and then
234 	 * return immediately. In this case, errors will not be reported except in
235 	 * gc.log.
236 	 *
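	 * <p>
	 * A typical foreground invocation might look like this ({@code repo} being
	 * the {@link FileRepository} to collect):
	 *
	 * <pre>
	 * GC gc = new GC(repo);
	 * gc.setAuto(true); // skip the work if no housekeeping is needed
	 * Collection&lt;PackFile&gt; newPacks = gc.gc();
	 * </pre>
	 *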
237 	 * @return the collection of {@link PackFile}s which are newly created
238 	 * @throws IOException
239 	 * @throws ParseException
240 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
241 	 *             parsed
242 	 */
243 	// TODO(ms): in 5.0 change signature and return Future<Collection<PackFile>>
244 	public Collection<PackFile> gc() throws IOException, ParseException {
245 		if (!background) {
246 			return doGc();
247 		}
248 		final GcLog gcLog = new GcLog(repo);
249 		if (!gcLog.lock()) {
250 			// there is already a background gc running
251 			return Collections.emptyList();
252 		}
253 
254 		Callable<Collection<PackFile>> gcTask = () -> {
255 			try {
256 				Collection<PackFile> newPacks = doGc();
257 				if (automatic && tooManyLooseObjects()) {
258 					String message = JGitText.get().gcTooManyUnpruned;
259 					gcLog.write(message);
260 					gcLog.commit();
261 				}
262 				return newPacks;
263 			} catch (IOException | ParseException e) {
264 				try {
265 					gcLog.write(e.getMessage());
266 					StringWriter sw = new StringWriter();
267 					e.printStackTrace(new PrintWriter(sw));
268 					gcLog.write(sw.toString());
269 					gcLog.commit();
270 				} catch (IOException e2) {
271 					e2.addSuppressed(e);
272 					LOG.error(e2.getMessage(), e2);
273 				}
274 			} finally {
275 				gcLog.unlock();
276 			}
277 			return Collections.emptyList();
278 		};
279 		// TODO(ms): in 5.0 change signature and return the Future
280 		executor().submit(gcTask);
281 		return Collections.emptyList();
282 	}
283 
284 	private ExecutorService executor() {
285 		return (executor != null) ? executor : WorkQueue.getExecutor();
286 	}
287 
288 	private Collection<PackFile> doGc() throws IOException, ParseException {
289 		if (automatic && !needGc()) {
290 			return Collections.emptyList();
291 		}
292 		pm.start(6 /* tasks */);
293 		packRefs();
294 		// TODO: implement reflog_expire(pm, repo);
295 		Collection<PackFile> newPacks = repack();
296 		prune(Collections.<ObjectId> emptySet());
297 		// TODO: implement rerere_gc(pm);
298 		return newPacks;
299 	}
300 
301 	/**
302 	 * Loosen objects in a pack file which are not also in the newly-created
303 	 * pack files.
304 	 *
305 	 * @param inserter
306 	 * @param reader
307 	 * @param pack
308 	 * @param existing
309 	 * @throws IOException
310 	 */
311 	private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader, PackFile pack, HashSet<ObjectId> existing)
312 			throws IOException {
313 		for (PackIndex.MutableEntry entry : pack) {
314 			ObjectId oid = entry.toObjectId();
315 			if (existing.contains(oid)) {
316 				continue;
317 			}
318 			existing.add(oid);
319 			ObjectLoader loader = reader.open(oid);
320 			inserter.insert(loader.getType(),
321 					loader.getSize(),
322 					loader.openStream(),
323 					true /* create this object even though it's a duplicate */);
324 		}
325 	}
326 
327 	/**
328 	 * Delete old pack files. What is 'old' is defined by specifying a set of
329 	 * old pack files and a set of new pack files. Each pack file contained in
330 	 * old pack files but not contained in new pack files will be deleted. If
331 	 * preserveOldPacks is set, keep a copy of the pack file in the preserve
332 	 * directory. If an expirationDate is set then pack files which are younger
333 	 * than the expirationDate will not be deleted nor preserved.
334 	 * <p>
335 	 * If we're not immediately expiring loose objects, loosen any objects
336 	 * in the old pack files which aren't in the new pack files.
337 	 *
338 	 * @param oldPacks
339 	 * @param newPacks
340 	 * @throws ParseException
341 	 * @throws IOException
342 	 */
343 	private void deleteOldPacks(Collection<PackFile> oldPacks,
344 			Collection<PackFile> newPacks) throws ParseException, IOException {
345 		HashSet<ObjectId> ids = new HashSet<>();
346 		for (PackFile pack : newPacks) {
347 			for (PackIndex.MutableEntry entry : pack) {
348 				ids.add(entry.toObjectId());
349 			}
350 		}
351 		ObjectReader reader = repo.newObjectReader();
352 		ObjectDirectory dir = repo.getObjectDatabase();
353 		ObjectDirectoryInserter inserter = dir.newInserter();
354 		boolean shouldLoosen = !"now".equals(getPruneExpireStr()) && //$NON-NLS-1$
355 			getExpireDate() < Long.MAX_VALUE;
356 
357 		prunePreserved();
358 		long packExpireDate = getPackExpireDate();
359 		oldPackLoop: for (PackFile oldPack : oldPacks) {
360 			checkCancelled();
361 			String oldName = oldPack.getPackName();
362 			// If an old pack file is also among the new pack files it must
363 			// not be deleted.
364 			for (PackFile newPack : newPacks)
365 				if (oldName.equals(newPack.getPackName()))
366 					continue oldPackLoop;
367 
368 			if (!oldPack.shouldBeKept()
369 					&& repo.getFS().lastModified(
370 							oldPack.getPackFile()) < packExpireDate) {
371 				oldPack.close();
372 				if (shouldLoosen) {
373 					loosen(inserter, reader, oldPack, ids);
374 				}
375 				prunePack(oldName);
376 			}
377 		}
378 
379 		// Close the complete object database. That's the only way to force
380 		// rescanning and to detect that certain pack files are now deleted.
381 		repo.getObjectDatabase().close();
382 	}
383 
384 	/**
385 	 * Deletes the old pack file, unless 'preserve-oldpacks' is set, in which
386 	 * case it moves the pack file to the preserved directory.
387 	 *
388 	 * @param packFile
389 	 * @param packName
390 	 * @param ext
391 	 * @param deleteOptions
392 	 * @throws IOException
393 	 */
394 	private void removeOldPack(File packFile, String packName, PackExt ext,
395 			int deleteOptions) throws IOException {
396 		if (pconfig != null && pconfig.isPreserveOldPacks()) {
397 			File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
398 			FileUtils.mkdir(oldPackDir, true);
399 
400 			String oldPackName = "pack-" + packName + ".old-" + ext.getExtension();  //$NON-NLS-1$ //$NON-NLS-2$
401 			File oldPackFile = new File(oldPackDir, oldPackName);
402 			FileUtils.rename(packFile, oldPackFile);
403 		} else {
404 			FileUtils.delete(packFile, deleteOptions);
405 		}
406 	}
407 
408 	/**
409 	 * Delete the preserved directory including all pack files within
410 	 */
411 	private void prunePreserved() {
412 		if (pconfig != null && pconfig.isPrunePreserved()) {
413 			try {
414 				FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
415 						FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
416 			} catch (IOException e) {
417 				// Deletion of the preserved pack files failed. Silently return.
418 			}
419 		}
420 	}
421 
422 	/**
423 	 * Delete files associated with a single pack file. First try to delete the
424 	 * ".pack" file because on some platforms the ".pack" file may be locked and
425 	 * can't be deleted. In such a case it is better to detect this early and
426 	 * give up on deleting files for this packfile. Otherwise we may delete the
427 	 * ".idx" file and, when failing to delete the ".pack" file, we are left
428 	 * with a ".pack" file without a ".idx" file.
429 	 *
430 	 * @param packName
431 	 */
432 	private void prunePack(String packName) {
433 		PackExt[] extensions = PackExt.values();
434 		try {
435 			// Delete the .pack file first and if this fails give up on deleting
436 			// the other files
437 			int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
438 			for (PackExt ext : extensions)
439 				if (PackExt.PACK.equals(ext)) {
440 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
441 					removeOldPack(f, packName, ext, deleteOptions);
442 					break;
443 				}
444 			// The .pack file has been deleted. Delete as many of the other
445 			// files as possible.
446 			deleteOptions |= FileUtils.IGNORE_ERRORS;
447 			for (PackExt ext : extensions) {
448 				if (!PackExt.PACK.equals(ext)) {
449 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
450 					removeOldPack(f, packName, ext, deleteOptions);
451 				}
452 			}
453 		} catch (IOException e) {
454 			// Deletion of the .pack file failed. Silently return.
455 		}
456 	}
457 
458 	/**
459 	 * Like "git prune-packed" this method tries to prune all loose objects
460 	 * which can be found in packs. If certain objects can't be pruned (e.g.
461 	 * because the filesystem delete operation fails) this is silently ignored.
462 	 *
463 	 * @throws IOException
464 	 */
465 	public void prunePacked() throws IOException {
466 		ObjectDirectory objdb = repo.getObjectDatabase();
467 		Collection<PackFile> packs = objdb.getPacks();
468 		File objects = repo.getObjectsDirectory();
469 		String[] fanout = objects.list();
470 
471 		if (fanout != null && fanout.length > 0) {
472 			pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
473 			try {
474 				for (String d : fanout) {
475 					checkCancelled();
476 					pm.update(1);
477 					if (d.length() != 2)
478 						continue;
479 					String[] entries = new File(objects, d).list();
480 					if (entries == null)
481 						continue;
482 					for (String e : entries) {
483 						checkCancelled();
484 						if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
485 							continue;
486 						ObjectId id;
487 						try {
488 							id = ObjectId.fromString(d + e);
489 						} catch (IllegalArgumentException notAnObject) {
490 							// Ignore files that do not represent a loose
491 							// object
492 							continue;
493 						}
494 						boolean found = false;
495 						for (PackFile p : packs) {
496 							checkCancelled();
497 							if (p.hasObject(id)) {
498 								found = true;
499 								break;
500 							}
501 						}
502 						if (found)
503 							FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
504 									| FileUtils.SKIP_MISSING
505 									| FileUtils.IGNORE_ERRORS);
506 					}
507 				}
508 			} finally {
509 				pm.endTask();
510 			}
511 		}
512 	}
513 
514 	/**
515 	 * Like "git prune" this method tries to prune all loose objects which are
516 	 * unreferenced. If certain objects can't be pruned (e.g. because the
517 	 * filesystem delete operation fails) this is silently ignored.
518 	 *
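	 * <p>
	 * For example, to prune without protecting any additional objects
	 * ({@code repo} being the {@link FileRepository}):
	 *
	 * <pre>
	 * new GC(repo).prune(Collections.emptySet());
	 * </pre>
	 *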
519 	 * @param objectsToKeep
520 	 *            a set of objects which should explicitly not be pruned
521 	 *
522 	 * @throws IOException
523 	 * @throws ParseException
524 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
525 	 *             parsed
526 	 */
527 	public void prune(Set<ObjectId> objectsToKeep) throws IOException,
528 			ParseException {
529 		long expireDate = getExpireDate();
530 
531 		// Collect all loose objects which are old enough, not referenced from
532 		// the index and not in objectsToKeep
533 		Map<ObjectId, File> deletionCandidates = new HashMap<>();
534 		Set<ObjectId> indexObjects = null;
535 		File objects = repo.getObjectsDirectory();
536 		String[] fanout = objects.list();
537 		if (fanout == null || fanout.length == 0) {
538 			return;
539 		}
540 		pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
541 				fanout.length);
542 		try {
543 			for (String d : fanout) {
544 				checkCancelled();
545 				pm.update(1);
546 				if (d.length() != 2)
547 					continue;
548 				File[] entries = new File(objects, d).listFiles();
549 				if (entries == null)
550 					continue;
551 				for (File f : entries) {
552 					checkCancelled();
553 					String fName = f.getName();
554 					if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
555 						continue;
556 					if (repo.getFS().lastModified(f) >= expireDate)
557 						continue;
558 					try {
559 						ObjectId id = ObjectId.fromString(d + fName);
560 						if (objectsToKeep.contains(id))
561 							continue;
562 						if (indexObjects == null)
563 							indexObjects = listNonHEADIndexObjects();
564 						if (indexObjects.contains(id))
565 							continue;
566 						deletionCandidates.put(id, f);
567 					} catch (IllegalArgumentException notAnObject) {
568 						// Ignore files that do not represent a loose
569 						// object
570 						continue;
571 					}
572 				}
573 			}
574 		} finally {
575 			pm.endTask();
576 		}
577 
578 		if (deletionCandidates.isEmpty()) {
579 			return;
580 		}
581 
582 		checkCancelled();
583 
584 		// From the set of current refs remove all those which have been handled
585 		// during the last repack(). Only those refs which have been added or
586 		// modified since the last repack survive. Only these can save existing
587 		// loose objects from being pruned.
588 		Collection<Ref> newRefs;
589 		if (lastPackedRefs == null || lastPackedRefs.isEmpty())
590 			newRefs = getAllRefs();
591 		else {
592 			Map<String, Ref> last = new HashMap<>();
593 			for (Ref r : lastPackedRefs) {
594 				last.put(r.getName(), r);
595 			}
596 			newRefs = new ArrayList<>();
597 			for (Ref r : getAllRefs()) {
598 				Ref old = last.get(r.getName());
599 				if (!equals(r, old)) {
600 					newRefs.add(r);
601 				}
602 			}
603 		}
604 
605 		if (!newRefs.isEmpty()) {
606 			// There are new/modified refs! Check which loose objects are now
607 			// referenced by these modified refs (or their reflog entries).
608 			// Remove these loose objects from the deletionCandidates.
609 			// When the last candidate has been removed we can leave this
610 			// method early.
611 			ObjectWalk w = new ObjectWalk(repo);
612 			try {
613 				for (Ref cr : newRefs) {
614 					checkCancelled();
615 					w.markStart(w.parseAny(cr.getObjectId()));
616 				}
617 				if (lastPackedRefs != null)
618 					for (Ref lpr : lastPackedRefs) {
619 						w.markUninteresting(w.parseAny(lpr.getObjectId()));
620 					}
621 				removeReferenced(deletionCandidates, w);
622 			} finally {
623 				w.dispose();
624 			}
625 		}
626 
627 		if (deletionCandidates.isEmpty())
628 			return;
629 
630 		// Since we have not left the method yet there are still
631 		// deletionCandidates. The last chance for these objects not to be
632 		// pruned is that they are referenced by reflog entries. Even refs which
633 		// currently point to the same object as during the last repack() may
634 		// have additional reflog entries not handled during the last repack().
635 		ObjectWalk w = new ObjectWalk(repo);
636 		try {
637 			for (Ref ar : getAllRefs())
638 				for (ObjectId id : listRefLogObjects(ar, lastRepackTime)) {
639 					checkCancelled();
640 					w.markStart(w.parseAny(id));
641 				}
642 			if (lastPackedRefs != null)
643 				for (Ref lpr : lastPackedRefs) {
644 					checkCancelled();
645 					w.markUninteresting(w.parseAny(lpr.getObjectId()));
646 				}
647 			removeReferenced(deletionCandidates, w);
648 		} finally {
649 			w.dispose();
650 		}
651 
652 		if (deletionCandidates.isEmpty())
653 			return;
654 
655 		checkCancelled();
656 
657 		// delete all candidates which have survived: these are unreferenced
658 		// loose objects. Make a last check, though, to avoid deleting objects
659 		// that could have been referenced while the candidates list was being
660 		// built (by an incoming push, for example).
661 		Set<File> touchedFanout = new HashSet<>();
662 		for (File f : deletionCandidates.values()) {
663 			if (f.lastModified() < expireDate) {
664 				f.delete();
665 				touchedFanout.add(f.getParentFile());
666 			}
667 		}
668 
669 		for (File f : touchedFanout) {
670 			FileUtils.delete(f,
671 					FileUtils.EMPTY_DIRECTORIES_ONLY | FileUtils.IGNORE_ERRORS);
672 		}
673 
674 		repo.getObjectDatabase().close();
675 	}
676 
677 	private long getExpireDate() throws ParseException {
678 		long expireDate = Long.MAX_VALUE;
679 
680 		if (expire == null && expireAgeMillis == -1) {
681 			String pruneExpireStr = getPruneExpireStr();
682 			if (pruneExpireStr == null)
683 				pruneExpireStr = PRUNE_EXPIRE_DEFAULT;
684 			expire = GitDateParser.parse(pruneExpireStr, null, SystemReader
685 					.getInstance().getLocale());
686 			expireAgeMillis = -1;
687 		}
688 		if (expire != null)
689 			expireDate = expire.getTime();
690 		if (expireAgeMillis != -1)
691 			expireDate = System.currentTimeMillis() - expireAgeMillis;
692 		return expireDate;
693 	}
694 
695 	private String getPruneExpireStr() {
696 		return repo.getConfig().getString(
697                         ConfigConstants.CONFIG_GC_SECTION, null,
698                         ConfigConstants.CONFIG_KEY_PRUNEEXPIRE);
699 	}
700 
701 	private long getPackExpireDate() throws ParseException {
702 		long packExpireDate = Long.MAX_VALUE;
703 
704 		if (packExpire == null && packExpireAgeMillis == -1) {
705 			String prunePackExpireStr = repo.getConfig().getString(
706 					ConfigConstants.CONFIG_GC_SECTION, null,
707 					ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE);
708 			if (prunePackExpireStr == null)
709 				prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT;
710 			packExpire = GitDateParser.parse(prunePackExpireStr, null,
711 					SystemReader.getInstance().getLocale());
712 			packExpireAgeMillis = -1;
713 		}
714 		if (packExpire != null)
715 			packExpireDate = packExpire.getTime();
716 		if (packExpireAgeMillis != -1)
717 			packExpireDate = System.currentTimeMillis() - packExpireAgeMillis;
718 		return packExpireDate;
719 	}
720 
721 	/**
722 	 * Remove all entries from a map whose key is the id of an object referenced
723 	 * by the given ObjectWalk
724 	 *
725 	 * @param id2File
726 	 * @param w
727 	 * @throws MissingObjectException
728 	 * @throws IncorrectObjectTypeException
729 	 * @throws IOException
730 	 */
731 	private void removeReferenced(Map<ObjectId, File> id2File,
732 			ObjectWalk w) throws MissingObjectException,
733 			IncorrectObjectTypeException, IOException {
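		// Two-phase traversal: next() yields the commits reachable from the
		// marked starts, nextObject() afterwards yields the remaining trees,
		// blobs and annotated tags. Return early once the map is empty.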
734 		RevObject ro = w.next();
735 		while (ro != null) {
736 			checkCancelled();
737 			if (id2File.remove(ro.getId()) != null)
738 				if (id2File.isEmpty())
739 					return;
740 			ro = w.next();
741 		}
742 		ro = w.nextObject();
743 		while (ro != null) {
744 			checkCancelled();
745 			if (id2File.remove(ro.getId()) != null)
746 				if (id2File.isEmpty())
747 					return;
748 			ro = w.nextObject();
749 		}
750 	}
751 
752 	private static boolean equals(Ref r1, Ref r2) {
753 		if (r1 == null || r2 == null)
754 			return false;
755 		if (r1.isSymbolic()) {
756 			if (!r2.isSymbolic())
757 				return false;
758 			return r1.getTarget().getName().equals(r2.getTarget().getName());
759 		} else {
760 			if (r2.isSymbolic()) {
761 				return false;
762 			}
763 			return Objects.equals(r1.getObjectId(), r2.getObjectId());
764 		}
765 	}
766 
767 	/**
768 	 * Packs all non-symbolic, loose refs into packed-refs.
769 	 *
770 	 * @throws IOException
771 	 */
772 	public void packRefs() throws IOException {
773 		Collection<Ref> refs = repo.getRefDatabase().getRefs(Constants.R_REFS).values();
774 		List<String> refsToBePacked = new ArrayList<>(refs.size());
775 		pm.beginTask(JGitText.get().packRefs, refs.size());
776 		try {
777 			for (Ref ref : refs) {
778 				checkCancelled();
779 				if (!ref.isSymbolic() && ref.getStorage().isLoose())
780 					refsToBePacked.add(ref.getName());
781 				pm.update(1);
782 			}
783 			((RefDirectory) repo.getRefDatabase()).pack(refsToBePacked);
784 		} finally {
785 			pm.endTask();
786 		}
787 	}
788 
789 	/**
790 	 * Packs all objects which are reachable from any of the heads into one pack
791 	 * file. Additionally all objects which are not reachable from any head but
792 	 * which are reachable from any of the other refs (e.g. tags), special refs
793 	 * (e.g. FETCH_HEAD) or the index are packed into a separate pack file.
794 	 * Objects contained in pack files which have an associated .keep file are
795 	 * never repacked. All old pack files which existed before are deleted.
796 	 *
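	 * <p>
	 * Usage sketch ({@code repo} being the {@link FileRepository}):
	 *
	 * <pre>
	 * Collection&lt;PackFile&gt; packs = new GC(repo).repack();
	 * </pre>
	 *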
797 	 * @return a collection of the newly created pack files
798 	 * @throws IOException
799 	 *             when an {@link IOException} occurs while reading refs, the
800 	 *             index, pack files, objects or reflog entries, or while
801 	 *             writing the new pack files
802 	 */
803 	public Collection<PackFile> repack() throws IOException {
804 		Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
805 
806 		long time = System.currentTimeMillis();
807 		Collection<Ref> refsBefore = getAllRefs();
808 
809 		Set<ObjectId> allHeadsAndTags = new HashSet<>();
810 		Set<ObjectId> allHeads = new HashSet<>();
811 		Set<ObjectId> allTags = new HashSet<>();
812 		Set<ObjectId> nonHeads = new HashSet<>();
813 		Set<ObjectId> txnHeads = new HashSet<>();
814 		Set<ObjectId> tagTargets = new HashSet<>();
815 		Set<ObjectId> indexObjects = listNonHEADIndexObjects();
816 		RefDatabase refdb = repo.getRefDatabase();
817 
818 		for (Ref ref : refsBefore) {
819 			checkCancelled();
820 			nonHeads.addAll(listRefLogObjects(ref, 0));
821 			if (ref.isSymbolic() || ref.getObjectId() == null) {
822 				continue;
823 			}
824 			if (isHead(ref)) {
825 				allHeads.add(ref.getObjectId());
826 			} else if (isTag(ref)) {
827 				allTags.add(ref.getObjectId());
828 			} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
829 				txnHeads.add(ref.getObjectId());
830 			} else {
831 				nonHeads.add(ref.getObjectId());
832 			}
833 			if (ref.getPeeledObjectId() != null) {
834 				tagTargets.add(ref.getPeeledObjectId());
835 			}
836 		}
837 
838 		List<ObjectIdSet> excluded = new LinkedList<>();
839 		for (final PackFile f : repo.getObjectDatabase().getPacks()) {
840 			checkCancelled();
841 			if (f.shouldBeKept())
842 				excluded.add(f.getIndex());
843 		}
844 
845 		// Don't exclude tags that are also branch tips
846 		allTags.removeAll(allHeads);
847 		allHeadsAndTags.addAll(allHeads);
848 		allHeadsAndTags.addAll(allTags);
849 
850 		// Hoist all branch tips and tags earlier in the pack file
851 		tagTargets.addAll(allHeadsAndTags);
852 		nonHeads.addAll(indexObjects);
853 
854 		// Combine the GC_REST objects into the GC pack if requested
855 		if (pconfig != null && pconfig.getSinglePack()) {
856 			allHeadsAndTags.addAll(nonHeads);
857 			nonHeads.clear();
858 		}
859 
860 		List<PackFile> ret = new ArrayList<>(2);
861 		PackFile heads = null;
862 		if (!allHeadsAndTags.isEmpty()) {
863 			heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
864 					tagTargets, excluded);
865 			if (heads != null) {
866 				ret.add(heads);
867 				excluded.add(0, heads.getIndex());
868 			}
869 		}
870 		if (!nonHeads.isEmpty()) {
871 			PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
872 					tagTargets, excluded);
873 			if (rest != null)
874 				ret.add(rest);
875 		}
876 		if (!txnHeads.isEmpty()) {
877 			PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
878 					null, excluded);
879 			if (txn != null)
880 				ret.add(txn);
881 		}
882 		try {
883 			deleteOldPacks(toBeDeleted, ret);
884 		} catch (ParseException e) {
885 			// TODO: the exception has to be wrapped into an IOException because
886 			// throwing the ParseException directly would break the API, instead
887 			// we should throw a ConfigInvalidException
888 			throw new IOException(e);
889 		}
890 		prunePacked();
891 		deleteEmptyRefsFolders();
892 		deleteOrphans();
893 		deleteTempPacksIdx();
894 
895 		lastPackedRefs = refsBefore;
896 		lastRepackTime = time;
897 		return ret;
898 	}
899 
900 	private static boolean isHead(Ref ref) {
901 		return ref.getName().startsWith(Constants.R_HEADS);
902 	}
903 
904 	private static boolean isTag(Ref ref) {
905 		return ref.getName().startsWith(Constants.R_TAGS);
906 	}
907 
908 	private void deleteEmptyRefsFolders() throws IOException {
909 		Path refs = repo.getDirectory().toPath().resolve(Constants.R_REFS);
910 		// Avoid deleting a folder that was created after the threshold so that concurrent
911 		// operations trying to create a reference are not impacted
912 		Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
913 		try (Stream<Path> entries = Files.list(refs)) {
914 			Iterator<Path> iterator = entries.iterator();
915 			while (iterator.hasNext()) {
916 				try (Stream<Path> s = Files.list(iterator.next())) {
917 					s.filter(path -> canBeSafelyDeleted(path, threshold)).forEach(this::deleteDir);
918 				}
919 			}
920 		}
921 	}
922 
923 	private boolean canBeSafelyDeleted(Path path, Instant threshold) {
924 		try {
925 			return Files.getLastModifiedTime(path).toInstant().isBefore(threshold);
926 		}
927 		catch (IOException e) {
928 			LOG.warn(MessageFormat.format(
929 					JGitText.get().cannotAccessLastModifiedForSafeDeletion,
930 					path), e);
931 			return false;
932 		}
933 	}
934 
935 	private void deleteDir(Path dir) {
936 		try (Stream<Path> dirs = Files.walk(dir)) {
937 			dirs.filter(this::isDirectory).sorted(Comparator.reverseOrder())
938 					.forEach(this::delete);
939 		} catch (IOException e) {
940 			LOG.error(e.getMessage(), e);
941 		}
942 	}
943 
944 	private boolean isDirectory(Path p) {
945 		return p.toFile().isDirectory();
946 	}
947 
948 	private void delete(Path d) {
949 		try {
950 			Files.delete(d);
951 		} catch (DirectoryNotEmptyException e) {
952 			// Don't log
953 		} catch (IOException e) {
954 			LOG.error(MessageFormat.format(JGitText.get().cannotDeleteFile, d),
955 					e);
956 		}
957 	}
958 
959 	/**
960 	 * Deletes orphans
961 	 * <p>
962 	 * A file is considered an orphan if it is either a "bitmap" or an index
963 	 * file, and its corresponding pack file is missing.
964 	 * </p>
965 	 */
966 	private void deleteOrphans() {
967 		Path packDir = Paths.get(repo.getObjectsDirectory().getAbsolutePath(),
968 				"pack"); //$NON-NLS-1$
969 		List<String> fileNames = null;
970 		try (Stream<Path> files = Files.list(packDir)) {
971 			fileNames = files.map(path -> path.getFileName().toString())
972 					.filter(name -> {
973 						return (name.endsWith(PACK_EXT)
974 								|| name.endsWith(BITMAP_EXT)
975 								|| name.endsWith(INDEX_EXT));
976 					}).sorted(Collections.reverseOrder())
977 					.collect(Collectors.toList());
978 		} catch (IOException e1) {
979 			// ignore
980 		}
981 		if (fileNames == null) {
982 			return;
983 		}
984 
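		// fileNames is sorted in descending order, so for every pack the
		// ".pack" file is listed before its ".idx" and ".bitmap" files; "base"
		// therefore always holds the pack whose companion files follow it.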
985 		String base = null;
986 		for (String n : fileNames) {
987 			if (n.endsWith(PACK_EXT)) {
988 				base = n.substring(0, n.lastIndexOf('.'));
989 			} else {
990 				if (base == null || !n.startsWith(base)) {
991 					try {
992 						Files.delete(new File(packDir.toFile(), n).toPath());
993 					} catch (IOException e) {
994 						LOG.error(e.getMessage(), e);
995 					}
996 				}
997 			}
998 		}
999 	}
1000 
1001 	private void deleteTempPacksIdx() {
1002 		Path packDir = Paths.get(repo.getObjectsDirectory().getAbsolutePath(),
1003 				"pack"); //$NON-NLS-1$
1004 		Instant threshold = Instant.now().minus(1, ChronoUnit.DAYS);
1005 		try (DirectoryStream<Path> stream =
1006 				Files.newDirectoryStream(packDir, "gc_*_tmp")) { //$NON-NLS-1$
1007 			stream.forEach(t -> {
1008 				try {
1009 					Instant lastModified = Files.getLastModifiedTime(t)
1010 							.toInstant();
1011 					if (lastModified.isBefore(threshold)) {
1012 						Files.deleteIfExists(t);
1013 					}
1014 				} catch (IOException e) {
1015 					LOG.error(e.getMessage(), e);
1016 				}
1017 			});
1018 		} catch (IOException e) {
1019 			LOG.error(e.getMessage(), e);
1020 		}
1021 	}
1022 
1023 	/**
1024 	 * @param ref
1025 	 *            the ref which log should be inspected
1026 	 * @param minTime only reflog entries not older than this time are processed
1027 	 * @return the {@link ObjectId}s contained in the reflog
1028 	 * @throws IOException
1029 	 */
1030 	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException {
1031 		ReflogReader reflogReader = repo.getReflogReader(ref.getName());
1032 		if (reflogReader == null) {
1033 			return Collections.emptySet();
1034 		}
1035 		List<ReflogEntry> rlEntries = reflogReader
1036 				.getReverseEntries();
1037 		if (rlEntries == null || rlEntries.isEmpty())
1038 			return Collections.<ObjectId> emptySet();
1039 		Set<ObjectId> ret = new HashSet<>();
1040 		for (ReflogEntry e : rlEntries) {
1041 			if (e.getWho().getWhen().getTime() < minTime)
1042 				break;
1043 			ObjectId newId = e.getNewId();
1044 			if (newId != null && !ObjectId.zeroId().equals(newId))
1045 				ret.add(newId);
1046 			ObjectId oldId = e.getOldId();
1047 			if (oldId != null && !ObjectId.zeroId().equals(oldId))
1048 				ret.add(oldId);
1049 		}
1050 		return ret;
1051 	}
1052 
1053 	/**
1054 	 * Returns a collection of all refs and additional refs.
1055 	 *
1056 	 * Additional refs which don't start with "refs/" are not returned because
1057 	 * they should not save objects from being garbage collected. Examples of
1058 	 * such references are ORIG_HEAD, MERGE_HEAD, FETCH_HEAD and
1059 	 * CHERRY_PICK_HEAD.
1060 	 *
1061 	 * @return a collection of refs pointing to live objects.
1062 	 * @throws IOException
1063 	 */
1064 	private Collection<Ref> getAllRefs() throws IOException {
1065 		RefDatabase refdb = repo.getRefDatabase();
1066 		Collection<Ref> refs = refdb.getRefs(RefDatabase.ALL).values();
1067 		List<Ref> addl = refdb.getAdditionalRefs();
1068 		if (!addl.isEmpty()) {
1069 			List<Ref> all = new ArrayList<>(refs.size() + addl.size());
1070 			all.addAll(refs);
1071 			// add additional refs which start with refs/
1072 			for (Ref r : addl) {
1073 				checkCancelled();
1074 				if (r.getName().startsWith(Constants.R_REFS)) {
1075 					all.add(r);
1076 				}
1077 			}
1078 			return all;
1079 		}
1080 		return refs;
1081 	}
1082 
1083 	/**
1084 	 * Return the set of objects in the index which differ from what is in
1085 	 * HEAD.
1086 	 *
1087 	 * @return a set of ObjectIds of changed objects in the index
1088 	 * @throws IOException
1089 	 * @throws CorruptObjectException
1090 	 * @throws NoWorkTreeException
1091 	 */
1092 	private Set<ObjectId> listNonHEADIndexObjects()
1093 			throws CorruptObjectException, IOException {
1094 		if (repo.isBare()) {
1095 			return Collections.emptySet();
1096 		}
1097 		try (TreeWalk treeWalk = new TreeWalk(repo)) {
1098 			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
1099 			ObjectId headID = repo.resolve(Constants.HEAD);
1100 			if (headID != null) {
1101 				try (RevWalk revWalk = new RevWalk(repo)) {
1102 					treeWalk.addTree(revWalk.parseTree(headID));
1103 				}
1104 			}
1105 
1106 			treeWalk.setFilter(TreeFilter.ANY_DIFF);
1107 			treeWalk.setRecursive(true);
1108 			Set<ObjectId> ret = new HashSet<>();
1109 
1110 			while (treeWalk.next()) {
1111 				checkCancelled();
1112 				ObjectId objectId = treeWalk.getObjectId(0);
1113 				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
1114 				case FileMode.TYPE_MISSING:
1115 				case FileMode.TYPE_GITLINK:
1116 					continue;
1117 				case FileMode.TYPE_TREE:
1118 				case FileMode.TYPE_FILE:
1119 				case FileMode.TYPE_SYMLINK:
1120 					ret.add(objectId);
1121 					continue;
1122 				default:
1123 					throw new IOException(MessageFormat.format(
1124 							JGitText.get().corruptObjectInvalidMode3,
1125 							String.format("%o", //$NON-NLS-1$
1126 									Integer.valueOf(treeWalk.getRawMode(0))),
1127 							(objectId == null) ? "null" : objectId.name(), //$NON-NLS-1$
1128 							treeWalk.getPathString(), //
1129 							repo.getIndexFile()));
1130 				}
1131 			}
1132 			return ret;
1133 		}
1134 	}
1135 
1136 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
1137 			@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
1138 			Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
1139 			throws IOException {
1140 		checkCancelled();
1141 		File tmpPack = null;
1142 		Map<PackExt, File> tmpExts = new TreeMap<>(
1143 				new Comparator<PackExt>() {
1144 					@Override
1145 					public int compare(PackExt o1, PackExt o2) {
1146 						// INDEX entries must be returned last, so the pack
1147 						// scanner does not pick up the new pack until all the
1148 						// PackExt entries have been written.
1149 						if (o1 == o2)
1150 							return 0;
1151 						if (o1 == PackExt.INDEX)
1152 							return 1;
1153 						if (o2 == PackExt.INDEX)
1154 							return -1;
1155 						return Integer.signum(o1.hashCode() - o2.hashCode());
1156 					}
1157 
1158 				});
1159 		try (PackWriter pw = new PackWriter(
1160 				(pconfig == null) ? new PackConfig(repo) : pconfig,
1161 				repo.newObjectReader())) {
1162 			// prepare the PackWriter
1163 			pw.setDeltaBaseAsOffset(true);
1164 			pw.setReuseDeltaCommits(false);
1165 			if (tagTargets != null) {
1166 				pw.setTagTargets(tagTargets);
1167 			}
1168 			if (excludeObjects != null)
1169 				for (ObjectIdSet idx : excludeObjects)
1170 					pw.excludeObjects(idx);
1171 			pw.preparePack(pm, want, have, PackWriter.NONE, tags);
1172 			if (pw.getObjectCount() == 0)
1173 				return null;
1174 			checkCancelled();
1175 
1176 			// create temporary files
1177 			String id = pw.computeName().getName();
1178 			File packdir = new File(repo.getObjectsDirectory(), "pack"); //$NON-NLS-1$
1179 			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir); //$NON-NLS-1$ //$NON-NLS-2$
1180 			final String tmpBase = tmpPack.getName()
1181 					.substring(0, tmpPack.getName().lastIndexOf('.'));
1182 			File tmpIdx = new File(packdir, tmpBase + ".idx_tmp"); //$NON-NLS-1$
1183 			tmpExts.put(INDEX, tmpIdx);
1184 
1185 			if (!tmpIdx.createNewFile())
1186 				throw new IOException(MessageFormat.format(
1187 						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
1188 
1189 			// write the packfile
1190 			FileOutputStream fos = new FileOutputStream(tmpPack);
1191 			FileChannel channel = fos.getChannel();
1192 			OutputStream channelStream = Channels.newOutputStream(channel);
1193 			try {
1194 				pw.writePack(pm, pm, channelStream);
1195 			} finally {
1196 				channel.force(true);
1197 				channelStream.close();
1198 				fos.close();
1199 			}
1200 
1201 			// write the packindex
1202 			fos = new FileOutputStream(tmpIdx);
1203 			FileChannel idxChannel = fos.getChannel();
1204 			OutputStream idxStream = Channels.newOutputStream(idxChannel);
1205 			try {
1206 				pw.writeIndex(idxStream);
1207 			} finally {
1208 				idxChannel.force(true);
1209 				idxStream.close();
1210 				fos.close();
1211 			}
1212 
1213 			if (pw.prepareBitmapIndex(pm)) {
1214 				File tmpBitmapIdx = new File(packdir, tmpBase + ".bitmap_tmp"); //$NON-NLS-1$
1215 				tmpExts.put(BITMAP_INDEX, tmpBitmapIdx);
1216 
1217 				if (!tmpBitmapIdx.createNewFile())
1218 					throw new IOException(MessageFormat.format(
1219 							JGitText.get().cannotCreateIndexfile,
1220 							tmpBitmapIdx.getPath()));
1221 
1222 				fos = new FileOutputStream(tmpBitmapIdx);
1223 				idxChannel = fos.getChannel();
1224 				idxStream = Channels.newOutputStream(idxChannel);
1225 				try {
1226 					pw.writeBitmapIndex(idxStream);
1227 				} finally {
1228 					idxChannel.force(true);
1229 					idxStream.close();
1230 					fos.close();
1231 				}
1232 			}
1233 
1234 			// rename the temporary files to real files
1235 			File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
1236 
1237 			repo.getObjectDatabase().closeAllPackHandles(realPack);
1238 			tmpPack.setReadOnly();
1239 
1240 			FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
1241 			for (Map.Entry<PackExt, File> tmpEntry : tmpExts.entrySet()) {
1242 				File tmpExt = tmpEntry.getValue();
1243 				tmpExt.setReadOnly();
1244 
1245 				File realExt = nameFor(id,
1246 						"." + tmpEntry.getKey().getExtension()); //$NON-NLS-1$
1247 				try {
1248 					FileUtils.rename(tmpExt, realExt,
1249 							StandardCopyOption.ATOMIC_MOVE);
1250 				} catch (IOException e) {
1251 					File newExt = new File(realExt.getParentFile(),
1252 							realExt.getName() + ".new"); //$NON-NLS-1$
1253 					try {
1254 						FileUtils.rename(tmpExt, newExt,
1255 								StandardCopyOption.ATOMIC_MOVE);
1256 					} catch (IOException e2) {
1257 						newExt = tmpExt;
1258 						e = e2;
1259 					}
1260 					throw new IOException(MessageFormat.format(
1261 							JGitText.get().panicCantRenameIndexFile, newExt,
1262 							realExt), e);
1263 				}
1264 			}
1265 
1266 			return repo.getObjectDatabase().openPack(realPack);
1267 		} finally {
1268 			if (tmpPack != null && tmpPack.exists())
1269 				tmpPack.delete();
1270 			for (File tmpExt : tmpExts.values()) {
1271 				if (tmpExt.exists())
1272 					tmpExt.delete();
1273 			}
1274 		}
1275 	}
1276 
1277 	private File nameFor(String name, String ext) {
1278 		File packdir = new File(repo.getObjectsDirectory(), "pack"); //$NON-NLS-1$
1279 		return new File(packdir, "pack-" + name + ext); //$NON-NLS-1$
1280 	}
1281 
1282 	private void checkCancelled() throws CancelledException {
1283 		if (pm.isCancelled()) {
1284 			throw new CancelledException(JGitText.get().operationCanceled);
1285 		}
1286 	}
1287 
1288 	/**
1289 	 * A class holding statistical data for a FileRepository regarding how many
1290 	 * objects are stored as loose or packed objects
1291 	 */
1292 	public static class RepoStatistics {
1293 		/**
1294 		 * The number of objects stored in pack files. If the same object is
1295 		 * stored in multiple pack files then it is counted as often as it
1296 		 * occurs in pack files.
1297 		 */
1298 		public long numberOfPackedObjects;
1299 
1300 		/**
1301 		 * The number of pack files
1302 		 */
1303 		public long numberOfPackFiles;
1304 
1305 		/**
1306 		 * The number of objects stored as loose objects.
1307 		 */
1308 		public long numberOfLooseObjects;
1309 
1310 		/**
1311 		 * The sum of the sizes of all files used to persist loose objects.
1312 		 */
1313 		public long sizeOfLooseObjects;
1314 
1315 		/**
1316 		 * The sum of the sizes of all pack files.
1317 		 */
1318 		public long sizeOfPackedObjects;
1319 
1320 		/**
1321 		 * The number of loose refs.
1322 		 */
1323 		public long numberOfLooseRefs;
1324 
1325 		/**
1326 		 * The number of refs stored in pack files.
1327 		 */
1328 		public long numberOfPackedRefs;
1329 
1330 		/**
1331 		 * The number of bitmaps in the bitmap indices.
1332 		 */
1333 		public long numberOfBitmaps;
1334 
1335 		@Override
1336 		public String toString() {
1337 			final StringBuilder b = new StringBuilder();
1338 			b.append("numberOfPackedObjects=").append(numberOfPackedObjects); //$NON-NLS-1$
1339 			b.append(", numberOfPackFiles=").append(numberOfPackFiles); //$NON-NLS-1$
1340 			b.append(", numberOfLooseObjects=").append(numberOfLooseObjects); //$NON-NLS-1$
1341 			b.append(", numberOfLooseRefs=").append(numberOfLooseRefs); //$NON-NLS-1$
1342 			b.append(", numberOfPackedRefs=").append(numberOfPackedRefs); //$NON-NLS-1$
1343 			b.append(", sizeOfLooseObjects=").append(sizeOfLooseObjects); //$NON-NLS-1$
1344 			b.append(", sizeOfPackedObjects=").append(sizeOfPackedObjects); //$NON-NLS-1$
1345 			b.append(", numberOfBitmaps=").append(numberOfBitmaps); //$NON-NLS-1$
1346 			return b.toString();
1347 		}
1348 	}
1349 
1350 	/**
1351 	 * Returns information about objects and pack files for a FileRepository.
1352 	 *
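	 * <p>
	 * For example ({@code repo} being the {@link FileRepository}):
	 *
	 * <pre>
	 * RepoStatistics stats = new GC(repo).getStatistics();
	 * long loose = stats.numberOfLooseObjects;
	 * </pre>
	 *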
1353 	 * @return information about objects and pack files for a FileRepository
1354 	 * @throws IOException
1355 	 */
1356 	public RepoStatistics getStatistics() throws IOException {
1357 		RepoStatistics ret = new RepoStatistics();
1358 		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
1359 		for (PackFile f : packs) {
1360 			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
1361 			ret.numberOfPackFiles++;
1362 			ret.sizeOfPackedObjects += f.getPackFile().length();
1363 			if (f.getBitmapIndex() != null)
1364 				ret.numberOfBitmaps += f.getBitmapIndex().getBitmapCount();
1365 		}
1366 		File objDir = repo.getObjectsDirectory();
1367 		String[] fanout = objDir.list();
1368 		if (fanout != null && fanout.length > 0) {
1369 			for (String d : fanout) {
1370 				if (d.length() != 2)
1371 					continue;
1372 				File[] entries = new File(objDir, d).listFiles();
1373 				if (entries == null)
1374 					continue;
1375 				for (File f : entries) {
1376 					if (f.getName().length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
1377 						continue;
1378 					ret.numberOfLooseObjects++;
1379 					ret.sizeOfLooseObjects += f.length();
1380 				}
1381 			}
1382 		}
1383 
1384 		RefDatabase refDb = repo.getRefDatabase();
1385 		for (Ref r : refDb.getRefs(RefDatabase.ALL).values()) {
1386 			Storage storage = r.getStorage();
1387 			if (storage == Storage.LOOSE || storage == Storage.LOOSE_PACKED)
1388 				ret.numberOfLooseRefs++;
1389 			if (storage == Storage.PACKED || storage == Storage.LOOSE_PACKED)
1390 				ret.numberOfPackedRefs++;
1391 		}
1392 
1393 		return ret;
1394 	}
1395 
1396 	/**
1397 	 * Set the progress monitor used for garbage collection methods.
1398 	 *
1399 	 * @param pm
1400 	 * @return this
1401 	 */
1402 	public GC setProgressMonitor(ProgressMonitor pm) {
1403 		this.pm = (pm == null) ? NullProgressMonitor.INSTANCE : pm;
1404 		return this;
1405 	}
1406 
1407 	/**
1408 	 * During gc() or prune() each unreferenced, loose object which has been
1409 	 * created or modified in the last <code>expireAgeMillis</code> milliseconds
1410 	 * will not be pruned. Only older objects may be pruned. If set to 0 then
1411 	 * every object is a candidate for pruning.
1412 	 *
1413 	 * @param expireAgeMillis
1414 	 *            minimal age of objects to be pruned in milliseconds.
1415 	 */
1416 	public void setExpireAgeMillis(long expireAgeMillis) {
1417 		this.expireAgeMillis = expireAgeMillis;
1418 		expire = null;
1419 	}
1420 
1421 	/**
1422 	 * During gc() or prune() packfiles which are created or modified in the
1423 	 * last <code>packExpireAgeMillis</code> milliseconds will not be deleted.
1424 	 * Only older packfiles may be deleted. If set to 0 then every packfile is a
1425 	 * candidate for deletion.
1426 	 *
1427 	 * @param packExpireAgeMillis
1428 	 *            minimal age of packfiles to be deleted in milliseconds.
1429 	 */
1430 	public void setPackExpireAgeMillis(long packExpireAgeMillis) {
1431 		this.packExpireAgeMillis = packExpireAgeMillis;
1432 		packExpire = null;
1433 	}
1434 
1435 	/**
1436 	 * Set the PackConfig used when (re-)writing packfiles. This makes it
1437 	 * possible to influence how packs are written and to implement something
1438 	 * similar to "git gc --aggressive".
1439 	 *
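	 * <p>
	 * A sketch of a more aggressive configuration ({@code gc} being a GC
	 * instance; the window and depth values are only illustrative, not git's
	 * exact --aggressive defaults):
	 *
	 * <pre>
	 * PackConfig aggressive = new PackConfig(repo);
	 * aggressive.setDeltaSearchWindowSize(250); // illustrative value
	 * aggressive.setMaxDeltaDepth(250); // illustrative value
	 * gc.setPackConfig(aggressive);
	 * </pre>
	 *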
1440 	 * @param pconfig
1441 	 *            the {@link PackConfig} used when writing packs
1442 	 */
1443 	public void setPackConfig(PackConfig pconfig) {
1444 		this.pconfig = pconfig;
1445 	}
1446 
1447 	/**
1448 	 * During gc() or prune() each unreferenced, loose object which has been
1449 	 * created or modified after or at <code>expire</code> will not be pruned.
1450 	 * Only older objects may be pruned. If set to null then every object is a
1451 	 * candidate for pruning.
1452 	 *
1453 	 * @param expire
1454 	 *            instant in time which defines object expiration: objects with
1455 	 *            a modification time before this instant are expired, objects
1456 	 *            with a modification time newer than or equal to this instant
1457 	 *            are not expired
1458 	 */
1459 	public void setExpire(Date expire) {
1460 		this.expire = expire;
1461 		expireAgeMillis = -1;
1462 	}
1463 
1464 	/**
1465 	 * During gc() or prune() packfiles which are created or modified after or
1466 	 * at <code>packExpire</code> will not be deleted. Only older packfiles may
1467 	 * be deleted. If set to null then every packfile is a candidate for
1468 	 * deletion.
1469 	 *
1470 	 * @param packExpire
1471 	 *            instant in time which defines packfile expiration
1472 	 */
1473 	public void setPackExpire(Date packExpire) {
1474 		this.packExpire = packExpire;
1475 		packExpireAgeMillis = -1;
1476 	}
1477 
1478 	/**
1479 	 * Set the {@code gc --auto} option.
1480 	 *
1481 	 * With this option, gc checks whether any housekeeping is required; if not,
1482 	 * it exits without performing any work. Some JGit commands run
1483 	 * {@code gc --auto} after performing operations that could create many
1484 	 * loose objects.
1485 	 * <p/>
1486 	 * Housekeeping is required if there are too many loose objects or too many
1487 	 * packs in the repository. If the number of loose objects exceeds the value
1488 	 * of the gc.auto option JGit GC consolidates all existing packs into a
1489 	 * single pack (equivalent to {@code -A} option), whereas git-core would
1490 	 * combine all loose objects into a single pack using {@code repack -d -l}.
1491 	 * Setting the value of {@code gc.auto} to 0 disables automatic packing of
1492 	 * loose objects.
1493 	 * <p/>
1494 	 * If the number of packs exceeds the value of {@code gc.autoPackLimit},
1495 	 * then existing packs (except those marked with a .keep file) are
1496 	 * consolidated into a single pack by using the {@code -A} option of repack.
1497 	 * Setting {@code gc.autoPackLimit} to 0 disables automatic consolidation of
1498 	 * packs.
1499 	 * <p/>
1500 	 * Like git, the following JGit commands run auto gc:
1501 	 * <ul>
1502 	 * <li>fetch</li>
1503 	 * <li>merge</li>
1504 	 * <li>rebase</li>
1505 	 * <li>receive-pack</li>
1506 	 * </ul>
1507 	 * The auto gc for receive-pack can be suppressed by setting the config
1508 	 * option {@code receive.autogc = false}
1509 	 *
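	 * <p>
	 * For example, automatic packing of loose objects can be disabled through
	 * the repository configuration (a sketch; {@code repo} is the repository,
	 * error handling is omitted):
	 *
	 * <pre>
	 * StoredConfig cfg = repo.getConfig();
	 * cfg.setInt("gc", null, "auto", 0);
	 * cfg.save();
	 * </pre>
	 *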
1510 	 * @param auto
1511 	 *            defines whether gc should do automatic housekeeping
1512 	 */
1513 	public void setAuto(boolean auto) {
1514 		this.automatic = auto;
1515 	}
1516 
1517 	/**
1518 	 * @param background
1519 	 *            whether to run the gc in a background thread.
1520 	 */
1521 	void setBackground(boolean background) {
1522 		this.background = background;
1523 	}
1524 
1525 	private boolean needGc() {
1526 		if (tooManyPacks()) {
1527 			addRepackAllOption();
1528 		} else if (!tooManyLooseObjects()) {
1529 			return false;
1530 		}
1531 		// TODO run pre-auto-gc hook, if it fails return false
1532 		return true;
1533 	}
1534 
1535 	private void addRepackAllOption() {
1536 		// TODO: if JGit GC is enhanced to support repack's option -l this
1537 		// method needs to be implemented
1538 	}
1539 
1540 	/**
1541 	 * @return {@code true} if number of packs > gc.autopacklimit (default 50)
1542 	 */
1543 	boolean tooManyPacks() {
1544 		int autopacklimit = repo.getConfig().getInt(
1545 				ConfigConstants.CONFIG_GC_SECTION,
1546 				ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
1547 				DEFAULT_AUTOPACKLIMIT);
1548 		if (autopacklimit <= 0) {
1549 			return false;
1550 		}
1551 		// JGit always creates two packfiles, one for the objects reachable from
1552 		// branches, and another one for the rest
1553 		return repo.getObjectDatabase().getPacks().size() > (autopacklimit + 1);
1554 	}
1555 
1556 	/**
1557 	 * Quickly estimate the number of loose objects; SHA-1 hashes are evenly
1558 	 * distributed, so counting objects in one directory (bucket 17) is sufficient.
1559 	 *
1560 	 * @return {@code true} if number of loose objects > gc.auto (default 6700)
1561 	 */
1562 	boolean tooManyLooseObjects() {
1563 		int auto = getLooseObjectLimit();
1564 		if (auto <= 0) {
1565 			return false;
1566 		}
1567 		int n = 0;
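		// Scale the limit down to a single fanout directory, rounding up: with
		// the default gc.auto of 6700 this yields (6700 + 255) / 256 = 27.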
1568 		int threshold = (auto + 255) / 256;
1569 		Path dir = repo.getObjectsDirectory().toPath().resolve("17"); //$NON-NLS-1$
1570 		if (!Files.exists(dir)) {
1571 			return false;
1572 		}
1573 		try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir,
1574 				new DirectoryStream.Filter<Path>() {
1575 
1576 					@Override
1577 					public boolean accept(Path file) throws IOException {
1578 						Path fileName = file.getFileName();
1579 						return Files.isRegularFile(file) && fileName != null
1580 								&& PATTERN_LOOSE_OBJECT
1581 										.matcher(fileName.toString()).matches();
1582 					}
1583 				})) {
1584 			for (Iterator<Path> iter = stream.iterator(); iter.hasNext();
1585 					iter.next()) {
1586 				if (++n > threshold) {
1587 					return true;
1588 				}
1589 			}
1590 		} catch (IOException e) {
1591 			LOG.error(e.getMessage(), e);
1592 		}
1593 		return false;
1594 	}
1595 
1596 	private int getLooseObjectLimit() {
1597 		return repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
1598 				ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
1599 	}
1600 }