1   /*
2    * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
3    * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  package org.eclipse.jgit.internal.storage.file;
45  
46  import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
47  import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
48  
49  import java.io.File;
50  import java.io.FileOutputStream;
51  import java.io.IOException;
52  import java.io.OutputStream;
53  import java.nio.channels.Channels;
54  import java.nio.channels.FileChannel;
55  import java.nio.file.DirectoryStream;
56  import java.nio.file.Files;
57  import java.nio.file.Path;
58  import java.nio.file.Paths;
59  import java.nio.file.StandardCopyOption;
60  import java.text.MessageFormat;
61  import java.text.ParseException;
62  import java.util.ArrayList;
63  import java.util.Collection;
64  import java.util.Collections;
65  import java.util.Comparator;
66  import java.util.Date;
67  import java.util.HashMap;
68  import java.util.HashSet;
69  import java.util.Iterator;
70  import java.util.LinkedList;
71  import java.util.List;
72  import java.util.Map;
73  import java.util.Objects;
74  import java.util.Set;
75  import java.util.TreeMap;
76  import java.util.regex.Pattern;
77  import java.util.stream.Collectors;
78  import java.util.stream.Stream;
79  
80  import org.eclipse.jgit.annotations.NonNull;
81  import org.eclipse.jgit.dircache.DirCacheIterator;
82  import org.eclipse.jgit.errors.CancelledException;
83  import org.eclipse.jgit.errors.CorruptObjectException;
84  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
85  import org.eclipse.jgit.errors.MissingObjectException;
86  import org.eclipse.jgit.errors.NoWorkTreeException;
87  import org.eclipse.jgit.internal.JGitText;
88  import org.eclipse.jgit.internal.storage.pack.PackExt;
89  import org.eclipse.jgit.internal.storage.pack.PackWriter;
90  import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
91  import org.eclipse.jgit.lib.ConfigConstants;
92  import org.eclipse.jgit.lib.Constants;
93  import org.eclipse.jgit.lib.FileMode;
94  import org.eclipse.jgit.lib.NullProgressMonitor;
95  import org.eclipse.jgit.lib.ObjectId;
96  import org.eclipse.jgit.lib.ObjectIdSet;
97  import org.eclipse.jgit.lib.ObjectLoader;
98  import org.eclipse.jgit.lib.ObjectReader;
99  import org.eclipse.jgit.lib.ProgressMonitor;
100 import org.eclipse.jgit.lib.Ref;
101 import org.eclipse.jgit.lib.Ref.Storage;
102 import org.eclipse.jgit.lib.RefDatabase;
103 import org.eclipse.jgit.lib.ReflogEntry;
104 import org.eclipse.jgit.lib.ReflogReader;
105 import org.eclipse.jgit.revwalk.ObjectWalk;
106 import org.eclipse.jgit.revwalk.RevObject;
107 import org.eclipse.jgit.revwalk.RevWalk;
108 import org.eclipse.jgit.storage.pack.PackConfig;
109 import org.eclipse.jgit.treewalk.TreeWalk;
110 import org.eclipse.jgit.treewalk.filter.TreeFilter;
111 import org.eclipse.jgit.util.FileUtils;
112 import org.eclipse.jgit.util.GitDateParser;
113 import org.eclipse.jgit.util.SystemReader;
114 import org.slf4j.Logger;
115 import org.slf4j.LoggerFactory;
116 
117 /**
118  * A garbage collector for git {@link FileRepository}. Instances of this class
119  * are not thread-safe. Don't use the same instance from multiple threads.
120  *
121  * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
122  * adapted to FileRepositories.
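 * <p>
 * A minimal usage sketch (the repository path is illustrative; exception
 * handling omitted):
 * <pre>{@code
 * FileRepository repo = new FileRepository(new File("/path/to/repo/.git"));
 * GC gc = new GC(repo);
 * gc.setProgressMonitor(NullProgressMonitor.INSTANCE);
 * Collection<PackFile> newPacks = gc.gc();
 * }</pre>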
123  */
124 public class GC {
125 	private static final Logger LOG = LoggerFactory
126 			.getLogger(GC.class);
127 
128 	private static final String PRUNE_EXPIRE_DEFAULT = "2.weeks.ago"; //$NON-NLS-1$
129 
130 	private static final String PRUNE_PACK_EXPIRE_DEFAULT = "1.hour.ago"; //$NON-NLS-1$
131 
132 	private static final Pattern PATTERN_LOOSE_OBJECT = Pattern
133 			.compile("[0-9a-fA-F]{38}"); //$NON-NLS-1$
134 
135 	private static final String PACK_EXT = "." + PackExt.PACK.getExtension();//$NON-NLS-1$
136 
137 	private static final String BITMAP_EXT = "." //$NON-NLS-1$
138 			+ PackExt.BITMAP_INDEX.getExtension();
139 
140 	private static final String INDEX_EXT = "." + PackExt.INDEX.getExtension(); //$NON-NLS-1$
141 
142 	private static final int DEFAULT_AUTOPACKLIMIT = 50;
143 
144 	private static final int DEFAULT_AUTOLIMIT = 6700;
145 
146 	private final FileRepository repo;
147 
148 	private ProgressMonitor pm;
149 
150 	private long expireAgeMillis = -1;
151 
152 	private Date expire;
153 
154 	private long packExpireAgeMillis = -1;
155 
156 	private Date packExpire;
157 
158 	private PackConfig pconfig = null;
159 
160 	/**
161 	 * The refs which existed during the last call to {@link #repack()}. This is
162 	 * needed during {@link #prune(Set)} where we can optimize by looking at the
163 	 * difference between the current refs and the refs which existed during the
164 	 * last {@link #repack()}.
165 	 */
166 	private Collection<Ref> lastPackedRefs;
167 
168 	/**
169 	 * Holds the starting time of the last repack() execution. This is needed in
170 	 * prune() to inspect only those reflog entries which have been added since
171 	 * the last repack().
172 	 */
173 	private long lastRepackTime;
174 
175 	/**
176 	 * Whether gc should do automatic housekeeping
177 	 */
178 	private boolean automatic;
179 
180 	/**
181 	 * Creates a new garbage collector with default values. An expirationTime of
182 	 * two weeks and a {@link NullProgressMonitor} will be used.
183 	 *
184 	 * @param repo
185 	 *            the repo to work on
186 	 */
187 	public GC(FileRepository repo) {
188 		this.repo = repo;
189 		this.pm = NullProgressMonitor.INSTANCE;
190 	}
191 
192 	/**
193 	 * Runs a garbage collector on a {@link FileRepository}. It will
194 	 * <ul>
195 	 * <li>pack loose references into packed-refs</li>
196 	 * <li>repack all reachable objects into new pack files and delete the old
197 	 * pack files</li>
198 	 * <li>prune all loose objects which are now contained in the pack files</li>
199 	 * </ul>
200 	 *
201 	 * If {@link #setAuto(boolean)} was set to {@code true}, {@code gc} will
202 	 * first check whether any housekeeping is required; if not, it exits
203 	 * without performing any work.
204 	 *
205 	 * @return the collection of newly created {@link PackFile}s
206 	 * @throws IOException
207 	 * @throws ParseException
208 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
209 	 *             parsed
210 	 */
211 	public Collection<PackFile> gc() throws IOException, ParseException {
212 		if (automatic && !needGc()) {
213 			return Collections.emptyList();
214 		}
215 		pm.start(6 /* tasks */);
216 		packRefs();
217 		// TODO: implement reflog_expire(pm, repo);
218 		Collection<PackFile> newPacks = repack();
219 		prune(Collections.<ObjectId> emptySet());
220 		// TODO: implement rerere_gc(pm);
221 		return newPacks;
222 	}
223 
224 	/**
225 	 * Loosen objects in a pack file which are not also in the newly-created
226 	 * pack files.
227 	 *
228 	 * @param inserter
229 	 * @param reader
230 	 * @param pack
231 	 * @param existing
232 	 * @throws IOException
233 	 */
234 	private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader, PackFile pack, HashSet<ObjectId> existing)
235 			throws IOException {
236 		for (PackIndex.MutableEntry entry : pack) {
237 			ObjectId oid = entry.toObjectId();
238 			if (existing.contains(oid)) {
239 				continue;
240 			}
241 			existing.add(oid);
242 			ObjectLoader loader = reader.open(oid);
243 			inserter.insert(loader.getType(),
244 					loader.getSize(),
245 					loader.openStream(),
246 					true /* create this object even though it's a duplicate */);
247 		}
248 	}
249 
250 	/**
251 	 * Delete old pack files. What is 'old' is defined by the given sets of old
252 	 * and new pack files: each pack file contained in the old set but not in
253 	 * the new set will be deleted. If preserveOldPacks is set, a copy of each
254 	 * deleted pack file is kept in the preserve directory. If an expirationDate
255 	 * is set then pack files which are younger than the expirationDate will be
256 	 * neither deleted nor preserved.
257 	 * <p>
258 	 * If we're not immediately expiring loose objects, loosen any objects
259 	 * in the old pack files which aren't in the new pack files.
260 	 *
261 	 * @param oldPacks
262 	 * @param newPacks
263 	 * @throws ParseException
264 	 * @throws IOException
265 	 */
266 	private void deleteOldPacks(Collection<PackFile> oldPacks,
267 			Collection<PackFile> newPacks) throws ParseException, IOException {
268 		HashSet<ObjectId> ids = new HashSet<>();
269 		for (PackFile pack : newPacks) {
270 			for (PackIndex.MutableEntry entry : pack) {
271 				ids.add(entry.toObjectId());
272 			}
273 		}
274 		ObjectReader reader = repo.newObjectReader();
275 		ObjectDirectory dir = repo.getObjectDatabase();
276 		ObjectDirectoryInserter inserter = dir.newInserter();
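		// Loosening only makes sense if unreferenced loose objects get a grace
		// period; with gc.pruneexpire=now they would be pruned again
		// immediately, so skip the extra work in that case.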
277 		boolean shouldLoosen = !"now".equals(getPruneExpireStr()) && //$NON-NLS-1$
278 			getExpireDate() < Long.MAX_VALUE;
279 
280 		prunePreserved();
281 		long packExpireDate = getPackExpireDate();
282 		oldPackLoop: for (PackFile oldPack : oldPacks) {
283 			checkCancelled();
284 			String oldName = oldPack.getPackName();
285 			// Check whether an old pack file is also among the new pack
286 			// files; if so it must not be deleted.
287 			for (PackFile newPack : newPacks)
288 				if (oldName.equals(newPack.getPackName()))
289 					continue oldPackLoop;
290 
291 			if (!oldPack.shouldBeKept()
292 					&& repo.getFS().lastModified(
293 							oldPack.getPackFile()) < packExpireDate) {
294 				oldPack.close();
295 				if (shouldLoosen) {
296 					loosen(inserter, reader, oldPack, ids);
297 				}
298 				prunePack(oldName);
299 			}
300 		}
301 
302 		// Close the complete object database. That's the only chance to force
303 		// a rescan and to detect that certain pack files are now deleted.
304 		repo.getObjectDatabase().close();
305 	}
306 
307 	/**
308 	 * Deletes an old pack file, unless 'preserve-oldpacks' is set, in which case
309 	 * the pack file is moved to the preserved directory.
310 	 *
311 	 * @param packFile
312 	 * @param packName
313 	 * @param ext
314 	 * @param deleteOptions
315 	 * @throws IOException
316 	 */
317 	private void removeOldPack(File packFile, String packName, PackExt ext,
318 			int deleteOptions) throws IOException {
319 		if (pconfig != null && pconfig.isPreserveOldPacks()) {
320 			File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
321 			FileUtils.mkdir(oldPackDir, true);
322 
323 			String oldPackName = "pack-" + packName + ".old-" + ext.getExtension();  //$NON-NLS-1$ //$NON-NLS-2$
324 			File oldPackFile = new File(oldPackDir, oldPackName);
325 			FileUtils.rename(packFile, oldPackFile);
326 		} else {
327 			FileUtils.delete(packFile, deleteOptions);
328 		}
329 	}
330 
331 	/**
332 	 * Delete the preserved directory including all pack files within
333 	 */
334 	private void prunePreserved() {
335 		if (pconfig != null && pconfig.isPrunePreserved()) {
336 			try {
337 				FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
338 						FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
339 			} catch (IOException e) {
340 				// Deletion of the preserved pack files failed. Silently return.
341 			}
342 		}
343 	}
344 
345 	/**
346 	 * Delete files associated with a single pack file. First try to delete the
347 	 * ".pack" file because on some platforms the ".pack" file may be locked and
348 	 * can't be deleted. In such a case it is better to detect this early and
349 	 * give up on deleting files for this packfile. Otherwise we might delete
350 	 * the ".idx" file first, then fail to delete the ".pack" file, and be left
351 	 * with a ".pack" file without an ".idx" file.
352 	 *
353 	 * @param packName
354 	 */
355 	private void prunePack(String packName) {
356 		PackExt[] extensions = PackExt.values();
357 		try {
358 			// Delete the .pack file first and if this fails give up on deleting
359 			// the other files
360 			int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
361 			for (PackExt ext : extensions)
362 				if (PackExt.PACK.equals(ext)) {
363 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
364 					removeOldPack(f, packName, ext, deleteOptions);
365 					break;
366 				}
367 			// The .pack file has been deleted. Delete as many of the other
368 			// files as possible.
369 			deleteOptions |= FileUtils.IGNORE_ERRORS;
370 			for (PackExt ext : extensions) {
371 				if (!PackExt.PACK.equals(ext)) {
372 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
373 					removeOldPack(f, packName, ext, deleteOptions);
374 				}
375 			}
376 		} catch (IOException e) {
377 			// Deletion of the .pack file failed. Silently return.
378 		}
379 	}
380 
381 	/**
382 	 * Like "git prune-packed" this method tries to prune all loose objects
383 	 * which can be found in packs. If certain objects can't be pruned (e.g.
384 	 * because the filesystem delete operation fails) this is silently ignored.
385 	 *
386 	 * @throws IOException
387 	 */
388 	public void prunePacked() throws IOException {
389 		ObjectDirectory objdb = repo.getObjectDatabase();
390 		Collection<PackFile> packs = objdb.getPacks();
391 		File objects = repo.getObjectsDirectory();
392 		String[] fanout = objects.list();
393 
394 		if (fanout != null && fanout.length > 0) {
395 			pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
396 			try {
397 				for (String d : fanout) {
398 					checkCancelled();
399 					pm.update(1);
400 					if (d.length() != 2)
401 						continue;
402 					String[] entries = new File(objects, d).list();
403 					if (entries == null)
404 						continue;
405 					for (String e : entries) {
406 						checkCancelled();
407 						if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
408 							continue;
409 						ObjectId id;
410 						try {
411 							id = ObjectId.fromString(d + e);
412 						} catch (IllegalArgumentException notAnObject) {
413 						// ignore files that do not represent a loose
414 						// object
415 							continue;
416 						}
417 						boolean found = false;
418 						for (PackFile p : packs) {
419 							checkCancelled();
420 							if (p.hasObject(id)) {
421 								found = true;
422 								break;
423 							}
424 						}
425 						if (found)
426 							FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
427 									| FileUtils.SKIP_MISSING
428 									| FileUtils.IGNORE_ERRORS);
429 					}
430 				}
431 			} finally {
432 				pm.endTask();
433 			}
434 		}
435 	}
436 
437 	/**
438 	 * Like "git prune" this method tries to prune all loose objects which are
439 	 * unreferenced. If certain objects can't be pruned (e.g. because the
440 	 * filesystem delete operation fails) this is silently ignored.
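	 * <p>
	 * A minimal sketch of a prune run (exception handling omitted):
	 * <pre>{@code
	 * GC gc = new GC(repo);
	 * gc.setExpireAgeMillis(0); // make every unreferenced loose object a candidate
	 * gc.prune(Collections.emptySet());
	 * }</pre>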
441 	 *
442 	 * @param objectsToKeep
443 	 *            a set of objects which should explicitly not be pruned
444 	 *
445 	 * @throws IOException
446 	 * @throws ParseException
447 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
448 	 *             parsed
449 	 */
450 	public void prune(Set<ObjectId> objectsToKeep) throws IOException,
451 			ParseException {
452 		long expireDate = getExpireDate();
453 
454 		// Collect all loose objects which are old enough, not referenced from
455 		// the index and not in objectsToKeep
456 		Map<ObjectId, File> deletionCandidates = new HashMap<>();
457 		Set<ObjectId> indexObjects = null;
458 		File objects = repo.getObjectsDirectory();
459 		String[] fanout = objects.list();
460 		if (fanout == null || fanout.length == 0) {
461 			return;
462 		}
463 		pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
464 				fanout.length);
465 		try {
466 			for (String d : fanout) {
467 				checkCancelled();
468 				pm.update(1);
469 				if (d.length() != 2)
470 					continue;
471 				File[] entries = new File(objects, d).listFiles();
472 				if (entries == null)
473 					continue;
474 				for (File f : entries) {
475 					checkCancelled();
476 					String fName = f.getName();
477 					if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
478 						continue;
479 					if (repo.getFS().lastModified(f) >= expireDate)
480 						continue;
481 					try {
482 						ObjectId id = ObjectId.fromString(d + fName);
483 						if (objectsToKeep.contains(id))
484 							continue;
485 						if (indexObjects == null)
486 							indexObjects = listNonHEADIndexObjects();
487 						if (indexObjects.contains(id))
488 							continue;
489 						deletionCandidates.put(id, f);
490 					} catch (IllegalArgumentException notAnObject) {
491 						// ignoring the file that does not represent loose
492 						// ignore files that do not represent a loose
493 						// object
494 					}
495 				}
496 			}
497 		} finally {
498 			pm.endTask();
499 		}
500 
501 		if (deletionCandidates.isEmpty()) {
502 			return;
503 		}
504 
505 		checkCancelled();
506 
507 		// From the set of current refs remove all those which have been handled
508 		// during the last repack(). Only those refs will survive which have been
509 		// added or modified since the last repack. Only these can save existing
510 		// loose objects from being pruned.
511 		Collection<Ref> newRefs;
512 		if (lastPackedRefs == null || lastPackedRefs.isEmpty())
513 			newRefs = getAllRefs();
514 		else {
515 			Map<String, Ref> last = new HashMap<>();
516 			for (Ref r : lastPackedRefs) {
517 				last.put(r.getName(), r);
518 			}
519 			newRefs = new ArrayList<>();
520 			for (Ref r : getAllRefs()) {
521 				Ref old = last.get(r.getName());
522 				if (!equals(r, old)) {
523 					newRefs.add(r);
524 				}
525 			}
526 		}
527 
528 		if (!newRefs.isEmpty()) {
529 			// There are new/modified refs! Check which loose objects are now
530 			// referenced by these modified refs (or their reflog entries) and
531 			// remove these loose objects from the deletionCandidates.
532 			// When the last candidate is removed
533 			// leave this method.
534 			ObjectWalk w = new ObjectWalk(repo);
535 			try {
536 				for (Ref cr : newRefs) {
537 					checkCancelled();
538 					w.markStart(w.parseAny(cr.getObjectId()));
539 				}
540 				if (lastPackedRefs != null)
541 					for (Ref lpr : lastPackedRefs) {
542 						w.markUninteresting(w.parseAny(lpr.getObjectId()));
543 					}
544 				removeReferenced(deletionCandidates, w);
545 			} finally {
546 				w.dispose();
547 			}
548 		}
549 
550 		if (deletionCandidates.isEmpty())
551 			return;
552 
553 		// Since we have not left the method yet there are still
554 		// deletionCandidates. The last chance for these objects not to be pruned
555 		// is that they are referenced by reflog entries. Even refs which
556 		// currently point to the same object as during the last repack() may
557 		// have additional reflog entries not handled during the last repack().
558 		ObjectWalk w = new ObjectWalk(repo);
559 		try {
560 			for (Ref ar : getAllRefs())
561 				for (ObjectId id : listRefLogObjects(ar, lastRepackTime)) {
562 					checkCancelled();
563 					w.markStart(w.parseAny(id));
564 				}
565 			if (lastPackedRefs != null)
566 				for (Ref lpr : lastPackedRefs) {
567 					checkCancelled();
568 					w.markUninteresting(w.parseAny(lpr.getObjectId()));
569 				}
570 			removeReferenced(deletionCandidates, w);
571 		} finally {
572 			w.dispose();
573 		}
574 
575 		if (deletionCandidates.isEmpty())
576 			return;
577 
578 		checkCancelled();
579 
580 		// delete all candidates which have survived: these are unreferenced
581 		// loose objects. Make a last check, though, to avoid deleting objects
582 		// that could have been referenced while the candidates list was being
583 		// built (by an incoming push, for example).
584 		Set<File> touchedFanout = new HashSet<>();
585 		for (File f : deletionCandidates.values()) {
586 			if (f.lastModified() < expireDate) {
587 				f.delete();
588 				touchedFanout.add(f.getParentFile());
589 			}
590 		}
591 
592 		for (File f : touchedFanout) {
593 			FileUtils.delete(f,
594 					FileUtils.EMPTY_DIRECTORIES_ONLY | FileUtils.IGNORE_ERRORS);
595 		}
596 
597 		repo.getObjectDatabase().close();
598 	}
599 
600 	private long getExpireDate() throws ParseException {
601 		long expireDate = Long.MAX_VALUE;
602 
603 		if (expire == null && expireAgeMillis == -1) {
604 			String pruneExpireStr = getPruneExpireStr();
605 			if (pruneExpireStr == null)
606 				pruneExpireStr = PRUNE_EXPIRE_DEFAULT;
607 			expire = GitDateParser.parse(pruneExpireStr, null, SystemReader
608 					.getInstance().getLocale());
609 			expireAgeMillis = -1;
610 		}
611 		if (expire != null)
612 			expireDate = expire.getTime();
613 		if (expireAgeMillis != -1)
614 			expireDate = System.currentTimeMillis() - expireAgeMillis;
615 		return expireDate;
616 	}
617 
618 	private String getPruneExpireStr() {
619 		return repo.getConfig().getString(
620 				ConfigConstants.CONFIG_GC_SECTION, null,
621 				ConfigConstants.CONFIG_KEY_PRUNEEXPIRE);
622 	}
623 
624 	private long getPackExpireDate() throws ParseException {
625 		long packExpireDate = Long.MAX_VALUE;
626 
627 		if (packExpire == null && packExpireAgeMillis == -1) {
628 			String prunePackExpireStr = repo.getConfig().getString(
629 					ConfigConstants.CONFIG_GC_SECTION, null,
630 					ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE);
631 			if (prunePackExpireStr == null)
632 				prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT;
633 			packExpire = GitDateParser.parse(prunePackExpireStr, null,
634 					SystemReader.getInstance().getLocale());
635 			packExpireAgeMillis = -1;
636 		}
637 		if (packExpire != null)
638 			packExpireDate = packExpire.getTime();
639 		if (packExpireAgeMillis != -1)
640 			packExpireDate = System.currentTimeMillis() - packExpireAgeMillis;
641 		return packExpireDate;
642 	}
643 
644 	/**
645 	 * Remove all entries from the map whose key is the id of an object
646 	 * referenced by the given ObjectWalk.
647 	 *
648 	 * @param id2File
649 	 * @param w
650 	 * @throws MissingObjectException
651 	 * @throws IncorrectObjectTypeException
652 	 * @throws IOException
653 	 */
654 	private void removeReferenced(Map<ObjectId, File> id2File,
655 			ObjectWalk w) throws MissingObjectException,
656 			IncorrectObjectTypeException, IOException {
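		// Walk commits first, then all other reachable objects; return as soon
		// as the candidate map becomes empty.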
657 		RevObject ro = w.next();
658 		while (ro != null) {
659 			checkCancelled();
660 			if (id2File.remove(ro.getId()) != null)
661 				if (id2File.isEmpty())
662 					return;
663 			ro = w.next();
664 		}
665 		ro = w.nextObject();
666 		while (ro != null) {
667 			checkCancelled();
668 			if (id2File.remove(ro.getId()) != null)
669 				if (id2File.isEmpty())
670 					return;
671 			ro = w.nextObject();
672 		}
673 	}
674 
675 	private static boolean equals(Ref r1, Ref r2) {
676 		if (r1 == null || r2 == null)
677 			return false;
678 		if (r1.isSymbolic()) {
679 			if (!r2.isSymbolic())
680 				return false;
681 			return r1.getTarget().getName().equals(r2.getTarget().getName());
682 		} else {
683 			if (r2.isSymbolic()) {
684 				return false;
685 			}
686 			return Objects.equals(r1.getObjectId(), r2.getObjectId());
687 		}
688 	}
689 
690 	/**
691 	 * Packs all non-symbolic, loose refs into packed-refs.
692 	 *
693 	 * @throws IOException
694 	 */
695 	public void packRefs() throws IOException {
696 		Collection<Ref> refs = repo.getRefDatabase().getRefs(Constants.R_REFS).values();
697 		List<String> refsToBePacked = new ArrayList<>(refs.size());
698 		pm.beginTask(JGitText.get().packRefs, refs.size());
699 		try {
700 			for (Ref ref : refs) {
701 				checkCancelled();
702 				if (!ref.isSymbolic() && ref.getStorage().isLoose())
703 					refsToBePacked.add(ref.getName());
704 				pm.update(1);
705 			}
706 			((RefDirectory) repo.getRefDatabase()).pack(refsToBePacked);
707 		} finally {
708 			pm.endTask();
709 		}
710 	}
711 
712 	/**
713 	 * Packs all objects which are reachable from any of the heads into one pack
714 	 * file. Additionally all objects which are not reachable from any head but
715 	 * which are reachable from any of the other refs (e.g. tags), special refs
716 	 * (e.g. FETCH_HEAD) or index are packed into a separate pack file. Objects
717 	 * included in pack files which have a .keep file associated are never
718 	 * repacked. All old pack files which existed before are deleted.
719 	 *
720 	 * @return a collection of the newly created pack files
721 	 * @throws IOException
722 	 *             when an {@link IOException} occurs while reading refs, the
723 	 *             index, pack files, objects or reflog entries, or while
724 	 *             writing the new pack files
725 	 */
726 	public Collection<PackFile> repack() throws IOException {
727 		Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
728 
729 		long time = System.currentTimeMillis();
730 		Collection<Ref> refsBefore = getAllRefs();
731 
732 		Set<ObjectId> allHeads = new HashSet<>();
733 		Set<ObjectId> nonHeads = new HashSet<>();
734 		Set<ObjectId> txnHeads = new HashSet<>();
735 		Set<ObjectId> tagTargets = new HashSet<>();
736 		Set<ObjectId> indexObjects = listNonHEADIndexObjects();
737 		RefDatabase refdb = repo.getRefDatabase();
738 
739 		for (Ref ref : refsBefore) {
740 			checkCancelled();
741 			nonHeads.addAll(listRefLogObjects(ref, 0));
742 			if (ref.isSymbolic() || ref.getObjectId() == null)
743 				continue;
744 			if (isHead(ref) || isTag(ref))
745 				allHeads.add(ref.getObjectId());
746 			else if (RefTreeNames.isRefTree(refdb, ref.getName()))
747 				txnHeads.add(ref.getObjectId());
748 			else
749 				nonHeads.add(ref.getObjectId());
750 			if (ref.getPeeledObjectId() != null)
751 				tagTargets.add(ref.getPeeledObjectId());
752 		}
753 
754 		List<ObjectIdSet> excluded = new LinkedList<>();
755 		for (final PackFile f : repo.getObjectDatabase().getPacks()) {
756 			checkCancelled();
757 			if (f.shouldBeKept())
758 				excluded.add(f.getIndex());
759 		}
760 
761 		tagTargets.addAll(allHeads);
762 		nonHeads.addAll(indexObjects);
763 
764 		List<PackFile> ret = new ArrayList<>(2);
765 		PackFile heads = null;
766 		if (!allHeads.isEmpty()) {
767 			heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
768 					tagTargets, excluded);
769 			if (heads != null) {
770 				ret.add(heads);
771 				excluded.add(0, heads.getIndex());
772 			}
773 		}
774 		if (!nonHeads.isEmpty()) {
775 			PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
776 			if (rest != null)
777 				ret.add(rest);
778 		}
779 		if (!txnHeads.isEmpty()) {
780 			PackFile txn = writePack(txnHeads, PackWriter.NONE, null, excluded);
781 			if (txn != null)
782 				ret.add(txn);
783 		}
784 		try {
785 			deleteOldPacks(toBeDeleted, ret);
786 		} catch (ParseException e) {
787 			// TODO: the exception has to be wrapped into an IOException because
788 			// throwing the ParseException directly would break the API, instead
789 			// we should throw a ConfigInvalidException
790 			throw new IOException(e);
791 		}
792 		prunePacked();
793 		deleteOrphans();
794 
795 		lastPackedRefs = refsBefore;
796 		lastRepackTime = time;
797 		return ret;
798 	}
799 
800 	private static boolean isHead(Ref ref) {
801 		return ref.getName().startsWith(Constants.R_HEADS);
802 	}
803 
804 	private static boolean isTag(Ref ref) {
805 		return ref.getName().startsWith(Constants.R_TAGS);
806 	}
807 
808 	/**
809 	 * Deletes orphaned files.
810 	 * <p>
811 	 * A file is considered an orphan if it is either a "bitmap" or an index
812 	 * file and its corresponding pack file is missing from the list.
813 	 * </p>
814 	 */
815 	private void deleteOrphans() {
816 		Path packDir = Paths.get(repo.getObjectsDirectory().getAbsolutePath(),
817 				"pack"); //$NON-NLS-1$
818 		List<String> fileNames = null;
819 		try (Stream<Path> files = Files.list(packDir)) {
820 			fileNames = files.map(path -> path.getFileName().toString())
821 					.filter(name -> {
822 						return (name.endsWith(PACK_EXT)
823 								|| name.endsWith(BITMAP_EXT)
824 								|| name.endsWith(INDEX_EXT));
825 					}).sorted(Collections.reverseOrder())
826 					.collect(Collectors.toList());
827 		} catch (IOException e1) {
828 			// ignore
829 		}
830 		if (fileNames == null) {
831 			return;
832 		}
833 
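		// fileNames is sorted in reverse order, so a ".pack" file is seen
		// before its ".idx"/".bitmap" companions; "base" therefore holds the
		// pack name the following entries are checked against.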
834 		String base = null;
835 		for (String n : fileNames) {
836 			if (n.endsWith(PACK_EXT)) {
837 				base = n.substring(0, n.lastIndexOf('.'));
838 			} else {
839 				if (base == null || !n.startsWith(base)) {
840 					try {
841 						Files.delete(new File(packDir.toFile(), n).toPath());
842 					} catch (IOException e) {
843 						LOG.error(e.getMessage(), e);
844 					}
845 				}
846 			}
847 		}
848 	}
849 
850 	/**
851 	 * @param ref
852 	 *            the ref whose log should be inspected
853 	 * @param minTime only reflog entries not older than this time are processed
854 	 * @return the {@link ObjectId}s contained in the reflog
855 	 * @throws IOException
856 	 */
857 	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException {
858 		ReflogReader reflogReader = repo.getReflogReader(ref.getName());
859 		if (reflogReader == null) {
860 			return Collections.emptySet();
861 		}
862 		List<ReflogEntry> rlEntries = reflogReader
863 				.getReverseEntries();
864 		if (rlEntries == null || rlEntries.isEmpty())
865 			return Collections.<ObjectId> emptySet();
866 		Set<ObjectId> ret = new HashSet<>();
867 		for (ReflogEntry e : rlEntries) {
868 			if (e.getWho().getWhen().getTime() < minTime)
869 				break;
870 			ObjectId newId = e.getNewId();
871 			if (newId != null && !ObjectId.zeroId().equals(newId))
872 				ret.add(newId);
873 			ObjectId oldId = e.getOldId();
874 			if (oldId != null && !ObjectId.zeroId().equals(oldId))
875 				ret.add(oldId);
876 		}
877 		return ret;
878 	}
879 
880 	/**
881 	 * Returns a collection of all refs and additional refs.
882 	 *
883 	 * Additional refs which don't start with "refs/" are not returned because
884 	 * they should not save objects from being garbage collected. Examples of
885 	 * such references are ORIG_HEAD, MERGE_HEAD, FETCH_HEAD and
886 	 * CHERRY_PICK_HEAD.
887 	 *
888 	 * @return a collection of refs pointing to live objects.
889 	 * @throws IOException
890 	 */
891 	private Collection<Ref> getAllRefs() throws IOException {
892 		RefDatabase refdb = repo.getRefDatabase();
893 		Collection<Ref> refs = refdb.getRefs(RefDatabase.ALL).values();
894 		List<Ref> addl = refdb.getAdditionalRefs();
895 		if (!addl.isEmpty()) {
896 			List<Ref> all = new ArrayList<>(refs.size() + addl.size());
897 			all.addAll(refs);
898 			// add additional refs which start with refs/
899 			for (Ref r : addl) {
900 				checkCancelled();
901 				if (r.getName().startsWith(Constants.R_REFS)) {
902 					all.add(r);
903 				}
904 			}
905 			return all;
906 		}
907 		return refs;
908 	}
909 
910 	/**
911 	 * Return the set of those objects in the index which differ from what is
912 	 * in HEAD.
913 	 *
914 	 * @return a set of ObjectIds of changed objects in the index
915 	 * @throws IOException
916 	 * @throws CorruptObjectException
917 	 * @throws NoWorkTreeException
918 	 */
919 	private Set<ObjectId> listNonHEADIndexObjects()
920 			throws CorruptObjectException, IOException {
921 		if (repo.isBare()) {
922 			return Collections.emptySet();
923 		}
924 		try (TreeWalk treeWalk = new TreeWalk(repo)) {
925 			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
926 			ObjectId headID = repo.resolve(Constants.HEAD);
927 			if (headID != null) {
928 				try (RevWalk revWalk = new RevWalk(repo)) {
929 					treeWalk.addTree(revWalk.parseTree(headID));
930 				}
931 			}
932 
933 			treeWalk.setFilter(TreeFilter.ANY_DIFF);
934 			treeWalk.setRecursive(true);
935 			Set<ObjectId> ret = new HashSet<>();
936 
937 			while (treeWalk.next()) {
938 				checkCancelled();
939 				ObjectId objectId = treeWalk.getObjectId(0);
940 				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
941 				case FileMode.TYPE_MISSING:
942 				case FileMode.TYPE_GITLINK:
943 					continue;
944 				case FileMode.TYPE_TREE:
945 				case FileMode.TYPE_FILE:
946 				case FileMode.TYPE_SYMLINK:
947 					ret.add(objectId);
948 					continue;
949 				default:
950 					throw new IOException(MessageFormat.format(
951 							JGitText.get().corruptObjectInvalidMode3,
952 							String.format("%o", //$NON-NLS-1$
953 									Integer.valueOf(treeWalk.getRawMode(0))),
954 							(objectId == null) ? "null" : objectId.name(), //$NON-NLS-1$
955 							treeWalk.getPathString(), //
956 							repo.getIndexFile()));
957 				}
958 			}
959 			return ret;
960 		}
961 	}
962 
963 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
964 			@NonNull Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
965 			List<ObjectIdSet> excludeObjects) throws IOException {
966 		checkCancelled();
967 		File tmpPack = null;
968 		Map<PackExt, File> tmpExts = new TreeMap<>(
969 				new Comparator<PackExt>() {
970 					@Override
971 					public int compare(PackExt o1, PackExt o2) {
972 						// INDEX entries must be returned last, so the pack
973 						// scanner does not pick up the new pack until all the
974 						// other PackExt entries have been written.
975 						if (o1 == o2)
976 							return 0;
977 						if (o1 == PackExt.INDEX)
978 							return 1;
979 						if (o2 == PackExt.INDEX)
980 							return -1;
981 						return Integer.signum(o1.hashCode() - o2.hashCode());
982 					}
983 
984 				});
985 		try (PackWriter pw = new PackWriter(
986 				(pconfig == null) ? new PackConfig(repo) : pconfig,
987 				repo.newObjectReader())) {
988 			// prepare the PackWriter
989 			pw.setDeltaBaseAsOffset(true);
990 			pw.setReuseDeltaCommits(false);
991 			if (tagTargets != null)
992 				pw.setTagTargets(tagTargets);
993 			if (excludeObjects != null)
994 				for (ObjectIdSet idx : excludeObjects)
995 					pw.excludeObjects(idx);
996 			pw.preparePack(pm, want, have);
997 			if (pw.getObjectCount() == 0)
998 				return null;
999 			checkCancelled();
1000 
1001 			// create temporary files
1002 			String id = pw.computeName().getName();
1003 			File packdir = new File(repo.getObjectsDirectory(), "pack"); //$NON-NLS-1$
1004 			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir); //$NON-NLS-1$ //$NON-NLS-2$
1005 			final String tmpBase = tmpPack.getName()
1006 					.substring(0, tmpPack.getName().lastIndexOf('.'));
1007 			File tmpIdx = new File(packdir, tmpBase + ".idx_tmp"); //$NON-NLS-1$
1008 			tmpExts.put(INDEX, tmpIdx);
1009 
1010 			if (!tmpIdx.createNewFile())
1011 				throw new IOException(MessageFormat.format(
1012 						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
1013 
1014 			// write the packfile
1015 			FileOutputStream fos = new FileOutputStream(tmpPack);
1016 			FileChannel channel = fos.getChannel();
1017 			OutputStream channelStream = Channels.newOutputStream(channel);
1018 			try {
1019 				pw.writePack(pm, pm, channelStream);
1020 			} finally {
1021 				channel.force(true);
1022 				channelStream.close();
1023 				fos.close();
1024 			}
1025 
1026 			// write the packindex
1027 			fos = new FileOutputStream(tmpIdx);
1028 			FileChannel idxChannel = fos.getChannel();
1029 			OutputStream idxStream = Channels.newOutputStream(idxChannel);
1030 			try {
1031 				pw.writeIndex(idxStream);
1032 			} finally {
1033 				idxChannel.force(true);
1034 				idxStream.close();
1035 				fos.close();
1036 			}
1037 
1038 			if (pw.prepareBitmapIndex(pm)) {
1039 				File tmpBitmapIdx = new File(packdir, tmpBase + ".bitmap_tmp"); //$NON-NLS-1$
1040 				tmpExts.put(BITMAP_INDEX, tmpBitmapIdx);
1041 
1042 				if (!tmpBitmapIdx.createNewFile())
1043 					throw new IOException(MessageFormat.format(
1044 							JGitText.get().cannotCreateIndexfile,
1045 							tmpBitmapIdx.getPath()));
1046 
1047 				fos = new FileOutputStream(tmpBitmapIdx);
1048 				idxChannel = fos.getChannel();
1049 				idxStream = Channels.newOutputStream(idxChannel);
1050 				try {
1051 					pw.writeBitmapIndex(idxStream);
1052 				} finally {
1053 					idxChannel.force(true);
1054 					idxStream.close();
1055 					fos.close();
1056 				}
1057 			}
1058 
1059 			// rename the temporary files to real files
1060 			File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
1061 
1062 			// If the packfile already exists (because we are rewriting a
1063 			// packfile for the same set of objects, maybe with a different
1064 			// PackConfig) then make sure we get rid of all handles on the file.
1065 			// Windows will not allow the rename otherwise.
1066 			if (realPack.exists())
1067 				for (PackFile p : repo.getObjectDatabase().getPacks())
1068 					if (realPack.getPath().equals(p.getPackFile().getPath())) {
1069 						p.close();
1070 						break;
1071 					}
1072 			tmpPack.setReadOnly();
1073 
1074 			FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
1075 			for (Map.Entry<PackExt, File> tmpEntry : tmpExts.entrySet()) {
1076 				File tmpExt = tmpEntry.getValue();
1077 				tmpExt.setReadOnly();
1078 
1079 				File realExt = nameFor(id,
1080 						"." + tmpEntry.getKey().getExtension()); //$NON-NLS-1$
1081 				try {
1082 					FileUtils.rename(tmpExt, realExt,
1083 							StandardCopyOption.ATOMIC_MOVE);
1084 				} catch (IOException e) {
1085 					File newExt = new File(realExt.getParentFile(),
1086 							realExt.getName() + ".new"); //$NON-NLS-1$
1087 					try {
1088 						FileUtils.rename(tmpExt, newExt,
1089 								StandardCopyOption.ATOMIC_MOVE);
1090 					} catch (IOException e2) {
1091 						newExt = tmpExt;
1092 						e = e2;
1093 					}
1094 					throw new IOException(MessageFormat.format(
1095 							JGitText.get().panicCantRenameIndexFile, newExt,
1096 							realExt), e);
1097 				}
1098 			}
1099 
1100 			return repo.getObjectDatabase().openPack(realPack);
1101 		} finally {
1102 			if (tmpPack != null && tmpPack.exists())
1103 				tmpPack.delete();
1104 			for (File tmpExt : tmpExts.values()) {
1105 				if (tmpExt.exists())
1106 					tmpExt.delete();
1107 			}
1108 		}
1109 	}
1110 
1111 	private File nameFor(String name, String ext) {
1112 		File packdir = new File(repo.getObjectsDirectory(), "pack"); //$NON-NLS-1$
1113 		return new File(packdir, "pack-" + name + ext); //$NON-NLS-1$
1114 	}
1115 
1116 	private void checkCancelled() throws CancelledException {
1117 		if (pm.isCancelled()) {
1118 			throw new CancelledException(JGitText.get().operationCanceled);
1119 		}
1120 	}
1121 
1122 	/**
1123 	 * A class holding statistical data for a FileRepository regarding how many
1124 	 * objects are stored as loose or packed objects
1125 	 */
1126 	public static class RepoStatistics {
1127 		/**
1128 		 * The number of objects stored in pack files. If the same object is
1129 		 * stored in multiple pack files then it is counted as often as it
1130 		 * occurs in pack files.
1131 		 */
1132 		public long numberOfPackedObjects;
1133 
1134 		/**
1135 		 * The number of pack files
1136 		 */
1137 		public long numberOfPackFiles;
1138 
1139 		/**
1140 		 * The number of objects stored as loose objects.
1141 		 */
1142 		public long numberOfLooseObjects;
1143 
1144 		/**
1145 		 * The sum of the sizes of all files used to persist loose objects.
1146 		 */
1147 		public long sizeOfLooseObjects;
1148 
1149 		/**
1150 		 * The sum of the sizes of all pack files.
1151 		 */
1152 		public long sizeOfPackedObjects;
1153 
1154 		/**
1155 		 * The number of loose refs.
1156 		 */
1157 		public long numberOfLooseRefs;
1158 
1159 		/**
1160 		 * The number of refs stored in pack files.
1161 		 */
1162 		public long numberOfPackedRefs;
1163 
1164 		/**
1165 		 * The number of bitmaps in the bitmap indices.
1166 		 */
1167 		public long numberOfBitmaps;
1168 
1169 		@Override
1170 		public String toString() {
1171 			final StringBuilder b = new StringBuilder();
1172 			b.append("numberOfPackedObjects=").append(numberOfPackedObjects); //$NON-NLS-1$
1173 			b.append(", numberOfPackFiles=").append(numberOfPackFiles); //$NON-NLS-1$
1174 			b.append(", numberOfLooseObjects=").append(numberOfLooseObjects); //$NON-NLS-1$
1175 			b.append(", numberOfLooseRefs=").append(numberOfLooseRefs); //$NON-NLS-1$
1176 			b.append(", numberOfPackedRefs=").append(numberOfPackedRefs); //$NON-NLS-1$
1177 			b.append(", sizeOfLooseObjects=").append(sizeOfLooseObjects); //$NON-NLS-1$
1178 			b.append(", sizeOfPackedObjects=").append(sizeOfPackedObjects); //$NON-NLS-1$
1179 			b.append(", numberOfBitmaps=").append(numberOfBitmaps); //$NON-NLS-1$
1180 			return b.toString();
1181 		}
1182 	}
1183 
1184 	/**
1185 	 * Returns information about objects and pack files for a FileRepository.
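	 * <p>
	 * For example (output format as produced by {@link RepoStatistics#toString()}):
	 * <pre>{@code
	 * RepoStatistics stats = new GC(repo).getStatistics();
	 * System.out.println(stats); // numberOfPackedObjects=..., numberOfPackFiles=..., ...
	 * }</pre>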
1186 	 *
1187 	 * @return information about objects and pack files for a FileRepository
1188 	 * @throws IOException
1189 	 */
1190 	public RepoStatistics getStatistics() throws IOException {
1191 		RepoStatistics ret = new RepoStatistics();
1192 		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
1193 		for (PackFile f : packs) {
1194 			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
1195 			ret.numberOfPackFiles++;
1196 			ret.sizeOfPackedObjects += f.getPackFile().length();
1197 			if (f.getBitmapIndex() != null)
1198 				ret.numberOfBitmaps += f.getBitmapIndex().getBitmapCount();
1199 		}
1200 		File objDir = repo.getObjectsDirectory();
1201 		String[] fanout = objDir.list();
1202 		if (fanout != null && fanout.length > 0) {
1203 			for (String d : fanout) {
1204 				if (d.length() != 2)
1205 					continue;
1206 				File[] entries = new File(objDir, d).listFiles();
1207 				if (entries == null)
1208 					continue;
1209 				for (File f : entries) {
1210 					if (f.getName().length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
1211 						continue;
1212 					ret.numberOfLooseObjects++;
1213 					ret.sizeOfLooseObjects += f.length();
1214 				}
1215 			}
1216 		}
1217 
1218 		RefDatabase refDb = repo.getRefDatabase();
1219 		for (Ref r : refDb.getRefs(RefDatabase.ALL).values()) {
1220 			Storage storage = r.getStorage();
1221 			if (storage == Storage.LOOSE || storage == Storage.LOOSE_PACKED)
1222 				ret.numberOfLooseRefs++;
1223 			if (storage == Storage.PACKED || storage == Storage.LOOSE_PACKED)
1224 				ret.numberOfPackedRefs++;
1225 		}
1226 
1227 		return ret;
1228 	}
1229 
1230 	/**
1231 	 * Set the progress monitor used for garbage collection methods.
1232 	 *
1233 	 * @param pm
1234 	 * @return this
1235 	 */
1236 	public GC setProgressMonitor(ProgressMonitor pm) {
1237 		this.pm = (pm == null) ? NullProgressMonitor.INSTANCE : pm;
1238 		return this;
1239 	}
1240 
1241 	/**
1242 	 * During gc() or prune() each unreferenced, loose object which has been
1243 	 * created or modified in the last <code>expireAgeMillis</code> milliseconds
1244 	 * will not be pruned. Only older objects may be pruned. If set to 0 then
1245 	 * every object is a candidate for pruning.
1246 	 *
1247 	 * @param expireAgeMillis
1248 	 *            minimal age of objects to be pruned in milliseconds.
1249 	 */
1250 	public void setExpireAgeMillis(long expireAgeMillis) {
1251 		this.expireAgeMillis = expireAgeMillis;
1252 		expire = null;
1253 	}
1254 
1255 	/**
1256 	 * During gc() or prune() packfiles which are created or modified in the
1257 	 * last <code>packExpireAgeMillis</code> milliseconds will not be deleted.
1258 	 * Only older packfiles may be deleted. If set to 0 then every packfile is a
1259 	 * candidate for deletion.
1260 	 *
1261 	 * @param packExpireAgeMillis
1262 	 *            minimal age of packfiles to be deleted in milliseconds.
1263 	 */
1264 	public void setPackExpireAgeMillis(long packExpireAgeMillis) {
1265 		this.packExpireAgeMillis = packExpireAgeMillis;
1266 		packExpire = null;
1267 	}
1268 
1269 	/**
1270 	 * Set the PackConfig used when (re-)writing packfiles. This makes it
1271 	 * possible to influence how packs are written and to implement something
1272 	 * similar to "git gc --aggressive".
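	 * <p>
	 * A sketch of an "aggressive"-style setup (assuming {@code gc} and
	 * {@code repo} as above; the delta window/depth values are illustrative):
	 * <pre>{@code
	 * PackConfig pc = new PackConfig(repo);
	 * pc.setDeltaSearchWindowSize(250);
	 * pc.setMaxDeltaDepth(250);
	 * gc.setPackConfig(pc);
	 * }</pre>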
1273 	 *
1274 	 * @param pconfig
1275 	 *            the {@link PackConfig} used when writing packs
1276 	 */
1277 	public void setPackConfig(PackConfig pconfig) {
1278 		this.pconfig = pconfig;
1279 	}
1280 
1281 	/**
1282 	 * During gc() or prune() each unreferenced, loose object which has been
1283 	 * created or modified after or at <code>expire</code> will not be pruned.
1284 	 * Only older objects may be pruned. If set to null then every object is a
1285 	 * candidate for pruning.
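	 * <p>
	 * For example (illustrative; {@code gc} is a GC instance), to prune
	 * unreferenced loose objects older than two weeks:
	 * <pre>{@code
	 * gc.setExpire(new Date(System.currentTimeMillis() - 14L * 24 * 60 * 60 * 1000));
	 * gc.prune(Collections.emptySet());
	 * }</pre>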
1286 	 *
1287 	 * @param expire
1288 	 *            instant in time which defines object expiration: objects with
1289 	 *            a modification time before this instant are expired, objects
1290 	 *            with a modification time equal to or newer than this instant
1291 	 *            are not expired
1292 	 */
1293 	public void setExpire(Date expire) {
1294 		this.expire = expire;
1295 		expireAgeMillis = -1;
1296 	}
1297 
1298 	/**
1299 	 * During gc() or prune() packfiles which are created or modified after or
1300 	 * at <code>packExpire</code> will not be deleted. Only older packfiles may
1301 	 * be deleted. If set to null then every packfile is a candidate for
1302 	 * deletion.
1303 	 *
1304 	 * @param packExpire
1305 	 *            instant in time which defines packfile expiration
1306 	 */
1307 	public void setPackExpire(Date packExpire) {
1308 		this.packExpire = packExpire;
1309 		packExpireAgeMillis = -1;
1310 	}
1311 
1312 	/**
1313 	 * Set the {@code gc --auto} option.
1314 	 *
1315 	 * With this option, gc checks whether any housekeeping is required; if not,
1316 	 * it exits without performing any work. Some JGit commands run
1317 	 * {@code gc --auto} after performing operations that could create many
1318 	 * loose objects.
1319 	 * <p>
1320 	 * Housekeeping is required if there are too many loose objects or too many
1321 	 * packs in the repository. If the number of loose objects exceeds the value
1322 	 * of the {@code gc.auto} option, JGit GC consolidates all existing packs into
1323 	 * a single pack (equivalent to the {@code -A} option of repack), whereas
1324 	 * git-core would combine all loose objects into a single pack using
1325 	 * {@code repack -d -l}. Setting the value of {@code gc.auto} to 0 disables
1326 	 * automatic packing of loose objects.
1327 	 * <p>
1328 	 * If the number of packs exceeds the value of {@code gc.autoPackLimit},
1329 	 * then existing packs (except those marked with a .keep file) are
1330 	 * consolidated into a single pack by using the {@code -A} option of repack.
1331 	 * Setting {@code gc.autoPackLimit} to 0 disables automatic consolidation of
1332 	 * packs.
1333 	 * <p>
1334 	 * Like native git, the following JGit commands run auto gc:
1335 	 * <ul>
1336 	 * <li>fetch</li>
1337 	 * <li>merge</li>
1338 	 * <li>rebase</li>
1339 	 * <li>receive-pack</li>
1340 	 * </ul>
1341 	 * The auto gc for receive-pack can be suppressed by setting the config
1342 	 * option {@code receive.autogc = false}
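	 * <p>
	 * A minimal sketch of an explicit auto gc run (assuming {@code repo} is an
	 * open FileRepository):
	 * <pre>{@code
	 * GC gc = new GC(repo);
	 * gc.setAuto(true);
	 * gc.gc(); // exits early unless housekeeping is needed
	 * }</pre>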
1343 	 *
1344 	 * @param auto
1345 	 *            defines whether gc should do automatic housekeeping
1346 	 */
1347 	public void setAuto(boolean auto) {
1348 		this.automatic = auto;
1349 	}
1350 
1351 	private boolean needGc() {
1352 		if (tooManyPacks()) {
1353 			addRepackAllOption();
1354 		} else if (!tooManyLooseObjects()) {
1355 			return false;
1356 		}
1357 		// TODO run pre-auto-gc hook, if it fails return false
1358 		return true;
1359 	}
1360 
1361 	private void addRepackAllOption() {
1362 		// TODO: if JGit GC is enhanced to support repack's option -l this
1363 		// method needs to be implemented
1364 	}
1365 
1366 	/**
1367 	 * @return {@code true} if number of packs > gc.autopacklimit (default 50)
1368 	 */
1369 	boolean tooManyPacks() {
1370 		int autopacklimit = repo.getConfig().getInt(
1371 				ConfigConstants.CONFIG_GC_SECTION,
1372 				ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
1373 				DEFAULT_AUTOPACKLIMIT);
1374 		if (autopacklimit <= 0) {
1375 			return false;
1376 		}
1377 		// JGit always creates two packfiles, one for the objects reachable from
1378 		// branches, and another one for the rest
1379 		return repo.getObjectDatabase().getPacks().size() > (autopacklimit + 1);
1380 	}
1381 
1382 	/**
1383 	 * Quickly estimate the number of loose objects; SHA-1 is distributed evenly,
1384 	 * so counting the objects in one fan-out directory (bucket "17") is sufficient
1385 	 *
1386 	 * @return {@code true} if number of loose objects > gc.auto (default 6700)
1387 	 */
1388 	boolean tooManyLooseObjects() {
1389 		int auto = repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
1390 				ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
1391 		if (auto <= 0) {
1392 			return false;
1393 		}
1394 		int n = 0;
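		// Scale gc.auto down to a single fan-out bucket (rounding up); the
		// default of 6700 yields a threshold of 27 objects in bucket "17".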
1395 		int threshold = (auto + 255) / 256;
1396 		Path dir = repo.getObjectsDirectory().toPath().resolve("17"); //$NON-NLS-1$
1397 		if (!Files.exists(dir)) {
1398 			return false;
1399 		}
1400 		try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir,
1401 				new DirectoryStream.Filter<Path>() {
1402 
1403 					@Override
1404 					public boolean accept(Path file) throws IOException {
1405 						Path fileName = file.getFileName();
1406 						return Files.isRegularFile(file) && fileName != null
1407 								&& PATTERN_LOOSE_OBJECT
1408 										.matcher(fileName.toString()).matches();
1409 					}
1410 				})) {
1411 			for (Iterator<Path> iter = stream.iterator(); iter.hasNext();
1412 					iter.next()) {
1413 				if (++n > threshold) {
1414 					return true;
1415 				}
1416 			}
1417 		} catch (IOException e) {
1418 			LOG.error(e.getMessage(), e);
1419 		}
1420 		return false;
1421 	}
1422 }