1   /*
2    * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
3    * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org> and others
4    *
5    * This program and the accompanying materials are made available under the
6    * terms of the Eclipse Distribution License v. 1.0 which is available at
7    * https://www.eclipse.org/org/documents/edl-v10.php.
8    *
9    * SPDX-License-Identifier: BSD-3-Clause
10   */
11  package org.eclipse.jgit.internal.storage.file;
12  
13  import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
14  import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
15  
16  import java.io.File;
17  import java.io.FileOutputStream;
18  import java.io.IOException;
19  import java.io.OutputStream;
20  import java.io.PrintWriter;
21  import java.io.StringWriter;
22  import java.nio.channels.Channels;
23  import java.nio.channels.FileChannel;
24  import java.nio.file.DirectoryNotEmptyException;
25  import java.nio.file.DirectoryStream;
26  import java.nio.file.Files;
27  import java.nio.file.Path;
28  import java.nio.file.StandardCopyOption;
29  import java.text.MessageFormat;
30  import java.text.ParseException;
31  import java.time.Instant;
32  import java.time.temporal.ChronoUnit;
33  import java.util.ArrayList;
34  import java.util.Collection;
35  import java.util.Collections;
36  import java.util.Comparator;
37  import java.util.Date;
38  import java.util.HashMap;
39  import java.util.HashSet;
40  import java.util.Iterator;
41  import java.util.LinkedList;
42  import java.util.List;
43  import java.util.Map;
44  import java.util.Objects;
45  import java.util.Set;
46  import java.util.TreeMap;
47  import java.util.concurrent.Callable;
48  import java.util.concurrent.ExecutorService;
49  import java.util.regex.Pattern;
50  import java.util.stream.Collectors;
51  import java.util.stream.Stream;
52  
53  import org.eclipse.jgit.annotations.NonNull;
54  import org.eclipse.jgit.dircache.DirCacheIterator;
55  import org.eclipse.jgit.errors.CancelledException;
56  import org.eclipse.jgit.errors.CorruptObjectException;
57  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
58  import org.eclipse.jgit.errors.MissingObjectException;
59  import org.eclipse.jgit.errors.NoWorkTreeException;
60  import org.eclipse.jgit.internal.JGitText;
61  import org.eclipse.jgit.internal.storage.pack.PackExt;
62  import org.eclipse.jgit.internal.storage.pack.PackWriter;
63  import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
64  import org.eclipse.jgit.lib.ConfigConstants;
65  import org.eclipse.jgit.lib.Constants;
66  import org.eclipse.jgit.lib.FileMode;
67  import org.eclipse.jgit.lib.NullProgressMonitor;
68  import org.eclipse.jgit.lib.ObjectId;
69  import org.eclipse.jgit.lib.ObjectIdSet;
70  import org.eclipse.jgit.lib.ObjectLoader;
71  import org.eclipse.jgit.lib.ObjectReader;
72  import org.eclipse.jgit.lib.ProgressMonitor;
73  import org.eclipse.jgit.lib.Ref;
74  import org.eclipse.jgit.lib.Ref.Storage;
75  import org.eclipse.jgit.lib.RefDatabase;
76  import org.eclipse.jgit.lib.ReflogEntry;
77  import org.eclipse.jgit.lib.ReflogReader;
78  import org.eclipse.jgit.lib.internal.WorkQueue;
79  import org.eclipse.jgit.revwalk.ObjectWalk;
80  import org.eclipse.jgit.revwalk.RevObject;
81  import org.eclipse.jgit.revwalk.RevWalk;
82  import org.eclipse.jgit.storage.pack.PackConfig;
83  import org.eclipse.jgit.treewalk.TreeWalk;
84  import org.eclipse.jgit.treewalk.filter.TreeFilter;
85  import org.eclipse.jgit.util.FileUtils;
86  import org.eclipse.jgit.util.GitDateParser;
87  import org.eclipse.jgit.util.SystemReader;
88  import org.slf4j.Logger;
89  import org.slf4j.LoggerFactory;
90  
91  /**
92   * A garbage collector for git
93   * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. Instances of
94   * this class are not thread-safe. Don't use the same instance from multiple
95   * threads.
96   *
97   * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
98   * adapted to FileRepositories.
99   */
100 public class GC {
101 	private static final Logger LOG = LoggerFactory
102 			.getLogger(GC.class);
103 
104 	private static final String PRUNE_EXPIRE_DEFAULT = "2.weeks.ago"; //$NON-NLS-1$
105 
106 	private static final String PRUNE_PACK_EXPIRE_DEFAULT = "1.hour.ago"; //$NON-NLS-1$
107 
108 	private static final Pattern PATTERN_LOOSE_OBJECT = Pattern
109 			.compile("[0-9a-fA-F]{38}"); //$NON-NLS-1$
110 
111 	private static final String PACK_EXT = "." + PackExt.PACK.getExtension();//$NON-NLS-1$
112 
113 	private static final String BITMAP_EXT = "." //$NON-NLS-1$
114 			+ PackExt.BITMAP_INDEX.getExtension();
115 
116 	private static final String INDEX_EXT = "." + PackExt.INDEX.getExtension(); //$NON-NLS-1$
117 
118 	private static final int DEFAULT_AUTOPACKLIMIT = 50;
119 
120 	private static final int DEFAULT_AUTOLIMIT = 6700;
121 
122 	private static volatile ExecutorService executor;
123 
124 	/**
125 	 * Set the executor for running auto-gc in the background. If no executor is
126 	 * set, JGit's own WorkQueue will be used.
127 	 *
128 	 * @param e
129 	 *            the executor to be used for running auto-gc
130 	 */
131 	public static void setExecutor(ExecutorService e) {
132 		executor = e;
133 	}
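	// Illustrative sketch (not part of the original source): an application can
	// install a shared executor once at startup so background auto-gc runs on
	// its own pool instead of JGit's internal WorkQueue. The caller then owns
	// the executor's lifecycle and must shut it down on exit.
	//
	//   GC.setExecutor(java.util.concurrent.Executors.newSingleThreadExecutor());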
134 
135 	private final FileRepository repo;
136 
137 	private ProgressMonitor pm;
138 
139 	private long expireAgeMillis = -1;
140 
141 	private Date expire;
142 
143 	private long packExpireAgeMillis = -1;
144 
145 	private Date packExpire;
146 
147 	private PackConfig pconfig;
148 
149 	/**
150 	 * The refs which existed during the last call to {@link #repack()}. This is
151 	 * needed during {@link #prune(Set)} where we can optimize by looking at the
152 	 * difference between the current refs and the refs which existed during the
153 	 * last {@link #repack()}.
154 	 */
155 	private Collection<Ref> lastPackedRefs;
156 
157 	/**
158 	 * Holds the starting time of the last repack() execution. This is needed in
159 	 * prune() to inspect only those reflog entries which have been added since
160 	 * the last repack().
161 	 */
162 	private long lastRepackTime;
163 
164 	/**
165 	 * Whether gc should do automatic housekeeping
166 	 */
167 	private boolean automatic;
168 
169 	/**
170 	 * Whether to run gc in a background thread
171 	 */
172 	private boolean background;
173 
174 	/**
175 	 * Creates a new garbage collector with default values. An expiration time
176 	 * of two weeks and a NullProgressMonitor will be used.
177 	 *
178 	 * @param repo
179 	 *            the repo to work on
180 	 */
181 	public GC(FileRepository repo) {
182 		this.repo = repo;
183 		this.pconfig = new PackConfig(repo);
184 		this.pm = NullProgressMonitor.INSTANCE;
185 	}
186 
187 	/**
188 	 * Runs a garbage collector on a
189 	 * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. It will
190 	 * <ul>
191 	 * <li>pack loose references into packed-refs</li>
192 	 * <li>repack all reachable objects into new pack files and delete the old
193 	 * pack files</li>
194 	 * <li>prune all loose objects which are now contained in the pack files</li>
195 	 * </ul>
196 	 *
197 	 * If {@link #setAuto(boolean)} was set to {@code true} {@code gc} will
198 	 * first check whether any housekeeping is required; if not, it exits
199 	 * without performing any work.
200 	 *
201 	 * If {@link #setBackground(boolean)} was set to {@code true}
202 	 * {@code collectGarbage} will start the gc in the background, and then
203 	 * return immediately. In this case, errors will not be reported except in
204 	 * gc.log.
205 	 *
206 	 * @return the collection of
207 	 *         {@link org.eclipse.jgit.internal.storage.file.PackFile}'s which
208 	 *         are newly created
209 	 * @throws java.io.IOException
210 	 * @throws java.text.ParseException
211 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
212 	 *             parsed
213 	 */
214 	// TODO(ms): change signature and return Future<Collection<PackFile>>
215 	@SuppressWarnings("FutureReturnValueIgnored")
216 	public Collection<PackFile> gc() throws IOException, ParseException {
217 		if (!background) {
218 			return doGc();
219 		}
220 		final GcLog gcLog = new GcLog(repo);
221 		if (!gcLog.lock()) {
222 			// there is already a background gc running
223 			return Collections.emptyList();
224 		}
225 
226 		Callable<Collection<PackFile>> gcTask = () -> {
227 			try {
228 				Collection<PackFile> newPacks = doGc();
229 				if (automatic && tooManyLooseObjects()) {
230 					String message = JGitText.get().gcTooManyUnpruned;
231 					gcLog.write(message);
232 					gcLog.commit();
233 				}
234 				return newPacks;
235 			} catch (IOException | ParseException e) {
236 				try {
237 					gcLog.write(e.getMessage());
238 					StringWriter sw = new StringWriter();
239 					e.printStackTrace(new PrintWriter(sw));
240 					gcLog.write(sw.toString());
241 					gcLog.commit();
242 				} catch (IOException e2) {
243 					e2.addSuppressed(e);
244 					LOG.error(e2.getMessage(), e2);
245 				}
246 			} finally {
247 				gcLog.unlock();
248 			}
249 			return Collections.emptyList();
250 		};
251 		// TODO(ms): change signature and return the Future
252 		executor().submit(gcTask);
253 		return Collections.emptyList();
254 	}
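	// Usage sketch (illustrative only; "db" stands for an existing
	// FileRepository instance):
	//
	//   GC gc = new GC(db);
	//   gc.setProgressMonitor(NullProgressMonitor.INSTANCE);
	//   Collection<PackFile> created = gc.gc();
	//
	// When background mode is enabled the returned collection is empty and the
	// actual work, including error reporting via gc.log, happens on the
	// configured executor.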
255 
256 	private ExecutorService executor() {
257 		return (executor != null) ? executor : WorkQueue.getExecutor();
258 	}
259 
260 	private Collection<PackFile> doGc() throws IOException, ParseException {
261 		if (automatic && !needGc()) {
262 			return Collections.emptyList();
263 		}
264 		pm.start(6 /* tasks */);
265 		packRefs();
266 		// TODO: implement reflog_expire(pm, repo);
267 		Collection<PackFile> newPacks = repack();
268 		prune(Collections.emptySet());
269 		// TODO: implement rerere_gc(pm);
270 		return newPacks;
271 	}
272 
273 	/**
274 	 * Loosen objects in a pack file which are not also in the newly-created
275 	 * pack files.
276 	 *
277 	 * @param inserter
278 	 * @param reader
279 	 * @param pack
280 	 * @param existing
281 	 * @throws IOException
282 	 */
283 	private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader, PackFile pack, HashSet<ObjectId> existing)
284 			throws IOException {
285 		for (PackIndex.MutableEntry entry : pack) {
286 			ObjectId oid = entry.toObjectId();
287 			if (existing.contains(oid)) {
288 				continue;
289 			}
290 			existing.add(oid);
291 			ObjectLoader loader = reader.open(oid);
292 			inserter.insert(loader.getType(),
293 					loader.getSize(),
294 					loader.openStream(),
295 					true /* create this object even though it's a duplicate */);
296 		}
297 	}
298 
299 	/**
300 	 * Delete old pack files. What is 'old' is defined by specifying a set of
301 	 * old pack files and a set of new pack files. Each pack file contained in
302 	 * old pack files but not contained in new pack files will be deleted. If
303 	 * preserveOldPacks is set, keep a copy of the pack file in the preserve
304 	 * directory. If an expirationDate is set then pack files which are younger
305 	 * than the expirationDate will not be deleted nor preserved.
306 	 * <p>
307 	 * If we're not immediately expiring loose objects, loosen any objects
308 	 * in the old pack files which aren't in the new pack files.
309 	 *
310 	 * @param oldPacks
311 	 * @param newPacks
312 	 * @throws ParseException
313 	 * @throws IOException
314 	 */
315 	private void deleteOldPacks(Collection<PackFile> oldPacks,
316 			Collection<PackFile> newPacks) throws ParseException, IOException {
317 		HashSet<ObjectId> ids = new HashSet<>();
318 		for (PackFile pack : newPacks) {
319 			for (PackIndex.MutableEntry entry : pack) {
320 				ids.add(entry.toObjectId());
321 			}
322 		}
323 		ObjectReader reader = repo.newObjectReader();
324 		ObjectDirectory dir = repo.getObjectDatabase();
325 		ObjectDirectoryInserter inserter = dir.newInserter();
326 		boolean shouldLoosen = !"now".equals(getPruneExpireStr()) && //$NON-NLS-1$
327 			getExpireDate() < Long.MAX_VALUE;
328 
329 		prunePreserved();
330 		long packExpireDate = getPackExpireDate();
331 		oldPackLoop: for (PackFile oldPack : oldPacks) {
332 			checkCancelled();
333 			String oldName = oldPack.getPackName();
334 			// check whether an old pack file is also among the list of new
335 			// pack files. Then we must not delete it.
336 			for (PackFile newPack : newPacks)
337 				if (oldName.equals(newPack.getPackName()))
338 					continue oldPackLoop;
339 
340 			if (!oldPack.shouldBeKept()
341 					&& repo.getFS()
342 							.lastModifiedInstant(oldPack.getPackFile())
343 							.toEpochMilli() < packExpireDate) {
344 				oldPack.close();
345 				if (shouldLoosen) {
346 					loosen(inserter, reader, oldPack, ids);
347 				}
348 				prunePack(oldName);
349 			}
350 		}
351 
352 		// close the complete object database. That's the only chance to force
353 		// rescanning and to detect that certain pack files are now deleted.
354 		repo.getObjectDatabase().close();
355 	}
356 
357 	/**
358 	 * Deletes an old pack file, unless 'preserve-oldpacks' is set, in which
359 	 * case it moves the pack file to the preserved directory.
360 	 *
361 	 * @param packFile
362 	 * @param packName
363 	 * @param ext
364 	 * @param deleteOptions
365 	 * @throws IOException
366 	 */
367 	private void removeOldPack(File packFile, String packName, PackExt ext,
368 			int deleteOptions) throws IOException {
369 		if (pconfig.isPreserveOldPacks()) {
370 			File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
371 			FileUtils.mkdir(oldPackDir, true);
372 
373 			String oldPackName = "pack-" + packName + ".old-" + ext.getExtension();  //$NON-NLS-1$ //$NON-NLS-2$
374 			File oldPackFile = new File(oldPackDir, oldPackName);
375 			FileUtils.rename(packFile, oldPackFile);
376 		} else {
377 			FileUtils.delete(packFile, deleteOptions);
378 		}
379 	}
380 
381 	/**
382 	 * Delete the preserved directory including all pack files within
383 	 */
384 	private void prunePreserved() {
385 		if (pconfig.isPrunePreserved()) {
386 			try {
387 				FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
388 						FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
389 			} catch (IOException e) {
390 				// Deletion of the preserved pack files failed. Silently return.
391 			}
392 		}
393 	}
394 
395 	/**
396 	 * Delete files associated with a single pack file. First try to delete the
397 	 * ".pack" file because on some platforms the ".pack" file may be locked and
398 	 * can't be deleted. In such a case it is better to detect this early and
399 	 * give up on deleting files for this packfile. Otherwise we may delete the
400 	 * ".index" file and when failing to delete the ".pack" file we are left
401 	 * with a ".pack" file without a ".index" file.
402 	 *
403 	 * @param packName
404 	 */
405 	private void prunePack(String packName) {
406 		PackExt[] extensions = PackExt.values();
407 		try {
408 			// Delete the .pack file first and if this fails give up on deleting
409 			// the other files
410 			int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
411 			for (PackExt ext : extensions)
412 				if (PackExt.PACK.equals(ext)) {
413 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
414 					removeOldPack(f, packName, ext, deleteOptions);
415 					break;
416 				}
417 			// The .pack file has been deleted. Delete as many of the other
418 			// files as you can.
419 			deleteOptions |= FileUtils.IGNORE_ERRORS;
420 			for (PackExt ext : extensions) {
421 				if (!PackExt.PACK.equals(ext)) {
422 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
423 					removeOldPack(f, packName, ext, deleteOptions);
424 				}
425 			}
426 		} catch (IOException e) {
427 			// Deletion of the .pack file failed. Silently return.
428 		}
429 	}
430 
431 	/**
432 	 * Like "git prune-packed" this method tries to prune all loose objects
433 	 * which can be found in packs. If certain objects can't be pruned (e.g.
434 	 * because the filesystem delete operation fails) this is silently ignored.
435 	 *
436 	 * @throws java.io.IOException
437 	 */
438 	public void prunePacked() throws IOException {
439 		ObjectDirectory objdb = repo.getObjectDatabase();
440 		Collection<PackFile> packs = objdb.getPacks();
441 		File objects = repo.getObjectsDirectory();
442 		String[] fanout = objects.list();
443 
444 		if (fanout != null && fanout.length > 0) {
445 			pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
446 			try {
447 				for (String d : fanout) {
448 					checkCancelled();
449 					pm.update(1);
450 					if (d.length() != 2)
451 						continue;
452 					String[] entries = new File(objects, d).list();
453 					if (entries == null)
454 						continue;
455 					for (String e : entries) {
456 						checkCancelled();
457 						if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
458 							continue;
459 						ObjectId id;
460 						try {
461 							id = ObjectId.fromString(d + e);
462 						} catch (IllegalArgumentException notAnObject) {
463 							// ignoring the file that does not represent loose
464 							// ignore a file that does not represent a loose
465 							// object
466 						}
467 						boolean found = false;
468 						for (PackFile p : packs) {
469 							checkCancelled();
470 							if (p.hasObject(id)) {
471 								found = true;
472 								break;
473 							}
474 						}
475 						if (found)
476 							FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
477 									| FileUtils.SKIP_MISSING
478 									| FileUtils.IGNORE_ERRORS);
479 					}
480 				}
481 			} finally {
482 				pm.endTask();
483 			}
484 		}
485 	}
486 
487 	/**
488 	 * Like "git prune" this method tries to prune all loose objects which are
489 	 * unreferenced. If certain objects can't be pruned (e.g. because the
490 	 * filesystem delete operation fails) this is silently ignored.
491 	 *
492 	 * @param objectsToKeep
493 	 *            a set of objects which should explicitly not be pruned
494 	 * @throws java.io.IOException
495 	 * @throws java.text.ParseException
496 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
497 	 *             parsed
498 	 */
499 	public void prune(Set<ObjectId> objectsToKeep) throws IOException,
500 			ParseException {
501 		long expireDate = getExpireDate();
502 
503 		// Collect all loose objects which are old enough, not referenced from
504 		// the index and not in objectsToKeep
505 		Map<ObjectId, File> deletionCandidates = new HashMap<>();
506 		Set<ObjectId> indexObjects = null;
507 		File objects = repo.getObjectsDirectory();
508 		String[] fanout = objects.list();
509 		if (fanout == null || fanout.length == 0) {
510 			return;
511 		}
512 		pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
513 				fanout.length);
514 		try {
515 			for (String d : fanout) {
516 				checkCancelled();
517 				pm.update(1);
518 				if (d.length() != 2)
519 					continue;
520 				File dir = new File(objects, d);
521 				File[] entries = dir.listFiles();
522 				if (entries == null || entries.length == 0) {
523 					FileUtils.delete(dir, FileUtils.IGNORE_ERRORS);
524 					continue;
525 				}
526 				for (File f : entries) {
527 					checkCancelled();
528 					String fName = f.getName();
529 					if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
530 						continue;
531 					if (repo.getFS().lastModifiedInstant(f)
532 							.toEpochMilli() >= expireDate) {
533 						continue;
534 					}
535 					try {
536 						ObjectId id = ObjectId.fromString(d + fName);
537 						if (objectsToKeep.contains(id))
538 							continue;
539 						if (indexObjects == null)
540 							indexObjects = listNonHEADIndexObjects();
541 						if (indexObjects.contains(id))
542 							continue;
543 						deletionCandidates.put(id, f);
544 					} catch (IllegalArgumentException notAnObject) {
545 						// ignore a file that does not represent a loose
546 						// object
547 					}
548 				}
549 			}
550 		} finally {
551 			pm.endTask();
552 		}
553 
554 		if (deletionCandidates.isEmpty()) {
555 			return;
556 		}
557 
558 		checkCancelled();
559 
560 		// From the set of current refs remove all those which have been handled
561 		// during last repack(). Only those refs will survive which have been
562 		// added or modified since the last repack. Only these can save existing
563 		// loose refs from being pruned.
564 		Collection<Ref> newRefs;
565 		if (lastPackedRefs == null || lastPackedRefs.isEmpty())
566 			newRefs = getAllRefs();
567 		else {
568 			Map<String, Ref> last = new HashMap<>();
569 			for (Ref r : lastPackedRefs) {
570 				last.put(r.getName(), r);
571 			}
572 			newRefs = new ArrayList<>();
573 			for (Ref r : getAllRefs()) {
574 				Ref old = last.get(r.getName());
575 				if (!equals(r, old)) {
576 					newRefs.add(r);
577 				}
578 			}
579 		}
580 
581 		if (!newRefs.isEmpty()) {
582 			// There are new/modified refs! Check which loose objects are now
583 			// referenced by these modified refs (or their reflogentries).
584 			// Remove these loose objects
585 			// from the deletionCandidates. When the last candidate is removed
586 			// leave this method.
587 			ObjectWalk w = new ObjectWalk(repo);
588 			try {
589 				for (Ref cr : newRefs) {
590 					checkCancelled();
591 					w.markStart(w.parseAny(cr.getObjectId()));
592 				}
593 				if (lastPackedRefs != null)
594 					for (Ref lpr : lastPackedRefs) {
595 						w.markUninteresting(w.parseAny(lpr.getObjectId()));
596 					}
597 				removeReferenced(deletionCandidates, w);
598 			} finally {
599 				w.dispose();
600 			}
601 		}
602 
603 		if (deletionCandidates.isEmpty())
604 			return;
605 
606 		// Since we have not left the method yet there are still
607 		// deletionCandidates. Last chance for these objects not to be pruned is
608 		// that they are referenced by reflog entries. Even refs which currently
609 		// point to the same object as during last repack() may have
610 		// additional reflog entries not handled during last repack()
611 		ObjectWalk w = new ObjectWalk(repo);
612 		try {
613 			for (Ref ar : getAllRefs())
614 				for (ObjectId id : listRefLogObjects(ar, lastRepackTime)) {
615 					checkCancelled();
616 					w.markStart(w.parseAny(id));
617 				}
618 			if (lastPackedRefs != null)
619 				for (Ref lpr : lastPackedRefs) {
620 					checkCancelled();
621 					w.markUninteresting(w.parseAny(lpr.getObjectId()));
622 				}
623 			removeReferenced(deletionCandidates, w);
624 		} finally {
625 			w.dispose();
626 		}
627 
628 		if (deletionCandidates.isEmpty())
629 			return;
630 
631 		checkCancelled();
632 
633 		// delete all candidates which have survived: these are unreferenced
634 		// loose objects. Make a last check, though, to avoid deleting objects
635 		// that could have been referenced while the candidates list was being
636 		// built (by an incoming push, for example).
637 		Set<File> touchedFanout = new HashSet<>();
638 		for (File f : deletionCandidates.values()) {
639 			if (f.lastModified() < expireDate) {
640 				f.delete();
641 				touchedFanout.add(f.getParentFile());
642 			}
643 		}
644 
645 		for (File f : touchedFanout) {
646 			FileUtils.delete(f,
647 					FileUtils.EMPTY_DIRECTORIES_ONLY | FileUtils.IGNORE_ERRORS);
648 		}
649 
650 		repo.getObjectDatabase().close();
651 	}
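	// Illustrative sketch: prune unreferenced loose objects immediately,
	// bypassing the configured gc.pruneexpire grace period ("db" is an existing
	// FileRepository):
	//
	//   GC gc = new GC(db);
	//   gc.setExpireAgeMillis(0);
	//   gc.prune(Collections.emptySet());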
652 
653 	private long getExpireDate() throws ParseException {
654 		long expireDate = Long.MAX_VALUE;
655 
656 		if (expire == null && expireAgeMillis == -1) {
657 			String pruneExpireStr = getPruneExpireStr();
658 			if (pruneExpireStr == null)
659 				pruneExpireStr = PRUNE_EXPIRE_DEFAULT;
660 			expire = GitDateParser.parse(pruneExpireStr, null, SystemReader
661 					.getInstance().getLocale());
662 			expireAgeMillis = -1;
663 		}
664 		if (expire != null)
665 			expireDate = expire.getTime();
666 		if (expireAgeMillis != -1)
667 			expireDate = System.currentTimeMillis() - expireAgeMillis;
668 		return expireDate;
669 	}
670 
671 	private String getPruneExpireStr() {
672 		return repo.getConfig().getString(
673                         ConfigConstants.CONFIG_GC_SECTION, null,
674                         ConfigConstants.CONFIG_KEY_PRUNEEXPIRE);
675 	}
676 
677 	private long getPackExpireDate() throws ParseException {
678 		long packExpireDate = Long.MAX_VALUE;
679 
680 		if (packExpire == null && packExpireAgeMillis == -1) {
681 			String prunePackExpireStr = repo.getConfig().getString(
682 					ConfigConstants.CONFIG_GC_SECTION, null,
683 					ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE);
684 			if (prunePackExpireStr == null)
685 				prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT;
686 			packExpire = GitDateParser.parse(prunePackExpireStr, null,
687 					SystemReader.getInstance().getLocale());
688 			packExpireAgeMillis = -1;
689 		}
690 		if (packExpire != null)
691 			packExpireDate = packExpire.getTime();
692 		if (packExpireAgeMillis != -1)
693 			packExpireDate = System.currentTimeMillis() - packExpireAgeMillis;
694 		return packExpireDate;
695 	}
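	// Configuration sketch: both expiry values can be set in the repository
	// config using the date formats understood by GitDateParser, e.g.
	//
	//   [gc]
	//   	pruneExpire = 2.weeks.ago
	//   	prunePackExpire = 1.hour.ago
	//
	// Setting gc.pruneExpire to "now" disables the loosening of objects from
	// deleted packs (see deleteOldPacks above).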
696 
697 	/**
698 	 * Remove all entries from a map whose key is the id of an object referenced
699 	 * by the given ObjectWalk
700 	 *
701 	 * @param id2File
702 	 * @param w
703 	 * @throws MissingObjectException
704 	 * @throws IncorrectObjectTypeException
705 	 * @throws IOException
706 	 */
707 	private void removeReferenced(Map<ObjectId, File> id2File,
708 			ObjectWalk w) throws MissingObjectException,
709 			IncorrectObjectTypeException, IOException {
710 		RevObject ro = w.next();
711 		while (ro != null) {
712 			checkCancelled();
713 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
714 				return;
715 			}
716 			ro = w.next();
717 		}
718 		ro = w.nextObject();
719 		while (ro != null) {
720 			checkCancelled();
721 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
722 				return;
723 			}
724 			ro = w.nextObject();
725 		}
726 	}
727 
728 	private static boolean equals(Ref r1, Ref r2) {
729 		if (r1 == null || r2 == null) {
730 			return false;
731 		}
732 		if (r1.isSymbolic()) {
733 			return r2.isSymbolic() && r1.getTarget().getName()
734 					.equals(r2.getTarget().getName());
735 		}
736 		return !r2.isSymbolic()
737 				&& Objects.equals(r1.getObjectId(), r2.getObjectId());
738 	}
739 
740 	/**
741 	 * Pack ref storage. For a RefDirectory database, this packs all
742 	 * non-symbolic, loose refs into packed-refs. For Reftable, all of the data
743 	 * is compacted into a single table.
744 	 *
745 	 * @throws java.io.IOException
746 	 */
747 	public void packRefs() throws IOException {
748 		RefDatabase refDb = repo.getRefDatabase();
749 		if (refDb instanceof FileReftableDatabase) {
750 			// TODO: abstract this more cleanly.
751 			pm.beginTask(JGitText.get().packRefs, 1);
752 			try {
753 				((FileReftableDatabase) refDb).compactFully();
754 			} finally {
755 				pm.endTask();
756 			}
757 			return;
758 		}
759 
760 		Collection<Ref> refs = refDb.getRefsByPrefix(Constants.R_REFS);
761 		List<String> refsToBePacked = new ArrayList<>(refs.size());
762 		pm.beginTask(JGitText.get().packRefs, refs.size());
763 		try {
764 			for (Ref ref : refs) {
765 				checkCancelled();
766 				if (!ref.isSymbolic() && ref.getStorage().isLoose())
767 					refsToBePacked.add(ref.getName());
768 				pm.update(1);
769 			}
770 			((RefDirectory) repo.getRefDatabase()).pack(refsToBePacked);
771 		} finally {
772 			pm.endTask();
773 		}
774 	}
775 
776 	/**
777 	 * Packs all objects which are reachable from any of the heads into one pack
778 	 * file. Additionally all objects which are not reachable from any head but
779 	 * which are reachable from any of the other refs (e.g. tags), special refs
780 	 * (e.g. FETCH_HEAD) or index are packed into a separate pack file. Objects
781 	 * included in pack files which have a .keep file associated are never
782 	 * repacked. All old pack files which existed before are deleted.
783 	 *
784 	 * @return a collection of the newly created pack files
785 	 * @throws java.io.IOException
786 	 *             when during reading of refs, index, packfiles, objects,
787 	 *             reflog-entries or during writing to the packfiles
788 	 *             {@link java.io.IOException} occurs
789 	 */
790 	public Collection<PackFile> repack() throws IOException {
791 		Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
792 
793 		long time = System.currentTimeMillis();
794 		Collection<Ref> refsBefore = getAllRefs();
795 
796 		Set<ObjectId> allHeadsAndTags = new HashSet<>();
797 		Set<ObjectId> allHeads = new HashSet<>();
798 		Set<ObjectId> allTags = new HashSet<>();
799 		Set<ObjectId> nonHeads = new HashSet<>();
800 		Set<ObjectId> txnHeads = new HashSet<>();
801 		Set<ObjectId> tagTargets = new HashSet<>();
802 		Set<ObjectId> indexObjects = listNonHEADIndexObjects();
803 		RefDatabase refdb = repo.getRefDatabase();
804 
805 		for (Ref ref : refsBefore) {
806 			checkCancelled();
807 			nonHeads.addAll(listRefLogObjects(ref, 0));
808 			if (ref.isSymbolic() || ref.getObjectId() == null) {
809 				continue;
810 			}
811 			if (isHead(ref)) {
812 				allHeads.add(ref.getObjectId());
813 			} else if (isTag(ref)) {
814 				allTags.add(ref.getObjectId());
815 			} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
816 				txnHeads.add(ref.getObjectId());
817 			} else {
818 				nonHeads.add(ref.getObjectId());
819 			}
820 			if (ref.getPeeledObjectId() != null) {
821 				tagTargets.add(ref.getPeeledObjectId());
822 			}
823 		}
824 
825 		List<ObjectIdSet> excluded = new LinkedList<>();
826 		for (PackFile f : repo.getObjectDatabase().getPacks()) {
827 			checkCancelled();
828 			if (f.shouldBeKept())
829 				excluded.add(f.getIndex());
830 		}
831 
832 		// Don't exclude tags that are also branch tips
833 		allTags.removeAll(allHeads);
834 		allHeadsAndTags.addAll(allHeads);
835 		allHeadsAndTags.addAll(allTags);
836 
837 		// Hoist all branch tips and tags earlier in the pack file
838 		tagTargets.addAll(allHeadsAndTags);
839 		nonHeads.addAll(indexObjects);
840 
841 		// Combine the GC_REST objects into the GC pack if requested
842 		if (pconfig.getSinglePack()) {
843 			allHeadsAndTags.addAll(nonHeads);
844 			nonHeads.clear();
845 		}
846 
847 		List<PackFile> ret = new ArrayList<>(2);
848 		PackFile heads = null;
849 		if (!allHeadsAndTags.isEmpty()) {
850 			heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
851 					tagTargets, excluded);
852 			if (heads != null) {
853 				ret.add(heads);
854 				excluded.add(0, heads.getIndex());
855 			}
856 		}
857 		if (!nonHeads.isEmpty()) {
858 			PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
859 					tagTargets, excluded);
860 			if (rest != null)
861 				ret.add(rest);
862 		}
863 		if (!txnHeads.isEmpty()) {
864 			PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
865 					null, excluded);
866 			if (txn != null)
867 				ret.add(txn);
868 		}
869 		try {
870 			deleteOldPacks(toBeDeleted, ret);
871 		} catch (ParseException e) {
872 			// TODO: the exception has to be wrapped into an IOException because
873 			// throwing the ParseException directly would break the API, instead
874 			// we should throw a ConfigInvalidException
875 			throw new IOException(e);
876 		}
877 		prunePacked();
878 		if (repo.getRefDatabase() instanceof RefDirectory) {
879 			// TODO: abstract this more cleanly.
880 			deleteEmptyRefsFolders();
881 		}
882 		deleteOrphans();
883 		deleteTempPacksIdx();
884 
885 		lastPackedRefs = refsBefore;
886 		lastRepackTime = time;
887 		return ret;
888 	}
889 
890 	private static boolean isHead(Ref ref) {
891 		return ref.getName().startsWith(Constants.R_HEADS);
892 	}
893 
894 	private static boolean isTag(Ref ref) {
895 		return ref.getName().startsWith(Constants.R_TAGS);
896 	}
897 
898 	private void deleteEmptyRefsFolders() throws IOException {
899 		Path refs = repo.getDirectory().toPath().resolve(Constants.R_REFS);
900 		// Avoid deleting a folder that was created after the threshold so that concurrent
901 		// operations trying to create a reference are not impacted
902 		Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
903 		try (Stream<Path> entries = Files.list(refs)
904 				.filter(Files::isDirectory)) {
905 			Iterator<Path> iterator = entries.iterator();
906 			while (iterator.hasNext()) {
907 				try (Stream<Path> s = Files.list(iterator.next())) {
908 					s.filter(path -> canBeSafelyDeleted(path, threshold)).forEach(this::deleteDir);
909 				}
910 			}
911 		}
912 	}
913 
914 	private boolean canBeSafelyDeleted(Path path, Instant threshold) {
915 		try {
916 			return Files.getLastModifiedTime(path).toInstant().isBefore(threshold);
917 		}
918 		catch (IOException e) {
919 			LOG.warn(MessageFormat.format(
920 					JGitText.get().cannotAccessLastModifiedForSafeDeletion,
921 					path), e);
922 			return false;
923 		}
924 	}
925 
926 	private void deleteDir(Path dir) {
927 		try (Stream<Path> dirs = Files.walk(dir)) {
928 			dirs.filter(this::isDirectory).sorted(Comparator.reverseOrder())
929 					.forEach(this::delete);
930 		} catch (IOException e) {
931 			LOG.error(e.getMessage(), e);
932 		}
933 	}
934 
935 	private boolean isDirectory(Path p) {
936 		return p.toFile().isDirectory();
937 	}
938 
939 	private void delete(Path d) {
940 		try {
941 			Files.delete(d);
942 		} catch (DirectoryNotEmptyException e) {
943 			// Don't log
944 		} catch (IOException e) {
945 			LOG.error(MessageFormat.format(JGitText.get().cannotDeleteFile, d),
946 					e);
947 		}
948 	}
949 
950 	/**
951 	 * Deletes orphans
952 	 * <p>
953 	 * A file is considered an orphan if it is either a "bitmap" or an index
954 	 * file, and its corresponding pack file is missing in the list.
955 	 * </p>
956 	 */
957 	private void deleteOrphans() {
958 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
959 		List<String> fileNames = null;
960 		try (Stream<Path> files = Files.list(packDir)) {
961 			fileNames = files.map(path -> path.getFileName().toString())
962 					.filter(name -> (name.endsWith(PACK_EXT)
963 							|| name.endsWith(BITMAP_EXT)
964 							|| name.endsWith(INDEX_EXT)))
965 					.sorted(Collections.reverseOrder())
966 					.collect(Collectors.toList());
967 		} catch (IOException e1) {
968 			// ignore
969 		}
970 		if (fileNames == null) {
971 			return;
972 		}
973 
974 		String base = null;
975 		for (String n : fileNames) {
976 			if (n.endsWith(PACK_EXT)) {
977 				base = n.substring(0, n.lastIndexOf('.'));
978 			} else {
979 				if (base == null || !n.startsWith(base)) {
980 					try {
981 						Files.delete(packDir.resolve(n));
982 					} catch (IOException e) {
983 						LOG.error(e.getMessage(), e);
984 					}
985 				}
986 			}
987 		}
988 	}
989 
990 	private void deleteTempPacksIdx() {
991 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
992 		Instant threshold = Instant.now().minus(1, ChronoUnit.DAYS);
993 		if (!Files.exists(packDir)) {
994 			return;
995 		}
996 		try (DirectoryStream<Path> stream =
997 				Files.newDirectoryStream(packDir, "gc_*_tmp")) { //$NON-NLS-1$
998 			stream.forEach(t -> {
999 				try {
1000 					Instant lastModified = Files.getLastModifiedTime(t)
1001 							.toInstant();
1002 					if (lastModified.isBefore(threshold)) {
1003 						Files.deleteIfExists(t);
1004 					}
1005 				} catch (IOException e) {
1006 					LOG.error(e.getMessage(), e);
1007 				}
1008 			});
1009 		} catch (IOException e) {
1010 			LOG.error(e.getMessage(), e);
1011 		}
1012 	}
1013 
1014 	/**
1015 	 * @param ref
1016 	 *            the ref whose log should be inspected
1017 	 * @param minTime only reflog entries not older than this time are processed
1018 	 * @return the {@link ObjectId}s contained in the reflog
1019 	 * @throws IOException
1020 	 */
1021 	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException {
1022 		ReflogReader reflogReader = repo.getReflogReader(ref.getName());
1023 		if (reflogReader == null) {
1024 			return Collections.emptySet();
1025 		}
1026 		List<ReflogEntry> rlEntries = reflogReader
1027 				.getReverseEntries();
1028 		if (rlEntries == null || rlEntries.isEmpty())
1029 			return Collections.emptySet();
1030 		Set<ObjectId> ret = new HashSet<>();
1031 		for (ReflogEntry e : rlEntries) {
1032 			if (e.getWho().getWhen().getTime() < minTime)
1033 				break;
1034 			ObjectId newId = e.getNewId();
1035 			if (newId != null && !ObjectId.zeroId().equals(newId))
1036 				ret.add(newId);
1037 			ObjectId oldId = e.getOldId();
1038 			if (oldId != null && !ObjectId.zeroId().equals(oldId))
1039 				ret.add(oldId);
1040 		}
1041 		return ret;
1042 	}
1043 
1044 	/**
1045 	 * Returns a collection of all refs and additional refs.
1046 	 *
1047 	 * Additional refs which don't start with "refs/" are not returned because
1048 	 * they should not save objects from being garbage collected. Examples for
1049 	 * such references are ORIG_HEAD, MERGE_HEAD, FETCH_HEAD and
1050 	 * CHERRY_PICK_HEAD.
1051 	 *
1052 	 * @return a collection of refs pointing to live objects.
1053 	 * @throws IOException
1054 	 */
1055 	private Collection<Ref> getAllRefs() throws IOException {
1056 		RefDatabase refdb = repo.getRefDatabase();
1057 		Collection<Ref> refs = refdb.getRefs();
1058 		List<Ref> addl = refdb.getAdditionalRefs();
1059 		if (!addl.isEmpty()) {
1060 			List<Ref> all = new ArrayList<>(refs.size() + addl.size());
1061 			all.addAll(refs);
1062 			// add additional refs which start with refs/
1063 			for (Ref r : addl) {
1064 				checkCancelled();
1065 				if (r.getName().startsWith(Constants.R_REFS)) {
1066 					all.add(r);
1067 				}
1068 			}
1069 			return all;
1070 		}
1071 		return refs;
1072 	}
1073 
1074 	/**
1075 	 * Return the set of those objects in the index which differ from what's in
1076 	 * HEAD
1077 	 *
1078 	 * @return a set of ObjectIds of changed objects in the index
1079 	 * @throws IOException
1080 	 * @throws CorruptObjectException
1081 	 * @throws NoWorkTreeException
1082 	 */
1083 	private Set<ObjectId> listNonHEADIndexObjects()
1084 			throws CorruptObjectException, IOException {
1085 		if (repo.isBare()) {
1086 			return Collections.emptySet();
1087 		}
1088 		try (TreeWalk treeWalk = new TreeWalk(repo)) {
1089 			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
1090 			ObjectId headID = repo.resolve(Constants.HEAD);
1091 			if (headID != null) {
1092 				try (RevWalk revWalk = new RevWalk(repo)) {
1093 					treeWalk.addTree(revWalk.parseTree(headID));
1094 				}
1095 			}
1096 
1097 			treeWalk.setFilter(TreeFilter.ANY_DIFF);
1098 			treeWalk.setRecursive(true);
1099 			Set<ObjectId> ret = new HashSet<>();
1100 
1101 			while (treeWalk.next()) {
1102 				checkCancelled();
1103 				ObjectId objectId = treeWalk.getObjectId(0);
1104 				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
1105 				case FileMode.TYPE_MISSING:
1106 				case FileMode.TYPE_GITLINK:
1107 					continue;
1108 				case FileMode.TYPE_TREE:
1109 				case FileMode.TYPE_FILE:
1110 				case FileMode.TYPE_SYMLINK:
1111 					ret.add(objectId);
1112 					continue;
1113 				default:
1114 					throw new IOException(MessageFormat.format(
1115 							JGitText.get().corruptObjectInvalidMode3,
1116 							String.format("%o", //$NON-NLS-1$
1117 									Integer.valueOf(treeWalk.getRawMode(0))),
1118 							(objectId == null) ? "null" : objectId.name(), //$NON-NLS-1$
1119 							treeWalk.getPathString(), //
1120 							repo.getIndexFile()));
1121 				}
1122 			}
1123 			return ret;
1124 		}
1125 	}
1126 
1127 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
1128 			@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
1129 			Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
1130 			throws IOException {
1131 		checkCancelled();
1132 		File tmpPack = null;
1133 		Map<PackExt, File> tmpExts = new TreeMap<>((o1, o2) -> {
1134 			// INDEX entries must be returned last, so the pack
1135 			// scanner does not pick up the new pack until all the
1136 			// PackExt entries have been written.
1137 			if (o1 == o2) {
1138 				return 0;
1139 			}
1140 			if (o1 == PackExt.INDEX) {
1141 				return 1;
1142 			}
1143 			if (o2 == PackExt.INDEX) {
1144 				return -1;
1145 			}
1146 			return Integer.signum(o1.hashCode() - o2.hashCode());
1147 		});
1148 		try (PackWriter pw = new PackWriter(
1149 				pconfig,
1150 				repo.newObjectReader())) {
1151 			// prepare the PackWriter
1152 			pw.setDeltaBaseAsOffset(true);
1153 			pw.setReuseDeltaCommits(false);
1154 			if (tagTargets != null) {
1155 				pw.setTagTargets(tagTargets);
1156 			}
1157 			if (excludeObjects != null)
1158 				for (ObjectIdSet idx : excludeObjects)
1159 					pw.excludeObjects(idx);
1160 			pw.preparePack(pm, want, have, PackWriter.NONE, tags);
1161 			if (pw.getObjectCount() == 0)
1162 				return null;
1163 			checkCancelled();
1164 
1165 			// create temporary files
1166 			String id = pw.computeName().getName();
1167 			File packdir = repo.getObjectDatabase().getPackDirectory();
1168 			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir); //$NON-NLS-1$ //$NON-NLS-2$
1169 			final String tmpBase = tmpPack.getName()
1170 					.substring(0, tmpPack.getName().lastIndexOf('.'));
1171 			File tmpIdx = new File(packdir, tmpBase + ".idx_tmp"); //$NON-NLS-1$
1172 			tmpExts.put(INDEX, tmpIdx);
1173 
1174 			if (!tmpIdx.createNewFile())
1175 				throw new IOException(MessageFormat.format(
1176 						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
1177 
1178 			// write the packfile
1179 			try (FileOutputStream fos = new FileOutputStream(tmpPack);
1180 					FileChannel channel = fos.getChannel();
1181 					OutputStream channelStream = Channels
1182 							.newOutputStream(channel)) {
1183 				pw.writePack(pm, pm, channelStream);
1184 				channel.force(true);
1185 			}
1186 
1187 			// write the packindex
1188 			try (FileOutputStream fos = new FileOutputStream(tmpIdx);
1189 					FileChannel idxChannel = fos.getChannel();
1190 					OutputStream idxStream = Channels
1191 							.newOutputStream(idxChannel)) {
1192 				pw.writeIndex(idxStream);
1193 				idxChannel.force(true);
1194 			}
1195 
1196 			if (pw.prepareBitmapIndex(pm)) {
1197 				File tmpBitmapIdx = new File(packdir, tmpBase + ".bitmap_tmp"); //$NON-NLS-1$
1198 				tmpExts.put(BITMAP_INDEX, tmpBitmapIdx);
1199 
1200 				if (!tmpBitmapIdx.createNewFile())
1201 					throw new IOException(MessageFormat.format(
1202 							JGitText.get().cannotCreateIndexfile,
1203 							tmpBitmapIdx.getPath()));
1204 
1205 				try (FileOutputStream fos = new FileOutputStream(tmpBitmapIdx);
1206 						FileChannel idxChannel = fos.getChannel();
1207 						OutputStream idxStream = Channels
1208 								.newOutputStream(idxChannel)) {
1209 					pw.writeBitmapIndex(idxStream);
1210 					idxChannel.force(true);
1211 				}
1212 			}
1213 
1214 			// rename the temporary files to real files
1215 			File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
1216 
1217 			repo.getObjectDatabase().closeAllPackHandles(realPack);
1218 			tmpPack.setReadOnly();
1219 
1220 			FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
1221 			for (Map.Entry<PackExt, File> tmpEntry : tmpExts.entrySet()) {
1222 				File tmpExt = tmpEntry.getValue();
1223 				tmpExt.setReadOnly();
1224 
1225 				File realExt = nameFor(id,
1226 						"." + tmpEntry.getKey().getExtension()); //$NON-NLS-1$
1227 				try {
1228 					FileUtils.rename(tmpExt, realExt,
1229 							StandardCopyOption.ATOMIC_MOVE);
1230 				} catch (IOException e) {
1231 					File newExt = new File(realExt.getParentFile(),
1232 							realExt.getName() + ".new"); //$NON-NLS-1$
1233 					try {
1234 						FileUtils.rename(tmpExt, newExt,
1235 								StandardCopyOption.ATOMIC_MOVE);
1236 					} catch (IOException e2) {
1237 						newExt = tmpExt;
1238 						e = e2;
1239 					}
1240 					throw new IOException(MessageFormat.format(
1241 							JGitText.get().panicCantRenameIndexFile, newExt,
1242 							realExt), e);
1243 				}
1244 			}
1245 			boolean interrupted = false;
1246 			try {
1247 				FileSnapshot snapshot = FileSnapshot.save(realPack);
1248 				if (pconfig.doWaitPreventRacyPack(snapshot.size())) {
1249 					snapshot.waitUntilNotRacy();
1250 				}
1251 			} catch (InterruptedException e) {
1252 				interrupted = true;
1253 			}
1254 			try {
1255 				return repo.getObjectDatabase().openPack(realPack);
1256 			} finally {
1257 				if (interrupted) {
1258 					// Re-set interrupted flag
1259 					Thread.currentThread().interrupt();
1260 				}
1261 			}
1262 		} finally {
1263 			if (tmpPack != null && tmpPack.exists())
1264 				tmpPack.delete();
1265 			for (File tmpExt : tmpExts.values()) {
1266 				if (tmpExt.exists())
1267 					tmpExt.delete();
1268 			}
1269 		}
1270 	}
1271 
1272 	private File nameFor(String name, String ext) {
1273 		File packdir = repo.getObjectDatabase().getPackDirectory();
1274 		return new File(packdir, "pack-" + name + ext); //$NON-NLS-1$
1275 	}
1276 
1277 	private void checkCancelled() throws CancelledException {
1278 		if (pm.isCancelled() || Thread.currentThread().isInterrupted()) {
1279 			throw new CancelledException(JGitText.get().operationCanceled);
1280 		}
1281 	}
1282 
1283 	/**
1284 	 * A class holding statistical data for a FileRepository regarding how many
1285 	 * objects are stored as loose or packed objects
1286 	 */
1287 	public static class RepoStatistics {
1288 		/**
1289 		 * The number of objects stored in pack files. If the same object is
1290 		 * stored in multiple pack files then it is counted as often as it
1291 		 * occurs in pack files.
1292 		 */
1293 		public long numberOfPackedObjects;
1294 
1295 		/**
1296 		 * The number of pack files
1297 		 */
1298 		public long numberOfPackFiles;
1299 
1300 		/**
1301 		 * The number of objects stored as loose objects.
1302 		 */
1303 		public long numberOfLooseObjects;
1304 
1305 		/**
1306 		 * The sum of the sizes of all files used to persist loose objects.
1307 		 */
1308 		public long sizeOfLooseObjects;
1309 
1310 		/**
1311 		 * The sum of the sizes of all pack files.
1312 		 */
1313 		public long sizeOfPackedObjects;
1314 
1315 		/**
1316 		 * The number of loose refs.
1317 		 */
1318 		public long numberOfLooseRefs;
1319 
1320 		/**
1321 		 * The number of refs stored in pack files.
1322 		 */
1323 		public long numberOfPackedRefs;
1324 
1325 		/**
1326 		 * The number of bitmaps in the bitmap indices.
1327 		 */
1328 		public long numberOfBitmaps;
1329 
1330 		@Override
1331 		public String toString() {
1332 			final StringBuilder b = new StringBuilder();
1333 			b.append("numberOfPackedObjects=").append(numberOfPackedObjects); //$NON-NLS-1$
1334 			b.append(", numberOfPackFiles=").append(numberOfPackFiles); //$NON-NLS-1$
1335 			b.append(", numberOfLooseObjects=").append(numberOfLooseObjects); //$NON-NLS-1$
1336 			b.append(", numberOfLooseRefs=").append(numberOfLooseRefs); //$NON-NLS-1$
1337 			b.append(", numberOfPackedRefs=").append(numberOfPackedRefs); //$NON-NLS-1$
1338 			b.append(", sizeOfLooseObjects=").append(sizeOfLooseObjects); //$NON-NLS-1$
1339 			b.append(", sizeOfPackedObjects=").append(sizeOfPackedObjects); //$NON-NLS-1$
1340 			b.append(", numberOfBitmaps=").append(numberOfBitmaps); //$NON-NLS-1$
1341 			return b.toString();
1342 		}
1343 	}
1344 
1345 	/**
1346 	 * Returns information about objects and pack files for a FileRepository.
1347 	 *
1348 	 * @return information about objects and pack files for a FileRepository
1349 	 * @throws java.io.IOException
1350 	 */
1351 	public RepoStatistics getStatistics() throws IOException {
1352 		RepoStatistics ret = new RepoStatistics();
1353 		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
1354 		for (PackFile f : packs) {
1355 			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
1356 			ret.numberOfPackFiles++;
1357 			ret.sizeOfPackedObjects += f.getPackFile().length();
1358 			if (f.getBitmapIndex() != null)
1359 				ret.numberOfBitmaps += f.getBitmapIndex().getBitmapCount();
1360 		}
1361 		File objDir = repo.getObjectsDirectory();
1362 		String[] fanout = objDir.list();
1363 		if (fanout != null && fanout.length > 0) {
1364 			for (String d : fanout) {
1365 				if (d.length() != 2)
1366 					continue;
1367 				File[] entries = new File(objDir, d).listFiles();
1368 				if (entries == null)
1369 					continue;
1370 				for (File f : entries) {
1371 					if (f.getName().length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
1372 						continue;
1373 					ret.numberOfLooseObjects++;
1374 					ret.sizeOfLooseObjects += f.length();
1375 				}
1376 			}
1377 		}
1378 
1379 		RefDatabase refDb = repo.getRefDatabase();
1380 		for (Ref r : refDb.getRefs()) {
1381 			Storage storage = r.getStorage();
1382 			if (storage == Storage.LOOSE || storage == Storage.LOOSE_PACKED)
1383 				ret.numberOfLooseRefs++;
1384 			if (storage == Storage.PACKED || storage == Storage.LOOSE_PACKED)
1385 				ret.numberOfPackedRefs++;
1386 		}
1387 
1388 		return ret;
1389 	}
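	// Illustrative sketch: the statistics can drive a manual gc decision; the
	// thresholds below mirror the gc.auto and gc.autopacklimit defaults used
	// elsewhere in this class ("db" is an existing FileRepository):
	//
	//   RepoStatistics stats = new GC(db).getStatistics();
	//   boolean worthIt = stats.numberOfLooseObjects > 6700
	//   		|| stats.numberOfPackFiles > 50;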
1390 
1391 	/**
1392 	 * Set the progress monitor used for garbage collection methods.
1393 	 *
1394 	 * @param pm a {@link org.eclipse.jgit.lib.ProgressMonitor} object.
1395 	 * @return this
1396 	 */
1397 	public GC setProgressMonitor(ProgressMonitor pm) {
1398 		this.pm = (pm == null) ? NullProgressMonitor.INSTANCE : pm;
1399 		return this;
1400 	}
1401 
1402 	/**
1403 	 * During gc() or prune() each unreferenced, loose object which has been
1404 	 * created or modified in the last <code>expireAgeMillis</code> milliseconds
1405 	 * will not be pruned. Only older objects may be pruned. If set to 0 then
1406 	 * every object is a candidate for pruning.
1407 	 *
1408 	 * @param expireAgeMillis
1409 	 *            minimal age of objects to be pruned in milliseconds.
1410 	 */
1411 	public void setExpireAgeMillis(long expireAgeMillis) {
1412 		this.expireAgeMillis = expireAgeMillis;
1413 		expire = null;
1414 	}
1415 
1416 	/**
1417 	 * During gc() or prune() packfiles which are created or modified in the
1418 	 * last <code>packExpireAgeMillis</code> milliseconds will not be deleted.
1419 	 * Only older packfiles may be deleted. If set to 0 then every packfile is a
1420 	 * candidate for deletion.
1421 	 *
1422 	 * @param packExpireAgeMillis
1423 	 *            minimal age of packfiles to be deleted in milliseconds.
1424 	 */
1425 	public void setPackExpireAgeMillis(long packExpireAgeMillis) {
1426 		this.packExpireAgeMillis = packExpireAgeMillis;
1427 		packExpire = null;
1428 	}
1429 
1430 	/**
1431 	 * Set the PackConfig used when (re-)writing packfiles. This makes it
1432 	 * possible to influence how packs are written and to implement something
1433 	 * similar to "git gc --aggressive".
1434 	 *
1435 	 * @param pconfig
1436 	 *            the {@link org.eclipse.jgit.storage.pack.PackConfig} used when
1437 	 *            writing packs
1438 	 */
1439 	public void setPackConfig(@NonNull PackConfig pconfig) {
1440 		this.pconfig = pconfig;
1441 	}
1442 
1443 	/**
1444 	 * During gc() or prune() each unreferenced, loose object which has been
1445 	 * created or modified after or at <code>expire</code> will not be pruned.
1446 	 * Only older objects may be pruned. If set to null then every object is a
1447 	 * candidate for pruning.
1448 	 *
1449 	 * @param expire
1450 	 *            instant in time which defines object expiration
1451 	 *            objects with modification time before this instant are expired
1452 	 *            objects with modification time newer or equal to this instant
1453 	 *            are not expired
1454 	 */
1455 	public void setExpire(Date expire) {
1456 		this.expire = expire;
1457 		expireAgeMillis = -1;
1458 	}
1459 
1460 	/**
1461 	 * During gc() or prune() packfiles which are created or modified after or
1462 	 * at <code>packExpire</code> will not be deleted. Only older packfiles may
1463 	 * be deleted. If set to null then every packfile is a candidate for
1464 	 * deletion.
1465 	 *
1466 	 * @param packExpire
1467 	 *            instant in time which defines packfile expiration
1468 	 */
1469 	public void setPackExpire(Date packExpire) {
1470 		this.packExpire = packExpire;
1471 		packExpireAgeMillis = -1;
1472 	}
1473 
1474 	/**
1475 	 * Set the {@code gc --auto} option.
1476 	 *
1477 	 * With this option, gc checks whether any housekeeping is required; if not,
1478 	 * it exits without performing any work. Some JGit commands run
1479 	 * {@code gc --auto} after performing operations that could create many
1480 	 * loose objects.
1481 	 * <p>
1482 	 * Housekeeping is required if there are too many loose objects or too many
1483 	 * packs in the repository. If the number of loose objects exceeds the value
1484 	 * of the gc.auto option JGit GC consolidates all existing packs into a
1485 	 * single pack (equivalent to {@code -A} option), whereas git-core would
1486 	 * combine all loose objects into a single pack using {@code repack -d -l}.
1487 	 * Setting the value of {@code gc.auto} to 0 disables automatic packing of
1488 	 * loose objects.
1489 	 * <p>
1490 	 * If the number of packs exceeds the value of {@code gc.autoPackLimit},
1491 	 * then existing packs (except those marked with a .keep file) are
1492 	 * consolidated into a single pack by using the {@code -A} option of repack.
1493 	 * Setting {@code gc.autoPackLimit} to 0 disables automatic consolidation of
1494 	 * packs.
1495 	 * <p>
1496 	 * Like native git, the following JGit commands run auto gc:
1497 	 * <ul>
1498 	 * <li>fetch</li>
1499 	 * <li>merge</li>
1500 	 * <li>rebase</li>
1501 	 * <li>receive-pack</li>
1502 	 * </ul>
1503 	 * The auto gc for receive-pack can be suppressed by setting the config
1504 	 * option {@code receive.autogc = false}
1505 	 *
1506 	 * @param auto
1507 	 *            defines whether gc should do automatic housekeeping
1508 	 */
1509 	public void setAuto(boolean auto) {
1510 		this.automatic = auto;
1511 	}
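	// Illustrative sketch of how a porcelain command would trigger auto gc
	// (setBackground is package-private, so this works only from within this
	// package; "db" is an existing FileRepository):
	//
	//   GC gc = new GC(db);
	//   gc.setAuto(true);
	//   gc.setBackground(true);
	//   gc.gc(); // returns immediately; errors are only reported in gc.log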
1512 
1513 	/**
1514 	 * @param background
1515 	 *            whether to run the gc in a background thread.
1516 	 */
1517 	void setBackground(boolean background) {
1518 		this.background = background;
1519 	}
1520 
1521 	private boolean needGc() {
1522 		if (tooManyPacks()) {
1523 			addRepackAllOption();
1524 		} else {
1525 			return tooManyLooseObjects();
1526 		}
1527 		// TODO run pre-auto-gc hook, if it fails return false
1528 		return true;
1529 	}
1530 
1531 	private void addRepackAllOption() {
1532 		// TODO: if JGit GC is enhanced to support repack's option -l this
1533 		// method needs to be implemented
1534 	}
1535 
1536 	/**
1537 	 * @return {@code true} if number of packs > gc.autopacklimit (default 50)
1538 	 */
1539 	boolean tooManyPacks() {
1540 		int autopacklimit = repo.getConfig().getInt(
1541 				ConfigConstants.CONFIG_GC_SECTION,
1542 				ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
1543 				DEFAULT_AUTOPACKLIMIT);
1544 		if (autopacklimit <= 0) {
1545 			return false;
1546 		}
1547 		// JGit always creates two packfiles, one for the objects reachable from
1548 		// branches, and another one for the rest
1549 		return repo.getObjectDatabase().getPacks().size() > (autopacklimit + 1);
1550 	}
1551 
1552 	/**
1553 	 * Quickly estimate the number of loose objects; SHA-1 is distributed evenly,
1554 	 * so counting objects in one directory (bucket 17) is sufficient.
1555 	 *
1556 	 * @return {@code true} if number of loose objects > gc.auto (default 6700)
1557 	 */
1558 	boolean tooManyLooseObjects() {
1559 		int auto = getLooseObjectLimit();
1560 		if (auto <= 0) {
1561 			return false;
1562 		}
1563 		int n = 0;
1564 		int threshold = (auto + 255) / 256;
1565 		Path dir = repo.getObjectsDirectory().toPath().resolve("17"); //$NON-NLS-1$
1566 		if (!dir.toFile().exists()) {
1567 			return false;
1568 		}
1569 		try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, file -> {
1570 					Path fileName = file.getFileName();
1571 					return file.toFile().isFile() && fileName != null
1572 							&& PATTERN_LOOSE_OBJECT.matcher(fileName.toString())
1573 									.matches();
1574 				})) {
1575 			for (Iterator<Path> iter = stream.iterator(); iter.hasNext(); iter
1576 					.next()) {
1577 				if (++n > threshold) {
1578 					return true;
1579 				}
1580 			}
1581 		} catch (IOException e) {
1582 			LOG.error(e.getMessage(), e);
1583 		}
1584 		return false;
1585 	}
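	// Worked example of the estimate above: with the default gc.auto of 6700
	// the per-bucket threshold is (6700 + 255) / 256 = 27, so encountering a
	// 28th loose object under objects/17 is taken as evidence of roughly 6700
	// or more loose objects in total.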
1586 
1587 	private int getLooseObjectLimit() {
1588 		return repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
1589 				ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
1590 	}
1591 }