View Javadoc
1   /*
2    * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
3    * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org> and others
4    *
5    * This program and the accompanying materials are made available under the
6    * terms of the Eclipse Distribution License v. 1.0 which is available at
7    * https://www.eclipse.org/org/documents/edl-v10.php.
8    *
9    * SPDX-License-Identifier: BSD-3-Clause
10   */
11  package org.eclipse.jgit.internal.storage.file;
12  
13  import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
14  import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
15  
16  import java.io.File;
17  import java.io.FileOutputStream;
18  import java.io.IOException;
19  import java.io.OutputStream;
20  import java.io.PrintWriter;
21  import java.io.StringWriter;
22  import java.nio.channels.Channels;
23  import java.nio.channels.FileChannel;
24  import java.nio.file.DirectoryNotEmptyException;
25  import java.nio.file.DirectoryStream;
26  import java.nio.file.Files;
27  import java.nio.file.Path;
28  import java.nio.file.StandardCopyOption;
29  import java.text.MessageFormat;
30  import java.text.ParseException;
31  import java.time.Instant;
32  import java.time.temporal.ChronoUnit;
33  import java.util.ArrayList;
34  import java.util.Collection;
35  import java.util.Collections;
36  import java.util.Comparator;
37  import java.util.Date;
38  import java.util.HashMap;
39  import java.util.HashSet;
40  import java.util.Iterator;
41  import java.util.LinkedList;
42  import java.util.List;
43  import java.util.Map;
44  import java.util.Objects;
45  import java.util.Set;
46  import java.util.TreeMap;
47  import java.util.concurrent.Callable;
48  import java.util.concurrent.ExecutorService;
49  import java.util.regex.Pattern;
50  import java.util.stream.Collectors;
51  import java.util.stream.Stream;
52  
53  import org.eclipse.jgit.annotations.NonNull;
54  import org.eclipse.jgit.dircache.DirCacheIterator;
55  import org.eclipse.jgit.errors.CancelledException;
56  import org.eclipse.jgit.errors.CorruptObjectException;
57  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
58  import org.eclipse.jgit.errors.MissingObjectException;
59  import org.eclipse.jgit.errors.NoWorkTreeException;
60  import org.eclipse.jgit.internal.JGitText;
61  import org.eclipse.jgit.internal.storage.pack.PackExt;
62  import org.eclipse.jgit.internal.storage.pack.PackWriter;
63  import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
64  import org.eclipse.jgit.lib.ConfigConstants;
65  import org.eclipse.jgit.lib.Constants;
66  import org.eclipse.jgit.lib.FileMode;
67  import org.eclipse.jgit.lib.NullProgressMonitor;
68  import org.eclipse.jgit.lib.ObjectId;
69  import org.eclipse.jgit.lib.ObjectIdSet;
70  import org.eclipse.jgit.lib.ObjectLoader;
71  import org.eclipse.jgit.lib.ObjectReader;
72  import org.eclipse.jgit.lib.ProgressMonitor;
73  import org.eclipse.jgit.lib.Ref;
74  import org.eclipse.jgit.lib.Ref.Storage;
75  import org.eclipse.jgit.lib.RefDatabase;
76  import org.eclipse.jgit.lib.ReflogEntry;
77  import org.eclipse.jgit.lib.ReflogReader;
78  import org.eclipse.jgit.lib.internal.WorkQueue;
79  import org.eclipse.jgit.revwalk.ObjectWalk;
80  import org.eclipse.jgit.revwalk.RevObject;
81  import org.eclipse.jgit.revwalk.RevWalk;
82  import org.eclipse.jgit.storage.pack.PackConfig;
83  import org.eclipse.jgit.treewalk.TreeWalk;
84  import org.eclipse.jgit.treewalk.filter.TreeFilter;
85  import org.eclipse.jgit.util.FileUtils;
86  import org.eclipse.jgit.util.GitDateParser;
87  import org.eclipse.jgit.util.SystemReader;
88  import org.slf4j.Logger;
89  import org.slf4j.LoggerFactory;
90  
91  /**
92   * A garbage collector for git
93   * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. Instances of
94   * this class are not thread-safe. Don't use the same instance from multiple
95   * threads.
96   *
97   * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
98   * adapted to FileRepositories.
99   */
100 public class GC {
101 	private static final Logger LOG = LoggerFactory
102 			.getLogger(GC.class);
103 
104 	private static final String PRUNE_EXPIRE_DEFAULT = "2.weeks.ago"; //$NON-NLS-1$
105 
106 	private static final String PRUNE_PACK_EXPIRE_DEFAULT = "1.hour.ago"; //$NON-NLS-1$
107 
108 	private static final Pattern PATTERN_LOOSE_OBJECT = Pattern
109 			.compile("[0-9a-fA-F]{38}"); //$NON-NLS-1$
110 
111 	private static final String PACK_EXT = "." + PackExt.PACK.getExtension();//$NON-NLS-1$
112 
113 	private static final String BITMAP_EXT = "." //$NON-NLS-1$
114 			+ PackExt.BITMAP_INDEX.getExtension();
115 
116 	private static final String INDEX_EXT = "." + PackExt.INDEX.getExtension(); //$NON-NLS-1$
117 
118 	private static final String KEEP_EXT = "." + PackExt.KEEP.getExtension(); //$NON-NLS-1$
119 
120 	private static final int DEFAULT_AUTOPACKLIMIT = 50;
121 
122 	private static final int DEFAULT_AUTOLIMIT = 6700;
123 
124 	private static volatile ExecutorService executor;
125 
126 	/**
127 	 * Set the executor for running auto-gc in the background. If no executor is
128 	 * set JGit's own WorkQueue will be used.
129 	 *
130 	 * @param e
131 	 *            the executor to be used for running auto-gc
132 	 */
133 	public static void setExecutor(ExecutorService e) {
134 		executor = e;
135 	}
136 
137 	private final FileRepository repo;
138 
139 	private ProgressMonitor pm;
140 
141 	private long expireAgeMillis = -1;
142 
143 	private Date expire;
144 
145 	private long packExpireAgeMillis = -1;
146 
147 	private Date packExpire;
148 
149 	private PackConfig pconfig;
150 
151 	/**
152 	 * the refs which existed during the last call to {@link #repack()}. This is
153 	 * needed during {@link #prune(Set)} where we can optimize by looking at the
154 	 * difference between the current refs and the refs which existed during
155 	 * last {@link #repack()}.
156 	 */
157 	private Collection<Ref> lastPackedRefs;
158 
159 	/**
160 	 * Holds the starting time of the last repack() execution. This is needed in
161 	 * prune() to inspect only those reflog entries which have been added since
162 	 * last repack().
163 	 */
164 	private long lastRepackTime;
165 
166 	/**
167 	 * Whether gc should do automatic housekeeping
168 	 */
169 	private boolean automatic;
170 
171 	/**
172 	 * Whether to run gc in a background thread
173 	 */
174 	private boolean background;
175 
176 	/**
177 	 * Creates a new garbage collector with default values. An expirationTime of
178 	 * two weeks and <code>null</code> as progress monitor will be used.
179 	 *
180 	 * @param repo
181 	 *            the repo to work on
182 	 */
183 	public GC(FileRepository repo) {
184 		this.repo = repo;
185 		this.pconfig = new PackConfig(repo);
186 		this.pm = NullProgressMonitor.INSTANCE;
187 	}
188 
189 	/**
190 	 * Runs a garbage collector on a
191 	 * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. It will
192 	 * <ul>
193 	 * <li>pack loose references into packed-refs</li>
194 	 * <li>repack all reachable objects into new pack files and delete the old
195 	 * pack files</li>
196 	 * <li>prune all loose objects which are now reachable by packs</li>
197 	 * </ul>
198 	 *
199 	 * If {@link #setAuto(boolean)} was set to {@code true} {@code gc} will
200 	 * first check whether any housekeeping is required; if not, it exits
201 	 * without performing any work.
202 	 *
203 	 * If {@link #setBackground(boolean)} was set to {@code true}
204 	 * {@code collectGarbage} will start the gc in the background, and then
205 	 * return immediately. In this case, errors will not be reported except in
206 	 * gc.log.
207 	 *
208 	 * @return the collection of
209 	 *         {@link org.eclipse.jgit.internal.storage.file.PackFile}'s which
210 	 *         are newly created
211 	 * @throws java.io.IOException
212 	 * @throws java.text.ParseException
213 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
214 	 *             parsed
215 	 */
216 	// TODO(ms): change signature and return Future<Collection<PackFile>>
217 	@SuppressWarnings("FutureReturnValueIgnored")
218 	public Collection<PackFile> gc() throws IOException, ParseException {
219 		if (!background) {
220 			return doGc();
221 		}
222 		final GcLogernal/storage/file/GcLog.html#GcLog">GcLog gcLog = new GcLog(repo);
223 		if (!gcLog.lock()) {
224 			// there is already a background gc running
225 			return Collections.emptyList();
226 		}
227 
228 		Callable<Collection<PackFile>> gcTask = () -> {
229 			try {
230 				Collection<PackFile> newPacks = doGc();
231 				if (automatic && tooManyLooseObjects()) {
232 					String message = JGitText.get().gcTooManyUnpruned;
233 					gcLog.write(message);
234 					gcLog.commit();
235 				}
236 				return newPacks;
237 			} catch (IOException | ParseException e) {
238 				try {
239 					gcLog.write(e.getMessage());
240 					StringWriter sw = new StringWriter();
241 					e.printStackTrace(new PrintWriter(sw));
242 					gcLog.write(sw.toString());
243 					gcLog.commit();
244 				} catch (IOException e2) {
245 					e2.addSuppressed(e);
246 					LOG.error(e2.getMessage(), e2);
247 				}
248 			} finally {
249 				gcLog.unlock();
250 			}
251 			return Collections.emptyList();
252 		};
253 		// TODO(ms): change signature and return the Future
254 		executor().submit(gcTask);
255 		return Collections.emptyList();
256 	}
257 
258 	private ExecutorService executor() {
259 		return (executor != null) ? executor : WorkQueue.getExecutor();
260 	}
261 
262 	private Collection<PackFile> doGc() throws IOException, ParseException {
263 		if (automatic && !needGc()) {
264 			return Collections.emptyList();
265 		}
266 		pm.start(6 /* tasks */);
267 		packRefs();
268 		// TODO: implement reflog_expire(pm, repo);
269 		Collection<PackFile> newPacks = repack();
270 		prune(Collections.emptySet());
271 		// TODO: implement rerere_gc(pm);
272 		return newPacks;
273 	}
274 
275 	/**
276 	 * Loosen objects in a pack file which are not also in the newly-created
277 	 * pack files.
278 	 *
279 	 * @param inserter
280 	 * @param reader
281 	 * @param pack
282 	 * @param existing
283 	 * @throws IOException
284 	 */
285 	private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader, PackFile pack, HashSet<ObjectId> existing)
286 			throws IOException {
287 		for (PackIndex.MutableEntry entry : pack) {
288 			ObjectId oid = entry.toObjectId();
289 			if (existing.contains(oid)) {
290 				continue;
291 			}
292 			existing.add(oid);
293 			ObjectLoader loader = reader.open(oid);
294 			inserter.insert(loader.getType(),
295 					loader.getSize(),
296 					loader.openStream(),
297 					true /* create this object even though it's a duplicate */);
298 		}
299 	}
300 
301 	/**
302 	 * Delete old pack files. What is 'old' is defined by specifying a set of
303 	 * old pack files and a set of new pack files. Each pack file contained in
304 	 * old pack files but not contained in new pack files will be deleted. If
305 	 * preserveOldPacks is set, keep a copy of the pack file in the preserve
306 	 * directory. If an expirationDate is set then pack files which are younger
307 	 * than the expirationDate will not be deleted nor preserved.
308 	 * <p>
309 	 * If we're not immediately expiring loose objects, loosen any objects
310 	 * in the old pack files which aren't in the new pack files.
311 	 *
312 	 * @param oldPacks
313 	 * @param newPacks
314 	 * @throws ParseException
315 	 * @throws IOException
316 	 */
317 	private void deleteOldPacks(Collection<PackFile> oldPacks,
318 			Collection<PackFile> newPacks) throws ParseException, IOException {
319 		HashSet<ObjectId> ids = new HashSet<>();
320 		for (PackFile pack : newPacks) {
321 			for (PackIndex.MutableEntry entry : pack) {
322 				ids.add(entry.toObjectId());
323 			}
324 		}
325 		ObjectReader reader = repo.newObjectReader();
326 		ObjectDirectory dir = repo.getObjectDatabase();
327 		ObjectDirectoryInserter inserter = dir.newInserter();
328 		boolean shouldLoosen = !"now".equals(getPruneExpireStr()) && //$NON-NLS-1$
329 			getExpireDate() < Long.MAX_VALUE;
330 
331 		prunePreserved();
332 		long packExpireDate = getPackExpireDate();
333 		oldPackLoop: for (PackFile oldPack : oldPacks) {
334 			checkCancelled();
335 			String oldName = oldPack.getPackName();
336 			// check whether an old pack file is also among the list of new
337 			// pack files. Then we must not delete it.
338 			for (PackFile newPack : newPacks)
339 				if (oldName.equals(newPack.getPackName()))
340 					continue oldPackLoop;
341 
342 			if (!oldPack.shouldBeKept()
343 					&& repo.getFS()
344 							.lastModifiedInstant(oldPack.getPackFile())
345 							.toEpochMilli() < packExpireDate) {
346 				oldPack.close();
347 				if (shouldLoosen) {
348 					loosen(inserter, reader, oldPack, ids);
349 				}
350 				prunePack(oldName);
351 			}
352 		}
353 
354 		// close the complete object database. That's my only chance to force
355 		// rescanning and to detect that certain pack files are now deleted.
356 		repo.getObjectDatabase().close();
357 	}
358 
359 	/**
360 	 * Deletes old pack file, unless 'preserve-oldpacks' is set, in which case it
361 	 * moves the pack file to the preserved directory
362 	 *
363 	 * @param packFile
364 	 * @param packName
365 	 * @param ext
366 	 * @param deleteOptions
367 	 * @throws IOException
368 	 */
369 	private void removeOldPack(File packFile, String packName, PackExt ext,
370 			int deleteOptions) throws IOException {
371 		if (pconfig.isPreserveOldPacks()) {
372 			File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
373 			FileUtils.mkdir(oldPackDir, true);
374 
375 			String oldPackName = "pack-" + packName + ".old-" + ext.getExtension();  //$NON-NLS-1$ //$NON-NLS-2$
376 			File oldPackFile = new File(oldPackDir, oldPackName);
377 			FileUtils.rename(packFile, oldPackFile);
378 		} else {
379 			FileUtils.delete(packFile, deleteOptions);
380 		}
381 	}
382 
383 	/**
384 	 * Delete the preserved directory including all pack files within
385 	 */
386 	private void prunePreserved() {
387 		if (pconfig.isPrunePreserved()) {
388 			try {
389 				FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
390 						FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
391 			} catch (IOException e) {
392 				// Deletion of the preserved pack files failed. Silently return.
393 			}
394 		}
395 	}
396 
397 	/**
398 	 * Delete files associated with a single pack file. First try to delete the
399 	 * ".pack" file because on some platforms the ".pack" file may be locked and
400 	 * can't be deleted. In such a case it is better to detect this early and
401 	 * give up on deleting files for this packfile. Otherwise we may delete the
402 	 * ".index" file and when failing to delete the ".pack" file we are left
403 	 * with a ".pack" file without a ".index" file.
404 	 *
405 	 * @param packName
406 	 */
407 	private void prunePack(String packName) {
408 		PackExt[] extensions = PackExt.values();
409 		try {
410 			// Delete the .pack file first and if this fails give up on deleting
411 			// the other files
412 			int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
413 			for (PackExt ext : extensions)
414 				if (PackExt.PACK.equals(ext)) {
415 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
416 					removeOldPack(f, packName, ext, deleteOptions);
417 					break;
418 				}
419 			// The .pack file has been deleted. Delete as many as the other
420 			// files as you can.
421 			deleteOptions |= FileUtils.IGNORE_ERRORS;
422 			for (PackExt ext : extensions) {
423 				if (!PackExt.PACK.equals(ext)) {
424 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
425 					removeOldPack(f, packName, ext, deleteOptions);
426 				}
427 			}
428 		} catch (IOException e) {
429 			// Deletion of the .pack file failed. Silently return.
430 		}
431 	}
432 
433 	/**
434 	 * Like "git prune-packed" this method tries to prune all loose objects
435 	 * which can be found in packs. If certain objects can't be pruned (e.g.
436 	 * because the filesystem delete operation fails) this is silently ignored.
437 	 *
438 	 * @throws java.io.IOException
439 	 */
440 	public void prunePacked() throws IOException {
441 		ObjectDirectory objdb = repo.getObjectDatabase();
442 		Collection<PackFile> packs = objdb.getPacks();
443 		File objects = repo.getObjectsDirectory();
444 		String[] fanout = objects.list();
445 
446 		if (fanout != null && fanout.length > 0) {
447 			pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
448 			try {
449 				for (String d : fanout) {
450 					checkCancelled();
451 					pm.update(1);
452 					if (d.length() != 2)
453 						continue;
454 					String[] entries = new File(objects, d).list();
455 					if (entries == null)
456 						continue;
457 					for (String e : entries) {
458 						checkCancelled();
459 						if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
460 							continue;
461 						ObjectId id;
462 						try {
463 							id = ObjectId.fromString(d + e);
464 						} catch (IllegalArgumentException notAnObject) {
465 							// ignoring the file that does not represent loose
466 							// object
467 							continue;
468 						}
469 						boolean found = false;
470 						for (PackFile p : packs) {
471 							checkCancelled();
472 							if (p.hasObject(id)) {
473 								found = true;
474 								break;
475 							}
476 						}
477 						if (found)
478 							FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
479 									| FileUtils.SKIP_MISSING
480 									| FileUtils.IGNORE_ERRORS);
481 					}
482 				}
483 			} finally {
484 				pm.endTask();
485 			}
486 		}
487 	}
488 
489 	/**
490 	 * Like "git prune" this method tries to prune all loose objects which are
491 	 * unreferenced. If certain objects can't be pruned (e.g. because the
492 	 * filesystem delete operation fails) this is silently ignored.
493 	 *
494 	 * @param objectsToKeep
495 	 *            a set of objects which should explicitly not be pruned
496 	 * @throws java.io.IOException
497 	 * @throws java.text.ParseException
498 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
499 	 *             parsed
500 	 */
501 	public void prune(Set<ObjectId> objectsToKeep) throws IOException,
502 			ParseException {
503 		long expireDate = getExpireDate();
504 
505 		// Collect all loose objects which are old enough, not referenced from
506 		// the index and not in objectsToKeep
507 		Map<ObjectId, File> deletionCandidates = new HashMap<>();
508 		Set<ObjectId> indexObjects = null;
509 		File objects = repo.getObjectsDirectory();
510 		String[] fanout = objects.list();
511 		if (fanout == null || fanout.length == 0) {
512 			return;
513 		}
514 		pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
515 				fanout.length);
516 		try {
517 			for (String d : fanout) {
518 				checkCancelled();
519 				pm.update(1);
520 				if (d.length() != 2)
521 					continue;
522 				File dir = new File(objects, d);
523 				File[] entries = dir.listFiles();
524 				if (entries == null || entries.length == 0) {
525 					FileUtils.delete(dir, FileUtils.IGNORE_ERRORS);
526 					continue;
527 				}
528 				for (File f : entries) {
529 					checkCancelled();
530 					String fName = f.getName();
531 					if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
532 						continue;
533 					if (repo.getFS().lastModifiedInstant(f)
534 							.toEpochMilli() >= expireDate) {
535 						continue;
536 					}
537 					try {
538 						ObjectId id = ObjectId.fromString(d + fName);
539 						if (objectsToKeep.contains(id))
540 							continue;
541 						if (indexObjects == null)
542 							indexObjects = listNonHEADIndexObjects();
543 						if (indexObjects.contains(id))
544 							continue;
545 						deletionCandidates.put(id, f);
546 					} catch (IllegalArgumentException notAnObject) {
547 						// ignoring the file that does not represent loose
548 						// object
549 					}
550 				}
551 			}
552 		} finally {
553 			pm.endTask();
554 		}
555 
556 		if (deletionCandidates.isEmpty()) {
557 			return;
558 		}
559 
560 		checkCancelled();
561 
562 		// From the set of current refs remove all those which have been handled
563 		// during last repack(). Only those refs will survive which have been
564 		// added or modified since the last repack. Only these can save existing
565 		// loose refs from being pruned.
566 		Collection<Ref> newRefs;
567 		if (lastPackedRefs == null || lastPackedRefs.isEmpty())
568 			newRefs = getAllRefs();
569 		else {
570 			Map<String, Ref> last = new HashMap<>();
571 			for (Ref r : lastPackedRefs) {
572 				last.put(r.getName(), r);
573 			}
574 			newRefs = new ArrayList<>();
575 			for (Ref r : getAllRefs()) {
576 				Ref old = last.get(r.getName());
577 				if (!equals(r, old)) {
578 					newRefs.add(r);
579 				}
580 			}
581 		}
582 
583 		if (!newRefs.isEmpty()) {
584 			// There are new/modified refs! Check which loose objects are now
585 			// referenced by these modified refs (or their reflogentries).
586 			// Remove these loose objects
587 			// from the deletionCandidates. When the last candidate is removed
588 			// leave this method.
589 			ObjectWalk w = new ObjectWalk(repo);
590 			try {
591 				for (Ref cr : newRefs) {
592 					checkCancelled();
593 					w.markStart(w.parseAny(cr.getObjectId()));
594 				}
595 				if (lastPackedRefs != null)
596 					for (Ref lpr : lastPackedRefs) {
597 						w.markUninteresting(w.parseAny(lpr.getObjectId()));
598 					}
599 				removeReferenced(deletionCandidates, w);
600 			} finally {
601 				w.dispose();
602 			}
603 		}
604 
605 		if (deletionCandidates.isEmpty())
606 			return;
607 
608 		// Since we have not left the method yet there are still
609 		// deletionCandidates. Last chance for these objects not to be pruned is
610 		// that they are referenced by reflog entries. Even refs which currently
611 		// point to the same object as during last repack() may have
612 		// additional reflog entries not handled during last repack()
613 		ObjectWalk w = new ObjectWalk(repo);
614 		try {
615 			for (Ref ar : getAllRefs())
616 				for (ObjectId id : listRefLogObjects(ar, lastRepackTime)) {
617 					checkCancelled();
618 					w.markStart(w.parseAny(id));
619 				}
620 			if (lastPackedRefs != null)
621 				for (Ref lpr : lastPackedRefs) {
622 					checkCancelled();
623 					w.markUninteresting(w.parseAny(lpr.getObjectId()));
624 				}
625 			removeReferenced(deletionCandidates, w);
626 		} finally {
627 			w.dispose();
628 		}
629 
630 		if (deletionCandidates.isEmpty())
631 			return;
632 
633 		checkCancelled();
634 
635 		// delete all candidates which have survived: these are unreferenced
636 		// loose objects. Make a last check, though, to avoid deleting objects
637 		// that could have been referenced while the candidates list was being
638 		// built (by an incoming push, for example).
639 		Set<File> touchedFanout = new HashSet<>();
640 		for (File f : deletionCandidates.values()) {
641 			if (f.lastModified() < expireDate) {
642 				f.delete();
643 				touchedFanout.add(f.getParentFile());
644 			}
645 		}
646 
647 		for (File f : touchedFanout) {
648 			FileUtils.delete(f,
649 					FileUtils.EMPTY_DIRECTORIES_ONLY | FileUtils.IGNORE_ERRORS);
650 		}
651 
652 		repo.getObjectDatabase().close();
653 	}
654 
655 	private long getExpireDate() throws ParseException {
656 		long expireDate = Long.MAX_VALUE;
657 
658 		if (expire == null && expireAgeMillis == -1) {
659 			String pruneExpireStr = getPruneExpireStr();
660 			if (pruneExpireStr == null)
661 				pruneExpireStr = PRUNE_EXPIRE_DEFAULT;
662 			expire = GitDateParser.parse(pruneExpireStr, null, SystemReader
663 					.getInstance().getLocale());
664 			expireAgeMillis = -1;
665 		}
666 		if (expire != null)
667 			expireDate = expire.getTime();
668 		if (expireAgeMillis != -1)
669 			expireDate = System.currentTimeMillis() - expireAgeMillis;
670 		return expireDate;
671 	}
672 
673 	private String getPruneExpireStr() {
674 		return repo.getConfig().getString(
675                         ConfigConstants.CONFIG_GC_SECTION, null,
676                         ConfigConstants.CONFIG_KEY_PRUNEEXPIRE);
677 	}
678 
679 	private long getPackExpireDate() throws ParseException {
680 		long packExpireDate = Long.MAX_VALUE;
681 
682 		if (packExpire == null && packExpireAgeMillis == -1) {
683 			String prunePackExpireStr = repo.getConfig().getString(
684 					ConfigConstants.CONFIG_GC_SECTION, null,
685 					ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE);
686 			if (prunePackExpireStr == null)
687 				prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT;
688 			packExpire = GitDateParser.parse(prunePackExpireStr, null,
689 					SystemReader.getInstance().getLocale());
690 			packExpireAgeMillis = -1;
691 		}
692 		if (packExpire != null)
693 			packExpireDate = packExpire.getTime();
694 		if (packExpireAgeMillis != -1)
695 			packExpireDate = System.currentTimeMillis() - packExpireAgeMillis;
696 		return packExpireDate;
697 	}
698 
699 	/**
700 	 * Remove all entries from a map which key is the id of an object referenced
701 	 * by the given ObjectWalk
702 	 *
703 	 * @param id2File
704 	 * @param w
705 	 * @throws MissingObjectException
706 	 * @throws IncorrectObjectTypeException
707 	 * @throws IOException
708 	 */
709 	private void removeReferenced(Map<ObjectId, File> id2File,
710 			ObjectWalk w) throws MissingObjectException,
711 			IncorrectObjectTypeException, IOException {
712 		RevObject ro = w.next();
713 		while (ro != null) {
714 			checkCancelled();
715 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
716 				return;
717 			}
718 			ro = w.next();
719 		}
720 		ro = w.nextObject();
721 		while (ro != null) {
722 			checkCancelled();
723 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
724 				return;
725 			}
726 			ro = w.nextObject();
727 		}
728 	}
729 
730 	private static boolean equals(Reff" href="../../../../../../org/eclipse/jgit/lib/Ref.html#Ref">Ref r1, Ref r2) {
731 		if (r1 == null || r2 == null) {
732 			return false;
733 		}
734 		if (r1.isSymbolic()) {
735 			return r2.isSymbolic() && r1.getTarget().getName()
736 					.equals(r2.getTarget().getName());
737 		}
738 		return !r2.isSymbolic()
739 				&& Objects.equals(r1.getObjectId(), r2.getObjectId());
740 	}
741 
742 	/**
743 	 * Pack ref storage. For a RefDirectory database, this packs all
744 	 * non-symbolic, loose refs into packed-refs. For Reftable, all of the data
745 	 * is compacted into a single table.
746 	 *
747 	 * @throws java.io.IOException
748 	 */
749 	public void packRefs() throws IOException {
750 		RefDatabase refDb = repo.getRefDatabase();
751 		if (refDb instanceof FileReftableDatabase) {
752 			// TODO: abstract this more cleanly.
753 			pm.beginTask(JGitText.get().packRefs, 1);
754 			try {
755 				((FileReftableDatabase) refDb).compactFully();
756 			} finally {
757 				pm.endTask();
758 			}
759 			return;
760 		}
761 
762 		Collection<Ref> refs = refDb.getRefsByPrefix(Constants.R_REFS);
763 		List<String> refsToBePacked = new ArrayList<>(refs.size());
764 		pm.beginTask(JGitText.get().packRefs, refs.size());
765 		try {
766 			for (Ref ref : refs) {
767 				checkCancelled();
768 				if (!ref.isSymbolic() && ref.getStorage().isLoose())
769 					refsToBePacked.add(ref.getName());
770 				pm.update(1);
771 			}
772 			((RefDirectory) repo.getRefDatabase()).pack(refsToBePacked);
773 		} finally {
774 			pm.endTask();
775 		}
776 	}
777 
778 	/**
779 	 * Packs all objects which reachable from any of the heads into one pack
780 	 * file. Additionally all objects which are not reachable from any head but
781 	 * which are reachable from any of the other refs (e.g. tags), special refs
782 	 * (e.g. FETCH_HEAD) or index are packed into a separate pack file. Objects
783 	 * included in pack files which have a .keep file associated are never
784 	 * repacked. All old pack files which existed before are deleted.
785 	 *
786 	 * @return a collection of the newly created pack files
787 	 * @throws java.io.IOException
788 	 *             when during reading of refs, index, packfiles, objects,
789 	 *             reflog-entries or during writing to the packfiles
790 	 *             {@link java.io.IOException} occurs
791 	 */
792 	public Collection<PackFile> repack() throws IOException {
793 		Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
794 
795 		long time = System.currentTimeMillis();
796 		Collection<Ref> refsBefore = getAllRefs();
797 
798 		Set<ObjectId> allHeadsAndTags = new HashSet<>();
799 		Set<ObjectId> allHeads = new HashSet<>();
800 		Set<ObjectId> allTags = new HashSet<>();
801 		Set<ObjectId> nonHeads = new HashSet<>();
802 		Set<ObjectId> txnHeads = new HashSet<>();
803 		Set<ObjectId> tagTargets = new HashSet<>();
804 		Set<ObjectId> indexObjects = listNonHEADIndexObjects();
805 		RefDatabase refdb = repo.getRefDatabase();
806 
807 		for (Ref ref : refsBefore) {
808 			checkCancelled();
809 			nonHeads.addAll(listRefLogObjects(ref, 0));
810 			if (ref.isSymbolic() || ref.getObjectId() == null) {
811 				continue;
812 			}
813 			if (isHead(ref)) {
814 				allHeads.add(ref.getObjectId());
815 			} else if (isTag(ref)) {
816 				allTags.add(ref.getObjectId());
817 			} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
818 				txnHeads.add(ref.getObjectId());
819 			} else {
820 				nonHeads.add(ref.getObjectId());
821 			}
822 			if (ref.getPeeledObjectId() != null) {
823 				tagTargets.add(ref.getPeeledObjectId());
824 			}
825 		}
826 
827 		List<ObjectIdSet> excluded = new LinkedList<>();
828 		for (PackFile f : repo.getObjectDatabase().getPacks()) {
829 			checkCancelled();
830 			if (f.shouldBeKept())
831 				excluded.add(f.getIndex());
832 		}
833 
834 		// Don't exclude tags that are also branch tips
835 		allTags.removeAll(allHeads);
836 		allHeadsAndTags.addAll(allHeads);
837 		allHeadsAndTags.addAll(allTags);
838 
839 		// Hoist all branch tips and tags earlier in the pack file
840 		tagTargets.addAll(allHeadsAndTags);
841 		nonHeads.addAll(indexObjects);
842 
843 		// Combine the GC_REST objects into the GC pack if requested
844 		if (pconfig.getSinglePack()) {
845 			allHeadsAndTags.addAll(nonHeads);
846 			nonHeads.clear();
847 		}
848 
849 		List<PackFile> ret = new ArrayList<>(2);
850 		PackFile heads = null;
851 		if (!allHeadsAndTags.isEmpty()) {
852 			heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
853 					tagTargets, excluded);
854 			if (heads != null) {
855 				ret.add(heads);
856 				excluded.add(0, heads.getIndex());
857 			}
858 		}
859 		if (!nonHeads.isEmpty()) {
860 			PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
861 					tagTargets, excluded);
862 			if (rest != null)
863 				ret.add(rest);
864 		}
865 		if (!txnHeads.isEmpty()) {
866 			PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
867 					null, excluded);
868 			if (txn != null)
869 				ret.add(txn);
870 		}
871 		try {
872 			deleteOldPacks(toBeDeleted, ret);
873 		} catch (ParseException e) {
874 			// TODO: the exception has to be wrapped into an IOException because
875 			// throwing the ParseException directly would break the API, instead
876 			// we should throw a ConfigInvalidException
877 			throw new IOException(e);
878 		}
879 		prunePacked();
880 		if (repo.getRefDatabase() instanceof RefDirectory) {
881 			// TODO: abstract this more cleanly.
882 			deleteEmptyRefsFolders();
883 		}
884 		deleteOrphans();
885 		deleteTempPacksIdx();
886 
887 		lastPackedRefs = refsBefore;
888 		lastRepackTime = time;
889 		return ret;
890 	}
891 
892 	private static boolean isHead(Ref ref) {
893 		return ref.getName().startsWith(Constants.R_HEADS);
894 	}
895 
896 	private static boolean isTag(Ref ref) {
897 		return ref.getName().startsWith(Constants.R_TAGS);
898 	}
899 
900 	private void deleteEmptyRefsFolders() throws IOException {
901 		Path refs = repo.getDirectory().toPath().resolve(Constants.R_REFS);
902 		// Avoid deleting a folder that was created after the threshold so that concurrent
903 		// operations trying to create a reference are not impacted
904 		Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
905 		try (Stream<Path> entries = Files.list(refs)
906 				.filter(Files::isDirectory)) {
907 			Iterator<Path> iterator = entries.iterator();
908 			while (iterator.hasNext()) {
909 				try (Stream<Path> s = Files.list(iterator.next())) {
910 					s.filter(path -> canBeSafelyDeleted(path, threshold)).forEach(this::deleteDir);
911 				}
912 			}
913 		}
914 	}
915 
916 	private boolean canBeSafelyDeleted(Path path, Instant threshold) {
917 		try {
918 			return Files.getLastModifiedTime(path).toInstant().isBefore(threshold);
919 		}
920 		catch (IOException e) {
921 			LOG.warn(MessageFormat.format(
922 					JGitText.get().cannotAccessLastModifiedForSafeDeletion,
923 					path), e);
924 			return false;
925 		}
926 	}
927 
928 	private void deleteDir(Path dir) {
929 		try (Stream<Path> dirs = Files.walk(dir)) {
930 			dirs.filter(this::isDirectory).sorted(Comparator.reverseOrder())
931 					.forEach(this::delete);
932 		} catch (IOException e) {
933 			LOG.error(e.getMessage(), e);
934 		}
935 	}
936 
937 	private boolean isDirectory(Path p) {
938 		return p.toFile().isDirectory();
939 	}
940 
941 	private void delete(Path d) {
942 		try {
943 			Files.delete(d);
944 		} catch (DirectoryNotEmptyException e) {
945 			// Don't log
946 		} catch (IOException e) {
947 			LOG.error(MessageFormat.format(JGitText.get().cannotDeleteFile, d),
948 					e);
949 		}
950 	}
951 
952 	/**
953 	 * Deletes orphans
954 	 * <p>
955 	 * A file is considered an orphan if it is either a "bitmap" or an index
956 	 * file, and its corresponding pack file is missing in the list.
957 	 * </p>
958 	 */
959 	private void deleteOrphans() {
960 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
961 		List<String> fileNames = null;
962 		try (Stream<Path> files = Files.list(packDir)) {
963 			fileNames = files.map(path -> path.getFileName().toString())
964 					.filter(name -> (name.endsWith(PACK_EXT)
965 							|| name.endsWith(BITMAP_EXT)
966 							|| name.endsWith(INDEX_EXT)
967 							|| name.endsWith(KEEP_EXT)))
968 					// sort files with same base name in the order:
969 					// .pack, .keep, .index, .bitmap to avoid look ahead
970 					.sorted(Collections.reverseOrder())
971 					.collect(Collectors.toList());
972 		} catch (IOException e) {
973 			LOG.error(e.getMessage(), e);
974 			return;
975 		}
976 		if (fileNames == null) {
977 			return;
978 		}
979 
980 		String base = null;
981 		for (String n : fileNames) {
982 			if (n.endsWith(PACK_EXT) || n.endsWith(KEEP_EXT)) {
983 				base = n.substring(0, n.lastIndexOf('.'));
984 			} else {
985 				if (base == null || !n.startsWith(base)) {
986 					try {
987 						Path delete = packDir.resolve(n);
988 						FileUtils.delete(delete.toFile(),
989 								FileUtils.RETRY | FileUtils.SKIP_MISSING);
990 						LOG.warn(JGitText.get().deletedOrphanInPackDir, delete);
991 					} catch (IOException e) {
992 						LOG.error(e.getMessage(), e);
993 					}
994 				}
995 			}
996 		}
997 	}
998 
999 	private void deleteTempPacksIdx() {
1000 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
1001 		Instant threshold = Instant.now().minus(1, ChronoUnit.DAYS);
1002 		if (!Files.exists(packDir)) {
1003 			return;
1004 		}
1005 		try (DirectoryStream<Path> stream =
1006 				Files.newDirectoryStream(packDir, "gc_*_tmp")) { //$NON-NLS-1$
1007 			stream.forEach(t -> {
1008 				try {
1009 					Instant lastModified = Files.getLastModifiedTime(t)
1010 							.toInstant();
1011 					if (lastModified.isBefore(threshold)) {
1012 						Files.deleteIfExists(t);
1013 					}
1014 				} catch (IOException e) {
1015 					LOG.error(e.getMessage(), e);
1016 				}
1017 			});
1018 		} catch (IOException e) {
1019 			LOG.error(e.getMessage(), e);
1020 		}
1021 	}
1022 
1023 	/**
1024 	 * @param ref
1025 	 *            the ref which log should be inspected
1026 	 * @param minTime only reflog entries not older then this time are processed
1027 	 * @return the {@link ObjectId}s contained in the reflog
1028 	 * @throws IOException
1029 	 */
1030 	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException {
1031 		ReflogReader reflogReader = repo.getReflogReader(ref.getName());
1032 		if (reflogReader == null) {
1033 			return Collections.emptySet();
1034 		}
1035 		List<ReflogEntry> rlEntries = reflogReader
1036 				.getReverseEntries();
1037 		if (rlEntries == null || rlEntries.isEmpty())
1038 			return Collections.emptySet();
1039 		Set<ObjectId> ret = new HashSet<>();
1040 		for (ReflogEntry e : rlEntries) {
1041 			if (e.getWho().getWhen().getTime() < minTime)
1042 				break;
1043 			ObjectId newId = e.getNewId();
1044 			if (newId != null && !ObjectId.zeroId().equals(newId))
1045 				ret.add(newId);
1046 			ObjectId oldId = e.getOldId();
1047 			if (oldId != null && !ObjectId.zeroId().equals(oldId))
1048 				ret.add(oldId);
1049 		}
1050 		return ret;
1051 	}
1052 
1053 	/**
1054 	 * Returns a collection of all refs and additional refs.
1055 	 *
1056 	 * Additional refs which don't start with "refs/" are not returned because
1057 	 * they should not save objects from being garbage collected. Examples for
1058 	 * such references are ORIG_HEAD, MERGE_HEAD, FETCH_HEAD and
1059 	 * CHERRY_PICK_HEAD.
1060 	 *
1061 	 * @return a collection of refs pointing to live objects.
1062 	 * @throws IOException
1063 	 */
1064 	private Collection<Ref> getAllRefs() throws IOException {
1065 		RefDatabase refdb = repo.getRefDatabase();
1066 		Collection<Ref> refs = refdb.getRefs();
1067 		List<Ref> addl = refdb.getAdditionalRefs();
1068 		if (!addl.isEmpty()) {
1069 			List<Ref> all = new ArrayList<>(refs.size() + addl.size());
1070 			all.addAll(refs);
1071 			// add additional refs which start with refs/
1072 			for (Ref r : addl) {
1073 				checkCancelled();
1074 				if (r.getName().startsWith(Constants.R_REFS)) {
1075 					all.add(r);
1076 				}
1077 			}
1078 			return all;
1079 		}
1080 		return refs;
1081 	}
1082 
1083 	/**
1084 	 * Return a list of those objects in the index which differ from whats in
1085 	 * HEAD
1086 	 *
1087 	 * @return a set of ObjectIds of changed objects in the index
1088 	 * @throws IOException
1089 	 * @throws CorruptObjectException
1090 	 * @throws NoWorkTreeException
1091 	 */
1092 	private Set<ObjectId> listNonHEADIndexObjects()
1093 			throws CorruptObjectException, IOException {
1094 		if (repo.isBare()) {
1095 			return Collections.emptySet();
1096 		}
1097 		try (TreeWalkTreeWalk.html#TreeWalk">TreeWalk treeWalk = new TreeWalk(repo)) {
1098 			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
1099 			ObjectId headID = repo.resolve(Constants.HEAD);
1100 			if (headID != null) {
1101 				try (RevWalk/RevWalk.html#RevWalk">RevWalk revWalk = new RevWalk(repo)) {
1102 					treeWalk.addTree(revWalk.parseTree(headID));
1103 				}
1104 			}
1105 
1106 			treeWalk.setFilter(TreeFilter.ANY_DIFF);
1107 			treeWalk.setRecursive(true);
1108 			Set<ObjectId> ret = new HashSet<>();
1109 
1110 			while (treeWalk.next()) {
1111 				checkCancelled();
1112 				ObjectId objectId = treeWalk.getObjectId(0);
1113 				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
1114 				case FileMode.TYPE_MISSING:
1115 				case FileMode.TYPE_GITLINK:
1116 					continue;
1117 				case FileMode.TYPE_TREE:
1118 				case FileMode.TYPE_FILE:
1119 				case FileMode.TYPE_SYMLINK:
1120 					ret.add(objectId);
1121 					continue;
1122 				default:
1123 					throw new IOException(MessageFormat.format(
1124 							JGitText.get().corruptObjectInvalidMode3,
1125 							String.format("%o", //$NON-NLS-1$
1126 									Integer.valueOf(treeWalk.getRawMode(0))),
1127 							(objectId == null) ? "null" : objectId.name(), //$NON-NLS-1$
1128 							treeWalk.getPathString(), //
1129 							repo.getIndexFile()));
1130 				}
1131 			}
1132 			return ret;
1133 		}
1134 	}
1135 
1136 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
1137 			@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
1138 			Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
1139 			throws IOException {
1140 		checkCancelled();
1141 		File tmpPack = null;
1142 		Map<PackExt, File> tmpExts = new TreeMap<>((o1, o2) -> {
1143 			// INDEX entries must be returned last, so the pack
1144 			// scanner does pick up the new pack until all the
1145 			// PackExt entries have been written.
1146 			if (o1 == o2) {
1147 				return 0;
1148 			}
1149 			if (o1 == PackExt.INDEX) {
1150 				return 1;
1151 			}
1152 			if (o2 == PackExt.INDEX) {
1153 				return -1;
1154 			}
1155 			return Integer.signum(o1.hashCode() - o2.hashCode());
1156 		});
1157 		try (PackWriternal/storage/pack/PackWriter.html#PackWriter">PackWriter pw = new PackWriter(
1158 				pconfig,
1159 				repo.newObjectReader())) {
1160 			// prepare the PackWriter
1161 			pw.setDeltaBaseAsOffset(true);
1162 			pw.setReuseDeltaCommits(false);
1163 			if (tagTargets != null) {
1164 				pw.setTagTargets(tagTargets);
1165 			}
1166 			if (excludeObjects != null)
1167 				for (ObjectIdSet idx : excludeObjects)
1168 					pw.excludeObjects(idx);
1169 			pw.preparePack(pm, want, have, PackWriter.NONE, tags);
1170 			if (pw.getObjectCount() == 0)
1171 				return null;
1172 			checkCancelled();
1173 
1174 			// create temporary files
1175 			String id = pw.computeName().getName();
1176 			File packdir = repo.getObjectDatabase().getPackDirectory();
1177 			packdir.mkdirs();
1178 			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir); //$NON-NLS-1$ //$NON-NLS-2$
1179 			final String tmpBase = tmpPack.getName()
1180 					.substring(0, tmpPack.getName().lastIndexOf('.'));
1181 			File tmpIdx = new File(packdir, tmpBase + ".idx_tmp"); //$NON-NLS-1$
1182 			tmpExts.put(INDEX, tmpIdx);
1183 
1184 			if (!tmpIdx.createNewFile())
1185 				throw new IOException(MessageFormat.format(
1186 						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
1187 
1188 			// write the packfile
1189 			try (FileOutputStream fos = new FileOutputStream(tmpPack);
1190 					FileChannel channel = fos.getChannel();
1191 					OutputStream channelStream = Channels
1192 							.newOutputStream(channel)) {
1193 				pw.writePack(pm, pm, channelStream);
1194 				channel.force(true);
1195 			}
1196 
1197 			// write the packindex
1198 			try (FileOutputStream fos = new FileOutputStream(tmpIdx);
1199 					FileChannel idxChannel = fos.getChannel();
1200 					OutputStream idxStream = Channels
1201 							.newOutputStream(idxChannel)) {
1202 				pw.writeIndex(idxStream);
1203 				idxChannel.force(true);
1204 			}
1205 
1206 			if (pw.prepareBitmapIndex(pm)) {
1207 				File tmpBitmapIdx = new File(packdir, tmpBase + ".bitmap_tmp"); //$NON-NLS-1$
1208 				tmpExts.put(BITMAP_INDEX, tmpBitmapIdx);
1209 
1210 				if (!tmpBitmapIdx.createNewFile())
1211 					throw new IOException(MessageFormat.format(
1212 							JGitText.get().cannotCreateIndexfile,
1213 							tmpBitmapIdx.getPath()));
1214 
1215 				try (FileOutputStream fos = new FileOutputStream(tmpBitmapIdx);
1216 						FileChannel idxChannel = fos.getChannel();
1217 						OutputStream idxStream = Channels
1218 								.newOutputStream(idxChannel)) {
1219 					pw.writeBitmapIndex(idxStream);
1220 					idxChannel.force(true);
1221 				}
1222 			}
1223 
1224 			// rename the temporary files to real files
1225 			File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
1226 
1227 			repo.getObjectDatabase().closeAllPackHandles(realPack);
1228 			tmpPack.setReadOnly();
1229 
1230 			FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
1231 			for (Map.Entry<PackExt, File> tmpEntry : tmpExts.entrySet()) {
1232 				File tmpExt = tmpEntry.getValue();
1233 				tmpExt.setReadOnly();
1234 
1235 				File realExt = nameFor(id,
1236 						"." + tmpEntry.getKey().getExtension()); //$NON-NLS-1$
1237 				try {
1238 					FileUtils.rename(tmpExt, realExt,
1239 							StandardCopyOption.ATOMIC_MOVE);
1240 				} catch (IOException e) {
1241 					File newExt = new File(realExt.getParentFile(),
1242 							realExt.getName() + ".new"); //$NON-NLS-1$
1243 					try {
1244 						FileUtils.rename(tmpExt, newExt,
1245 								StandardCopyOption.ATOMIC_MOVE);
1246 					} catch (IOException e2) {
1247 						newExt = tmpExt;
1248 						e = e2;
1249 					}
1250 					throw new IOException(MessageFormat.format(
1251 							JGitText.get().panicCantRenameIndexFile, newExt,
1252 							realExt), e);
1253 				}
1254 			}
1255 			boolean interrupted = false;
1256 			try {
1257 				FileSnapshot snapshot = FileSnapshot.save(realPack);
1258 				if (pconfig.doWaitPreventRacyPack(snapshot.size())) {
1259 					snapshot.waitUntilNotRacy();
1260 				}
1261 			} catch (InterruptedException e) {
1262 				interrupted = true;
1263 			}
1264 			try {
1265 				return repo.getObjectDatabase().openPack(realPack);
1266 			} finally {
1267 				if (interrupted) {
1268 					// Re-set interrupted flag
1269 					Thread.currentThread().interrupt();
1270 				}
1271 			}
1272 		} finally {
1273 			if (tmpPack != null && tmpPack.exists())
1274 				tmpPack.delete();
1275 			for (File tmpExt : tmpExts.values()) {
1276 				if (tmpExt.exists())
1277 					tmpExt.delete();
1278 			}
1279 		}
1280 	}
1281 
1282 	private File nameFor(String name, String ext) {
1283 		File packdir = repo.getObjectDatabase().getPackDirectory();
1284 		return new File(packdir, "pack-" + name + ext); //$NON-NLS-1$
1285 	}
1286 
1287 	private void checkCancelled() throws CancelledException {
1288 		if (pm.isCancelled() || Thread.currentThread().isInterrupted()) {
1289 			throw new CancelledException(JGitText.get().operationCanceled);
1290 		}
1291 	}
1292 
1293 	/**
1294 	 * A class holding statistical data for a FileRepository regarding how many
1295 	 * objects are stored as loose or packed objects
1296 	 */
1297 	public static class RepoStatistics {
1298 		/**
1299 		 * The number of objects stored in pack files. If the same object is
1300 		 * stored in multiple pack files then it is counted as often as it
1301 		 * occurs in pack files.
1302 		 */
1303 		public long numberOfPackedObjects;
1304 
1305 		/**
1306 		 * The number of pack files
1307 		 */
1308 		public long numberOfPackFiles;
1309 
1310 		/**
1311 		 * The number of objects stored as loose objects.
1312 		 */
1313 		public long numberOfLooseObjects;
1314 
1315 		/**
1316 		 * The sum of the sizes of all files used to persist loose objects.
1317 		 */
1318 		public long sizeOfLooseObjects;
1319 
1320 		/**
1321 		 * The sum of the sizes of all pack files.
1322 		 */
1323 		public long sizeOfPackedObjects;
1324 
1325 		/**
1326 		 * The number of loose refs.
1327 		 */
1328 		public long numberOfLooseRefs;
1329 
1330 		/**
1331 		 * The number of refs stored in pack files.
1332 		 */
1333 		public long numberOfPackedRefs;
1334 
1335 		/**
1336 		 * The number of bitmaps in the bitmap indices.
1337 		 */
1338 		public long numberOfBitmaps;
1339 
1340 		@Override
1341 		public String toString() {
1342 			final StringBuilder b = new StringBuilder();
1343 			b.append("numberOfPackedObjects=").append(numberOfPackedObjects); //$NON-NLS-1$
1344 			b.append(", numberOfPackFiles=").append(numberOfPackFiles); //$NON-NLS-1$
1345 			b.append(", numberOfLooseObjects=").append(numberOfLooseObjects); //$NON-NLS-1$
1346 			b.append(", numberOfLooseRefs=").append(numberOfLooseRefs); //$NON-NLS-1$
1347 			b.append(", numberOfPackedRefs=").append(numberOfPackedRefs); //$NON-NLS-1$
1348 			b.append(", sizeOfLooseObjects=").append(sizeOfLooseObjects); //$NON-NLS-1$
1349 			b.append(", sizeOfPackedObjects=").append(sizeOfPackedObjects); //$NON-NLS-1$
1350 			b.append(", numberOfBitmaps=").append(numberOfBitmaps); //$NON-NLS-1$
1351 			return b.toString();
1352 		}
1353 	}
1354 
1355 	/**
1356 	 * Returns information about objects and pack files for a FileRepository.
1357 	 *
1358 	 * @return information about objects and pack files for a FileRepository
1359 	 * @throws java.io.IOException
1360 	 */
1361 	public RepoStatistics getStatistics() throws IOException {
1362 		RepoStatistics ret = new RepoStatistics();
1363 		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
1364 		for (PackFile f : packs) {
1365 			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
1366 			ret.numberOfPackFiles++;
1367 			ret.sizeOfPackedObjects += f.getPackFile().length();
1368 			if (f.getBitmapIndex() != null)
1369 				ret.numberOfBitmaps += f.getBitmapIndex().getBitmapCount();
1370 		}
1371 		File objDir = repo.getObjectsDirectory();
1372 		String[] fanout = objDir.list();
1373 		if (fanout != null && fanout.length > 0) {
1374 			for (String d : fanout) {
1375 				if (d.length() != 2)
1376 					continue;
1377 				File[] entries = new File(objDir, d).listFiles();
1378 				if (entries == null)
1379 					continue;
1380 				for (File f : entries) {
1381 					if (f.getName().length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
1382 						continue;
1383 					ret.numberOfLooseObjects++;
1384 					ret.sizeOfLooseObjects += f.length();
1385 				}
1386 			}
1387 		}
1388 
1389 		RefDatabase refDb = repo.getRefDatabase();
1390 		for (Ref r : refDb.getRefs()) {
1391 			Storage storage = r.getStorage();
1392 			if (storage == Storage.LOOSE || storage == Storage.LOOSE_PACKED)
1393 				ret.numberOfLooseRefs++;
1394 			if (storage == Storage.PACKED || storage == Storage.LOOSE_PACKED)
1395 				ret.numberOfPackedRefs++;
1396 		}
1397 
1398 		return ret;
1399 	}
1400 
1401 	/**
1402 	 * Set the progress monitor used for garbage collection methods.
1403 	 *
1404 	 * @param pm a {@link org.eclipse.jgit.lib.ProgressMonitor} object.
1405 	 * @return this
1406 	 */
1407 	public GC setProgressMonitor(ProgressMonitor pm) {
1408 		this.pm = (pm == null) ? NullProgressMonitor.INSTANCE : pm;
1409 		return this;
1410 	}
1411 
1412 	/**
1413 	 * During gc() or prune() each unreferenced, loose object which has been
1414 	 * created or modified in the last <code>expireAgeMillis</code> milliseconds
1415 	 * will not be pruned. Only older objects may be pruned. If set to 0 then
1416 	 * every object is a candidate for pruning.
1417 	 *
1418 	 * @param expireAgeMillis
1419 	 *            minimal age of objects to be pruned in milliseconds.
1420 	 */
1421 	public void setExpireAgeMillis(long expireAgeMillis) {
1422 		this.expireAgeMillis = expireAgeMillis;
1423 		expire = null;
1424 	}
1425 
1426 	/**
1427 	 * During gc() or prune() packfiles which are created or modified in the
1428 	 * last <code>packExpireAgeMillis</code> milliseconds will not be deleted.
1429 	 * Only older packfiles may be deleted. If set to 0 then every packfile is a
1430 	 * candidate for deletion.
1431 	 *
1432 	 * @param packExpireAgeMillis
1433 	 *            minimal age of packfiles to be deleted in milliseconds.
1434 	 */
1435 	public void setPackExpireAgeMillis(long packExpireAgeMillis) {
1436 		this.packExpireAgeMillis = packExpireAgeMillis;
1437 		expire = null;
1438 	}
1439 
1440 	/**
1441 	 * Set the PackConfig used when (re-)writing packfiles. This allows to
1442 	 * influence how packs are written and to implement something similar to
1443 	 * "git gc --aggressive"
1444 	 *
1445 	 * @param pconfig
1446 	 *            the {@link org.eclipse.jgit.storage.pack.PackConfig} used when
1447 	 *            writing packs
1448 	 */
1449 	public void setPackConfig(@NonNull PackConfig pconfig) {
1450 		this.pconfig = pconfig;
1451 	}
1452 
1453 	/**
1454 	 * During gc() or prune() each unreferenced, loose object which has been
1455 	 * created or modified after or at <code>expire</code> will not be pruned.
1456 	 * Only older objects may be pruned. If set to null then every object is a
1457 	 * candidate for pruning.
1458 	 *
1459 	 * @param expire
1460 	 *            instant in time which defines object expiration
1461 	 *            objects with modification time before this instant are expired
1462 	 *            objects with modification time newer or equal to this instant
1463 	 *            are not expired
1464 	 */
1465 	public void setExpire(Date expire) {
1466 		this.expire = expire;
1467 		expireAgeMillis = -1;
1468 	}
1469 
1470 	/**
1471 	 * During gc() or prune() packfiles which are created or modified after or
1472 	 * at <code>packExpire</code> will not be deleted. Only older packfiles may
1473 	 * be deleted. If set to null then every packfile is a candidate for
1474 	 * deletion.
1475 	 *
1476 	 * @param packExpire
1477 	 *            instant in time which defines packfile expiration
1478 	 */
1479 	public void setPackExpire(Date packExpire) {
1480 		this.packExpire = packExpire;
1481 		packExpireAgeMillis = -1;
1482 	}
1483 
1484 	/**
1485 	 * Set the {@code gc --auto} option.
1486 	 *
1487 	 * With this option, gc checks whether any housekeeping is required; if not,
1488 	 * it exits without performing any work. Some JGit commands run
1489 	 * {@code gc --auto} after performing operations that could create many
1490 	 * loose objects.
1491 	 * <p>
1492 	 * Housekeeping is required if there are too many loose objects or too many
1493 	 * packs in the repository. If the number of loose objects exceeds the value
1494 	 * of the gc.auto option JGit GC consolidates all existing packs into a
1495 	 * single pack (equivalent to {@code -A} option), whereas git-core would
1496 	 * combine all loose objects into a single pack using {@code repack -d -l}.
1497 	 * Setting the value of {@code gc.auto} to 0 disables automatic packing of
1498 	 * loose objects.
1499 	 * <p>
1500 	 * If the number of packs exceeds the value of {@code gc.autoPackLimit},
1501 	 * then existing packs (except those marked with a .keep file) are
1502 	 * consolidated into a single pack by using the {@code -A} option of repack.
1503 	 * Setting {@code gc.autoPackLimit} to 0 disables automatic consolidation of
1504 	 * packs.
1505 	 * <p>
1506 	 * Like git the following jgit commands run auto gc:
1507 	 * <ul>
1508 	 * <li>fetch</li>
1509 	 * <li>merge</li>
1510 	 * <li>rebase</li>
1511 	 * <li>receive-pack</li>
1512 	 * </ul>
1513 	 * The auto gc for receive-pack can be suppressed by setting the config
1514 	 * option {@code receive.autogc = false}
1515 	 *
1516 	 * @param auto
1517 	 *            defines whether gc should do automatic housekeeping
1518 	 */
1519 	public void setAuto(boolean auto) {
1520 		this.automatic = auto;
1521 	}
1522 
1523 	/**
1524 	 * @param background
1525 	 *            whether to run the gc in a background thread.
1526 	 */
1527 	void setBackground(boolean background) {
1528 		this.background = background;
1529 	}
1530 
1531 	private boolean needGc() {
1532 		if (tooManyPacks()) {
1533 			addRepackAllOption();
1534 		} else {
1535 			return tooManyLooseObjects();
1536 		}
1537 		// TODO run pre-auto-gc hook, if it fails return false
1538 		return true;
1539 	}
1540 
1541 	private void addRepackAllOption() {
1542 		// TODO: if JGit GC is enhanced to support repack's option -l this
1543 		// method needs to be implemented
1544 	}
1545 
1546 	/**
1547 	 * @return {@code true} if number of packs > gc.autopacklimit (default 50)
1548 	 */
1549 	boolean tooManyPacks() {
1550 		int autopacklimit = repo.getConfig().getInt(
1551 				ConfigConstants.CONFIG_GC_SECTION,
1552 				ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
1553 				DEFAULT_AUTOPACKLIMIT);
1554 		if (autopacklimit <= 0) {
1555 			return false;
1556 		}
1557 		// JGit always creates two packfiles, one for the objects reachable from
1558 		// branches, and another one for the rest
1559 		return repo.getObjectDatabase().getPacks().size() > (autopacklimit + 1);
1560 	}
1561 
1562 	/**
1563 	 * Quickly estimate number of loose objects, SHA1 is distributed evenly so
1564 	 * counting objects in one directory (bucket 17) is sufficient
1565 	 *
1566 	 * @return {@code true} if number of loose objects > gc.auto (default 6700)
1567 	 */
1568 	boolean tooManyLooseObjects() {
1569 		int auto = getLooseObjectLimit();
1570 		if (auto <= 0) {
1571 			return false;
1572 		}
1573 		int n = 0;
1574 		int threshold = (auto + 255) / 256;
1575 		Path dir = repo.getObjectsDirectory().toPath().resolve("17"); //$NON-NLS-1$
1576 		if (!dir.toFile().exists()) {
1577 			return false;
1578 		}
1579 		try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, file -> {
1580 					Path fileName = file.getFileName();
1581 					return file.toFile().isFile() && fileName != null
1582 							&& PATTERN_LOOSE_OBJECT.matcher(fileName.toString())
1583 									.matches();
1584 				})) {
1585 			for (Iterator<Path> iter = stream.iterator(); iter.hasNext(); iter
1586 					.next()) {
1587 				if (++n > threshold) {
1588 					return true;
1589 				}
1590 			}
1591 		} catch (IOException e) {
1592 			LOG.error(e.getMessage(), e);
1593 		}
1594 		return false;
1595 	}
1596 
1597 	private int getLooseObjectLimit() {
1598 		return repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
1599 				ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
1600 	}
1601 }