View Javadoc
1   /*
2    * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
3    * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  package org.eclipse.jgit.internal.storage.file;
45  
46  import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
47  import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
48  
49  import java.io.File;
50  import java.io.FileOutputStream;
51  import java.io.IOException;
52  import java.io.OutputStream;
53  import java.io.PrintWriter;
54  import java.io.StringWriter;
55  import java.nio.channels.Channels;
56  import java.nio.channels.FileChannel;
57  import java.nio.file.DirectoryNotEmptyException;
58  import java.nio.file.DirectoryStream;
59  import java.nio.file.Files;
60  import java.nio.file.Path;
61  import java.nio.file.StandardCopyOption;
62  import java.text.MessageFormat;
63  import java.text.ParseException;
64  import java.time.Instant;
65  import java.time.temporal.ChronoUnit;
66  import java.util.ArrayList;
67  import java.util.Collection;
68  import java.util.Collections;
69  import java.util.Comparator;
70  import java.util.Date;
71  import java.util.HashMap;
72  import java.util.HashSet;
73  import java.util.Iterator;
74  import java.util.LinkedList;
75  import java.util.List;
76  import java.util.Map;
77  import java.util.Objects;
78  import java.util.Set;
79  import java.util.TreeMap;
80  import java.util.concurrent.Callable;
81  import java.util.concurrent.ExecutorService;
82  import java.util.regex.Pattern;
83  import java.util.stream.Collectors;
84  import java.util.stream.Stream;
85  
86  import org.eclipse.jgit.annotations.NonNull;
87  import org.eclipse.jgit.dircache.DirCacheIterator;
88  import org.eclipse.jgit.errors.CancelledException;
89  import org.eclipse.jgit.errors.CorruptObjectException;
90  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
91  import org.eclipse.jgit.errors.MissingObjectException;
92  import org.eclipse.jgit.errors.NoWorkTreeException;
93  import org.eclipse.jgit.internal.JGitText;
94  import org.eclipse.jgit.internal.storage.pack.PackExt;
95  import org.eclipse.jgit.internal.storage.pack.PackWriter;
96  import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
97  import org.eclipse.jgit.lib.ConfigConstants;
98  import org.eclipse.jgit.lib.Constants;
99  import org.eclipse.jgit.lib.FileMode;
100 import org.eclipse.jgit.lib.NullProgressMonitor;
101 import org.eclipse.jgit.lib.ObjectId;
102 import org.eclipse.jgit.lib.ObjectIdSet;
103 import org.eclipse.jgit.lib.ObjectLoader;
104 import org.eclipse.jgit.lib.ObjectReader;
105 import org.eclipse.jgit.lib.ProgressMonitor;
106 import org.eclipse.jgit.lib.Ref;
107 import org.eclipse.jgit.lib.Ref.Storage;
108 import org.eclipse.jgit.lib.RefDatabase;
109 import org.eclipse.jgit.lib.ReflogEntry;
110 import org.eclipse.jgit.lib.ReflogReader;
111 import org.eclipse.jgit.lib.internal.WorkQueue;
112 import org.eclipse.jgit.revwalk.ObjectWalk;
113 import org.eclipse.jgit.revwalk.RevObject;
114 import org.eclipse.jgit.revwalk.RevWalk;
115 import org.eclipse.jgit.storage.pack.PackConfig;
116 import org.eclipse.jgit.treewalk.TreeWalk;
117 import org.eclipse.jgit.treewalk.filter.TreeFilter;
118 import org.eclipse.jgit.util.FileUtils;
119 import org.eclipse.jgit.util.GitDateParser;
120 import org.eclipse.jgit.util.SystemReader;
121 import org.slf4j.Logger;
122 import org.slf4j.LoggerFactory;
123 
124 /**
125  * A garbage collector for git
126  * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. Instances of
127  * this class are not thread-safe. Don't use the same instance from multiple
128  * threads.
129  *
130  * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
131  * adapted to FileRepositories.
132  */
133 public class GC {
134 	private final static Logger LOG = LoggerFactory
135 			.getLogger(GC.class);
136 
137 	private static final String PRUNE_EXPIRE_DEFAULT = "2.weeks.ago"; //$NON-NLS-1$
138 
139 	private static final String PRUNE_PACK_EXPIRE_DEFAULT = "1.hour.ago"; //$NON-NLS-1$
140 
141 	private static final Pattern PATTERN_LOOSE_OBJECT = Pattern
142 			.compile("[0-9a-fA-F]{38}"); //$NON-NLS-1$
143 
144 	private static final String PACK_EXT = "." + PackExt.PACK.getExtension();//$NON-NLS-1$
145 
146 	private static final String BITMAP_EXT = "." //$NON-NLS-1$
147 			+ PackExt.BITMAP_INDEX.getExtension();
148 
149 	private static final String INDEX_EXT = "." + PackExt.INDEX.getExtension(); //$NON-NLS-1$
150 
151 	private static final int DEFAULT_AUTOPACKLIMIT = 50;
152 
153 	private static final int DEFAULT_AUTOLIMIT = 6700;
154 
155 	private static volatile ExecutorService executor;
156 
157 	/**
158 	 * Set the executor for running auto-gc in the background. If no executor is
159 	 * set JGit's own WorkQueue will be used.
160 	 *
161 	 * @param e
162 	 *            the executor to be used for running auto-gc
163 	 * @since 4.8
164 	 */
165 	public static void setExecutor(ExecutorService e) {
166 		executor = e;
167 	}
168 
169 	private final FileRepository repo;
170 
171 	private ProgressMonitor pm;
172 
173 	private long expireAgeMillis = -1;
174 
175 	private Date expire;
176 
177 	private long packExpireAgeMillis = -1;
178 
179 	private Date packExpire;
180 
181 	private PackConfig pconfig = null;
182 
183 	/**
184 	 * the refs which existed during the last call to {@link #repack()}. This is
185 	 * needed during {@link #prune(Set)} where we can optimize by looking at the
186 	 * difference between the current refs and the refs which existed during
187 	 * last {@link #repack()}.
188 	 */
189 	private Collection<Ref> lastPackedRefs;
190 
191 	/**
192 	 * Holds the starting time of the last repack() execution. This is needed in
193 	 * prune() to inspect only those reflog entries which have been added since
194 	 * last repack().
195 	 */
196 	private long lastRepackTime;
197 
198 	/**
199 	 * Whether gc should do automatic housekeeping
200 	 */
201 	private boolean automatic;
202 
203 	/**
204 	 * Whether to run gc in a background thread
205 	 */
206 	private boolean background;
207 
208 	/**
209 	 * Creates a new garbage collector with default values. An expirationTime of
210 	 * two weeks and <code>null</code> as progress monitor will be used.
211 	 *
212 	 * @param repo
213 	 *            the repo to work on
214 	 */
215 	public GC(FileRepository repo) {
216 		this.repo = repo;
217 		this.pm = NullProgressMonitor.INSTANCE;
218 	}
219 
220 	/**
221 	 * Runs a garbage collector on a
222 	 * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. It will
223 	 * <ul>
224 	 * <li>pack loose references into packed-refs</li>
225 	 * <li>repack all reachable objects into new pack files and delete the old
226 	 * pack files</li>
227 	 * <li>prune all loose objects which are now reachable by packs</li>
228 	 * </ul>
229 	 *
230 	 * If {@link #setAuto(boolean)} was set to {@code true} {@code gc} will
231 	 * first check whether any housekeeping is required; if not, it exits
232 	 * without performing any work.
233 	 *
234 	 * If {@link #setBackground(boolean)} was set to {@code true}
235 	 * {@code collectGarbage} will start the gc in the background, and then
236 	 * return immediately. In this case, errors will not be reported except in
237 	 * gc.log.
238 	 *
239 	 * @return the collection of
240 	 *         {@link org.eclipse.jgit.internal.storage.file.PackFile}'s which
241 	 *         are newly created
242 	 * @throws java.io.IOException
243 	 * @throws java.text.ParseException
244 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
245 	 *             parsed
246 	 */
247 	// TODO(ms): in 5.0 change signature and return Future<Collection<PackFile>>
248 	@SuppressWarnings("FutureReturnValueIgnored")
249 	public Collection<PackFile> gc() throws IOException, ParseException {
250 		if (!background) {
251 			return doGc();
252 		}
253 		final GcLog gcLog = new GcLog(repo);
254 		if (!gcLog.lock()) {
255 			// there is already a background gc running
256 			return Collections.emptyList();
257 		}
258 
259 		Callable<Collection<PackFile>> gcTask = () -> {
260 			try {
261 				Collection<PackFile> newPacks = doGc();
262 				if (automatic && tooManyLooseObjects()) {
263 					String message = JGitText.get().gcTooManyUnpruned;
264 					gcLog.write(message);
265 					gcLog.commit();
266 				}
267 				return newPacks;
268 			} catch (IOException | ParseException e) {
269 				try {
270 					gcLog.write(e.getMessage());
271 					StringWriter sw = new StringWriter();
272 					e.printStackTrace(new PrintWriter(sw));
273 					gcLog.write(sw.toString());
274 					gcLog.commit();
275 				} catch (IOException e2) {
276 					e2.addSuppressed(e);
277 					LOG.error(e2.getMessage(), e2);
278 				}
279 			} finally {
280 				gcLog.unlock();
281 			}
282 			return Collections.emptyList();
283 		};
284 		// TODO(ms): in 5.0 change signature and return the Future
285 		executor().submit(gcTask);
286 		return Collections.emptyList();
287 	}
288 
289 	private ExecutorService executor() {
290 		return (executor != null) ? executor : WorkQueue.getExecutor();
291 	}
292 
293 	private Collection<PackFile> doGc() throws IOException, ParseException {
294 		if (automatic && !needGc()) {
295 			return Collections.emptyList();
296 		}
297 		pm.start(6 /* tasks */);
298 		packRefs();
299 		// TODO: implement reflog_expire(pm, repo);
300 		Collection<PackFile> newPacks = repack();
301 		prune(Collections.emptySet());
302 		// TODO: implement rerere_gc(pm);
303 		return newPacks;
304 	}
305 
306 	/**
307 	 * Loosen objects in a pack file which are not also in the newly-created
308 	 * pack files.
309 	 *
310 	 * @param inserter
311 	 * @param reader
312 	 * @param pack
313 	 * @param existing
314 	 * @throws IOException
315 	 */
316 	private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader, PackFile pack, HashSet<ObjectId> existing)
317 			throws IOException {
318 		for (PackIndex.MutableEntry entry : pack) {
319 			ObjectId oid = entry.toObjectId();
320 			if (existing.contains(oid)) {
321 				continue;
322 			}
323 			existing.add(oid);
324 			ObjectLoader loader = reader.open(oid);
325 			inserter.insert(loader.getType(),
326 					loader.getSize(),
327 					loader.openStream(),
328 					true /* create this object even though it's a duplicate */);
329 		}
330 	}
331 
332 	/**
333 	 * Delete old pack files. What is 'old' is defined by specifying a set of
334 	 * old pack files and a set of new pack files. Each pack file contained in
335 	 * old pack files but not contained in new pack files will be deleted. If
336 	 * preserveOldPacks is set, keep a copy of the pack file in the preserve
337 	 * directory. If an expirationDate is set then pack files which are younger
338 	 * than the expirationDate will not be deleted nor preserved.
339 	 * <p>
340 	 * If we're not immediately expiring loose objects, loosen any objects
341 	 * in the old pack files which aren't in the new pack files.
342 	 *
343 	 * @param oldPacks
344 	 * @param newPacks
345 	 * @throws ParseException
346 	 * @throws IOException
347 	 */
348 	private void deleteOldPacks(Collection<PackFile> oldPacks,
349 			Collection<PackFile> newPacks) throws ParseException, IOException {
350 		HashSet<ObjectId> ids = new HashSet<>();
351 		for (PackFile pack : newPacks) {
352 			for (PackIndex.MutableEntry entry : pack) {
353 				ids.add(entry.toObjectId());
354 			}
355 		}
356 		ObjectReader reader = repo.newObjectReader();
357 		ObjectDirectory dir = repo.getObjectDatabase();
358 		ObjectDirectoryInserter inserter = dir.newInserter();
359 		boolean shouldLoosen = !"now".equals(getPruneExpireStr()) && //$NON-NLS-1$
360 			getExpireDate() < Long.MAX_VALUE;
361 
362 		prunePreserved();
363 		long packExpireDate = getPackExpireDate();
364 		oldPackLoop: for (PackFile oldPack : oldPacks) {
365 			checkCancelled();
366 			String oldName = oldPack.getPackName();
367 			// check whether an old pack file is also among the list of new
368 			// pack files. Then we must not delete it.
369 			for (PackFile newPack : newPacks)
370 				if (oldName.equals(newPack.getPackName()))
371 					continue oldPackLoop;
372 
373 			if (!oldPack.shouldBeKept()
374 					&& repo.getFS().lastModified(
375 							oldPack.getPackFile()) < packExpireDate) {
376 				oldPack.close();
377 				if (shouldLoosen) {
378 					loosen(inserter, reader, oldPack, ids);
379 				}
380 				prunePack(oldName);
381 			}
382 		}
383 
384 		// close the complete object database. That's my only chance to force
385 		// rescanning and to detect that certain pack files are now deleted.
386 		repo.getObjectDatabase().close();
387 	}
388 
389 	/**
390 	 * Deletes old pack file, unless 'preserve-oldpacks' is set, in which case it
391 	 * moves the pack file to the preserved directory
392 	 *
393 	 * @param packFile
394 	 * @param packName
395 	 * @param ext
396 	 * @param deleteOptions
397 	 * @throws IOException
398 	 */
399 	private void removeOldPack(File packFile, String packName, PackExt ext,
400 			int deleteOptions) throws IOException {
401 		if (pconfig != null && pconfig.isPreserveOldPacks()) {
402 			File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
403 			FileUtils.mkdir(oldPackDir, true);
404 
405 			String oldPackName = "pack-" + packName + ".old-" + ext.getExtension();  //$NON-NLS-1$ //$NON-NLS-2$
406 			File oldPackFile = new File(oldPackDir, oldPackName);
407 			FileUtils.rename(packFile, oldPackFile);
408 		} else {
409 			FileUtils.delete(packFile, deleteOptions);
410 		}
411 	}
412 
413 	/**
414 	 * Delete the preserved directory including all pack files within
415 	 */
416 	private void prunePreserved() {
417 		if (pconfig != null && pconfig.isPrunePreserved()) {
418 			try {
419 				FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
420 						FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
421 			} catch (IOException e) {
422 				// Deletion of the preserved pack files failed. Silently return.
423 			}
424 		}
425 	}
426 
427 	/**
428 	 * Delete files associated with a single pack file. First try to delete the
429 	 * ".pack" file because on some platforms the ".pack" file may be locked and
430 	 * can't be deleted. In such a case it is better to detect this early and
431 	 * give up on deleting files for this packfile. Otherwise we may delete the
432 	 * ".index" file and when failing to delete the ".pack" file we are left
433 	 * with a ".pack" file without a ".index" file.
434 	 *
435 	 * @param packName
436 	 */
437 	private void prunePack(String packName) {
438 		PackExt[] extensions = PackExt.values();
439 		try {
440 			// Delete the .pack file first and if this fails give up on deleting
441 			// the other files
442 			int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
443 			for (PackExt ext : extensions)
444 				if (PackExt.PACK.equals(ext)) {
445 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
446 					removeOldPack(f, packName, ext, deleteOptions);
447 					break;
448 				}
449 			// The .pack file has been deleted. Delete as many as the other
450 			// files as you can.
451 			deleteOptions |= FileUtils.IGNORE_ERRORS;
452 			for (PackExt ext : extensions) {
453 				if (!PackExt.PACK.equals(ext)) {
454 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
455 					removeOldPack(f, packName, ext, deleteOptions);
456 				}
457 			}
458 		} catch (IOException e) {
459 			// Deletion of the .pack file failed. Silently return.
460 		}
461 	}
462 
463 	/**
464 	 * Like "git prune-packed" this method tries to prune all loose objects
465 	 * which can be found in packs. If certain objects can't be pruned (e.g.
466 	 * because the filesystem delete operation fails) this is silently ignored.
467 	 *
468 	 * @throws java.io.IOException
469 	 */
470 	public void prunePacked() throws IOException {
471 		ObjectDirectory objdb = repo.getObjectDatabase();
472 		Collection<PackFile> packs = objdb.getPacks();
473 		File objects = repo.getObjectsDirectory();
474 		String[] fanout = objects.list();
475 
476 		if (fanout != null && fanout.length > 0) {
477 			pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
478 			try {
479 				for (String d : fanout) {
480 					checkCancelled();
481 					pm.update(1);
482 					if (d.length() != 2)
483 						continue;
484 					String[] entries = new File(objects, d).list();
485 					if (entries == null)
486 						continue;
487 					for (String e : entries) {
488 						checkCancelled();
489 						if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
490 							continue;
491 						ObjectId id;
492 						try {
493 							id = ObjectId.fromString(d + e);
494 						} catch (IllegalArgumentException notAnObject) {
495 							// ignoring the file that does not represent loose
496 							// object
497 							continue;
498 						}
499 						boolean found = false;
500 						for (PackFile p : packs) {
501 							checkCancelled();
502 							if (p.hasObject(id)) {
503 								found = true;
504 								break;
505 							}
506 						}
507 						if (found)
508 							FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
509 									| FileUtils.SKIP_MISSING
510 									| FileUtils.IGNORE_ERRORS);
511 					}
512 				}
513 			} finally {
514 				pm.endTask();
515 			}
516 		}
517 	}
518 
519 	/**
520 	 * Like "git prune" this method tries to prune all loose objects which are
521 	 * unreferenced. If certain objects can't be pruned (e.g. because the
522 	 * filesystem delete operation fails) this is silently ignored.
523 	 *
524 	 * @param objectsToKeep
525 	 *            a set of objects which should explicitly not be pruned
526 	 * @throws java.io.IOException
527 	 * @throws java.text.ParseException
528 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
529 	 *             parsed
530 	 */
531 	public void prune(Set<ObjectId> objectsToKeep) throws IOException,
532 			ParseException {
533 		long expireDate = getExpireDate();
534 
535 		// Collect all loose objects which are old enough, not referenced from
536 		// the index and not in objectsToKeep
537 		Map<ObjectId, File> deletionCandidates = new HashMap<>();
538 		Set<ObjectId> indexObjects = null;
539 		File objects = repo.getObjectsDirectory();
540 		String[] fanout = objects.list();
541 		if (fanout == null || fanout.length == 0) {
542 			return;
543 		}
544 		pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
545 				fanout.length);
546 		try {
547 			for (String d : fanout) {
548 				checkCancelled();
549 				pm.update(1);
550 				if (d.length() != 2)
551 					continue;
552 				File[] entries = new File(objects, d).listFiles();
553 				if (entries == null)
554 					continue;
555 				for (File f : entries) {
556 					checkCancelled();
557 					String fName = f.getName();
558 					if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
559 						continue;
560 					if (repo.getFS().lastModified(f) >= expireDate)
561 						continue;
562 					try {
563 						ObjectId id = ObjectId.fromString(d + fName);
564 						if (objectsToKeep.contains(id))
565 							continue;
566 						if (indexObjects == null)
567 							indexObjects = listNonHEADIndexObjects();
568 						if (indexObjects.contains(id))
569 							continue;
570 						deletionCandidates.put(id, f);
571 					} catch (IllegalArgumentException notAnObject) {
572 						// ignoring the file that does not represent loose
573 						// object
574 					}
575 				}
576 			}
577 		} finally {
578 			pm.endTask();
579 		}
580 
581 		if (deletionCandidates.isEmpty()) {
582 			return;
583 		}
584 
585 		checkCancelled();
586 
587 		// From the set of current refs remove all those which have been handled
588 		// during last repack(). Only those refs will survive which have been
589 		// added or modified since the last repack. Only these can save existing
590 		// loose refs from being pruned.
591 		Collection<Ref> newRefs;
592 		if (lastPackedRefs == null || lastPackedRefs.isEmpty())
593 			newRefs = getAllRefs();
594 		else {
595 			Map<String, Ref> last = new HashMap<>();
596 			for (Ref r : lastPackedRefs) {
597 				last.put(r.getName(), r);
598 			}
599 			newRefs = new ArrayList<>();
600 			for (Ref r : getAllRefs()) {
601 				Ref old = last.get(r.getName());
602 				if (!equals(r, old)) {
603 					newRefs.add(r);
604 				}
605 			}
606 		}
607 
608 		if (!newRefs.isEmpty()) {
609 			// There are new/modified refs! Check which loose objects are now
610 			// referenced by these modified refs (or their reflogentries).
611 			// Remove these loose objects
612 			// from the deletionCandidates. When the last candidate is removed
613 			// leave this method.
614 			ObjectWalk w = new ObjectWalk(repo);
615 			try {
616 				for (Ref cr : newRefs) {
617 					checkCancelled();
618 					w.markStart(w.parseAny(cr.getObjectId()));
619 				}
620 				if (lastPackedRefs != null)
621 					for (Ref lpr : lastPackedRefs) {
622 						w.markUninteresting(w.parseAny(lpr.getObjectId()));
623 					}
624 				removeReferenced(deletionCandidates, w);
625 			} finally {
626 				w.dispose();
627 			}
628 		}
629 
630 		if (deletionCandidates.isEmpty())
631 			return;
632 
633 		// Since we have not left the method yet there are still
634 		// deletionCandidates. Last chance for these objects not to be pruned is
635 		// that they are referenced by reflog entries. Even refs which currently
636 		// point to the same object as during last repack() may have
637 		// additional reflog entries not handled during last repack()
638 		ObjectWalk w = new ObjectWalk(repo);
639 		try {
640 			for (Ref ar : getAllRefs())
641 				for (ObjectId id : listRefLogObjects(ar, lastRepackTime)) {
642 					checkCancelled();
643 					w.markStart(w.parseAny(id));
644 				}
645 			if (lastPackedRefs != null)
646 				for (Ref lpr : lastPackedRefs) {
647 					checkCancelled();
648 					w.markUninteresting(w.parseAny(lpr.getObjectId()));
649 				}
650 			removeReferenced(deletionCandidates, w);
651 		} finally {
652 			w.dispose();
653 		}
654 
655 		if (deletionCandidates.isEmpty())
656 			return;
657 
658 		checkCancelled();
659 
660 		// delete all candidates which have survived: these are unreferenced
661 		// loose objects. Make a last check, though, to avoid deleting objects
662 		// that could have been referenced while the candidates list was being
663 		// built (by an incoming push, for example).
664 		Set<File> touchedFanout = new HashSet<>();
665 		for (File f : deletionCandidates.values()) {
666 			if (f.lastModified() < expireDate) {
667 				f.delete();
668 				touchedFanout.add(f.getParentFile());
669 			}
670 		}
671 
672 		for (File f : touchedFanout) {
673 			FileUtils.delete(f,
674 					FileUtils.EMPTY_DIRECTORIES_ONLY | FileUtils.IGNORE_ERRORS);
675 		}
676 
677 		repo.getObjectDatabase().close();
678 	}
679 
680 	private long getExpireDate() throws ParseException {
681 		long expireDate = Long.MAX_VALUE;
682 
683 		if (expire == null && expireAgeMillis == -1) {
684 			String pruneExpireStr = getPruneExpireStr();
685 			if (pruneExpireStr == null)
686 				pruneExpireStr = PRUNE_EXPIRE_DEFAULT;
687 			expire = GitDateParser.parse(pruneExpireStr, null, SystemReader
688 					.getInstance().getLocale());
689 			expireAgeMillis = -1;
690 		}
691 		if (expire != null)
692 			expireDate = expire.getTime();
693 		if (expireAgeMillis != -1)
694 			expireDate = System.currentTimeMillis() - expireAgeMillis;
695 		return expireDate;
696 	}
697 
698 	private String getPruneExpireStr() {
699 		return repo.getConfig().getString(
700                         ConfigConstants.CONFIG_GC_SECTION, null,
701                         ConfigConstants.CONFIG_KEY_PRUNEEXPIRE);
702 	}
703 
704 	private long getPackExpireDate() throws ParseException {
705 		long packExpireDate = Long.MAX_VALUE;
706 
707 		if (packExpire == null && packExpireAgeMillis == -1) {
708 			String prunePackExpireStr = repo.getConfig().getString(
709 					ConfigConstants.CONFIG_GC_SECTION, null,
710 					ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE);
711 			if (prunePackExpireStr == null)
712 				prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT;
713 			packExpire = GitDateParser.parse(prunePackExpireStr, null,
714 					SystemReader.getInstance().getLocale());
715 			packExpireAgeMillis = -1;
716 		}
717 		if (packExpire != null)
718 			packExpireDate = packExpire.getTime();
719 		if (packExpireAgeMillis != -1)
720 			packExpireDate = System.currentTimeMillis() - packExpireAgeMillis;
721 		return packExpireDate;
722 	}
723 
724 	/**
725 	 * Remove all entries from a map which key is the id of an object referenced
726 	 * by the given ObjectWalk
727 	 *
728 	 * @param id2File
729 	 * @param w
730 	 * @throws MissingObjectException
731 	 * @throws IncorrectObjectTypeException
732 	 * @throws IOException
733 	 */
734 	private void removeReferenced(Map<ObjectId, File> id2File,
735 			ObjectWalk w) throws MissingObjectException,
736 			IncorrectObjectTypeException, IOException {
737 		RevObject ro = w.next();
738 		while (ro != null) {
739 			checkCancelled();
740 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
741 				return;
742 			}
743 			ro = w.next();
744 		}
745 		ro = w.nextObject();
746 		while (ro != null) {
747 			checkCancelled();
748 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
749 				return;
750 			}
751 			ro = w.nextObject();
752 		}
753 	}
754 
755 	private static boolean equals(Ref r1, Ref r2) {
756 		if (r1 == null || r2 == null) {
757 			return false;
758 		}
759 		if (r1.isSymbolic()) {
760 			return r2.isSymbolic() && r1.getTarget().getName()
761 					.equals(r2.getTarget().getName());
762 		}
763 		return !r2.isSymbolic()
764 				&& Objects.equals(r1.getObjectId(), r2.getObjectId());
765 	}
766 
767 	/**
768 	 * Packs all non-symbolic, loose refs into packed-refs.
769 	 *
770 	 * @throws java.io.IOException
771 	 */
772 	public void packRefs() throws IOException {
773 		Collection<Ref> refs = repo.getRefDatabase()
774 				.getRefsByPrefix(Constants.R_REFS);
775 		List<String> refsToBePacked = new ArrayList<>(refs.size());
776 		pm.beginTask(JGitText.get().packRefs, refs.size());
777 		try {
778 			for (Ref ref : refs) {
779 				checkCancelled();
780 				if (!ref.isSymbolic() && ref.getStorage().isLoose())
781 					refsToBePacked.add(ref.getName());
782 				pm.update(1);
783 			}
784 			((RefDirectory) repo.getRefDatabase()).pack(refsToBePacked);
785 		} finally {
786 			pm.endTask();
787 		}
788 	}
789 
790 	/**
791 	 * Packs all objects which reachable from any of the heads into one pack
792 	 * file. Additionally all objects which are not reachable from any head but
793 	 * which are reachable from any of the other refs (e.g. tags), special refs
794 	 * (e.g. FETCH_HEAD) or index are packed into a separate pack file. Objects
795 	 * included in pack files which have a .keep file associated are never
796 	 * repacked. All old pack files which existed before are deleted.
797 	 *
798 	 * @return a collection of the newly created pack files
799 	 * @throws java.io.IOException
800 	 *             when during reading of refs, index, packfiles, objects,
801 	 *             reflog-entries or during writing to the packfiles
802 	 *             {@link java.io.IOException} occurs
803 	 */
804 	public Collection<PackFile> repack() throws IOException {
805 		Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
806 
807 		long time = System.currentTimeMillis();
808 		Collection<Ref> refsBefore = getAllRefs();
809 
810 		Set<ObjectId> allHeadsAndTags = new HashSet<>();
811 		Set<ObjectId> allHeads = new HashSet<>();
812 		Set<ObjectId> allTags = new HashSet<>();
813 		Set<ObjectId> nonHeads = new HashSet<>();
814 		Set<ObjectId> txnHeads = new HashSet<>();
815 		Set<ObjectId> tagTargets = new HashSet<>();
816 		Set<ObjectId> indexObjects = listNonHEADIndexObjects();
817 		RefDatabase refdb = repo.getRefDatabase();
818 
819 		for (Ref ref : refsBefore) {
820 			checkCancelled();
821 			nonHeads.addAll(listRefLogObjects(ref, 0));
822 			if (ref.isSymbolic() || ref.getObjectId() == null) {
823 				continue;
824 			}
825 			if (isHead(ref)) {
826 				allHeads.add(ref.getObjectId());
827 			} else if (isTag(ref)) {
828 				allTags.add(ref.getObjectId());
829 			} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
830 				txnHeads.add(ref.getObjectId());
831 			} else {
832 				nonHeads.add(ref.getObjectId());
833 			}
834 			if (ref.getPeeledObjectId() != null) {
835 				tagTargets.add(ref.getPeeledObjectId());
836 			}
837 		}
838 
839 		List<ObjectIdSet> excluded = new LinkedList<>();
840 		for (PackFile f : repo.getObjectDatabase().getPacks()) {
841 			checkCancelled();
842 			if (f.shouldBeKept())
843 				excluded.add(f.getIndex());
844 		}
845 
846 		// Don't exclude tags that are also branch tips
847 		allTags.removeAll(allHeads);
848 		allHeadsAndTags.addAll(allHeads);
849 		allHeadsAndTags.addAll(allTags);
850 
851 		// Hoist all branch tips and tags earlier in the pack file
852 		tagTargets.addAll(allHeadsAndTags);
853 		nonHeads.addAll(indexObjects);
854 
855 		// Combine the GC_REST objects into the GC pack if requested
856 		if (pconfig != null && pconfig.getSinglePack()) {
857 			allHeadsAndTags.addAll(nonHeads);
858 			nonHeads.clear();
859 		}
860 
861 		List<PackFile> ret = new ArrayList<>(2);
862 		PackFile heads = null;
863 		if (!allHeadsAndTags.isEmpty()) {
864 			heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
865 					tagTargets, excluded);
866 			if (heads != null) {
867 				ret.add(heads);
868 				excluded.add(0, heads.getIndex());
869 			}
870 		}
871 		if (!nonHeads.isEmpty()) {
872 			PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
873 					tagTargets, excluded);
874 			if (rest != null)
875 				ret.add(rest);
876 		}
877 		if (!txnHeads.isEmpty()) {
878 			PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
879 					null, excluded);
880 			if (txn != null)
881 				ret.add(txn);
882 		}
883 		try {
884 			deleteOldPacks(toBeDeleted, ret);
885 		} catch (ParseException e) {
886 			// TODO: the exception has to be wrapped into an IOException because
887 			// throwing the ParseException directly would break the API, instead
888 			// we should throw a ConfigInvalidException
889 			throw new IOException(e);
890 		}
891 		prunePacked();
892 		deleteEmptyRefsFolders();
893 		deleteOrphans();
894 		deleteTempPacksIdx();
895 
896 		lastPackedRefs = refsBefore;
897 		lastRepackTime = time;
898 		return ret;
899 	}
900 
901 	private static boolean isHead(Ref ref) {
902 		return ref.getName().startsWith(Constants.R_HEADS);
903 	}
904 
905 	private static boolean isTag(Ref ref) {
906 		return ref.getName().startsWith(Constants.R_TAGS);
907 	}
908 
909 	private void deleteEmptyRefsFolders() throws IOException {
910 		Path refs = repo.getDirectory().toPath().resolve(Constants.R_REFS);
911 		// Avoid deleting a folder that was created after the threshold so that concurrent
912 		// operations trying to create a reference are not impacted
913 		Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
914 		try (Stream<Path> entries = Files.list(refs)) {
915 			Iterator<Path> iterator = entries.iterator();
916 			while (iterator.hasNext()) {
917 				try (Stream<Path> s = Files.list(iterator.next())) {
918 					s.filter(path -> canBeSafelyDeleted(path, threshold)).forEach(this::deleteDir);
919 				}
920 			}
921 		}
922 	}
923 
924 	private boolean canBeSafelyDeleted(Path path, Instant threshold) {
925 		try {
926 			return Files.getLastModifiedTime(path).toInstant().isBefore(threshold);
927 		}
928 		catch (IOException e) {
929 			LOG.warn(MessageFormat.format(
930 					JGitText.get().cannotAccessLastModifiedForSafeDeletion,
931 					path), e);
932 			return false;
933 		}
934 	}
935 
936 	private void deleteDir(Path dir) {
937 		try (Stream<Path> dirs = Files.walk(dir)) {
938 			dirs.filter(this::isDirectory).sorted(Comparator.reverseOrder())
939 					.forEach(this::delete);
940 		} catch (IOException e) {
941 			LOG.error(e.getMessage(), e);
942 		}
943 	}
944 
945 	private boolean isDirectory(Path p) {
946 		return p.toFile().isDirectory();
947 	}
948 
949 	private void delete(Path d) {
950 		try {
951 			Files.delete(d);
952 		} catch (DirectoryNotEmptyException e) {
953 			// Don't log
954 		} catch (IOException e) {
955 			LOG.error(MessageFormat.format(JGitText.get().cannotDeleteFile, d),
956 					e);
957 		}
958 	}
959 
960 	/**
961 	 * Deletes orphans
962 	 * <p>
963 	 * A file is considered an orphan if it is either a "bitmap" or an index
964 	 * file, and its corresponding pack file is missing in the list.
965 	 * </p>
966 	 */
967 	private void deleteOrphans() {
968 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
969 		List<String> fileNames = null;
970 		try (Stream<Path> files = Files.list(packDir)) {
971 			fileNames = files.map(path -> path.getFileName().toString())
972 					.filter(name -> (name.endsWith(PACK_EXT)
973 							|| name.endsWith(BITMAP_EXT)
974 							|| name.endsWith(INDEX_EXT)))
975 					.sorted(Collections.reverseOrder())
976 					.collect(Collectors.toList());
977 		} catch (IOException e1) {
978 			// ignore
979 		}
980 		if (fileNames == null) {
981 			return;
982 		}
983 
984 		String base = null;
985 		for (String n : fileNames) {
986 			if (n.endsWith(PACK_EXT)) {
987 				base = n.substring(0, n.lastIndexOf('.'));
988 			} else {
989 				if (base == null || !n.startsWith(base)) {
990 					try {
991 						Files.delete(packDir.resolve(n));
992 					} catch (IOException e) {
993 						LOG.error(e.getMessage(), e);
994 					}
995 				}
996 			}
997 		}
998 	}
999 
1000 	private void deleteTempPacksIdx() {
1001 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
1002 		Instant threshold = Instant.now().minus(1, ChronoUnit.DAYS);
1003 		if (!Files.exists(packDir)) {
1004 			return;
1005 		}
1006 		try (DirectoryStream<Path> stream =
1007 				Files.newDirectoryStream(packDir, "gc_*_tmp")) { //$NON-NLS-1$
1008 			stream.forEach(t -> {
1009 				try {
1010 					Instant lastModified = Files.getLastModifiedTime(t)
1011 							.toInstant();
1012 					if (lastModified.isBefore(threshold)) {
1013 						Files.deleteIfExists(t);
1014 					}
1015 				} catch (IOException e) {
1016 					LOG.error(e.getMessage(), e);
1017 				}
1018 			});
1019 		} catch (IOException e) {
1020 			LOG.error(e.getMessage(), e);
1021 		}
1022 	}
1023 
1024 	/**
1025 	 * @param ref
1026 	 *            the ref which log should be inspected
1027 	 * @param minTime only reflog entries not older then this time are processed
1028 	 * @return the {@link ObjectId}s contained in the reflog
1029 	 * @throws IOException
1030 	 */
1031 	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException {
1032 		ReflogReader reflogReader = repo.getReflogReader(ref.getName());
1033 		if (reflogReader == null) {
1034 			return Collections.emptySet();
1035 		}
1036 		List<ReflogEntry> rlEntries = reflogReader
1037 				.getReverseEntries();
1038 		if (rlEntries == null || rlEntries.isEmpty())
1039 			return Collections.emptySet();
1040 		Set<ObjectId> ret = new HashSet<>();
1041 		for (ReflogEntry e : rlEntries) {
1042 			if (e.getWho().getWhen().getTime() < minTime)
1043 				break;
1044 			ObjectId newId = e.getNewId();
1045 			if (newId != null && !ObjectId.zeroId().equals(newId))
1046 				ret.add(newId);
1047 			ObjectId oldId = e.getOldId();
1048 			if (oldId != null && !ObjectId.zeroId().equals(oldId))
1049 				ret.add(oldId);
1050 		}
1051 		return ret;
1052 	}
1053 
1054 	/**
1055 	 * Returns a collection of all refs and additional refs.
1056 	 *
1057 	 * Additional refs which don't start with "refs/" are not returned because
1058 	 * they should not save objects from being garbage collected. Examples for
1059 	 * such references are ORIG_HEAD, MERGE_HEAD, FETCH_HEAD and
1060 	 * CHERRY_PICK_HEAD.
1061 	 *
1062 	 * @return a collection of refs pointing to live objects.
1063 	 * @throws IOException
1064 	 */
1065 	private Collection<Ref> getAllRefs() throws IOException {
1066 		RefDatabase refdb = repo.getRefDatabase();
1067 		Collection<Ref> refs = refdb.getRefs();
1068 		List<Ref> addl = refdb.getAdditionalRefs();
1069 		if (!addl.isEmpty()) {
1070 			List<Ref> all = new ArrayList<>(refs.size() + addl.size());
1071 			all.addAll(refs);
1072 			// add additional refs which start with refs/
1073 			for (Ref r : addl) {
1074 				checkCancelled();
1075 				if (r.getName().startsWith(Constants.R_REFS)) {
1076 					all.add(r);
1077 				}
1078 			}
1079 			return all;
1080 		}
1081 		return refs;
1082 	}
1083 
1084 	/**
1085 	 * Return a list of those objects in the index which differ from whats in
1086 	 * HEAD
1087 	 *
1088 	 * @return a set of ObjectIds of changed objects in the index
1089 	 * @throws IOException
1090 	 * @throws CorruptObjectException
1091 	 * @throws NoWorkTreeException
1092 	 */
1093 	private Set<ObjectId> listNonHEADIndexObjects()
1094 			throws CorruptObjectException, IOException {
1095 		if (repo.isBare()) {
1096 			return Collections.emptySet();
1097 		}
1098 		try (TreeWalk treeWalk = new TreeWalk(repo)) {
1099 			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
1100 			ObjectId headID = repo.resolve(Constants.HEAD);
1101 			if (headID != null) {
1102 				try (RevWalk revWalk = new RevWalk(repo)) {
1103 					treeWalk.addTree(revWalk.parseTree(headID));
1104 				}
1105 			}
1106 
1107 			treeWalk.setFilter(TreeFilter.ANY_DIFF);
1108 			treeWalk.setRecursive(true);
1109 			Set<ObjectId> ret = new HashSet<>();
1110 
1111 			while (treeWalk.next()) {
1112 				checkCancelled();
1113 				ObjectId objectId = treeWalk.getObjectId(0);
1114 				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
1115 				case FileMode.TYPE_MISSING:
1116 				case FileMode.TYPE_GITLINK:
1117 					continue;
1118 				case FileMode.TYPE_TREE:
1119 				case FileMode.TYPE_FILE:
1120 				case FileMode.TYPE_SYMLINK:
1121 					ret.add(objectId);
1122 					continue;
1123 				default:
1124 					throw new IOException(MessageFormat.format(
1125 							JGitText.get().corruptObjectInvalidMode3,
1126 							String.format("%o", //$NON-NLS-1$
1127 									Integer.valueOf(treeWalk.getRawMode(0))),
1128 							(objectId == null) ? "null" : objectId.name(), //$NON-NLS-1$
1129 							treeWalk.getPathString(), //
1130 							repo.getIndexFile()));
1131 				}
1132 			}
1133 			return ret;
1134 		}
1135 	}
1136 
1137 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
1138 			@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
1139 			Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
1140 			throws IOException {
1141 		checkCancelled();
1142 		File tmpPack = null;
1143 		Map<PackExt, File> tmpExts = new TreeMap<>((o1, o2) -> {
1144 			// INDEX entries must be returned last, so the pack
1145 			// scanner does pick up the new pack until all the
1146 			// PackExt entries have been written.
1147 			if (o1 == o2) {
1148 				return 0;
1149 			}
1150 			if (o1 == PackExt.INDEX) {
1151 				return 1;
1152 			}
1153 			if (o2 == PackExt.INDEX) {
1154 				return -1;
1155 			}
1156 			return Integer.signum(o1.hashCode() - o2.hashCode());
1157 		});
1158 		try (PackWriter pw = new PackWriter(
1159 				(pconfig == null) ? new PackConfig(repo) : pconfig,
1160 				repo.newObjectReader())) {
1161 			// prepare the PackWriter
1162 			pw.setDeltaBaseAsOffset(true);
1163 			pw.setReuseDeltaCommits(false);
1164 			if (tagTargets != null) {
1165 				pw.setTagTargets(tagTargets);
1166 			}
1167 			if (excludeObjects != null)
1168 				for (ObjectIdSet idx : excludeObjects)
1169 					pw.excludeObjects(idx);
1170 			pw.preparePack(pm, want, have, PackWriter.NONE, tags);
1171 			if (pw.getObjectCount() == 0)
1172 				return null;
1173 			checkCancelled();
1174 
1175 			// create temporary files
1176 			String id = pw.computeName().getName();
1177 			File packdir = repo.getObjectDatabase().getPackDirectory();
1178 			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir); //$NON-NLS-1$ //$NON-NLS-2$
1179 			final String tmpBase = tmpPack.getName()
1180 					.substring(0, tmpPack.getName().lastIndexOf('.'));
1181 			File tmpIdx = new File(packdir, tmpBase + ".idx_tmp"); //$NON-NLS-1$
1182 			tmpExts.put(INDEX, tmpIdx);
1183 
1184 			if (!tmpIdx.createNewFile())
1185 				throw new IOException(MessageFormat.format(
1186 						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
1187 
1188 			// write the packfile
1189 			try (FileOutputStream fos = new FileOutputStream(tmpPack);
1190 					FileChannel channel = fos.getChannel();
1191 					OutputStream channelStream = Channels
1192 							.newOutputStream(channel)) {
1193 				pw.writePack(pm, pm, channelStream);
1194 				channel.force(true);
1195 			}
1196 
1197 			// write the packindex
1198 			try (FileOutputStream fos = new FileOutputStream(tmpIdx);
1199 					FileChannel idxChannel = fos.getChannel();
1200 					OutputStream idxStream = Channels
1201 							.newOutputStream(idxChannel)) {
1202 				pw.writeIndex(idxStream);
1203 				idxChannel.force(true);
1204 			}
1205 
1206 			if (pw.prepareBitmapIndex(pm)) {
1207 				File tmpBitmapIdx = new File(packdir, tmpBase + ".bitmap_tmp"); //$NON-NLS-1$
1208 				tmpExts.put(BITMAP_INDEX, tmpBitmapIdx);
1209 
1210 				if (!tmpBitmapIdx.createNewFile())
1211 					throw new IOException(MessageFormat.format(
1212 							JGitText.get().cannotCreateIndexfile,
1213 							tmpBitmapIdx.getPath()));
1214 
1215 				try (FileOutputStream fos = new FileOutputStream(tmpBitmapIdx);
1216 						FileChannel idxChannel = fos.getChannel();
1217 						OutputStream idxStream = Channels
1218 								.newOutputStream(idxChannel)) {
1219 					pw.writeBitmapIndex(idxStream);
1220 					idxChannel.force(true);
1221 				}
1222 			}
1223 
1224 			// rename the temporary files to real files
1225 			File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
1226 
1227 			repo.getObjectDatabase().closeAllPackHandles(realPack);
1228 			tmpPack.setReadOnly();
1229 
1230 			FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
1231 			for (Map.Entry<PackExt, File> tmpEntry : tmpExts.entrySet()) {
1232 				File tmpExt = tmpEntry.getValue();
1233 				tmpExt.setReadOnly();
1234 
1235 				File realExt = nameFor(id,
1236 						"." + tmpEntry.getKey().getExtension()); //$NON-NLS-1$
1237 				try {
1238 					FileUtils.rename(tmpExt, realExt,
1239 							StandardCopyOption.ATOMIC_MOVE);
1240 				} catch (IOException e) {
1241 					File newExt = new File(realExt.getParentFile(),
1242 							realExt.getName() + ".new"); //$NON-NLS-1$
1243 					try {
1244 						FileUtils.rename(tmpExt, newExt,
1245 								StandardCopyOption.ATOMIC_MOVE);
1246 					} catch (IOException e2) {
1247 						newExt = tmpExt;
1248 						e = e2;
1249 					}
1250 					throw new IOException(MessageFormat.format(
1251 							JGitText.get().panicCantRenameIndexFile, newExt,
1252 							realExt), e);
1253 				}
1254 			}
1255 
1256 			return repo.getObjectDatabase().openPack(realPack);
1257 		} finally {
1258 			if (tmpPack != null && tmpPack.exists())
1259 				tmpPack.delete();
1260 			for (File tmpExt : tmpExts.values()) {
1261 				if (tmpExt.exists())
1262 					tmpExt.delete();
1263 			}
1264 		}
1265 	}
1266 
1267 	private File nameFor(String name, String ext) {
1268 		File packdir = repo.getObjectDatabase().getPackDirectory();
1269 		return new File(packdir, "pack-" + name + ext); //$NON-NLS-1$
1270 	}
1271 
1272 	private void checkCancelled() throws CancelledException {
1273 		if (pm.isCancelled()) {
1274 			throw new CancelledException(JGitText.get().operationCanceled);
1275 		}
1276 	}
1277 
1278 	/**
1279 	 * A class holding statistical data for a FileRepository regarding how many
1280 	 * objects are stored as loose or packed objects
1281 	 */
1282 	public static class RepoStatistics {
1283 		/**
1284 		 * The number of objects stored in pack files. If the same object is
1285 		 * stored in multiple pack files then it is counted as often as it
1286 		 * occurs in pack files.
1287 		 */
1288 		public long numberOfPackedObjects;
1289 
1290 		/**
1291 		 * The number of pack files
1292 		 */
1293 		public long numberOfPackFiles;
1294 
1295 		/**
1296 		 * The number of objects stored as loose objects.
1297 		 */
1298 		public long numberOfLooseObjects;
1299 
1300 		/**
1301 		 * The sum of the sizes of all files used to persist loose objects.
1302 		 */
1303 		public long sizeOfLooseObjects;
1304 
1305 		/**
1306 		 * The sum of the sizes of all pack files.
1307 		 */
1308 		public long sizeOfPackedObjects;
1309 
1310 		/**
1311 		 * The number of loose refs.
1312 		 */
1313 		public long numberOfLooseRefs;
1314 
1315 		/**
1316 		 * The number of refs stored in pack files.
1317 		 */
1318 		public long numberOfPackedRefs;
1319 
1320 		/**
1321 		 * The number of bitmaps in the bitmap indices.
1322 		 */
1323 		public long numberOfBitmaps;
1324 
1325 		@Override
1326 		public String toString() {
1327 			final StringBuilder b = new StringBuilder();
1328 			b.append("numberOfPackedObjects=").append(numberOfPackedObjects); //$NON-NLS-1$
1329 			b.append(", numberOfPackFiles=").append(numberOfPackFiles); //$NON-NLS-1$
1330 			b.append(", numberOfLooseObjects=").append(numberOfLooseObjects); //$NON-NLS-1$
1331 			b.append(", numberOfLooseRefs=").append(numberOfLooseRefs); //$NON-NLS-1$
1332 			b.append(", numberOfPackedRefs=").append(numberOfPackedRefs); //$NON-NLS-1$
1333 			b.append(", sizeOfLooseObjects=").append(sizeOfLooseObjects); //$NON-NLS-1$
1334 			b.append(", sizeOfPackedObjects=").append(sizeOfPackedObjects); //$NON-NLS-1$
1335 			b.append(", numberOfBitmaps=").append(numberOfBitmaps); //$NON-NLS-1$
1336 			return b.toString();
1337 		}
1338 	}
1339 
1340 	/**
1341 	 * Returns information about objects and pack files for a FileRepository.
1342 	 *
1343 	 * @return information about objects and pack files for a FileRepository
1344 	 * @throws java.io.IOException
1345 	 */
1346 	public RepoStatistics getStatistics() throws IOException {
1347 		RepoStatistics ret = new RepoStatistics();
1348 		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
1349 		for (PackFile f : packs) {
1350 			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
1351 			ret.numberOfPackFiles++;
1352 			ret.sizeOfPackedObjects += f.getPackFile().length();
1353 			if (f.getBitmapIndex() != null)
1354 				ret.numberOfBitmaps += f.getBitmapIndex().getBitmapCount();
1355 		}
1356 		File objDir = repo.getObjectsDirectory();
1357 		String[] fanout = objDir.list();
1358 		if (fanout != null && fanout.length > 0) {
1359 			for (String d : fanout) {
1360 				if (d.length() != 2)
1361 					continue;
1362 				File[] entries = new File(objDir, d).listFiles();
1363 				if (entries == null)
1364 					continue;
1365 				for (File f : entries) {
1366 					if (f.getName().length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
1367 						continue;
1368 					ret.numberOfLooseObjects++;
1369 					ret.sizeOfLooseObjects += f.length();
1370 				}
1371 			}
1372 		}
1373 
1374 		RefDatabase refDb = repo.getRefDatabase();
1375 		for (Ref r : refDb.getRefs()) {
1376 			Storage storage = r.getStorage();
1377 			if (storage == Storage.LOOSE || storage == Storage.LOOSE_PACKED)
1378 				ret.numberOfLooseRefs++;
1379 			if (storage == Storage.PACKED || storage == Storage.LOOSE_PACKED)
1380 				ret.numberOfPackedRefs++;
1381 		}
1382 
1383 		return ret;
1384 	}
1385 
1386 	/**
1387 	 * Set the progress monitor used for garbage collection methods.
1388 	 *
1389 	 * @param pm a {@link org.eclipse.jgit.lib.ProgressMonitor} object.
1390 	 * @return this
1391 	 */
1392 	public GC setProgressMonitor(ProgressMonitor pm) {
1393 		this.pm = (pm == null) ? NullProgressMonitor.INSTANCE : pm;
1394 		return this;
1395 	}
1396 
1397 	/**
1398 	 * During gc() or prune() each unreferenced, loose object which has been
1399 	 * created or modified in the last <code>expireAgeMillis</code> milliseconds
1400 	 * will not be pruned. Only older objects may be pruned. If set to 0 then
1401 	 * every object is a candidate for pruning.
1402 	 *
1403 	 * @param expireAgeMillis
1404 	 *            minimal age of objects to be pruned in milliseconds.
1405 	 */
1406 	public void setExpireAgeMillis(long expireAgeMillis) {
1407 		this.expireAgeMillis = expireAgeMillis;
1408 		expire = null;
1409 	}
1410 
1411 	/**
1412 	 * During gc() or prune() packfiles which are created or modified in the
1413 	 * last <code>packExpireAgeMillis</code> milliseconds will not be deleted.
1414 	 * Only older packfiles may be deleted. If set to 0 then every packfile is a
1415 	 * candidate for deletion.
1416 	 *
1417 	 * @param packExpireAgeMillis
1418 	 *            minimal age of packfiles to be deleted in milliseconds.
1419 	 */
1420 	public void setPackExpireAgeMillis(long packExpireAgeMillis) {
1421 		this.packExpireAgeMillis = packExpireAgeMillis;
1422 		expire = null;
1423 	}
1424 
1425 	/**
1426 	 * Set the PackConfig used when (re-)writing packfiles. This allows to
1427 	 * influence how packs are written and to implement something similar to
1428 	 * "git gc --aggressive"
1429 	 *
1430 	 * @param pconfig
1431 	 *            the {@link org.eclipse.jgit.storage.pack.PackConfig} used when
1432 	 *            writing packs
1433 	 */
1434 	public void setPackConfig(PackConfig pconfig) {
1435 		this.pconfig = pconfig;
1436 	}
1437 
1438 	/**
1439 	 * During gc() or prune() each unreferenced, loose object which has been
1440 	 * created or modified after or at <code>expire</code> will not be pruned.
1441 	 * Only older objects may be pruned. If set to null then every object is a
1442 	 * candidate for pruning.
1443 	 *
1444 	 * @param expire
1445 	 *            instant in time which defines object expiration
1446 	 *            objects with modification time before this instant are expired
1447 	 *            objects with modification time newer or equal to this instant
1448 	 *            are not expired
1449 	 */
1450 	public void setExpire(Date expire) {
1451 		this.expire = expire;
1452 		expireAgeMillis = -1;
1453 	}
1454 
1455 	/**
1456 	 * During gc() or prune() packfiles which are created or modified after or
1457 	 * at <code>packExpire</code> will not be deleted. Only older packfiles may
1458 	 * be deleted. If set to null then every packfile is a candidate for
1459 	 * deletion.
1460 	 *
1461 	 * @param packExpire
1462 	 *            instant in time which defines packfile expiration
1463 	 */
1464 	public void setPackExpire(Date packExpire) {
1465 		this.packExpire = packExpire;
1466 		packExpireAgeMillis = -1;
1467 	}
1468 
1469 	/**
1470 	 * Set the {@code gc --auto} option.
1471 	 *
1472 	 * With this option, gc checks whether any housekeeping is required; if not,
1473 	 * it exits without performing any work. Some JGit commands run
1474 	 * {@code gc --auto} after performing operations that could create many
1475 	 * loose objects.
1476 	 * <p>
1477 	 * Housekeeping is required if there are too many loose objects or too many
1478 	 * packs in the repository. If the number of loose objects exceeds the value
1479 	 * of the gc.auto option JGit GC consolidates all existing packs into a
1480 	 * single pack (equivalent to {@code -A} option), whereas git-core would
1481 	 * combine all loose objects into a single pack using {@code repack -d -l}.
1482 	 * Setting the value of {@code gc.auto} to 0 disables automatic packing of
1483 	 * loose objects.
1484 	 * <p>
1485 	 * If the number of packs exceeds the value of {@code gc.autoPackLimit},
1486 	 * then existing packs (except those marked with a .keep file) are
1487 	 * consolidated into a single pack by using the {@code -A} option of repack.
1488 	 * Setting {@code gc.autoPackLimit} to 0 disables automatic consolidation of
1489 	 * packs.
1490 	 * <p>
1491 	 * Like git the following jgit commands run auto gc:
1492 	 * <ul>
1493 	 * <li>fetch</li>
1494 	 * <li>merge</li>
1495 	 * <li>rebase</li>
1496 	 * <li>receive-pack</li>
1497 	 * </ul>
1498 	 * The auto gc for receive-pack can be suppressed by setting the config
1499 	 * option {@code receive.autogc = false}
1500 	 *
1501 	 * @param auto
1502 	 *            defines whether gc should do automatic housekeeping
1503 	 */
1504 	public void setAuto(boolean auto) {
1505 		this.automatic = auto;
1506 	}
1507 
1508 	/**
1509 	 * @param background
1510 	 *            whether to run the gc in a background thread.
1511 	 */
1512 	void setBackground(boolean background) {
1513 		this.background = background;
1514 	}
1515 
1516 	private boolean needGc() {
1517 		if (tooManyPacks()) {
1518 			addRepackAllOption();
1519 		} else {
1520 			return tooManyLooseObjects();
1521 		}
1522 		// TODO run pre-auto-gc hook, if it fails return false
1523 		return true;
1524 	}
1525 
1526 	private void addRepackAllOption() {
1527 		// TODO: if JGit GC is enhanced to support repack's option -l this
1528 		// method needs to be implemented
1529 	}
1530 
1531 	/**
1532 	 * @return {@code true} if number of packs > gc.autopacklimit (default 50)
1533 	 */
1534 	boolean tooManyPacks() {
1535 		int autopacklimit = repo.getConfig().getInt(
1536 				ConfigConstants.CONFIG_GC_SECTION,
1537 				ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
1538 				DEFAULT_AUTOPACKLIMIT);
1539 		if (autopacklimit <= 0) {
1540 			return false;
1541 		}
1542 		// JGit always creates two packfiles, one for the objects reachable from
1543 		// branches, and another one for the rest
1544 		return repo.getObjectDatabase().getPacks().size() > (autopacklimit + 1);
1545 	}
1546 
1547 	/**
1548 	 * Quickly estimate number of loose objects, SHA1 is distributed evenly so
1549 	 * counting objects in one directory (bucket 17) is sufficient
1550 	 *
1551 	 * @return {@code true} if number of loose objects > gc.auto (default 6700)
1552 	 */
1553 	boolean tooManyLooseObjects() {
1554 		int auto = getLooseObjectLimit();
1555 		if (auto <= 0) {
1556 			return false;
1557 		}
1558 		int n = 0;
1559 		int threshold = (auto + 255) / 256;
1560 		Path dir = repo.getObjectsDirectory().toPath().resolve("17"); //$NON-NLS-1$
1561 		if (!dir.toFile().exists()) {
1562 			return false;
1563 		}
1564 		try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, file -> {
1565 					Path fileName = file.getFileName();
1566 					return file.toFile().isFile() && fileName != null
1567 							&& PATTERN_LOOSE_OBJECT.matcher(fileName.toString())
1568 									.matches();
1569 				})) {
1570 			for (Iterator<Path> iter = stream.iterator(); iter.hasNext(); iter
1571 					.next()) {
1572 				if (++n > threshold) {
1573 					return true;
1574 				}
1575 			}
1576 		} catch (IOException e) {
1577 			LOG.error(e.getMessage(), e);
1578 		}
1579 		return false;
1580 	}
1581 
1582 	private int getLooseObjectLimit() {
1583 		return repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
1584 				ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
1585 	}
1586 }