1   /*
2    * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
3    * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  package org.eclipse.jgit.internal.storage.file;
45  
46  import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
47  import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
48  
49  import java.io.File;
50  import java.io.FileOutputStream;
51  import java.io.IOException;
52  import java.io.OutputStream;
53  import java.io.PrintWriter;
54  import java.io.StringWriter;
55  import java.nio.channels.Channels;
56  import java.nio.channels.FileChannel;
57  import java.nio.file.DirectoryStream;
58  import java.nio.file.Files;
59  import java.nio.file.Path;
60  import java.nio.file.StandardCopyOption;
61  import java.text.MessageFormat;
62  import java.text.ParseException;
63  import java.time.Instant;
64  import java.time.temporal.ChronoUnit;
65  import java.util.ArrayList;
66  import java.util.Collection;
67  import java.util.Collections;
68  import java.util.Comparator;
69  import java.util.Date;
70  import java.util.HashMap;
71  import java.util.HashSet;
72  import java.util.Iterator;
73  import java.util.LinkedList;
74  import java.util.List;
75  import java.util.Map;
76  import java.util.Objects;
77  import java.util.Set;
78  import java.util.TreeMap;
79  import java.util.concurrent.Callable;
80  import java.util.concurrent.ExecutionException;
81  import java.util.concurrent.ExecutorService;
82  import java.util.concurrent.Future;
83  import java.util.regex.Pattern;
84  import java.util.stream.Collectors;
85  import java.util.stream.Stream;
86  
87  import org.eclipse.jgit.annotations.NonNull;
88  import org.eclipse.jgit.api.errors.JGitInternalException;
89  import org.eclipse.jgit.dircache.DirCacheIterator;
90  import org.eclipse.jgit.errors.CancelledException;
91  import org.eclipse.jgit.errors.CorruptObjectException;
92  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
93  import org.eclipse.jgit.errors.MissingObjectException;
94  import org.eclipse.jgit.errors.NoWorkTreeException;
95  import org.eclipse.jgit.internal.JGitText;
96  import org.eclipse.jgit.internal.storage.pack.PackExt;
97  import org.eclipse.jgit.internal.storage.pack.PackWriter;
98  import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
99  import org.eclipse.jgit.lib.ConfigConstants;
100 import org.eclipse.jgit.lib.Constants;
101 import org.eclipse.jgit.lib.FileMode;
102 import org.eclipse.jgit.lib.NullProgressMonitor;
103 import org.eclipse.jgit.lib.ObjectId;
104 import org.eclipse.jgit.lib.ObjectIdSet;
105 import org.eclipse.jgit.lib.ObjectLoader;
106 import org.eclipse.jgit.lib.ObjectReader;
107 import org.eclipse.jgit.lib.ProgressMonitor;
108 import org.eclipse.jgit.lib.Ref;
109 import org.eclipse.jgit.lib.Ref.Storage;
110 import org.eclipse.jgit.lib.RefDatabase;
111 import org.eclipse.jgit.lib.ReflogEntry;
112 import org.eclipse.jgit.lib.ReflogReader;
113 import org.eclipse.jgit.lib.internal.WorkQueue;
114 import org.eclipse.jgit.revwalk.ObjectWalk;
115 import org.eclipse.jgit.revwalk.RevObject;
116 import org.eclipse.jgit.revwalk.RevWalk;
117 import org.eclipse.jgit.storage.pack.PackConfig;
118 import org.eclipse.jgit.treewalk.TreeWalk;
119 import org.eclipse.jgit.treewalk.filter.TreeFilter;
120 import org.eclipse.jgit.util.FileUtils;
121 import org.eclipse.jgit.util.GitDateParser;
122 import org.eclipse.jgit.util.SystemReader;
123 import org.slf4j.Logger;
124 import org.slf4j.LoggerFactory;
125 
126 /**
127  * A garbage collector for git
128  * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. Instances of
129  * this class are not thread-safe. Don't use the same instance from multiple
130  * threads.
131  *
132  * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
133  * adapted to FileRepositories.
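 * <p>
 * A minimal usage sketch (illustrative only; assumes an already opened
 * {@link FileRepository} named {@code repository} and that the caller handles
 * the checked exceptions declared by {@link #gc()}):
 *
 * <pre>{@code
 * GC gc = new GC(repository);
 * gc.setProgressMonitor(NullProgressMonitor.INSTANCE);
 * Collection<PackFile> newPacks = gc.gc();
 * }</pre>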
134  */
135 public class GC {
136 	private static final Logger LOG = LoggerFactory
137 			.getLogger(GC.class);
138 
139 	private static final String PRUNE_EXPIRE_DEFAULT = "2.weeks.ago"; //$NON-NLS-1$
140 
141 	private static final String PRUNE_PACK_EXPIRE_DEFAULT = "1.hour.ago"; //$NON-NLS-1$
142 
143 	private static final Pattern PATTERN_LOOSE_OBJECT = Pattern
144 			.compile("[0-9a-fA-F]{38}"); //$NON-NLS-1$
145 
146 	private static final String PACK_EXT = "." + PackExt.PACK.getExtension();//$NON-NLS-1$
147 
148 	private static final String BITMAP_EXT = "." //$NON-NLS-1$
149 			+ PackExt.BITMAP_INDEX.getExtension();
150 
151 	private static final String INDEX_EXT = "." + PackExt.INDEX.getExtension(); //$NON-NLS-1$
152 
153 	private static final int DEFAULT_AUTOPACKLIMIT = 50;
154 
155 	private static final int DEFAULT_AUTOLIMIT = 6700;
156 
157 	private static volatile ExecutorService executor;
158 
159 	/**
160 	 * Set the executor for running auto-gc in the background. If no executor is
161 	 * set, JGit's own WorkQueue will be used.
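	 * <p>
	 * A small sketch (illustrative; {@code Executors} is the standard
	 * {@code java.util.concurrent.Executors} factory):
	 *
	 * <pre>{@code
	 * GC.setExecutor(Executors.newSingleThreadExecutor());
	 * }</pre>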
162 	 *
163 	 * @param e
164 	 *            the executor to be used for running auto-gc
165 	 * @since 4.8
166 	 */
167 	public static void setExecutor(ExecutorService e) {
168 		executor = e;
169 	}
170 
171 	private final FileRepository repo;
172 
173 	private ProgressMonitor pm;
174 
175 	private long expireAgeMillis = -1;
176 
177 	private Date expire;
178 
179 	private long packExpireAgeMillis = -1;
180 
181 	private Date packExpire;
182 
183 	private PackConfig pconfig = null;
184 
185 	/**
186 	 * The refs which existed during the last call to {@link #repack()}. This is
187 	 * needed during {@link #prune(Set)}, where we can optimize by looking at the
188 	 * difference between the current refs and the refs which existed during the
189 	 * last {@link #repack()}.
190 	 */
191 	private Collection<Ref> lastPackedRefs;
192 
193 	/**
194 	 * Holds the starting time of the last repack() execution. This is needed in
195 	 * prune() to inspect only those reflog entries which have been added since
196 	 * last repack().
197 	 */
198 	private long lastRepackTime;
199 
200 	/**
201 	 * Whether gc should do automatic housekeeping
202 	 */
203 	private boolean automatic;
204 
205 	/**
206 	 * Whether to run gc in a background thread
207 	 */
208 	private boolean background;
209 
210 	/**
211 	 * Creates a new garbage collector with default values: an expirationTime of
212 	 * two weeks and a {@link NullProgressMonitor} as progress monitor.
213 	 *
214 	 * @param repo
215 	 *            the repo to work on
216 	 */
217 	public GC(FileRepository repo) {
218 		this.repo = repo;
219 		this.pm = NullProgressMonitor.INSTANCE;
220 	}
221 
222 	/**
223 	 * Runs a garbage collector on a
224 	 * {@link org.eclipse.jgit.internal.storage.file.FileRepository}. It will
225 	 * <ul>
226 	 * <li>pack loose references into packed-refs</li>
227 	 * <li>repack all reachable objects into new pack files and delete the old
228 	 * pack files</li>
229 	 * <li>prune all loose objects which are now stored in pack files</li>
230 	 * </ul>
231 	 *
232 	 * If {@link #setAuto(boolean)} was set to {@code true}, {@code gc} will
233 	 * first check whether any housekeeping is required; if not, it exits
234 	 * without performing any work.
235 	 *
236 	 * If {@link #setBackground(boolean)} was set to {@code true},
237 	 * {@code collectGarbage} will start the gc in the background, and then
238 	 * return immediately. In this case, errors will not be reported except in
239 	 * gc.log.
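	 * <p>
	 * A sketch of a typical foreground call (illustrative; assumes a {@code GC}
	 * instance named {@code gc}):
	 *
	 * <pre>{@code
	 * try {
	 * 	Collection<PackFile> newPacks = gc.gc();
	 * } catch (IOException | ParseException e) {
	 * 	// gc failed, or gc.pruneexpire / gc.prunepackexpire could not be parsed
	 * }
	 * }</pre>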
240 	 *
241 	 * @return the collection of
242 	 *         {@link org.eclipse.jgit.internal.storage.file.PackFile}'s which
243 	 *         are newly created
244 	 * @throws java.io.IOException
245 	 * @throws java.text.ParseException
246 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
247 	 *             parsed
248 	 */
249 	// TODO(ms): in 5.0 change signature and return Future<Collection<PackFile>>
250 	public Collection<PackFile> gc() throws IOException, ParseException {
251 		final GcLog gcLog = background ? new GcLog(repo) : null;
252 		if (gcLog != null && !gcLog.lock(background)) {
253 			// there is already a background gc running
254 			return Collections.emptyList();
255 		}
256 
257 		Callable<Collection<PackFile>> gcTask = () -> {
258 			try {
259 				Collection<PackFile> newPacks = doGc();
260 				if (automatic && tooManyLooseObjects() && gcLog != null) {
261 					String message = JGitText.get().gcTooManyUnpruned;
262 					gcLog.write(message);
263 					gcLog.commit();
264 				}
265 				return newPacks;
266 			} catch (IOException | ParseException e) {
267 				if (background) {
268 					if (gcLog == null) {
269 						// Lacking a log, there's no way to report this.
270 						return Collections.emptyList();
271 					}
272 					try {
273 						gcLog.write(e.getMessage());
274 						StringWriter sw = new StringWriter();
275 						e.printStackTrace(new PrintWriter(sw));
276 						gcLog.write(sw.toString());
277 						gcLog.commit();
278 					} catch (IOException e2) {
279 						e2.addSuppressed(e);
280 						LOG.error(e2.getMessage(), e2);
281 					}
282 				} else {
283 					throw new JGitInternalException(e.getMessage(), e);
284 				}
285 			} finally {
286 				if (gcLog != null) {
287 					gcLog.unlock();
288 				}
289 			}
290 			return Collections.emptyList();
291 		};
292 		Future<Collection<PackFile>> result = executor().submit(gcTask);
293 		if (background) {
294 			// TODO(ms): in 5.0 change signature and return the Future
295 			return Collections.emptyList();
296 		}
297 		try {
298 			return result.get();
299 		} catch (InterruptedException | ExecutionException e) {
300 			throw new IOException(e);
301 		}
302 	}
303 
304 	private ExecutorService executor() {
305 		return (executor != null) ? executor : WorkQueue.getExecutor();
306 	}
307 
308 	private Collection<PackFile> doGc() throws IOException, ParseException {
309 		if (automatic && !needGc()) {
310 			return Collections.emptyList();
311 		}
312 		pm.start(6 /* tasks */);
313 		packRefs();
314 		// TODO: implement reflog_expire(pm, repo);
315 		Collection<PackFile> newPacks = repack();
316 		prune(Collections.emptySet());
317 		// TODO: implement rerere_gc(pm);
318 		return newPacks;
319 	}
320 
321 	/**
322 	 * Loosen objects in a pack file which are not also in the newly-created
323 	 * pack files.
324 	 *
325 	 * @param inserter
326 	 * @param reader
327 	 * @param pack
328 	 * @param existing
329 	 * @throws IOException
330 	 */
331 	private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader, PackFile pack, HashSet<ObjectId> existing)
332 			throws IOException {
333 		for (PackIndex.MutableEntry entry : pack) {
334 			ObjectId oid = entry.toObjectId();
335 			if (existing.contains(oid)) {
336 				continue;
337 			}
338 			existing.add(oid);
339 			ObjectLoader loader = reader.open(oid);
340 			inserter.insert(loader.getType(),
341 					loader.getSize(),
342 					loader.openStream(),
343 					true /* create this object even though it's a duplicate */);
344 		}
345 	}
346 
347 	/**
348 	 * Delete old pack files. What is 'old' is defined by specifying a set of
349 	 * old pack files and a set of new pack files. Each pack file contained in
350 	 * old pack files but not contained in new pack files will be deleted. If
351 	 * preserveOldPacks is set, keep a copy of the pack file in the preserve
352 	 * directory. If an expirationDate is set then pack files which are younger
353 	 * than the expirationDate will not be deleted nor preserved.
354 	 * <p>
355 	 * If we're not immediately expiring loose objects, loosen any objects
356 	 * in the old pack files which aren't in the new pack files.
357 	 *
358 	 * @param oldPacks
359 	 * @param newPacks
360 	 * @throws ParseException
361 	 * @throws IOException
362 	 */
363 	private void deleteOldPacks(Collection<PackFile> oldPacks,
364 			Collection<PackFile> newPacks) throws ParseException, IOException {
365 		HashSet<ObjectId> ids = new HashSet<>();
366 		for (PackFile pack : newPacks) {
367 			for (PackIndex.MutableEntry entry : pack) {
368 				ids.add(entry.toObjectId());
369 			}
370 		}
371 		ObjectReader reader = repo.newObjectReader();
372 		ObjectDirectory dir = repo.getObjectDatabase();
373 		ObjectDirectoryInserter inserter = dir.newInserter();
374 		boolean shouldLoosen = !"now".equals(getPruneExpireStr()) && //$NON-NLS-1$
375 			getExpireDate() < Long.MAX_VALUE;
376 
377 		prunePreserved();
378 		long packExpireDate = getPackExpireDate();
379 		oldPackLoop: for (PackFile oldPack : oldPacks) {
380 			checkCancelled();
381 			String oldName = oldPack.getPackName();
382 			// check whether an old pack file is also among the list of new
383 			// pack files. Then we must not delete it.
384 			for (PackFile newPack : newPacks)
385 				if (oldName.equals(newPack.getPackName()))
386 					continue oldPackLoop;
387 
388 			if (!oldPack.shouldBeKept()
389 					&& repo.getFS().lastModified(
390 							oldPack.getPackFile()) < packExpireDate) {
391 				oldPack.close();
392 				if (shouldLoosen) {
393 					loosen(inserter, reader, oldPack, ids);
394 				}
395 				prunePack(oldName);
396 			}
397 		}
398 
399 		// Close the complete object database. That's the only chance to force
400 		// rescanning and to detect that certain pack files are now deleted.
401 		repo.getObjectDatabase().close();
402 	}
403 
404 	/**
405 	 * Deletes an old pack file, unless 'preserve-oldpacks' is set, in which
406 	 * case it moves the pack file to the preserved directory.
407 	 *
408 	 * @param packFile
409 	 * @param packName
410 	 * @param ext
411 	 * @param deleteOptions
412 	 * @throws IOException
413 	 */
414 	private void removeOldPack(File packFile, String packName, PackExt ext,
415 			int deleteOptions) throws IOException {
416 		if (pconfig != null && pconfig.isPreserveOldPacks()) {
417 			File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
418 			FileUtils.mkdir(oldPackDir, true);
419 
420 			String oldPackName = "pack-" + packName + ".old-" + ext.getExtension();  //$NON-NLS-1$ //$NON-NLS-2$
421 			File oldPackFile = new File(oldPackDir, oldPackName);
422 			FileUtils.rename(packFile, oldPackFile);
423 		} else {
424 			FileUtils.delete(packFile, deleteOptions);
425 		}
426 	}
427 
428 	/**
429 	 * Delete the preserved directory including all pack files within
430 	 */
431 	private void prunePreserved() {
432 		if (pconfig != null && pconfig.isPrunePreserved()) {
433 			try {
434 				FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
435 						FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
436 			} catch (IOException e) {
437 				// Deletion of the preserved pack files failed. Silently return.
438 			}
439 		}
440 	}
441 
442 	/**
443 	 * Delete files associated with a single pack file. First try to delete the
444 	 * ".pack" file because on some platforms the ".pack" file may be locked and
445 	 * can't be deleted. In such a case it is better to detect this early and
446 	 * give up on deleting files for this packfile. Otherwise we may delete the
447 	 * ".index" file and when failing to delete the ".pack" file we are left
448 	 * with a ".pack" file without a ".index" file.
449 	 *
450 	 * @param packName
451 	 */
452 	private void prunePack(String packName) {
453 		PackExt[] extensions = PackExt.values();
454 		try {
455 			// Delete the .pack file first and if this fails give up on deleting
456 			// the other files
457 			int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
458 			for (PackExt ext : extensions)
459 				if (PackExt.PACK.equals(ext)) {
460 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
461 					removeOldPack(f, packName, ext, deleteOptions);
462 					break;
463 				}
464 			// The .pack file has been deleted. Delete as many of the other
465 			// files as you can.
466 			deleteOptions |= FileUtils.IGNORE_ERRORS;
467 			for (PackExt ext : extensions) {
468 				if (!PackExt.PACK.equals(ext)) {
469 					File f = nameFor(packName, "." + ext.getExtension()); //$NON-NLS-1$
470 					removeOldPack(f, packName, ext, deleteOptions);
471 				}
472 			}
473 		} catch (IOException e) {
474 			// Deletion of the .pack file failed. Silently return.
475 		}
476 	}
477 
478 	/**
479 	 * Like "git prune-packed" this method tries to prune all loose objects
480 	 * which can be found in packs. If certain objects can't be pruned (e.g.
481 	 * because the filesystem delete operation fails) this is silently ignored.
482 	 *
483 	 * @throws java.io.IOException
484 	 */
485 	public void prunePacked() throws IOException {
486 		ObjectDirectory objdb = repo.getObjectDatabase();
487 		Collection<PackFile> packs = objdb.getPacks();
488 		File objects = repo.getObjectsDirectory();
489 		String[] fanout = objects.list();
490 
491 		if (fanout != null && fanout.length > 0) {
492 			pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
493 			try {
494 				for (String d : fanout) {
495 					checkCancelled();
496 					pm.update(1);
497 					if (d.length() != 2)
498 						continue;
499 					String[] entries = new File(objects, d).list();
500 					if (entries == null)
501 						continue;
502 					for (String e : entries) {
503 						checkCancelled();
504 						if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
505 							continue;
506 						ObjectId id;
507 						try {
508 							id = ObjectId.fromString(d + e);
509 						} catch (IllegalArgumentException notAnObject) {
510 							// ignoring the file that does not represent loose
511 							// object
512 							continue;
513 						}
514 						boolean found = false;
515 						for (PackFile p : packs) {
516 							checkCancelled();
517 							if (p.hasObject(id)) {
518 								found = true;
519 								break;
520 							}
521 						}
522 						if (found)
523 							FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
524 									| FileUtils.SKIP_MISSING
525 									| FileUtils.IGNORE_ERRORS);
526 					}
527 				}
528 			} finally {
529 				pm.endTask();
530 			}
531 		}
532 	}
533 
534 	/**
535 	 * Like "git prune" this method tries to prune all loose objects which are
536 	 * unreferenced. If certain objects can't be pruned (e.g. because the
537 	 * filesystem delete operation fails) this is silently ignored.
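	 * <p>
	 * A sketch (illustrative; {@code repository} is the repository being
	 * collected and the two-week cut-off is just an example value):
	 *
	 * <pre>{@code
	 * GC gc = new GC(repository);
	 * gc.setExpireAgeMillis(14L * 24 * 60 * 60 * 1000); // keep objects younger than 14 days
	 * gc.prune(Collections.emptySet());
	 * }</pre>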
538 	 *
539 	 * @param objectsToKeep
540 	 *            a set of objects which should explicitly not be pruned
541 	 * @throws java.io.IOException
542 	 * @throws java.text.ParseException
543 	 *             If the configuration parameter "gc.pruneexpire" couldn't be
544 	 *             parsed
545 	 */
546 	public void prune(Set<ObjectId> objectsToKeep) throws IOException,
547 			ParseException {
548 		long expireDate = getExpireDate();
549 
550 		// Collect all loose objects which are old enough, not referenced from
551 		// the index and not in objectsToKeep
552 		Map<ObjectId, File> deletionCandidates = new HashMap<>();
553 		Set<ObjectId> indexObjects = null;
554 		File objects = repo.getObjectsDirectory();
555 		String[] fanout = objects.list();
556 		if (fanout == null || fanout.length == 0) {
557 			return;
558 		}
559 		pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
560 				fanout.length);
561 		try {
562 			for (String d : fanout) {
563 				checkCancelled();
564 				pm.update(1);
565 				if (d.length() != 2)
566 					continue;
567 				File[] entries = new File(objects, d).listFiles();
568 				if (entries == null)
569 					continue;
570 				for (File f : entries) {
571 					checkCancelled();
572 					String fName = f.getName();
573 					if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
574 						continue;
575 					if (repo.getFS().lastModified(f) >= expireDate)
576 						continue;
577 					try {
578 						ObjectId id = ObjectId.fromString(d + fName);
579 						if (objectsToKeep.contains(id))
580 							continue;
581 						if (indexObjects == null)
582 							indexObjects = listNonHEADIndexObjects();
583 						if (indexObjects.contains(id))
584 							continue;
585 						deletionCandidates.put(id, f);
586 					} catch (IllegalArgumentException notAnObject) {
587 						// ignoring the file that does not represent loose
588 						// object
589 					}
590 				}
591 			}
592 		} finally {
593 			pm.endTask();
594 		}
595 
596 		if (deletionCandidates.isEmpty()) {
597 			return;
598 		}
599 
600 		checkCancelled();
601 
602 		// From the set of current refs remove all those which have been handled
603 		// during last repack(). Only those refs will survive which have been
604 		// added or modified since the last repack. Only these can save existing
605 		// loose objects from being pruned.
606 		Collection<Ref> newRefs;
607 		if (lastPackedRefs == null || lastPackedRefs.isEmpty())
608 			newRefs = getAllRefs();
609 		else {
610 			Map<String, Ref> last = new HashMap<>();
611 			for (Ref r : lastPackedRefs) {
612 				last.put(r.getName(), r);
613 			}
614 			newRefs = new ArrayList<>();
615 			for (Ref r : getAllRefs()) {
616 				Ref old = last.get(r.getName());
617 				if (!equals(r, old)) {
618 					newRefs.add(r);
619 				}
620 			}
621 		}
622 
623 		if (!newRefs.isEmpty()) {
624 			// There are new/modified refs! Check which loose objects are now
625 			// referenced by these modified refs (or their reflogentries).
626 			// Remove these loose objects
627 			// from the deletionCandidates. When the last candidate is removed
628 			// leave this method.
629 			ObjectWalk w = new ObjectWalk(repo);
630 			try {
631 				for (Ref cr : newRefs) {
632 					checkCancelled();
633 					w.markStart(w.parseAny(cr.getObjectId()));
634 				}
635 				if (lastPackedRefs != null)
636 					for (Ref lpr : lastPackedRefs) {
637 						w.markUninteresting(w.parseAny(lpr.getObjectId()));
638 					}
639 				removeReferenced(deletionCandidates, w);
640 			} finally {
641 				w.dispose();
642 			}
643 		}
644 
645 		if (deletionCandidates.isEmpty())
646 			return;
647 
648 		// Since we have not left the method yet there are still
649 		// deletionCandidates. Last chance for these objects not to be pruned is
650 		// that they are referenced by reflog entries. Even refs which currently
651 		// point to the same object as during last repack() may have
652 		// additional reflog entries not handled during last repack()
653 		ObjectWalk w = new ObjectWalk(repo);
654 		try {
655 			for (Ref ar : getAllRefs())
656 				for (ObjectId id : listRefLogObjects(ar, lastRepackTime)) {
657 					checkCancelled();
658 					w.markStart(w.parseAny(id));
659 				}
660 			if (lastPackedRefs != null)
661 				for (Ref lpr : lastPackedRefs) {
662 					checkCancelled();
663 					w.markUninteresting(w.parseAny(lpr.getObjectId()));
664 				}
665 			removeReferenced(deletionCandidates, w);
666 		} finally {
667 			w.dispose();
668 		}
669 
670 		if (deletionCandidates.isEmpty())
671 			return;
672 
673 		checkCancelled();
674 
675 		// delete all candidates which have survived: these are unreferenced
676 		// loose objects. Make a last check, though, to avoid deleting objects
677 		// that could have been referenced while the candidates list was being
678 		// built (by an incoming push, for example).
679 		Set<File> touchedFanout = new HashSet<>();
680 		for (File f : deletionCandidates.values()) {
681 			if (f.lastModified() < expireDate) {
682 				f.delete();
683 				touchedFanout.add(f.getParentFile());
684 			}
685 		}
686 
687 		for (File f : touchedFanout) {
688 			FileUtils.delete(f,
689 					FileUtils.EMPTY_DIRECTORIES_ONLY | FileUtils.IGNORE_ERRORS);
690 		}
691 
692 		repo.getObjectDatabase().close();
693 	}
694 
695 	private long getExpireDate() throws ParseException {
696 		long expireDate = Long.MAX_VALUE;
697 
698 		if (expire == null && expireAgeMillis == -1) {
699 			String pruneExpireStr = getPruneExpireStr();
700 			if (pruneExpireStr == null)
701 				pruneExpireStr = PRUNE_EXPIRE_DEFAULT;
702 			expire = GitDateParser.parse(pruneExpireStr, null, SystemReader
703 					.getInstance().getLocale());
704 			expireAgeMillis = -1;
705 		}
706 		if (expire != null)
707 			expireDate = expire.getTime();
708 		if (expireAgeMillis != -1)
709 			expireDate = System.currentTimeMillis() - expireAgeMillis;
710 		return expireDate;
711 	}
712 
713 	private String getPruneExpireStr() {
714 		return repo.getConfig().getString(
715 				ConfigConstants.CONFIG_GC_SECTION, null,
716 				ConfigConstants.CONFIG_KEY_PRUNEEXPIRE);
717 	}
718 
719 	private long getPackExpireDate() throws ParseException {
720 		long packExpireDate = Long.MAX_VALUE;
721 
722 		if (packExpire == null && packExpireAgeMillis == -1) {
723 			String prunePackExpireStr = repo.getConfig().getString(
724 					ConfigConstants.CONFIG_GC_SECTION, null,
725 					ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE);
726 			if (prunePackExpireStr == null)
727 				prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT;
728 			packExpire = GitDateParser.parse(prunePackExpireStr, null,
729 					SystemReader.getInstance().getLocale());
730 			packExpireAgeMillis = -1;
731 		}
732 		if (packExpire != null)
733 			packExpireDate = packExpire.getTime();
734 		if (packExpireAgeMillis != -1)
735 			packExpireDate = System.currentTimeMillis() - packExpireAgeMillis;
736 		return packExpireDate;
737 	}
738 
739 	/**
740 	 * Remove all entries from a map whose key is the id of an object referenced
741 	 * by the given ObjectWalk.
742 	 *
743 	 * @param id2File
744 	 * @param w
745 	 * @throws MissingObjectException
746 	 * @throws IncorrectObjectTypeException
747 	 * @throws IOException
748 	 */
749 	private void removeReferenced(Map<ObjectId, File> id2File,
750 			ObjectWalk w) throws MissingObjectException,
751 			IncorrectObjectTypeException, IOException {
752 		RevObject ro = w.next();
753 		while (ro != null) {
754 			checkCancelled();
755 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
756 				return;
757 			}
758 			ro = w.next();
759 		}
760 		ro = w.nextObject();
761 		while (ro != null) {
762 			checkCancelled();
763 			if (id2File.remove(ro.getId()) != null && id2File.isEmpty()) {
764 				return;
765 			}
766 			ro = w.nextObject();
767 		}
768 	}
769 
770 	private static boolean equals(Ref r1, Ref r2) {
771 		if (r1 == null || r2 == null) {
772 			return false;
773 		}
774 		if (r1.isSymbolic()) {
775 			return r2.isSymbolic() && r1.getTarget().getName()
776 					.equals(r2.getTarget().getName());
777 		}
778 		return !r2.isSymbolic()
779 				&& Objects.equals(r1.getObjectId(), r2.getObjectId());
780 	}
781 
782 	/**
783 	 * Packs all non-symbolic, loose refs into packed-refs.
784 	 *
785 	 * @throws java.io.IOException
786 	 */
787 	public void packRefs() throws IOException {
788 		Collection<Ref> refs = repo.getRefDatabase()
789 				.getRefsByPrefix(Constants.R_REFS);
790 		List<String> refsToBePacked = new ArrayList<>(refs.size());
791 		pm.beginTask(JGitText.get().packRefs, refs.size());
792 		try {
793 			for (Ref ref : refs) {
794 				checkCancelled();
795 				if (!ref.isSymbolic() && ref.getStorage().isLoose())
796 					refsToBePacked.add(ref.getName());
797 				pm.update(1);
798 			}
799 			((RefDirectory) repo.getRefDatabase()).pack(refsToBePacked);
800 		} finally {
801 			pm.endTask();
802 		}
803 	}
804 
805 	/**
806 	 * Packs all objects which are reachable from any of the heads into one pack
807 	 * file. Additionally all objects which are not reachable from any head but
808 	 * which are reachable from any of the other refs (e.g. tags), special refs
809 	 * (e.g. FETCH_HEAD) or index are packed into a separate pack file. Objects
810 	 * included in pack files which have a .keep file associated are never
811 	 * repacked. All old pack files which existed before are deleted.
812 	 *
813 	 * @return a collection of the newly created pack files
814 	 * @throws java.io.IOException
815 	 *             if an {@link java.io.IOException} occurs while reading refs,
816 	 *             the index, pack files, objects or reflog entries, or while
817 	 *             writing to the new pack files
818 	 */
819 	public Collection<PackFile> repack() throws IOException {
820 		Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
821 
822 		long time = System.currentTimeMillis();
823 		Collection<Ref> refsBefore = getAllRefs();
824 
825 		Set<ObjectId> allHeadsAndTags = new HashSet<>();
826 		Set<ObjectId> allHeads = new HashSet<>();
827 		Set<ObjectId> allTags = new HashSet<>();
828 		Set<ObjectId> nonHeads = new HashSet<>();
829 		Set<ObjectId> txnHeads = new HashSet<>();
830 		Set<ObjectId> tagTargets = new HashSet<>();
831 		Set<ObjectId> indexObjects = listNonHEADIndexObjects();
832 		RefDatabase refdb = repo.getRefDatabase();
833 
834 		for (Ref ref : refsBefore) {
835 			checkCancelled();
836 			nonHeads.addAll(listRefLogObjects(ref, 0));
837 			if (ref.isSymbolic() || ref.getObjectId() == null) {
838 				continue;
839 			}
840 			if (isHead(ref)) {
841 				allHeads.add(ref.getObjectId());
842 			} else if (isTag(ref)) {
843 				allTags.add(ref.getObjectId());
844 			} else if (RefTreeNames.isRefTree(refdb, ref.getName())) {
845 				txnHeads.add(ref.getObjectId());
846 			} else {
847 				nonHeads.add(ref.getObjectId());
848 			}
849 			if (ref.getPeeledObjectId() != null) {
850 				tagTargets.add(ref.getPeeledObjectId());
851 			}
852 		}
853 
854 		List<ObjectIdSet> excluded = new LinkedList<>();
855 		for (PackFile f : repo.getObjectDatabase().getPacks()) {
856 			checkCancelled();
857 			if (f.shouldBeKept())
858 				excluded.add(f.getIndex());
859 		}
860 
861 		// Don't exclude tags that are also branch tips
862 		allTags.removeAll(allHeads);
863 		allHeadsAndTags.addAll(allHeads);
864 		allHeadsAndTags.addAll(allTags);
865 
866 		// Hoist all branch tips and tags earlier in the pack file
867 		tagTargets.addAll(allHeadsAndTags);
868 		nonHeads.addAll(indexObjects);
869 
870 		// Combine the GC_REST objects into the GC pack if requested
871 		if (pconfig != null && pconfig.getSinglePack()) {
872 			allHeadsAndTags.addAll(nonHeads);
873 			nonHeads.clear();
874 		}
875 
876 		List<PackFile> ret = new ArrayList<>(2);
877 		PackFile heads = null;
878 		if (!allHeadsAndTags.isEmpty()) {
879 			heads = writePack(allHeadsAndTags, PackWriter.NONE, allTags,
880 					tagTargets, excluded);
881 			if (heads != null) {
882 				ret.add(heads);
883 				excluded.add(0, heads.getIndex());
884 			}
885 		}
886 		if (!nonHeads.isEmpty()) {
887 			PackFile rest = writePack(nonHeads, allHeadsAndTags, PackWriter.NONE,
888 					tagTargets, excluded);
889 			if (rest != null)
890 				ret.add(rest);
891 		}
892 		if (!txnHeads.isEmpty()) {
893 			PackFile txn = writePack(txnHeads, PackWriter.NONE, PackWriter.NONE,
894 					null, excluded);
895 			if (txn != null)
896 				ret.add(txn);
897 		}
898 		try {
899 			deleteOldPacks(toBeDeleted, ret);
900 		} catch (ParseException e) {
901 			// TODO: the exception has to be wrapped into an IOException because
902 			// throwing the ParseException directly would break the API, instead
903 			// we should throw a ConfigInvalidException
904 			throw new IOException(e);
905 		}
906 		prunePacked();
907 		deleteEmptyRefsFolders();
908 		deleteOrphans();
909 		deleteTempPacksIdx();
910 
911 		lastPackedRefs = refsBefore;
912 		lastRepackTime = time;
913 		return ret;
914 	}
915 
916 	private static boolean isHead(Ref ref) {
917 		return ref.getName().startsWith(Constants.R_HEADS);
918 	}
919 
920 	private static boolean isTag(Ref ref) {
921 		return ref.getName().startsWith(Constants.R_TAGS);
922 	}
923 
924 	private void deleteEmptyRefsFolders() throws IOException {
925 		Path refs = repo.getDirectory().toPath().resolve("refs"); //$NON-NLS-1$
926 		try (Stream<Path> entries = Files.list(refs)) {
927 			Iterator<Path> iterator = entries.iterator();
928 			while (iterator.hasNext()) {
929 				try (Stream<Path> s = Files.list(iterator.next())) {
930 					s.forEach(this::deleteDir);
931 				}
932 			}
933 		}
934 	}
935 
936 	private void deleteDir(Path dir) {
937 		try (Stream<Path> dirs = Files.walk(dir)) {
938 			dirs.filter(this::isDirectory).sorted(Comparator.reverseOrder())
939 					.forEach(this::delete);
940 		} catch (IOException e) {
941 			LOG.error(e.getMessage(), e);
942 		}
943 	}
944 
945 	private boolean isDirectory(Path p) {
946 		return p.toFile().isDirectory();
947 	}
948 
949 	private boolean delete(Path d) {
950 		try {
951 			// Avoid deleting a folder that was just created so that concurrent
952 			// operations trying to create a reference are not impacted
953 			Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
954 			Instant lastModified = Files.getLastModifiedTime(d).toInstant();
955 			if (lastModified.isBefore(threshold)) {
956 				// If the folder is not empty, the delete operation will fail
957 				// silently. This is a cheaper alternative to filtering the
958 				// stream in the calling method.
959 				return d.toFile().delete();
960 			}
961 		} catch (IOException e) {
962 			LOG.error(e.getMessage(), e);
963 		}
964 		return false;
965 	}
966 
967 	/**
968 	 * Deletes orphans
969 	 * <p>
970 	 * A file is considered an orphan if it is either a "bitmap" or an index
971 	 * file, and its corresponding pack file is missing in the list.
972 	 * </p>
973 	 */
974 	private void deleteOrphans() {
975 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
976 		List<String> fileNames = null;
977 		try (Stream<Path> files = Files.list(packDir)) {
978 			fileNames = files.map(path -> path.getFileName().toString())
979 					.filter(name -> (name.endsWith(PACK_EXT)
980 							|| name.endsWith(BITMAP_EXT)
981 							|| name.endsWith(INDEX_EXT)))
982 					.sorted(Collections.reverseOrder())
983 					.collect(Collectors.toList());
984 		} catch (IOException e1) {
985 			// ignore
986 		}
987 		if (fileNames == null) {
988 			return;
989 		}
990 
991 		String base = null;
992 		for (String n : fileNames) {
993 			if (n.endsWith(PACK_EXT)) {
994 				base = n.substring(0, n.lastIndexOf('.'));
995 			} else {
996 				if (base == null || !n.startsWith(base)) {
997 					try {
998 						Files.delete(packDir.resolve(n));
999 					} catch (IOException e) {
1000 						LOG.error(e.getMessage(), e);
1001 					}
1002 				}
1003 			}
1004 		}
1005 	}
1006 
1007 	private void deleteTempPacksIdx() {
1008 		Path packDir = repo.getObjectDatabase().getPackDirectory().toPath();
1009 		Instant threshold = Instant.now().minus(1, ChronoUnit.DAYS);
1010 		try (DirectoryStream<Path> stream =
1011 				Files.newDirectoryStream(packDir, "gc_*_tmp")) { //$NON-NLS-1$
1012 			stream.forEach(t -> {
1013 				try {
1014 					Instant lastModified = Files.getLastModifiedTime(t)
1015 							.toInstant();
1016 					if (lastModified.isBefore(threshold)) {
1017 						Files.deleteIfExists(t);
1018 					}
1019 				} catch (IOException e) {
1020 					LOG.error(e.getMessage(), e);
1021 				}
1022 			});
1023 		} catch (IOException e) {
1024 			LOG.error(e.getMessage(), e);
1025 		}
1026 	}
1027 
1028 	/**
1029 	 * @param ref
1030 	 *            the ref whose log should be inspected
1031 	 * @param minTime only reflog entries not older than this time are processed
1032 	 * @return the {@link ObjectId}s contained in the reflog
1033 	 * @throws IOException
1034 	 */
1035 	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException {
1036 		ReflogReader reflogReader = repo.getReflogReader(ref.getName());
1037 		if (reflogReader == null) {
1038 			return Collections.emptySet();
1039 		}
1040 		List<ReflogEntry> rlEntries = reflogReader
1041 				.getReverseEntries();
1042 		if (rlEntries == null || rlEntries.isEmpty())
1043 			return Collections.emptySet();
1044 		Set<ObjectId> ret = new HashSet<>();
1045 		for (ReflogEntry e : rlEntries) {
1046 			if (e.getWho().getWhen().getTime() < minTime)
1047 				break;
1048 			ObjectId newId = e.getNewId();
1049 			if (newId != null && !ObjectId.zeroId().equals(newId))
1050 				ret.add(newId);
1051 			ObjectId oldId = e.getOldId();
1052 			if (oldId != null && !ObjectId.zeroId().equals(oldId))
1053 				ret.add(oldId);
1054 		}
1055 		return ret;
1056 	}
1057 
1058 	/**
1059 	 * Returns a collection of all refs and additional refs.
1060 	 *
1061 	 * Additional refs which don't start with "refs/" are not returned because
1062 	 * they should not save objects from being garbage collected. Examples of
1063 	 * such references are ORIG_HEAD, MERGE_HEAD, FETCH_HEAD and
1064 	 * CHERRY_PICK_HEAD.
1065 	 *
1066 	 * @return a collection of refs pointing to live objects.
1067 	 * @throws IOException
1068 	 */
1069 	private Collection<Ref> getAllRefs() throws IOException {
1070 		RefDatabase refdb = repo.getRefDatabase();
1071 		Collection<Ref> refs = refdb.getRefs();
1072 		List<Ref> addl = refdb.getAdditionalRefs();
1073 		if (!addl.isEmpty()) {
1074 			List<Ref> all = new ArrayList<>(refs.size() + addl.size());
1075 			all.addAll(refs);
1076 			// add additional refs which start with refs/
1077 			for (Ref r : addl) {
1078 				checkCancelled();
1079 				if (r.getName().startsWith(Constants.R_REFS)) {
1080 					all.add(r);
1081 				}
1082 			}
1083 			return all;
1084 		}
1085 		return refs;
1086 	}
1087 
1088 	/**
1089 	 * Return a set of those objects in the index which differ from what is in
1090 	 * HEAD.
1091 	 *
1092 	 * @return a set of ObjectIds of changed objects in the index
1093 	 * @throws IOException
1094 	 * @throws CorruptObjectException
1095 	 * @throws NoWorkTreeException
1096 	 */
1097 	private Set<ObjectId> listNonHEADIndexObjects()
1098 			throws CorruptObjectException, IOException {
1099 		if (repo.isBare()) {
1100 			return Collections.emptySet();
1101 		}
1102 		try (TreeWalk treeWalk = new TreeWalk(repo)) {
1103 			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
1104 			ObjectId headID = repo.resolve(Constants.HEAD);
1105 			if (headID != null) {
1106 				try (RevWalk revWalk = new RevWalk(repo)) {
1107 					treeWalk.addTree(revWalk.parseTree(headID));
1108 				}
1109 			}
1110 
1111 			treeWalk.setFilter(TreeFilter.ANY_DIFF);
1112 			treeWalk.setRecursive(true);
1113 			Set<ObjectId> ret = new HashSet<>();
1114 
1115 			while (treeWalk.next()) {
1116 				checkCancelled();
1117 				ObjectId objectId = treeWalk.getObjectId(0);
1118 				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
1119 				case FileMode.TYPE_MISSING:
1120 				case FileMode.TYPE_GITLINK:
1121 					continue;
1122 				case FileMode.TYPE_TREE:
1123 				case FileMode.TYPE_FILE:
1124 				case FileMode.TYPE_SYMLINK:
1125 					ret.add(objectId);
1126 					continue;
1127 				default:
1128 					throw new IOException(MessageFormat.format(
1129 							JGitText.get().corruptObjectInvalidMode3,
1130 							String.format("%o", //$NON-NLS-1$
1131 									Integer.valueOf(treeWalk.getRawMode(0))),
1132 							(objectId == null) ? "null" : objectId.name(), //$NON-NLS-1$
1133 							treeWalk.getPathString(), //
1134 							repo.getIndexFile()));
1135 				}
1136 			}
1137 			return ret;
1138 		}
1139 	}
1140 
1141 	private PackFile writePack(@NonNull Set<? extends ObjectId> want,
1142 			@NonNull Set<? extends ObjectId> have, @NonNull Set<ObjectId> tags,
1143 			Set<ObjectId> tagTargets, List<ObjectIdSet> excludeObjects)
1144 			throws IOException {
1145 		checkCancelled();
1146 		File tmpPack = null;
1147 		Map<PackExt, File> tmpExts = new TreeMap<>((o1, o2) -> {
1148 			// INDEX entries must be returned last, so the pack
1149 			// scanner does not pick up the new pack until all the
1150 			// PackExt entries have been written.
1151 			if (o1 == o2) {
1152 				return 0;
1153 			}
1154 			if (o1 == PackExt.INDEX) {
1155 				return 1;
1156 			}
1157 			if (o2 == PackExt.INDEX) {
1158 				return -1;
1159 			}
1160 			return Integer.signum(o1.hashCode() - o2.hashCode());
1161 		});
1162 		try (PackWriter pw = new PackWriter(
1163 				(pconfig == null) ? new PackConfig(repo) : pconfig,
1164 				repo.newObjectReader())) {
1165 			// prepare the PackWriter
1166 			pw.setDeltaBaseAsOffset(true);
1167 			pw.setReuseDeltaCommits(false);
1168 			if (tagTargets != null) {
1169 				pw.setTagTargets(tagTargets);
1170 			}
1171 			if (excludeObjects != null)
1172 				for (ObjectIdSet idx : excludeObjects)
1173 					pw.excludeObjects(idx);
1174 			pw.preparePack(pm, want, have, PackWriter.NONE, tags);
1175 			if (pw.getObjectCount() == 0)
1176 				return null;
1177 			checkCancelled();
1178 
1179 			// create temporary files
1180 			String id = pw.computeName().getName();
1181 			File packdir = repo.getObjectDatabase().getPackDirectory();
1182 			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir); //$NON-NLS-1$ //$NON-NLS-2$
1183 			final String tmpBase = tmpPack.getName()
1184 					.substring(0, tmpPack.getName().lastIndexOf('.'));
1185 			File tmpIdx = new File(packdir, tmpBase + ".idx_tmp"); //$NON-NLS-1$
1186 			tmpExts.put(INDEX, tmpIdx);
1187 
1188 			if (!tmpIdx.createNewFile())
1189 				throw new IOException(MessageFormat.format(
1190 						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
1191 
1192 			// write the packfile
1193 			try (FileOutputStream fos = new FileOutputStream(tmpPack);
1194 					FileChannel channel = fos.getChannel();
1195 					OutputStream channelStream = Channels
1196 							.newOutputStream(channel)) {
1197 				pw.writePack(pm, pm, channelStream);
1198 				channel.force(true);
1199 			}
1200 
1201 			// write the packindex
1202 			try (FileOutputStream fos = new FileOutputStream(tmpIdx);
1203 					FileChannel idxChannel = fos.getChannel();
1204 					OutputStream idxStream = Channels
1205 							.newOutputStream(idxChannel)) {
1206 				pw.writeIndex(idxStream);
1207 				idxChannel.force(true);
1208 			}
1209 
1210 			if (pw.prepareBitmapIndex(pm)) {
1211 				File tmpBitmapIdx = new File(packdir, tmpBase + ".bitmap_tmp"); //$NON-NLS-1$
1212 				tmpExts.put(BITMAP_INDEX, tmpBitmapIdx);
1213 
1214 				if (!tmpBitmapIdx.createNewFile())
1215 					throw new IOException(MessageFormat.format(
1216 							JGitText.get().cannotCreateIndexfile,
1217 							tmpBitmapIdx.getPath()));
1218 
1219 				try (FileOutputStream fos = new FileOutputStream(tmpBitmapIdx);
1220 						FileChannel idxChannel = fos.getChannel();
1221 						OutputStream idxStream = Channels
1222 								.newOutputStream(idxChannel)) {
1223 					pw.writeBitmapIndex(idxStream);
1224 					idxChannel.force(true);
1225 				}
1226 			}
1227 
1228 			// rename the temporary files to real files
1229 			File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
1230 
1231 			repo.getObjectDatabase().closeAllPackHandles(realPack);
1232 			tmpPack.setReadOnly();
1233 
1234 			FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
1235 			for (Map.Entry<PackExt, File> tmpEntry : tmpExts.entrySet()) {
1236 				File tmpExt = tmpEntry.getValue();
1237 				tmpExt.setReadOnly();
1238 
1239 				File realExt = nameFor(id,
1240 						"." + tmpEntry.getKey().getExtension()); //$NON-NLS-1$
1241 				try {
1242 					FileUtils.rename(tmpExt, realExt,
1243 							StandardCopyOption.ATOMIC_MOVE);
1244 				} catch (IOException e) {
1245 					File newExt = new File(realExt.getParentFile(),
1246 							realExt.getName() + ".new"); //$NON-NLS-1$
1247 					try {
1248 						FileUtils.rename(tmpExt, newExt,
1249 								StandardCopyOption.ATOMIC_MOVE);
1250 					} catch (IOException e2) {
1251 						newExt = tmpExt;
1252 						e = e2;
1253 					}
1254 					throw new IOException(MessageFormat.format(
1255 							JGitText.get().panicCantRenameIndexFile, newExt,
1256 							realExt), e);
1257 				}
1258 			}
1259 
1260 			return repo.getObjectDatabase().openPack(realPack);
1261 		} finally {
1262 			if (tmpPack != null && tmpPack.exists())
1263 				tmpPack.delete();
1264 			for (File tmpExt : tmpExts.values()) {
1265 				if (tmpExt.exists())
1266 					tmpExt.delete();
1267 			}
1268 		}
1269 	}
1270 
1271 	private File nameFor(String name, String ext) {
1272 		File packdir = repo.getObjectDatabase().getPackDirectory();
1273 		return new File(packdir, "pack-" + name + ext); //$NON-NLS-1$
1274 	}
1275 
1276 	private void checkCancelled() throws CancelledException {
1277 		if (pm.isCancelled()) {
1278 			throw new CancelledException(JGitText.get().operationCanceled);
1279 		}
1280 	}
1281 
1282 	/**
1283 	 * A class holding statistical data for a FileRepository regarding how many
1284 	 * objects are stored as loose or packed objects
1285 	 */
1286 	public static class RepoStatistics {
1287 		/**
1288 		 * The number of objects stored in pack files. If the same object is
1289 		 * stored in multiple pack files then it is counted as often as it
1290 		 * occurs in pack files.
1291 		 */
1292 		public long numberOfPackedObjects;
1293 
1294 		/**
1295 		 * The number of pack files
1296 		 */
1297 		public long numberOfPackFiles;
1298 
1299 		/**
1300 		 * The number of objects stored as loose objects.
1301 		 */
1302 		public long numberOfLooseObjects;
1303 
1304 		/**
1305 		 * The sum of the sizes of all files used to persist loose objects.
1306 		 */
1307 		public long sizeOfLooseObjects;
1308 
1309 		/**
1310 		 * The sum of the sizes of all pack files.
1311 		 */
1312 		public long sizeOfPackedObjects;
1313 
1314 		/**
1315 		 * The number of loose refs.
1316 		 */
1317 		public long numberOfLooseRefs;
1318 
1319 		/**
1320 		 * The number of refs stored in pack files.
1321 		 */
1322 		public long numberOfPackedRefs;
1323 
1324 		/**
1325 		 * The number of bitmaps in the bitmap indices.
1326 		 */
1327 		public long numberOfBitmaps;
1328 
1329 		@Override
1330 		public String toString() {
1331 			final StringBuilder b = new StringBuilder();
1332 			b.append("numberOfPackedObjects=").append(numberOfPackedObjects); //$NON-NLS-1$
1333 			b.append(", numberOfPackFiles=").append(numberOfPackFiles); //$NON-NLS-1$
1334 			b.append(", numberOfLooseObjects=").append(numberOfLooseObjects); //$NON-NLS-1$
1335 			b.append(", numberOfLooseRefs=").append(numberOfLooseRefs); //$NON-NLS-1$
1336 			b.append(", numberOfPackedRefs=").append(numberOfPackedRefs); //$NON-NLS-1$
1337 			b.append(", sizeOfLooseObjects=").append(sizeOfLooseObjects); //$NON-NLS-1$
1338 			b.append(", sizeOfPackedObjects=").append(sizeOfPackedObjects); //$NON-NLS-1$
1339 			b.append(", numberOfBitmaps=").append(numberOfBitmaps); //$NON-NLS-1$
1340 			return b.toString();
1341 		}
1342 	}
1343 
1344 	/**
1345 	 * Returns information about objects and pack files for a FileRepository.
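	 * <p>
	 * For example (illustrative; the limits mirror the defaults used by auto
	 * gc), the statistics can be used to decide whether housekeeping is needed:
	 *
	 * <pre>{@code
	 * RepoStatistics stats = new GC(repository).getStatistics();
	 * boolean crowded = stats.numberOfLooseObjects > 6700
	 * 		|| stats.numberOfPackFiles > 50;
	 * }</pre>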
1346 	 *
1347 	 * @return information about objects and pack files for a FileRepository
1348 	 * @throws java.io.IOException
1349 	 */
1350 	public RepoStatistics getStatistics() throws IOException {
1351 		RepoStatistics ret = new RepoStatistics();
1352 		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
1353 		for (PackFile f : packs) {
1354 			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
1355 			ret.numberOfPackFiles++;
1356 			ret.sizeOfPackedObjects += f.getPackFile().length();
1357 			if (f.getBitmapIndex() != null)
1358 				ret.numberOfBitmaps += f.getBitmapIndex().getBitmapCount();
1359 		}
1360 		File objDir = repo.getObjectsDirectory();
1361 		String[] fanout = objDir.list();
1362 		if (fanout != null && fanout.length > 0) {
1363 			for (String d : fanout) {
1364 				if (d.length() != 2)
1365 					continue;
1366 				File[] entries = new File(objDir, d).listFiles();
1367 				if (entries == null)
1368 					continue;
1369 				for (File f : entries) {
1370 					if (f.getName().length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
1371 						continue;
1372 					ret.numberOfLooseObjects++;
1373 					ret.sizeOfLooseObjects += f.length();
1374 				}
1375 			}
1376 		}
1377 
1378 		RefDatabase refDb = repo.getRefDatabase();
1379 		for (Ref r : refDb.getRefs()) {
1380 			Storage storage = r.getStorage();
1381 			if (storage == Storage.LOOSE || storage == Storage.LOOSE_PACKED)
1382 				ret.numberOfLooseRefs++;
1383 			if (storage == Storage.PACKED || storage == Storage.LOOSE_PACKED)
1384 				ret.numberOfPackedRefs++;
1385 		}
1386 
1387 		return ret;
1388 	}
1389 
1390 	/**
1391 	 * Set the progress monitor used for garbage collection methods.
1392 	 *
1393 	 * @param pm a {@link org.eclipse.jgit.lib.ProgressMonitor} object.
1394 	 * @return this
1395 	 */
1396 	public GC setProgressMonitor(ProgressMonitor pm) {
1397 		this.pm = (pm == null) ? NullProgressMonitor.INSTANCE : pm;
1398 		return this;
1399 	}
1400 
1401 	/**
1402 	 * During gc() or prune() each unreferenced, loose object which has been
1403 	 * created or modified in the last <code>expireAgeMillis</code> milliseconds
1404 	 * will not be pruned. Only older objects may be pruned. If set to 0 then
1405 	 * every object is a candidate for pruning.
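	 * <p>
	 * For example (illustrative; {@code gc} is a {@code GC} instance), to
	 * protect everything created or modified within the last hour:
	 *
	 * <pre>{@code
	 * gc.setExpireAgeMillis(60L * 60 * 1000);
	 * }</pre>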
1406 	 *
1407 	 * @param expireAgeMillis
1408 	 *            minimal age of objects to be pruned in milliseconds.
1409 	 */
1410 	public void setExpireAgeMillis(long expireAgeMillis) {
1411 		this.expireAgeMillis = expireAgeMillis;
1412 		expire = null;
1413 	}
1414 
1415 	/**
1416 	 * During gc() or prune() packfiles which are created or modified in the
1417 	 * last <code>packExpireAgeMillis</code> milliseconds will not be deleted.
1418 	 * Only older packfiles may be deleted. If set to 0 then every packfile is a
1419 	 * candidate for deletion.
1420 	 *
1421 	 * @param packExpireAgeMillis
1422 	 *            minimal age of packfiles to be deleted in milliseconds.
1423 	 */
1424 	public void setPackExpireAgeMillis(long packExpireAgeMillis) {
1425 		this.packExpireAgeMillis = packExpireAgeMillis;
1426 		expire = null;
1427 	}
1428 
1429 	/**
1430 	 * Set the PackConfig used when (re-)writing packfiles. This makes it
1431 	 * possible to influence how packs are written and to implement something
1432 	 * similar to "git gc --aggressive".
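	 * <p>
	 * A sketch of a more aggressive configuration (illustrative; the window and
	 * depth values are example numbers, not recommendations):
	 *
	 * <pre>{@code
	 * PackConfig aggressive = new PackConfig(repository);
	 * aggressive.setDeltaSearchWindowSize(250);
	 * aggressive.setMaxDeltaDepth(250);
	 * gc.setPackConfig(aggressive);
	 * }</pre>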
1433 	 *
1434 	 * @param pconfig
1435 	 *            the {@link org.eclipse.jgit.storage.pack.PackConfig} used when
1436 	 *            writing packs
1437 	 */
1438 	public void setPackConfig(PackConfig pconfig) {
1439 		this.pconfig = pconfig;
1440 	}
1441 
1442 	/**
1443 	 * During gc() or prune() each unreferenced, loose object which has been
1444 	 * created or modified after or at <code>expire</code> will not be pruned.
1445 	 * Only older objects may be pruned. If set to null then every object is a
1446 	 * candidate for pruning.
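	 * <p>
	 * A sketch (illustrative; {@code gc} is a {@code GC} instance) of expiring
	 * everything older than two weeks:
	 *
	 * <pre>{@code
	 * gc.setExpire(Date.from(Instant.now().minus(14, ChronoUnit.DAYS)));
	 * }</pre>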
1447 	 *
1448 	 * @param expire
1449 	 *            instant in time which defines object expiration: objects with
1450 	 *            modification time before this instant are expired, objects
1451 	 *            with modification time newer than or equal to this instant
1452 	 *            are not expired
1453 	 */
1454 	public void setExpire(Date expire) {
1455 		this.expire = expire;
1456 		expireAgeMillis = -1;
1457 	}
1458 
1459 	/**
1460 	 * During gc() or prune() packfiles which are created or modified after or
1461 	 * at <code>packExpire</code> will not be deleted. Only older packfiles may
1462 	 * be deleted. If set to null then every packfile is a candidate for
1463 	 * deletion.
1464 	 *
1465 	 * @param packExpire
1466 	 *            instant in time which defines packfile expiration
1467 	 */
1468 	public void setPackExpire(Date packExpire) {
1469 		this.packExpire = packExpire;
1470 		packExpireAgeMillis = -1;
1471 	}
1472 
1473 	/**
1474 	 * Set the {@code gc --auto} option.
1475 	 *
1476 	 * With this option, gc checks whether any housekeeping is required; if not,
1477 	 * it exits without performing any work. Some JGit commands run
1478 	 * {@code gc --auto} after performing operations that could create many
1479 	 * loose objects.
1480 	 * <p>
1481 	 * Housekeeping is required if there are too many loose objects or too many
1482 	 * packs in the repository. If the number of loose objects exceeds the value
1483 	 * of the gc.auto option JGit GC consolidates all existing packs into a
1484 	 * single pack (equivalent to {@code -A} option), whereas git-core would
1485 	 * combine all loose objects into a single pack using {@code repack -d -l}.
1486 	 * Setting the value of {@code gc.auto} to 0 disables automatic packing of
1487 	 * loose objects.
1488 	 * <p>
1489 	 * If the number of packs exceeds the value of {@code gc.autoPackLimit},
1490 	 * then existing packs (except those marked with a .keep file) are
1491 	 * consolidated into a single pack by using the {@code -A} option of repack.
1492 	 * Setting {@code gc.autoPackLimit} to 0 disables automatic consolidation of
1493 	 * packs.
1494 	 * <p>
1495 	 * As in git, the following JGit commands run auto gc:
1496 	 * <ul>
1497 	 * <li>fetch</li>
1498 	 * <li>merge</li>
1499 	 * <li>rebase</li>
1500 	 * <li>receive-pack</li>
1501 	 * </ul>
1502 	 * The auto gc for receive-pack can be suppressed by setting the config
1503 	 * option {@code receive.autogc = false}
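	 * <p>
	 * A sketch of tuning these thresholds through the repository configuration
	 * (illustrative values; {@code repository} is the repository being
	 * collected):
	 *
	 * <pre>{@code
	 * StoredConfig cfg = repository.getConfig();
	 * cfg.setInt("gc", null, "auto", 6700);
	 * cfg.setInt("gc", null, "autopacklimit", 50);
	 * cfg.save();
	 * }</pre>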
1504 	 *
1505 	 * @param auto
1506 	 *            defines whether gc should do automatic housekeeping
1507 	 */
1508 	public void setAuto(boolean auto) {
1509 		this.automatic = auto;
1510 	}
1511 
1512 	/**
1513 	 * @param background
1514 	 *            whether to run the gc in a background thread.
1515 	 */
1516 	void setBackground(boolean background) {
1517 		this.background = background;
1518 	}
1519 
1520 	private boolean needGc() {
1521 		if (tooManyPacks()) {
1522 			addRepackAllOption();
1523 		} else {
1524 			return tooManyLooseObjects();
1525 		}
1526 		// TODO run pre-auto-gc hook, if it fails return false
1527 		return true;
1528 	}
1529 
1530 	private void addRepackAllOption() {
1531 		// TODO: if JGit GC is enhanced to support repack's option -l this
1532 		// method needs to be implemented
1533 	}
1534 
1535 	/**
1536 	 * @return {@code true} if number of packs > gc.autopacklimit (default 50)
1537 	 */
1538 	boolean tooManyPacks() {
1539 		int autopacklimit = repo.getConfig().getInt(
1540 				ConfigConstants.CONFIG_GC_SECTION,
1541 				ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
1542 				DEFAULT_AUTOPACKLIMIT);
1543 		if (autopacklimit <= 0) {
1544 			return false;
1545 		}
1546 		// JGit always creates two packfiles, one for the objects reachable from
1547 		// branches, and another one for the rest
1548 		return repo.getObjectDatabase().getPacks().size() > (autopacklimit + 1);
1549 	}
1550 
1551 	/**
1552 	 * Quickly estimate the number of loose objects; SHA1 is distributed evenly,
1553 	 * so counting objects in one directory (bucket 17) is sufficient.
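	 * <p>
	 * For example, with the default {@code gc.auto} value of 6700 the
	 * per-directory threshold is {@code (6700 + 255) / 256 = 27}; more than 27
	 * loose objects in bucket "17" therefore indicates that the whole
	 * repository likely exceeds the limit.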
1554 	 *
1555 	 * @return {@code true} if number of loose objects > gc.auto (default 6700)
1556 	 */
1557 	boolean tooManyLooseObjects() {
1558 		int auto = getLooseObjectLimit();
1559 		if (auto <= 0) {
1560 			return false;
1561 		}
1562 		int n = 0;
1563 		int threshold = (auto + 255) / 256;
1564 		Path dir = repo.getObjectsDirectory().toPath().resolve("17"); //$NON-NLS-1$
1565 		if (!dir.toFile().exists()) {
1566 			return false;
1567 		}
1568 		try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, file -> {
1569 					Path fileName = file.getFileName();
1570 					return file.toFile().isFile() && fileName != null
1571 							&& PATTERN_LOOSE_OBJECT.matcher(fileName.toString())
1572 									.matches();
1573 				})) {
1574 			for (Iterator<Path> iter = stream.iterator(); iter.hasNext(); iter
1575 					.next()) {
1576 				if (++n > threshold) {
1577 					return true;
1578 				}
1579 			}
1580 		} catch (IOException e) {
1581 			LOG.error(e.getMessage(), e);
1582 		}
1583 		return false;
1584 	}
1585 
1586 	private int getLooseObjectLimit() {
1587 		return repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
1588 				ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
1589 	}
1590 }