/*
 * Copyright (C) 2008-2011, Google Inc.
 * Copyright (C) 2006-2008, Shawn O. Pearce <spearce@spearce.org>
 * and other copyright owners as documented in the project's IP log.
 *
 * This program and the accompanying materials are made available
 * under the terms of the Eclipse Distribution License v1.0 which
 * accompanies this distribution, is reproduced below, and is
 * available at http://www.eclipse.org/org/documents/edl-v10.php
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or
 * without modification, are permitted provided that the following
 * conditions are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * - Redistributions in binary form must reproduce the above
 *   copyright notice, this list of conditions and the following
 *   disclaimer in the documentation and/or other materials provided
 *   with the distribution.
 *
 * - Neither the name of the Eclipse Foundation, Inc. nor the
 *   names of its contributors may be used to endorse or promote
 *   products derived from this software without specific prior
 *   written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
 * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.eclipse.jgit.internal.storage.dfs;

import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.UNREACHABLE_GARBAGE;
import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;

import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackList;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
import org.eclipse.jgit.internal.storage.file.PackIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.pack.CachedPack;
import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs;
import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.PackWriter;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
import org.eclipse.jgit.lib.BitmapIndex;
import org.eclipse.jgit.lib.BitmapIndex.BitmapBuilder;
import org.eclipse.jgit.lib.InflaterCache;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.util.BlockList;

/**
 * Reader through which repository content is accessed.
 * <p>
 * See the base {@link org.eclipse.jgit.lib.ObjectReader} documentation for
 * details. Notably, a reader is not thread safe.
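 * <p>
 * A typical usage sketch (hypothetical caller code; {@code objdb} and
 * {@code blobId} are placeholders, not part of this class):
 *
 * <pre>
 * try (ObjectReader rdr = objdb.newReader()) {
 * 	ObjectLoader ldr = rdr.open(blobId, Constants.OBJ_BLOB);
 * 	byte[] data = ldr.getCachedBytes();
 * }
 * </pre>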
 */
public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
	private static final int MAX_RESOLVE_MATCHES = 256;

	/** Temporary buffer large enough for at least one raw object id. */
	final byte[] tempId = new byte[OBJECT_ID_LENGTH];

	/** Database this reader loads objects from. */
	final DfsObjDatabase db;

	final DfsReaderIoStats.Accumulator stats = new DfsReaderIoStats.Accumulator();

	private Inflater inf;
	private DfsBlock block;
	private DeltaBaseCache baseCache;

	/** Pack that most recently contained a requested object; probed first. */
	private DfsPackFile last;

	/** When true, lookups skip packs containing only unreachable garbage. */
	private boolean avoidUnreachable;

	/**
	 * Initialize a new DfsReader.
	 *
	 * @param db
	 *            parent DfsObjDatabase.
	 */
	protected DfsReader(DfsObjDatabase db) {
		this.db = db;
		this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
	}

	DfsReaderOptions getOptions() {
		return db.getReaderOptions();
	}

	DeltaBaseCache getDeltaBaseCache() {
		if (baseCache == null)
			baseCache = new DeltaBaseCache(this);
		return baseCache;
	}

	/** {@inheritDoc} */
	@Override
	public ObjectReader newReader() {
		return db.newReader();
	}

	/** {@inheritDoc} */
	@Override
	public void setAvoidUnreachableObjects(boolean avoid) {
		avoidUnreachable = avoid;
	}

	/** {@inheritDoc} */
	@Override
	public BitmapIndex getBitmapIndex() throws IOException {
		for (DfsPackFile pack : db.getPacks()) {
			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
			if (bitmapIndex != null)
				return new BitmapIndexImpl(bitmapIndex);
		}
		return null;
	}

	/** {@inheritDoc} */
	@Override
	public Collection<CachedPack> getCachedPacksAndUpdate(
		BitmapBuilder needBitmap) throws IOException {
		for (DfsPackFile pack : db.getPacks()) {
			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
			if (needBitmap.removeAllOrNone(bitmapIndex))
				return Collections.<CachedPack> singletonList(
						new DfsCachedPack(pack));
		}
		return Collections.emptyList();
	}

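	// Usage sketch (hypothetical caller code; "reader" is a DfsReader):
	// an abbreviated SHA-1 prefix is resolved to its candidate object ids.
	//
	//   AbbreviatedObjectId abbrev = AbbreviatedObjectId.fromString("abcd1234");
	//   Collection<ObjectId> candidates = reader.resolve(abbrev);
	//   boolean unique = candidates.size() == 1;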
	/** {@inheritDoc} */
	@Override
	public Collection<ObjectId> resolve(AbbreviatedObjectId id)
			throws IOException {
		if (id.isComplete())
			return Collections.singleton(id.toObjectId());
		HashSet<ObjectId> matches = new HashSet<>(4);
		PackList packList = db.getPackList();
		resolveImpl(packList, id, matches);
		if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
			stats.scanPacks++;
			resolveImpl(db.scanPacks(packList), id, matches);
		}
		return matches;
	}

	private void resolveImpl(PackList packList, AbbreviatedObjectId id,
			HashSet<ObjectId> matches) throws IOException {
		for (DfsPackFile pack : packList.packs) {
			if (skipGarbagePack(pack)) {
				continue;
			}
			pack.resolve(this, matches, id, MAX_RESOLVE_MATCHES);
			if (matches.size() >= MAX_RESOLVE_MATCHES) {
				break;
			}
		}
	}

	/** {@inheritDoc} */
	@Override
	public boolean has(AnyObjectId objectId) throws IOException {
		if (last != null
				&& !skipGarbagePack(last)
				&& last.hasObject(this, objectId))
			return true;
		PackList packList = db.getPackList();
		if (hasImpl(packList, objectId)) {
			return true;
		} else if (packList.dirty()) {
			stats.scanPacks++;
			return hasImpl(db.scanPacks(packList), objectId);
		}
		return false;
	}

	private boolean hasImpl(PackList packList, AnyObjectId objectId)
			throws IOException {
		for (DfsPackFile pack : packList.packs) {
			if (pack == last || skipGarbagePack(pack))
				continue;
			if (pack.hasObject(this, objectId)) {
				last = pack;
				return true;
			}
		}
		return false;
	}

	/** {@inheritDoc} */
	@Override
	public ObjectLoader open(AnyObjectId objectId, int typeHint)
			throws MissingObjectException, IncorrectObjectTypeException,
			IOException {
		ObjectLoader ldr;
		if (last != null && !skipGarbagePack(last)) {
			ldr = last.get(this, objectId);
			if (ldr != null) {
				return checkType(ldr, objectId, typeHint);
			}
		}

		PackList packList = db.getPackList();
		ldr = openImpl(packList, objectId);
		if (ldr != null) {
			return checkType(ldr, objectId, typeHint);
		}
		if (packList.dirty()) {
			stats.scanPacks++;
			ldr = openImpl(db.scanPacks(packList), objectId);
			if (ldr != null) {
				return checkType(ldr, objectId, typeHint);
			}
		}

		if (typeHint == OBJ_ANY)
			throw new MissingObjectException(objectId.copy(),
					JGitText.get().unknownObjectType2);
		throw new MissingObjectException(objectId.copy(), typeHint);
	}

	private static ObjectLoader checkType(ObjectLoader ldr, AnyObjectId id,
			int typeHint) throws IncorrectObjectTypeException {
		if (typeHint != OBJ_ANY && ldr.getType() != typeHint) {
			throw new IncorrectObjectTypeException(id.copy(), typeHint);
		}
		return ldr;
	}

	private ObjectLoader openImpl(PackList packList, AnyObjectId objectId)
			throws IOException {
		for (DfsPackFile pack : packList.packs) {
			if (pack == last || skipGarbagePack(pack)) {
				continue;
			}
			ObjectLoader ldr = pack.get(this, objectId);
			if (ldr != null) {
				last = pack;
				return ldr;
			}
		}
		return null;
	}

	/** {@inheritDoc} */
	@Override
	public Set<ObjectId> getShallowCommits() {
		return Collections.emptySet();
	}

	private static final Comparator<FoundObject<?>> FOUND_OBJECT_SORT = new Comparator<FoundObject<?>>() {
		@Override
		public int compare(FoundObject<?> a, FoundObject<?> b) {
			int cmp = a.packIndex - b.packIndex;
			if (cmp == 0)
				cmp = Long.signum(a.offset - b.offset);
			return cmp;
		}
	};

	private static class FoundObject<T extends ObjectId> {
		final T id;
		final DfsPackFile pack;
		final long offset;
		final int packIndex;

		FoundObject(T objectId, int packIdx, DfsPackFile pack, long offset) {
			this.id = objectId;
			this.pack = pack;
			this.offset = offset;
			this.packIndex = packIdx;
		}

		FoundObject(T objectId) {
			this.id = objectId;
			this.pack = null;
			this.offset = 0;
			this.packIndex = 0;
		}
	}

	private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
			Iterable<T> objectIds) throws IOException {
		Collection<T> pending = new LinkedList<>();
		for (T id : objectIds) {
			pending.add(id);
		}

		PackList packList = db.getPackList();
		List<FoundObject<T>> r = new ArrayList<>();
		findAllImpl(packList, pending, r);
		if (!pending.isEmpty() && packList.dirty()) {
			stats.scanPacks++;
			findAllImpl(db.scanPacks(packList), pending, r);
		}
		for (T t : pending) {
			r.add(new FoundObject<>(t));
		}
		Collections.sort(r, FOUND_OBJECT_SORT);
		return r;
	}

	private <T extends ObjectId> void findAllImpl(PackList packList,
			Collection<T> pending, List<FoundObject<T>> r) {
		DfsPackFile[] packs = packList.packs;
		if (packs.length == 0) {
			return;
		}
		int lastIdx = 0;
		DfsPackFile lastPack = packs[lastIdx];

		OBJECT_SCAN: for (Iterator<T> it = pending.iterator(); it.hasNext();) {
			T t = it.next();
			if (!skipGarbagePack(lastPack)) {
				try {
					long p = lastPack.findOffset(this, t);
					if (0 < p) {
						r.add(new FoundObject<>(t, lastIdx, lastPack, p));
						it.remove();
						continue;
					}
				} catch (IOException e) {
					// Fall through and try to examine other packs.
				}
			}

			for (int i = 0; i < packs.length; i++) {
				if (i == lastIdx)
					continue;
				DfsPackFile pack = packs[i];
				if (skipGarbagePack(pack))
					continue;
				try {
					long p = pack.findOffset(this, t);
					if (0 < p) {
						r.add(new FoundObject<>(t, i, pack, p));
						it.remove();
						lastIdx = i;
						lastPack = pack;
						continue OBJECT_SCAN;
					}
				} catch (IOException e) {
					// Examine other packs.
				}
			}
		}

		last = lastPack;
	}

	private boolean skipGarbagePack(DfsPackFile pack) {
		return avoidUnreachable && pack.isGarbage();
	}

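	// Queue-usage sketch (hypothetical caller code; "reader" and "ids" are
	// illustrative, not part of this class):
	//
	//   AsyncObjectLoaderQueue<ObjectId> queue = reader.open(ids, true);
	//   try {
	//       while (queue.next()) {
	//           ObjectLoader ldr = queue.open();
	//           // ... use ldr ...
	//       }
	//   } finally {
	//       queue.release();
	//   }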
	/** {@inheritDoc} */
	@Override
	public <T extends ObjectId> AsyncObjectLoaderQueue<T> open(
			Iterable<T> objectIds, final boolean reportMissing) {
		Iterable<FoundObject<T>> order;
		IOException error = null;
		try {
			order = findAll(objectIds);
		} catch (IOException e) {
			order = Collections.emptyList();
			error = e;
		}

		final Iterator<FoundObject<T>> idItr = order.iterator();
		final IOException findAllError = error;
		return new AsyncObjectLoaderQueue<T>() {
			private FoundObject<T> cur;

			@Override
			public boolean next() throws MissingObjectException, IOException {
				if (idItr.hasNext()) {
					cur = idItr.next();
					return true;
				} else if (findAllError != null) {
					throw findAllError;
				} else {
					return false;
				}
			}

			@Override
			public T getCurrent() {
				return cur.id;
			}

			@Override
			public ObjectId getObjectId() {
				return cur.id;
			}

			@Override
			public ObjectLoader open() throws IOException {
				if (cur.pack == null)
					throw new MissingObjectException(cur.id,
							JGitText.get().unknownObjectType2);
				return cur.pack.load(DfsReader.this, cur.offset);
			}

			@Override
			public boolean cancel(boolean mayInterruptIfRunning) {
				return true;
			}

			@Override
			public void release() {
				// Nothing to clean up.
			}
		};
	}

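	// Size-queue sketch (hypothetical caller code; "reader", "ids" and "sizes"
	// are illustrative, e.g. sizes is a Map<ObjectId, Long>):
	//
	//   AsyncObjectSizeQueue<ObjectId> queue = reader.getObjectSize(ids, true);
	//   try {
	//       while (queue.next())
	//           sizes.put(queue.getObjectId(), queue.getSize());
	//   } finally {
	//       queue.release();
	//   }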
	/** {@inheritDoc} */
	@Override
	public <T extends ObjectId> AsyncObjectSizeQueue<T> getObjectSize(
			Iterable<T> objectIds, final boolean reportMissing) {
		Iterable<FoundObject<T>> order;
		IOException error = null;
		try {
			order = findAll(objectIds);
		} catch (IOException e) {
			order = Collections.emptyList();
			error = e;
		}

		final Iterator<FoundObject<T>> idItr = order.iterator();
		final IOException findAllError = error;
		return new AsyncObjectSizeQueue<T>() {
			private FoundObject<T> cur;
			private long sz;

			@Override
			public boolean next() throws MissingObjectException, IOException {
				if (idItr.hasNext()) {
					cur = idItr.next();
					if (cur.pack == null)
						throw new MissingObjectException(cur.id,
								JGitText.get().unknownObjectType2);
					sz = cur.pack.getObjectSize(DfsReader.this, cur.offset);
					return true;
				} else if (findAllError != null) {
					throw findAllError;
				} else {
					return false;
				}
			}

			@Override
			public T getCurrent() {
				return cur.id;
			}

			@Override
			public ObjectId getObjectId() {
				return cur.id;
			}

			@Override
			public long getSize() {
				return sz;
			}

			@Override
			public boolean cancel(boolean mayInterruptIfRunning) {
				return true;
			}

			@Override
			public void release() {
				// Nothing to clean up.
			}
		};
	}

	/** {@inheritDoc} */
	@Override
	public long getObjectSize(AnyObjectId objectId, int typeHint)
			throws MissingObjectException, IncorrectObjectTypeException,
			IOException {
		if (last != null && !skipGarbagePack(last)) {
			long sz = last.getObjectSize(this, objectId);
			if (0 <= sz) {
				return sz;
			}
		}

		PackList packList = db.getPackList();
		long sz = getObjectSizeImpl(packList, objectId);
		if (0 <= sz) {
			return sz;
		}
		if (packList.dirty()) {
			// Rescan for new packs before giving up, as has() and open() do.
			stats.scanPacks++;
			sz = getObjectSizeImpl(db.scanPacks(packList), objectId);
			if (0 <= sz) {
				return sz;
			}
		}

		if (typeHint == OBJ_ANY) {
			throw new MissingObjectException(objectId.copy(),
					JGitText.get().unknownObjectType2);
		}
		throw new MissingObjectException(objectId.copy(), typeHint);
	}

	private long getObjectSizeImpl(PackList packList, AnyObjectId objectId)
			throws IOException {
		for (DfsPackFile pack : packList.packs) {
			if (pack == last || skipGarbagePack(pack)) {
				continue;
			}
			long sz = pack.getObjectSize(this, objectId);
			if (0 <= sz) {
				last = pack;
				return sz;
			}
		}
		return -1;
	}

	/** {@inheritDoc} */
	@Override
	public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
		return new DfsObjectToPack(objectId, type);
	}

	private static final Comparator<DfsObjectToPack> OFFSET_SORT = new Comparator<DfsObjectToPack>() {
		@Override
		public int compare(DfsObjectToPack a, DfsObjectToPack b) {
			return Long.signum(a.getOffset() - b.getOffset());
		}
	};

	@Override
	public void selectObjectRepresentation(PackWriter packer,
			ProgressMonitor monitor, Iterable<ObjectToPack> objects)
			throws IOException, MissingObjectException {
		// Don't check dirty bit on PackList; assume ObjectToPacks all came
		// from the current list.
		List<DfsPackFile> packs = sortPacksForSelectRepresentation();
		trySelectRepresentation(packer, monitor, objects, packs, false);

		List<DfsPackFile> garbage = garbagePacksForSelectRepresentation();
		if (!garbage.isEmpty() && checkGarbagePacks(objects)) {
			trySelectRepresentation(packer, monitor, objects, garbage, true);
		}
	}

	private void trySelectRepresentation(PackWriter packer,
			ProgressMonitor monitor, Iterable<ObjectToPack> objects,
			List<DfsPackFile> packs, boolean skipFound) throws IOException {
		for (DfsPackFile pack : packs) {
			List<DfsObjectToPack> tmp = findAllFromPack(pack, objects, skipFound);
			if (tmp.isEmpty())
				continue;
			Collections.sort(tmp, OFFSET_SORT);
			PackReverseIndex rev = pack.getReverseIdx(this);
			DfsObjectRepresentation rep = new DfsObjectRepresentation(pack);
			for (DfsObjectToPack otp : tmp) {
				pack.representation(rep, otp.getOffset(), this, rev);
				otp.setOffset(0);
				packer.select(otp, rep);
				if (!otp.isFound()) {
					otp.setFound();
					monitor.update(1);
				}
			}
		}
	}

	private static final Comparator<DfsPackFile> PACK_SORT_FOR_REUSE = new Comparator<DfsPackFile>() {
		@Override
		public int compare(DfsPackFile af, DfsPackFile bf) {
			DfsPackDescription ad = af.getPackDescription();
			DfsPackDescription bd = bf.getPackDescription();
			PackSource as = ad.getPackSource();
			PackSource bs = bd.getPackSource();

			if (as != null && as == bs && DfsPackDescription.isGC(as)) {
				// Push smaller GC files last; these likely have higher quality
				// delta compression and the contained representation should be
				// favored over other files.
				return Long.signum(bd.getFileSize(PACK) - ad.getFileSize(PACK));
			}

			// DfsPackDescription.compareTo already did a reasonable sort.
			// Rely on Arrays.sort being stable, leaving equal elements.
			return 0;
		}
	};

	private List<DfsPackFile> sortPacksForSelectRepresentation()
			throws IOException {
		DfsPackFile[] packs = db.getPacks();
		List<DfsPackFile> sorted = new ArrayList<>(packs.length);
		for (DfsPackFile p : packs) {
			if (p.getPackDescription().getPackSource() != UNREACHABLE_GARBAGE) {
				sorted.add(p);
			}
		}
		Collections.sort(sorted, PACK_SORT_FOR_REUSE);
		return sorted;
	}

	private List<DfsPackFile> garbagePacksForSelectRepresentation()
			throws IOException {
		DfsPackFile[] packs = db.getPacks();
		List<DfsPackFile> garbage = new ArrayList<>(packs.length);
		for (DfsPackFile p : packs) {
			if (p.getPackDescription().getPackSource() == UNREACHABLE_GARBAGE) {
				garbage.add(p);
			}
		}
		return garbage;
	}

	private static boolean checkGarbagePacks(Iterable<ObjectToPack> objects) {
		for (ObjectToPack otp : objects) {
			if (!((DfsObjectToPack) otp).isFound()) {
				return true;
			}
		}
		return false;
	}

	private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
			Iterable<ObjectToPack> objects, boolean skipFound)
					throws IOException {
		List<DfsObjectToPack> tmp = new BlockList<>();
		PackIndex idx = pack.getPackIndex(this);
		for (ObjectToPack obj : objects) {
			DfsObjectToPack otp = (DfsObjectToPack) obj;
			if (skipFound && otp.isFound()) {
				continue;
			}
			long p = idx.findOffset(otp);
			if (0 < p && !pack.isCorrupt(p)) {
				otp.setOffset(p);
				tmp.add(otp);
			}
		}
		return tmp;
	}

	/** {@inheritDoc} */
	@Override
	public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
			boolean validate) throws IOException,
			StoredObjectRepresentationNotAvailableException {
		DfsObjectToPack src = (DfsObjectToPack) otp;
		src.pack.copyAsIs(out, src, validate, this);
	}

	/** {@inheritDoc} */
	@Override
	public void writeObjects(PackOutputStream out, List<ObjectToPack> list)
			throws IOException {
		for (ObjectToPack otp : list)
			out.writeObject(otp);
	}

	/** {@inheritDoc} */
	@Override
	public void copyPackAsIs(PackOutputStream out, CachedPack pack)
			throws IOException {
		((DfsCachedPack) pack).copyAsIs(out, this);
	}

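	// Read-loop sketch (hypothetical internal caller; "file", "pos", "buf" and
	// "cnt" are illustrative): copy() may return fewer bytes than requested,
	// so callers needing a full region loop until it is exhausted.
	//
	//   int n = 0;
	//   while (n < cnt) {
	//       int r = copy(file, pos + n, buf, n, cnt - n);
	//       if (r == 0)
	//           break; // passed end of file
	//       n += r;
	//   }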
	/**
	 * Copy bytes from the window to a caller supplied buffer.
	 *
	 * @param file
	 *            the file the desired window is stored within.
	 * @param position
	 *            position within the file to read from.
	 * @param dstbuf
	 *            destination buffer to copy into.
	 * @param dstoff
	 *            offset within <code>dstbuf</code> to start copying into.
	 * @param cnt
	 *            number of bytes to copy. This value may exceed the number of
	 *            bytes remaining in the window starting at offset
	 *            <code>position</code>.
	 * @return number of bytes actually copied; this may be less than
	 *         <code>cnt</code> if <code>cnt</code> exceeded the number of bytes
	 *         available.
	 * @throws IOException
	 *             this cursor does not match the provider or id and the proper
	 *             window could not be acquired through the provider's cache.
	 */
	int copy(BlockBasedFile file, long position, byte[] dstbuf, int dstoff,
			int cnt) throws IOException {
		if (cnt == 0)
			return 0;

		long length = file.length;
		if (0 <= length && length <= position)
			return 0;

		int need = cnt;
		do {
			pin(file, position);
			int r = block.copy(position, dstbuf, dstoff, need);
			position += r;
			dstoff += r;
			need -= r;
			if (length < 0)
				length = file.length;
		} while (0 < need && position < length);
		return cnt - need;
	}

	/**
	 * Inflate a region of the pack starting at {@code position}.
	 *
	 * @param pack
	 *            the file the desired window is stored within.
	 * @param position
	 *            position within the file to read from.
	 * @param dstbuf
	 *            destination buffer the inflater should output decompressed
	 *            data to. Must be large enough to store the entire stream,
	 *            unless headerOnly is true.
	 * @param headerOnly
	 *            if true the caller wants only {@code dstbuf.length} bytes.
	 * @return number of bytes inflated into <code>dstbuf</code>.
	 * @throws IOException
	 *             this cursor does not match the provider or id and the proper
	 *             window could not be acquired through the provider's cache.
	 * @throws DataFormatException
	 *             the inflater encountered an invalid chunk of data. Data
	 *             stream corruption is likely.
	 */
	int inflate(DfsPackFile pack, long position, byte[] dstbuf,
			boolean headerOnly) throws IOException, DataFormatException {
		prepareInflater();
		pin(pack, position);
		position += block.setInput(position, inf);
		for (int dstoff = 0;;) {
			int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
			dstoff += n;
			if (inf.finished() || (headerOnly && dstoff == dstbuf.length)) {
				stats.inflatedBytes += dstoff;
				return dstoff;
			} else if (inf.needsInput()) {
				pin(pack, position);
				position += block.setInput(position, inf);
			} else if (n == 0)
				throw new DataFormatException();
		}
	}

	DfsBlock quickCopy(DfsPackFile p, long pos, long cnt)
			throws IOException {
		pin(p, pos);
		if (block.contains(p.key, pos + (cnt - 1)))
			return block;
		return null;
	}

	Inflater inflater() {
		prepareInflater();
		return inf;
	}

	private void prepareInflater() {
		if (inf == null)
			inf = InflaterCache.get();
		else
			inf.reset();
	}

	void pin(BlockBasedFile file, long position) throws IOException {
		if (block == null || !block.contains(file.key, position)) {
			// If memory is low, we may need what is in our window field to
			// be cleaned up by the GC during the get for the next window.
			// So we always clear it, even though we are just going to set
			// it again.
			block = null;
			block = file.getOrLoadBlock(position, this);
		}
	}

	void unpin() {
		block = null;
	}

	/**
	 * Get IO statistics accumulated by this reader.
	 *
	 * @return IO statistics accumulated by this reader.
	 */
	public DfsReaderIoStats getIoStats() {
		return new DfsReaderIoStats(stats);
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * Release the current window cursor.
	 */
	@Override
	public void close() {
		last = null;
		block = null;
		baseCache = null;
		try {
			InflaterCache.release(inf);
		} finally {
			inf = null;
		}
	}
}