1   /*
2    * Copyright (C) 2008-2011, Google Inc.
3    * Copyright (C) 2006-2008, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  
45  package org.eclipse.jgit.internal.storage.dfs;
46  
47  import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.UNREACHABLE_GARBAGE;
48  import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
49  
50  import java.io.IOException;
51  import java.util.ArrayList;
52  import java.util.Collection;
53  import java.util.Collections;
54  import java.util.Comparator;
55  import java.util.HashSet;
56  import java.util.Iterator;
57  import java.util.LinkedList;
58  import java.util.List;
59  import java.util.Set;
60  import java.util.zip.DataFormatException;
61  import java.util.zip.Inflater;
62  
63  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
64  import org.eclipse.jgit.errors.MissingObjectException;
65  import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
66  import org.eclipse.jgit.internal.JGitText;
67  import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackList;
68  import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
69  import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
70  import org.eclipse.jgit.internal.storage.file.PackIndex;
71  import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
72  import org.eclipse.jgit.internal.storage.pack.CachedPack;
73  import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs;
74  import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
75  import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
76  import org.eclipse.jgit.internal.storage.pack.PackWriter;
77  import org.eclipse.jgit.lib.AbbreviatedObjectId;
78  import org.eclipse.jgit.lib.AnyObjectId;
79  import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
80  import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
81  import org.eclipse.jgit.lib.BitmapIndex;
82  import org.eclipse.jgit.lib.BitmapIndex.BitmapBuilder;
83  import org.eclipse.jgit.lib.InflaterCache;
84  import org.eclipse.jgit.lib.ObjectId;
85  import org.eclipse.jgit.lib.ObjectLoader;
86  import org.eclipse.jgit.lib.ObjectReader;
87  import org.eclipse.jgit.lib.ProgressMonitor;
88  import org.eclipse.jgit.util.BlockList;
89  
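/*
 * Illustrative usage sketch (the repository and object id variables are
 * hypothetical): a DfsReader is normally obtained from the object database
 * and must be closed when the caller is done with it, for example:
 *
 *   try (ObjectReader rd = repo.getObjectDatabase().newReader()) {
 *       ObjectLoader ldr = rd.open(objectId);
 *       // ... use ldr ...
 *   }
 */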
90  /**
91   * Reader through which repository content is accessed.
92   * <p>
93   * See the base {@link org.eclipse.jgit.lib.ObjectReader} documentation for
94   * details. Notably, a reader is not thread safe.
95   */
96  public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
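	/** Maximum number of matches reported when resolving an abbreviated id. */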
97  	private static final int MAX_RESOLVE_MATCHES = 256;
98  
99  	/** Temporary buffer large enough for at least one raw object id. */
100 	final byte[] tempId = new byte[OBJECT_ID_LENGTH];
101 
102 	/** Database this reader loads objects from. */
103 	final DfsObjDatabase db;
104 
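	/** IO statistics accumulated by this reader; see {@link #getIoStats()}. */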
105 	final DfsReaderIoStats.Accumulator stats = new DfsReaderIoStats.Accumulator();
106 
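	// Per-reader state, allocated lazily: the shared Inflater, the most
	// recently pinned block, the delta base cache, the last pack an object
	// was found in, and whether unreachable (garbage) packs are skipped.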
107 	private Inflater inf;
108 	private DfsBlock block;
109 	private DeltaBaseCache baseCache;
110 	private DfsPackFile last;
111 	private boolean avoidUnreachable;
112 
113 	/**
114 	 * Initialize a new DfsReader.
115 	 *
116 	 * @param db
117 	 *            parent DfsObjDatabase.
118 	 */
119 	protected DfsReader(DfsObjDatabase db) {
120 		this.db = db;
121 		this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
122 	}
123 
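	/** Get the reader options configured on the parent object database. */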
124 	DfsReaderOptions getOptions() {
125 		return db.getReaderOptions();
126 	}
127 
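	/** Get this reader's delta base cache, creating it on first use. */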
128 	DeltaBaseCache getDeltaBaseCache() {
129 		if (baseCache == null)
130 			baseCache = new DeltaBaseCache(this);
131 		return baseCache;
132 	}
133 
134 	/** {@inheritDoc} */
135 	@Override
136 	public ObjectReader newReader() {
137 		return db.newReader();
138 	}
139 
140 	/** {@inheritDoc} */
141 	@Override
142 	public void setAvoidUnreachableObjects(boolean avoid) {
143 		avoidUnreachable = avoid;
144 	}
145 
146 	/** {@inheritDoc} */
147 	@Override
148 	public BitmapIndex getBitmapIndex() throws IOException {
149 		for (DfsPackFile pack : db.getPacks()) {
150 			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
151 			if (bitmapIndex != null)
152 				return new BitmapIndexImpl(bitmapIndex);
153 		}
154 		return null;
155 	}
156 
157 	/** {@inheritDoc} */
158 	@Override
159 	public Collection<CachedPack> getCachedPacksAndUpdate(
160 		BitmapBuilder needBitmap) throws IOException {
161 		for (DfsPackFile pack : db.getPacks()) {
162 			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
163 			if (needBitmap.removeAllOrNone(bitmapIndex))
164 				return Collections.<CachedPack> singletonList(
165 						new DfsCachedPack(pack));
166 		}
167 		return Collections.emptyList();
168 	}
169 
170 	/** {@inheritDoc} */
171 	@Override
172 	public Collection<ObjectId> resolve(AbbreviatedObjectId id)
173 			throws IOException {
174 		if (id.isComplete())
175 			return Collections.singleton(id.toObjectId());
176 		HashSet<ObjectId> matches = new HashSet<>(4);
177 		PackList packList = db.getPackList();
178 		resolveImpl(packList, id, matches);
179 		if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
180 			stats.scanPacks++;
181 			resolveImpl(db.scanPacks(packList), id, matches);
182 		}
183 		return matches;
184 	}
185 
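	/**
	 * Collect matches for the abbreviation from the given pack list, stopping
	 * once {@code MAX_RESOLVE_MATCHES} candidates have been found.
	 */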
186 	private void resolveImpl(PackList packList, AbbreviatedObjectId id,
187 			HashSet<ObjectId> matches) throws IOException {
188 		for (DfsPackFile pack : packList.packs) {
189 			if (skipGarbagePack(pack)) {
190 				continue;
191 			}
192 			pack.resolve(this, matches, id, MAX_RESOLVE_MATCHES);
193 			if (matches.size() >= MAX_RESOLVE_MATCHES) {
194 				break;
195 			}
196 		}
197 	}
198 
199 	/** {@inheritDoc} */
200 	@Override
201 	public boolean has(AnyObjectId objectId) throws IOException {
202 		if (last != null
203 				&& !skipGarbagePack(last)
204 				&& last.hasObject(this, objectId))
205 			return true;
206 		PackList packList = db.getPackList();
207 		if (hasImpl(packList, objectId)) {
208 			return true;
209 		} else if (packList.dirty()) {
210 			stats.scanPacks++;
211 			return hasImpl(db.scanPacks(packList), objectId);
212 		}
213 		return false;
214 	}
215 
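	/**
	 * Search the pack list for the object, remembering the containing pack in
	 * {@code last} when it is found.
	 */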
216 	private boolean hasImpl(PackList packList, AnyObjectId objectId)
217 			throws IOException {
218 		for (DfsPackFile pack : packList.packs) {
219 			if (pack == last || skipGarbagePack(pack))
220 				continue;
221 			if (pack.hasObject(this, objectId)) {
222 				last = pack;
223 				return true;
224 			}
225 		}
226 		return false;
227 	}
228 
229 	/** {@inheritDoc} */
230 	@Override
231 	public ObjectLoader open(AnyObjectId objectId, int typeHint)
232 			throws MissingObjectException, IncorrectObjectTypeException,
233 			IOException {
234 		ObjectLoader ldr;
235 		if (last != null && !skipGarbagePack(last)) {
236 			ldr = last.get(this, objectId);
237 			if (ldr != null) {
238 				return checkType(ldr, objectId, typeHint);
239 			}
240 		}
241 
242 		PackList packList = db.getPackList();
243 		ldr = openImpl(packList, objectId);
244 		if (ldr != null) {
245 			return checkType(ldr, objectId, typeHint);
246 		}
247 		if (packList.dirty()) {
248 			stats.scanPacks++;
249 			ldr = openImpl(db.scanPacks(packList), objectId);
250 			if (ldr != null) {
251 				return checkType(ldr, objectId, typeHint);
252 			}
253 		}
254 
255 		if (typeHint == OBJ_ANY)
256 			throw new MissingObjectException(objectId.copy(),
257 					JGitText.get().unknownObjectType2);
258 		throw new MissingObjectException(objectId.copy(), typeHint);
259 	}
260 
261 	private static ObjectLoader checkType(ObjectLoader ldr, AnyObjectId id,
262 			int typeHint) throws IncorrectObjectTypeException {
263 		if (typeHint != OBJ_ANY && ldr.getType() != typeHint) {
264 			throw new IncorrectObjectTypeException(id.copy(), typeHint);
265 		}
266 		return ldr;
267 	}
268 
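	/**
	 * Open the object from the first pack in the list that contains it,
	 * remembering that pack in {@code last}; returns null if it is absent.
	 */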
269 	private ObjectLoader openImpl(PackList packList, AnyObjectId objectId)
270 			throws IOException {
271 		for (DfsPackFile pack : packList.packs) {
272 			if (pack == last || skipGarbagePack(pack)) {
273 				continue;
274 			}
275 			ObjectLoader ldr = pack.get(this, objectId);
276 			if (ldr != null) {
277 				last = pack;
278 				return ldr;
279 			}
280 		}
281 		return null;
282 	}
283 
284 	/** {@inheritDoc} */
285 	@Override
286 	public Set<ObjectId> getShallowCommits() {
287 		return Collections.emptySet();
288 	}
289 
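	/**
	 * Order found objects by pack, then by offset within the pack, so each
	 * pack can be read in sequential order.
	 */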
290 	private static final Comparator<FoundObject<?>> FOUND_OBJECT_SORT = (
291 			FoundObject<?> a, FoundObject<?> b) -> {
292 		int cmp = a.packIndex - b.packIndex;
293 		if (cmp == 0)
294 			cmp = Long.signum(a.offset - b.offset);
295 		return cmp;
296 	};
297 
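	/**
	 * An object id together with the pack and offset where it was located;
	 * {@code pack} is null when the object was not found.
	 */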
298 	private static class FoundObject<T extends ObjectId> {
299 		final T id;
300 		final DfsPackFile pack;
301 		final long offset;
302 		final int packIndex;
303 
304 		FoundObject(T objectId, int packIdx, DfsPackFile pack, long offset) {
305 			this.id = objectId;
306 			this.pack = pack;
307 			this.offset = offset;
308 			this.packIndex = packIdx;
309 		}
310 
311 		FoundObject(T objectId) {
312 			this.id = objectId;
313 			this.pack = null;
314 			this.offset = 0;
315 			this.packIndex = 0;
316 		}
317 	}
318 
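	/**
	 * Locate every requested object, returning the results sorted by pack and
	 * offset; ids that were not found are included with a null pack.
	 */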
319 	private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
320 			Iterable<T> objectIds) throws IOException {
321 		Collection<T> pending = new LinkedList<>();
322 		for (T id : objectIds) {
323 			pending.add(id);
324 		}
325 
326 		PackList packList = db.getPackList();
327 		List<FoundObject<T>> r = new ArrayList<>();
328 		findAllImpl(packList, pending, r);
329 		if (!pending.isEmpty() && packList.dirty()) {
330 			stats.scanPacks++;
331 			findAllImpl(db.scanPacks(packList), pending, r);
332 		}
333 		for (T t : pending) {
334 			r.add(new FoundObject<>(t));
335 		}
336 		Collections.sort(r, FOUND_OBJECT_SORT);
337 		return r;
338 	}
339 
340 	private <T extends ObjectId> void findAllImpl(PackList packList,
341 			Collection<T> pending, List<FoundObject<T>> r) {
342 		DfsPackFile[] packs = packList.packs;
343 		if (packs.length == 0) {
344 			return;
345 		}
346 		int lastIdx = 0;
347 		DfsPackFile lastPack = packs[lastIdx];
348 
349 		OBJECT_SCAN: for (Iterator<T> it = pending.iterator(); it.hasNext();) {
350 			T t = it.next();
351 			if (!skipGarbagePack(lastPack)) {
352 				try {
353 					long p = lastPack.findOffset(this, t);
354 					if (0 < p) {
355 						r.add(new FoundObject<>(t, lastIdx, lastPack, p));
356 						it.remove();
357 						continue;
358 					}
359 				} catch (IOException e) {
360 					// Fall through and try to examine other packs.
361 				}
362 			}
363 
364 			for (int i = 0; i < packs.length; i++) {
365 				if (i == lastIdx)
366 					continue;
367 				DfsPackFile pack = packs[i];
368 				if (skipGarbagePack(pack))
369 					continue;
370 				try {
371 					long p = pack.findOffset(this, t);
372 					if (0 < p) {
373 						r.add(new FoundObject<>(t, i, pack, p));
374 						it.remove();
375 						lastIdx = i;
376 						lastPack = pack;
377 						continue OBJECT_SCAN;
378 					}
379 				} catch (IOException e) {
380 					// Examine other packs.
381 				}
382 			}
383 		}
384 
385 		last = lastPack;
386 	}
387 
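	/** Whether the pack holds unreachable garbage the caller asked to avoid. */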
388 	private boolean skipGarbagePack(DfsPackFile pack) {
389 		return avoidUnreachable && pack.isGarbage();
390 	}
391 
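	/*
	 * Illustrative sketch of draining the asynchronous queue returned by the
	 * bulk open() below (variable names are hypothetical):
	 *
	 *   AsyncObjectLoaderQueue<ObjectId> queue = reader.open(ids, true);
	 *   try {
	 *       while (queue.next())
	 *           use(queue.getObjectId(), queue.open());
	 *   } finally {
	 *       queue.release();
	 *   }
	 */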
392 	/** {@inheritDoc} */
393 	@Override
394 	public <T extends ObjectId> AsyncObjectLoaderQueue<T> open(
395 			Iterable<T> objectIds, final boolean reportMissing) {
396 		Iterable<FoundObject<T>> order;
397 		IOException error = null;
398 		try {
399 			order = findAll(objectIds);
400 		} catch (IOException e) {
401 			order = Collections.emptyList();
402 			error = e;
403 		}
404 
405 		final Iterator<FoundObject<T>> idItr = order.iterator();
406 		final IOException findAllError = error;
407 		return new AsyncObjectLoaderQueue<T>() {
408 			private FoundObject<T> cur;
409 
410 			@Override
411 			public boolean next() throws MissingObjectException, IOException {
412 				if (idItr.hasNext()) {
413 					cur = idItr.next();
414 					return true;
415 				} else if (findAllError != null) {
416 					throw findAllError;
417 				} else {
418 					return false;
419 				}
420 			}
421 
422 			@Override
423 			public T getCurrent() {
424 				return cur.id;
425 			}
426 
427 			@Override
428 			public ObjectId getObjectId() {
429 				return cur.id;
430 			}
431 
432 			@Override
433 			public ObjectLoader open() throws IOException {
434 				if (cur.pack == null)
435 					throw new MissingObjectException(cur.id,
436 							JGitText.get().unknownObjectType2);
437 				return cur.pack.load(DfsReader.this, cur.offset);
438 			}
439 
440 			@Override
441 			public boolean cancel(boolean mayInterruptIfRunning) {
442 				return true;
443 			}
444 
445 			@Override
446 			public void release() {
447 				// Nothing to clean up.
448 			}
449 		};
450 	}
451 
452 	/** {@inheritDoc} */
453 	@Override
454 	public <T extends ObjectId> AsyncObjectSizeQueue<T> getObjectSize(
455 			Iterable<T> objectIds, final boolean reportMissing) {
456 		Iterable<FoundObject<T>> order;
457 		IOException error = null;
458 		try {
459 			order = findAll(objectIds);
460 		} catch (IOException e) {
461 			order = Collections.emptyList();
462 			error = e;
463 		}
464 
465 		final Iterator<FoundObject<T>> idItr = order.iterator();
466 		final IOException findAllError = error;
467 		return new AsyncObjectSizeQueue<T>() {
468 			private FoundObject<T> cur;
469 			private long sz;
470 
471 			@Override
472 			public boolean next() throws MissingObjectException, IOException {
473 				if (idItr.hasNext()) {
474 					cur = idItr.next();
475 					if (cur.pack == null)
476 						throw new MissingObjectException(cur.id,
477 								JGitText.get().unknownObjectType2);
478 					sz = cur.pack.getObjectSize(DfsReader.this, cur.offset);
479 					return true;
480 				} else if (findAllError != null) {
481 					throw findAllError;
482 				} else {
483 					return false;
484 				}
485 			}
486 
487 			@Override
488 			public T getCurrent() {
489 				return cur.id;
490 			}
491 
492 			@Override
493 			public ObjectId getObjectId() {
494 				return cur.id;
495 			}
496 
497 			@Override
498 			public long getSize() {
499 				return sz;
500 			}
501 
502 			@Override
503 			public boolean cancel(boolean mayInterruptIfRunning) {
504 				return true;
505 			}
506 
507 			@Override
508 			public void release() {
509 				// Nothing to clean up.
510 			}
511 		};
512 	}
513 
514 	/** {@inheritDoc} */
515 	@Override
516 	public long getObjectSize(AnyObjectId objectId, int typeHint)
517 			throws MissingObjectException, IncorrectObjectTypeException,
518 			IOException {
519 		if (last != null && !skipGarbagePack(last)) {
520 			long sz = last.getObjectSize(this, objectId);
521 			if (0 <= sz) {
522 				return sz;
523 			}
524 		}
525 
526 		PackList packList = db.getPackList();
527 		long sz = getObjectSizeImpl(packList, objectId);
528 		if (0 <= sz) {
529 			return sz;
530 		}
531 		if (packList.dirty()) {
532 			sz = getObjectSizeImpl(db.scanPacks(packList), objectId);
533 			if (0 <= sz) {
534 				return sz;
535 			}
536 		}
537 
538 		if (typeHint == OBJ_ANY) {
539 			throw new MissingObjectException(objectId.copy(),
540 					JGitText.get().unknownObjectType2);
541 		}
542 		throw new MissingObjectException(objectId.copy(), typeHint);
543 	}
544 
545 	private long getObjectSizeImpl(PackList packList, AnyObjectId objectId)
546 			throws IOException {
547 		for (DfsPackFile pack : packList.packs) {
548 			if (pack == last || skipGarbagePack(pack)) {
549 				continue;
550 			}
551 			long sz = pack.getObjectSize(this, objectId);
552 			if (0 <= sz) {
553 				last = pack;
554 				return sz;
555 			}
556 		}
557 		return -1;
558 	}
559 
560 	/** {@inheritDoc} */
561 	@Override
562 	public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
563 		return new DfsObjectToPack(objectId, type);
564 	}
565 
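	/**
	 * Order objects by their offset within the owning pack so representation
	 * lookups scan the pack forward.
	 */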
566 	private static final Comparator<DfsObjectToPack> OFFSET_SORT = (
567 			DfsObjectToPack a,
568 			DfsObjectToPack b) -> Long.signum(a.getOffset() - b.getOffset());
569 
570 	@Override
571 	public void selectObjectRepresentation(PackWriter packer,
572 			ProgressMonitor monitor, Iterable<ObjectToPack> objects)
573 			throws IOException, MissingObjectException {
574 		// Don't check dirty bit on PackList; assume ObjectToPacks all came
575 		// from the current list.
576 		List<DfsPackFile> packs = sortPacksForSelectRepresentation();
577 		trySelectRepresentation(packer, monitor, objects, packs, false);
578 
579 		List<DfsPackFile> garbage = garbagePacksForSelectRepresentation();
580 		if (!garbage.isEmpty() && checkGarbagePacks(objects)) {
581 			trySelectRepresentation(packer, monitor, objects, garbage, true);
582 		}
583 	}
584 
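	/**
	 * Offer the representation of each candidate object found in the given
	 * packs to the packer, marking each object as found the first time a
	 * representation is offered for it and updating the progress monitor.
	 */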
585 	private void trySelectRepresentation(PackWriter packer,
586 			ProgressMonitor monitor, Iterable<ObjectToPack> objects,
587 			List<DfsPackFile> packs, boolean skipFound) throws IOException {
588 		for (DfsPackFile pack : packs) {
589 			List<DfsObjectToPack> tmp = findAllFromPack(pack, objects, skipFound);
590 			if (tmp.isEmpty())
591 				continue;
592 			Collections.sort(tmp, OFFSET_SORT);
593 			PackReverseIndex rev = pack.getReverseIdx(this);
594 			DfsObjectRepresentation rep = new DfsObjectRepresentation(pack);
595 			for (DfsObjectToPack otp : tmp) {
596 				pack.representation(rep, otp.getOffset(), this, rev);
597 				otp.setOffset(0);
598 				packer.select(otp, rep);
599 				if (!otp.isFound()) {
600 					otp.setFound();
601 					monitor.update(1);
602 				}
603 			}
604 		}
605 	}
606 
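	/**
	 * Order packs by their description's reuse preference when selecting
	 * object representations.
	 */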
607 	private static final Comparator<DfsPackFile> PACK_SORT_FOR_REUSE =
608 		Comparator.comparing(
609 				DfsPackFile::getPackDescription, DfsPackDescription.reuseComparator());
610 
611 	private List<DfsPackFile> sortPacksForSelectRepresentation()
612 			throws IOException {
613 		DfsPackFile[] packs = db.getPacks();
614 		List<DfsPackFile> sorted = new ArrayList<>(packs.length);
615 		for (DfsPackFile p : packs) {
616 			if (p.getPackDescription().getPackSource() != UNREACHABLE_GARBAGE) {
617 				sorted.add(p);
618 			}
619 		}
620 		Collections.sort(sorted, PACK_SORT_FOR_REUSE);
621 		return sorted;
622 	}
623 
624 	private List<DfsPackFile> garbagePacksForSelectRepresentation()
625 			throws IOException {
626 		DfsPackFile[] packs = db.getPacks();
627 		List<DfsPackFile> garbage = new ArrayList<>(packs.length);
628 		for (DfsPackFile p : packs) {
629 			if (p.getPackDescription().getPackSource() == UNREACHABLE_GARBAGE) {
630 				garbage.add(p);
631 			}
632 		}
633 		return garbage;
634 	}
635 
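	/**
	 * Whether any object has not yet been found, in which case the garbage
	 * packs should also be searched.
	 */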
636 	private static boolean checkGarbagePacks(Iterable<ObjectToPack> objects) {
637 		for (ObjectToPack otp : objects) {
638 			if (!((DfsObjectToPack) otp).isFound()) {
639 				return true;
640 			}
641 		}
642 		return false;
643 	}
644 
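	/**
	 * Find the offsets of the given objects within one pack, skipping corrupt
	 * entries and, when requested, objects that were already found.
	 */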
645 	private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
646 			Iterable<ObjectToPack> objects, boolean skipFound)
647 					throws IOException {
648 		List<DfsObjectToPack> tmp = new BlockList<>();
649 		PackIndex idx = pack.getPackIndex(this);
650 		for (ObjectToPack obj : objects) {
651 			DfsObjectToPack otp = (DfsObjectToPack) obj;
652 			if (skipFound && otp.isFound()) {
653 				continue;
654 			}
655 			long p = idx.findOffset(otp);
656 			if (0 < p && !pack.isCorrupt(p)) {
657 				otp.setOffset(p);
658 				tmp.add(otp);
659 			}
660 		}
661 		return tmp;
662 	}
663 
664 	/** {@inheritDoc} */
665 	@Override
666 	public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
667 			boolean validate) throws IOException,
668 			StoredObjectRepresentationNotAvailableException {
669 		DfsObjectToPack src = (DfsObjectToPack) otp;
670 		src.pack.copyAsIs(out, src, validate, this);
671 	}
672 
673 	/** {@inheritDoc} */
674 	@Override
675 	public void writeObjects(PackOutputStream out, List<ObjectToPack> list)
676 			throws IOException {
677 		for (ObjectToPack otp : list)
678 			out.writeObject(otp);
679 	}
680 
681 	/** {@inheritDoc} */
682 	@Override
683 	public void copyPackAsIs(PackOutputStream out, CachedPack pack)
684 			throws IOException {
685 		((DfsCachedPack) pack).copyAsIs(out, this);
686 	}
687 
688 	/**
689 	 * Copy bytes from the window to a caller supplied buffer.
690 	 *
691 	 * @param file
692 	 *            the file the desired window is stored within.
693 	 * @param position
694 	 *            position within the file to read from.
695 	 * @param dstbuf
696 	 *            destination buffer to copy into.
697 	 * @param dstoff
698 	 *            offset within <code>dstbuf</code> to start copying into.
699 	 * @param cnt
700 	 *            number of bytes to copy. This value may exceed the number of
701 	 *            bytes remaining in the window starting at offset
702 	 *            <code>position</code>.
703 	 * @return number of bytes actually copied; this may be less than
704 	 *         <code>cnt</code> if <code>cnt</code> exceeded the number of bytes
705 	 *         available.
706 	 * @throws IOException
707 	 *             this cursor does not match the provider or id and the proper
708 	 *             window could not be acquired through the provider's cache.
709 	 */
710 	int copy(BlockBasedFile file, long position, byte[] dstbuf, int dstoff,
711 			int cnt) throws IOException {
712 		if (cnt == 0)
713 			return 0;
714 
715 		long length = file.length;
716 		if (0 <= length && length <= position)
717 			return 0;
718 
719 		int need = cnt;
720 		do {
721 			pin(file, position);
722 			int r = block.copy(position, dstbuf, dstoff, need);
723 			position += r;
724 			dstoff += r;
725 			need -= r;
726 			if (length < 0)
727 				length = file.length;
728 		} while (0 < need && position < length);
729 		return cnt - need;
730 	}
731 
732 	/**
733 	 * Inflate a region of the pack starting at {@code position}.
734 	 *
735 	 * @param pack
736 	 *            the file the desired window is stored within.
737 	 * @param position
738 	 *            position within the file to read from.
739 	 * @param dstbuf
740 	 *            destination buffer the inflater should output decompressed
741 	 *            data to. Must be large enough to store the entire stream,
742 	 *            unless headerOnly is true.
743 	 * @param headerOnly
744 	 *            if true the caller wants only {@code dstbuf.length} bytes.
745 	 * @return number of bytes inflated into <code>dstbuf</code>.
746 	 * @throws IOException
747 	 *             this cursor does not match the provider or id and the proper
748 	 *             window could not be acquired through the provider's cache.
749 	 * @throws DataFormatException
750 	 *             the inflater encountered an invalid chunk of data. Data
751 	 *             stream corruption is likely.
752 	 */
753 	int inflate(DfsPackFile pack, long position, byte[] dstbuf,
754 			boolean headerOnly) throws IOException, DataFormatException {
755 		long start = System.nanoTime();
756 		prepareInflater();
757 		pin(pack, position);
758 		position += block.setInput(position, inf);
759 		for (int dstoff = 0;;) {
760 			int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
761 			dstoff += n;
762 			if (inf.finished() || (headerOnly && dstoff == dstbuf.length)) {
763 				stats.inflatedBytes += dstoff;
764 				stats.inflationMicros += BlockBasedFile.elapsedMicros(start);
765 				return dstoff;
766 			} else if (inf.needsInput()) {
767 				pin(pack, position);
768 				position += block.setInput(position, inf);
769 			} else if (n == 0)
770 				throw new DataFormatException();
771 		}
772 	}
773 
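	/**
	 * Pin the block containing {@code pos} and return it if the whole
	 * requested region fits inside that single block; otherwise return null.
	 */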
774 	DfsBlock quickCopy(DfsPackFile p, long pos, long cnt)
775 			throws IOException {
776 		pin(p, pos);
777 		if (block.contains(p.key, pos + (cnt - 1)))
778 			return block;
779 		return null;
780 	}
781 
782 	Inflater inflater() {
783 		prepareInflater();
784 		return inf;
785 	}
786 
787 	private void prepareInflater() {
788 		if (inf == null)
789 			inf = InflaterCache.get();
790 		else
791 			inf.reset();
792 	}
793 
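	/**
	 * Ensure {@code block} holds the file block containing {@code position},
	 * loading it through the block cache if necessary.
	 */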
794 	void pin(BlockBasedFile file, long position) throws IOException {
795 		if (block == null || !block.contains(file.key, position)) {
796 			// If memory is low, we may need what is in our window field to
797 			// be cleaned up by the GC during the get for the next window.
798 			// So we always clear it, even though we are just going to set
799 			// it again.
800 			block = null;
801 			block = file.getOrLoadBlock(position, this);
802 		}
803 	}
804 
805 	void unpin() {
806 		block = null;
807 	}
808 
809 	/**
810 	 * Get IO statistics accumulated by this reader.
811 	 *
812 	 * @return IO statistics accumulated by this reader.
813 	 */
814 	public DfsReaderIoStats getIoStats() {
815 		return new DfsReaderIoStats(stats);
816 	}
817 
818 	/**
819 	 * {@inheritDoc}
820 	 * <p>
821 	 * Release the current window cursor.
822 	 */
823 	@Override
824 	public void close() {
825 		last = null;
826 		block = null;
827 		baseCache = null;
828 		try {
829 			InflaterCache.release(inf);
830 		} finally {
831 			inf = null;
832 		}
833 	}
834 }