View Javadoc
1   /*
2    * Copyright (C) 2008-2011, Google Inc.
3    * Copyright (C) 2006-2008, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  
45  package org.eclipse.jgit.internal.storage.dfs;
46  
47  import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
48  
49  import java.io.IOException;
50  import java.util.ArrayList;
51  import java.util.Collection;
52  import java.util.Collections;
53  import java.util.Comparator;
54  import java.util.HashSet;
55  import java.util.Iterator;
56  import java.util.List;
57  import java.util.Set;
58  import java.util.zip.DataFormatException;
59  import java.util.zip.Inflater;
60  
61  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
62  import org.eclipse.jgit.errors.MissingObjectException;
63  import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
64  import org.eclipse.jgit.internal.JGitText;
65  import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
66  import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
67  import org.eclipse.jgit.internal.storage.file.PackIndex;
68  import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
69  import org.eclipse.jgit.internal.storage.pack.CachedPack;
70  import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs;
71  import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
72  import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
73  import org.eclipse.jgit.internal.storage.pack.PackWriter;
74  import org.eclipse.jgit.lib.AbbreviatedObjectId;
75  import org.eclipse.jgit.lib.AnyObjectId;
76  import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
77  import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
78  import org.eclipse.jgit.lib.BitmapIndex;
79  import org.eclipse.jgit.lib.BitmapIndex.BitmapBuilder;
80  import org.eclipse.jgit.lib.InflaterCache;
81  import org.eclipse.jgit.lib.ObjectId;
82  import org.eclipse.jgit.lib.ObjectLoader;
83  import org.eclipse.jgit.lib.ObjectReader;
84  import org.eclipse.jgit.lib.ProgressMonitor;
85  import org.eclipse.jgit.util.BlockList;
86  
87  /**
88   * Reader to access repository content through.
89   * <p>
90   * See the base {@link ObjectReader} documentation for details. Notably, a
91   * reader is not thread safe.
92   */
public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
	/** Temporary buffer large enough for at least one raw object id. */
	final byte[] tempId = new byte[OBJECT_ID_LENGTH];

	/** Database this reader loads objects from. */
	final DfsObjDatabase db;

	/** Lazily obtained inflater; returned to {@link InflaterCache} in {@link #close()}. */
	private Inflater inf;

	/** Most recently pinned pack block; a single-entry window cache (see {@link #pin}). */
	private DfsBlock block;

	/** Lazily created cache of recently materialized delta bases. */
	private DeltaBaseCache baseCache;

	/** Pack that satisfied the most recent lookup; probed first on the next one. */
	private DfsPackFile last;

	/** When true, packs marked as garbage are skipped during object lookups. */
	private boolean avoidUnreachable;

	DfsReader(DfsObjDatabase db) {
		this.db = db;
	}

	/** @return reader configuration taken from the owning object database. */
	DfsReaderOptions getOptions() {
		return db.getReaderOptions();
	}

	/** @return the delta base cache, creating it on first use. */
	DeltaBaseCache getDeltaBaseCache() {
		if (baseCache == null)
			baseCache = new DeltaBaseCache(this);
		return baseCache;
	}

	/** @return size above which objects are streamed rather than fully buffered. */
	int getStreamFileThreshold() {
		return getOptions().getStreamFileThreshold();
	}

	@Override
	public ObjectReader newReader() {
		return new DfsReader(db);
	}

	@Override
	public void setAvoidUnreachableObjects(boolean avoid) {
		avoidUnreachable = avoid;
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * Scans the pack list and wraps the first bitmap index found; returns
	 * null when no pack carries one.
	 */
	@Override
	public BitmapIndex getBitmapIndex() throws IOException {
		for (DfsPackFile pack : db.getPacks()) {
			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
			if (bitmapIndex != null)
				return new BitmapIndexImpl(bitmapIndex);
		}
		return null;
	}

	/**
	 * Locate a single pack whose bitmap fully covers {@code needBitmap}.
	 * <p>
	 * If such a pack exists the covered bits are removed from
	 * {@code needBitmap} and the pack is returned as a cached pack that can
	 * be copied as-is; otherwise the bitmap is left unchanged and an empty
	 * list is returned.
	 *
	 * @param needBitmap
	 *            bitmap of objects that still need to be packed.
	 * @return a singleton list holding the covering pack, or an empty list.
	 * @throws IOException
	 *             a pack's bitmap index could not be read.
	 */
	public Collection<CachedPack> getCachedPacksAndUpdate(
		BitmapBuilder needBitmap) throws IOException {
		for (DfsPackFile pack : db.getPacks()) {
			// NOTE(review): bitmapIndex may be null for packs without a
			// bitmap; removeAllOrNone appears expected to tolerate that —
			// confirm against BitmapBuilder's contract.
			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
			if (needBitmap.removeAllOrNone(bitmapIndex))
				return Collections.<CachedPack> singletonList(
						new DfsCachedPack(pack));
		}
		return Collections.emptyList();
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * Collects candidate object ids matching the abbreviation from every
	 * (non-garbage, when configured) pack, stopping once 256 candidates
	 * have been gathered.
	 */
	@Override
	public Collection<ObjectId> resolve(AbbreviatedObjectId id)
			throws IOException {
		if (id.isComplete())
			return Collections.singleton(id.toObjectId());
		boolean noGarbage = avoidUnreachable;
		HashSet<ObjectId> matches = new HashSet<ObjectId>(4);
		for (DfsPackFile pack : db.getPacks()) {
			if (noGarbage && pack.isGarbage())
				continue;
			pack.resolve(this, matches, id, 256);
			if (256 <= matches.size())
				break;
		}
		return matches;
	}

	@Override
	public boolean has(AnyObjectId objectId) throws IOException {
		// Probe the pack that answered the previous lookup first; objects
		// requested close together frequently live in the same pack.
		if (last != null && last.hasObject(this, objectId))
			return true;
		boolean noGarbage = avoidUnreachable;
		for (DfsPackFile pack : db.getPacks()) {
			if (pack == last || (noGarbage && pack.isGarbage()))
				continue;
			if (pack.hasObject(this, objectId)) {
				last = pack;
				return true;
			}
		}
		return false;
	}

	@Override
	public ObjectLoader open(AnyObjectId objectId, int typeHint)
			throws MissingObjectException, IncorrectObjectTypeException,
			IOException {
		// Same last-pack optimization as has(AnyObjectId).
		if (last != null) {
			ObjectLoader ldr = last.get(this, objectId);
			if (ldr != null)
				return ldr;
		}

		boolean noGarbage = avoidUnreachable;
		for (DfsPackFile pack : db.getPacks()) {
			if (pack == last || (noGarbage && pack.isGarbage()))
				continue;
			ObjectLoader ldr = pack.get(this, objectId);
			if (ldr != null) {
				last = pack;
				return ldr;
			}
		}

		if (typeHint == OBJ_ANY)
			throw new MissingObjectException(objectId.copy(),
					JGitText.get().unknownObjectType2);
		throw new MissingObjectException(objectId.copy(), typeHint);
	}

	/** {@inheritDoc} This reader exposes no shallow commits; always empty. */
	@Override
	public Set<ObjectId> getShallowCommits() {
		return Collections.emptySet();
	}

	/**
	 * Orders found objects by pack, then by offset within the pack, so a
	 * batch can be serviced with mostly sequential pack reads.
	 */
	private static final Comparator<FoundObject<?>> FOUND_OBJECT_SORT = new Comparator<FoundObject<?>>() {
		public int compare(FoundObject<?> a, FoundObject<?> b) {
			int cmp = a.packIndex - b.packIndex;
			if (cmp == 0)
				cmp = Long.signum(a.offset - b.offset);
			return cmp;
		}
	};

	/**
	 * Result of locating one object id during a batch lookup. A null
	 * {@link #pack} means the object was not found in any pack.
	 */
	private static class FoundObject<T extends ObjectId> {
		final T id;
		final DfsPackFile pack;
		final long offset;
		final int packIndex;

		FoundObject(T objectId, int packIdx, DfsPackFile pack, long offset) {
			this.id = objectId;
			this.pack = pack;
			this.offset = offset;
			this.packIndex = packIdx;
		}

		// "Not found" marker: pack is null, offset/packIndex unused.
		FoundObject(T objectId) {
			this.id = objectId;
			this.pack = null;
			this.offset = 0;
			this.packIndex = 0;
		}
	}

	/**
	 * Locate every id of {@code objectIds}, recording the containing pack
	 * and offset (or a not-found marker). Results are sorted by pack and
	 * offset for sequential access. Lookup failures on an individual pack
	 * are swallowed so remaining packs can still satisfy the request.
	 */
	private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
			Iterable<T> objectIds) throws IOException {
		ArrayList<FoundObject<T>> r = new ArrayList<FoundObject<T>>();
		DfsPackFile[] packList = db.getPacks();
		if (packList.length == 0) {
			// No packs at all: every requested object is missing.
			for (T t : objectIds)
				r.add(new FoundObject<T>(t));
			return r;
		}

		// Remember the pack that held the previous object; consecutive ids
		// in a batch tend to come from the same pack.
		int lastIdx = 0;
		DfsPackFile lastPack = packList[lastIdx];
		boolean noGarbage = avoidUnreachable;

		OBJECT_SCAN: for (T t : objectIds) {
			try {
				long p = lastPack.findOffset(this, t);
				if (0 < p) {
					r.add(new FoundObject<T>(t, lastIdx, lastPack, p));
					continue;
				}
			} catch (IOException e) {
				// Fall through and try to examine other packs.
			}

			for (int i = 0; i < packList.length; i++) {
				if (i == lastIdx)
					continue; // Already tried above.
				DfsPackFile pack = packList[i];
				if (noGarbage && pack.isGarbage())
					continue;
				try {
					long p = pack.findOffset(this, t);
					if (0 < p) {
						r.add(new FoundObject<T>(t, i, pack, p));
						lastIdx = i;
						lastPack = pack;
						continue OBJECT_SCAN;
					}
				} catch (IOException e) {
					// Examine other packs.
				}
			}

			r.add(new FoundObject<T>(t));
		}

		Collections.sort(r, FOUND_OBJECT_SORT);
		last = lastPack;
		return r;
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * All lookups happen eagerly in {@link #findAll}; the returned queue
	 * replays the (pack/offset sorted) results. An IOException raised by
	 * findAll is deferred and rethrown from {@code next()} after the
	 * successfully located objects have been consumed.
	 */
	@Override
	public <T extends ObjectId> AsyncObjectLoaderQueue<T> open(
			Iterable<T> objectIds, final boolean reportMissing) {
		Iterable<FoundObject<T>> order;
		IOException error = null;
		try {
			order = findAll(objectIds);
		} catch (IOException e) {
			order = Collections.emptyList();
			error = e;
		}

		final Iterator<FoundObject<T>> idItr = order.iterator();
		final IOException findAllError = error;
		return new AsyncObjectLoaderQueue<T>() {
			private FoundObject<T> cur;

			public boolean next() throws MissingObjectException, IOException {
				if (idItr.hasNext()) {
					cur = idItr.next();
					return true;
				} else if (findAllError != null) {
					throw findAllError;
				} else {
					return false;
				}
			}

			public T getCurrent() {
				return cur.id;
			}

			public ObjectId getObjectId() {
				return cur.id;
			}

			public ObjectLoader open() throws IOException {
				// pack == null is the findAll "not found" marker.
				if (cur.pack == null)
					throw new MissingObjectException(cur.id,
							JGitText.get().unknownObjectType2);
				return cur.pack.load(DfsReader.this, cur.offset);
			}

			public boolean cancel(boolean mayInterruptIfRunning) {
				return true; // Work is synchronous; nothing to cancel.
			}

			public void release() {
				// Nothing to clean up.
			}
		};
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * Same structure as {@link #open(Iterable, boolean)}: eager findAll,
	 * sorted replay, deferred IOException. Missing objects surface as a
	 * MissingObjectException from {@code next()}.
	 */
	@Override
	public <T extends ObjectId> AsyncObjectSizeQueue<T> getObjectSize(
			Iterable<T> objectIds, final boolean reportMissing) {
		Iterable<FoundObject<T>> order;
		IOException error = null;
		try {
			order = findAll(objectIds);
		} catch (IOException e) {
			order = Collections.emptyList();
			error = e;
		}

		final Iterator<FoundObject<T>> idItr = order.iterator();
		final IOException findAllError = error;
		return new AsyncObjectSizeQueue<T>() {
			private FoundObject<T> cur;

			private long sz;

			public boolean next() throws MissingObjectException, IOException {
				if (idItr.hasNext()) {
					cur = idItr.next();
					if (cur.pack == null)
						throw new MissingObjectException(cur.id,
								JGitText.get().unknownObjectType2);
					sz = cur.pack.getObjectSize(DfsReader.this, cur.offset);
					return true;
				} else if (findAllError != null) {
					throw findAllError;
				} else {
					return false;
				}
			}

			public T getCurrent() {
				return cur.id;
			}

			public ObjectId getObjectId() {
				return cur.id;
			}

			public long getSize() {
				return sz;
			}

			public boolean cancel(boolean mayInterruptIfRunning) {
				return true; // Work is synchronous; nothing to cancel.
			}

			public void release() {
				// Nothing to clean up.
			}
		};
	}

	@Override
	public long getObjectSize(AnyObjectId objectId, int typeHint)
			throws MissingObjectException, IncorrectObjectTypeException,
			IOException {
		// Probe the last successful pack first, as in open/has.
		if (last != null) {
			long sz = last.getObjectSize(this, objectId);
			if (0 <= sz)
				return sz;
		}

		for (DfsPackFile pack : db.getPacks()) {
			if (pack == last)
				continue;
			long sz = pack.getObjectSize(this, objectId);
			if (0 <= sz) {
				last = pack;
				return sz;
			}
		}

		if (typeHint == OBJ_ANY)
			throw new MissingObjectException(objectId.copy(),
					JGitText.get().unknownObjectType2);
		throw new MissingObjectException(objectId.copy(), typeHint);
	}

	/**
	 * Create a DFS-specific packing wrapper for one object.
	 *
	 * @param objectId
	 *            object to be packed.
	 * @param type
	 *            Git object type of {@code objectId}.
	 * @return a new object-to-pack record.
	 */
	public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
		return new DfsObjectToPack(objectId, type);
	}

	/** Orders objects by their offset within a pack for sequential reads. */
	private static final Comparator<DfsObjectToPack> OFFSET_SORT = new Comparator<DfsObjectToPack>() {
		public int compare(DfsObjectToPack a, DfsObjectToPack b) {
			return Long.signum(a.getOffset() - b.getOffset());
		}
	};

	/**
	 * Offer each object's stored representation in every pack to the
	 * packer, which chooses the cheapest to reuse.
	 *
	 * @param packer
	 *            writer selecting representations.
	 * @param monitor
	 *            receives one update per newly found object.
	 * @param objects
	 *            objects being packed.
	 * @throws IOException
	 *             a pack index or reverse index could not be read.
	 * @throws MissingObjectException
	 *             declared by the interface; not thrown directly here.
	 */
	public void selectObjectRepresentation(PackWriter packer,
			ProgressMonitor monitor, Iterable<ObjectToPack> objects)
			throws IOException, MissingObjectException {
		for (DfsPackFile pack : db.getPacks()) {
			List<DfsObjectToPack> tmp = findAllFromPack(pack, objects);
			if (tmp.isEmpty())
				continue;
			// Visit objects in pack-offset order to keep reads sequential.
			Collections.sort(tmp, OFFSET_SORT);
			PackReverseIndex rev = pack.getReverseIdx(this);
			DfsObjectRepresentation rep = new DfsObjectRepresentation(pack);
			for (DfsObjectToPack otp : tmp) {
				pack.representation(rep, otp.getOffset(), this, rev);
				otp.setOffset(0); // Clear the scratch offset set below.
				packer.select(otp, rep);
				if (!otp.isFound()) {
					otp.setFound();
					monitor.update(1);
				}
			}
		}
	}

	/**
	 * Collect the subset of {@code objects} present in {@code pack},
	 * temporarily stashing each one's offset via {@code setOffset}.
	 */
	private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
			Iterable<ObjectToPack> objects) throws IOException {
		List<DfsObjectToPack> tmp = new BlockList<DfsObjectToPack>();
		PackIndex idx = pack.getPackIndex(this);
		for (ObjectToPack otp : objects) {
			long p = idx.findOffset(otp);
			if (0 < p) {
				otp.setOffset(p);
				tmp.add((DfsObjectToPack) otp);
			}
		}
		return tmp;
	}

	/**
	 * Copy one object's compressed form directly from its source pack.
	 *
	 * @param out
	 *            destination pack stream.
	 * @param otp
	 *            object whose selected representation is copied; must be a
	 *            {@link DfsObjectToPack} produced by this reader.
	 * @param validate
	 *            if true the stored data is checked while copying.
	 */
	public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
			boolean validate) throws IOException,
			StoredObjectRepresentationNotAvailableException {
		DfsObjectToPack src = (DfsObjectToPack) otp;
		src.pack.copyAsIs(out, src, validate, this);
	}

	/** Write each object of {@code list} to {@code out}, in list order. */
	public void writeObjects(PackOutputStream out, List<ObjectToPack> list)
			throws IOException {
		for (ObjectToPack otp : list)
			out.writeObject(otp);
	}

	/** Stream an entire cached pack, unmodified, into {@code out}. */
	public void copyPackAsIs(PackOutputStream out, CachedPack pack)
			throws IOException {
		((DfsCachedPack) pack).copyAsIs(out, this);
	}

	/**
	 * Copy bytes from the window to a caller supplied buffer.
	 *
	 * @param pack
	 *            the file the desired window is stored within.
	 * @param position
	 *            position within the file to read from.
	 * @param dstbuf
	 *            destination buffer to copy into.
	 * @param dstoff
	 *            offset within <code>dstbuf</code> to start copying into.
	 * @param cnt
	 *            number of bytes to copy. This value may exceed the number of
	 *            bytes remaining in the window starting at offset
	 *            <code>pos</code>.
	 * @return number of bytes actually copied; this may be less than
	 *         <code>cnt</code> if <code>cnt</code> exceeded the number of bytes
	 *         available.
	 * @throws IOException
	 *             this cursor does not match the provider or id and the proper
	 *             window could not be acquired through the provider's cache.
	 */
	int copy(DfsPackFile pack, long position, byte[] dstbuf, int dstoff, int cnt)
			throws IOException {
		if (cnt == 0)
			return 0;

		// length < 0 means the pack length is not yet known; it is
		// re-read from the pack after the first block load below.
		long length = pack.length;
		if (0 <= length && length <= position)
			return 0;

		int need = cnt;
		do {
			pin(pack, position);
			int r = block.copy(position, dstbuf, dstoff, need);
			position += r;
			dstoff += r;
			need -= r;
			if (length < 0)
				length = pack.length;
		} while (0 < need && position < length);
		return cnt - need;
	}

	/**
	 * Inflate a region of the pack starting at {@code position}.
	 *
	 * @param pack
	 *            the file the desired window is stored within.
	 * @param position
	 *            position within the file to read from.
	 * @param dstbuf
	 *            destination buffer the inflater should output decompressed
	 *            data to. Must be large enough to store the entire stream,
	 *            unless headerOnly is true.
	 * @param headerOnly
	 *            if true the caller wants only {@code dstbuf.length} bytes.
	 * @return number of bytes inflated into <code>dstbuf</code>.
	 * @throws IOException
	 *             this cursor does not match the provider or id and the proper
	 *             window could not be acquired through the provider's cache.
	 * @throws DataFormatException
	 *             the inflater encountered an invalid chunk of data. Data
	 *             stream corruption is likely.
	 */
	int inflate(DfsPackFile pack, long position, byte[] dstbuf,
			boolean headerOnly) throws IOException, DataFormatException {
		prepareInflater();
		pin(pack, position);
		position += block.setInput(position, inf);
		for (int dstoff = 0;;) {
			int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
			dstoff += n;
			if (inf.finished() || (headerOnly && dstoff == dstbuf.length))
				return dstoff;
			if (inf.needsInput()) {
				// Feed the next block of compressed input.
				pin(pack, position);
				position += block.setInput(position, inf);
			} else if (n == 0)
				// No progress and no input needed: corrupt stream.
				throw new DataFormatException();
		}
	}

	/**
	 * Return the single cached block covering {@code [pos, pos+cnt)} of
	 * pack {@code p}, or null if the range spans more than one block.
	 */
	DfsBlock quickCopy(DfsPackFile p, long pos, long cnt)
			throws IOException {
		pin(p, pos);
		if (block.contains(p.key, pos + (cnt - 1)))
			return block;
		return null;
	}

	/** @return this reader's inflater, reset and ready for a new stream. */
	Inflater inflater() {
		prepareInflater();
		return inf;
	}

	private void prepareInflater() {
		if (inf == null)
			inf = InflaterCache.get();
		else
			inf.reset();
	}

	/**
	 * Ensure {@link #block} holds the block of {@code pack} that contains
	 * {@code position}, loading it through the cache if necessary.
	 */
	void pin(DfsPackFile pack, long position) throws IOException {
		DfsBlock b = block;
		if (b == null || !b.contains(pack.key, position)) {
			// If memory is low, we may need what is in our window field to
			// be cleaned up by the GC during the get for the next window.
			// So we always clear it, even though we are just going to set
			// it again.
			block = null;
			block = pack.getOrLoadBlock(position, this);
		}
	}

	/** Drop the currently pinned block so it can be garbage collected. */
	void unpin() {
		block = null;
	}

	/** Release the current window cursor. */
	@Override
	public void close() {
		last = null;
		block = null;
		baseCache = null;
		try {
			InflaterCache.release(inf);
		} finally {
			inf = null;
		}
	}
}