View Javadoc
1   /*
2    * Copyright (C) 2008-2011, Google Inc.
3    * Copyright (C) 2006-2008, Shawn O. Pearce <spearce@spearce.org>
4    * and other copyright owners as documented in the project's IP log.
5    *
6    * This program and the accompanying materials are made available
7    * under the terms of the Eclipse Distribution License v1.0 which
8    * accompanies this distribution, is reproduced below, and is
9    * available at http://www.eclipse.org/org/documents/edl-v10.php
10   *
11   * All rights reserved.
12   *
13   * Redistribution and use in source and binary forms, with or
14   * without modification, are permitted provided that the following
15   * conditions are met:
16   *
17   * - Redistributions of source code must retain the above copyright
18   *   notice, this list of conditions and the following disclaimer.
19   *
20   * - Redistributions in binary form must reproduce the above
21   *   copyright notice, this list of conditions and the following
22   *   disclaimer in the documentation and/or other materials provided
23   *   with the distribution.
24   *
25   * - Neither the name of the Eclipse Foundation, Inc. nor the
26   *   names of its contributors may be used to endorse or promote
27   *   products derived from this software without specific prior
28   *   written permission.
29   *
30   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
31   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
32   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
33   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
34   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
37   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
38   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
39   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
40   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
41   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
42   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
43   */
44  
45  package org.eclipse.jgit.internal.storage.dfs;
46  
47  import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
48  import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
49  
50  import java.io.IOException;
51  import java.util.ArrayList;
52  import java.util.Arrays;
53  import java.util.Collection;
54  import java.util.Collections;
55  import java.util.Comparator;
56  import java.util.HashSet;
57  import java.util.Iterator;
58  import java.util.LinkedList;
59  import java.util.List;
60  import java.util.Set;
61  import java.util.zip.DataFormatException;
62  import java.util.zip.Inflater;
63  
64  import org.eclipse.jgit.errors.IncorrectObjectTypeException;
65  import org.eclipse.jgit.errors.MissingObjectException;
66  import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
67  import org.eclipse.jgit.internal.JGitText;
68  import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackList;
69  import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
70  import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
71  import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
72  import org.eclipse.jgit.internal.storage.file.PackIndex;
73  import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
74  import org.eclipse.jgit.internal.storage.pack.CachedPack;
75  import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs;
76  import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
77  import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
78  import org.eclipse.jgit.internal.storage.pack.PackWriter;
79  import org.eclipse.jgit.lib.AbbreviatedObjectId;
80  import org.eclipse.jgit.lib.AnyObjectId;
81  import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
82  import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
83  import org.eclipse.jgit.lib.BitmapIndex;
84  import org.eclipse.jgit.lib.BitmapIndex.BitmapBuilder;
85  import org.eclipse.jgit.lib.InflaterCache;
86  import org.eclipse.jgit.lib.ObjectId;
87  import org.eclipse.jgit.lib.ObjectLoader;
88  import org.eclipse.jgit.lib.ObjectReader;
89  import org.eclipse.jgit.lib.ProgressMonitor;
90  import org.eclipse.jgit.util.BlockList;
91  
92  /**
93   * Reader to access repository content through.
94   * <p>
95   * See the base {@link org.eclipse.jgit.lib.ObjectReader} documentation for
96   * details. Notably, a reader is not thread safe.
97   */
98  public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
99  	private static final int MAX_RESOLVE_MATCHES = 256;
100 
101 	/** Temporary buffer large enough for at least one raw object id. */
102 	final byte[] tempId = new byte[OBJECT_ID_LENGTH];
103 
104 	/** Database this reader loads objects from. */
105 	final DfsObjDatabase db;
106 
107 	final DfsReaderIoStats.Accumulator stats = new DfsReaderIoStats.Accumulator();
108 
109 	private Inflater inf;
110 	private DfsBlock block;
111 	private DeltaBaseCache baseCache;
112 	private DfsPackFile last;
113 	private boolean avoidUnreachable;
114 
115 	/**
116 	 * Initialize a new DfsReader
117 	 *
118 	 * @param db
119 	 *            parent DfsObjDatabase.
120 	 */
	/**
	 * Initialize a new DfsReader.
	 *
	 * @param db
	 *            parent DfsObjDatabase this reader loads objects from.
	 */
	protected DfsReader(DfsObjDatabase db) {
		this.db = db;
		// Cache the database's threshold for streaming large objects.
		this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
	}
125 
	/** @return reader options configured on the parent object database. */
	DfsReaderOptions getOptions() {
		return db.getReaderOptions();
	}
129 
	/**
	 * Get this reader's delta base cache, creating it lazily on first use.
	 *
	 * @return cache of recently inflated delta bases, scoped to this reader.
	 */
	DeltaBaseCache getDeltaBaseCache() {
		if (baseCache == null)
			baseCache = new DeltaBaseCache(this);
		return baseCache;
	}
135 
	/** {@inheritDoc} */
	@Override
	public ObjectReader newReader() {
		// Delegate to the database so the new reader starts with fresh state.
		return db.newReader();
	}
141 
	/** {@inheritDoc} */
	@Override
	public void setAvoidUnreachableObjects(boolean avoid) {
		// When true, packs flagged as garbage are skipped during lookups
		// (see skipGarbagePack below).
		avoidUnreachable = avoid;
	}
147 
148 	/** {@inheritDoc} */
149 	@Override
150 	public BitmapIndex getBitmapIndex() throws IOException {
151 		for (DfsPackFile pack : db.getPacks()) {
152 			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
153 			if (bitmapIndex != null)
154 				return new BitmapIndexImpl(bitmapIndex);
155 		}
156 		return null;
157 	}
158 
	/** {@inheritDoc} */
	@Override
	public Collection<CachedPack> getCachedPacksAndUpdate(
		BitmapBuilder needBitmap) throws IOException {
		// Reuse an entire pack as-is when its bitmap fully covers the
		// objects still needed; otherwise reuse nothing.
		for (DfsPackFile pack : db.getPacks()) {
			PackBitmapIndex bitmapIndex = pack.getBitmapIndex(this);
			// NOTE(review): bitmapIndex is null for packs without a bitmap;
			// assumes removeAllOrNone tolerates null — confirm in BitmapBuilder.
			if (needBitmap.removeAllOrNone(bitmapIndex))
				return Collections.<CachedPack> singletonList(
						new DfsCachedPack(pack));
		}
		return Collections.emptyList();
	}
171 
172 	/** {@inheritDoc} */
173 	@Override
174 	public Collection<ObjectId> resolve(AbbreviatedObjectId id)
175 			throws IOException {
176 		if (id.isComplete())
177 			return Collections.singleton(id.toObjectId());
178 		HashSet<ObjectId> matches = new HashSet<>(4);
179 		PackList packList = db.getPackList();
180 		resolveImpl(packList, id, matches);
181 		if (matches.size() < MAX_RESOLVE_MATCHES && packList.dirty()) {
182 			stats.scanPacks++;
183 			resolveImpl(db.scanPacks(packList), id, matches);
184 		}
185 		return matches;
186 	}
187 
188 	private void resolveImpl(PackList packList, AbbreviatedObjectId id,
189 			HashSet<ObjectId> matches) throws IOException {
190 		for (DfsPackFile pack : packList.packs) {
191 			if (skipGarbagePack(pack)) {
192 				continue;
193 			}
194 			pack.resolve(this, matches, id, MAX_RESOLVE_MATCHES);
195 			if (matches.size() >= MAX_RESOLVE_MATCHES) {
196 				break;
197 			}
198 		}
199 	}
200 
201 	/** {@inheritDoc} */
202 	@Override
203 	public boolean has(AnyObjectId objectId) throws IOException {
204 		if (last != null
205 				&& !skipGarbagePack(last)
206 				&& last.hasObject(this, objectId))
207 			return true;
208 		PackList packList = db.getPackList();
209 		if (hasImpl(packList, objectId)) {
210 			return true;
211 		} else if (packList.dirty()) {
212 			stats.scanPacks++;
213 			return hasImpl(db.scanPacks(packList), objectId);
214 		}
215 		return false;
216 	}
217 
218 	private boolean hasImpl(PackList packList, AnyObjectId objectId)
219 			throws IOException {
220 		for (DfsPackFile pack : packList.packs) {
221 			if (pack == last || skipGarbagePack(pack))
222 				continue;
223 			if (pack.hasObject(this, objectId)) {
224 				last = pack;
225 				return true;
226 			}
227 		}
228 		return false;
229 	}
230 
231 	/** {@inheritDoc} */
232 	@Override
233 	public ObjectLoader open(AnyObjectId objectId, int typeHint)
234 			throws MissingObjectException, IncorrectObjectTypeException,
235 			IOException {
236 		ObjectLoader ldr;
237 		if (last != null && !skipGarbagePack(last)) {
238 			ldr = last.get(this, objectId);
239 			if (ldr != null) {
240 				return checkType(ldr, objectId, typeHint);
241 			}
242 		}
243 
244 		PackList packList = db.getPackList();
245 		ldr = openImpl(packList, objectId);
246 		if (ldr != null) {
247 			return checkType(ldr, objectId, typeHint);
248 		}
249 		if (packList.dirty()) {
250 			stats.scanPacks++;
251 			ldr = openImpl(db.scanPacks(packList), objectId);
252 			if (ldr != null) {
253 				return checkType(ldr, objectId, typeHint);
254 			}
255 		}
256 
257 		if (typeHint == OBJ_ANY)
258 			throw new MissingObjectException(objectId.copy(),
259 					JGitText.get().unknownObjectType2);
260 		throw new MissingObjectException(objectId.copy(), typeHint);
261 	}
262 
263 	private static ObjectLoader checkType(ObjectLoader ldr, AnyObjectId id,
264 			int typeHint) throws IncorrectObjectTypeException {
265 		if (typeHint != OBJ_ANY && ldr.getType() != typeHint) {
266 			throw new IncorrectObjectTypeException(id.copy(), typeHint);
267 		}
268 		return ldr;
269 	}
270 
271 	private ObjectLoader openImpl(PackList packList, AnyObjectId objectId)
272 			throws IOException {
273 		for (DfsPackFile pack : packList.packs) {
274 			if (pack == last || skipGarbagePack(pack)) {
275 				continue;
276 			}
277 			ObjectLoader ldr = pack.get(this, objectId);
278 			if (ldr != null) {
279 				last = pack;
280 				return ldr;
281 			}
282 		}
283 		return null;
284 	}
285 
	/** {@inheritDoc} */
	@Override
	public Set<ObjectId> getShallowCommits() {
		// DFS repositories never track shallow commits.
		return Collections.emptySet();
	}
291 
292 	private static final Comparator<FoundObject<?>> FOUND_OBJECT_SORT = new Comparator<FoundObject<?>>() {
293 		@Override
294 		public int compare(FoundObject<?> a, FoundObject<?> b) {
295 			int cmp = a.packIndex - b.packIndex;
296 			if (cmp == 0)
297 				cmp = Long.signum(a.offset - b.offset);
298 			return cmp;
299 		}
300 	};
301 
	/**
	 * Location of one object: the pack holding it, its byte offset there,
	 * and the index of that pack within the scanned pack list (used for
	 * stable sorting). A null pack marks an object found in no pack.
	 */
	private static class FoundObject<T extends ObjectId> {
		final T id;
		final DfsPackFile pack;
		final long offset;
		final int packIndex;

		FoundObject(T objectId, int packIdx, DfsPackFile pack, long offset) {
			this.id = objectId;
			this.pack = pack;
			this.offset = offset;
			this.packIndex = packIdx;
		}

		// Constructor for an object missing from every pack.
		FoundObject(T objectId) {
			this.id = objectId;
			this.pack = null;
			this.offset = 0;
			this.packIndex = 0;
		}
	}
322 
323 	private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
324 			Iterable<T> objectIds) throws IOException {
325 		Collection<T> pending = new LinkedList<>();
326 		for (T id : objectIds) {
327 			pending.add(id);
328 		}
329 
330 		PackList packList = db.getPackList();
331 		List<FoundObject<T>> r = new ArrayList<>();
332 		findAllImpl(packList, pending, r);
333 		if (!pending.isEmpty() && packList.dirty()) {
334 			stats.scanPacks++;
335 			findAllImpl(db.scanPacks(packList), pending, r);
336 		}
337 		for (T t : pending) {
338 			r.add(new FoundObject<>(t));
339 		}
340 		Collections.sort(r, FOUND_OBJECT_SORT);
341 		return r;
342 	}
343 
	/**
	 * Locate pending objects within the given pack list. Found objects are
	 * removed from {@code pending} and their locations appended to
	 * {@code r}; objects not found remain in {@code pending}.
	 */
	private <T extends ObjectId> void findAllImpl(PackList packList,
			Collection<T> pending, List<FoundObject<T>> r) {
		DfsPackFile[] packs = packList.packs;
		if (packs.length == 0) {
			return;
		}
		int lastIdx = 0;
		DfsPackFile lastPack = packs[lastIdx];

		// Objects tend to cluster in one pack, so always try the pack that
		// produced the previous hit before scanning the remainder.
		OBJECT_SCAN: for (Iterator<T> it = pending.iterator(); it.hasNext();) {
			T t = it.next();
			if (!skipGarbagePack(lastPack)) {
				try {
					long p = lastPack.findOffset(this, t);
					if (0 < p) {
						r.add(new FoundObject<>(t, lastIdx, lastPack, p));
						it.remove();
						continue;
					}
				} catch (IOException e) {
					// Fall though and try to examine other packs.
				}
			}

			for (int i = 0; i < packs.length; i++) {
				if (i == lastIdx)
					continue;
				DfsPackFile pack = packs[i];
				if (skipGarbagePack(pack))
					continue;
				try {
					long p = pack.findOffset(this, t);
					if (0 < p) {
						r.add(new FoundObject<>(t, i, pack, p));
						it.remove();
						// Remember the hit so the next object tries here first.
						lastIdx = i;
						lastPack = pack;
						continue OBJECT_SCAN;
					}
				} catch (IOException e) {
					// Examine other packs.
				}
			}
		}

		// Carry the locality hint into subsequent single-object lookups.
		last = lastPack;
	}
391 
	/**
	 * @param pack
	 *            candidate pack file.
	 * @return true if the pack holds garbage (unreachable) objects and the
	 *         caller requested avoiding unreachable objects.
	 */
	private boolean skipGarbagePack(DfsPackFile pack) {
		return avoidUnreachable && pack.isGarbage();
	}
395 
	/** {@inheritDoc} */
	@Override
	public <T extends ObjectId> AsyncObjectLoaderQueue<T> open(
			Iterable<T> objectIds, final boolean reportMissing) {
		Iterable<FoundObject<T>> order;
		IOException error = null;
		try {
			// Batch-locate the objects, sorted by pack and offset so the
			// queue reads each pack sequentially.
			order = findAll(objectIds);
		} catch (IOException e) {
			// Surface the lookup failure from next(), after any located
			// objects have been drained first.
			order = Collections.emptyList();
			error = e;
		}

		final Iterator<FoundObject<T>> idItr = order.iterator();
		final IOException findAllError = error;
		return new AsyncObjectLoaderQueue<T>() {
			private FoundObject<T> cur;

			@Override
			public boolean next() throws MissingObjectException, IOException {
				if (idItr.hasNext()) {
					cur = idItr.next();
					return true;
				} else if (findAllError != null) {
					throw findAllError;
				} else {
					return false;
				}
			}

			@Override
			public T getCurrent() {
				return cur.id;
			}

			@Override
			public ObjectId getObjectId() {
				return cur.id;
			}

			@Override
			public ObjectLoader open() throws IOException {
				// A null pack marks an object that was found nowhere.
				if (cur.pack == null)
					throw new MissingObjectException(cur.id,
							JGitText.get().unknownObjectType2);
				return cur.pack.load(DfsReader.this, cur.offset);
			}

			@Override
			public boolean cancel(boolean mayInterruptIfRunning) {
				return true;
			}

			@Override
			public void release() {
				// Nothing to clean up.
			}
		};
	}
455 
	/** {@inheritDoc} */
	@Override
	public <T extends ObjectId> AsyncObjectSizeQueue<T> getObjectSize(
			Iterable<T> objectIds, final boolean reportMissing) {
		Iterable<FoundObject<T>> order;
		IOException error = null;
		try {
			// Batch-locate first so sizes are read pack by pack, in offset
			// order.
			order = findAll(objectIds);
		} catch (IOException e) {
			// Surface the lookup failure from next(), after any located
			// objects have been drained first.
			order = Collections.emptyList();
			error = e;
		}

		final Iterator<FoundObject<T>> idItr = order.iterator();
		final IOException findAllError = error;
		return new AsyncObjectSizeQueue<T>() {
			private FoundObject<T> cur;
			private long sz;

			@Override
			public boolean next() throws MissingObjectException, IOException {
				if (idItr.hasNext()) {
					cur = idItr.next();
					// A null pack marks an object that was found nowhere.
					if (cur.pack == null)
						throw new MissingObjectException(cur.id,
								JGitText.get().unknownObjectType2);
					sz = cur.pack.getObjectSize(DfsReader.this, cur.offset);
					return true;
				} else if (findAllError != null) {
					throw findAllError;
				} else {
					return false;
				}
			}

			@Override
			public T getCurrent() {
				return cur.id;
			}

			@Override
			public ObjectId getObjectId() {
				return cur.id;
			}

			@Override
			public long getSize() {
				return sz;
			}

			@Override
			public boolean cancel(boolean mayInterruptIfRunning) {
				return true;
			}

			@Override
			public void release() {
				// Nothing to clean up.
			}
		};
	}
517 
518 	/** {@inheritDoc} */
519 	@Override
520 	public long getObjectSize(AnyObjectId objectId, int typeHint)
521 			throws MissingObjectException, IncorrectObjectTypeException,
522 			IOException {
523 		if (last != null && !skipGarbagePack(last)) {
524 			long sz = last.getObjectSize(this, objectId);
525 			if (0 <= sz) {
526 				return sz;
527 			}
528 		}
529 
530 		PackList packList = db.getPackList();
531 		long sz = getObjectSizeImpl(packList, objectId);
532 		if (0 <= sz) {
533 			return sz;
534 		}
535 		if (packList.dirty()) {
536 			sz = getObjectSizeImpl(packList, objectId);
537 			if (0 <= sz) {
538 				return sz;
539 			}
540 		}
541 
542 		if (typeHint == OBJ_ANY) {
543 			throw new MissingObjectException(objectId.copy(),
544 					JGitText.get().unknownObjectType2);
545 		}
546 		throw new MissingObjectException(objectId.copy(), typeHint);
547 	}
548 
549 	private long getObjectSizeImpl(PackList packList, AnyObjectId objectId)
550 			throws IOException {
551 		for (DfsPackFile pack : packList.packs) {
552 			if (pack == last || skipGarbagePack(pack)) {
553 				continue;
554 			}
555 			long sz = pack.getObjectSize(this, objectId);
556 			if (0 <= sz) {
557 				last = pack;
558 				return sz;
559 			}
560 		}
561 		return -1;
562 	}
563 
	/** {@inheritDoc} */
	@Override
	public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
		// DFS-specific ObjectToPack carrying pack/offset reuse state.
		return new DfsObjectToPack(objectId, type);
	}
569 
570 	private static final Comparator<DfsObjectToPack> OFFSET_SORT = new Comparator<DfsObjectToPack>() {
571 		@Override
572 		public int compare(DfsObjectToPack a, DfsObjectToPack b) {
573 			return Long.signum(a.getOffset() - b.getOffset());
574 		}
575 	};
576 
	/** {@inheritDoc} */
	@Override
	public void selectObjectRepresentation(PackWriter packer,
			ProgressMonitor monitor, Iterable<ObjectToPack> objects)
			throws IOException, MissingObjectException {
		// Don't check dirty bit on PackList; assume ObjectToPacks all came from the
		// current list.
		for (DfsPackFile pack : sortPacksForSelectRepresentation()) {
			List<DfsObjectToPack> tmp = findAllFromPack(pack, objects);
			if (tmp.isEmpty())
				continue;
			// Offset order lets representation() read the pack sequentially.
			Collections.sort(tmp, OFFSET_SORT);
			PackReverseIndex rev = pack.getReverseIdx(this);
			DfsObjectRepresentation rep = new DfsObjectRepresentation(pack);
			for (DfsObjectToPack otp : tmp) {
				pack.representation(rep, otp.getOffset(), this, rev);
				// Reset offset so a later pack may claim this object too.
				otp.setOffset(0);
				packer.select(otp, rep);
				if (!otp.isFound()) {
					otp.setFound();
					monitor.update(1);
				}
			}
		}
	}
601 
602 	private static final Comparator<DfsPackFile> PACK_SORT_FOR_REUSE = new Comparator<DfsPackFile>() {
603 		@Override
604 		public int compare(DfsPackFile af, DfsPackFile bf) {
605 			DfsPackDescription ad = af.getPackDescription();
606 			DfsPackDescription bd = bf.getPackDescription();
607 			PackSource as = ad.getPackSource();
608 			PackSource bs = bd.getPackSource();
609 
610 			if (as != null && as == bs && DfsPackDescription.isGC(as)) {
611 				// Push smaller GC files last; these likely have higher quality
612 				// delta compression and the contained representation should be
613 				// favored over other files.
614 				return Long.signum(bd.getFileSize(PACK) - ad.getFileSize(PACK));
615 			}
616 
617 			// DfsPackDescription.compareTo already did a reasonable sort.
618 			// Rely on Arrays.sort being stable, leaving equal elements.
619 			return 0;
620 		}
621 	};
622 
623 	private DfsPackFile[] sortPacksForSelectRepresentation()
624 			throws IOException {
625 		DfsPackFile[] packs = db.getPacks();
626 		DfsPackFile[] sorted = new DfsPackFile[packs.length];
627 		System.arraycopy(packs, 0, sorted, 0, packs.length);
628 		Arrays.sort(sorted, PACK_SORT_FOR_REUSE);
629 		return sorted;
630 	}
631 
632 	private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
633 			Iterable<ObjectToPack> objects) throws IOException {
634 		List<DfsObjectToPack> tmp = new BlockList<>();
635 		PackIndex idx = pack.getPackIndex(this);
636 		for (ObjectToPack otp : objects) {
637 			long p = idx.findOffset(otp);
638 			if (0 < p && !pack.isCorrupt(p)) {
639 				otp.setOffset(p);
640 				tmp.add((DfsObjectToPack) otp);
641 			}
642 		}
643 		return tmp;
644 	}
645 
	/** {@inheritDoc} */
	@Override
	public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
			boolean validate) throws IOException,
			StoredObjectRepresentationNotAvailableException {
		// Representation was selected earlier; stream the stored bytes as-is.
		DfsObjectToPack src = (DfsObjectToPack) otp;
		src.pack.copyAsIs(out, src, validate, this);
	}
654 
655 	/** {@inheritDoc} */
656 	@Override
657 	public void writeObjects(PackOutputStream out, List<ObjectToPack> list)
658 			throws IOException {
659 		for (ObjectToPack otp : list)
660 			out.writeObject(otp);
661 	}
662 
	/** {@inheritDoc} */
	@Override
	public void copyPackAsIs(PackOutputStream out, CachedPack pack)
			throws IOException {
		// Stream the entire cached pack without re-deltifying its objects.
		((DfsCachedPack) pack).copyAsIs(out, this);
	}
669 
	/**
	 * Copy bytes from the window to a caller supplied buffer.
	 *
	 * @param file
	 *            the file the desired window is stored within.
	 * @param position
	 *            position within the file to read from.
	 * @param dstbuf
	 *            destination buffer to copy into.
	 * @param dstoff
	 *            offset within <code>dstbuf</code> to start copying into.
	 * @param cnt
	 *            number of bytes to copy. This value may exceed the number of
	 *            bytes remaining in the window starting at offset
	 *            <code>pos</code>.
	 * @return number of bytes actually copied; this may be less than
	 *         <code>cnt</code> if <code>cnt</code> exceeded the number of bytes
	 *         available.
	 * @throws IOException
	 *             this cursor does not match the provider or id and the proper
	 *             window could not be acquired through the provider's cache.
	 */
	int copy(BlockBasedFile file, long position, byte[] dstbuf, int dstoff,
			int cnt) throws IOException {
		if (cnt == 0)
			return 0;

		long length = file.length;
		// A non-negative length means the file size is known; reads past
		// the end return nothing.
		if (0 <= length && length <= position)
			return 0;

		// Loop because the requested range may span multiple cached blocks.
		int need = cnt;
		do {
			pin(file, position);
			int r = block.copy(position, dstbuf, dstoff, need);
			position += r;
			dstoff += r;
			need -= r;
			// Length may have been unknown (<0) before the first pin;
			// re-read it so the loop can terminate at end of file.
			if (length < 0)
				length = file.length;
		} while (0 < need && position < length);
		return cnt - need;
	}
713 
	/**
	 * Inflate a region of the pack starting at {@code position}.
	 *
	 * @param pack
	 *            the file the desired window is stored within.
	 * @param position
	 *            position within the file to read from.
	 * @param dstbuf
	 *            destination buffer the inflater should output decompressed
	 *            data to. Must be large enough to store the entire stream,
	 *            unless headerOnly is true.
	 * @param headerOnly
	 *            if true the caller wants only {@code dstbuf.length} bytes.
	 * @return number of bytes inflated into <code>dstbuf</code>.
	 * @throws IOException
	 *             this cursor does not match the provider or id and the proper
	 *             window could not be acquired through the provider's cache.
	 * @throws DataFormatException
	 *             the inflater encountered an invalid chunk of data. Data
	 *             stream corruption is likely.
	 */
	int inflate(DfsPackFile pack, long position, byte[] dstbuf,
			boolean headerOnly) throws IOException, DataFormatException {
		prepareInflater();
		pin(pack, position);
		// Feed the inflater from the pinned block; advance past the bytes
		// it consumed.
		position += block.setInput(position, inf);
		for (int dstoff = 0;;) {
			int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
			dstoff += n;
			// Done when the stream ends, or when only the header was wanted
			// and the buffer is full.
			if (inf.finished() || (headerOnly && dstoff == dstbuf.length)) {
				stats.inflatedBytes += dstoff;
				return dstoff;
			} else if (inf.needsInput()) {
				// Compressed data continues in the next block.
				pin(pack, position);
				position += block.setInput(position, inf);
			} else if (n == 0)
				// No progress and no input needed: the stream is corrupt.
				throw new DataFormatException();
		}
	}
753 
	/**
	 * Get the cached block holding the region [pos, pos+cnt), but only if
	 * the entire region lies within a single block; otherwise null.
	 */
	DfsBlock quickCopy(DfsPackFile p, long pos, long cnt)
			throws IOException {
		pin(p, pos);
		// Usable only when the last byte of the range is in the same block.
		if (block.contains(p.key, pos + (cnt - 1)))
			return block;
		return null;
	}
761 
	/** @return this reader's shared inflater, reset and ready for use. */
	Inflater inflater() {
		prepareInflater();
		return inf;
	}
766 
	// Borrow an Inflater from the shared cache on first use, or reset the
	// existing one so it can decode a new stream.
	private void prepareInflater() {
		if (inf == null)
			inf = InflaterCache.get();
		else
			inf.reset();
	}
773 
	/**
	 * Position this reader's window on the block containing
	 * {@code position}, loading it via the file's cache if the currently
	 * pinned block does not already cover it.
	 */
	void pin(BlockBasedFile file, long position) throws IOException {
		if (block == null || !block.contains(file.key, position)) {
			// If memory is low, we may need what is in our window field to
			// be cleaned up by the GC during the get for the next window.
			// So we always clear it, even though we are just going to set
			// it again.
			block = null;
			block = file.getOrLoadBlock(position, this);
		}
	}
784 
	/** Release the currently pinned block, if any, so it can be reclaimed. */
	void unpin() {
		block = null;
	}
788 
	/**
	 * Get IO statistics accumulated by this reader.
	 *
	 * @return IO statistics accumulated by this reader.
	 */
	public DfsReaderIoStats getIoStats() {
		// Wrap the live accumulator; see DfsReaderIoStats for snapshotting.
		return new DfsReaderIoStats(stats);
	}
797 
	/**
	 * {@inheritDoc}
	 * <p>
	 * Release the current window cursor.
	 */
	@Override
	public void close() {
		// Drop references so cached packs/blocks can be reclaimed promptly.
		last = null;
		block = null;
		baseCache = null;
		try {
			// Return the inflater to the shared cache for reuse.
			InflaterCache.release(inf);
		} finally {
			inf = null;
		}
	}
814 }