View Javadoc
1   /*
2    * Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
3    * Copyright (C) 2010, Christian Halstrick <christian.halstrick@sap.com>
4    * Copyright (C) 2010, Matthias Sohn <matthias.sohn@sap.com>
5    * Copyright (C) 2012-2013, Robin Rosenberg
6    * and other copyright owners as documented in the project's IP log.
7    *
8    * This program and the accompanying materials are made available
9    * under the terms of the Eclipse Distribution License v1.0 which
10   * accompanies this distribution, is reproduced below, and is
11   * available at http://www.eclipse.org/org/documents/edl-v10.php
12   *
13   * All rights reserved.
14   *
15   * Redistribution and use in source and binary forms, with or
16   * without modification, are permitted provided that the following
17   * conditions are met:
18   *
19   * - Redistributions of source code must retain the above copyright
20   *   notice, this list of conditions and the following disclaimer.
21   *
22   * - Redistributions in binary form must reproduce the above
23   *   copyright notice, this list of conditions and the following
24   *   disclaimer in the documentation and/or other materials provided
25   *   with the distribution.
26   *
27   * - Neither the name of the Eclipse Foundation, Inc. nor the
28   *   names of its contributors may be used to endorse or promote
29   *   products derived from this software without specific prior
30   *   written permission.
31   *
32   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
33   * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
34   * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
35   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
36   * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
37   * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
38   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
39   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
40   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
41   * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
42   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
43   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
44   * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
45   */
46  
47  package org.eclipse.jgit.treewalk;
48  
49  import java.io.ByteArrayInputStream;
50  import java.io.File;
51  import java.io.FileInputStream;
52  import java.io.FileNotFoundException;
53  import java.io.IOException;
54  import java.io.InputStream;
55  import java.nio.ByteBuffer;
56  import java.nio.CharBuffer;
57  import java.nio.charset.CharacterCodingException;
58  import java.nio.charset.CharsetEncoder;
59  import java.text.MessageFormat;
60  import java.util.Arrays;
61  import java.util.Collections;
62  import java.util.Comparator;
63  
64  import org.eclipse.jgit.api.errors.FilterFailedException;
65  import org.eclipse.jgit.attributes.AttributesNode;
66  import org.eclipse.jgit.attributes.AttributesRule;
67  import org.eclipse.jgit.attributes.FilterCommand;
68  import org.eclipse.jgit.attributes.FilterCommandRegistry;
69  import org.eclipse.jgit.diff.RawText;
70  import org.eclipse.jgit.dircache.DirCache;
71  import org.eclipse.jgit.dircache.DirCacheEntry;
72  import org.eclipse.jgit.dircache.DirCacheIterator;
73  import org.eclipse.jgit.errors.CorruptObjectException;
74  import org.eclipse.jgit.errors.MissingObjectException;
75  import org.eclipse.jgit.errors.NoWorkTreeException;
76  import org.eclipse.jgit.ignore.FastIgnoreRule;
77  import org.eclipse.jgit.ignore.IgnoreNode;
78  import org.eclipse.jgit.internal.JGitText;
79  import org.eclipse.jgit.lib.Constants;
80  import org.eclipse.jgit.lib.CoreConfig;
81  import org.eclipse.jgit.lib.CoreConfig.CheckStat;
82  import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
83  import org.eclipse.jgit.lib.CoreConfig.SymLinks;
84  import org.eclipse.jgit.lib.FileMode;
85  import org.eclipse.jgit.lib.ObjectId;
86  import org.eclipse.jgit.lib.ObjectLoader;
87  import org.eclipse.jgit.lib.ObjectReader;
88  import org.eclipse.jgit.lib.Repository;
89  import org.eclipse.jgit.submodule.SubmoduleWalk;
90  import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
91  import org.eclipse.jgit.util.FS;
92  import org.eclipse.jgit.util.FS.ExecutionResult;
93  import org.eclipse.jgit.util.Holder;
94  import org.eclipse.jgit.util.IO;
95  import org.eclipse.jgit.util.Paths;
96  import org.eclipse.jgit.util.RawParseUtils;
97  import org.eclipse.jgit.util.TemporaryBuffer;
98  import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
99  import org.eclipse.jgit.util.io.AutoLFInputStream;
100 import org.eclipse.jgit.util.io.EolStreamTypeUtil;
101 import org.eclipse.jgit.util.sha1.SHA1;
102 
103 /**
104  * Walks a working directory tree as part of a {@link TreeWalk}.
105  * <p>
106  * Most applications will want to use the standard implementation of this
107  * iterator, {@link FileTreeIterator}, as that does all IO through the standard
108  * <code>java.io</code> package. Plugins for a Java based IDE may however wish
109  * to create their own implementations of this class to allow traversal of the
110  * IDE's project space, as well as benefit from any caching the IDE may have.
111  *
112  * @see FileTreeIterator
113  */
114 public abstract class WorkingTreeIterator extends AbstractTreeIterator {
	/**
	 * Maximum number of bytes of filter stdout/stderr captured into a
	 * {@link FilterFailedException} when an external clean filter fails.
	 */
	private static final int MAX_EXCEPTION_TEXT_SIZE = 10 * 1024;

	/** An empty entry array, suitable for {@link #init(Entry[])}. */
	protected static final Entry[] EOF = {};

	/** Size we perform file IO in if we have to read and hash a file. */
	static final int BUFFER_SIZE = 2048;

	/**
	 * Maximum size of files which may be read fully into memory for performance
	 * reasons.
	 */
	private static final long MAXIMUM_FILE_SIZE_TO_READ_FULLY = 65536;

	/** Inherited state of this iterator, describing working tree, etc. */
	private final IteratorState state;

	/** The {@link #idBuffer()} for the current entry. */
	private byte[] contentId;

	/** Index within {@link #entries} that {@link #contentId} came from. */
	private int contentIdFromPtr;

	/** List of entries obtained from the subclass. */
	private Entry[] entries;

	/** Total number of entries in {@link #entries} that are valid. */
	private int entryCnt;

	/** Current position within {@link #entries}. */
	private int ptr;

	/** If there is a .gitignore file present, the parsed rules from it. */
	private IgnoreNode ignoreNode;

	/**
	 * cached clean filter command. Use a Ref in order to distinguish between
	 * the ref not cached yet and the value null
	 */
	private Holder<String> cleanFilterCommandHolder;

	/**
	 * cached eol stream type. Use a Ref in order to distinguish between the ref
	 * not cached yet and the value null
	 */
	private Holder<EolStreamType> eolStreamTypeHolder;

	/** Repository that is the root level being iterated over */
	protected Repository repository;

	/** Cached canonical (filtered) length; -1 means not yet computed. */
	private long canonLen = -1;

	/** The offset of the content id in {@link #idBuffer()} */
	private int contentIdOffset;
170 
171 	/**
172 	 * Create a new iterator with no parent.
173 	 *
174 	 * @param options
175 	 *            working tree options to be used
176 	 */
177 	protected WorkingTreeIterator(WorkingTreeOptions options) {
178 		super();
179 		state = new IteratorState(options);
180 	}
181 
182 	/**
183 	 * Create a new iterator with no parent and a prefix.
184 	 * <p>
185 	 * The prefix path supplied is inserted in front of all paths generated by
186 	 * this iterator. It is intended to be used when an iterator is being
187 	 * created for a subsection of an overall repository and needs to be
188 	 * combined with other iterators that are created to run over the entire
189 	 * repository namespace.
190 	 *
191 	 * @param prefix
192 	 *            position of this iterator in the repository tree. The value
193 	 *            may be null or the empty string to indicate the prefix is the
194 	 *            root of the repository. A trailing slash ('/') is
195 	 *            automatically appended if the prefix does not end in '/'.
196 	 * @param options
197 	 *            working tree options to be used
198 	 */
199 	protected WorkingTreeIterator(final String prefix,
200 			WorkingTreeOptions options) {
201 		super(prefix);
202 		state = new IteratorState(options);
203 	}
204 
205 	/**
206 	 * Create an iterator for a subtree of an existing iterator.
207 	 *
208 	 * @param p
209 	 *            parent tree iterator.
210 	 */
211 	protected WorkingTreeIterator(final WorkingTreeIterator p) {
212 		super(p);
213 		state = p.state;
214 		repository = p.repository;
215 	}
216 
217 	/**
218 	 * Initialize this iterator for the root level of a repository.
219 	 * <p>
220 	 * This method should only be invoked after calling {@link #init(Entry[])},
221 	 * and only for the root iterator.
222 	 *
223 	 * @param repo
224 	 *            the repository.
225 	 */
226 	protected void initRootIterator(Repository repo) {
227 		repository = repo;
228 		Entry entry;
229 		if (ignoreNode instanceof PerDirectoryIgnoreNode)
230 			entry = ((PerDirectoryIgnoreNode) ignoreNode).entry;
231 		else
232 			entry = null;
233 		ignoreNode = new RootIgnoreNode(entry, repo);
234 	}
235 
236 	/**
237 	 * Define the matching {@link DirCacheIterator}, to optimize ObjectIds.
238 	 *
239 	 * Once the DirCacheIterator has been set this iterator must only be
240 	 * advanced by the TreeWalk that is supplied, as it assumes that itself and
241 	 * the corresponding DirCacheIterator are positioned on the same file path
242 	 * whenever {@link #idBuffer()} is invoked.
243 	 *
244 	 * @param walk
245 	 *            the walk that will be advancing this iterator.
246 	 * @param treeId
247 	 *            index of the matching {@link DirCacheIterator}.
248 	 */
249 	public void setDirCacheIterator(TreeWalk walk, int treeId) {
250 		state.walk = walk;
251 		state.dirCacheTree = treeId;
252 	}
253 
254 	@Override
255 	public boolean hasId() {
256 		if (contentIdFromPtr == ptr)
257 			return true;
258 		return (mode & FileMode.TYPE_MASK) == FileMode.TYPE_FILE;
259 	}
260 
	/**
	 * {@inheritDoc}
	 * <p>
	 * Returns the object id of the current entry, preferring the cached id
	 * from the matching index entry when the file appears unmodified;
	 * otherwise the id is computed by hashing the (possibly filtered) file
	 * content, or the submodule HEAD for gitlinks.
	 */
	@Override
	public byte[] idBuffer() {
		// Fast path: id already computed for the entry at ptr.
		if (contentIdFromPtr == ptr)
			return contentId;

		if (state.walk != null) {
			// If there is a matching DirCacheIterator, we can reuse
			// its idBuffer, but only if we appear to be clean against
			// the cached index information for the path.
			DirCacheIterator i = state.walk.getTree(state.dirCacheTree,
							DirCacheIterator.class);
			if (i != null) {
				DirCacheEntry ent = i.getDirCacheEntry();
				// Gitlinks are excluded: their index id cannot be trusted
				// to match the submodule's current HEAD.
				if (ent != null && compareMetadata(ent) == MetadataDiff.EQUAL
						&& ((ent.getFileMode().getBits()
								& FileMode.TYPE_MASK) != FileMode.TYPE_GITLINK)) {
					contentIdOffset = i.idOffset();
					contentIdFromPtr = ptr;
					return contentId = i.idBuffer();
				}
				contentIdOffset = 0;
			} else {
				contentIdOffset = 0;
			}
		}
		// Slow path: compute the id from working tree content.
		switch (mode & FileMode.TYPE_MASK) {
		case FileMode.TYPE_SYMLINK:
		case FileMode.TYPE_FILE:
			contentIdFromPtr = ptr;
			return contentId = idBufferBlob(entries[ptr]);
		case FileMode.TYPE_GITLINK:
			contentIdFromPtr = ptr;
			return contentId = idSubmodule(entries[ptr]);
		}
		// Trees and unknown modes have no content id of their own.
		return zeroid;
	}
297 
298 	@Override
299 	public boolean isWorkTree() {
300 		return true;
301 	}
302 
303 	/**
304 	 * Get submodule id for given entry.
305 	 *
306 	 * @param e
307 	 * @return non-null submodule id
308 	 */
309 	protected byte[] idSubmodule(Entry e) {
310 		if (repository == null)
311 			return zeroid;
312 		File directory;
313 		try {
314 			directory = repository.getWorkTree();
315 		} catch (NoWorkTreeException nwte) {
316 			return zeroid;
317 		}
318 		return idSubmodule(directory, e);
319 	}
320 
321 	/**
322 	 * Get submodule id using the repository at the location of the entry
323 	 * relative to the directory.
324 	 *
325 	 * @param directory
326 	 * @param e
327 	 * @return non-null submodule id
328 	 */
329 	protected byte[] idSubmodule(File directory, Entry e) {
330 		final Repository submoduleRepo;
331 		try {
332 			submoduleRepo = SubmoduleWalk.getSubmoduleRepository(directory,
333 					e.getName());
334 		} catch (IOException exception) {
335 			return zeroid;
336 		}
337 		if (submoduleRepo == null)
338 			return zeroid;
339 
340 		final ObjectId head;
341 		try {
342 			head = submoduleRepo.resolve(Constants.HEAD);
343 		} catch (IOException exception) {
344 			return zeroid;
345 		} finally {
346 			submoduleRepo.close();
347 		}
348 		if (head == null)
349 			return zeroid;
350 		final byte[] id = new byte[Constants.OBJECT_ID_LENGTH];
351 		head.copyRawTo(id, 0);
352 		return id;
353 	}
354 
	// ASCII decimal digits; presumably used to render the blob length in the
	// object header when hashing — TODO confirm in computeHash (not in view).
	private static final byte[] digits = { '0', '1', '2', '3', '4', '5', '6',
			'7', '8', '9' };

	/** Encoded "blob" object type string, for the header fed to the hasher. */
	private static final byte[] hblob = Constants
			.encodedTypeString(Constants.OBJ_BLOB);
360 
	/**
	 * Compute the blob id for an entry by hashing its (possibly filtered)
	 * working tree content. Returns {@link #zeroid} when the file cannot be
	 * read.
	 *
	 * @param e
	 *            the entry to hash
	 * @return the computed object id, or {@link #zeroid} on any IO failure
	 */
	private byte[] idBufferBlob(final Entry e) {
		try {
			final InputStream is = e.openInputStream();
			if (is == null)
				return zeroid;
			try {
				state.initializeReadBuffer();

				final long len = e.getLength();
				// possiblyFilteredInputStream() sets canonLen as a side
				// effect; that value is the length hashed below.
				InputStream filteredIs = possiblyFilteredInputStream(e, is, len,
						OperationType.CHECKIN_OP);
				return computeHash(filteredIs, canonLen);
			} finally {
				safeClose(is);
			}
		} catch (IOException err) {
			// Can't read the file? Don't report the failure either.
			return zeroid;
		}
	}
381 
382 	private InputStream possiblyFilteredInputStream(final Entry e,
383 			final InputStream is, final long len) throws IOException {
384 		return possiblyFilteredInputStream(e, is, len, null);
385 
386 	}
387 
	/**
	 * Wrap the raw entry stream with clean/eol filtering when configured,
	 * updating {@link #canonLen} to the filtered content length as a side
	 * effect.
	 *
	 * @param e
	 *            the entry being read
	 * @param is
	 *            raw input stream of the entry
	 * @param len
	 *            raw length of the entry, in bytes
	 * @param opType
	 *            operation for which the filtering applies; may be null
	 * @return the stream callers should read the content from
	 * @throws IOException
	 *             the stream could not be read or filtered
	 */
	private InputStream possiblyFilteredInputStream(final Entry e,
			final InputStream is, final long len, OperationType opType)
			throws IOException {
		// No filtering configured: the raw stream and length are canonical.
		if (getCleanFilterCommand() == null
				&& getEolStreamType(opType) == EolStreamType.DIRECT) {
			canonLen = len;
			return is;
		}

		// Small files are filtered fully in memory so the filtered length
		// is known immediately.
		if (len <= MAXIMUM_FILE_SIZE_TO_READ_FULLY) {
			ByteBuffer rawbuf = IO.readWholeStream(is, (int) len);
			rawbuf = filterClean(rawbuf.array(), rawbuf.limit(), opType);
			canonLen = rawbuf.limit();
			return new ByteArrayInputStream(rawbuf.array(), 0, (int) canonLen);
		}

		// Large binary files never get eol conversion, so with no clean
		// filter the raw stream is already canonical.
		if (getCleanFilterCommand() == null && isBinary(e)) {
				canonLen = len;
				return is;
			}

		// Large text file: filter one pass just to measure the canonical
		// length, then return a second filtered stream for the caller.
		final InputStream lenIs = filterClean(e.openInputStream(),
				opType);
		try {
			canonLen = computeLength(lenIs);
		} finally {
			safeClose(lenIs);
		}
		return filterClean(is, opType);
	}
418 
419 	private static void safeClose(final InputStream in) {
420 		try {
421 			in.close();
422 		} catch (IOException err2) {
423 			// Suppress any error related to closing an input
424 			// stream. We don't care, we should not have any
425 			// outstanding data to flush or anything like that.
426 		}
427 	}
428 
429 	private static boolean isBinary(Entry entry) throws IOException {
430 		InputStream in = entry.openInputStream();
431 		try {
432 			return RawText.isBinary(in);
433 		} finally {
434 			safeClose(in);
435 		}
436 	}
437 
438 	private ByteBuffer filterClean(byte[] src, int n, OperationType opType)
439 			throws IOException {
440 		InputStream in = new ByteArrayInputStream(src);
441 		try {
442 			return IO.readWholeStream(filterClean(in, opType), n);
443 		} finally {
444 			safeClose(in);
445 		}
446 	}
447 
448 	private InputStream filterClean(InputStream in) throws IOException {
449 		return filterClean(in, null);
450 	}
451 
	/**
	 * Apply eol conversion and, if configured, the clean filter to the
	 * stream. Built-in (registered) filters run in-process; otherwise the
	 * filter command is executed as an external shell process.
	 *
	 * @param in
	 *            raw input stream
	 * @param opType
	 *            operation for which the filtering applies; may be null
	 * @return stream producing the filtered content
	 * @throws IOException
	 *             the filter could not be run, or exited with a non-zero
	 *             return code (wrapped {@link FilterFailedException})
	 */
	private InputStream filterClean(InputStream in, OperationType opType)
			throws IOException {
		// Line-ending conversion always happens before the clean filter.
		in = handleAutoCRLF(in, opType);
		String filterCommand = getCleanFilterCommand();
		if (filterCommand != null) {
			// Prefer a registered in-process implementation of the filter.
			if (FilterCommandRegistry.isRegistered(filterCommand)) {
				LocalFile buffer = new TemporaryBuffer.LocalFile(null);
				FilterCommand command = FilterCommandRegistry
						.createFilterCommand(filterCommand, repository, in,
								buffer);
				while (command.run() != -1) {
					// loop as long as command.run() tells there is work to do
				}
				return buffer.openInputStream();
			}
			// Fall back to executing the filter as an external process in
			// the work tree, with GIT_DIR exported for the filter's use.
			FS fs = repository.getFS();
			ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
					new String[0]);
			filterProcessBuilder.directory(repository.getWorkTree());
			filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
					repository.getDirectory().getAbsolutePath());
			ExecutionResult result;
			try {
				result = fs.execute(filterProcessBuilder, in);
			} catch (IOException | InterruptedException e) {
				throw new IOException(new FilterFailedException(e,
						filterCommand, getEntryPathString()));
			}
			int rc = result.getRc();
			if (rc != 0) {
				// Include truncated stdout/stderr to aid diagnosis.
				throw new IOException(new FilterFailedException(rc,
						filterCommand, getEntryPathString(),
						result.getStdout().toByteArray(MAX_EXCEPTION_TEXT_SIZE),
						RawParseUtils.decode(result.getStderr()
								.toByteArray(MAX_EXCEPTION_TEXT_SIZE))));
			}
			return result.getStdout().openInputStream();
		}
		return in;
	}
492 
493 	private InputStream handleAutoCRLF(InputStream in, OperationType opType)
494 			throws IOException {
495 		return EolStreamTypeUtil.wrapInputStream(in, getEolStreamType(opType));
496 	}
497 
498 	/**
499 	 * Returns the working tree options used by this iterator.
500 	 *
501 	 * @return working tree options
502 	 */
503 	public WorkingTreeOptions getOptions() {
504 		return state.options;
505 	}
506 
507 	@Override
508 	public int idOffset() {
509 		return contentIdOffset;
510 	}
511 
512 	@Override
513 	public void reset() {
514 		if (!first()) {
515 			ptr = 0;
516 			if (!eof())
517 				parseEntry();
518 		}
519 	}
520 
521 	@Override
522 	public boolean first() {
523 		return ptr == 0;
524 	}
525 
526 	@Override
527 	public boolean eof() {
528 		return ptr == entryCnt;
529 	}
530 
531 	@Override
532 	public void next(final int delta) throws CorruptObjectException {
533 		ptr += delta;
534 		if (!eof()) {
535 			parseEntry();
536 		}
537 	}
538 
539 	@Override
540 	public void back(final int delta) throws CorruptObjectException {
541 		ptr -= delta;
542 		parseEntry();
543 	}
544 
	/**
	 * Load the entry at {@link #ptr} into the inherited mode/path fields and
	 * invalidate per-entry caches (canonical length, clean filter command,
	 * eol stream type).
	 */
	private void parseEntry() {
		final Entry e = entries[ptr];
		mode = e.getMode().getBits();

		// Copy the entry's encoded name into the shared path buffer,
		// right after this iterator's directory prefix.
		final int nameLen = e.encodedNameLen;
		ensurePathCapacity(pathOffset + nameLen, pathOffset);
		System.arraycopy(e.encodedName, 0, path, pathOffset, nameLen);
		pathLen = pathOffset + nameLen;
		// Reset lazily-computed per-entry state.
		canonLen = -1;
		cleanFilterCommandHolder = null;
		eolStreamTypeHolder = null;
	}
557 
558 	/**
559 	 * Get the raw byte length of this entry.
560 	 *
561 	 * @return size of this file, in bytes.
562 	 */
563 	public long getEntryLength() {
564 		return current().getLength();
565 	}
566 
567 	/**
568 	 * Get the filtered input length of this entry
569 	 *
570 	 * @return size of the content, in bytes
571 	 * @throws IOException
572 	 */
573 	public long getEntryContentLength() throws IOException {
574 		if (canonLen == -1) {
575 			long rawLen = getEntryLength();
576 			if (rawLen == 0)
577 				canonLen = 0;
578 			InputStream is = current().openInputStream();
579 			try {
580 				// canonLen gets updated here
581 				possiblyFilteredInputStream(current(), is, current()
582 						.getLength());
583 			} finally {
584 				safeClose(is);
585 			}
586 		}
587 		return canonLen;
588 	}
589 
590 	/**
591 	 * Get the last modified time of this entry.
592 	 *
593 	 * @return last modified time of this file, in milliseconds since the epoch
594 	 *         (Jan 1, 1970 UTC).
595 	 */
596 	public long getEntryLastModified() {
597 		return current().getLastModified();
598 	}
599 
600 	/**
601 	 * Obtain an input stream to read the file content.
602 	 * <p>
603 	 * Efficient implementations are not required. The caller will usually
604 	 * obtain the stream only once per entry, if at all.
605 	 * <p>
606 	 * The input stream should not use buffering if the implementation can avoid
607 	 * it. The caller will buffer as necessary to perform efficient block IO
608 	 * operations.
609 	 * <p>
610 	 * The caller will close the stream once complete.
611 	 *
612 	 * @return a stream to read from the file.
613 	 * @throws IOException
614 	 *             the file could not be opened for reading.
615 	 */
616 	public InputStream openEntryStream() throws IOException {
617 		InputStream rawis = current().openInputStream();
618 		if (getCleanFilterCommand() == null
619 				&& getEolStreamType() == EolStreamType.DIRECT)
620 			return rawis;
621 		else
622 			return filterClean(rawis);
623 	}
624 
625 	/**
626 	 * Determine if the current entry path is ignored by an ignore rule.
627 	 *
628 	 * @return true if the entry was ignored by an ignore rule file.
629 	 * @throws IOException
630 	 *             a relevant ignore rule file exists but cannot be read.
631 	 */
632 	public boolean isEntryIgnored() throws IOException {
633 		return isEntryIgnored(pathLen);
634 	}
635 
636 	/**
637 	 * Determine if the entry path is ignored by an ignore rule.
638 	 *
639 	 * @param pLen
640 	 *            the length of the path in the path buffer.
641 	 * @return true if the entry is ignored by an ignore rule.
642 	 * @throws IOException
643 	 *             a relevant ignore rule file exists but cannot be read.
644 	 */
645 	protected boolean isEntryIgnored(final int pLen) throws IOException {
646 		return isEntryIgnored(pLen, mode, false);
647 	}
648 
649 	/**
650 	 * Determine if the entry path is ignored by an ignore rule. Consider
651 	 * possible rule negation from child iterator.
652 	 *
653 	 * @param pLen
654 	 *            the length of the path in the path buffer.
655 	 * @param fileMode
656 	 *            the original iterator file mode
657 	 * @param negatePrevious
658 	 *            true if the previous matching iterator rule was negation
659 	 * @return true if the entry is ignored by an ignore rule.
660 	 * @throws IOException
661 	 *             a relevant ignore rule file exists but cannot be read.
662 	 */
663 	private boolean isEntryIgnored(final int pLen, int fileMode,
664 			boolean negatePrevious)
665 			throws IOException {
666 		IgnoreNode rules = getIgnoreNode();
667 		if (rules != null) {
668 			// The ignore code wants path to start with a '/' if possible.
669 			// If we have the '/' in our path buffer because we are inside
670 			// a subdirectory include it in the range we convert to string.
671 			//
672 			int pOff = pathOffset;
673 			if (0 < pOff)
674 				pOff--;
675 			String p = TreeWalk.pathOf(path, pOff, pLen);
676 			switch (rules.isIgnored(p, FileMode.TREE.equals(fileMode),
677 					negatePrevious)) {
678 			case IGNORED:
679 				return true;
680 			case NOT_IGNORED:
681 				return false;
682 			case CHECK_PARENT:
683 				negatePrevious = false;
684 				break;
685 			case CHECK_PARENT_NEGATE_FIRST_MATCH:
686 				negatePrevious = true;
687 				break;
688 			}
689 		}
690 		if (parent instanceof WorkingTreeIterator)
691 			return ((WorkingTreeIterator) parent).isEntryIgnored(pLen, fileMode,
692 					negatePrevious);
693 		return false;
694 	}
695 
696 	private IgnoreNode getIgnoreNode() throws IOException {
697 		if (ignoreNode instanceof PerDirectoryIgnoreNode)
698 			ignoreNode = ((PerDirectoryIgnoreNode) ignoreNode).load();
699 		return ignoreNode;
700 	}
701 
702 	/**
703 	 * Retrieves the {@link AttributesNode} for the current entry.
704 	 *
705 	 * @return {@link AttributesNode} for the current entry.
706 	 * @throws IOException
707 	 *             if an error is raised while parsing the .gitattributes file
708 	 * @since 3.7
709 	 */
710 	public AttributesNode getEntryAttributesNode() throws IOException {
711 		if (attributesNode instanceof PerDirectoryAttributesNode)
712 			attributesNode = ((PerDirectoryAttributesNode) attributesNode)
713 					.load();
714 		return attributesNode;
715 	}
716 
717 	private static final Comparator<Entry> ENTRY_CMP = new Comparator<Entry>() {
718 		@Override
719 		public int compare(Entry a, Entry b) {
720 			return Paths.compare(
721 					a.encodedName, 0, a.encodedNameLen, a.getMode().getBits(),
722 					b.encodedName, 0, b.encodedNameLen, b.getMode().getBits());
723 		}
724 	};
725 
726 	/**
727 	 * Constructor helper.
728 	 *
729 	 * @param list
730 	 *            files in the subtree of the work tree this iterator operates
731 	 *            on
732 	 */
733 	protected void init(final Entry[] list) {
734 		// Filter out nulls, . and .. as these are not valid tree entries,
735 		// also cache the encoded forms of the path names for efficient use
736 		// later on during sorting and iteration.
737 		//
738 		entries = list;
739 		int i, o;
740 
741 		final CharsetEncoder nameEncoder = state.nameEncoder;
742 		for (i = 0, o = 0; i < entries.length; i++) {
743 			final Entry e = entries[i];
744 			if (e == null)
745 				continue;
746 			final String name = e.getName();
747 			if (".".equals(name) || "..".equals(name)) //$NON-NLS-1$ //$NON-NLS-2$
748 				continue;
749 			if (Constants.DOT_GIT.equals(name))
750 				continue;
751 			if (Constants.DOT_GIT_IGNORE.equals(name))
752 				ignoreNode = new PerDirectoryIgnoreNode(e);
753 			if (Constants.DOT_GIT_ATTRIBUTES.equals(name))
754 				attributesNode = new PerDirectoryAttributesNode(e);
755 			if (i != o)
756 				entries[o] = e;
757 			e.encodeName(nameEncoder);
758 			o++;
759 		}
760 		entryCnt = o;
761 		Arrays.sort(entries, 0, entryCnt, ENTRY_CMP);
762 
763 		contentIdFromPtr = -1;
764 		ptr = 0;
765 		if (!eof())
766 			parseEntry();
767 		else if (pathLen == 0) // see bug 445363
768 			pathLen = pathOffset;
769 	}
770 
771 	/**
772 	 * Obtain the current entry from this iterator.
773 	 *
774 	 * @return the currently selected entry.
775 	 */
776 	protected Entry current() {
777 		return entries[ptr];
778 	}
779 
780 	/**
781 	 * The result of a metadata-comparison between the current entry and a
782 	 * {@link DirCacheEntry}
783 	 */
784 	public enum MetadataDiff {
785 		/**
786 		 * The entries are equal by metaData (mode, length,
787 		 * modification-timestamp) or the <code>assumeValid</code> attribute of
788 		 * the index entry is set
789 		 */
790 		EQUAL,
791 
792 		/**
793 		 * The entries are not equal by metaData (mode, length) or the
794 		 * <code>isUpdateNeeded</code> attribute of the index entry is set
795 		 */
796 		DIFFER_BY_METADATA,
797 
798 		/** index entry is smudged - can't use that entry for comparison */
799 		SMUDGED,
800 
801 		/**
802 		 * The entries are equal by metaData (mode, length) but differ by
803 		 * modification-timestamp.
804 		 */
805 		DIFFER_BY_TIMESTAMP
806 	}
807 
808 	/**
809 	 * Is the file mode of the current entry different than the given raw mode?
810 	 *
811 	 * @param rawMode
812 	 * @return true if different, false otherwise
813 	 */
814 	public boolean isModeDifferent(final int rawMode) {
815 		// Determine difference in mode-bits of file and index-entry. In the
816 		// bitwise presentation of modeDiff we'll have a '1' when the two modes
817 		// differ at this position.
818 		int modeDiff = getEntryRawMode() ^ rawMode;
819 
820 		if (modeDiff == 0)
821 			return false;
822 
823 		// Do not rely on filemode differences in case of symbolic links
824 		if (getOptions().getSymLinks() == SymLinks.FALSE)
825 			if (FileMode.SYMLINK.equals(rawMode))
826 				return false;
827 
828 		// Ignore the executable file bits if WorkingTreeOptions tell me to
829 		// do so. Ignoring is done by setting the bits representing a
830 		// EXECUTABLE_FILE to '0' in modeDiff
831 		if (!state.options.isFileMode())
832 			modeDiff &= ~FileMode.EXECUTABLE_FILE.getBits();
833 		return modeDiff != 0;
834 	}
835 
836 	/**
837 	 * Compare the metadata (mode, length, modification-timestamp) of the
838 	 * current entry and a {@link DirCacheEntry}
839 	 *
840 	 * @param entry
841 	 *            the {@link DirCacheEntry} to compare with
842 	 * @return a {@link MetadataDiff} which tells whether and how the entries
843 	 *         metadata differ
844 	 */
845 	public MetadataDiff compareMetadata(DirCacheEntry entry) {
846 		if (entry.isAssumeValid())
847 			return MetadataDiff.EQUAL;
848 
849 		if (entry.isUpdateNeeded())
850 			return MetadataDiff.DIFFER_BY_METADATA;
851 
852 		if (isModeDifferent(entry.getRawMode()))
853 			return MetadataDiff.DIFFER_BY_METADATA;
854 
855 		// Don't check for length or lastmodified on folders
856 		int type = mode & FileMode.TYPE_MASK;
857 		if (type == FileMode.TYPE_TREE || type == FileMode.TYPE_GITLINK)
858 			return MetadataDiff.EQUAL;
859 
860 		if (!entry.isSmudged() && entry.getLength() != (int) getEntryLength())
861 			return MetadataDiff.DIFFER_BY_METADATA;
862 
863 		// Git under windows only stores seconds so we round the timestamp
864 		// Java gives us if it looks like the timestamp in index is seconds
865 		// only. Otherwise we compare the timestamp at millisecond precision,
866 		// unless core.checkstat is set to "minimal", in which case we only
867 		// compare the whole second part.
868 		long cacheLastModified = entry.getLastModified();
869 		long fileLastModified = getEntryLastModified();
870 		long lastModifiedMillis = fileLastModified % 1000;
871 		long cacheMillis = cacheLastModified % 1000;
872 		if (getOptions().getCheckStat() == CheckStat.MINIMAL) {
873 			fileLastModified = fileLastModified - lastModifiedMillis;
874 			cacheLastModified = cacheLastModified - cacheMillis;
875 		} else if (cacheMillis == 0)
876 			fileLastModified = fileLastModified - lastModifiedMillis;
877 		// Some Java version on Linux return whole seconds only even when
878 		// the file systems supports more precision.
879 		else if (lastModifiedMillis == 0)
880 			cacheLastModified = cacheLastModified - cacheMillis;
881 
882 		if (fileLastModified != cacheLastModified)
883 			return MetadataDiff.DIFFER_BY_TIMESTAMP;
884 		else if (!entry.isSmudged())
885 			// The file is clean when you look at timestamps.
886 			return MetadataDiff.EQUAL;
887 		else
888 			return MetadataDiff.SMUDGED;
889 	}
890 
891 	/**
892 	 * Checks whether this entry differs from a given entry from the
893 	 * {@link DirCache}.
894 	 *
895 	 * File status information is used and if status is same we consider the
896 	 * file identical to the state in the working directory. Native git uses
897 	 * more stat fields than we have accessible in Java.
898 	 *
899 	 * @param entry
900 	 *            the entry from the dircache we want to compare against
901 	 * @param forceContentCheck
902 	 *            True if the actual file content should be checked if
903 	 *            modification time differs.
904 	 * @param reader
905 	 *            access to repository objects if necessary. Should not be null.
906 	 * @return true if content is most likely different.
907 	 * @throws IOException
908 	 * @since 3.3
909 	 */
910 	public boolean isModified(DirCacheEntry entry, boolean forceContentCheck,
911 			ObjectReader reader) throws IOException {
912 		if (entry == null)
913 			return !FileMode.MISSING.equals(getEntryFileMode());
914 		MetadataDiff diff = compareMetadata(entry);
915 		switch (diff) {
916 		case DIFFER_BY_TIMESTAMP:
917 			if (forceContentCheck)
918 				// But we are told to look at content even though timestamps
919 				// tell us about modification
920 				return contentCheck(entry, reader);
921 			else
922 				// We are told to assume a modification if timestamps differs
923 				return true;
924 		case SMUDGED:
925 			// The file is clean by timestamps but the entry was smudged.
926 			// Lets do a content check
927 			return contentCheck(entry, reader);
928 		case EQUAL:
929 			if (mode == FileMode.SYMLINK.getBits()) {
930 				return contentCheck(entry, reader);
931 			}
932 			return false;
933 		case DIFFER_BY_METADATA:
934 			if (mode == FileMode.SYMLINK.getBits())
935 				return contentCheck(entry, reader);
936 			return true;
937 		default:
938 			throw new IllegalStateException(MessageFormat.format(
939 					JGitText.get().unexpectedCompareResult, diff.name()));
940 		}
941 	}
942 
943 	/**
944 	 * Get the file mode to use for the current entry when it is to be updated
945 	 * in the index.
946 	 *
947 	 * @param indexIter
948 	 *            {@link DirCacheIterator} positioned at the same entry as this
949 	 *            iterator or null if no {@link DirCacheIterator} is available
950 	 *            at this iterator's current entry
951 	 * @return index file mode
952 	 */
953 	public FileMode getIndexFileMode(final DirCacheIterator indexIter) {
954 		final FileMode wtMode = getEntryFileMode();
955 		if (indexIter == null) {
956 			return wtMode;
957 		}
958 		final FileMode iMode = indexIter.getEntryFileMode();
959 		if (getOptions().isFileMode() && iMode != FileMode.GITLINK && iMode != FileMode.TREE) {
960 			return wtMode;
961 		}
962 		if (!getOptions().isFileMode()) {
963 			if (FileMode.REGULAR_FILE == wtMode
964 					&& FileMode.EXECUTABLE_FILE == iMode) {
965 				return iMode;
966 			}
967 			if (FileMode.EXECUTABLE_FILE == wtMode
968 					&& FileMode.REGULAR_FILE == iMode) {
969 				return iMode;
970 			}
971 		}
972 		if (FileMode.GITLINK == iMode
973 				&& FileMode.TREE == wtMode) {
974 			return iMode;
975 		}
976 		if (FileMode.TREE == iMode
977 				&& FileMode.GITLINK == wtMode) {
978 			return iMode;
979 		}
980 		return wtMode;
981 	}
982 
983 	/**
984 	 * Compares the entries content with the content in the filesystem.
985 	 * Unsmudges the entry when it is detected that it is clean.
986 	 *
987 	 * @param entry
988 	 *            the entry to be checked
989 	 * @param reader
990 	 *            acccess to repository data if necessary
991 	 * @return <code>true</code> if the content doesn't match,
992 	 *         <code>false</code> if it matches
993 	 * @throws IOException
994 	 */
995 	private boolean contentCheck(DirCacheEntry entry, ObjectReader reader)
996 			throws IOException {
997 		if (getEntryObjectId().equals(entry.getObjectId())) {
998 			// Content has not changed
999 
1000 			// We know the entry can't be racily clean because it's still clean.
1001 			// Therefore we unsmudge the entry!
1002 			// If by any chance we now unsmudge although we are still in the
1003 			// same time-slot as the last modification to the index file the
1004 			// next index write operation will smudge again.
1005 			// Caution: we are unsmudging just by setting the length of the
1006 			// in-memory entry object. It's the callers task to detect that we
1007 			// have modified the entry and to persist the modified index.
1008 			entry.setLength((int) getEntryLength());
1009 
1010 			return false;
1011 		} else {
1012 			if (mode == FileMode.SYMLINK.getBits()) {
1013 				return !new File(readSymlinkTarget(current())).equals(
1014 						new File(readContentAsNormalizedString(entry, reader)));
1015 			}
1016 			// Content differs: that's a real change, perhaps
1017 			if (reader == null) // deprecated use, do no further checks
1018 				return true;
1019 
1020 			switch (getEolStreamType()) {
1021 			case DIRECT:
1022 				return true;
1023 			default:
1024 				try {
1025 					ObjectLoader loader = reader.open(entry.getObjectId());
1026 					if (loader == null)
1027 						return true;
1028 
1029 					// We need to compute the length, but only if it is not
1030 					// a binary stream.
1031 					long dcInLen;
1032 					try (InputStream dcIn = new AutoLFInputStream(
1033 							loader.openStream(), true,
1034 							true /* abort if binary */)) {
1035 						dcInLen = computeLength(dcIn);
1036 					} catch (AutoLFInputStream.IsBinaryException e) {
1037 						return true;
1038 					}
1039 
1040 					try (InputStream dcIn = new AutoLFInputStream(
1041 							loader.openStream(), true)) {
1042 						byte[] autoCrLfHash = computeHash(dcIn, dcInLen);
1043 						boolean changed = getEntryObjectId()
1044 								.compareTo(autoCrLfHash, 0) != 0;
1045 						return changed;
1046 					}
1047 				} catch (IOException e) {
1048 					return true;
1049 				}
1050 			}
1051 		}
1052 	}
1053 
1054 	private static String readContentAsNormalizedString(DirCacheEntry entry,
1055 			ObjectReader reader) throws MissingObjectException, IOException {
1056 		ObjectLoader open = reader.open(entry.getObjectId());
1057 		byte[] cachedBytes = open.getCachedBytes();
1058 		return FS.detect().normalize(RawParseUtils.decode(cachedBytes));
1059 	}
1060 
1061 	/**
1062 	 * Reads the target of a symlink as a string. This default implementation
1063 	 * fully reads the entry's input stream and converts it to a normalized
1064 	 * string. Subclasses may override to provide more specialized
1065 	 * implementations.
1066 	 *
1067 	 * @param entry
1068 	 *            to read
1069 	 * @return the entry's content as a normalized string
1070 	 * @throws IOException
1071 	 *             if the entry cannot be read or does not denote a symlink
1072 	 * @since 4.6
1073 	 */
1074 	protected String readSymlinkTarget(Entry entry) throws IOException {
1075 		if (!entry.getMode().equals(FileMode.SYMLINK)) {
1076 			throw new java.nio.file.NotLinkException(entry.getName());
1077 		}
1078 		long length = entry.getLength();
1079 		byte[] content = new byte[(int) length];
1080 		try (InputStream is = entry.openInputStream()) {
1081 			int bytesRead = IO.readFully(is, content, 0);
1082 			return FS.detect()
1083 					.normalize(RawParseUtils.decode(content, 0, bytesRead));
1084 		}
1085 	}
1086 
1087 	private static long computeLength(InputStream in) throws IOException {
1088 		// Since we only care about the length, use skip. The stream
1089 		// may be able to more efficiently wade through its data.
1090 		//
1091 		long length = 0;
1092 		for (;;) {
1093 			long n = in.skip(1 << 20);
1094 			if (n <= 0)
1095 				break;
1096 			length += n;
1097 		}
1098 		return length;
1099 	}
1100 
	/**
	 * Computes the git object id of the stream's content, hashed as a blob
	 * of the given expected length ("blob &lt;length&gt;\0" header followed
	 * by the content).
	 *
	 * @param in
	 *            content to hash; fully consumed
	 * @param length
	 *            expected number of content bytes; used for the blob header
	 * @return the SHA-1 digest, or {@code zeroid} if the stream yielded a
	 *         different number of bytes than {@code length}
	 * @throws IOException
	 *             the stream could not be read
	 */
	private byte[] computeHash(InputStream in, long length) throws IOException {
		SHA1 contentDigest = SHA1.newInstance();
		final byte[] contentReadBuffer = state.contentReadBuffer;

		// Git object header: "blob <decimal length>\0".
		contentDigest.update(hblob);
		contentDigest.update((byte) ' ');

		long sz = length;
		if (sz == 0) {
			contentDigest.update((byte) '0');
		} else {
			// Render the decimal digits of sz into the tail of the shared
			// read buffer (least significant digit first, filled backwards).
			final int bufn = contentReadBuffer.length;
			int p = bufn;
			do {
				contentReadBuffer[--p] = digits[(int) (sz % 10)];
				sz /= 10;
			} while (sz > 0);
			contentDigest.update(contentReadBuffer, p, bufn - p);
		}
		contentDigest.update((byte) 0);

		// Hash the content itself, counting the bytes actually seen.
		for (;;) {
			final int r = in.read(contentReadBuffer);
			if (r <= 0)
				break;
			contentDigest.update(contentReadBuffer, 0, r);
			sz += r;
		}
		// If the stream length did not match the header we already hashed,
		// the digest is meaningless; return the zero id instead.
		if (sz != length)
			return zeroid;
		return contentDigest.digest();
	}
1133 
1134 	/** A single entry within a working directory tree. */
1135 	protected static abstract class Entry {
1136 		byte[] encodedName;
1137 
1138 		int encodedNameLen;
1139 
1140 		void encodeName(final CharsetEncoder enc) {
1141 			final ByteBuffer b;
1142 			try {
1143 				b = enc.encode(CharBuffer.wrap(getName()));
1144 			} catch (CharacterCodingException e) {
1145 				// This should so never happen.
1146 				throw new RuntimeException(MessageFormat.format(
1147 						JGitText.get().unencodeableFile, getName()));
1148 			}
1149 
1150 			encodedNameLen = b.limit();
1151 			if (b.hasArray() && b.arrayOffset() == 0)
1152 				encodedName = b.array();
1153 			else
1154 				b.get(encodedName = new byte[encodedNameLen]);
1155 		}
1156 
1157 		@Override
1158 		public String toString() {
1159 			return getMode().toString() + " " + getName(); //$NON-NLS-1$
1160 		}
1161 
1162 		/**
1163 		 * Get the type of this entry.
1164 		 * <p>
1165 		 * <b>Note: Efficient implementation required.</b>
1166 		 * <p>
1167 		 * The implementation of this method must be efficient. If a subclass
1168 		 * needs to compute the value they should cache the reference within an
1169 		 * instance member instead.
1170 		 *
1171 		 * @return a file mode constant from {@link FileMode}.
1172 		 */
1173 		public abstract FileMode getMode();
1174 
1175 		/**
1176 		 * Get the byte length of this entry.
1177 		 * <p>
1178 		 * <b>Note: Efficient implementation required.</b>
1179 		 * <p>
1180 		 * The implementation of this method must be efficient. If a subclass
1181 		 * needs to compute the value they should cache the reference within an
1182 		 * instance member instead.
1183 		 *
1184 		 * @return size of this file, in bytes.
1185 		 */
1186 		public abstract long getLength();
1187 
1188 		/**
1189 		 * Get the last modified time of this entry.
1190 		 * <p>
1191 		 * <b>Note: Efficient implementation required.</b>
1192 		 * <p>
1193 		 * The implementation of this method must be efficient. If a subclass
1194 		 * needs to compute the value they should cache the reference within an
1195 		 * instance member instead.
1196 		 *
1197 		 * @return time since the epoch (in ms) of the last change.
1198 		 */
1199 		public abstract long getLastModified();
1200 
1201 		/**
1202 		 * Get the name of this entry within its directory.
1203 		 * <p>
1204 		 * Efficient implementations are not required. The caller will obtain
1205 		 * the name only once and cache it once obtained.
1206 		 *
1207 		 * @return name of the entry.
1208 		 */
1209 		public abstract String getName();
1210 
1211 		/**
1212 		 * Obtain an input stream to read the file content.
1213 		 * <p>
1214 		 * Efficient implementations are not required. The caller will usually
1215 		 * obtain the stream only once per entry, if at all.
1216 		 * <p>
1217 		 * The input stream should not use buffering if the implementation can
1218 		 * avoid it. The caller will buffer as necessary to perform efficient
1219 		 * block IO operations.
1220 		 * <p>
1221 		 * The caller will close the stream once complete.
1222 		 *
1223 		 * @return a stream to read from the file.
1224 		 * @throws IOException
1225 		 *             the file could not be opened for reading.
1226 		 */
1227 		public abstract InputStream openInputStream() throws IOException;
1228 	}
1229 
1230 	/** Magic type indicating we know rules exist, but they aren't loaded. */
1231 	private static class PerDirectoryIgnoreNode extends IgnoreNode {
1232 		final Entry entry;
1233 
1234 		PerDirectoryIgnoreNode(Entry entry) {
1235 			super(Collections.<FastIgnoreRule> emptyList());
1236 			this.entry = entry;
1237 		}
1238 
1239 		IgnoreNode load() throws IOException {
1240 			IgnoreNode r = new IgnoreNode();
1241 			InputStream in = entry.openInputStream();
1242 			try {
1243 				r.parse(in);
1244 			} finally {
1245 				in.close();
1246 			}
1247 			return r.getRules().isEmpty() ? null : r;
1248 		}
1249 	}
1250 
1251 	/** Magic type indicating there may be rules for the top level. */
1252 	private static class RootIgnoreNode extends PerDirectoryIgnoreNode {
1253 		final Repository repository;
1254 
1255 		RootIgnoreNode(Entry entry, Repository repository) {
1256 			super(entry);
1257 			this.repository = repository;
1258 		}
1259 
1260 		@Override
1261 		IgnoreNode load() throws IOException {
1262 			IgnoreNode r;
1263 			if (entry != null) {
1264 				r = super.load();
1265 				if (r == null)
1266 					r = new IgnoreNode();
1267 			} else {
1268 				r = new IgnoreNode();
1269 			}
1270 
1271 			FS fs = repository.getFS();
1272 			String path = repository.getConfig().get(CoreConfig.KEY)
1273 					.getExcludesFile();
1274 			if (path != null) {
1275 				File excludesfile;
1276 				if (path.startsWith("~/")) //$NON-NLS-1$
1277 					excludesfile = fs.resolve(fs.userHome(), path.substring(2));
1278 				else
1279 					excludesfile = fs.resolve(null, path);
1280 				loadRulesFromFile(r, excludesfile);
1281 			}
1282 
1283 			File exclude = fs.resolve(repository.getDirectory(),
1284 					Constants.INFO_EXCLUDE);
1285 			loadRulesFromFile(r, exclude);
1286 
1287 			return r.getRules().isEmpty() ? null : r;
1288 		}
1289 
1290 		private static void loadRulesFromFile(IgnoreNode r, File exclude)
1291 				throws FileNotFoundException, IOException {
1292 			if (FS.DETECTED.exists(exclude)) {
1293 				FileInputStream in = new FileInputStream(exclude);
1294 				try {
1295 					r.parse(in);
1296 				} finally {
1297 					in.close();
1298 				}
1299 			}
1300 		}
1301 	}
1302 
1303 	/** Magic type indicating we know rules exist, but they aren't loaded. */
1304 	private static class PerDirectoryAttributesNode extends AttributesNode {
1305 		final Entry entry;
1306 
1307 		PerDirectoryAttributesNode(Entry entry) {
1308 			super(Collections.<AttributesRule> emptyList());
1309 			this.entry = entry;
1310 		}
1311 
1312 		AttributesNode load() throws IOException {
1313 			AttributesNode r = new AttributesNode();
1314 			InputStream in = entry.openInputStream();
1315 			try {
1316 				r.parse(in);
1317 			} finally {
1318 				in.close();
1319 			}
1320 			return r.getRules().isEmpty() ? null : r;
1321 		}
1322 	}
1323 
1324 
	private static final class IteratorState {
		/** Options used to process the working tree. */
		final WorkingTreeOptions options;

		/** File name character encoder. */
		final CharsetEncoder nameEncoder;

		/**
		 * Buffer used to perform {@link #contentId} computations. Allocated
		 * lazily via {@link #initializeReadBuffer()}.
		 */
		byte[] contentReadBuffer;

		/** TreeWalk with a (supposedly) matching DirCacheIterator. */
		TreeWalk walk;

		/** Position of the matching {@link DirCacheIterator}. */
		int dirCacheTree;

		IteratorState(WorkingTreeOptions options) {
			this.options = options;
			this.nameEncoder = Constants.CHARSET.newEncoder();
		}

		// Lazily allocates the shared read buffer; many iterations never
		// need to hash content, so the allocation is deferred until first use.
		void initializeReadBuffer() {
			if (contentReadBuffer == null) {
				contentReadBuffer = new byte[BUFFER_SIZE];
			}
		}
	}
1352 
1353 	/**
1354 	 * @return the clean filter command for the current entry or
1355 	 *         <code>null</code> if no such command is defined
1356 	 * @throws IOException
1357 	 * @since 4.2
1358 	 */
1359 	public String getCleanFilterCommand() throws IOException {
1360 		if (cleanFilterCommandHolder == null) {
1361 			String cmd = null;
1362 			if (state.walk != null) {
1363 				cmd = state.walk
1364 						.getFilterCommand(Constants.ATTR_FILTER_TYPE_CLEAN);
1365 			}
1366 			cleanFilterCommandHolder = new Holder<>(cmd);
1367 		}
1368 		return cleanFilterCommandHolder.get();
1369 	}
1370 
1371 	/**
1372 	 * @return the eol stream type for the current entry or <code>null</code> if
1373 	 *         it cannot be determined. When state or state.walk is null or the
1374 	 *         {@link TreeWalk} is not based on a {@link Repository} then null
1375 	 *         is returned.
1376 	 * @throws IOException
1377 	 * @since 4.3
1378 	 */
1379 	public EolStreamType getEolStreamType() throws IOException {
1380 		return getEolStreamType(null);
1381 	}
1382 
1383 	/**
1384 	 * @param opType
1385 	 *            The operationtype (checkin/checkout) which should be used
1386 	 * @return the eol stream type for the current entry or <code>null</code> if
1387 	 *         it cannot be determined. When state or state.walk is null or the
1388 	 *         {@link TreeWalk} is not based on a {@link Repository} then null
1389 	 *         is returned.
1390 	 * @throws IOException
1391 	 */
1392 	private EolStreamType getEolStreamType(OperationType opType)
1393 			throws IOException {
1394 		if (eolStreamTypeHolder == null) {
1395 			EolStreamType type=null;
1396 			if (state.walk != null) {
1397 				if (opType != null) {
1398 					type = state.walk.getEolStreamType(opType);
1399 				} else {
1400 					type=state.walk.getEolStreamType();
1401 				}
1402 			} else {
1403 				switch (getOptions().getAutoCRLF()) {
1404 				case FALSE:
1405 					type = EolStreamType.DIRECT;
1406 					break;
1407 				case TRUE:
1408 				case INPUT:
1409 					type = EolStreamType.AUTO_LF;
1410 					break;
1411 				}
1412 			}
1413 			eolStreamTypeHolder = new Holder<>(type);
1414 		}
1415 		return eolStreamTypeHolder.get();
1416 	}
1417 }