/*
 * Copyright (C) 2022, Google Inc. and others
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Distribution License v. 1.0 which is available at
 * https://www.eclipse.org/org/documents/edl-v10.php.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
package org.eclipse.jgit.patch;

import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.time.Instant;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.InflaterInputStream;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.Attribute;
import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheCheckout.StreamSupplier;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.FileHeader.PatchType;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.treewalk.FileTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.WorkingTreeOptions;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.LfsFactory;
import org.eclipse.jgit.util.LfsFactory.LfsInputStream;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
import org.eclipse.jgit.util.sha1.SHA1;

/**
 * Applies a patch to files and the index.
 * <p>
 * After instantiation, {@code applyPatch(InputStream)} should be called once
 * per instance.
 * </p>
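 * <p>
 * A minimal usage sketch; the {@code repository} and the {@code patchFile}
 * path are assumed to be supplied by the caller:
 * </p>
 *
 * <pre>{@code
 * try (InputStream patch = Files.newInputStream(patchFile)) {
 *     PatchApplier applier = new PatchApplier(repository);
 *     PatchApplier.Result result = applier.applyPatch(patch);
 *     System.out.println("Patched tree: " + result.getTreeId());
 * }
 * }</pre>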
 *
 * @since 6.4
 */
public class PatchApplier {

	/** The tree before applying the patch. Only non-null for inCore operation. */
	@Nullable
	private final RevTree beforeTree;

	private final Repository repo;

	private final ObjectInserter inserter;

	private final ObjectReader reader;

	private WorkingTreeOptions workingTreeOptions;

	private int inCoreSizeLimit;

	/**
	 * @param repo
	 *            repository to apply the patch in
	 */
	public PatchApplier(Repository repo) {
		this.repo = repo;
		inserter = repo.newObjectInserter();
		reader = inserter.newReader();
		beforeTree = null;

		Config config = repo.getConfig();
		workingTreeOptions = config.get(WorkingTreeOptions.KEY);
		inCoreSizeLimit = config.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
				ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
	}

	/**
	 * @param repo
	 *            repository to apply the patch in
	 * @param beforeTree
	 *            ID of the tree to apply the patch in
	 * @param oi
	 *            to be used for modifying objects
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public PatchApplier(Repository repo, RevTree beforeTree, ObjectInserter oi)
			throws IOException {
		this.repo = repo;
		this.beforeTree = beforeTree;
		inserter = oi;
		reader = oi.newReader();
	}

	/**
	 * A wrapper for returning both the applied tree ID and the applied files
	 * list.
	 *
	 * @since 6.3
	 */
	public static class Result {

		private ObjectId treeId;

		private List<String> paths;

		/**
		 * @return List of modified paths.
		 */
		public List<String> getPaths() {
			return paths;
		}

		/**
		 * @return The applied tree ID.
		 */
		public ObjectId getTreeId() {
			return treeId;
		}
	}

	/**
	 * Applies the given patch.
	 *
	 * @param patchInput
	 *            the patch to apply
	 * @return the result of applying the patch
	 * @throws PatchFormatException
	 *             if the patch cannot be parsed
	 * @throws PatchApplyException
	 *             if the patch cannot be applied
	 */
	public Result applyPatch(InputStream patchInput)
			throws PatchFormatException, PatchApplyException {
		Result result = new Result();
		org.eclipse.jgit.patch.Patch p = new org.eclipse.jgit.patch.Patch();
		try (InputStream inStream = patchInput) {
			p.parse(inStream);

			if (!p.getErrors().isEmpty()) {
				throw new PatchFormatException(p.getErrors());
			}

			DirCache dirCache = (inCore()) ? DirCache.newInCore()
					: repo.lockDirCache();

			DirCacheBuilder dirCacheBuilder = dirCache.builder();
			Set<String> modifiedPaths = new HashSet<>();
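			// Each FileHeader describes one file touched by the patch. The
			// working tree is prepared here (create/delete/rename/copy);
			// apply() then writes the patched content and the new index entry.
			// Deletions need no apply() call: their stale index entries are
			// simply not copied over in the loop further below.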
			for (org.eclipse.jgit.patch.FileHeader fh : p.getFiles()) {
				ChangeType type = fh.getChangeType();
				switch (type) {
				case ADD: {
					File f = getFile(fh.getNewPath());
					if (f != null) {
						try {
							FileUtils.mkdirs(f.getParentFile(), true);
							FileUtils.createNewFile(f);
						} catch (IOException e) {
							throw new PatchApplyException(MessageFormat.format(
									JGitText.get().createNewFileFailed, f), e);
						}
					}
					apply(fh.getNewPath(), dirCache, dirCacheBuilder, f, fh);
				}
					break;
				case MODIFY:
					apply(fh.getOldPath(), dirCache, dirCacheBuilder,
							getFile(fh.getOldPath()), fh);
					break;
				case DELETE:
					if (!inCore()) {
						File old = getFile(fh.getOldPath());
						if (!old.delete())
							throw new PatchApplyException(MessageFormat.format(
									JGitText.get().cannotDeleteFile, old));
					}
					break;
				case RENAME: {
					File src = getFile(fh.getOldPath());
					File dest = getFile(fh.getNewPath());

					if (!inCore()) {
						/*
						 * this is odd: we rename the file on the FS, but
						 * apply() will write a fresh stream anyway, which will
						 * overwrite if there were hunks in the patch.
						 */
						try {
							FileUtils.mkdirs(dest.getParentFile(), true);
							FileUtils.rename(src, dest,
									StandardCopyOption.ATOMIC_MOVE);
						} catch (IOException e) {
							throw new PatchApplyException(MessageFormat.format(
									JGitText.get().renameFileFailed, src, dest),
									e);
						}
					}
					String pathWithOriginalContent = inCore() ?
							fh.getOldPath() : fh.getNewPath();
					apply(pathWithOriginalContent, dirCache, dirCacheBuilder, dest, fh);
					break;
				}
				case COPY: {
					File dest = getFile(fh.getNewPath());
					if (!inCore()) {
						File src = getFile(fh.getOldPath());
						FileUtils.mkdirs(dest.getParentFile(), true);
						Files.copy(src.toPath(), dest.toPath());
					}
					apply(fh.getOldPath(), dirCache, dirCacheBuilder, dest, fh);
					break;
				}
				}
				if (fh.getChangeType() != ChangeType.DELETE)
					modifiedPaths.add(fh.getNewPath());
				if (fh.getChangeType() != ChangeType.COPY
						&& fh.getChangeType() != ChangeType.ADD)
					modifiedPaths.add(fh.getOldPath());
			}

			// We processed the patch. Now add things that weren't changed.
			for (int i = 0; i < dirCache.getEntryCount(); i++) {
				DirCacheEntry dce = dirCache.getEntry(i);
				if (!modifiedPaths.contains(dce.getPathString())
						|| dce.getStage() != DirCacheEntry.STAGE_0)
					dirCacheBuilder.add(dce);
			}

			if (inCore())
				dirCacheBuilder.finish();
			else if (!dirCacheBuilder.commit()) {
				throw new IndexWriteException();
			}

			result.treeId = dirCache.writeTree(inserter);
			result.paths = modifiedPaths.stream().sorted()
					.collect(Collectors.toList());
		} catch (IOException e) {
			throw new PatchApplyException(MessageFormat.format(
					JGitText.get().patchApplyException, e.getMessage()), e);
		}
		return result;
	}

	private File getFile(String path) {
		return (inCore()) ? null : new File(repo.getWorkTree(), path);
	}

	/* returns null if the path is not found. */
	@Nullable
	private TreeWalk getTreeWalkForFile(String path, DirCache cache)
			throws PatchApplyException {
		try {
			if (inCore()) {
				// Only this branch may return null.
				// TODO: it would be nice if we could return a TreeWalk at EOF
				// instead of null.
				return TreeWalk.forPath(repo, path, beforeTree);
			}
			TreeWalk walk = new TreeWalk(repo);

			// Use a TreeWalk with a DirCacheIterator to pick up the correct
			// clean/smudge filters.
			int cacheTreeIdx = walk.addTree(new DirCacheIterator(cache));
			FileTreeIterator files = new FileTreeIterator(repo);
			if (FILE_TREE_INDEX != walk.addTree(files))
				throw new IllegalStateException();

			walk.setFilter(AndTreeFilter.create(
					PathFilterGroup.createFromStrings(path),
					new NotIgnoredFilter(FILE_TREE_INDEX)));
			walk.setOperationType(OperationType.CHECKIN_OP);
			walk.setRecursive(true);
			files.setDirCacheIterator(walk, cacheTreeIdx);
			return walk;
		} catch (IOException e) {
			throw new PatchApplyException(MessageFormat.format(
					JGitText.get().patchApplyException, e.getMessage()), e);
		}
	}

	private static final int FILE_TREE_INDEX = 1;

	/**
	 * Applies the patch to a single file.
	 *
	 * @param pathWithOriginalContent
	 *            The path to use for the pre-image. Also determines CRLF and
	 *            smudge settings.
	 * @param dirCache
	 *            Dircache to read existing data from.
	 * @param dirCacheBuilder
	 *            Builder for Dircache to write new data to.
	 * @param f
	 *            The file to update with new contents. Null for inCore usage.
	 * @param fh
	 *            The patch header.
	 * @throws PatchApplyException
	 *             if the patch cannot be applied to the file
	 */
	private void apply(String pathWithOriginalContent, DirCache dirCache,
			DirCacheBuilder dirCacheBuilder, @Nullable File f,
			org.eclipse.jgit.patch.FileHeader fh) throws PatchApplyException {
		if (PatchType.BINARY.equals(fh.getPatchType())) {
			// This patch type just says "something changed". We can't do
			// anything with that.
			// Maybe this should return an error code, though?
			return;
		}
		try {
			TreeWalk walk = getTreeWalkForFile(pathWithOriginalContent, dirCache);
			boolean loadedFromTreeWalk = false;
			// CR-LF handling is determined by whether the file or the patch
			// have CR-LF line endings.
			boolean convertCrLf = inCore() || needsCrLfConversion(f, fh);
			EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
					: EolStreamType.DIRECT;
			String smudgeFilterCommand = null;
			StreamSupplier fileStreamSupplier = null;
			ObjectId fileId = ObjectId.zeroId();
			if (walk == null) {
				// For new files with inCore()==true, TreeWalk.forPath can be
				// null. Stay with defaults.
			} else if (inCore()) {
				fileId = walk.getObjectId(0);
				ObjectLoader loader = LfsFactory.getInstance()
						.applySmudgeFilter(repo, reader.open(fileId, OBJ_BLOB),
								null);
				byte[] data = loader.getBytes();
				convertCrLf = RawText.isCrLfText(data);
				fileStreamSupplier = () -> new ByteArrayInputStream(data);
				streamType = convertCrLf ? EolStreamType.TEXT_CRLF
						: EolStreamType.DIRECT;
				smudgeFilterCommand = walk
						.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
				loadedFromTreeWalk = true;
			} else if (walk.next()) {
				// If the file on disk has no newline characters,
				// convertCrLf will be false. In that case we want to honor the
				// normal git settings.
				streamType = convertCrLf ? EolStreamType.TEXT_CRLF
						: walk.getEolStreamType(OperationType.CHECKOUT_OP);
				smudgeFilterCommand = walk
						.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
				FileTreeIterator file = walk.getTree(FILE_TREE_INDEX,
						FileTreeIterator.class);
				if (file != null) {
					fileId = file.getEntryObjectId();
					fileStreamSupplier = file::openEntryStream;
					loadedFromTreeWalk = true;
				} else {
					throw new PatchApplyException(MessageFormat.format(
							JGitText.get().cannotReadFile,
							pathWithOriginalContent));
				}
			}

			if (fileStreamSupplier == null)
				fileStreamSupplier = inCore() ? InputStream::nullInputStream
						: () -> new FileInputStream(f);

			FileMode fileMode = fh.getNewMode() != null ? fh.getNewMode()
					: FileMode.REGULAR_FILE;
			ContentStreamLoader resultStreamLoader;
			if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
				// binary patches are processed in a streaming fashion. Some
				// binary patches do random access on the input data, so we can't
				// overwrite the file while we're streaming.
				resultStreamLoader = applyBinary(pathWithOriginalContent, f, fh,
						fileStreamSupplier, fileId);
			} else {
				String filterCommand = walk != null
						? walk.getFilterCommand(
								Constants.ATTR_FILTER_TYPE_CLEAN)
						: null;
				RawText raw = getRawText(f, fileStreamSupplier, fileId,
						pathWithOriginalContent, loadedFromTreeWalk, filterCommand,
						convertCrLf);
				resultStreamLoader = applyText(raw, fh);
			}

			if (f != null) {
				// Write to a buffer and copy to the file only if everything was
				// fine.
				TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
				try {
					CheckoutMetadata metadata = new CheckoutMetadata(streamType,
							smudgeFilterCommand);

					try (TemporaryBuffer buf = buffer) {
						DirCacheCheckout.getContent(repo, pathWithOriginalContent,
								metadata, resultStreamLoader.supplier, workingTreeOptions,
								buf);
					}
					try (InputStream bufIn = buffer.openInputStream()) {
						Files.copy(bufIn, f.toPath(),
								StandardCopyOption.REPLACE_EXISTING);
					}
				} finally {
					buffer.destroy();
				}

				repo.getFS().setExecute(f,
						fileMode == FileMode.EXECUTABLE_FILE);
			}

			Instant lastModified = f == null ? null
					: repo.getFS().lastModifiedInstant(f);
			Attributes attributes = walk != null ? walk.getAttributes()
					: new Attributes();

			DirCacheEntry dce = insertToIndex(
					resultStreamLoader.supplier.load(),
					fh.getNewPath().getBytes(StandardCharsets.UTF_8), fileMode,
					lastModified, resultStreamLoader.length,
					attributes.get(Constants.ATTR_FILTER));
			dirCacheBuilder.add(dce);
			if (PatchType.GIT_BINARY.equals(fh.getPatchType())
					&& fh.getNewId() != null && fh.getNewId().isComplete()
					&& !fh.getNewId().toObjectId().equals(dce.getObjectId())) {
				throw new PatchApplyException(MessageFormat.format(
						JGitText.get().applyBinaryResultOidWrong,
						pathWithOriginalContent));
			}
		} catch (IOException | UnsupportedOperationException e) {
			throw new PatchApplyException(MessageFormat.format(
					JGitText.get().patchApplyException, e.getMessage()), e);
		}
	}

	private DirCacheEntry insertToIndex(InputStream input, byte[] path,
			FileMode fileMode, Instant lastModified, long length,
			Attribute lfsAttribute) throws IOException {
		DirCacheEntry dce = new DirCacheEntry(path, DirCacheEntry.STAGE_0);
		dce.setFileMode(fileMode);
		if (lastModified != null) {
			dce.setLastModified(lastModified);
		}
		dce.setLength(length);

		try (LfsInputStream is = org.eclipse.jgit.util.LfsFactory.getInstance()
				.applyCleanFilter(repo, input, length, lfsAttribute)) {
			dce.setObjectId(inserter.insert(OBJ_BLOB, is.getLength(), is));
		}

		return dce;
	}

	/**
	 * Gets the raw text of the given file.
	 *
	 * @param file
	 *            to read from
	 * @param fileStreamSupplier
	 *            if fromTreeWalk is true, the stream of the file content
	 * @param fileId
	 *            of the file
	 * @param path
	 *            of the file
	 * @param fromTreeWalk
	 *            whether the file was loaded by a {@link TreeWalk}
	 * @param filterCommand
	 *            for reading the file content
	 * @param convertCrLf
	 *            whether a CR-LF conversion is needed
	 * @return the resulting raw text
	 * @throws IOException
	 *             in case of filtering issues
	 */
	private RawText getRawText(@Nullable File file,
			StreamSupplier fileStreamSupplier, ObjectId fileId, String path,
			boolean fromTreeWalk, String filterCommand, boolean convertCrLf)
			throws IOException {
		if (fromTreeWalk) {
			// Can't use file.openEntryStream() as we cannot control its CR-LF
			// conversion.
			try (InputStream input = filterClean(repo, path,
					fileStreamSupplier.load(), convertCrLf, filterCommand)) {
				return new RawText(org.eclipse.jgit.util.IO
						.readWholeStream(input, 0).array());
			}
		}
		if (convertCrLf) {
			try (InputStream input = EolStreamTypeUtil.wrapInputStream(
					fileStreamSupplier.load(), EolStreamType.TEXT_LF)) {
				return new RawText(org.eclipse.jgit.util.IO
						.readWholeStream(input, 0).array());
			}
		}
		if (inCore() && fileId.equals(ObjectId.zeroId())) {
			return new RawText(new byte[] {});
		}
		return new RawText(file);
	}

	private InputStream filterClean(Repository repository, String path,
			InputStream fromFile, boolean convertCrLf, String filterCommand)
			throws IOException {
		InputStream input = fromFile;
		if (convertCrLf) {
			input = EolStreamTypeUtil.wrapInputStream(input,
					EolStreamType.TEXT_LF);
		}
		if (org.eclipse.jgit.util.StringUtils.isEmptyOrNull(filterCommand)) {
			return input;
		}
		if (FilterCommandRegistry.isRegistered(filterCommand)) {
			LocalFile buffer = new org.eclipse.jgit.util.TemporaryBuffer.LocalFile(
					null, inCoreSizeLimit);
			FilterCommand command = FilterCommandRegistry.createFilterCommand(
					filterCommand, repository, input, buffer);
			while (command.run() != -1) {
				// loop as long as command.run() tells there is work to do
			}
			return buffer.openInputStreamWithAutoDestroy();
		}
		org.eclipse.jgit.util.FS fs = repository.getFS();
		ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
				new String[0]);
		filterProcessBuilder.directory(repository.getWorkTree());
		filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
				repository.getDirectory().getAbsolutePath());
		ExecutionResult result;
		try {
			result = fs.execute(filterProcessBuilder, input);
		} catch (IOException | InterruptedException e) {
			throw new IOException(
					new FilterFailedException(e, filterCommand, path));
		}
		int rc = result.getRc();
		if (rc != 0) {
			throw new IOException(new FilterFailedException(rc, filterCommand,
					path, result.getStdout().toByteArray(4096),
					org.eclipse.jgit.util.RawParseUtils
							.decode(result.getStderr().toByteArray(4096))));
		}
		return result.getStdout().openInputStreamWithAutoDestroy();
	}

	private boolean needsCrLfConversion(File f,
			org.eclipse.jgit.patch.FileHeader fileHeader) throws IOException {
		if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
			return false;
		}
		if (!hasCrLf(fileHeader)) {
			try (InputStream input = new FileInputStream(f)) {
				return RawText.isCrLfText(input);
			}
		}
		return false;
	}

	private static boolean hasCrLf(
			org.eclipse.jgit.patch.FileHeader fileHeader) {
		if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
			return false;
		}
		for (org.eclipse.jgit.patch.HunkHeader header : fileHeader.getHunks()) {
			byte[] buf = header.getBuffer();
			int hunkEnd = header.getEndOffset();
			int lineStart = header.getStartOffset();
			while (lineStart < hunkEnd) {
				int nextLineStart = RawParseUtils.nextLF(buf, lineStart);
				if (nextLineStart > hunkEnd) {
					nextLineStart = hunkEnd;
				}
				if (nextLineStart <= lineStart) {
					break;
				}
				if (nextLineStart - lineStart > 1) {
					char first = (char) (buf[lineStart] & 0xFF);
					if (first == ' ' || first == '-') {
						// It's an old line. Does it end in CR-LF?
						if (buf[nextLineStart - 2] == '\r') {
							return true;
						}
					}
				}
				lineStart = nextLineStart;
			}
		}
		return false;
	}

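	/**
	 * Computes the Git blob object ID (SHA-1 over the blob header and the file
	 * contents) of the given file without loading it fully into memory.
	 */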
	private ObjectId hash(File f) throws IOException {
		try (FileInputStream fis = new FileInputStream(f);
				SHA1InputStream shaStream = new SHA1InputStream(fis,
						f.length())) {
			shaStream.transferTo(OutputStream.nullOutputStream());
			return shaStream.getHash().toObjectId();
		}
	}

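	/**
	 * Checks that the current content matches the base object ID recorded in
	 * the binary patch. If the current content's ID is unknown, the working
	 * tree file is hashed (or checked for emptiness) instead.
	 */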
	private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
			String path) throws PatchApplyException, IOException {
		boolean hashOk = false;
		if (id != null) {
			hashOk = baseId.equals(id);
			if (!hashOk && ChangeType.ADD.equals(type)
					&& ObjectId.zeroId().equals(baseId)) {
				// We create a new file. The OID of an empty file is not the
				// zero id!
				hashOk = Constants.EMPTY_BLOB_ID.equals(id);
			}
		} else if (!inCore()) {
			if (ObjectId.zeroId().equals(baseId)) {
				// File empty is OK.
				hashOk = !f.exists() || f.length() == 0;
			} else {
				hashOk = baseId.equals(hash(f));
			}
		}
		if (!hashOk) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().applyBinaryBaseOidWrong, path));
		}
	}

	private boolean inCore() {
		return beforeTree != null;
	}

	/**
	 * Provides a stream of the result content, along with the length of the
	 * object. We use it twice: once to write the patched result to the working
	 * tree, and once to write the index. For on-disk operation, we could
	 * presumably stream to the destination file and then read the stream back
	 * from disk. We don't, because it is more complex.
	 */
	private static class ContentStreamLoader {

		StreamSupplier supplier;

		long length;

		ContentStreamLoader(StreamSupplier supplier, long length) {
			this.supplier = supplier;
			this.length = length;
		}
	}

	/**
	 * Applies a binary patch.
	 *
	 * @param path
	 *            pathname of the file to write.
	 * @param f
	 *            destination file
	 * @param fh
	 *            the patch to apply
	 * @param inputSupplier
	 *            a supplier for the contents of the old file
	 * @param id
	 *            SHA1 for the old content
	 * @return a loader for the new content.
	 * @throws PatchApplyException
	 *             if the binary patch cannot be applied
	 * @throws IOException
	 *             in case of I/O errors
	 * @throws UnsupportedOperationException
	 *             if the binary hunk type is not supported
	 */
	private ContentStreamLoader applyBinary(String path, File f,
			org.eclipse.jgit.patch.FileHeader fh, StreamSupplier inputSupplier,
			ObjectId id) throws PatchApplyException, IOException,
			UnsupportedOperationException {
		if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().applyBinaryOidTooShort, path));
		}
		org.eclipse.jgit.patch.BinaryHunk hunk = fh.getForwardBinaryHunk();
		// A BinaryHunk has the start at the "literal" or "delta" token. Data
		// starts on the next line.
		int start = RawParseUtils.nextLF(hunk.getBuffer(),
				hunk.getStartOffset());
		int length = hunk.getEndOffset() - start;
		switch (hunk.getType()) {
		case LITERAL_DEFLATED: {
			// This just overwrites the file. We need to check the hash of
			// the base.
			checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
					path);
			StreamSupplier supp = () -> new InflaterInputStream(
					new BinaryHunkInputStream(new ByteArrayInputStream(
							hunk.getBuffer(), start, length)));
			return new ContentStreamLoader(supp, hunk.getSize());
		}
		case DELTA_DEFLATED: {
			// Unfortunately delta application needs random access to the
			// base to construct the result.
			byte[] base;
			try (InputStream in = inputSupplier.load()) {
				base = IO.readWholeStream(in, 0).array();
			}
			// At least stream the result! We don't have to close these streams,
			// as they don't hold resources.
			StreamSupplier supp = () -> new BinaryDeltaInputStream(base,
					new InflaterInputStream(
							new BinaryHunkInputStream(new ByteArrayInputStream(
									hunk.getBuffer(), start, length))));

			// This just reads the first bits of the stream.
			long finalSize = ((BinaryDeltaInputStream) supp.load()).getExpectedResultSize();

			return new ContentStreamLoader(supp, finalSize);
		}
		default:
			throw new UnsupportedOperationException(MessageFormat.format(
					JGitText.get().applyBinaryPatchTypeNotSupported,
					hunk.getType().name()));
		}
	}

	private ContentStreamLoader applyText(RawText rt,
			org.eclipse.jgit.patch.FileHeader fh)
			throws IOException, PatchApplyException {
		List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
		for (int i = 0; i < rt.size(); i++) {
			oldLines.add(rt.getRawString(i));
		}
		List<ByteBuffer> newLines = new ArrayList<>(oldLines);
		int afterLastHunk = 0;
		int lineNumberShift = 0;
		int lastHunkNewLine = -1;
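		// Walk the hunks in order. afterLastHunk tracks where the previous
		// hunk ended in newLines, and lineNumberShift tracks how far earlier
		// hunks have moved subsequent lines, so each hunk can be matched
		// against the current content even if its recorded position is off.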
		for (org.eclipse.jgit.patch.HunkHeader hh : fh.getHunks()) {
			// We assume hunks to be ordered
			if (hh.getNewStartLine() <= lastHunkNewLine) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			lastHunkNewLine = hh.getNewStartLine();

			byte[] b = new byte[hh.getEndOffset() - hh.getStartOffset()];
			System.arraycopy(hh.getBuffer(), hh.getStartOffset(), b, 0,
					b.length);
			RawText hrt = new RawText(b);

			List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size());
			for (int i = 0; i < hrt.size(); i++) {
				hunkLines.add(hrt.getRawString(i));
			}

			if (hh.getNewStartLine() == 0) {
				// Must be the single hunk for clearing all content
				if (fh.getHunks().size() == 1
						&& canApplyAt(hunkLines, newLines, 0)) {
					newLines.clear();
					break;
				}
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			// Hunk lines as reported by the hunk may be off, so don't rely on
			// them.
			int applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
			// But they definitely should not go backwards.
			if (applyAt < afterLastHunk && lineNumberShift < 0) {
				applyAt = hh.getNewStartLine() - 1;
				lineNumberShift = 0;
			}
			if (applyAt < afterLastHunk) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			boolean applies = false;
			int oldLinesInHunk = hh.getLinesContext()
					+ hh.getOldImage().getLinesDeleted();
			if (oldLinesInHunk <= 1) {
				// Don't shift hunks without context lines. Just try the
				// position corrected by the current lineNumberShift, and if
				// that fails, the position recorded in the hunk header.
				applies = canApplyAt(hunkLines, newLines, applyAt);
				if (!applies && lineNumberShift != 0) {
					applyAt = hh.getNewStartLine() - 1;
					applies = applyAt >= afterLastHunk
							&& canApplyAt(hunkLines, newLines, applyAt);
				}
			} else {
				int maxShift = applyAt - afterLastHunk;
				for (int shift = 0; shift <= maxShift; shift++) {
					if (canApplyAt(hunkLines, newLines, applyAt - shift)) {
						applies = true;
						applyAt -= shift;
						break;
					}
				}
				if (!applies) {
					// Try shifting the hunk downwards
					applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
					maxShift = newLines.size() - applyAt - oldLinesInHunk;
					for (int shift = 1; shift <= maxShift; shift++) {
						if (canApplyAt(hunkLines, newLines, applyAt + shift)) {
							applies = true;
							applyAt += shift;
							break;
						}
					}
				}
			}
			if (!applies) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			// Hunk applies at applyAt. Apply it, and update afterLastHunk and
			// lineNumberShift
			lineNumberShift = applyAt - hh.getNewStartLine() + 1;
			int sz = hunkLines.size();
			for (int j = 1; j < sz; j++) {
				ByteBuffer hunkLine = hunkLines.get(j);
				if (!hunkLine.hasRemaining()) {
					// Completely empty line; accept as empty context line
					applyAt++;
					continue;
				}
				switch (hunkLine.array()[hunkLine.position()]) {
				case ' ':
					applyAt++;
					break;
				case '-':
					newLines.remove(applyAt);
					break;
				case '+':
					newLines.add(applyAt++, slice(hunkLine, 1));
					break;
				default:
					break;
				}
			}
			afterLastHunk = applyAt;
		}
		if (!isNoNewlineAtEndOfFile(fh)) {
			newLines.add(null);
		}
		if (!rt.isMissingNewlineAtEnd()) {
			oldLines.add(null);
		}

		// We could check if old == new, but the short-circuiting complicates
		// logic for inCore patching, so just write the new thing regardless.
		TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
		try (OutputStream out = buffer) {
			for (Iterator<ByteBuffer> l = newLines.iterator(); l.hasNext();) {
				ByteBuffer line = l.next();
				if (line == null) {
					// Must be the marker for the final newline
					break;
				}
				out.write(line.array(), line.position(), line.remaining());
				if (l.hasNext()) {
					out.write('\n');
				}
			}
			return new ContentStreamLoader(buffer::openInputStream,
					buffer.length());
		}
	}

	private boolean canApplyAt(List<ByteBuffer> hunkLines,
			List<ByteBuffer> newLines, int line) {
		int sz = hunkLines.size();
		int limit = newLines.size();
		int pos = line;
		for (int j = 1; j < sz; j++) {
			ByteBuffer hunkLine = hunkLines.get(j);
			if (!hunkLine.hasRemaining()) {
				// Empty line. Accept as empty context line.
				if (pos >= limit || newLines.get(pos).hasRemaining()) {
					return false;
				}
				pos++;
				continue;
			}
			switch (hunkLine.array()[hunkLine.position()]) {
			case ' ':
			case '-':
				if (pos >= limit
						|| !newLines.get(pos).equals(slice(hunkLine, 1))) {
					return false;
				}
				pos++;
				break;
			default:
				break;
			}
		}
		return true;
	}

	private ByteBuffer slice(ByteBuffer b, int off) {
		int newOffset = b.position() + off;
		return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
	}

	private boolean isNoNewlineAtEndOfFile(
			org.eclipse.jgit.patch.FileHeader fh) {
		List<? extends org.eclipse.jgit.patch.HunkHeader> hunks = fh.getHunks();
		if (hunks == null || hunks.isEmpty()) {
			return false;
		}
		org.eclipse.jgit.patch.HunkHeader lastHunk = hunks
				.get(hunks.size() - 1);
		byte[] buf = new byte[lastHunk.getEndOffset()
				- lastHunk.getStartOffset()];
		System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf,
				0, buf.length);
		RawText lhrt = new RawText(buf);
		return lhrt.getString(lhrt.size() - 1)
				.equals("\\ No newline at end of file"); //$NON-NLS-1$
	}

	/**
	 * An {@link InputStream} that updates a {@link SHA1} on every byte read.
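	 * <p>
	 * The digest is seeded with the canonical Git blob header ("blob", a
	 * space, the decimal content size, and a NUL byte), so {@link #getHash()}
	 * yields the object ID of a blob containing the streamed bytes.
	 * </p>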
	 */
	private static class SHA1InputStream extends InputStream {

		private final SHA1 hash;

		private final InputStream in;

		SHA1InputStream(InputStream in, long size) {
			hash = SHA1.newInstance();
			hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
			hash.update((byte) ' ');
			hash.update(Constants.encodeASCII(size));
			hash.update((byte) 0);
			this.in = in;
		}

		public SHA1 getHash() {
			return hash;
		}

		@Override
		public int read() throws IOException {
			int b = in.read();
			if (b >= 0) {
				hash.update((byte) b);
			}
			return b;
		}

		@Override
		public int read(byte[] b, int off, int len) throws IOException {
			int n = in.read(b, off, len);
			if (n > 0) {
				hash.update(b, off, n);
			}
			return n;
		}

		@Override
		public void close() throws IOException {
			in.close();
		}
	}
}