package org.eclipse.jgit.api;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.zip.InflaterInputStream;

import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.PatchApplyException;
import org.eclipse.jgit.api.errors.PatchFormatException;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.RawText;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.BinaryHunk;
import org.eclipse.jgit.patch.FileHeader;
import org.eclipse.jgit.patch.FileHeader.PatchType;
import org.eclipse.jgit.patch.HunkHeader;
import org.eclipse.jgit.patch.Patch;
import org.eclipse.jgit.treewalk.FileTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.TreeWalk.OperationType;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter;
import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile;
import org.eclipse.jgit.util.io.BinaryDeltaInputStream;
import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
import org.eclipse.jgit.util.sha1.SHA1;
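
/**
 * Applies a patch to the files in the working tree of the repository and
 * reports the updated files in the returned {@link ApplyResult}.
 * <p>
 * A minimal usage sketch (assuming an existing {@link Git} instance
 * {@code git}; the file name and variable names are illustrative only):
 *
 * <pre>
 * try (InputStream patchStream = Files.newInputStream(Paths.get("fix.patch"))) {
 * 	ApplyResult result = git.apply().setPatch(patchStream).call();
 * 	List&lt;File&gt; updated = result.getUpdatedFiles();
 * }
 * </pre>
 *
 * @see <a href="https://git-scm.com/docs/git-apply">git-apply documentation</a>
 */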
public class ApplyCommand extends GitCommand<ApplyResult> {

	private InputStream in;

	/**
	 * Constructs the command.
	 *
	 * @param repo
	 *            the repository the patch will be applied to
	 */
	ApplyCommand(Repository repo) {
		super(repo);
	}

	/**
	 * Sets the patch to apply.
	 *
	 * @param in
	 *            the patch to apply; the stream is read and closed by
	 *            {@link #call()}
	 * @return {@code this}
	 */
	public ApplyCommand setPatch(InputStream in) {
		checkCallable();
		this.in = in;
		return this;
	}

	/**
	 * {@inheritDoc}
	 * <p>
	 * Executes the {@code ApplyCommand} with all the options and parameters
	 * collected by the setter methods (e.g. {@link #setPatch(InputStream)}).
	 * Each instance of this class should only be used for one invocation of
	 * the command; don't call this method twice on an instance.
	 */
	@Override
	public ApplyResult call() throws GitAPIException, PatchFormatException,
			PatchApplyException {
		checkCallable();
		setCallable(false);
		ApplyResult r = new ApplyResult();
		try {
			final Patch p = new Patch();
			try {
				p.parse(in);
			} finally {
				in.close();
			}
			if (!p.getErrors().isEmpty()) {
				throw new PatchFormatException(p.getErrors());
			}
			Repository repository = getRepository();
			DirCache cache = repository.readDirCache();
			for (FileHeader fh : p.getFiles()) {
				ChangeType type = fh.getChangeType();
				File f = null;
				switch (type) {
				case ADD:
					f = getFile(fh.getNewPath(), true);
					apply(repository, fh.getNewPath(), cache, f, fh);
					break;
				case MODIFY:
					f = getFile(fh.getOldPath(), false);
					apply(repository, fh.getOldPath(), cache, f, fh);
					break;
				case DELETE:
					f = getFile(fh.getOldPath(), false);
					if (!f.delete()) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().cannotDeleteFile, f));
					}
					break;
				case RENAME:
					f = getFile(fh.getOldPath(), false);
					File dest = getFile(fh.getNewPath(), false);
					try {
						FileUtils.mkdirs(dest.getParentFile(), true);
						FileUtils.rename(f, dest,
								StandardCopyOption.ATOMIC_MOVE);
					} catch (IOException e) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().renameFileFailed, f, dest), e);
					}
					apply(repository, fh.getOldPath(), cache, dest, fh);
					break;
				case COPY:
					f = getFile(fh.getOldPath(), false);
					File target = getFile(fh.getNewPath(), false);
					FileUtils.mkdirs(target.getParentFile(), true);
					Files.copy(f.toPath(), target.toPath());
					apply(repository, fh.getOldPath(), cache, target, fh);
					break;
				}
				r.addUpdatedFile(f);
			}
		} catch (IOException e) {
			throw new PatchApplyException(MessageFormat.format(
					JGitText.get().patchApplyException, e.getMessage()), e);
		}
		return r;
	}

	private File getFile(String path, boolean create)
			throws PatchApplyException {
		File f = new File(getRepository().getWorkTree(), path);
		if (create) {
			try {
				File parent = f.getParentFile();
				FileUtils.mkdirs(parent, true);
				FileUtils.createNewFile(f);
			} catch (IOException e) {
				throw new PatchApplyException(MessageFormat.format(
						JGitText.get().createNewFileFailed, f), e);
			}
		}
		return f;
	}
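
	/**
	 * Applies one {@link FileHeader} of the patch to the given file.
	 * <p>
	 * Text patches are applied hunk by hunk via
	 * {@link #applyText(Repository, String, RawText, File, FileHeader, CheckoutMetadata)};
	 * git binary patches (literal or delta) are handled by
	 * {@link #applyBinary(Repository, String, File, FileHeader, StreamSupplier, ObjectId, CheckoutMetadata)}.
	 * A {@link TreeWalk} over the work tree and the index is used to determine
	 * the clean/smudge filters and the EOL handling configured for the path.
	 */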
	private void apply(Repository repository, String path, DirCache cache,
			File f, FileHeader fh) throws IOException, PatchApplyException {
		if (PatchType.BINARY.equals(fh.getPatchType())) {
			// A "Binary files differ" patch carries no content; nothing to do.
			return;
		}
		boolean convertCrLf = needsCrLfConversion(f, fh);
		// Use a TreeWalk with a DirCacheIterator to pick up the correct
		// clean/smudge filters and the EOL stream type configured for the
		// path.
		try (TreeWalk walk = new TreeWalk(repository)) {
			walk.setOperationType(OperationType.CHECKIN_OP);
			FileTreeIterator files = new FileTreeIterator(repository);
			int fileIdx = walk.addTree(files);
			int cacheIdx = walk.addTree(new DirCacheIterator(cache));
			files.setDirCacheIterator(walk, cacheIdx);
			walk.setFilter(AndTreeFilter.create(
					PathFilterGroup.createFromStrings(path),
					new NotIgnoredFilter(fileIdx)));
			walk.setRecursive(true);
			if (walk.next()) {
				// If CR-LF conversion is wanted, force writing the result with
				// CR-LF; otherwise use whatever the check-out stream type for
				// this path would be.
				EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
						: walk.getEolStreamType(OperationType.CHECKOUT_OP);
				String command = walk.getFilterCommand(
						Constants.ATTR_FILTER_TYPE_SMUDGE);
				CheckoutMetadata checkOut = new CheckoutMetadata(streamType,
						command);
				FileTreeIterator file = walk.getTree(fileIdx,
						FileTreeIterator.class);
				if (file != null) {
					if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
						applyBinary(repository, path, f, fh,
								file::openEntryStream, file.getEntryObjectId(),
								checkOut);
					} else {
						command = walk.getFilterCommand(
								Constants.ATTR_FILTER_TYPE_CLEAN);
						RawText raw;
						// Read the file as it would be checked in: with the
						// clean filter applied and, if needed, CR-LF converted
						// to LF.
						try (InputStream input = filterClean(repository, path,
								new FileInputStream(f), convertCrLf, command)) {
							raw = new RawText(
									IO.readWholeStream(input, 0).array());
						}
						applyText(repository, path, raw, f, fh, checkOut);
					}
					return;
				}
			}
		}
		// Fall-back for files not picked up by the TreeWalk (for instance
		// ignored files): apply the patch without any filters.
		RawText raw;
		CheckoutMetadata checkOut;
		if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
			checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
			applyBinary(repository, path, f, fh, () -> new FileInputStream(f),
					null, checkOut);
		} else {
			if (convertCrLf) {
				try (InputStream input = EolStreamTypeUtil.wrapInputStream(
						new FileInputStream(f), EolStreamType.TEXT_LF)) {
					raw = new RawText(IO.readWholeStream(input, 0).array());
				}
				checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null);
			} else {
				raw = new RawText(f);
				checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
			}
			applyText(repository, path, raw, f, fh, checkOut);
		}
	}

	private boolean needsCrLfConversion(File f, FileHeader fileHeader)
			throws IOException {
		if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
			return false;
		}
		if (!hasCrLf(fileHeader)) {
			try (InputStream input = new FileInputStream(f)) {
				return RawText.isCrLfText(input);
			}
		}
		return false;
	}

	private static boolean hasCrLf(FileHeader fileHeader) {
		if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) {
			return false;
		}
		for (HunkHeader header : fileHeader.getHunks()) {
			byte[] buf = header.getBuffer();
			int hunkEnd = header.getEndOffset();
			int lineStart = header.getStartOffset();
			while (lineStart < hunkEnd) {
				int nextLineStart = RawParseUtils.nextLF(buf, lineStart);
				if (nextLineStart > hunkEnd) {
					nextLineStart = hunkEnd;
				}
				if (nextLineStart <= lineStart) {
					break;
				}
				if (nextLineStart - lineStart > 1) {
					char first = (char) (buf[lineStart] & 0xFF);
					if (first == ' ' || first == '-') {
						// Context or removed line of the old image: does it
						// end in CR-LF?
						if (buf[nextLineStart - 2] == '\r') {
							return true;
						}
					}
				}
				lineStart = nextLineStart;
			}
		}
		return false;
	}
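
	/**
	 * Runs the given clean filter (and, if requested, CR-LF to LF conversion)
	 * over the content read from {@code fromFile} and returns a stream of the
	 * filtered content. Built-in filters registered with
	 * {@link FilterCommandRegistry} are run in-process; other filter commands
	 * are executed in a shell in the repository's work tree.
	 */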
	private InputStream filterClean(Repository repository, String path,
			InputStream fromFile, boolean convertCrLf, String filterCommand)
			throws IOException {
		InputStream input = fromFile;
		if (convertCrLf) {
			input = EolStreamTypeUtil.wrapInputStream(input,
					EolStreamType.TEXT_LF);
		}
		if (StringUtils.isEmptyOrNull(filterCommand)) {
			return input;
		}
		if (FilterCommandRegistry.isRegistered(filterCommand)) {
			LocalFile buffer = new TemporaryBuffer.LocalFile(null);
			FilterCommand command = FilterCommandRegistry.createFilterCommand(
					filterCommand, repository, input, buffer);
			while (command.run() != -1) {
				// loop as long as command.run() tells there is work to do
			}
			return buffer.openInputStreamWithAutoDestroy();
		}
		FS fs = repository.getFS();
		ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
				new String[0]);
		filterProcessBuilder.directory(repository.getWorkTree());
		filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
				repository.getDirectory().getAbsolutePath());
		ExecutionResult result;
		try {
			// Feed the (possibly CR-LF converted) file content to the external
			// filter, not the already closed patch input stream.
			result = fs.execute(filterProcessBuilder, input);
		} catch (IOException | InterruptedException e) {
			throw new IOException(
					new FilterFailedException(e, filterCommand, path));
		}
		int rc = result.getRc();
		if (rc != 0) {
			throw new IOException(new FilterFailedException(rc, filterCommand,
					path, result.getStdout().toByteArray(4096), RawParseUtils
							.decode(result.getStderr().toByteArray(4096))));
		}
		return result.getStdout().openInputStreamWithAutoDestroy();
	}

	/**
	 * Something that can supply an {@link InputStream}.
	 */
	private interface StreamSupplier {
		InputStream load() throws IOException;
	}

	/**
	 * An {@link ObjectLoader} backed by a {@link StreamSupplier}. It reports
	 * itself as "large" so that callers use {@link #openStream()} instead of
	 * {@link #getCachedBytes()}, which keeps the data from being fully
	 * buffered in memory.
	 */
	private static class StreamLoader extends ObjectLoader {

		private StreamSupplier data;

		private long size;

		StreamLoader(StreamSupplier data, long length) {
			this.data = data;
			this.size = length;
		}

		@Override
		public int getType() {
			return Constants.OBJ_BLOB;
		}

		@Override
		public long getSize() {
			return size;
		}

		@Override
		public boolean isLarge() {
			return true;
		}

		@Override
		public byte[] getCachedBytes() throws LargeObjectException {
			throw new LargeObjectException();
		}

		@Override
		public ObjectStream openStream()
				throws MissingObjectException, IOException {
			return new ObjectStream.Filter(getType(), getSize(),
					new BufferedInputStream(data.load()));
		}
	}
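
	// Git hashes a blob as "blob <content length>\0<content>". initHash()
	// seeds the SHA1 with that header; hash(File) then computes the object id
	// of a file's content the same way git would.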
	private void initHash(SHA1 hash, long size) {
		hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
		hash.update((byte) ' ');
		hash.update(Constants.encodeASCII(size));
		hash.update((byte) 0);
	}

	private ObjectId hash(File f) throws IOException {
		SHA1 hash = SHA1.newInstance();
		initHash(hash, f.length());
		try (InputStream input = new FileInputStream(f)) {
			byte[] buf = new byte[8192];
			int n;
			while ((n = input.read(buf)) >= 0) {
				hash.update(buf, 0, n);
			}
		}
		return hash.toObjectId();
	}
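
	/**
	 * Verifies that the file the patch is applied to matches the id the patch
	 * expects as its base: either the object id recorded in the index entry
	 * ({@code id}), or, if that is not available, the blob hash of the file on
	 * disk. A base id of all zeroes means the file is expected to be new or
	 * empty.
	 */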
	private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
			String path)
			throws PatchApplyException, IOException {
		boolean hashOk = false;
		if (id != null) {
			hashOk = baseId.equals(id);
			if (!hashOk && ChangeType.ADD.equals(type)
					&& ObjectId.zeroId().equals(baseId)) {
				// We create the file first; the object id of an empty file is
				// not the zero id but the empty blob id.
				hashOk = Constants.EMPTY_BLOB_ID.equals(id);
			}
		} else {
			if (ObjectId.zeroId().equals(baseId)) {
				// An empty or non-existing file is OK as base.
				hashOk = !f.exists() || f.length() == 0;
			} else {
				hashOk = baseId.equals(hash(f));
			}
		}
		if (!hashOk) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().applyBinaryBaseOidWrong, path));
		}
	}
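
	/**
	 * Applies a git binary patch. Two encodings exist: "literal", where the
	 * hunk carries the deflated new content in its entirety, and "delta",
	 * where the hunk carries a deflated delta that has to be applied against
	 * the full old content. In both cases the result is checked against the
	 * object id recorded in the patch before the file is replaced.
	 */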
	private void applyBinary(Repository repository, String path, File f,
			FileHeader fh, StreamSupplier loader, ObjectId id,
			CheckoutMetadata checkOut)
			throws PatchApplyException, IOException {
		if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
			throw new PatchApplyException(MessageFormat
					.format(JGitText.get().applyBinaryOidTooShort, path));
		}
		BinaryHunk hunk = fh.getForwardBinaryHunk();
		// A BinaryHunk starts at the "literal" or "delta" line; the actual
		// data begins on the next line.
		int start = RawParseUtils.nextLF(hunk.getBuffer(),
				hunk.getStartOffset());
		int length = hunk.getEndOffset() - start;
		SHA1 hash = SHA1.newInstance();
		// Write to a temporary buffer first and replace the file only once the
		// result hash has been verified.
		TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
		try {
			switch (hunk.getType()) {
			case LITERAL_DEFLATED:
				// This simply overwrites the file; check the hash of the base
				// first.
				checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
						path);
				initHash(hash, hunk.getSize());
				try (OutputStream out = buffer;
						InputStream inflated = new SHA1InputStream(hash,
								new InflaterInputStream(
										new BinaryHunkInputStream(
												new ByteArrayInputStream(
														hunk.getBuffer(), start,
														length))))) {
					DirCacheCheckout.getContent(repository, path, checkOut,
							new StreamLoader(() -> inflated, hunk.getSize()),
							null, out);
					if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().applyBinaryResultOidWrong,
								path));
					}
				}
				try (InputStream bufIn = buffer.openInputStream()) {
					Files.copy(bufIn, f.toPath(),
							StandardCopyOption.REPLACE_EXISTING);
				}
				break;
			case DELTA_DEFLATED:
				// Delta application needs random access to the base to
				// construct the result, so the old content is buffered fully.
				byte[] base;
				try (InputStream input = loader.load()) {
					base = IO.readWholeStream(input, 0).array();
				}
				// At least stream the result.
				try (BinaryDeltaInputStream input = new BinaryDeltaInputStream(
						base,
						new InflaterInputStream(new BinaryHunkInputStream(
								new ByteArrayInputStream(hunk.getBuffer(),
										start, length))))) {
					long finalSize = input.getExpectedResultSize();
					initHash(hash, finalSize);
					try (OutputStream out = buffer;
							SHA1InputStream hashed = new SHA1InputStream(hash,
									input)) {
						DirCacheCheckout.getContent(repository, path, checkOut,
								new StreamLoader(() -> hashed, finalSize), null,
								out);
						if (!fh.getNewId().toObjectId()
								.equals(hash.toObjectId())) {
							throw new PatchApplyException(MessageFormat.format(
									JGitText.get().applyBinaryResultOidWrong,
									path));
						}
					}
				}
				try (InputStream bufIn = buffer.openInputStream()) {
					Files.copy(bufIn, f.toPath(),
							StandardCopyOption.REPLACE_EXISTING);
				}
				break;
			default:
				break;
			}
		} finally {
			buffer.destroy();
		}
	}
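
	/**
	 * Applies a text patch hunk by hunk.
	 * <p>
	 * The old content is split into lines, and each hunk is matched against
	 * the current line list. Hunks are expected in ascending order of their
	 * new start line. Because the recorded hunk positions may be off (the
	 * patch may have been created against a slightly different version of the
	 * file), a hunk that does not apply at its nominal position is shifted up
	 * or down until its context matches, as long as it does not cross the
	 * previous hunk. The resulting lines are written back through the
	 * configured smudge filter and EOL conversion via
	 * {@link DirCacheCheckout}.
	 */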
	private void applyText(Repository repository, String path, RawText rt,
			File f, FileHeader fh, CheckoutMetadata checkOut)
			throws IOException, PatchApplyException {
		List<ByteBuffer> oldLines = new ArrayList<>(rt.size());
		for (int i = 0; i < rt.size(); i++) {
			oldLines.add(rt.getRawString(i));
		}
		List<ByteBuffer> newLines = new ArrayList<>(oldLines);
		int afterLastHunk = 0;
		int lineNumberShift = 0;
		int lastHunkNewLine = -1;
		for (HunkHeader hh : fh.getHunks()) {
			// Hunks are expected to come in ascending order of their new
			// start line.
			if (hh.getNewStartLine() <= lastHunkNewLine) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			lastHunkNewLine = hh.getNewStartLine();

			byte[] b = new byte[hh.getEndOffset() - hh.getStartOffset()];
			System.arraycopy(hh.getBuffer(), hh.getStartOffset(), b, 0,
					b.length);
			RawText hrt = new RawText(b);

			List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size());
			for (int i = 0; i < hrt.size(); i++) {
				hunkLines.add(hrt.getRawString(i));
			}

			if (hh.getNewStartLine() == 0) {
				// Must be the single hunk for clearing all content.
				if (fh.getHunks().size() == 1
						&& canApplyAt(hunkLines, newLines, 0)) {
					newLines.clear();
					break;
				}
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			// Line numbers reported by the hunk may be off, so don't rely on
			// them blindly.
			int applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
			// But they definitely should not go backwards.
			if (applyAt < afterLastHunk && lineNumberShift < 0) {
				applyAt = hh.getNewStartLine() - 1;
				lineNumberShift = 0;
			}
			if (applyAt < afterLastHunk) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			boolean applies = false;
			int oldLinesInHunk = hh.getLinesContext()
					+ hh.getOldImage().getLinesDeleted();
			if (oldLinesInHunk <= 1) {
				// Don't shift hunks without context lines. Just try the
				// position corrected by the current lineNumberShift, and if
				// that fails, the position recorded in the hunk header.
				applies = canApplyAt(hunkLines, newLines, applyAt);
				if (!applies && lineNumberShift != 0) {
					applyAt = hh.getNewStartLine() - 1;
					applies = applyAt >= afterLastHunk
							&& canApplyAt(hunkLines, newLines, applyAt);
				}
			} else {
				int maxShift = applyAt - afterLastHunk;
				for (int shift = 0; shift <= maxShift; shift++) {
					if (canApplyAt(hunkLines, newLines, applyAt - shift)) {
						applies = true;
						applyAt -= shift;
						break;
					}
				}
				if (!applies) {
					// Try shifting the hunk downwards.
					applyAt = hh.getNewStartLine() - 1 + lineNumberShift;
					maxShift = newLines.size() - applyAt - oldLinesInHunk;
					for (int shift = 1; shift <= maxShift; shift++) {
						if (canApplyAt(hunkLines, newLines, applyAt + shift)) {
							applies = true;
							applyAt += shift;
							break;
						}
					}
				}
			}
			if (!applies) {
				throw new PatchApplyException(MessageFormat
						.format(JGitText.get().patchApplyException, hh));
			}
			// Hunk applies at applyAt. Apply it, and update afterLastHunk and
			// lineNumberShift.
			lineNumberShift = applyAt - hh.getNewStartLine() + 1;
			int sz = hunkLines.size();
			for (int j = 1; j < sz; j++) {
				ByteBuffer hunkLine = hunkLines.get(j);
				if (!hunkLine.hasRemaining()) {
					// Completely empty line; accept as empty context line.
					applyAt++;
					continue;
				}
				switch (hunkLine.array()[hunkLine.position()]) {
				case ' ':
					applyAt++;
					break;
				case '-':
					newLines.remove(applyAt);
					break;
				case '+':
					newLines.add(applyAt++, slice(hunkLine, 1));
					break;
				default:
					break;
				}
			}
			afterLastHunk = applyAt;
		}
		// A trailing null element marks a final newline: it makes the
		// comparison below take the trailing newline into account, and it
		// makes the write loop below emit one.
		if (!isNoNewlineAtEndOfFile(fh)) {
			newLines.add(null);
		}
		if (!rt.isMissingNewlineAtEnd()) {
			oldLines.add(null);
		}
		if (oldLines.equals(newLines)) {
			return; // Unchanged; don't touch the file.
		}

		TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
		try {
			try (OutputStream out = buffer) {
				for (Iterator<ByteBuffer> l = newLines.iterator(); l
						.hasNext();) {
					ByteBuffer line = l.next();
					if (line == null) {
						// Must be the marker for the final newline.
						break;
					}
					out.write(line.array(), line.position(), line.remaining());
					if (l.hasNext()) {
						out.write('\n');
					}
				}
			}
			try (OutputStream output = new FileOutputStream(f)) {
				DirCacheCheckout.getContent(repository, path, checkOut,
						new StreamLoader(buffer::openInputStream,
								buffer.length()),
						null, output);
			}
		} finally {
			buffer.destroy();
		}
		repository.getFS().setExecute(f,
				fh.getNewMode() == FileMode.EXECUTABLE_FILE);
	}
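
	/**
	 * Checks whether the given hunk matches the current content at the given
	 * line: every context (' ') and removed ('-') line of the hunk must be
	 * equal to the corresponding line in {@code newLines}. Added lines are
	 * skipped; they don't have to match anything.
	 */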
	private boolean canApplyAt(List<ByteBuffer> hunkLines,
			List<ByteBuffer> newLines, int line) {
		int sz = hunkLines.size();
		int limit = newLines.size();
		int pos = line;
		for (int j = 1; j < sz; j++) {
			ByteBuffer hunkLine = hunkLines.get(j);
			if (!hunkLine.hasRemaining()) {
				// Empty hunk line: the file must have an empty line here, too.
				if (pos >= limit || newLines.get(pos).hasRemaining()) {
					return false;
				}
				pos++;
				continue;
			}
			switch (hunkLine.array()[hunkLine.position()]) {
			case ' ':
			case '-':
				if (pos >= limit
						|| !newLines.get(pos).equals(slice(hunkLine, 1))) {
					return false;
				}
				pos++;
				break;
			default:
				break;
			}
		}
		return true;
	}

	private ByteBuffer slice(ByteBuffer b, int off) {
		int newOffset = b.position() + off;
		return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
	}

	private boolean isNoNewlineAtEndOfFile(FileHeader fh) {
		List<? extends HunkHeader> hunks = fh.getHunks();
		if (hunks == null || hunks.isEmpty()) {
			return false;
		}
		HunkHeader lastHunk = hunks.get(hunks.size() - 1);
		byte[] buf = new byte[lastHunk.getEndOffset()
				- lastHunk.getStartOffset()];
		System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf,
				0, buf.length);
		RawText lhrt = new RawText(buf);
		return lhrt.getString(lhrt.size() - 1)
				.equals("\\ No newline at end of file");
	}

	/**
	 * An {@link InputStream} that updates a {@link SHA1} on every byte read.
	 */
	private static class SHA1InputStream extends InputStream {

		private final SHA1 hash;

		private final InputStream in;

		SHA1InputStream(SHA1 hash, InputStream in) {
			this.hash = hash;
			this.in = in;
		}

		@Override
		public int read() throws IOException {
			int b = in.read();
			if (b >= 0) {
				hash.update((byte) b);
			}
			return b;
		}

		@Override
		public int read(byte[] b, int off, int len) throws IOException {
			int n = in.read(b, off, len);
			if (n > 0) {
				hash.update(b, off, n);
			}
			return n;
		}

		@Override
		public void close() throws IOException {
			in.close();
		}
	}
}