1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44 package org.eclipse.jgit.internal.storage.file;
45
46 import static java.nio.file.StandardCopyOption.ATOMIC_MOVE;
47 import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
48 import static org.eclipse.jgit.lib.Constants.OBJ_OFS_DELTA;
49 import static org.eclipse.jgit.lib.Constants.OBJ_REF_DELTA;
50
51 import java.io.BufferedInputStream;
52 import java.io.EOFException;
53 import java.io.File;
54 import java.io.FileOutputStream;
55 import java.io.FilterInputStream;
56 import java.io.IOException;
57 import java.io.InputStream;
58 import java.io.OutputStream;
59 import java.io.RandomAccessFile;
60 import java.nio.channels.Channels;
61 import java.text.MessageFormat;
62 import java.util.Collection;
63 import java.util.Collections;
64 import java.util.HashSet;
65 import java.util.List;
66 import java.util.Set;
67 import java.util.zip.CRC32;
68 import java.util.zip.DataFormatException;
69 import java.util.zip.Deflater;
70 import java.util.zip.DeflaterOutputStream;
71 import java.util.zip.Inflater;
72 import java.util.zip.InflaterInputStream;
73
74 import org.eclipse.jgit.errors.CorruptObjectException;
75 import org.eclipse.jgit.errors.IncorrectObjectTypeException;
76 import org.eclipse.jgit.errors.LargeObjectException;
77 import org.eclipse.jgit.errors.MissingObjectException;
78 import org.eclipse.jgit.internal.JGitText;
79 import org.eclipse.jgit.lib.AbbreviatedObjectId;
80 import org.eclipse.jgit.lib.AnyObjectId;
81 import org.eclipse.jgit.lib.Constants;
82 import org.eclipse.jgit.lib.InflaterCache;
83 import org.eclipse.jgit.lib.ObjectId;
84 import org.eclipse.jgit.lib.ObjectIdOwnerMap;
85 import org.eclipse.jgit.lib.ObjectInserter;
86 import org.eclipse.jgit.lib.ObjectLoader;
87 import org.eclipse.jgit.lib.ObjectReader;
88 import org.eclipse.jgit.lib.ObjectStream;
89 import org.eclipse.jgit.transport.PackParser;
90 import org.eclipse.jgit.transport.PackedObjectInfo;
91 import org.eclipse.jgit.util.BlockList;
92 import org.eclipse.jgit.util.FileUtils;
93 import org.eclipse.jgit.util.IO;
94 import org.eclipse.jgit.util.NB;
95 import org.eclipse.jgit.util.io.CountingOutputStream;
96 import org.eclipse.jgit.util.sha1.SHA1;
97
98
99
100
101
/**
 * Object inserter that buffers new objects into a single temporary pack file
 * instead of writing loose objects, and moves the pack (plus its index) into
 * the object database on {@link #flush()}.
 */
public class PackInserter extends ObjectInserter {

	/** Pack index format version written for the generated pack. */
	private static final int INDEX_VERSION = 2;

	/** Object database that receives the finished pack. */
	private final ObjectDirectory db;

	/** Objects written so far, in insertion order; null until first insert. */
	private List<PackedObjectInfo> objectList;
	/** Same objects as {@link #objectList}, keyed by id for fast lookup. */
	private ObjectIdOwnerMap<PackedObjectInfo> objectMap;
	/** True while the temporary pack should be deleted on {@link #close()}. */
	private boolean rollback;
	/** Whether to skip inserting objects already present in the repository. */
	private boolean checkExisting = true;

	/** Deflate level used when compressing object contents. */
	private int compression = Deflater.BEST_COMPRESSION;
	/** Temporary pack file in the object directory; null until first insert. */
	private File tmpPack;
	/** Stream writing to {@link #tmpPack}; null until first insert. */
	private PackStream packOut;
	/** Cached inflater reused for reading buffered objects back. */
	private Inflater cachedInflater;

	/**
	 * Create a new inserter writing into the given object database.
	 *
	 * @param db
	 *            destination object database.
	 */
	PackInserter(ObjectDirectory db) {
		this.db = db;
	}
121
122
123
124
125
126
127
128
129
	/**
	 * Set whether to check if objects already exist in the repository before
	 * inserting them.
	 *
	 * @param check
	 *            if false, skip the repository lookup in
	 *            {@link #insert(int, byte[], int, int)} and always buffer the
	 *            object into the pack.
	 */
	public void checkExisting(boolean check) {
		checkExisting = check;
	}

	/**
	 * Set the compression level used for newly written object contents.
	 *
	 * @param compression
	 *            a {@link Deflater} compression level, e.g.
	 *            {@link Deflater#BEST_COMPRESSION}.
	 */
	public void setCompressionLevel(int compression) {
		this.compression = compression;
	}

	/** @return size of the scratch buffer shared with the base inserter. */
	int getBufferSize() {
		return buffer().length;
	}
147
148
149 @Override
150 public ObjectId insert(int type, byte[] data, int off, int len)
151 throws IOException {
152 ObjectId id = idFor(type, data, off, len);
153 if (objectMap != null && objectMap.contains(id)) {
154 return id;
155 }
156
157 if (checkExisting && db.hasPackedObject(id)) {
158 return id;
159 }
160
161 long offset = beginObject(type, len);
162 packOut.compress.write(data, off, len);
163 packOut.compress.finish();
164 return endObject(id, offset);
165 }
166
167
168 @Override
169 public ObjectId insert(int type, long len, InputStream in)
170 throws IOException {
171 byte[] buf = buffer();
172 if (len <= buf.length) {
173 IO.readFully(in, buf, 0, (int) len);
174 return insert(type, buf, 0, (int) len);
175 }
176
177 long offset = beginObject(type, len);
178 SHA1 md = digest();
179 md.update(Constants.encodedTypeString(type));
180 md.update((byte) ' ');
181 md.update(Constants.encodeASCII(len));
182 md.update((byte) 0);
183
184 while (0 < len) {
185 int n = in.read(buf, 0, (int) Math.min(buf.length, len));
186 if (n <= 0) {
187 throw new EOFException();
188 }
189 md.update(buf, 0, n);
190 packOut.compress.write(buf, 0, n);
191 len -= n;
192 }
193 packOut.compress.finish();
194 return endObject(md.toObjectId(), offset);
195 }
196
	/**
	 * Start writing a new object, creating the pack on first use.
	 *
	 * @return offset in the pack file where the object's header begins.
	 */
	private long beginObject(int type, long len) throws IOException {
		if (packOut == null) {
			beginPack();
		}
		long offset = packOut.getOffset();
		packOut.beginObject(type, len);
		return offset;
	}

	/**
	 * Record a fully written object in the in-memory tables.
	 *
	 * @param id
	 *            id of the object just written.
	 * @param offset
	 *            pack file offset of the object's header.
	 * @return {@code id}, for caller convenience.
	 */
	private ObjectId endObject(ObjectId id, long offset) {
		PackedObjectInfo obj = new PackedObjectInfo(id);
		obj.setOffset(offset);
		// crc32 was reset in beginObject and covers this object's bytes.
		obj.setCRC((int) packOut.crc32.getValue());
		objectList.add(obj);
		objectMap.addIfAbsent(obj);
		return id;
	}
214
215 private static File idxFor(File packFile) {
216 String p = packFile.getName();
217 return new File(
218 packFile.getParentFile(),
219 p.substring(0, p.lastIndexOf('.')) + ".idx");
220 }
221
	/**
	 * Lazily create the temporary pack file in the object directory and
	 * write its header.
	 *
	 * @throws IOException
	 *             the temporary file could not be created or written.
	 */
	private void beginPack() throws IOException {
		objectList = new BlockList<>();
		objectMap = new ObjectIdOwnerMap<>();

		rollback = true;
		tmpPack = File.createTempFile("insert_", ".pack", db.getDirectory());
		packOut = new PackStream(tmpPack);

		// Write a header with a placeholder object count of 1; the real
		// count is patched in by PackStream#finishPack() during flush.
		packOut.write(packOut.hdrBuf, 0, writePackHeader(packOut.hdrBuf, 1));
	}
233
	/**
	 * Encode a pack file header at the start of {@code buf}.
	 *
	 * @param buf
	 *            destination buffer; must hold at least 12 bytes.
	 * @param objectCount
	 *            number of objects recorded in the header.
	 * @return number of header bytes written (always 12).
	 */
	private static int writePackHeader(byte[] buf, int objectCount) {
		System.arraycopy(Constants.PACK_SIGNATURE, 0, buf, 0, 4);
		NB.encodeInt32(buf, 4, 2); // Pack file format version 2.
		NB.encodeInt32(buf, 8, objectCount);
		return 12;
	}
240
241
	/**
	 * {@inheritDoc}
	 * <p>
	 * Not supported by this inserter.
	 *
	 * @throws UnsupportedOperationException
	 *             always.
	 */
	@Override
	public PackParser newPackParser(InputStream in) {
		throw new UnsupportedOperationException();
	}

	/**
	 * @return a reader that can see both the underlying database and the
	 *         objects buffered in this inserter but not yet flushed.
	 */
	@Override
	public ObjectReader newReader() {
		return new Reader();
	}
252
253
254 @Override
255 public void flush() throws IOException {
256 if (tmpPack == null) {
257 return;
258 }
259
260 if (packOut == null) {
261 throw new IOException();
262 }
263
264 byte[] packHash;
265 try {
266 packHash = packOut.finishPack();
267 } finally {
268 packOut = null;
269 }
270
271 Collections.sort(objectList);
272 File tmpIdx = idxFor(tmpPack);
273 writePackIndex(tmpIdx, packHash, objectList);
274
275 File realPack = new File(db.getPackDirectory(),
276 "pack-" + computeName(objectList).name() + ".pack");
277 db.closeAllPackHandles(realPack);
278 tmpPack.setReadOnly();
279 FileUtils.rename(tmpPack, realPack, ATOMIC_MOVE);
280
281 File realIdx = idxFor(realPack);
282 tmpIdx.setReadOnly();
283 try {
284 FileUtils.rename(tmpIdx, realIdx, ATOMIC_MOVE);
285 } catch (IOException e) {
286 File newIdx = new File(
287 realIdx.getParentFile(), realIdx.getName() + ".new");
288 try {
289 FileUtils.rename(tmpIdx, newIdx, ATOMIC_MOVE);
290 } catch (IOException e2) {
291 newIdx = tmpIdx;
292 e = e2;
293 }
294 throw new IOException(MessageFormat.format(
295 JGitText.get().panicCantRenameIndexFile, newIdx,
296 realIdx), e);
297 }
298
299 db.openPack(realPack);
300 rollback = false;
301 clear();
302 }
303
	/**
	 * Write a pack index (format version {@link #INDEX_VERSION}) for the
	 * given objects.
	 *
	 * @param idx
	 *            destination index file.
	 * @param packHash
	 *            trailing SHA-1 checksum of the pack file contents.
	 * @param list
	 *            objects contained in the pack, sorted by object id.
	 * @throws IOException
	 *             the index could not be written.
	 */
	private static void writePackIndex(File idx, byte[] packHash,
			List<PackedObjectInfo> list) throws IOException {
		try (OutputStream os = new FileOutputStream(idx)) {
			PackIndexWriter w = PackIndexWriter.createVersion(os, INDEX_VERSION);
			w.write(list, packHash);
		}
	}

	/**
	 * Compute the pack's name: a SHA-1 over the raw ids of the contained
	 * objects, in list order.
	 *
	 * @param list
	 *            objects in the pack.
	 * @return hash used to name the final pack file.
	 */
	private ObjectId computeName(List<PackedObjectInfo> list) {
		SHA1 md = digest().reset();
		byte[] buf = buffer();
		for (PackedObjectInfo otp : list) {
			otp.copyRawTo(buf, 0);
			md.update(buf, 0, OBJECT_ID_LENGTH);
		}
		return ObjectId.fromRaw(md.digest());
	}
321
322
	/**
	 * Release resources and, when {@link #flush()} was never completed,
	 * delete the temporary pack and index files.
	 */
	@Override
	public void close() {
		try {
			if (packOut != null) {
				try {
					packOut.close();
				} catch (IOException err) {
					// Ignore a close failure; the pack is being discarded.
				}
			}
			if (rollback && tmpPack != null) {
				try {
					FileUtils.delete(tmpPack);
				} catch (IOException e) {
					// Best-effort cleanup of a temporary file; ignore.
				}
				try {
					FileUtils.delete(idxFor(tmpPack));
				} catch (IOException e) {
					// Best-effort cleanup of a temporary file; ignore.
				}
				rollback = false;
			}
		} finally {
			clear();
			try {
				InflaterCache.release(cachedInflater);
			} finally {
				cachedInflater = null;
			}
		}
	}
355
	/** Reset per-pack state so the next insert starts a fresh pack. */
	private void clear() {
		objectList = null;
		objectMap = null;
		tmpPack = null;
		packOut = null;
	}

	/**
	 * @return the cached inflater, reset and ready for a new stream; taken
	 *         from {@link InflaterCache} on first use and released back in
	 *         {@link #close()}.
	 */
	private Inflater inflater() {
		if (cachedInflater == null) {
			cachedInflater = InflaterCache.get();
		} else {
			cachedInflater.reset();
		}
		return cachedInflater;
	}
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
	/**
	 * Stream appending objects to the temporary pack file, while keeping the
	 * underlying file handle available for reading buffered objects back
	 * before the pack is flushed.
	 */
	private class PackStream extends OutputStream {
		/** Scratch buffer for object headers and single-byte writes. */
		final byte[] hdrBuf;
		/** CRC of the object currently being written; reset per object. */
		final CRC32 crc32;
		/** Deflating view of this stream; object content is written here. */
		final DeflaterOutputStream compress;

		private final RandomAccessFile file;
		private final CountingOutputStream out;
		private final Deflater deflater;

		// True while the file pointer is at the end of the file, so appends
		// may proceed without an extra seek. Cleared by seek() and by the
		// read-back stream in StreamLoader.
		private boolean atEnd;

		PackStream(File pack) throws IOException {
			file = new RandomAccessFile(pack, "rw");
			// out wraps the same file descriptor as file; writes through
			// either view move the shared file pointer.
			out = new CountingOutputStream(new FileOutputStream(file.getFD()));
			deflater = new Deflater(compression);
			compress = new DeflaterOutputStream(this, deflater, 8192);
			hdrBuf = new byte[32];
			crc32 = new CRC32();
			atEnd = true;
		}
406
		long getOffset() {
			// All appends go through out, so its running byte count equals
			// the file length, i.e. the offset where the next object will
			// start. Read-backs move the file pointer, but write() re-seeks
			// to the end before appending, so the count stays accurate.
			return out.getCount();
		}

		/** Position the file pointer for reading back earlier content. */
		void seek(long offset) throws IOException {
			file.seek(offset);
			atEnd = false;
		}

		/** Begin a new object: reset per-object state, write its header. */
		void beginObject(int objectType, long length) throws IOException {
			crc32.reset();
			deflater.reset();
			write(hdrBuf, 0, encodeTypeSize(objectType, length));
		}
426
		/**
		 * Encode a pack object header (3-bit type plus variable-length
		 * inflated size) into {@link #hdrBuf}.
		 * <p>
		 * The first byte carries the type and the low 4 bits of the size;
		 * each subsequent byte carries 7 more size bits. The high bit of a
		 * byte is set while more size bytes follow.
		 *
		 * @param type
		 *            object type code.
		 * @param rawLength
		 *            inflated size of the object.
		 * @return number of header bytes used in {@link #hdrBuf}.
		 */
		private int encodeTypeSize(int type, long rawLength) {
			long nextLength = rawLength >>> 4;
			hdrBuf[0] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (rawLength & 0x0F));
			rawLength = nextLength;
			int n = 1;
			while (rawLength > 0) {
				nextLength >>>= 7;
				hdrBuf[n++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (rawLength & 0x7F));
				rawLength = nextLength;
			}
			return n;
		}
439
		@Override
		public void write(int b) throws IOException {
			hdrBuf[0] = (byte) b;
			write(hdrBuf, 0, 1);
		}

		@Override
		public void write(byte[] data, int off, int len) throws IOException {
			crc32.update(data, off, len);
			if (!atEnd) {
				// A read-back moved the shared file pointer; return to the
				// end so the append does not overwrite pack content.
				file.seek(file.length());
				atEnd = true;
			}
			out.write(data, off, len);
		}
455
		/**
		 * Patch the pack header with the real object count, append the
		 * pack's trailing SHA-1 checksum, and close the stream.
		 * <p>
		 * NOTE: this method uses {@code file.seek} plus {@code out.write}
		 * directly rather than {@code this.write(...)}, keeping the
		 * per-object {@link #crc32} and the {@link #atEnd} bookkeeping out
		 * of the finish path.
		 *
		 * @return the SHA-1 over the full pack contents (also written as
		 *         the pack trailer).
		 * @throws IOException
		 *             the pack file could not be read or written.
		 */
		byte[] finishPack() throws IOException {
			try {
				// Rewrite the header at offset 0 with the final count.
				file.seek(0);
				out.write(hdrBuf, 0, writePackHeader(hdrBuf, objectList.size()));

				// Hash the entire pack, including the fixed-up header; the
				// loop leaves the file pointer at EOF, so the hash is then
				// appended as the trailer.
				byte[] buf = buffer();
				SHA1 md = digest().reset();
				file.seek(0);
				while (true) {
					int r = file.read(buf);
					if (r < 0) {
						break;
					}
					md.update(buf, 0, r);
				}
				byte[] packHash = md.digest();
				out.write(packHash, 0, packHash.length);
				return packHash;
			} finally {
				close();
			}
		}
483
		@Override
		public void close() throws IOException {
			deflater.end();
			try {
				out.close();
			} finally {
				file.close();
			}
		}

		/**
		 * Inflate {@code len} bytes of object content stored at
		 * {@code filePos} in the pack file.
		 *
		 * @param filePos
		 *            offset of the compressed data (past the object header).
		 * @param len
		 *            expected inflated size.
		 * @return the inflated bytes, or null if a buffer of {@code len}
		 *         bytes could not be allocated (caller falls back to a
		 *         streaming loader).
		 * @throws IOException
		 *             the pack file could not be read.
		 * @throws DataFormatException
		 *             the compressed stream is corrupt.
		 */
		byte[] inflate(long filePos, int len) throws IOException, DataFormatException {
			byte[] dstbuf;
			try {
				dstbuf = new byte[len];
			} catch (OutOfMemoryError noMemory) {
				return null;
			}

			byte[] srcbuf = buffer();
			Inflater inf = inflater();
			filePos += setInput(filePos, inf, srcbuf);
			for (int dstoff = 0;;) {
				int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
				dstoff += n;
				if (inf.finished()) {
					return dstbuf;
				}
				if (inf.needsInput()) {
					filePos += setInput(filePos, inf, srcbuf);
				} else if (n == 0) {
					// No progress and no input wanted: stream is corrupt.
					throw new DataFormatException();
				}
			}
		}

		/**
		 * Feed the inflater more compressed bytes from the pack file,
		 * seeking only when the file pointer is not already at filePos.
		 *
		 * @return number of bytes read from the file.
		 */
		private int setInput(long filePos, Inflater inf, byte[] buf)
				throws IOException {
			if (file.getFilePointer() != filePos) {
				seek(filePos);
			}
			int n = file.read(buf);
			if (n < 0) {
				throw new EOFException(JGitText.get().unexpectedEofInPack);
			}
			inf.setInput(buf, 0, n);
			return n;
		}
	}
532
	/**
	 * Reader resolving objects from both the underlying database and the
	 * objects buffered in this inserter's unflushed pack.
	 */
	private class Reader extends ObjectReader {
		/** Delegate reader for objects already in the database. */
		private final ObjectReader ctx;

		private Reader() {
			ctx = db.newReader();
			setStreamFileThreshold(ctx.getStreamFileThreshold());
		}

		@Override
		public ObjectReader newReader() {
			// A fresh reader sees only the database, not unflushed objects.
			return db.newReader();
		}

		@Override
		public ObjectInserter getCreatedFromInserter() {
			return PackInserter.this;
		}
550
551 @Override
552 public Collection<ObjectId> resolve(AbbreviatedObjectId id)
553 throws IOException {
554 Collection<ObjectId> stored = ctx.resolve(id);
555 if (objectList == null) {
556 return stored;
557 }
558
559 Set<ObjectId> r = new HashSet<>(stored.size() + 2);
560 r.addAll(stored);
561 for (PackedObjectInfo obj : objectList) {
562 if (id.prefixCompare(obj) == 0) {
563 r.add(obj.copy());
564 }
565 }
566 return r;
567 }
568
		@Override
		public ObjectLoader open(AnyObjectId objectId, int typeHint)
				throws MissingObjectException, IncorrectObjectTypeException,
				IOException {
			if (objectMap == null) {
				// Nothing buffered yet; only the database can have it.
				return ctx.open(objectId, typeHint);
			}

			PackedObjectInfo obj = objectMap.get(objectId);
			if (obj == null) {
				// Not buffered here; fall back to the database.
				return ctx.open(objectId, typeHint);
			}

			// Read the object's pack header back out of the temporary pack.
			byte[] buf = buffer();
			packOut.seek(obj.getOffset());
			int cnt = packOut.file.read(buf, 0, 20);
			if (cnt <= 0) {
				throw new EOFException(JGitText.get().unexpectedEofInPack);
			}

			// First header byte: continuation bit, 3-bit type, low 4 size
			// bits (see PackStream#encodeTypeSize).
			int c = buf[0] & 0xff;
			int type = (c >> 4) & 7;
			if (type == OBJ_OFS_DELTA || type == OBJ_REF_DELTA) {
				// This inserter writes whole objects only; a delta here
				// would be unreadable without its base.
				throw new IOException(MessageFormat.format(
						JGitText.get().cannotReadBackDelta, Integer.toString(type)));
			}
			if (typeHint != OBJ_ANY && type != typeHint) {
				throw new IncorrectObjectTypeException(objectId.copy(), typeHint);
			}

			// Decode the variable-length inflated size: 7 bits per
			// continuation byte, least-significant bits first.
			long sz = c & 0x0f;
			int ptr = 1;
			int shift = 4;
			while ((c & 0x80) != 0) {
				if (ptr >= cnt) {
					throw new EOFException(JGitText.get().unexpectedEofInPack);
				}
				c = buf[ptr++] & 0xff;
				sz += ((long) (c & 0x7f)) << shift;
				shift += 7;
			}

			// Inflate small objects eagerly; stream large ones (or small
			// ones whose buffer could not be allocated).
			long zpos = obj.getOffset() + ptr;
			if (sz < getStreamFileThreshold()) {
				byte[] data = inflate(obj, zpos, (int) sz);
				if (data != null) {
					return new ObjectLoader.SmallObject(type, data);
				}
			}
			return new StreamLoader(type, sz, zpos);
		}
620
		/**
		 * Inflate buffered object content, translating zlib stream
		 * corruption into a {@link CorruptObjectException} that names the
		 * offending offset and pack file.
		 */
		private byte[] inflate(PackedObjectInfo obj, long zpos, int sz)
				throws IOException, CorruptObjectException {
			try {
				return packOut.inflate(zpos, sz);
			} catch (DataFormatException dfe) {
				throw new CorruptObjectException(
						MessageFormat.format(
								JGitText.get().objectAtHasBadZlibStream,
								Long.valueOf(obj.getOffset()),
								tmpPack.getAbsolutePath()),
						dfe);
			}
		}
634
		@Override
		public Set<ObjectId> getShallowCommits() throws IOException {
			// Shallow state lives in the repository, not in this inserter.
			return ctx.getShallowCommits();
		}

		@Override
		public void close() {
			ctx.close();
		}
644
		/**
		 * Loader that streams a large object's content out of the still-open
		 * temporary pack file.
		 */
		private class StreamLoader extends ObjectLoader {
			private final int type;
			private final long size;
			// File offset of the compressed data, past the object header.
			private final long pos;

			StreamLoader(int type, long size, long pos) {
				this.type = type;
				this.size = size;
				this.pos = pos;
			}

			@Override
			public ObjectStream openStream()
					throws MissingObjectException, IOException {
				int bufsz = buffer().length;
				packOut.seek(pos);

				InputStream fileStream = new FilterInputStream(
						Channels.newInputStream(packOut.file.getChannel())) {
					// Reading from the channel moves the file pointer shared
					// with PackStream, so every read flags the pack stream
					// to re-seek to the end before its next append.

					@Override
					public int read() throws IOException {
						packOut.atEnd = false;
						return super.read();
					}

					@Override
					public int read(byte[] b) throws IOException {
						packOut.atEnd = false;
						return super.read(b);
					}

					@Override
					public int read(byte[] b, int off, int len) throws IOException {
						packOut.atEnd = false;
						return super.read(b,off,len);
					}

					@Override
					public void close() {
						// Intentionally do not close: the underlying pack
						// file stays open for further inserts and is closed
						// by PackStream#close().
					}
				};
				return new ObjectStream.Filter(
						type, size,
						new BufferedInputStream(
								new InflaterInputStream(fileStream, inflater(), bufsz), bufsz));
			}

			@Override
			public int getType() {
				return type;
			}

			@Override
			public long getSize() {
				return size;
			}

			@Override
			public byte[] getCachedBytes() throws LargeObjectException {
				// Objects loaded through this path exceed the in-memory
				// threshold; callers must use openStream() instead.
				throw new LargeObjectException.ExceedsLimit(
						getStreamFileThreshold(), size);
			}
		}
716 }
717 }