/*
 * BlockOutputStream
 *
 * Author: Lasse Collin <lasse.collin (at) tukaani.org>
 *
 * This file has been put into the public domain.
 * You can do whatever you want with this file.
 */

package org.tukaani.xz;

import java.io.OutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.tukaani.xz.common.EncoderUtil;
import org.tukaani.xz.check.Check;

class BlockOutputStream extends FinishableOutputStream {
    private final OutputStream out;
    private final CountingOutputStream outCounted;
    private FinishableOutputStream filterChain;
    private final Check check;

    private final int headerSize;
    private final long compressedSizeLimit;
    private long uncompressedSize = 0;

    private final byte[] tempBuf = new byte[1];

    public BlockOutputStream(OutputStream out, FilterEncoder[] filters,
                             Check check) throws IOException {
        this.out = out;
        this.check = check;

        // Initialize the filter chain.
        outCounted = new CountingOutputStream(out);
        filterChain = outCounted;
        for (int i = filters.length - 1; i >= 0; --i)
            filterChain = filters[i].getOutputStream(filterChain);

        // Prepare to encode the Block Header field.
        ByteArrayOutputStream bufStream = new ByteArrayOutputStream();

        // Write a dummy Block Header Size field. The real value is written
        // once everything else except CRC32 has been written.
        bufStream.write(0x00);

        // Write Block Flags. Storing Compressed Size or Uncompressed Size
        // isn't supported for now.
        bufStream.write(filters.length - 1);

        // List of Filter Flags
        for (int i = 0; i < filters.length; ++i) {
            EncoderUtil.encodeVLI(bufStream, filters[i].getFilterID());
            byte[] filterProps = filters[i].getFilterProps();
            EncoderUtil.encodeVLI(bufStream, filterProps.length);
            bufStream.write(filterProps);
        }

        // Header Padding
        while ((bufStream.size() & 3) != 0)
            bufStream.write(0x00);

        byte[] buf = bufStream.toByteArray();

        // Total size of the Block Header: Take the size of the CRC32 field
        // into account.
        headerSize = buf.length + 4;

        // This is just a sanity check.
        if (headerSize > EncoderUtil.BLOCK_HEADER_SIZE_MAX)
            throw new UnsupportedOptionsException();

        // Block Header Size
        buf[0] = (byte)(buf.length / 4);

        // Write the Block Header field to the output stream.
        out.write(buf);
        EncoderUtil.writeCRC32(out, buf);

        // Calculate the maximum allowed size of the Compressed Data field.
        // It is hard to exceed it so this is mostly to be pedantic.
        compressedSizeLimit = (EncoderUtil.VLI_MAX & ~3)
                              - headerSize - check.getSize();
    }

    public void write(int b) throws IOException {
        tempBuf[0] = (byte)b;
        write(tempBuf, 0, 1);
    }

    public void write(byte[] buf, int off, int len) throws IOException {
        filterChain.write(buf, off, len);
        check.update(buf, off, len);
        uncompressedSize += len;
        validate();
    }

    public void flush() throws IOException {
        filterChain.flush();
        validate();
    }

    public void finish() throws IOException {
        // Finish the Compressed Data field.
        filterChain.finish();
        validate();

        // Block Padding
        for (long i = outCounted.getSize(); (i & 3) != 0; ++i)
            out.write(0x00);

        // Check
        out.write(check.finish());
    }

    private void validate() throws IOException {
        long compressedSize = outCounted.getSize();

        // It is very hard to trigger this exception.
        // This is just to be pedantic.
        if (compressedSize < 0 || compressedSize > compressedSizeLimit
                || uncompressedSize < 0)
            throw new XZIOException("XZ Stream has grown too big");
    }

    public long getUnpaddedSize() {
        return headerSize + outCounted.getSize() + check.getSize();
    }

    public long getUncompressedSize() {
        return uncompressedSize;
    }
}
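
// Usage sketch (illustrative only): this class is package private and is
// normally driven by XZOutputStream. The outline below assumes the caller
// has already built a FilterEncoder chain ("filters") and has the raw
// input in "data"; Check.getInstance and XZ.CHECK_CRC64 come from the
// public API of this library.
//
//     Check check = Check.getInstance(XZ.CHECK_CRC64);
//     BlockOutputStream block = new BlockOutputStream(out, filters, check);
//     block.write(data);   // may be called many times
//     block.finish();      // flushes Compressed Data, Block Padding, Check
//
//     // The caller then records these two values in the Index so that a
//     // decoder can locate and verify the Block later.
//     long unpaddedSize = block.getUnpaddedSize();
//     long uncompressedSize = block.getUncompressedSize();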