I am trying to decompress a large file (about 1 GB) and I cannot use the file output stream method. My final document requires a byte array of the decompressed file in order to create a new file. For now I have been manually growing the array on each read, but this is too slow for large files. Is there any way to make this method more efficient?
if (primaryDocumentInputStream != null) {
    byte[] tempbuffer = new byte[536870912]; // 512 MB read buffer
    byte[] mainbuffer = new byte[536870912];
    int lenMainBuffer = 0;
    try {
        int aIntBuffer = aGZIPInputStream.read(tempbuffer);
        while (aIntBuffer > 0) {
            // Grow mainbuffer: allocate a new array and copy everything read so far
            byte[] copyBuffer = new byte[lenMainBuffer + aIntBuffer];
            System.arraycopy(mainbuffer, 0, copyBuffer, 0, lenMainBuffer);
            System.arraycopy(tempbuffer, 0, copyBuffer, lenMainBuffer, aIntBuffer);
            mainbuffer = copyBuffer;
            lenMainBuffer = mainbuffer.length;
            aIntBuffer = aGZIPInputStream.read(tempbuffer);
        }
        primaryDocumentOutputDocument.setBody(mainbuffer);
        wfc.putPrimaryDocument(primaryDocumentOutputDocument);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
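For comparison, reallocating and copying the whole accumulated array on every read is quadratic in the file size. The usual way to avoid that is to accumulate into a ByteArrayOutputStream, which grows its internal array geometrically, so the total copying cost is amortized linear. Below is a minimal sketch of that approach; the helper name decompressToByteArray and the buffer sizes are my own choices, not part of the code above:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

public class GzipToByteArray {

    // Reads the whole GZIP stream and returns the decompressed bytes.
    // ByteArrayOutputStream doubles its internal array as needed, so the
    // total copying is O(n) amortized instead of O(n^2).
    static byte[] decompressToByteArray(InputStream compressed) throws IOException {
        try (GZIPInputStream gzip = new GZIPInputStream(compressed)) {
            // Sizing the initial capacity generously (assumed 8 MB here)
            // reduces the number of internal grow-and-copy steps.
            ByteArrayOutputStream out = new ByteArrayOutputStream(8 * 1024 * 1024);
            byte[] buffer = new byte[64 * 1024]; // 64 KB chunk per read
            int n;
            while ((n = gzip.read(buffer)) > 0) {
                out.write(buffer, 0, n);
            }
            return out.toByteArray(); // one final copy of the full result
        }
    }
}

Assuming setBody and wfc work as in the original snippet, the call site would then look like:

primaryDocumentOutputDocument.setBody(decompressToByteArray(primaryDocumentInputStream));
wfc.putPrimaryDocument(primaryDocumentOutputDocument);

Note that toByteArray() still makes one final full-size copy, so a 1 GB result briefly needs about 2 GB of heap, and Java arrays cannot exceed roughly 2 GB in any case, so the JVM heap (-Xmx) has to be sized accordingly.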