refactor some stream code

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1898213 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
PJ Fanning 2022-02-19 12:08:48 +00:00
parent 4998436a00
commit 788dd3f03c
14 changed files with 158 additions and 148 deletions

View File

@@ -16,12 +16,12 @@
==================================================================== */
package org.apache.poi.stress;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.openxml4j.opc.PackageAccess;
import org.apache.poi.util.IOUtils;
@@ -41,11 +41,11 @@ public class XSSFBFileHandler extends AbstractFileHandler {
public void handleFile(InputStream stream, String path) throws Exception {
byte[] bytes = IOUtils.toByteArray(stream);
try (OPCPackage opcPackage = OPCPackage.open(new ByteArrayInputStream(bytes))) {
try (OPCPackage opcPackage = OPCPackage.open(new UnsynchronizedByteArrayInputStream(bytes))) {
testOne(opcPackage);
}
testNotHandledByWorkbookException(OPCPackage.open(new ByteArrayInputStream(bytes)));
testNotHandledByWorkbookException(OPCPackage.open(new UnsynchronizedByteArrayInputStream(bytes)));
}
private void testNotHandledByWorkbookException(OPCPackage pkg) throws IOException {

View File

@@ -17,7 +17,6 @@
package org.apache.poi.xssf.usermodel;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.EnumMap;
@@ -29,6 +28,7 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.StringBuilderWriter;
import org.apache.poi.ooxml.util.DocumentHelper;
import org.apache.poi.ss.usermodel.DifferentialStyleProvider;
@@ -407,7 +407,10 @@ public enum XSSFBuiltinTableStyle {
// hack because I can't figure out how to get XMLBeans to parse a sub-element in a standalone manner
// - build a fake styles.xml file with just this built-in
StylesTable styles = new StylesTable();
styles.readFrom(new ByteArrayInputStream(styleXML(dxfsNode, tableStyleNode).getBytes(StandardCharsets.UTF_8)));
try (UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream(
styleXML(dxfsNode, tableStyleNode).getBytes(StandardCharsets.UTF_8))) {
styles.readFrom(bis);
}
styleMap.put(builtIn, new XSSFBuiltinTypeStyleStyle(builtIn, styles.getExplicitTableStyle(styleName)));
}
} catch (Exception e) {

View File

@@ -26,7 +26,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Calendar;
import java.util.Date;

View File

@@ -24,7 +24,6 @@
package org.apache.poi.poifs.crypt.dsig;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
@@ -53,6 +52,7 @@ import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.poi.poifs.crypt.CryptoFunctions;
import org.apache.poi.poifs.storage.RawDataUtil;
import org.apache.poi.util.LocaleUtil;
@@ -139,7 +139,7 @@ public class DummyKeystore {
public DummyKeystore(String pfxInput, String storePass) throws GeneralSecurityException, IOException {
CryptoFunctions.registerBouncyCastle();
keystore = KeyStore.getInstance("PKCS12");
try (InputStream fis = new ByteArrayInputStream(RawDataUtil.decompress(pfxInput))) {
try (InputStream fis = new UnsynchronizedByteArrayInputStream(RawDataUtil.decompress(pfxInput))) {
keystore.load(fis, storePass.toCharArray());
}
}

View File

@@ -16,7 +16,6 @@
==================================================================== */
package org.apache.poi.extractor.ole2;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -24,6 +23,7 @@ import java.io.InputStream;
import java.util.List;
import java.util.stream.StreamSupport;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.poi.extractor.ExtractorFactory;
@@ -185,7 +185,7 @@ public class OLE2ScratchpadExtractorFactory implements ExtractorProvider {
for (AttachmentChunks attachment : msg.getAttachmentFiles()) {
if (attachment.getAttachData() != null) {
byte[] data = attachment.getAttachData().getValue();
nonPOIFS.add( new ByteArrayInputStream(data) );
nonPOIFS.add( new UnsynchronizedByteArrayInputStream(data) );
} else if (attachment.getAttachmentDirectory() != null) {
dirs.add(attachment.getAttachmentDirectory().getDirectory());
}

View File

@@ -17,9 +17,9 @@
package org.apache.poi.hdgf.streams;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.poi.hdgf.HDGFLZW;
import org.apache.poi.util.IOUtils;
@@ -92,25 +92,26 @@ public final class CompressedStreamStore extends StreamStore {
* Decompresses the given data, returning it as header + contents
*/
public static byte[][] decompress(byte[] data, int offset, int length) throws IOException {
ByteArrayInputStream bais = new ByteArrayInputStream(data, offset, length);
try (UnsynchronizedByteArrayInputStream bais = new UnsynchronizedByteArrayInputStream(data, offset, length)) {
// Decompress
HDGFLZW lzw = new HDGFLZW();
byte[] decompressed = lzw.decompress(bais);
// Decompress
HDGFLZW lzw = new HDGFLZW();
byte[] decompressed = lzw.decompress(bais);
if (decompressed.length < 4) {
throw new IllegalArgumentException("Could not read enough data to decompress: " + decompressed.length);
}
if (decompressed.length < 4) {
throw new IllegalArgumentException("Could not read enough data to decompress: " + decompressed.length);
// Split into header and contents
byte[][] ret = new byte[2][];
ret[0] = new byte[4];
ret[1] = new byte[decompressed.length - 4];
System.arraycopy(decompressed, 0, ret[0], 0, 4);
System.arraycopy(decompressed, 4, ret[1], 0, ret[1].length);
// All done
return ret;
}
// Split into header and contents
byte[][] ret = new byte[2][];
ret[0] = new byte[4];
ret[1] = new byte[decompressed.length - 4];
System.arraycopy(decompressed, 0, ret[0], 0, 4);
System.arraycopy(decompressed, 4, ret[1], 0, ret[1].length);
// All done
return ret;
}
}

View File

@@ -25,11 +25,11 @@ import java.awt.RenderingHints;
import java.awt.geom.Dimension2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.poi.common.usermodel.GenericRecord;
import org.apache.poi.hemf.usermodel.HemfPicture;
import org.apache.poi.hwmf.draw.HwmfGraphicsState;
@@ -65,7 +65,7 @@ public class HemfImageRenderer implements ImageRenderer, EmbeddedExtractor {
if (!PictureData.PictureType.EMF.contentType.equals(contentType)) {
throw new IOException("Invalid picture type");
}
image = new HemfPicture(new ByteArrayInputStream(data));
image = new HemfPicture(new UnsynchronizedByteArrayInputStream(data));
}
@Override

View File

@@ -26,7 +26,6 @@ import java.awt.Color;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.Arrays;
@@ -40,6 +39,7 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
import org.apache.poi.common.usermodel.GenericRecord;
import org.apache.poi.hemf.draw.HemfDrawProperties;
@@ -391,7 +391,7 @@ public class HemfPlusBrush {
EmfPlusBrushData brushData = brushType.constructor.get();
byte[] buf = getRawData(continuedObjectData);
try {
brushData.init(new LittleEndianInputStream(new ByteArrayInputStream(buf)), buf.length);
brushData.init(new LittleEndianInputStream(new UnsynchronizedByteArrayInputStream(buf)), buf.length);
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@@ -22,7 +22,6 @@ import static org.apache.poi.hemf.record.emfplus.HemfPlusDraw.readARGB;
import java.awt.Color;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
@@ -32,6 +31,7 @@ import java.util.function.Supplier;
import javax.imageio.ImageIO;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
import org.apache.poi.hemf.draw.HemfDrawProperties;
import org.apache.poi.hemf.draw.HemfGraphics;
@@ -419,21 +419,24 @@ public class HemfPlusImage {
if (getBitmapType() == EmfPlusBitmapDataType.PIXEL) {
return new Rectangle2D.Double(0, 0, bitmapWidth, bitmapHeight);
} else {
BufferedImage bi = ImageIO.read(new ByteArrayInputStream(getRawData(continuedObjectData)));
return new Rectangle2D.Double(bi.getMinX(), bi.getMinY(), bi.getWidth(), bi.getHeight());
try(UnsynchronizedByteArrayInputStream is = new UnsynchronizedByteArrayInputStream(getRawData(continuedObjectData))) {
BufferedImage bi = ImageIO.read(is);
return new Rectangle2D.Double(bi.getMinX(), bi.getMinY(), bi.getWidth(), bi.getHeight());
}
}
case METAFILE:
ByteArrayInputStream bis = new ByteArrayInputStream(getRawData(continuedObjectData));
switch (getMetafileType()) {
case Wmf:
case WmfPlaceable:
HwmfPicture wmf = new HwmfPicture(bis);
return wmf.getBounds();
case Emf:
case EmfPlusDual:
case EmfPlusOnly:
HemfPicture emf = new HemfPicture(bis);
return emf.getBounds();
try(UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream(getRawData(continuedObjectData))) {
switch (getMetafileType()) {
case Wmf:
case WmfPlaceable:
HwmfPicture wmf = new HwmfPicture(bis);
return wmf.getBounds();
case Emf:
case EmfPlusDual:
case EmfPlusOnly:
HemfPicture emf = new HemfPicture(bis);
return emf.getBounds();
}
}
break;
default:

View File

@@ -17,13 +17,13 @@
package org.apache.poi.hmef.attribute;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.poi.hmef.Attachment;
import org.apache.poi.hmef.HMEFMessage;
import org.apache.poi.hsmf.datatypes.MAPIProperty;
@@ -115,107 +115,108 @@ public class MAPIAttribute {
"instead received a " + parent.getProperty() + " one"
);
}
ByteArrayInputStream inp = new ByteArrayInputStream(parent.getData());
try(UnsynchronizedByteArrayInputStream inp = new UnsynchronizedByteArrayInputStream(parent.getData())) {
// First up, get the number of attributes
int count = LittleEndian.readInt(inp);
List<MAPIAttribute> attrs = new ArrayList<>();
// First up, get the number of attributes
int count = LittleEndian.readInt(inp);
List<MAPIAttribute> attrs = new ArrayList<>();
// Now, read each one in in turn
for(int i=0; i<count; i++) {
int typeAndMV = LittleEndian.readUShort(inp);
int id = LittleEndian.readUShort(inp);
// Now, read each one in in turn
for(int i=0; i<count; i++) {
int typeAndMV = LittleEndian.readUShort(inp);
int id = LittleEndian.readUShort(inp);
// Is it either Multi-Valued or Variable-Length?
boolean isMV = false;
boolean isVL = false;
int typeId = typeAndMV;
if( (typeAndMV & Types.MULTIVALUED_FLAG) != 0 ) {
isMV = true;
typeId -= Types.MULTIVALUED_FLAG;
}
if(typeId == Types.ASCII_STRING.getId() || typeId == Types.UNICODE_STRING.getId() ||
typeId == Types.BINARY.getId() || typeId == Types.DIRECTORY.getId()) {
isVL = true;
}
// Turn the type ID into a strongly typed thing
MAPIType type = Types.getById(typeId);
if (type == null) {
type = Types.createCustom(typeId);
}
// If it's a named property, rather than a standard
// MAPI property, grab the details of it
MAPIProperty prop = MAPIProperty.get(id);
if(id >= 0x8000 && id <= 0xFFFF) {
byte[] guid = new byte[16];
if (IOUtils.readFully(inp, guid) < 0) {
throw new IOException("Not enough data to read guid");
// Is it either Multi-Valued or Variable-Length?
boolean isMV = false;
boolean isVL = false;
int typeId = typeAndMV;
if( (typeAndMV & Types.MULTIVALUED_FLAG) != 0 ) {
isMV = true;
typeId -= Types.MULTIVALUED_FLAG;
}
if(typeId == Types.ASCII_STRING.getId() || typeId == Types.UNICODE_STRING.getId() ||
typeId == Types.BINARY.getId() || typeId == Types.DIRECTORY.getId()) {
isVL = true;
}
int mptype = LittleEndian.readInt(inp);
// Get the name of it
String name;
if(mptype == 0) {
// It's based on a normal one
int mpid = LittleEndian.readInt(inp);
MAPIProperty base = MAPIProperty.get(mpid);
name = base.name;
} else {
// Custom name was stored
int mplen = LittleEndian.readInt(inp);
byte[] mpdata = IOUtils.safelyAllocate(mplen, MAX_RECORD_LENGTH);
if (IOUtils.readFully(inp, mpdata) < 0) {
throw new IOException("Not enough data to read " + mplen + " bytes for attribute name");
// Turn the type ID into a strongly typed thing
MAPIType type = Types.getById(typeId);
if (type == null) {
type = Types.createCustom(typeId);
}
// If it's a named property, rather than a standard
// MAPI property, grab the details of it
MAPIProperty prop = MAPIProperty.get(id);
if(id >= 0x8000 && id <= 0xFFFF) {
byte[] guid = new byte[16];
if (IOUtils.readFully(inp, guid) < 0) {
throw new IOException("Not enough data to read guid");
}
name = StringUtil.getFromUnicodeLE(mpdata, 0, (mplen/2)-1);
skipToBoundary(mplen, inp);
int mptype = LittleEndian.readInt(inp);
// Get the name of it
String name;
if(mptype == 0) {
// It's based on a normal one
int mpid = LittleEndian.readInt(inp);
MAPIProperty base = MAPIProperty.get(mpid);
name = base.name;
} else {
// Custom name was stored
int mplen = LittleEndian.readInt(inp);
byte[] mpdata = IOUtils.safelyAllocate(mplen, MAX_RECORD_LENGTH);
if (IOUtils.readFully(inp, mpdata) < 0) {
throw new IOException("Not enough data to read " + mplen + " bytes for attribute name");
}
name = StringUtil.getFromUnicodeLE(mpdata, 0, (mplen/2)-1);
skipToBoundary(mplen, inp);
}
// Now create
prop = MAPIProperty.createCustom(id, type, name);
}
if(prop == MAPIProperty.UNKNOWN) {
prop = MAPIProperty.createCustom(id, type, "(unknown " + Integer.toHexString(id) + ")");
}
// Now create
prop = MAPIProperty.createCustom(id, type, name);
}
if(prop == MAPIProperty.UNKNOWN) {
prop = MAPIProperty.createCustom(id, type, "(unknown " + Integer.toHexString(id) + ")");
}
// Now read in the value(s)
int values = 1;
if(isMV || isVL) {
values = LittleEndian.readInt(inp);
}
if (type == Types.NULL && values > 1) {
throw new IOException("Placeholder/NULL arrays aren't supported.");
}
for(int j=0; j<values; j++) {
int len = getLength(type, inp);
byte[] data = IOUtils.safelyAllocate(len, MAX_RECORD_LENGTH);
if (IOUtils.readFully(inp, data) < 0) {
throw new IOException("Not enough data to read " + len + " bytes of attribute value");
// Now read in the value(s)
int values = 1;
if(isMV || isVL) {
values = LittleEndian.readInt(inp);
}
skipToBoundary(len, inp);
// Create
MAPIAttribute attr;
if(type == Types.UNICODE_STRING || type == Types.ASCII_STRING) {
attr = new MAPIStringAttribute(prop, typeId, data);
} else if(type == Types.APP_TIME || type == Types.TIME) {
attr = new MAPIDateAttribute(prop, typeId, data);
} else if(id == MAPIProperty.RTF_COMPRESSED.id) {
attr = new MAPIRtfAttribute(prop, typeId, data);
} else {
attr = new MAPIAttribute(prop, typeId, data);
if (type == Types.NULL && values > 1) {
throw new IOException("Placeholder/NULL arrays aren't supported.");
}
for(int j=0; j<values; j++) {
int len = getLength(type, inp);
byte[] data = IOUtils.safelyAllocate(len, MAX_RECORD_LENGTH);
if (IOUtils.readFully(inp, data) < 0) {
throw new IOException("Not enough data to read " + len + " bytes of attribute value");
}
skipToBoundary(len, inp);
// Create
MAPIAttribute attr;
if(type == Types.UNICODE_STRING || type == Types.ASCII_STRING) {
attr = new MAPIStringAttribute(prop, typeId, data);
} else if(type == Types.APP_TIME || type == Types.TIME) {
attr = new MAPIDateAttribute(prop, typeId, data);
} else if(id == MAPIProperty.RTF_COMPRESSED.id) {
attr = new MAPIRtfAttribute(prop, typeId, data);
} else {
attr = new MAPIAttribute(prop, typeId, data);
}
attrs.add(attr);
}
attrs.add(attr);
}
// All done
return attrs;
}
// All done
return attrs;
}
private static int getLength(MAPIType type, InputStream inp) throws IOException {
if (type.isFixedLength()) {
return type.getLength();

View File

@@ -17,9 +17,10 @@
package org.apache.poi.hmef.attribute;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.poi.hmef.Attachment;
import org.apache.poi.hmef.CompressedRTF;
import org.apache.poi.hmef.HMEFMessage;
@@ -44,7 +45,11 @@ public final class MAPIRtfAttribute extends MAPIAttribute {
// Decompress it, removing any trailing padding as needed
CompressedRTF rtf = new CompressedRTF();
byte[] tmp = rtf.decompress(new ByteArrayInputStream(data));
byte[] tmp;
try (InputStream is = new UnsynchronizedByteArrayInputStream(data)) {
tmp = rtf.decompress(is);
}
if(tmp.length > rtf.getDeCompressedSize()) {
this.decompressed = IOUtils.safelyClone(tmp, 0, rtf.getDeCompressedSize(), MAX_RECORD_LENGTH);
} else {

View File

@@ -17,13 +17,13 @@
package org.apache.poi.hwpf.usermodel;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.List;
import java.util.zip.InflaterInputStream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -140,8 +140,8 @@ public final class Picture {
*/
if ( matchSignature( rawContent, COMPRESSED1, 32 )
|| matchSignature( rawContent, COMPRESSED2, 32 ) ) {
try (ByteArrayInputStream bis = new ByteArrayInputStream( rawContent, 33, rawContent.length - 33 );
InflaterInputStream in = new InflaterInputStream(bis);
try (UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream( rawContent, 33, rawContent.length - 33 );
InflaterInputStream in = new InflaterInputStream(bis);
UnsynchronizedByteArrayOutputStream out = new UnsynchronizedByteArrayOutputStream()) {
IOUtils.copy(in, out);

View File

@@ -16,11 +16,11 @@
==================================================================== */
package org.apache.poi.poifs.storage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Base64;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
import org.apache.poi.util.HexRead;
import org.apache.poi.util.IOUtils;
@@ -53,7 +53,7 @@ public final class RawDataUtil {
*/
public static byte[] decompress(String data) throws IOException {
byte[] base64Bytes = Base64.getDecoder().decode(data);
return IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(base64Bytes)));
return IOUtils.toByteArray(new GZIPInputStream(new UnsynchronizedByteArrayInputStream(base64Bytes)));
}
/**

View File

@@ -17,7 +17,6 @@
package org.apache.poi.ss.util;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileOutputStream;
@@ -27,6 +26,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFCellStyle;
@@ -212,14 +212,12 @@ public class NumberRenderingSpreadsheetGenerator {
}
private static String interpretLong(byte[] fileContent, int offset) {
InputStream is = new ByteArrayInputStream(fileContent, offset, 8);
long l;
try {
l = new DataInputStream(is).readLong();
try (InputStream is = new UnsynchronizedByteArrayInputStream(fileContent, offset, 8)) {
long l = new DataInputStream(is).readLong();
return "0x" + Long.toHexString(l).toUpperCase(Locale.ROOT);
} catch (IOException e) {
throw new RuntimeException(e);
throw new IllegalStateException("Problem in interpretLong", e);
}
return "0x" + Long.toHexString(l).toUpperCase(Locale.ROOT);
}
private static boolean isNaNBytes(byte[] fileContent, int offset) {