Fix some issues flagged by LGTM static analysis

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1904048 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
PJ Fanning 2022-09-13 23:38:12 +00:00
parent e694cf4d53
commit 34601b5377
7 changed files with 55 additions and 38 deletions

View File

@ -353,6 +353,10 @@ public class XSLFPictureShape extends XSLFSimpleShape
*/
public static XSLFPictureShape addSvgImage(XSLFSheet sheet, XSLFPictureData svgPic, PictureType previewType, Rectangle2D anchor) throws IOException {
if (svgPic == null || svgPic.getType() == null) {
throw new IllegalArgumentException("Cannot process svgPic with null type");
}
SVGImageRenderer renderer = new SVGImageRenderer();
try (InputStream is = svgPic.getInputStream()) {
renderer.loadImage(is, svgPic.getType().contentType);
@ -368,16 +372,17 @@ public class XSLFPictureShape extends XSLFSimpleShape
}
BufferedImage thmBI = renderer.getImage(dim);
UnsynchronizedByteArrayOutputStream bos = new UnsynchronizedByteArrayOutputStream(100000);
// use extension instead of enum name, because of "jpeg"
ImageIO.write(thmBI, pt.extension.substring(1), bos);
try (UnsynchronizedByteArrayOutputStream bos = new UnsynchronizedByteArrayOutputStream(100000)) {
// use extension instead of enum name, because of "jpeg"
ImageIO.write(thmBI, pt.extension.substring(1), bos);
XSLFPictureData pngPic = sheet.getSlideShow().addPicture(bos.toInputStream(), pt);
XSLFPictureData pngPic = sheet.getSlideShow().addPicture(bos.toInputStream(), pt);
XSLFPictureShape shape = sheet.createPicture(pngPic);
shape.setAnchor(anc);
shape.setSvgImage(svgPic);
return shape;
XSLFPictureShape shape = sheet.createPicture(pngPic);
shape.setAnchor(anc);
shape.setSvgImage(svgPic);
return shape;
}
}

View File

@ -1025,8 +1025,9 @@ public class SXSSFWorkbook implements Workbook {
//Substitute the template entries with the generated sheet data files
try (
InputStream is = bos.toInputStream();
ZipArchiveInputStream zis = new ZipArchiveInputStream(is);
ZipInputStreamZipEntrySource source = new ZipInputStreamZipEntrySource(
new ZipArchiveThresholdInputStream(new ZipArchiveInputStream(is)))
new ZipArchiveThresholdInputStream(zis))
) {
injectData(source, stream);
}

View File

@ -27,6 +27,7 @@ import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.InputStream;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.Collections;
@ -387,17 +388,27 @@ public class HemfPlusBrush {
return brushBytes;
}
/**
* @param continuedObjectData list of object data
* @return {@link EmfPlusBrushData}
* @throws IllegalStateException if the data cannot be processed
*/
public EmfPlusBrushData getBrushData(List<? extends EmfPlusObjectData> continuedObjectData) {
EmfPlusBrushData brushData = brushType.constructor.get();
byte[] buf = getRawData(continuedObjectData);
try {
brushData.init(new LittleEndianInputStream(new UnsynchronizedByteArrayInputStream(buf)), buf.length);
try (UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream(buf)){
brushData.init(new LittleEndianInputStream(bis), buf.length);
} catch (IOException e) {
throw new RuntimeException(e);
throw new IllegalStateException(e);
}
return brushData;
}
/**
* @param continuedObjectData list of object data
* @return byte array
* @throws IllegalStateException if the data cannot be processed
*/
public byte[] getRawData(List<? extends EmfPlusObjectData> continuedObjectData) {
try (UnsynchronizedByteArrayOutputStream bos = new UnsynchronizedByteArrayOutputStream()) {
bos.write(getBrushBytes());
@ -408,7 +419,7 @@ public class HemfPlusBrush {
}
return bos.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
throw new IllegalStateException(e);
}
}
@ -560,7 +571,7 @@ public class HemfPlusBrush {
}
if (isPreset() && (isBlendH() || isBlendV())) {
throw new RuntimeException("invalid combination of preset colors and blend factors v/h");
throw new IOException("invalid combination of preset colors and blend factors v/h");
}
size += (isPreset()) ? readColors(leis, d -> positions = d, c -> blendColors = c) : 0;
@ -764,7 +775,7 @@ public class HemfPlusBrush {
final boolean isPreset = PRESET_COLORS.isSet(dataFlags);
final boolean blendH = BLEND_FACTORS_H.isSet(dataFlags);
if (isPreset && blendH) {
throw new RuntimeException("invalid combination of preset colors and blend factors h");
throw new IOException("invalid combination of preset colors and blend factors h");
}
size += (isPreset) ? readColors(leis, d -> positions = d, c -> blendColors = c) : 0;
@ -777,7 +788,7 @@ public class HemfPlusBrush {
// A 32-bit unsigned integer that specifies the number of focus scales. This value MUST be 2.
int focusScaleCount = leis.readInt();
if (focusScaleCount != 2) {
throw new RuntimeException("invalid focus scale count");
throw new IOException("invalid focus scale count");
}
// A floating-point value that defines the horizontal/vertical focus scale.
// The focus scale MUST be a value between 0.0 and 1.0, exclusive.

View File

@ -252,7 +252,7 @@ public final class SlideShowDumper {
// Check for corrupt / lying ones
if (recordLen != 8 && (recordLen != (atomLen + 8))) {
out.printf(Locale.ROOT, ind + "** Atom length of $2d ($3d) doesn't match record length of %4d%n", "", atomLen, atomLen + 8, recordLen);
out.printf(Locale.ROOT, ind + "** Atom length of %2d (%3d) doesn't match record length of %4d%n", atomLen, atomLen + 8, recordLen);
}
// Print the record's details

View File

@ -190,11 +190,11 @@ public class HSSFColor implements Color {
Map<Integer,HSSFColor> result = new HashMap<>(eList.size() * 3 / 2);
for (Map.Entry<HSSFColorPredefined,HSSFColor> colorRef : eList.entrySet()) {
Integer index1 = (int)colorRef.getKey().getIndex();
Integer index1 = Integer.valueOf(colorRef.getKey().getIndex());
if (!result.containsKey(index1)) {
result.put(index1, colorRef.getValue());
}
Integer index2 = (int)colorRef.getKey().getIndex2();
Integer index2 = Integer.valueOf(colorRef.getKey().getIndex2());
if (index2 != -1 && !result.containsKey(index2)) {
result.put(index2, colorRef.getValue());
}

View File

@ -258,7 +258,7 @@ public final class CellUtil {
}
// Copy CellStyle
if (policy.isCopyCellStyle()) {
if (policy.isCopyCellStyle() && srcCell != null) {
if (srcCell.getSheet() != null && destCell.getSheet() != null &&
destCell.getSheet().getWorkbook() == srcCell.getSheet().getWorkbook()) {
destCell.setCellStyle(srcCell.getCellStyle());

View File

@ -136,26 +136,26 @@ public final class IOUtils {
checkByteSizeLimit(limit);
stream.mark(limit);
UnsynchronizedByteArrayOutputStream bos = new UnsynchronizedByteArrayOutputStream(limit);
copy(new BoundedInputStream(stream, limit), bos);
try (UnsynchronizedByteArrayOutputStream bos = new UnsynchronizedByteArrayOutputStream(limit)) {
copy(new BoundedInputStream(stream, limit), bos);
int readBytes = bos.size();
if (readBytes == 0) {
throw new EmptyFileException();
}
int readBytes = bos.size();
if (readBytes == 0) {
throw new EmptyFileException();
}
if (readBytes < limit) {
bos.write(new byte[limit-readBytes]);
if (readBytes < limit) {
bos.write(new byte[limit-readBytes]);
}
byte[] peekedBytes = bos.toByteArray();
if(stream instanceof PushbackInputStream) {
PushbackInputStream pin = (PushbackInputStream)stream;
pin.unread(peekedBytes, 0, readBytes);
} else {
stream.reset();
}
return peekedBytes;
}
byte[] peekedBytes = bos.toByteArray();
if(stream instanceof PushbackInputStream) {
PushbackInputStream pin = (PushbackInputStream)stream;
pin.unread(peekedBytes, 0, readBytes);
} else {
stream.reset();
}
return peekedBytes;
}
/**
@ -522,7 +522,7 @@ public final class IOUtils {
}
/*
* N.B. no need to synchronize this because: - we don't care if the buffer is created multiple times (the data
* is ignored) - we always use the same size buffer, so if it it is recreated it will still be OK (if the buffer
* is ignored) - we always use the same size buffer, so if it is recreated it will still be OK (if the buffer
* size were variable, we would need to synch. to ensure some other thread did not create a smaller one)
*/
if (SKIP_BYTE_BUFFER == null) {