mirror of https://github.com/apache/poi.git
Sonar fixes
git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1876157 13f79535-47bb-0310-9956-ffa450edef68
parent af890fba13
commit 074d68fedf
@@ -16,14 +16,16 @@
 ==================================================================== */
 package org.apache.poi.xssf.usermodel;
 
+import java.util.Objects;
+
 import org.apache.poi.ooxml.POIXMLException;
-import org.apache.poi.util.Internal;
 import org.apache.poi.ss.usermodel.Font;
 import org.apache.poi.ss.usermodel.FontCharset;
 import org.apache.poi.ss.usermodel.FontFamily;
 import org.apache.poi.ss.usermodel.FontScheme;
 import org.apache.poi.ss.usermodel.FontUnderline;
 import org.apache.poi.ss.usermodel.IndexedColors;
+import org.apache.poi.util.Internal;
 import org.apache.poi.xssf.model.StylesTable;
 import org.apache.poi.xssf.model.ThemesTable;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBooleanProperty;
@@ -39,8 +41,6 @@ import org.openxmlformats.schemas.spreadsheetml.x2006.main.STFontScheme;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.STUnderlineValues;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.STVerticalAlignRun;
 
-import java.util.Objects;
-
 /**
  * Represents a font used in a workbook.
  *
@@ -649,7 +649,6 @@ public class XSSFFont implements Font {
         && Objects.equals(this.getBold(), cf.getBold())
         && Objects.equals(this.getStrikeout(), cf.getStrikeout())
         && Objects.equals(this.getCharSet(), cf.getCharSet())
         && Objects.equals(this.getItalic(), cf.getItalic())
         && Objects.equals(this.getColor(), cf.getColor())
         && Objects.equals(this.getFamily(), cf.getFamily())
         && Objects.equals(this.getFontHeight(), cf.getFontHeight())

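The equals chain above leans on java.util.Objects.equals, which treats two nulls as equal and never throws on a null argument. A minimal sketch of that pattern outside POI (the FontKey type and its fields are made up for illustration):

    import java.util.Objects;

    // Illustrative value type: two nullable fields compared the way the
    // XSSFFont equals chain compares individual font attributes.
    final class FontKey {
        private final String name;    // may be null
        private final Integer height; // may be null

        FontKey(String name, Integer height) {
            this.name = name;
            this.height = height;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof FontKey)) return false;
            FontKey other = (FontKey) o;
            // true when both sides are null, false when only one is, no NPE either way
            return Objects.equals(name, other.name)
                    && Objects.equals(height, other.height);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, height);
        }
    }
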
@@ -22,7 +22,6 @@ import org.apache.poi.ss.usermodel.Color;
 import org.apache.poi.ss.usermodel.Font;
 import org.apache.poi.ss.usermodel.FontFormatting;
 import org.apache.poi.ss.usermodel.FontUnderline;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBooleanProperty;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColor;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFont;
 import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFontSize;
@@ -84,8 +83,7 @@ public class XSSFFontFormatting implements FontFormatting {
      */
     @Override
     public boolean isStruckout() {
-        for (CTBooleanProperty bProp : _font.getStrikeArray()) return bProp.getVal();
-        return false;
+        return _font.sizeOfStrikeArray() > 0 && _font.getStrikeArray(0).getVal();
     }
 
     /**

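The isStruckout rewrite replaces a for-loop that could only ever return on its first iteration with an explicit "first element or default" check (XmlBeans-generated types expose sizeOfXArray()/getXArray(int) accessors for this). A small sketch of the same shape over a plain list, assuming nothing POI-specific:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    final class FirstOrDefault {
        // Old shape: a loop that returns on its first pass, which Sonar flags.
        static boolean firstFlagLoop(List<Boolean> flags) {
            for (Boolean b : flags) return b;
            return false;
        }

        // New shape: say what is meant - the first element if present, else false.
        static boolean firstFlag(List<Boolean> flags) {
            return !flags.isEmpty() && flags.get(0);
        }

        public static void main(String[] args) {
            System.out.println(firstFlag(Collections.emptyList()));    // false
            System.out.println(firstFlag(Arrays.asList(true, false))); // true
        }
    }
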
@@ -1480,7 +1480,7 @@ public class XWPFParagraph implements IBodyElement, IRunBody, ISDTContents, Para
 
     /**
      * Appends a new field run to this paragraph
      *
      * @return a new field run
      */
     public XWPFFieldRun createFieldRun() {
@@ -1537,10 +1537,13 @@ public class XWPFParagraph implements IBodyElement, IRunBody, ISDTContents, Para
             return new XWPFHyperlinkRun(ctHyperLink, ctHyperLink.addNewR(), this);
         });
 
-        String rId = getPart().getPackagePart().addExternalRelationship(
-            uri, XWPFRelation.HYPERLINK.getRelation()
-        ).getId();
-        newRun.getCTHyperlink().setId(rId);
+        if (newRun != null) {
+            String rId = getPart().getPackagePart().addExternalRelationship(
+                uri, XWPFRelation.HYPERLINK.getRelation()
+            ).getId();
+            newRun.getCTHyperlink().setId(rId);
+        }
 
         return newRun;
     }

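For context, a hedged usage sketch of the hyperlink API touched above, assuming XWPFParagraph.createHyperlinkRun(String) behaves as in recent POI releases (the run is created, the external relationship registered, and the run can then be styled like any other run):

    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.poi.xwpf.usermodel.UnderlinePatterns;
    import org.apache.poi.xwpf.usermodel.XWPFDocument;
    import org.apache.poi.xwpf.usermodel.XWPFHyperlinkRun;
    import org.apache.poi.xwpf.usermodel.XWPFParagraph;

    public class HyperlinkRunExample {
        public static void main(String[] args) throws IOException {
            try (XWPFDocument doc = new XWPFDocument();
                 FileOutputStream out = new FileOutputStream("hyperlink.docx")) {
                XWPFParagraph p = doc.createParagraph();
                // Creates the run and wires the relationship id into it,
                // which is what the guarded block above takes care of.
                XWPFHyperlinkRun link = p.createHyperlinkRun("https://poi.apache.org/");
                link.setText("Apache POI");
                link.setUnderline(UnderlinePatterns.SINGLE);
                doc.write(out);
            }
        }
    }
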
@@ -39,7 +39,7 @@ public final class Chunk {
     /** May be null */
     private ChunkSeparator separator;
     /** The possible different commands we can hold */
-    protected CommandDefinition[] commandDefinitions;
+    private CommandDefinition[] commandDefinitions;
     /** The command+value pairs we hold */
     private Command[] commands;
     /* The blocks (if any) we hold */
@@ -89,10 +89,15 @@ public final class Chunk {
      *
      * @return the command definitions
      */
+    @SuppressWarnings("unused")
     public CommandDefinition[] getCommandDefinitions() {
         return commandDefinitions;
     }
 
+    void setCommandDefinitions(CommandDefinition[] commandDefinitions) {
+        this.commandDefinitions = commandDefinitions;
+    }
+
     public Command[] getCommands() {
         return commands;
     }
@@ -128,7 +133,7 @@ public final class Chunk {
      * our chunk type has, and figure out the
      * values for them.
      */
-    protected void processCommands() {
+    void processCommands() {
         if(commandDefinitions == null) {
             throw new IllegalStateException("You must supply the command definitions before calling processCommands!");
         }
@@ -162,8 +167,8 @@ public final class Chunk {
             switch(type) {
             case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7:
             case 11: case 21:
-            case 12: case 16: case 17: case 18: case 28: case 29:
-                // Offset is from start of chunk
+            case 12: case 16: case 17: case 28: case 29:
+                // Offset is from start of chunk (case 18 has been taken care of above)
                 break;
             default:
                 // Offset is from start of header!
@@ -183,16 +188,15 @@ public final class Chunk {
             try {
                 // Process
                 switch(type) {
-                // Types 0->7 = a flat at bit 0->7
+                // Types 0->7 = a flag at bit 0->7
                 case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7:
-                    int val = contents[offset] & (1<<type);
-                    command.value = Boolean.valueOf(val > 0);
+                    command.value = ((contents[offset] >>> type) & 1) == 1;
                     break;
                 case 8:
-                    command.value = Byte.valueOf(contents[offset]);
+                    command.value = contents[offset];
                     break;
                 case 9:
-                    command.value = Double.valueOf(LittleEndian.getDouble(contents, offset));
+                    command.value = LittleEndian.getDouble(contents, offset);
                     break;
                 case 12:
                     // A Little Endian String
@@ -221,14 +225,10 @@ public final class Chunk {
                     command.value = new String(contents, startsAt, strLen, header.getChunkCharset().name());
                     break;
                 case 25:
-                    command.value = Short.valueOf(
-                        LittleEndian.getShort(contents, offset)
-                    );
+                    command.value = LittleEndian.getShort(contents, offset);
                     break;
                 case 26:
-                    command.value = Integer.valueOf(
-                        LittleEndian.getInt(contents, offset)
-                    );
+                    command.value = LittleEndian.getInt(contents, offset);
                     break;
 
                 // Types 11 and 21 hold the offset to the blocks
@@ -297,12 +297,12 @@ public final class Chunk {
      * A special kind of command that holds the offset to
      * a block
      */
-    private static class BlockOffsetCommand extends Command {
+    private static final class BlockOffsetCommand extends Command {
         private BlockOffsetCommand(CommandDefinition definition) {
             super(definition, null);
         }
         private void setOffset(int offset) {
-            value = Integer.valueOf(offset);
+            value = offset;
         }
     }
 }

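The type 0-7 commands in the switch above store a boolean in a single bit of one byte; the rewrite tests the bit by shifting and comparing, with no boxing. A self-contained sketch of that bit test (plain Java, not the POI Chunk class):

    public class BitFlagExample {
        // True when bit 'bit' (0..7) of b is set, mirroring
        // ((contents[offset] >>> type) & 1) == 1 in the hunk above.
        static boolean isBitSet(byte b, int bit) {
            return ((b >>> bit) & 1) == 1;
        }

        public static void main(String[] args) {
            byte value = (byte) 0b1010_0001;
            System.out.println(isBitSet(value, 0)); // true
            System.out.println(isBitSet(value, 1)); // false
            System.out.println(isBitSet(value, 7)); // true - sign extension does not matter here
        }
    }
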
@@ -82,19 +82,19 @@ public final class ChunkFactory {
             }
 
             inp = new BufferedReader(new InputStreamReader(cpd, LocaleUtil.CHARSET_1252));
 
             while( (line = inp.readLine()) != null ) {
                 if (line.isEmpty() || "# \t".contains(line.substring(0,1))) {
                     continue;
                 }
 
                 // Start xxx
                 if(!line.matches("^start [0-9]+$")) {
                     throw new IllegalStateException("Expecting start xxx, found " + line);
                 }
                 int chunkType = Integer.parseInt(line.substring(6));
                 ArrayList<CommandDefinition> defsL = new ArrayList<>();
 
                 // Data entries
                 while( (line = inp.readLine()) != null ) {
                     if (line.startsWith("end")) {
@@ -104,15 +104,15 @@ public final class ChunkFactory {
                     int defType = Integer.parseInt(st.nextToken());
                     int offset = Integer.parseInt(st.nextToken());
                     String name = st.nextToken("\uffff").substring(1);
 
                     CommandDefinition def = new CommandDefinition(defType,offset,name);
                     defsL.add(def);
                 }
 
                 CommandDefinition[] defs = defsL.toArray(new CommandDefinition[0]);
 
                 // Add to the map
-                chunkCommandDefinitions.put(Integer.valueOf(chunkType), defs);
+                chunkCommandDefinitions.put(chunkType, defs);
             }
         } finally {
             if (inp != null) {
@@ -193,11 +193,11 @@ public final class ChunkFactory {
         Chunk chunk = new Chunk(header, trailer, separator, contents);
 
         // Feed in the stuff from chunks_parse_cmds.tbl
-        CommandDefinition[] defs = chunkCommandDefinitions.get(Integer.valueOf(header.getType()));
+        CommandDefinition[] defs = chunkCommandDefinitions.get(header.getType());
         if (defs == null) {
             defs = new CommandDefinition[0];
         }
-        chunk.commandDefinitions = defs;
+        chunk.setCommandDefinitions(defs);
 
         // Now get the chunk to process its commands
         chunk.processCommands();

@@ -894,7 +894,7 @@ public final class HemfPlusDraw {
             return LittleEndianConsts.BYTE_SIZE;
         }
         // ok we've read a EmfPlusInteger15
-        value[0] = ((value[0] << 8) | leis.readByte()) & 0x7FFF;
+        value[0] = ((value[0] << 8) | (leis.readByte() & 0xFF)) & 0x7FFF;
         return LittleEndianConsts.SHORT_SIZE;
     }
 

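The HemfPlusDraw fix masks the freshly read byte with 0xFF before OR-ing it in, because a Java byte is sign-extended when promoted to int. A minimal demonstration of the difference, independent of POI's stream classes:

    public class SignExtensionExample {
        public static void main(String[] args) {
            byte low = (byte) 0x9C;          // a byte read from the stream, high bit set
            int high = 0x03;                 // the 7 bits already read

            int wrong = ((high << 8) | low) & 0x7FFF;          // low sign-extends to 0xFFFFFF9C
            int right = ((high << 8) | (low & 0xFF)) & 0x7FFF; // mask keeps just the 8 data bits

            System.out.printf("wrong = 0x%04X%n", wrong); // 0x7F9C - high bits clobbered
            System.out.printf("right = 0x%04X%n", right); // 0x039C
        }
    }
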
@@ -24,7 +24,6 @@ import java.util.ArrayList;
 import java.util.Map;
 import java.util.function.Supplier;
 
-import org.apache.poi.hslf.util.MutableByteArrayOutputStream;
 import org.apache.poi.util.ArrayUtil;
 import org.apache.poi.util.LittleEndian;
 import org.apache.poi.util.Removal;
@@ -101,7 +100,7 @@ public abstract class RecordContainer extends Record
     }
 
     /**
      * Moves {@code number} child records from {@code oldLoc} to {@code newLoc}.
      * @param oldLoc the current location of the records to move
      * @param newLoc the new location for the records
      * @param number the number of records to move
@@ -161,9 +160,9 @@ public abstract class RecordContainer extends Record
 
     /**
      * Add a new child record onto a record's list of children.
      *
      * @param newChild the child record to be added
      * @return the position of the added child within the list, i.e. the last index
      */
     public int appendChildRecord(Record newChild) {
         return appendChild(newChild);
@@ -207,7 +206,7 @@ public abstract class RecordContainer extends Record
 
     /**
      * Moves the given Child Record to before the supplied record
      *
      * @deprecated method is not used within POI and will be removed
      */
     @Removal(version="3.19")
@@ -218,7 +217,7 @@ public abstract class RecordContainer extends Record
 
     /**
      * Moves the given Child Records to before the supplied record
      *
      * @deprecated method is not used within POI and will be removed
      */
     @Removal(version="3.19")
@@ -244,11 +243,11 @@ public abstract class RecordContainer extends Record
 
     /**
      * Moves the given Child Records to after the supplied record
      *
      * @param firstChild the first child to be moved
      * @param number the number of records to move
      * @param after the record after that the children are moved
      *
      * @deprecated method is not used within POI and will be removed
      */
     @Removal(version="3.19")
@@ -296,63 +295,31 @@ public abstract class RecordContainer extends Record
      * @param out the stream to write to
      */
     public void writeOut(byte headerA, byte headerB, long type, Record[] children, OutputStream out) throws IOException {
-        // If we have a mutable output stream, take advantage of that
-        if(out instanceof MutableByteArrayOutputStream) {
-            MutableByteArrayOutputStream mout =
-                (MutableByteArrayOutputStream)out;
-
-            // Grab current size
-            int oldSize = mout.getBytesWritten();
-
-            // Write out our header, less the size
-            mout.write(new byte[] {headerA,headerB});
-            byte[] typeB = new byte[2];
-            LittleEndian.putShort(typeB, 0, (short)type);
-            mout.write(typeB);
-            mout.write(new byte[4]);
-
-            // Write out the children
-            for (Record aChildren : children) {
-                aChildren.writeOut(mout);
-            }
-
-            // Update our header with the size
-            // Don't forget to knock 8 more off, since we don't include the
-            // header in the size
-            int length = mout.getBytesWritten() - oldSize - 8;
-            byte[] size = new byte[4];
-            LittleEndian.putInt(size,0,length);
-            mout.overwrite(size, oldSize+4);
-        } else {
-            // Going to have to do it a slower way, because we have
-            // to update the length come the end
-
-            // Create a ByteArrayOutputStream to hold everything in
-            ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
-            // Write out our header, less the size
-            baos.write(new byte[] {headerA,headerB});
-            byte[] typeB = new byte[2];
-            LittleEndian.putShort(typeB,0,(short)type);
-            baos.write(typeB);
-            baos.write(new byte[] {0,0,0,0});
-
-            // Write out our children
-            for (Record aChildren : children) {
-                aChildren.writeOut(baos);
-            }
-
-            // Grab the bytes back
-            byte[] toWrite = baos.toByteArray();
-
-            // Update our header with the size
-            // Don't forget to knock 8 more off, since we don't include the
-            // header in the size
-            LittleEndian.putInt(toWrite,4,(toWrite.length-8));
-
-            // Write out the bytes
-            out.write(toWrite);
-        }
+        // Create a ByteArrayOutputStream to hold everything in
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+        // Write out our header, less the size
+        baos.write(new byte[] {headerA,headerB});
+        byte[] typeB = new byte[2];
+        LittleEndian.putShort(typeB,0,(short)type);
+        baos.write(typeB);
+        baos.write(new byte[] {0,0,0,0});
+
+        // Write out our children
+        for (Record aChildren : children) {
+            aChildren.writeOut(baos);
+        }
+
+        // Grab the bytes back
+        byte[] toWrite = baos.toByteArray();
+
+        // Update our header with the size
+        // Don't forget to knock 8 more off, since we don't include the
+        // header in the size
+        LittleEndian.putInt(toWrite,4,(toWrite.length-8));
+
+        // Write out the bytes
+        out.write(toWrite);
     }
 
     /**

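The surviving writeOut path buffers the whole record, then patches the little-endian length into bytes 4-7 of the 8-byte header before writing. A standalone sketch of that layout using java.nio instead of POI's LittleEndian helper (method and parameter names here are illustrative, not POI API):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public class RecordHeaderWriteExample {
        // Writes: [optionsA, optionsB, type(LE16), length(LE32)] followed by the body,
        // where length excludes the 8 header bytes - the same rule as the hunk above.
        static void writeRecord(byte headerA, byte headerB, int type, byte[] body, OutputStream out)
                throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            baos.write(headerA);
            baos.write(headerB);
            baos.write(new byte[] {0, 0, 0, 0, 0, 0});      // placeholder for type + length
            baos.write(body);

            byte[] bytes = baos.toByteArray();
            ByteBuffer bb = ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN);
            bb.putShort(2, (short) type);                   // patch the record type
            bb.putInt(4, bytes.length - 8);                 // patch the length, minus the header
            out.write(bytes);
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            writeRecord((byte) 0x0F, (byte) 0x00, 1000, new byte[] {1, 2, 3, 4}, out);
            System.out.println(out.size());                 // 12 = 8 header bytes + 4 body bytes
        }
    }
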
@@ -1,41 +0,0 @@
-/* ====================================================================
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements. See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License. You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-==================================================================== */
-
-package org.apache.poi.hslf.util;
-
-import java.io.ByteArrayOutputStream;
-
-/**
- * This class doesn't work yet, but is here to show the idea of a
- * ByteArrayOutputStream where you can track how many bytes you've
- * already written, and go back and write over a previous part of the stream
- *
- * @author Nick Burch
- */
-
-public final class MutableByteArrayOutputStream extends ByteArrayOutputStream
-{
-    /** Return how many bytes we've stuffed in so far */
-    public int getBytesWritten() { return -1; }
-
-    /** Write some bytes to the array */
-    public void write(byte[] b) {}
-    public void write(int b) {}
-
-    /** Write some bytes to an earlier bit of the array */
-    public void overwrite(byte[] b, int startPos) {}
-}

@@ -154,9 +154,9 @@ public final class PAPFormattedDiskPage extends FormattedDiskPage {
     /**
      * Creates a byte array representation of this data structure. Suitable for
      * writing to a Word document.
      *
      * @param dataStream required if PAPX is too big to fit in FKP
      *
      * @return A byte array representing this data structure.
      * @throws IOException
      *             if an I/O error occurs.
@@ -325,9 +325,10 @@ public final class PAPFormattedDiskPage extends FormattedDiskPage {
 
         }
 
-        // LittleEndian.putInt(buf, fcOffset, papx.getEndBytes() + fcMin);
-        LittleEndian.putInt( buf, fcOffset,
-            translator.getByteIndex( papx.getEnd() ) );
+        if (papx != null) {
+            // LittleEndian.putInt(buf, fcOffset, papx.getEndBytes() + fcMin);
+            LittleEndian.putInt(buf, fcOffset, translator.getByteIndex(papx.getEnd()));
+        }
         return buf;
     }
 

@@ -90,6 +90,7 @@ public final class SprmUtils
                 LittleEndian.putShort(sprm, 2, (short)param);
                 break;
             case 6:
+                assert(varParam != null);
                 sprm = new byte[3 + varParam.length];
                 sprm[2] = (byte)varParam.length;
                 System.arraycopy(varParam, 0, sprm, 3, varParam.length);

@@ -33,7 +33,7 @@ import org.apache.poi.util.Internal;
 
 /**
  * Default implementation of {@link Field}
  *
  * @author Sergey Vladimirov (vlsergey {at} gmail {dot} com)
  */
 @Internal
@@ -48,7 +48,7 @@ public class FieldsImpl implements Fields
     {
         checkIndexForBinarySearch( list.size(), startIndex, endIndex );
 
-        int low = startIndex, mid = -1, high = endIndex - 1, result = 0;
+        int low = startIndex, mid = -1, high = endIndex - 1;
         while ( low <= high )
         {
             mid = ( low + high ) >>> 1;
@@ -79,7 +79,7 @@ public class FieldsImpl implements Fields
             }
             return -insertPoint - 1;
         }
-        return -mid - ( result >= 0 ? 1 : 2 );
+        return -mid - 1;
     }
 
     private static void checkIndexForBinarySearch( int length, int start,
@@ -127,7 +127,7 @@ public class FieldsImpl implements Fields
         if ( map == null || map.isEmpty() )
             return null;
 
-        return map.get( Integer.valueOf( offset ) );
+        return map.get(offset);
     }
 
     private Map<Integer, FieldImpl> parseFieldStructure(
@@ -145,14 +145,15 @@ public class FieldsImpl implements Fields
                 fields.size());
         for ( FieldImpl field : fields )
         {
-            result.put( Integer.valueOf( field.getFieldStartOffset() ), field );
+            result.put(field.getFieldStartOffset(), field );
         }
         return result;
     }
 
-    private void parseFieldStructureImpl( List<PlexOfField> plexOfFields,
-            int startOffsetInclusive, int endOffsetExclusive,
-            List<FieldImpl> result )
+    @SuppressWarnings("UnnecessaryContinue")
+    private void parseFieldStructureImpl(List<PlexOfField> plexOfFields,
+            int startOffsetInclusive, int endOffsetExclusive,
+            List<FieldImpl> result )
     {
         int next = startOffsetInclusive;
         while ( next < endOffsetExclusive )

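The binary-search hunks above drop an unused result variable and encode "not found" as a negative value, the same negative-encoding idea java.util.Arrays.binarySearch uses. A short illustration of decoding such a return value with plain JDK classes:

    import java.util.Arrays;

    public class InsertPointExample {
        public static void main(String[] args) {
            int[] sortedOffsets = {10, 20, 40, 80};

            int found = Arrays.binarySearch(sortedOffsets, 40);
            System.out.println(found);          // 2 - index of the match

            int missing = Arrays.binarySearch(sortedOffsets, 25);
            System.out.println(missing);        // -3, i.e. -(insertionPoint) - 1
            int insertionPoint = -missing - 1;
            System.out.println(insertionPoint); // 2 - 25 would be inserted before 40
        }
    }
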
@@ -22,76 +22,31 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
+
+import org.apache.poi.hdgf.chunks.ChunkFactory.CommandDefinition;
+import org.apache.poi.poifs.storage.RawDataUtil;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public final class TestChunks {
-    public static final byte[] data_a = new byte[] { 70, 0, 0, 0,
-        -1, -1, -1, -1, 2, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 68, 0, 0, 0, 0, 0,
-        0, 0, 2, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-        -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0,
-        0, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0,
-        0, 36, 0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
-        0, 0, 2, 0, 0, 0, 0, 0, 0, 0, -110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        -124, 0, 0, 0, 2, 0, 85, 73, 0, 0, 0, 0, 0, 0, -56, 63, 73, 0, 0, 0,
-        0, 0, 0, -64, 63, 63, 0, 0, 0, 0, 0, 0, -64, 63, 63, 0, 0, 0, 0, 0, 0,
-        -64, -65, 73, 0, 0, 0, 0, 0, 0, -16, 63, 73, 0, 0, 0, 0, 0, 0, -16, 63,
-        4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        1, -1, 3, 0, 0, 32, 0, 0, 0, 0, 0, -73, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
-        79, 0, 0, 0, 2, 0, 85, 32, 32, 64, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0,
-        0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0,
-        8, 8, 65, 0, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
-        0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 1, -13, 15, 0, 0, 0, 0,
-        -56, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 72, 0, 0, 0, 2, 0, 85, 63, 0, 0,
-        0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0,
-        0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 0, 0, 0, 0, 0, 0, -16, 63,
-        0, 0, 0, 0, 0, 0, -16, 63, 1, 0, 1, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0,
-        0, 1, -1, 15, 7, 0, 0, 0, 0, 101, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 28,
-        0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        -125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 2, 0, 85, 5, 0, 0,
-        0, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
-    };
-    public static final byte[] data_b = new byte[] { 70, 0, 0, 0,
-        -1, -1, -1, -1, 3, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 68, 0, 0, 0, 0, 0,
-        0, 0, 2, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, -1, -1, -1, -1,
-        0, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 0, 0, 0, 0, 0, 0,
-        0, 2, 0, 0, 0, 32, 0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 1, 0, 0, 0, 0, 0, 0, 0, -110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -124,
-        0, 0, 0, 2, 0, 85, 63, 0, 0, 0, 0, 0, 0, 33, 64, 63, 0, 0, 0, 0, 0, 0,
-        38, 64, 63, 0, 0, 0, 0, 0, 0, -64, 63, 63, 0, 0, 0, 0, 0, 0, -64, -65,
-        73, 0, 0, 0, 0, 0, 0, -16, 63, 73, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3,
-        0, 4, 32, 0, 0, 0, 0, 0, -56, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 72, 0, 0,
-        0, 2, 0, 85, 63, 0, 0, 0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48,
-        63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 0, 0,
-        0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, -16, 63, 1, 0, 1, 0, 0, 1, 1, 0,
-        7, 0, 0, 0, 0, 0, 0, 0, 1, -1, 15, 7, 0, 0, 0, 0, 101, 0, 0, 0, 1, 0, 0,
-        0, 1, 0, 0, 0, 28, 0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, -125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 2, 0,
-        85, 5, 0, 0, 0, 78, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55, 0, 0, 0, 2, 0, 0, 0,
-        0, 0, 0, 0, -122, 0, 0, 0, 1, 0, 80, 1, 0, 0, 0, 60, 0, 0, 0, 60, 0, 0,
-        0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-        0, 0, 0
-    };
 
+    private static byte[] data_a, data_b;
+
+    @BeforeClass
+    public static void setup() throws IOException {
+        data_a = RawDataUtil.decompress(
+            "H4sIAAAAAAAAAHNjYGD4DwRMQNqFAQygFAMTWAIbYIBqQqZRARMSOwNKMwOxChAzMoRIACkeNC3MUAwDjEjGTEISb" +
+            "wGLh3pCeCfsoYwD9vbojP1QqQ/2cAYLplNBIACV+8EeuzKE2/4DXaoAZm6HOhUE/CFOU1BwgCnEw+DgcIQxHXGrYv" +
+            "zMD6JOMCACwwNiC9SNF+zxMFC988GeEepUdrg/+MHMVKgnQFiGAR5F6KEFU4IMmpHYXBCXsUIdCQUApUvwomMCAAA=");
+        data_b = RawDataUtil.decompress(
+            "H4sIAAAAAAAAAHNjYGD4DwTMQNqFAQygFAMTWAIbYIBqQqZRATMSOwNuHgODAhAzMoRIACkONC1MUAwDjFB6EpJYC" +
+            "1hNqD2Ep+gAZajBGAfsYYz9nhDGB3s4A9OVYBCAysWpDu4uYFixKICZJ5Cc6YHitAv2eBioFn2wZwQZwsjIwA63gR" +
+            "/MTIUaj8IyDPCAY0F3EJIrYKAZic0FcRkrkPKDC55kQIR2G9iAAJAZNlDMii8EaAoA66WHVpECAAA=");
+    }
 
     @Test
-    public void testChunkHeaderA() throws Exception {
-        ChunkHeader h =
-            ChunkHeader.createChunkHeader(11, data_a, 0);
+    public void testChunkHeaderA() {
+        ChunkHeader h = ChunkHeader.createChunkHeader(11, data_a, 0);
 
         assertTrue(h instanceof ChunkHeaderV11);
         ChunkHeaderV11 header = (ChunkHeaderV11)h;
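The rewritten test keeps its fixture bytes as gzip-compressed, Base64-encoded strings and inflates them once in @BeforeClass. RawDataUtil.decompress is POI's own helper; a rough stand-in using only JDK classes might look like the sketch below (an assumption about its behaviour, not the actual implementation):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.Base64;
    import java.util.zip.GZIPInputStream;

    public class DecompressSketch {
        // Hypothetical equivalent of a helper like RawDataUtil.decompress:
        // Base64 text -> gzip bytes -> raw fixture bytes.
        static byte[] decompress(String base64) throws IOException {
            byte[] gz = Base64.getDecoder().decode(base64);
            try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(gz));
                 ByteArrayOutputStream out = new ByteArrayOutputStream()) {
                byte[] buf = new byte[4096];
                int n;
                while ((n = in.read(buf)) != -1) {
                    out.write(buf, 0, n);
                }
                return out.toByteArray();
            }
        }
    }
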
@@ -106,11 +61,10 @@ public static final byte[] data_b = new byte[] { 70, 0, 0, 0,
         assertTrue(header.hasTrailer());
         assertTrue(header.hasSeparator());
     }
 
     @Test
-    public void testChunkHeaderB() throws Exception {
-        ChunkHeader h =
-            ChunkHeader.createChunkHeader(11, data_b, 0);
+    public void testChunkHeaderB() {
+        ChunkHeader h = ChunkHeader.createChunkHeader(11, data_b, 0);
 
         assertTrue(h instanceof ChunkHeaderV11);
         ChunkHeaderV11 header = (ChunkHeaderV11)h;
@@ -147,16 +101,16 @@ public static final byte[] data_b = new byte[] { 70, 0, 0, 0,
 
         // Should have two virtual chunk commands, a
         // 10 (page sheet) and an 18
-        assertEquals(2, chunk.commandDefinitions.length);
+        assertEquals(2, chunk.getCommandDefinitions().length);
         assertEquals(0, chunk.getCommands().length);
 
-        assertEquals(10, chunk.commandDefinitions[0].getType());
-        assertEquals(0, chunk.commandDefinitions[0].getOffset());
-        assertEquals("PageSheet", chunk.commandDefinitions[0].getName());
+        assertEquals(10, chunk.getCommandDefinitions()[0].getType());
+        assertEquals(0, chunk.getCommandDefinitions()[0].getOffset());
+        assertEquals("PageSheet", chunk.getCommandDefinitions()[0].getName());
 
-        assertEquals(18, chunk.commandDefinitions[1].getType());
-        assertEquals(0, chunk.commandDefinitions[1].getOffset());
-        assertEquals("0", chunk.commandDefinitions[1].getName());
+        assertEquals(18, chunk.getCommandDefinitions()[1].getType());
+        assertEquals(0, chunk.getCommandDefinitions()[1].getOffset());
+        assertEquals("0", chunk.getCommandDefinitions()[1].getName());
     }
 
     @Test
@@ -183,16 +137,17 @@ public static final byte[] data_b = new byte[] { 70, 0, 0, 0,
 
         // Should have two virtual chunk commands, a
         // 10 (Unknown) and an 18
-        assertEquals(2, chunk.commandDefinitions.length);
+        final CommandDefinition[] cdef = chunk.getCommandDefinitions();
+        assertEquals(2, cdef.length);
         assertEquals(0, chunk.getCommands().length);
 
-        assertEquals(10, chunk.commandDefinitions[0].getType());
-        assertEquals(0, chunk.commandDefinitions[0].getOffset());
-        assertEquals("PropList", chunk.commandDefinitions[0].getName());
+        assertEquals(10, cdef[0].getType());
+        assertEquals(0, cdef[0].getOffset());
+        assertEquals("PropList", cdef[0].getName());
 
-        assertEquals(18, chunk.commandDefinitions[1].getType());
-        assertEquals(0, chunk.commandDefinitions[1].getOffset());
-        assertEquals("0", chunk.commandDefinitions[1].getName());
+        assertEquals(18, cdef[1].getType());
+        assertEquals(0, cdef[1].getOffset());
+        assertEquals("0", cdef[1].getName());
     }
 
     @Test
@@ -230,6 +185,5 @@ public static final byte[] data_b = new byte[] { 70, 0, 0, 0,
             assertNotNull(chunk.getHeader());
             assertNull(chunk.getTrailer());
             assertNotNull(chunk.getSeparator());
             offset += chunk.getOnDiskSize();
         }
     }