refactoring aggregate records to a separate hierarchy. just starting

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@683081 13f79535-47bb-0310-9956-ffa450edef68
Josh Micich 2008-08-06 01:39:44 +00:00
parent 1746dea8b1
commit f28c5a8f21
23 changed files with 1414 additions and 1051 deletions


@ -0,0 +1,326 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.model;
import java.util.List;
import org.apache.poi.hssf.record.BOFRecord;
import org.apache.poi.hssf.record.CalcCountRecord;
import org.apache.poi.hssf.record.CalcModeRecord;
import org.apache.poi.hssf.record.DateWindow1904Record;
import org.apache.poi.hssf.record.DefaultRowHeightRecord;
import org.apache.poi.hssf.record.DeltaRecord;
import org.apache.poi.hssf.record.DimensionsRecord;
import org.apache.poi.hssf.record.EOFRecord;
import org.apache.poi.hssf.record.GridsetRecord;
import org.apache.poi.hssf.record.GutsRecord;
import org.apache.poi.hssf.record.HorizontalPageBreakRecord;
import org.apache.poi.hssf.record.HyperlinkRecord;
import org.apache.poi.hssf.record.IndexRecord;
import org.apache.poi.hssf.record.IterationRecord;
import org.apache.poi.hssf.record.PaneRecord;
import org.apache.poi.hssf.record.PrecisionRecord;
import org.apache.poi.hssf.record.PrintGridlinesRecord;
import org.apache.poi.hssf.record.PrintHeadersRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase;
import org.apache.poi.hssf.record.RefModeRecord;
import org.apache.poi.hssf.record.SCLRecord;
import org.apache.poi.hssf.record.SaveRecalcRecord;
import org.apache.poi.hssf.record.SelectionRecord;
import org.apache.poi.hssf.record.UncalcedRecord;
import org.apache.poi.hssf.record.VerticalPageBreakRecord;
import org.apache.poi.hssf.record.WindowTwoRecord;
import org.apache.poi.hssf.record.aggregates.ConditionalFormattingTable;
import org.apache.poi.hssf.record.aggregates.DataValidityTable;
import org.apache.poi.hssf.record.aggregates.MergedCellsTable;
/**
* Finds correct insert positions for records in workbook streams<p/>
*
* See OOO excelfileformat.pdf sec. 4.2.5 'Record Order in a BIFF8 Workbook Stream'
*
* @author Josh Micich
*/
final class RecordOrderer {
// TODO - add UninterpretedRecord as base class for many of these
// unimplemented sids
// TODO - simplify logic using a generalised record ordering
private RecordOrderer() {
// no instances of this class
}
/**
* Adds the specified new record in the correct place in the sheet records list
*
*/
public static void addNewSheetRecord(List sheetRecords, RecordBase newRecord) {
int index = findSheetInsertPos(sheetRecords, newRecord.getClass());
sheetRecords.add(index, newRecord);
}
private static int findSheetInsertPos(List records, Class recClass) {
if (recClass == DataValidityTable.class) {
return findDataValidationTableInsertPos(records);
}
if (recClass == MergedCellsTable.class) {
return findInsertPosForNewMergedRecordTable(records);
}
if (recClass == ConditionalFormattingTable.class) {
return findInsertPosForNewCondFormatTable(records);
}
if (recClass == GutsRecord.class) {
return getGutsRecordInsertPos(records);
}
if (recClass == HorizontalPageBreakRecord.class) {
return getPageBreakRecordInsertPos(records, true);
}
if (recClass == VerticalPageBreakRecord.class) {
return getPageBreakRecordInsertPos(records, false);
}
throw new RuntimeException("Unexpected record class (" + recClass.getName() + ")");
}
private static int getPageBreakRecordInsertPos(List records, boolean isHorizontal) {
int dimensionsIndex = getDimensionsIndex(records);
int i = dimensionsIndex-1;
while (i > 0) {
i--;
Object rb = records.get(i);
if (isPageBreakPriorRecord(rb, isHorizontal)) {
return i+1;
}
}
throw new RuntimeException("Did not find insert point for GUTS");
}
private static boolean isPageBreakPriorRecord(Object rb, boolean newRecIsHorizontal) {
if (rb instanceof Record) {
Record record = (Record) rb;
switch (record.getSid()) {
case BOFRecord.sid:
case IndexRecord.sid:
// calc settings block
case UncalcedRecord.sid:
case CalcCountRecord.sid:
case CalcModeRecord.sid:
case PrecisionRecord.sid:
case RefModeRecord.sid:
case DeltaRecord.sid:
case IterationRecord.sid:
case DateWindow1904Record.sid:
case SaveRecalcRecord.sid:
// end calc settings
case PrintHeadersRecord.sid:
case PrintGridlinesRecord.sid:
case GridsetRecord.sid:
case DefaultRowHeightRecord.sid:
case 0x0081: // SHEETPR
return true;
}
switch (record.getSid()) {
// page settings block
case HorizontalPageBreakRecord.sid:
if (!newRecIsHorizontal) {
return true;
}
return false;
case VerticalPageBreakRecord.sid:
return false;
// next is case HeaderRecord.sid: case FooterRecord.sid:
// then more records in page settings block
}
}
return false;
}
/**
* Find correct position to add new CFHeader record
*/
private static int findInsertPosForNewCondFormatTable(List records) {
for (int i = records.size() - 2; i >= 0; i--) { // -2 to skip EOF record
Object rb = records.get(i);
if (rb instanceof MergedCellsTable) {
return i + 1;
}
Record rec = (Record) rb;
switch (rec.getSid()) {
case WindowTwoRecord.sid:
case SCLRecord.sid:
case PaneRecord.sid:
case SelectionRecord.sid:
case 0x0099:// STANDARDWIDTH
// MergedCellsTable usually here
case 0x015f:// LABELRANGES
case 0x00ef:// PHONETICPR
return i + 1;
}
}
throw new RuntimeException("Did not find Window2 record");
}
private static int findInsertPosForNewMergedRecordTable(List records) {
for (int i = records.size() - 2; i >= 0; i--) { // -2 to skip EOF record
Object rb = records.get(i);
Record rec = (Record) rb;
switch (rec.getSid()) {
case WindowTwoRecord.sid:
case SCLRecord.sid:
case PaneRecord.sid:
case SelectionRecord.sid:
case 0x0099:// STANDARDWIDTH
return i + 1;
}
}
throw new RuntimeException("Did not find Window2 record");
}
/**
* Finds the index where the sheet validations header record should be inserted
* @param records the records for this sheet
*
* + WINDOW2
* o SCL
* o PANE
* oo SELECTION
* o STANDARDWIDTH
* oo MERGEDCELLS
* o LABELRANGES
* o PHONETICPR
* o Conditional Formatting Table
* o Hyperlink Table
* o Data Validity Table
* o SHEETLAYOUT
* o SHEETPROTECTION
* o RANGEPROTECTION
* + EOF
*/
private static int findDataValidationTableInsertPos(List records) {
int i = records.size() - 1;
if (!(records.get(i) instanceof EOFRecord)) {
throw new IllegalStateException("Last sheet record should be EOFRecord");
}
while (i > 0) {
i--;
Object rb = records.get(i);
if (isDVTPriorRecord(rb)) {
Record nextRec = (Record) records.get(i + 1);
if (!isDVTSubsequentRecord(nextRec.getSid())) {
throw new IllegalStateException("Unexpected (" + nextRec.getClass().getName()
+ ") found after (" + rb.getClass().getName() + ")");
}
return i+1;
}
Record rec = (Record) rb;
if (!isDVTSubsequentRecord(rec.getSid())) {
throw new IllegalStateException("Unexpected (" + rec.getClass().getName()
+ ") while looking for DV Table insert pos");
}
}
return 0;
}
private static boolean isDVTPriorRecord(Object rb) {
if (rb instanceof MergedCellsTable || rb instanceof ConditionalFormattingTable) {
return true;
}
short sid = ((Record)rb).getSid();
switch(sid) {
case WindowTwoRecord.sid:
case 0x00A0: // SCL
case PaneRecord.sid:
case SelectionRecord.sid:
case 0x0099: // STANDARDWIDTH
// MergedCellsTable
case 0x015F: // LABELRANGES
case 0x00EF: // PHONETICPR
// ConditionalFormattingTable
case HyperlinkRecord.sid:
case 0x0800: // QUICKTIP
return true;
}
return false;
}
private static boolean isDVTSubsequentRecord(short sid) {
switch(sid) {
case 0x0862: // SHEETLAYOUT
case 0x0867: // SHEETPROTECTION
case 0x0868: // RANGEPROTECTION
case EOFRecord.sid:
return true;
}
return false;
}
/**
* DIMENSIONS record is always present
*/
private static int getDimensionsIndex(List records) {
int nRecs = records.size();
for(int i=0; i<nRecs; i++) {
if(records.get(i) instanceof DimensionsRecord) {
return i;
}
}
// worksheet stream is seriously broken
throw new RuntimeException("DimensionsRecord not found");
}
private static int getGutsRecordInsertPos(List records) {
int dimensionsIndex = getDimensionsIndex(records);
int i = dimensionsIndex-1;
while (i > 0) {
i--;
Object rb = records.get(i);
if (isGutsPriorRecord(rb)) {
return i+1;
}
}
throw new RuntimeException("Did not find insert point for GUTS");
}
private static boolean isGutsPriorRecord(Object rb) {
if (rb instanceof Record) {
Record record = (Record) rb;
switch (record.getSid()) {
case BOFRecord.sid:
case IndexRecord.sid:
// calc settings block
case UncalcedRecord.sid:
case CalcCountRecord.sid:
case CalcModeRecord.sid:
case PrecisionRecord.sid:
case RefModeRecord.sid:
case DeltaRecord.sid:
case IterationRecord.sid:
case DateWindow1904Record.sid:
case SaveRecalcRecord.sid:
// end calc settings
case PrintHeadersRecord.sid:
case PrintGridlinesRecord.sid:
case GridsetRecord.sid:
return true;
// DefaultRowHeightRecord.sid is next
}
}
return false;
}
}
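
For orientation, a minimal usage sketch (illustrative only, not part of this change set) of how a caller in org.apache.poi.hssf.model might use the orderer to lazily create a sheet aggregate; the helper name and the surrounding Sheet plumbing are assumptions.

package org.apache.poi.hssf.model;

import java.util.List;

import org.apache.poi.hssf.record.aggregates.DataValidityTable;

final class RecordOrdererUsageSketch {
    /** Hypothetical helper: create the data validity table on first use. */
    static DataValidityTable getOrCreateDataValidityTable(List sheetRecords, DataValidityTable existing) {
        if (existing != null) {
            return existing;
        }
        DataValidityTable dvt = new DataValidityTable();
        // RecordOrderer walks backwards from EOF and inserts the new aggregate
        // after MERGEDCELLS / conditional formats / hyperlinks, before SHEETLAYOUT etc.
        RecordOrderer.addNewSheetRecord(sheetRecords, dvt);
        return dvt;
    }
}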

File diff suppressed because it is too large


@ -1,4 +1,3 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@ -18,43 +17,51 @@
package org.apache.poi.hssf.record;
import java.util.Iterator;
/**
* HorizontalPageBreak record that stores page breaks at rows
* <p>
* This class is just used so that SID compares work properly in the RecordFactory
* HorizontalPageBreak (0x001B) record that stores page breaks at rows <p/>
*
* @see PageBreakRecord
* @author Danny Mui (dmui at apache dot org)
*/
public class HorizontalPageBreakRecord extends PageBreakRecord {
public final class HorizontalPageBreakRecord extends PageBreakRecord {
public static final short sid = PageBreakRecord.HORIZONTAL_SID;
public static final short sid = 0x001B;
/**
*
* Creates an empty horizontal page break record
*/
public HorizontalPageBreakRecord() {
super();
//
}
/**
* @param sid
*/
public HorizontalPageBreakRecord(short sid) {
super(sid);
}
/**
* @param in the RecordInputstream to read the record from
* @param in
* the RecordInputstream to read the record from
*/
public HorizontalPageBreakRecord(RecordInputStream in) {
super(in);
}
/* (non-Javadoc)
* @see org.apache.poi.hssf.record.Record#getSid()
*/
protected void validateSid(short id) {
if (id != getSid()) {
throw new RecordFormatException(
"NOT A HorizontalPageBreak or VerticalPageBreak RECORD!! " + id);
}
}
public short getSid() {
return sid;
}
public Object clone() {
PageBreakRecord result = new HorizontalPageBreakRecord();
Iterator iterator = getBreaksIterator();
while (iterator.hasNext()) {
Break original = (Break) iterator.next();
result.addBreak(original.main, original.subFrom, original.subTo);
}
return result;
}
}


@ -32,68 +32,51 @@ import org.apache.poi.util.LittleEndian;
*/
public final class MergeCellsRecord extends Record {
public final static short sid = 0x00E5;
private CellRangeAddressList _regions;
/** sometimes the regions array is shared with other MergedCellsRecords */
private CellRangeAddress[] _regions;
private final int _startIndex;
private final int _numberOfRegions;
/**
* Creates an empty <tt>MergedCellsRecord</tt>
*/
public MergeCellsRecord() {
_regions = new CellRangeAddressList();
public MergeCellsRecord(CellRangeAddress[] regions, int startIndex, int numberOfRegions) {
_regions = regions;
_startIndex = startIndex;
_numberOfRegions = numberOfRegions;
}
/**
* Constructs a MergedCellsRecord and sets its fields appropriately
* @param in the RecordInputstream to read the record from
*/
public MergeCellsRecord(RecordInputStream in) {
super(in);
int nRegions = in.readUShort();
CellRangeAddress[] cras = new CellRangeAddress[nRegions];
for (int i = 0; i < nRegions; i++) {
cras[i] = new CellRangeAddress(in);
}
_numberOfRegions = nRegions;
_startIndex = 0;
_regions = cras;
}
protected void fillFields(RecordInputStream in) {
_regions = new CellRangeAddressList(in);
throw new RuntimeException("obsolete");
}
/**
* get the number of merged areas. If this drops down to 0 you should just go
* ahead and delete the record.
* @return number of areas
*/
public short getNumAreas() {
return (short)_regions.countRanges();
}
/**
* Add an area to consider a merged cell. The index returned is only guaranteed to
* be correct provided you do not add ahead of or remove ahead of it (in which case
* you should increment or decrement appropriately... in other words it's an ArrayList)
*
* @param firstRow - the upper left hand corner's row
* @param firstCol - the upper left hand corner's col
* @param lastRow - the lower right hand corner's row
* @param lastCol - the lower right hand corner's col
* @return new index of said area (don't depend on it if you add/remove)
*/
public void addArea(int firstRow, int firstCol, int lastRow, int lastCol) {
_regions.addCellRangeAddress(firstRow, firstCol, lastRow, lastCol);
}
/**
* essentially unmerge the cells in the "area" stored at the passed in index
* @param areaIndex
*/
public void removeAreaAt(int areaIndex) {
_regions.remove(areaIndex);
return (short)_numberOfRegions;
}
/**
* @return MergedRegion at the given index representing the area that is Merged (r1,c1 - r2,c2)
*/
public CellRangeAddress getAreaAt(int index) {
return _regions.getCellRangeAddress(index);
return _regions[_startIndex + index];
}
public int getRecordSize() {
return 4 + _regions.getSize();
return 4 + CellRangeAddressList.getEncodedSize(_numberOfRegions);
}
public short getSid() {
@ -101,11 +84,16 @@ public final class MergeCellsRecord extends Record {
}
public int serialize(int offset, byte [] data) {
int dataSize = _regions.getSize();
int dataSize = CellRangeAddressList.getEncodedSize(_numberOfRegions);
LittleEndian.putShort(data, offset + 0, sid);
LittleEndian.putUShort(data, offset + 0, sid);
LittleEndian.putUShort(data, offset + 2, dataSize);
_regions.serialize(offset + 4, data);
int nItems = _numberOfRegions;
LittleEndian.putUShort(data, offset + 4, nItems);
int pos = 6;
for (int i = 0; i < _numberOfRegions; i++) {
pos += _regions[_startIndex + i].serialize(offset+pos, data);
}
return 4 + dataSize;
}
@ -113,18 +101,17 @@ public final class MergeCellsRecord extends Record {
StringBuffer retval = new StringBuffer();
retval.append("[MERGEDCELLS]").append("\n");
retval.append(" .sid =").append(sid).append("\n");
retval.append(" .numregions =").append(getNumAreas())
.append("\n");
for (int k = 0; k < _regions.countRanges(); k++) {
CellRangeAddress region = _regions.getCellRangeAddress(k);
for (int k = 0; k < _numberOfRegions; k++) {
CellRangeAddress region = _regions[_startIndex + k];
retval.append(" .rowfrom =").append(region.getFirstRow())
.append("\n");
retval.append(" .colfrom =").append(region.getFirstColumn())
.append("\n");
retval.append(" .rowto =").append(region.getLastRow())
.append("\n");
retval.append(" .colfrom =").append(region.getFirstColumn())
.append("\n");
retval.append(" .colto =").append(region.getLastColumn())
.append("\n");
}
@ -140,13 +127,11 @@ public final class MergeCellsRecord extends Record {
}
public Object clone() {
MergeCellsRecord rec = new MergeCellsRecord();
for (int k = 0; k < _regions.countRanges(); k++) {
CellRangeAddress oldRegion = _regions.getCellRangeAddress(k);
rec.addArea(oldRegion.getFirstRow(), oldRegion.getFirstColumn(),
oldRegion.getLastRow(), oldRegion.getLastColumn());
int nRegions = _numberOfRegions;
CellRangeAddress[] clonedRegions = new CellRangeAddress[nRegions];
for (int i = 0; i < clonedRegions.length; i++) {
clonedRegions[i] = _regions[_startIndex + i].copy();
}
return rec;
return new MergeCellsRecord(clonedRegions, 0, nRegions);
}
}
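
A small sketch (assumed usage, not from this commit) showing the point of the shared-array constructor: several MergeCellsRecords can present different windows onto one CellRangeAddress array without copying it.

package org.apache.poi.hssf.record;

import org.apache.poi.hssf.util.CellRangeAddress;

public final class MergeCellsRecordSketch {
    public static void main(String[] args) {
        CellRangeAddress[] regions = {
            new CellRangeAddress(0, 1, 0, 1),
            new CellRangeAddress(2, 3, 0, 1),
            new CellRangeAddress(4, 5, 0, 1),
            new CellRangeAddress(6, 7, 0, 1),
            new CellRangeAddress(8, 9, 0, 1),
        };
        // two records sharing the same backing array: regions 0-2 and regions 3-4
        MergeCellsRecord first = new MergeCellsRecord(regions, 0, 3);
        MergeCellsRecord second = new MergeCellsRecord(regions, 3, 2);
        System.out.println(first.getNumAreas());                 // 3
        System.out.println(second.getAreaAt(0).getFirstRow());   // 6
    }
}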


@ -1,4 +1,3 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@ -19,7 +18,6 @@
package org.apache.poi.hssf.record;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@ -38,13 +36,12 @@ import org.apache.poi.util.LittleEndian;
* @see VerticalPageBreakRecord
* @author Danny Mui (dmui at apache dot org)
*/
public class PageBreakRecord extends Record {
public static final short HORIZONTAL_SID = (short)0x1B;
public static final short VERTICAL_SID = (short)0x1A;
public short sid;
private short numBreaks;
private List breaks;
private Map BreakMap;
public abstract class PageBreakRecord extends Record {
private static final boolean IS_EMPTY_RECORD_WRITTEN = false; //TODO - flip
private static final int[] EMPTY_INT_ARRAY = { };
private List _breaks;
private Map _breakMap;
/**
* Since both records store 2byte integers (short), no point in
@ -53,116 +50,105 @@ public class PageBreakRecord extends Record {
* The subs (rows or columns, don't seem to be able to set but excel sets
* them automatically)
*/
public class Break
{
public class Break {
public short main;
public short subFrom;
public short subTo;
public static final int ENCODED_SIZE = 6;
public int main;
public int subFrom;
public int subTo;
public Break(short main, short subFrom, short subTo)
public Break(int main, int subFrom, int subTo)
{
this.main = main;
this.subFrom = subFrom;
this.subTo = subTo;
}
public Break(RecordInputStream in) {
main = in.readUShort() - 1;
subFrom = in.readUShort();
subTo = in.readUShort();
}
public PageBreakRecord()
{
public int serialize(int offset, byte[] data) {
LittleEndian.putUShort(data, offset + 0, main + 1);
LittleEndian.putUShort(data, offset + 2, subFrom);
LittleEndian.putUShort(data, offset + 4, subTo);
return ENCODED_SIZE;
}
}
/**
*
* @param sid
*/
public PageBreakRecord(short sid) {
super();
this.sid = sid;
protected PageBreakRecord() {
_breaks = new ArrayList();
_breakMap = new HashMap();
}
public PageBreakRecord(RecordInputStream in)
{
protected PageBreakRecord(RecordInputStream in) {
super(in);
this.sid = in.getSid();
}
protected void fillFields(RecordInputStream in)
{
short loadedBreaks = in.readShort();
setNumBreaks(loadedBreaks);
for(int k = 0; k < loadedBreaks; k++)
{
addBreak((short)(in.readShort()-1), in.readShort(), in.readShort());
int nBreaks = in.readShort();
_breaks = new ArrayList(nBreaks + 2);
_breakMap = new HashMap();
for(int k = 0; k < nBreaks; k++) {
Break br = new Break(in);
_breaks.add(br);
_breakMap.put(new Integer(br.main), br);
}
}
public short getSid()
{
return sid;
private int getDataSize() {
return 2 + _breaks.size() * Break.ENCODED_SIZE;
}
public int getRecordSize() {
int nBreaks = _breaks.size();
if (!IS_EMPTY_RECORD_WRITTEN && nBreaks < 1) {
return 0;
}
return 4 + getDataSize();
}
public int serialize(int offset, byte data[])
{
int recordsize = getRecordSize();
public final int serialize(int offset, byte data[]) {
int nBreaks = _breaks.size();
if (!IS_EMPTY_RECORD_WRITTEN && nBreaks < 1) {
return 0;
}
int dataSize = getDataSize();
LittleEndian.putUShort(data, offset + 0, getSid());
LittleEndian.putUShort(data, offset + 2, dataSize);
LittleEndian.putUShort(data, offset + 4, nBreaks);
int pos = 6;
LittleEndian.putShort(data, offset + 0, getSid());
LittleEndian.putShort(data, offset + 2, (short)(recordsize - 4));
LittleEndian.putShort(data, offset + 4, getNumBreaks());
for(Iterator iterator = getBreaksIterator(); iterator.hasNext();)
{
Break Break = (Break)iterator.next();
LittleEndian.putShort(data, offset + pos, (short)(Break.main + 1));
pos += 2;
LittleEndian.putShort(data, offset + pos, Break.subFrom);
pos += 2;
LittleEndian.putShort(data, offset + pos, Break.subTo);
pos += 2;
for (int i=0; i<nBreaks; i++) {
Break br = (Break)_breaks.get(i);
pos += br.serialize(offset+pos, data);
}
return recordsize;
return 4 + dataSize;
}
protected void validateSid(short id)
{
if(id != HORIZONTAL_SID && id != VERTICAL_SID)
throw new RecordFormatException("NOT A HorizontalPageBreak or VerticalPageBreak RECORD!! " + id);
else
return;
public int getNumBreaks() {
return _breaks.size();
}
public short getNumBreaks()
{
return breaks != null ? (short)breaks.size() : numBreaks;
}
public void setNumBreaks(short numBreaks)
{
this.numBreaks = numBreaks;
}
public Iterator getBreaksIterator()
{
if(breaks == null)
return Collections.EMPTY_LIST.iterator();
else
return breaks.iterator();
public final Iterator getBreaksIterator() {
return _breaks.iterator();
}
public String toString()
{
StringBuffer retval = new StringBuffer();
if (getSid() != HORIZONTAL_SID && getSid()!= VERTICAL_SID)
return "[INVALIDPAGEBREAK]\n .sid ="+getSid()+"[INVALIDPAGEBREAK]";
String label;
String mainLabel;
String subLabel;
if (getSid() == HORIZONTAL_SID) {
if (getSid() == HorizontalPageBreakRecord.sid) {
label = "HORIZONTALPAGEBREAK";
mainLabel = "row";
subLabel = "col";
@ -192,46 +178,33 @@ public class PageBreakRecord extends Record {
/**
* Adds the page break at the specified parameters
* @param main Depending on sid, will determine row or column to put page break (zero-based)
* @param subFrom No user-interface to set (defaults to minumum, 0)
* @param subFrom No user-interface to set (defaults to minimum, 0)
* @param subTo No user-interface to set
*/
public void addBreak(short main, short subFrom, short subTo)
{
if(breaks == null)
{
breaks = new ArrayList(getNumBreaks() + 10);
BreakMap = new HashMap();
}
public void addBreak(int main, int subFrom, int subTo) {
Integer key = new Integer(main);
Break region = (Break)BreakMap.get(key);
if(region != null)
{
Break region = (Break)_breakMap.get(key);
if(region == null) {
region = new Break(main, subFrom, subTo);
_breakMap.put(key, region);
_breaks.add(region);
} else {
region.main = main;
region.subFrom = subFrom;
region.subTo = subTo;
} else
{
region = new Break(main, subFrom, subTo);
breaks.add(region);
}
BreakMap.put(key, region);
}
/**
* Removes the break indicated by the parameter
* @param main (zero-based)
*/
public void removeBreak(short main)
{
public final void removeBreak(int main) {
Integer rowKey = new Integer(main);
Break region = (Break)BreakMap.get(rowKey);
breaks.remove(region);
BreakMap.remove(rowKey);
}
public int getRecordSize()
{
return 6 + getNumBreaks() * 6;
Break region = (Break)_breakMap.get(rowKey);
_breaks.remove(region);
_breakMap.remove(rowKey);
}
/**
@ -239,26 +212,22 @@ public class PageBreakRecord extends Record {
* @param main FIXME: Document this!
* @return The Break or null if no break exists at the row/col specified.
*/
public Break getBreak(short main)
{
if (BreakMap == null)
return null;
public final Break getBreak(int main) {
Integer rowKey = new Integer(main);
return (Break)BreakMap.get(rowKey);
}
/* Clones the page break record
* @see java.lang.Object#clone()
*/
public Object clone() {
PageBreakRecord record = new PageBreakRecord(getSid());
Iterator iterator = getBreaksIterator();
while (iterator.hasNext()) {
Break original = (Break)iterator.next();
record.addBreak(original.main, original.subFrom, original.subTo);
}
return record;
return (Break)_breakMap.get(rowKey);
}
public final int[] getBreaks() {
int count = getNumBreaks();
if (count < 1) {
return EMPTY_INT_ARRAY;
}
int[] result = new int[count];
for (int i=0; i<count; i++) {
Break breakItem = (Break)_breaks.get(i);
result[i] = breakItem.main;
}
return result;
}
}
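
A quick sketch (illustrative only) of the break bookkeeping: addBreak is keyed by the main row/column index, so re-adding the same index updates the existing Break rather than duplicating it. The subFrom/subTo values here are arbitrary.

package org.apache.poi.hssf.record;

public final class PageBreakSketch {
    public static void main(String[] args) {
        PageBreakRecord rec = new HorizontalPageBreakRecord();
        rec.addBreak(4, 0, 255);  // break below row 4 (zero-based)
        rec.addBreak(9, 0, 255);
        rec.addBreak(4, 0, 255);  // same main index: updates the first Break, no duplicate
        System.out.println(rec.getNumBreaks());  // 2
        int[] rows = rec.getBreaks();
        for (int i = 0; i < rows.length; i++) {
            System.out.println("break after row " + rows[i]);  // 4, then 9
        }
    }
}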


@ -1,4 +1,3 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@ -16,7 +15,6 @@
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record;
import java.io.ByteArrayInputStream;
@ -32,15 +30,13 @@ import java.io.ByteArrayInputStream;
* @author Jason Height (jheight at chariot dot net dot au)
* @version 2.0-pre
*/
public abstract class Record
{
public abstract class Record extends RecordBase {
/**
* instantiates a blank record strictly for ID matching
*/
public Record()
protected Record()
{
}
@ -49,7 +45,7 @@ public abstract class Record
*
* @param in the RecordInputstream to read the record from
*/
public Record(RecordInputStream in)
protected Record(RecordInputStream in)
{
validateSid(in.getSid());
fillFields(in);
@ -89,17 +85,6 @@ public abstract class Record
return retval;
}
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @param offset to begin writing at
* @param data byte array containing instance data
* @return number of bytes written
*/
public abstract int serialize(int offset, byte [] data);
/**
* gives the current serialized size of the record. Should include the sid and reclength (4 bytes).


@ -0,0 +1,42 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record;
/**
* Common base class of {@link Record} and {@link RecordAggregate}
*
* @author Josh Micich
*/
public abstract class RecordBase {
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @param offset to begin writing at
* @param data byte array containing instance data
* @return number of bytes written
*/
public abstract int serialize(int offset, byte[] data);
/**
* gives the current serialized size of the record. Should include the sid
* and reclength (4 bytes).
*/
public abstract int getRecordSize();
}
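
The point of the new base class is that a sheet's record list can now mix plain Records and RecordAggregates and still be serialized uniformly. A minimal sketch of such a caller (the helper itself is hypothetical):

package org.apache.poi.hssf.record;

import java.util.List;

public final class RecordBaseSketch {
    /** Hypothetical helper: serialize a mixed list of Records and aggregates. */
    public static int serializeAll(List recordBases, int offset, byte[] data) {
        int pos = offset;
        for (int i = 0; i < recordBases.size(); i++) {
            RecordBase rb = (RecordBase) recordBases.get(i);
            pos += rb.serialize(pos, data);  // same call whether rb is a Record or an aggregate
        }
        return pos - offset;
    }
}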


@ -1,4 +1,3 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@ -18,29 +17,23 @@
package org.apache.poi.hssf.record;
import java.util.Iterator;
/**
* VerticalPageBreak record that stores page breaks at columns
* <p>
* This class is just used so that SID compares work properly in the RecordFactory
* VerticalPageBreak (0x001A) record that stores page breaks at columns<p/>
*
* @see PageBreakRecord
* @author Danny Mui (dmui at apache dot org)
*/
public class VerticalPageBreakRecord extends PageBreakRecord {
public final class VerticalPageBreakRecord extends PageBreakRecord {
public static final short sid = PageBreakRecord.VERTICAL_SID;
public static final short sid = 0x001A;
/**
*
* Creates an empty vertical page break record
*/
public VerticalPageBreakRecord() {
super();
}
/**
* @param sid
*/
public VerticalPageBreakRecord(short sid) {
super(sid);
}
/**
@ -50,11 +43,24 @@ public class VerticalPageBreakRecord extends PageBreakRecord {
super(in);
}
/* (non-Javadoc)
* @see org.apache.poi.hssf.record.Record#getSid()
*/
protected void validateSid(short id) {
if (id != getSid()) {
throw new RecordFormatException(
"NOT A HorizontalPageBreak or VerticalPageBreak RECORD!! " + id);
}
}
public short getSid() {
return sid;
}
public Object clone() {
PageBreakRecord result = new VerticalPageBreakRecord();
Iterator iterator = getBreaksIterator();
while (iterator.hasNext()) {
Break original = (Break) iterator.next();
result.addBreak(original.main, original.subFrom, original.subTo);
}
return result;
}
}


@ -14,19 +14,19 @@
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.aggregates;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.poi.hssf.model.RecordStream;
import org.apache.poi.hssf.record.CFHeaderRecord;
import org.apache.poi.hssf.record.CFRuleRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.util.CellRangeAddress;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
/**
* CFRecordsAggregate - aggregates Conditional Formatting records CFHeaderRecord
@ -36,15 +36,12 @@ import org.apache.poi.util.POILogger;
* @author Dmitriy Kumshayev
*
*/
public final class CFRecordsAggregate extends Record
{
public final class CFRecordsAggregate extends Record {
/** Excel allows up to 3 conditional formatting rules */
private static final int MAX_CONDTIONAL_FORMAT_RULES = 3;
public final static short sid = -2008; // not a real BIFF record
private static POILogger log = POILogFactory.getLogger(CFRecordsAggregate.class);
private final CFHeaderRecord header;
/** List of CFRuleRecord objects */
@ -78,9 +75,8 @@ public final class CFRecordsAggregate extends Record
* @param offset - position of {@link CFHeaderRecord} object in the list of Record objects
* @return CFRecordsAggregate object
*/
public static CFRecordsAggregate createCFAggregate(List recs, int pOffset)
{
Record rec = ( Record ) recs.get(pOffset);
public static CFRecordsAggregate createCFAggregate(RecordStream rs) {
Record rec = rs.getNext();
if (rec.getSid() != CFHeaderRecord.sid) {
throw new IllegalStateException("next record sid was " + rec.getSid()
+ " instead of " + CFHeaderRecord.sid + " as expected");
@ -90,35 +86,10 @@ public final class CFRecordsAggregate extends Record
int nRules = header.getNumberOfConditionalFormats();
CFRuleRecord[] rules = new CFRuleRecord[nRules];
int offset = pOffset;
int countFound = 0;
while (countFound < rules.length) {
offset++;
if(offset>=recs.size()) {
break;
}
rec = (Record)recs.get(offset);
if(rec instanceof CFRuleRecord) {
rules[countFound] = (CFRuleRecord) rec;
countFound++;
} else {
break;
}
for (int i = 0; i < rules.length; i++) {
rules[i] = (CFRuleRecord) rs.getNext();
}
if (countFound < nRules)
{ // TODO -(MAR-2008) can this ever happen? write junit
if (log.check(POILogger.DEBUG))
{
log.log(POILogger.DEBUG, "Expected " + nRules + " Conditional Formats, "
+ "but found " + countFound + " rules");
}
header.setNumberOfConditionalFormats(nRules);
CFRuleRecord[] lessRules = new CFRuleRecord[countFound];
System.arraycopy(rules, 0, lessRules, 0, countFound);
rules = lessRules;
}
return new CFRecordsAggregate(header, rules);
}


@ -1,71 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.aggregates;
import java.util.ArrayList;
import java.util.List;
import org.apache.poi.hssf.model.RecordStream;
import org.apache.poi.hssf.record.ColumnInfoRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordInputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* @author Glen Stampoultzis
* @version $Id$
*/
public class ColumnInfoRecordsAggregate
extends Record
{
// int size = 0;
List records = null;
public final class ColumnInfoRecordsAggregate extends RecordAggregate {
private final List records;
public ColumnInfoRecordsAggregate()
{
/**
* Creates an empty aggregate
*/
public ColumnInfoRecordsAggregate() {
records = new ArrayList();
}
public ColumnInfoRecordsAggregate(RecordStream rs) {
this();
/** You never fill an aggregate */
protected void fillFields(RecordInputStream in)
{
while(rs.peekNextClass() == ColumnInfoRecord.class) {
records.add(rs.getNext());
}
/** Not required by an aggregate */
protected void validateSid(short id)
{
if (records.size() < 1) {
throw new RuntimeException("No column info records found");
}
/** It's an aggregate... just made something up */
public short getSid()
{
return -1012;
}
public int getRecordSize()
{
int size = 0;
for ( Iterator iterator = records.iterator(); iterator.hasNext(); )
size += ( (ColumnInfoRecord) iterator.next() ).getRecordSize();
return size;
}
public Iterator getIterator()
{
return records.iterator();
}
/**
@ -105,25 +85,14 @@ public class ColumnInfoRecordsAggregate
return records.size();
}
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @param offset offset to begin writing at
* @param data byte array containing instance data
* @return number of bytes written
*/
public int serialize(int offset, byte [] data)
{
Iterator itr = records.iterator();
int pos = offset;
while (itr.hasNext())
{
pos += (( Record ) itr.next()).serialize(pos, data);
public void visitContainedRecords(RecordVisitor rv) {
int nItems = records.size();
if (nItems < 1) {
return;
}
for(int i=0; i<nItems; i++) {
rv.visitRecord((Record)records.get(i));
}
return pos - offset;
}
public int findStartOfColumnOutlineGroup(int idx)
@ -178,8 +147,7 @@ public class ColumnInfoRecordsAggregate
return idx;
}
public ColumnInfoRecord getColInfo(int idx)
{
private ColumnInfoRecord getColInfo(int idx) {
return (ColumnInfoRecord) records.get( idx );
}
@ -191,7 +159,7 @@ public class ColumnInfoRecordsAggregate
columnInfo.setHidden( hidden );
if (idx + 1 < records.size())
{
ColumnInfoRecord nextColumnInfo = (ColumnInfoRecord) records.get( idx + 1 );
ColumnInfoRecord nextColumnInfo = getColInfo(idx + 1);
if (columnInfo.getLastColumn() + 1 == nextColumnInfo.getFirstColumn())
{
if (nextColumnInfo.getOutlineLevel() < level)
@ -279,7 +247,7 @@ public class ColumnInfoRecordsAggregate
return;
// Find the start of the group.
ColumnInfoRecord columnInfo = (ColumnInfoRecord) records.get( findStartOfColumnOutlineGroup( idx ) );
ColumnInfoRecord columnInfo = getColInfo( findStartOfColumnOutlineGroup( idx ) );
// Hide all the columns until the end of the group
columnInfo = writeHidden( columnInfo, idx, true );
@ -331,7 +299,7 @@ public class ColumnInfoRecordsAggregate
* @see org.apache.poi.hssf.record.ColumnInfoRecord
* @return record containing a ColumnInfoRecord
*/
public static Record createColInfo()
public static ColumnInfoRecord createColInfo()
{
ColumnInfoRecord retval = new ColumnInfoRecord();
@ -452,7 +420,7 @@ public class ColumnInfoRecordsAggregate
ci.setCollapsed( collapsed.booleanValue() );
}
public int findColumnIdx(int column, int fromIdx)
private int findColumnIdx(int column, int fromIdx)
{
if (column < 0)
throw new IllegalArgumentException( "column parameter out of range: " + column );
@ -462,7 +430,7 @@ public class ColumnInfoRecordsAggregate
ColumnInfoRecord ci;
for (int k = fromIdx; k < records.size(); k++)
{
ci = ( ColumnInfoRecord ) records.get(k);
ci = getColInfo(k);
if ((ci.getFirstColumn() <= column)
&& (column <= ci.getLastColumn()))
{
@ -477,8 +445,8 @@ public class ColumnInfoRecordsAggregate
{
if (columnIdx == 0)
return;
ColumnInfoRecord previousCol = (ColumnInfoRecord) records.get( columnIdx - 1);
ColumnInfoRecord currentCol = (ColumnInfoRecord) records.get( columnIdx );
ColumnInfoRecord previousCol = getColInfo( columnIdx - 1);
ColumnInfoRecord currentCol = getColInfo( columnIdx );
boolean adjacentColumns = previousCol.getLastColumn() == currentCol.getFirstColumn() - 1;
if (!adjacentColumns)
return;
@ -513,7 +481,7 @@ public class ColumnInfoRecordsAggregate
int columnIdx = findColumnIdx( i, Math.max(0,fromIdx) );
if (columnIdx != -1)
{
level = ((ColumnInfoRecord)records.get( columnIdx )).getOutlineLevel();
level = getColInfo(columnIdx).getOutlineLevel();
if (indent) level++; else level--;
level = Math.max(0, level);
level = Math.min(7, level);
@ -525,6 +493,30 @@ public class ColumnInfoRecordsAggregate
}
}
/**
* Finds the <tt>ColumnInfoRecord</tt> which contains the specified columnIndex
* @param columnIndex index of the column (not the index of the ColumnInfoRecord)
* @return <code>null</code> if no column info found for the specified column
*/
public ColumnInfoRecord findColumnInfo(int columnIndex) {
int nInfos = records.size();
for(int i=0; i< nInfos; i++) {
ColumnInfoRecord ci = getColInfo(i);
if (ci.getFirstColumn() <= columnIndex && columnIndex <= ci.getLastColumn()) {
return ci;
}
}
return null;
}
public int getMaxOutlineLevel() {
int result = 0;
int count=records.size();
for (int i=0; i<count; i++) {
ColumnInfoRecord columnInfoRecord = getColInfo(i);
result = Math.max(columnInfoRecord.getOutlineLevel(), result);
}
return result;
}
}
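
As an illustration (not part of the commit), the new findColumnInfo lookup lets callers resolve per-column formatting without walking the record list themselves; getColumnWidth and the default-width fallback are assumptions for the example.

package org.apache.poi.hssf.record.aggregates;

import org.apache.poi.hssf.record.ColumnInfoRecord;

public final class ColumnLookupSketch {
    /** Hypothetical helper: column width with a caller-supplied default. */
    public static int getColumnWidth(ColumnInfoRecordsAggregate columns, int columnIndex, int defaultWidth) {
        ColumnInfoRecord ci = columns.findColumnInfo(columnIndex);
        return ci == null ? defaultWidth : ci.getColumnWidth();
    }
}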


@ -0,0 +1,88 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.aggregates;
import java.util.ArrayList;
import java.util.List;
import org.apache.poi.hssf.model.RecordStream;
import org.apache.poi.hssf.record.CFHeaderRecord;
import org.apache.poi.hssf.record.Record;
/**
* Holds all the conditional formatting for a workbook sheet.<p/>
*
* See OOO excelfileformat.pdf sec 4.12 'Conditional Formatting Table'
*
* @author Josh Micich
*/
public final class ConditionalFormattingTable extends RecordAggregate {
private final List _cfHeaders;
/**
* Creates an empty ConditionalFormattingTable
*/
public ConditionalFormattingTable() {
_cfHeaders = new ArrayList();
}
public ConditionalFormattingTable(RecordStream rs) {
List temp = new ArrayList();
while (rs.peekNextClass() == CFHeaderRecord.class) {
temp.add(CFRecordsAggregate.createCFAggregate(rs));
}
_cfHeaders = temp;
}
public void visitContainedRecords(RecordVisitor rv) {
for (int i = 0; i < _cfHeaders.size(); i++) {
rv.visitRecord((Record) _cfHeaders.get(i));
}
}
/**
* @return index of the newly added CF header aggregate
*/
public int add(CFRecordsAggregate cfAggregate) {
_cfHeaders.add(cfAggregate);
return _cfHeaders.size() - 1;
}
public int size() {
return _cfHeaders.size();
}
public CFRecordsAggregate get(int index) {
checkIndex(index);
return (CFRecordsAggregate) _cfHeaders.get(index);
}
public void remove(int index) {
checkIndex(index);
_cfHeaders.remove(index);
}
private void checkIndex(int index) {
if (index < 0 || index >= _cfHeaders.size()) {
throw new IllegalArgumentException("Specified CF index " + index
+ " is outside the allowable range (0.." + (_cfHeaders.size() - 1) + ")");
}
}
}
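
A brief usage sketch (assumed, not from this change) of the table's add/get/remove API; note that remove shifts later indexes down, as with any list.

package org.apache.poi.hssf.record.aggregates;

public final class CondFormatTableSketch {
    public static void addAndRemove(ConditionalFormattingTable cft, CFRecordsAggregate cfBlock) {
        int ix = cft.add(cfBlock);       // index of the newly appended CF header aggregate
        System.out.println(cft.size());
        cft.remove(ix);                  // indexes above ix now shift down by one
    }
}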


@ -21,17 +21,9 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.poi.hssf.model.RecordStream;
import org.apache.poi.hssf.record.CFHeaderRecord;
import org.apache.poi.hssf.record.CFRuleRecord;
import org.apache.poi.hssf.record.DVALRecord;
import org.apache.poi.hssf.record.DVRecord;
import org.apache.poi.hssf.record.EOFRecord;
import org.apache.poi.hssf.record.HyperlinkRecord;
import org.apache.poi.hssf.record.MergeCellsRecord;
import org.apache.poi.hssf.record.PaneRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.SelectionRecord;
import org.apache.poi.hssf.record.WindowTwoRecord;
/**
* Manages the DVALRecord and DVRecords for a single sheet<br/>
@ -40,7 +32,6 @@ import org.apache.poi.hssf.record.WindowTwoRecord;
*/
public final class DataValidityTable extends RecordAggregate {
private static final short sid = -0x01B2; // not a real record
private final DVALRecord _headerRec;
/**
* The list of data validations for the current sheet.
@ -57,118 +48,19 @@ public final class DataValidityTable extends RecordAggregate {
_validationList = temp;
}
private DataValidityTable() {
public DataValidityTable() {
_headerRec = new DVALRecord();
_validationList = new ArrayList();
}
public short getSid() {
return sid;
public void visitContainedRecords(RecordVisitor rv) {
if (_validationList.isEmpty()) {
return;
}
public int serialize(int offset, byte[] data) {
int result = _headerRec.serialize(offset, data);
rv.visitRecord(_headerRec);
for (int i = 0; i < _validationList.size(); i++) {
result += ((Record) _validationList.get(i)).serialize(offset + result, data);
rv.visitRecord((Record) _validationList.get(i));
}
return result;
}
public int getRecordSize() {
int result = _headerRec.getRecordSize();
for (int i = _validationList.size() - 1; i >= 0; i--) {
result += ((Record) _validationList.get(i)).getRecordSize();
}
return result;
}
/**
* Creates a new <tt>DataValidityTable</tt> and inserts it in the right
* place in the sheetRecords list.
*/
public static DataValidityTable createForSheet(List sheetRecords) {
int index = findDVTableInsertPos(sheetRecords);
DataValidityTable result = new DataValidityTable();
sheetRecords.add(index, result);
return result;
}
/**
* Finds the index where the sheet validations header record should be inserted
* @param records the records for this sheet
*
* + WINDOW2
* o SCL
* o PANE
* oo SELECTION
* o STANDARDWIDTH
* oo MERGEDCELLS
* o LABELRANGES
* o PHONETICPR
* o Conditional Formatting Table
* o Hyperlink Table
* o Data Validity Table
* o SHEETLAYOUT
* o SHEETPROTECTION
* o RANGEPROTECTION
* + EOF
*/
private static int findDVTableInsertPos(List records) {
int i = records.size() - 1;
if (!(records.get(i) instanceof EOFRecord)) {
throw new IllegalStateException("Last sheet record should be EOFRecord");
}
while (i > 0) {
i--;
Record rec = (Record) records.get(i);
if (isPriorRecord(rec.getSid())) {
Record nextRec = (Record) records.get(i + 1);
if (!isSubsequentRecord(nextRec.getSid())) {
throw new IllegalStateException("Unexpected (" + nextRec.getClass().getName()
+ ") found after (" + rec.getClass().getName() + ")");
}
return i;
}
if (!isSubsequentRecord(rec.getSid())) {
throw new IllegalStateException("Unexpected (" + rec.getClass().getName()
+ ") while looking for DV Table insert pos");
}
}
return 0;
}
// TODO - add UninterpretedRecord as base class for many of these
// unimplemented sids
private static boolean isPriorRecord(short sid) {
switch(sid) {
case WindowTwoRecord.sid:
case 0x00A0: // SCL
case PaneRecord.sid:
case SelectionRecord.sid:
case 0x0099: // STANDARDWIDTH
case MergeCellsRecord.sid:
case 0x015F: // LABELRANGES
case 0x00EF: // PHONETICPR
case CFHeaderRecord.sid:
case CFRuleRecord.sid:
case HyperlinkRecord.sid:
case 0x0800: // QUICKTIP
return true;
}
return false;
}
private static boolean isSubsequentRecord(short sid) {
switch(sid) {
case 0x0862: // SHEETLAYOUT
case 0x0867: // SHEETPROTECTION
case 0x0868: // RANGEPROTECTION
case EOFRecord.sid:
return true;
}
return false;
}
public void addDataValidation(DVRecord dvRecord) {


@ -0,0 +1,122 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.aggregates;
import java.util.ArrayList;
import java.util.List;
import org.apache.poi.hssf.model.RecordStream;
import org.apache.poi.hssf.record.MergeCellsRecord;
import org.apache.poi.hssf.util.CellRangeAddress;
import org.apache.poi.hssf.util.CellRangeAddressList;
/**
*
* @author Josh Micich
*/
public final class MergedCellsTable extends RecordAggregate {
private static int MAX_MERGED_REGIONS = 1027; // enforced by the 8224 byte limit
private final List _mergedRegions;
/**
* Creates an empty aggregate
*/
public MergedCellsTable() {
_mergedRegions = new ArrayList();
}
public MergedCellsTable(RecordStream rs) {
List temp = new ArrayList();
while (rs.peekNextClass() == MergeCellsRecord.class) {
MergeCellsRecord mcr = (MergeCellsRecord) rs.getNext();
int nRegions = mcr.getNumAreas();
for (int i = 0; i < nRegions; i++) {
temp.add(mcr.getAreaAt(i));
}
}
_mergedRegions = temp;
}
public int getRecordSize() {
// a bit cheaper than the default impl
int nRegions = _mergedRegions.size();
if (nRegions < 1) {
// no need to write a single empty MergeCellsRecord
return 0;
}
int nMergedCellsRecords = nRegions / MAX_MERGED_REGIONS;
int nLeftoverMergedRegions = nRegions % MAX_MERGED_REGIONS;
int result = nMergedCellsRecords
* (4 + CellRangeAddressList.getEncodedSize(MAX_MERGED_REGIONS)) + 4
+ CellRangeAddressList.getEncodedSize(nLeftoverMergedRegions);
return result;
}
public void visitContainedRecords(RecordVisitor rv) {
int nRegions = _mergedRegions.size();
if (nRegions < 1) {
// no need to write a single empty MergeCellsRecord
return;
}
int nFullMergedCellsRecords = nRegions / MAX_MERGED_REGIONS;
int nLeftoverMergedRegions = nRegions % MAX_MERGED_REGIONS;
CellRangeAddress[] cras = new CellRangeAddress[nRegions];
_mergedRegions.toArray(cras);
for (int i = 0; i < nFullMergedCellsRecords; i++) {
int startIx = i * MAX_MERGED_REGIONS;
rv.visitRecord(new MergeCellsRecord(cras, startIx, MAX_MERGED_REGIONS));
}
if (nLeftoverMergedRegions > 0) {
int startIx = nFullMergedCellsRecords * MAX_MERGED_REGIONS;
rv.visitRecord(new MergeCellsRecord(cras, startIx, nLeftoverMergedRegions));
}
}
public void add(MergeCellsRecord mcr) {
// the table stores individual CellRangeAddress regions, not whole MergeCellsRecords
for (int i = 0; i < mcr.getNumAreas(); i++) {
_mergedRegions.add(mcr.getAreaAt(i));
}
}
public CellRangeAddress get(int index) {
checkIndex(index);
return (CellRangeAddress) _mergedRegions.get(index);
}
public void remove(int index) {
checkIndex(index);
_mergedRegions.remove(index);
}
private void checkIndex(int index) {
if (index < 0 || index >= _mergedRegions.size()) {
throw new IllegalArgumentException("Specified CF index " + index
+ " is outside the allowable range (0.." + (_mergedRegions.size() - 1) + ")");
}
}
public void addArea(int rowFrom, int colFrom, int rowTo, int colTo) {
_mergedRegions.add(new CellRangeAddress(rowFrom, rowTo, colFrom, colTo));
}
public int getNumberOfMergedRegions() {
return _mergedRegions.size();
}
}
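
The 1027 limit comes from the BIFF record size cap: assuming each merged region encodes to 8 bytes plus a 2-byte count, 1027 regions give 8218 bytes of data, just under the 8224-byte maximum. A worked sketch (illustrative only) of how many MERGEDCELLS records a large sheet produces:

package org.apache.poi.hssf.record.aggregates;

public final class MergedCellsSplitSketch {
    private static final int MAX_MERGED_REGIONS = 1027;  // 2 + 1027 * 8 = 8218 <= 8224

    public static void main(String[] args) {
        int nRegions = 4096;                                       // as in TestSheet below
        int fullRecords = nRegions / MAX_MERGED_REGIONS;           // 3
        int leftover = nRegions % MAX_MERGED_REGIONS;              // 1015
        int recordCount = fullRecords + (leftover > 0 ? 1 : 0);    // 4 MERGEDCELLS records
        System.out.println(recordCount);
    }
}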


@ -18,6 +18,7 @@
package org.apache.poi.hssf.record.aggregates;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase;
import org.apache.poi.hssf.record.RecordInputStream;
/**
@ -27,15 +28,66 @@ import org.apache.poi.hssf.record.RecordInputStream;
*
* @author Josh Micich
*/
public abstract class RecordAggregate extends Record {
// TODO - convert existing aggregate classes to proper subclasses of this one
public abstract class RecordAggregate extends RecordBase {
// TODO - delete these methods when all subclasses have been converted
protected final void validateSid(short id) {
// TODO - break class hierarchy and make separate from Record
throw new RuntimeException("Should not be called");
}
protected final void fillFields(RecordInputStream in) {
throw new RuntimeException("Should not be called");
}
// force subclasses to provide a better implementation than the default
public abstract int getRecordSize();
public final short getSid() {
throw new RuntimeException("Should not be called");
}
public abstract void visitContainedRecords(RecordVisitor rv);
public final int serialize(int offset, byte[] data) {
SerializingRecordVisitor srv = new SerializingRecordVisitor(data, offset);
visitContainedRecords(srv);
return srv.countBytesWritten();
}
public int getRecordSize() {
RecordSizingVisitor rsv = new RecordSizingVisitor();
visitContainedRecords(rsv);
return rsv.getTotalSize();
}
public interface RecordVisitor {
void visitRecord(Record r);
}
private static final class SerializingRecordVisitor implements RecordVisitor {
private final byte[] _data;
private final int _startOffset;
private int _countBytesWritten;
public SerializingRecordVisitor(byte[] data, int startOffset) {
_data = data;
_startOffset = startOffset;
_countBytesWritten = 0;
}
public int countBytesWritten() {
return _countBytesWritten;
}
public void visitRecord(Record r) {
int currentOffset = _startOffset + _countBytesWritten;
_countBytesWritten += r.serialize(currentOffset, _data);
}
}
private static final class RecordSizingVisitor implements RecordVisitor {
private int _totalSize;
public RecordSizingVisitor() {
_totalSize = 0;
}
public int getTotalSize() {
return _totalSize;
}
public void visitRecord(Record r) {
_totalSize += r.getRecordSize();
}
}
}
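
The visitor indirection means an aggregate never has to expose its internal list; any operation over the contained records is just another RecordVisitor. A minimal example visitor (illustrative, not part of the commit):

package org.apache.poi.hssf.record.aggregates;

import org.apache.poi.hssf.record.Record;

public final class SidLoggingVisitor implements RecordAggregate.RecordVisitor {
    public void visitRecord(Record r) {
        // called once for each low-level record the aggregate would write out
        System.out.println("sid=0x" + Integer.toHexString(r.getSid()));
    }
    // usage: someAggregate.visitContainedRecords(new SidLoggingVisitor());
}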


@ -48,6 +48,7 @@ import org.apache.poi.hssf.record.NoteRecord;
import org.apache.poi.hssf.record.NumberRecord;
import org.apache.poi.hssf.record.ObjRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase;
import org.apache.poi.hssf.record.StringRecord;
import org.apache.poi.hssf.record.SubRecord;
import org.apache.poi.hssf.record.TextObjectRecord;
@ -1144,7 +1145,7 @@ public class HSSFCell
HSSFComment comment = null;
HashMap txshapes = new HashMap(); //map shapeId and TextObjectRecord
for (Iterator it = sheet.getRecords().iterator(); it.hasNext(); ) {
Record rec = ( Record ) it.next();
RecordBase rec = (RecordBase) it.next();
if (rec instanceof NoteRecord){
NoteRecord note = (NoteRecord)rec;
if (note.getRow() == row && note.getColumn() == column){
@ -1186,7 +1187,7 @@ public class HSSFCell
*/
public HSSFHyperlink getHyperlink(){
for (Iterator it = sheet.getRecords().iterator(); it.hasNext(); ) {
Record rec = ( Record ) it.next();
RecordBase rec = (RecordBase) it.next();
if (rec instanceof HyperlinkRecord){
HyperlinkRecord link = (HyperlinkRecord)rec;
if(link.getFirstColumn() == record.getColumn() && link.getFirstRow() == record.getRow()){


@ -1446,43 +1446,19 @@ public final class HSSFSheet {
}
/**
* Retrieves all the horizontal page breaks
* @return all the horizontal page breaks, or null if there are no row page breaks
* @return row indexes of all the horizontal page breaks, never <code>null</code>
*/
public int[] getRowBreaks(){
//we can probably cache this information, but this should be a sparsely used function
int count = sheet.getNumRowBreaks();
if (count > 0) {
int[] returnValue = new int[count];
Iterator iterator = sheet.getRowBreaks();
int i = 0;
while (iterator.hasNext()) {
PageBreakRecord.Break breakItem = (PageBreakRecord.Break)iterator.next();
returnValue[i++] = breakItem.main;
}
return returnValue;
}
return null;
return sheet.getRowBreaks();
}
/**
* Retrieves all the vertical page breaks
* @return all the vertical page breaks, or null if there are no column page breaks
* @return column indexes of all the vertical page breaks, never <code>null</code>
*/
public short[] getColumnBreaks(){
public int[] getColumnBreaks(){
//we can probably cache this information, but this should be a sparsely used function
int count = sheet.getNumColumnBreaks();
if (count > 0) {
short[] returnValue = new short[count];
Iterator iterator = sheet.getColumnBreaks();
int i = 0;
while (iterator.hasNext()) {
PageBreakRecord.Break breakItem = (PageBreakRecord.Break)iterator.next();
returnValue[i++] = breakItem.main;
}
return returnValue;
}
return null;
return sheet.getColumnBreaks();
}
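
With the return type changed from short[]-or-null to an int[] that is never null, callers can drop the null check; a minimal sketch under that assumption:

package org.apache.poi.hssf.usermodel;

public final class PageBreakDumpSketch {
    public static void dumpRowBreaks(HSSFSheet sheet) {
        int[] rowBreaks = sheet.getRowBreaks();   // never null after this change
        for (int i = 0; i < rowBreaks.length; i++) {
            System.out.println("page break after row " + rowBreaks[i]);
        }
    }
}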


@ -125,7 +125,14 @@ public final class CellRangeAddressList {
}
public int getSize() {
return 2 + CellRangeAddress.getEncodedSize(_list.size());
return getEncodedSize(_list.size());
}
/**
* @return the total size for the specified number of ranges,
* including the initial 2 byte range count
*/
public static int getEncodedSize(int numberOfRanges) {
return 2 + CellRangeAddress.getEncodedSize(numberOfRanges);
}
public CellRangeAddressList copy() {
CellRangeAddressList result = new CellRangeAddressList();


@ -17,20 +17,31 @@
package org.apache.poi.hssf.model;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.List;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.apache.poi.hssf.eventmodel.ERFListener;
import org.apache.poi.hssf.eventmodel.EventRecordFactory;
import org.apache.poi.hssf.record.*;
import org.apache.poi.hssf.record.BOFRecord;
import org.apache.poi.hssf.record.BlankRecord;
import org.apache.poi.hssf.record.CellValueRecordInterface;
import org.apache.poi.hssf.record.ColumnInfoRecord;
import org.apache.poi.hssf.record.DimensionsRecord;
import org.apache.poi.hssf.record.EOFRecord;
import org.apache.poi.hssf.record.IndexRecord;
import org.apache.poi.hssf.record.MergeCellsRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RowRecord;
import org.apache.poi.hssf.record.StringRecord;
import org.apache.poi.hssf.record.UncalcedRecord;
import org.apache.poi.hssf.record.aggregates.ColumnInfoRecordsAggregate;
import org.apache.poi.hssf.record.aggregates.RowRecordsAggregate;
import org.apache.poi.hssf.record.aggregates.ValueRecordsAggregate;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.poi.hssf.util.CellRangeAddress;
/**
* Unit test for the Sheet class.
@ -55,10 +66,24 @@ public final class TestSheet extends TestCase {
assertTrue( sheet.records.get(pos++) instanceof EOFRecord );
}
private static final class MergedCellListener implements ERFListener {
private int _count;
public MergedCellListener() {
_count = 0;
}
public boolean processRecord(Record rec) {
_count++;
return true;
}
public int getCount() {
return _count;
}
}
public void testAddMergedRegion() {
Sheet sheet = Sheet.createSheet();
int regionsToAdd = 4096;
int startRecords = sheet.getRecords().size();
//simple test that adds a load of regions
for (int n = 0; n < regionsToAdd; n++)
@ -71,11 +96,18 @@ public final class TestSheet extends TestCase {
assertTrue(sheet.getNumMergedRegions() == regionsToAdd);
//test that the regions were spread out over the appropriate number of records
int recordsAdded = sheet.getRecords().size() - startRecords;
byte[] sheetData = new byte[sheet.getSize()];
sheet.serialize(0, sheetData);
MergedCellListener mcListener = new MergedCellListener();
EventRecordFactory erf = new EventRecordFactory(mcListener, new short[] { MergeCellsRecord.sid, });
// POIFSFileSystem poifs = new POIFSFileSystem(new ByteArrayInputStream(sheetData));
erf.processRecords(new ByteArrayInputStream(sheetData));
int recordsAdded = mcListener.getCount();
int recordsExpected = regionsToAdd/1027;
if ((regionsToAdd % 1027) != 0)
recordsExpected++;
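// illustrative arithmetic (assumes the usual 8224 byte cap on a BIFF record body):
// one MergeCellsRecord holds at most (8224 - 2) / 8 = 1027 ranges, so 4096 regions
// should be spread over ceil(4096 / 1027) = 4 MergeCellsRecords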
assertTrue("The " + regionsToAdd + " merged regions should have been spread out over " + recordsExpected + " records, not " + recordsAdded, recordsAdded == recordsExpected);
assertTrue("The " + regionsToAdd + " merged regions should have been spread out over "
+ recordsExpected + " records, not " + recordsAdded, recordsAdded == recordsExpected);
// Check we can't add one with invalid date
try {
sheet.addMergedRegion(10, (short)10, 9, (short)12);
@ -97,22 +129,23 @@ public final class TestSheet extends TestCase {
Sheet sheet = Sheet.createSheet();
int regionsToAdd = 4096;
for (int n = 0; n < regionsToAdd; n++)
sheet.addMergedRegion(0, (short) 0, 1, (short) 1);
for (int n = 0; n < regionsToAdd; n++) {
sheet.addMergedRegion(n, 0, n, 1);
}
int records = sheet.getRecords().size();
int nSheetRecords = sheet.getRecords().size();
//remove a third from the beginning
for (int n = 0; n < regionsToAdd/3; n++)
{
sheet.removeMergedRegion(0);
//assert they have been deleted
assertTrue("Num of regions should be " + (regionsToAdd - n - 1) + " not " + sheet.getNumMergedRegions(), sheet.getNumMergedRegions() == regionsToAdd - n - 1);
assertEquals("Num of regions", regionsToAdd - n - 1, sheet.getNumMergedRegions());
}
//assert any record removing was done
int recordsRemoved = (regionsToAdd/3)/1027; //doesn't work for particular values of regionsToAdd
assertTrue("Expected " + recordsRemoved + " record to be removed from the starting " + records + ". Currently there are " + sheet.getRecords().size() + " records", records - sheet.getRecords().size() == recordsRemoved);
// merge records are removed from within the MergedCellsTable,
// so the sheet record count should not change
assertEquals("Sheet Records", nSheetRecords, sheet.getRecords().size());
}
/**
@ -125,8 +158,11 @@ public final class TestSheet extends TestCase {
public void testMovingMergedRegion() {
List records = new ArrayList();
MergeCellsRecord merged = new MergeCellsRecord();
merged.addArea(0, (short)0, 1, (short)2);
CellRangeAddress[] cras = {
new CellRangeAddress(0, 1, 0, 2),
};
MergeCellsRecord merged = new MergeCellsRecord(cras, 0, cras.length);
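// assumed signature: MergeCellsRecord(CellRangeAddress[] regions, int startIndex, int numberOfRegions)
// CellRangeAddress(firstRow, lastRow, firstCol, lastCol) above covers rows 0-1, columns 0-2,
// matching the old addArea(0, (short)0, 1, (short)2) call it replaces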
records.add(new DimensionsRecord());
records.add(new RowRecord(0));
records.add(new RowRecord(1));
records.add(new RowRecord(2));
@ -155,6 +191,7 @@ public final class TestSheet extends TestCase {
public void testRowAggregation() {
List records = new ArrayList();
records.add(new DimensionsRecord());
records.add(new RowRecord(0));
records.add(new RowRecord(1));
records.add(new StringRecord());
@ -196,10 +233,9 @@ public final class TestSheet extends TestCase {
boolean is0 = false;
boolean is11 = false;
Iterator iterator = sheet.getRowBreaks();
while (iterator.hasNext()) {
PageBreakRecord.Break breakItem = (PageBreakRecord.Break)iterator.next();
int main = breakItem.main;
int[] rowBreaks = sheet.getRowBreaks();
for (int i = 0; i < rowBreaks.length; i++) {
int main = rowBreaks[i];
if (main != 0 && main != 10 && main != 11) fail("Invalid page break");
if (main == 0) is0 = true;
if (main == 10) is10= true;
@ -253,10 +289,9 @@ public final class TestSheet extends TestCase {
boolean is1 = false;
boolean is15 = false;
Iterator iterator = sheet.getColumnBreaks();
while (iterator.hasNext()) {
PageBreakRecord.Break breakItem = (PageBreakRecord.Break)iterator.next();
int main = breakItem.main;
int[] colBreaks = sheet.getColumnBreaks();
for (int i = 0; i < colBreaks.length; i++) {
int main = colBreaks[i];
if (main != 0 && main != 1 && main != 10 && main != 15) fail("Invalid page break");
if (main == 0) is0 = true;
if (main == 1) is1 = true;
@ -297,9 +332,8 @@ public final class TestSheet extends TestCase {
xfindex = sheet.getXFIndexForColAt((short) 1);
assertEquals(DEFAULT_IDX, xfindex);
// TODO change return type to ColumnInfoRecord
ColumnInfoRecord nci = (ColumnInfoRecord)ColumnInfoRecordsAggregate.createColInfo();
sheet.columns.insertColumn(nci);
ColumnInfoRecord nci = ColumnInfoRecordsAggregate.createColInfo();
sheet._columnInfos.insertColumn(nci);
// single column ColumnInfoRecord
nci.setFirstColumn((short) 2);
@ -361,6 +395,7 @@ public final class TestSheet extends TestCase {
List records = new ArrayList();
records.add(new BOFRecord());
records.add(new UncalcedRecord());
records.add(new DimensionsRecord());
records.add(new EOFRecord());
Sheet sheet = Sheet.createSheet(records, 0, 0);
@ -369,7 +404,7 @@ public final class TestSheet extends TestCase {
if (serializedSize != estimatedSize) {
throw new AssertionFailedError("Identified bug 45066 b");
}
assertEquals(50, serializedSize);
assertEquals(68, serializedSize);
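// the 18 byte increase in expected size (68 - 50) matches the DimensionsRecord now added
// to the test input: 4 byte record header plus a 14 byte body (assuming the standard
// BIFF8 DIMENSIONS layout)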
}
/**
@ -393,7 +428,7 @@ public final class TestSheet extends TestCase {
int dbCellRecordPos = getDbCellRecordPos(sheet);
if (dbCellRecordPos == 264) {
if (dbCellRecordPos == 252) {
// The overt symptom of the bug
// DBCELL record pos is calculated wrong if VRA comes before RRA
throw new AssertionFailedError("Identified bug 45145");
@ -405,7 +440,7 @@ public final class TestSheet extends TestCase {
assertEquals(RowRecordsAggregate.class, recs.get(rraIx).getClass());
assertEquals(ValueRecordsAggregate.class, recs.get(rraIx+1).getClass());
assertEquals(254, dbCellRecordPos);
assertEquals(242, dbCellRecordPos);
}
/**

View File

@ -29,8 +29,7 @@ public final class TestSheetAdditional extends TestCase {
public void testGetCellWidth() {
Sheet sheet = Sheet.createSheet();
// TODO change return type to ColumnInfoRecord
ColumnInfoRecord nci = (ColumnInfoRecord)ColumnInfoRecordsAggregate.createColInfo();
ColumnInfoRecord nci = ColumnInfoRecordsAggregate.createColInfo();
// Prepare test model
nci.setFirstColumn((short)5);
@ -38,7 +37,7 @@ public final class TestSheetAdditional extends TestCase {
nci.setColumnWidth((short)100);
sheet.columns.insertColumn(nci);
sheet._columnInfos.insertColumn(nci);
assertEquals((short)100,sheet.getColumnWidth((short)5));
assertEquals((short)100,sheet.getColumnWidth((short)6));
@ -58,6 +57,3 @@ public final class TestSheetAdditional extends TestCase {
}
}

View File

@ -33,8 +33,8 @@ public final class TestMergeCellsRecord extends TestCase {
* @throws Exception
*/
public void testCloneReferences() throws Exception {
MergeCellsRecord merge = new MergeCellsRecord();
merge.addArea(0, (short)0, 1, (short)2);
CellRangeAddress[] cras = { new CellRangeAddress(0, 1, 0, 2), };
MergeCellsRecord merge = new MergeCellsRecord(cras, 0, cras.length);
MergeCellsRecord clone = (MergeCellsRecord)merge.clone();
assertNotSame("Merged and cloned objects are the same", merge, clone);
@ -47,7 +47,6 @@ public final class TestMergeCellsRecord extends TestCase {
assertEquals("New Clone Col From doesnt match", mergeRegion.getFirstColumn(), cloneRegion.getFirstColumn());
assertEquals("New Clone Col To doesnt match", mergeRegion.getLastColumn(), cloneRegion.getLastColumn());
merge.removeAreaAt(0);
assertNotNull("Clone's item not removed", clone.getAreaAt(0));
assertFalse(merge.getAreaAt(0) == clone.getAreaAt(0));
}
}

View File

@ -24,6 +24,7 @@ import java.util.List;
import junit.framework.TestCase;
import org.apache.poi.hssf.model.RecordStream;
import org.apache.poi.hssf.record.CFHeaderRecord;
import org.apache.poi.hssf.record.CFRuleRecord;
import org.apache.poi.hssf.record.RecordFactory;
@ -59,7 +60,7 @@ public final class TestCFRecordsAggregate extends TestCase
recs.add(rule2);
recs.add(rule3);
CFRecordsAggregate record;
record = CFRecordsAggregate.createCFAggregate(recs, 0);
record = CFRecordsAggregate.createCFAggregate(new RecordStream(recs, 0));
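// assumed: RecordStream(recs, 0) wraps the record list for sequential consumption by the
// aggregate, replacing the old createCFAggregate(List, int) overload used above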
// Serialize
byte [] serializedRecord = record.serialize();
@ -81,7 +82,7 @@ public final class TestCFRecordsAggregate extends TestCase
assertEquals(2, cellRanges.length);
assertEquals(3, header.getNumberOfConditionalFormats());
record = CFRecordsAggregate.createCFAggregate(recs, 0);
record = CFRecordsAggregate.createCFAggregate(new RecordStream(recs, 0));
record = record.cloneCFAggregate();

View File

@ -19,42 +19,39 @@ package org.apache.poi.hssf.record.aggregates;
import junit.framework.TestCase;
import org.apache.poi.hssf.record.ColumnInfoRecord;
import org.apache.poi.hssf.record.RecordBase;
/**
* @author Glen Stampoultzis
*/
public final class TestColumnInfoRecordsAggregate extends TestCase
{
ColumnInfoRecordsAggregate columnInfoRecordsAggregate;
public final class TestColumnInfoRecordsAggregate extends TestCase {
public void testGetRecordSize() throws Exception
{
columnInfoRecordsAggregate = new ColumnInfoRecordsAggregate();
columnInfoRecordsAggregate.insertColumn( createColumn( (short)1, (short)3 ));
columnInfoRecordsAggregate.insertColumn( createColumn( (short)4, (short)7 ));
columnInfoRecordsAggregate.insertColumn( createColumn( (short)8, (short)8 ));
// columnInfoRecordsAggregate.setColumn( (short)2, new Short( (short)200 ), new Integer( 1 ), new Boolean( true ), null);
columnInfoRecordsAggregate.groupColumnRange( (short)2, (short)5, true );
assertEquals(6, columnInfoRecordsAggregate.getNumColumns());
public void testGetRecordSize() {
ColumnInfoRecordsAggregate agg = new ColumnInfoRecordsAggregate();
agg.insertColumn(createColumn(1, 3));
agg.insertColumn(createColumn(4, 7));
agg.insertColumn(createColumn(8, 8));
agg.groupColumnRange((short) 2, (short) 5, true);
assertEquals(6, agg.getNumColumns());
assertEquals(columnInfoRecordsAggregate.getRecordSize(), columnInfoRecordsAggregate.serialize().length);
confirmSerializedSize(agg);
columnInfoRecordsAggregate = new ColumnInfoRecordsAggregate();
columnInfoRecordsAggregate.groupColumnRange( (short)3, (short)6, true );
assertEquals(columnInfoRecordsAggregate.getRecordSize(), serializedSize());
agg = new ColumnInfoRecordsAggregate();
agg.groupColumnRange((short) 3, (short) 6, true);
confirmSerializedSize(agg);
}
private int serializedSize()
{
return columnInfoRecordsAggregate.serialize(0, new byte[columnInfoRecordsAggregate.getRecordSize()]);
private static void confirmSerializedSize(RecordBase cirAgg) {
int estimatedSize = cirAgg.getRecordSize();
byte[] buf = new byte[estimatedSize];
int serializedSize = cirAgg.serialize(0, buf);
assertEquals(estimatedSize, serializedSize);
}
private ColumnInfoRecord createColumn( short firstCol, short lastCol )
{
private static ColumnInfoRecord createColumn(int firstCol, int lastCol) {
ColumnInfoRecord columnInfoRecord = new ColumnInfoRecord();
columnInfoRecord.setFirstColumn(firstCol);
columnInfoRecord.setLastColumn(lastCol);
columnInfoRecord.setFirstColumn((short) firstCol);
columnInfoRecord.setLastColumn((short) lastCol);
return columnInfoRecord;
}
}

View File

@ -659,7 +659,7 @@ public final class TestBugs extends TestCase {
HSSFSheet sheet = wb.getSheetAt( 0 );
int[] breaks = sheet.getRowBreaks();
assertNull(breaks);
assertEquals(0, breaks.length);
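// getRowBreaks() now returns an empty array rather than null when the sheet has no row breaks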
//add 3 row breaks
for (int j = 1; j <= 3; j++) {