Partial NPOIFS write implementation, and tidy up of methods around this

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1053261 13f79535-47bb-0310-9956-ffa450edef68
Nick Burch 2010-12-28 05:31:32 +00:00
parent 4d0313fd2e
commit e797edb37b
8 changed files with 136 additions and 174 deletions
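
At the caller level, the change lets a file-backed NPOIFSFileSystem be synced back to the file it was opened from, while the stream-based write remains available for everything else. A minimal usage sketch, not part of this commit; it assumes a File-based constructor exists alongside the InputStream one, and the file names are placeholders:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.OutputStream;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

    public class NPOIFSWriteSketch {
        public static void main(String[] args) throws Exception {
            // Assumed File-based constructor: the filesystem is then backed by a
            // FileBackedDataSource, so the new in-place writeFilesystem() is allowed
            NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("test.ole2"));
            fs.writeFilesystem();   // sync in-memory state back to the open file

            // The stream-based write still produces a complete copy elsewhere
            OutputStream out = new FileOutputStream("copy.ole2");
            try {
                fs.writeFilesystem(out);
            } finally {
                out.close();
            }
        }
    }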

File: org/apache/poi/poifs/filesystem/NPOIFSFileSystem.java

@@ -19,6 +19,7 @@
 package org.apache.poi.poifs.filesystem;
 
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -87,8 +88,6 @@ public class NPOIFSFileSystem extends BlockStore
     private DataSource _data;
 
-    private List _documents;  // TODO - probably remove this shortly
-
     /**
      * What big block size the file uses. Most files
      * use 512 bytes, but a few use 4096
@@ -520,6 +519,26 @@ public class NPOIFSFileSystem extends BlockStore
         return getRoot().createDirectory(name);
     }
 
+    /**
+     * Write the filesystem out to the open file. Will throw an
+     *  {@link IllegalArgumentException} if opened from an
+     *  {@link InputStream}.
+     *
+     * @exception IOException thrown on errors writing to the stream
+     */
+    public void writeFilesystem() throws IOException
+    {
+        if(_data instanceof FileBackedDataSource) {
+            // Good, correct type
+        } else {
+            throw new IllegalArgumentException(
+                  "POIFS opened from an inputstream, so writeFilesystem() may " +
+                  "not be called. Use writeFilesystem(OutputStream) instead"
+            );
+        }
+        syncWithDataSource();
+    }
+
     /**
      * Write the filesystem out
      *
@@ -532,94 +551,36 @@ public class NPOIFSFileSystem extends BlockStore
     public void writeFilesystem(final OutputStream stream)
         throws IOException
     {
-        // create the small block store, and the SBAT
-        SmallBlockTableWriter sbtw =
-            new SmallBlockTableWriter(bigBlockSize, _documents, _property_table.getRoot());
-
-        // create the block allocation table
-        BlockAllocationTableWriter bat =
-            new BlockAllocationTableWriter(bigBlockSize);
-
-        // create a list of BATManaged objects: the documents plus the
-        // property table and the small block table
-        List bm_objects = new ArrayList();
-
-        bm_objects.addAll(_documents);
-        bm_objects.add(_property_table);
-        bm_objects.add(sbtw);
-        bm_objects.add(sbtw.getSBAT());
-
-        // walk the list, allocating space for each and assigning each
-        // a starting block number
-        Iterator iter = bm_objects.iterator();
-
-        while (iter.hasNext())
-        {
-            BATManaged bmo = ( BATManaged ) iter.next();
-            int block_count = bmo.countBlocks();
-
-            if (block_count != 0)
-            {
-                bmo.setStartBlock(bat.allocateSpace(block_count));
-            }
-            else
-            {
-                // Either the BATManaged object is empty or its data
-                // is composed of SmallBlocks; in either case,
-                // allocating space in the BAT is inappropriate
-            }
-        }
-
-        // allocate space for the block allocation table and take its
-        // starting block
-        int batStartBlock = bat.createBlocks();
-
-        // get the extended block allocation table blocks
-        HeaderBlockWriter header_block_writer = new HeaderBlockWriter(bigBlockSize);
-        BATBlock[] xbat_blocks =
-            header_block_writer.setBATBlocks(bat.countBlocks(),
-                batStartBlock);
-
-        // set the property table start block
-        header_block_writer.setPropertyStart(_property_table.getStartBlock());
-
-        // set the small block allocation table start block
-        header_block_writer.setSBATStart(sbtw.getSBAT().getStartBlock());
-
-        // set the small block allocation table block count
-        header_block_writer.setSBATBlockCount(sbtw.getSBATBlockCount());
-
-        // the header is now properly initialized. Make a list of
-        // writers (the header block, followed by the documents, the
-        // property table, the small block store, the small block
-        // allocation table, the block allocation table, and the
-        // extended block allocation table blocks)
-        List writers = new ArrayList();
-
-        writers.add(header_block_writer);
-        writers.addAll(_documents);
-        writers.add(sbtw);
-        writers.add(sbtw.getSBAT());
-        writers.add(bat);
-        for (int j = 0; j < xbat_blocks.length; j++)
-        {
-            writers.add(xbat_blocks[ j ]);
-        }
-
-        // now, write everything out
-        iter = writers.iterator();
-        while (iter.hasNext())
-        {
-            BlockWritable writer = ( BlockWritable ) iter.next();
-            writer.writeBlocks(stream);
-        }
-
-        // Finally have the property table serialise itself
-        _property_table.write(
-            new NPOIFSStream(this, _header.getPropertyStart())
-        );
+        // Have the datasource updated
+        syncWithDataSource();
+
+        // Now copy the contents to the stream
+        _data.copyTo(stream);
+    }
+
+    /**
+     * Has our in-memory objects write their state
+     *  to their backing blocks
+     */
+    private void syncWithDataSource() throws IOException
+    {
+        // HeaderBlock
+        HeaderBlockWriter hbw = new HeaderBlockWriter(_header);
+        hbw.writeBlock( getBlockAt(0) );
+
+        // BATs
+        for(BATBlock bat : _bat_blocks) {
+            ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
+            BlockAllocationTableWriter.writeBlock(bat, block);
+        }
+
+        // SBATs
+        _mini_store.syncWithDataSource();
+
+        // Properties
+        _property_table.write(
+            new NPOIFSStream(this, _header.getPropertyStart())
+        );
     }
 
     /**
@@ -682,29 +643,6 @@ public class NPOIFSFileSystem extends BlockStore
         return getRoot().createDocumentInputStream(documentName);
     }
 
-    /**
-     * add a new POIFSDocument
-     *
-     * @param document the POIFSDocument being added
-     */
-    void addDocument(final POIFSDocument document)
-    {
-        _documents.add(document);
-        _property_table.addProperty(document.getDocumentProperty());
-    }
-
-    /**
-     * add a new DirectoryProperty
-     *
-     * @param directory the DirectoryProperty being added
-     */
-    void addDirectory(final DirectoryProperty directory)
-    {
-        _property_table.addProperty(directory);
-    }
-
     /**
      * remove an entry
      *
@@ -714,62 +652,6 @@ public class NPOIFSFileSystem extends BlockStore
     void remove(EntryNode entry)
     {
         _property_table.removeProperty(entry.getProperty());
-        if (entry.isDocumentEntry())
-        {
-            _documents.remove((( DocumentNode ) entry).getDocument());
-        }
-    }
-
-    private void processProperties(final BlockList small_blocks,
-                                   final BlockList big_blocks,
-                                   final Iterator properties,
-                                   final DirectoryNode dir,
-                                   final int headerPropertiesStartAt)
-        throws IOException
-    {
-        while (properties.hasNext())
-        {
-            Property property = ( Property ) properties.next();
-            String name = property.getName();
-            DirectoryNode parent = (dir == null)
-                ? (( DirectoryNode ) getRoot())
-                : dir;
-
-            if (property.isDirectory())
-            {
-                DirectoryNode new_dir =
-                    ( DirectoryNode ) parent.createDirectory(name);
-
-                new_dir.setStorageClsid( property.getStorageClsid() );
-
-                processProperties(
-                    small_blocks, big_blocks,
-                    (( DirectoryProperty ) property).getChildren(),
-                    new_dir, headerPropertiesStartAt);
-            }
-            else
-            {
-                int startBlock = property.getStartBlock();
-                int size = property.getSize();
-                POIFSDocument document = null;
-
-                if (property.shouldUseSmallBlocks())
-                {
-                    document =
-                        new POIFSDocument(name,
-                            small_blocks.fetchBlocks(startBlock, headerPropertiesStartAt),
-                            size);
-                }
-                else
-                {
-                    document =
-                        new POIFSDocument(name,
-                            big_blocks.fetchBlocks(startBlock, headerPropertiesStartAt),
-                            size);
-                }
-                parent.createDocument(document);
-            }
-        }
-    }
     }
 
     /* ********** START begin implementation of POIFSViewable ********** */
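
Since syncWithDataSource() only rewrites the header, BAT, SBAT and property blocks in place, a quick sanity check of the stream-based path is a read/write/re-read round trip. A rough sketch, assuming the InputStream constructor referenced in the writeFilesystem() guard above; the file name is a placeholder:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.FileInputStream;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

    public class RoundTripSketch {
        public static void main(String[] args) throws Exception {
            // Open an existing OLE2 file from a stream
            NPOIFSFileSystem original =
                new NPOIFSFileSystem(new FileInputStream("sample.ole2"));

            // Write a full copy out through the stream-based path
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            original.writeFilesystem(baos);

            // Re-open the copy; the header, BATs, SBATs and properties
            // written by syncWithDataSource() should parse again
            NPOIFSFileSystem reread =
                new NPOIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
            System.out.println("Re-opened copy, root = " + reread.getRoot());
        }
    }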

File: org/apache/poi/poifs/filesystem/NPOIFSMiniStore.java

@@ -27,6 +27,7 @@ import java.util.List;
 import org.apache.poi.poifs.common.POIFSConstants;
 import org.apache.poi.poifs.property.RootProperty;
 import org.apache.poi.poifs.storage.BATBlock;
+import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
 import org.apache.poi.poifs.storage.HeaderBlock;
 import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
 
@@ -194,5 +195,14 @@ public class NPOIFSMiniStore extends BlockStore
     protected int getBlockStoreBlockSize() {
         return POIFSConstants.SMALL_BLOCK_SIZE;
     }
+
+    /**
+     * Writes the SBATs to their backing blocks
+     */
+    protected void syncWithDataSource() throws IOException {
+        for(BATBlock sbat : _sbat_blocks) {
+            ByteBuffer block = _filesystem.getBlockAt(sbat.getOurBlockIndex());
+            BlockAllocationTableWriter.writeBlock(sbat, block);
+        }
+    }
 }

File: org/apache/poi/poifs/nio/ByteArrayBackedDataSource.java

@@ -17,6 +17,8 @@
 package org.apache.poi.poifs.nio;
 
+import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
 /**
@@ -77,6 +79,10 @@ public class ByteArrayBackedDataSource extends DataSource {
        buffer = nb;
     }
 
+    public void copyTo(OutputStream stream) throws IOException {
+       stream.write(buffer, 0, (int)size);
+    }
+
     public long size() {
        return size;
     }

File: org/apache/poi/poifs/nio/DataSource.java

@@ -18,6 +18,7 @@
 package org.apache.poi.poifs.nio;
 
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
 /**
@@ -27,5 +28,8 @@ public abstract class DataSource {
    public abstract ByteBuffer read(int length, long position) throws IOException;
    public abstract void write(ByteBuffer src, long position) throws IOException;
    public abstract long size() throws IOException;
+   /** Close the underlying stream */
    public abstract void close() throws IOException;
+   /** Copies the contents to the specified OutputStream */
+   public abstract void copyTo(OutputStream stream) throws IOException;
 }
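
For third-party code, the abstract contract now pairs random-access read/write with a whole-contents copyTo. A hypothetical in-memory implementation, purely to illustrate the expected semantics of the five abstract methods; the class name and fixed capacity are invented for the sketch:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.ByteBuffer;
    import org.apache.poi.poifs.nio.DataSource;

    // Illustrative only, not part of POI
    public class FixedBufferDataSource extends DataSource {
        private final byte[] data;
        private long size;

        public FixedBufferDataSource(int capacity) {
            this.data = new byte[capacity];
        }

        public ByteBuffer read(int length, long position) throws IOException {
            if (position >= size) {
                throw new IOException("Position " + position + " past end of data");
            }
            // Hand back a view onto the stored bytes
            return ByteBuffer.wrap(data, (int) position, length);
        }

        public void write(ByteBuffer src, long position) throws IOException {
            // Copy the remaining bytes of src into place and grow the logical size
            int len = src.remaining();
            src.get(data, (int) position, len);
            size = Math.max(size, position + len);
        }

        public long size() { return size; }

        public void close() { /* nothing to release */ }

        public void copyTo(OutputStream stream) throws IOException {
            // Mirrors the byte-array backed implementation above:
            // push everything written so far to the stream
            stream.write(data, 0, (int) size);
        }
    }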

File: org/apache/poi/poifs/nio/FileBackedDataSource.java

@@ -20,9 +20,12 @@ package org.apache.poi.poifs.nio;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
 import java.nio.channels.FileChannel;
+import java.nio.channels.WritableByteChannel;
 
 import org.apache.poi.util.IOUtils;
 
@@ -68,6 +71,13 @@ public class FileBackedDataSource extends DataSource {
       channel.write(src, position);
    }
 
+   public void copyTo(OutputStream stream) throws IOException {
+      // Wrap the OutputStream as a channel
+      WritableByteChannel out = Channels.newChannel(stream);
+      // Now do the transfer
+      channel.transferTo(0, channel.size(), out);
+   }
+
    public long size() throws IOException {
       return channel.size();
    }
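
One caveat with the FileChannel-based copy: transferTo() may transfer fewer bytes than requested, so a more defensive variant of the same idea would loop until the whole channel has been drained. A sketch only, assuming the same channel field as the method above:

    public void copyTo(OutputStream stream) throws IOException {
        WritableByteChannel out = Channels.newChannel(stream);
        long size = channel.size();
        long position = 0;
        while (position < size) {
            long written = channel.transferTo(position, size - position, out);
            if (written <= 0) {
                break;   // defensive: avoid spinning if nothing moves
            }
            position += written;
        }
    }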

File: org/apache/poi/poifs/storage/BATBlock.java

@@ -354,6 +354,18 @@ public final class BATBlock extends BigBlock {
    void writeData(final OutputStream stream)
       throws IOException
    {
+      // Save it out
+      stream.write( serialize() );
+   }
+
+   void writeData(final ByteBuffer block)
+      throws IOException
+   {
+      // Save it out
+      block.put( serialize() );
+   }
+
+   private byte[] serialize() {
       // Create the empty array
       byte[] data = new byte[ bigBlockSize.getBigBlockSize() ];
 
@@ -364,8 +376,8 @@ public final class BATBlock extends BigBlock {
          offset += LittleEndian.INT_SIZE;
       }
 
-      // Save it out
-      stream.write(data);
+      // Done
+      return data;
    }
 
    /* ********** END extension of BigBlock ********** */
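
Factoring the byte generation into serialize() means the stream and ByteBuffer write paths cannot drift apart. A small hypothetical check along these lines would confirm that; it has to live in org.apache.poi.poifs.storage because writeData() is package-private, and it assumes the supplied POIFSBigBlockSize matches the one the block was built with:

    package org.apache.poi.poifs.storage;

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import org.apache.poi.poifs.common.POIFSBigBlockSize;

    // Hypothetical same-package check, not part of this commit
    class BATBlockWriteCheck {
        static void check(BATBlock bat, POIFSBigBlockSize bigBlockSize) throws IOException {
            ByteArrayOutputStream viaStream = new ByteArrayOutputStream();
            bat.writeData(viaStream);

            ByteBuffer viaBuffer = ByteBuffer.allocate(bigBlockSize.getBigBlockSize());
            bat.writeData(viaBuffer);

            // Both paths go through serialize(), so the bytes should be identical
            if (!Arrays.equals(viaStream.toByteArray(), viaBuffer.array())) {
                throw new IllegalStateException("BATBlock write paths disagree");
            }
        }
    }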

File: org/apache/poi/poifs/storage/BlockAllocationTableWriter.java

@@ -19,6 +19,7 @@ package org.apache.poi.poifs.storage;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 
 import org.apache.poi.poifs.common.POIFSBigBlockSize;
 import org.apache.poi.poifs.common.POIFSConstants;
 
@@ -156,6 +157,15 @@ public final class BlockAllocationTableWriter implements BlockWritable, BATManaged
       }
    }
 
+   /**
+    * Write the BAT into its associated block
+    */
+   public static void writeBlock(final BATBlock bat, final ByteBuffer block)
+      throws IOException
+   {
+      bat.writeData(block);
+   }
+
    /**
     * Return the number of BigBlock's this instance uses
     *

File: org/apache/poi/poifs/storage/HeaderBlockWriter.java

@@ -19,8 +19,10 @@
 package org.apache.poi.poifs.storage;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 
 import org.apache.poi.poifs.common.POIFSBigBlockSize;
 import org.apache.poi.poifs.common.POIFSConstants;
 
@@ -37,12 +39,20 @@ public class HeaderBlockWriter implements HeaderBlockConstants, BlockWritable
    /**
     * Create a single instance initialized with default values
     */
    public HeaderBlockWriter(POIFSBigBlockSize bigBlockSize)
    {
       _header_block = new HeaderBlock(bigBlockSize);
    }
 
+   /**
+    * Create a single instance initialized with the specified
+    *  existing values
+    */
+   public HeaderBlockWriter(HeaderBlock headerBlock)
+   {
+      _header_block = headerBlock;
+   }
+
    /**
     * Set BAT block parameters. Assumes that all BAT blocks are
     * contiguous. Will construct XBAT blocks if necessary and return
 
@@ -155,13 +165,31 @@ public class HeaderBlockWriter implements HeaderBlockConstants, BlockWritable
    * @exception IOException on problems writing to the specified
    *            stream
    */
    public void writeBlocks(final OutputStream stream)
       throws IOException
    {
       _header_block.writeData(stream);
    }
 
+   /**
+    * Write the block's data to an existing block
+    *
+    * @param block the ByteBuffer of the block to which the
+    *         stored data should be written
+    *
+    * @exception IOException on problems writing to the block
+    */
+   public void writeBlock(ByteBuffer block)
+      throws IOException
+   {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream(
+            _header_block.getBigBlockSize().getBigBlockSize()
+      );
+      _header_block.writeData(baos);
+      block.put(baos.toByteArray());
+   }
+
    /* ********** END extension of BigBlock ********** */
 }   // end public class HeaderBlockWriter