mirror of https://github.com/apache/poi.git
#62649 - Remove OPOIFS
Fix HPSF UTF-8 encoding error with Bug52311.doc while refactoring CopyCompare/EntryUtils

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1839201 13f79535-47bb-0310-9956-ffa450edef68
parent 636d9495a5
commit 96d5fab483
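The diff below migrates the HPSF examples from stream-based POIFSReader.read(InputStream) calls to the File-based overload, drops the OPOIFS classes, and replaces CopyCompare's hand-rolled directory comparison with EntryUtils.areDirectoriesIdentical. A minimal sketch of the resulting usage pattern, with hypothetical file names, could look like this (not part of the commit itself):

import java.io.File;

import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class CopyCompareUsageSketch {
    public static void main(String[] args) throws Exception {
        // Event-based reading now takes a File directly instead of a FileInputStream.
        POIFSReader reader = new POIFSReader();
        reader.setNotifyEmptyDirectories(true);
        // reader.registerListener(...);          // listener as in the CopyCompare example class
        reader.read(new File("original.doc"));    // "original.doc" is a placeholder name

        // Comparing the two filesystems is delegated to EntryUtils instead of a local equal() helper.
        try (POIFSFileSystem original = new POIFSFileSystem(new File("original.doc"));
             POIFSFileSystem copy = new POIFSFileSystem(new File("copy.doc"))) {
            System.out.println(EntryUtils.areDirectoriesIdentical(original.getRoot(), copy.getRoot())
                    ? "Equal" : "Not equal");
        }
    }
}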
@@ -20,8 +20,6 @@ package org.apache.poi.hpsf.examples;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -30,19 +28,21 @@ import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;

import org.apache.poi.hpsf.DocumentSummaryInformation;
import org.apache.poi.hpsf.HPSFRuntimeException;
import org.apache.poi.hpsf.MarkUnsupportedException;
import org.apache.poi.hpsf.NoPropertySetStreamException;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hpsf.UnexpectedPropertySetTypeException;
import org.apache.poi.hpsf.WritingNotSupportedException;
import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.POIFSDocumentPath;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.TempFile;
@@ -66,36 +66,27 @@ import org.apache.poi.util.TempFile;
* with the same attributes, and the sections must contain the same properties.
* Details like the ordering of the properties do not matter.</p>
*/
public class CopyCompare {
public final class CopyCompare {
private CopyCompare() {}

/**
* <p>Runs the example program. The application expects one or two
* arguments:</p>
* <p>
* Runs the example program. The application expects one or two arguments:
*
* <ol>
* <p>
* <li><p>The first argument is the disk file name of the POI filesystem to
* copy.</p></li>
* <p>
* <li><p>The second argument is optional. If it is given, it is the name of
* <li>The first argument is the disk file name of the POI filesystem to copy.</li>
* <li>The second argument is optional. If it is given, it is the name of
* a disk file the copy of the POI filesystem will be written to. If it is
* not given, the copy will be written to a temporary file which will be
* deleted at the end of the program.</p></li>
* <p>
* deleted at the end of the program.</li>
* </ol>
*
* @param args Command-line arguments.
* @throws MarkUnsupportedException if a POI document stream does not
* support the mark() operation.
* @throws NoPropertySetStreamException if the application tries to
* create a property set from a POI document stream that is not a property
* set stream.
* @throws IOException if any I/O exception occurs.
* @throws UnsupportedEncodingException if a character encoding is not
* supported.
*/
public static void main(final String[] args)
throws NoPropertySetStreamException, MarkUnsupportedException,
UnsupportedEncodingException, IOException {
throws UnsupportedEncodingException, IOException {
String originalFileName = null;
String copyFileName = null;

@@ -120,10 +111,9 @@ public class CopyCompare {
final CopyFile cf = new CopyFile(copyFileName);
r.registerListener(cf);
r.setNotifyEmptyDirectories(true);
try (FileInputStream fis = new FileInputStream(originalFileName)) {
r.read(fis);
}

r.read(new File(originalFileName));

/* Write the new POIFS to disk. */
cf.close();

@@ -133,124 +123,10 @@ public class CopyCompare {
POIFSFileSystem cpfs = new POIFSFileSystem(new File(copyFileName))) {
final DirectoryEntry oRoot = opfs.getRoot();
final DirectoryEntry cRoot = cpfs.getRoot();
final StringBuffer messages = new StringBuffer();
if (equal(oRoot, cRoot, messages)) {
System.out.println("Equal");
} else {
System.out.println("Not equal: " + messages);
}
System.out.println(EntryUtils.areDirectoriesIdentical(oRoot, cRoot) ? "Equal" : "Not equal");
}
}

/**
* <p>Compares two {@link DirectoryEntry} instances of a POI file system.
* The directories must contain the same streams with the same names and
* contents.</p>
*
* @param d1 The first directory.
* @param d2 The second directory.
* @param msg The method may append human-readable comparison messages to
* this string buffer.
* @return <code>true</code> if the directories are equal, else
* <code>false</code>.
* @throws MarkUnsupportedException if a POI document stream does not
* support the mark() operation.
* @throws NoPropertySetStreamException if the application tries to
* create a property set from a POI document stream that is not a property
* set stream.
* @throws IOException if any I/O exception occurs.
*/
private static boolean equal(final DirectoryEntry d1,
final DirectoryEntry d2,
final StringBuffer msg)
throws NoPropertySetStreamException, MarkUnsupportedException,
UnsupportedEncodingException, IOException {
boolean equal = true;
/* Iterate over d1 and compare each entry with its counterpart in d2. */
for (final Entry e1 : d1) {
final String n1 = e1.getName();
if (!d2.hasEntry(n1)) {
msg.append("Document \"").append(n1).append("\" exists only in the source.\n");
equal = false;
break;
}
Entry e2 = d2.getEntry(n1);

if (e1.isDirectoryEntry() && e2.isDirectoryEntry()) {
equal = equal((DirectoryEntry) e1, (DirectoryEntry) e2, msg);
} else if (e1.isDocumentEntry() && e2.isDocumentEntry()) {
equal = equal((DocumentEntry) e1, (DocumentEntry) e2, msg);
} else {
msg.append("One of \"").append(e1).append("\" and \"").append(e2).append("\" is a ").append("document while the other one is a directory.\n");
equal = false;
}
}

/* Iterate over d2 just to make sure that there are no entries in d2
* that are not in d1. */
for (final Entry e2 : d2) {
final String n2 = e2.getName();
Entry e1 = null;
try {
e1 = d1.getEntry(n2);
} catch (FileNotFoundException ex) {
msg.append("Document \"").append(e2).append("\" exitsts, document \"").append(e1).append("\" does not.\n");
equal = false;
break;
}
}
return equal;
}

/**
* <p>Compares two {@link DocumentEntry} instances of a POI file system.
* Documents that are not property set streams must be bitwise identical.
* Property set streams must be logically equal.</p>
*
* @param d1 The first document.
* @param d2 The second document.
* @param msg The method may append human-readable comparison messages to
* this string buffer.
* @return <code>true</code> if the documents are equal, else
* <code>false</code>.
* @throws MarkUnsupportedException if a POI document stream does not
* support the mark() operation.
* @throws NoPropertySetStreamException if the application tries to
* create a property set from a POI document stream that is not a property
* set stream.
* @throws IOException if any I/O exception occurs.
*/
private static boolean equal(final DocumentEntry d1, final DocumentEntry d2,
final StringBuffer msg)
throws NoPropertySetStreamException, MarkUnsupportedException,
UnsupportedEncodingException, IOException {
try (DocumentInputStream dis1 = new DocumentInputStream(d1); DocumentInputStream dis2 = new DocumentInputStream(d2)) {
if (PropertySet.isPropertySetStream(dis1) &&
PropertySet.isPropertySetStream(dis2)) {
final PropertySet ps1 = PropertySetFactory.create(dis1);
final PropertySet ps2 = PropertySetFactory.create(dis2);
if (!ps1.equals(ps2)) {
msg.append("Property sets are not equal.\n");
return false;
}
} else {
int i1, i2;
do {
i1 = dis1.read();
i2 = dis2.read();
if (i1 != i2) {
msg.append("Documents are not equal.\n");
return false;
}
} while (i1 > -1);
}
}
return true;
}

/**
* <p>This class does all the work. Its method {@link
* #processPOIFSReaderEvent(POIFSReaderEvent)} is called for each file in
@@ -274,7 +150,7 @@ public class CopyCompare {
* @param dstName The name of the disk file the destination POIFS is to
* be written to.
*/
public CopyFile(final String dstName) {
CopyFile(final String dstName) {
this.dstName = dstName;
poiFs = new POIFSFileSystem();
}
@@ -332,7 +208,7 @@ public class CopyCompare {

/**
* <p>Writes a {@link PropertySet} to a POI filesystem.</p>
* Writes a {@link PropertySet} to a POI filesystem.
*
* @param poiFs The POI filesystem to write to.
* @param path The file's path in the POI filesystem.
@@ -345,14 +221,25 @@ public class CopyCompare {
final PropertySet ps)
throws WritingNotSupportedException, IOException {
final DirectoryEntry de = getPath(poiFs, path);
final PropertySet mps = new PropertySet(ps);
final PropertySet mps;
try {
if (ps instanceof DocumentSummaryInformation) {
mps = new DocumentSummaryInformation(ps);
} else if (ps instanceof SummaryInformation) {
mps = new SummaryInformation(ps);
} else {
mps = new PropertySet(ps);
}
} catch (UnexpectedPropertySetTypeException e) {
throw new IOException(e);
}
de.createDocument(name, mps.toInputStream());
}

/**
* <p>Copies the bytes from a {@link DocumentInputStream} to a new
* stream in a POI filesystem.</p>
* Copies the bytes from a {@link DocumentInputStream} to a new
* stream in a POI filesystem.
*
* @param poiFs The POI filesystem to write to.
* @param path The source document's path.
@@ -385,9 +272,9 @@ public class CopyCompare {

/**
* <p>Writes the POI file system to a disk file.</p>
* Writes the POI file system to a disk file.
*/
public void close() throws FileNotFoundException, IOException {
public void close() throws IOException {
out = new FileOutputStream(dstName);
poiFs.writeFilesystem(out);
out.close();
@@ -456,11 +343,7 @@ public class CopyCompare {
/* This exception will be thrown if the directory already
* exists. However, since we have full control about directory
* creation we can ensure that this will never happen. */
ex.printStackTrace(System.err);
throw new RuntimeException(ex.toString());
/* FIXME (2): Replace the previous line by the following once we
* no longer need JDK 1.3 compatibility. */
// throw new RuntimeException(ex);
throw new RuntimeException(ex);
}
}
}
@@ -17,7 +17,7 @@

package org.apache.poi.hpsf.examples;

import java.io.FileInputStream;
import java.io.File;
import java.io.IOException;
import java.util.List;

@@ -37,8 +37,9 @@ import org.apache.poi.util.HexDump;
*
* <p>Explanations can be found in the HPSF HOW-TO.</p>
*/
public class ReadCustomPropertySets
{
public final class ReadCustomPropertySets {

private ReadCustomPropertySets() {}

/**
* <p>Runs the example program.</p>
@@ -54,7 +55,7 @@ public class ReadCustomPropertySets

/* Register a listener for *all* documents. */
r.registerListener(new MyPOIFSReaderListener());
r.read(new FileInputStream(filename));
r.read(new File(filename));
}

@@ -117,12 +118,12 @@ public class ReadCustomPropertySets
}
}

static void out(final String msg)
private static void out(final String msg)
{
System.out.println(msg);
}

static String hex(final byte[] bytes)
private static String hex(final byte[] bytes)
{
return HexDump.dump(bytes, 0L, 0);
}
@@ -17,7 +17,7 @@

package org.apache.poi.hpsf.examples;

import java.io.FileInputStream;
import java.io.File;
import java.io.IOException;

import org.apache.poi.hpsf.PropertySetFactory;
@@ -33,8 +33,10 @@ import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
*
* <p>Explanations can be found in the HPSF HOW-TO.</p>
*/
public class ReadTitle
public final class ReadTitle
{
private ReadTitle() {}

/**
* <p>Runs the example program.</p>
*
@@ -47,7 +49,7 @@ public class ReadTitle
final String filename = args[0];
POIFSReader r = new POIFSReader();
r.registerListener(new MyPOIFSReaderListener(), SummaryInformation.DEFAULT_STREAM_NAME);
r.read(new FileInputStream(filename));
r.read(new File(filename));
}
@@ -19,8 +19,7 @@ package org.apache.poi.hpsf.examples;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -28,7 +27,15 @@ import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.poi.hpsf.*;
import org.apache.poi.hpsf.HPSFRuntimeException;
import org.apache.poi.hpsf.MarkUnsupportedException;
import org.apache.poi.hpsf.NoPropertySetStreamException;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.hpsf.Section;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hpsf.Variant;
import org.apache.poi.hpsf.WritingNotSupportedException;
import org.apache.poi.hpsf.wellknown.PropertyIDMap;
import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
@@ -72,8 +79,9 @@ import org.apache.poi.poifs.filesystem.POIFSFileSystem;
*
* <p>Further explanations can be found in the HPSF HOW-TO.</p>
*/
public class WriteAuthorAndTitle
{
public final class WriteAuthorAndTitle {
private WriteAuthorAndTitle() {}

/**
* <p>Runs the example program.</p>
*
@@ -101,10 +109,8 @@ public class WriteAuthorAndTitle
final POIFSReader r = new POIFSReader();
final ModifySICopyTheRest msrl = new ModifySICopyTheRest(dstName);
r.registerListener(msrl);
FileInputStream fis = new FileInputStream(srcName);
r.read(fis);
fis.close();

r.read(new File(srcName));

/* Write the new POIFS to disk. */
msrl.close();
}
@@ -127,14 +133,14 @@ public class WriteAuthorAndTitle

/**
* <p>The constructor of a {@link ModifySICopyTheRest} instance creates
* The constructor of a {@link ModifySICopyTheRest} instance creates
* the target POIFS. It also stores the name of the file the POIFS will
* be written to once it is complete.</p>
* be written to once it is complete.
*
* @param dstName The name of the disk file the destination POIFS is to
* be written to.
*/
public ModifySICopyTheRest(final String dstName)
ModifySICopyTheRest(final String dstName)
{
this.dstName = dstName;
poiFs = new POIFSFileSystem();
@@ -142,8 +148,8 @@ public class WriteAuthorAndTitle

/**
* <p>The method is called by POI's eventing API for each file in the
* origin POIFS.</p>
* The method is called by POI's eventing API for each file in the
* origin POIFS.
*/
@Override
public void processPOIFSReaderEvent(final POIFSReaderEvent event)
@@ -213,7 +219,7 @@ public class WriteAuthorAndTitle
* @param si The property set. It should be a summary information
* property set.
*/
public void editSI(final POIFSFileSystem poiFs,
void editSI(final POIFSFileSystem poiFs,
final POIFSDocumentPath path,
final String name,
final PropertySet si)
@@ -297,9 +303,9 @@ public class WriteAuthorAndTitle

/**
* <p>Writes the POI file system to a disk file.</p>
* Writes the POI file system to a disk file.
*/
public void close() throws FileNotFoundException, IOException
public void close() throws IOException
{
out = new FileOutputStream(dstName);
poiFs.writeFilesystem(out);
@@ -19,7 +19,7 @@ package org.apache.poi.poifs.poibrowser;

import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.FileInputStream;
import java.io.File;
import java.io.IOException;

import javax.swing.JFrame;
@@ -42,12 +42,6 @@ import org.apache.poi.poifs.eventfilesystem.POIFSReader;
public class POIBrowser extends JFrame
{

/**
* <p>The tree's root node must be visible to all methods.</p>
*/
protected MutableTreeNode rootNode;

/**
* <p>Takes a bunch of file names as command line parameters,
@@ -59,8 +53,6 @@ public class POIBrowser extends JFrame
new POIBrowser().run(args);
}

protected void run(String[] args)
{
addWindowListener(new WindowAdapter()
@@ -74,8 +66,11 @@ public class POIBrowser extends JFrame

/* Create the tree model with a root node. The latter is
* invisible but it must be present because a tree model
* always needs a root. */
rootNode = new DefaultMutableTreeNode("POI Filesystems");
* always needs a root.
*
* The tree's root node must be visible to all methods.
*/
MutableTreeNode rootNode = new DefaultMutableTreeNode("POI Filesystems");
DefaultTreeModel treeModel = new DefaultTreeModel(rootNode);

/* Create the tree UI element. */
@@ -85,10 +80,10 @@ public class POIBrowser extends JFrame
/* Add the POI filesystems to the tree. */
int displayedFiles = 0;
for (final String filename : args) {
try (FileInputStream fis = new FileInputStream(filename)) {
try {
POIFSReader r = new POIFSReader();
r.registerListener(new TreeReaderListener(filename, rootNode));
r.read(fis);
r.read(new File(filename));
displayedFiles++;
} catch (IOException ex) {
System.err.println(filename + ": " + ex);
@@ -40,7 +40,6 @@ import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIEncryptor;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
@@ -75,14 +74,6 @@ public abstract class POIDocument implements Closeable {
this.directory = dir;
}

/**
* Constructs from an old-style OPOIFS
*
* @param fs the filesystem the document is read from
*/
protected POIDocument(OPOIFSFileSystem fs) {
this(fs.getRoot());
}
/**
* Constructs from an old-style OPOIFS
*
@@ -195,6 +186,7 @@ public abstract class POIDocument implements Closeable {
* @param setName The property to read
* @return The value of the given property or null if it wasn't found.
*/
@SuppressWarnings("WeakerAccess")
protected PropertySet getPropertySet(String setName) throws IOException {
return getPropertySet(setName, getEncryptionInfo());
}
@@ -207,6 +199,7 @@ public abstract class POIDocument implements Closeable {
* @param encryptionInfo the encryption descriptor in case of cryptoAPI encryption
* @return The value of the given property or null if it wasn't found.
*/
@SuppressWarnings("WeakerAccess")
protected PropertySet getPropertySet(String setName, EncryptionInfo encryptionInfo) throws IOException {
DirectoryNode dirNode = directory;

@@ -329,7 +322,7 @@ public abstract class POIDocument implements Closeable {
* @throws IOException if an error when writing to the
* {@link NPOIFSFileSystem} occurs
*/
protected void writePropertySet(String name, PropertySet set, NPOIFSFileSystem outFS) throws IOException {
private void writePropertySet(String name, PropertySet set, NPOIFSFileSystem outFS) throws IOException {
try {
PropertySet mSet = new PropertySet(set);
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
@@ -420,8 +413,9 @@ public abstract class POIDocument implements Closeable {
/**
* Closes the underlying {@link NPOIFSFileSystem} from which
* the document was read, if any. Has no effect on documents
* opened from an InputStream, or newly created ones.
* <p>Once close() has been called, no further operations
* opened from an InputStream, or newly created ones.<p>
*
* Once {@code close()} has been called, no further operations
* should be called on the document.
*/
@Override
@@ -468,13 +462,10 @@ public abstract class POIDocument implements Closeable {
* to a new POIFSFileSystem
*
* @param newDirectory the new directory
* @return the old/previous directory
*/
@Internal
protected DirectoryNode replaceDirectory(DirectoryNode newDirectory) {
DirectoryNode dn = directory;
protected void replaceDirectory(DirectoryNode newDirectory) {
directory = newDirectory;
return dn;
}

/**
@@ -21,8 +21,6 @@ import java.io.OutputStream;

import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -32,16 +30,10 @@ import org.apache.poi.poifs.filesystem.POIFSFileSystem;
* @since POI 3.15 beta 3
*/
public abstract class POIReadOnlyDocument extends POIDocument {
public POIReadOnlyDocument(DirectoryNode dir) {
protected POIReadOnlyDocument(DirectoryNode dir) {
super(dir);
}
public POIReadOnlyDocument(NPOIFSFileSystem fs) {
super(fs);
}
public POIReadOnlyDocument(OPOIFSFileSystem fs) {
super(fs);
}
public POIReadOnlyDocument(POIFSFileSystem fs) {
protected POIReadOnlyDocument(NPOIFSFileSystem fs) {
super(fs);
}
@@ -26,20 +26,14 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.hssf.OldExcelFormatException;
import org.apache.poi.hssf.extractor.EventBasedExcelExtractor;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.hssf.record.crypto.Biff8EncryptionKey;
import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

@@ -50,24 +44,24 @@ import org.apache.poi.util.POILogger;
* <p>Note 1 - will fail for many file formats if the POI Scratchpad jar is
* not present on the runtime classpath</p>
* <p>Note 2 - for text extractor creation across all formats, use
* {@link org.apache.poi.extractor.ExtractorFactory} contained within
* {@link org.apache.poi.ooxml.extractor.ExtractorFactory} contained within
* the OOXML jar.</p>
* <p>Note 3 - rather than using this, for most cases you would be better
* off switching to <a href="http://tika.apache.org">Apache Tika</a> instead!</p>
*/
@SuppressWarnings("WeakerAccess")
public class OLE2ExtractorFactory {
public final class OLE2ExtractorFactory {
private static final POILogger LOGGER = POILogFactory.getLogger(OLE2ExtractorFactory.class);

/** Should this thread prefer event based over usermodel based extractors? */
private static final ThreadLocal<Boolean> threadPreferEventExtractors = new ThreadLocal<Boolean>() {
@Override
protected Boolean initialValue() { return Boolean.FALSE; }
};
private static final ThreadLocal<Boolean> threadPreferEventExtractors = ThreadLocal.withInitial(() -> Boolean.FALSE);

/** Should all threads prefer event based over usermodel based extractors? */
private static Boolean allPreferEventExtractors;

private OLE2ExtractorFactory() {
}

/**
* Should this thread prefer event based over usermodel based extractors?
* (usermodel extractors tend to be more accurate, but use more memory)
@@ -113,16 +107,16 @@ public class OLE2ExtractorFactory {
return threadPreferEventExtractors.get();
}

@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(POIFSFileSystem fs) throws IOException {
return (T)createExtractor(fs.getRoot());
}
@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(NPOIFSFileSystem fs) throws IOException {
return (T)createExtractor(fs.getRoot());
}
public static <T extends POITextExtractor> T createExtractor(OPOIFSFileSystem fs) throws IOException {
return (T)createExtractor(fs.getRoot());
}

@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(InputStream input) throws IOException {
Class<?> cls = getOOXMLClass();
if (cls != null) {
@@ -165,7 +159,7 @@ public class OLE2ExtractorFactory {
/**
* Create the Extractor, if possible. Generally needs the Scratchpad jar.
* Note that this won't check for embedded OOXML resources either, use
* {@link org.apache.poi.extractor.ExtractorFactory} for that.
* {@link org.apache.poi.ooxml.extractor.ExtractorFactory} for that.
*/
public static POITextExtractor createExtractor(DirectoryNode poifsDir) throws IOException {
// Look for certain entries in the stream, to figure it
@@ -205,6 +199,7 @@ public class OLE2ExtractorFactory {
* empty array. Otherwise, you'll get one open
* {@link POITextExtractor} for each embedded file.
*/
@SuppressWarnings("unused")
public static POITextExtractor[] getEmbededDocsTextExtractors(POIOLE2TextExtractor ext)
throws IOException
{
@@ -254,40 +249,11 @@ public class OLE2ExtractorFactory {
for (InputStream nonPOIF : nonPOIFS) {
try {
e.add(createExtractor(nonPOIF));
} catch (IllegalArgumentException ie) {
// Ignore, just means it didn't contain
// a format we support as yet
LOGGER.log(POILogger.WARN, ie);
} catch (Exception xe) {
// Ignore, invalid format
LOGGER.log(POILogger.WARN, xe);
}
}
return e.toArray(new POITextExtractor[e.size()]);
}

private static POITextExtractor createEncyptedOOXMLExtractor(DirectoryNode poifsDir)
throws IOException {
String pass = Biff8EncryptionKey.getCurrentUserPassword();
if (pass == null) {
pass = Decryptor.DEFAULT_PASSWORD;
}

EncryptionInfo ei = new EncryptionInfo(poifsDir);
Decryptor dec = ei.getDecryptor();
InputStream is = null;
try {
if (!dec.verifyPassword(pass)) {
throw new EncryptedDocumentException("Invalid password specified - use Biff8EncryptionKey.setCurrentUserPassword() before calling extractor");
}
is = dec.getDataStream(poifsDir);
return createExtractor(is);
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new IOException(e);
} finally {
IOUtils.closeQuietly(is);
}
return e.toArray(new POITextExtractor[0]);
}
}
@@ -26,7 +26,6 @@ import org.apache.poi.POIDocument;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -39,9 +38,6 @@ public class HPSFPropertiesOnlyDocument extends POIDocument {
public HPSFPropertiesOnlyDocument(NPOIFSFileSystem fs) {
super(fs.getRoot());
}
public HPSFPropertiesOnlyDocument(OPOIFSFileSystem fs) {
super(fs);
}
public HPSFPropertiesOnlyDocument(POIFSFileSystem fs) {
super(fs);
}
@@ -60,24 +56,18 @@ public class HPSFPropertiesOnlyDocument extends POIDocument {
* Write out, with any properties changes, but nothing else
*/
public void write(File newFile) throws IOException {
POIFSFileSystem fs = POIFSFileSystem.create(newFile);
try {
try (POIFSFileSystem fs = POIFSFileSystem.create(newFile)) {
write(fs);
fs.writeFilesystem();
} finally {
fs.close();
}
}
/**
* Write out, with any properties changes, but nothing else
*/
public void write(OutputStream out) throws IOException {
NPOIFSFileSystem fs = new NPOIFSFileSystem();
try {
try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
write(fs);
fs.writeFilesystem(out);
} finally {
fs.close();
}
}
@@ -345,9 +345,13 @@ public class Property {
* @return the truncated size with a maximum of 4 bytes shorter (3 bytes + trailing 0 of strings)
*/
private static int unpaddedLength(byte[] buf) {
int len;
for (len = buf.length; len > 0 && len > buf.length-4 && buf[len-1] == 0; len--);
return len;
final int end = (buf.length-(buf.length+3)%4);
for (int i = buf.length; i>end; i--) {
if (buf[i-1] != 0) {
return i;
}
}
return end;
}
@@ -34,8 +34,11 @@ import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.util.CodePageUtil;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayInputStream;
import org.apache.poi.util.LittleEndianConsts;
import org.apache.poi.util.LittleEndianOutputStream;
import org.apache.poi.util.NotImplemented;
import org.apache.poi.util.Removal;

/**
* Represents a property set in the Horrible Property Set Format
@@ -175,8 +178,6 @@ public class PropertySet {
*
* @param stream Holds the data making out the property set
* stream.
* @throws MarkUnsupportedException
* if the stream does not support the {@link InputStream#markSupported} method.
* @throws IOException
* if the {@link InputStream} cannot be accessed as needed.
* @exception NoPropertySetStreamException
@@ -185,8 +186,7 @@ public class PropertySet {
* if a character encoding is not supported.
*/
public PropertySet(final InputStream stream)
throws NoPropertySetStreamException, MarkUnsupportedException,
IOException, UnsupportedEncodingException {
throws NoPropertySetStreamException, IOException {
if (!isPropertySetStream(stream)) {
throw new NoPropertySetStreamException();
}
@@ -266,6 +266,7 @@ public class PropertySet {
*
* @param byteOrder The property set stream's low-level "byte order" field.
*/
@SuppressWarnings("WeakerAccess")
public void setByteOrder(int byteOrder) {
this.byteOrder = byteOrder;
}
@@ -298,6 +299,7 @@ public class PropertySet {
*
* @param osVersion The property set stream's low-level "OS version" field.
*/
@SuppressWarnings("WeakerAccess")
public void setOSVersion(int osVersion) {
this.osVersion = osVersion;
}
@@ -315,6 +317,7 @@ public class PropertySet {
*
* @param classID The property set stream's low-level "class ID" field.
*/
@SuppressWarnings("WeakerAccess")
public void setClassID(ClassID classID) {
this.classID = classID;
}
@@ -374,12 +377,10 @@ public class PropertySet {
* {@link InputStream#mark} method.
* @return {@code true} if the stream is a property set
* stream, else {@code false}.
* @throws MarkUnsupportedException if the {@link InputStream}
* does not support the {@link InputStream#mark} method.
* @exception IOException if an I/O error occurs
*/
public static boolean isPropertySetStream(final InputStream stream)
throws MarkUnsupportedException, IOException {
throws IOException {
/*
* Read at most this many bytes.
*/
@@ -408,30 +409,34 @@ public class PropertySet {
* @return {@code true} if the byte array is a property set
* stream, {@code false} if not.
*/
@SuppressWarnings({"unused", "WeakerAccess"})
public static boolean isPropertySetStream(final byte[] src, final int offset, final int length) {
/* FIXME (3): Ensure that at most "length" bytes are read. */
LittleEndianByteArrayInputStream leis = new LittleEndianByteArrayInputStream(src, offset, length);

/*
* Read the header fields of the stream. They must always be
* there.
*/
int o = offset;
final int byteOrder = LittleEndian.getUShort(src, o);
o += LittleEndianConsts.SHORT_SIZE;
if (byteOrder != BYTE_ORDER_ASSERTION) {
try {
final int byteOrder = leis.readUShort();
if (byteOrder != BYTE_ORDER_ASSERTION) {
return false;
}
final int format = leis.readUShort();
if (format != FORMAT_ASSERTION) {
return false;
}
final long osVersion = leis.readUInt();
byte[] clsBuf = new byte[ClassID.LENGTH];
leis.readFully(clsBuf);

final ClassID classID = new ClassID(clsBuf, 0);

final long sectionCount = leis.readUInt();
return (sectionCount >= 0);
} catch (RuntimeException e) {
return false;
}
final int format = LittleEndian.getUShort(src, o);
o += LittleEndianConsts.SHORT_SIZE;
if (format != FORMAT_ASSERTION) {
return false;
}
// final long osVersion = LittleEndian.getUInt(src, offset);
o += LittleEndianConsts.INT_SIZE;
// final ClassID classID = new ClassID(src, offset);
o += ClassID.LENGTH;
final long sectionCount = LittleEndian.getUInt(src, o);
return (sectionCount >= 0);
}

@@ -452,7 +457,7 @@ public class PropertySet {
private void init(final byte[] src, final int offset, final int length)
throws UnsupportedEncodingException {
/* FIXME (3): Ensure that at most "length" bytes are read. */

/*
* Read the stream's header fields.
*/
@@ -504,50 +509,60 @@ public class PropertySet {
* @exception WritingNotSupportedException if HPSF does not yet support
* writing a property's variant type.
*/
public void write(final OutputStream out)
throws WritingNotSupportedException, IOException {
public void write(final OutputStream out) throws IOException, WritingNotSupportedException {

out.write(toBytes());

/* Indicate that we're done */
out.close();
}

private byte[] toBytes() throws WritingNotSupportedException, IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
LittleEndianOutputStream leos = new LittleEndianOutputStream(bos);

/* Write the number of sections in this property set stream. */
final int nrSections = getSectionCount();

/* Write the property set's header. */
LittleEndian.putShort(out, (short) getByteOrder());
LittleEndian.putShort(out, (short) getFormat());
LittleEndian.putInt(getOSVersion(), out);
putClassId(out, getClassID());
LittleEndian.putInt(nrSections, out);
int offset = OFFSET_HEADER;
leos.writeShort(getByteOrder());
leos.writeShort(getFormat());
leos.writeInt(getOSVersion());
putClassId(bos, getClassID());
leos.writeInt(nrSections);

assert(bos.size() == OFFSET_HEADER);

final int[][] offsets = new int[getSectionCount()][2];

/* Write the section list, i.e. the references to the sections. Each
* entry in the section list consist of the section's class ID and the
* section's offset relative to the beginning of the stream. */
offset += nrSections * (ClassID.LENGTH + LittleEndianConsts.INT_SIZE);
final int sectionsBegin = offset;
int secCnt = 0;
for (final Section section : getSections()) {
final ClassID formatID = section.getFormatID();
if (formatID == null) {
throw new NoFormatIDException();
}
putClassId(out, formatID);
LittleEndian.putUInt(offset, out);
try {
offset += section.getSize();
} catch (HPSFRuntimeException ex) {
final Throwable cause = ex.getReason();
if (cause instanceof UnsupportedEncodingException) {
throw new IllegalPropertySetDataException(cause);
}
throw ex;
}
putClassId(bos, formatID);
offsets[secCnt++][0] = bos.size();
// offset dummy - filled later
leos.writeInt(-1);
}

/* Write the sections themselves. */
offset = sectionsBegin;
secCnt = 0;
for (final Section section : getSections()) {
offset += section.write(out);
offsets[secCnt++][1] = bos.size();
section.write(bos);
}

/* Indicate that we're done */
out.close();

byte[] result = bos.toByteArray();
for (int[] off : offsets) {
LittleEndian.putInt(result, off[0], off[1]);
}

return result;
}

/**
@@ -586,15 +601,8 @@ public class PropertySet {
* of a property's variant type.
* @throws IOException if an I/O exception occurs.
*/
public InputStream toInputStream() throws IOException, WritingNotSupportedException {
final ByteArrayOutputStream psStream = new ByteArrayOutputStream();
try {
write(psStream);
} finally {
psStream.close();
}
final byte[] streamData = psStream.toByteArray();
return new ByteArrayInputStream(streamData);
public InputStream toInputStream() throws WritingNotSupportedException, IOException {
return new ByteArrayInputStream(toBytes());
}

/**
@@ -605,7 +613,7 @@ public class PropertySet {
*
* @return The property as a String, or null if unavailable
*/
protected String getPropertyStringValue(final int propertyId) {
String getPropertyStringValue(final int propertyId) {
Object propertyValue = getProperty(propertyId);
return getPropertyStringValue(propertyValue);
}
@@ -724,7 +732,7 @@ public class PropertySet {
* @throws NoSingleSectionException if the {@link PropertySet} has
* more or less than one {@link Section}.
*/
protected boolean getPropertyBooleanValue(final int id) throws NoSingleSectionException {
boolean getPropertyBooleanValue(final int id) throws NoSingleSectionException {
return getFirstSection().getPropertyBooleanValue(id);
}

@@ -742,7 +750,7 @@ public class PropertySet {
* @throws NoSingleSectionException if the {@link PropertySet} has
* more or less than one {@link Section}.
*/
protected int getPropertyIntValue(final int id) throws NoSingleSectionException {
int getPropertyIntValue(final int id) throws NoSingleSectionException {
return getFirstSection().getPropertyIntValue(id);
}

@@ -774,6 +782,7 @@ public class PropertySet {
*
* @return The {@link PropertySet}'s first section.
*/
@SuppressWarnings("WeakerAccess")
public Section getFirstSection() {
if (sections.isEmpty()) {
throw new MissingSectionException("Property set does not contain any sections.");
@@ -787,7 +796,11 @@ public class PropertySet {
* If the {@link PropertySet} has only a single section this method returns it.
*
* @return The singleSection value
*
* @deprecated superfluous convenience method
*/
@Deprecated
@Removal(version="5.0.0")
public Section getSingleSection() {
final int sectionCount = getSectionCount();
if (sectionCount != 1) {
@@ -809,7 +822,7 @@ public class PropertySet {
*/
@Override
public boolean equals(final Object o) {
if (o == null || !(o instanceof PropertySet)) {
if (!(o instanceof PropertySet)) {
return false;
}
final PropertySet ps = (PropertySet) o;
@@ -877,27 +890,28 @@ public class PropertySet {
}

protected void remove1stProperty(long id) {
void remove1stProperty(long id) {
getFirstSection().removeProperty(id);
}

protected void set1stProperty(long id, String value) {
void set1stProperty(long id, String value) {
getFirstSection().setProperty((int)id, value);
}

protected void set1stProperty(long id, int value) {
void set1stProperty(long id, int value) {
getFirstSection().setProperty((int)id, value);
}

protected void set1stProperty(long id, boolean value) {
void set1stProperty(long id, boolean value) {
getFirstSection().setProperty((int)id, value);
}

protected void set1stProperty(long id, byte[] value) {
@SuppressWarnings("SameParameterValue")
void set1stProperty(long id, byte[] value) {
getFirstSection().setProperty((int)id, value);
}

private static void putClassId(final OutputStream out, final ClassID n) throws IOException {
private static void putClassId(final ByteArrayOutputStream out, final ClassID n) {
byte[] b = new byte[16];
n.write(b, 0);
out.write(b, 0, b.length);
@@ -36,6 +36,7 @@ import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayInputStream;
import org.apache.poi.util.LittleEndianConsts;
import org.apache.poi.util.LittleEndianOutputStream;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

@@ -81,7 +82,7 @@ public class Section {
* #getPropertyIntValue} or {@link #getProperty} tried to access a
* property that was not available, else {@code false}.
*/
private boolean wasNull;
private transient boolean wasNull;

/**
* Creates an empty {@link Section}.
@@ -292,6 +293,7 @@ public class Section {
* @param formatID The section's format ID as a byte array. It components
* are in big-endian format.
*/
@SuppressWarnings("WeakerAccess")
public void setFormatID(final byte[] formatID) {
ClassID fid = getFormatID();
if (fid == null) {
@@ -325,7 +327,7 @@ public class Section {
* @return This section's properties.
*/
public Property[] getProperties() {
return properties.values().toArray(new Property[properties.size()]);
return properties.values().toArray(new Property[0]);
}

/**
@@ -375,7 +377,7 @@ public class Section {
* @see #getProperty
*/
public void setProperty(final int id, final int value) {
setProperty(id, Variant.VT_I4, Integer.valueOf(value));
setProperty(id, Variant.VT_I4, value);
}

@@ -390,7 +392,7 @@ public class Section {
* @see #getProperty
*/
public void setProperty(final int id, final long value) {
setProperty(id, Variant.VT_I8, Long.valueOf(value));
setProperty(id, Variant.VT_I8, value);
}

@@ -405,7 +407,7 @@ public class Section {
* @see #getProperty
*/
public void setProperty(final int id, final boolean value) {
setProperty(id, Variant.VT_BOOL, Boolean.valueOf(value));
setProperty(id, Variant.VT_BOOL, value);
}

@@ -487,7 +489,7 @@ public class Section {
*
* @return The property's value
*/
protected int getPropertyIntValue(final long id) {
int getPropertyIntValue(final long id) {
final Number i;
final Object o = getProperty(id);
if (o == null) {
@@ -513,9 +515,9 @@ public class Section {
*
* @return The property's value
*/
protected boolean getPropertyBooleanValue(final int id) {
boolean getPropertyBooleanValue(final int id) {
final Boolean b = (Boolean) getProperty(id);
return b != null && b.booleanValue();
return b != null && b;
}

/**
@@ -529,8 +531,9 @@ public class Section {
* @see #getProperty
* @see Variant
*/
@SuppressWarnings("unused")
protected void setPropertyBooleanValue(final int id, final boolean value) {
setProperty(id, Variant.VT_BOOL, Boolean.valueOf(value));
setProperty(id, Variant.VT_BOOL, value);
}

/**
@@ -588,6 +591,7 @@ public class Section {
* #getPropertyIntValue} or {@link #getProperty} tried to access a
* property that was not available, else {@code false}.
*/
@SuppressWarnings("WeakerAccess")
public boolean wasNull() {
return wasNull;
}
@@ -674,7 +678,7 @@ public class Section {
for (Long id : propIds) {
Property p1 = properties.get(id);
Property p2 = s.properties.get(id);
if (p1 == null || p2 == null || !p1.equals(p2)) {
if (p1 == null || !p1.equals(p2)) {
return false;
}
}
@@ -683,7 +687,7 @@ public class Section {
Map<Long,String> d1 = getDictionary();
Map<Long,String> d2 = s.getDictionary();

return (d1 == null && d2 == null) || (d1 != null && d2 != null && d1.equals(d2));
return (d1 == null && d2 == null) || (d1 != null && d1.equals(d2));
}

/**
@@ -691,6 +695,7 @@ public class Section {
*
* @param id The ID of the property to be removed
*/
@SuppressWarnings("WeakerAccess")
public void removeProperty(final long id) {
if (properties.remove(id) != null) {
sectionBytes.reset();
@@ -731,60 +736,54 @@ public class Section {
codepage = Property.DEFAULT_CODEPAGE;
}

/* The properties are written to this stream. */
final ByteArrayOutputStream propertyStream = new ByteArrayOutputStream();
final int[][] offsets = new int[properties.size()][2];
final ByteArrayOutputStream bos = new ByteArrayOutputStream();
final LittleEndianOutputStream leos = new LittleEndianOutputStream(bos);

/* The property list is established here. After each property that has
* been written to "propertyStream", a property list entry is written to
* "propertyListStream". */
final ByteArrayOutputStream propertyListStream = new ByteArrayOutputStream();
/* Write the section's length - dummy value, fixed later */
leos.writeInt(-1);

/* Maintain the current position in the list. */
int position = 0;
/* Write the section's number of properties: */
leos.writeInt(properties.size());

int propCnt = 0;
for (Property p : properties.values()) {
/* Write the property list entry. */
leos.writeUInt(p.getID());
// dummy offset to be fixed later
offsets[propCnt++][0] = bos.size();
leos.writeInt(-1);
}

/* Increase the position variable by the size of the property list so
* that it points behind the property list and to the beginning of the
* properties themselves. */
position += 2 * LittleEndianConsts.INT_SIZE + getPropertyCount() * 2 * LittleEndianConsts.INT_SIZE;

/* Write the properties and the property list into their respective
* streams: */
propCnt = 0;
for (Property p : properties.values()) {
final long id = p.getID();

/* Write the property list entry. */
LittleEndian.putUInt(id, propertyListStream);
LittleEndian.putUInt(position, propertyListStream);

offsets[propCnt++][1] = bos.size();
/* If the property ID is not equal 0 we write the property and all
* is fine. However, if it equals 0 we have to write the section's
* dictionary which has an implicit type only and an explicit
* value. */
if (id != 0) {
if (p.getID() != 0) {
/* Write the property and update the position to the next
* property. */
position += p.write(propertyStream, codepage);
p.write(bos, codepage);
} else {
position += writeDictionary(propertyStream, codepage);
writeDictionary(bos, codepage);
}
}

/* Write the section: */
int streamLength = LittleEndianConsts.INT_SIZE * 2 + propertyListStream.size() + propertyStream.size();
byte[] result = bos.toByteArray();
LittleEndian.putInt(result, 0, bos.size());

/* Write the section's length: */
LittleEndian.putInt(streamLength, out);
for (int[] off : offsets) {
LittleEndian.putUInt(result, off[0], off[1]);
}

/* Write the section's number of properties: */
LittleEndian.putInt(getPropertyCount(), out);
out.write(result);

/* Write the property list: */
propertyListStream.writeTo(out);

/* Write the properties: */
propertyStream.writeTo(out);

return streamLength;
return bos.size();
}

/**
@@ -795,12 +794,8 @@ public class Section {
* @param codepage The codepage of the string values.
*
* @return {@code true} if dictionary was read successful, {@code false} otherwise
*
* @throws UnsupportedEncodingException if the dictionary's codepage is not
* (yet) supported.
*/
private boolean readDictionary(LittleEndianByteArrayInputStream leis, final int length, final int codepage)
throws UnsupportedEncodingException {
private boolean readDictionary(LittleEndianByteArrayInputStream leis, final int length, final int codepage) {
Map<Long,String> dic = new HashMap<>();

/*
@@ -863,13 +858,12 @@ public class Section {
*
* @param out The output stream to write to.
* @param codepage The codepage to be used to write the dictionary items.
* @return The number of bytes written
* @exception IOException if an I/O exception occurs.
*/
private int writeDictionary(final OutputStream out, final int codepage)
private void writeDictionary(final OutputStream out, final int codepage)
throws IOException {
final byte padding[] = new byte[4];
Map<Long,String> dic = getDictionary();
final Map<Long,String> dic = getDictionary();

LittleEndian.putUInt(dic.size(), out);
int length = LittleEndianConsts.INT_SIZE;
@@ -878,26 +872,23 @@ public class Section {
LittleEndian.putUInt(ls.getKey(), out);
length += LittleEndianConsts.INT_SIZE;

String value = ls.getValue()+"\0";
LittleEndian.putUInt( value.length(), out );
final String value = ls.getValue()+"\0";
final byte bytes[] = CodePageUtil.getBytesInCodePage(value, codepage);
final int len = (codepage == CodePageUtil.CP_UNICODE) ? value.length() : bytes.length;

LittleEndian.putUInt( len, out );
length += LittleEndianConsts.INT_SIZE;

byte bytes[] = CodePageUtil.getBytesInCodePage(value, codepage);
out.write(bytes);
length += bytes.length;

if (codepage == CodePageUtil.CP_UNICODE) {
int pad = (4 - (length & 0x3)) & 0x3;
out.write(padding, 0, pad);
length += pad;
}
final int pad = (codepage == CodePageUtil.CP_UNICODE) ? ((4 - (length & 0x3)) & 0x3) : 0;
out.write(padding, 0, pad);
length += pad;
}

int pad = (4 - (length & 0x3)) & 0x3;
final int pad = (4 - (length & 0x3)) & 0x3;
out.write(padding, 0, pad);
length += pad;

return length;
}

/**
@@ -1,4 +1,3 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -15,27 +14,8 @@
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.util.*;

/**
* A list of SmallDocumentBlocks instances, and methods to manage the list
* common package contains constants and other classes shared across all POIFS subpackages
*/
public class SmallDocumentBlockList
extends BlockListImpl
{
/**
* Constructor SmallDocumentBlockList
*
* @param blocks a list of SmallDocumentBlock instances
*/

public SmallDocumentBlockList(final List<SmallDocumentBlock> blocks)
{
setBlocks(blocks.toArray(new SmallDocumentBlock[blocks.size()]));
}
}

package org.apache.poi.poifs.common;
@@ -27,7 +27,6 @@ import javax.crypto.spec.SecretKeySpec;
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public abstract class Decryptor implements Cloneable {
@@ -87,7 +86,7 @@ public abstract class Decryptor implements Cloneable {
* @param cipher may be null, otherwise the given instance is reset to the new block index
* @param block the block index, e.g. the persist/slide id (hslf)
* @return a new cipher object, if cipher was null, otherwise the reinitialized cipher
* @throws GeneralSecurityException
* @throws GeneralSecurityException if the cipher can't be initialized
*/
public Cipher initCipherForBlock(Cipher cipher, int block)
throws GeneralSecurityException {
@@ -126,10 +125,6 @@ public abstract class Decryptor implements Cloneable {
return getDataStream(fs.getRoot());
}

public InputStream getDataStream(OPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}

public InputStream getDataStream(POIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
@@ -147,6 +142,7 @@ public abstract class Decryptor implements Cloneable {
return integrityHmacKey;
}

@SuppressWarnings("unused")
public byte[] getIntegrityHmacValue() {
return integrityHmacValue;
}
@@ -167,6 +163,7 @@ public abstract class Decryptor implements Cloneable {
this.integrityHmacValue = (integrityHmacValue == null) ? null : integrityHmacValue.clone();
}

@SuppressWarnings("unused")
protected int getBlockSizeInBytes() {
return encryptionInfo.getHeader().getBlockSize();
}
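With the OPOIFSFileSystem overload removed, callers reach getDataStream through an NPOIFSFileSystem (or a DirectoryNode). A minimal sketch of the usual decryption flow, assuming an encrypted file named protected.xlsx and the password "secret", both placeholders rather than anything from this change:

    import java.io.File;
    import java.io.InputStream;

    import org.apache.poi.EncryptedDocumentException;
    import org.apache.poi.poifs.crypt.Decryptor;
    import org.apache.poi.poifs.crypt.EncryptionInfo;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

    public class DecryptExample {
        public static void main(String[] args) throws Exception {
            // Open the OLE2 container that wraps the encrypted package
            try (NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("protected.xlsx"), true)) {
                EncryptionInfo info = new EncryptionInfo(fs);
                Decryptor d = Decryptor.getInstance(info);
                if (!d.verifyPassword("secret")) {
                    throw new EncryptedDocumentException("wrong password");
                }
                // Stream over the decrypted package content
                try (InputStream is = d.getDataStream(fs)) {
                    System.out.println("decrypted, " + is.available() + " bytes immediately available");
                }
            }
        }
    }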
@ -27,7 +27,6 @@ import java.io.IOException;
|
|||
import org.apache.poi.EncryptedDocumentException;
|
||||
import org.apache.poi.poifs.filesystem.DirectoryNode;
|
||||
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
|
||||
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
|
||||
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
|
||||
import org.apache.poi.util.BitField;
|
||||
import org.apache.poi.util.BitFieldFactory;
|
||||
|
@ -59,12 +58,14 @@ public class EncryptionInfo implements Cloneable {
|
|||
* A value that MUST be 0 if document properties are encrypted.
|
||||
* The encryption of document properties is specified in section 2.3.5.4.
|
||||
*/
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public static final BitField flagDocProps = BitFieldFactory.getInstance(0x08);
|
||||
|
||||
/**
|
||||
* A value that MUST be 1 if extensible encryption is used. If this value is 1,
|
||||
* the value of every other field in this structure MUST be 0.
|
||||
*/
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public static final BitField flagExternal = BitFieldFactory.getInstance(0x10);
|
||||
|
||||
/**
|
||||
|
@ -81,13 +82,6 @@ public class EncryptionInfo implements Cloneable {
|
|||
this(fs.getRoot());
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens for decryption
|
||||
*/
|
||||
public EncryptionInfo(OPOIFSFileSystem fs) throws IOException {
|
||||
this(fs.getRoot());
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens for decryption
|
||||
*/
|
||||
|
@ -167,11 +161,11 @@ public class EncryptionInfo implements Cloneable {
|
|||
*
|
||||
* @param encryptionMode see {@link EncryptionMode} for values, {@link EncryptionMode#cryptoAPI} is for
|
||||
* internal use only, as it's record based
|
||||
* @param cipherAlgorithm
|
||||
* @param hashAlgorithm
|
||||
* @param keyBits
|
||||
* @param blockSize
|
||||
* @param chainingMode
|
||||
* @param cipherAlgorithm the cipher algorithm
|
||||
* @param hashAlgorithm the hash algorithm
|
||||
* @param keyBits the bit count of the key
|
||||
* @param blockSize the size of a cipher block
|
||||
* @param chainingMode the chaining mode
|
||||
*
|
||||
* @throws EncryptedDocumentException if the given parameters mismatch, e.g. only certain combinations
|
||||
* of keyBits, blockSize are allowed for a given {@link CipherAlgorithm}
|
||||
|
@ -211,10 +205,11 @@ public class EncryptionInfo implements Cloneable {
|
|||
*
|
||||
* @param encryptionMode the encryption mode
|
||||
* @return an encryption info builder
|
||||
* @throws ClassNotFoundException
|
||||
* @throws IllegalAccessException
|
||||
* @throws InstantiationException
|
||||
* @throws ClassNotFoundException if the builder class is not on the classpath
|
||||
* @throws IllegalAccessException if the builder class can't be loaded
|
||||
* @throws InstantiationException if the builder class can't be loaded
|
||||
*/
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
protected static EncryptionInfoBuilder getBuilder(EncryptionMode encryptionMode)
|
||||
throws ClassNotFoundException, IllegalAccessException, InstantiationException {
|
||||
ClassLoader cl = EncryptionInfo.class.getClassLoader();
|
||||
|
|
|
@@ -26,7 +26,6 @@ import javax.crypto.spec.SecretKeySpec;
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public abstract class Encryptor implements Cloneable {
@@ -55,9 +54,6 @@ public abstract class Encryptor implements Cloneable {
public OutputStream getDataStream(NPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
public OutputStream getDataStream(OPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
public OutputStream getDataStream(POIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
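The write side loses its OPOIFS overload in the same way. A sketch of producing an encrypted package through the remaining NPOIFSFileSystem entry point, assuming agile mode (provided by the poi-ooxml jar), placeholder file names, and that org.apache.poi.util.IOUtils offers a copy(InputStream, OutputStream) helper:

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import java.io.OutputStream;

    import org.apache.poi.poifs.crypt.EncryptionInfo;
    import org.apache.poi.poifs.crypt.EncryptionMode;
    import org.apache.poi.poifs.crypt.Encryptor;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
    import org.apache.poi.util.IOUtils;

    public class EncryptExample {
        public static void main(String[] args) throws Exception {
            try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
                EncryptionInfo info = new EncryptionInfo(EncryptionMode.agile);
                Encryptor enc = info.getEncryptor();
                enc.confirmPassword("secret");

                // Write the plain package into the encrypted stream of the POIFS
                try (OutputStream os = enc.getDataStream(fs);
                     InputStream is = new FileInputStream("plain.xlsx")) {
                    IOUtils.copy(is, os);
                }

                // Persist the OLE2 container holding the encrypted package
                try (FileOutputStream fos = new FileOutputStream("protected.xlsx")) {
                    fs.writeFilesystem(fos);
                }
            }
        }
    }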
@@ -0,0 +1,35 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

/**
* Implementation of the ECMA-376 and MS-proprietary document encryptions<p>
*
* The implementation is split into the following packages:<p>
*
* <ul>
* <li>This package contains common functions for the currently implemented cipher modes.</li>
* <li>the {@link org.apache.poi.poifs.crypt.agile agile} package is part of the poi ooxml jar and provides agile encryption support.</li>
* <li>the {@link org.apache.poi.poifs.crypt.binaryrc4 binaryrc4} package is used for the fixed length RC4 encryption of biff/H**F formats</li>
* <li>the {@link org.apache.poi.poifs.crypt.cryptoapi cryptoapi} package is used for the variable length RC4 encryption of biff/H**F formats</li>
* <li>the {@link org.apache.poi.poifs.crypt.standard standard} package contains classes for the standard encryption ...</li>
* <li>the {@link org.apache.poi.poifs.crypt.xor xor} package contains classes for the xor obfuscation of biff/H**F formats</li>
* </ul>
*
* @see <a href="http://poi.apache.org/encryption.html">Apache POI - Encryption support</a>
* @see <a href="http://msdn.microsoft.com/en-us/library/dd952186(v=office.12).aspx">ECMA-376 Document Encryption</a>
*/
package org.apache.poi.poifs.crypt;
@ -1,44 +0,0 @@
|
|||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
|
||||
<!--
|
||||
====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
====================================================================
|
||||
-->
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body bgcolor="white">
|
||||
|
||||
<p>Implementation of the <a href="http://msdn.microsoft.com/en-us/library/dd952186(v=office.12).aspx">ECMA-376 Document Encryption</a></p>
|
||||
<p>The implementation is split into three packages:</p>
|
||||
<ul>
|
||||
<li>This package contains common functions for both current implemented cipher modes.</li>
|
||||
<li>the {@link org.apache.poi.poifs.crypt.standard standard} package is part of the base poi jar and contains classes for the standard encryption ...</li>
|
||||
<li>the {@link org.apache.poi.poifs.crypt.agile agile} package is part of the poi ooxml jar and the provides agile encryption support.</li>
|
||||
</ul>
|
||||
|
||||
<h2>Related Documentation</h2>
|
||||
|
||||
Some implementations informations can be found under:
|
||||
<ul>
|
||||
<li><a href="http://poi.apache.org/encryption.html">Apache POI - Encryption support</a>
|
||||
</ul>
|
||||
|
||||
<!-- Put @see and @since tags down here. -->
|
||||
@see org.apache.poi.poifs.crypt.standard
|
||||
@see org.apache.poi.poifs.crypt.agile
|
||||
</body>
|
||||
</html>
|
|
@ -1,181 +0,0 @@
|
|||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.poifs.dev;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.InputStream;
|
||||
|
||||
import org.apache.poi.poifs.common.POIFSBigBlockSize;
|
||||
import org.apache.poi.poifs.common.POIFSConstants;
|
||||
import org.apache.poi.poifs.property.DirectoryProperty;
|
||||
import org.apache.poi.poifs.property.Property;
|
||||
import org.apache.poi.poifs.property.PropertyTable;
|
||||
import org.apache.poi.poifs.storage.BlockAllocationTableReader;
|
||||
import org.apache.poi.poifs.storage.HeaderBlock;
|
||||
import org.apache.poi.poifs.storage.ListManagedBlock;
|
||||
import org.apache.poi.poifs.storage.RawDataBlockList;
|
||||
import org.apache.poi.poifs.storage.SmallBlockTableReader;
|
||||
import org.apache.poi.util.HexDump;
|
||||
import org.apache.poi.util.IntList;
|
||||
|
||||
/**
|
||||
* A very low level debugging tool, for printing out core
|
||||
* information on the headers and FAT blocks.
|
||||
* You probably only want to use this if you're trying
|
||||
* to understand POIFS, or if you're trying to track
|
||||
* down the source of corruption in a file.
|
||||
*/
|
||||
public class POIFSHeaderDumper {
|
||||
/**
|
||||
* Display the entries of multiple POIFS files
|
||||
*
|
||||
* @param args the names of the files to be displayed
|
||||
*/
|
||||
public static void main(final String args[]) throws Exception {
|
||||
if (args.length == 0) {
|
||||
System.err.println("Must specify at least one file to view");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
for (int j = 0; j < args.length; j++) {
|
||||
viewFile(args[j]);
|
||||
}
|
||||
}
|
||||
|
||||
public static void viewFile(final String filename) throws Exception {
|
||||
System.out.println("Dumping headers from: " + filename);
|
||||
InputStream inp = new FileInputStream(filename);
|
||||
|
||||
// Header
|
||||
HeaderBlock header_block = new HeaderBlock(inp);
|
||||
displayHeader(header_block);
|
||||
|
||||
// Raw blocks
|
||||
POIFSBigBlockSize bigBlockSize = header_block.getBigBlockSize();
|
||||
RawDataBlockList data_blocks = new RawDataBlockList(inp, bigBlockSize);
|
||||
displayRawBlocksSummary(data_blocks);
|
||||
|
||||
// Main FAT Table
|
||||
BlockAllocationTableReader batReader =
|
||||
new BlockAllocationTableReader(
|
||||
header_block.getBigBlockSize(),
|
||||
header_block.getBATCount(),
|
||||
header_block.getBATArray(),
|
||||
header_block.getXBATCount(),
|
||||
header_block.getXBATIndex(),
|
||||
data_blocks);
|
||||
displayBATReader("Big Blocks", batReader);
|
||||
|
||||
// Properties Table
|
||||
PropertyTable properties =
|
||||
new PropertyTable(header_block, data_blocks);
|
||||
|
||||
// Mini Fat
|
||||
BlockAllocationTableReader sbatReader =
|
||||
SmallBlockTableReader._getSmallDocumentBlockReader(
|
||||
bigBlockSize, data_blocks, properties.getRoot(),
|
||||
header_block.getSBATStart()
|
||||
);
|
||||
displayBATReader("Small Blocks", sbatReader);
|
||||
|
||||
// Summary of the properties
|
||||
displayPropertiesSummary(properties);
|
||||
}
|
||||
|
||||
public static void displayHeader(HeaderBlock header_block) throws Exception {
|
||||
System.out.println("Header Details:");
|
||||
System.out.println(" Block size: " + header_block.getBigBlockSize().getBigBlockSize());
|
||||
System.out.println(" BAT (FAT) header blocks: " + header_block.getBATArray().length);
|
||||
System.out.println(" BAT (FAT) block count: " + header_block.getBATCount());
|
||||
if (header_block.getBATCount() > 0)
|
||||
System.out.println(" BAT (FAT) block 1 at: " + header_block.getBATArray()[0]);
|
||||
System.out.println(" XBAT (FAT) block count: " + header_block.getXBATCount());
|
||||
System.out.println(" XBAT (FAT) block 1 at: " + header_block.getXBATIndex());
|
||||
System.out.println(" SBAT (MiniFAT) block count: " + header_block.getSBATCount());
|
||||
System.out.println(" SBAT (MiniFAT) block 1 at: " + header_block.getSBATStart());
|
||||
System.out.println(" Property table at: " + header_block.getPropertyStart());
|
||||
System.out.println("");
|
||||
}
|
||||
|
||||
public static void displayRawBlocksSummary(RawDataBlockList data_blocks) throws Exception {
|
||||
System.out.println("Raw Blocks Details:");
|
||||
System.out.println(" Number of blocks: " + data_blocks.blockCount());
|
||||
|
||||
for(int i=0; i<Math.min(16, data_blocks.blockCount()); i++) {
|
||||
ListManagedBlock block = data_blocks.get(i);
|
||||
byte[] data = new byte[Math.min(48, block.getData().length)];
|
||||
System.arraycopy(block.getData(), 0, data, 0, data.length);
|
||||
|
||||
System.out.println(" Block #" + i + ":");
|
||||
System.out.println(HexDump.dump(data, 0, 0));
|
||||
}
|
||||
|
||||
System.out.println("");
|
||||
}
|
||||
|
||||
public static void displayBATReader(String type, BlockAllocationTableReader batReader) throws Exception {
|
||||
System.out.println("Sectors, as referenced from the "+type+" FAT:");
|
||||
IntList entries = batReader.getEntries();
|
||||
|
||||
for(int i=0; i<entries.size(); i++) {
|
||||
int bn = entries.get(i);
|
||||
String bnS = Integer.toString(bn);
|
||||
if(bn == POIFSConstants.END_OF_CHAIN) {
|
||||
bnS = "End Of Chain";
|
||||
} else if(bn == POIFSConstants.DIFAT_SECTOR_BLOCK) {
|
||||
bnS = "DI Fat Block";
|
||||
} else if(bn == POIFSConstants.FAT_SECTOR_BLOCK) {
|
||||
bnS = "Normal Fat Block";
|
||||
} else if(bn == POIFSConstants.UNUSED_BLOCK) {
|
||||
bnS = "Block Not Used (Free)";
|
||||
}
|
||||
|
||||
System.out.println(" Block # " + i + " -> " + bnS);
|
||||
}
|
||||
|
||||
System.out.println("");
|
||||
}
|
||||
|
||||
public static void displayPropertiesSummary(PropertyTable properties) {
|
||||
System.out.println("Mini Stream starts at " + properties.getRoot().getStartBlock());
|
||||
System.out.println("Mini Stream length is " + properties.getRoot().getSize());
|
||||
System.out.println();
|
||||
|
||||
System.out.println("Properties and their block start:");
|
||||
displayProperties(properties.getRoot(), "");
|
||||
System.out.println("");
|
||||
}
|
||||
public static void displayProperties(DirectoryProperty prop, String indent) {
|
||||
String nextIndent = indent + " ";
|
||||
System.out.println(indent + "-> " + prop.getName());
|
||||
for (Property cp : prop) {
|
||||
if (cp instanceof DirectoryProperty) {
|
||||
displayProperties((DirectoryProperty)cp, nextIndent);
|
||||
} else {
|
||||
System.out.println(nextIndent + "=> " + cp.getName());
|
||||
System.out.print(nextIndent + " " + cp.getSize() + " bytes in ");
|
||||
if (cp.shouldUseSmallBlocks()) {
|
||||
System.out.print("mini");
|
||||
} else {
|
||||
System.out.print("main");
|
||||
}
|
||||
System.out.println(" stream, starts at " + cp.getStartBlock());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,6 +1,4 @@
|
|||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
|
||||
<!--
|
||||
====================================================================
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
|
@ -15,21 +13,14 @@
|
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
====================================================================
|
||||
-->
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body bgcolor="white">
|
||||
==================================================================== */
|
||||
|
||||
common package contains constants and other classes shared across all POIFS subpackages
|
||||
|
||||
<h2>Related Documentation</h2>
|
||||
|
||||
For overviews, tutorials, examples, guides, and tool documentation, please see:
|
||||
<ul>
|
||||
<li><a href="http://poi.apache.org">Apache POI Project</a>
|
||||
</ul>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
/**
|
||||
* DEV package serves two purposes.
|
||||
*
|
||||
* <ol>
|
||||
* <li>Examples for how to use POIFS</li>
|
||||
* <li>tools for developing and validating POIFS</li>
|
||||
* </ol>
|
||||
*/
|
||||
package org.apache.poi.poifs.dev;
|
|
@ -19,23 +19,19 @@
|
|||
|
||||
package org.apache.poi.poifs.eventfilesystem;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.apache.poi.poifs.filesystem.DocumentInputStream;
|
||||
import org.apache.poi.poifs.filesystem.OPOIFSDocument;
|
||||
import org.apache.poi.poifs.filesystem.NPOIFSDocument;
|
||||
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
|
||||
import org.apache.poi.poifs.filesystem.POIFSDocumentPath;
|
||||
import org.apache.poi.poifs.property.DirectoryProperty;
|
||||
import org.apache.poi.poifs.property.DocumentProperty;
|
||||
import org.apache.poi.poifs.property.NPropertyTable;
|
||||
import org.apache.poi.poifs.property.Property;
|
||||
import org.apache.poi.poifs.property.PropertyTable;
|
||||
import org.apache.poi.poifs.property.RootProperty;
|
||||
import org.apache.poi.poifs.storage.BlockAllocationTableReader;
|
||||
import org.apache.poi.poifs.storage.BlockList;
|
||||
import org.apache.poi.poifs.storage.HeaderBlock;
|
||||
import org.apache.poi.poifs.storage.RawDataBlockList;
|
||||
import org.apache.poi.poifs.storage.SmallBlockTableReader;
|
||||
import org.apache.poi.util.IOUtils;
|
||||
|
||||
/**
|
||||
|
@ -49,19 +45,10 @@ import org.apache.poi.util.IOUtils;
|
|||
|
||||
public class POIFSReader
|
||||
{
|
||||
private final POIFSReaderRegistry registry;
|
||||
private boolean registryClosed;
|
||||
private final POIFSReaderRegistry registry = new POIFSReaderRegistry();
|
||||
private boolean registryClosed = false;
|
||||
private boolean notifyEmptyDirectories;
|
||||
|
||||
/**
|
||||
* Create a POIFSReader
|
||||
*/
|
||||
|
||||
public POIFSReader()
|
||||
{
|
||||
registry = new POIFSReaderRegistry();
|
||||
registryClosed = false;
|
||||
}
|
||||
// private NPOIFSFileSystem poifs;
|
||||
|
||||
/**
|
||||
* Read from an InputStream and process the documents we get
|
||||
|
@ -71,40 +58,41 @@ public class POIFSReader
|
|||
* @exception IOException on errors reading, or on invalid data
|
||||
*/
|
||||
|
||||
public void read(final InputStream stream)
|
||||
throws IOException
|
||||
{
|
||||
public void read(final InputStream stream) throws IOException {
|
||||
try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(stream)) {
|
||||
read(poifs);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a File and process the documents we get
|
||||
*
|
||||
* @param poifsFile the file from which to read the data
|
||||
*
|
||||
* @exception IOException on errors reading, or on invalid data
|
||||
*/
|
||||
public void read(final File poifsFile) throws IOException {
|
||||
try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(poifsFile, true)) {
|
||||
read(poifs);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a NPOIFSFileSystem and process the documents we get
|
||||
*
|
||||
* @param poifs the NPOIFSFileSystem from which to read the data
|
||||
*
|
||||
* @exception IOException on errors reading, or on invalid data
|
||||
*/
|
||||
public void read(final NPOIFSFileSystem poifs) throws IOException {
|
||||
registryClosed = true;
|
||||
|
||||
// read the header block from the stream
|
||||
HeaderBlock header_block = new HeaderBlock(stream);
|
||||
|
||||
// read the rest of the stream into blocks
|
||||
RawDataBlockList data_blocks = new RawDataBlockList(stream, header_block.getBigBlockSize());
|
||||
|
||||
// set up the block allocation table (necessary for the
|
||||
// data_blocks to be manageable
|
||||
new BlockAllocationTableReader(header_block.getBigBlockSize(),
|
||||
header_block.getBATCount(),
|
||||
header_block.getBATArray(),
|
||||
header_block.getXBATCount(),
|
||||
header_block.getXBATIndex(),
|
||||
data_blocks);
|
||||
|
||||
// get property table from the document
|
||||
PropertyTable properties =
|
||||
new PropertyTable(header_block, data_blocks);
|
||||
NPropertyTable properties = poifs.getPropertyTable();
|
||||
|
||||
// process documents
|
||||
RootProperty root = properties.getRoot();
|
||||
processProperties(SmallBlockTableReader
|
||||
.getSmallDocumentBlocks(
|
||||
header_block.getBigBlockSize(),
|
||||
data_blocks, root,
|
||||
header_block.getSBATStart()
|
||||
),
|
||||
data_blocks, root.getChildren(), new POIFSDocumentPath()
|
||||
);
|
||||
processProperties(poifs, root, new POIFSDocumentPath());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -117,14 +105,11 @@ public class POIFSReader
|
|||
* called
|
||||
*/
|
||||
|
||||
public void registerListener(final POIFSReaderListener listener)
|
||||
{
|
||||
if (listener == null)
|
||||
{
|
||||
public void registerListener(final POIFSReaderListener listener) {
|
||||
if (listener == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (registryClosed)
|
||||
{
|
||||
if (registryClosed) {
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
registry.registerListener(listener);
|
||||
|
@ -143,9 +128,7 @@ public class POIFSReader
|
|||
* called
|
||||
*/
|
||||
|
||||
public void registerListener(final POIFSReaderListener listener,
|
||||
final String name)
|
||||
{
|
||||
public void registerListener(final POIFSReaderListener listener, final String name) {
|
||||
registerListener(listener, null, name);
|
||||
}
|
||||
|
||||
|
@ -166,19 +149,14 @@ public class POIFSReader
|
|||
|
||||
public void registerListener(final POIFSReaderListener listener,
|
||||
final POIFSDocumentPath path,
|
||||
final String name)
|
||||
{
|
||||
if ((listener == null) || (name == null) || (name.length() == 0))
|
||||
{
|
||||
final String name) {
|
||||
if ((listener == null) || (name == null) || (name.length() == 0)) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (registryClosed)
|
||||
{
|
||||
if (registryClosed) {
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
registry.registerListener(listener,
|
||||
(path == null) ? new POIFSDocumentPath()
|
||||
: path, name);
|
||||
registry.registerListener(listener, (path == null) ? new POIFSDocumentPath() : path, name);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -186,7 +164,7 @@ public class POIFSReader
|
|||
* If this flag is activated, the {@link POIFSReaderListener listener} receives
|
||||
* {@link POIFSReaderEvent POIFSReaderEvents} with nulled {@code name} and {@code stream}
|
||||
*
|
||||
* @param notifyEmptyDirectories
|
||||
* @param notifyEmptyDirectories if {@code true}, empty directories will be notified
|
||||
*/
|
||||
public void setNotifyEmptyDirectories(boolean notifyEmptyDirectories) {
|
||||
this.notifyEmptyDirectories = notifyEmptyDirectories;
|
||||
|
@@ -198,139 +176,72 @@ public class POIFSReader
*
* @param args names of the files
*
* @exception IOException
* @exception IOException if the files can't be read or have invalid content
*/

public static void main(String args[])
throws IOException
{
if (args.length == 0)
{
public static void main(String args[]) throws IOException {
if (args.length == 0) {
System.err.println("at least one argument required: input filename(s)");
System.exit(1);
}

// register for all
for (String arg : args)
{
POIFSReader reader = new POIFSReader();
POIFSReaderListener listener = new SampleListener();

reader.registerListener(listener);
for (String arg : args) {
POIFSReader reader = new POIFSReader();
reader.registerListener(POIFSReader::readEntry);
System.out.println("reading " + arg);
FileInputStream istream = new FileInputStream(arg);

reader.read(istream);
istream.close();
reader.read(new File(arg));
}
}

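Besides the method-reference listener used in main() above, the registry can restrict a listener to one named document. A small sketch, assuming a file named sample.doc that actually contains an HPSF summary stream:

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.poifs.eventfilesystem.POIFSReader;

    public class ReadSummaryStream {
        public static void main(String[] args) throws IOException {
            POIFSReader reader = new POIFSReader();
            // Only events for the "\005SummaryInformation" document are delivered
            reader.registerListener(
                event -> System.out.println(event.getPath() + "/" + event.getName()),
                "\005SummaryInformation");
            reader.read(new File("sample.doc"));
        }
    }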
private void processProperties(final BlockList small_blocks,
|
||||
final BlockList big_blocks,
|
||||
final Iterator<Property> properties,
|
||||
final POIFSDocumentPath path)
|
||||
throws IOException {
|
||||
if (!properties.hasNext() && notifyEmptyDirectories) {
|
||||
Iterator<POIFSReaderListener> listeners = registry.getListeners(path, ".");
|
||||
while (listeners.hasNext()) {
|
||||
POIFSReaderListener pl = listeners.next();
|
||||
POIFSReaderEvent pe = new POIFSReaderEvent(null, path, null);
|
||||
pl.processPOIFSReaderEvent(pe);
|
||||
}
|
||||
return;
|
||||
}
|
||||
private static void readEntry(POIFSReaderEvent event) {
|
||||
POIFSDocumentPath path = event.getPath();
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
while (properties.hasNext())
|
||||
{
|
||||
Property property = properties.next();
|
||||
String name = property.getName();
|
||||
try (DocumentInputStream istream = event.getStream()) {
|
||||
sb.setLength(0);
|
||||
int pathLength = path.length();
|
||||
for (int k = 0; k < pathLength; k++) {
|
||||
sb.append("/").append(path.getComponent(k));
|
||||
}
|
||||
byte[] data = IOUtils.toByteArray(istream);
|
||||
sb.append("/").append(event.getName()).append(": ").append(data.length).append(" bytes read");
|
||||
System.out.println(sb);
|
||||
} catch (IOException ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
private void processProperties(final NPOIFSFileSystem poifs, DirectoryProperty dir, final POIFSDocumentPath path) {
|
||||
boolean hasChildren = false;
|
||||
for (final Property property : dir) {
|
||||
hasChildren = true;
|
||||
String name = property.getName();
|
||||
|
||||
if (property.isDirectory()) {
|
||||
POIFSDocumentPath new_path = new POIFSDocumentPath(path,new String[]{name});
|
||||
DirectoryProperty dp = (DirectoryProperty) property;
|
||||
processProperties(small_blocks, big_blocks, dp.getChildren(), new_path);
|
||||
processProperties(poifs, (DirectoryProperty) property, new_path);
|
||||
} else {
|
||||
int startBlock = property.getStartBlock();
|
||||
Iterator<POIFSReaderListener> listeners = registry.getListeners(path, name);
|
||||
|
||||
if (listeners.hasNext())
|
||||
{
|
||||
int size = property.getSize();
|
||||
OPOIFSDocument document = null;
|
||||
|
||||
if (property.shouldUseSmallBlocks())
|
||||
{
|
||||
document =
|
||||
new OPOIFSDocument(name, small_blocks
|
||||
.fetchBlocks(startBlock, -1), size);
|
||||
NPOIFSDocument document = null;
|
||||
for (POIFSReaderListener rl : registry.getListeners(path, name)) {
|
||||
if (document == null) {
|
||||
document = new NPOIFSDocument((DocumentProperty)property, poifs);
|
||||
}
|
||||
else
|
||||
{
|
||||
document =
|
||||
new OPOIFSDocument(name, big_blocks
|
||||
.fetchBlocks(startBlock, -1), size);
|
||||
}
|
||||
while (listeners.hasNext())
|
||||
{
|
||||
POIFSReaderListener listener = listeners.next();
|
||||
try (DocumentInputStream dis = new DocumentInputStream(document)) {
|
||||
listener.processPOIFSReaderEvent(new POIFSReaderEvent(dis, path, name));
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
// consume the document's data and discard it
|
||||
if (property.shouldUseSmallBlocks())
|
||||
{
|
||||
small_blocks.fetchBlocks(startBlock, -1);
|
||||
}
|
||||
else
|
||||
{
|
||||
big_blocks.fetchBlocks(startBlock, -1);
|
||||
try (DocumentInputStream dis = new DocumentInputStream(document)) {
|
||||
POIFSReaderEvent pe = new POIFSReaderEvent(dis, path, name);
|
||||
rl.processPOIFSReaderEvent(pe);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static class SampleListener
|
||||
implements POIFSReaderListener
|
||||
{
|
||||
|
||||
/**
|
||||
* Constructor SampleListener
|
||||
*/
|
||||
|
||||
SampleListener()
|
||||
{
|
||||
if (hasChildren || !notifyEmptyDirectories) {
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Method processPOIFSReaderEvent
|
||||
*
|
||||
* @param event
|
||||
*/
|
||||
|
||||
@Override
|
||||
public void processPOIFSReaderEvent(final POIFSReaderEvent event) {
|
||||
DocumentInputStream istream = event.getStream();
|
||||
POIFSDocumentPath path = event.getPath();
|
||||
String name = event.getName();
|
||||
|
||||
try {
|
||||
byte[] data = IOUtils.toByteArray(istream);
|
||||
int pathLength = path.length();
|
||||
|
||||
for (int k = 0; k < pathLength; k++) {
|
||||
System.out.print("/" + path.getComponent(k));
|
||||
}
|
||||
System.out.println("/" + name + ": " + data.length + " bytes read");
|
||||
} catch (IOException ignored) {
|
||||
} finally {
|
||||
IOUtils.closeQuietly(istream);
|
||||
}
|
||||
for (POIFSReaderListener rl : registry.getListeners(path, ".")) {
|
||||
POIFSReaderEvent pe = new POIFSReaderEvent(null, path, null);
|
||||
rl.processPOIFSReaderEvent(pe);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -79,34 +79,21 @@ class POIFSReaderRegistry
|
|||
|
||||
// not an omnivorous listener (if it was, this method is a
|
||||
// no-op)
|
||||
Set<DocumentDescriptor> descriptors = selectiveListeners.get(listener);
|
||||
Set<DocumentDescriptor> descriptors =
|
||||
selectiveListeners.computeIfAbsent(listener, k -> new HashSet<>());
|
||||
|
||||
if (descriptors == null)
|
||||
{
|
||||
// this listener has not registered before
|
||||
DocumentDescriptor descriptor = new DocumentDescriptor(path, documentName);
|
||||
|
||||
// this listener has not registered before
|
||||
descriptors = new HashSet<>();
|
||||
selectiveListeners.put(listener, descriptors);
|
||||
}
|
||||
DocumentDescriptor descriptor = new DocumentDescriptor(path,
|
||||
documentName);
|
||||
|
||||
if (descriptors.add(descriptor))
|
||||
{
|
||||
if (descriptors.add(descriptor)) {
|
||||
|
||||
// this listener wasn't already listening for this
|
||||
// document -- add the listener to the set of
|
||||
// listeners for this document
|
||||
Set<POIFSReaderListener> listeners =
|
||||
chosenDocumentDescriptors.get(descriptor);
|
||||
chosenDocumentDescriptors.computeIfAbsent(descriptor, k -> new HashSet<>());
|
||||
|
||||
if (listeners == null)
|
||||
{
|
||||
|
||||
// nobody was listening for this document before
|
||||
listeners = new HashSet<>();
|
||||
chosenDocumentDescriptors.put(descriptor, listeners);
|
||||
}
|
||||
// nobody was listening for this document before
|
||||
listeners.add(listener);
|
||||
}
|
||||
}
|
||||
|
@ -141,7 +128,7 @@ class POIFSReaderRegistry
|
|||
* @return an Iterator POIFSReaderListeners; may be empty
|
||||
*/
|
||||
|
||||
Iterator<POIFSReaderListener> getListeners(final POIFSDocumentPath path, final String name)
|
||||
Iterable<POIFSReaderListener> getListeners(final POIFSDocumentPath path, final String name)
|
||||
{
|
||||
Set<POIFSReaderListener> rval = new HashSet<>(omnivorousListeners);
|
||||
Set<POIFSReaderListener> selectiveListenersInner =
|
||||
|
@ -151,20 +138,16 @@ class POIFSReaderRegistry
|
|||
{
|
||||
rval.addAll(selectiveListenersInner);
|
||||
}
|
||||
return rval.iterator();
|
||||
return rval;
|
||||
}
|
||||
|
||||
private void removeSelectiveListener(final POIFSReaderListener listener)
|
||||
{
|
||||
Set<DocumentDescriptor> selectedDescriptors = selectiveListeners.remove(listener);
|
||||
|
||||
if (selectedDescriptors != null)
|
||||
{
|
||||
Iterator<DocumentDescriptor> iter = selectedDescriptors.iterator();
|
||||
|
||||
while (iter.hasNext())
|
||||
{
|
||||
dropDocument(listener, iter.next());
|
||||
if (selectedDescriptors != null) {
|
||||
for (DocumentDescriptor selectedDescriptor : selectedDescriptors) {
|
||||
dropDocument(listener, selectedDescriptor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
|
||||
<!--
|
||||
====================================================================
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
|
@ -15,22 +13,11 @@
|
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
====================================================================
|
||||
-->
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body bgcolor="white">
|
||||
==================================================================== */
|
||||
|
||||
DEV package serves two purposes. 1. Examples for how to use POIFS and 2. tools for developing
|
||||
and validating POIFS.
|
||||
|
||||
<h2>Related Documentation</h2>
|
||||
|
||||
For overviews, tutorials, examples, guides, and tool documentation, please see:
|
||||
<ul>
|
||||
<li><a href="http://poi.apache.org">Apache POI Project</a>
|
||||
</ul>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
/**
|
||||
* The eventfilesystem is an efficient method for reading OLE 2 CDF files. It is to OLE 2 CDF what SAX is to XML.
|
||||
*
|
||||
* @see org.apache.poi.poifs.filesystem
|
||||
*/
|
||||
package org.apache.poi.poifs.eventfilesystem;
|
|
@ -1,37 +0,0 @@
|
|||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
|
||||
<!--
|
||||
====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
====================================================================
|
||||
-->
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body bgcolor="white">
|
||||
|
||||
The eventfilesystem is an efficient method for reading OLE 2 CDF files. It is to OLE 2 CDF what SAX is to XML.
|
||||
|
||||
<h2>Related Documentation</h2>
|
||||
|
||||
For overviews, tutorials, examples, guides, and tool documentation, please see:
|
||||
<ul>
|
||||
<li><a href="http://poi.apache.org">Apache POI Project</a>
|
||||
</ul>
|
||||
|
||||
<!-- Put @see and @since tags down here. -->
|
||||
@see org.apache.poi.poifs.filesystem
|
||||
</body>
|
||||
</html>
|
|
@ -44,33 +44,16 @@ public class DirectoryNode
|
|||
{
|
||||
|
||||
// Map of Entry instances, keyed by their names
|
||||
private Map<String,Entry> _byname;
|
||||
// Our list of entries, kept sorted to preserve order
|
||||
private ArrayList<Entry> _entries;
|
||||
private final Map<String,Entry> _byname = new HashMap<>();
|
||||
|
||||
// Our list of entries, kept sorted to preserve order
|
||||
private final ArrayList<Entry> _entries = new ArrayList<>();
|
||||
|
||||
// Only one of these two will exist
|
||||
// the OPOIFSFileSystem we belong to
|
||||
private OPOIFSFileSystem _ofilesystem;
|
||||
// the NPOIFSFileSytem we belong to
|
||||
private NPOIFSFileSystem _nfilesystem;
|
||||
private final NPOIFSFileSystem _nfilesystem;
|
||||
|
||||
// the path described by this document
|
||||
private POIFSDocumentPath _path;
|
||||
|
||||
/**
|
||||
* create a DirectoryNode. This method is not public by design; it
|
||||
* is intended strictly for the internal use of this package
|
||||
*
|
||||
* @param property the DirectoryProperty for this DirectoryEntry
|
||||
* @param filesystem the OPOIFSFileSystem we belong to
|
||||
* @param parent the parent of this entry
|
||||
*/
|
||||
DirectoryNode(final DirectoryProperty property,
|
||||
final OPOIFSFileSystem filesystem,
|
||||
final DirectoryNode parent)
|
||||
{
|
||||
this(property, parent, filesystem, null);
|
||||
}
|
||||
private final POIFSDocumentPath _path;
|
||||
|
||||
/**
|
||||
* create a DirectoryNode. This method is not public by design; it
|
||||
|
@ -83,17 +66,8 @@ public class DirectoryNode
|
|||
DirectoryNode(final DirectoryProperty property,
|
||||
final NPOIFSFileSystem nfilesystem,
|
||||
final DirectoryNode parent)
|
||||
{
|
||||
this(property, parent, null, nfilesystem);
|
||||
}
|
||||
|
||||
private DirectoryNode(final DirectoryProperty property,
|
||||
final DirectoryNode parent,
|
||||
final OPOIFSFileSystem ofilesystem,
|
||||
final NPOIFSFileSystem nfilesystem)
|
||||
{
|
||||
super(property, parent);
|
||||
this._ofilesystem = ofilesystem;
|
||||
this._nfilesystem = nfilesystem;
|
||||
|
||||
if (parent == null)
|
||||
|
@ -103,12 +77,10 @@ public class DirectoryNode
|
|||
else
|
||||
{
|
||||
_path = new POIFSDocumentPath(parent._path, new String[]
|
||||
{
|
||||
property.getName()
|
||||
});
|
||||
{
|
||||
property.getName()
|
||||
});
|
||||
}
|
||||
_byname = new HashMap<>();
|
||||
_entries = new ArrayList<>();
|
||||
Iterator<Property> iter = property.getChildren();
|
||||
|
||||
while (iter.hasNext())
|
||||
|
@ -119,11 +91,7 @@ public class DirectoryNode
|
|||
if (child.isDirectory())
|
||||
{
|
||||
DirectoryProperty childDir = (DirectoryProperty) child;
|
||||
if(_ofilesystem != null) {
|
||||
childNode = new DirectoryNode(childDir, _ofilesystem, this);
|
||||
} else {
|
||||
childNode = new DirectoryNode(childDir, _nfilesystem, this);
|
||||
}
|
||||
childNode = new DirectoryNode(childDir, _nfilesystem, this);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -151,15 +119,6 @@ public class DirectoryNode
|
|||
return _nfilesystem;
|
||||
}
|
||||
|
||||
/**
|
||||
* If this is OPOIFS based, return the NPOIFSFileSystem
|
||||
* that this belong to, otherwise Null if NPOIFS based
|
||||
* @return the filesystem that this belongs to
|
||||
*/
|
||||
public OPOIFSFileSystem getOFileSystem()
|
||||
{
|
||||
return _ofilesystem;
|
||||
}
|
||||
|
||||
/**
|
||||
* If this is NPOIFS based, return the NPOIFSFileSystem
|
||||
|
@ -218,30 +177,7 @@ public class DirectoryNode
|
|||
*
|
||||
* @return the new DocumentEntry
|
||||
*
|
||||
* @exception IOException
|
||||
*/
|
||||
DocumentEntry createDocument(final OPOIFSDocument document)
|
||||
throws IOException
|
||||
{
|
||||
DocumentProperty property = document.getDocumentProperty();
|
||||
DocumentNode rval = new DocumentNode(property, this);
|
||||
|
||||
(( DirectoryProperty ) getProperty()).addChild(property);
|
||||
_ofilesystem.addDocument(document);
|
||||
|
||||
_entries.add(rval);
|
||||
_byname.put(property.getName(), rval);
|
||||
return rval;
|
||||
}
|
||||
|
||||
/**
|
||||
* create a new DocumentEntry
|
||||
*
|
||||
* @param document the new document
|
||||
*
|
||||
* @return the new DocumentEntry
|
||||
*
|
||||
* @exception IOException
|
||||
* @exception IOException if the document can't be created
|
||||
*/
|
||||
DocumentEntry createDocument(final NPOIFSDocument document)
|
||||
throws IOException
|
||||
|
@ -302,14 +238,11 @@ public class DirectoryNode
|
|||
_entries.remove(entry);
|
||||
_byname.remove(entry.getName());
|
||||
|
||||
if(_ofilesystem != null) {
|
||||
_ofilesystem.remove(entry);
|
||||
} else {
|
||||
try {
|
||||
_nfilesystem.remove(entry);
|
||||
} catch (IOException e) {
|
||||
// TODO Work out how to report this, given we can't change the method signature...
|
||||
}
|
||||
try {
|
||||
_nfilesystem.remove(entry);
|
||||
} catch (IOException e) {
|
||||
// TODO Work out how to report this, given we can't change the method signature...
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
return rval;
|
||||
|
@ -411,18 +344,14 @@ public class DirectoryNode
|
|||
*
|
||||
* @return the new DocumentEntry
|
||||
*
|
||||
* @exception IOException
|
||||
* @exception IOException if the document can't be created
|
||||
*/
|
||||
|
||||
public DocumentEntry createDocument(final String name,
|
||||
final InputStream stream)
|
||||
throws IOException
|
||||
{
|
||||
if(_nfilesystem != null) {
|
||||
return createDocument(new NPOIFSDocument(name, _nfilesystem, stream));
|
||||
} else {
|
||||
return createDocument(new OPOIFSDocument(name, stream));
|
||||
}
|
||||
return createDocument(new NPOIFSDocument(name, _nfilesystem, stream));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -434,18 +363,14 @@ public class DirectoryNode
|
|||
*
|
||||
* @return the new DocumentEntry
|
||||
*
|
||||
* @exception IOException
|
||||
* @exception IOException if the document can't be created
|
||||
*/
|
||||
|
||||
public DocumentEntry createDocument(final String name, final int size,
|
||||
final POIFSWriterListener writer)
|
||||
throws IOException
|
||||
{
|
||||
if(_nfilesystem != null) {
|
||||
return createDocument(new NPOIFSDocument(name, size, _nfilesystem, writer));
|
||||
} else {
|
||||
return createDocument(new OPOIFSDocument(name, size, _path, writer));
|
||||
}
|
||||
return createDocument(new NPOIFSDocument(name, size, _nfilesystem, writer));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -455,22 +380,16 @@ public class DirectoryNode
|
|||
*
|
||||
* @return the new DirectoryEntry
|
||||
*
|
||||
* @exception IOException
|
||||
* @exception IOException if the directory can't be created
|
||||
*/
|
||||
|
||||
public DirectoryEntry createDirectory(final String name)
|
||||
throws IOException
|
||||
{
|
||||
DirectoryNode rval;
|
||||
DirectoryProperty property = new DirectoryProperty(name);
|
||||
|
||||
if(_ofilesystem != null) {
|
||||
rval = new DirectoryNode(property, _ofilesystem, this);
|
||||
_ofilesystem.addDirectory(property);
|
||||
} else {
|
||||
rval = new DirectoryNode(property, _nfilesystem, this);
|
||||
_nfilesystem.addDirectory(property);
|
||||
}
|
||||
DirectoryNode rval = new DirectoryNode(property, _nfilesystem, this);
|
||||
_nfilesystem.addDirectory(property);
|
||||
|
||||
(( DirectoryProperty ) getProperty()).addChild(property);
|
||||
_entries.add(rval);
|
||||
|
@ -487,9 +406,9 @@ public class DirectoryNode
|
|||
*
|
||||
* @return the new or updated DocumentEntry
|
||||
*
|
||||
* @exception IOException
|
||||
* @exception IOException if the document can't be created or its content be replaced
|
||||
*/
|
||||
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public DocumentEntry createOrUpdateDocument(final String name,
|
||||
final InputStream stream)
|
||||
throws IOException
|
||||
|
@ -498,15 +417,9 @@ public class DirectoryNode
|
|||
return createDocument(name, stream);
|
||||
} else {
|
||||
DocumentNode existing = (DocumentNode)getEntry(name);
|
||||
if (_nfilesystem != null) {
|
||||
NPOIFSDocument nDoc = new NPOIFSDocument(existing);
|
||||
nDoc.replaceContents(stream);
|
||||
return existing;
|
||||
} else {
|
||||
// Do it the hard way for Old POIFS...
|
||||
deleteEntry(existing);
|
||||
return createDocument(name, stream);
|
||||
}
|
||||
NPOIFSDocument nDoc = new NPOIFSDocument(existing);
|
||||
nDoc.replaceContents(stream);
|
||||
return existing;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -20,7 +20,6 @@ package org.apache.poi.poifs.filesystem;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
import org.apache.poi.util.IOUtils;
|
||||
import org.apache.poi.util.LittleEndianInput;
|
||||
import org.apache.poi.util.SuppressForbidden;
|
||||
|
||||
|
@ -34,10 +33,6 @@ public class DocumentInputStream extends InputStream implements LittleEndianInpu
|
|||
/** returned by read operations if we're at end of document */
|
||||
protected static final int EOF = -1;
|
||||
|
||||
protected static final int SIZE_SHORT = 2;
|
||||
protected static final int SIZE_INT = 4;
|
||||
protected static final int SIZE_LONG = 8;
|
||||
|
||||
private DocumentInputStream delegate;
|
||||
|
||||
/** For use by downstream implementations */
|
||||
|
@ -55,27 +50,7 @@ public class DocumentInputStream extends InputStream implements LittleEndianInpu
|
|||
if (!(document instanceof DocumentNode)) {
|
||||
throw new IOException("Cannot open internal document storage");
|
||||
}
|
||||
DocumentNode documentNode = (DocumentNode)document;
|
||||
DirectoryNode parentNode = (DirectoryNode)document.getParent();
|
||||
|
||||
if(documentNode.getDocument() != null) {
|
||||
delegate = new ODocumentInputStream(document);
|
||||
} else if(parentNode.getOFileSystem() != null) {
|
||||
delegate = new ODocumentInputStream(document);
|
||||
} else if(parentNode.getNFileSystem() != null) {
|
||||
delegate = new NDocumentInputStream(document);
|
||||
} else {
|
||||
throw new IOException("No FileSystem bound on the parent, can't read contents");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an InputStream from the specified Document
|
||||
*
|
||||
* @param document the Document to be read
|
||||
*/
|
||||
public DocumentInputStream(OPOIFSDocument document) {
|
||||
delegate = new ODocumentInputStream(document);
|
||||
delegate = new NDocumentInputStream(document);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -35,7 +35,7 @@ public class DocumentNode
|
|||
{
|
||||
|
||||
// underlying POIFSDocument instance
|
||||
private OPOIFSDocument _document;
|
||||
private NPOIFSDocument _document;
|
||||
|
||||
/**
|
||||
* create a DocumentNode. This method is not public by design; it
|
||||
|
@ -56,7 +56,7 @@ public class DocumentNode
|
|||
*
|
||||
* @return the internal POIFSDocument
|
||||
*/
|
||||
OPOIFSDocument getDocument()
|
||||
NPOIFSDocument getDocument()
|
||||
{
|
||||
return _document;
|
||||
}
|
||||
|
|
|
@ -16,27 +16,31 @@
|
|||
==================================================================== */
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
import org.apache.poi.hpsf.MarkUnsupportedException;
|
||||
import org.apache.poi.hpsf.NoPropertySetStreamException;
|
||||
import org.apache.poi.hpsf.PropertySet;
|
||||
import org.apache.poi.hpsf.PropertySetFactory;
|
||||
import org.apache.poi.util.Internal;
|
||||
|
||||
@Internal
|
||||
public class EntryUtils
|
||||
{
|
||||
public final class EntryUtils {
|
||||
private EntryUtils() {}
|
||||
|
||||
/**
|
||||
* Copies an Entry into a target POIFS directory, recursively
|
||||
*/
|
||||
@Internal
|
||||
public static void copyNodeRecursively( Entry entry, DirectoryEntry target )
|
||||
throws IOException {
|
||||
// logger.log( POILogger.ERROR, "copyNodeRecursively called with "+entry.getName()+
|
||||
// ","+target.getName());
|
||||
throws IOException {
|
||||
if ( entry.isDirectoryEntry() ) {
|
||||
DirectoryEntry dirEntry = (DirectoryEntry)entry;
|
||||
DirectoryEntry newTarget = target.createDirectory( entry.getName() );
|
||||
|
@ -62,8 +66,8 @@ public class EntryUtils
|
|||
* @param targetRoot
|
||||
* is the target Directory to copy to
|
||||
*/
|
||||
public static void copyNodes(DirectoryEntry sourceRoot,
|
||||
DirectoryEntry targetRoot) throws IOException {
|
||||
public static void copyNodes(DirectoryEntry sourceRoot, DirectoryEntry targetRoot)
|
||||
throws IOException {
|
||||
for (Entry entry : sourceRoot) {
|
||||
copyNodeRecursively( entry, targetRoot );
|
||||
}
|
||||
|
@ -77,22 +81,8 @@ public class EntryUtils
|
|||
* @param target
|
||||
* is the target POIFS to copy to
|
||||
*/
|
||||
public static void copyNodes( OPOIFSFileSystem source,
|
||||
OPOIFSFileSystem target ) throws IOException
|
||||
{
|
||||
copyNodes( source.getRoot(), target.getRoot() );
|
||||
}
|
||||
/**
|
||||
* Copies all nodes from one POIFS to the other
|
||||
*
|
||||
* @param source
|
||||
* is the source POIFS to copy from
|
||||
* @param target
|
||||
* is the target POIFS to copy to
|
||||
*/
|
||||
public static void copyNodes( NPOIFSFileSystem source,
|
||||
NPOIFSFileSystem target ) throws IOException
|
||||
{
|
||||
public static void copyNodes( NPOIFSFileSystem source, NPOIFSFileSystem target )
|
||||
throws IOException {
|
||||
copyNodes( source.getRoot(), target.getRoot() );
|
||||
}
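The whole-filesystem copy helpers above pair naturally with the comparison helpers further down in this class; note the class is marked @Internal, so this is a sketch rather than supported API. Copying one POIFS into an empty one and verifying the result, with a placeholder input file name:

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.poifs.filesystem.EntryUtils;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

    public class CopyAndVerify {
        public static void main(String[] args) throws IOException {
            try (NPOIFSFileSystem source = new NPOIFSFileSystem(new File("in.doc"), true);
                 NPOIFSFileSystem target = new NPOIFSFileSystem()) {
                // Recursively copy every entry below the source root into the target root
                EntryUtils.copyNodes(source, target);
                boolean identical = EntryUtils.areDirectoriesIdentical(source.getRoot(), target.getRoot());
                System.out.println("copy identical: " + identical);
            }
        }
    }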
|
||||
|
||||
|
@ -106,27 +96,8 @@ public class EntryUtils
|
|||
* @param target is the target POIFS to copy to
|
||||
* @param excepts is a list of Entry Names to be excluded from the copy
|
||||
*/
|
||||
public static void copyNodes( OPOIFSFileSystem source,
|
||||
OPOIFSFileSystem target, List<String> excepts ) throws IOException
|
||||
{
|
||||
copyNodes(
|
||||
new FilteringDirectoryNode(source.getRoot(), excepts),
|
||||
new FilteringDirectoryNode(target.getRoot(), excepts)
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Copies nodes from one POIFS to the other, minus the excepts.
|
||||
* This delegates the filtering work to {@link FilteringDirectoryNode},
|
||||
* so excepts can be of the form "NodeToExclude" or
|
||||
* "FilteringDirectory/ExcludedChildNode"
|
||||
*
|
||||
* @param source is the source POIFS to copy from
|
||||
* @param target is the target POIFS to copy to
|
||||
* @param excepts is a list of Entry Names to be excluded from the copy
|
||||
*/
|
||||
public static void copyNodes( NPOIFSFileSystem source,
|
||||
NPOIFSFileSystem target, List<String> excepts ) throws IOException
|
||||
{
|
||||
public static void copyNodes( NPOIFSFileSystem source, NPOIFSFileSystem target, List<String> excepts )
|
||||
throws IOException {
|
||||
copyNodes(
|
||||
new FilteringDirectoryNode(source.getRoot(), excepts),
|
||||
new FilteringDirectoryNode(target.getRoot(), excepts)
|
||||
|
@ -142,114 +113,137 @@ public class EntryUtils
|
|||
* use a {@link FilteringDirectoryNode}
|
||||
*/
|
||||
public static boolean areDirectoriesIdentical(DirectoryEntry dirA, DirectoryEntry dirB) {
|
||||
// First, check names
|
||||
if (! dirA.getName().equals(dirB.getName())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Next up, check they have the same number of children
|
||||
if (dirA.getEntryCount() != dirB.getEntryCount()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Next, check entries and their types/sizes
|
||||
Map<String,Integer> aSizes = new HashMap<>();
|
||||
final int isDirectory = -12345;
|
||||
for (Entry a : dirA) {
|
||||
String aName = a.getName();
|
||||
if (a.isDirectoryEntry()) {
|
||||
aSizes.put(aName, isDirectory);
|
||||
} else {
|
||||
aSizes.put(aName, ((DocumentNode)a).getSize());
|
||||
}
|
||||
}
|
||||
for (Entry b : dirB) {
|
||||
String bName = b.getName();
|
||||
if (! aSizes.containsKey(bName)) {
|
||||
// In B but not A
|
||||
return false;
|
||||
}
|
||||
|
||||
int size;
|
||||
if (b.isDirectoryEntry()) {
|
||||
size = isDirectory;
|
||||
} else {
|
||||
size = ((DocumentNode)b).getSize();
|
||||
}
|
||||
if (size != aSizes.get(bName)) {
|
||||
// Either the wrong type, or they're different sizes
|
||||
return false;
|
||||
}
|
||||
|
||||
// Track it as checked
|
||||
aSizes.remove(bName);
|
||||
}
|
||||
if (!aSizes.isEmpty()) {
|
||||
// Nodes were in A but not B
|
||||
return false;
|
||||
}
|
||||
|
||||
// If that passed, check entry contents
|
||||
for (Entry a : dirA) {
|
||||
try {
|
||||
Entry b = dirB.getEntry(a.getName());
|
||||
boolean match;
|
||||
if (a.isDirectoryEntry()) {
|
||||
match = areDirectoriesIdentical(
|
||||
(DirectoryEntry)a, (DirectoryEntry)b);
|
||||
} else {
|
||||
match = areDocumentsIdentical(
|
||||
(DocumentEntry)a, (DocumentEntry)b);
|
||||
}
|
||||
if (!match) return false;
|
||||
} catch(FileNotFoundException e) {
|
||||
// Shouldn't really happen...
|
||||
return false;
|
||||
} catch(IOException e) {
|
||||
// Something's messed up with one document, not a match
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// If we get here, they match!
|
||||
return true;
|
||||
return new DirectoryDelegate(dirA).equals(new DirectoryDelegate(dirB));
|
||||
}
|
||||

    /**
-     * Checks to see if two Documents have the same name
-     * and the same contents. (Their parent directories are
-     * not checked)
+     * Compares two {@link DocumentEntry} instances of a POI file system.
+     * Documents that are not property set streams must be bitwise identical.
+     * Property set streams must be logically equal.<p>
+     *
+     * (Their parent directories are not checked)
     */
-    public static boolean areDocumentsIdentical(DocumentEntry docA, DocumentEntry docB) throws IOException {
-        if (! docA.getName().equals(docB.getName())) {
-            // Names don't match, not the same
-            return false;
-        }
-        if (docA.getSize() != docB.getSize()) {
-            // Wrong sizes, can't have the same contents
-            return false;
-        }
+    @SuppressWarnings("WeakerAccess")
+    public static boolean areDocumentsIdentical(DocumentEntry docA, DocumentEntry docB)
+    throws IOException {
+        try {
+            return new DocumentDelegate(docA).equals(new DocumentDelegate(docB));
+        } catch (RuntimeException e) {
+            if (e.getCause() instanceof IOException) {
+                throw (IOException)e.getCause();
+            } else {
+                throw e;
+            }
+        }
+    }

-        boolean matches = true;
-        DocumentInputStream inpA = null, inpB = null;
-        try {
-            inpA = new DocumentInputStream(docA);
-            inpB = new DocumentInputStream(docB);
-
-            int readA, readB;
-            do {
-                readA = inpA.read();
-                readB = inpB.read();
-                if (readA != readB) {
-                    matches = false;
-                    break;
-                }
-            } while(readA != -1 && readB != -1);
-        } finally {
-            if (inpA != null) inpA.close();
-            if (inpB != null) inpB.close();
-        }
-
-        return matches;
+    private interface POIDelegate {
    }
||||
private static class DirectoryDelegate implements POIDelegate {
|
||||
final DirectoryEntry dir;
|
||||
|
||||
DirectoryDelegate(DirectoryEntry dir) {
|
||||
this.dir = dir;
|
||||
}
|
||||
|
||||
private Map<String,POIDelegate> entries() {
|
||||
return StreamSupport.stream(dir.spliterator(), false)
|
||||
.collect(Collectors.toMap(Entry::getName, DirectoryDelegate::toDelegate));
|
||||
}
|
||||
|
||||
private static POIDelegate toDelegate(Entry entry) {
|
||||
return (entry.isDirectoryEntry())
|
||||
? new DirectoryDelegate((DirectoryEntry)entry)
|
||||
: new DocumentDelegate((DocumentEntry)entry);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof DirectoryDelegate)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
DirectoryDelegate dd = (DirectoryDelegate)other;
|
||||
|
||||
if (this == dd) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// First, check names
|
||||
if (!Objects.equals(dir.getName(),dd.dir.getName())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Next up, check they have the same number of children
|
||||
if (dir.getEntryCount() != dd.dir.getEntryCount()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return entries().equals(dd.entries());
|
||||
}
|
||||
}
|
||||
|
||||
private static class DocumentDelegate implements POIDelegate {
|
||||
final DocumentEntry doc;
|
||||
|
||||
DocumentDelegate(DocumentEntry doc) {
|
||||
this.doc = doc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof DocumentDelegate)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
DocumentDelegate dd = (DocumentDelegate)other;
|
||||
|
||||
if (this == dd) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
if (!Objects.equals(doc.getName(), dd.doc.getName())) {
|
||||
// Names don't match, not the same
|
||||
return false;
|
||||
}
|
||||
|
||||
try (DocumentInputStream inpA = new DocumentInputStream(doc);
|
||||
DocumentInputStream inpB = new DocumentInputStream(dd.doc)) {
|
||||
|
||||
if (PropertySet.isPropertySetStream(inpA) &&
|
||||
PropertySet.isPropertySetStream(inpB)) {
|
||||
final PropertySet ps1 = PropertySetFactory.create(inpA);
|
||||
final PropertySet ps2 = PropertySetFactory.create(inpB);
|
||||
return ps1.equals(ps2);
|
||||
} else {
|
||||
return isEqual(inpA, inpB);
|
||||
}
|
||||
} catch (MarkUnsupportedException | NoPropertySetStreamException | IOException ex) {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
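The property-set branch in equals() above compares the two streams logically via PropertySetFactory rather than byte-for-byte. For illustration only (my own sketch, not part of the patch; "\005SummaryInformation" is the standard stream name), the same idea in isolation looks like this:

import java.io.InputStream;

import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

public class ComparePropertySets {
    static boolean sameSummaryInfo(NPOIFSFileSystem fsA, NPOIFSFileSystem fsB) throws Exception {
        try (InputStream a = fsA.createDocumentInputStream("\005SummaryInformation");
             InputStream b = fsB.createDocumentInputStream("\005SummaryInformation")) {
            PropertySet psA = PropertySetFactory.create(a);
            PropertySet psB = PropertySetFactory.create(b);
            // equals() compares sections and properties, not raw bytes,
            // so differences in property ordering do not matter
            return psA.equals(psB);
        }
    }
}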
|
||||
|
||||
private static boolean isEqual(DocumentInputStream i1, DocumentInputStream i2)
|
||||
throws IOException {
|
||||
final byte[] buf1 = new byte[4*1024];
|
||||
final byte[] buf2 = new byte[4*1024];
|
||||
try {
|
||||
int len;
|
||||
while ((len = i1.read(buf1)) > 0) {
|
||||
i2.readFully(buf2,0,len);
|
||||
for(int i=0;i<len;i++) {
|
||||
if (buf1[i] != buf2[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
// is the end of the second file also.
|
||||
return i2.read() < 0;
|
||||
} catch(EOFException | RuntimeException ioe) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
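Putting the pieces together, two filesystems can be compared while ignoring selected streams by wrapping both roots in FilteringDirectoryNode, which is how the CopyCompare example elsewhere in this commit drives these helpers. A rough usage sketch (mine, not part of the patch; the excluded stream name is just an example):

import java.io.File;
import java.util.Collections;

import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

public class ComparePoifs {
    public static void main(String[] args) throws Exception {
        try (NPOIFSFileSystem fsA = new NPOIFSFileSystem(new File(args[0]), true);
             NPOIFSFileSystem fsB = new NPOIFSFileSystem(new File(args[1]), true)) {
            // Ignore a stream that may legitimately differ between otherwise equal files
            DirectoryEntry rootA = new FilteringDirectoryNode(
                    fsA.getRoot(), Collections.singleton("\005DocumentSummaryInformation"));
            DirectoryEntry rootB = new FilteringDirectoryNode(
                    fsB.getRoot(), Collections.singleton("\005DocumentSummaryInformation"));
            System.out.println(EntryUtils.areDirectoriesIdentical(rootA, rootB));
        }
    }
}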
|
||||
|
|
|
@@ -17,6 +17,10 @@

package org.apache.poi.poifs.filesystem;

+import static org.apache.poi.util.LittleEndianConsts.INT_SIZE;
+import static org.apache.poi.util.LittleEndianConsts.LONG_SIZE;
+import static org.apache.poi.util.LittleEndianConsts.SHORT_SIZE;
+
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;

@@ -71,9 +75,9 @@ public final class NDocumentInputStream extends DocumentInputStream {
        _document_size = document.getSize();
        _closed = false;

-        if (_document_size < 0) {
-            //throw new RecordFormatException("Document size can't be < 0");
-        }
+        // can't be asserted ... see bug 61300
+        // assert (_document_size >= 0) : "Document size can't be < 0";

        DocumentNode doc = (DocumentNode)document;
        DocumentProperty property = (DocumentProperty)doc.getProperty();
        _document = new NPOIFSDocument(
@@ -284,33 +288,33 @@ public final class NDocumentInputStream extends DocumentInputStream {

    @Override
    public long readLong() {
-        checkAvaliable(SIZE_LONG);
-        byte[] data = new byte[SIZE_LONG];
-        readFully(data, 0, SIZE_LONG);
+        checkAvaliable(LONG_SIZE);
+        byte[] data = new byte[LONG_SIZE];
+        readFully(data, 0, LONG_SIZE);
        return LittleEndian.getLong(data, 0);
    }

    @Override
    public short readShort() {
-        checkAvaliable(SIZE_SHORT);
-        byte[] data = new byte[SIZE_SHORT];
-        readFully(data, 0, SIZE_SHORT);
+        checkAvaliable(SHORT_SIZE);
+        byte[] data = new byte[SHORT_SIZE];
+        readFully(data, 0, SHORT_SIZE);
        return LittleEndian.getShort(data);
    }

    @Override
    public int readInt() {
-        checkAvaliable(SIZE_INT);
-        byte[] data = new byte[SIZE_INT];
-        readFully(data, 0, SIZE_INT);
+        checkAvaliable(INT_SIZE);
+        byte[] data = new byte[INT_SIZE];
+        readFully(data, 0, INT_SIZE);
        return LittleEndian.getInt(data);
    }

    @Override
    public int readUShort() {
-        checkAvaliable(SIZE_SHORT);
-        byte[] data = new byte[SIZE_SHORT];
-        readFully(data, 0, SIZE_SHORT);
+        checkAvaliable(SHORT_SIZE);
+        byte[] data = new byte[SHORT_SIZE];
+        readFully(data, 0, SHORT_SIZE);
        return LittleEndian.getUShort(data);
    }
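For reference, the LittleEndianConsts/LittleEndian pair used above decodes multi-byte values with the least significant byte first. A tiny standalone check (my own sketch, not part of the patch):

import org.apache.poi.util.LittleEndian;

public class LittleEndianDemo {
    public static void main(String[] args) {
        // Bytes on disk arrive low byte first, so {0x34, 0x12} decodes to 0x1234
        byte[] two = { 0x34, 0x12 };
        System.out.println(Integer.toHexString(LittleEndian.getUShort(two)));   // prints 1234

        byte[] four = { 0x78, 0x56, 0x34, 0x12 };
        System.out.println(Integer.toHexString(LittleEndian.getInt(four)));     // prints 12345678
    }
}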
|
|
|
@ -17,6 +17,8 @@
|
|||
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
@ -25,7 +27,6 @@ import java.nio.ByteBuffer;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.poi.poifs.common.POIFSConstants;
|
||||
import org.apache.poi.poifs.dev.POIFSViewable;
|
||||
|
@@ -37,7 +38,7 @@ import org.apache.poi.util.IOUtils;
 * This class manages a document in the NIO POIFS filesystem.
 * This is the {@link NPOIFSFileSystem} version.
 */
-public final class NPOIFSDocument implements POIFSViewable {
+public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer> {

   //arbitrarily selected; may need to increase
   private static final int MAX_RECORD_LENGTH = 100_000;
@@ -51,7 +52,7 @@ public final class NPOIFSDocument implements POIFSViewable {
    /**
     * Constructor for an existing Document
     */
-    public NPOIFSDocument(DocumentNode document) throws IOException {
+    public NPOIFSDocument(DocumentNode document) {
        this((DocumentProperty)document.getProperty(),
             ((DirectoryNode)document.getParent()).getNFileSystem());
    }

@@ -59,9 +60,7 @@ public final class NPOIFSDocument implements POIFSViewable {
    /**
     * Constructor for an existing Document
     */
-    public NPOIFSDocument(DocumentProperty property, NPOIFSFileSystem filesystem)
-       throws IOException
-    {
+    public NPOIFSDocument(DocumentProperty property, NPOIFSFileSystem filesystem) {
        this._property = property;
        this._filesystem = filesystem;
||||
|
@ -90,7 +89,8 @@ public final class NPOIFSDocument implements POIFSViewable {
|
|||
|
||||
// Build the property for it
|
||||
this._property = new DocumentProperty(name, length);
|
||||
_property.setStartBlock(_stream.getStartBlock());
|
||||
_property.setStartBlock(_stream.getStartBlock());
|
||||
_property.setDocument(this);
|
||||
}
|
||||
|
||||
public NPOIFSDocument(String name, int size, NPOIFSFileSystem filesystem, POIFSWriterListener writer)
|
||||
|
@ -116,7 +116,8 @@ public final class NPOIFSDocument implements POIFSViewable {
|
|||
|
||||
// And build the property for it
|
||||
this._property = new DocumentProperty(name, size);
|
||||
_property.setStartBlock(_stream.getStartBlock());
|
||||
_property.setStartBlock(_stream.getStartBlock());
|
||||
_property.setDocument(this);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -128,7 +129,8 @@ public final class NPOIFSDocument implements POIFSViewable {
        bis.mark(bigBlockSize);

        // Do we need to store as a mini stream or a full one?
-        if(bis.skip(bigBlockSize) < bigBlockSize) {
+        long streamBlockSize = IOUtils.skipFully(bis, bigBlockSize);
+        if (streamBlockSize < bigBlockSize) {
            _stream = new NPOIFSStream(_filesystem.getMiniStore());
            _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
        } else {

@@ -140,26 +142,21 @@ public final class NPOIFSDocument implements POIFSViewable {
        bis.reset();

        // Store it
-        OutputStream os = _stream.getOutputStream();
-        byte buf[] = new byte[1024];
-        int length = 0;
-
-        for (int readBytes; (readBytes = bis.read(buf)) != -1; length += readBytes) {
-            os.write(buf, 0, readBytes);
+        final long length;
+        try (OutputStream os = _stream.getOutputStream()) {
+            length = IOUtils.copy(bis, os);
+
+            // Pad to the end of the block with -1s
+            int usedInBlock = (int) (length % _block_size);
+            if (usedInBlock != 0 && usedInBlock != _block_size) {
+                int toBlockEnd = _block_size - usedInBlock;
+                byte[] padding = IOUtils.safelyAllocate(toBlockEnd, MAX_RECORD_LENGTH);
+                Arrays.fill(padding, (byte) 0xFF);
+                os.write(padding);
+            }
        }

-        // Pad to the end of the block with -1s
-        int usedInBlock = length % _block_size;
-        if (usedInBlock != 0 && usedInBlock != _block_size) {
-            int toBlockEnd = _block_size - usedInBlock;
-            byte[] padding = IOUtils.safelyAllocate(toBlockEnd, MAX_RECORD_LENGTH);
-            Arrays.fill(padding, (byte)0xFF);
-            os.write(padding);
-        }
-
-        // Tidy and return the length
-        os.close();
-        return length;
+        return (int)length;
    }

    /**
@@ -178,15 +175,15 @@ public final class NPOIFSDocument implements POIFSViewable {
    int getDocumentBlockSize() {
        return _block_size;
    }

-    Iterator<ByteBuffer> getBlockIterator() {
-        if(getSize() > 0) {
-            return _stream.getBlockIterator();
-        } else {
-            List<ByteBuffer> empty = Collections.emptyList();
-            return empty.iterator();
-        }
-    }
+    @Override
+    public Iterator<ByteBuffer> iterator() {
+        return getBlockIterator();
+    }
+
+    Iterator<ByteBuffer> getBlockIterator() {
+        return (getSize() > 0 ? _stream : Collections.<ByteBuffer>emptyList()).iterator();
+    }

/**
|
||||
* @return size of the document
|
||||
|
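Because NPOIFSDocument now implements Iterable<ByteBuffer>, its storage blocks can be walked with a plain for-each loop. A minimal sketch (my own, not part of the patch; the file name and stream name are hypothetical):

import java.io.File;
import java.nio.ByteBuffer;

import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.NPOIFSDocument;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

public class DumpBlockSizes {
    public static void main(String[] args) throws Exception {
        try (NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("in.xls"), true)) {
            DocumentNode node = (DocumentNode) fs.getRoot().getEntry("Workbook");
            // The DocumentNode constructor no longer declares IOException after this change
            NPOIFSDocument doc = new NPOIFSDocument(node);
            for (ByteBuffer block : doc) {
                System.out.println("block with " + block.remaining() + " bytes remaining");
            }
        }
    }
}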
@ -240,7 +237,7 @@ public final class NPOIFSDocument implements POIFSViewable {
|
|||
* store
|
||||
*/
|
||||
public Iterator<Object> getViewableIterator() {
|
||||
return Collections.emptyList().iterator();
|
||||
return emptyList().iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -261,10 +258,7 @@ public final class NPOIFSDocument implements POIFSViewable {
|
|||
* @return short description
|
||||
*/
|
||||
public String getShortDescription() {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
|
||||
buffer.append("Document: \"").append(_property.getName()).append("\"");
|
||||
buffer.append(" size = ").append(getSize());
|
||||
return buffer.toString();
|
||||
return "Document: \"" + _property.getName() + "\" size = " + getSize();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,341 +0,0 @@
|
|||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.poi.poifs.storage.DataInputBlock;
|
||||
import org.apache.poi.util.RecordFormatException;
|
||||
|
||||
/**
|
||||
* This class provides methods to read a DocumentEntry managed by a
|
||||
* {@link OPOIFSFileSystem} instance.
|
||||
*/
|
||||
public final class ODocumentInputStream extends DocumentInputStream {
|
||||
/** current offset into the Document */
|
||||
private int _current_offset;
|
||||
|
||||
/** current marked offset into the Document (used by mark and reset) */
|
||||
private int _marked_offset;
|
||||
|
||||
/** the Document's size */
|
||||
private final int _document_size;
|
||||
|
||||
/** have we been closed? */
|
||||
private boolean _closed;
|
||||
|
||||
/** the actual Document */
|
||||
private final OPOIFSDocument _document;
|
||||
|
||||
/** the data block containing the current stream pointer */
|
||||
private DataInputBlock _currentBlock;
|
||||
|
||||
/**
|
||||
* Create an InputStream from the specified DocumentEntry
|
||||
*
|
||||
* @param document the DocumentEntry to be read
|
||||
*
|
||||
* @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
|
||||
* been deleted?)
|
||||
*/
|
||||
public ODocumentInputStream(DocumentEntry document) throws IOException {
|
||||
if (!(document instanceof DocumentNode)) {
|
||||
throw new IOException("Cannot open internal document storage");
|
||||
}
|
||||
DocumentNode documentNode = (DocumentNode)document;
|
||||
if (documentNode.getDocument() == null) {
|
||||
throw new IOException("Cannot open internal document storage");
|
||||
}
|
||||
|
||||
_current_offset = 0;
|
||||
_marked_offset = 0;
|
||||
_document_size = document.getSize();
|
||||
if (_document_size < 0) {
|
||||
throw new RecordFormatException("document_size cannot be < 0");
|
||||
}
|
||||
_closed = false;
|
||||
_document = documentNode.getDocument();
|
||||
_currentBlock = getDataInputBlock(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an InputStream from the specified Document
|
||||
*
|
||||
* @param document the Document to be read
|
||||
*/
|
||||
public ODocumentInputStream(OPOIFSDocument document) {
|
||||
_current_offset = 0;
|
||||
_marked_offset = 0;
|
||||
_document_size = document.getSize();
|
||||
_closed = false;
|
||||
_document = document;
|
||||
_currentBlock = getDataInputBlock(0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int available() {
|
||||
if (_closed) {
|
||||
throw new IllegalStateException("cannot perform requested operation on a closed stream");
|
||||
}
|
||||
return _document_size - _current_offset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
_closed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mark(int ignoredReadlimit) {
|
||||
_marked_offset = _current_offset;
|
||||
}
|
||||
|
||||
private DataInputBlock getDataInputBlock(int offset) {
|
||||
return _document.getDataInputBlock(offset);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
dieIfClosed();
|
||||
if (atEOD()) {
|
||||
return EOF;
|
||||
}
|
||||
int result = _currentBlock.readUByte();
|
||||
_current_offset++;
|
||||
if (_currentBlock.available() < 1) {
|
||||
_currentBlock = getDataInputBlock(_current_offset);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] b, int off, int len) throws IOException {
|
||||
dieIfClosed();
|
||||
if (b == null) {
|
||||
throw new IllegalArgumentException("buffer must not be null");
|
||||
}
|
||||
if (off < 0 || len < 0 || b.length < off + len) {
|
||||
throw new IndexOutOfBoundsException("can't read past buffer boundaries");
|
||||
}
|
||||
if (len == 0) {
|
||||
return 0;
|
||||
}
|
||||
if (atEOD()) {
|
||||
return EOF;
|
||||
}
|
||||
int limit = Math.min(_document_size - _current_offset, len);
|
||||
readFully(b, off, limit);
|
||||
return limit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Repositions this stream to the position at the time the mark() method was
|
||||
* last called on this input stream. If mark() has not been called this
|
||||
* method repositions the stream to its beginning.
|
||||
*/
|
||||
@Override
|
||||
public void reset() {
|
||||
_current_offset = _marked_offset;
|
||||
_currentBlock = getDataInputBlock(_current_offset);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long skip(long n) throws IOException {
|
||||
dieIfClosed();
|
||||
if (n < 0) {
|
||||
return 0;
|
||||
}
|
||||
int new_offset = _current_offset + (int) n;
|
||||
|
||||
if (new_offset < _current_offset) {
|
||||
|
||||
// wrap around in converting a VERY large long to an int
|
||||
new_offset = _document_size;
|
||||
} else if (new_offset > _document_size) {
|
||||
new_offset = _document_size;
|
||||
}
|
||||
long rval = new_offset - _current_offset;
|
||||
|
||||
_current_offset = new_offset;
|
||||
_currentBlock = getDataInputBlock(_current_offset);
|
||||
return rval;
|
||||
}
|
||||
|
||||
private void dieIfClosed() throws IOException {
|
||||
if (_closed) {
|
||||
throw new IOException("cannot perform requested operation on a closed stream");
|
||||
}
|
||||
}
|
||||
|
||||
private boolean atEOD() {
|
||||
return _current_offset == _document_size;
|
||||
}
|
||||
|
||||
private void checkAvaliable(int requestedSize) {
|
||||
if (_closed) {
|
||||
throw new IllegalStateException("cannot perform requested operation on a closed stream");
|
||||
}
|
||||
if (requestedSize > _document_size - _current_offset) {
|
||||
throw new RuntimeException("Buffer underrun - requested " + requestedSize
|
||||
+ " bytes but " + (_document_size - _current_offset) + " was available");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte readByte() {
|
||||
return (byte) readUByte();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double readDouble() {
|
||||
return Double.longBitsToDouble(readLong());
|
||||
}
|
||||
|
||||
@Override
|
||||
public short readShort() {
|
||||
return (short) readUShort();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFully(byte[] buf, int off, int len) {
|
||||
checkAvaliable(len);
|
||||
|
||||
Function<Integer,DataInputBlock> nextDataInputBlock = (offset) -> {
|
||||
if (offset >= _document_size) {
|
||||
_currentBlock = null;
|
||||
} else if (offset != _current_offset) {
|
||||
_currentBlock = getDataInputBlock(offset);
|
||||
}
|
||||
return _currentBlock;
|
||||
};
|
||||
|
||||
_current_offset = readFullyInternal(buf, off, len, _current_offset, _document_size, nextDataInputBlock);
|
||||
}
|
||||
|
||||
/* package */ static int readFullyInternal(byte[] buf, int off, int len, int currentOffset, int maxSize, Function<Integer,DataInputBlock> nextDataInputBlock) {
|
||||
DataInputBlock currentBlock = nextDataInputBlock.apply(currentOffset);
|
||||
if (currentBlock == null) {
|
||||
throw new IllegalStateException("reached end of document stream unexpectedly");
|
||||
}
|
||||
int blockAvailable = currentBlock.available();
|
||||
if (blockAvailable > len) {
|
||||
currentBlock.readFully(buf, off, len);
|
||||
return currentOffset + len;
|
||||
}
|
||||
// else read big amount in chunks
|
||||
int remaining = len;
|
||||
int writePos = off;
|
||||
int offset = currentOffset;
|
||||
while (remaining > 0) {
|
||||
final boolean blockIsExpiring = remaining >= blockAvailable;
|
||||
final int reqSize = (blockIsExpiring) ? blockAvailable : remaining;
|
||||
currentBlock.readFully(buf, writePos, reqSize);
|
||||
remaining -= reqSize;
|
||||
writePos += reqSize;
|
||||
offset += reqSize;
|
||||
if (blockIsExpiring) {
|
||||
if (offset >= maxSize) {
|
||||
if (remaining > 0) {
|
||||
throw new IllegalStateException(
|
||||
"reached end of document stream unexpectedly");
|
||||
}
|
||||
break;
|
||||
}
|
||||
currentBlock = nextDataInputBlock.apply(offset);
|
||||
if (currentBlock == null) {
|
||||
throw new IllegalStateException(
|
||||
"reached end of document stream unexpectedly");
|
||||
}
|
||||
blockAvailable = currentBlock.available();
|
||||
}
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long readLong() {
|
||||
checkAvaliable(SIZE_LONG);
|
||||
int blockAvailable = _currentBlock.available();
|
||||
long result;
|
||||
if (blockAvailable > SIZE_LONG) {
|
||||
result = _currentBlock.readLongLE();
|
||||
} else {
|
||||
DataInputBlock nextBlock = getDataInputBlock(_current_offset + blockAvailable);
|
||||
if (blockAvailable == SIZE_LONG) {
|
||||
result = _currentBlock.readLongLE();
|
||||
} else {
|
||||
result = nextBlock.readLongLE(_currentBlock, blockAvailable);
|
||||
}
|
||||
_currentBlock = nextBlock;
|
||||
}
|
||||
_current_offset += SIZE_LONG;
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readInt() {
|
||||
checkAvaliable(SIZE_INT);
|
||||
int blockAvailable = _currentBlock.available();
|
||||
int result;
|
||||
if (blockAvailable > SIZE_INT) {
|
||||
result = _currentBlock.readIntLE();
|
||||
} else {
|
||||
DataInputBlock nextBlock = getDataInputBlock(_current_offset + blockAvailable);
|
||||
if (blockAvailable == SIZE_INT) {
|
||||
result = _currentBlock.readIntLE();
|
||||
} else {
|
||||
result = nextBlock.readIntLE(_currentBlock, blockAvailable);
|
||||
}
|
||||
_currentBlock = nextBlock;
|
||||
}
|
||||
_current_offset += SIZE_INT;
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readUShort() {
|
||||
checkAvaliable(SIZE_SHORT);
|
||||
int blockAvailable = _currentBlock.available();
|
||||
int result;
|
||||
if (blockAvailable > SIZE_SHORT) {
|
||||
result = _currentBlock.readUShortLE();
|
||||
} else {
|
||||
DataInputBlock nextBlock = getDataInputBlock(_current_offset + blockAvailable);
|
||||
if (blockAvailable == SIZE_SHORT) {
|
||||
result = _currentBlock.readUShortLE();
|
||||
} else {
|
||||
result = nextBlock.readUShortLE(_currentBlock);
|
||||
}
|
||||
_currentBlock = nextBlock;
|
||||
}
|
||||
_current_offset += SIZE_SHORT;
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readUByte() {
|
||||
checkAvaliable(1);
|
||||
int result = _currentBlock.readUByte();
|
||||
_current_offset++;
|
||||
if (_currentBlock.available() < 1) {
|
||||
_currentBlock = getDataInputBlock(_current_offset);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
|
@ -1,516 +0,0 @@
|
|||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.poi.poifs.common.POIFSBigBlockSize;
|
||||
import org.apache.poi.poifs.common.POIFSConstants;
|
||||
import org.apache.poi.poifs.dev.POIFSViewable;
|
||||
import org.apache.poi.poifs.property.DocumentProperty;
|
||||
import org.apache.poi.poifs.property.Property;
|
||||
import org.apache.poi.poifs.storage.BlockWritable;
|
||||
import org.apache.poi.poifs.storage.DataInputBlock;
|
||||
import org.apache.poi.poifs.storage.DocumentBlock;
|
||||
import org.apache.poi.poifs.storage.ListManagedBlock;
|
||||
import org.apache.poi.poifs.storage.RawDataBlock;
|
||||
import org.apache.poi.poifs.storage.SmallDocumentBlock;
|
||||
import org.apache.poi.util.HexDump;
|
||||
|
||||
/**
|
||||
* This class manages a document in a old-style
|
||||
* OPOIFS filesystem.
|
||||
*/
|
||||
public final class OPOIFSDocument implements BATManaged, BlockWritable, POIFSViewable {
|
||||
private static final DocumentBlock[] EMPTY_BIG_BLOCK_ARRAY = { };
|
||||
private static final SmallDocumentBlock[] EMPTY_SMALL_BLOCK_ARRAY = { };
|
||||
private DocumentProperty _property;
|
||||
private int _size;
|
||||
|
||||
private final POIFSBigBlockSize _bigBigBlockSize;
|
||||
|
||||
// one of these stores will be valid
|
||||
private SmallBlockStore _small_store;
|
||||
private BigBlockStore _big_store;
|
||||
|
||||
/**
|
||||
* Constructor from large blocks
|
||||
*
|
||||
* @param name the name of the POIFSDocument
|
||||
* @param blocks the big blocks making up the POIFSDocument
|
||||
* @param length the actual length of the POIFSDocument
|
||||
*/
|
||||
public OPOIFSDocument(String name, RawDataBlock[] blocks, int length) throws IOException {
|
||||
_size = length;
|
||||
if(blocks.length == 0) {
|
||||
_bigBigBlockSize = POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
|
||||
} else {
|
||||
_bigBigBlockSize = (blocks[0].getBigBlockSize() == POIFSConstants.SMALLER_BIG_BLOCK_SIZE ?
|
||||
POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS :
|
||||
POIFSConstants.LARGER_BIG_BLOCK_SIZE_DETAILS
|
||||
);
|
||||
}
|
||||
|
||||
_big_store = new BigBlockStore(_bigBigBlockSize, convertRawBlocksToBigBlocks(blocks));
|
||||
_property = new DocumentProperty(name, _size);
|
||||
_small_store = new SmallBlockStore(_bigBigBlockSize, EMPTY_SMALL_BLOCK_ARRAY);
|
||||
_property.setDocument(this);
|
||||
}
|
||||
|
||||
// TODO - awkward typing going on here
|
||||
private static DocumentBlock[] convertRawBlocksToBigBlocks(ListManagedBlock[] blocks) throws IOException {
|
||||
DocumentBlock[] result = new DocumentBlock[blocks.length];
|
||||
for (int i = 0; i < result.length; i++) {
|
||||
result[i] = new DocumentBlock((RawDataBlock)blocks[i]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
private static SmallDocumentBlock[] convertRawBlocksToSmallBlocks(ListManagedBlock[] blocks) {
|
||||
if (blocks instanceof SmallDocumentBlock[]) {
|
||||
return (SmallDocumentBlock[]) blocks;
|
||||
}
|
||||
SmallDocumentBlock[] result = new SmallDocumentBlock[blocks.length];
|
||||
System.arraycopy(blocks, 0, result, 0, blocks.length);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor from small blocks
|
||||
*
|
||||
* @param name the name of the POIFSDocument
|
||||
* @param blocks the small blocks making up the POIFSDocument
|
||||
* @param length the actual length of the POIFSDocument
|
||||
*/
|
||||
public OPOIFSDocument(String name, SmallDocumentBlock[] blocks, int length) {
|
||||
_size = length;
|
||||
|
||||
if(blocks.length == 0) {
|
||||
_bigBigBlockSize = POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
|
||||
} else {
|
||||
_bigBigBlockSize = blocks[0].getBigBlockSize();
|
||||
}
|
||||
|
||||
_big_store = new BigBlockStore(_bigBigBlockSize, EMPTY_BIG_BLOCK_ARRAY);
|
||||
_property = new DocumentProperty(name, _size);
|
||||
_small_store = new SmallBlockStore(_bigBigBlockSize, blocks);
|
||||
_property.setDocument(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor from small blocks
|
||||
*
|
||||
* @param name the name of the POIFSDocument
|
||||
* @param blocks the small blocks making up the POIFSDocument
|
||||
* @param length the actual length of the POIFSDocument
|
||||
*/
|
||||
public OPOIFSDocument(String name, POIFSBigBlockSize bigBlockSize, ListManagedBlock[] blocks, int length) throws IOException {
|
||||
_size = length;
|
||||
_bigBigBlockSize = bigBlockSize;
|
||||
_property = new DocumentProperty(name, _size);
|
||||
_property.setDocument(this);
|
||||
if (Property.isSmall(_size)) {
|
||||
_big_store = new BigBlockStore(bigBlockSize,EMPTY_BIG_BLOCK_ARRAY);
|
||||
_small_store = new SmallBlockStore(bigBlockSize,convertRawBlocksToSmallBlocks(blocks));
|
||||
} else {
|
||||
_big_store = new BigBlockStore(bigBlockSize,convertRawBlocksToBigBlocks(blocks));
|
||||
_small_store = new SmallBlockStore(bigBlockSize,EMPTY_SMALL_BLOCK_ARRAY);
|
||||
}
|
||||
}
|
||||
public OPOIFSDocument(String name, ListManagedBlock[] blocks, int length) throws IOException {
|
||||
this(name, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, blocks, length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param name the name of the POIFSDocument
|
||||
* @param stream the InputStream we read data from
|
||||
*/
|
||||
public OPOIFSDocument(String name, POIFSBigBlockSize bigBlockSize, InputStream stream) throws IOException {
|
||||
List<DocumentBlock> blocks = new ArrayList<>();
|
||||
|
||||
_size = 0;
|
||||
_bigBigBlockSize = bigBlockSize;
|
||||
while (true) {
|
||||
DocumentBlock block = new DocumentBlock(stream, bigBlockSize);
|
||||
int blockSize = block.size();
|
||||
|
||||
if (blockSize > 0) {
|
||||
blocks.add(block);
|
||||
_size += blockSize;
|
||||
}
|
||||
if (block.partiallyRead()) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
DocumentBlock[] bigBlocks = blocks.toArray(new DocumentBlock[blocks.size()]);
|
||||
|
||||
_big_store = new BigBlockStore(bigBlockSize,bigBlocks);
|
||||
_property = new DocumentProperty(name, _size);
|
||||
_property.setDocument(this);
|
||||
if (_property.shouldUseSmallBlocks()) {
|
||||
_small_store = new SmallBlockStore(bigBlockSize,SmallDocumentBlock.convert(bigBlockSize,bigBlocks, _size));
|
||||
_big_store = new BigBlockStore(bigBlockSize,new DocumentBlock[0]);
|
||||
} else {
|
||||
_small_store = new SmallBlockStore(bigBlockSize,EMPTY_SMALL_BLOCK_ARRAY);
|
||||
}
|
||||
}
|
||||
public OPOIFSDocument(String name, InputStream stream) throws IOException {
|
||||
this(name, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, stream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param name the name of the POIFSDocument
|
||||
* @param size the length of the POIFSDocument
|
||||
* @param path the path of the POIFSDocument
|
||||
* @param writer the writer who will eventually write the document contents
|
||||
*/
|
||||
public OPOIFSDocument(String name, int size, POIFSBigBlockSize bigBlockSize, POIFSDocumentPath path, POIFSWriterListener writer) {
|
||||
_size = size;
|
||||
_bigBigBlockSize = bigBlockSize;
|
||||
_property = new DocumentProperty(name, _size);
|
||||
_property.setDocument(this);
|
||||
if (_property.shouldUseSmallBlocks()) {
|
||||
_small_store = new SmallBlockStore(_bigBigBlockSize, path, name, size, writer);
|
||||
_big_store = new BigBlockStore(_bigBigBlockSize, EMPTY_BIG_BLOCK_ARRAY);
|
||||
} else {
|
||||
_small_store = new SmallBlockStore(_bigBigBlockSize, EMPTY_SMALL_BLOCK_ARRAY);
|
||||
_big_store = new BigBlockStore(_bigBigBlockSize, path, name, size, writer);
|
||||
}
|
||||
}
|
||||
public OPOIFSDocument(String name, int size, POIFSDocumentPath path, POIFSWriterListener writer) {
|
||||
this(name, size, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, path, writer);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return array of SmallDocumentBlocks; may be empty, cannot be null
|
||||
*/
|
||||
public SmallDocumentBlock[] getSmallBlocks() {
|
||||
return _small_store.getBlocks();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return size of the document
|
||||
*/
|
||||
public int getSize() {
|
||||
return _size;
|
||||
}
|
||||
|
||||
/**
|
||||
* read data from the internal stores
|
||||
*
|
||||
* @param buffer the buffer to write to
|
||||
* @param offset the offset into our storage to read from
|
||||
* This method is currently (Oct 2008) only used by test code. Perhaps it can be deleted
|
||||
*/
|
||||
void read(byte[] buffer, int offset) {
|
||||
ODocumentInputStream.readFullyInternal(buffer, 0, buffer.length, offset, _size, this::getDataInputBlock);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return <code>null</code> if <tt>offset</tt> points to the end of the document stream
|
||||
*/
|
||||
DataInputBlock getDataInputBlock(int offset) {
|
||||
if (offset >= _size) {
|
||||
if (offset > _size) {
|
||||
throw new RuntimeException("Request for Offset " + offset + " doc size is " + _size);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
if (_property.shouldUseSmallBlocks()) {
|
||||
return SmallDocumentBlock.getDataInputBlock(_small_store.getBlocks(), offset);
|
||||
}
|
||||
return DocumentBlock.getDataInputBlock(_big_store.getBlocks(), offset);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the instance's DocumentProperty
|
||||
*/
|
||||
|
||||
DocumentProperty getDocumentProperty() {
|
||||
return _property;
|
||||
}
|
||||
|
||||
/* ********** START implementation of BlockWritable ********** */
|
||||
|
||||
/**
|
||||
* Write the storage to an OutputStream
|
||||
*
|
||||
* @param stream the OutputStream to which the stored data should be written
|
||||
*/
|
||||
public void writeBlocks(OutputStream stream) throws IOException {
|
||||
_big_store.writeBlocks(stream);
|
||||
}
|
||||
|
||||
/* ********** END implementation of BlockWritable ********** */
|
||||
/* ********** START implementation of BATManaged ********** */
|
||||
|
||||
/**
|
||||
* Return the number of BigBlock's this instance uses
|
||||
*
|
||||
* @return count of BigBlock instances
|
||||
*/
|
||||
public int countBlocks() {
|
||||
return _big_store.countBlocks();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the start block for this instance
|
||||
*
|
||||
* @param index index into the array of blocks making up the filesystem
|
||||
*/
|
||||
public void setStartBlock(int index) {
|
||||
_property.setStartBlock(index);
|
||||
}
|
||||
|
||||
/* ********** END implementation of BATManaged ********** */
|
||||
/* ********** START begin implementation of POIFSViewable ********** */
|
||||
|
||||
/**
|
||||
* Get an array of objects, some of which may implement POIFSViewable
|
||||
*
|
||||
* @return an array of Object; may not be null, but may be empty
|
||||
*/
|
||||
public Object[] getViewableArray() {
|
||||
String result = "<NO DATA>";
|
||||
|
||||
try {
|
||||
BlockWritable[] blocks = null;
|
||||
|
||||
if (_big_store.isValid()) {
|
||||
blocks = _big_store.getBlocks();
|
||||
} else if (_small_store.isValid()) {
|
||||
blocks = _small_store.getBlocks();
|
||||
}
|
||||
if (blocks != null) {
|
||||
ByteArrayOutputStream output = new ByteArrayOutputStream();
|
||||
for (BlockWritable bw : blocks) {
|
||||
bw.writeBlocks(output);
|
||||
}
|
||||
int length = Math.min(output.size(), _property.getSize());
|
||||
result = HexDump.dump(output.toByteArray(), 0, 0, length);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
result = e.getMessage();
|
||||
}
|
||||
return new String[]{ result };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an Iterator of objects, some of which may implement POIFSViewable
|
||||
*
|
||||
* @return an Iterator; may not be null, but may have an empty back end
|
||||
* store
|
||||
*/
|
||||
public Iterator<Object> getViewableIterator() {
|
||||
return Collections.emptyList().iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Give viewers a hint as to whether to call getViewableArray or
|
||||
* getViewableIterator
|
||||
*
|
||||
* @return <code>true</code> if a viewer should call getViewableArray,
|
||||
* <code>false</code> if a viewer should call getViewableIterator
|
||||
*/
|
||||
public boolean preferArray() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a short description of the object, to be used when a
|
||||
* POIFSViewable object has not provided its contents.
|
||||
*
|
||||
* @return short description
|
||||
*/
|
||||
public String getShortDescription() {
|
||||
return "Document: \"" + _property.getName() + "\"" +
|
||||
" size = " + getSize();
|
||||
}
|
||||
|
||||
/* ********** END begin implementation of POIFSViewable ********** */
|
||||
private static final class SmallBlockStore {
|
||||
private SmallDocumentBlock[] _smallBlocks;
|
||||
private final POIFSDocumentPath _path;
|
||||
private final String _name;
|
||||
private final int _size;
|
||||
private final POIFSWriterListener _writer;
|
||||
private final POIFSBigBlockSize _bigBlockSize;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param blocks blocks to construct the store from
|
||||
*/
|
||||
SmallBlockStore(POIFSBigBlockSize bigBlockSize, SmallDocumentBlock[] blocks) {
|
||||
_bigBlockSize = bigBlockSize;
|
||||
_smallBlocks = blocks.clone();
|
||||
this._path = null;
|
||||
this._name = null;
|
||||
this._size = -1;
|
||||
this._writer = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for a small block store that will be written later
|
||||
*
|
||||
* @param path path of the document
|
||||
* @param name name of the document
|
||||
* @param size length of the document
|
||||
* @param writer the object that will eventually write the document
|
||||
*/
|
||||
SmallBlockStore(POIFSBigBlockSize bigBlockSize, POIFSDocumentPath path,
|
||||
String name, int size, POIFSWriterListener writer) {
|
||||
_bigBlockSize = bigBlockSize;
|
||||
_smallBlocks = new SmallDocumentBlock[0];
|
||||
this._path = path;
|
||||
this._name = name;
|
||||
this._size = size;
|
||||
this._writer = writer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return <code>true</code> if this store is a valid source of data
|
||||
*/
|
||||
boolean isValid() {
|
||||
return _smallBlocks.length > 0 || _writer != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the SmallDocumentBlocks
|
||||
*/
|
||||
SmallDocumentBlock[] getBlocks() {
|
||||
if (isValid() && _writer != null) {
|
||||
ByteArrayOutputStream stream = new ByteArrayOutputStream(_size);
|
||||
DocumentOutputStream dstream = new DocumentOutputStream(stream, _size);
|
||||
|
||||
_writer.processPOIFSWriterEvent(new POIFSWriterEvent(dstream, _path, _name, _size));
|
||||
_smallBlocks = SmallDocumentBlock.convert(_bigBlockSize, stream.toByteArray(), _size);
|
||||
}
|
||||
return _smallBlocks;
|
||||
}
|
||||
} // end private class SmallBlockStore
|
||||
|
||||
private static final class BigBlockStore {
|
||||
private DocumentBlock[] bigBlocks;
|
||||
private final POIFSDocumentPath _path;
|
||||
private final String _name;
|
||||
private final int _size;
|
||||
private final POIFSWriterListener _writer;
|
||||
private final POIFSBigBlockSize _bigBlockSize;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param blocks the blocks making up the store
|
||||
*/
|
||||
BigBlockStore(POIFSBigBlockSize bigBlockSize, DocumentBlock[] blocks) {
|
||||
_bigBlockSize = bigBlockSize;
|
||||
bigBlocks = blocks.clone();
|
||||
_path = null;
|
||||
_name = null;
|
||||
_size = -1;
|
||||
_writer = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for a big block store that will be written later
|
||||
*
|
||||
* @param path path of the document
|
||||
* @param name name of the document
|
||||
* @param size length of the document
|
||||
* @param writer the object that will eventually write the document
|
||||
*/
|
||||
BigBlockStore(POIFSBigBlockSize bigBlockSize, POIFSDocumentPath path,
|
||||
String name, int size, POIFSWriterListener writer) {
|
||||
_bigBlockSize = bigBlockSize;
|
||||
bigBlocks = new DocumentBlock[0];
|
||||
_path = path;
|
||||
_name = name;
|
||||
_size = size;
|
||||
_writer = writer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return <code>true</code> if this store is a valid source of data
|
||||
*/
|
||||
boolean isValid() {
|
||||
return bigBlocks.length > 0 || _writer != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the DocumentBlocks
|
||||
*/
|
||||
DocumentBlock[] getBlocks() {
|
||||
if (isValid() && _writer != null) {
|
||||
ByteArrayOutputStream stream = new ByteArrayOutputStream(_size);
|
||||
DocumentOutputStream dstream = new DocumentOutputStream(stream, _size);
|
||||
|
||||
_writer.processPOIFSWriterEvent(new POIFSWriterEvent(dstream, _path, _name, _size));
|
||||
bigBlocks = DocumentBlock.convert(_bigBlockSize, stream.toByteArray(), _size);
|
||||
}
|
||||
return bigBlocks;
|
||||
}
|
||||
|
||||
/**
|
||||
* write the blocks to a stream
|
||||
*
|
||||
* @param stream the stream to which the data is to be written
|
||||
*/
|
||||
void writeBlocks(OutputStream stream) throws IOException {
|
||||
if (isValid()) {
|
||||
if (_writer != null) {
|
||||
DocumentOutputStream dstream = new DocumentOutputStream(stream, _size);
|
||||
|
||||
_writer.processPOIFSWriterEvent(new POIFSWriterEvent(dstream, _path, _name, _size));
|
||||
dstream.writeFiller(countBlocks() * _bigBlockSize.getBigBlockSize(),
|
||||
DocumentBlock.getFillByte());
|
||||
} else {
|
||||
for (DocumentBlock bigBlock : bigBlocks) {
|
||||
bigBlock.writeBlocks(stream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return number of big blocks making up this document
|
||||
*/
|
||||
int countBlocks() {
|
||||
|
||||
if (isValid()) {
|
||||
if (_writer == null) {
|
||||
return bigBlocks.length;
|
||||
}
|
||||
return (_size + _bigBlockSize.getBigBlockSize() - 1)
|
||||
/ _bigBlockSize.getBigBlockSize();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
} // end private class BigBlockStore
|
||||
}
|
|
@ -1,570 +0,0 @@
|
|||
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.poi.poifs.common.POIFSBigBlockSize;
|
||||
import org.apache.poi.poifs.common.POIFSConstants;
|
||||
import org.apache.poi.poifs.dev.POIFSViewable;
|
||||
import org.apache.poi.poifs.property.DirectoryProperty;
|
||||
import org.apache.poi.poifs.property.Property;
|
||||
import org.apache.poi.poifs.property.PropertyTable;
|
||||
import org.apache.poi.poifs.storage.BATBlock;
|
||||
import org.apache.poi.poifs.storage.BlockAllocationTableReader;
|
||||
import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
|
||||
import org.apache.poi.poifs.storage.BlockList;
|
||||
import org.apache.poi.poifs.storage.BlockWritable;
|
||||
import org.apache.poi.poifs.storage.HeaderBlock;
|
||||
import org.apache.poi.poifs.storage.HeaderBlockWriter;
|
||||
import org.apache.poi.poifs.storage.RawDataBlockList;
|
||||
import org.apache.poi.poifs.storage.SmallBlockTableReader;
|
||||
import org.apache.poi.poifs.storage.SmallBlockTableWriter;
|
||||
import org.apache.poi.util.CloseIgnoringInputStream;
|
||||
import org.apache.poi.util.POILogFactory;
|
||||
import org.apache.poi.util.POILogger;
|
||||
|
||||
/**
|
||||
* <p>This is the main class of the POIFS system; it manages the entire
|
||||
* life cycle of the filesystem.</p>
|
||||
* <p>This is the older version, which uses more memory, and doesn't
|
||||
* support in-place writes.</p>
|
||||
*/
|
||||
public class OPOIFSFileSystem
|
||||
implements POIFSViewable
|
||||
{
|
||||
private static final POILogger _logger =
|
||||
POILogFactory.getLogger(OPOIFSFileSystem.class);
|
||||
|
||||
/**
|
||||
* Convenience method for clients that want to avoid the auto-close behaviour of the constructor.
|
||||
*/
|
||||
public static InputStream createNonClosingInputStream(InputStream is) {
|
||||
return new CloseIgnoringInputStream(is);
|
||||
}
|
||||
|
||||
private PropertyTable _property_table;
|
||||
private List<OPOIFSDocument> _documents;
|
||||
private DirectoryNode _root;
|
||||
|
||||
/**
|
||||
* What big block size the file uses. Most files
|
||||
* use 512 bytes, but a few use 4096
|
||||
*/
|
||||
private POIFSBigBlockSize bigBlockSize =
|
||||
POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
|
||||
|
||||
/**
|
||||
* Constructor, intended for writing
|
||||
*/
|
||||
public OPOIFSFileSystem()
|
||||
{
|
||||
HeaderBlock header_block = new HeaderBlock(bigBlockSize);
|
||||
_property_table = new PropertyTable(header_block);
|
||||
_documents = new ArrayList<>();
|
||||
_root = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a OPOIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
|
||||
* EOF. The stream is always closed.<p>
|
||||
*
|
||||
* Some streams are usable after reaching EOF (typically those that return <code>true</code>
|
||||
* for <tt>markSupported()</tt>). In the unlikely case that the caller has such a stream
|
||||
* <i>and</i> needs to use it after this constructor completes, a work around is to wrap the
|
||||
* stream in order to trap the <tt>close()</tt> call. A convenience method (
|
||||
* <tt>createNonClosingInputStream()</tt>) has been provided for this purpose:
|
||||
* <pre>
|
||||
* InputStream wrappedStream = OPOIFSFileSystem.createNonClosingInputStream(is);
|
||||
* HSSFWorkbook wb = new HSSFWorkbook(wrappedStream);
|
||||
* is.reset();
|
||||
* doSomethingElse(is);
|
||||
* </pre>
|
||||
* Note also the special case of <tt>ByteArrayInputStream</tt> for which the <tt>close()</tt>
|
||||
* method does nothing.
|
||||
* <pre>
|
||||
* ByteArrayInputStream bais = ...
|
||||
* HSSFWorkbook wb = new HSSFWorkbook(bais); // calls bais.close() !
|
||||
* bais.reset(); // no problem
|
||||
* doSomethingElse(bais);
|
||||
* </pre>
|
||||
*
|
||||
* @param stream the InputStream from which to read the data
|
||||
*
|
||||
* @exception IOException on errors reading, or on invalid data
|
||||
*/
|
||||
|
||||
public OPOIFSFileSystem(InputStream stream)
|
||||
throws IOException
|
||||
{
|
||||
this();
|
||||
boolean success = false;
|
||||
|
||||
HeaderBlock header_block;
|
||||
RawDataBlockList data_blocks;
|
||||
try {
|
||||
// read the header block from the stream
|
||||
header_block = new HeaderBlock(stream);
|
||||
bigBlockSize = header_block.getBigBlockSize();
|
||||
|
||||
// read the rest of the stream into blocks
|
||||
data_blocks = new RawDataBlockList(stream, bigBlockSize);
|
||||
success = true;
|
||||
} finally {
|
||||
closeInputStream(stream, success);
|
||||
}
|
||||
|
||||
|
||||
// set up the block allocation table (necessary for the
|
||||
// data_blocks to be manageable
|
||||
new BlockAllocationTableReader(header_block.getBigBlockSize(),
|
||||
header_block.getBATCount(),
|
||||
header_block.getBATArray(),
|
||||
header_block.getXBATCount(),
|
||||
header_block.getXBATIndex(),
|
||||
data_blocks);
|
||||
|
||||
// get property table from the document
|
||||
PropertyTable properties =
|
||||
new PropertyTable(header_block, data_blocks);
|
||||
|
||||
// init documents
|
||||
processProperties(
|
||||
SmallBlockTableReader.getSmallDocumentBlocks(
|
||||
bigBlockSize, data_blocks, properties.getRoot(),
|
||||
header_block.getSBATStart()
|
||||
),
|
||||
data_blocks,
|
||||
properties.getRoot().getChildren(),
|
||||
null,
|
||||
header_block.getPropertyStart()
|
||||
);
|
||||
|
||||
// For whatever reason CLSID of root is always 0.
|
||||
getRoot().setStorageClsid(properties.getRoot().getStorageClsid());
|
||||
}
|
||||
/**
|
||||
* @param stream the stream to be closed
|
||||
* @param success <code>false</code> if an exception is currently being thrown in the calling method
|
||||
*/
|
||||
protected void closeInputStream(InputStream stream, boolean success) {
|
||||
|
||||
if(stream.markSupported() && !(stream instanceof ByteArrayInputStream)) {
|
||||
String msg = "POIFS is closing the supplied input stream of type ("
|
||||
+ stream.getClass().getName() + ") which supports mark/reset. "
|
||||
+ "This will be a problem for the caller if the stream will still be used. "
|
||||
+ "If that is the case the caller should wrap the input stream to avoid this close logic. "
|
||||
+ "This warning is only temporary and will not be present in future versions of POI.";
|
||||
_logger.log(POILogger.WARN, msg);
|
||||
}
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException e) {
|
||||
if(success) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
// else not success? Try block did not complete normally
|
||||
// just print stack trace and leave original ex to be thrown
|
||||
_logger.log(POILogger.ERROR, "can't close input stream", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new document to be added to the root directory
|
||||
*
|
||||
* @param stream the InputStream from which the document's data
|
||||
* will be obtained
|
||||
* @param name the name of the new POIFSDocument
|
||||
*
|
||||
* @return the new DocumentEntry
|
||||
*
|
||||
* @exception IOException on error creating the new POIFSDocument
|
||||
*/
|
||||
|
||||
public DocumentEntry createDocument(final InputStream stream,
|
||||
final String name)
|
||||
throws IOException
|
||||
{
|
||||
return getRoot().createDocument(name, stream);
|
||||
}
|
||||
|
||||
/**
|
||||
* create a new DocumentEntry in the root entry; the data will be
|
||||
* provided later
|
||||
*
|
||||
* @param name the name of the new DocumentEntry
|
||||
* @param size the size of the new DocumentEntry
|
||||
* @param writer the writer of the new DocumentEntry
|
||||
*
|
||||
* @return the new DocumentEntry
|
||||
*
|
||||
* @exception IOException
|
||||
*/
|
||||
public DocumentEntry createDocument(final String name, final int size,
|
||||
final POIFSWriterListener writer)
|
||||
throws IOException
|
||||
{
|
||||
return getRoot().createDocument(name, size, writer);
|
||||
}
|
||||
|
||||
/**
|
||||
* create a new DirectoryEntry in the root directory
|
||||
*
|
||||
* @param name the name of the new DirectoryEntry
|
||||
*
|
||||
* @return the new DirectoryEntry
|
||||
*
|
||||
     * @exception IOException on name duplication
     */
    public DirectoryEntry createDirectory(final String name)
        throws IOException
    {
        return getRoot().createDirectory(name);
    }

    /**
     * Write the filesystem out
     *
     * @param stream the OutputStream to which the filesystem will be
     *               written
     *
     * @exception IOException thrown on errors writing to the stream
     */
    public void writeFilesystem(final OutputStream stream)
        throws IOException
    {
        // get the property table ready
        _property_table.preWrite();

        // create the small block store, and the SBAT
        SmallBlockTableWriter sbtw =
            new SmallBlockTableWriter(bigBlockSize, _documents, _property_table.getRoot());

        // create the block allocation table
        BlockAllocationTableWriter bat =
            new BlockAllocationTableWriter(bigBlockSize);

        // create a list of BATManaged objects: the documents plus the
        // property table and the small block table
        List<Object> bm_objects = new ArrayList<>();

        bm_objects.addAll(_documents);
        bm_objects.add(_property_table);
        bm_objects.add(sbtw);
        bm_objects.add(sbtw.getSBAT());

        // walk the list, allocating space for each and assigning each
        // a starting block number
        Iterator<Object> iter = bm_objects.iterator();

        while (iter.hasNext())
        {
            BATManaged bmo = ( BATManaged ) iter.next();
            int block_count = bmo.countBlocks();

            if (block_count != 0) {
                bmo.setStartBlock(bat.allocateSpace(block_count));
            } /*else {
                // Either the BATManaged object is empty or its data
                // is composed of SmallBlocks; in either case,
                // allocating space in the BAT is inappropriate
            }*/
        }

        // allocate space for the block allocation table and take its
        // starting block
        int batStartBlock = bat.createBlocks();

        // get the extended block allocation table blocks
        HeaderBlockWriter header_block_writer = new HeaderBlockWriter(bigBlockSize);
        BATBlock[] xbat_blocks =
            header_block_writer.setBATBlocks(bat.countBlocks(),
                                             batStartBlock);

        // set the property table start block
        header_block_writer.setPropertyStart(_property_table.getStartBlock());

        // set the small block allocation table start block
        header_block_writer.setSBATStart(sbtw.getSBAT().getStartBlock());

        // set the small block allocation table block count
        header_block_writer.setSBATBlockCount(sbtw.getSBATBlockCount());

        // the header is now properly initialized. Make a list of
        // writers (the header block, followed by the documents, the
        // property table, the small block store, the small block
        // allocation table, the block allocation table, and the
        // extended block allocation table blocks)
        List<Object> writers = new ArrayList<>();

        writers.add(header_block_writer);
        writers.addAll(_documents);
        writers.add(_property_table);
        writers.add(sbtw);
        writers.add(sbtw.getSBAT());
        writers.add(bat);
        Collections.addAll(writers, xbat_blocks);

        // now, write everything out
        iter = writers.iterator();
        while (iter.hasNext())
        {
            BlockWritable writer = ( BlockWritable ) iter.next();

            writer.writeBlocks(stream);
        }
    }

    /**
     * read in a file and write it back out again
     *
     * @param args names of the files; arg[ 0 ] is the input file,
     *             arg[ 1 ] is the output file
     *
     * @exception IOException
     */
    public static void main(String args[])
        throws IOException
    {
        if (args.length != 2)
        {
            System.err.println(
                "two arguments required: input filename and output filename");
            System.exit(1);
        }
        FileInputStream istream = new FileInputStream(args[ 0 ]);
        FileOutputStream ostream = new FileOutputStream(args[ 1 ]);

        new OPOIFSFileSystem(istream).writeFilesystem(ostream);
        istream.close();
        ostream.close();
    }

    /**
     * get the root entry
     *
     * @return the root entry
     */
    public DirectoryNode getRoot()
    {
        if (_root == null)
        {
            _root = new DirectoryNode(_property_table.getRoot(), this, null);
        }
        return _root;
    }

    /**
     * open a document in the root entry's list of entries
     *
     * @param documentName the name of the document to be opened
     *
     * @return a newly opened DocumentInputStream
     *
     * @exception IOException if the document does not exist or the
     *            name is that of a DirectoryEntry
     */
    public DocumentInputStream createDocumentInputStream(
            final String documentName)
        throws IOException
    {
        return getRoot().createDocumentInputStream(documentName);
    }

    /**
     * add a new POIFSDocument
     *
     * @param document the POIFSDocument being added
     */
    void addDocument(final OPOIFSDocument document)
    {
        _documents.add(document);
        _property_table.addProperty(document.getDocumentProperty());
    }

    /**
     * add a new DirectoryProperty
     *
     * @param directory the DirectoryProperty being added
     */
    void addDirectory(final DirectoryProperty directory)
    {
        _property_table.addProperty(directory);
    }

    /**
     * remove an entry
     *
     * @param entry to be removed
     */
    void remove(EntryNode entry)
    {
        _property_table.removeProperty(entry.getProperty());
        if (entry.isDocumentEntry())
        {
            _documents.remove((( DocumentNode ) entry).getDocument());
        }
    }

    private void processProperties(final BlockList small_blocks,
                                   final BlockList big_blocks,
                                   final Iterator<Property> properties,
                                   final DirectoryNode dir,
                                   final int headerPropertiesStartAt)
        throws IOException
    {
        while (properties.hasNext())
        {
            Property property = properties.next();
            String name = property.getName();
            DirectoryNode parent = (dir == null)
                ? getRoot()
                : dir;

            if (property.isDirectory())
            {
                DirectoryNode new_dir =
                    ( DirectoryNode ) parent.createDirectory(name);

                new_dir.setStorageClsid( property.getStorageClsid() );

                processProperties(
                    small_blocks, big_blocks,
                    (( DirectoryProperty ) property).getChildren(),
                    new_dir, headerPropertiesStartAt);
            }
            else
            {
                int startBlock = property.getStartBlock();
                int size = property.getSize();
                OPOIFSDocument document;

                if (property.shouldUseSmallBlocks())
                {
                    document =
                        new OPOIFSDocument(name,
                            small_blocks.fetchBlocks(startBlock, headerPropertiesStartAt),
                            size);
                }
                else
                {
                    document =
                        new OPOIFSDocument(name,
                            big_blocks.fetchBlocks(startBlock, headerPropertiesStartAt),
                            size);
                }
                parent.createDocument(document);
            }
        }
    }

    /* ********** START begin implementation of POIFSViewable ********** */

    /**
     * Get an array of objects, some of which may implement
     * POIFSViewable
     *
     * @return an array of Object; may not be null, but may be empty
     */
    public Object [] getViewableArray()
    {
        if (preferArray())
        {
            return getRoot().getViewableArray();
        }
        return new Object[ 0 ];
    }

    /**
     * Get an Iterator of objects, some of which may implement
     * POIFSViewable
     *
     * @return an Iterator; may not be null, but may have an empty
     *         back end store
     */
    public Iterator<Object> getViewableIterator()
    {
        if (!preferArray())
        {
            return getRoot().getViewableIterator();
        }
        return Collections.emptyList().iterator();
    }

    /**
     * Give viewers a hint as to whether to call getViewableArray or
     * getViewableIterator
     *
     * @return true if a viewer should call getViewableArray, false if
     *         a viewer should call getViewableIterator
     */
    public boolean preferArray()
    {
        return getRoot().preferArray();
    }

    /**
     * Provides a short description of the object, to be used when a
     * POIFSViewable object has not provided its contents.
     *
     * @return short description
     */
    public String getShortDescription()
    {
        return "POIFS FileSystem";
    }

    /**
     * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
     */
    public int getBigBlockSize() {
        return bigBlockSize.getBigBlockSize();
    }

    /**
     * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
     */
    public POIFSBigBlockSize getBigBlockSizeDetails() {
        return bigBlockSize;
    }

    /* ********** END begin implementation of POIFSViewable ********** */
}   // end public class OPOIFSFileSystem
@@ -141,6 +141,6 @@ public class POIFSFileSystem
     * arg[ 1 ] is the output file
     */
    public static void main(String args[]) throws IOException {
        OPOIFSFileSystem.main(args);
        NPOIFSFileSystem.main(args);
    }
}
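For reference: with the old OPOIFS entry point gone, the same read-and-rewrite round trip goes through POIFSFileSystem alone. A minimal caller-side sketch (the class name and file arguments are placeholders, not part of this patch):

import java.io.FileInputStream;
import java.io.FileOutputStream;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class CopyPoifs {
    public static void main(String[] args) throws Exception {
        // read the source compound file and write it back out through the new filesystem
        try (FileInputStream in = new FileInputStream(args[0]);
             POIFSFileSystem fs = new POIFSFileSystem(in);
             FileOutputStream out = new FileOutputStream(args[1])) {
            fs.writeFilesystem(out);
        }
    }
}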
@@ -1,6 +1,4 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<!--
====================================================================
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.

@@ -15,22 +13,11 @@
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
====================================================================
-->
<html>
<head>
</head>
<body bgcolor="white">
==================================================================== */

storage package contains low level binary structures for POIFS's implementation of the OLE 2
Compound Document Format.

<h2>Related Documentation</h2>

For overviews, tutorials, examples, guides, and tool documentation, please see:
<ul>
<li><a href="http://poi.apache.org">Apache POI Project</a>
</ul>

</body>
</html>
/**
 * filesystem package maps OLE 2 Compound document files to a more familiar filesystem interface.
 *
 * @see org.apache.poi.poifs.eventfilesystem
 */
package org.apache.poi.poifs.filesystem;
@@ -1,37 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<!--
====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
====================================================================
-->
<html>
<head>
</head>
<body bgcolor="white">

filesystem package maps OLE 2 Compound document files to a more familiar filesystem interface.

<h2>Related Documentation</h2>

For overviews, tutorials, examples, guides, and tool documentation, please see:
<ul>
<li><a href="http://poi.apache.org">Apache POI Project</a>
</ul>

<!-- Put @see and @since tags down here. -->
@see org.apache.poi.poifs.eventfilesystem
</body>
</html>
@@ -0,0 +1,25 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

/**
 * Poor Obfuscation Implementation FileSystem APIs implement the OLE 2 Compound Document format in
 * pure Java. All POI subprojects are based upon this API.
 *
 * @see org.apache.poi.hssf
 * @see org.apache.poi.hpsf
 */
package org.apache.poi.poifs;
@@ -1,39 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<!--
====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
====================================================================
-->
<html>
<head>
</head>
<body bgcolor="white">

Poor Obfuscation Implementation FileSystem APIs implement the OLE 2 Compound Document format in
pure Java. All POI subprojects are based upon this API.

<h2>Related Documentation</h2>

For overviews, tutorials, examples, guides, and tool documentation, please see:
<ul>
<li><a href="http://poi.apache.org">Apache POI Project</a>
</ul>

<!-- Put @see and @since tags down here. -->
@see org.apache.poi.hssf
@see org.apache.poi.hpsf
</body>
</html>
@@ -30,7 +30,7 @@ import java.util.Set;
/**
 * Directory property
 */
public class DirectoryProperty extends Property implements Parent, Iterable<Property> { // TODO - fix instantiable superclass
public class DirectoryProperty extends Property implements Parent, Iterable<Property> {

    /** List of Property instances */
    private List<Property> _children;
@@ -19,14 +19,14 @@

package org.apache.poi.poifs.property;

import org.apache.poi.poifs.filesystem.OPOIFSDocument;
import org.apache.poi.poifs.filesystem.NPOIFSDocument;

/**
 * Trivial extension of Property for POIFSDocuments
 */
public class DocumentProperty extends Property {
    // the POIFSDocument this property is associated with
    private OPOIFSDocument _document;
    private NPOIFSDocument _document;

    /**
     * Constructor

@@ -64,7 +64,7 @@ public class DocumentProperty extends Property {
     *
     * @param doc the associated POIFSDocument
     */
    public void setDocument(OPOIFSDocument doc)
    public void setDocument(NPOIFSDocument doc)
    {
        _document = doc;
    }

@@ -74,7 +74,7 @@ public class DocumentProperty extends Property {
     *
     * @return the associated document
     */
    public OPOIFSDocument getDocument()
    public NPOIFSDocument getDocument()
    {
        return _document;
    }
@@ -18,7 +18,6 @@
package org.apache.poi.poifs.property;

import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.storage.SmallDocumentBlock;

/**
 * Root property

@@ -43,9 +42,7 @@ public final class RootProperty extends DirectoryProperty {
     * @param array byte data
     * @param offset offset into byte data
     */
    protected RootProperty(final int index, final byte [] array,
                           final int offset)
    {
    RootProperty(final int index, final byte [] array, final int offset) {
        super(index, array, offset);
    }

@@ -56,7 +53,9 @@ public final class RootProperty extends DirectoryProperty {
     */
    public void setSize(int size)
    {
        super.setSize(SmallDocumentBlock.calcSize(size));
        final int BLOCK_SHIFT = 6;
        final int _block_size = 1 << BLOCK_SHIFT;
        super.setSize(size * _block_size);
    }

    /**
@@ -0,0 +1,23 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

/**
 * property package contains high and low level Property structures for POIFS.
 *
 * @see org.apache.poi.poifs.filesystem
 */
package org.apache.poi.poifs.property;
@@ -1,37 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<!--
====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
====================================================================
-->
<html>
<head>
</head>
<body bgcolor="white">

property package contains high and low level Property structures for POIFS.

<h2>Related Documentation</h2>

For overviews, tutorials, examples, guides, and tool documentation, please see:
<ul>
<li><a href="http://poi.apache.org">Apache POI Project</a>
</ul>

<!-- Put @see and @since tags down here. -->
@see org.apache.poi.poifs.filesystem
</body>
</html>
@@ -27,9 +27,9 @@ import org.apache.poi.util.LittleEndianConsts;
 */
public interface HeaderBlockConstants
{
    public static final long _signature = 0xE11AB1A1E011CFD0L;
    public static final int _bat_array_offset = 0x4c;
    public static final int _max_bats_in_header =
    long _signature = 0xE11AB1A1E011CFD0L;
    int _bat_array_offset = 0x4c;
    int _max_bats_in_header =
        (POIFSConstants.SMALLER_BIG_BLOCK_SIZE - _bat_array_offset)
        / LittleEndianConsts.INT_SIZE; // If 4k blocks, rest is blank

@@ -39,12 +39,12 @@ public interface HeaderBlockConstants
    // XBAT ~= DIFat

    // useful offsets
    public static final int _signature_offset = 0;
    public static final int _bat_count_offset = 0x2C;
    public static final int _property_start_offset = 0x30;
    public static final int _sbat_start_offset = 0x3C;
    public static final int _sbat_block_count_offset = 0x40;
    public static final int _xbat_start_offset = 0x44;
    public static final int _xbat_count_offset = 0x48;
} // end public interface HeaderBlockConstants
    int _signature_offset = 0;
    int _bat_count_offset = 0x2C;
    int _property_start_offset = 0x30;
    int _sbat_start_offset = 0x3C;
    int _sbat_block_count_offset = 0x40;
    int _xbat_start_offset = 0x44;
    int _xbat_count_offset = 0x48;
}
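The signature constant above is the 8-byte OLE2 magic stored little-endian at offset 0 of the header block. A small illustrative check, assuming a header already read into a byte array (the wrapper class is hypothetical, not part of this patch):

import org.apache.poi.poifs.storage.HeaderBlockConstants;
import org.apache.poi.util.LittleEndian;

// Sketch: verify the OLE2 magic using the constants from the interface above.
final class Ole2SignatureCheck {
    static boolean hasOle2Signature(byte[] headerBlock) {
        // the magic occupies bytes 0..7, little-endian
        long magic = LittleEndian.getLong(headerBlock, HeaderBlockConstants._signature_offset);
        return magic == HeaderBlockConstants._signature;
    }
}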
@@ -1,109 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.property.RootProperty;

/**
 * This class implements reading the small document block list from an
 * existing file
 */
public final class SmallBlockTableReader {
    private static BlockList prepareSmallDocumentBlocks(
            final POIFSBigBlockSize bigBlockSize,
            final RawDataBlockList blockList, final RootProperty root,
            final int sbatStart)
        throws IOException
    {
        // Fetch the blocks which hold the Small Blocks stream
        ListManagedBlock [] smallBlockBlocks =
            blockList.fetchBlocks(root.getStartBlock(), -1);

        // Turn that into a list

        return new SmallDocumentBlockList(
            SmallDocumentBlock.extract(bigBlockSize, smallBlockBlocks));
    }

    private static BlockAllocationTableReader prepareReader(
            final POIFSBigBlockSize bigBlockSize,
            final RawDataBlockList blockList, final BlockList list,
            final RootProperty root, final int sbatStart)
        throws IOException
    {
        // Process the SBAT and blocks
        return new BlockAllocationTableReader(bigBlockSize,
            blockList.fetchBlocks(sbatStart, -1),
            list);
    }

    /**
     * Fetch the small document block reader from an existing file, normally
     * needed for debugging and low level dumping. You should typically call
     * {@link #getSmallDocumentBlocks(POIFSBigBlockSize, RawDataBlockList, RootProperty, int)}
     * instead.
     *
     * @param blockList the raw data from which the small block table
     *                  will be extracted
     * @param root the root property (which contains the start block
     *             and small block table size)
     * @param sbatStart the start block of the SBAT
     *
     * @return the small document block reader
     *
     * @exception IOException
     */
    public static BlockAllocationTableReader _getSmallDocumentBlockReader(
            final POIFSBigBlockSize bigBlockSize,
            final RawDataBlockList blockList, final RootProperty root,
            final int sbatStart)
        throws IOException
    {
        BlockList list = prepareSmallDocumentBlocks(
            bigBlockSize, blockList, root, sbatStart);
        return prepareReader(
            bigBlockSize, blockList, list, root, sbatStart);
    }

    /**
     * Fetch the small document block list from an existing file
     *
     * @param blockList the raw data from which the small block table
     *                  will be extracted
     * @param root the root property (which contains the start block
     *             and small block table size)
     * @param sbatStart the start block of the SBAT
     *
     * @return the small document block list
     *
     * @exception IOException
     */
    public static BlockList getSmallDocumentBlocks(
            final POIFSBigBlockSize bigBlockSize,
            final RawDataBlockList blockList, final RootProperty root,
            final int sbatStart)
        throws IOException
    {
        BlockList list = prepareSmallDocumentBlocks(
            bigBlockSize, blockList, root, sbatStart);
        prepareReader(bigBlockSize, blockList, list, root, sbatStart);
        return list;
    }
}
@@ -1,149 +0,0 @@

/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */


package org.apache.poi.poifs.storage;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.filesystem.BATManaged;
import org.apache.poi.poifs.filesystem.OPOIFSDocument;
import org.apache.poi.poifs.property.RootProperty;

import java.util.*;

import java.io.*;

/**
 * This class implements storage for writing the small blocks used by
 * small documents.
 *
 * @author Marc Johnson (mjohnson at apache dot org)
 */
public class SmallBlockTableWriter
    implements BlockWritable, BATManaged
{
    private BlockAllocationTableWriter _sbat;
    private List<SmallDocumentBlock> _small_blocks;
    private int _big_block_count;
    private RootProperty _root;

    /**
     * Creates new SmallBlockTable
     *
     * @param documents a List of POIFSDocument instances
     * @param root the Filesystem's root property
     */
    public SmallBlockTableWriter(final POIFSBigBlockSize bigBlockSize,
                                 final List<OPOIFSDocument> documents,
                                 final RootProperty root)
    {
        _sbat = new BlockAllocationTableWriter(bigBlockSize);
        _small_blocks = new ArrayList<>();
        _root = root;

        for (OPOIFSDocument doc : documents)
        {
            SmallDocumentBlock[] blocks = doc.getSmallBlocks();

            if (blocks.length != 0)
            {
                doc.setStartBlock(_sbat.allocateSpace(blocks.length));
                for (int j = 0; j < blocks.length; j++)
                {
                    _small_blocks.add(blocks[ j ]);
                }
            } else {
                doc.setStartBlock(POIFSConstants.END_OF_CHAIN);
            }
        }
        _sbat.simpleCreateBlocks();
        _root.setSize(_small_blocks.size());
        _big_block_count = SmallDocumentBlock.fill(bigBlockSize,_small_blocks);
    }

    /**
     * Get the number of SBAT blocks
     *
     * @return number of SBAT big blocks
     */
    public int getSBATBlockCount()
    {
        return (_big_block_count + 15) / 16;
    }

    /**
     * Get the SBAT
     *
     * @return the Small Block Allocation Table
     */
    public BlockAllocationTableWriter getSBAT()
    {
        return _sbat;
    }

    /* ********** START implementation of BATManaged ********** */

    /**
     * Return the number of BigBlock's this instance uses
     *
     * @return count of BigBlock instances
     */
    public int countBlocks()
    {
        return _big_block_count;
    }

    /**
     * Set the start block for this instance
     *
     * @param start_block
     */
    public void setStartBlock(int start_block)
    {
        _root.setStartBlock(start_block);
    }

    /* ********** END implementation of BATManaged ********** */
    /* ********** START implementation of BlockWritable ********** */

    /**
     * Write the storage to an OutputStream
     *
     * @param stream the OutputStream to which the stored data should
     *               be written
     *
     * @exception IOException on problems writing to the specified
     *            stream
     */
    public void writeBlocks(final OutputStream stream)
        throws IOException
    {
        for (BlockWritable block : _small_blocks) {
            block.writeBlocks(stream);
        }
    }

    /* ********** END implementation of BlockWritable ********** */
}
@@ -1,253 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.poi.poifs.common.POIFSBigBlockSize;

/**
 * Storage for documents that are too small to use regular
 * DocumentBlocks for their data
 */
public final class SmallDocumentBlock implements BlockWritable, ListManagedBlock {
    private static final int BLOCK_SHIFT = 6;

    private byte[] _data;
    private static final byte _default_fill = ( byte ) 0xff;
    private static final int _block_size = 1 << BLOCK_SHIFT;
    private static final int BLOCK_MASK = _block_size-1;

    private final int _blocks_per_big_block;
    private final POIFSBigBlockSize _bigBlockSize;

    private SmallDocumentBlock(final POIFSBigBlockSize bigBlockSize, final byte [] data, final int index)
    {
        this(bigBlockSize);
        System.arraycopy(data, index * _block_size, _data, 0, _block_size);
    }

    protected SmallDocumentBlock(final POIFSBigBlockSize bigBlockSize)
    {
        _bigBlockSize = bigBlockSize;
        _blocks_per_big_block = getBlocksPerBigBlock(bigBlockSize);
        _data = new byte[ _block_size ];
    }

    private static int getBlocksPerBigBlock(final POIFSBigBlockSize bigBlockSize)
    {
        return bigBlockSize.getBigBlockSize() / _block_size;
    }

    /**
     * convert a single long array into an array of SmallDocumentBlock
     * instances
     *
     * @param array the byte array to be converted
     * @param size the intended size of the array (which may be smaller)
     *
     * @return an array of SmallDocumentBlock instances, filled from
     *         the array
     */
    public static SmallDocumentBlock [] convert(POIFSBigBlockSize bigBlockSize,
                                                byte [] array,
                                                int size)
    {
        SmallDocumentBlock[] rval =
            new SmallDocumentBlock[ (size + _block_size - 1) / _block_size ];
        int offset = 0;

        for (int k = 0; k < rval.length; k++)
        {
            rval[ k ] = new SmallDocumentBlock(bigBlockSize);
            if (offset < array.length)
            {
                int length = Math.min(_block_size, array.length - offset);

                System.arraycopy(array, offset, rval[ k ]._data, 0, length);
                if (length != _block_size)
                {
                    Arrays.fill(rval[ k ]._data, length, _block_size,
                                _default_fill);
                }
            }
            else
            {
                Arrays.fill(rval[ k ]._data, _default_fill);
            }
            offset += _block_size;
        }
        return rval;
    }

    /**
     * fill out a List of SmallDocumentBlocks so that it fully occupies
     * a set of big blocks
     *
     * @param blocks the List to be filled out
     *
     * @return number of big blocks the list encompasses
     */
    public static int fill(POIFSBigBlockSize bigBlockSize, List<SmallDocumentBlock> blocks)
    {
        int _blocks_per_big_block = getBlocksPerBigBlock(bigBlockSize);

        int count = blocks.size();
        int big_block_count = (count + _blocks_per_big_block - 1)
            / _blocks_per_big_block;
        int full_count = big_block_count * _blocks_per_big_block;

        for (; count < full_count; count++)
        {
            blocks.add(makeEmptySmallDocumentBlock(bigBlockSize));
        }
        return big_block_count;
    }

    /**
     * Factory for creating SmallDocumentBlocks from DocumentBlocks
     *
     * @param store the original DocumentBlocks
     * @param size the total document size
     *
     * @return an array of new SmallDocumentBlocks instances
     *
     * @exception IOException on errors reading from the DocumentBlocks
     * @exception ArrayIndexOutOfBoundsException if, somehow, the store
     *            contains less data than size indicates
     */
    public static SmallDocumentBlock [] convert(POIFSBigBlockSize bigBlockSize,
                                                BlockWritable [] store,
                                                int size)
        throws IOException, ArrayIndexOutOfBoundsException
    {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();

        for (int j = 0; j < store.length; j++)
        {
            store[ j ].writeBlocks(stream);
        }
        byte[] data = stream.toByteArray();
        SmallDocumentBlock[] rval =
            new SmallDocumentBlock[ convertToBlockCount(size) ];

        for (int index = 0; index < rval.length; index++)
        {
            rval[ index ] = new SmallDocumentBlock(bigBlockSize, data, index);
        }
        return rval;
    }

    /**
     * create a list of SmallDocumentBlock's from raw data
     *
     * @param blocks the raw data containing the SmallDocumentBlock
     *               data
     *
     * @return a List of SmallDocumentBlock's extracted from the input
     */
    public static List<SmallDocumentBlock> extract(POIFSBigBlockSize bigBlockSize, ListManagedBlock [] blocks)
        throws IOException
    {
        int _blocks_per_big_block = getBlocksPerBigBlock(bigBlockSize);

        List<SmallDocumentBlock> sdbs = new ArrayList<>();

        for (int j = 0; j < blocks.length; j++)
        {
            byte[] data = blocks[ j ].getData();

            for (int k = 0; k < _blocks_per_big_block; k++)
            {
                sdbs.add(new SmallDocumentBlock(bigBlockSize, data, k));
            }
        }
        return sdbs;
    }

    public static DataInputBlock getDataInputBlock(SmallDocumentBlock[] blocks, int offset) {
        int firstBlockIndex = offset >> BLOCK_SHIFT;
        int firstBlockOffset= offset & BLOCK_MASK;
        return new DataInputBlock(blocks[firstBlockIndex]._data, firstBlockOffset);
    }

    /**
     * Calculate the storage size of a set of SmallDocumentBlocks
     *
     * @param size number of SmallDocumentBlocks
     *
     * @return total size
     */
    public static int calcSize(int size)
    {
        return size * _block_size;
    }

    protected int getSmallBlocksPerBigBlock()
    {
        return _blocks_per_big_block;
    }

    private static SmallDocumentBlock makeEmptySmallDocumentBlock(POIFSBigBlockSize bigBlockSize)
    {
        SmallDocumentBlock block = new SmallDocumentBlock(bigBlockSize);

        Arrays.fill(block._data, _default_fill);
        return block;
    }

    private static int convertToBlockCount(int size)
    {
        return (size + _block_size - 1) / _block_size;
    }

    /**
     * Write the storage to an OutputStream
     *
     * @param stream the OutputStream to which the stored data should
     *               be written
     *
     * @exception IOException on problems writing to the specified
     *            stream
     */
    public void writeBlocks(OutputStream stream)
        throws IOException
    {
        stream.write(_data);
    }

    /**
     * Get the data from the block
     *
     * @return the block's data as a byte array
     *
     * @exception IOException if there is no data
     */
    public byte [] getData() {
        return _data;
    }

    public POIFSBigBlockSize getBigBlockSize() {
        return _bigBlockSize;
    }
}
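The bookkeeping above is all 64-byte arithmetic, and the same two calculations survive inline in RootProperty.setSize(). An illustrative restatement under that assumption (standalone sketch, not POI code):

// Sketch of the small-block size math: 64-byte blocks, round up, then multiply back.
final class SmallBlockMath {
    private static final int BLOCK_SHIFT = 6;
    private static final int BLOCK_SIZE = 1 << BLOCK_SHIFT; // 64 bytes

    // number of small blocks needed for a document of the given byte size
    static int blockCount(int byteSize) {
        return (byteSize + BLOCK_SIZE - 1) / BLOCK_SIZE;
    }

    // storage consumed by that many small blocks
    static int storedSize(int blockCount) {
        return blockCount * BLOCK_SIZE;
    }

    public static void main(String[] args) {
        System.out.println(blockCount(100)); // 2 blocks
        System.out.println(storedSize(2));   // 128 bytes
    }
}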
@@ -0,0 +1,22 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

/**
 * storage package contains low level binary structures for POIFS's implementation of the OLE 2
 * Compound Document Format.
 */
package org.apache.poi.poifs.storage;
@@ -21,10 +21,6 @@ import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/**
 *
 * @author Josh Micich
 */
public final class LittleEndianOutputStream extends FilterOutputStream implements LittleEndianOutput {
    public LittleEndianOutputStream(OutputStream out) {
        super(out);

@@ -49,7 +45,7 @@ public final class LittleEndianOutputStream extends FilterOutputStream implement
        int b3 = (v >>> 24) & 0xFF;
        int b2 = (v >>> 16) & 0xFF;
        int b1 = (v >>> 8) & 0xFF;
        int b0 = (v/* >>> 0*/) & 0xFF;
        int b0 = (v) & 0xFF;
        try {
            out.write(b0);
            out.write(b1);

@@ -69,7 +65,7 @@ public final class LittleEndianOutputStream extends FilterOutputStream implement
    @Override
    public void writeShort(int v) {
        int b1 = (v >>> 8) & 0xFF;
        int b0 = (v/* >>> 0*/) & 0xFF;
        int b0 = (v) & 0xFF;
        try {
            out.write(b0);
            out.write(b1);

@@ -95,4 +91,37 @@ public final class LittleEndianOutputStream extends FilterOutputStream implement
            throw new RuntimeException(e);
        }
    }


    /**
     * Put unsigned int into output stream
     *
     * @param value
     *            the int (32-bit) value
     */
    public void writeUInt( long value ) {
        try {
            out.write( (byte) ( ( value ) & 0xFF ) );
            out.write( (byte) ( ( value >>> 8 ) & 0xFF ) );
            out.write( (byte) ( ( value >>> 16 ) & 0xFF ) );
            out.write( (byte) ( ( value >>> 24 ) & 0xFF ) );
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Put unsigned short into output stream
     *
     * @param value
     *            the unsigned short (16-bit) value
     */
    public void putUShort( int value ) {
        try {
            out.write( (byte) ( ( value ) & 0xFF ) );
            out.write( (byte) ( ( value >>> 8 ) & 0xFF ) );
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
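Usage of the stream above, including the newly added unsigned writers, looks roughly like this (the demo class is illustrative only):

import java.io.ByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutputStream;

// Sketch: write a few little-endian values, including the new unsigned helpers.
public class LittleEndianDemo {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (LittleEndianOutputStream leos = new LittleEndianOutputStream(bos)) {
            leos.writeShort(0x1234);     // stored as 34 12
            leos.writeInt(0xCAFEBABE);   // least-significant byte first
            leos.writeUInt(0xFFFFFFFFL); // added in this change: 32-bit unsigned
            leos.putUShort(0xFFFF);      // added in this change: 16-bit unsigned
        }
        System.out.println(bos.size() + " bytes written"); // 12 bytes
    }
}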
@@ -48,7 +48,6 @@ import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.FileMagic;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.NotOLE2FileException;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;

@@ -79,12 +78,15 @@ import org.apache.xmlbeans.XmlException;
 * off switching to <a href="http://tika.apache.org">Apache Tika</a> instead!</p>
 */
@SuppressWarnings("WeakerAccess")
public class ExtractorFactory {
public final class ExtractorFactory {
    private static final POILogger logger = POILogFactory.getLogger(ExtractorFactory.class);

    public static final String CORE_DOCUMENT_REL = PackageRelationshipTypes.CORE_DOCUMENT;
    protected static final String VISIO_DOCUMENT_REL = PackageRelationshipTypes.VISIO_CORE_DOCUMENT;
    protected static final String STRICT_DOCUMENT_REL = PackageRelationshipTypes.STRICT_CORE_DOCUMENT;
    private static final String VISIO_DOCUMENT_REL = PackageRelationshipTypes.VISIO_CORE_DOCUMENT;
    private static final String STRICT_DOCUMENT_REL = PackageRelationshipTypes.STRICT_CORE_DOCUMENT;

    private ExtractorFactory() {
    }

    /**
     * Should this thread prefer event based over usermodel based extractors?

@@ -128,6 +130,7 @@ public class ExtractorFactory {
        return OLE2ExtractorFactory.getPreferEventExtractor();
    }

    @SuppressWarnings("unchecked")
    public static <T extends POITextExtractor> T createExtractor(File f) throws IOException, OpenXML4JException, XmlException {
        NPOIFSFileSystem fs = null;
        try {

@@ -230,13 +233,13 @@ public class ExtractorFactory {
            // Is it XSLF?
            for (XSLFRelation rel : XSLFPowerPointExtractor.SUPPORTED_TYPES) {
                if ( rel.getContentType().equals( contentType ) ) {
                    return new SlideShowExtractor(new XMLSlideShow(pkg));
                    return new SlideShowExtractor<>(new XMLSlideShow(pkg));
                }
            }

            // special handling for SlideShow-Theme-files,
            if (XSLFRelation.THEME_MANAGER.getContentType().equals(contentType)) {
                return new SlideShowExtractor(new XMLSlideShow(pkg));
                return new SlideShowExtractor<>(new XMLSlideShow(pkg));
            }

            // How about xlsb?

@@ -262,10 +265,8 @@ public class ExtractorFactory {
    public static <T extends POITextExtractor> T createExtractor(NPOIFSFileSystem fs) throws IOException, OpenXML4JException, XmlException {
        return createExtractor(fs.getRoot());
    }
    public static <T extends POITextExtractor> T createExtractor(OPOIFSFileSystem fs) throws IOException, OpenXML4JException, XmlException {
        return createExtractor(fs.getRoot());
    }

    @SuppressWarnings("unchecked")
    public static <T extends POITextExtractor> T createExtractor(DirectoryNode poifsDir) throws IOException, OpenXML4JException, XmlException
    {
        // First, check for OOXML

@@ -374,7 +375,7 @@ public class ExtractorFactory {
                throw new IOException(e.getMessage(), e);
            }
        }
        return textExtractors.toArray(new POITextExtractor[textExtractors.size()]);
        return textExtractors.toArray(new POITextExtractor[0]);
    }

    /**
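Typical caller-side usage of the factory is unchanged by this patch; roughly (the demo class and argument handling are illustrative only):

import java.io.File;
import org.apache.poi.extractor.POITextExtractor;
import org.apache.poi.ooxml.extractor.ExtractorFactory;

// Sketch: let the factory pick the right extractor for whatever file it is handed.
public class ExtractDemo {
    public static void main(String[] args) throws Exception {
        try (POITextExtractor extractor = ExtractorFactory.createExtractor(new File(args[0]))) {
            System.out.println(extractor.getText());
        }
    }
}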
@@ -30,10 +30,9 @@ import java.io.IOException;
import java.util.Locale;

import org.apache.poi.POIDataSamples;
import org.apache.poi.UnsupportedFileFormatException;
import org.apache.poi.extractor.POIOLE2TextExtractor;
import org.apache.poi.extractor.POITextExtractor;
import org.apache.poi.ooxml.extractor.POIXMLTextExtractor;
import org.apache.poi.UnsupportedFileFormatException;
import org.apache.poi.hdgf.extractor.VisioTextExtractor;
import org.apache.poi.hpbf.extractor.PublisherTextExtractor;
import org.apache.poi.hsmf.extractor.OutlookTextExtactor;

@@ -44,14 +43,12 @@ import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.hwpf.extractor.Word6Extractor;
import org.apache.poi.hwpf.extractor.WordExtractor;
import org.apache.poi.ooxml.extractor.ExtractorFactory;
import org.apache.poi.ooxml.extractor.POIXMLTextExtractor;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.openxml4j.opc.PackageAccess;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
import org.apache.poi.xdgf.extractor.XDGFVisioExtractor;
import org.apache.poi.xssf.extractor.XSSFBEventBasedExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFEventBasedExcelExtractor;

@@ -65,11 +62,10 @@ import org.junit.Test;
 */
public class TestExtractorFactory {

    private static final POILogger LOG = POILogFactory.getLogger(TestExtractorFactory.class);

    private static final POIDataSamples ssTests = POIDataSamples.getSpreadSheetInstance();
    private static final File xls = getFileAndCheck(ssTests, "SampleSS.xls");
    private static final File xlsx = getFileAndCheck(ssTests, "SampleSS.xlsx");
    @SuppressWarnings("unused")
    private static final File xlsxStrict = getFileAndCheck(ssTests, "SampleSS.strict.xlsx");
    private static final File xltx = getFileAndCheck(ssTests, "test.xltx");
    private static final File xlsEmb = getFileAndCheck(ssTests, "excel_with_embeded.xls");

@@ -150,17 +146,19 @@ public class TestExtractorFactory {
    @Test(expected = IllegalArgumentException.class)
    public void testFileInvalid() throws Exception {
        // Text
        try (POITextExtractor te = ExtractorFactory.createExtractor(txt)) {}
        try (POITextExtractor ignored = ExtractorFactory.createExtractor(txt)) {
            fail("extracting from invalid package");
        }
    }

    @Test
    public void testInputStream() throws Exception {
        testStream((f) -> ExtractorFactory.createExtractor(f), true);
        testStream(ExtractorFactory::createExtractor, true);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testInputStreamInvalid() throws Exception {
        testInvalid((f) -> ExtractorFactory.createExtractor(f));
        testInvalid(ExtractorFactory::createExtractor);
    }

    @Test

@@ -173,17 +171,6 @@ public class TestExtractorFactory {
        testInvalid((f) -> ExtractorFactory.createExtractor(new POIFSFileSystem(f)));
    }

    @Test
    public void testOPOIFS() throws Exception {
        testStream((f) -> ExtractorFactory.createExtractor(new OPOIFSFileSystem(f)), false);
    }

    @Test(expected = IOException.class)
    public void testOPOIFSInvalid() throws Exception {
        testInvalid((f) -> ExtractorFactory.createExtractor(new OPOIFSFileSystem(f)));
    }


    private void testStream(final FunctionEx<FileInputStream, POITextExtractor> poifsIS, final boolean loadOOXML)
        throws IOException, OpenXML4JException, XmlException {
        for (int i = 0; i < TEST_SET.length; i += 4) {

@@ -213,7 +200,8 @@ public class TestExtractorFactory {
    private void testInvalid(FunctionEx<FileInputStream, POITextExtractor> poifs) throws IOException, OpenXML4JException, XmlException {
        // Text
        try (FileInputStream fis = new FileInputStream(txt);
             POITextExtractor te = poifs.apply(fis)) {
             POITextExtractor ignored = poifs.apply(fis)) {
            fail("extracting from invalid package");
        }
    }

@@ -237,7 +225,9 @@ public class TestExtractorFactory {
    public void testPackageInvalid() throws Exception {
        // Text
        try (final OPCPackage pkg = OPCPackage.open(txt, PackageAccess.READ);
             final POITextExtractor te = ExtractorFactory.createExtractor(pkg)) {}
             final POITextExtractor ignored = ExtractorFactory.createExtractor(pkg)) {
            fail("extracting from invalid package");
        }
    }

    @Test

@@ -452,7 +442,7 @@ public class TestExtractorFactory {
    };

    @Test
    public void testFileLeak() throws Exception {
    public void testFileLeak() {
        // run a number of files that might fail in order to catch
        // leaked file resources when using file-leak-detector while
        // running the test
@@ -27,7 +27,6 @@ import static org.junit.Assert.assertTrue;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;

@@ -35,12 +34,10 @@ import java.util.List;
import org.apache.poi.POIDataSamples;
import org.apache.poi.hslf.usermodel.HSLFObjectShape;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.hslf.usermodel.HSLFSlideShowImpl;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.sl.usermodel.ObjectShape;

@@ -68,18 +65,7 @@ public final class TestExtractor {
     */
    private static POIDataSamples slTests = POIDataSamples.getSlideShowInstance();

//    @Before
//    public void setUp() throws Exception {
//        ppe = new PowerPointExtractor(slTests.getFile("basic_test_ppt_file.ppt").getCanonicalPath());
//        ppe2 = new PowerPointExtractor(slTests.getFile("with_textbox.ppt").getCanonicalPath());
//    }

//    @After
//    public void closeResources() throws Exception {
//        ppe2.close();
//        ppe.close();
//    }

    @SuppressWarnings("unchecked")
    private SlideShowExtractor<?,?> openExtractor(String fileName) throws IOException {
        try (InputStream is = slTests.openResourceAsStream(fileName)) {
            return new SlideShowExtractor(SlideShowFactory.create(is));

@@ -151,8 +137,6 @@ public final class TestExtractor {
    /**
     * Test that when presented with a PPT file missing the odd
     * core record, we can still get the rest of the text out
     *
     * @throws Exception
     */
    @Test
    public void testMissingCoreRecords() throws IOException {

@@ -191,7 +175,7 @@ public final class TestExtractor {
            assertTrue(dir.hasEntry(HSLFSlideShow.POWERPOINT_DOCUMENT));

            try (final SlideShow<?,?> ppt = SlideShowFactory.create(dir);
                 final SlideShowExtractor<?,?> ppe = new SlideShowExtractor(ppt)) {
                 final SlideShowExtractor<?,?> ppe = new SlideShowExtractor<>(ppt)) {
                assertEquals(TEST_SET[i+1], ppe.getText());
            }
        }

@@ -297,7 +281,7 @@ public final class TestExtractor {
    }

    private void testHeaderFooterInner(final HSLFSlideShow ppt) throws IOException {
        try (final SlideShowExtractor<?,?> ppe = new SlideShowExtractor(ppt)) {
        try (final SlideShowExtractor<?,?> ppe = new SlideShowExtractor<>(ppt)) {
            String text = ppe.getText();
            assertFalse("Header shouldn't be there by default\n" + text, text.contains("testdoc"));
            assertFalse("Header shouldn't be there by default\n" + text, text.contains("test phrase"));

@@ -399,19 +383,11 @@ public final class TestExtractor {
    public void testDifferentPOIFS() throws IOException {
        // Open the two filesystems
        File pptFile = slTests.getFile("basic_test_ppt_file.ppt");
        try (final InputStream is1 = new FileInputStream(pptFile);
             final NPOIFSFileSystem npoifs = new NPOIFSFileSystem(pptFile)) {

            final OPOIFSFileSystem opoifs = new OPOIFSFileSystem(is1);

            DirectoryNode[] files = {opoifs.getRoot(), npoifs.getRoot()};

        try (final NPOIFSFileSystem npoifs = new NPOIFSFileSystem(pptFile, true)) {
            // Open directly
            for (DirectoryNode dir : files) {
                try (SlideShow<?,?> ppt = SlideShowFactory.create(dir);
                     SlideShowExtractor<?,?> extractor = new SlideShowExtractor(ppt)) {
                    assertEquals(expectText, extractor.getText());
                }
            try (SlideShow<?,?> ppt = SlideShowFactory.create(npoifs.getRoot());
                 SlideShowExtractor<?,?> extractor = new SlideShowExtractor<>(ppt)) {
                assertEquals(expectText, extractor.getText());
            }
        }
    }
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

@@ -36,7 +35,6 @@ import org.apache.poi.hwpf.OldWordFileFormatException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.StringUtil;
import org.junit.Test;
@@ -48,7 +46,7 @@ public final class TestWordExtractor {

private static POIDataSamples docTests = POIDataSamples.getDocumentInstance();

public static void assertEqualsTrim( String expected, String actual )
private static void assertEqualsTrim( String expected, String actual )
{
String newExpected = expected.replaceAll( "\r\n", "\n" )
.replaceAll( "\r", "\n" ).trim();
@@ -188,7 +186,8 @@ public final class TestWordExtractor {
HWPFDocument doc1 = HWPFTestDataSamples.openSampleFile("ThreeColHeadFoot.doc");
WordExtractor extractor1 = new WordExtractor(doc1);

assertEquals("First header column!\tMid header Right header!\n", extractor1.getHeaderText());
//noinspection deprecation
assertEquals("First header column!\tMid header Right header!\n", extractor1.getHeaderText());
assertContains(extractor1.getText(), "First header column!");
extractor1.close();
doc1.close();
@@ -197,7 +196,8 @@ public final class TestWordExtractor {
HWPFDocument doc2 = HWPFTestDataSamples.openSampleFile("HeaderFooterUnicode.doc");
WordExtractor extractor2 = new WordExtractor(doc2);

assertEquals("This is a simple header, with a \u20ac euro symbol in it.\n\n", extractor2.getHeaderText());
//noinspection deprecation
assertEquals("This is a simple header, with a \u20ac euro symbol in it.\n\n", extractor2.getHeaderText());
assertContains(extractor2.getText(), "This is a simple header");
extractor2.close();
doc2.close();
@@ -209,7 +209,8 @@ public final class TestWordExtractor {
HWPFDocument doc1 = HWPFTestDataSamples.openSampleFile("ThreeColHeadFoot.doc");
WordExtractor extractor1 = new WordExtractor(doc1);

assertEquals("Footer Left\tFooter Middle Footer Right\n", extractor1.getFooterText());
//noinspection deprecation
assertEquals("Footer Left\tFooter Middle Footer Right\n", extractor1.getFooterText());
assertContains(extractor1.getText(), "Footer Left");
extractor1.close();
doc1.close();
@@ -218,7 +219,8 @@ public final class TestWordExtractor {
HWPFDocument doc2 = HWPFTestDataSamples.openSampleFile("HeaderFooterUnicode.doc");
WordExtractor extractor2 = new WordExtractor(doc2);

assertEquals("The footer, with Moli\u00e8re, has Unicode in it.\n", extractor2.getFooterText());
//noinspection deprecation
assertEquals("The footer, with Moli\u00e8re, has Unicode in it.\n", extractor2.getFooterText());
assertContains(extractor2.getText(), "The footer, with");
extractor2.close();
doc2.close();
@@ -279,6 +281,7 @@ public final class TestWordExtractor {
assertContains(text, "Paragraph 3. Has some RED text and some BLUE BOLD text in it");
assertContains(text, "Last (4th) paragraph");

@SuppressWarnings("deprecation")
String[] tp = w6e.getParagraphText();
assertEquals(7, tp.length);
assertEquals("The quick brown fox jumps over the lazy dog\r\n", tp[0]);
@@ -299,17 +302,17 @@ public final class TestWordExtractor {

@Test
public void testWord6() throws Exception {
InputStream is = docTests.openResourceAsStream("Word6.doc");
Word6Extractor w6e = new Word6Extractor(is);
is.close();
String text = w6e.getText();

assertContains(text, "The quick brown fox jumps over the lazy dog");

String[] tp = w6e.getParagraphText();
assertEquals(1, tp.length);
assertEquals("The quick brown fox jumps over the lazy dog\r\n", tp[0]);
w6e.close();
try (InputStream is = docTests.openResourceAsStream("Word6.doc");
Word6Extractor w6e = new Word6Extractor(is)) {
String text = w6e.getText();

assertContains(text, "The quick brown fox jumps over the lazy dog");

@SuppressWarnings("deprecation")
String[] tp = w6e.getParagraphText();
assertEquals(1, tp.length);
assertEquals("The quick brown fox jumps over the lazy dog\r\n", tp[0]);
}
}

@Test
@@ -341,30 +344,23 @@ public final class TestWordExtractor {
public void testDifferentPOIFS() throws Exception {
// Open the two filesystems
File file = docTests.getFile("test2.doc");
InputStream is = new FileInputStream(file);
OPOIFSFileSystem opoifs = new OPOIFSFileSystem(is);
is.close();
NPOIFSFileSystem npoifs = new NPOIFSFileSystem(file);

DirectoryNode[] files = { opoifs.getRoot(), npoifs.getRoot() };

// Open directly
for(DirectoryNode dir : files) {
@SuppressWarnings("resource")
WordExtractor extractor = new WordExtractor(dir);
assertEqualsTrim(p_text1_block, extractor.getText());
// extractor.close();
}
try (NPOIFSFileSystem npoifs = new NPOIFSFileSystem(file, true)) {

DirectoryNode dir = npoifs.getRoot();

// Open directly
@SuppressWarnings("resource")
WordExtractor extractor1 = new WordExtractor(dir);
assertEqualsTrim(p_text1_block, extractor1.getText());
// extractor.close();

// Open via a HWPFDocument
try (HWPFDocument doc = new HWPFDocument(dir);
WordExtractor extractor2 = new WordExtractor(doc)) {
assertEqualsTrim(p_text1_block, extractor2.getText());
}

// Open via a HWPFDocument
for(DirectoryNode dir : files) {
HWPFDocument doc = new HWPFDocument(dir);
WordExtractor extractor = new WordExtractor(doc);
assertEqualsTrim(p_text1_block, extractor.getText());
extractor.close();
}

npoifs.close();
}

/**
@@ -381,11 +377,8 @@ public final class TestWordExtractor {

for (Entry entry : fs.getRoot()) {
if ("WordDocument".equals(entry.getName())) {
WordExtractor ex = new WordExtractor(fs);
try {
try (WordExtractor ex = new WordExtractor(fs)) {
text = ex.getText();
} finally {
ex.close();
}
}
}
@@ -396,35 +389,22 @@ public final class TestWordExtractor {

@Test
public void testExtractorFromWord6Extractor() throws Exception {
InputStream is = POIDataSamples.getHPSFInstance().openResourceAsStream("TestMickey.doc");
POIFSFileSystem fs = new POIFSFileSystem(is);
is.close();
Word6Extractor wExt = new Word6Extractor(fs);
try {
POITextExtractor ext = wExt.getMetadataTextExtractor();
try {
// Now overall
String text = ext.getText();
assertContains(text, "TEMPLATE = Normal");
assertContains(text, "SUBJECT = sample subject");
assertContains(text, "MANAGER = sample manager");
assertContains(text, "COMPANY = sample company");
} finally {
ext.close();
}
} finally {
wExt.close();
fs.close();
try (InputStream is = POIDataSamples.getHPSFInstance().openResourceAsStream("TestMickey.doc");
POIFSFileSystem fs = new POIFSFileSystem(is);
Word6Extractor wExt = new Word6Extractor(fs);
POITextExtractor ext = wExt.getMetadataTextExtractor()) {
// Now overall
String text = ext.getText();
assertContains(text, "TEMPLATE = Normal");
assertContains(text, "SUBJECT = sample subject");
assertContains(text, "MANAGER = sample manager");
assertContains(text, "COMPANY = sample company");
}
}

private WordExtractor openExtractor(String fileName) throws IOException {
InputStream is = docTests.openResourceAsStream(fileName);
try {
try (InputStream is = docTests.openResourceAsStream(fileName)) {
return new WordExtractor(is);
} finally {
is.close();
}

}
}
@@ -32,7 +32,6 @@ import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.HWPFTestCase;
import org.apache.poi.hwpf.HWPFTestDataSamples;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
@@ -99,16 +98,9 @@ public final class TestHWPFWrite extends HWPFTestCase {
public void testInPlaceWrite() throws Exception {
// Setup as a copy of a known-good file
final File file = TempFile.createTempFile("TestDocument", ".doc");
InputStream inputStream = SAMPLES.openResourceAsStream("SampleDoc.doc");
try {
FileOutputStream outputStream = new FileOutputStream(file);
try {
IOUtils.copy(inputStream, outputStream);
} finally {
outputStream.close();
}
} finally {
inputStream.close();
try (InputStream inputStream = SAMPLES.openResourceAsStream("SampleDoc.doc");
FileOutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}

// Open from the temp file in read-write mode
@@ -136,28 +128,13 @@ public final class TestHWPFWrite extends HWPFTestCase {
@Test(expected=IllegalStateException.class)
public void testInvalidInPlaceWriteInputStream() throws IOException {
// Can't work for InputStream opened files
InputStream is = SAMPLES.openResourceAsStream("SampleDoc.doc");
HWPFDocument doc = new HWPFDocument(is);
is.close();
try {

try (InputStream is = SAMPLES.openResourceAsStream("SampleDoc.doc");
HWPFDocument doc = new HWPFDocument(is)) {
doc.write();
} finally {
doc.close();
}
}

@Test(expected=IllegalStateException.class)
public void testInvalidInPlaceWriteOPOIFS() throws Exception {
// Can't work for OPOIFS
OPOIFSFileSystem ofs = new OPOIFSFileSystem(SAMPLES.openResourceAsStream("SampleDoc.doc"));
HWPFDocument doc = new HWPFDocument(ofs.getRoot());
try {
doc.write();
} finally {
doc.close();
}
}

@Test(expected=IllegalStateException.class)
public void testInvalidInPlaceWriteNPOIFS() throws Exception {
// Can't work for Read-Only files
@@ -31,7 +31,7 @@ import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Before;
import org.junit.Test;

@@ -78,7 +78,7 @@ public final class TestPOIDocumentMain {
assertNotNull(doc2.getSummaryInformation());

assertEquals("Avik Sengupta", doc2.getSummaryInformation().getAuthor());
assertEquals(null, doc2.getSummaryInformation().getKeywords());
assertNull(doc2.getSummaryInformation().getKeywords());
assertEquals(0, doc2.getDocumentSummaryInformation().getByteCount());
}

@@ -110,7 +110,7 @@ public final class TestPOIDocumentMain {

// Create a new version
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
OPOIFSFileSystem inFS = new OPOIFSFileSystem(bais);
POIFSFileSystem inFS = new POIFSFileSystem(bais);

// Check they're still there
POIDocument doc3 = new HPSFPropertiesOnlyDocument(inFS);
@ -21,6 +21,7 @@ import static org.hamcrest.core.IsEqual.equalTo;
|
|||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
@ -28,7 +29,6 @@ import static org.junit.Assert.fail;
|
|||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
@ -51,7 +51,6 @@ import org.apache.poi.hpsf.NoPropertySetStreamException;
|
|||
import org.apache.poi.hpsf.Property;
|
||||
import org.apache.poi.hpsf.PropertySet;
|
||||
import org.apache.poi.hpsf.PropertySetFactory;
|
||||
import org.apache.poi.hpsf.ReadingNotSupportedException;
|
||||
import org.apache.poi.hpsf.Section;
|
||||
import org.apache.poi.hpsf.SummaryInformation;
|
||||
import org.apache.poi.hpsf.UnsupportedVariantTypeException;
|
||||
|
@ -94,8 +93,6 @@ public class TestWrite {
|
|||
"LANG environment variable to a proper value, e.g. " +
|
||||
"\"de_DE\".";
|
||||
|
||||
POIFile[] poiFiles;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
VariantSupport.setLogUnsupportedTypes(false);
|
||||
|
@ -113,24 +110,20 @@ public class TestWrite {
|
|||
|
||||
/* Create a mutable property set with a section that does not have the
|
||||
* formatID set: */
|
||||
final OutputStream out = new FileOutputStream(filename);
|
||||
final POIFSFileSystem poiFs = new POIFSFileSystem();
|
||||
final PropertySet ps = new PropertySet();
|
||||
ps.clearSections();
|
||||
ps.addSection(new Section());
|
||||
|
||||
/* Write it to a POIFS and the latter to disk: */
|
||||
try {
|
||||
try (OutputStream out = new FileOutputStream(filename);
|
||||
POIFSFileSystem poiFs = new POIFSFileSystem()) {
|
||||
final ByteArrayOutputStream psStream = new ByteArrayOutputStream();
|
||||
ps.write(psStream);
|
||||
psStream.close();
|
||||
final byte[] streamData = psStream.toByteArray();
|
||||
poiFs.createDocument(new ByteArrayInputStream(streamData),
|
||||
SummaryInformation.DEFAULT_STREAM_NAME);
|
||||
SummaryInformation.DEFAULT_STREAM_NAME);
|
||||
poiFs.writeFilesystem(out);
|
||||
} finally {
|
||||
poiFs.close();
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -170,12 +163,7 @@ public class TestWrite {
|
|||
final POIFSReader r = new POIFSReader();
|
||||
r.registerListener(new MyPOIFSReaderListener(),
|
||||
SummaryInformation.DEFAULT_STREAM_NAME);
|
||||
FileInputStream stream = new FileInputStream(filename);
|
||||
try {
|
||||
r.read(stream);
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
r.read(filename);
|
||||
}
|
||||
|
||||
|
||||
|
@ -221,24 +209,16 @@ public class TestWrite {
|
|||
/* Read the POIFS: */
|
||||
final PropertySet[] psa = new PropertySet[1];
|
||||
final POIFSReader r = new POIFSReader();
|
||||
r.registerListener(new POIFSReaderListener() {
|
||||
@Override
|
||||
public void processPOIFSReaderEvent(final POIFSReaderEvent event) {
|
||||
try {
|
||||
psa[0] = PropertySetFactory.create(event.getStream());
|
||||
} catch (Exception ex) {
|
||||
fail(ex.getMessage());
|
||||
}
|
||||
}},
|
||||
SummaryInformation.DEFAULT_STREAM_NAME
|
||||
);
|
||||
|
||||
InputStream stream = new FileInputStream(filename);
|
||||
try {
|
||||
r.read(stream);
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
final POIFSReaderListener listener = event -> {
|
||||
try {
|
||||
psa[0] = PropertySetFactory.create(event.getStream());
|
||||
} catch (Exception ex) {
|
||||
fail(ex.getMessage());
|
||||
}
|
||||
};
|
||||
r.registerListener(listener, SummaryInformation.DEFAULT_STREAM_NAME);
|
||||
|
||||
r.read(filename);
|
||||
assertNotNull(psa[0]);
|
||||
assertTrue(psa[0].isSummaryInformation());
|
||||
|
||||
|
@ -295,23 +275,17 @@ public class TestWrite {
|
|||
/* Read the POIFS: */
|
||||
final PropertySet[] psa = new PropertySet[1];
|
||||
final POIFSReader r = new POIFSReader();
|
||||
r.registerListener(new POIFSReaderListener() {
|
||||
@Override
|
||||
public void processPOIFSReaderEvent(final POIFSReaderEvent event) {
|
||||
try {
|
||||
psa[0] = PropertySetFactory.create(event.getStream());
|
||||
} catch (Exception ex) {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
|
||||
},
|
||||
STREAM_NAME);
|
||||
FileInputStream stream = new FileInputStream(filename);
|
||||
try {
|
||||
r.read(stream);
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
final POIFSReaderListener listener = (event) -> {
|
||||
try {
|
||||
psa[0] = PropertySetFactory.create(event.getStream());
|
||||
} catch (Exception ex) {
|
||||
fail(ex.getMessage());
|
||||
}
|
||||
};
|
||||
|
||||
r.registerListener(listener,STREAM_NAME);
|
||||
r.read(filename);
|
||||
|
||||
assertNotNull(psa[0]);
|
||||
Section s = (psa[0].getSections().get(0));
|
||||
assertEquals(s.getFormatID(), formatID);
|
||||
|
@ -338,12 +312,8 @@ public class TestWrite {
|
|||
|
||||
|
||||
/**
|
||||
* <p>Writes and reads back various variant types and checks whether the
|
||||
* stuff that has been read back equals the stuff that was written.</p>
|
||||
* @throws IOException
|
||||
* @throws UnsupportedEncodingException
|
||||
* @throws UnsupportedVariantTypeException
|
||||
* @throws ReadingNotSupportedException
|
||||
* Writes and reads back various variant types and checks whether the
|
||||
* stuff that has been read back equals the stuff that was written.
|
||||
*/
|
||||
@Test
|
||||
public void variantTypes() throws Exception {
|
||||
|
@ -379,9 +349,8 @@ public class TestWrite {
|
|||
* was written.
|
||||
*/
|
||||
@Test
|
||||
public void codepages() throws ReadingNotSupportedException, UnsupportedVariantTypeException, IOException
|
||||
public void codepages() throws UnsupportedVariantTypeException, IOException
|
||||
{
|
||||
Throwable thr = null;
|
||||
final int[] validCodepages = {CODEPAGE_DEFAULT, CodePageUtil.CP_UTF8, CodePageUtil.CP_UNICODE, CodePageUtil.CP_WINDOWS_1252};
|
||||
for (final int cp : validCodepages) {
|
||||
if (cp == -1 && !hasProperDefaultCharset())
|
||||
|
@ -400,9 +369,8 @@ public class TestWrite {
|
|||
|
||||
final int[] invalidCodepages = new int[] {0, 1, 2, 4711, 815};
|
||||
for (int cp : invalidCodepages) {
|
||||
final long type = (cp == CodePageUtil.CP_UNICODE) ? Variant.VT_LPWSTR : Variant.VT_LPSTR;
|
||||
try {
|
||||
checkString(type, "\u00e4\u00f6\u00fc\u00c4\u00d6\u00dc\u00df", cp);
|
||||
checkString(Variant.VT_LPSTR, "\u00e4\u00f6\u00fc\u00c4\u00d6\u00dc\u00df", cp);
|
||||
fail("UnsupportedEncodingException for codepage " + cp + " expected.");
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
/* This is the expected behaviour. */
|
||||
|
@ -441,7 +409,7 @@ public class TestWrite {
|
|||
}
|
||||
|
||||
private void checkString(final long variantType, final String value, final int codepage)
|
||||
throws UnsupportedVariantTypeException, IOException, ReadingNotSupportedException, UnsupportedEncodingException {
|
||||
throws UnsupportedVariantTypeException, IOException {
|
||||
for (int i=0; i<value.length(); i++) {
|
||||
check(variantType, value.substring(0, i), codepage);
|
||||
}
|
||||
|
@ -457,7 +425,7 @@ public class TestWrite {
|
|||
* @throws IOException if an I/O exception occurs.
|
||||
*/
|
||||
private void check(final long variantType, final Object value, final int codepage)
|
||||
throws UnsupportedVariantTypeException, IOException, ReadingNotSupportedException, UnsupportedEncodingException
|
||||
throws UnsupportedVariantTypeException, IOException
|
||||
{
|
||||
final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
VariantSupport.write(out, variantType, value, codepage);
|
||||
|
@ -474,8 +442,6 @@ public class TestWrite {
|
|||
|
||||
/**
|
||||
* <p>Tests writing and reading back a proper dictionary.</p>
|
||||
* @throws IOException
|
||||
* @throws HPSFException
|
||||
*/
|
||||
@Test
|
||||
public void dictionary() throws IOException, HPSFException {
|
||||
|
@ -488,9 +454,9 @@ public class TestWrite {
|
|||
final PropertySet ps1 = new PropertySet();
|
||||
final Section s = ps1.getSections().get(0);
|
||||
final Map<Long,String> m = new HashMap<>(3, 1.0f);
|
||||
m.put(Long.valueOf(1), "String 1");
|
||||
m.put(Long.valueOf(2), "String 2");
|
||||
m.put(Long.valueOf(3), "String 3");
|
||||
m.put(1L, "String 1");
|
||||
m.put(2L, "String 2");
|
||||
m.put(3L, "String 3");
|
||||
s.setDictionary(m);
|
||||
s.setFormatID(DocumentSummaryInformation.FORMAT_ID[0]);
|
||||
int codepage = CodePageUtil.CP_UNICODE;
|
||||
|
@ -522,12 +488,12 @@ public class TestWrite {
|
|||
*/
|
||||
@Test
|
||||
public void inPlaceNPOIFSWrite() throws Exception {
|
||||
NPOIFSFileSystem fs = null;
|
||||
DirectoryEntry root = null;
|
||||
DocumentNode sinfDoc = null;
|
||||
DocumentNode dinfDoc = null;
|
||||
SummaryInformation sinf = null;
|
||||
DocumentSummaryInformation dinf = null;
|
||||
NPOIFSFileSystem fs;
|
||||
DirectoryEntry root;
|
||||
DocumentNode sinfDoc;
|
||||
DocumentNode dinfDoc;
|
||||
SummaryInformation sinf;
|
||||
DocumentSummaryInformation dinf;
|
||||
|
||||
// We need to work on a File for in-place changes, so create a temp one
|
||||
final File copy = TempFile.createTempFile("Test-HPSF", "ole2");
|
||||
|
@ -567,10 +533,13 @@ public class TestWrite {
|
|||
assertEquals("\u7b2c1\u7ae0", sinf.getTitle());
|
||||
|
||||
assertEquals("", dinf.getCompany());
|
||||
assertEquals(null, dinf.getManager());
|
||||
assertNull(dinf.getManager());
|
||||
|
||||
|
||||
// Do an in-place replace via an InputStream
|
||||
assertNotNull(sinfDoc);
|
||||
assertNotNull(dinfDoc);
|
||||
|
||||
new NPOIFSDocument(sinfDoc).replaceContents(sinf.toInputStream());
|
||||
new NPOIFSDocument(dinfDoc).replaceContents(dinf.toInputStream());
|
||||
|
||||
|
@ -661,7 +630,7 @@ public class TestWrite {
|
|||
assertEquals("\u7b2c1\u7ae0", sinf.getTitle());
|
||||
|
||||
assertEquals("", dinf.getCompany());
|
||||
assertEquals(null, dinf.getManager());
|
||||
assertNull(dinf.getManager());
|
||||
|
||||
|
||||
// Now alter a few of them
|
||||
|
@ -730,43 +699,37 @@ public class TestWrite {
|
|||
|
||||
// Tidy up
|
||||
fs.close();
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
copy.delete();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* <p>Tests writing and reading back a proper dictionary with an invalid
|
||||
* codepage. (HPSF writes Unicode dictionaries only.)</p>
|
||||
* @throws IOException
|
||||
* @throws HPSFException
|
||||
* Tests writing and reading back a proper dictionary with an invalid
|
||||
* codepage. (HPSF writes Unicode dictionaries only.)
|
||||
*/
|
||||
@Test(expected=IllegalPropertySetDataException.class)
|
||||
@Test(expected=UnsupportedEncodingException.class)
|
||||
public void dictionaryWithInvalidCodepage() throws IOException, HPSFException {
|
||||
final File copy = TempFile.createTempFile("Test-HPSF", "ole2");
|
||||
copy.deleteOnExit();
|
||||
|
||||
/* Write: */
|
||||
final OutputStream out = new FileOutputStream(copy);
|
||||
|
||||
final POIFSFileSystem poiFs = new POIFSFileSystem();
|
||||
|
||||
final PropertySet ps1 = new PropertySet();
|
||||
final Section s = ps1.getSections().get(0);
|
||||
final Map<Long,String> m = new HashMap<>(3, 1.0f);
|
||||
m.put(Long.valueOf(1), "String 1");
|
||||
m.put(Long.valueOf(2), "String 2");
|
||||
m.put(Long.valueOf(3), "String 3");
|
||||
m.put(1L, "String 1");
|
||||
m.put(2L, "String 2");
|
||||
m.put(3L, "String 3");
|
||||
|
||||
try {
|
||||
try (OutputStream out = new FileOutputStream(copy);
|
||||
POIFSFileSystem poiFs = new POIFSFileSystem()) {
|
||||
s.setDictionary(m);
|
||||
s.setFormatID(DocumentSummaryInformation.FORMAT_ID[0]);
|
||||
int codepage = 12345;
|
||||
s.setProperty(PropertyIDMap.PID_CODEPAGE, Variant.VT_I2,
|
||||
Integer.valueOf(codepage));
|
||||
s.setProperty(PropertyIDMap.PID_CODEPAGE, Variant.VT_I2, codepage);
|
||||
poiFs.createDocument(ps1.toInputStream(), "Test");
|
||||
poiFs.writeFilesystem(out);
|
||||
} finally {
|
||||
poiFs.close();
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -18,8 +18,9 @@

package org.apache.poi.hpsf.basic;

import static org.junit.Assert.fail;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
@@ -28,7 +29,6 @@ import java.util.List;

import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.poifs.eventfilesystem.POIFSReader;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderEvent;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
import org.apache.poi.util.IOUtils;

@@ -39,6 +39,9 @@ import org.apache.poi.util.IOUtils;
*/
final class Util {

private Util() {
}

/**
* <p>Reads a set of files from a POI filesystem and returns them
* as an array of {@link POIFile} instances. This method loads all
@@ -58,24 +61,20 @@ final class Util {
*
* @exception IOException if an I/O exception occurs
*/
public static List<POIFile> readPOIFiles(final File poiFs, final String... poiFiles)
throws FileNotFoundException, IOException {
static List<POIFile> readPOIFiles(final File poiFs, final String... poiFiles) throws IOException {
final List<POIFile> files = new ArrayList<>();
POIFSReader r = new POIFSReader();
POIFSReaderListener pfl = new POIFSReaderListener() {
@Override
public void processPOIFSReaderEvent(final POIFSReaderEvent event) {
try {
final POIFile f = new POIFile();
f.setName(event.getName());
f.setPath(event.getPath());
final InputStream in = event.getStream();
f.setBytes(IOUtils.toByteArray(in));
in.close();
files.add(f);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
POIFSReaderListener pfl = event -> {
try {
final POIFile f = new POIFile();
f.setName(event.getName());
f.setPath(event.getPath());
final InputStream in = event.getStream();
f.setBytes(IOUtils.toByteArray(in));
in.close();
files.add(f);
} catch (IOException ex) {
fail(ex.getMessage());
}
};
if (poiFiles.length == 0) {
@@ -88,12 +87,8 @@ final class Util {
}

/* Read the POI filesystem. */
FileInputStream stream = new FileInputStream(poiFs);
try {
r.read(stream);
} finally {
stream.close();
}
r.read(poiFs);

return files;
}

@@ -110,29 +105,23 @@ final class Util {
* @return The property sets. The elements are ordered in the same way
* as the files in the POI filesystem.
*
* @exception FileNotFoundException if the file containing the POI
* filesystem does not exist
*
* @exception IOException if an I/O exception occurs
*/
public static List<POIFile> readPropertySets(final File poiFs) throws IOException {
static List<POIFile> readPropertySets(final File poiFs) throws IOException {
final List<POIFile> files = new ArrayList<>(7);
final POIFSReader r = new POIFSReader();
POIFSReaderListener pfl = new POIFSReaderListener() {
@Override
public void processPOIFSReaderEvent(final POIFSReaderEvent event) {
try {
final POIFile f = new POIFile();
f.setName(event.getName());
f.setPath(event.getPath());
final InputStream in = event.getStream();
if (PropertySet.isPropertySetStream(in)) {
f.setBytes(IOUtils.toByteArray(in));
files.add(f);
}
} catch (Exception ex) {
throw new RuntimeException(ex);
final POIFSReaderListener pfl = event -> {
try {
final POIFile f = new POIFile();
f.setName(event.getName());
f.setPath(event.getPath());
final InputStream in = event.getStream();
if (PropertySet.isPropertySetStream(in)) {
f.setBytes(IOUtils.toByteArray(in));
files.add(f);
}
} catch (Exception ex) {
fail(ex.getMessage());
}
};

@@ -140,12 +129,7 @@ final class Util {
r.registerListener(pfl);

/* Read the POI filesystem. */
InputStream is = new FileInputStream(poiFs);
try {
r.read(is);
} finally {
is.close();
}
r.read(poiFs);

return files;
}
@@ -68,7 +68,6 @@ import org.apache.poi.hssf.record.crypto.Biff8EncryptionKey;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.formula.ptg.DeletedArea3DPtg;
@@ -89,7 +88,6 @@ import org.apache.poi.ss.usermodel.SheetVisibility;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.util.LocaleUtil;
import org.junit.After;
import org.junit.Assume;
import org.junit.Ignore;
import org.junit.Test;
@@ -1022,7 +1020,7 @@ public final class TestBugs extends BaseTestBugzillaIssues {

assertEquals(4, wb.getNumberOfFontsAsInt());

HSSFFont f1 = wb.getFontAt((short) 0);
HSSFFont f1 = wb.getFontAt(0);
assertFalse(f1.getBold());

// Check that asking for the same font
@@ -1617,7 +1615,7 @@ public final class TestBugs extends BaseTestBugzillaIssues {
@Test
public void bug46904() throws Exception {
try {
OPOIFSFileSystem fs = new OPOIFSFileSystem(
POIFSFileSystem fs = new POIFSFileSystem(
HSSFITestDataProvider.instance.openWorkbookStream("46904.xls"));
new HSSFWorkbook(fs.getRoot(), false).close();
fail("Should catch exception here");
@@ -2505,7 +2503,7 @@ public final class TestBugs extends BaseTestBugzillaIssues {

@Test
public void bug53432() throws IOException {
Workbook wb1 = new HSSFWorkbook(); //or new HSSFWorkbook();
HSSFWorkbook wb1 = new HSSFWorkbook(); //or new HSSFWorkbook();
wb1.addPicture(new byte[]{123, 22}, Workbook.PICTURE_TYPE_JPEG);
assertEquals(wb1.getAllPictures().size(), 1);
wb1.close();
@@ -2513,13 +2511,13 @@ public final class TestBugs extends BaseTestBugzillaIssues {
wb1.close();
wb1 = new HSSFWorkbook();

Workbook wb2 = writeOutAndReadBack((HSSFWorkbook) wb1);
HSSFWorkbook wb2 = writeOutAndReadBack(wb1);
wb1.close();
assertEquals(wb2.getAllPictures().size(), 0);
wb2.addPicture(new byte[]{123, 22}, Workbook.PICTURE_TYPE_JPEG);
assertEquals(wb2.getAllPictures().size(), 1);

Workbook wb3 = writeOutAndReadBack((HSSFWorkbook) wb2);
HSSFWorkbook wb3 = writeOutAndReadBack(wb2);
wb2.close();
assertEquals(wb3.getAllPictures().size(), 1);

@@ -3093,8 +3091,8 @@ public final class TestBugs extends BaseTestBugzillaIssues {

@Test
public void test61287() throws IOException {
final Workbook wb = HSSFTestDataSamples.openSampleWorkbook("61287.xls");
ExcelExtractor ex = new ExcelExtractor((HSSFWorkbook) wb);
final HSSFWorkbook wb = HSSFTestDataSamples.openSampleWorkbook("61287.xls");
ExcelExtractor ex = new ExcelExtractor(wb);
String text = ex.getText();
assertContains(text, "\u8D44\u4EA7\u8D1F\u503A\u8868");
wb.close();
@ -29,7 +29,6 @@ import java.io.ByteArrayInputStream;
|
|||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
@ -38,7 +37,6 @@ import java.util.Collection;
|
|||
import java.util.List;
|
||||
|
||||
import junit.framework.AssertionFailedError;
|
||||
|
||||
import org.apache.poi.POIDataSamples;
|
||||
import org.apache.poi.ddf.EscherBSERecord;
|
||||
import org.apache.poi.hpsf.ClassID;
|
||||
|
@ -56,7 +54,6 @@ import org.apache.poi.hssf.record.WindowOneRecord;
|
|||
import org.apache.poi.poifs.filesystem.DirectoryEntry;
|
||||
import org.apache.poi.poifs.filesystem.DirectoryNode;
|
||||
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
|
||||
import org.apache.poi.poifs.filesystem.OPOIFSFileSystem;
|
||||
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
|
||||
import org.apache.poi.ss.formula.ptg.Area3DPtg;
|
||||
import org.apache.poi.ss.usermodel.BaseTestWorkbook;
|
||||
|
@ -95,7 +92,6 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
|
||||
/**
|
||||
* Tests for {@link HSSFWorkbook#isHidden()} etc
|
||||
* @throws IOException
|
||||
*/
|
||||
@Test
|
||||
public void hidden() throws IOException {
|
||||
|
@ -103,23 +99,23 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
|
||||
WindowOneRecord w1 = wb.getWorkbook().getWindowOne();
|
||||
|
||||
assertEquals(false, wb.isHidden());
|
||||
assertEquals(false, w1.getHidden());
|
||||
assertFalse(wb.isHidden());
|
||||
assertFalse(w1.getHidden());
|
||||
|
||||
wb.setHidden(true);
|
||||
assertEquals(true, wb.isHidden());
|
||||
assertEquals(true, w1.getHidden());
|
||||
assertTrue(wb.isHidden());
|
||||
assertTrue(w1.getHidden());
|
||||
|
||||
HSSFWorkbook wbBack = HSSFTestDataSamples.writeOutAndReadBack(wb);
|
||||
w1 = wbBack.getWorkbook().getWindowOne();
|
||||
|
||||
wbBack.setHidden(true);
|
||||
assertEquals(true, wbBack.isHidden());
|
||||
assertEquals(true, w1.getHidden());
|
||||
assertTrue(wbBack.isHidden());
|
||||
assertTrue(w1.getHidden());
|
||||
|
||||
wbBack.setHidden(false);
|
||||
assertEquals(false, wbBack.isHidden());
|
||||
assertEquals(false, w1.getHidden());
|
||||
assertFalse(wbBack.isHidden());
|
||||
assertFalse(w1.getHidden());
|
||||
|
||||
wbBack.close();
|
||||
wb.close();
|
||||
|
@ -257,30 +253,30 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
wb.setSelectedTabs(selected);
|
||||
|
||||
assertCollectionsEquals(selected, wb.getSelectedTabs());
|
||||
assertEquals(true, sheet0.isSelected());
|
||||
assertEquals(false, sheet1.isSelected());
|
||||
assertEquals(true, sheet2.isSelected());
|
||||
assertEquals(true, sheet3.isSelected());
|
||||
assertEquals(false, sheet4.isSelected());
|
||||
assertEquals(false, sheet5.isSelected());
|
||||
assertTrue(sheet0.isSelected());
|
||||
assertFalse(sheet1.isSelected());
|
||||
assertTrue(sheet2.isSelected());
|
||||
assertTrue(sheet3.isSelected());
|
||||
assertFalse(sheet4.isSelected());
|
||||
assertFalse(sheet5.isSelected());
|
||||
|
||||
selected = arrayToList(new int[] { 1, 3, 5 });
|
||||
wb.setSelectedTabs(selected);
|
||||
|
||||
// previous selection should be cleared
|
||||
assertCollectionsEquals(selected, wb.getSelectedTabs());
|
||||
assertEquals(false, sheet0.isSelected());
|
||||
assertEquals(true, sheet1.isSelected());
|
||||
assertEquals(false, sheet2.isSelected());
|
||||
assertEquals(true, sheet3.isSelected());
|
||||
assertEquals(false, sheet4.isSelected());
|
||||
assertEquals(true, sheet5.isSelected());
|
||||
assertFalse(sheet0.isSelected());
|
||||
assertTrue(sheet1.isSelected());
|
||||
assertFalse(sheet2.isSelected());
|
||||
assertTrue(sheet3.isSelected());
|
||||
assertFalse(sheet4.isSelected());
|
||||
assertTrue(sheet5.isSelected());
|
||||
|
||||
assertEquals(true, sheet0.isActive());
|
||||
assertEquals(false, sheet2.isActive());
|
||||
assertTrue(sheet0.isActive());
|
||||
assertFalse(sheet2.isActive());
|
||||
wb.setActiveSheet(2);
|
||||
assertEquals(false, sheet0.isActive());
|
||||
assertEquals(true, sheet2.isActive());
|
||||
assertFalse(sheet0.isActive());
|
||||
assertTrue(sheet2.isActive());
|
||||
|
||||
/*{ // helpful if viewing this workbook in excel:
|
||||
sheet0.createRow(0).createCell(0).setCellValue(new HSSFRichTextString("Sheet0"));
|
||||
|
@ -383,7 +379,6 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
* records to be written with invalid offset indexes. Excel does not like this, and such
|
||||
* errors are particularly hard to track down. This test ensures that HSSFWorkbook throws
|
||||
* a specific exception as soon as the situation is detected. See bugzilla 45066
|
||||
* @throws IOException
|
||||
*/
|
||||
@Test
|
||||
public void sheetSerializeSizeMismatch_bug45066() throws IOException {
|
||||
|
@ -496,7 +491,7 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
* result returned by getRecordSize() differs from result returned by serialize()
|
||||
*/
|
||||
private static final class BadlyBehavedRecord extends Record {
|
||||
public BadlyBehavedRecord() {
|
||||
BadlyBehavedRecord() {
|
||||
//
|
||||
}
|
||||
@Override
|
||||
|
@ -576,7 +571,7 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
POIFSFileSystem fs2 = new POIFSFileSystem(new ByteArrayInputStream(bytes));
|
||||
ClassID clsid2 = fs2.getRoot().getStorageClsid();
|
||||
|
||||
assertTrue(clsid1.equals(clsid2));
|
||||
assertEquals(clsid1, clsid2);
|
||||
|
||||
fs2.close();
|
||||
wb.close();
|
||||
|
@ -625,108 +620,89 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
public void differentPOIFS() throws Exception {
|
||||
// Open the two filesystems
|
||||
DirectoryNode[] files = new DirectoryNode[2];
|
||||
POIFSFileSystem poifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("Simple.xls"));
|
||||
try {
|
||||
files[0] = poifsFileSystem.getRoot();
|
||||
NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("Simple.xls"));
|
||||
try {
|
||||
files[1] = npoifsFileSystem.getRoot();
|
||||
|
||||
// Open without preserving nodes
|
||||
for(DirectoryNode dir : files) {
|
||||
HSSFWorkbook workbook = new HSSFWorkbook(dir, false);
|
||||
HSSFSheet sheet = workbook.getSheetAt(0);
|
||||
HSSFCell cell = sheet.getRow(0).getCell(0);
|
||||
assertEquals("replaceMe", cell .getRichStringCellValue().getString());
|
||||
|
||||
workbook.close();
|
||||
}
|
||||
|
||||
// Now re-check with preserving
|
||||
for(DirectoryNode dir : files) {
|
||||
HSSFWorkbook workbook = new HSSFWorkbook(dir, true);
|
||||
HSSFSheet sheet = workbook.getSheetAt(0);
|
||||
HSSFCell cell = sheet.getRow(0).getCell(0);
|
||||
assertEquals("replaceMe", cell .getRichStringCellValue().getString());
|
||||
|
||||
workbook.close();
|
||||
}
|
||||
} finally {
|
||||
npoifsFileSystem.close();
|
||||
}
|
||||
} finally {
|
||||
poifsFileSystem.close();
|
||||
}
|
||||
try (POIFSFileSystem poifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("Simple.xls"))) {
|
||||
files[0] = poifsFileSystem.getRoot();
|
||||
try (NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("Simple.xls"))) {
|
||||
files[1] = npoifsFileSystem.getRoot();
|
||||
|
||||
// Open without preserving nodes
|
||||
for (DirectoryNode dir : files) {
|
||||
HSSFWorkbook workbook = new HSSFWorkbook(dir, false);
|
||||
HSSFSheet sheet = workbook.getSheetAt(0);
|
||||
HSSFCell cell = sheet.getRow(0).getCell(0);
|
||||
assertEquals("replaceMe", cell.getRichStringCellValue().getString());
|
||||
|
||||
workbook.close();
|
||||
}
|
||||
|
||||
// Now re-check with preserving
|
||||
for (DirectoryNode dir : files) {
|
||||
HSSFWorkbook workbook = new HSSFWorkbook(dir, true);
|
||||
HSSFSheet sheet = workbook.getSheetAt(0);
|
||||
HSSFCell cell = sheet.getRow(0).getCell(0);
|
||||
assertEquals("replaceMe", cell.getRichStringCellValue().getString());
|
||||
|
||||
workbook.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void wordDocEmbeddedInXls() throws IOException {
|
||||
// Open the two filesystems
|
||||
DirectoryNode[] files = new DirectoryNode[2];
|
||||
POIFSFileSystem poifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("WithEmbeddedObjects.xls"));
|
||||
try {
|
||||
files[0] = poifsFileSystem.getRoot();
|
||||
NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("WithEmbeddedObjects.xls"));
|
||||
try {
|
||||
files[1] = npoifsFileSystem.getRoot();
|
||||
|
||||
// Check the embedded parts
|
||||
for(DirectoryNode root : files) {
|
||||
HSSFWorkbook hw = new HSSFWorkbook(root, true);
|
||||
List<HSSFObjectData> objects = hw.getAllEmbeddedObjects();
|
||||
boolean found = false;
|
||||
for (HSSFObjectData embeddedObject : objects) {
|
||||
if (embeddedObject.hasDirectoryEntry()) {
|
||||
DirectoryEntry dir = embeddedObject.getDirectory();
|
||||
if (dir instanceof DirectoryNode) {
|
||||
DirectoryNode dNode = (DirectoryNode) dir;
|
||||
if (hasEntry(dNode, "WordDocument")) {
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
assertTrue(found);
|
||||
|
||||
hw.close();
|
||||
}
|
||||
} finally {
|
||||
npoifsFileSystem.close();
|
||||
}
|
||||
} finally {
|
||||
poifsFileSystem.close();
|
||||
}
|
||||
try (POIFSFileSystem poifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("WithEmbeddedObjects.xls"))) {
|
||||
files[0] = poifsFileSystem.getRoot();
|
||||
try (NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("WithEmbeddedObjects.xls"))) {
|
||||
files[1] = npoifsFileSystem.getRoot();
|
||||
|
||||
// Check the embedded parts
|
||||
for (DirectoryNode root : files) {
|
||||
HSSFWorkbook hw = new HSSFWorkbook(root, true);
|
||||
List<HSSFObjectData> objects = hw.getAllEmbeddedObjects();
|
||||
boolean found = false;
|
||||
for (HSSFObjectData embeddedObject : objects) {
|
||||
if (embeddedObject.hasDirectoryEntry()) {
|
||||
DirectoryEntry dir = embeddedObject.getDirectory();
|
||||
if (dir instanceof DirectoryNode) {
|
||||
DirectoryNode dNode = (DirectoryNode) dir;
|
||||
if (dNode.hasEntry("WordDocument")) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
assertTrue(found);
|
||||
|
||||
hw.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that we can open a workbook with NPOIFS, and write it out
|
||||
* again (via POIFS) and have it be valid
|
||||
* @throws IOException
|
||||
*/
|
||||
@Test
|
||||
public void writeWorkbookFromNPOIFS() throws IOException {
|
||||
InputStream is = HSSFTestDataSamples.openSampleFileStream("WithEmbeddedObjects.xls");
|
||||
try {
|
||||
NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
|
||||
try {
|
||||
// Start as NPOIFS
|
||||
HSSFWorkbook wb = new HSSFWorkbook(fs.getRoot(), true);
|
||||
assertEquals(3, wb.getNumberOfSheets());
|
||||
assertEquals("Root xls", wb.getSheetAt(0).getRow(0).getCell(0).getStringCellValue());
|
||||
try (InputStream is = HSSFTestDataSamples.openSampleFileStream("WithEmbeddedObjects.xls");
|
||||
NPOIFSFileSystem fs = new NPOIFSFileSystem(is)) {
|
||||
// Start as NPOIFS
|
||||
HSSFWorkbook wb = new HSSFWorkbook(fs.getRoot(), true);
|
||||
assertEquals(3, wb.getNumberOfSheets());
|
||||
assertEquals("Root xls", wb.getSheetAt(0).getRow(0).getCell(0).getStringCellValue());
|
||||
|
||||
// Will switch to POIFS
|
||||
HSSFWorkbook wbBack = HSSFTestDataSamples.writeOutAndReadBack(wb);
|
||||
assertEquals(3, wbBack.getNumberOfSheets());
|
||||
assertEquals("Root xls", wbBack.getSheetAt(0).getRow(0).getCell(0).getStringCellValue());
|
||||
wbBack.close();
|
||||
|
||||
wb.close();
|
||||
} finally {
|
||||
fs.close();
|
||||
}
|
||||
} finally {
|
||||
is.close();
|
||||
}
|
||||
// Will switch to POIFS
|
||||
HSSFWorkbook wbBack = HSSFTestDataSamples.writeOutAndReadBack(wb);
|
||||
assertEquals(3, wbBack.getNumberOfSheets());
|
||||
assertEquals("Root xls", wbBack.getSheetAt(0).getRow(0).getCell(0).getStringCellValue());
|
||||
wbBack.close();
|
||||
|
||||
wb.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -795,7 +771,9 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
wb.setSheetOrder("other sheet", 0);
|
||||
|
||||
// names
|
||||
//noinspection ConstantConditions
|
||||
assertEquals("'first sheet'!D1", wb.getName("name1").getRefersToFormula());
|
||||
//noinspection ConstantConditions
|
||||
assertEquals("'other sheet'!C1", wb.getName("name2").getRefersToFormula());
|
||||
|
||||
// cells
|
||||
|
@ -811,15 +789,6 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
wb.close();
|
||||
}
|
||||
|
||||
private boolean hasEntry(DirectoryNode dirNode, String entryName) {
|
||||
try {
|
||||
dirNode.getEntry(entryName);
|
||||
return true;
|
||||
} catch (FileNotFoundException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void clonePictures() throws IOException {
|
||||
HSSFWorkbook wb = HSSFTestDataSamples.openSampleWorkbook("SimpleWithImages.xls");
|
||||
|
@ -854,11 +823,8 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
// Should throw exception about invalid POIFSFileSystem
|
||||
@Test(expected=IllegalArgumentException.class)
|
||||
public void emptyDirectoryNode() throws IOException {
|
||||
POIFSFileSystem fs = new POIFSFileSystem();
|
||||
try {
|
||||
try (POIFSFileSystem fs = new POIFSFileSystem()) {
|
||||
new HSSFWorkbook(fs).close();
|
||||
} finally {
|
||||
fs.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1093,7 +1059,8 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
wb.close();
|
||||
}
|
||||
|
||||
private void expectName(HSSFWorkbook wb, String name, String expect) {
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
private void expectName(HSSFWorkbook wb, String name, String expect) {
|
||||
final HSSFName hssfName = wb.getName(name);
|
||||
assertNotNull(hssfName);
|
||||
assertEquals(expect, hssfName.getRefersToFormula());
|
||||
|
@ -1149,16 +1116,13 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
|
||||
// edit the workbook
|
||||
{
|
||||
NPOIFSFileSystem fs = new NPOIFSFileSystem(file, false);
|
||||
try {
|
||||
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(file, false)) {
|
||||
DirectoryNode root = fs.getRoot();
|
||||
final Workbook workbook = new HSSFWorkbook(root, true);
|
||||
final Sheet sheet = workbook.getSheet("foo");
|
||||
sheet.getRow(1).createCell(2).setCellValue("baz");
|
||||
|
||||
|
||||
writeAndCloseWorkbook(workbook, file);
|
||||
} finally {
|
||||
fs.close();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
|
@ -1239,18 +1203,6 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
}
|
||||
wb.close();
|
||||
|
||||
// Can't work for OPOIFS
|
||||
OPOIFSFileSystem ofs = new OPOIFSFileSystem(
|
||||
POIDataSamples.getSpreadSheetInstance().openResourceAsStream("SampleSS.xls"));
|
||||
wb = new HSSFWorkbook(ofs.getRoot(), true);
|
||||
try {
|
||||
wb.write();
|
||||
fail("Shouldn't work for OPOIFSFileSystem");
|
||||
} catch (IllegalStateException e) {
|
||||
// expected here
|
||||
}
|
||||
wb.close();
|
||||
|
||||
// Can't work for Read-Only files
|
||||
NPOIFSFileSystem fs = new NPOIFSFileSystem(
|
||||
POIDataSamples.getSpreadSheetInstance().getFile("SampleSS.xls"), true);
|
||||
|
@ -1268,16 +1220,9 @@ public final class TestHSSFWorkbook extends BaseTestWorkbook {
|
|||
public void inPlaceWrite() throws Exception {
|
||||
// Setup as a copy of a known-good file
|
||||
final File file = TempFile.createTempFile("TestHSSFWorkbook", ".xls");
|
||||
InputStream inputStream = POIDataSamples.getSpreadSheetInstance().openResourceAsStream("SampleSS.xls");
|
||||
try {
|
||||
FileOutputStream outputStream = new FileOutputStream(file);
|
||||
try {
|
||||
IOUtils.copy(inputStream, outputStream);
|
||||
} finally {
|
||||
outputStream.close();
|
||||
}
|
||||
} finally {
|
||||
inputStream.close();
|
||||
try (InputStream inputStream = POIDataSamples.getSpreadSheetInstance().openResourceAsStream("SampleSS.xls");
|
||||
FileOutputStream outputStream = new FileOutputStream(file)) {
|
||||
IOUtils.copy(inputStream, outputStream);
|
||||
}
|
||||
|
||||
// Open from the temp file in read-write mode
|
||||
|
|
|
@@ -17,20 +17,22 @@

package org.apache.poi.poifs.eventfilesystem;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import junit.framework.TestCase;

import org.apache.poi.poifs.filesystem.POIFSDocumentPath;
import org.junit.Test;

/**
* Class to test POIFSReaderRegistry functionality
*
* @author Marc Johnson
*/
public final class TestPOIFSReaderRegistry extends TestCase {
public final class TestPOIFSReaderRegistry {
private final POIFSReaderListener[] listeners =
{
new Listener(), new Listener(), new Listener(), new Listener()
@@ -56,13 +58,14 @@ public final class TestPOIFSReaderRegistry extends TestCase {
/**
* Test empty registry
*/
@Test
public void testEmptyRegistry() {
POIFSReaderRegistry registry = new POIFSReaderRegistry();

for (POIFSDocumentPath path : paths) {
for (String name : names) {
Iterator<POIFSReaderListener> listeners =
registry.getListeners(path, name);
registry.getListeners(path, name).iterator();

assertTrue(!listeners.hasNext());
}
@@ -72,6 +75,7 @@ public final class TestPOIFSReaderRegistry extends TestCase {
/**
* Test mixed registration operations
*/
@Test
public void testMixedRegistrationOperations() {
POIFSReaderRegistry registry = new POIFSReaderRegistry();

@@ -93,21 +97,20 @@ public final class TestPOIFSReaderRegistry extends TestCase {
{
for (int n = 0; n < names.length; n++)
{
Iterator<POIFSReaderListener> listeners =
Iterable<POIFSReaderListener> listeners =
registry.getListeners(paths[ k ], names[ n ]);

if (k == n)
{
assertTrue(!listeners.hasNext());
assertTrue(!listeners.iterator().hasNext());
}
else
{
Set<POIFSReaderListener> registeredListeners =
new HashSet<>();

while (listeners.hasNext())
{
registeredListeners.add(listeners.next());
for (POIFSReaderListener rl : listeners) {
registeredListeners.add(rl);
}
assertEquals(this.listeners.length - 1,
registeredListeners.size());
@@ -132,14 +135,13 @@ public final class TestPOIFSReaderRegistry extends TestCase {
}
for (POIFSDocumentPath path : paths) {
for (String name : names) {
Iterator<POIFSReaderListener> listeners =
Iterable<POIFSReaderListener> listeners =
registry.getListeners(path, name);
Set<POIFSReaderListener> registeredListeners =
new HashSet<>();

while (listeners.hasNext())
{
registeredListeners.add(listeners.next());
for (POIFSReaderListener rl : listeners) {
registeredListeners.add(rl);
}
assertEquals(this.listeners.length,
registeredListeners.size());
@@ -21,7 +21,7 @@ import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/**
* Tests for org.apache.poi.poifs.filesystem<br>
* Tests for org.apache.poi.poifs.filesystem
*/
@RunWith(Suite.class)
@Suite.SuiteClasses({
@@ -29,7 +29,6 @@ import org.junit.runners.Suite;
, TestDocument.class
, TestDocumentDescriptor.class
, TestDocumentInputStream.class
, TestDocumentNode.class
, TestDocumentOutputStream.class
, TestEmptyDocument.class
, TestNotOLE2Exception.class
@@ -19,7 +19,7 @@

package org.apache.poi.poifs.filesystem;

import java.io.FileInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -38,42 +38,19 @@ import org.apache.poi.util.IOUtils;
* @author Marc Johnson (mjohnson at apache dot org)
*/

public class ReaderWriter
public final class ReaderWriter
implements POIFSReaderListener, POIFSWriterListener
{
private final POIFSFileSystem filesystem;
private final DirectoryEntry root;

// keys are DocumentDescriptors, values are byte[]s
private final Map<DocumentDescriptor, byte[]> dataMap;
private final Map<DocumentDescriptor, byte[]> dataMap = new HashMap<>();

/**
* Constructor ReaderWriter
*
*
* @param filesystem
*
*/

ReaderWriter(final POIFSFileSystem filesystem)
{
this.filesystem = filesystem;
root = this.filesystem.getRoot();
dataMap = new HashMap<>();
private ReaderWriter(final POIFSFileSystem filesystem) {
root = filesystem.getRoot();
}

/**
* Method main
*
*
* @param args
*
* @exception IOException
*
*/

public static void main(String [] args)
throws IOException
public static void main(String [] args) throws IOException
{
if (args.length != 2)
{
@@ -86,10 +63,8 @@ public class ReaderWriter
POIFSFileSystem filesystem = new POIFSFileSystem();

reader.registerListener(new ReaderWriter(filesystem));
FileInputStream istream = new FileInputStream(args[ 0 ]);

reader.read(istream);
istream.close();
reader.read(new File(args[ 0 ]));
FileOutputStream ostream = new FileOutputStream(args[ 1 ]);

filesystem.writeFilesystem(ostream);
@ -17,218 +17,131 @@
|
|||
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import static org.apache.poi.poifs.common.POIFSConstants.LARGER_BIG_BLOCK_SIZE;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import org.apache.poi.poifs.property.DocumentProperty;
|
||||
import org.apache.poi.poifs.storage.RawDataBlock;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.apache.poi.poifs.storage.RawDataUtil;
|
||||
import org.apache.poi.util.IOUtils;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Class to test OPOIFSDocument functionality
|
||||
* Class to test POIFSDocument functionality
|
||||
*/
|
||||
public final class TestDocument extends TestCase {
|
||||
public class TestDocument {
|
||||
|
||||
/**
|
||||
* Integration test -- really about all we can do
|
||||
*/
|
||||
public void testOPOIFSDocument() throws IOException {
|
||||
@Test
|
||||
public void testNPOIFSDocument() throws IOException {
|
||||
|
||||
// verify correct number of blocks get created for document
|
||||
// that is exact multituple of block size
|
||||
OPOIFSDocument document;
|
||||
byte[] array = new byte[ 4096 ];
|
||||
try (NPOIFSFileSystem poifs = new NPOIFSFileSystem()) {
|
||||
|
||||
for (int j = 0; j < array.length; j++)
|
||||
{
|
||||
array[ j ] = ( byte ) j;
|
||||
}
|
||||
document = new OPOIFSDocument("foo", new SlowInputStream(new ByteArrayInputStream(array)));
|
||||
checkDocument(document, array);
|
||||
// verify correct number of blocks get created for document
|
||||
// that is exact multiple of block size
|
||||
checkDocument(poifs, LARGER_BIG_BLOCK_SIZE);
|
||||
|
||||
// verify correct number of blocks get created for document
|
||||
// that is not an exact multiple of block size
|
||||
array = new byte[ 4097 ];
|
||||
for (int j = 0; j < array.length; j++)
|
||||
{
|
||||
array[ j ] = ( byte ) j;
|
||||
}
|
||||
document = new OPOIFSDocument("bar", new ByteArrayInputStream(array));
|
||||
checkDocument(document, array);
|
||||
// verify correct number of blocks get created for document
|
||||
// that is not an exact multiple of block size
|
||||
checkDocument(poifs, LARGER_BIG_BLOCK_SIZE + 1);
|
||||
|
||||
// verify correct number of blocks get created for document
|
||||
// that is small
|
||||
array = new byte[ 4095 ];
|
||||
for (int j = 0; j < array.length; j++)
|
||||
{
|
||||
array[ j ] = ( byte ) j;
|
||||
}
|
||||
document = new OPOIFSDocument("_bar", new ByteArrayInputStream(array));
|
||||
checkDocument(document, array);
|
||||
// verify correct number of blocks get created for document
|
||||
// that is small
|
||||
checkDocument(poifs, LARGER_BIG_BLOCK_SIZE - 1);
|
||||
|
||||
// verify correct number of blocks get created for document
|
||||
// that is rather small
|
||||
array = new byte[ 199 ];
|
||||
for (int j = 0; j < array.length; j++)
|
||||
{
|
||||
array[ j ] = ( byte ) j;
|
||||
}
|
||||
document = new OPOIFSDocument("_bar2",
|
||||
new ByteArrayInputStream(array));
|
||||
checkDocument(document, array);
|
||||
// verify correct number of blocks get created for document
|
||||
// that is rather small
|
||||
checkDocument(poifs, 199);
|
||||
|
||||
// verify that output is correct
|
||||
array = new byte[ 4097 ];
|
||||
for (int j = 0; j < array.length; j++)
|
||||
{
|
||||
array[ j ] = ( byte ) j;
|
||||
}
|
||||
document = new OPOIFSDocument("foobar",
|
||||
new ByteArrayInputStream(array));
|
||||
checkDocument(document, array);
|
||||
document.setStartBlock(0x12345678); // what a big file!!
|
||||
DocumentProperty property = document.getDocumentProperty();
|
||||
ByteArrayOutputStream stream = new ByteArrayOutputStream();
|
||||
|
||||
property.writeData(stream);
|
||||
byte[] output = stream.toByteArray();
|
||||
byte[] array2 =
|
||||
{
|
||||
( byte ) 'f', ( byte ) 0, ( byte ) 'o', ( byte ) 0, ( byte ) 'o',
|
||||
( byte ) 0, ( byte ) 'b', ( byte ) 0, ( byte ) 'a', ( byte ) 0,
|
||||
( byte ) 'r', ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 14,
|
||||
( byte ) 0, ( byte ) 2, ( byte ) 1, ( byte ) -1, ( byte ) -1,
|
||||
( byte ) -1, ( byte ) -1, ( byte ) -1, ( byte ) -1, ( byte ) -1,
|
||||
( byte ) -1, ( byte ) -1, ( byte ) -1, ( byte ) -1, ( byte ) -1,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0x78, ( byte ) 0x56, ( byte ) 0x34,
|
||||
( byte ) 0x12, ( byte ) 1, ( byte ) 16, ( byte ) 0, ( byte ) 0,
|
||||
( byte ) 0, ( byte ) 0, ( byte ) 0, ( byte ) 0
|
||||
};
|
||||
// verify that output is correct
|
||||
NPOIFSDocument document = checkDocument(poifs, LARGER_BIG_BLOCK_SIZE + 1);
|
||||
DocumentProperty property = document.getDocumentProperty();
|
||||
ByteArrayOutputStream stream = new ByteArrayOutputStream();
|
||||
|
||||
assertEquals(array2.length, output.length);
|
||||
for (int j = 0; j < output.length; j++)
|
||||
{
|
||||
assertEquals("Checking property offset " + j, array2[ j ],
|
||||
output[ j ]);
|
||||
property.writeData(stream);
|
||||
byte[] output = stream.toByteArray();
|
||||
byte[] array2 = RawDataUtil.decompress("H4sIAAAAAAAAAEtlyGMoYShiqGSwYCAH8DEwMf5HAsToMQdiRgEIGwCDyzEQgAAAAA==");
|
||||
|
||||
assertArrayEquals(array2, output);
|
||||
}
|
||||
}
|
||||
|
||||
private static OPOIFSDocument makeCopy(OPOIFSDocument document, byte[] input, byte[] data)
|
||||
throws IOException {
|
||||
OPOIFSDocument copy = null;
|
||||
private static NPOIFSDocument checkDocument(final NPOIFSFileSystem poifs, final int size) throws IOException {
|
||||
final byte[] input = new byte[size];
|
||||
IntStream.range(0, size).forEach(i -> input[i] = (byte)i);
|
||||
|
||||
if (input.length >= 4096)
|
||||
{
|
||||
RawDataBlock[] blocks =
|
||||
new RawDataBlock[ (input.length + 511) / 512 ];
|
||||
ByteArrayInputStream stream = new ByteArrayInputStream(data);
|
||||
int index = 0;
|
||||
NPOIFSDocument document = ((DocumentNode)poifs.createDocument(
|
||||
new SlowInputStream(new ByteArrayInputStream(input)),
|
||||
"entry"+poifs.getRoot().getEntryCount())).getDocument();
|
||||
|
||||
while (true)
|
||||
{
|
||||
RawDataBlock block = new RawDataBlock(stream);
|
||||
final int blockSize = (size >= 4096) ? 512 : 64;
|
||||
final int blockCount = (size + (blockSize-1)) / blockSize;
|
||||
|
||||
if (block.eof())
|
||||
{
|
||||
break;
|
||||
}
|
||||
blocks[ index++ ] = block;
|
||||
}
|
||||
copy = new OPOIFSDocument("test" + input.length, blocks,
|
||||
input.length);
|
||||
}
|
||||
else
|
||||
{
|
||||
copy = new OPOIFSDocument("test"+input.length, document.getSmallBlocks(), input.length);
|
||||
}
|
||||
return copy;
|
||||
final byte[] bytCpy = checkValues(blockCount, document, input);
|
||||
final NPOIFSDocument copied = makeCopy(document,bytCpy);
|
||||
|
||||
checkValues(blockCount, copied, input);
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
private static void checkDocument(final OPOIFSDocument document, final byte[] input)
|
||||
throws IOException {
|
||||
int big_blocks = 0;
|
||||
int small_blocks = 0;
|
||||
int total_output = 0;
|
||||
|
||||
if (input.length >= 4096)
|
||||
{
|
||||
big_blocks = (input.length + 511) / 512;
|
||||
total_output = big_blocks * 512;
|
||||
private static NPOIFSDocument makeCopy(NPOIFSDocument document, byte[] input) throws IOException {
|
||||
NPOIFSFileSystem poifs = document.getFileSystem();
|
||||
String name = "test" + input.length;
|
||||
DirectoryNode root = poifs.getRoot();
|
||||
if (root.hasEntry(name)) {
|
||||
root.deleteEntry((EntryNode)root.getEntry(name));
|
||||
}
|
||||
else
|
||||
{
|
||||
small_blocks = (input.length + 63) / 64;
|
||||
total_output = 0;
|
||||
}
|
||||
checkValues(
|
||||
big_blocks, small_blocks, total_output,
|
||||
makeCopy(
|
||||
document, input,
|
||||
checkValues(
|
||||
big_blocks, small_blocks, total_output, document,
|
||||
input)), input);
|
||||
return ((DocumentNode)root
|
||||
.createDocument(name, new ByteArrayInputStream(input)))
|
||||
.getDocument();
|
||||
}
|
||||
|
||||
private static byte[] checkValues(int big_blocks, int small_blocks, int total_output,
|
||||
OPOIFSDocument document, byte[] input) throws IOException {
|
||||
private static byte[] checkValues(final int blockCountExp, NPOIFSDocument document, byte[] input) throws IOException {
|
||||
assertNotNull(document);
|
||||
assertNotNull(document.getDocumentProperty().getDocument());
|
||||
assertEquals(document, document.getDocumentProperty().getDocument());
|
||||
int increment = ( int ) Math.sqrt(input.length);
|
||||
|
||||
for (int j = 1; j <= input.length; j += increment)
|
||||
{
|
||||
byte[] buffer = new byte[ j ];
|
||||
int offset = 0;
|
||||
ByteArrayInputStream bis = new ByteArrayInputStream(input);
|
||||
|
||||
for (int k = 0; k < (input.length / j); k++)
|
||||
{
|
||||
document.read(buffer, offset);
|
||||
for (int n = 0; n < buffer.length; n++)
|
||||
{
|
||||
assertEquals("checking byte " + (k * j) + n,
|
||||
input[ (k * j) + n ], buffer[ n ]);
|
||||
}
|
||||
offset += j;
|
||||
}
|
||||
int blockCountAct = 0, bytesRemaining = input.length;
|
||||
for (ByteBuffer bb : document) {
|
||||
assertTrue(bytesRemaining > 0);
|
||||
int bytesAct = Math.min(bb.remaining(), bytesRemaining);
|
||||
assertTrue(bytesAct <= document.getDocumentBlockSize());
|
||||
byte[] bufAct = new byte[bytesAct];
|
||||
bb.get(bufAct);
|
||||
|
||||
byte[] bufExp = new byte[bytesAct];
|
||||
int bytesExp = bis.read(bufExp, 0, bytesAct);
|
||||
assertEquals(bytesExp, bytesAct);
|
||||
|
||||
assertArrayEquals(bufExp, bufAct);
|
||||
blockCountAct++;
|
||||
bytesRemaining -= bytesAct;
|
||||
}
|
||||
assertEquals(big_blocks, document.countBlocks());
|
||||
assertEquals(small_blocks, document.getSmallBlocks().length);
|
||||
|
||||
assertEquals(blockCountExp, blockCountAct);
|
||||
|
||||
ByteArrayOutputStream stream = new ByteArrayOutputStream();
|
||||
try (DocumentInputStream dis = document.getFileSystem().createDocumentInputStream(
|
||||
document.getDocumentProperty().getName())) {
|
||||
IOUtils.copy(dis, stream);
|
||||
}
|
||||
|
||||
document.writeBlocks(stream);
|
||||
byte[] output = stream.toByteArray();
|
||||
|
||||
assertEquals(total_output, output.length);
|
||||
int limit = Math.min(total_output, input.length);
|
||||
|
||||
for (int j = 0; j < limit; j++)
|
||||
{
|
||||
assertEquals("Checking document offset " + j, input[ j ],
|
||||
output[ j ]);
|
||||
}
|
||||
for (int j = limit; j < output.length; j++)
|
||||
{
|
||||
assertEquals("Checking document offset " + j, ( byte ) -1,
|
||||
output[ j ]);
|
||||
}
|
||||
assertArrayEquals(input, stream.toByteArray());
|
||||
return output;
|
||||
}
|
||||
}
@@ -18,19 +18,17 @@
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.poi.POIDataSamples;
|
||||
import org.apache.poi.poifs.property.DirectoryProperty;
|
||||
import org.apache.poi.poifs.storage.RawDataBlock;
|
||||
import org.apache.poi.util.SuppressForbidden;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
@@ -39,448 +37,369 @@ import org.junit.Test;
* Class to test DocumentInputStream functionality
|
||||
*/
|
||||
public final class TestDocumentInputStream {
|
||||
private DocumentNode _workbook_n;
|
||||
private DocumentNode _workbook_o;
|
||||
private byte[] _workbook_data;
|
||||
private static final int _workbook_size = 5000;
|
||||
private DocumentNode _workbook_n;
|
||||
private byte[] _workbook_data;
|
||||
private static final int _workbook_size = 5000;
|
||||
|
||||
// non-even division of _workbook_size, also non-even division of
|
||||
// any block size
|
||||
private static final int _buffer_size = 6;
|
||||
// non-even division of _workbook_size, also non-even division of
|
||||
// any block size
|
||||
private static final int _buffer_size = 6;
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
int blocks = (_workbook_size + 511) / 512;
|
||||
|
||||
_workbook_data = new byte[ 512 * blocks ];
|
||||
Arrays.fill(_workbook_data, ( byte ) -1);
|
||||
for (int j = 0; j < _workbook_size; j++)
|
||||
{
|
||||
_workbook_data[ j ] = ( byte ) (j * j);
|
||||
_workbook_data = new byte[512 * blocks];
|
||||
Arrays.fill(_workbook_data, (byte) -1);
|
||||
for (int j = 0; j < _workbook_size; j++) {
|
||||
_workbook_data[j] = (byte) (j * j);
|
||||
}
|
||||
|
||||
// Create the Old POIFS Version
|
||||
RawDataBlock[] rawBlocks = new RawDataBlock[ blocks ];
|
||||
ByteArrayInputStream stream =
|
||||
new ByteArrayInputStream(_workbook_data);
|
||||
|
||||
for (int j = 0; j < blocks; j++)
|
||||
{
|
||||
rawBlocks[ j ] = new RawDataBlock(stream);
|
||||
}
|
||||
OPOIFSDocument document = new OPOIFSDocument("Workbook", rawBlocks,
|
||||
_workbook_size);
|
||||
|
||||
_workbook_o = new DocumentNode(
|
||||
document.getDocumentProperty(),
|
||||
new DirectoryNode(
|
||||
new DirectoryProperty("Root Entry"), (POIFSFileSystem)null, null));
|
||||
|
||||
// Now create the NPOIFS Version
|
||||
byte[] _workbook_data_only = new byte[_workbook_size];
|
||||
System.arraycopy(_workbook_data, 0, _workbook_data_only, 0, _workbook_size);
|
||||
|
||||
|
||||
NPOIFSFileSystem npoifs = new NPOIFSFileSystem();
|
||||
// Make it easy when debugging to see what isn't the doc
|
||||
byte[] minus1 = new byte[512];
|
||||
Arrays.fill(minus1, (byte)-1);
|
||||
Arrays.fill(minus1, (byte) -1);
|
||||
npoifs.getBlockAt(-1).put(minus1);
|
||||
npoifs.getBlockAt(0).put(minus1);
|
||||
npoifs.getBlockAt(1).put(minus1);
|
||||
|
||||
|
||||
// Create the NPOIFS document
|
||||
_workbook_n = (DocumentNode)npoifs.createDocument(
|
||||
new ByteArrayInputStream(_workbook_data_only),
|
||||
"Workbook"
|
||||
_workbook_n = (DocumentNode) npoifs.createDocument(
|
||||
new ByteArrayInputStream(_workbook_data_only),
|
||||
"Workbook"
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* test constructor
|
||||
*/
|
||||
@Test
|
||||
public void testConstructor() throws IOException {
|
||||
DocumentInputStream ostream = new ODocumentInputStream(_workbook_o);
|
||||
DocumentInputStream nstream = new NDocumentInputStream(_workbook_n);
|
||||
|
||||
assertEquals(_workbook_size, _workbook_o.getSize());
|
||||
assertEquals(_workbook_size, _workbook_n.getSize());
|
||||
|
||||
assertEquals(_workbook_size, available(ostream));
|
||||
assertEquals(_workbook_size, available(nstream));
|
||||
|
||||
ostream.close();
|
||||
nstream.close();
|
||||
try (DocumentInputStream nstream = new NDocumentInputStream(_workbook_n)) {
|
||||
assertEquals(_workbook_size, _workbook_n.getSize());
|
||||
assertEquals(_workbook_size, available(nstream));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* test available() behavior
|
||||
*/
|
||||
@Test
|
||||
@Test(expected = IllegalStateException.class)
|
||||
public void testAvailable() throws IOException {
|
||||
DocumentInputStream ostream = new DocumentInputStream(_workbook_o);
|
||||
DocumentInputStream nstream = new NDocumentInputStream(_workbook_n);
|
||||
|
||||
assertEquals(_workbook_size, available(ostream));
|
||||
assertEquals(_workbook_size, available(nstream));
|
||||
ostream.close();
|
||||
nstream.close();
|
||||
|
||||
try {
|
||||
available(ostream);
|
||||
fail("Should have caught IOException");
|
||||
} catch (IllegalStateException ignored) {
|
||||
// as expected
|
||||
}
|
||||
try {
|
||||
available(nstream);
|
||||
fail("Should have caught IOException");
|
||||
} catch (IllegalStateException ignored) {
|
||||
// as expected
|
||||
}
|
||||
|
||||
available(nstream);
|
||||
}
|
||||
|
||||
/**
|
||||
* test mark/reset/markSupported.
|
||||
*/
|
||||
@SuppressWarnings("ResultOfMethodCallIgnored")
|
||||
@Test
|
||||
public void testMarkFunctions() throws IOException {
|
||||
byte[] buffer = new byte[ _workbook_size / 5 ];
|
||||
byte[] buffer = new byte[_workbook_size / 5];
|
||||
byte[] small_buffer = new byte[212];
|
||||
|
||||
DocumentInputStream[] streams = new DocumentInputStream[] {
|
||||
new DocumentInputStream(_workbook_o),
|
||||
new NDocumentInputStream(_workbook_n)
|
||||
};
|
||||
for(DocumentInputStream stream : streams) {
|
||||
// Read a fifth of it, and check all's correct
|
||||
stream.read(buffer);
|
||||
for (int j = 0; j < buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[ j ], buffer[ j ]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - buffer.length, available(stream));
|
||||
|
||||
// Reset, and check the available goes back to being the
|
||||
// whole of the stream
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
|
||||
|
||||
// Read part of a block
|
||||
stream.read(small_buffer);
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[ j ], small_buffer[ j ]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - small_buffer.length, available(stream));
|
||||
stream.mark(0);
|
||||
|
||||
// Read the next part
|
||||
stream.read(small_buffer);
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[ j+small_buffer.length ], small_buffer[ j ]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - 2*small_buffer.length, available(stream));
|
||||
|
||||
// Reset, check it goes back to where it was
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size - small_buffer.length, available(stream));
|
||||
|
||||
// Read
|
||||
stream.read(small_buffer);
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[ j+small_buffer.length ], small_buffer[ j ]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - 2*small_buffer.length, available(stream));
|
||||
|
||||
|
||||
// Now read at various points
|
||||
Arrays.fill(small_buffer, ( byte ) 0);
|
||||
stream.read(small_buffer, 6, 8);
|
||||
stream.read(small_buffer, 100, 10);
|
||||
stream.read(small_buffer, 150, 12);
|
||||
int pos = small_buffer.length * 2;
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
byte exp = 0;
|
||||
if(j>= 6 && j<6+8) {
|
||||
exp = _workbook_data[pos];
|
||||
pos++;
|
||||
}
|
||||
if(j>= 100 && j<100+10) {
|
||||
exp = _workbook_data[pos];
|
||||
pos++;
|
||||
}
|
||||
if(j>= 150 && j<150+12) {
|
||||
exp = _workbook_data[pos];
|
||||
pos++;
|
||||
}
|
||||
|
||||
assertEquals("checking byte " + j, exp, small_buffer[j]);
|
||||
}
|
||||
|
||||
DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
|
||||
// Read a fifth of it, and check all's correct
|
||||
stream.read(buffer);
|
||||
for (int j = 0; j < buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[j], buffer[j]
|
||||
);
|
||||
}
|
||||
|
||||
assertEquals(_workbook_size - buffer.length, available(stream));
|
||||
|
||||
// Reset, and check the available goes back to being the
|
||||
// whole of the stream
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
|
||||
|
||||
// Read part of a block
|
||||
stream.read(small_buffer);
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[j], small_buffer[j]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - small_buffer.length, available(stream));
|
||||
stream.mark(0);
|
||||
|
||||
// Read the next part
|
||||
stream.read(small_buffer);
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[j + small_buffer.length], small_buffer[j]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - 2 * small_buffer.length, available(stream));
|
||||
|
||||
// Reset, check it goes back to where it was
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size - small_buffer.length, available(stream));
|
||||
|
||||
// Read
|
||||
stream.read(small_buffer);
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[j + small_buffer.length], small_buffer[j]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - 2 * small_buffer.length, available(stream));
|
||||
|
||||
|
||||
// Now read at various points
|
||||
Arrays.fill(small_buffer, (byte) 0);
|
||||
stream.read(small_buffer, 6, 8);
|
||||
stream.read(small_buffer, 100, 10);
|
||||
stream.read(small_buffer, 150, 12);
|
||||
int pos = small_buffer.length * 2;
|
||||
for (int j = 0; j < small_buffer.length; j++) {
|
||||
byte exp = 0;
|
||||
if (j >= 6 && j < 6 + 8) {
|
||||
exp = _workbook_data[pos];
|
||||
pos++;
|
||||
}
|
||||
if (j >= 100 && j < 100 + 10) {
|
||||
exp = _workbook_data[pos];
|
||||
pos++;
|
||||
}
|
||||
if (j >= 150 && j < 150 + 12) {
|
||||
exp = _workbook_data[pos];
|
||||
pos++;
|
||||
}
|
||||
|
||||
assertEquals("checking byte " + j, exp, small_buffer[j]);
|
||||
}
|
||||
|
||||
// Now repeat it with spanning multiple blocks
|
||||
streams = new DocumentInputStream[] {
|
||||
new DocumentInputStream(_workbook_o),
|
||||
new NDocumentInputStream(_workbook_n)
|
||||
};
|
||||
for(DocumentInputStream stream : streams) {
|
||||
// Read several blocks work
|
||||
buffer = new byte[ _workbook_size / 5 ];
|
||||
stream.read(buffer);
|
||||
for (int j = 0; j < buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[ j ], buffer[ j ]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - buffer.length, available(stream));
|
||||
|
||||
// Read all of it again, check it began at the start again
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
|
||||
stream.read(buffer);
|
||||
for (int j = 0; j < buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[ j ], buffer[ j ]
|
||||
);
|
||||
}
|
||||
|
||||
// Mark our position, and read another whole buffer
|
||||
stream.mark(12);
|
||||
stream.read(buffer);
|
||||
assertEquals(_workbook_size - (2 * buffer.length),
|
||||
available(stream));
|
||||
for (int j = buffer.length; j < (2 * buffer.length); j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, _workbook_data[ j ],
|
||||
buffer[ j - buffer.length ]);
|
||||
}
|
||||
|
||||
// Reset, should go back to only one buffer full read
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size - buffer.length, available(stream));
|
||||
|
||||
// Read the buffer again
|
||||
stream.read(buffer);
|
||||
assertEquals(_workbook_size - (2 * buffer.length),
|
||||
available(stream));
|
||||
for (int j = buffer.length; j < (2 * buffer.length); j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, _workbook_data[ j ],
|
||||
buffer[ j - buffer.length ]);
|
||||
}
|
||||
assertTrue(stream.markSupported());
|
||||
stream = new NDocumentInputStream(_workbook_n);
|
||||
// Read several blocks work
|
||||
buffer = new byte[_workbook_size / 5];
|
||||
stream.read(buffer);
|
||||
for (int j = 0; j < buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[j], buffer[j]
|
||||
);
|
||||
}
|
||||
assertEquals(_workbook_size - buffer.length, available(stream));
|
||||
|
||||
// Read all of it again, check it began at the start again
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
|
||||
stream.read(buffer);
|
||||
for (int j = 0; j < buffer.length; j++) {
|
||||
assertEquals(
|
||||
"checking byte " + j,
|
||||
_workbook_data[j], buffer[j]
|
||||
);
|
||||
}
|
||||
|
||||
// Mark our position, and read another whole buffer
|
||||
stream.mark(12);
|
||||
stream.read(buffer);
|
||||
assertEquals(_workbook_size - (2 * buffer.length),
|
||||
available(stream));
|
||||
for (int j = buffer.length; j < (2 * buffer.length); j++) {
|
||||
assertEquals("checking byte " + j, _workbook_data[j],
|
||||
buffer[j - buffer.length]);
|
||||
}
|
||||
|
||||
// Reset, should go back to only one buffer full read
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size - buffer.length, available(stream));
|
||||
|
||||
// Read the buffer again
|
||||
stream.read(buffer);
|
||||
assertEquals(_workbook_size - (2 * buffer.length),
|
||||
available(stream));
|
||||
for (int j = buffer.length; j < (2 * buffer.length); j++) {
|
||||
assertEquals("checking byte " + j, _workbook_data[j],
|
||||
buffer[j - buffer.length]);
|
||||
}
|
||||
assertTrue(stream.markSupported());
|
||||
}
|
||||
|
||||
/**
|
||||
* test simple read method
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("ResultOfMethodCallIgnored")
|
||||
@Test(expected = IOException.class)
|
||||
public void testReadSingleByte() throws IOException {
|
||||
DocumentInputStream[] streams = new DocumentInputStream[] {
|
||||
new DocumentInputStream(_workbook_o),
|
||||
new NDocumentInputStream(_workbook_n)
|
||||
};
|
||||
for(DocumentInputStream stream : streams) {
|
||||
int remaining = _workbook_size;
|
||||
DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
|
||||
int remaining = _workbook_size;
|
||||
|
||||
// Try and read each byte in turn
|
||||
for (int j = 0; j < _workbook_size; j++) {
|
||||
int b = stream.read();
|
||||
assertTrue("checking sign of " + j, b >= 0);
|
||||
assertEquals("validating byte " + j, _workbook_data[ j ],
|
||||
( byte ) b);
|
||||
remaining--;
|
||||
assertEquals("checking remaining after reading byte " + j,
|
||||
remaining, available(stream));
|
||||
}
|
||||
|
||||
// Ensure we fell off the end
|
||||
assertEquals(-1, stream.read());
|
||||
|
||||
// Check that after close we can no longer read
|
||||
stream.close();
|
||||
try {
|
||||
stream.read();
|
||||
fail("Should have caught IOException");
|
||||
} catch (IOException ignored) {
|
||||
// as expected
|
||||
}
|
||||
}
|
||||
// Try and read each byte in turn
|
||||
for (int j = 0; j < _workbook_size; j++) {
|
||||
int b = stream.read();
|
||||
assertTrue("checking sign of " + j, b >= 0);
|
||||
assertEquals("validating byte " + j, _workbook_data[j],
|
||||
(byte) b);
|
||||
remaining--;
|
||||
assertEquals("checking remaining after reading byte " + j,
|
||||
remaining, available(stream));
|
||||
}
|
||||
|
||||
// Ensure we fell off the end
|
||||
assertEquals(-1, stream.read());
|
||||
|
||||
// Check that after close we can no longer read
|
||||
stream.close();
|
||||
stream.read();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test buffered read
|
||||
*/
|
||||
@SuppressWarnings("ResultOfMethodCallIgnored")
|
||||
@Test
|
||||
public void testBufferRead() throws IOException {
|
||||
DocumentInputStream[] streams = new DocumentInputStream[] {
|
||||
new DocumentInputStream(_workbook_o),
|
||||
new NDocumentInputStream(_workbook_n)
|
||||
};
|
||||
for(DocumentInputStream stream : streams) {
|
||||
// Need to give a byte array to read
|
||||
try {
|
||||
stream.read(null);
|
||||
fail("Should have caught NullPointerException");
|
||||
} catch (NullPointerException ignored) {
|
||||
// as expected
|
||||
}
|
||||
DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
|
||||
// Need to give a byte array to read
|
||||
try {
|
||||
stream.read(null);
|
||||
fail("Should have caught NullPointerException");
|
||||
} catch (NullPointerException ignored) {
|
||||
// as expected
|
||||
}
|
||||
|
||||
// test reading zero length buffer
|
||||
assertEquals(0, stream.read(new byte[ 0 ]));
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
byte[] buffer = new byte[ _buffer_size ];
|
||||
int offset = 0;
|
||||
// test reading zero length buffer
|
||||
assertEquals(0, stream.read(new byte[0]));
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
byte[] buffer = new byte[_buffer_size];
|
||||
int offset = 0;
|
||||
|
||||
while (available(stream) >= buffer.length)
|
||||
{
|
||||
assertEquals(_buffer_size, stream.read(buffer));
|
||||
for (byte element : buffer) {
|
||||
while (available(stream) >= buffer.length) {
|
||||
assertEquals(_buffer_size, stream.read(buffer));
|
||||
for (byte element : buffer) {
|
||||
assertEquals("in main loop, byte " + offset,
|
||||
_workbook_data[ offset ], element);
|
||||
_workbook_data[offset], element);
|
||||
offset++;
|
||||
}
|
||||
assertEquals("offset " + offset, _workbook_size - offset,
|
||||
available(stream));
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size, available(stream));
|
||||
Arrays.fill(buffer, ( byte ) 0);
|
||||
int count = stream.read(buffer);
|
||||
}
|
||||
assertEquals("offset " + offset, _workbook_size - offset,
|
||||
available(stream));
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size, available(stream));
|
||||
Arrays.fill(buffer, (byte) 0);
|
||||
int count = stream.read(buffer);
|
||||
|
||||
assertEquals(_workbook_size % _buffer_size, count);
|
||||
for (int j = 0; j < count; j++)
|
||||
{
|
||||
assertEquals("past main loop, byte " + offset,
|
||||
_workbook_data[ offset ], buffer[ j ]);
|
||||
offset++;
|
||||
}
|
||||
assertEquals(_workbook_size, offset);
|
||||
for (int j = count; j < buffer.length; j++)
|
||||
{
|
||||
assertEquals("checking remainder, byte " + j, 0, buffer[ j ]);
|
||||
}
|
||||
assertEquals(-1, stream.read(buffer));
|
||||
stream.close();
|
||||
try {
|
||||
stream.read(buffer);
|
||||
fail("Should have caught IOException");
|
||||
} catch (IOException ignored) {
|
||||
// as expected
|
||||
}
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size, count);
|
||||
for (int j = 0; j < count; j++) {
|
||||
assertEquals("past main loop, byte " + offset,
|
||||
_workbook_data[offset], buffer[j]);
|
||||
offset++;
|
||||
}
|
||||
assertEquals(_workbook_size, offset);
|
||||
for (int j = count; j < buffer.length; j++) {
|
||||
assertEquals("checking remainder, byte " + j, 0, buffer[j]);
|
||||
}
|
||||
assertEquals(-1, stream.read(buffer));
|
||||
stream.close();
|
||||
try {
|
||||
stream.read(buffer);
|
||||
fail("Should have caught IOException");
|
||||
} catch (IOException ignored) {
|
||||
// as expected
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test complex buffered read
|
||||
*/
|
||||
@SuppressWarnings("ResultOfMethodCallIgnored")
|
||||
@Test
|
||||
public void testComplexBufferRead() throws IOException {
|
||||
DocumentInputStream[] streams = new DocumentInputStream[] {
|
||||
new DocumentInputStream(_workbook_o),
|
||||
new NDocumentInputStream(_workbook_n)
|
||||
};
|
||||
for(DocumentInputStream stream : streams) {
|
||||
try {
|
||||
stream.read(null, 0, 1);
|
||||
fail("Should have caught NullPointerException");
|
||||
} catch (IllegalArgumentException ignored) {
|
||||
// as expected
|
||||
}
|
||||
DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
|
||||
try {
|
||||
stream.read(null, 0, 1);
|
||||
fail("Should have caught NullPointerException");
|
||||
} catch (IllegalArgumentException ignored) {
|
||||
// as expected
|
||||
}
|
||||
|
||||
// test illegal offsets and lengths
|
||||
try {
|
||||
stream.read(new byte[ 5 ], -4, 0);
|
||||
fail("Should have caught IndexOutOfBoundsException");
|
||||
} catch (IndexOutOfBoundsException ignored) {
|
||||
// as expected
|
||||
}
|
||||
try {
|
||||
stream.read(new byte[ 5 ], 0, -4);
|
||||
fail("Should have caught IndexOutOfBoundsException");
|
||||
} catch (IndexOutOfBoundsException ignored) {
|
||||
// as expected
|
||||
}
|
||||
try {
|
||||
stream.read(new byte[ 5 ], 0, 6);
|
||||
fail("Should have caught IndexOutOfBoundsException");
|
||||
} catch (IndexOutOfBoundsException ignored) {
|
||||
// as expected
|
||||
}
|
||||
// test illegal offsets and lengths
|
||||
try {
|
||||
stream.read(new byte[5], -4, 0);
|
||||
fail("Should have caught IndexOutOfBoundsException");
|
||||
} catch (IndexOutOfBoundsException ignored) {
|
||||
// as expected
|
||||
}
|
||||
try {
|
||||
stream.read(new byte[5], 0, -4);
|
||||
fail("Should have caught IndexOutOfBoundsException");
|
||||
} catch (IndexOutOfBoundsException ignored) {
|
||||
// as expected
|
||||
}
|
||||
try {
|
||||
stream.read(new byte[5], 0, 6);
|
||||
fail("Should have caught IndexOutOfBoundsException");
|
||||
} catch (IndexOutOfBoundsException ignored) {
|
||||
// as expected
|
||||
}
|
||||
|
||||
// test reading zero
|
||||
assertEquals(0, stream.read(new byte[ 5 ], 0, 0));
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
byte[] buffer = new byte[ _workbook_size ];
|
||||
int offset = 0;
|
||||
// test reading zero
|
||||
assertEquals(0, stream.read(new byte[5], 0, 0));
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
byte[] buffer = new byte[_workbook_size];
|
||||
int offset = 0;
|
||||
|
||||
while (available(stream) >= _buffer_size)
|
||||
{
|
||||
Arrays.fill(buffer, ( byte ) 0);
|
||||
assertEquals(_buffer_size,
|
||||
stream.read(buffer, offset, _buffer_size));
|
||||
for (int j = 0; j < offset; j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, 0, buffer[ j ]);
|
||||
}
|
||||
for (int j = offset; j < (offset + _buffer_size); j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, _workbook_data[ j ],
|
||||
buffer[ j ]);
|
||||
}
|
||||
for (int j = offset + _buffer_size; j < buffer.length; j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, 0, buffer[ j ]);
|
||||
}
|
||||
offset += _buffer_size;
|
||||
assertEquals("offset " + offset, _workbook_size - offset,
|
||||
available(stream));
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size, available(stream));
|
||||
Arrays.fill(buffer, ( byte ) 0);
|
||||
int count = stream.read(buffer, offset,
|
||||
while (available(stream) >= _buffer_size) {
|
||||
Arrays.fill(buffer, (byte) 0);
|
||||
assertEquals(_buffer_size,
|
||||
stream.read(buffer, offset, _buffer_size));
|
||||
for (int j = 0; j < offset; j++) {
|
||||
assertEquals("checking byte " + j, 0, buffer[j]);
|
||||
}
|
||||
for (int j = offset; j < (offset + _buffer_size); j++) {
|
||||
assertEquals("checking byte " + j, _workbook_data[j],
|
||||
buffer[j]);
|
||||
}
|
||||
for (int j = offset + _buffer_size; j < buffer.length; j++) {
|
||||
assertEquals("checking byte " + j, 0, buffer[j]);
|
||||
}
|
||||
offset += _buffer_size;
|
||||
assertEquals("offset " + offset, _workbook_size - offset,
|
||||
available(stream));
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size, available(stream));
|
||||
Arrays.fill(buffer, (byte) 0);
|
||||
int count = stream.read(buffer, offset,
|
||||
_workbook_size % _buffer_size);
|
||||
|
||||
assertEquals(_workbook_size % _buffer_size, count);
|
||||
for (int j = 0; j < offset; j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, 0, buffer[ j ]);
|
||||
}
|
||||
for (int j = offset; j < buffer.length; j++)
|
||||
{
|
||||
assertEquals("checking byte " + j, _workbook_data[ j ],
|
||||
buffer[ j ]);
|
||||
}
|
||||
assertEquals(_workbook_size, offset + count);
|
||||
for (int j = count; j < offset; j++)
|
||||
{
|
||||
assertEquals("byte " + j, 0, buffer[ j ]);
|
||||
}
|
||||
|
||||
assertEquals(-1, stream.read(buffer, 0, 1));
|
||||
stream.close();
|
||||
try {
|
||||
stream.read(buffer, 0, 1);
|
||||
fail("Should have caught IOException");
|
||||
} catch (IOException ignored) {
|
||||
// as expected
|
||||
}
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size, count);
|
||||
for (int j = 0; j < offset; j++) {
|
||||
assertEquals("checking byte " + j, 0, buffer[j]);
|
||||
}
|
||||
for (int j = offset; j < buffer.length; j++) {
|
||||
assertEquals("checking byte " + j, _workbook_data[j],
|
||||
buffer[j]);
|
||||
}
|
||||
assertEquals(_workbook_size, offset + count);
|
||||
for (int j = count; j < offset; j++) {
|
||||
assertEquals("byte " + j, 0, buffer[j]);
|
||||
}
|
||||
|
||||
assertEquals(-1, stream.read(buffer, 0, 1));
|
||||
stream.close();
|
||||
try {
|
||||
stream.read(buffer, 0, 1);
|
||||
fail("Should have caught IOException");
|
||||
} catch (IOException ignored) {
|
||||
// as expected
|
||||
}
|
||||
}
|
||||
|
||||
/**
@@ -488,82 +407,67 @@ public final class TestDocumentInputStream {
*/
|
||||
@Test
|
||||
public void testSkip() throws IOException {
|
||||
DocumentInputStream[] streams = new DocumentInputStream[] {
|
||||
new DocumentInputStream(_workbook_o),
|
||||
new NDocumentInputStream(_workbook_n)
|
||||
};
|
||||
for(DocumentInputStream stream : streams) {
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
int count = available(stream);
|
||||
DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
int count = available(stream);
|
||||
|
||||
while (available(stream) >= _buffer_size) {
|
||||
assertEquals(_buffer_size, stream.skip(_buffer_size));
|
||||
count -= _buffer_size;
|
||||
assertEquals(count, available(stream));
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size,
|
||||
while (available(stream) >= _buffer_size) {
|
||||
assertEquals(_buffer_size, stream.skip(_buffer_size));
|
||||
count -= _buffer_size;
|
||||
assertEquals(count, available(stream));
|
||||
}
|
||||
assertEquals(_workbook_size % _buffer_size,
|
||||
stream.skip(_buffer_size));
|
||||
assertEquals(0, available(stream));
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
assertEquals(_workbook_size, stream.skip(_workbook_size * 2));
|
||||
assertEquals(0, available(stream));
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
assertEquals(_workbook_size,
|
||||
stream.skip(2 + ( long ) Integer.MAX_VALUE));
|
||||
assertEquals(0, available(stream));
|
||||
}
|
||||
assertEquals(0, available(stream));
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
assertEquals(_workbook_size, stream.skip(_workbook_size * 2));
|
||||
assertEquals(0, available(stream));
|
||||
stream.reset();
|
||||
assertEquals(_workbook_size, available(stream));
|
||||
assertEquals(_workbook_size,
|
||||
stream.skip(2 + (long) Integer.MAX_VALUE));
|
||||
assertEquals(0, available(stream));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Test that we can read files at multiple levels down the tree
|
||||
*/
|
||||
@Test
|
||||
public void testReadMultipleTreeLevels() throws Exception {
|
||||
final POIDataSamples _samples = POIDataSamples.getPublisherInstance();
|
||||
File sample = _samples.getFile("Sample.pub");
|
||||
|
||||
DocumentInputStream stream;
|
||||
|
||||
NPOIFSFileSystem npoifs = new NPOIFSFileSystem(sample);
|
||||
try {
|
||||
OPOIFSFileSystem opoifs = new OPOIFSFileSystem(new FileInputStream(sample));
|
||||
|
||||
// Ensure we have what we expect on the root
|
||||
assertEquals(npoifs, npoifs.getRoot().getNFileSystem());
|
||||
assertEquals(npoifs, npoifs.getRoot().getFileSystem());
|
||||
assertEquals(null, npoifs.getRoot().getOFileSystem());
|
||||
assertEquals(null, opoifs.getRoot().getFileSystem());
|
||||
assertEquals(opoifs, opoifs.getRoot().getOFileSystem());
|
||||
assertEquals(null, opoifs.getRoot().getNFileSystem());
|
||||
|
||||
// Check inside
|
||||
for(DirectoryNode root : new DirectoryNode[] { opoifs.getRoot(), npoifs.getRoot() }) {
|
||||
// Top Level
|
||||
Entry top = root.getEntry("Contents");
|
||||
assertEquals(true, top.isDocumentEntry());
|
||||
stream = root.createDocumentInputStream(top);
|
||||
stream.read();
|
||||
|
||||
// One Level Down
|
||||
DirectoryNode escher = (DirectoryNode)root.getEntry("Escher");
|
||||
Entry one = escher.getEntry("EscherStm");
|
||||
assertEquals(true, one.isDocumentEntry());
|
||||
stream = escher.createDocumentInputStream(one);
|
||||
stream.read();
|
||||
|
||||
// Two Levels Down
|
||||
DirectoryNode quill = (DirectoryNode)root.getEntry("Quill");
|
||||
DirectoryNode quillSub = (DirectoryNode)quill.getEntry("QuillSub");
|
||||
Entry two = quillSub.getEntry("CONTENTS");
|
||||
assertEquals(true, two.isDocumentEntry());
|
||||
stream = quillSub.createDocumentInputStream(two);
|
||||
stream.read();
|
||||
}
|
||||
} finally {
|
||||
npoifs.close();
|
||||
}
|
||||
final POIDataSamples _samples = POIDataSamples.getPublisherInstance();
|
||||
File sample = _samples.getFile("Sample.pub");
|
||||
|
||||
DocumentInputStream stream;
|
||||
|
||||
try (NPOIFSFileSystem npoifs = new NPOIFSFileSystem(sample)) {
|
||||
// Ensure we have what we expect on the root
|
||||
assertEquals(npoifs, npoifs.getRoot().getNFileSystem());
|
||||
assertEquals(npoifs, npoifs.getRoot().getFileSystem());
|
||||
|
||||
// Check inside
|
||||
DirectoryNode root = npoifs.getRoot();
|
||||
// Top Level
|
||||
Entry top = root.getEntry("Contents");
|
||||
assertTrue(top.isDocumentEntry());
|
||||
stream = root.createDocumentInputStream(top);
|
||||
assertNotEquals(-1, stream.read());
|
||||
|
||||
// One Level Down
|
||||
DirectoryNode escher = (DirectoryNode) root.getEntry("Escher");
|
||||
Entry one = escher.getEntry("EscherStm");
|
||||
assertTrue(one.isDocumentEntry());
|
||||
stream = escher.createDocumentInputStream(one);
|
||||
assertNotEquals(-1, stream.read());
|
||||
|
||||
// Two Levels Down
|
||||
DirectoryNode quill = (DirectoryNode) root.getEntry("Quill");
|
||||
DirectoryNode quillSub = (DirectoryNode) quill.getEntry("QuillSub");
|
||||
Entry two = quillSub.getEntry("CONTENTS");
|
||||
assertTrue(two.isDocumentEntry());
|
||||
stream = quillSub.createDocumentInputStream(two);
|
||||
assertNotEquals(-1, stream.read());
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressForbidden("just for testing")
@@ -1,73 +0,0 @@
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import java.io.*;
|
||||
|
||||
import junit.framework.*;
|
||||
|
||||
import org.apache.poi.poifs.property.DirectoryProperty;
|
||||
import org.apache.poi.poifs.property.DocumentProperty;
|
||||
import org.apache.poi.poifs.storage.RawDataBlock;
|
||||
|
||||
/**
|
||||
* Class to test DocumentNode functionality
|
||||
*
|
||||
* @author Marc Johnson
|
||||
*/
|
||||
public final class TestDocumentNode extends TestCase {
|
||||
|
||||
/**
|
||||
* test constructor
|
||||
*/
|
||||
public void testConstructor() throws IOException {
|
||||
DirectoryProperty property1 = new DirectoryProperty("directory");
|
||||
RawDataBlock[] rawBlocks = new RawDataBlock[ 4 ];
|
||||
ByteArrayInputStream stream =
|
||||
new ByteArrayInputStream(new byte[ 2048 ]);
|
||||
|
||||
for (int j = 0; j < 4; j++)
|
||||
{
|
||||
rawBlocks[ j ] = new RawDataBlock(stream);
|
||||
}
|
||||
OPOIFSDocument document = new OPOIFSDocument("document", rawBlocks,
|
||||
2000);
|
||||
DocumentProperty property2 = document.getDocumentProperty();
|
||||
DirectoryNode parent = new DirectoryNode(property1, (POIFSFileSystem)null, null);
|
||||
DocumentNode node = new DocumentNode(property2, parent);
|
||||
|
||||
// verify we can retrieve the document
|
||||
assertEquals(property2.getDocument(), node.getDocument());
|
||||
|
||||
// verify we can get the size
|
||||
assertEquals(property2.getSize(), node.getSize());
|
||||
|
||||
// verify isDocumentEntry returns true
|
||||
assertTrue(node.isDocumentEntry());
|
||||
|
||||
// verify isDirectoryEntry returns false
|
||||
assertTrue(!node.isDirectoryEntry());
|
||||
|
||||
// verify getName behaves correctly
|
||||
assertEquals(property2.getName(), node.getName());
|
||||
|
||||
// verify getParent behaves correctly
|
||||
assertEquals(parent, node.getParent());
|
||||
}
|
||||
}
@@ -17,6 +17,9 @@
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.InputStream;
@@ -26,20 +29,21 @@ import java.util.Iterator;
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.poi.POIDataSamples;
|
||||
import org.junit.After;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Tests bugs across both POIFSFileSystem and NPOIFSFileSystem
|
||||
*/
|
||||
public final class TestFileSystemBugs extends TestCase {
|
||||
protected static POIDataSamples _samples = POIDataSamples.getPOIFSInstance();
|
||||
protected static POIDataSamples _ssSamples = POIDataSamples.getSpreadSheetInstance();
|
||||
|
||||
protected List<NPOIFSFileSystem> openedFSs;
|
||||
@Override
|
||||
protected void tearDown() throws Exception {
|
||||
public final class TestFileSystemBugs {
|
||||
private static POIDataSamples _samples = POIDataSamples.getPOIFSInstance();
|
||||
private static POIDataSamples _ssSamples = POIDataSamples.getSpreadSheetInstance();
|
||||
|
||||
private List<NPOIFSFileSystem> openedFSs;
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
if (openedFSs != null && !openedFSs.isEmpty()) {
|
||||
for (NPOIFSFileSystem fs : openedFSs) {
|
||||
try {
@@ -51,65 +55,60 @@ public final class TestFileSystemBugs extends TestCase {
}
|
||||
openedFSs = null;
|
||||
}
|
||||
protected DirectoryNode[] openSample(String name, boolean oldFails) throws Exception {
|
||||
return openSamples(new InputStream[] {
|
||||
_samples.openResourceAsStream(name),
|
||||
_samples.openResourceAsStream(name)
|
||||
}, oldFails);
|
||||
}
|
||||
protected DirectoryNode[] openSSSample(String name, boolean oldFails) throws Exception {
|
||||
return openSamples(new InputStream[] {
|
||||
_ssSamples.openResourceAsStream(name),
|
||||
_ssSamples.openResourceAsStream(name)
|
||||
}, oldFails);
|
||||
}
|
||||
protected DirectoryNode[] openSamples(InputStream[] inps, boolean oldFails) throws Exception {
|
||||
NPOIFSFileSystem nfs = new NPOIFSFileSystem(inps[0]);
|
||||
if (openedFSs == null) openedFSs = new ArrayList<>();
|
||||
openedFSs.add(nfs);
|
||||
|
||||
OPOIFSFileSystem ofs = null;
|
||||
try {
|
||||
ofs = new OPOIFSFileSystem(inps[1]);
|
||||
if (oldFails) fail("POIFSFileSystem should have failed but didn't");
|
||||
} catch (Exception e) {
|
||||
if (!oldFails) throw e;
|
||||
}
|
||||
|
||||
if (ofs == null) return new DirectoryNode[] { nfs.getRoot() };
|
||||
return new DirectoryNode[] { ofs.getRoot(), nfs.getRoot() };
|
||||
private DirectoryNode openSample(String name) throws Exception {
|
||||
try (InputStream inps = _samples.openResourceAsStream(name)) {
|
||||
return openSample(inps);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
private DirectoryNode openSSSample(String name) throws Exception {
|
||||
try (InputStream inps = _ssSamples.openResourceAsStream(name)) {
|
||||
return openSample(inps);
|
||||
}
|
||||
}
|
||||
|
||||
private DirectoryNode openSample(InputStream inps) throws Exception {
|
||||
NPOIFSFileSystem nfs = new NPOIFSFileSystem(inps);
|
||||
if (openedFSs == null) {
|
||||
openedFSs = new ArrayList<>();
|
||||
}
|
||||
openedFSs.add(nfs);
|
||||
|
||||
return nfs.getRoot();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that we can open files that come via Lotus notes.
|
||||
* These have a top level directory without a name....
|
||||
*/
|
||||
@Test
|
||||
public void testNotesOLE2Files() throws Exception {
|
||||
// Check the contents
|
||||
for (DirectoryNode root : openSample("Notes.ole2", false)) {
|
||||
assertEquals(1, root.getEntryCount());
|
||||
DirectoryNode root = openSample("Notes.ole2");
|
||||
assertEquals(1, root.getEntryCount());
|
||||
|
||||
Entry entry = root.getEntries().next();
|
||||
assertTrue(entry.isDirectoryEntry());
|
||||
assertTrue(entry instanceof DirectoryEntry);
|
||||
Entry entry = root.getEntries().next();
|
||||
assertTrue(entry.isDirectoryEntry());
|
||||
assertTrue(entry instanceof DirectoryEntry);
|
||||
|
||||
// The directory lacks a name!
|
||||
DirectoryEntry dir = (DirectoryEntry)entry;
|
||||
assertEquals("", dir.getName());
|
||||
// The directory lacks a name!
|
||||
DirectoryEntry dir = (DirectoryEntry)entry;
|
||||
assertEquals("", dir.getName());
|
||||
|
||||
// Has two children
|
||||
assertEquals(2, dir.getEntryCount());
|
||||
// Has two children
|
||||
assertEquals(2, dir.getEntryCount());
|
||||
|
||||
// Check them
|
||||
Iterator<Entry> it = dir.getEntries();
|
||||
entry = it.next();
|
||||
assertEquals(true, entry.isDocumentEntry());
|
||||
assertEquals(Ole10Native.OLE10_NATIVE, entry.getName());
|
||||
// Check them
|
||||
Iterator<Entry> it = dir.getEntries();
|
||||
entry = it.next();
|
||||
assertTrue(entry.isDocumentEntry());
|
||||
assertEquals(Ole10Native.OLE10_NATIVE, entry.getName());
|
||||
|
||||
entry = it.next();
|
||||
assertEquals(true, entry.isDocumentEntry());
|
||||
assertEquals("\u0001CompObj", entry.getName());
|
||||
}
|
||||
entry = it.next();
|
||||
assertTrue(entry.isDocumentEntry());
|
||||
assertEquals("\u0001CompObj", entry.getName());
|
||||
}
|
||||
|
||||
/**
@@ -119,46 +118,39 @@ public final class TestFileSystemBugs extends TestCase {
* Note - only works for NPOIFSFileSystem, POIFSFileSystem
|
||||
* can't cope with this level of corruption
|
||||
*/
|
||||
@Test
|
||||
public void testCorruptedProperties() throws Exception {
|
||||
for (DirectoryNode root : openSample("unknown_properties.msg", true)) {
|
||||
assertEquals(42, root.getEntryCount());
|
||||
}
|
||||
DirectoryNode root = openSample("unknown_properties.msg");
|
||||
assertEquals(42, root.getEntryCount());
|
||||
}
|
||||
|
||||
/**
|
||||
* With heavily nested documents, ensure we still re-write the same
|
||||
*/
|
||||
@Test
|
||||
public void testHeavilyNestedReWrite() throws Exception {
|
||||
for (DirectoryNode root : openSSSample("ex42570-20305.xls", false)) {
|
||||
// Record the structure
|
||||
Map<String,Integer> entries = new HashMap<>();
|
||||
fetchSizes("/", root, entries);
|
||||
|
||||
// Prepare to copy
|
||||
DirectoryNode dest;
|
||||
if (root.getNFileSystem() != null) {
|
||||
dest = (new NPOIFSFileSystem()).getRoot();
|
||||
} else {
|
||||
dest = (new OPOIFSFileSystem()).getRoot();
|
||||
}
|
||||
|
||||
// Copy over
|
||||
EntryUtils.copyNodes(root, dest);
|
||||
|
||||
// Re-load, always as NPOIFS
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
if (root.getNFileSystem() != null) {
|
||||
root.getNFileSystem().writeFilesystem(baos);
|
||||
} else {
|
||||
root.getOFileSystem().writeFilesystem(baos);
|
||||
}
|
||||
NPOIFSFileSystem read = new NPOIFSFileSystem(
|
||||
new ByteArrayInputStream(baos.toByteArray()));
|
||||
|
||||
// Check the structure matches
|
||||
checkSizes("/", read.getRoot(), entries);
|
||||
}
|
||||
DirectoryNode root = openSSSample("ex42570-20305.xls");
|
||||
// Record the structure
|
||||
Map<String,Integer> entries = new HashMap<>();
|
||||
fetchSizes("/", root, entries);
|
||||
|
||||
// Prepare to copy
|
||||
DirectoryNode dest = new NPOIFSFileSystem().getRoot();
|
||||
|
||||
// Copy over
|
||||
EntryUtils.copyNodes(root, dest);
|
||||
|
||||
// Re-load, always as NPOIFS
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
root.getNFileSystem().writeFilesystem(baos);
|
||||
|
||||
NPOIFSFileSystem read = new NPOIFSFileSystem(
|
||||
new ByteArrayInputStream(baos.toByteArray()));
|
||||
|
||||
// Check the structure matches
|
||||
checkSizes("/", read.getRoot(), entries);
|
||||
}
|
||||
|
||||
private void fetchSizes(String path, DirectoryNode dir, Map<String,Integer> entries) {
|
||||
for (Entry entry : dir) {
|
||||
if (entry instanceof DirectoryNode) {
@@ -17,6 +17,26 @@
package org.apache.poi.poifs.filesystem;
|
||||
|
||||
import static org.hamcrest.core.IsCollectionContaining.hasItem;
|
||||
import static org.hamcrest.core.IsEqual.equalTo;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.apache.poi.POIDataSamples;
|
||||
import org.apache.poi.hpsf.DocumentSummaryInformation;
|
||||
import org.apache.poi.hpsf.PropertySet;
@@ -34,14 +54,6 @@ import org.junit.Assume;
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Iterator;
|
||||
|
||||
import static org.hamcrest.core.IsCollectionContaining.hasItem;
|
||||
import static org.hamcrest.core.IsEqual.equalTo;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
* Tests for the new NIO POIFSFileSystem implementation
|
||||
*/
@@ -52,7 +64,7 @@ public final class TestNPOIFSFileSystem {
* Returns test files with 512 byte and 4k block sizes, loaded
|
||||
* both from InputStreams and Files
|
||||
*/
|
||||
protected NPOIFSFileSystem[] get512and4kFileAndInput() throws IOException {
|
||||
private NPOIFSFileSystem[] get512and4kFileAndInput() throws IOException {
|
||||
NPOIFSFileSystem fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
|
||||
NPOIFSFileSystem fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
|
||||
NPOIFSFileSystem fsC = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
@@ -60,7 +72,7 @@ public final class TestNPOIFSFileSystem {
return new NPOIFSFileSystem[] {fsA,fsB,fsC,fsD};
|
||||
}
|
||||
|
||||
protected static void assertBATCount(NPOIFSFileSystem fs, int expectedBAT, int expectedXBAT) throws IOException {
|
||||
private static void assertBATCount(NPOIFSFileSystem fs, int expectedBAT, int expectedXBAT) throws IOException {
|
||||
int foundBAT = 0;
|
||||
int foundXBAT = 0;
|
||||
int sz = (int)(fs.size() / fs.getBigBlockSize());
@@ -75,7 +87,7 @@ public final class TestNPOIFSFileSystem {
assertEquals("Wrong number of BATs", expectedBAT, foundBAT);
|
||||
assertEquals("Wrong number of XBATs with " + expectedBAT + " BATs", expectedXBAT, foundXBAT);
|
||||
}
|
||||
protected void assertContentsMatches(byte[] expected, DocumentEntry doc) throws IOException {
|
||||
private void assertContentsMatches(byte[] expected, DocumentEntry doc) throws IOException {
|
||||
NDocumentInputStream inp = new NDocumentInputStream(doc);
|
||||
byte[] contents = new byte[doc.getSize()];
|
||||
assertEquals(doc.getSize(), inp.read(contents));
@@ -85,21 +97,21 @@ public final class TestNPOIFSFileSystem {
assertThat(expected, equalTo(contents));
|
||||
}
|
||||
}
|
||||
|
||||
protected static HeaderBlock writeOutAndReadHeader(NPOIFSFileSystem fs) throws IOException {
|
||||
|
||||
private static HeaderBlock writeOutAndReadHeader(NPOIFSFileSystem fs) throws IOException {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
fs.writeFilesystem(baos);
|
||||
|
||||
return new HeaderBlock(new ByteArrayInputStream(baos.toByteArray()));
|
||||
}
|
||||
|
||||
protected static NPOIFSFileSystem writeOutAndReadBack(NPOIFSFileSystem original) throws IOException {
|
||||
static NPOIFSFileSystem writeOutAndReadBack(NPOIFSFileSystem original) throws IOException {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
original.writeFilesystem(baos);
|
||||
return new NPOIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
|
||||
}
|
||||
|
||||
protected static NPOIFSFileSystem writeOutFileAndReadBack(NPOIFSFileSystem original) throws IOException {
|
||||
private static NPOIFSFileSystem writeOutFileAndReadBack(NPOIFSFileSystem original) throws IOException {
|
||||
final File file = TempFile.createTempFile("TestPOIFS", ".ole2");
|
||||
try (OutputStream fout = new FileOutputStream(file)) {
|
||||
original.writeFilesystem(fout);
@@ -179,7 +191,7 @@ public final class TestNPOIFSFileSystem {
assertEquals("Image", prop.getName());
|
||||
prop = pi.next();
|
||||
assertEquals("Tags", prop.getName());
|
||||
assertEquals(false, pi.hasNext());
|
||||
assertFalse(pi.hasNext());
|
||||
|
||||
|
||||
// Check the SBAT (Small Blocks FAT) was properly processed
@@ -250,7 +262,7 @@ public final class TestNPOIFSFileSystem {
assertEquals("Image", prop.getName());
|
||||
prop = pi.next();
|
||||
assertEquals("Tags", prop.getName());
|
||||
assertEquals(false, pi.hasNext());
|
||||
assertFalse(pi.hasNext());
|
||||
|
||||
|
||||
// Check the SBAT (Small Blocks FAT) was properly processed
@@ -422,7 +434,7 @@ public final class TestNPOIFSFileSystem {
NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
|
||||
|
||||
// Our first BAT block has spares
|
||||
assertEquals(true, fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
assertTrue(fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
|
||||
// First free one is 100
|
||||
assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
@@ -463,7 +475,7 @@ public final class TestNPOIFSFileSystem {
}
|
||||
|
||||
// Check our BAT knows it's free
|
||||
assertEquals(true, fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
assertTrue(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
|
||||
// Allocate all the spare ones
|
||||
for(int i=100; i<128; i++) {
@@ -471,9 +483,9 @@ public final class TestNPOIFSFileSystem {
}
|
||||
|
||||
// BAT is now full, but there's only the one
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
try {
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
|
||||
fail("Should only be one BAT");
|
||||
} catch(IndexOutOfBoundsException e) {
|
||||
// expected here
@@ -483,9 +495,9 @@ public final class TestNPOIFSFileSystem {
|
||||
// Now ask for a free one, will need to extend the file
|
||||
assertEquals(129, fs1.getFreeBlock());
|
||||
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
assertEquals(true, fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
|
||||
|
||||
assertFalse(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
|
||||
assertTrue(fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
|
||||
assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(128));
|
||||
assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(129));
@@ -502,10 +514,10 @@ public final class TestNPOIFSFileSystem {
fs1.setNextBlock(free, POIFSConstants.END_OF_CHAIN);
|
||||
}
|
||||
}
|
||||
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(109*128-1).getBlock().hasFreeSectors());
|
||||
|
||||
assertFalse(fs1.getBATBlockAndIndex(109 * 128 - 1).getBlock().hasFreeSectors());
|
||||
try {
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(109*128).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(109 * 128).getBlock().hasFreeSectors());
|
||||
fail("Should only be 109 BATs");
|
||||
} catch(IndexOutOfBoundsException e) {
|
||||
// expected here
|
||||
|
@ -525,10 +537,10 @@ public final class TestNPOIFSFileSystem {
|
|||
free = fs1.getFreeBlock();
|
||||
assertTrue("Had: " + free, free > 0);
|
||||
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(109*128-1).getBlock().hasFreeSectors());
|
||||
assertEquals(true, fs1.getBATBlockAndIndex(110*128-1).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(109 * 128 - 1).getBlock().hasFreeSectors());
|
||||
assertTrue(fs1.getBATBlockAndIndex(110 * 128 - 1).getBlock().hasFreeSectors());
|
||||
try {
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(110*128).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(110 * 128).getBlock().hasFreeSectors());
|
||||
fail("Should only be 110 BATs");
|
||||
} catch(IndexOutOfBoundsException e) {
|
||||
// expected here
|
||||
|
@ -552,9 +564,9 @@ public final class TestNPOIFSFileSystem {
|
|||
}
|
||||
|
||||
// Should now have 109+127 = 236 BATs
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(236*128-1).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
|
||||
try {
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(236*128).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(236 * 128).getBlock().hasFreeSectors());
|
||||
fail("Should only be 236 BATs");
|
||||
} catch(IndexOutOfBoundsException e) {
|
||||
// expected here
|
||||
|
@ -566,10 +578,10 @@ public final class TestNPOIFSFileSystem {
|
|||
free = fs1.getFreeBlock();
|
||||
assertTrue("Had: " + free, free > 0);
|
||||
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(236*128-1).getBlock().hasFreeSectors());
|
||||
assertEquals(true, fs1.getBATBlockAndIndex(237*128-1).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
|
||||
assertTrue(fs1.getBATBlockAndIndex(237 * 128 - 1).getBlock().hasFreeSectors());
|
||||
try {
|
||||
assertEquals(false, fs1.getBATBlockAndIndex(237*128).getBlock().hasFreeSectors());
|
||||
assertFalse(fs1.getBATBlockAndIndex(237 * 128).getBlock().hasFreeSectors());
|
||||
fail("Should only be 237 BATs");
|
||||
} catch(IndexOutOfBoundsException e) {
|
||||
// expected here
|
||||
|
@ -590,10 +602,10 @@ public final class TestNPOIFSFileSystem {
|
|||
// Check that it is seen correctly
|
||||
assertBATCount(fs2, 237, 2);
|
||||
|
||||
assertEquals(false, fs2.getBATBlockAndIndex(236*128-1).getBlock().hasFreeSectors());
|
||||
assertEquals(true, fs2.getBATBlockAndIndex(237*128-1).getBlock().hasFreeSectors());
|
||||
assertFalse(fs2.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
|
||||
assertTrue(fs2.getBATBlockAndIndex(237 * 128 - 1).getBlock().hasFreeSectors());
|
||||
try {
|
||||
assertEquals(false, fs2.getBATBlockAndIndex(237*128).getBlock().hasFreeSectors());
|
||||
assertFalse(fs2.getBATBlockAndIndex(237 * 128).getBlock().hasFreeSectors());
|
||||
fail("Should only be 237 BATs");
|
||||
} catch(IndexOutOfBoundsException e) {
|
||||
// expected here
|
||||
|
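
The BAT hunks above lean on fixed OLE2 sector arithmetic: with 512-byte big blocks each BAT sector holds 128 four-byte entries, the file header addresses up to 109 BAT sectors directly, and each extra XBAT sector contributes roughly 127 more (its last slot chains to the next XBAT). A small stand-alone sketch of that arithmetic, not part of the patch (class name is illustrative):

public class BatArithmeticSketch {
    public static void main(String[] args) {
        final int entriesPerBat = 512 / 4;   // 128 entries per 512-byte BAT sector
        final int headerBats = 109;          // BAT sectors addressable from the header alone
        // Last block index covered by the header-addressed BATs: 109*128 - 1 = 13951
        System.out.println(headerBats * entriesPerBat - 1);
        // With one XBAT sector added, 109 + 127 = 236 BATs - the count the test expects
        System.out.println(headerBats + 127);
    }
}
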
@@ -620,12 +632,12 @@ public final class TestNPOIFSFileSystem {
Entry si = root.getEntry("\u0005SummaryInformation");
Entry image = root.getEntry("Image");
Entry tags = root.getEntry("Tags");

assertEquals(false, thumbnail.isDirectoryEntry());
assertEquals(false, dsi.isDirectoryEntry());
assertEquals(false, si.isDirectoryEntry());
assertEquals(true, image.isDirectoryEntry());
assertEquals(false, tags.isDirectoryEntry());

assertFalse(thumbnail.isDirectoryEntry());
assertFalse(dsi.isDirectoryEntry());
assertFalse(si.isDirectoryEntry());
assertTrue(image.isDirectoryEntry());
assertFalse(tags.isDirectoryEntry());

// Check via the iterator
Iterator<Entry> it = root.getEntries();

@@ -652,8 +664,8 @@ public final class TestNPOIFSFileSystem {
for(NPOIFSFileSystem fs : get512and4kFileAndInput()) {
DirectoryEntry root = fs.getRoot();
Entry si = root.getEntry("\u0005SummaryInformation");

assertEquals(true, si.isDocumentEntry());

assertTrue(si.isDocumentEntry());
DocumentNode doc = (DocumentNode)si;

// Check we can read it

@@ -665,9 +677,9 @@ public final class TestNPOIFSFileSystem {
SummaryInformation inf = (SummaryInformation)ps;

// Check some bits in it
assertEquals(null, inf.getApplicationName());
assertEquals(null, inf.getAuthor());
assertEquals(null, inf.getSubject());
assertNull(inf.getApplicationName());
assertNull(inf.getAuthor());
assertNull(inf.getSubject());
assertEquals(131333, inf.getOSVersion());

// Finish with this one

@@ -676,7 +688,7 @@ public final class TestNPOIFSFileSystem {

// Try the other summary information
si = root.getEntry("\u0005DocumentSummaryInformation");
assertEquals(true, si.isDocumentEntry());
assertTrue(si.isDocumentEntry());
doc = (DocumentNode)si;
assertContentsMatches(null, doc);

@@ -1541,7 +1553,7 @@ public final class TestNPOIFSFileSystem {
DirectoryEntry vbaProj = (DirectoryEntry)src.getRoot().getEntry("_VBA_PROJECT_CUR");
assertEquals(3, vbaProj.getEntryCount());
// Can't delete yet, has stuff
assertEquals(false, vbaProj.delete());
assertFalse(vbaProj.delete());
// Recursively delete
_recursiveDeletee(vbaProj);

@@ -1554,7 +1566,7 @@ public final class TestNPOIFSFileSystem {
}
private void _recursiveDeletee(Entry entry) throws IOException {
if (entry.isDocumentEntry()) {
    assertEquals(true, entry.delete());
    assertTrue(entry.delete());
    return;
}

@@ -1564,7 +1576,7 @@ public final class TestNPOIFSFileSystem {
Entry ce = dir.getEntry(name);
_recursiveDeletee(ce);
}
assertEquals(true, dir.delete());
assertTrue(dir.delete());
}
@SuppressWarnings("unused")
private int _countChildren(DirectoryProperty p) {

@@ -1677,24 +1689,24 @@ public final class TestNPOIFSFileSystem {
fs.createDocument(new DummyDataInputStream(s2gb), "Big");
}

protected static class DummyDataInputStream extends InputStream {
protected final long maxSize;
protected long size;
public DummyDataInputStream(long maxSize) {
private static final class DummyDataInputStream extends InputStream {
private final long maxSize;
private long size;
private DummyDataInputStream(long maxSize) {
    this.maxSize = maxSize;
    this.size = 0;
}

public int read() throws IOException {
public int read() {
    if (size >= maxSize) return -1;
    size++;
    return (int)(size % 128);
}

public int read(byte[] b) throws IOException {
public int read(byte[] b) {
    return read(b, 0, b.length);
}
public int read(byte[] b, int offset, int len) throws IOException {
public int read(byte[] b, int offset, int len) {
    if (size >= maxSize) return -1;
    int sz = (int)Math.min(len, maxSize-size);
    for (int i=0; i<sz; i++) {
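
DummyDataInputStream above synthesises its payload on the fly, so the multi-gigabyte document test never has to hold the data in memory. A minimal stand-alone sketch of the same idea (hypothetical class name, not POI API):

import java.io.InputStream;

final class BoundedDummyStream extends InputStream {
    private final long maxSize;
    private long size;
    BoundedDummyStream(long maxSize) { this.maxSize = maxSize; }
    @Override public int read() {
        if (size >= maxSize) return -1;   // pretend end-of-stream once the budget is spent
        size++;
        return (int) (size % 128);        // cheap, deterministic payload
    }
}

Used the same way as the line above, e.g. fs.createDocument(new BoundedDummyStream(2L * 1024 * 1024 * 1024), "Big").
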
@@ -1715,8 +1727,8 @@ public final class TestNPOIFSFileSystem {

for (int i = 0; i < iterations; i++) {
try (InputStream inputStream = POIDataSamples.getHSMFInstance().openResourceAsStream("lots-of-recipients.msg")) {
OPOIFSFileSystem srcFileSystem = new OPOIFSFileSystem(inputStream);
OPOIFSFileSystem destFileSystem = new OPOIFSFileSystem();
NPOIFSFileSystem srcFileSystem = new NPOIFSFileSystem(inputStream);
NPOIFSFileSystem destFileSystem = new NPOIFSFileSystem();

copyAllEntries(srcFileSystem.getRoot(), destFileSystem.getRoot());

@@ -1727,7 +1739,6 @@ public final class TestNPOIFSFileSystem {

assertTrue(file.delete());
if (i % 10 == 0) System.out.print(".");
if (i % 800 == 0 && i > 0) System.out.println();
}
}

@@ -1754,7 +1765,6 @@ public final class TestNPOIFSFileSystem {

assertTrue(file.delete());
if (i % 10 == 0) System.out.print(".");
if (i % 800 == 0 && i > 0) System.out.println();
}
}

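The copy test above relies on the test class's copyAllEntries() helper to mirror one filesystem's root into another. A sketch of what such a recursive copy has to do, using the poifs.filesystem types that appear in this patch (the helper name here is illustrative, not the test's actual method):

import java.io.IOException;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.Entry;

final class EntryCopySketch {
    // Recursively mirror src's children into dest.
    static void copyEntries(DirectoryEntry src, DirectoryEntry dest) throws IOException {
        for (Entry entry : src) {
            if (entry.isDirectoryEntry()) {
                copyEntries((DirectoryEntry) entry, dest.createDirectory(entry.getName()));
            } else {
                try (DocumentInputStream dis = new DocumentInputStream((DocumentEntry) entry)) {
                    dest.createDocument(entry.getName(), dis);
                }
            }
        }
    }
}

A similar ready-made helper, EntryUtils.copyNodes, lives in the same package and can be used instead of hand-rolling the recursion.
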
@@ -17,14 +17,16 @@

package org.apache.poi.poifs.filesystem;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

import junit.framework.TestCase;

import org.apache.poi.POIDataSamples;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.poifs.common.POIFSBigBlockSize;

@@ -34,18 +36,21 @@ import org.apache.poi.poifs.storage.BlockAllocationTableReader;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.poifs.storage.RawDataBlockList;
import org.apache.poi.util.IOUtils;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/**
 * Tests for the older OPOIFS-based POIFSFileSystem
 */
public final class TestPOIFSFileSystem extends TestCase {
public final class TestPOIFSFileSystem {
private final POIDataSamples _samples = POIDataSamples.getPOIFSInstance();

/**
 * Mock exception used to ensure correct error handling
 */
private static final class MyEx extends RuntimeException {
    public MyEx() {
    MyEx() {
        // no fields to initialise
    }
}

@@ -60,7 +65,7 @@ public final class TestPOIFSFileSystem extends TestCase {
private int _currentIx;
private boolean _isClosed;

public TestIS(InputStream is, int failIndex) {
TestIS(InputStream is, int failIndex) {
    _is = is;
    _failIndex = failIndex;
    _currentIx = 0;

@@ -93,7 +98,7 @@ public final class TestPOIFSFileSystem extends TestCase {
    _isClosed = true;
    _is.close();
}
public boolean isClosed() {
boolean isClosed() {
    return _isClosed;
}
}

@@ -102,29 +107,26 @@ public final class TestPOIFSFileSystem extends TestCase {
 * Test for undesired behaviour observable as of svn revision 618865 (5-Feb-2008).
 * POIFSFileSystem was not closing the input stream.
 */
public void testAlwaysClose() {
@Test
public void testAlwaysClose() throws IOException {
    TestIS testIS;

    // Normal case - read until EOF and close
    testIS = new TestIS(openSampleStream("13224.xls"), -1);
    try {
        new OPOIFSFileSystem(testIS);
    } catch (IOException e) {
        throw new RuntimeException(e);
    try (NPOIFSFileSystem ignored = new NPOIFSFileSystem(testIS)){
        assertTrue("input stream was not closed", testIS.isClosed());
    }
    assertTrue("input stream was not closed", testIS.isClosed());

    // intended to crash after reading 10000 bytes
    testIS = new TestIS(openSampleStream("13224.xls"), 10000);
    try {
        new OPOIFSFileSystem(testIS);
    try (NPOIFSFileSystem ignored = new NPOIFSFileSystem(testIS)){
        fail("ex should have been thrown");
    } catch (IOException e) {
        throw new RuntimeException(e);
    } catch (MyEx e) {
        // expected
        assertTrue("input stream was not closed", testIS.isClosed()); // but still should close
    } catch (Exception e) {
        fail("MyEx is expected to be thrown");
    }
    assertTrue("input stream was not closed", testIS.isClosed()); // but still should close
}

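The rewritten testAlwaysClose() depends on NPOIFSFileSystem reading the whole stream up front and closing it even when parsing fails; wrapping the filesystem in try-with-resources then also closes the filesystem itself. A minimal sketch of that pattern (class and method names are illustrative):

import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

class StreamCloseSketch {
    static void openAndClose(InputStream is) throws IOException {
        try (NPOIFSFileSystem fs = new NPOIFSFileSystem(is)) {
            // The constructor buffers the whole input and closes it, so even a parse
            // failure leaves the stream closed - which is what the test observes
            // through its TestIS wrapper.
            System.out.println(fs.getRoot().getEntryCount());
        }
    }
}
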
/**

@@ -138,6 +140,7 @@ public final class TestPOIFSFileSystem extends TestCase {
 * The other is to fix the handling of the last block in
 * POIFS, since it seems to be slight wrong
 */
@Test
public void testShortLastBlock() throws Exception {
    String[] files = new String[] {
        "ShortLastBlock.qwp", "ShortLastBlock.wps"

@@ -145,7 +148,7 @@ public final class TestPOIFSFileSystem extends TestCase {

    for (String file : files) {
        // Open the file up
        OPOIFSFileSystem fs = new OPOIFSFileSystem(
        NPOIFSFileSystem fs = new NPOIFSFileSystem(
            _samples.openResourceAsStream(file)
        );

@@ -156,26 +159,24 @@ public final class TestPOIFSFileSystem extends TestCase {
        // Check sizes
    }
}

@Rule
public ExpectedException expectedEx = ExpectedException.none();

/**
 * Check that we do the right thing when the list of which
 * sectors are BAT blocks points off the list of
 * sectors that exist in the file.
 */
@Test
public void testFATandDIFATsectors() throws Exception {
    // Open the file up
    try {
        InputStream stream = _samples.openResourceAsStream("ReferencesInvalidSectors.mpp");
        try {
            new OPOIFSFileSystem(stream);
            fail("File is corrupt and shouldn't have been opened");
        } finally {
            stream.close();
        }
    } catch (IOException e) {
        String msg = e.getMessage();
        assertTrue(msg.startsWith("Your file contains 695 sectors"));
    }
    expectedEx.expect(IndexOutOfBoundsException.class);
    expectedEx.expectMessage("Block 1148 not found");
    try (InputStream stream = _samples.openResourceAsStream("ReferencesInvalidSectors.mpp")) {
        new NPOIFSFileSystem(stream);
        fail("File is corrupt and shouldn't have been opened");
    }
}

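The corrupt-FAT test above moves from a hand-rolled try/fail/catch to JUnit 4's ExpectedException rule. A self-contained sketch of that pattern, reusing the exception type and message from the hunk (the resource path is an assumption, not the project's sample loader):

import java.io.InputStream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class CorruptFatSketch {
    @Rule
    public ExpectedException expectedEx = ExpectedException.none();

    @Test
    public void corruptFatIsRejected() throws Exception {
        // The rule records the expectation; the test passes only if this exact
        // exception type (with a matching message) is thrown by the body below.
        expectedEx.expect(IndexOutOfBoundsException.class);
        expectedEx.expectMessage("Block 1148 not found");
        try (InputStream stream = getClass().getResourceAsStream("/ReferencesInvalidSectors.mpp")) {
            new NPOIFSFileSystem(stream);
        }
    }
}
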
/**

@@ -184,9 +185,10 @@ public final class TestPOIFSFileSystem extends TestCase {
 * However, because a file needs to be at least 6.875mb big
 * to have an XBAT in it, we don't have a test one. So, generate it.
 */
@Test
public void testBATandXBAT() throws Exception {
    byte[] hugeStream = new byte[8*1024*1024];
    OPOIFSFileSystem fs = new OPOIFSFileSystem();
    NPOIFSFileSystem fs = new NPOIFSFileSystem();
    fs.getRoot().createDocument(
        "BIG", new ByteArrayInputStream(hugeStream)
    );

@@ -229,8 +231,7 @@ public final class TestPOIFSFileSystem extends TestCase {
    assertEquals(fsData.length / 512, blockList.blockCount() + 1); // Header not counted

    // Now load it and check
    fs = null;
    fs = new OPOIFSFileSystem(
    fs = new NPOIFSFileSystem(
        new ByteArrayInputStream(fsData)
    );

@@ -244,41 +245,39 @@ public final class TestPOIFSFileSystem extends TestCase {
 * Most OLE2 files use 512byte blocks. However, a small number
 * use 4k blocks. Check that we can open these.
 */
@Test
public void test4KBlocks() throws Exception {
    POIDataSamples _samples = POIDataSamples.getPOIFSInstance();
    InputStream inp = _samples.openResourceAsStream("BlockSize4096.zvi");
    try {
        // First up, check that we can process the header properly
        HeaderBlock header_block = new HeaderBlock(inp);
        POIFSBigBlockSize bigBlockSize = header_block.getBigBlockSize();
        assertEquals(4096, bigBlockSize.getBigBlockSize());
    try (InputStream inp = _samples.openResourceAsStream("BlockSize4096.zvi")) {
        // First up, check that we can process the header properly
        HeaderBlock header_block = new HeaderBlock(inp);
        POIFSBigBlockSize bigBlockSize = header_block.getBigBlockSize();
        assertEquals(4096, bigBlockSize.getBigBlockSize());

        // Check the fat info looks sane
        assertEquals(1, header_block.getBATArray().length);
        assertEquals(1, header_block.getBATCount());
        assertEquals(0, header_block.getXBATCount());
        // Check the fat info looks sane
        assertEquals(1, header_block.getBATArray().length);
        assertEquals(1, header_block.getBATCount());
        assertEquals(0, header_block.getXBATCount());

        // Now check we can get the basic fat
        RawDataBlockList data_blocks = new RawDataBlockList(inp,
            bigBlockSize);
        assertEquals(15, data_blocks.blockCount());
        // Now check we can get the basic fat
        RawDataBlockList data_blocks = new RawDataBlockList(inp,
            bigBlockSize);
        assertEquals(15, data_blocks.blockCount());

        // Now try and open properly
        OPOIFSFileSystem fs = new OPOIFSFileSystem(
            _samples.openResourceAsStream("BlockSize4096.zvi"));
        assertTrue(fs.getRoot().getEntryCount() > 3);
        // Now try and open properly
        NPOIFSFileSystem fs = new NPOIFSFileSystem(
            _samples.openResourceAsStream("BlockSize4096.zvi"));
        assertTrue(fs.getRoot().getEntryCount() > 3);

        // Check we can get at all the contents
        checkAllDirectoryContents(fs.getRoot());
        // Check we can get at all the contents
        checkAllDirectoryContents(fs.getRoot());

        // Finally, check we can do a similar 512byte one too
        fs = new OPOIFSFileSystem(
            _samples.openResourceAsStream("BlockSize512.zvi"));
        assertTrue(fs.getRoot().getEntryCount() > 3);
        checkAllDirectoryContents(fs.getRoot());
    } finally {
        inp.close();
    }
        // Finally, check we can do a similar 512byte one too
        fs = new NPOIFSFileSystem(
            _samples.openResourceAsStream("BlockSize512.zvi"));
        assertTrue(fs.getRoot().getEntryCount() > 3);
        checkAllDirectoryContents(fs.getRoot());
    }
}

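test4KBlocks() exercises the rarer 4096-byte block size; the header block is what tells you which size a given file uses. A small sketch built on the same HeaderBlock API the test calls (class and method names are illustrative):

import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.poifs.storage.HeaderBlock;

class BlockSizeSketch {
    // Returns 512 for most OLE2 files, 4096 for files like BlockSize4096.zvi.
    static int bigBlockSize(InputStream inp) throws IOException {
        HeaderBlock header = new HeaderBlock(inp);
        return header.getBigBlockSize().getBigBlockSize();
    }
}
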
private void checkAllDirectoryContents(DirectoryEntry dir) throws IOException {
    for(Entry entry : dir) {

@@ -293,6 +292,7 @@ public final class TestPOIFSFileSystem extends TestCase {
    }
}

@SuppressWarnings("SameParameterValue")
private static InputStream openSampleStream(String sampleFileName) {
    return HSSFTestDataSamples.openSampleFileStream(sampleFileName);
}


@@ -20,7 +20,7 @@ package org.apache.poi.poifs.storage;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/**
 * Tests for org.apache.poi.poifs.storage<br>
 * Tests for org.apache.poi.poifs.storage
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({

@@ -33,11 +33,7 @@ import org.junit.runners.Suite;
    TestHeaderBlockWriting.class,
    TestPropertyBlock.class,
    TestRawDataBlock.class,
    TestRawDataBlockList.class,
    TestSmallBlockTableReader.class,
    TestSmallBlockTableWriter.class,
    TestSmallDocumentBlock.class,
    TestSmallDocumentBlockList.class
    TestRawDataBlockList.class
})
public class AllPOIFSStorageTests {
}

@@ -17,196 +17,58 @@

package org.apache.poi.poifs.storage;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

import junit.framework.AssertionFailedError;
import junit.framework.TestCase;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.util.HexRead;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianConsts;
import org.junit.Test;

/**
 * Class to test BlockAllocationTableReader functionality
 *
 * @author Marc Johnson
 */
public final class TestBlockAllocationTableReader extends TestCase {
public class TestBlockAllocationTableReader {

/**
 * Test small block allocation table constructor
 */
@Test
public void testSmallBATConstructor() throws IOException {

    // need to create an array of raw blocks containing the SBAT,
    // and a small document block list
    String[] sbat_data = {
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
        "FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
        "FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
        "FE FF FF FF 22 00 00 00 FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
        "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
    };
    final String sbat_data = "H4sIAAAAAAAAAPv/nzjwj4ZYiYGBAZfcKKAtAAC/sexrAAIAAA==";

    RawDataBlock[] sbats = { new RawDataBlock(makeDataStream(sbat_data)) };
    RawDataBlock[] sbats = { new RawDataBlock(new ByteArrayInputStream(RawDataUtil.decompress(sbat_data))) };

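The hex-string fixtures are being replaced by gzip-compressed, base64-encoded strings that the tests expand with RawDataUtil.decompress(). A stand-alone approximation of that decode step using only the JDK (this sketches the presumed format - the "H4sI" prefix is the base64 of the gzip magic bytes - and is not POI's actual helper):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Base64;
import java.util.zip.GZIPInputStream;

final class CompressedTestData {
    // base64 -> gunzip -> raw test bytes
    static byte[] decompress(String base64Gzip) throws IOException {
        byte[] gz = Base64.getDecoder().decode(base64Gzip);
        try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(gz));
             ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            byte[] buf = new byte[4096];
            for (int n; (n = in.read(buf)) != -1; ) {
                out.write(buf, 0, n);
            }
            return out.toByteArray();
        }
    }
}
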
String[] sbt_data = {
|
||||
"08 00 28 00 6A 61 6D 65 73 2D 55 37 37 32 37 39 32 2D 28 31 36 2D 4F 63 74 2D 32 30 30 31 40 31",
|
||||
"36 2D 34 31 2D 33 33 29 2E 5A 44 46 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"07 00 00 00 00 00 80 27 E2 40 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"07 00 00 00 00 00 80 27 E2 40 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"07 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"0B 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"03 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 02 00 20 31 08 00 05 00 53 61 76 65 64 08 00 17 00 53 2E 48 55 53 53 41 49 4E 20 41 20 44",
|
||||
"45 56 20 4F 46 46 52 20 55 4B 08 00 0B 00 31 36 2D 4F 63 74 2D 32 30 30 31 08 00 05 00 35 2E 33",
|
||||
"2E 32 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 05 00 6A 61 6D 65 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 03 00 47 42 50 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 1D 00 28 41 29 31 36 2D 4F 63 74 2D 32 30 30 31 20 74 6F 20 31 36 2D 4F 63 74 2D 32 30 30",
|
||||
"31 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 01 00 31 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 18 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00",
|
||||
"02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00",
|
||||
"02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 03 00 47 42 50 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 17 00 53 2E 48 55 53 53 41 49 4E 20 41 20 44 45 56 20 4F 46 46 52 20 55 4B 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"08 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 02 00 00 00 02 00 00 00 02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"03 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
};
|
||||
final String sbt_data =
    "H4sIAAAAAAAAAONg0GDISsxNLdYNNTc3Mrc00tUwNNP1Ty7RNTIwMHQAsk0MdY2NNfWiXNwYsAB2MNmg/sgBmyxhQB395AMm" +
    "BkaK9HNQaD83hfqZKXY/E4OCIQcDK0NwYllqCgeDOEOwnkdocLCjp5+Co4KLa5iCv5tbkEKoNwfQrUhJA6TFVM9Yz4gy94OM" +
    "Aac/svVTaj8zg7tTAAX6ZRk0HDWRAkahJF8BiUtQPyMDITX4ABMFegeDfsrjjzLAxCBBoX7KwED7n/LwG2j7KSv/Bt79A2s/" +
    "NdzPQUWaVDDQ/h/o+meop5+hrx9ng4ku9jOhYVIBM4X2j4KhDQAtwD4rAA4AAA==";

RawDataBlock[] sbts = new RawDataBlock[7];
InputStream sbt_input = makeDataStream(sbt_data);
InputStream sbt_input = new ByteArrayInputStream(RawDataUtil.decompress(sbt_data));

for (int j = 0; j < 7; j++) {
    sbts[j] = new RawDataBlock(sbt_input);
BlockListImpl small_blocks = new RawDataBlockList(sbt_input, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
int blockCount = small_blocks.blockCount();
ListManagedBlock[] lmb = new ListManagedBlock[7*blockCount];
for (int i=0; i<lmb.length; i++) {
    lmb[i] = small_blocks.get(i % blockCount);
}
SmallDocumentBlockList small_blocks = new SmallDocumentBlockList(SmallDocumentBlock
    .extract(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, sbts));
small_blocks.setBlocks(lmb);

BlockAllocationTableReader sbat = new BlockAllocationTableReader(
    POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, sbats, small_blocks);
boolean[] isUsed = {
    false, false, false, false, false, false, false, false, false,
    false, true, true, true, true, true, true, true, true, true, true,
    true, true, true, true, true, true, true, true, true, true, true,
    true, true, true, true, true, true, true, true, true, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false, false, false, false,
    false, false, false, false, false, false
};
int[] nextIndex = {
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -2, -2, -2, -2, -2, -2,
    -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,

@@ -219,13 +81,13 @@ public final class TestBlockAllocationTableReader extends TestCase {
};

for (int j = 0; j < 128; j++) {
    if (isUsed[j]) {
        assertTrue("checking usage of block " + j, sbat.isUsed(j));
        assertEquals("checking usage of block " + j, nextIndex[j], sbat
            .getNextBlockIndex(j));
    final boolean isUsed = nextIndex[j] != -1;
    assertEquals("checking usage of block " + j, isUsed, sbat.isUsed(j));

    if (isUsed) {
        assertEquals("checking usage of block " + j, nextIndex[j], sbat.getNextBlockIndex(j));
        small_blocks.remove(j);
    } else {
        assertTrue("checking usage of block " + j, !sbat.isUsed(j));
        try {
            small_blocks.remove(j);
            fail("removing block " + j + " should have failed");

@@ -236,10 +98,7 @@ public final class TestBlockAllocationTableReader extends TestCase {
    }
}

private static InputStream makeDataStream(String[] hexDataLines) {
    return new ByteArrayInputStream(RawDataUtil.decode(hexDataLines));
}

@Test
public void testReadingConstructor() throws IOException {

    // create a document, minus the header block, and use that to

@@ -288,6 +147,7 @@ public final class TestBlockAllocationTableReader extends TestCase {
    }
}

@Test
public void testFetchBlocks() throws IOException {

    // strategy:

@@ -372,10 +232,8 @@ public final class TestBlockAllocationTableReader extends TestCase {
        assertEquals(expected_length[j], dataBlocks.length);
    }
} catch (IOException e) {
    if (expected_length[j] == -1) {

        // no problem, we expected a failure here
    } else {
    if (expected_length[j] != -1) {
        // -1 would be a expected failure here, anything else not
        throw e;
    }
}

@@ -386,6 +244,7 @@ public final class TestBlockAllocationTableReader extends TestCase {
 * Bugzilla 48085 describes an error where a corrupted Excel file causes POI to throw an
 * {@link OutOfMemoryError}.
 */
@Test
public void testBadSectorAllocationTableSize_bug48085() {
    int BLOCK_SIZE = 512;
    POIFSBigBlockSize bigBlockSize = POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;

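The refactored assertion loop above drops the separate boolean[] isUsed table and derives usage from the expected next-block index instead: -1 marks an unused block, while -2 (the POIFS end-of-chain marker) or any real index means the block is allocated. A one-method sketch of that invariant (class name is illustrative):

class BatExpectation {
    // -1 means "unused"; -2 (end of chain) and real block indices both mean "used".
    static boolean isUsed(int[] nextIndex, int block) {
        return nextIndex[block] != -1;
    }
}
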
@@ -1,317 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.ByteArrayInputStream;
import java.io.IOException;

import junit.framework.TestCase;

import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.property.RootProperty;

/**
 * Class to test SmallBlockTableReader functionality
 *
 * @author Marc Johnson
 */
public final class TestSmallBlockTableReader extends TestCase {

    public void testReadingConstructor() throws IOException {

        // first, we need the raw data blocks
        String[] raw_data_array = {
"52 00 6F 00 6F 00 74 00 20 00 45 00 6E 00 74 00 72 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"16 00 05 01 FF FF FF FF FF FF FF FF 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0A 00 00 00 80 07 00 00 00 00 00 00",
|
||||
"44 00 65 00 61 00 6C 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"22 00 01 01 FF FF FF FF FF FF FF FF 15 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"46 00 55 00 44 00 20 00 47 00 72 00 69 00 64 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00",
|
||||
"74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"2A 00 02 01 FF FF FF FF 0E 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"44 00 6F 00 75 00 62 00 6C 00 65 00 20 00 44 00 65 00 61 00 6C 00 69 00 6E 00 67 00 20 00 49 00",
|
||||
"6E 00 64 00 69 00 63 00 61 00 74 00 6F 00 72 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"32 00 02 01 FF FF FF FF 09 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"43 00 68 00 69 00 6C 00 64 00 20 00 50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00",
|
||||
"20 00 50 00 65 00 72 00 6D 00 69 00 74 00 74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"36 00 02 01 FF FF FF FF 07 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 01 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
|
||||
"20 00 46 00 69 00 78 00 65 00 64 00 20 00 56 00 61 00 6C 00 75 00 65 00 00 00 00 00 00 00 00 00",
|
||||
"3A 00 02 01 FF FF FF FF 06 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 02 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"55 00 6D 00 62 00 72 00 65 00 6C 00 6C 00 61 00 20 00 4C 00 69 00 6E 00 6B 00 73 00 20 00 61 00",
|
||||
"6E 00 64 00 20 00 50 00 61 00 73 00 73 00 65 00 6E 00 67 00 65 00 72 00 73 00 00 00 00 00 00 00",
|
||||
"3C 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
|
||||
"20 00 50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"38 00 02 01 FF FF FF FF 05 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 03 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"49 00 6E 00 66 00 61 00 6E 00 74 00 20 00 44 00 69 00 73 00 63 00 6F 00 75 00 6E 00 74 00 20 00",
|
||||
"50 00 65 00 72 00 6D 00 69 00 74 00 74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"34 00 02 01 FF FF FF FF 04 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 04 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
|
||||
"20 00 43 00 75 00 72 00 72 00 65 00 6E 00 63 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"34 00 02 01 FF FF FF FF 08 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 05 00 00 00 07 00 00 00 00 00 00 00",
|
||||
"4F 00 75 00 74 00 62 00 6F 00 75 00 6E 00 64 00 20 00 54 00 72 00 61 00 76 00 65 00 6C 00 20 00",
|
||||
"44 00 61 00 74 00 65 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"2C 00 02 01 FF FF FF FF 0B 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 06 00 00 00 21 00 00 00 00 00 00 00",
|
||||
"42 00 75 00 73 00 69 00 6E 00 65 00 73 00 73 00 20 00 4A 00 75 00 73 00 74 00 69 00 66 00 69 00",
|
||||
"63 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"2E 00 02 01 FF FF FF FF 03 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 07 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"49 00 6E 00 66 00 61 00 6E 00 74 00 20 00 44 00 69 00 73 00 63 00 6F 00 75 00 6E 00 74 00 20 00",
|
||||
"56 00 61 00 6C 00 75 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"2C 00 02 01 FF FF FF FF 0D 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 08 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4F 00 74 00 68 00 65 00 72 00 20 00 43 00 61 00 72 00 72 00 69 00 65 00 72 00 20 00 53 00 65 00",
|
||||
"63 00 74 00 6F 00 72 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"2C 00 02 01 FF FF FF FF 0A 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 09 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4E 00 75 00 6D 00 62 00 65 00 72 00 20 00 6F 00 66 00 20 00 50 00 61 00 73 00 73 00 65 00 6E 00",
|
||||
"67 00 65 00 72 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"2A 00 02 01 FF FF FF FF 0C 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0A 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"53 00 61 00 6C 00 65 00 73 00 20 00 41 00 72 00 65 00 61 00 20 00 43 00 6F 00 64 00 65 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"20 00 02 01 1C 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0B 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4F 00 74 00 68 00 65 00 72 00 20 00 52 00 65 00 66 00 75 00 6E 00 64 00 20 00 54 00 65 00 78 00",
|
||||
"74 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"24 00 02 01 17 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0C 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4D 00 61 00 78 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 20 00 50 00 65 00 72 00",
|
||||
"69 00 6F 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"28 00 02 01 FF FF FF FF 14 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0D 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4E 00 65 00 74 00 20 00 52 00 65 00 6D 00 69 00 74 00 20 00 50 00 65 00 72 00 6D 00 69 00 74 00",
|
||||
"74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"28 00 02 01 FF FF FF FF 13 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0E 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00 20 00 6F 00 66 00 20 00 59 00 69 00",
|
||||
"65 00 6C 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"28 00 02 01 FF FF FF FF 02 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0F 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4E 00 61 00 74 00 75 00 72 00 65 00 20 00 6F 00 66 00 20 00 56 00 61 00 72 00 69 00 61 00 74 00",
|
||||
"69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"28 00 02 01 FF FF FF FF 12 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 10 00 00 00 50 00 00 00 00 00 00 00",
|
||||
"46 00 55 00 44 00 20 00 47 00 72 00 69 00 64 00 20 00 44 00 69 00 6D 00 65 00 6E 00 73 00 69 00",
|
||||
"6F 00 6E 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"28 00 02 01 10 00 00 00 11 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 12 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"44 00 65 00 61 00 6C 00 20 00 44 00 65 00 73 00 63 00 72 00 69 00 70 00 74 00 69 00 6F 00 6E 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"22 00 02 01 19 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 13 00 00 00 09 00 00 00 00 00 00 00",
|
||||
"54 00 52 00 56 00 41 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"22 00 02 01 18 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 14 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"50 00 72 00 6F 00 72 00 61 00 74 00 65 00 20 00 43 00 6F 00 6D 00 6D 00 65 00 6E 00 74 00 73 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"22 00 02 01 16 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"43 00 6F 00 6D 00 6D 00 69 00 73 00 73 00 69 00 6F 00 6E 00 20 00 56 00 61 00 6C 00 75 00 65 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"22 00 02 01 0F 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 15 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"4D 00 61 00 78 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"1A 00 02 01 20 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 16 00 00 00 05 00 00 00 00 00 00 00",
|
||||
"44 00 65 00 61 00 6C 00 20 00 43 00 75 00 72 00 72 00 65 00 6E 00 63 00 79 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"1C 00 02 01 1D 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 17 00 00 00 07 00 00 00 00 00 00 00",
|
||||
"43 00 6F 00 6E 00 73 00 6F 00 72 00 74 00 69 00 61 00 20 00 43 00 6F 00 64 00 65 00 73 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"20 00 02 01 1B 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"42 00 75 00 73 00 69 00 6E 00 65 00 73 00 73 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"1C 00 02 01 1A 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 18 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"44 00 65 00 61 00 6C 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"14 00 02 01 23 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 19 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"53 00 75 00 72 00 63 00 68 00 61 00 72 00 67 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"14 00 02 01 21 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1A 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"41 00 67 00 65 00 6E 00 74 00 73 00 20 00 4E 00 61 00 6D 00 65 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"18 00 02 01 1F 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1B 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"46 00 61 00 72 00 65 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"14 00 02 01 1E 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1C 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"53 00 75 00 62 00 20 00 44 00 65 00 61 00 6C 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"12 00 02 01 24 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1D 00 00 00 04 00 00 00 00 00 00 00",
|
||||
"41 00 4C 00 43 00 20 00 43 00 6F 00 64 00 65 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"14 00 02 01 22 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"52 00 65 00 6D 00 61 00 72 00 6B 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"10 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 03 00 47 42 50 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 1D 00 28 41 29 31 36 2D 4F 63 74 2D 32 30 30 31 20 74 6F 20 31 36 2D 4F 63 74 2D 32 30 30",
|
||||
"31 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 01 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00",
|
||||
"02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00",
|
||||
"02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 18 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 05 00 6A 61 6D 65 73 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 01 00 31 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 03 00 47 42 50 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
|
||||
"FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
|
||||
"11 00 00 00 FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
|
||||
"FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"01 00 00 00 02 00 00 00 03 00 00 00 04 00 00 00 05 00 00 00 06 00 00 00 07 00 00 00 08 00 00 00",
|
||||
"09 00 00 00 FE FF FF FF 0B 00 00 00 0C 00 00 00 0D 00 00 00 FE FF FF FF FE FF FF FF FE FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
"FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
|
||||
        };

        RawDataBlockList data_blocks = new RawDataBlockList(
            new ByteArrayInputStream(RawDataUtil.decode(raw_data_array)),
            POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
        int[] bat_array = { 15 };

        // need to initialize the block list with a block allocation table
        new BlockAllocationTableReader(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, 1, bat_array, 0, -2, data_blocks);

        // Fake up a header
        HeaderBlock header_block = new HeaderBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
        header_block.setPropertyStart(0);

        // get property table from the document
        PropertyTable properties = new PropertyTable(header_block, data_blocks);
        RootProperty root = properties.getRoot();
        BlockList bl = SmallBlockTableReader.getSmallDocumentBlocks(
            POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, data_blocks, root, 14);
        assertNotNull(bl);
    }
}
@ -1,96 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import junit.framework.TestCase;

import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.filesystem.OPOIFSDocument;
import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.property.RootProperty;

/**
 * Class to test SmallBlockTableWriter functionality
 *
 * @author Marc Johnson
 */
public final class TestSmallBlockTableWriter extends TestCase {

    public void testWritingConstructor() throws IOException {
        List<OPOIFSDocument> documents = new ArrayList<>();

        documents.add(new OPOIFSDocument("doc340", new ByteArrayInputStream(new byte[ 340 ])));
        documents.add(new OPOIFSDocument("doc5000", new ByteArrayInputStream(new byte[ 5000 ])));
        documents.add(new OPOIFSDocument("doc0", new ByteArrayInputStream(new byte[ 0 ])));
        documents.add(new OPOIFSDocument("doc1", new ByteArrayInputStream(new byte[ 1 ])));
        documents.add(new OPOIFSDocument("doc2", new ByteArrayInputStream(new byte[ 2 ])));
        documents.add(new OPOIFSDocument("doc3", new ByteArrayInputStream(new byte[ 3 ])));
        documents.add(new OPOIFSDocument("doc4", new ByteArrayInputStream(new byte[ 4 ])));
        documents.add(new OPOIFSDocument("doc5", new ByteArrayInputStream(new byte[ 5 ])));
        documents.add(new OPOIFSDocument("doc6", new ByteArrayInputStream(new byte[ 6 ])));
        documents.add(new OPOIFSDocument("doc7", new ByteArrayInputStream(new byte[ 7 ])));
        documents.add(new OPOIFSDocument("doc8", new ByteArrayInputStream(new byte[ 8 ])));
        documents.add(new OPOIFSDocument("doc9", new ByteArrayInputStream(new byte[ 9 ])));

        HeaderBlock header = new HeaderBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
        RootProperty root = new PropertyTable(header).getRoot();
        SmallBlockTableWriter sbtw = new SmallBlockTableWriter(
            POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, documents, root);
        BlockAllocationTableWriter bat = sbtw.getSBAT();

        // 15 small blocks: 6 for doc340, 0 for doc5000 (too big), 0
        // for doc0 (no storage needed), 1 each for doc1 through doc9
        assertEquals(15 * 64, root.getSize());

        // 15 small blocks rounds up to 2 big blocks
        assertEquals(2, sbtw.countBlocks());
        int start_block = 1000 + root.getStartBlock();

        sbtw.setStartBlock(start_block);
        assertEquals(start_block, root.getStartBlock());
    }
}
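The block counts asserted by the deleted testWritingConstructor() can be re-derived without POI. The following is a minimal standalone sketch, assuming the usual OLE2 sizes (64-byte small blocks, 512-byte big blocks, a 4096-byte cut-off above which a document is stored in big blocks); the class and helper below are hypothetical and only restate the expected arithmetic, they are not POI API.

// Minimal sketch under the assumptions stated above; not part of the commit.
public class SmallBlockArithmeticSketch {
    private static final int SMALL_BLOCK_SIZE = 64;     // assumed mini-block size
    private static final int BIG_BLOCK_SIZE = 512;      // assumed big-block size
    private static final int MINI_STREAM_CUTOFF = 4096; // assumed "too big" threshold

    // How many 64-byte small blocks a document of the given size occupies.
    static int smallBlocksFor(int documentSize) {
        if (documentSize == 0 || documentSize >= MINI_STREAM_CUTOFF) {
            return 0; // empty docs need no storage; large docs use big blocks
        }
        return (documentSize + SMALL_BLOCK_SIZE - 1) / SMALL_BLOCK_SIZE;
    }

    public static void main(String[] args) {
        int[] sizes = { 340, 5000, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
        int smallBlocks = 0;
        for (int size : sizes) {
            smallBlocks += smallBlocksFor(size);
        }
        // 6 blocks for doc340 plus 1 each for doc1..doc9 = 15
        System.out.println("small blocks: " + smallBlocks);
        // root.getSize() in the test: 15 * 64 = 960 bytes
        System.out.println("mini stream bytes: " + smallBlocks * SMALL_BLOCK_SIZE);
        // 8 small blocks fit in one 512-byte big block, so 15 round up to 2
        int perBigBlock = BIG_BLOCK_SIZE / SMALL_BLOCK_SIZE;
        System.out.println("big blocks: " + (smallBlocks + perBigBlock - 1) / perBigBlock);
    }
}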
@ -1,223 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import junit.framework.TestCase;

import org.apache.poi.poifs.common.POIFSConstants;

/**
 * Class to test SmallDocumentBlock functionality
 *
 * @author Marc Johnson
 */
public final class TestSmallDocumentBlock extends TestCase {
    static final private byte[] _testdata;
    static final private int _testdata_size = 2999;

    static {
        _testdata = new byte[ _testdata_size ];
        for (int j = 0; j < _testdata.length; j++) {
            _testdata[ j ] = ( byte ) j;
        }
    }

    /**
     * Test conversion from DocumentBlocks
     */
    public void testConvert1() throws IOException {
        ByteArrayInputStream stream = new ByteArrayInputStream(_testdata);
        List<DocumentBlock> documents = new ArrayList<>();

        while (true) {
            DocumentBlock block = new DocumentBlock(stream, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);

            documents.add(block);
            if (block.partiallyRead()) {
                break;
            }
        }
        SmallDocumentBlock[] results =
            SmallDocumentBlock.convert(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,
                documents.toArray(new DocumentBlock[ 0 ]), _testdata_size);

        assertEquals("checking correct result size: ",
                     (_testdata_size + 63) / 64, results.length);
        ByteArrayOutputStream output = new ByteArrayOutputStream();

        for (SmallDocumentBlock result : results) {
            result.writeBlocks(output);
        }
        byte[] output_array = output.toByteArray();

        assertEquals("checking correct output size: ", 64 * results.length,
                     output_array.length);
        int index = 0;

        for (; index < _testdata_size; index++) {
            assertEquals("checking output " + index, _testdata[ index ],
                         output_array[ index ]);
        }
        for (; index < output_array.length; index++) {
            assertEquals("checking output " + index, ( byte ) 0xff,
                         output_array[ index ]);
        }
    }

    /**
     * Test conversion from byte array
     */
    public void testConvert2() throws IOException {
        for (int j = 0; j < 320; j++) {
            byte[] array = new byte[ j ];

            for (int k = 0; k < j; k++) {
                array[ k ] = ( byte ) k;
            }
            SmallDocumentBlock[] blocks = SmallDocumentBlock.convert(
                POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, array, 319);

            assertEquals(5, blocks.length);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();

            for (SmallDocumentBlock block : blocks) {
                block.writeBlocks(stream);
            }
            stream.close();
            byte[] output = stream.toByteArray();

            for (int k = 0; k < array.length; k++) {
                assertEquals(String.valueOf(k), array[ k ], output[ k ]);
            }
            for (int k = array.length; k < 320; k++) {
                assertEquals(String.valueOf(k), ( byte ) 0xFF, output[ k ]);
            }
        }
    }

    /**
     * test fill
     */
    public void testFill() throws IOException {
        for (int j = 0; j <= 8; j++) {
            List<SmallDocumentBlock> blocks = new ArrayList<>();

            for (int k = 0; k < j; k++) {
                blocks.add(new SmallDocumentBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS));
            }
            int result = SmallDocumentBlock.fill(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, blocks);

            assertEquals("correct big block count: ", (j + 7) / 8, result);
            assertEquals("correct small block count: ", 8 * result,
                         blocks.size());
            for (int m = j; m < blocks.size(); m++) {
                BlockWritable block = blocks.get(m);
                ByteArrayOutputStream stream = new ByteArrayOutputStream();

                block.writeBlocks(stream);
                byte[] output = stream.toByteArray();

                assertEquals("correct output size (block[ " + m + " ]): ",
                             64, output.length);
                for (int n = 0; n < 64; n++) {
                    assertEquals("correct value (block[ " + m + " ][ " + n
                                 + " ]): ", ( byte ) 0xff, output[ n ]);
                }
            }
        }
    }

    /**
     * test calcSize
     */
    public void testCalcSize() {
        for (int j = 0; j < 10; j++) {
            assertEquals("testing " + j, j * 64,
                         SmallDocumentBlock.calcSize(j));
        }
    }

    /**
     * test extract method
     *
     * @exception IOException
     */
    public void testExtract() throws IOException {
        byte[] data = new byte[ 512 ];
        int offset = 0;

        for (int j = 0; j < 8; j++) {
            for (int k = 0; k < 64; k++) {
                data[ offset++ ] = ( byte ) (k + j);
            }
        }
        RawDataBlock[] blocks = {
            new RawDataBlock(new ByteArrayInputStream(data))
        };
        List<SmallDocumentBlock> output = SmallDocumentBlock.extract(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, blocks);

        offset = 0;
        for (SmallDocumentBlock block : output) {
            byte[] out_data = block.getData();

            assertEquals("testing block at offset " + offset, 64,
                         out_data.length);
            for (byte b : out_data) {
                assertEquals("testing byte at offset " + offset,
                             data[ offset ], b);
                offset++;
            }
        }
    }
}
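The conversion and padding behaviour asserted by testConvert1() and testConvert2() above comes down to rounding data up to whole 64-byte blocks and padding the unused tail with 0xFF. A minimal standalone sketch of that arithmetic follows; the helper names are hypothetical, not POI API.

import java.util.Arrays;

// Restates the rounding and 0xFF padding the deleted tests assert; not part of the commit.
public class SmallDocumentBlockSketch {
    // Number of 64-byte small blocks needed for the given byte count.
    static int blocksNeeded(int byteCount) {
        return (byteCount + 63) / 64;
    }

    // Round data up to whole 64-byte blocks, padding the unused tail with 0xFF.
    static byte[] pad(byte[] data) {
        byte[] out = new byte[blocksNeeded(data.length) * 64];
        Arrays.fill(out, (byte) 0xFF);
        System.arraycopy(data, 0, out, 0, data.length);
        return out;
    }

    public static void main(String[] args) {
        System.out.println(blocksNeeded(2999));        // 47 blocks, as testConvert1() expects
        System.out.println(blocksNeeded(319));         // 5 blocks, as testConvert2() expects
        System.out.println(pad(new byte[300]).length); // 320 bytes = 5 blocks, tail padded with 0xFF
        System.out.println((5 + 7) / 8);               // fill() pads 5 small blocks up to 1 big block (8 slots)
    }
}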
@ -1,67 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.poi.poifs.common.POIFSConstants;

import junit.framework.TestCase;

/**
 * Class to test SmallDocumentBlockList functionality
 *
 * @author Marc Johnson
 */
public final class TestSmallDocumentBlockList extends TestCase {

    public void testConstructor() throws IOException {
        byte[] data = new byte[ 2560 ];

        for (int j = 0; j < 2560; j++) {
            data[ j ] = ( byte ) j;
        }
        ByteArrayInputStream stream = new ByteArrayInputStream(data);
        RawDataBlock[] blocks = new RawDataBlock[ 5 ];

        for (int j = 0; j < 5; j++) {
            blocks[ j ] = new RawDataBlock(stream);
        }
        SmallDocumentBlockList sdbl =
            new SmallDocumentBlockList(SmallDocumentBlock.extract(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, blocks));

        // proof we added the blocks
        for (int j = 0; j < 40; j++) {
            sdbl.remove(j);
        }
        try {
            sdbl.remove(41);
            fail("there should have been an Earth-shattering ka-boom!");
        } catch (IOException ignored) {
            // it better have thrown one!!
        }
    }
}
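Why indices 0 through 39 succeed above while remove(41) must fail: the 2560 bytes of raw data (five 512-byte blocks) split into exactly 40 small blocks of 64 bytes. A one-method sketch of that count, with a hypothetical class name, not POI API:

public class SmallDocumentBlockListSketch {
    public static void main(String[] args) {
        int rawBytes = 5 * 512;          // the five RawDataBlocks built in the test
        int smallBlocks = rawBytes / 64; // 40 small blocks, so valid indices are 0..39
        System.out.println(smallBlocks);
    }
}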