Move the kahadb disk I/O classes under org.apache.activemq.store.kahadb. Drop redundant util classes.

git-svn-id: https://svn.apache.org/repos/asf/activemq/trunk@1406369 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Hiram R. Chirino 2012-11-06 22:04:26 +00:00
parent 715010a1ea
commit 1aab71b2a7
94 changed files with 319 additions and 1473 deletions

View File

@ -1933,7 +1933,7 @@ public class BrokerService implements Service {
if (usage.getTempUsage().getStore() != null) {
maxJournalFileSize = usage.getTempUsage().getStore().getJournalMaxFileLength();
} else {
maxJournalFileSize = org.apache.kahadb.journal.Journal.DEFAULT_MAX_FILE_LENGTH;
maxJournalFileSize = org.apache.activemq.store.kahadb.disk.journal.Journal.DEFAULT_MAX_FILE_LENGTH;
}
if (storeLimit < maxJournalFileSize) {

View File

@ -39,7 +39,7 @@ import org.apache.activemq.usage.UsageListener;
import org.apache.activemq.wireformat.WireFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
/**
* persist pending messages pending message (messages awaiting dispatch to a

View File

@ -19,7 +19,7 @@ package org.apache.activemq.broker.scheduler;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public class JobImpl implements Job {

View File

@ -16,7 +16,7 @@
*/
package org.apache.activemq.broker.scheduler;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public interface JobListener {

View File

@ -23,8 +23,8 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
class JobLocation {

View File

@ -17,7 +17,7 @@
package org.apache.activemq.broker.scheduler;
import java.util.List;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public interface JobScheduler {

View File

@ -18,7 +18,7 @@ package org.apache.activemq.broker.scheduler;
import java.util.Collections;
import java.util.List;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public class JobSchedulerFacade implements JobScheduler {

View File

@ -33,12 +33,12 @@ import org.apache.activemq.util.ServiceStopper;
import org.apache.activemq.util.ServiceSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.index.BTreeIndex;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
class JobSchedulerImpl extends ServiceSupport implements Runnable, JobScheduler {
private static final Logger LOG = LoggerFactory.getLogger(JobSchedulerImpl.class);

View File

@ -19,17 +19,17 @@ package org.apache.activemq.broker.scheduler;
import org.apache.activemq.util.IOHelper;
import org.apache.activemq.util.ServiceStopper;
import org.apache.activemq.util.ServiceSupport;
import org.apache.kahadb.index.BTreeIndex;
import org.apache.kahadb.journal.Journal;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.IntegerMarshaller;
import org.apache.kahadb.util.LockFile;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.util.IntegerMarshaller;
import org.apache.activemq.util.LockFile;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -39,7 +39,7 @@ import org.apache.activemq.util.TypeConversionSupport;
import org.apache.activemq.wireformat.WireFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public class SchedulerBroker extends BrokerFilter implements JobListener {
private static final Logger LOG = LoggerFactory.getLogger(SchedulerBroker.class);

View File

@ -19,7 +19,7 @@ package org.apache.activemq.store;
import org.apache.activemq.broker.AbstractLocker;
import org.apache.activemq.store.kahadb.MessageDatabase;
import org.apache.activemq.util.ServiceStopper;
import org.apache.kahadb.util.LockFile;
import org.apache.activemq.util.LockFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -68,8 +68,8 @@ import org.apache.activemq.util.ThreadPoolUtils;
import org.apache.activemq.wireformat.WireFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Transaction;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
public class KahaDBStore extends MessageDatabase implements PersistenceAdapter {
static final Logger LOG = LoggerFactory.getLogger(KahaDBStore.class);

View File

@ -72,26 +72,26 @@ import org.apache.activemq.util.Callback;
import org.apache.activemq.util.IOHelper;
import org.apache.activemq.util.ServiceStopper;
import org.apache.activemq.util.ServiceSupport;
import org.apache.kahadb.index.BTreeIndex;
import org.apache.kahadb.index.BTreeVisitor;
import org.apache.kahadb.index.ListIndex;
import org.apache.kahadb.journal.DataFile;
import org.apache.kahadb.journal.Journal;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.DataByteArrayInputStream;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.kahadb.util.LocationMarshaller;
import org.apache.kahadb.util.LockFile;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.Marshaller;
import org.apache.kahadb.util.Sequence;
import org.apache.kahadb.util.SequenceSet;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.index.BTreeVisitor;
import org.apache.activemq.store.kahadb.disk.index.ListIndex;
import org.apache.activemq.store.kahadb.disk.journal.DataFile;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.DataByteArrayInputStream;
import org.apache.activemq.util.DataByteArrayOutputStream;
import org.apache.activemq.store.kahadb.disk.util.LocationMarshaller;
import org.apache.activemq.util.LockFile;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.Marshaller;
import org.apache.activemq.store.kahadb.disk.util.Sequence;
import org.apache.activemq.store.kahadb.disk.util.SequenceSet;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -1740,7 +1740,7 @@ public abstract class MessageDatabase extends ServiceSupport implements BrokerSe
// Figure out the next key using the last entry in the destination.
rc.orderIndex.configureLast(tx);
rc.locationIndex.setKeyMarshaller(org.apache.kahadb.util.LocationMarshaller.INSTANCE);
rc.locationIndex.setKeyMarshaller(org.apache.activemq.store.kahadb.disk.util.LocationMarshaller.INSTANCE);
rc.locationIndex.setValueMarshaller(LongMarshaller.INSTANCE);
rc.locationIndex.load(tx);

View File

@ -44,10 +44,10 @@ import org.apache.activemq.store.kahadb.data.KahaEntryType;
import org.apache.activemq.store.kahadb.data.KahaPrepareCommand;
import org.apache.activemq.store.kahadb.data.KahaTraceCommand;
import org.apache.activemq.util.IOHelper;
import org.apache.kahadb.journal.Journal;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.util.DataByteArrayInputStream;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.util.DataByteArrayInputStream;
import org.apache.activemq.util.DataByteArrayOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -57,8 +57,8 @@ import org.apache.activemq.usage.MemoryUsage;
import org.apache.activemq.usage.SystemUsage;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.wireformat.WireFormat;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Transaction;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
public class TempKahaDBStore extends TempMessageDatabase implements PersistenceAdapter {

View File

@ -41,13 +41,12 @@ import org.apache.activemq.store.kahadb.data.KahaSubscriptionCommand;
import org.apache.activemq.util.ByteSequence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.index.BTreeIndex;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.Marshaller;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
public class TempMessageDatabase {

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.IOException;
import java.io.OutputStream;
@ -25,10 +25,10 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.Marshaller;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.Marshaller;
/**
* BTreeIndex represents a Variable Magnitude B+Tree in a Page File.

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.DataInput;
import java.io.DataOutput;
@ -26,10 +26,10 @@ import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Map.Entry;
import org.apache.kahadb.index.BTreeIndex.Prefixer;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex.Prefixer;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
/**

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.util.List;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.DataInput;
import java.io.DataOutput;
@ -22,9 +22,9 @@ import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
/**
* Bin in a HashIndex

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.DataInput;
import java.io.DataOutput;
@ -26,11 +26,11 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.Marshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.Marshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
/**
* BTree implementation

View File

@ -14,14 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.Marshaller;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.Marshaller;
/**
* Simpler than a Map

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.IOException;
import java.lang.ref.WeakReference;
@ -23,11 +23,11 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.kahadb.index.ListNode.ListIterator;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.Marshaller;
import org.apache.activemq.store.kahadb.disk.index.ListNode.ListIterator;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.Marshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.DataInput;
import java.io.DataOutput;
@ -23,12 +23,12 @@ import java.util.Iterator;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.LinkedNode;
import org.apache.kahadb.util.LinkedNodeList;
import org.apache.kahadb.util.Marshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.LinkedNode;
import org.apache.activemq.store.kahadb.disk.util.LinkedNodeList;
import org.apache.activemq.store.kahadb.disk.util.Marshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
/**
* The ListNode class represents a node in the List object graph. It is stored

View File

@ -14,14 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.util.DataByteArrayOutputStream;
/**
* An optimized writer to do batch appends to a data file. This object is thread

View File

@ -14,15 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import org.apache.kahadb.util.IOHelper;
import org.apache.kahadb.util.LinkedNode;
import org.apache.kahadb.util.SequenceSet;
import org.apache.activemq.util.IOHelper;
import org.apache.activemq.store.kahadb.disk.util.LinkedNode;
import org.apache.activemq.store.kahadb.disk.util.SequenceSet;
/**
* DataFile

View File

@ -14,13 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Map;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
/**
* Optimized Store reader and updater. Single threaded and synchronous. Use in

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.IOException;
import java.io.InterruptedIOException;
@ -25,9 +25,9 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.kahadb.util.LinkedNodeList;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.util.DataByteArrayOutputStream;
import org.apache.activemq.store.kahadb.disk.util.LinkedNodeList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -14,9 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
import java.io.IOException;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.File;
import java.io.FilenameFilter;
@ -36,15 +36,15 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import org.apache.kahadb.util.LinkedNode;
import org.apache.activemq.store.kahadb.disk.util.LinkedNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.DataByteArrayInputStream;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.kahadb.util.LinkedNodeList;
import org.apache.kahadb.util.SchedulerTimerTask;
import org.apache.kahadb.util.Sequence;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.DataByteArrayInputStream;
import org.apache.activemq.util.DataByteArrayOutputStream;
import org.apache.activemq.store.kahadb.disk.util.LinkedNodeList;
import org.apache.activemq.store.kahadb.disk.util.SchedulerTimerTask;
import org.apache.activemq.store.kahadb.disk.util.Sequence;
/**
* Manages DataFiles

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.File;
import java.io.IOException;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.File;
import java.io.FilenameFilter;

View File

@ -14,9 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public interface ReplicationTarget {

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.page;
package org.apache.activemq.store.kahadb.disk.page;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.page;
package org.apache.activemq.store.kahadb.disk.page;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@ -42,14 +42,14 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.kahadb.util.IOExceptionSupport;
import org.apache.kahadb.util.IOHelper;
import org.apache.kahadb.util.IntrospectionSupport;
import org.apache.kahadb.util.LFUCache;
import org.apache.kahadb.util.LRUCache;
import org.apache.kahadb.util.Sequence;
import org.apache.kahadb.util.SequenceSet;
import org.apache.activemq.util.DataByteArrayOutputStream;
import org.apache.activemq.util.IOExceptionSupport;
import org.apache.activemq.util.IOHelper;
import org.apache.activemq.util.IntrospectionSupport;
import org.apache.activemq.util.LFUCache;
import org.apache.activemq.util.LRUCache;
import org.apache.activemq.store.kahadb.disk.util.Sequence;
import org.apache.activemq.store.kahadb.disk.util.SequenceSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -14,10 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.page;
package org.apache.activemq.store.kahadb.disk.page;
import org.apache.kahadb.page.PageFile.PageWrite;
import org.apache.kahadb.util.*;
import org.apache.activemq.store.kahadb.disk.page.PageFile.PageWrite;
import org.apache.activemq.store.kahadb.disk.util.*;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.util.DataByteArrayInputStream;
import org.apache.activemq.store.kahadb.disk.util.DataByteArrayOutputStream;
import org.apache.activemq.util.IOHelper;
import java.io.*;
import java.util.Iterator;
@ -432,8 +436,8 @@ public class Transaction implements Iterable<Page> {
}
/**
* @see org.apache.kahadb.page.Transaction#load(org.apache.kahadb.page.Page,
* org.apache.kahadb.util.Marshaller)
* @see org.apache.activemq.store.kahadb.disk.page.Transaction#load(org.apache.activemq.store.kahadb.disk.page.Page,
* org.apache.activemq.store.kahadb.disk.util.Marshaller)
*/
public InputStream openInputStream(final Page p) throws IOException {
@ -702,7 +706,7 @@ public class Transaction implements Iterable<Page> {
protected File getTempFile() {
if (txFile == null) {
txFile = new File(getPageFile().getDirectory(), IOHelper.toFileSystemSafeName("tx-"+ Long.toString(getWriteTransactionId()) + "-" + Long.toString(System.currentTimeMillis()) + ".tmp"));
txFile = new File(getPageFile().getDirectory(), IOHelper.toFileSystemSafeName("tx-" + Long.toString(getWriteTransactionId()) + "-" + Long.toString(System.currentTimeMillis()) + ".tmp"));
}
return txFile;
}

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -15,7 +15,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import org.apache.activemq.util.IntrospectionSupport;
import java.util.ArrayList;
@ -72,7 +74,7 @@ public class CommandLineSupport {
}
String propName = convertOptionToPropertyName(name);
if( !IntrospectionSupport.setProperty(target, propName, value) ) {
if( !IntrospectionSupport.setProperty(target, propName, value) ) {
rc.add(args[i]);
continue;
}

View File

@ -14,7 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import org.apache.activemq.util.ByteSequence;
import java.io.DataInput;
import java.io.IOException;

View File

@ -14,14 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.util.ByteSequence;
import java.io.DataOutput;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UTFDataFormatException;
import org.apache.kahadb.page.PageFile;
/**
* Optimized ByteArrayOutputStream

View File

@ -14,10 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
/**
* Provides a base class for you to extend when you want object to maintain a

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.util.ArrayList;

View File

@ -14,12 +14,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.kahadb.journal.Location;
import org.apache.activemq.store.kahadb.disk.journal.Location;
public class LocationMarshaller implements Marshaller<Location> {
public final static LocationMarshaller INSTANCE = new LocationMarshaller();

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.util.HashMap;
import java.util.Timer;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.util.TimerTask;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
/**
* Represents a range of numbers.

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;
@ -33,7 +33,7 @@ import java.util.NoSuchElementException;
*/
public class SequenceSet extends LinkedNodeList<Sequence> implements Iterable<Long> {
public static class Marshaller implements org.apache.kahadb.util.Marshaller<SequenceSet> {
public static class Marshaller implements org.apache.activemq.store.kahadb.disk.util.Marshaller<SequenceSet> {
public static final Marshaller INSTANCE = new Marshaller();

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
/**
* Convenience base class for Marshaller implementations which do not deepCopy and

View File

@ -26,12 +26,12 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.kahadb.index.ListIndex;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.LocationMarshaller;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.index.ListIndex;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.util.LocationMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -16,7 +16,7 @@
*/
package org.apache.activemq.store.kahadb.plist;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
public class PListEntry {

View File

@ -34,16 +34,16 @@ import org.apache.activemq.thread.Scheduler;
import org.apache.activemq.util.IOHelper;
import org.apache.activemq.util.ServiceStopper;
import org.apache.activemq.util.ServiceSupport;
import org.apache.kahadb.index.BTreeIndex;
import org.apache.kahadb.journal.Journal;
import org.apache.kahadb.journal.Location;
import org.apache.kahadb.page.Page;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.LockFile;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.LockFile;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -71,4 +71,35 @@ public class ByteSequence {
}
}
public int indexOf(ByteSequence needle, int pos) {
int max = length - needle.length;
for (int i = pos; i < max; i++) {
if (matches(needle, i)) {
return i;
}
}
return -1;
}
private boolean matches(ByteSequence needle, int pos) {
for (int i = 0; i < needle.length; i++) {
if( data[offset + pos+ i] != needle.data[needle.offset + i] ) {
return false;
}
}
return true;
}
private byte getByte(int i) {
return data[offset+i];
}
final public int indexOf(byte value, int pos) {
for (int i = pos; i < length; i++) {
if (data[offset + i] == value) {
return i;
}
}
return -1;
}
}

View File

@ -16,16 +16,10 @@
*/
package org.apache.activemq.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
/**
*
@ -107,7 +101,44 @@ public final class IOHelper {
}
return result;
}
public static boolean delete(File top) {
boolean result = true;
Stack<File> files = new Stack<File>();
// Add file to the stack to be processed...
files.push(top);
// Process all files until none remain...
while (!files.isEmpty()) {
File file = files.pop();
if (file.isDirectory()) {
File list[] = file.listFiles();
if (list == null || list.length == 0) {
// The current directory contains no entries...
// delete directory and continue...
result &= file.delete();
} else {
// Add back the directory since it is not empty....
// and when we process it again it will be empty and can be
// deleted safely...
files.push(file);
for (File dirFile : list) {
if (dirFile.isDirectory()) {
// Place the directory on the stack...
files.push(dirFile);
} else {
// This is a simple file, delete it...
result &= dirFile.delete();
}
}
}
} else {
// This is a simple file, delete it...
result &= file.delete();
}
}
return result;
}
public static boolean deleteFile(File fileToDelete) {
if (fileToDelete == null || !fileToDelete.exists()) {
return true;
@ -153,7 +184,7 @@ public final class IOHelper {
}
public static void copyFile(File src, File dest) throws IOException {
copyFile(src,dest,null);
copyFile(src, dest, null);
}
public static void copyFile(File src, File dest, FilenameFilter filter) throws IOException {
@ -234,8 +265,8 @@ public final class IOHelper {
}
static {
MAX_DIR_NAME_LENGTH = Integer.valueOf(System.getProperty("MaximumDirNameLength","200")).intValue();
MAX_FILE_NAME_LENGTH = Integer.valueOf(System.getProperty("MaximumFileNameLength","64")).intValue();
MAX_DIR_NAME_LENGTH = Integer.getInteger("MaximumDirNameLength",200);
MAX_FILE_NAME_LENGTH = Integer.getInteger("MaximumFileNameLength",64);
}

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.util;
import java.util.Collection;
import java.util.HashMap;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.util;
import java.io.File;
import java.io.IOException;

View File

@ -1,100 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.io.IOException;
import java.io.InputStream;
/**
 * Very similar to the java.io.ByteArrayInputStream but this version is not
 * thread safe.
 */
public class ByteArrayInputStream extends InputStream {

    // Backing array; the readable window is [pos, limit). The array is
    // shared with the caller, never copied.
    byte buffer[];
    // Index one past the last readable byte.
    int limit;
    // Index of the next byte to be read.
    int pos;
    // Position restored by reset(); starts at the initial offset.
    int mark;

    /** Reads the entire given array. */
    public ByteArrayInputStream(byte data[]) {
        this(data, 0, data.length);
    }

    /** Reads the window described by the given sequence (data shared, not copied). */
    public ByteArrayInputStream(ByteSequence sequence) {
        this(sequence.getData(), sequence.getOffset(), sequence.getLength());
    }

    /** Reads {@code size} bytes of {@code data} starting at {@code offset}. */
    public ByteArrayInputStream(byte data[], int offset, int size) {
        this.buffer = data;
        this.mark = offset;
        this.pos = offset;
        this.limit = offset + size;
    }

    /** Returns the next byte as an unsigned value (0-255), or -1 at end of stream. */
    public int read() throws IOException {
        if (pos < limit) {
            return buffer[pos++] & 0xff;
        } else {
            return -1;
        }
    }

    public int read(byte[] b) throws IOException {
        return read(b, 0, b.length);
    }

    /**
     * Copies up to {@code len} bytes into {@code b} starting at {@code off}
     * and returns the number copied.
     *
     * NOTE(review): returns -1 whenever the stream is already exhausted, even
     * for len == 0; the java.io.InputStream contract specifies 0 for
     * zero-length reads -- confirm callers tolerate this deviation.
     */
    public int read(byte b[], int off, int len) {
        if (pos < limit) {
            len = Math.min(len, limit - pos);
            if (len > 0) {
                System.arraycopy(buffer, pos, b, off, len);
                pos += len;
            }
            return len;
        } else {
            return -1;
        }
    }

    /**
     * Skips up to {@code len} bytes and returns the number skipped.
     *
     * NOTE(review): returns -1 once the stream is exhausted, whereas the
     * java.io.InputStream contract calls for a non-negative count -- confirm
     * callers expect -1 before relying on it.
     */
    public long skip(long len) throws IOException {
        if (pos < limit) {
            len = Math.min(len, limit - pos);
            if (len > 0) {
                pos += len;
            }
            return len;
        } else {
            return -1;
        }
    }

    /** Number of bytes remaining to be read. */
    public int available() {
        return limit - pos;
    }

    /** Mark/reset is always supported. */
    public boolean markSupported() {
        return true;
    }

    /** Remembers the current position; the read-ahead limit argument is ignored. */
    public void mark(int markpos) {
        mark = pos;
    }

    /** Rewinds to the last marked position (the start offset if mark() was never called). */
    public void reset() {
        pos = mark;
    }
}

View File

@ -1,82 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.io.OutputStream;
/**
 * Unsynchronized analogue of java.io.ByteArrayOutputStream. The collected
 * bytes can be exposed directly as a ByteSequence, which avoids the byte[]
 * copy that toByteArray() performs.
 */
public class ByteArrayOutputStream extends OutputStream {

    byte buffer[];
    int size;

    /** Creates a stream with the default initial capacity (1028 bytes). */
    public ByteArrayOutputStream() {
        this(1028);
    }

    /** Creates a stream whose backing array starts at the given capacity. */
    public ByteArrayOutputStream(int capacity) {
        buffer = new byte[capacity];
    }

    /** Appends a single byte, growing the backing array if required. */
    @Override
    public void write(int b) {
        ensureCapacity(size + 1);
        buffer[size++] = (byte) b;
    }

    /** Appends {@code len} bytes of {@code b} starting at {@code off}. */
    @Override
    public void write(byte b[], int off, int len) {
        ensureCapacity(size + len);
        System.arraycopy(b, off, buffer, size, len);
        size += len;
    }

    /**
     * Grows the backing array (at least doubling it) so that it can hold
     * {@code minimumCapacity} bytes.
     */
    private void ensureCapacity(int minimumCapacity) {
        if (minimumCapacity <= buffer.length) {
            return;
        }
        int doubled = buffer.length << 1;
        byte grown[] = new byte[doubled > minimumCapacity ? doubled : minimumCapacity];
        System.arraycopy(buffer, 0, grown, 0, size);
        buffer = grown;
    }

    /** Discards all collected bytes; the backing array is kept. */
    public void reset() {
        size = 0;
    }

    /**
     * Exposes the collected bytes without copying. The returned sequence
     * shares the backing array, so further writes may be visible through it.
     */
    public ByteSequence toByteSequence() {
        return new ByteSequence(buffer, 0, size);
    }

    /** Returns a fresh copy of the collected bytes. */
    public byte[] toByteArray() {
        byte copy[] = new byte[size];
        System.arraycopy(buffer, 0, copy, 0, size);
        return copy;
    }

    /** Number of bytes written so far. */
    public int size() {
        return size;
    }
}

View File

@ -1,105 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
/**
 * A mutable view onto a region of a byte array: {@code length} bytes of
 * {@code data} starting at {@code offset}. The backing array is shared with
 * the caller rather than copied.
 */
public class ByteSequence {

    public byte[] data;
    public int offset;
    public int length;

    /** Creates an unbound sequence; the public fields must be set before use. */
    public ByteSequence() {
    }

    /** Wraps the whole of the given array. */
    public ByteSequence(byte data[]) {
        this.data = data;
        this.offset = 0;
        this.length = data.length;
    }

    /** Wraps {@code length} bytes of {@code data} starting at {@code offset}. */
    public ByteSequence(byte data[], int offset, int length) {
        this.data = data;
        this.offset = offset;
        this.length = length;
    }

    public byte[] getData() {
        return data;
    }

    public int getLength() {
        return length;
    }

    public int getOffset() {
        return offset;
    }

    public void setData(byte[] data) {
        this.data = data;
    }

    public void setLength(int length) {
        this.length = length;
    }

    public void setOffset(int offset) {
        this.offset = offset;
    }

    /**
     * Replaces the backing array with a copy that is exactly {@code length}
     * bytes long and resets {@code offset} to 0. No-op when the view already
     * spans the whole backing array.
     */
    public void compact() {
        if (length != data.length) {
            byte t[] = new byte[length];
            System.arraycopy(data, offset, t, 0, length);
            data = t;
            offset = 0;
        }
    }

    /**
     * Returns the index (relative to {@code offset}) of the first occurrence
     * of {@code needle} at or after {@code pos}, or -1 if not found.
     */
    public int indexOf(ByteSequence needle, int pos) {
        int max = length - needle.length;
        // Bug fix: use <= so a needle ending exactly at the end of this
        // sequence is found; "i < max" skipped the final candidate position
        // (e.g. a needle equal to the whole sequence was never matched).
        for (int i = pos; i <= max; i++) {
            if (matches(needle, i)) {
                return i;
            }
        }
        return -1;
    }

    /** True when {@code needle} occurs at position {@code pos} of this sequence. */
    private boolean matches(ByteSequence needle, int pos) {
        for (int i = 0; i < needle.length; i++) {
            if (data[offset + pos + i] != needle.data[needle.offset + i]) {
                return false;
            }
        }
        return true;
    }

    /** Byte at position {@code i} relative to {@code offset}. */
    private byte getByte(int i) {
        return data[offset + i];
    }

    /**
     * Returns the index (relative to {@code offset}) of the first occurrence
     * of the byte {@code value} at or after {@code pos}, or -1 if not found.
     */
    final public int indexOf(byte value, int pos) {
        for (int i = pos; i < length; i++) {
            if (data[offset + i] == value) {
                return i;
            }
        }
        return -1;
    }
}

View File

@ -1,95 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
/**
 * Hex-encoding helpers: byte arrays and ints to lowercase hex strings and
 * hex strings back to bytes.
 */
public final class HexSupport {

    /** Lookup of every byte value (0x00-0xff) to its two-digit lowercase hex string. */
    private static final String[] BYTE_TO_HEX = new String[256];
    static {
        for (int i = 0; i < BYTE_TO_HEX.length; i++) {
            BYTE_TO_HEX[i] = String.format("%02x", i);
        }
    }

    /** Bit shifts that walk an int from its most to least significant byte. */
    private static final int[] SHIFTS = {24, 16, 8, 0};

    private HexSupport() {
        // utility class, never instantiated
    }

    /**
     * Decodes a hex string (two characters per byte) into a byte array.
     *
     * @param hex the hex string to decode
     * @return the decoded bytes
     */
    public static byte[] toBytesFromHex(String hex) {
        byte[] decoded = new byte[hex.length() / 2];
        for (int i = 0; i < decoded.length; i++) {
            decoded[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
        }
        return decoded;
    }

    /**
     * Encodes a byte array as a lowercase hex string, two characters per byte.
     *
     * @param bytes the bytes to encode
     * @return the hex representation
     */
    public static String toHexFromBytes(byte[] bytes) {
        StringBuilder encoded = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            encoded.append(BYTE_TO_HEX[b & 0xFF]);
        }
        return encoded.toString();
    }

    /**
     * Encodes an int as lowercase hex, most significant byte first.
     *
     * @param value the value to encode
     * @param trim  when true, leading zero bytes are omitted (0 encodes as "")
     * @return the hex representation
     */
    public static String toHexFromInt(int value, boolean trim) {
        StringBuilder encoded = new StringBuilder(SHIFTS.length * 2);
        boolean trimming = trim;
        for (int shift : SHIFTS) {
            int b = (value >> shift) & 0xFF;
            if (trimming && b == 0) {
                continue;
            }
            trimming = false;
            encoded.append(BYTE_TO_HEX[b]);
        }
        return encoded.toString();
    }
}

View File

@ -1,50 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.io.IOException;
public final class IOExceptionSupport {
private IOExceptionSupport() {
}
public static IOException create(String msg, Throwable cause) {
IOException exception = new IOException(msg);
exception.initCause(cause);
return exception;
}
public static IOException create(String msg, Exception cause) {
IOException exception = new IOException(msg);
exception.initCause(cause);
return exception;
}
public static IOException create(Throwable cause) {
IOException exception = new IOException(cause.getMessage());
exception.initCause(cause);
return exception;
}
public static IOException create(Exception cause) {
IOException exception = new IOException(cause.getMessage());
exception.initCause(cause);
return exception;
}
}

View File

@ -1,221 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Stack;
/**
 * File-system helper routines: default data-directory names, conversion of
 * arbitrary strings to safe file names, recursive deletes, moves and stream
 * copies.
 */
public final class IOHelper {

    // Length caps applied by toFileSystemSafeName; overridable via the
    // system properties read in the static initializer at the bottom.
    protected static final int MAX_DIR_NAME_LENGTH;
    protected static final int MAX_FILE_NAME_LENGTH;
    private static final int DEFAULT_BUFFER_SIZE = 4096;

    private IOHelper() {
        // static utility class, never instantiated
    }

    public static String getDefaultDataDirectory() {
        return getDefaultDirectoryPrefix() + "activemq-data";
    }

    public static String getDefaultStoreDirectory() {
        return getDefaultDirectoryPrefix() + "amqstore";
    }

    /**
     * Allows a system property to be used to overload the default data
     * directory which can be useful for forcing the test cases to use a target/
     * prefix
     */
    public static String getDefaultDirectoryPrefix() {
        try {
            return System.getProperty("org.apache.activemq.default.directory.prefix", "");
        } catch (Exception e) {
            // A SecurityManager may forbid reading the property; no prefix then.
            return "";
        }
    }

    /**
     * Converts any string into a string that is safe to use as a file name.
     * The result will only include ascii characters and numbers, and the "-","_", and "." characters.
     *
     * @param name the raw name
     * @return a directory-safe name (path separators are preserved)
     */
    public static String toFileSystemDirectorySafeName(String name) {
        return toFileSystemSafeName(name, true, MAX_DIR_NAME_LENGTH);
    }

    public static String toFileSystemSafeName(String name) {
        return toFileSystemSafeName(name, false, MAX_FILE_NAME_LENGTH);
    }

    /**
     * Converts any string into a string that is safe to use as a file name.
     * The result will only include ascii characters and numbers, and the "-","_", and "." characters.
     *
     * @param name          the raw name
     * @param dirSeparators when true, '/' and '\' pass through unescaped
     * @param maxFileLength result is truncated (keeping the tail) to this length
     * @return the escaped name; unsafe characters become '#' + hex code
     */
    public static String toFileSystemSafeName(String name, boolean dirSeparators, int maxFileLength) {
        int size = name.length();
        StringBuffer rc = new StringBuffer(size * 2);
        for (int i = 0; i < size; i++) {
            char c = name.charAt(i);
            boolean valid = c >= 'a' && c <= 'z';
            valid = valid || (c >= 'A' && c <= 'Z');
            valid = valid || (c >= '0' && c <= '9');
            valid = valid || (c == '_') || (c == '-') || (c == '.') || (c == '#')
                    || (dirSeparators && ((c == '/') || (c == '\\')));
            if (valid) {
                rc.append(c);
            } else {
                // Encode the character using hex notation
                rc.append('#');
                rc.append(HexSupport.toHexFromInt(c, true));
            }
        }
        String result = rc.toString();
        if (result.length() > maxFileLength) {
            // Keep the tail: the end of a long name is the most distinguishing part.
            result = result.substring(result.length() - maxFileLength, result.length());
        }
        return result;
    }

    /**
     * Recursively deletes a file or directory tree without recursion, using an
     * explicit work stack (a non-empty directory is pushed back and revisited
     * once its children are gone).
     *
     * @return true only if every entry was successfully deleted
     */
    public static boolean delete(File top) {
        boolean result = true;
        Stack<File> files = new Stack<File>();
        // Add file to the stack to be processed...
        files.push(top);
        // Process all files until none remain...
        while (!files.isEmpty()) {
            File file = files.pop();
            if (file.isDirectory()) {
                File list[] = file.listFiles();
                if (list == null || list.length == 0) {
                    // The current directory contains no entries...
                    // delete directory and continue...
                    result &= file.delete();
                } else {
                    // Add back the directory since it is not empty....
                    // and when we process it again it will be empty and can be
                    // deleted safely...
                    files.push(file);
                    for (File dirFile : list) {
                        if (dirFile.isDirectory()) {
                            // Place the directory on the stack...
                            files.push(dirFile);
                        } else {
                            // This is a simple file, delete it...
                            result &= dirFile.delete();
                        }
                    }
                }
            } else {
                // This is a simple file, delete it...
                result &= file.delete();
            }
        }
        return result;
    }

    /**
     * Deletes a file or directory tree via recursion (mutually recursive with
     * deleteChildren). Returns true for a null/absent file.
     */
    private static boolean deleteFile(File fileToDelete) {
        if (fileToDelete == null || !fileToDelete.exists()) {
            return true;
        }
        boolean result = deleteChildren(fileToDelete);
        result &= fileToDelete.delete();
        return result;
    }

    /** Deletes everything inside {@code parent}; false if it is null or absent. */
    private static boolean deleteChildren(File parent) {
        if (parent == null || !parent.exists()) {
            return false;
        }
        boolean result = true;
        if (parent.isDirectory()) {
            File[] files = parent.listFiles();
            if (files == null) {
                // listFiles() returns null on I/O error or when not a directory.
                result = false;
            } else {
                for (int i = 0; i < files.length; i++) {
                    File file = files[i];
                    if (file.getName().equals(".")
                            || file.getName().equals("..")) {
                        continue;
                    }
                    if (file.isDirectory()) {
                        result &= deleteFile(file);
                    } else {
                        result &= file.delete();
                    }
                }
            }
        }
        return result;
    }

    /**
     * Moves {@code src} into {@code targetDirectory} (rename only; does not
     * fall back to copy+delete).
     *
     * @throws IOException if the rename fails
     */
    public static void moveFile(File src, File targetDirectory) throws IOException {
        if (!src.renameTo(new File(targetDirectory, src.getName()))) {
            throw new IOException("Failed to move " + src + " to " + targetDirectory);
        }
    }

    /**
     * Copies {@code src} to {@code dest}. Both streams are always closed,
     * even if the copy fails part way through.
     */
    public static void copyFile(File src, File dest) throws IOException {
        FileInputStream fileSrc = new FileInputStream(src);
        FileOutputStream fileDest;
        try {
            fileDest = new FileOutputStream(dest);
        } catch (IOException e) {
            // Bug fix: previously the input stream leaked when the output
            // file could not be opened.
            fileSrc.close();
            throw e;
        }
        copyInputStream(fileSrc, fileDest);
    }

    /**
     * Copies {@code in} to {@code out} and closes both streams.
     *
     * Bug fix: streams are now closed in a finally block, so they no longer
     * leak when a read or write throws mid-copy.
     */
    public static void copyInputStream(InputStream in, OutputStream out) throws IOException {
        try {
            byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
            int len = in.read(buffer);
            while (len >= 0) {
                out.write(buffer, 0, len);
                len = in.read(buffer);
            }
        } finally {
            // Close both even if one close throws.
            try {
                in.close();
            } finally {
                out.close();
            }
        }
    }

    static {
        MAX_DIR_NAME_LENGTH = Integer.getInteger("MaximumDirNameLength", 200);
        MAX_FILE_NAME_LENGTH = Integer.getInteger("MaximumFileNameLength", 64);
    }

    /**
     * Ensures {@code dir} exists as a directory, creating parents as needed.
     *
     * @throws IOException if a regular file occupies the name or creation fails
     */
    public static void mkdirs(File dir) throws IOException {
        if (dir.exists()) {
            if (!dir.isDirectory()) {
                throw new IOException("Failed to create directory '" + dir + "', regular file already existed with that name");
            }
        } else {
            if (!dir.mkdirs()) {
                throw new IOException("Failed to create directory '" + dir + "'");
            }
        }
    }
}

View File

@ -1,300 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorManager;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
/**
 * Reflection helpers for reading and writing JavaBean-style properties
 * (getter/setter pairs) from/to String-keyed maps, plus a reflective
 * toString() builder. Uses java.beans PropertyEditors for String
 * conversion, so only editor-supported types (and URI) are handled.
 */
public final class IntrospectionSupport {

    private IntrospectionSupport() {
        // static utility class, never instantiated
    }

    /**
     * Copies every readable property of {@code target} (no-arg is*/get*
     * methods whose type has a PropertyEditor, or URI) into {@code props}
     * as Strings, keyed by optionPrefix + decapitalized property name.
     *
     * @return true if at least one property was added
     * @throws IllegalArgumentException if target or props is null
     */
    public static boolean getProperties(Object target, Map props, String optionPrefix) {
        boolean rc = false;
        if (target == null) {
            throw new IllegalArgumentException("target was null.");
        }
        if (props == null) {
            throw new IllegalArgumentException("props was null.");
        }
        if (optionPrefix == null) {
            optionPrefix = "";
        }
        Class clazz = target.getClass();
        Method[] methods = clazz.getMethods();
        for (int i = 0; i < methods.length; i++) {
            Method method = methods[i];
            String name = method.getName();
            Class type = method.getReturnType();
            Class params[] = method.getParameterTypes();
            if ((name.startsWith("is") || name.startsWith("get")) && params.length == 0 && type != null && isSettableType(type)) {
                try {
                    Object value = method.invoke(target, new Object[] {});
                    if (value == null) {
                        continue;
                    }
                    String strValue = convertToString(value, type);
                    if (strValue == null) {
                        continue;
                    }
                    // Strip the get/is prefix and decapitalize: getFoo -> foo, isBar -> bar.
                    if (name.startsWith("get")) {
                        name = name.substring(3, 4).toLowerCase()
                            + name.substring(4);
                    } else {
                        name = name.substring(2, 3).toLowerCase()
                            + name.substring(3);
                    }
                    props.put(optionPrefix + name, strValue);
                    rc = true;
                } catch (Throwable ignore) {
                    // Best effort: a getter that throws is reported to stderr and skipped.
                    ignore.printStackTrace();
                }
            }
        }
        return rc;
    }

    /**
     * Applies every entry of {@code props} whose key starts with
     * {@code optionPrefix} to the matching setter on {@code target};
     * successfully applied entries are removed from the map.
     *
     * @return true if at least one property was set
     * @throws IllegalArgumentException if target or props is null
     */
    public static boolean setProperties(Object target, Map props, String optionPrefix) {
        boolean rc = false;
        if (target == null) {
            throw new IllegalArgumentException("target was null.");
        }
        if (props == null) {
            throw new IllegalArgumentException("props was null.");
        }
        for (Iterator<String> iter = props.keySet().iterator(); iter.hasNext();) {
            String name = iter.next();
            if (name.startsWith(optionPrefix)) {
                Object value = props.get(name);
                name = name.substring(optionPrefix.length());
                if (setProperty(target, name, value)) {
                    // Remove via the iterator so the live map can be modified safely.
                    iter.remove();
                    rc = true;
                }
            }
        }
        return rc;
    }

    /**
     * Removes every entry of {@code props} whose key starts with
     * {@code optionPrefix} and returns them in a new map with the prefix
     * stripped from the keys.
     *
     * @throws IllegalArgumentException if props is null
     */
    public static Map<String, Object> extractProperties(Map props, String optionPrefix) {
        if (props == null) {
            throw new IllegalArgumentException("props was null.");
        }
        HashMap<String, Object> rc = new HashMap<String, Object>(props.size());
        for (Iterator iter = props.keySet().iterator(); iter.hasNext();) {
            String name = (String)iter.next();
            if (name.startsWith(optionPrefix)) {
                Object value = props.get(name);
                name = name.substring(optionPrefix.length());
                rc.put(name, value);
                iter.remove();
            }
        }
        return rc;
    }

    /**
     * Applies every entry of {@code props} to the matching setter on
     * {@code target}; successfully applied entries are removed from the map.
     *
     * @return true if at least one property was set
     * @throws IllegalArgumentException if target or props is null
     */
    public static boolean setProperties(Object target, Map props) {
        boolean rc = false;
        if (target == null) {
            throw new IllegalArgumentException("target was null.");
        }
        if (props == null) {
            throw new IllegalArgumentException("props was null.");
        }
        for (Iterator iter = props.entrySet().iterator(); iter.hasNext();) {
            Map.Entry entry = (Entry)iter.next();
            if (setProperty(target, (String)entry.getKey(), entry.getValue())) {
                iter.remove();
                rc = true;
            }
        }
        return rc;
    }

    /**
     * Invokes set&lt;Name&gt; on {@code target}, converting {@code value} via a
     * PropertyEditor (or to URI) when the types differ.
     *
     * @return true on success; false if no setter exists or anything throws
     *         (all failures are swallowed by design)
     */
    public static boolean setProperty(Object target, String name, Object value) {
        try {
            Class clazz = target.getClass();
            Method setter = findSetterMethod(clazz, name);
            if (setter == null) {
                return false;
            }
            // If the type is null or it matches the needed type, just use the
            // value directly
            if (value == null || value.getClass() == setter.getParameterTypes()[0]) {
                setter.invoke(target, new Object[] {value});
            } else {
                // We need to convert it
                setter.invoke(target, new Object[] {convert(value, setter.getParameterTypes()[0])});
            }
            return true;
        } catch (Throwable ignore) {
            // Deliberate best-effort contract: any failure just means "not set".
            return false;
        }
    }

    /**
     * Converts {@code value} (via its toString) to {@code type} using a
     * PropertyEditor, or to a URI; null if no conversion is known.
     */
    private static Object convert(Object value, Class type) throws URISyntaxException {
        PropertyEditor editor = PropertyEditorManager.findEditor(type);
        if (editor != null) {
            editor.setAsText(value.toString());
            return editor.getValue();
        }
        if (type == URI.class) {
            return new URI(value.toString());
        }
        return null;
    }

    /**
     * Converts {@code value} of {@code type} to its String form using a
     * PropertyEditor, or URI.toString(); null if no conversion is known.
     */
    private static String convertToString(Object value, Class type) throws URISyntaxException {
        PropertyEditor editor = PropertyEditorManager.findEditor(type);
        if (editor != null) {
            editor.setValue(value);
            return editor.getAsText();
        }
        if (type == URI.class) {
            return ((URI)value).toString();
        }
        return null;
    }

    /**
     * Finds the first public single-argument method named set&lt;Name&gt; on
     * {@code clazz}, or null. Parameter type is not checked here; conversion
     * is attempted later by setProperty.
     */
    private static Method findSetterMethod(Class clazz, String name) {
        // Build the method name.
        name = "set" + name.substring(0, 1).toUpperCase() + name.substring(1);
        Method[] methods = clazz.getMethods();
        for (int i = 0; i < methods.length; i++) {
            Method method = methods[i];
            Class params[] = method.getParameterTypes();
            if (method.getName().equals(name) && params.length == 1 ) {
                return method;
            }
        }
        return null;
    }

    /**
     * True when a value of {@code clazz} can be converted to/from String:
     * it has a PropertyEditor, or is URI or Boolean.
     */
    private static boolean isSettableType(Class clazz) {
        if (PropertyEditorManager.findEditor(clazz) != null) {
            return true;
        }
        if (clazz == URI.class) {
            return true;
        }
        if (clazz == Boolean.class) {
            return true;
        }
        return false;
    }

    /** Reflective toString over every field up to (but excluding) Object. */
    public static String toString(Object target) {
        return toString(target, Object.class);
    }

    /**
     * Builds "SimpleName {field = value, ...}" from the accessible fields of
     * {@code target}'s class hierarchy, stopping before {@code stopClass}.
     */
    public static String toString(Object target, Class stopClass) {
        LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
        addFields(target, target.getClass(), stopClass, map);
        StringBuffer buffer = new StringBuffer(simpleName(target.getClass()));
        buffer.append(" {");
        Set entrySet = map.entrySet();
        boolean first = true;
        for (Iterator iter = entrySet.iterator(); iter.hasNext();) {
            Map.Entry entry = (Map.Entry)iter.next();
            if (first) {
                first = false;
            } else {
                buffer.append(", ");
            }
            buffer.append(entry.getKey());
            buffer.append(" = ");
            appendToString(buffer, entry.getValue());
        }
        buffer.append("}");
        return buffer.toString();
    }

    /** Extension point: subclass-free hook for customizing value rendering. */
    protected static void appendToString(StringBuffer buffer, Object value) {
        buffer.append(value);
    }

    /** Class name without its package prefix. */
    public static String simpleName(Class clazz) {
        String name = clazz.getName();
        int p = name.lastIndexOf(".");
        if (p >= 0) {
            name = name.substring(p + 1);
        }
        return name;
    }

    /**
     * Collects non-static, non-transient, non-private field values of
     * {@code target} into {@code map}, walking from stopClass's subclass down
     * to startClass (superclass fields first). Arrays are rendered as Lists
     * when possible.
     */
    private static void addFields(Object target, Class startClass, Class<Object> stopClass, LinkedHashMap<String, Object> map) {
        if (startClass != stopClass) {
            // Recurse first so superclass fields appear before subclass fields.
            addFields(target, startClass.getSuperclass(), stopClass, map);
        }
        Field[] fields = startClass.getDeclaredFields();
        for (int i = 0; i < fields.length; i++) {
            Field field = fields[i];
            if (Modifier.isStatic(field.getModifiers()) || Modifier.isTransient(field.getModifiers())
                || Modifier.isPrivate(field.getModifiers())) {
                continue;
            }
            try {
                field.setAccessible(true);
                Object o = field.get(target);
                if (o != null && o.getClass().isArray()) {
                    try {
                        // Render object arrays via List.toString; primitive arrays
                        // fail the cast and keep their default rendering.
                        o = Arrays.asList((Object[])o);
                    } catch (Throwable e) {
                    }
                }
                map.put(field.getName(), o);
            } catch (Throwable e) {
                // Best effort: an unreadable field is reported to stderr and skipped.
                e.printStackTrace();
            }
        }
    }
}

View File

@ -1,85 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* A Simple LRU Cache
*
*
* @param <K>
* @param <V>
*/
/**
 * A simple LRU cache based on an access-ordered {@link LinkedHashMap}.
 * The eldest entry is evicted whenever the size exceeds the configured
 * maximum.
 *
 * @param <K> the key type
 * @param <V> the value type
 */
public class LRUCache<K, V> extends LinkedHashMap<K, V> {
    private static final long serialVersionUID = -342098639681884413L;
    // Eviction threshold; entries beyond this count are discarded eldest-first.
    protected int maxCacheSize = 10000;

    /**
     * Default constructor for an LRU Cache. The default maximum capacity is 10000.
     */
    public LRUCache() {
        this(0, 10000, 0.75f, true);
    }

    /**
     * Constructs an LRUCache with a maximum capacity.
     *
     * @param maximumCacheSize the maximum number of entries retained
     */
    public LRUCache(int maximumCacheSize) {
        this(0, maximumCacheSize, 0.75f, true);
    }

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified
     * initial capacity, maximumCacheSize, load factor and ordering mode.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the maximum number of entries retained
     * @param loadFactor the load factor.
     * @param accessOrder the ordering mode - <tt>true</tt> for access-order,
     *                <tt>false</tt> for insertion-order.
     * @throws IllegalArgumentException if the initial capacity is negative or
     *                the load factor is non-positive.
     */
    public LRUCache(int initialCapacity, int maximumCacheSize, float loadFactor, boolean accessOrder) {
        super(initialCapacity, loadFactor, accessOrder);
        this.maxCacheSize = maximumCacheSize;
    }

    /**
     * @return Returns the maxCacheSize.
     */
    public int getMaxCacheSize() {
        return maxCacheSize;
    }

    /**
     * @param maxCacheSize The maxCacheSize to set.
     */
    public void setMaxCacheSize(int maxCacheSize) {
        this.maxCacheSize = maxCacheSize;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Evict once the map has grown past the configured bound.
        return size() > maxCacheSize;
    }
}

View File

@ -28,8 +28,8 @@ import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.MessageId;
import org.apache.activemq.store.kahadb.plist.PList;
import org.apache.activemq.usage.SystemUsage;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.util.ByteSequence;
import org.junit.After;
import org.junit.Test;
import org.slf4j.Logger;

View File

@ -18,7 +18,7 @@ package org.apache.activemq.broker.scheduler;
import junit.framework.TestCase;
import org.apache.activemq.util.IOHelper;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
import java.io.File;
import java.util.ArrayList;

View File

@ -26,7 +26,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.util.IOHelper;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

View File

@ -33,12 +33,10 @@ import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ConnectionControl;
import org.apache.activemq.kaha.impl.async.DataFileAppenderTest;
import org.apache.kahadb.journal.FileAppender;
import org.apache.kahadb.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.FileAppender;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -21,7 +21,6 @@ import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.RecoveryBrokerTest;
import org.apache.activemq.broker.StubConnection;
import org.apache.activemq.command.*;
import org.apache.kahadb.page.PageFile;
import java.io.File;
import java.io.RandomAccessFile;

View File

@ -21,14 +21,13 @@ import java.io.IOException;
import junit.framework.TestCase;
import org.apache.activemq.protobuf.Buffer;
import org.apache.kahadb.journal.Location;
import org.apache.activemq.store.kahadb.data.KahaAddMessageCommand;
import org.apache.activemq.store.kahadb.data.KahaDestination;
import org.apache.activemq.store.kahadb.data.KahaEntryType;
import org.apache.activemq.store.kahadb.data.KahaDestination.DestinationType;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.DataByteArrayInputStream;
import org.apache.kahadb.util.DataByteArrayOutputStream;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.DataByteArrayInputStream;
import org.apache.activemq.util.DataByteArrayOutputStream;
public class PBMesssagesTest extends TestCase {

View File

@ -14,14 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.IOException;
import java.text.NumberFormat;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
public class BTreeIndexBenchMark extends IndexBenchmark {

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@ -32,11 +32,11 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -14,11 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
public class HashIndexBenchMark extends IndexBenchmark {

View File

@ -14,12 +14,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import org.apache.kahadb.index.HashIndex;
import org.apache.kahadb.index.Index;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
public class HashIndexTest extends IndexTestSupport {

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.File;
import java.io.IOException;
@ -24,9 +24,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import junit.framework.TestCase;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.IOHelper;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.IOHelper;
/**
* @author chirino

View File

@ -14,16 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.File;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.page.Transaction;
import org.apache.kahadb.util.IOHelper;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.util.IOHelper;
/**
* Test a HashIndex

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.index;
package org.apache.activemq.store.kahadb.disk.index;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@ -30,12 +30,12 @@ import java.util.LinkedList;
import java.util.Map;
import java.util.Random;
import org.apache.kahadb.page.PageFile;
import org.apache.kahadb.util.LongMarshaller;
import org.apache.kahadb.util.Sequence;
import org.apache.kahadb.util.SequenceSet;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.kahadb.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.util.LongMarshaller;
import org.apache.activemq.store.kahadb.disk.util.Sequence;
import org.apache.activemq.store.kahadb.disk.util.SequenceSet;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -14,16 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.journal;
package org.apache.activemq.store.kahadb.disk.journal;
import java.io.File;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
import org.apache.kahadb.journal.Journal;
import org.apache.kahadb.util.ByteSequence;
import org.apache.kahadb.util.IOHelper;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.IOHelper;
public class JournalTest extends TestCase {
protected static final int DEFAULT_MAX_BATCH_SIZE = 1024 * 1024 * 4;

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.page;
package org.apache.activemq.store.kahadb.disk.page;
import java.io.DataInputStream;
import java.io.DataOutputStream;
@ -24,7 +24,7 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashSet;
import org.apache.kahadb.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import junit.framework.TestCase;

View File

@ -15,7 +15,7 @@
* limitations under the License.
*/
package org.apache.kahadb.util;
package org.apache.activemq.store.kahadb.disk.util;
import static org.junit.Assert.*;

View File

@ -32,7 +32,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.util.IOHelper;
import org.apache.kahadb.util.ByteSequence;
import org.apache.activemq.util.ByteSequence;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

View File

@ -17,11 +17,9 @@
package org.apache.activemq.usecases;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.HashSet;
import java.util.Vector;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
@ -46,8 +44,8 @@ import org.apache.activemq.command.MessageId;
import org.apache.activemq.store.jdbc.JDBCPersistenceAdapter;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import org.apache.activemq.util.Wait;
import org.apache.kahadb.journal.Journal;
import org.apache.kahadb.page.PageFile;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -1,76 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import junit.framework.TestCase;
public class DataByteArrayInputStreamTest extends TestCase {

    /**
     * Round-trips characters through writeUTF/readUTF to guard against
     * multi-byte UTF-8 corruption.
     * https://issues.apache.org/activemq/browse/AMQ-1911
     */
    public void testNonAscii() throws Exception {
        doMarshallUnMarshallValidation("meißen");

        // StringBuilder instead of String += to avoid O(n^2) accumulation
        // over the full defined-character range.
        StringBuilder accumulator = new StringBuilder();
        int test = 0; // int to get Supplementary chars
        while (Character.isDefined(test)) {
            String toTest = String.valueOf((char) test);
            accumulator.append(toTest);
            doMarshallUnMarshallValidation(toTest);
            test++;
        }

        int massiveThreeByteCharValue = 0x0FFF;
        String toTest = String.valueOf((char) massiveThreeByteCharValue);
        accumulator.append(toTest);
        doMarshallUnMarshallValidation(String.valueOf((char) massiveThreeByteCharValue));

        // Altogether
        doMarshallUnMarshallValidation(accumulator.toString());

        // the three byte values
        char t = '\u0800';
        final char max = '\uffff';
        // Mirrors the original: the first loop iteration appends t again, so
        // the accumulator starts with the char duplicated.
        accumulator.setLength(0);
        accumulator.append(t);
        while (t < max) {
            String val = String.valueOf(t);
            accumulator.append(val);
            doMarshallUnMarshallValidation(val);
            t++;
        }

        // Altogether so long as it is not too big
        while (accumulator.length() > 20000) {
            accumulator.delete(0, 20000);
        }
        doMarshallUnMarshallValidation(accumulator.toString());
    }

    /**
     * Writes the value with writeUTF, reads it back with readUTF and asserts
     * the strings are equal.
     */
    void doMarshallUnMarshallValidation(String value) throws Exception {
        DataByteArrayOutputStream out = new DataByteArrayOutputStream();
        out.writeBoolean(true);
        out.writeUTF(value);
        out.close();
        DataByteArrayInputStream in = new DataByteArrayInputStream(out.getData());
        in.readBoolean();
        String readBack = in.readUTF();
        assertEquals(value, readBack);
    }
}

View File

@ -1,100 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kahadb.util;
import java.io.IOException;
import junit.framework.TestCase;
public class DataByteArrayOutputStreamTest extends TestCase {

    /**
     * Exercises every write method across a buffer boundary: the stream is
     * filled exactly to capacity before each call, so each write must grow
     * the internal buffer. An ArrayIndexOutOfBoundsException here means the
     * resize logic failed.
     *
     * @throws IOException
     */
    public void testResize() throws IOException {
        int initSize = 64;
        DataByteArrayOutputStream stream = new DataByteArrayOutputStream();

        fillOut(stream, initSize);
        stream.writeBoolean(true);          // must resize
        fillOut(stream, initSize);
        stream.writeByte(1);                // must resize
        fillOut(stream, initSize);
        stream.writeBytes("test");          // must resize
        fillOut(stream, initSize);
        stream.writeChar('C');              // must resize
        fillOut(stream, initSize);
        stream.writeChars("test");          // must resize
        fillOut(stream, initSize);
        stream.writeDouble(3.1416);         // must resize
        fillOut(stream, initSize);
        stream.writeFloat((float) 3.1416);  // must resize
        fillOut(stream, initSize);
        stream.writeInt(12345);             // must resize
        fillOut(stream, initSize);
        stream.writeLong(12345);            // must resize
        fillOut(stream, initSize);
        stream.writeShort(1234);            // must resize
        fillOut(stream, initSize);
        stream.writeUTF("test");            // must resize
        fillOut(stream, initSize);
        stream.write(1234);                 // must resize
        fillOut(stream, initSize);
        stream.write(new byte[10], 5, 5);   // must resize
        fillOut(stream, initSize);
        stream.write(new byte[10]);         // must resize
    }

    /**
     * Restarts the stream at the given size and fills it exactly to capacity,
     * so the very next write has to trigger a resize.
     *
     * @param out the stream under test
     * @param size the capacity to restart with and fill
     * @throws IOException
     */
    public void fillOut(DataByteArrayOutputStream out, int size) throws IOException {
        out.restart(size);
        out.write(new byte[size]);
    }
}

View File

@ -35,7 +35,6 @@ import concurrent.CountDownLatch
import javax.management.ObjectName
import org.apache.activemq.broker.jmx.AnnotatedMBean
import org.apache.activemq.util._
import org.apache.kahadb.util.LockFile
import org.apache.activemq.leveldb.util.{RetrySupport, FileSupport, Log}
object LevelDBStore extends Log {