From 15953786d19e25ec05fb103d6778761edb9932d0 Mon Sep 17 00:00:00 2001 From: Gary Tully Date: Tue, 13 Sep 2011 15:01:37 +0000 Subject: [PATCH] https://issues.apache.org/jira/browse/AMQ-2922 - rework, introduce new store 'mKahaDB' that contains multiple filtered kahadb persistence adapters, destinations match a store using destination wildcards in the same way as policy entries. Transactions that span multiple stores use a local xa variant to ensure consistency git-svn-id: https://svn.apache.org/repos/asf/activemq/trunk@1170201 13f79535-47bb-0310-9956-ffa450edef68 --- .../activemq/filter/AnyDestination.java | 58 ++ .../store/kahadb/DefaultJournalManager.java | 105 --- .../kahadb/DestinationJournalManager.java | 239 ------- .../FilteredKahaDBPersistenceAdapter.java | 40 ++ .../activemq/store/kahadb/JournalManager.java | 61 -- .../kahadb/KahaDBPersistenceAdapter.java | 142 ++-- .../activemq/store/kahadb/KahaDBStore.java | 272 ++++---- .../store/kahadb/KahaDBTransactionStore.java | 79 +-- .../store/kahadb/MessageDatabase.java | 610 +++++++++--------- .../kahadb/MultiKahaDBPersistenceAdapter.java | 295 +++++++++ .../kahadb/MultiKahaDBTransactionStore.java | 419 ++++++++++++ .../store/kahadb/TransactionIdConversion.java | 77 +++ .../kahadb/TransactionIdTransformer.java | 8 + .../activemq/broker/XARecoveryBrokerTest.java | 123 ++-- .../broker/mKahaDBXARecoveryBrokerTest.java | 78 +++ .../org/apache/activemq/bugs/AMQ2736Test.java | 4 +- .../org/apache/activemq/bugs/AMQ2982Test.java | 9 +- .../org/apache/activemq/bugs/AMQ2983Test.java | 8 +- .../activemq/perf/SimpleDurableTopicTest.java | 4 +- .../store/StorePerDestinationTest.java | 282 ++++++++ .../DurableSubscriptionOfflineTest.java | 215 +++--- .../org/apache/kahadb/journal/Journal.java | 2 +- 22 files changed, 2017 insertions(+), 1113 deletions(-) create mode 100644 activemq-core/src/main/java/org/apache/activemq/filter/AnyDestination.java delete mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/DefaultJournalManager.java delete mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/DestinationJournalManager.java create mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/FilteredKahaDBPersistenceAdapter.java delete mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/JournalManager.java create mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBPersistenceAdapter.java create mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBTransactionStore.java create mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdConversion.java create mode 100644 activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdTransformer.java create mode 100644 activemq-core/src/test/java/org/apache/activemq/broker/mKahaDBXARecoveryBrokerTest.java create mode 100644 activemq-core/src/test/java/org/apache/activemq/store/StorePerDestinationTest.java diff --git a/activemq-core/src/main/java/org/apache/activemq/filter/AnyDestination.java b/activemq-core/src/main/java/org/apache/activemq/filter/AnyDestination.java new file mode 100644 index 0000000000..f6c3653a8b --- /dev/null +++ b/activemq-core/src/main/java/org/apache/activemq/filter/AnyDestination.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.activemq.filter; + +import java.lang.IllegalStateException; +import javax.jms.*; +import org.apache.activemq.command.ActiveMQDestination; + +/* + * allow match to any set of composite destinations, both queues and topics + */ +public class AnyDestination extends ActiveMQDestination { + + public AnyDestination(ActiveMQDestination[] destinations) { + super(destinations); + // ensure we are small when it comes to comparison in DestinationMap + physicalName = "0"; + } + + @Override + protected String getQualifiedPrefix() { + return "Any://"; + } + + @Override + public byte getDestinationType() { + return ActiveMQDestination.QUEUE_TYPE & ActiveMQDestination.TOPIC_TYPE; + } + + @Override + public byte getDataStructureType() { + throw new IllegalStateException("not for marshalling"); + } + + @Override + public boolean isQueue() { + return true; + } + + @Override + public boolean isTopic() { + return true; + } +} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/DefaultJournalManager.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/DefaultJournalManager.java deleted file mode 100644 index bfb08c5802..0000000000 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/DefaultJournalManager.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.activemq.store.kahadb; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicLong; - -import org.apache.activemq.command.ActiveMQDestination; -import org.apache.kahadb.journal.DataFile; -import org.apache.kahadb.journal.Journal; - - -public class DefaultJournalManager implements JournalManager { - - private final Journal journal; - private final List journals; - - public DefaultJournalManager() { - this.journal = new Journal(); - List list = new ArrayList(1); - list.add(this.journal); - this.journals = Collections.unmodifiableList(list); - } - - public void start() throws IOException { - journal.start(); - } - - public void close() throws IOException { - journal.close(); - } - - public Journal getJournal(ActiveMQDestination destination) { - return journal; - } - - public void setDirectory(File directory) { - journal.setDirectory(directory); - } - - public void setMaxFileLength(int maxFileLength) { - journal.setMaxFileLength(maxFileLength); - } - - public void setCheckForCorruptionOnStartup(boolean checkForCorruptJournalFiles) { - journal.setCheckForCorruptionOnStartup(checkForCorruptJournalFiles); - } - - public void setChecksum(boolean checksum) { - journal.setChecksum(checksum); - } - - public void setWriteBatchSize(int batchSize) { - journal.setWriteBatchSize(batchSize); - } - - public void setArchiveDataLogs(boolean archiveDataLogs) { - journal.setArchiveDataLogs(archiveDataLogs); - } - - public void setStoreSize(AtomicLong storeSize) { - journal.setSizeAccumulator(storeSize); - } - - public void setDirectoryArchive(File directoryArchive) { - journal.setDirectoryArchive(directoryArchive); - } - - public void delete() throws IOException { - journal.delete(); - } - - public Map getFileMap() { - return journal.getFileMap(); - } - - public Collection getJournals() { - return journals; - } - - public Collection getJournals(Set set) { - return journals; - } -} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/DestinationJournalManager.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/DestinationJournalManager.java deleted file mode 100644 index bf59bdcc6d..0000000000 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/DestinationJournalManager.java +++ /dev/null @@ -1,239 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.activemq.store.kahadb; - -import java.io.File; -import java.io.FilenameFilter; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; - -import org.apache.activemq.command.ActiveMQDestination; -import org.apache.activemq.command.ActiveMQQueue; -import org.apache.activemq.command.ActiveMQTopic; -import org.apache.activemq.util.IOHelper; -import org.apache.kahadb.journal.DataFile; -import org.apache.kahadb.journal.Journal; - -public class DestinationJournalManager implements JournalManager { - private static final String PREPEND = "JournalDest-"; - private static final String QUEUE_PREPEND = PREPEND + "Queue-"; - private static final String TOPIC_PREPEND = PREPEND + "Topic-"; - private AtomicBoolean started = new AtomicBoolean(); - private final Map journalMap = new ConcurrentHashMap(); - private File directory = new File("KahaDB"); - private File directoryArchive; - private int maxFileLength = Journal.DEFAULT_MAX_FILE_LENGTH; - private boolean checkForCorruptionOnStartup; - private boolean checksum = false; - private int writeBatchSize = Journal.DEFAULT_MAX_WRITE_BATCH_SIZE; - private boolean archiveDataLogs; - private AtomicLong storeSize = new AtomicLong(0); - - - public AtomicBoolean getStarted() { - return started; - } - - public void setStarted(AtomicBoolean started) { - this.started = started; - } - - public File getDirectory() { - return directory; - } - - public void setDirectory(File directory) { - this.directory = directory; - } - - public File getDirectoryArchive() { - return directoryArchive; - } - - public void setDirectoryArchive(File directoryArchive) { - this.directoryArchive = directoryArchive; - } - - public int getMaxFileLength() { - return maxFileLength; - } - - public void setMaxFileLength(int maxFileLength) { - this.maxFileLength = maxFileLength; - } - - public boolean isCheckForCorruptionOnStartup() { - return checkForCorruptionOnStartup; - } - - public void setCheckForCorruptionOnStartup(boolean checkForCorruptionOnStartup) { - this.checkForCorruptionOnStartup = checkForCorruptionOnStartup; - } - - public boolean isChecksum() { - return checksum; - } - - public void setChecksum(boolean checksum) { - this.checksum = checksum; - } - - public int getWriteBatchSize() { - return writeBatchSize; - } - - public void setWriteBatchSize(int writeBatchSize) { - this.writeBatchSize = writeBatchSize; - } - - public boolean isArchiveDataLogs() { - return archiveDataLogs; - } - - public void setArchiveDataLogs(boolean archiveDataLogs) { - this.archiveDataLogs = archiveDataLogs; - } - - public AtomicLong getStoreSize() { - return storeSize; - } - - public void setStoreSize(AtomicLong storeSize) { - this.storeSize = storeSize; - } - - - public void start() throws IOException { - if (started.compareAndSet(false, true)) { - File[] files = getDirectory().listFiles(new FilenameFilter() { - public boolean accept(File file, String s) { - if (file.isDirectory() && s != null && s.startsWith(PREPEND)) { - return true; - } - return false; - } - }); - if (files != null) { - for (File file : files) { - ActiveMQDestination destination; - if (file.getName().startsWith(TOPIC_PREPEND)) { - String destinationName = file.getName().substring(TOPIC_PREPEND.length()); - destination = new ActiveMQTopic(destinationName); - } else { - 
String destinationName = file.getName().substring(QUEUE_PREPEND.length()); - destination = new ActiveMQQueue(destinationName); - } - - Journal journal = new Journal(); - journal.setDirectory(file); - if (getDirectoryArchive() != null) { - IOHelper.mkdirs(getDirectoryArchive()); - File archive = new File(getDirectoryArchive(), file.getName()); - IOHelper.mkdirs(archive); - journal.setDirectoryArchive(archive); - } - configure(journal); - journalMap.put(destination, journal); - } - } - for (Journal journal : journalMap.values()) { - journal.start(); - } - } - - } - - public void close() throws IOException { - started.set(false); - for (Journal journal : journalMap.values()) { - journal.close(); - } - journalMap.clear(); - } - - - public void delete() throws IOException { - for (Journal journal : journalMap.values()) { - journal.delete(); - } - journalMap.clear(); - } - - public Journal getJournal(ActiveMQDestination destination) throws IOException { - Journal journal = journalMap.get(destination); - if (journal == null && !destination.isTemporary()) { - journal = new Journal(); - String fileName; - if (destination.isTopic()) { - fileName = TOPIC_PREPEND + destination.getPhysicalName(); - } else { - fileName = QUEUE_PREPEND + destination.getPhysicalName(); - } - File file = new File(getDirectory(), fileName); - IOHelper.mkdirs(file); - journal.setDirectory(file); - if (getDirectoryArchive() != null) { - IOHelper.mkdirs(getDirectoryArchive()); - File archive = new File(getDirectoryArchive(), fileName); - IOHelper.mkdirs(archive); - journal.setDirectoryArchive(archive); - } - configure(journal); - if (started.get()) { - journal.start(); - } - return journal; - } else { - return journal; - } - } - - public Map getFileMap() { - throw new RuntimeException("Not supported"); - } - - public Collection getJournals() { - return journalMap.values(); - } - - public Collection getJournals(Set set) { - List list = new ArrayList(); - for (ActiveMQDestination destination : set) { - Journal j = journalMap.get(destination); - if (j != null) { - list.add(j); - } - } - return list; - } - - protected void configure(Journal journal) { - journal.setMaxFileLength(getMaxFileLength()); - journal.setCheckForCorruptionOnStartup(isCheckForCorruptionOnStartup()); - journal.setChecksum(isChecksum() || isCheckForCorruptionOnStartup()); - journal.setWriteBatchSize(getWriteBatchSize()); - journal.setArchiveDataLogs(isArchiveDataLogs()); - journal.setSizeAccumulator(getStoreSize()); - } -} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/FilteredKahaDBPersistenceAdapter.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/FilteredKahaDBPersistenceAdapter.java new file mode 100644 index 0000000000..b588652b94 --- /dev/null +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/FilteredKahaDBPersistenceAdapter.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.activemq.store.kahadb; + +import org.apache.activemq.filter.DestinationMapEntry; + +/** + * @org.apache.xbean.XBean element="filteredKahaDB" + * + */ +public class FilteredKahaDBPersistenceAdapter extends DestinationMapEntry { + private KahaDBPersistenceAdapter persistenceAdapter; + + public KahaDBPersistenceAdapter getPersistenceAdapter() { + return persistenceAdapter; + } + + public void setPersistenceAdapter(KahaDBPersistenceAdapter persistenceAdapter) { + this.persistenceAdapter = persistenceAdapter; + } + + @Override + public void afterPropertiesSet() throws Exception { + // ok to have no destination, we default it + } +} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/JournalManager.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/JournalManager.java deleted file mode 100644 index a71fd24767..0000000000 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/JournalManager.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.activemq.store.kahadb; - -import java.io.File; -import java.io.IOException; -import java.util.Collection; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicLong; - -import org.apache.activemq.command.ActiveMQDestination; -import org.apache.kahadb.journal.DataFile; -import org.apache.kahadb.journal.Journal; - -public interface JournalManager { - - void start() throws IOException; - - void close() throws IOException; - - Journal getJournal(ActiveMQDestination destination) throws IOException; - - void setDirectory(File directory); - - void setMaxFileLength(int maxFileLength); - - void setCheckForCorruptionOnStartup(boolean checkForCorruptJournalFiles); - - void setChecksum(boolean checksum); - - void setWriteBatchSize(int batchSize); - - void setArchiveDataLogs(boolean archiveDataLogs); - - void setStoreSize(AtomicLong storeSize); - - void setDirectoryArchive(File directoryArchive); - - void delete() throws IOException; - - Map getFileMap(); - - Collection getJournals(); - - Collection getJournals(Set set); -} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBPersistenceAdapter.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBPersistenceAdapter.java index 560117ce28..57a02d19ae 100644 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBPersistenceAdapter.java +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBPersistenceAdapter.java @@ -19,7 +19,6 @@ package org.apache.activemq.store.kahadb; import java.io.File; import java.io.IOException; import java.util.Set; - import org.apache.activeio.journal.Journal; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.broker.BrokerServiceAware; @@ -27,11 +26,18 @@ import org.apache.activemq.broker.ConnectionContext; import org.apache.activemq.command.ActiveMQDestination; import org.apache.activemq.command.ActiveMQQueue; import org.apache.activemq.command.ActiveMQTopic; +import org.apache.activemq.command.LocalTransactionId; import org.apache.activemq.command.ProducerId; +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.command.XATransactionId; +import org.apache.activemq.protobuf.Buffer; import org.apache.activemq.store.MessageStore; import org.apache.activemq.store.PersistenceAdapter; import org.apache.activemq.store.TopicMessageStore; import org.apache.activemq.store.TransactionStore; +import org.apache.activemq.store.kahadb.data.KahaLocalTransactionId; +import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; +import org.apache.activemq.store.kahadb.data.KahaXATransactionId; import org.apache.activemq.usage.SystemUsage; /** @@ -46,6 +52,8 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi private final KahaDBStore letter = new KahaDBStore(); /** + * @param context + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#beginTransaction(org.apache.activemq.broker.ConnectionContext) */ public void beginTransaction(ConnectionContext context) throws IOException { @@ -53,6 +61,8 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param sync + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#checkpoint(boolean) */ public void checkpoint(boolean sync) throws IOException { @@ -60,6 +70,8 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param context + * @throws IOException * 
@see org.apache.activemq.store.PersistenceAdapter#commitTransaction(org.apache.activemq.broker.ConnectionContext) */ public void commitTransaction(ConnectionContext context) throws IOException { @@ -67,7 +79,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param destination * @return MessageStore + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#createQueueMessageStore(org.apache.activemq.command.ActiveMQQueue) */ public MessageStore createQueueMessageStore(ActiveMQQueue destination) throws IOException { @@ -75,7 +89,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param destination * @return TopicMessageStore + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#createTopicMessageStore(org.apache.activemq.command.ActiveMQTopic) */ public TopicMessageStore createTopicMessageStore(ActiveMQTopic destination) throws IOException { @@ -83,7 +99,8 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** - * @return TrandactionStore + * @return TransactionStore + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#createTransactionStore() */ public TransactionStore createTransactionStore() throws IOException { @@ -91,6 +108,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#deleteAllMessages() */ public void deleteAllMessages() throws IOException { @@ -107,6 +125,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * @return lastMessageBrokerSequenceId + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#getLastMessageBrokerSequenceId() */ public long getLastMessageBrokerSequenceId() throws IOException { @@ -118,6 +137,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param destination * @see org.apache.activemq.store.PersistenceAdapter#removeQueueMessageStore(org.apache.activemq.command.ActiveMQQueue) */ public void removeQueueMessageStore(ActiveMQQueue destination) { @@ -125,6 +145,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param destination * @see org.apache.activemq.store.PersistenceAdapter#removeTopicMessageStore(org.apache.activemq.command.ActiveMQTopic) */ public void removeTopicMessageStore(ActiveMQTopic destination) { @@ -132,6 +153,8 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param context + * @throws IOException * @see org.apache.activemq.store.PersistenceAdapter#rollbackTransaction(org.apache.activemq.broker.ConnectionContext) */ public void rollbackTransaction(ConnectionContext context) throws IOException { @@ -139,6 +162,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param brokerName * @see org.apache.activemq.store.PersistenceAdapter#setBrokerName(java.lang.String) */ public void setBrokerName(String brokerName) { @@ -146,6 +170,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param usageManager * @see org.apache.activemq.store.PersistenceAdapter#setUsageManager(org.apache.activemq.usage.SystemUsage) */ public void setUsageManager(SystemUsage usageManager) { @@ -161,6 +186,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * 
@throws Exception * @see org.apache.activemq.Service#start() */ public void start() throws Exception { @@ -168,6 +194,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @throws Exception * @see org.apache.activemq.Service#stop() */ public void stop() throws Exception { @@ -176,7 +203,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the journalMaxFileLength - * + * * @return the journalMaxFileLength */ public int getJournalMaxFileLength() { @@ -186,6 +213,8 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can * be used + * + * @org.apache.xbean.Property propertyEditor="org.apache.activemq.util.MemoryIntPropertyEditor" */ public void setJournalMaxFileLength(int journalMaxFileLength) { this.letter.setJournalMaxFileLength(journalMaxFileLength); @@ -197,7 +226,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi public void setMaxFailoverProducersToTrack(int maxFailoverProducersToTrack) { this.letter.setMaxFailoverProducersToTrack(maxFailoverProducersToTrack); } - + public int getMaxFailoverProducersToTrack() { return this.letter.getMaxFailoverProducersToTrack(); } @@ -209,14 +238,14 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi public void setFailoverProducersAuditDepth(int failoverProducersAuditDepth) { this.letter.setFailoverProducersAuditDepth(failoverProducersAuditDepth); } - + public int getFailoverProducersAuditDepth() { return this.getFailoverProducersAuditDepth(); } - + /** * Get the checkpointInterval - * + * * @return the checkpointInterval */ public long getCheckpointInterval() { @@ -225,8 +254,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the checkpointInterval - * - * @param checkpointInterval the checkpointInterval to set + * + * @param checkpointInterval + * the checkpointInterval to set */ public void setCheckpointInterval(long checkpointInterval) { this.letter.setCheckpointInterval(checkpointInterval); @@ -234,7 +264,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the cleanupInterval - * + * * @return the cleanupInterval */ public long getCleanupInterval() { @@ -243,8 +273,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the cleanupInterval - * - * @param cleanupInterval the cleanupInterval to set + * + * @param cleanupInterval + * the cleanupInterval to set */ public void setCleanupInterval(long cleanupInterval) { this.letter.setCleanupInterval(cleanupInterval); @@ -252,7 +283,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the indexWriteBatchSize - * + * * @return the indexWriteBatchSize */ public int getIndexWriteBatchSize() { @@ -262,8 +293,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the indexWriteBatchSize * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can be used - * - * @param indexWriteBatchSize the indexWriteBatchSize to set + * @org.apache.xbean.Property propertyEditor="org.apache.activemq.util.MemoryPropertyEditor" + * @param indexWriteBatchSize + * the indexWriteBatchSize to set */ public void setIndexWriteBatchSize(int indexWriteBatchSize) { this.letter.setIndexWriteBatchSize(indexWriteBatchSize); @@ -271,7 +303,7 @@ public class 
KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the journalMaxWriteBatchSize - * + * * @return the journalMaxWriteBatchSize */ public int getJournalMaxWriteBatchSize() { @@ -280,9 +312,10 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the journalMaxWriteBatchSize - * * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can be used - * - * @param journalMaxWriteBatchSize the journalMaxWriteBatchSize to set + * * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can be used + * @org.apache.xbean.Property propertyEditor="org.apache.activemq.util.MemoryPropertyEditor" + * @param journalMaxWriteBatchSize + * the journalMaxWriteBatchSize to set */ public void setJournalMaxWriteBatchSize(int journalMaxWriteBatchSize) { this.letter.setJournalMaxWriteBatchSize(journalMaxWriteBatchSize); @@ -290,7 +323,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the enableIndexWriteAsync - * + * * @return the enableIndexWriteAsync */ public boolean isEnableIndexWriteAsync() { @@ -299,8 +332,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the enableIndexWriteAsync - * - * @param enableIndexWriteAsync the enableIndexWriteAsync to set + * + * @param enableIndexWriteAsync + * the enableIndexWriteAsync to set */ public void setEnableIndexWriteAsync(boolean enableIndexWriteAsync) { this.letter.setEnableIndexWriteAsync(enableIndexWriteAsync); @@ -308,7 +342,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the directory - * + * * @return the directory */ public File getDirectory() { @@ -316,6 +350,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi } /** + * @param dir * @see org.apache.activemq.store.PersistenceAdapter#setDirectory(java.io.File) */ public void setDirectory(File dir) { @@ -324,7 +359,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the enableJournalDiskSyncs - * + * * @return the enableJournalDiskSyncs */ public boolean isEnableJournalDiskSyncs() { @@ -333,8 +368,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the enableJournalDiskSyncs - * - * @param enableJournalDiskSyncs the enableJournalDiskSyncs to set + * + * @param enableJournalDiskSyncs + * the enableJournalDiskSyncs to set */ public void setEnableJournalDiskSyncs(boolean enableJournalDiskSyncs) { this.letter.setEnableJournalDiskSyncs(enableJournalDiskSyncs); @@ -342,7 +378,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the indexCacheSize - * + * * @return the indexCacheSize */ public int getIndexCacheSize() { @@ -352,8 +388,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the indexCacheSize * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can be used - * - * @param indexCacheSize the indexCacheSize to set + * @org.apache.xbean.Property propertyEditor="org.apache.activemq.util.MemoryPropertyEditor" + * @param indexCacheSize + * the indexCacheSize to set */ public void setIndexCacheSize(int indexCacheSize) { this.letter.setIndexCacheSize(indexCacheSize); @@ -361,7 +398,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Get the ignoreMissingJournalfiles - * + * * @return the ignoreMissingJournalfiles */ public 
boolean isIgnoreMissingJournalfiles() { @@ -370,8 +407,9 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi /** * Set the ignoreMissingJournalfiles - * - * @param ignoreMissingJournalfiles the ignoreMissingJournalfiles to set + * + * @param ignoreMissingJournalfiles + * the ignoreMissingJournalfiles to set */ public void setIgnoreMissingJournalfiles(boolean ignoreMissingJournalfiles) { this.letter.setIgnoreMissingJournalfiles(ignoreMissingJournalfiles); @@ -432,14 +470,14 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi public int getMaxAsyncJobs() { return letter.getMaxAsyncJobs(); } - /** - * @param maxAsyncJobs the maxAsyncJobs to set + * @param maxAsyncJobs + * the maxAsyncJobs to set */ public void setMaxAsyncJobs(int maxAsyncJobs) { letter.setMaxAsyncJobs(maxAsyncJobs); } - + /** * @return the databaseLockedWaitDelay */ @@ -451,7 +489,7 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi * @param databaseLockedWaitDelay the databaseLockedWaitDelay to set */ public void setDatabaseLockedWaitDelay(int databaseLockedWaitDelay) { - letter.setDatabaseLockedWaitDelay(databaseLockedWaitDelay); + letter.setDatabaseLockedWaitDelay(databaseLockedWaitDelay); } public boolean getForceRecoverIndex() { @@ -462,19 +500,33 @@ public class KahaDBPersistenceAdapter implements PersistenceAdapter, BrokerServi letter.setForceRecoverIndex(forceRecoverIndex); } - public boolean isJournalPerDestination() { - return letter.isJournalPerDestination(); - } - - public void setJournalPerDestination(boolean journalPerDestination) { - letter.setJournalPerDestination(journalPerDestination); - } - - // for testing public KahaDBStore getStore() { return letter; } + public KahaTransactionInfo createTransactionInfo(TransactionId txid) { + if (txid == null) { + return null; + } + KahaTransactionInfo rc = new KahaTransactionInfo(); + + if (txid.isLocalTransaction()) { + LocalTransactionId t = (LocalTransactionId) txid; + KahaLocalTransactionId kahaTxId = new KahaLocalTransactionId(); + kahaTxId.setConnectionId(t.getConnectionId().getValue()); + kahaTxId.setTransacitonId(t.getValue()); + rc.setLocalTransacitonId(kahaTxId); + } else { + XATransactionId t = (XATransactionId) txid; + KahaXATransactionId kahaTxId = new KahaXATransactionId(); + kahaTxId.setBranchQualifier(new Buffer(t.getBranchQualifier())); + kahaTxId.setGlobalTransactionId(new Buffer(t.getGlobalTransactionId())); + kahaTxId.setFormatId(t.getFormatId()); + rc.setXaTransacitonId(kahaTxId); + } + return rc; + } + @Override public String toString() { String path = getDirectory() != null ? 
getDirectory().getAbsolutePath() : "DIRECTORY_NOT_SET"; diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBStore.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBStore.java index f4782ed093..b03dc8e23a 100644 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBStore.java +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBStore.java @@ -26,22 +26,23 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; -import java.util.concurrent.FutureTask; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.Semaphore; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; +import java.util.Map.Entry; +import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.activemq.broker.ConnectionContext; -import org.apache.activemq.command.*; +import org.apache.activemq.command.ActiveMQDestination; +import org.apache.activemq.command.ActiveMQQueue; +import org.apache.activemq.command.ActiveMQTempQueue; +import org.apache.activemq.command.ActiveMQTempTopic; +import org.apache.activemq.command.ActiveMQTopic; +import org.apache.activemq.command.Message; +import org.apache.activemq.command.MessageAck; +import org.apache.activemq.command.MessageId; +import org.apache.activemq.command.ProducerId; +import org.apache.activemq.command.SubscriptionInfo; +import org.apache.activemq.command.TransactionId; import org.apache.activemq.openwire.OpenWireFormat; import org.apache.activemq.protobuf.Buffer; import org.apache.activemq.store.AbstractMessageStore; @@ -52,20 +53,20 @@ import org.apache.activemq.store.TopicMessageStore; import org.apache.activemq.store.TransactionStore; import org.apache.activemq.store.kahadb.data.KahaAddMessageCommand; import org.apache.activemq.store.kahadb.data.KahaDestination; -import org.apache.activemq.store.kahadb.data.KahaDestination.DestinationType; import org.apache.activemq.store.kahadb.data.KahaLocation; import org.apache.activemq.store.kahadb.data.KahaRemoveDestinationCommand; import org.apache.activemq.store.kahadb.data.KahaRemoveMessageCommand; import org.apache.activemq.store.kahadb.data.KahaSubscriptionCommand; +import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; +import org.apache.activemq.store.kahadb.data.KahaDestination.DestinationType; import org.apache.activemq.usage.MemoryUsage; import org.apache.activemq.usage.SystemUsage; import org.apache.activemq.util.ServiceStopper; import org.apache.activemq.wireformat.WireFormat; -import org.apache.kahadb.journal.Journal; -import org.apache.kahadb.journal.Location; -import org.apache.kahadb.page.Transaction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.kahadb.journal.Location; +import org.apache.kahadb.page.Transaction; public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { static final Logger LOG = LoggerFactory.getLogger(KahaDBStore.class); @@ -76,7 +77,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { PROPERTY_CANCELED_TASK_MOD_METRIC, "0"), 10); public static final String PROPERTY_ASYNC_EXECUTOR_MAX_THREADS = 
"org.apache.activemq.store.kahadb.ASYNC_EXECUTOR_MAX_THREADS"; private static final int asyncExecutorMaxThreads = Integer.parseInt(System.getProperty( - PROPERTY_ASYNC_EXECUTOR_MAX_THREADS, "1"), 10); + PROPERTY_ASYNC_EXECUTOR_MAX_THREADS, "1"), 10);; protected ExecutorService queueExecutor; protected ExecutorService topicExecutor; @@ -95,9 +96,16 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { private boolean concurrentStoreAndDispatchTransactions = false; private int maxAsyncJobs = MAX_ASYNC_JOBS; private final KahaDBTransactionStore transactionStore; + private TransactionIdTransformer transactionIdTransformer; public KahaDBStore() { this.transactionStore = new KahaDBTransactionStore(this); + this.transactionIdTransformer = new TransactionIdTransformer() { + @Override + public KahaTransactionInfo transform(TransactionId txid) { + return TransactionIdConversion.convert(txid); + } + }; } @Override @@ -124,7 +132,8 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { } /** - * @param concurrentStoreAndDispatch the concurrentStoreAndDispatch to set + * @param concurrentStoreAndDispatch + * the concurrentStoreAndDispatch to set */ public void setConcurrentStoreAndDispatchQueues(boolean concurrentStoreAndDispatch) { this.concurrentStoreAndDispatchQueues = concurrentStoreAndDispatch; @@ -138,7 +147,8 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { } /** - * @param concurrentStoreAndDispatch the concurrentStoreAndDispatch to set + * @param concurrentStoreAndDispatch + * the concurrentStoreAndDispatch to set */ public void setConcurrentStoreAndDispatchTopics(boolean concurrentStoreAndDispatch) { this.concurrentStoreAndDispatchTopics = concurrentStoreAndDispatch; @@ -147,16 +157,16 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { public boolean isConcurrentStoreAndDispatchTransactions() { return this.concurrentStoreAndDispatchTransactions; } - + /** * @return the maxAsyncJobs */ public int getMaxAsyncJobs() { return this.maxAsyncJobs; } - /** - * @param maxAsyncJobs the maxAsyncJobs to set + * @param maxAsyncJobs + * the maxAsyncJobs to set */ public void setMaxAsyncJobs(int maxAsyncJobs) { this.maxAsyncJobs = maxAsyncJobs; @@ -171,20 +181,20 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { this.asyncTopicJobQueue = new LinkedBlockingQueue(getMaxAsyncJobs()); this.queueExecutor = new StoreTaskExecutor(1, asyncExecutorMaxThreads, 0L, TimeUnit.MILLISECONDS, asyncQueueJobQueue, new ThreadFactory() { - public Thread newThread(Runnable runnable) { - Thread thread = new Thread(runnable, "ConcurrentQueueStoreAndDispatch"); - thread.setDaemon(true); - return thread; - } - }); + public Thread newThread(Runnable runnable) { + Thread thread = new Thread(runnable, "ConcurrentQueueStoreAndDispatch"); + thread.setDaemon(true); + return thread; + } + }); this.topicExecutor = new StoreTaskExecutor(1, asyncExecutorMaxThreads, 0L, TimeUnit.MILLISECONDS, asyncTopicJobQueue, new ThreadFactory() { - public Thread newThread(Runnable runnable) { - Thread thread = new Thread(runnable, "ConcurrentTopicStoreAndDispatch"); - thread.setDaemon(true); - return thread; - } - }); + public Thread newThread(Runnable runnable) { + Thread thread = new Thread(runnable, "ConcurrentTopicStoreAndDispatch"); + thread.setDaemon(true); + return thread; + } + }); } @Override @@ -281,16 +291,14 @@ public class KahaDBStore extends MessageDatabase implements 
PersistenceAdapter { protected KahaDestination dest; private final int maxAsyncJobs; private final Semaphore localDestinationSemaphore; - private final Journal journal; double doneTasks, canceledTasks = 0; - public KahaDBMessageStore(ActiveMQDestination destination) throws IOException { + public KahaDBMessageStore(ActiveMQDestination destination) { super(destination); this.dest = convert(destination); this.maxAsyncJobs = getMaxAsyncJobs(); this.localDestinationSemaphore = new Semaphore(this.maxAsyncJobs); - this.journal = getJournalManager().getJournal(destination); } @Override @@ -347,30 +355,30 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { KahaAddMessageCommand command = new KahaAddMessageCommand(); command.setDestination(dest); command.setMessageId(message.getMessageId().toString()); - command.setTransactionInfo(createTransactionInfo(message.getTransactionId())); + command.setTransactionInfo(transactionIdTransformer.transform(message.getTransactionId())); command.setPriority(message.getPriority()); command.setPrioritySupported(isPrioritizedMessages()); org.apache.activemq.util.ByteSequence packet = wireFormat.marshal(message); command.setMessage(new Buffer(packet.getData(), packet.getOffset(), packet.getLength())); - store(journal, command, isEnableJournalDiskSyncs() && message.isResponseRequired(), null, null); - + store(command, isEnableJournalDiskSyncs() && message.isResponseRequired(), null, null); + } public void removeMessage(ConnectionContext context, MessageAck ack) throws IOException { KahaRemoveMessageCommand command = new KahaRemoveMessageCommand(); command.setDestination(dest); command.setMessageId(ack.getLastMessageId().toString()); - command.setTransactionInfo(createTransactionInfo(ack.getTransactionId())); + command.setTransactionInfo(transactionIdTransformer.transform(ack.getTransactionId())); org.apache.activemq.util.ByteSequence packet = wireFormat.marshal(ack); command.setAck(new Buffer(packet.getData(), packet.getOffset(), packet.getLength())); - store(journal, command, isEnableJournalDiskSyncs() && ack.isResponseRequired(), null, null); + store(command, isEnableJournalDiskSyncs() && ack.isResponseRequired(), null, null); } public void removeAllMessages(ConnectionContext context) throws IOException { KahaRemoveDestinationCommand command = new KahaRemoveDestinationCommand(); command.setDestination(dest); - store(journal, command, true, null, null); + store(command, true, null, null); } public Message getMessage(MessageId identity) throws IOException { @@ -392,14 +400,14 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { return sd.orderIndex.get(tx, sequence).location; } }); - } finally { + }finally { indexLock.readLock().unlock(); } if (location == null) { return null; } - return loadMessage(journal, location); + return loadMessage(location); } public int getMessageCount() throws IOException { @@ -415,14 +423,14 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { StoredDestination sd = getStoredDestination(dest, tx); int rc = 0; for (Iterator> iterator = sd.locationIndex.iterator(tx); iterator - .hasNext(); ) { + .hasNext();) { iterator.next(); rc++; } return rc; } }); - } finally { + }finally { indexLock.readLock().unlock(); } } finally { @@ -442,7 +450,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { return sd.locationIndex.isEmpty(tx); } }); - } finally { + }finally { indexLock.readLock().unlock(); } } @@ -461,17 +469,17 @@ 
public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { if (ackedAndPrepared.contains(entry.getValue().messageId)) { continue; } - Message msg = loadMessage(journal, entry.getValue().location); + Message msg = loadMessage(entry.getValue().location); listener.recoverMessage(msg); } } }); - } finally { + }finally { indexLock.writeLock().unlock(); } } - + public void recoverNextMessages(final int maxReturned, final MessageRecoveryListener listener) throws Exception { indexLock.readLock().lock(); try { @@ -486,7 +494,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { if (ackedAndPrepared.contains(entry.getValue().messageId)) { continue; } - Message msg = loadMessage(journal, entry.getValue().location); + Message msg = loadMessage(entry.getValue().location); listener.recoverMessage(msg); counter++; if (counter >= maxReturned) { @@ -496,23 +504,24 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { sd.orderIndex.stoppedIterating(); } }); - } finally { + }finally { indexLock.readLock().unlock(); } } public void resetBatching() { - try { - pageFile.tx().execute(new Transaction.Closure() { - public void execute(Transaction tx) throws Exception { - StoredDestination sd = getExistingStoredDestination(dest, tx); - if (sd != null) { - sd.orderIndex.resetCursorPosition(); - } - } - }); - } catch (Exception e) { - LOG.error("Failed to reset batching", e); + if (pageFile.isLoaded()) { + try { + pageFile.tx().execute(new Transaction.Closure() { + public void execute(Transaction tx) throws Exception { + StoredDestination sd = getExistingStoredDestination(dest, tx); + if (sd != null) { + sd.orderIndex.resetCursorPosition();} + } + }); + } catch (Exception e) { + LOG.error("Failed to reset batching",e); + } } } @@ -525,10 +534,10 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { // Hopefully one day the page file supports concurrent read // operations... but for now we must // externally synchronize... 
- + indexLock.writeLock().lock(); try { - pageFile.tx().execute(new Transaction.Closure() { + pageFile.tx().execute(new Transaction.Closure() { public void execute(Transaction tx) throws IOException { StoredDestination sd = getStoredDestination(dest, tx); Long location = sd.messageIdIndex.get(tx, key); @@ -537,10 +546,10 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { } } }); - } finally { + }finally { indexLock.writeLock().unlock(); } - + } finally { unlockAsyncJobQueue(); } @@ -550,21 +559,15 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { @Override public void setMemoryUsage(MemoryUsage memoeyUSage) { } - @Override public void start() throws Exception { super.start(); } - @Override public void stop() throws Exception { super.stop(); } - public Journal getJournal() { - return this.journal; - } - protected void lockAsyncJobQueue() { try { this.localDestinationSemaphore.tryAcquire(this.maxAsyncJobs, 60, TimeUnit.SECONDS); @@ -593,7 +596,6 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { class KahaDBTopicMessageStore extends KahaDBMessageStore implements TopicMessageStore { private final AtomicInteger subscriptionCount = new AtomicInteger(); - public KahaDBTopicMessageStore(ActiveMQTopic destination) throws IOException { super(destination); this.subscriptionCount.set(getAllSubscriptions().length); @@ -646,11 +648,11 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { command.setDestination(dest); command.setSubscriptionKey(subscriptionKey); command.setMessageId(messageId.toString()); - command.setTransactionInfo(createTransactionInfo(ack.getTransactionId())); + command.setTransactionInfo(transactionIdTransformer.transform(ack.getTransactionId())); if (ack != null && ack.isUnmatchedAck()) { command.setAck(UNMATCHED); } - store(getJournal(), command, false, null, null); + store(command, false, null, null); } public void addSubsciption(SubscriptionInfo subscriptionInfo, boolean retroactive) throws IOException { @@ -662,7 +664,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { command.setRetroactive(retroactive); org.apache.activemq.util.ByteSequence packet = wireFormat.marshal(subscriptionInfo); command.setSubscriptionInfo(new Buffer(packet.getData(), packet.getOffset(), packet.getLength())); - store(getJournal(), command, isEnableJournalDiskSyncs() && true, null, null); + store(command, isEnableJournalDiskSyncs() && true, null, null); this.subscriptionCount.incrementAndGet(); } @@ -670,7 +672,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { KahaSubscriptionCommand command = new KahaSubscriptionCommand(); command.setDestination(dest); command.setSubscriptionKey(subscriptionKey(clientId, subscriptionName)); - store(getJournal(), command, isEnableJournalDiskSyncs() && true, null, null); + store(command, isEnableJournalDiskSyncs() && true, null, null); this.subscriptionCount.decrementAndGet(); } @@ -683,7 +685,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { public void execute(Transaction tx) throws IOException { StoredDestination sd = getStoredDestination(dest, tx); for (Iterator> iterator = sd.subscriptions.iterator(tx); iterator - .hasNext(); ) { + .hasNext();) { Entry entry = iterator.next(); SubscriptionInfo info = (SubscriptionInfo) wireFormat.unmarshal(new DataInputStream(entry .getValue().getSubscriptionInfo().newInput())); @@ -692,7 +694,7 @@ 
public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { } } }); - } finally { + }finally { indexLock.readLock().unlock(); } @@ -716,7 +718,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { .getSubscriptionInfo().newInput())); } }); - } finally { + }finally { indexLock.readLock().unlock(); } } @@ -736,7 +738,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { int counter = 0; for (Iterator>> iterator = - sd.ackPositions.iterator(tx, cursorPos.lastAckedSequence); iterator.hasNext(); ) { + sd.ackPositions.iterator(tx, cursorPos.lastAckedSequence); iterator.hasNext();) { Entry> entry = iterator.next(); if (entry.getValue().contains(subscriptionKey)) { counter++; @@ -745,7 +747,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { return counter; } }); - } finally { + }finally { indexLock.writeLock().unlock(); } } @@ -762,20 +764,20 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { LastAck cursorPos = sd.subscriptionAcks.get(tx, subscriptionKey); sd.orderIndex.setBatch(tx, cursorPos); for (Iterator> iterator = sd.orderIndex.iterator(tx); iterator - .hasNext(); ) { + .hasNext();) { Entry entry = iterator.next(); - listener.recoverMessage(loadMessage(getJournal(), entry.getValue().location)); + listener.recoverMessage(loadMessage(entry.getValue().location)); } sd.orderIndex.resetCursorPosition(); } }); - } finally { + }finally { indexLock.writeLock().unlock(); } } public void recoverNextMessages(String clientId, String subscriptionName, final int maxReturned, - final MessageRecoveryListener listener) throws Exception { + final MessageRecoveryListener listener) throws Exception { final String subscriptionKey = subscriptionKey(clientId, subscriptionName); final SubscriptionInfo info = lookupSubscription(clientId, subscriptionName); indexLock.writeLock().lock(); @@ -800,9 +802,9 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { Entry entry = null; int counter = 0; for (Iterator> iterator = sd.orderIndex.iterator(tx, moc); iterator - .hasNext(); ) { + .hasNext();) { entry = iterator.next(); - if (listener.recoverMessage(loadMessage(getJournal(), entry.getValue().location))) { + if (listener.recoverMessage(loadMessage(entry.getValue().location))) { counter++; } if (counter >= maxReturned || listener.hasSpace() == false) { @@ -816,7 +818,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { } } }); - } finally { + }finally { indexLock.writeLock().unlock(); } } @@ -832,7 +834,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { sd.subscriptionCursors.remove(subscriptionKey); } }); - } finally { + }finally { indexLock.writeLock().unlock(); } } catch (IOException e) { @@ -856,8 +858,9 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { /** * Cleanup method to remove any state associated with the given destination. * This method does not stop the message store (it might not be cached). - * - * @param destination Destination to forget + * + * @param destination + * Destination to forget */ public void removeQueueMessageStore(ActiveMQQueue destination) { } @@ -865,8 +868,9 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { /** * Cleanup method to remove any state associated with the given destination * This method does not stop the message store (it might not be cached). 
- * - * @param destination Destination to forget + * + * @param destination + * Destination to forget */ public void removeTopicMessageStore(ActiveMQTopic destination) { } @@ -883,7 +887,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { pageFile.tx().execute(new Transaction.Closure() { public void execute(Transaction tx) throws IOException { for (Iterator> iterator = metadata.destinations.iterator(tx); iterator - .hasNext(); ) { + .hasNext();) { Entry entry = iterator.next(); if (!isEmptyTopic(entry, tx)) { rc.add(convert(entry.getKey())); @@ -904,7 +908,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { return isEmptyTopic; } }); - } finally { + }finally { indexLock.readLock().unlock(); } return rc; @@ -916,7 +920,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { public long getLastMessageBrokerSequenceId() throws IOException { return 0; } - + public long getLastProducerSequenceId(ProducerId id) { indexLock.readLock().lock(); try { @@ -933,11 +937,9 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { public void beginTransaction(ConnectionContext context) throws IOException { throw new IOException("Not yet implemented."); } - public void commitTransaction(ConnectionContext context) throws IOException { throw new IOException("Not yet implemented."); } - public void rollbackTransaction(ConnectionContext context) throws IOException { throw new IOException("Not yet implemented."); } @@ -955,8 +957,8 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { * @return * @throws IOException */ - Message loadMessage(Journal journal, Location location) throws IOException { - KahaAddMessageCommand addMessage = (KahaAddMessageCommand) load(journal, location); + Message loadMessage(Location location) throws IOException { + KahaAddMessageCommand addMessage = (KahaAddMessageCommand) load(location); Message msg = (Message) wireFormat.unmarshal(new DataInputStream(addMessage.getMessage().newInput())); return msg; } @@ -976,20 +978,20 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { KahaDestination rc = new KahaDestination(); rc.setName(dest.getPhysicalName()); switch (dest.getDestinationType()) { - case ActiveMQDestination.QUEUE_TYPE: - rc.setType(DestinationType.QUEUE); - return rc; - case ActiveMQDestination.TOPIC_TYPE: - rc.setType(DestinationType.TOPIC); - return rc; - case ActiveMQDestination.TEMP_QUEUE_TYPE: - rc.setType(DestinationType.TEMP_QUEUE); - return rc; - case ActiveMQDestination.TEMP_TOPIC_TYPE: - rc.setType(DestinationType.TEMP_TOPIC); - return rc; - default: - return null; + case ActiveMQDestination.QUEUE_TYPE: + rc.setType(DestinationType.QUEUE); + return rc; + case ActiveMQDestination.TOPIC_TYPE: + rc.setType(DestinationType.TOPIC); + return rc; + case ActiveMQDestination.TEMP_QUEUE_TYPE: + rc.setType(DestinationType.TEMP_QUEUE); + return rc; + case ActiveMQDestination.TEMP_TOPIC_TYPE: + rc.setType(DestinationType.TEMP_TOPIC); + return rc; + default: + return null; } } @@ -1002,19 +1004,27 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { String name = dest.substring(p + 1); switch (KahaDestination.DestinationType.valueOf(type)) { - case QUEUE: - return new ActiveMQQueue(name); - case TOPIC: - return new ActiveMQTopic(name); - case TEMP_QUEUE: - return new ActiveMQTempQueue(name); - case TEMP_TOPIC: - return new ActiveMQTempTopic(name); - default: - throw 
new IllegalArgumentException("Not in the valid destination format"); + case QUEUE: + return new ActiveMQQueue(name); + case TOPIC: + return new ActiveMQTopic(name); + case TEMP_QUEUE: + return new ActiveMQTempQueue(name); + case TEMP_TOPIC: + return new ActiveMQTempTopic(name); + default: + throw new IllegalArgumentException("Not in the valid destination format"); } } + public TransactionIdTransformer getTransactionIdTransformer() { + return transactionIdTransformer; + } + + public void setTransactionIdTransformer(TransactionIdTransformer transactionIdTransformer) { + this.transactionIdTransformer = transactionIdTransformer; + } + static class AsyncJobKey { MessageId id; ActiveMQDestination destination; @@ -1141,9 +1151,8 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { private final int subscriptionCount; private final List subscriptionKeys = new ArrayList(1); private final KahaDBTopicMessageStore topicStore; - public StoreTopicTask(KahaDBTopicMessageStore store, ConnectionContext context, Message message, - int subscriptionCount) { + int subscriptionCount) { super(store, context, message); this.topicStore = store; this.subscriptionCount = subscriptionCount; @@ -1175,7 +1184,8 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { /** * add a key - * + * + * @param key * @return true if all acknowledgements received */ public boolean addSubscriptionKey(String key) { @@ -1221,7 +1231,7 @@ public class KahaDBStore extends MessageDatabase implements PersistenceAdapter { super.afterExecute(runnable, throwable); if (runnable instanceof StoreTask) { - ((StoreTask) runnable).releaseLocks(); + ((StoreTask)runnable).releaseLocks(); } } diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBTransactionStore.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBTransactionStore.java index 57b53db2c1..6b220d797f 100755 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBTransactionStore.java +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/KahaDBTransactionStore.java @@ -19,7 +19,6 @@ package org.apache.activemq.store.kahadb; import java.io.DataInputStream; import java.io.IOException; import java.util.ArrayList; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -27,9 +26,7 @@ import java.util.concurrent.CancellationException; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; - import org.apache.activemq.broker.ConnectionContext; -import org.apache.activemq.command.ActiveMQDestination; import org.apache.activemq.command.Message; import org.apache.activemq.command.MessageAck; import org.apache.activemq.command.MessageId; @@ -52,13 +49,14 @@ import org.apache.activemq.store.kahadb.data.KahaPrepareCommand; import org.apache.activemq.store.kahadb.data.KahaRollbackCommand; import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; import org.apache.activemq.wireformat.WireFormat; -import org.apache.kahadb.journal.Journal; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Provides a TransactionStore implementation that can create transaction aware * MessageStore objects from non transaction aware MessageStore objects. 
+ * + * */ public class KahaDBTransactionStore implements TransactionStore { static final Logger LOG = LoggerFactory.getLogger(KahaDBTransactionStore.class); @@ -72,23 +70,21 @@ public class KahaDBTransactionStore implements TransactionStore { public class Tx { private final ArrayList messages = new ArrayList(); + private final ArrayList acks = new ArrayList(); - private final HashSet destinations = new HashSet(); public void add(AddMessageCommand msg) { messages.add(msg); - destinations.add(msg.getMessage().getDestination()); } public void add(RemoveMessageCommand ack) { acks.add(ack); - destinations.add(ack.getMessageAck().getDestination()); } public Message[] getMessages() { Message rc[] = new Message[messages.size()]; int count = 0; - for (Iterator iter = messages.iterator(); iter.hasNext(); ) { + for (Iterator iter = messages.iterator(); iter.hasNext();) { AddMessageCommand cmd = iter.next(); rc[count++] = cmd.getMessage(); } @@ -98,7 +94,7 @@ public class KahaDBTransactionStore implements TransactionStore { public MessageAck[] getAcks() { MessageAck rc[] = new MessageAck[acks.size()]; int count = 0; - for (Iterator iter = acks.iterator(); iter.hasNext(); ) { + for (Iterator iter = acks.iterator(); iter.hasNext();) { RemoveMessageCommand cmd = iter.next(); rc[count++] = cmd.getMessageAck(); } @@ -107,56 +103,49 @@ public class KahaDBTransactionStore implements TransactionStore { /** * @return true if something to commit + * @throws IOException */ public List> commit() throws IOException { List> results = new ArrayList>(); // Do all the message adds. - for (Iterator iter = messages.iterator(); iter.hasNext(); ) { + for (Iterator iter = messages.iterator(); iter.hasNext();) { AddMessageCommand cmd = iter.next(); results.add(cmd.run()); } // And removes.. 
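// Illustrative sketch, not part of the patch: commit() above returns one Future per
// queued add/remove. The commit(...) method further down drains them roughly like
// this; Future<Object> is assumed as the element type, 'tx' stands for a Tx instance,
// and the broker's IOException handling is reduced to comments.
for (Future<Object> result : tx.commit()) {
    try {
        result.get();                         // wait for the asynchronous store work
    } catch (CancellationException ignored) {
        // a cancelled future wrote nothing and is simply skipped
    } catch (InterruptedException e) {
        // surfaced via brokerService.handleIOException(...) in the real code
    } catch (ExecutionException e) {
        // likewise surfaced as an IOException
    }
}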
- for (Iterator iter = acks.iterator(); iter.hasNext(); ) { + for (Iterator iter = acks.iterator(); iter.hasNext();) { RemoveMessageCommand cmd = iter.next(); cmd.run(); results.add(cmd.run()); } - + return results; } } public abstract class AddMessageCommand { private final ConnectionContext ctx; - AddMessageCommand(ConnectionContext ctx) { this.ctx = ctx; } - abstract Message getMessage(); - Future run() throws IOException { return run(this.ctx); } - abstract Future run(ConnectionContext ctx) throws IOException; } public abstract class RemoveMessageCommand { private final ConnectionContext ctx; - RemoveMessageCommand(ConnectionContext ctx) { this.ctx = ctx; } - abstract MessageAck getMessageAck(); - Future run() throws IOException { return run(this.ctx); } - abstract Future run(ConnectionContext context) throws IOException; } @@ -208,8 +197,8 @@ public class KahaDBTransactionStore implements TransactionStore { @Override public void acknowledge(ConnectionContext context, String clientId, String subscriptionName, - MessageId messageId, MessageAck ack) throws IOException { - KahaDBTransactionStore.this.acknowledge(context, (TopicMessageStore) getDelegate(), clientId, + MessageId messageId, MessageAck ack) throws IOException { + KahaDBTransactionStore.this.acknowledge(context, (TopicMessageStore)getDelegate(), clientId, subscriptionName, messageId, ack); } @@ -217,20 +206,17 @@ public class KahaDBTransactionStore implements TransactionStore { } /** + * @throws IOException * @see org.apache.activemq.store.TransactionStore#prepare(TransactionId) */ public void prepare(TransactionId txid) throws IOException { KahaTransactionInfo info = getTransactionInfo(txid); if (txid.isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions() == false) { - for (Journal journal : theStore.getJournalManager().getJournals()) { - theStore.store(journal, new KahaPrepareCommand().setTransactionInfo(info), true, null, null); - } + theStore.store(new KahaPrepareCommand().setTransactionInfo(info), true, null, null); } else { Tx tx = inflightTransactions.remove(txid); if (tx != null) { - for (Journal journal : theStore.getJournalManager().getJournals(tx.destinations)) { - theStore.store(journal, new KahaPrepareCommand().setTransactionInfo(info), true, null, null); - } + theStore.store(new KahaPrepareCommand().setTransactionInfo(info), true, null, null); } } } @@ -262,7 +248,7 @@ public class KahaDBTransactionStore implements TransactionStore { theStore.brokerService.handleIOException(new IOException(e.getMessage())); } catch (ExecutionException e) { theStore.brokerService.handleIOException(new IOException(e.getMessage())); - } catch (CancellationException e) { + }catch(CancellationException e) { } if (!result.isCancelled()) { doneSomething = true; @@ -273,11 +259,9 @@ public class KahaDBTransactionStore implements TransactionStore { } if (doneSomething) { KahaTransactionInfo info = getTransactionInfo(txid); - for (Journal journal : theStore.getJournalManager().getJournals(tx.destinations)) { - theStore.store(journal, new KahaCommitCommand().setTransactionInfo(info), true, null, null); - } + theStore.store(new KahaCommitCommand().setTransactionInfo(info), true, null, null); } - } else { + }else { //The Tx will be null for failed over clients - lets run their post commits if (postCommit != null) { postCommit.run(); @@ -286,25 +270,22 @@ public class KahaDBTransactionStore implements TransactionStore { } else { KahaTransactionInfo info = getTransactionInfo(txid); - for (Journal journal : 
theStore.getJournalManager().getJournals()) { - theStore.store(journal, new KahaCommitCommand().setTransactionInfo(info), true, preCommit, postCommit); - } - forgetRecoveredAcks(txid); + theStore.store(new KahaCommitCommand().setTransactionInfo(info), true, preCommit, postCommit); + forgetRecoveredAcks(txid); } - } else { - LOG.error("Null transaction passed on commit"); + }else { + LOG.error("Null transaction passed on commit"); } } /** + * @throws IOException * @see org.apache.activemq.store.TransactionStore#rollback(TransactionId) */ public void rollback(TransactionId txid) throws IOException { if (txid.isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions() == false) { KahaTransactionInfo info = getTransactionInfo(txid); - for (Journal journal : theStore.getJournalManager().getJournals()) { - theStore.store(journal, new KahaRollbackCommand().setTransactionInfo(info), false, null, null); - } + theStore.store(new KahaRollbackCommand().setTransactionInfo(info), false, null, null); forgetRecoveredAcks(txid); } else { inflightTransactions.remove(txid); @@ -371,7 +352,6 @@ public class KahaDBTransactionStore implements TransactionStore { public Message getMessage() { return message; } - @Override public Future run(ConnectionContext ctx) throws IOException { destination.addMessage(ctx, message); @@ -399,7 +379,6 @@ public class KahaDBTransactionStore implements TransactionStore { public Message getMessage() { return message; } - @Override public Future run(ConnectionContext ctx) throws IOException { return destination.asyncAddQueueMessage(ctx, message); @@ -417,7 +396,7 @@ public class KahaDBTransactionStore implements TransactionStore { throws IOException { if (message.getTransactionId() != null) { - if (message.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions() == false) { + if (message.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions()==false) { destination.addMessage(context, message); return AbstractMessageStore.FUTURE; } else { @@ -427,7 +406,6 @@ public class KahaDBTransactionStore implements TransactionStore { public Message getMessage() { return message; } - @Override public Future run(ConnectionContext ctx) throws IOException { return destination.asyncAddTopicMessage(ctx, message); @@ -449,7 +427,7 @@ public class KahaDBTransactionStore implements TransactionStore { throws IOException { if (ack.isInTransaction()) { - if (ack.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions() == false) { + if (ack.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions()== false) { destination.removeMessage(context, ack); } else { Tx tx = getTx(ack.getTransactionId()); @@ -475,7 +453,7 @@ public class KahaDBTransactionStore implements TransactionStore { throws IOException { if (ack.isInTransaction()) { - if (ack.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions() == false) { + if (ack.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions()==false) { destination.removeAsyncMessage(context, ack); } else { Tx tx = getTx(ack.getTransactionId()); @@ -501,7 +479,7 @@ public class KahaDBTransactionStore implements TransactionStore { final MessageId messageId, final MessageAck ack) throws IOException { if (ack.isInTransaction()) { - if (ack.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions() == false) { + if 
(ack.getTransactionId().isXATransaction() || theStore.isConcurrentStoreAndDispatchTransactions()== false) { destination.acknowledge(context, clientId, subscriptionName, messageId, ack); } else { Tx tx = getTx(ack.getTransactionId()); @@ -523,7 +501,6 @@ public class KahaDBTransactionStore implements TransactionStore { private KahaTransactionInfo getTransactionInfo(TransactionId txid) { - return theStore.createTransactionInfo(txid); + return theStore.getTransactionIdTransformer().transform(txid); } - } diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MessageDatabase.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MessageDatabase.java index 990856feb4..7a7705e611 100644 --- a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MessageDatabase.java +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MessageDatabase.java @@ -18,7 +18,15 @@ package org.apache.activemq.store.kahadb; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.*; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.EOFException; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.OutputStream; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicBoolean; @@ -28,19 +36,29 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.activemq.ActiveMQMessageAuditNoSync; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.broker.BrokerServiceAware; -import org.apache.activemq.command.ActiveMQDestination; -import org.apache.activemq.command.ConnectionId; -import org.apache.activemq.command.LocalTransactionId; import org.apache.activemq.command.MessageAck; import org.apache.activemq.command.SubscriptionInfo; import org.apache.activemq.command.TransactionId; -import org.apache.activemq.command.XATransactionId; import org.apache.activemq.protobuf.Buffer; -import org.apache.activemq.store.kahadb.data.*; +import org.apache.activemq.store.kahadb.data.KahaAddMessageCommand; +import org.apache.activemq.store.kahadb.data.KahaCommitCommand; +import org.apache.activemq.store.kahadb.data.KahaDestination; +import org.apache.activemq.store.kahadb.data.KahaEntryType; +import org.apache.activemq.store.kahadb.data.KahaPrepareCommand; +import org.apache.activemq.store.kahadb.data.KahaProducerAuditCommand; +import org.apache.activemq.store.kahadb.data.KahaRemoveDestinationCommand; +import org.apache.activemq.store.kahadb.data.KahaRemoveMessageCommand; +import org.apache.activemq.store.kahadb.data.KahaRollbackCommand; +import org.apache.activemq.store.kahadb.data.KahaSubscriptionCommand; +import org.apache.activemq.store.kahadb.data.KahaTraceCommand; +import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; import org.apache.activemq.util.Callback; import org.apache.activemq.util.IOHelper; import org.apache.activemq.util.ServiceStopper; import org.apache.activemq.util.ServiceSupport; +import org.apache.kahadb.util.LocationMarshaller; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.kahadb.index.BTreeIndex; import org.apache.kahadb.index.BTreeVisitor; import org.apache.kahadb.journal.DataFile; @@ -49,9 +67,16 @@ import org.apache.kahadb.journal.Location; import org.apache.kahadb.page.Page; import org.apache.kahadb.page.PageFile; import org.apache.kahadb.page.Transaction; -import 
org.apache.kahadb.util.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.kahadb.util.ByteSequence; +import org.apache.kahadb.util.DataByteArrayInputStream; +import org.apache.kahadb.util.DataByteArrayOutputStream; +import org.apache.kahadb.util.LockFile; +import org.apache.kahadb.util.LongMarshaller; +import org.apache.kahadb.util.Marshaller; +import org.apache.kahadb.util.Sequence; +import org.apache.kahadb.util.SequenceSet; +import org.apache.kahadb.util.StringMarshaller; +import org.apache.kahadb.util.VariableMarshaller; public class MessageDatabase extends ServiceSupport implements BrokerServiceAware { @@ -61,11 +86,9 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar public static final int LOG_SLOW_ACCESS_TIME = Integer.parseInt(System.getProperty(PROPERTY_LOG_SLOW_ACCESS_TIME, "0")); protected static final Buffer UNMATCHED; - static { UNMATCHED = new Buffer(new byte[]{}); } - private static final Logger LOG = LoggerFactory.getLogger(MessageDatabase.class); private static final int DEFAULT_DATABASE_LOCKED_WAIT_DELAY = 10 * 1000; @@ -85,7 +108,6 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar protected Location producerSequenceIdTrackerLocation = null; protected transient ActiveMQMessageAuditNoSync producerSequenceIdTracker = new ActiveMQMessageAuditNoSync(); protected int version = VERSION; - public void read(DataInput is) throws IOException { state = is.readInt(); destinations = new BTreeIndex(pageFile, is.readLong()); @@ -108,9 +130,9 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } catch (EOFException expectedOnUpgrade) { } try { - version = is.readInt(); - } catch (EOFException expectedOnUpgrade) { - version = 1; + version = is.readInt(); + }catch (EOFException expectedOnUpgrade) { + version=1; } LOG.info("KahaDB is version " + version); } @@ -156,7 +178,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } protected PageFile pageFile; - protected JournalManager journalManager; + protected Journal journal; protected Metadata metadata = new Metadata(); protected MetadataMarshaller metadataMarshaller = new MetadataMarshaller(); @@ -166,12 +188,12 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar protected boolean deleteAllMessages; protected File directory = new File("KahaDB"); protected Thread checkpointThread; - protected boolean enableJournalDiskSyncs = true; + protected boolean enableJournalDiskSyncs=true; protected boolean archiveDataLogs; protected File directoryArchive; protected AtomicLong storeSize = new AtomicLong(0); - long checkpointInterval = 5 * 1000; - long cleanupInterval = 30 * 1000; + long checkpointInterval = 5*1000; + long cleanupInterval = 30*1000; int journalMaxFileLength = Journal.DEFAULT_MAX_FILE_LENGTH; int journalMaxWriteBatchSize = Journal.DEFAULT_MAX_WRITE_BATCH_SIZE; boolean enableIndexWriteAsync = false; @@ -187,8 +209,6 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar private int databaseLockedWaitDelay = DEFAULT_DATABASE_LOCKED_WAIT_DELAY; protected boolean forceRecoverIndex = false; private final Object checkpointThreadLock = new Object(); - private boolean journalPerDestination = false; - public MessageDatabase() { } @@ -235,15 +255,15 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar storedDestinations.clear(); pageFile.tx().execute(new Transaction.Closure() { public void 
execute(Transaction tx) throws IOException { - for (Iterator> iterator = metadata.destinations.iterator(tx); iterator.hasNext(); ) { + for (Iterator> iterator = metadata.destinations.iterator(tx); iterator.hasNext();) { Entry entry = iterator.next(); - StoredDestination sd = loadStoredDestination(tx, entry.getKey(), entry.getValue().subscriptions != null); + StoredDestination sd = loadStoredDestination(tx, entry.getKey(), entry.getValue().subscriptions!=null); storedDestinations.put(entry.getKey(), sd); } } }); pageFile.flush(); - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -270,11 +290,11 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar while (opened.get()) { Thread.sleep(sleepTime); long now = System.currentTimeMillis(); - if (now - lastCleanup >= cleanupInterval) { + if( now - lastCleanup >= cleanupInterval ) { checkpointCleanup(true); lastCleanup = now; lastCheckpoint = now; - } else if (now - lastCheckpoint >= checkpointInterval) { + } else if( now - lastCheckpoint >= checkpointInterval ) { checkpointCleanup(false); lastCheckpoint = now; } @@ -295,8 +315,8 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } public void open() throws IOException { - if (opened.compareAndSet(false, true)) { - getJournalManager().start(); + if( opened.compareAndSet(false, true) ) { + getJournal().start(); loadPageFile(); startCheckpoint(); recover(); @@ -348,20 +368,18 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar try { lock(); if (deleteAllMessages) { - getJournalManager().start(); - getJournalManager().delete(); - getJournalManager().close(); - journalManager = null; + getJournal().start(); + getJournal().delete(); + getJournal().close(); + journal = null; getPageFile().delete(); LOG.info("Persistence store purged."); deleteAllMessages = false; } open(); - for (Journal journal : getJournalManager().getJournals()) { - store(journal, new KahaTraceCommand().setMessage("LOADED " + new Date())); - } - } finally { + store(new KahaTraceCommand().setMessage("LOADED " + new Date())); + }finally { this.indexLock.writeLock().unlock(); } @@ -369,34 +387,32 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar public void close() throws IOException, InterruptedException { - if (opened.compareAndSet(true, false)) { + if( opened.compareAndSet(true, false)) { this.indexLock.writeLock().lock(); try { pageFile.tx().execute(new Transaction.Closure() { public void execute(Transaction tx) throws IOException { - for (Journal journal : getJournalManager().getJournals()) { - checkpointUpdate(tx, journal, true); - } + checkpointUpdate(tx, true); } }); pageFile.unload(); metadata = new Metadata(); - } finally { + }finally { this.indexLock.writeLock().unlock(); } - journalManager.close(); + journal.close(); synchronized (checkpointThreadLock) { checkpointThread.join(); } lockFile.unlock(); - lockFile = null; + lockFile=null; } } public void unload() throws IOException, InterruptedException { this.indexLock.writeLock().lock(); try { - if (pageFile != null && pageFile.isLoaded()) { + if( pageFile != null && pageFile.isLoaded() ) { metadata.state = CLOSED_STATE; metadata.firstInProgressTransactionLocation = getFirstInProgressTxLocation(); @@ -406,7 +422,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } }); } - } finally { + }finally { this.indexLock.writeLock().unlock(); } close(); @@ -417,12 +433,22 @@ public class MessageDatabase 
extends ServiceSupport implements BrokerServiceAwar Location l = null; synchronized (inflightTransactions) { if (!inflightTransactions.isEmpty()) { - l = inflightTransactions.values().iterator().next().get(0).getLocation(); + for (List ops : inflightTransactions.values()) { + if (!ops.isEmpty()) { + l = ops.get(0).getLocation(); + break; + } + } } if (!preparedTransactions.isEmpty()) { - Location t = preparedTransactions.values().iterator().next().get(0).getLocation(); - if (l == null || t.compareTo(l) <= 0) { - l = t; + for (List ops : preparedTransactions.values()) { + if (!ops.isEmpty()) { + Location t = ops.get(0).getLocation(); + if (l==null || t.compareTo(l) <= 0) { + l = t; + } + break; + } } } } @@ -432,65 +458,67 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar /** * Move all the messages that were in the journal into long term storage. We * just replay and do a checkpoint. + * + * @throws IOException + * @throws IOException + * @throws IllegalStateException */ private void recover() throws IllegalStateException, IOException { this.indexLock.writeLock().lock(); try { - for (Journal journal : getJournalManager().getJournals()) { - recover(journal); + + long start = System.currentTimeMillis(); + Location producerAuditPosition = recoverProducerAudit(); + Location lastIndoubtPosition = getRecoveryPosition(); + + Location recoveryPosition = minimum(producerAuditPosition, lastIndoubtPosition); + + if (recoveryPosition != null) { + int redoCounter = 0; + LOG.info("Recovering from the journal ..."); + while (recoveryPosition != null) { + JournalCommand message = load(recoveryPosition); + metadata.lastUpdate = recoveryPosition; + process(message, recoveryPosition, lastIndoubtPosition); + redoCounter++; + recoveryPosition = journal.getNextLocation(recoveryPosition); + } + long end = System.currentTimeMillis(); + LOG.info("Recovery replayed " + redoCounter + " operations from the journal in " + ((end - start) / 1000.0f) + " seconds."); } - } finally { + + // We may have to undo some index updates. 
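// Illustrative sketch, not part of the patch: what the "undo" mentioned in the comment
// above amounts to. Index updates can reach disk before the journal records they refer
// to are synced, so after a crash the index may point past journal.getLastAppendLocation();
// recoverIndex(tx), invoked in the closure below, drops those entries. Plain JDK types
// stand in here for the per-destination location indexes.
TreeMap<Long, Integer> sequenceToDataFile = new TreeMap<Long, Integer>();
sequenceToDataFile.put(10L, 4);
sequenceToDataFile.put(11L, 5);
sequenceToDataFile.put(12L, 7);              // indexed, but its journal record was lost
int lastValidDataFile = 5;                   // stand-in for the last append location
for (Iterator<Integer> it = sequenceToDataFile.values().iterator(); it.hasNext();) {
    if (it.next() > lastValidDataFile) {
        it.remove();                         // roll the orphaned entry back out of the index
    }
}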
+ pageFile.tx().execute(new Transaction.Closure() { + public void execute(Transaction tx) throws IOException { + recoverIndex(tx); + } + }); + + // rollback any recovered inflight local transactions + Set toRollback = new HashSet(); + synchronized (inflightTransactions) { + for (Iterator it = inflightTransactions.keySet().iterator(); it.hasNext(); ) { + TransactionId id = it.next(); + if (id.isLocalTransaction()) { + toRollback.add(id); + } + } + for (TransactionId tx: toRollback) { + LOG.debug("rolling back recovered indoubt local transaction " + tx); + store(new KahaRollbackCommand().setTransactionInfo(TransactionIdConversion.convertToLocal(tx)), false, null, null); + } + } + }finally { this.indexLock.writeLock().unlock(); } } - private void recover(final Journal journal) throws IllegalStateException, IOException { - - long start = System.currentTimeMillis(); - Location producerAuditPosition = recoverProducerAudit(journal); - Location lastIndoubtPosition = getRecoveryPosition(journal); - - Location recoveryPosition = minimum(producerAuditPosition, lastIndoubtPosition); - - if (recoveryPosition != null) { - int redoCounter = 0; - LOG.info("Recovering from the journal ..."); - while (recoveryPosition != null) { - JournalCommand message = load(journal, recoveryPosition); - metadata.lastUpdate = recoveryPosition; - process(message, recoveryPosition, lastIndoubtPosition); - redoCounter++; - recoveryPosition = journal.getNextLocation(recoveryPosition); - } - long end = System.currentTimeMillis(); - LOG.info("Recovery replayed " + redoCounter + " operations from the journal in " + ((end - start) / 1000.0f) + " seconds."); - } - - // We may have to undo some index updates. - pageFile.tx().execute(new Transaction.Closure() { - public void execute(Transaction tx) throws IOException { - recoverIndex(tx, journal); - } - }); - - // rollback any recovered inflight local transactions - Set toRollback = new HashSet(); - synchronized (inflightTransactions) { - for (Iterator it = inflightTransactions.keySet().iterator(); it.hasNext(); ) { - TransactionId id = it.next(); - if (id.isLocalTransaction()) { - toRollback.add(id); - } - } - for (TransactionId tx : toRollback) { - LOG.debug("rolling back recovered indoubt local transaction " + tx); - store(journal, new KahaRollbackCommand().setTransactionInfo(createTransactionInfo(tx)), false, null, null); - } - } + private KahaTransactionInfo createLocalTransactionInfo(TransactionId tx) { + return TransactionIdConversion.convertToLocal(tx); } private Location minimum(Location producerAuditPosition, - Location lastIndoubtPosition) { + Location lastIndoubtPosition) { Location min = null; if (producerAuditPosition != null) { min = producerAuditPosition; @@ -503,9 +531,9 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar return min; } - private Location recoverProducerAudit(Journal journal) throws IOException { + private Location recoverProducerAudit() throws IOException { if (metadata.producerSequenceIdTrackerLocation != null) { - KahaProducerAuditCommand audit = (KahaProducerAuditCommand) load(journal, metadata.producerSequenceIdTrackerLocation); + KahaProducerAuditCommand audit = (KahaProducerAuditCommand) load(metadata.producerSequenceIdTrackerLocation); try { ObjectInputStream objectIn = new ObjectInputStream(audit.getAudit().newInput()); metadata.producerSequenceIdTracker = (ActiveMQMessageAuditNoSync) objectIn.readObject(); @@ -520,12 +548,12 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } 
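// Illustrative sketch, not part of the patch: the mechanism behind the producer audit
// handled just above. checkpointProducerAudit() (visible later in this file) serialises
// the duplicate-detection state with an ObjectOutputStream into a journal record, and
// recoverProducerAudit() reads it back with an ObjectInputStream as shown above. A
// hypothetical helper, with Serializable standing in for ActiveMQMessageAuditNoSync:
static byte[] snapshotAudit(Serializable audit) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oout = new ObjectOutputStream(baos);
    oout.writeObject(audit);                 // same serialisation used for the audit payload
    oout.flush();
    oout.close();
    return baos.toByteArray();               // becomes the KahaProducerAuditCommand buffer
}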
} - protected void recoverIndex(Transaction tx, Journal journal) throws IOException { + protected void recoverIndex(Transaction tx) throws IOException { long start = System.currentTimeMillis(); // It is possible index updates got applied before the journal updates.. // in that case we need to removed references to messages that are not in the journal final Location lastAppendLocation = journal.getLastAppendLocation(); - long undoCounter = 0; + long undoCounter=0; // Go through all the destinations to see if they have messages past the lastAppendLocation for (StoredDestination sd : storedDestinations.values()) { @@ -551,7 +579,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } long end = System.currentTimeMillis(); - if (undoCounter > 0) { + if( undoCounter > 0 ) { // The rolledback operations are basically in flight journal writes. To avoid getting these the end user // should do sync writes to the journal. LOG.info("Rolled back " + undoCounter + " messages from the index in " + ((end - start) / 1000.0f) + " seconds."); @@ -567,12 +595,12 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar for (StoredDestination sd : storedDestinations.values()) { // Use a visitor to cut down the number of pages that we load sd.locationIndex.visit(tx, new BTreeVisitor() { - int last = -1; + int last=-1; public boolean isInterestedInKeysBetween(Location first, Location second) { - if (first == null) { + if( first==null ) { return !ss.contains(0, second.getDataFileId()); - } else if (second == null) { + } else if( second==null ) { return true; } else { return !ss.contains(first.getDataFileId(), second.getDataFileId()); @@ -582,7 +610,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar public void visit(List keys, List values) { for (Location l : keys) { int fileId = l.getDataFileId(); - if (last != fileId) { + if( last != fileId ) { ss.add(fileId); last = fileId; } @@ -592,34 +620,34 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar }); } HashSet missingJournalFiles = new HashSet(); - while (!ss.isEmpty()) { - missingJournalFiles.add((int) ss.removeFirst()); + while( !ss.isEmpty() ) { + missingJournalFiles.add( (int)ss.removeFirst() ); } - missingJournalFiles.removeAll(journal.getFileMap().keySet()); + missingJournalFiles.removeAll( journal.getFileMap().keySet() ); - if (!missingJournalFiles.isEmpty()) { - LOG.info("Some journal files are missing: " + missingJournalFiles); + if( !missingJournalFiles.isEmpty() ) { + LOG.info("Some journal files are missing: "+missingJournalFiles); } ArrayList> missingPredicates = new ArrayList>(); for (Integer missing : missingJournalFiles) { - missingPredicates.add(new BTreeVisitor.BetweenVisitor(new Location(missing, 0), new Location(missing + 1, 0))); + missingPredicates.add(new BTreeVisitor.BetweenVisitor(new Location(missing,0), new Location(missing+1,0))); } - if (checkForCorruptJournalFiles) { + if ( checkForCorruptJournalFiles ) { Collection dataFiles = journal.getFileMap().values(); for (DataFile dataFile : dataFiles) { int id = dataFile.getDataFileId(); - missingPredicates.add(new BTreeVisitor.BetweenVisitor(new Location(id, dataFile.getLength()), new Location(id + 1, 0))); + missingPredicates.add(new BTreeVisitor.BetweenVisitor(new Location(id,dataFile.getLength()), new Location(id+1,0))); Sequence seq = dataFile.getCorruptedBlocks().getHead(); - while (seq != null) { - missingPredicates.add(new BTreeVisitor.BetweenVisitor(new 
Location(id, (int) seq.getFirst()), new Location(id, (int) seq.getLast() + 1))); + while( seq!=null ) { + missingPredicates.add(new BTreeVisitor.BetweenVisitor(new Location(id, (int) seq.getFirst()), new Location(id, (int) seq.getLast()+1))); seq = seq.getNext(); } } } - if (!missingPredicates.isEmpty()) { + if( !missingPredicates.isEmpty() ) { for (StoredDestination sd : storedDestinations.values()) { final ArrayList matches = new ArrayList(); @@ -631,11 +659,11 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar }); // If somes message references are affected by the missing data files... - if (!matches.isEmpty()) { + if( !matches.isEmpty() ) { // We either 'gracefully' recover dropping the missing messages or // we error out. - if (ignoreMissingJournalfiles) { + if( ignoreMissingJournalfiles ) { // Update the index to remove the references to the missing data for (Long sequenceId : matches) { MessageKeys keys = sd.orderIndex.remove(tx, sequenceId); @@ -646,14 +674,14 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } } else { - throw new IOException("Detected missing/corrupt journal files. " + matches.size() + " messages affected."); + throw new IOException("Detected missing/corrupt journal files. "+matches.size()+" messages affected."); } } } } end = System.currentTimeMillis(); - if (undoCounter > 0) { + if( undoCounter > 0 ) { // The rolledback operations are basically in flight journal writes. To avoid getting these the end user // should do sync writes to the journal. LOG.info("Detected missing/corrupt journal files. Dropped " + undoCounter + " messages from the index in " + ((end - start) / 1000.0f) + " seconds."); @@ -663,12 +691,12 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar private Location nextRecoveryPosition; private Location lastRecoveryPosition; - public void incrementalRecover(Journal journal) throws IOException { + public void incrementalRecover() throws IOException { this.indexLock.writeLock().lock(); try { - if (nextRecoveryPosition == null) { - if (lastRecoveryPosition == null) { - nextRecoveryPosition = getRecoveryPosition(journal); + if( nextRecoveryPosition == null ) { + if( lastRecoveryPosition==null ) { + nextRecoveryPosition = getRecoveryPosition(); } else { nextRecoveryPosition = journal.getNextLocation(lastRecoveryPosition); } @@ -676,11 +704,11 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar while (nextRecoveryPosition != null) { lastRecoveryPosition = nextRecoveryPosition; metadata.lastUpdate = lastRecoveryPosition; - JournalCommand message = load(journal, lastRecoveryPosition); + JournalCommand message = load(lastRecoveryPosition); process(message, lastRecoveryPosition, (Runnable)null); nextRecoveryPosition = journal.getNextLocation(lastRecoveryPosition); } - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -689,7 +717,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar return metadata.lastUpdate; } - private Location getRecoveryPosition(Journal journal) throws IOException { + private Location getRecoveryPosition() throws IOException { if (!this.forceRecoverIndex) { @@ -699,7 +727,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } // Perhaps there were no transactions... - if (metadata.lastUpdate != null) { + if( metadata.lastUpdate!=null) { // Start replay at the record after the last one recorded in the index file. 
return journal.getNextLocation(metadata.lastUpdate); } @@ -709,44 +737,38 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } protected void checkpointCleanup(final boolean cleanup) throws IOException { - for (Journal journal : getJournalManager().getJournals()) { - checkpointCleanup(journal, cleanup); - } - } - - protected void checkpointCleanup(final Journal journal, final boolean cleanup) throws IOException { long start; this.indexLock.writeLock().lock(); try { start = System.currentTimeMillis(); - if (!opened.get()) { + if( !opened.get() ) { return; } pageFile.tx().execute(new Transaction.Closure() { public void execute(Transaction tx) throws IOException { - checkpointUpdate(tx, journal, cleanup); + checkpointUpdate(tx, cleanup); } }); - } finally { + }finally { this.indexLock.writeLock().unlock(); } long end = System.currentTimeMillis(); - if (LOG_SLOW_ACCESS_TIME > 0 && end - start > LOG_SLOW_ACCESS_TIME) { - LOG.info("Slow KahaDB access: cleanup took " + (end - start)); + if( LOG_SLOW_ACCESS_TIME>0 && end-start > LOG_SLOW_ACCESS_TIME) { + LOG.info("Slow KahaDB access: cleanup took "+(end-start)); } } - public void checkpoint(final Journal journal, Callback closure) throws Exception { + public void checkpoint(Callback closure) throws Exception { this.indexLock.writeLock().lock(); try { pageFile.tx().execute(new Transaction.Closure() { public void execute(Transaction tx) throws IOException { - checkpointUpdate(tx, journal, false); + checkpointUpdate(tx, false); } }); closure.execute(); - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -754,18 +776,17 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar // ///////////////////////////////////////////////////////////////// // Methods call by the broker to update and query the store. // ///////////////////////////////////////////////////////////////// - public Location store(Journal journal, JournalCommand data) throws IOException { - return store(journal, data, false, null, null); + public Location store(JournalCommand data) throws IOException { + return store(data, false, null,null); } - /** * All updated are are funneled through this method. The updates are converted * to a JournalMessage which is logged to the journal and then the data from * the JournalMessage is used to update the index just like it would be done * during a recovery process. 
*/ - public Location store(final Journal journal, JournalCommand data, boolean sync, Runnable before, Runnable after) throws IOException { + public Location store(JournalCommand data, boolean sync, Runnable before,Runnable after) throws IOException { if (before != null) { before.run(); } @@ -780,8 +801,8 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar long start2 = System.currentTimeMillis(); process(data, location, after); long end = System.currentTimeMillis(); - if (LOG_SLOW_ACCESS_TIME > 0 && end - start > LOG_SLOW_ACCESS_TIME) { - LOG.info("Slow KahaDB access: Journal append took: " + (start2 - start) + " ms, Index update took " + (end - start2) + " ms"); + if( LOG_SLOW_ACCESS_TIME>0 && end-start > LOG_SLOW_ACCESS_TIME) { + LOG.info("Slow KahaDB access: Journal append took: "+(start2-start)+" ms, Index update took "+(end-start2)+" ms"); } if (after != null) { @@ -812,27 +833,35 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar /** * Loads a previously stored JournalMessage + * + * @param location + * @return + * @throws IOException */ - public JournalCommand load(Journal journal, Location location) throws IOException { + public JournalCommand load(Location location) throws IOException { long start = System.currentTimeMillis(); ByteSequence data = journal.read(location); long end = System.currentTimeMillis(); - if (LOG_SLOW_ACCESS_TIME > 0 && end - start > LOG_SLOW_ACCESS_TIME) { - LOG.info("Slow KahaDB access: Journal read took: " + (end - start) + " ms"); + if( LOG_SLOW_ACCESS_TIME>0 && end-start > LOG_SLOW_ACCESS_TIME) { + LOG.info("Slow KahaDB access: Journal read took: "+(end-start)+" ms"); } DataByteArrayInputStream is = new DataByteArrayInputStream(data); byte readByte = is.readByte(); KahaEntryType type = KahaEntryType.valueOf(readByte); - if (type == null) { - throw new IOException("Could not load journal record. Invalid location: " + location); + if( type == null ) { + throw new IOException("Could not load journal record. 
Invalid location: "+location); } - JournalCommand message = (JournalCommand) type.createMessage(); + JournalCommand message = (JournalCommand)type.createMessage(); message.mergeFramed(is); return message; } /** * do minimal recovery till we reach the last inDoubtLocation + * @param data + * @param location + * @param inDoubtlocation + * @throws IOException */ void process(JournalCommand data, final Location location, final Location inDoubtlocation) throws IOException { if (inDoubtlocation != null && location.compareTo(inDoubtlocation) >= 0) { @@ -914,7 +943,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar upadateIndex(tx, command, location); } }); - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -922,8 +951,8 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar protected void process(final KahaRemoveMessageCommand command, final Location location) throws IOException { if (command.hasTransactionInfo()) { - List inflightTx = getInflightTx(command.getTransactionInfo(), location); - inflightTx.add(new RemoveOpperation(command, location)); + List inflightTx = getInflightTx(command.getTransactionInfo(), location); + inflightTx.add(new RemoveOpperation(command, location)); } else { this.indexLock.writeLock().lock(); try { @@ -932,7 +961,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar updateIndex(tx, command, location); } }); - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -947,7 +976,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar updateIndex(tx, command, location); } }); - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -960,7 +989,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar updateIndex(tx, command, location); } }); - } finally { + }finally { this.indexLock.writeLock().unlock(); } } @@ -984,7 +1013,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } protected void process(KahaCommitCommand command, Location location, final Runnable after) throws IOException { - TransactionId key = key(command.getTransactionInfo()); + TransactionId key = TransactionIdConversion.convert(command.getTransactionInfo()); List inflightTx; synchronized (inflightTransactions) { inflightTx = inflightTransactions.remove(key); @@ -1014,7 +1043,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } protected void process(KahaPrepareCommand command, Location location) { - TransactionId key = key(command.getTransactionInfo()); + TransactionId key = TransactionIdConversion.convert(command.getTransactionInfo()); synchronized (inflightTransactions) { List tx = inflightTransactions.remove(key); if (tx != null) { @@ -1024,7 +1053,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } protected void process(KahaRollbackCommand command, Location location) { - TransactionId key = key(command.getTransactionInfo()); + TransactionId key = TransactionIdConversion.convert(command.getTransactionInfo()); synchronized (inflightTransactions) { List tx = inflightTransactions.remove(key); if (tx == null) { @@ -1119,7 +1148,6 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } Map> ackMessageFileMap = new HashMap>(); - private void recordAckMessageReferenceLocation(Location ackLocation, Location messageLocation) { Set referenceFileIds = 
ackMessageFileMap.get(Integer.valueOf(ackLocation.getDataFileId())); if (referenceFileIds == null) { @@ -1172,9 +1200,9 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar // If set then we are creating it.. otherwise we are destroying the sub if (command.hasSubscriptionInfo()) { sd.subscriptions.put(tx, subscriptionKey, command); - long ackLocation = NOT_ACKED; + long ackLocation=NOT_ACKED; if (!command.getRetroactive()) { - ackLocation = sd.orderIndex.nextMessageId - 1; + ackLocation = sd.orderIndex.nextMessageId-1; } else { addAckLocationForRetroactiveSub(tx, sd, ackLocation, subscriptionKey); } @@ -1191,19 +1219,19 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar * @param tx * @throws IOException */ - void checkpointUpdate(Transaction tx, Journal journal, boolean cleanup) throws IOException { + void checkpointUpdate(Transaction tx, boolean cleanup) throws IOException { LOG.debug("Checkpoint started."); // reflect last update exclusive of current checkpoint Location firstTxLocation = metadata.lastUpdate; metadata.state = OPEN_STATE; - metadata.producerSequenceIdTrackerLocation = checkpointProducerAudit(journal); + metadata.producerSequenceIdTrackerLocation = checkpointProducerAudit(); metadata.firstInProgressTransactionLocation = getFirstInProgressTxLocation(); tx.store(metadata.page, metadataMarshaller, true); pageFile.flush(); - if (cleanup) { + if( cleanup ) { final TreeSet completeFileSet = new TreeSet(journal.getFileMap().keySet()); final TreeSet gcCandidateSet = new TreeSet(completeFileSet); @@ -1211,7 +1239,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar LOG.trace("Last update: " + firstTxLocation + ", full gc candidates set: " + gcCandidateSet); // Don't GC files under replication - if (journalFilesBeingReplicated != null) { + if( journalFilesBeingReplicated!=null ) { gcCandidateSet.removeAll(journalFilesBeingReplicated); } @@ -1220,16 +1248,16 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } // Don't GC files after the first in progress tx - if (metadata.firstInProgressTransactionLocation != null) { + if( metadata.firstInProgressTransactionLocation!=null ) { if (metadata.firstInProgressTransactionLocation.getDataFileId() < firstTxLocation.getDataFileId()) { firstTxLocation = metadata.firstInProgressTransactionLocation; } } - if (firstTxLocation != null) { - while (!gcCandidateSet.isEmpty()) { + if( firstTxLocation!=null ) { + while( !gcCandidateSet.isEmpty() ) { Integer last = gcCandidateSet.last(); - if (last >= firstTxLocation.getDataFileId()) { + if( last >= firstTxLocation.getDataFileId() ) { gcCandidateSet.remove(last); } else { break; @@ -1240,33 +1268,32 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar // Go through all the destinations to see if any of them can remove GC candidates. 
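// Illustrative sketch, not part of the patch: the shape of the cleanup pass the loop
// below performs. Every journal data file starts out as a GC candidate, files still
// referenced by a destination index (or by the ack references tracked in
// ackMessageFileMap) are struck off, and whatever survives is handed to
// journal.removeDataFiles(...). Plain JDK sets stand in for the index visitors.
TreeSet<Integer> gcCandidates = new TreeSet<Integer>(Arrays.asList(1, 2, 3, 4, 5));
Set<Integer> stillReferenced = new HashSet<Integer>(Arrays.asList(3, 5));
gcCandidates.removeAll(stillReferenced);     // files 1, 2 and 4 remain and may be deleted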
for (Entry entry : storedDestinations.entrySet()) { - if (gcCandidateSet.isEmpty()) { + if( gcCandidateSet.isEmpty() ) { break; } // Use a visitor to cut down the number of pages that we load entry.getValue().locationIndex.visit(tx, new BTreeVisitor() { - int last = -1; - + int last=-1; public boolean isInterestedInKeysBetween(Location first, Location second) { - if (first == null) { - SortedSet subset = gcCandidateSet.headSet(second.getDataFileId() + 1); - if (!subset.isEmpty() && subset.last() == second.getDataFileId()) { + if( first==null ) { + SortedSet subset = gcCandidateSet.headSet(second.getDataFileId()+1); + if( !subset.isEmpty() && subset.last() == second.getDataFileId() ) { subset.remove(second.getDataFileId()); } return !subset.isEmpty(); - } else if (second == null) { + } else if( second==null ) { SortedSet subset = gcCandidateSet.tailSet(first.getDataFileId()); - if (!subset.isEmpty() && subset.first() == first.getDataFileId()) { + if( !subset.isEmpty() && subset.first() == first.getDataFileId() ) { subset.remove(first.getDataFileId()); } return !subset.isEmpty(); } else { - SortedSet subset = gcCandidateSet.subSet(first.getDataFileId(), second.getDataFileId() + 1); - if (!subset.isEmpty() && subset.first() == first.getDataFileId()) { + SortedSet subset = gcCandidateSet.subSet(first.getDataFileId(), second.getDataFileId()+1); + if( !subset.isEmpty() && subset.first() == first.getDataFileId() ) { subset.remove(first.getDataFileId()); } - if (!subset.isEmpty() && subset.last() == second.getDataFileId()) { + if( !subset.isEmpty() && subset.last() == second.getDataFileId() ) { subset.remove(second.getDataFileId()); } return !subset.isEmpty(); @@ -1276,7 +1303,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar public void visit(List keys, List values) { for (Location l : keys) { int fileId = l.getDataFileId(); - if (last != fileId) { + if( last != fileId ) { gcCandidateSet.remove(fileId); last = fileId; } @@ -1310,8 +1337,8 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } } - if (!gcCandidateSet.isEmpty()) { - LOG.debug("Cleanup removing the data files: " + gcCandidateSet); + if( !gcCandidateSet.isEmpty() ) { + LOG.debug("Cleanup removing the data files: "+gcCandidateSet); journal.removeDataFiles(gcCandidateSet); } } @@ -1319,13 +1346,13 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar LOG.debug("Checkpoint done."); } - private Location checkpointProducerAudit(Journal journal) throws IOException { + private Location checkpointProducerAudit() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oout = new ObjectOutputStream(baos); oout.writeObject(metadata.producerSequenceIdTracker); oout.flush(); oout.close(); - return store(journal, new KahaProducerAuditCommand().setAudit(new Buffer(baos.toByteArray())), true, null, null); + return store(new KahaProducerAuditCommand().setAudit(new Buffer(baos.toByteArray())), true, null, null); } public HashSet getJournalFilesBeingReplicated() { @@ -1351,13 +1378,13 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar final Location location; public MessageKeys(String messageId, Location location) { - this.messageId = messageId; - this.location = location; + this.messageId=messageId; + this.location=location; } @Override public String toString() { - return "[" + messageId + "," + location + "]"; + return "["+messageId+","+location+"]"; } } @@ -1473,20 +1500,20 @@ 
public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar value.orderIndex.lowPriorityIndex = new BTreeIndex(pageFile, dataIn.readLong()); value.orderIndex.highPriorityIndex = new BTreeIndex(pageFile, dataIn.readLong()); } else { - // upgrade - pageFile.tx().execute(new Transaction.Closure() { - public void execute(Transaction tx) throws IOException { - value.orderIndex.lowPriorityIndex = new BTreeIndex(pageFile, tx.allocate()); - value.orderIndex.lowPriorityIndex.setKeyMarshaller(LongMarshaller.INSTANCE); - value.orderIndex.lowPriorityIndex.setValueMarshaller(MessageKeysMarshaller.INSTANCE); - value.orderIndex.lowPriorityIndex.load(tx); + // upgrade + pageFile.tx().execute(new Transaction.Closure() { + public void execute(Transaction tx) throws IOException { + value.orderIndex.lowPriorityIndex = new BTreeIndex(pageFile, tx.allocate()); + value.orderIndex.lowPriorityIndex.setKeyMarshaller(LongMarshaller.INSTANCE); + value.orderIndex.lowPriorityIndex.setValueMarshaller(MessageKeysMarshaller.INSTANCE); + value.orderIndex.lowPriorityIndex.load(tx); - value.orderIndex.highPriorityIndex = new BTreeIndex(pageFile, tx.allocate()); - value.orderIndex.highPriorityIndex.setKeyMarshaller(LongMarshaller.INSTANCE); - value.orderIndex.highPriorityIndex.setValueMarshaller(MessageKeysMarshaller.INSTANCE); - value.orderIndex.highPriorityIndex.load(tx); - } - }); + value.orderIndex.highPriorityIndex = new BTreeIndex(pageFile, tx.allocate()); + value.orderIndex.highPriorityIndex.setKeyMarshaller(LongMarshaller.INSTANCE); + value.orderIndex.highPriorityIndex.setValueMarshaller(MessageKeysMarshaller.INSTANCE); + value.orderIndex.highPriorityIndex.load(tx); + } + }); } return value; @@ -1514,12 +1541,12 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar public KahaSubscriptionCommand readPayload(DataInput dataIn) throws IOException { KahaSubscriptionCommand rc = new KahaSubscriptionCommand(); - rc.mergeFramed((InputStream) dataIn); + rc.mergeFramed((InputStream)dataIn); return rc; } public void writePayload(KahaSubscriptionCommand object, DataOutput dataOut) throws IOException { - object.writeFramed((OutputStream) dataOut); + object.writeFramed((OutputStream)dataOut); } } @@ -1609,7 +1636,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar for (Iterator> iterator = rc.subscriptionAcks.iterator(tx); iterator.hasNext(); ) { Entry entry = iterator.next(); for (Iterator> orderIterator = - rc.orderIndex.iterator(tx, new MessageOrderCursor(entry.getValue().lastAckedSequence)); orderIterator.hasNext(); ) { + rc.orderIndex.iterator(tx, new MessageOrderCursor(entry.getValue().lastAckedSequence)); orderIterator.hasNext(); ) { Long sequence = orderIterator.next().getKey(); addAckLocation(tx, rc, sequence, entry.getKey()); } @@ -1621,18 +1648,18 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar if (rc.orderIndex.nextMessageId == 0) { // check for existing durable sub all acked out - pull next seq from acks as messages are gone if (!rc.subscriptionAcks.isEmpty(tx)) { - for (Iterator> iterator = rc.subscriptionAcks.iterator(tx); iterator.hasNext(); ) { + for (Iterator> iterator = rc.subscriptionAcks.iterator(tx); iterator.hasNext();) { Entry entry = iterator.next(); rc.orderIndex.nextMessageId = - Math.max(rc.orderIndex.nextMessageId, entry.getValue().lastAckedSequence + 1); + Math.max(rc.orderIndex.nextMessageId, entry.getValue().lastAckedSequence +1); } } } else { // update based on ackPositions for 
unmatched, last entry is always the next if (!rc.ackPositions.isEmpty(tx)) { - Entry> last = rc.ackPositions.getLast(tx); + Entry> last = rc.ackPositions.getLast(tx); rc.orderIndex.nextMessageId = - Math.max(rc.orderIndex.nextMessageId, last.getKey()); + Math.max(rc.orderIndex.nextMessageId, last.getKey()); } } @@ -1665,17 +1692,16 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } final HashSet nextMessageIdMarker = new HashSet(); - // on a new message add, all existing subs are interested in this message private void addAckLocationForNewMessage(Transaction tx, StoredDestination sd, Long messageSequence) throws IOException { HashSet hs = new HashSet(); - for (Iterator> iterator = sd.subscriptionAcks.iterator(tx); iterator.hasNext(); ) { + for (Iterator> iterator = sd.subscriptionAcks.iterator(tx); iterator.hasNext();) { Entry entry = iterator.next(); hs.add(entry.getKey()); } sd.ackPositions.put(tx, messageSequence, hs); // add empty next to keep track of nextMessage - sd.ackPositions.put(tx, messageSequence + 1, nextMessageIdMarker); + sd.ackPositions.put(tx, messageSequence+1, nextMessageIdMarker); } private void removeAckLocationsForSub(Transaction tx, StoredDestination sd, String subscriptionKey) throws IOException { @@ -1729,11 +1755,11 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar // ///////////////////////////////////////////////////////////////// // Transaction related implementation methods. // ///////////////////////////////////////////////////////////////// - protected final LinkedHashMap> inflightTransactions = new LinkedHashMap>(); + private final LinkedHashMap> inflightTransactions = new LinkedHashMap>(); protected final LinkedHashMap> preparedTransactions = new LinkedHashMap>(); protected final Set ackedAndPrepared = new HashSet(); - // messages that have prepared (pending) acks cannot be redispatched unless the outcome is rollback, + // messages that have prepared (pending) acks cannot be re-dispatched unless the outcome is rollback, // till then they are skipped by the store. 
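// Illustrative sketch, not part of the patch: the skipping described in the comment
// above, reduced to plain JDK types. Message ids with a prepared but undecided ack are
// parked in a set (ackedAndPrepared) and filtered out of dispatch until the transaction
// outcome arrives; only a rollback makes them deliverable again. The id value is
// hypothetical.
Set<String> ackedAndPreparedIds = new HashSet<String>();
ackedAndPreparedIds.add("ID:example-host-1:1:1:1:1");       // recovered prepared ack
boolean dispatchable = !ackedAndPreparedIds.contains("ID:example-host-1:1:1:1:1");
// dispatchable stays false until the transaction commits or rolls back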
// 'at most once' XA guarantee public void trackRecoveredAcks(ArrayList acks) { @@ -1761,7 +1787,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } private List getInflightTx(KahaTransactionInfo info, Location location) { - TransactionId key = key(info); + TransactionId key = TransactionIdConversion.convert(info); List tx; synchronized (inflightTransactions) { tx = inflightTransactions.get(key); @@ -1774,20 +1800,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } private TransactionId key(KahaTransactionInfo transactionInfo) { - if (transactionInfo.hasLocalTransacitonId()) { - KahaLocalTransactionId tx = transactionInfo.getLocalTransacitonId(); - LocalTransactionId rc = new LocalTransactionId(); - rc.setConnectionId(new ConnectionId(tx.getConnectionId())); - rc.setValue(tx.getTransacitonId()); - return rc; - } else { - KahaXATransactionId tx = transactionInfo.getXaTransacitonId(); - XATransactionId rc = new XATransactionId(); - rc.setBranchQualifier(tx.getBranchQualifier().toByteArray()); - rc.setGlobalTransactionId(tx.getGlobalTransactionId().toByteArray()); - rc.setFormatId(tx.getFormatId()); - return rc; - } + return TransactionIdConversion.convert(transactionInfo); } abstract class Operation { @@ -1852,15 +1865,15 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar return index; } - private JournalManager createJournalManager() throws IOException { - JournalManager manager = isJournalPerDestination() ? new DestinationJournalManager() : new DefaultJournalManager(); + private Journal createJournal() throws IOException { + Journal manager = new Journal(); manager.setDirectory(directory); manager.setMaxFileLength(getJournalMaxFileLength()); manager.setCheckForCorruptionOnStartup(checkForCorruptJournalFiles); manager.setChecksum(checksumJournalFiles || checkForCorruptJournalFiles); manager.setWriteBatchSize(getJournalMaxWriteBatchSize()); manager.setArchiveDataLogs(isArchiveDataLogs()); - manager.setStoreSize(storeSize); + manager.setSizeAccumulator(storeSize); if (getDirectoryArchive() != null) { IOHelper.mkdirs(getDirectoryArchive()); manager.setDirectoryArchive(getDirectoryArchive()); @@ -1963,15 +1976,11 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar return pageFile; } - public JournalManager getJournalManager() throws IOException { - if (journalManager == null) { - journalManager = createJournalManager(); + public Journal getJournal() throws IOException { + if (journal == null) { + journal = createJournal(); } - return journalManager; - } - - public Journal getJournal(ActiveMQDestination destination) throws IOException { - return getJournalManager().getJournal(destination); + return journal; } public boolean isFailIfDatabaseIsLocked() { @@ -2060,59 +2069,27 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar this.databaseLockedWaitDelay = databaseLockedWaitDelay; } - public boolean isJournalPerDestination() { - return journalPerDestination; - } - - public void setJournalPerDestination(boolean journalPerDestination) { - this.journalPerDestination = journalPerDestination; - } - // ///////////////////////////////////////////////////////////////// // Internal conversion methods. 
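// Illustrative sketch, not part of the patch: the TransactionIdTransformer hook added
// by this change. The interface shape (a single transform(TransactionId) returning
// KahaTransactionInfo) is inferred from its use in getTransactionInfo(...) in
// KahaDBTransactionStore above and should be read as an assumption; 'store' is a
// hypothetical KahaDBStore configured via the setter added earlier in this patch.
store.setTransactionIdTransformer(new TransactionIdTransformer() {
    public KahaTransactionInfo transform(TransactionId txid) {
        // map the broker TransactionId onto the local Kaha form, reusing the
        // convertToLocal helper that recover() uses above
        return TransactionIdConversion.convertToLocal(txid);
    }
});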
// ///////////////////////////////////////////////////////////////// - KahaTransactionInfo createTransactionInfo(TransactionId txid) { - if (txid == null) { - return null; - } - KahaTransactionInfo rc = new KahaTransactionInfo(); - - if (txid.isLocalTransaction()) { - LocalTransactionId t = (LocalTransactionId) txid; - KahaLocalTransactionId kahaTxId = new KahaLocalTransactionId(); - kahaTxId.setConnectionId(t.getConnectionId().getValue()); - kahaTxId.setTransacitonId(t.getValue()); - rc.setLocalTransacitonId(kahaTxId); - } else { - XATransactionId t = (XATransactionId) txid; - KahaXATransactionId kahaTxId = new KahaXATransactionId(); - kahaTxId.setBranchQualifier(new Buffer(t.getBranchQualifier())); - kahaTxId.setGlobalTransactionId(new Buffer(t.getGlobalTransactionId())); - kahaTxId.setFormatId(t.getFormatId()); - rc.setXaTransacitonId(kahaTxId); - } - return rc; - } - - class MessageOrderCursor { + class MessageOrderCursor{ long defaultCursorPosition; long lowPriorityCursorPosition; long highPriorityCursorPosition; - - MessageOrderCursor() { + MessageOrderCursor(){ } - MessageOrderCursor(long position) { - this.defaultCursorPosition = position; - this.lowPriorityCursorPosition = position; - this.highPriorityCursorPosition = position; + MessageOrderCursor(long position){ + this.defaultCursorPosition=position; + this.lowPriorityCursorPosition=position; + this.highPriorityCursorPosition=position; } - MessageOrderCursor(MessageOrderCursor other) { - this.defaultCursorPosition = other.defaultCursorPosition; - this.lowPriorityCursorPosition = other.lowPriorityCursorPosition; - this.highPriorityCursorPosition = other.highPriorityCursorPosition; + MessageOrderCursor(MessageOrderCursor other){ + this.defaultCursorPosition=other.defaultCursorPosition; + this.lowPriorityCursorPosition=other.lowPriorityCursorPosition; + this.highPriorityCursorPosition=other.highPriorityCursorPosition; } MessageOrderCursor copy() { @@ -2120,33 +2097,33 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } void reset() { - this.defaultCursorPosition = 0; - this.highPriorityCursorPosition = 0; - this.lowPriorityCursorPosition = 0; + this.defaultCursorPosition=0; + this.highPriorityCursorPosition=0; + this.lowPriorityCursorPosition=0; } void increment() { - if (defaultCursorPosition != 0) { + if (defaultCursorPosition!=0) { defaultCursorPosition++; } - if (highPriorityCursorPosition != 0) { + if (highPriorityCursorPosition!=0) { highPriorityCursorPosition++; } - if (lowPriorityCursorPosition != 0) { + if (lowPriorityCursorPosition!=0) { lowPriorityCursorPosition++; } } public String toString() { - return "MessageOrderCursor:[def:" + defaultCursorPosition - + ", low:" + lowPriorityCursorPosition - + ", high:" + highPriorityCursorPosition + "]"; + return "MessageOrderCursor:[def:" + defaultCursorPosition + + ", low:" + lowPriorityCursorPosition + + ", high:" + highPriorityCursorPosition + "]"; } public void sync(MessageOrderCursor other) { - this.defaultCursorPosition = other.defaultCursorPosition; - this.lowPriorityCursorPosition = other.lowPriorityCursorPosition; - this.highPriorityCursorPosition = other.highPriorityCursorPosition; + this.defaultCursorPosition=other.defaultCursorPosition; + this.lowPriorityCursorPosition=other.lowPriorityCursorPosition; + this.highPriorityCursorPosition=other.highPriorityCursorPosition; } } @@ -2167,9 +2144,9 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar MessageKeys remove(Transaction tx, Long key) throws IOException 
{ MessageKeys result = defaultPriorityIndex.remove(tx, key); - if (result == null && highPriorityIndex != null) { + if (result == null && highPriorityIndex!=null) { result = highPriorityIndex.remove(tx, key); - if (result == null && lowPriorityIndex != null) { + if (result ==null && lowPriorityIndex!=null) { result = lowPriorityIndex.remove(tx, key); } } @@ -2292,14 +2269,14 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } void stoppedIterating() { - if (lastDefaultKey != null) { - cursor.defaultCursorPosition = lastDefaultKey.longValue() + 1; + if (lastDefaultKey!=null) { + cursor.defaultCursorPosition=lastDefaultKey.longValue()+1; } - if (lastHighKey != null) { - cursor.highPriorityCursorPosition = lastHighKey.longValue() + 1; + if (lastHighKey!=null) { + cursor.highPriorityCursorPosition=lastHighKey.longValue()+1; } - if (lastLowKey != null) { - cursor.lowPriorityCursorPosition = lastLowKey.longValue() + 1; + if (lastLowKey!=null) { + cursor.lowPriorityCursorPosition=lastLowKey.longValue()+1; } lastDefaultKey = null; lastHighKey = null; @@ -2318,7 +2295,7 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } void getDeleteList(Transaction tx, ArrayList> deletes, - BTreeIndex index, Long sequenceId) throws IOException { + BTreeIndex index, Long sequenceId) throws IOException { Iterator> iterator = index.iterator(tx, sequenceId); deletes.add(iterator.next()); @@ -2354,23 +2331,24 @@ public class MessageDatabase extends ServiceSupport implements BrokerServiceAwar } } - Iterator> iterator(Transaction tx) throws IOException { - return new MessageOrderIterator(tx, cursor); + Iterator> iterator(Transaction tx) throws IOException{ + return new MessageOrderIterator(tx,cursor); } - Iterator> iterator(Transaction tx, MessageOrderCursor m) throws IOException { - return new MessageOrderIterator(tx, m); + Iterator> iterator(Transaction tx, MessageOrderCursor m) throws IOException{ + return new MessageOrderIterator(tx,m); } public byte lastGetPriority() { return lastGetPriority; } - class MessageOrderIterator implements Iterator> { - Iterator> currentIterator; - final Iterator> highIterator; - final Iterator> defaultIterator; - final Iterator> lowIterator; + class MessageOrderIterator implements Iterator>{ + Iterator>currentIterator; + final Iterator>highIterator; + final Iterator>defaultIterator; + final Iterator>lowIterator; + MessageOrderIterator(Transaction tx, MessageOrderCursor m) throws IOException { diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBPersistenceAdapter.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBPersistenceAdapter.java new file mode 100644 index 0000000000..ea0d9c6def --- /dev/null +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBPersistenceAdapter.java @@ -0,0 +1,295 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.activemq.store.kahadb; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import javax.xml.bind.annotation.XmlAnyAttribute; +import org.apache.activemq.broker.BrokerService; +import org.apache.activemq.broker.BrokerServiceAware; +import org.apache.activemq.broker.ConnectionContext; +import org.apache.activemq.command.ActiveMQDestination; +import org.apache.activemq.command.ActiveMQQueue; +import org.apache.activemq.command.ActiveMQTopic; +import org.apache.activemq.command.LocalTransactionId; +import org.apache.activemq.command.ProducerId; +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.command.XATransactionId; +import org.apache.activemq.filter.AnyDestination; +import org.apache.activemq.filter.DestinationMap; +import org.apache.activemq.protobuf.Buffer; +import org.apache.activemq.store.MessageStore; +import org.apache.activemq.store.PersistenceAdapter; +import org.apache.activemq.store.TopicMessageStore; +import org.apache.activemq.store.TransactionStore; +import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; +import org.apache.activemq.store.kahadb.data.KahaXATransactionId; +import org.apache.activemq.usage.SystemUsage; +import org.apache.activemq.util.IOHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * An implementation of {@link org.apache.activemq.store.PersistenceAdapter} that supports + * distribution of destinations across multiple kahaDB persistence adapters + * + * @org.apache.xbean.XBean element="mKahaDB" + */ +public class MultiKahaDBPersistenceAdapter extends DestinationMap implements PersistenceAdapter, BrokerServiceAware { + static final Logger LOG = LoggerFactory.getLogger(MultiKahaDBPersistenceAdapter.class); + + final static ActiveMQDestination matchAll = new AnyDestination(new ActiveMQDestination[]{new ActiveMQQueue(">"), new ActiveMQTopic(">")}); + final int LOCAL_FORMAT_ID_MAGIC = Integer.valueOf(System.getProperty("org.apache.activemq.store.kahadb.MultiKahaDBTransactionStore.localXaFormatId", "61616")); + + BrokerService brokerService; + List adapters = new LinkedList(); + private File directory = new File(IOHelper.getDefaultDataDirectory() + File.separator + "mKahaDB"); + + MultiKahaDBTransactionStore transactionStore = new MultiKahaDBTransactionStore(this); + + // all local store transactions are XA, 2pc if more than one adapter involved + TransactionIdTransformer transactionIdTransformer = new TransactionIdTransformer() { + @Override + public KahaTransactionInfo transform(TransactionId txid) { + if (txid == null) { + return null; + } + KahaTransactionInfo rc = new KahaTransactionInfo(); + KahaXATransactionId kahaTxId = new KahaXATransactionId(); + if (txid.isLocalTransaction()) { + LocalTransactionId t = (LocalTransactionId) txid; + kahaTxId.setBranchQualifier(new Buffer(Long.toString(t.getValue()).getBytes(Charset.forName("utf-8")))); + kahaTxId.setGlobalTransactionId(new 
Buffer(t.getConnectionId().getValue().getBytes(Charset.forName("utf-8")))); + kahaTxId.setFormatId(LOCAL_FORMAT_ID_MAGIC); + } else { + XATransactionId t = (XATransactionId) txid; + kahaTxId.setBranchQualifier(new Buffer(t.getBranchQualifier())); + kahaTxId.setGlobalTransactionId(new Buffer(t.getGlobalTransactionId())); + kahaTxId.setFormatId(t.getFormatId()); + } + rc.setXaTransacitonId(kahaTxId); + return rc; + } + }; + + /** + * Sets the FilteredKahaDBPersistenceAdapter entries + * + * @org.apache.xbean.ElementType class="org.apache.activemq.store.kahadb.FilteredKahaDBPersistenceAdapter" + */ + public void setFilteredPersistenceAdapters(List entries) { + for (Object entry : entries) { + FilteredKahaDBPersistenceAdapter filteredAdapter = (FilteredKahaDBPersistenceAdapter) entry; + KahaDBPersistenceAdapter adapter = filteredAdapter.getPersistenceAdapter(); + if (filteredAdapter.getDestination() == null) { + filteredAdapter.setDestination(matchAll); + } + adapter.setDirectory(new File(getDirectory(), nameFromDestinationFilter(filteredAdapter.getDestination()))); + + // need a per store factory that will put the store in the branch qualifier to disiambiguate xid mbeans + adapter.getStore().setTransactionIdTransformer(transactionIdTransformer); + adapters.add(adapter); + } + super.setEntries(entries); + } + + private String nameFromDestinationFilter(ActiveMQDestination destination) { + return IOHelper.toFileSystemSafeName(destination.getQualifiedName()); + } + + public boolean isLocalXid(TransactionId xid) { + return xid instanceof XATransactionId && + ((XATransactionId)xid).getFormatId() == LOCAL_FORMAT_ID_MAGIC; + } + + public void beginTransaction(ConnectionContext context) throws IOException { + throw new IllegalStateException(); + } + + public void checkpoint(final boolean sync) throws IOException { + for (PersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.checkpoint(sync); + } + } + + public void commitTransaction(ConnectionContext context) throws IOException { + throw new IllegalStateException(); + } + + public MessageStore createQueueMessageStore(ActiveMQQueue destination) throws IOException { + PersistenceAdapter persistenceAdapter = getMatchingPersistenceAdapter(destination); + return transactionStore.proxy(persistenceAdapter.createTransactionStore(), persistenceAdapter.createQueueMessageStore(destination)); + } + + private PersistenceAdapter getMatchingPersistenceAdapter(ActiveMQDestination destination) { + Object result = this.chooseValue(destination); + if (result == null) { + throw new RuntimeException("No matching persistence adapter configured for destination: " + destination + ", options:" + adapters); + } + return ((FilteredKahaDBPersistenceAdapter) result).getPersistenceAdapter(); + } + + public TopicMessageStore createTopicMessageStore(ActiveMQTopic destination) throws IOException { + PersistenceAdapter persistenceAdapter = getMatchingPersistenceAdapter(destination); + return transactionStore.proxy(persistenceAdapter.createTransactionStore(), persistenceAdapter.createTopicMessageStore(destination)); + } + + public TransactionStore createTransactionStore() throws IOException { + return transactionStore; + } + + public void deleteAllMessages() throws IOException { + for (PersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.deleteAllMessages(); + } + transactionStore.deleteAllMessages(); + } + + public Set getDestinations() { + Set results = new HashSet(); + for (PersistenceAdapter persistenceAdapter : adapters) { + 
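The anonymous TransactionIdTransformer above is what lets a single broker-local transaction span several of the nested KahaDB stores: each store is handed a synthetic XA id whose format id is LOCAL_FORMAT_ID_MAGIC, and isLocalXid() later uses that same magic value to tell these internal xids apart from genuine client XA transactions during recovery. A minimal sketch of the mapping, assuming package-local access to the adapter's transformer field (names from the patch, values illustrative):

    // what the transform above produces for a broker-local transaction id
    LocalTransactionId local =
            new LocalTransactionId(new ConnectionId("ID:example-connection-1"), 42);
    KahaTransactionInfo info = adapter.transactionIdTransformer.transform(local);
    KahaXATransactionId xa = info.getXaTransacitonId();
    // xa.getBranchQualifier()     -> "42" as utf-8 bytes (the local transaction value)
    // xa.getGlobalTransactionId() -> "ID:example-connection-1" as utf-8 bytes (the connection id)
    // xa.getFormatId()            -> 61616, unless the
    //     org.apache.activemq.store.kahadb.MultiKahaDBTransactionStore.localXaFormatId
    //     system property overrides it; isLocalXid() keys off this same value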
results.addAll(persistenceAdapter.getDestinations()); + } + return results; + } + + public long getLastMessageBrokerSequenceId() throws IOException { + long maxId = -1; + for (PersistenceAdapter persistenceAdapter : adapters) { + maxId = Math.max(maxId, persistenceAdapter.getLastMessageBrokerSequenceId()); + } + return maxId; + } + + public long getLastProducerSequenceId(ProducerId id) throws IOException { + long maxId = -1; + for (PersistenceAdapter persistenceAdapter : adapters) { + maxId = Math.max(maxId, persistenceAdapter.getLastProducerSequenceId(id)); + } + return maxId; + } + + public void removeQueueMessageStore(ActiveMQQueue destination) { + getMatchingPersistenceAdapter(destination).removeQueueMessageStore(destination); + } + + public void removeTopicMessageStore(ActiveMQTopic destination) { + getMatchingPersistenceAdapter(destination).removeTopicMessageStore(destination); + } + + public void rollbackTransaction(ConnectionContext context) throws IOException { + throw new IllegalStateException(); + } + + public void setBrokerName(String brokerName) { + for (PersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.setBrokerName(brokerName); + } + } + + public void setUsageManager(SystemUsage usageManager) { + for (PersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.setUsageManager(usageManager); + } + } + + public long size() { + long size = 0; + for (PersistenceAdapter persistenceAdapter : adapters) { + size += persistenceAdapter.size(); + } + return size; + } + + public void start() throws Exception { + for (PersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.start(); + } + } + + public void stop() throws Exception { + for (PersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.stop(); + } + } + + public File getDirectory() { + return this.directory; + } + + @Override + public void setDirectory(File dir) { + this.directory = directory; + } + + public void setBrokerService(BrokerService brokerService) { + for (KahaDBPersistenceAdapter persistenceAdapter : adapters) { + persistenceAdapter.setBrokerService(brokerService); + } + this.brokerService = brokerService; + } + + public BrokerService getBrokerService() { + return brokerService; + } + + public void setTransactionStore(MultiKahaDBTransactionStore transactionStore) { + this.transactionStore = transactionStore; + } + + /** + * Set the max file length of the transaction journal + * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can + * be used + * + * @org.apache.xbean.Property propertyEditor="org.apache.activemq.util.MemoryIntPropertyEditor" + */ + public void setJournalMaxFileLength(int maxFileLength) { + transactionStore.setJournalMaxFileLength(maxFileLength); + } + + public int getJournalMaxFileLength() { + return transactionStore.getJournalMaxFileLength(); + } + + /** + * Set the max write batch size of the transaction journal + * When set using Xbean, values of the form "20 Mb", "1024kb", and "1g" can + * be used + * + * @org.apache.xbean.Property propertyEditor="org.apache.activemq.util.MemoryIntPropertyEditor" + */ + public void setJournalWriteBatchSize(int journalWriteBatchSize) { + transactionStore.setJournalMaxWriteBatchSize(journalWriteBatchSize); + } + + public int getJournalMaxWriteBatchSize() { + return transactionStore.getJournalMaxWriteBatchSize(); + } + + @Override + public String toString() { + String path = getDirectory() != null ? 
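The adapter is normally assembled from broker configuration, but a compact programmatic equivalent, mirroring what the new tests later in this patch do (directory layout and queue name here are illustrative), looks like this:

    import java.util.Arrays;
    import org.apache.activemq.broker.BrokerService;
    import org.apache.activemq.command.ActiveMQQueue;
    import org.apache.activemq.store.kahadb.FilteredKahaDBPersistenceAdapter;
    import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
    import org.apache.activemq.store.kahadb.MultiKahaDBPersistenceAdapter;

    public class MKahaDBWiringSketch {
        public static void main(String[] args) throws Exception {
            MultiKahaDBPersistenceAdapter mKahaDB = new MultiKahaDBPersistenceAdapter();

            // entry with no destination filter becomes the catch-all (matchAll) store
            FilteredKahaDBPersistenceAdapter defaultEntry = new FilteredKahaDBPersistenceAdapter();
            defaultEntry.setPersistenceAdapter(new KahaDBPersistenceAdapter());

            // dedicated store for one queue; wildcards match the same way policy entries do
            FilteredKahaDBPersistenceAdapter fastQ = new FilteredKahaDBPersistenceAdapter();
            fastQ.setDestination(new ActiveMQQueue("FastQ"));
            fastQ.setPersistenceAdapter(new KahaDBPersistenceAdapter());

            mKahaDB.setFilteredPersistenceAdapters(Arrays.asList(defaultEntry, fastQ));

            BrokerService broker = new BrokerService();
            broker.setPersistenceAdapter(mKahaDB);
            broker.start();
            // each filtered adapter keeps its own journal and index in a sub-directory
            // derived from its destination filter under the mKahaDB data directory
            broker.stop();
        }
    }

This separation is what the StorePerDestinationTest store-usage test further down exercises: a slow destination's backlog no longer pins journal files that only fast destinations still reference.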
getDirectory().getAbsolutePath() : "DIRECTORY_NOT_SET"; + return "MultiKahaDBPersistenceAdapter[" + path + "]" + adapters; + } + +} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBTransactionStore.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBTransactionStore.java new file mode 100644 index 0000000000..e7310e8dc1 --- /dev/null +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/MultiKahaDBTransactionStore.java @@ -0,0 +1,419 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.activemq.store.kahadb; + +import java.io.File; +import java.io.IOException; +import java.util.Date; +import java.util.HashSet; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Future; +import org.apache.activemq.broker.Broker; +import org.apache.activemq.broker.ConnectionContext; +import org.apache.activemq.command.Message; +import org.apache.activemq.command.MessageAck; +import org.apache.activemq.command.MessageId; +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.command.XATransactionId; +import org.apache.activemq.store.AbstractMessageStore; +import org.apache.activemq.store.MessageStore; +import org.apache.activemq.store.ProxyMessageStore; +import org.apache.activemq.store.ProxyTopicMessageStore; +import org.apache.activemq.store.TopicMessageStore; +import org.apache.activemq.store.TransactionRecoveryListener; +import org.apache.activemq.store.TransactionStore; +import org.apache.activemq.store.kahadb.data.KahaCommitCommand; +import org.apache.activemq.store.kahadb.data.KahaEntryType; +import org.apache.activemq.store.kahadb.data.KahaPrepareCommand; +import org.apache.activemq.store.kahadb.data.KahaTraceCommand; +import org.apache.activemq.util.IOHelper; +import org.apache.kahadb.journal.Journal; +import org.apache.kahadb.journal.Location; +import org.apache.kahadb.util.DataByteArrayInputStream; +import org.apache.kahadb.util.DataByteArrayOutputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MultiKahaDBTransactionStore implements TransactionStore { + static final Logger LOG = LoggerFactory.getLogger(MultiKahaDBTransactionStore.class); + final MultiKahaDBPersistenceAdapter multiKahaDBPersistenceAdapter; + final ConcurrentHashMap inflightTransactions = new ConcurrentHashMap(); + final Set recoveredPendingCommit = new HashSet(); + private Journal journal; + private int journalMaxFileLength = Journal.DEFAULT_MAX_FILE_LENGTH; + private int journalWriteBatchSize = Journal.DEFAULT_MAX_WRITE_BATCH_SIZE; + + public MultiKahaDBTransactionStore(MultiKahaDBPersistenceAdapter multiKahaDBPersistenceAdapter) { + 
this.multiKahaDBPersistenceAdapter = multiKahaDBPersistenceAdapter; + } + + public MessageStore proxy(final TransactionStore transactionStore, MessageStore messageStore) { + return new ProxyMessageStore(messageStore) { + @Override + public void addMessage(ConnectionContext context, final Message send) throws IOException { + MultiKahaDBTransactionStore.this.addMessage(transactionStore, context, getDelegate(), send); + } + + @Override + public Future asyncAddQueueMessage(ConnectionContext context, Message message) throws IOException { + return MultiKahaDBTransactionStore.this.asyncAddQueueMessage(transactionStore, context, getDelegate(), message); + } + + @Override + public void removeMessage(ConnectionContext context, final MessageAck ack) throws IOException { + MultiKahaDBTransactionStore.this.removeMessage(transactionStore, context, getDelegate(), ack); + } + + @Override + public void removeAsyncMessage(ConnectionContext context, MessageAck ack) throws IOException { + MultiKahaDBTransactionStore.this.removeAsyncMessage(transactionStore, context, getDelegate(), ack); + } + }; + } + + public TopicMessageStore proxy(final TransactionStore transactionStore, final TopicMessageStore messageStore) { + return new ProxyTopicMessageStore(messageStore) { + @Override + public void addMessage(ConnectionContext context, final Message send) throws IOException { + MultiKahaDBTransactionStore.this.addMessage(transactionStore, context, getDelegate(), send); + } + + @Override + public Future asyncAddTopicMessage(ConnectionContext context, Message message) throws IOException { + return MultiKahaDBTransactionStore.this.asyncAddTopicMessage(transactionStore, context, getDelegate(), message); + } + + @Override + public void removeMessage(ConnectionContext context, final MessageAck ack) throws IOException { + MultiKahaDBTransactionStore.this.removeMessage(transactionStore, context, getDelegate(), ack); + } + + @Override + public void removeAsyncMessage(ConnectionContext context, MessageAck ack) throws IOException { + MultiKahaDBTransactionStore.this.removeAsyncMessage(transactionStore, context, getDelegate(), ack); + } + + @Override + public void acknowledge(ConnectionContext context, String clientId, String subscriptionName, + MessageId messageId, MessageAck ack) throws IOException { + MultiKahaDBTransactionStore.this.acknowledge(transactionStore, context, (TopicMessageStore) getDelegate(), clientId, + subscriptionName, messageId, ack); + } + }; + } + + public void deleteAllMessages() { + IOHelper.deleteChildren(getDirectory()); + } + + public int getJournalMaxFileLength() { + return journalMaxFileLength; + } + + public void setJournalMaxFileLength(int journalMaxFileLength) { + this.journalMaxFileLength = journalMaxFileLength; + } + + public int getJournalMaxWriteBatchSize() { + return journalWriteBatchSize; + } + + public void setJournalMaxWriteBatchSize(int journalWriteBatchSize) { + this.journalWriteBatchSize = journalWriteBatchSize; + } + + public class Tx { + private final Set stores = new HashSet(); + private int prepareLocationId = 0; + + public void trackStore(TransactionStore store) { + stores.add(store); + } + + public Set getStores() { + return stores; + } + + public void trackPrepareLocation(Location location) { + this.prepareLocationId = location.getDataFileId(); + } + + public int getPreparedLocationId() { + return prepareLocationId; + } + } + + public Tx getTx(TransactionId txid) { + Tx tx = inflightTransactions.get(txid); + if (tx == null) { + tx = new Tx(); + inflightTransactions.put(txid, tx); 
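Besides proxying the per-destination message stores so that transactional adds and acks register their owning store with the in-flight Tx, this class keeps a small journal of its own (a txStore sub-directory of the mKahaDB directory) purely for logging the outcome of local two-phase commits. Its sizing knobs are also reachable through the adapter; a hedged tuning sketch with illustrative values:

    // optional tuning of the shared transaction-outcome journal
    MultiKahaDBPersistenceAdapter mKahaDB = new MultiKahaDBPersistenceAdapter();
    mKahaDB.setJournalMaxFileLength(1024 * 1024);   // delegates to MultiKahaDBTransactionStore
    mKahaDB.setJournalWriteBatchSize(62 * 1024);    // likewise, caps the journal write batch size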
+ } + return tx; + } + + public Tx removeTx(TransactionId txid) { + return inflightTransactions.remove(txid); + } + + public void prepare(TransactionId txid) throws IOException { + Tx tx = getTx(txid); + for (TransactionStore store : tx.getStores()) { + store.prepare(txid); + } + } + + public void commit(TransactionId txid, boolean wasPrepared, Runnable preCommit, Runnable postCommit) + throws IOException { + + if (preCommit != null) { + preCommit.run(); + } + + Tx tx = getTx(txid); + if (wasPrepared) { + for (TransactionStore store : tx.getStores()) { + store.commit(txid, true, null, null); + } + } else { + // can only do 1pc on a single store + if (tx.getStores().size() == 1) { + for (TransactionStore store : tx.getStores()) { + store.commit(txid, false, null, null); + } + } else { + // need to do local 2pc + for (TransactionStore store : tx.getStores()) { + store.prepare(txid); + } + persistOutcome(tx, txid); + for (TransactionStore store : tx.getStores()) { + store.commit(txid, true, null, null); + } + persistCompletion(txid); + } + } + removeTx(txid); + if (postCommit != null) { + postCommit.run(); + } + } + + public void persistOutcome(Tx tx, TransactionId txid) throws IOException { + tx.trackPrepareLocation(store(new KahaPrepareCommand().setTransactionInfo(multiKahaDBPersistenceAdapter.transactionIdTransformer.transform(txid)))); + } + + public void persistCompletion(TransactionId txid) throws IOException { + store(new KahaCommitCommand().setTransactionInfo(multiKahaDBPersistenceAdapter.transactionIdTransformer.transform(txid))); + } + + private Location store(JournalCommand data) throws IOException { + int size = data.serializedSizeFramed(); + DataByteArrayOutputStream os = new DataByteArrayOutputStream(size + 1); + os.writeByte(data.type().getNumber()); + data.writeFramed(os); + Location location = journal.write(os.toByteSequence(), true); + journal.setLastAppendLocation(location); + return location; + } + + public void rollback(TransactionId txid) throws IOException { + Tx tx = removeTx(txid); + if (tx != null) { + for (TransactionStore store : tx.getStores()) { + store.rollback(txid); + } + } + } + + public void start() throws Exception { + journal = new Journal() { + @Override + protected void cleanup() { + super.cleanup(); + txStoreCleanup(); + } + }; + journal.setDirectory(getDirectory()); + journal.setMaxFileLength(journalMaxFileLength); + journal.setWriteBatchSize(journalWriteBatchSize); + IOHelper.mkdirs(journal.getDirectory()); + journal.start(); + recoverPendingLocalTransactions(); + store(new KahaTraceCommand().setMessage("LOADED " + new Date())); + } + + private void txStoreCleanup() { + Set knownDataFileIds = new TreeSet(journal.getFileMap().keySet()); + for (Tx tx : inflightTransactions.values()) { + knownDataFileIds.remove(tx.getPreparedLocationId()); + } + try { + journal.removeDataFiles(knownDataFileIds); + } catch (Exception e) { + LOG.error(this + ", Failed to remove tx journal datafiles " + knownDataFileIds); + } + } + + private File getDirectory() { + return new File(multiKahaDBPersistenceAdapter.getDirectory(), "txStore"); + } + + public void stop() throws Exception { + journal.close(); + journal = null; + } + + private void recoverPendingLocalTransactions() throws IOException { + Location location = journal.getNextLocation(null); + while (location != null) { + process(load(location)); + location = journal.getNextLocation(location); + } + recoveredPendingCommit.addAll(inflightTransactions.keySet()); + LOG.info("pending local transactions: " + 
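The commit() logic above only pays the two-phase price when a transaction has actually touched more than one store; with a single participant it remains a plain one-phase commit. The easiest way to provoke the multi-store path is a transacted send to a composite destination whose components are filtered into different stores, which is exactly what StorePerDestinationTest does further down. A sketch, assuming a broker in the same JVM configured with separate stores for SlowQ and FastQ:

    import javax.jms.Connection;
    import javax.jms.MessageProducer;
    import javax.jms.Session;
    import org.apache.activemq.ActiveMQConnectionFactory;
    import org.apache.activemq.command.ActiveMQQueue;

    public class CrossStoreCommitSketch {
        public static void main(String[] args) throws Exception {
            ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost");
            Connection connection = cf.createConnection();
            try {
                Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
                MessageProducer producer = session.createProducer(new ActiveMQQueue("SlowQ,FastQ"));
                producer.send(session.createTextMessage("spans two stores"));
                // both stores were tracked by the store proxies, so this commit runs the local 2pc:
                // prepare each store, log the outcome in the txStore journal, then commit each store
                session.commit();
            } finally {
                connection.close();
            }
        }
    }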
recoveredPendingCommit); + } + + public JournalCommand load(Location location) throws IOException { + DataByteArrayInputStream is = new DataByteArrayInputStream(journal.read(location)); + byte readByte = is.readByte(); + KahaEntryType type = KahaEntryType.valueOf(readByte); + if (type == null) { + throw new IOException("Could not load journal record. Invalid location: " + location); + } + JournalCommand message = (JournalCommand) type.createMessage(); + message.mergeFramed(is); + return message; + } + + public void process(JournalCommand command) throws IOException { + switch (command.type()) { + case KAHA_PREPARE_COMMAND: + KahaPrepareCommand prepareCommand = (KahaPrepareCommand) command; + getTx(TransactionIdConversion.convert(prepareCommand.getTransactionInfo())); + break; + case KAHA_COMMIT_COMMAND: + KahaCommitCommand commitCommand = (KahaCommitCommand) command; + removeTx(TransactionIdConversion.convert(commitCommand.getTransactionInfo())); + break; + case KAHA_TRACE_COMMAND: + break; + default: + throw new IOException("Unexpected command in transaction journal: " + command); + } + } + + + public synchronized void recover(final TransactionRecoveryListener listener) throws IOException { + + for (final KahaDBPersistenceAdapter adapter : multiKahaDBPersistenceAdapter.adapters) { + adapter.createTransactionStore().recover(new TransactionRecoveryListener() { + @Override + public void recover(XATransactionId xid, Message[] addedMessages, MessageAck[] acks) { + try { + getTx(xid).trackStore(adapter.createTransactionStore()); + } catch (IOException e) { + LOG.error("Failed to access transaction store: " + adapter + " for prepared xa tid: " + xid, e); + } + listener.recover(xid, addedMessages, acks); + } + }); + } + + try { + Broker broker = multiKahaDBPersistenceAdapter.getBrokerService().getBroker(); + // force completion of local xa + for (TransactionId txid : broker.getPreparedTransactions(null)) { + if (multiKahaDBPersistenceAdapter.isLocalXid(txid)) { + try { + if (recoveredPendingCommit.contains(txid)) { + LOG.info("delivering pending commit outcome for tid: " + txid); + broker.commitTransaction(null, txid, false); + + } else { + LOG.info("delivering rollback outcome to store for tid: " + txid); + broker.forgetTransaction(null, txid); + } + persistCompletion(txid); + } catch (Exception ex) { + LOG.error("failed to deliver pending outcome for tid: " + txid, ex); + } + } + } + } catch (Exception e) { + LOG.error("failed to resolve pending local transactions", e); + } + } + + void addMessage(final TransactionStore transactionStore, ConnectionContext context, final MessageStore destination, final Message message) + throws IOException { + if (message.getTransactionId() != null) { + getTx(message.getTransactionId()).trackStore(transactionStore); + } + destination.addMessage(context, message); + } + + Future asyncAddQueueMessage(final TransactionStore transactionStore, ConnectionContext context, final MessageStore destination, final Message message) + throws IOException { + if (message.getTransactionId() != null) { + getTx(message.getTransactionId()).trackStore(transactionStore); + destination.addMessage(context, message); + return AbstractMessageStore.FUTURE; + } else { + return destination.asyncAddQueueMessage(context, message); + } + } + + Future asyncAddTopicMessage(final TransactionStore transactionStore, ConnectionContext context, final MessageStore destination, final Message message) + throws IOException { + + if (message.getTransactionId() != null) { + 
getTx(message.getTransactionId()).trackStore(transactionStore); + destination.addMessage(context, message); + return AbstractMessageStore.FUTURE; + } else { + return destination.asyncAddTopicMessage(context, message); + } + } + + final void removeMessage(final TransactionStore transactionStore, ConnectionContext context, final MessageStore destination, final MessageAck ack) + throws IOException { + if (ack.getTransactionId() != null) { + getTx(ack.getTransactionId()).trackStore(transactionStore); + } + destination.removeMessage(context, ack); + } + + final void removeAsyncMessage(final TransactionStore transactionStore, ConnectionContext context, final MessageStore destination, final MessageAck ack) + throws IOException { + if (ack.getTransactionId() != null) { + getTx(ack.getTransactionId()).trackStore(transactionStore); + } + destination.removeAsyncMessage(context, ack); + } + + final void acknowledge(final TransactionStore transactionStore, ConnectionContext context, final TopicMessageStore destination, + final String clientId, final String subscriptionName, + final MessageId messageId, final MessageAck ack) throws IOException { + if (ack.getTransactionId() != null) { + getTx(ack.getTransactionId()).trackStore(transactionStore); + } + destination.acknowledge(context, clientId, subscriptionName, messageId, ack); + } +} diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdConversion.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdConversion.java new file mode 100644 index 0000000000..e00f1957eb --- /dev/null +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdConversion.java @@ -0,0 +1,77 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.activemq.store.kahadb; + +import org.apache.activemq.command.ConnectionId; +import org.apache.activemq.command.LocalTransactionId; +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.command.XATransactionId; +import org.apache.activemq.protobuf.Buffer; +import org.apache.activemq.store.kahadb.data.KahaLocalTransactionId; +import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; +import org.apache.activemq.store.kahadb.data.KahaXATransactionId; + +public class TransactionIdConversion { + + static KahaTransactionInfo convertToLocal(TransactionId tx) { + KahaTransactionInfo rc = new KahaTransactionInfo(); + LocalTransactionId t = (LocalTransactionId) tx; + KahaLocalTransactionId kahaTxId = new KahaLocalTransactionId(); + kahaTxId.setConnectionId(t.getConnectionId().getValue()); + kahaTxId.setTransacitonId(t.getValue()); + rc.setLocalTransacitonId(kahaTxId); + return rc; + } + + static KahaTransactionInfo convert(TransactionId txid) { + if (txid == null) { + return null; + } + KahaTransactionInfo rc; + + if (txid.isLocalTransaction()) { + rc = convertToLocal(txid); + } else { + rc = new KahaTransactionInfo(); + XATransactionId t = (XATransactionId) txid; + KahaXATransactionId kahaTxId = new KahaXATransactionId(); + kahaTxId.setBranchQualifier(new Buffer(t.getBranchQualifier())); + kahaTxId.setGlobalTransactionId(new Buffer(t.getGlobalTransactionId())); + kahaTxId.setFormatId(t.getFormatId()); + rc.setXaTransacitonId(kahaTxId); + } + return rc; + } + + static TransactionId convert(KahaTransactionInfo transactionInfo) { + if (transactionInfo.hasLocalTransacitonId()) { + KahaLocalTransactionId tx = transactionInfo.getLocalTransacitonId(); + LocalTransactionId rc = new LocalTransactionId(); + rc.setConnectionId(new ConnectionId(tx.getConnectionId())); + rc.setValue(tx.getTransacitonId()); + return rc; + } else { + KahaXATransactionId tx = transactionInfo.getXaTransacitonId(); + XATransactionId rc = new XATransactionId(); + rc.setBranchQualifier(tx.getBranchQualifier().toByteArray()); + rc.setGlobalTransactionId(tx.getGlobalTransactionId().toByteArray()); + rc.setFormatId(tx.getFormatId()); + return rc; + } + } +} \ No newline at end of file diff --git a/activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdTransformer.java b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdTransformer.java new file mode 100644 index 0000000000..3ea74b0bae --- /dev/null +++ b/activemq-core/src/main/java/org/apache/activemq/store/kahadb/TransactionIdTransformer.java @@ -0,0 +1,8 @@ +package org.apache.activemq.store.kahadb; + +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.store.kahadb.data.KahaTransactionInfo; + +public interface TransactionIdTransformer { + KahaTransactionInfo transform(TransactionId txid); +} diff --git a/activemq-core/src/test/java/org/apache/activemq/broker/XARecoveryBrokerTest.java b/activemq-core/src/test/java/org/apache/activemq/broker/XARecoveryBrokerTest.java index 1e4bad9c91..e44261a18f 100755 --- a/activemq-core/src/test/java/org/apache/activemq/broker/XARecoveryBrokerTest.java +++ b/activemq-core/src/test/java/org/apache/activemq/broker/XARecoveryBrokerTest.java @@ -37,6 +37,8 @@ import org.apache.activemq.command.TransactionId; import org.apache.activemq.command.TransactionInfo; import org.apache.activemq.command.XATransactionId; import org.apache.activemq.util.JMXSupport; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used to 
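TransactionIdConversion above now owns both directions of the mapping between command-layer transaction ids and their KahaDB store form, which MessageDatabase previously inlined in key() and createTransactionInfo(). A quick round-trip sketch for the XA branch (the helpers are package-visible, so this assumes package-local code; the id values are illustrative):

    // command form -> store form -> command form; the ids survive the trip unchanged
    XATransactionId xid = new XATransactionId();
    xid.setFormatId(0x0101);
    xid.setGlobalTransactionId(new byte[]{1, 2, 3});
    xid.setBranchQualifier(new byte[]{4, 5, 6});

    KahaTransactionInfo stored = TransactionIdConversion.convert(xid);
    XATransactionId restored = (XATransactionId) TransactionIdConversion.convert(stored);

    assert restored.getFormatId() == xid.getFormatId();
    assert java.util.Arrays.equals(restored.getGlobalTransactionId(), xid.getGlobalTransactionId());
    assert java.util.Arrays.equals(restored.getBranchQualifier(), xid.getBranchQualifier());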
simulate the recovery that occurs when a broker shuts down. @@ -44,7 +46,7 @@ import org.apache.activemq.util.JMXSupport; * */ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { - + protected static final Logger LOG = LoggerFactory.getLogger(XARecoveryBrokerTest.class); public void testPreparedJmxView() throws Exception { ActiveMQDestination destination = createDestination(); @@ -202,7 +204,7 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { } // We should get the committed transactions. - for (int i = 0; i < 4; i++) { + for (int i = 0; i < expectedMessageCount(4, destination); i++) { Message m = receiveMessage(connection); assertNotNull(m); } @@ -249,7 +251,7 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination); connection.send(consumerInfo); - for (int i = 0; i < 4; i++) { + for (int i = 0; i < expectedMessageCount(4, destination); i++) { Message m = receiveMessage(connection); assertNotNull(m); } @@ -276,22 +278,26 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { connection.send(message); } - // Setup the consumer and receive the message. - ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination); - connection.send(consumerInfo); - // Begin the transaction. XATransactionId txid = createXATransaction(sessionInfo); connection.send(createBeginTransaction(connectionInfo, txid)); - Message m = null; - for (int i = 0; i < 4; i++) { - m = receiveMessage(connection); - assertNotNull(m); - } - MessageAck ack = createAck(consumerInfo, m, 4, MessageAck.STANDARD_ACK_TYPE); - ack.setTransactionId(txid); - connection.send(ack); + ConsumerInfo consumerInfo; + Message m = null; + for (ActiveMQDestination dest : destinationList(destination)) { + // Setup the consumer and receive the message. + consumerInfo = createConsumerInfo(sessionInfo, dest); + connection.send(consumerInfo); + + for (int i = 0; i < 4; i++) { + m = receiveMessage(connection); + assertNotNull(m); + } + + MessageAck ack = createAck(consumerInfo, m, 4, MessageAck.STANDARD_ACK_TYPE); + ack.setTransactionId(txid); + connection.send(ack); + } // Commit connection.request(createCommitTransaction1Phase(connectionInfo, txid)); @@ -334,23 +340,27 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { connection.send(message); } - // Setup the consumer and receive the message. - ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination); - connection.send(consumerInfo); - // Begin the transaction. XATransactionId txid = createXATransaction(sessionInfo); connection.send(createBeginTransaction(connectionInfo, txid)); - Message m = null; - for (int i = 0; i < 4; i++) { - m = receiveMessage(connection); - assertNotNull(m); - } - // one ack with last received, mimic a beforeEnd synchronization - MessageAck ack = createAck(consumerInfo, m, 4, MessageAck.STANDARD_ACK_TYPE); - ack.setTransactionId(txid); - connection.send(ack); + ConsumerInfo consumerInfo; + Message m = null; + for (ActiveMQDestination dest : destinationList(destination)) { + // Setup the consumer and receive the message. 
+ consumerInfo = createConsumerInfo(sessionInfo, dest); + connection.send(consumerInfo); + + for (int i = 0; i < 4; i++) { + m = receiveMessage(connection); + assertNotNull(m); + } + + // one ack with last received, mimic a beforeEnd synchronization + MessageAck ack = createAck(consumerInfo, m, 4, MessageAck.STANDARD_ACK_TYPE); + ack.setTransactionId(txid); + connection.send(ack); + } connection.request(createPrepareTransaction(connectionInfo, txid)); @@ -404,23 +414,27 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { connection.send(message); } - // Setup the consumer and receive the message. - ConsumerInfo consumerInfo = createConsumerInfo(sessionInfo, destination); - connection.send(consumerInfo); - // Begin the transaction. XATransactionId txid = createXATransaction(sessionInfo); connection.send(createBeginTransaction(connectionInfo, txid)); - Message message = null; - for (int i = 0; i < 4; i++) { - message = receiveMessage(connection); - assertNotNull(message); - } - // one ack with last received, mimic a beforeEnd synchronization - MessageAck ack = createAck(consumerInfo, message, 4, MessageAck.STANDARD_ACK_TYPE); - ack.setTransactionId(txid); - connection.send(ack); + ConsumerInfo consumerInfo; + Message message = null; + for (ActiveMQDestination dest : destinationList(destination)) { + // Setup the consumer and receive the message. + consumerInfo = createConsumerInfo(sessionInfo, dest); + connection.send(consumerInfo); + + for (int i = 0; i < 4; i++) { + message = receiveMessage(connection); + assertNotNull(message); + } + + // one ack with last received, mimic a beforeEnd synchronization + MessageAck ack = createAck(consumerInfo, message, 4, MessageAck.STANDARD_ACK_TYPE); + ack.setTransactionId(txid); + connection.send(ack); + } connection.request(createPrepareTransaction(connectionInfo, txid)); @@ -454,13 +468,20 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { // Begin new transaction for redelivery txid = createXATransaction(sessionInfo); connection.send(createBeginTransaction(connectionInfo, txid)); - for (int i = 0; i < 4; i++) { - message = receiveMessage(connection); - assertNotNull(message); + + for (ActiveMQDestination dest : destinationList(destination)) { + // Setup the consumer and receive the message. + consumerInfo = createConsumerInfo(sessionInfo, dest); + connection.send(consumerInfo); + + for (int i = 0; i < 4; i++) { + message = receiveMessage(connection); + assertNotNull(message); + } + MessageAck ack = createAck(consumerInfo, message, 4, MessageAck.STANDARD_ACK_TYPE); + ack.setTransactionId(txid); + connection.send(ack); } - ack = createAck(consumerInfo, message, 4, MessageAck.STANDARD_ACK_TYPE); - ack.setTransactionId(txid); - connection.send(ack); // Commit connection.request(createCommitTransaction1Phase(connectionInfo, txid)); @@ -470,6 +491,14 @@ public class XARecoveryBrokerTest extends BrokerRestartTestSupport { assertEquals("there are no prepared tx", 0, dataArrayResponse.getData().length); } + private ActiveMQDestination[] destinationList(ActiveMQDestination dest) { + return dest.isComposite() ? dest.getCompositeDestinations() : new ActiveMQDestination[]{dest}; + } + + private int expectedMessageCount(int i, ActiveMQDestination destination) { + return i * (destination.isComposite() ? 
destination.getCompositeDestinations().length : 1); + } + public void testQueuePersistentUncommittedAcksLostOnRestart() throws Exception { ActiveMQDestination destination = createDestination(); diff --git a/activemq-core/src/test/java/org/apache/activemq/broker/mKahaDBXARecoveryBrokerTest.java b/activemq-core/src/test/java/org/apache/activemq/broker/mKahaDBXARecoveryBrokerTest.java new file mode 100644 index 0000000000..2687b1cc20 --- /dev/null +++ b/activemq-core/src/test/java/org/apache/activemq/broker/mKahaDBXARecoveryBrokerTest.java @@ -0,0 +1,78 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.activemq.broker; + +import java.util.LinkedList; +import java.util.List; +import javax.jms.JMSException; +import javax.management.InstanceNotFoundException; +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; +import junit.framework.Test; +import org.apache.activemq.broker.jmx.RecoveredXATransactionViewMBean; +import org.apache.activemq.command.ActiveMQDestination; +import org.apache.activemq.command.ActiveMQQueue; +import org.apache.activemq.command.ConnectionInfo; +import org.apache.activemq.command.ConsumerInfo; +import org.apache.activemq.command.DataArrayResponse; +import org.apache.activemq.command.Message; +import org.apache.activemq.command.MessageAck; +import org.apache.activemq.command.ProducerInfo; +import org.apache.activemq.command.Response; +import org.apache.activemq.command.SessionInfo; +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.command.TransactionInfo; +import org.apache.activemq.command.XATransactionId; +import org.apache.activemq.store.kahadb.FilteredKahaDBPersistenceAdapter; +import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter; +import org.apache.activemq.store.kahadb.MultiKahaDBPersistenceAdapter; +import org.apache.activemq.util.JMXSupport; + +public class mKahaDBXARecoveryBrokerTest extends XARecoveryBrokerTest { + + @Override + protected void configureBroker(BrokerService broker) throws Exception { + super.configureBroker(broker); + + MultiKahaDBPersistenceAdapter mKahaDB = new MultiKahaDBPersistenceAdapter(); + List adapters = new LinkedList(); + FilteredKahaDBPersistenceAdapter defaultEntry = new FilteredKahaDBPersistenceAdapter(); + defaultEntry.setPersistenceAdapter(new KahaDBPersistenceAdapter()); + adapters.add(defaultEntry); + + FilteredKahaDBPersistenceAdapter special = new FilteredKahaDBPersistenceAdapter(); + special.setDestination(new ActiveMQQueue("special")); + special.setPersistenceAdapter(new KahaDBPersistenceAdapter()); + adapters.add(special); + + mKahaDB.setFilteredPersistenceAdapters(adapters); + broker.setPersistenceAdapter(mKahaDB); + } + + public static Test suite() { + return 
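mKahaDBXARecoveryBrokerTest reuses every scenario from XARecoveryBrokerTest, but its createDestination() (just below) returns the composite queue "test,special", so one component lands in each configured store and every recovery scenario becomes a cross-store XA test. That is why the parent class above grew the destinationList() and expectedMessageCount() helpers; the arithmetic is simply:

    // illustration of the doubled expectations for the composite destination
    ActiveMQDestination dest = new ActiveMQQueue("test,special");
    assert dest.isComposite();
    assert dest.getCompositeDestinations().length == 2;
    // so XARecoveryBrokerTest.expectedMessageCount(4, dest) evaluates to 4 * 2 = 8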
suite(mKahaDBXARecoveryBrokerTest.class); + } + + public static void main(String[] args) { + junit.textui.TestRunner.run(suite()); + } + + protected ActiveMQDestination createDestination() { + return new ActiveMQQueue("test,special"); + } + +} diff --git a/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2736Test.java b/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2736Test.java index 1b7faedb94..661b25d045 100644 --- a/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2736Test.java +++ b/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2736Test.java @@ -27,6 +27,8 @@ import org.apache.activemq.store.kahadb.KahaDBStore; import org.apache.activemq.util.DefaultIOExceptionHandler; import org.junit.After; import org.junit.Test; + + import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -56,7 +58,7 @@ public class AMQ2736Test { // test hack, close the journal to ensure no further journal updates when broker stops // mimic kill -9 in terms of no normal shutdown sequence - store.getJournalManager().close(); + store.getJournal().close(); try { store.close(); } catch (Exception expectedLotsAsJournalBorked) { diff --git a/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2982Test.java b/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2982Test.java index 861bb3a324..d5220c8dd0 100644 --- a/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2982Test.java +++ b/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2982Test.java @@ -16,7 +16,11 @@ */ package org.apache.activemq.bugs; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + import java.io.IOException; +import java.net.URISyntaxException; import java.util.concurrent.CountDownLatch; import javax.jms.BytesMessage; @@ -28,6 +32,7 @@ import javax.jms.MessageConsumer; import javax.jms.MessageListener; import javax.jms.MessageProducer; import javax.jms.Session; + import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.activemq.RedeliveryPolicy; import org.apache.activemq.broker.BrokerService; @@ -37,8 +42,6 @@ import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; public class AMQ2982Test { @@ -62,7 +65,7 @@ public class AMQ2982Test { // ensure save memory publishing, use the right lock indexLock.readLock().lock(); try { - return getJournalManager().getFileMap().size(); + return getJournal().getFileMap().size(); } finally { indexLock.readLock().unlock(); } diff --git a/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2983Test.java b/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2983Test.java index cbb7752480..f8b941a40c 100644 --- a/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2983Test.java +++ b/activemq-core/src/test/java/org/apache/activemq/bugs/AMQ2983Test.java @@ -16,6 +16,10 @@ */ package org.apache.activemq.bugs; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -30,6 +34,7 @@ import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; + import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.store.kahadb.KahaDBStore; @@ -37,7 +42,6 @@ import 
org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import static org.junit.Assert.*; public class AMQ2983Test { @@ -63,7 +67,7 @@ public class AMQ2983Test { // ensure save memory publishing, use the right lock indexLock.readLock().lock(); try { - return getJournalManager().getFileMap().size(); + return getJournal().getFileMap().size(); } finally { indexLock.readLock().unlock(); } diff --git a/activemq-core/src/test/java/org/apache/activemq/perf/SimpleDurableTopicTest.java b/activemq-core/src/test/java/org/apache/activemq/perf/SimpleDurableTopicTest.java index e7becf88fd..08a0fba48e 100644 --- a/activemq-core/src/test/java/org/apache/activemq/perf/SimpleDurableTopicTest.java +++ b/activemq-core/src/test/java/org/apache/activemq/perf/SimpleDurableTopicTest.java @@ -31,9 +31,9 @@ public class SimpleDurableTopicTest extends SimpleTopicTest { protected long initialConsumerDelay = 0; @Override protected void setUp() throws Exception { - numberOfDestinations=10; + numberOfDestinations=1; numberOfConsumers = 1; - numberofProducers = Integer.parseInt(System.getProperty("SimpleDurableTopicTest.numberofProducers", "1")); + numberofProducers = Integer.parseInt(System.getProperty("SimpleDurableTopicTest.numberofProducers", "20"), 20); sampleCount= Integer.parseInt(System.getProperty("SimpleDurableTopicTest.sampleCount", "1000"), 10); playloadSize = 1024; super.setUp(); diff --git a/activemq-core/src/test/java/org/apache/activemq/store/StorePerDestinationTest.java b/activemq-core/src/test/java/org/apache/activemq/store/StorePerDestinationTest.java new file mode 100644 index 0000000000..f34e04851c --- /dev/null +++ b/activemq-core/src/test/java/org/apache/activemq/store/StorePerDestinationTest.java @@ -0,0 +1,282 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.activemq.store; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Vector; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import javax.jms.Connection; +import javax.jms.JMSException; +import javax.jms.MessageConsumer; +import javax.jms.MessageProducer; +import javax.jms.Session; +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.activemq.broker.BrokerService; +import org.apache.activemq.command.ActiveMQQueue; +import org.apache.activemq.command.TransactionId; +import org.apache.activemq.store.kahadb.FilteredKahaDBPersistenceAdapter; +import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter; +import org.apache.activemq.store.kahadb.MultiKahaDBPersistenceAdapter; +import org.apache.activemq.store.kahadb.MultiKahaDBTransactionStore; +import org.apache.activemq.usage.SystemUsage; +import org.apache.activemq.util.Wait; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class StorePerDestinationTest { + static final Logger LOG = LoggerFactory.getLogger(StorePerDestinationTest.class); + final static int maxFileLength = 1024*100; + final static int numToSend = 10000; + final Vector exceptions = new Vector(); + BrokerService brokerService; + + protected BrokerService createBroker(PersistenceAdapter kaha) throws Exception { + + BrokerService broker = new BrokerService(); + broker.setUseJmx(false); + broker.setPersistenceAdapter(kaha); + return broker; + } + + private KahaDBPersistenceAdapter createStore(boolean delete) throws IOException { + KahaDBPersistenceAdapter kaha = new KahaDBPersistenceAdapter(); + kaha.setJournalMaxFileLength(maxFileLength); + kaha.setCleanupInterval(5000); + if (delete) { + kaha.deleteAllMessages(); + } + return kaha; + } + + @Before + public void prepareCleanBrokerWithMultiStore() throws Exception { + prepareBrokerWithMultiStore(true); + } + + public void prepareBrokerWithMultiStore(boolean deleteAllMessages) throws Exception { + + MultiKahaDBPersistenceAdapter multiKahaDBPersistenceAdapter = new MultiKahaDBPersistenceAdapter(); + if (deleteAllMessages) { + multiKahaDBPersistenceAdapter.deleteAllMessages(); + } + ArrayList adapters = new ArrayList(); + + FilteredKahaDBPersistenceAdapter theRest = new FilteredKahaDBPersistenceAdapter(); + theRest.setPersistenceAdapter(createStore(deleteAllMessages)); + // default destination when not set is a match for all + adapters.add(theRest); + + // separate store for FastQ + FilteredKahaDBPersistenceAdapter fastQStore = new FilteredKahaDBPersistenceAdapter(); + fastQStore.setPersistenceAdapter(createStore(deleteAllMessages)); + fastQStore.setDestination(new ActiveMQQueue("FastQ")); + adapters.add(fastQStore); + + multiKahaDBPersistenceAdapter.setFilteredPersistenceAdapters(adapters); + brokerService = createBroker(multiKahaDBPersistenceAdapter); + } + + @After + public void tearDown() throws Exception { + brokerService.stop(); + } + + @Test + public void testTransactedSendReceive() throws Exception { + brokerService.start(); + sendMessages(true, "SlowQ", 1, 0); + assertEquals("got one", 1, receiveMessages(true, "SlowQ", 1)); + } + + @Test + public void testTransactedSendReceiveAcrossStores() throws Exception { + brokerService.start(); + sendMessages(true, "SlowQ,FastQ", 1, 0); + 
assertEquals("got one", 2, receiveMessages(true, "SlowQ,FastQ", 2)); + } + + @Test + public void testCommitRecovery() throws Exception { + doTestRecovery(true); + } + + @Test + public void testRollbackRecovery() throws Exception { + doTestRecovery(false); + } + + public void doTestRecovery(final boolean haveOutcome) throws Exception { + final MultiKahaDBPersistenceAdapter persistenceAdapter = + (MultiKahaDBPersistenceAdapter) brokerService.getPersistenceAdapter(); + MultiKahaDBTransactionStore transactionStore = + new MultiKahaDBTransactionStore(persistenceAdapter) { + @Override + public void persistOutcome(Tx tx, TransactionId txid) throws IOException { + if (haveOutcome) { + super.persistOutcome(tx, txid); + } + try { + // IOExceptions will stop the broker + persistenceAdapter.stop(); + } catch (Exception e) { + LOG.error("ex on stop ", e); + exceptions.add(e); + } + } + }; + persistenceAdapter.setTransactionStore(transactionStore); + brokerService.start(); + + ExecutorService executorService = Executors.newCachedThreadPool(); + executorService.execute(new Runnable() { + @Override + public void run() { + try { + // commit will block + sendMessages(true, "SlowQ,FastQ", 1, 0); + } catch(Exception expected) { + LOG.info("expected", expected); + } + } + }); + + brokerService.waitUntilStopped(); + // interrupt the send thread + executorService.shutdownNow(); + + // verify auto recovery + prepareBrokerWithMultiStore(false); + brokerService.start(); + + assertEquals("expect to get the recovered message", haveOutcome ? 2 : 0, receiveMessages(false, "SlowQ,FastQ", 2)); + assertEquals("all transactions are complete", 0, brokerService.getBroker().getPreparedTransactions(null).length); + } + + @Test + public void testSlowFastDestinationsStoreUsage() throws Exception { + brokerService.start(); + ExecutorService executorService = Executors.newCachedThreadPool(); + executorService.execute(new Runnable() { + @Override + public void run() { + try { + sendMessages(false, "SlowQ", 50, 500); + } catch (Exception e) { + exceptions.add(e); + } + } + }); + + executorService.execute(new Runnable() { + @Override + public void run() { + try { + sendMessages(false, "FastQ", numToSend, 0); + } catch (Exception e) { + exceptions.add(e); + } + } + }); + + executorService.execute(new Runnable() { + @Override + public void run() { + try { + assertEquals("Got all sent", numToSend, receiveMessages(false, "FastQ", numToSend)); + } catch (Exception e) { + exceptions.add(e); + } + } + }); + + executorService.shutdown(); + assertTrue("consumers executor finished on time", executorService.awaitTermination(60, TimeUnit.SECONDS)); + final SystemUsage usage = brokerService.getSystemUsage(); + assertTrue("Store is not hogged", Wait.waitFor(new Wait.Condition() { + + @Override + public boolean isSatisified() throws Exception { + long storeUsage = usage.getStoreUsage().getUsage(); + LOG.info("Store Usage: " + storeUsage); + return storeUsage < 5 * maxFileLength; + } + })); + assertTrue("no exceptions", exceptions.isEmpty()); + } + + private void sendMessages(boolean transacted, String destName, int count, long sleep) throws Exception { + ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost"); + Connection connection = cf.createConnection(); + try { + Session session = transacted ? 
connection.createSession(true, Session.SESSION_TRANSACTED) : connection.createSession(false, Session.AUTO_ACKNOWLEDGE); + MessageProducer producer = session.createProducer(new ActiveMQQueue(destName)); + for (int i = 0; i < count; i++) { + if (sleep > 0) { + TimeUnit.MILLISECONDS.sleep(sleep); + } + producer.send(session.createTextMessage(createContent(i))); + } + if (transacted) { + session.commit(); + } + } finally { + connection.close(); + } + } + + private int receiveMessages(boolean transacted, String destName, int max) throws JMSException { + int rc = 0; + ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("vm://localhost"); + Connection connection = cf.createConnection(); + try { + connection.start(); + Session session = transacted ? connection.createSession(true, Session.SESSION_TRANSACTED) : connection.createSession(false, Session.AUTO_ACKNOWLEDGE); + MessageConsumer messageConsumer = session.createConsumer(new ActiveMQQueue(destName)); + while (rc < max && messageConsumer.receive(4000) != null) { + rc++; + + if (transacted && rc % 200 == 0) { + session.commit(); + } + } + if (transacted) { + session.commit(); + } + return rc; + } finally { + connection.close(); + } + } + + private String createContent(int i) { + StringBuilder sb = new StringBuilder(i + ":"); + while (sb.length() < 1024) { + sb.append("*"); + } + return sb.toString(); + } + +} \ No newline at end of file diff --git a/activemq-core/src/test/java/org/apache/activemq/usecases/DurableSubscriptionOfflineTest.java b/activemq-core/src/test/java/org/apache/activemq/usecases/DurableSubscriptionOfflineTest.java index b7b3bc555c..7d7aafdc2d 100644 --- a/activemq-core/src/test/java/org/apache/activemq/usecases/DurableSubscriptionOfflineTest.java +++ b/activemq-core/src/test/java/org/apache/activemq/usecases/DurableSubscriptionOfflineTest.java @@ -17,7 +17,6 @@ package org.apache.activemq.usecases; import java.util.Vector; - import javax.jms.Connection; import javax.jms.JMSException; import javax.jms.Message; @@ -73,7 +72,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp public static Test suite() { return suite(DurableSubscriptionOfflineTest.class); } - + protected void setUp() throws Exception { exceptions.clear(); topic = (ActiveMQTopic) createDestination(); @@ -89,9 +88,9 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp private void createBroker() throws Exception { createBroker(true); } - + private void createBroker(boolean deleteAllMessages) throws Exception { - broker = BrokerFactory.createBroker("broker:(vm://" + getName(true) + ")"); + broker = BrokerFactory.createBroker("broker:(vm://" + getName(true) +")"); broker.setBrokerName(getName(true)); broker.setDeleteAllMessagesOnStartup(deleteAllMessages); broker.getManagementContext().setCreateConnector(false); @@ -105,14 +104,14 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp policyMap.setDefaultEntry(policy); broker.setDestinationPolicy(policyMap); } - + setDefaultPersistenceAdapter(broker); if (broker.getPersistenceAdapter() instanceof JDBCPersistenceAdapter) { // ensure it kicks in during tests - ((JDBCPersistenceAdapter) broker.getPersistenceAdapter()).setCleanupPeriod(2 * 1000); + ((JDBCPersistenceAdapter)broker.getPersistenceAdapter()).setCleanupPeriod(2*1000); } else if (broker.getPersistenceAdapter() instanceof KahaDBPersistenceAdapter) { // have lots of journal files - ((KahaDBPersistenceAdapter) 
broker.getPersistenceAdapter()).setJournalMaxFileLength(journalMaxFileLength); + ((KahaDBPersistenceAdapter)broker.getPersistenceAdapter()).setJournalMaxFileLength(journalMaxFileLength); } broker.start(); } @@ -124,9 +123,9 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp public void initCombosForTestConsumeOnlyMatchedMessages() throws Exception { this.addCombinationValues("defaultPersistenceAdapter", - new Object[]{PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); + new Object[]{ PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); this.addCombinationValues("usePrioritySupport", - new Object[]{Boolean.TRUE, Boolean.FALSE}); + new Object[]{ Boolean.TRUE, Boolean.FALSE}); } public void testConsumeOnlyMatchedMessages() throws Exception { @@ -171,110 +170,110 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp assertEquals(sent, listener.count); } - public void testConsumeAllMatchedMessages() throws Exception { - // create durable subscription - Connection con = createConnection(); - Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); - session.close(); - con.close(); + public void testConsumeAllMatchedMessages() throws Exception { + // create durable subscription + Connection con = createConnection(); + Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); + session.close(); + con.close(); - // send messages - con = createConnection(); - session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - MessageProducer producer = session.createProducer(null); + // send messages + con = createConnection(); + session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + MessageProducer producer = session.createProducer(null); - int sent = 0; - for (int i = 0; i < 10; i++) { - sent++; - Message message = session.createMessage(); - message.setStringProperty("filter", "true"); - producer.send(topic, message); - } + int sent = 0; + for (int i = 0; i < 10; i++) { + sent++; + Message message = session.createMessage(); + message.setStringProperty("filter", "true"); + producer.send(topic, message); + } - Thread.sleep(1 * 1000); + Thread.sleep(1 * 1000); - session.close(); - con.close(); + session.close(); + con.close(); - // consume messages - con = createConnection(); - session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); - Listener listener = new Listener(); - consumer.setMessageListener(listener); + // consume messages + con = createConnection(); + session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); + Listener listener = new Listener(); + consumer.setMessageListener(listener); - Thread.sleep(3 * 1000); + Thread.sleep(3 * 1000); - session.close(); - con.close(); - - assertEquals(sent, listener.count); - } + session.close(); + con.close(); + assertEquals(sent, listener.count); + } + public void initCombosForTestVerifyAllConsumedAreAcked() throws Exception { this.addCombinationValues("defaultPersistenceAdapter", - new Object[]{PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); + new Object[]{ PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); 
this.addCombinationValues("usePrioritySupport", - new Object[]{Boolean.TRUE, Boolean.FALSE}); + new Object[]{ Boolean.TRUE, Boolean.FALSE}); } - public void testVerifyAllConsumedAreAcked() throws Exception { - // create durable subscription - Connection con = createConnection(); - Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); - session.close(); - con.close(); + public void testVerifyAllConsumedAreAcked() throws Exception { + // create durable subscription + Connection con = createConnection(); + Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); + session.close(); + con.close(); - // send messages - con = createConnection(); - session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - MessageProducer producer = session.createProducer(null); + // send messages + con = createConnection(); + session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + MessageProducer producer = session.createProducer(null); - int sent = 0; - for (int i = 0; i < 10; i++) { - sent++; - Message message = session.createMessage(); - message.setStringProperty("filter", "true"); - producer.send(topic, message); - } + int sent = 0; + for (int i = 0; i < 10; i++) { + sent++; + Message message = session.createMessage(); + message.setStringProperty("filter", "true"); + producer.send(topic, message); + } - Thread.sleep(1 * 1000); + Thread.sleep(1 * 1000); - session.close(); - con.close(); + session.close(); + con.close(); - // consume messages - con = createConnection(); - session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); - Listener listener = new Listener(); - consumer.setMessageListener(listener); + // consume messages + con = createConnection(); + session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); + Listener listener = new Listener(); + consumer.setMessageListener(listener); - Thread.sleep(3 * 1000); + Thread.sleep(3 * 1000); - session.close(); - con.close(); + session.close(); + con.close(); - LOG.info("Consumed: " + listener.count); - assertEquals(sent, listener.count); + LOG.info("Consumed: " + listener.count); + assertEquals(sent, listener.count); - // consume messages again, should not get any - con = createConnection(); - session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); - consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); - listener = new Listener(); - consumer.setMessageListener(listener); + // consume messages again, should not get any + con = createConnection(); + session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); + consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true); + listener = new Listener(); + consumer.setMessageListener(listener); - Thread.sleep(3 * 1000); + Thread.sleep(3 * 1000); - session.close(); - con.close(); + session.close(); + con.close(); - assertEquals(0, listener.count); - } + assertEquals(0, listener.count); + } public void testTwoOfflineSubscriptionCanConsume() throws Exception { // create durable subscription 1 @@ -445,9 +444,9 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp public void 
initCombosForTestOfflineSubscriptionCanConsumeAfterOnlineSubs() throws Exception { this.addCombinationValues("defaultPersistenceAdapter", - new Object[]{PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); + new Object[]{ PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); this.addCombinationValues("usePrioritySupport", - new Object[]{Boolean.TRUE, Boolean.FALSE}); + new Object[]{ Boolean.TRUE, Boolean.FALSE}); } public void testOfflineSubscriptionCanConsumeAfterOnlineSubs() throws Exception { @@ -596,15 +595,14 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp con.close(); assertEquals("offline consumer got all", sent, listener.count); - } + } public void initCombosForTestMixOfOnLineAndOfflineSubsGetAllMatched() throws Exception { this.addCombinationValues("defaultPersistenceAdapter", - new Object[]{PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); + new Object[]{ PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); } private static String filter = "$a='A1' AND (($b=true AND $c=true) OR ($d='D1' OR $d='D2'))"; - public void testMixOfOnLineAndOfflineSubsGetAllMatched() throws Exception { // create offline subs 1 Connection con = createConnection("offCli1"); @@ -752,9 +750,9 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp public void initCombosForTestOfflineSubscriptionWithSelectorAfterRestart() throws Exception { this.addCombinationValues("defaultPersistenceAdapter", - new Object[]{PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); + new Object[]{ PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); } - + public void testOfflineSubscriptionWithSelectorAfterRestart() throws Exception { // create offline subs 1 Connection con = createConnection("offCli1"); @@ -795,7 +793,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp Thread.sleep(3 * 1000); broker.stop(); createBroker(false /*deleteAllMessages*/); - + // send more messages con = createConnection(); session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); @@ -842,7 +840,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp public void initCombosForTestOfflineAfterRestart() throws Exception { this.addCombinationValues("defaultPersistenceAdapter", - new Object[]{PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); + new Object[]{ PersistenceAdapterChoice.KahaDB, PersistenceAdapterChoice.JDBC}); } public void testOfflineSubscriptionAfterRestart() throws Exception { @@ -978,7 +976,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp int filtered = 0; for (int i = 0; i < 10; i++) { - boolean filter = (i % 2 == 0); //(int) (Math.random() * 2) >= 1; + boolean filter = (i %2 == 0); //(int) (Math.random() * 2) >= 1; if (filter) filtered++; @@ -1076,7 +1074,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp sent = 0; for (int i = 0; i < 2; i++) { Message message = session.createMessage(); - message.setStringProperty("filter", i == 1 ? "true" : "false"); + message.setStringProperty("filter", i==1 ? 
"true" : "false"); producer.send(topic, message); sent++; } @@ -1084,7 +1082,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp Thread.sleep(1 * 1000); session.close(); con.close(); - + LOG.info("cli1 again, should get 1 new ones"); con = createConnection("cli1"); session = con.createSession(false, Session.AUTO_ACKNOWLEDGE); @@ -1206,7 +1204,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp MessageProducer producer = session.createProducer(null); final int toSend = 500; - final String payload = new byte[40 * 1024].toString(); + final String payload = new byte[40*1024].toString(); int sent = 0; for (int i = sent; i < toSend; i++) { Message message = session.createTextMessage(payload); @@ -1233,7 +1231,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp consumer.setMessageListener(listener); assertTrue("got all sent", Wait.waitFor(new Wait.Condition() { public boolean isSatisified() throws Exception { - LOG.info("Want: " + toSend + ", current: " + listener.count); + LOG.info("Want: " + toSend + ", current: " + listener.count); return listener.count == toSend; } })); @@ -1243,7 +1241,7 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp destroyBroker(); createBroker(false); KahaDBPersistenceAdapter pa = (KahaDBPersistenceAdapter) broker.getPersistenceAdapter(); - assertEquals("only one journal file left after restart", 1, pa.getStore().getJournalManager().getFileMap().size()); + assertEquals("only one journal file left after restart", 1, pa.getStore().getJournal().getFileMap().size()); } public static class Listener implements MessageListener { @@ -1252,23 +1250,20 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp Listener() { } - Listener(String id) { this.id = id; } - public void onMessage(Message message) { count++; if (id != null) { try { LOG.info(id + ", " + message.getJMSMessageID()); - } catch (Exception ignored) { - } + } catch (Exception ignored) {} } } } - public class FilterCheckListener extends Listener { + public class FilterCheckListener extends Listener { public void onMessage(Message message) { count++; @@ -1278,11 +1273,13 @@ public class DurableSubscriptionOfflineTest extends org.apache.activemq.TestSupp if (b != null) { boolean c = message.getBooleanProperty("$c"); assertTrue("", c); - } else { + } + else { String d = message.getStringProperty("$d"); assertTrue("", "D1".equals(d) || "D2".equals(d)); } - } catch (JMSException e) { + } + catch (JMSException e) { exceptions.add(e); } } diff --git a/kahadb/src/main/java/org/apache/kahadb/journal/Journal.java b/kahadb/src/main/java/org/apache/kahadb/journal/Journal.java index d3854dc528..2c22a8bac8 100644 --- a/kahadb/src/main/java/org/apache/kahadb/journal/Journal.java +++ b/kahadb/src/main/java/org/apache/kahadb/journal/Journal.java @@ -382,7 +382,7 @@ public class Journal { started = false; } - synchronized void cleanup() { + protected synchronized void cleanup() { if (accessorPool != null) { accessorPool.disposeUnused(); }