HBASE-1714
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@798334 13f79535-47bb-0310-9956-ffa450edef68
commit a6c6f77612
parent ace2be6b00
@@ -521,6 +521,7 @@ Release 0.20.0 - Unreleased
               (Tim Sell via Stack)
    HBASE-1683  OOME on master splitting logs; stuck, won't go down
    HBASE-1704  Better zk error when failed connect
+   HBASE-1714  Thrift server: prefix scan API
 
  OPTIMIZATIONS
    HBASE-1412  Change values for delete column and column family in KeyValue
@@ -229,6 +229,11 @@ public class Delete implements Writable {
     return this;
   }
 
+  public void deleteColumns(byte [] column) {
+    byte [][] parts = KeyValue.parseColumn(column);
+    this.deleteColumns(parts[0], parts[1]);
+  }
+
   /**
    * Delete the latest version of the specified column, given in
    * <code>family:qualifier</code> notation.
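The new Delete.deleteColumns(byte [] column) overload above only splits a family:qualifier name with KeyValue.parseColumn and delegates to the existing two-argument form. A minimal usage sketch, not part of this patch; it assumes the 0.20 client API, and the table, row key, and column name are placeholders:

    // Delete every version of the hypothetical column "info:name" in one row.
    HTable table = new HTable("mytable");
    Delete d = new Delete(Bytes.toBytes("myrow"));
    d.deleteColumns(Bytes.toBytes("info:name"));  // parsed into family "info", qualifier "name"
    table.delete(d);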
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
@@ -106,21 +107,22 @@ implements InputFormat<ImmutableBytesWritable, RowResult> {
     public void restart(byte[] firstRow) throws IOException {
       if ((endRow != null) && (endRow.length > 0)) {
         if (trrRowFilter != null) {
-          final Set<RowFilterInterface> rowFiltersSet =
-            new HashSet<RowFilterInterface>();
-          rowFiltersSet.add(new WhileMatchRowFilter(new StopRowFilter(endRow)));
-          rowFiltersSet.add(trrRowFilter);
-          Scan scan = new Scan(startRow);
+          Scan scan = new Scan(firstRow, endRow);
           scan.addColumns(trrInputColumns);
-          // scan.setFilter(new RowFilterSet(RowFilterSet.Operator.MUST_PASS_ALL,
-          //  rowFiltersSet));
+          scan.setOldFilter(trrRowFilter);
           this.scanner = this.htable.getScanner(scan);
         } else {
+          LOG.debug("TIFB.restart, firstRow: " +
+              Bytes.toStringBinary(firstRow) + ", endRow: " +
+              Bytes.toStringBinary(endRow));
           Scan scan = new Scan(firstRow, endRow);
           scan.addColumns(trrInputColumns);
           this.scanner = this.htable.getScanner(scan);
         }
       } else {
+        LOG.debug("TIFB.restart, firstRow: " +
+            Bytes.toStringBinary(firstRow) + ", no endRow");
+
         Scan scan = new Scan(firstRow);
         scan.addColumns(trrInputColumns);
         // scan.setFilter(trrRowFilter);
@@ -1,3 +1,23 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hbase.regionserver;
 
 import org.apache.hadoop.hbase.KeyValue;
@@ -69,7 +89,6 @@ public class DeleteCompare {
     if(res > 0) {
       return DeleteCode.DONE;
     } else if(res < 0){
-      System.out.println("SKIPPING ROW");
       return DeleteCode.SKIP;
     }
 
@@ -113,7 +132,6 @@ public class DeleteCompare {
       }
       return DeleteCode.DONE;
     } else {
-      System.out.println("SKIPPING TS");
       return DeleteCode.SKIP;
     }
   }
@@ -74,12 +74,11 @@ struct ColumnDescriptor {
   2:i32 maxVersions = 3,
   3:string compression = "NONE",
   4:bool inMemory = 0,
-  5:i32 maxValueLength = 2147483647,
-  6:string bloomFilterType = "NONE",
-  7:i32 bloomFilterVectorSize = 0,
-  8:i32 bloomFilterNbHashes = 0,
-  9:bool blockCacheEnabled = 0,
-  10:i32 timeToLive = -1
+  5:string bloomFilterType = "NONE",
+  6:i32 bloomFilterVectorSize = 0,
+  7:i32 bloomFilterNbHashes = 0,
+  8:bool blockCacheEnabled = 0,
+  9:i32 timeToLive = -1
 }
 
 /**
@@ -463,6 +462,20 @@ service Hbase {
                                 4:list<Text> columns)
     throws (1:IOError io)
 
+  /**
+   * Open a scanner for a given prefix.  That is all rows will have the specified
+   * prefix. No other rows will be returned.
+   *
+   * @param tableName name of table
+   * @param startAndPrefix the prefix (and thus start row) of the keys you want
+   * @param columns the columns you want returned
+   * @return scanner id to use with other scanner calls
+   */
+  ScannerID scannerOpenWithPrefix(1:Text tableName,
+                                  2:Text startAndPrefix,
+                                  3:list<Text> columns)
+    throws (1:IOError io)
+
   /**
    * Get a scanner on the current table starting at the specified row and
    * ending at the last row in the table. Return the specified columns.
@@ -35,6 +35,9 @@ import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -604,6 +607,23 @@ public class ThriftServer {
       }
     }
 
+    @Override
+    public int scannerOpenWithPrefix(byte[] tableName, byte[] startAndPrefix, List<byte[]> columns) throws IOError, TException {
+      try {
+        HTable table = getTable(tableName);
+        byte [][] columnsArray = null;
+        columnsArray = columns.toArray(new byte[0][]);
+        Scan scan = new Scan(startAndPrefix);
+        scan.addColumns(columnsArray);
+        Filter f = new WhileMatchFilter(
+          new PrefixFilter(startAndPrefix));
+        scan.setFilter(f);
+        return addScanner(table.getScanner(scan));
+      } catch (IOException e) {
+        throw new IOError(e.getMessage());
+      }
+    }
+
     public int scannerOpenTs(byte[] tableName, byte[] startRow,
         List<byte[]> columns, long timestamp) throws IOError, TException {
       try {
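The handler above builds the prefix scan by starting the scan at startAndPrefix and wrapping a PrefixFilter in a WhileMatchFilter, so the scan stops at the first row key that no longer matches the prefix instead of running to the end of the table. A minimal sketch of the same pattern against the native client API, not part of this patch; the table and prefix values are placeholders:

    // Return only rows whose key starts with "abc", stopping once the prefix no longer matches.
    byte [] prefix = Bytes.toBytes("abc");
    Scan scan = new Scan(prefix);                                    // start at the first possible match
    scan.setFilter(new WhileMatchFilter(new PrefixFilter(prefix)));  // short-circuit after the prefix range
    ResultScanner scanner = table.getScanner(scan);
    for (Result result : scanner) {
      // process result
    }
    scanner.close();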
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
+
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;

@@ -129,7 +126,6 @@ public class ColumnDescriptor implements TBase, java.io.Serializable, Cloneable
     int maxVersions,
     String compression,
     boolean inMemory,
-    int maxValueLength,
     String bloomFilterType,
     int bloomFilterVectorSize,
     int bloomFilterNbHashes,
(File diff suppressed because it is too large.)
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -1,4 +1,6 @@
-/**
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

@@ -15,11 +17,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- * Autogenerated by Thrift
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
 package org.apache.hadoop.hbase.thrift.generated;
 
 import java.util.List;
@@ -322,7 +322,7 @@ public class TestThriftServer extends HBaseClusterTestCase {
 
     // A slightly customized ColumnDescriptor (only 2 versions)
     ColumnDescriptor cDescB = new ColumnDescriptor(columnBname, 2, "NONE",
-        false, 2147483647, "NONE", 0, 0, false, -1);
+        false, "NONE", 0, 0, false, -1);
     cDescriptors.add(cDescB);
 
     return cDescriptors;