diff --git a/commonlib/1line-ferrari.jar b/commonlib/1line-ferrari.jar new file mode 100644 index 0000000..83c2c1a Binary files /dev/null and b/commonlib/1line-ferrari.jar differ diff --git a/commonlib/commons-cli-1.2.jar b/commonlib/commons-cli-1.2.jar new file mode 100644 index 0000000..ce4b9ff Binary files /dev/null and b/commonlib/commons-cli-1.2.jar differ diff --git a/commonlib/commons-jexl-2.1.1.jar b/commonlib/commons-jexl-2.1.1.jar new file mode 100644 index 0000000..d160a23 Binary files /dev/null and b/commonlib/commons-jexl-2.1.1.jar differ diff --git a/commonlib/commons-lang-2.5.jar b/commonlib/commons-lang-2.5.jar new file mode 100644 index 0000000..ae491da Binary files /dev/null and b/commonlib/commons-lang-2.5.jar differ diff --git a/commonlib/commons-logging-1.1.1.jar b/commonlib/commons-logging-1.1.1.jar new file mode 100644 index 0000000..1deef14 Binary files /dev/null and b/commonlib/commons-logging-1.1.1.jar differ diff --git a/commonlib/gson-2.2.2.jar b/commonlib/gson-2.2.2.jar new file mode 100644 index 0000000..f2108e0 Binary files /dev/null and b/commonlib/gson-2.2.2.jar differ diff --git a/commonlib/log4j-1.2.16.jar b/commonlib/log4j-1.2.16.jar new file mode 100644 index 0000000..5429a90 Binary files /dev/null and b/commonlib/log4j-1.2.16.jar differ diff --git a/commonlib/lucene-analyzers-3.6.2.jar b/commonlib/lucene-analyzers-3.6.2.jar new file mode 100644 index 0000000..6896ec2 Binary files /dev/null and b/commonlib/lucene-analyzers-3.6.2.jar differ diff --git a/commonlib/lucene-core-3.6.2.jar b/commonlib/lucene-core-3.6.2.jar new file mode 100644 index 0000000..bdc96ab Binary files /dev/null and b/commonlib/lucene-core-3.6.2.jar differ diff --git a/commonlib/lucene-queryparser-3.6.2.jar b/commonlib/lucene-queryparser-3.6.2.jar new file mode 100644 index 0000000..d3ed193 Binary files /dev/null and b/commonlib/lucene-queryparser-3.6.2.jar differ diff --git a/hadooplib_12/hadoop-client-1.2.0.1.3.0.0-107.jar b/hadooplib_12/hadoop-client-1.2.0.1.3.0.0-107.jar new file mode 100644 index 0000000..2e26358 Binary files /dev/null and b/hadooplib_12/hadoop-client-1.2.0.1.3.0.0-107.jar differ diff --git a/hadooplib_12/hadoop-core-1.2.0.1.3.0.0-107.jar b/hadooplib_12/hadoop-core-1.2.0.1.3.0.0-107.jar new file mode 100644 index 0000000..54c361f Binary files /dev/null and b/hadooplib_12/hadoop-core-1.2.0.1.3.0.0-107.jar differ diff --git a/hadooplib_12/hbase-0.94.6.1.3.0.0-107-security.jar b/hadooplib_12/hbase-0.94.6.1.3.0.0-107-security.jar new file mode 100644 index 0000000..f143f5d Binary files /dev/null and b/hadooplib_12/hbase-0.94.6.1.3.0.0-107-security.jar differ diff --git a/hadooplib_12/zookeeper-3.4.5.1.3.3.0-58.jar b/hadooplib_12/zookeeper-3.4.5.1.3.3.0-58.jar new file mode 100644 index 0000000..0b194e0 Binary files /dev/null and b/hadooplib_12/zookeeper-3.4.5.1.3.3.0-58.jar differ diff --git a/hadooplib_94/hadoop-annotations-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-annotations-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..7b81e77 Binary files /dev/null and b/hadooplib_94/hadoop-annotations-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-auth-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-auth-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..b21f940 Binary files /dev/null and b/hadooplib_94/hadoop-auth-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-client-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-client-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..74b5b13 Binary files /dev/null and 
b/hadooplib_94/hadoop-client-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-common-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-common-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..4985a26 Binary files /dev/null and b/hadooplib_94/hadoop-common-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-hdfs-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-hdfs-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..ee49e44 Binary files /dev/null and b/hadooplib_94/hadoop-hdfs-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-mapreduce-client-app-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-mapreduce-client-app-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..60af9a0 Binary files /dev/null and b/hadooplib_94/hadoop-mapreduce-client-app-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-mapreduce-client-common-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-mapreduce-client-common-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..12a909f Binary files /dev/null and b/hadooplib_94/hadoop-mapreduce-client-common-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-mapreduce-client-core-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-mapreduce-client-core-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..4ed0608 Binary files /dev/null and b/hadooplib_94/hadoop-mapreduce-client-core-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-yarn-client-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-yarn-client-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..1249420 Binary files /dev/null and b/hadooplib_94/hadoop-yarn-client-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hadoop-yarn-common-2.0.0-cdh4.2.0.jar b/hadooplib_94/hadoop-yarn-common-2.0.0-cdh4.2.0.jar new file mode 100644 index 0000000..b27c897 Binary files /dev/null and b/hadooplib_94/hadoop-yarn-common-2.0.0-cdh4.2.0.jar differ diff --git a/hadooplib_94/hbase-0.94.2-cdh4.2.0-security.jar b/hadooplib_94/hbase-0.94.2-cdh4.2.0-security.jar new file mode 100644 index 0000000..85ff3a7 Binary files /dev/null and b/hadooplib_94/hbase-0.94.2-cdh4.2.0-security.jar differ diff --git a/hadooplib_94/zookeeper-3.4.5-cdh4.2.0.jar b/hadooplib_94/zookeeper-3.4.5-cdh4.2.0.jar new file mode 100644 index 0000000..80fe1a1 Binary files /dev/null and b/hadooplib_94/zookeeper-3.4.5-cdh4.2.0.jar differ diff --git a/hadooplib_96/hadoop-client-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-client-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..0d5a956 Binary files /dev/null and b/hadooplib_96/hadoop-client-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hadoop-common-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-common-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..606acf4 Binary files /dev/null and b/hadooplib_96/hadoop-common-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hadoop-hdfs-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-hdfs-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..26b8bee Binary files /dev/null and b/hadooplib_96/hadoop-hdfs-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hadoop-mapreduce-client-common-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-mapreduce-client-common-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..0927c70 Binary files /dev/null and b/hadooplib_96/hadoop-mapreduce-client-common-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hadoop-mapreduce-client-core-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-mapreduce-client-core-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..04f331d Binary files /dev/null and 
b/hadooplib_96/hadoop-mapreduce-client-core-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hadoop-yarn-client-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-yarn-client-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..9d995a3 Binary files /dev/null and b/hadooplib_96/hadoop-yarn-client-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hadoop-yarn-common-2.2.0.2.0.6.0-76.jar b/hadooplib_96/hadoop-yarn-common-2.2.0.2.0.6.0-76.jar new file mode 100644 index 0000000..b0386cb Binary files /dev/null and b/hadooplib_96/hadoop-yarn-common-2.2.0.2.0.6.0-76.jar differ diff --git a/hadooplib_96/hbase-client-0.96.0.2.0.6.0-76-hadoop2.jar b/hadooplib_96/hbase-client-0.96.0.2.0.6.0-76-hadoop2.jar new file mode 100644 index 0000000..dc86155 Binary files /dev/null and b/hadooplib_96/hbase-client-0.96.0.2.0.6.0-76-hadoop2.jar differ diff --git a/hadooplib_96/hbase-common-0.96.0.2.0.6.0-76-hadoop2.jar b/hadooplib_96/hbase-common-0.96.0.2.0.6.0-76-hadoop2.jar new file mode 100644 index 0000000..6d24005 Binary files /dev/null and b/hadooplib_96/hbase-common-0.96.0.2.0.6.0-76-hadoop2.jar differ diff --git a/hadooplib_96/hbase-it-0.96.0.2.0.6.0-76-hadoop2.jar b/hadooplib_96/hbase-it-0.96.0.2.0.6.0-76-hadoop2.jar new file mode 100644 index 0000000..437df94 Binary files /dev/null and b/hadooplib_96/hbase-it-0.96.0.2.0.6.0-76-hadoop2.jar differ diff --git a/hadooplib_96/hbase-protocol-0.96.0.2.0.6.0-76-hadoop2.jar b/hadooplib_96/hbase-protocol-0.96.0.2.0.6.0-76-hadoop2.jar new file mode 100644 index 0000000..9ef49eb Binary files /dev/null and b/hadooplib_96/hbase-protocol-0.96.0.2.0.6.0-76-hadoop2.jar differ diff --git a/hadooplib_96/hbase-server-0.96.0.2.0.6.0-76-hadoop2.jar b/hadooplib_96/hbase-server-0.96.0.2.0.6.0-76-hadoop2.jar new file mode 100644 index 0000000..82fbf9c Binary files /dev/null and b/hadooplib_96/hbase-server-0.96.0.2.0.6.0-76-hadoop2.jar differ diff --git a/hadooplib_96/protobuf-java-2.5.0.jar b/hadooplib_96/protobuf-java-2.5.0.jar new file mode 100644 index 0000000..4c4e686 Binary files /dev/null and b/hadooplib_96/protobuf-java-2.5.0.jar differ diff --git a/hadooplib_96/zookeeper-3.4.5.2.0.6.0-76.jar b/hadooplib_96/zookeeper-3.4.5.2.0.6.0-76.jar new file mode 100644 index 0000000..d1802fa Binary files /dev/null and b/hadooplib_96/zookeeper-3.4.5.2.0.6.0-76.jar differ diff --git a/nightly/src-hsearch-core_1.0.3.jar b/nightly/src-hsearch-core_1.0.3.jar new file mode 100644 index 0000000..b4769b1 Binary files /dev/null and b/nightly/src-hsearch-core_1.0.3.jar differ diff --git a/src/compatibility/hadooplib_12/hbase/HDML.java b/src/compatibility/hadooplib_12/hbase/HDML.java new file mode 100644 index 0000000..f7cac1f --- /dev/null +++ b/src/compatibility/hadooplib_12/hbase/HDML.java @@ -0,0 +1,224 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package com.bizosys.hsearch.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+
+import com.bizosys.hsearch.util.HSearchLog;
+
+public class HDML {
+    private static final boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled();
+    private static final boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled();
+
+    /**
+     * Creates the table if it does not already exist.
+     * @param tableName Table name
+     * @param cols Column family descriptors
+     * @return true if the table was created, false if it already existed
+     * @throws HBaseException
+     */
+    public static final boolean create(final String tableName, final List<HColumnDescriptor> cols) throws HBaseException {
+
+        if (DEBUG_ENABLED)
+            HSearchLog.l.debug("Creating HBase Table - " + tableName);
+
+        try {
+            if (DEBUG_ENABLED)
+                HSearchLog.l.debug("Checking for table existence : " + tableName);
+            HBaseAdmin admin = HBaseFacade.getInstance().getAdmin();
+            if ( admin.tableExists(tableName)) {
+
+                if (INFO_ENABLED)
+                    HSearchLog.l.info("Ignoring creation. Table already exists - " + tableName);
+                return false;
+            } else {
+                HTableDescriptor tableMeta = new HTableDescriptor(tableName);
+                for (HColumnDescriptor col : cols) tableMeta.addFamily(col);
+                admin.createTable(tableMeta);
+                if (INFO_ENABLED ) HSearchLog.l.info("Table Created - " + tableName);
+                return true;
+            }
+
+        } catch (TableExistsException ex) {
+            HSearchLog.l.warn("Table already exists - " + tableName, ex);
+            throw new HBaseException("Failed Table Creation : " + tableName, ex);
+        } catch (MasterNotRunningException mnre) {
+            throw new HBaseException("Failed Table Creation : " + tableName, mnre);
+        } catch (IOException ioex) {
+            throw new HBaseException("Failed Table Creation : " + tableName, ioex);
+        }
+    }
+
+
+    /**
+     * Drop a table. This may take a significant amount of time, as the
+     * table is first disabled and then deleted.
+     * @param tableName Table name
+     * @throws HBaseException
+     */
+    public final static void drop(final String tableName) throws HBaseException {
+
+        if (DEBUG_ENABLED)
+            HSearchLog.l.debug("Checking for table existence");
+
+        try {
+            HBaseAdmin admin = HBaseFacade.getInstance().getAdmin();
+            byte[] bytesTableName = tableName.getBytes();
+            if ( admin.tableExists(bytesTableName)) {
+                if ( ! admin.isTableDisabled(bytesTableName) )
+                    admin.disableTable(bytesTableName);
+                if ( admin.isTableDisabled(bytesTableName) )
+                    admin.deleteTable(bytesTableName);
+                if (INFO_ENABLED ) HSearchLog.l.info(tableName + " table is deleted.");
+            } else {
+                HSearchLog.l.warn( tableName + " table is not found during drop operation.");
+                throw new HBaseException("Table does not exist");
+            }
+        } catch (IOException ioex) {
+            throw new HBaseException("Table Drop Failed : " + tableName, ioex);
+        }
+    }
+
+    public final static void truncate(final String tableName, final NV kv) throws IOException {
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            scan = scan.addColumn(kv.family, kv.name);
+            scanner = table.getScanner(scan);
+
+            for (Result r: scanner) {
+                if ( null == r) continue;
+                if ( r.isEmpty()) continue;
+                Delete delete = new Delete(r.getRow());
+                delete = delete.deleteColumns(kv.family, kv.name);
+                table.delete(delete);
+            }
+        } finally {
+            if ( null != scanner) scanner.close();
+            if ( null != table ) {
+                table.flushCommits();
+                facade.putTable(table);
+            }
+        }
+    }
+
+    public static final void truncateBatch(final String tableName, final String keyPrefix) throws IOException {
+
+        if (INFO_ENABLED) HSearchLog.l.info(
+            "Deleting from " + tableName + " with prefix " + keyPrefix);
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+        List<Delete> deletes = new ArrayList<Delete>(256);
+
+        int batchSize = 0;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            if ( null != keyPrefix) {
+                Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL,
+                    new BinaryPrefixComparator(keyPrefix.getBytes()) );
+                scan = scan.setFilter(rowFilter);
+            }
+            scanner = table.getScanner(scan);
+
+            for (Result r: scanner) {
+                if ( null == r) continue;
+                if ( r.isEmpty()) continue;
+                Delete delete = new Delete(r.getRow());
+                deletes.add(delete);
+
+                batchSize++;
+                if ( batchSize > 1000) {
+                    if ( deletes.size() > 0 ) {
+                        table.delete(deletes);
+                        deletes.clear();
+                    }
+                    batchSize = 0;
+                }
+            }
+            if ( deletes.size() > 0 ) table.delete(deletes);
+
+        } finally {
+            if ( null != scanner) scanner.close();
+            if ( null != table ) {
+                table.flushCommits();
+                facade.putTable(table);
+            }
+            if ( null != deletes) deletes.clear();
+        }
+    }
+
+    public final static void truncateBatch(final String tableName, final List<byte[]> rows) throws IOException {
+
+        if ( null == rows) return;
+        if ( rows.size() == 0) return;
+
+        HBaseFacade facade = null;
+        HTableWrapper table = null;
+        List<Delete> deletes = new ArrayList<Delete>(rows.size());
+
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            for (byte[] row : rows) {
+                Delete delete = new Delete(row);
+                deletes.add(delete);
+            }
+            table.delete(deletes);
+
+        } finally {
+            if ( null != table ) {
+                table.flushCommits();
+                facade.putTable(table);
+            }
+            if ( null != deletes) deletes.clear();
+        }
+    }
+}
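The DDL helpers above are driven through HBaseFacade. Below is a minimal caller sketch, not part of this patch; the table name and column family are made up and error handling is elided:

    // Hypothetical usage of HDML.create/HDML.drop with the 0.94-era client API.
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import com.bizosys.hsearch.hbase.HDML;

    public class HdmlUsageSketch {
        public static void main(String[] args) throws Exception {
            List<HColumnDescriptor> cols = new ArrayList<HColumnDescriptor>();
            cols.add(new HColumnDescriptor("c".getBytes()));    // one column family, "c"
            boolean created = HDML.create("demo-table", cols);  // false if it already existed
            if (created) HDML.drop("demo-table");                // disables first, then deletes
        }
    }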
diff --git a/src/compatibility/hadooplib_12/hbase/HReader.java b/src/compatibility/hadooplib_12/hbase/HReader.java
new file mode 100644
index 0000000..a018359
--- /dev/null
+++ b/src/compatibility/hadooplib_12/hbase/HReader.java
@@ -0,0 +1,464 @@
+/*
+* Copyright 2010 Bizosys Technologies Limited
+*
+* Licensed to the Bizosys Technologies Limited (Bizosys) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The Bizosys licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package com.bizosys.hsearch.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.RowLock;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.RegexStringComparator;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import com.bizosys.hsearch.util.HSearchLog;
+
+public class HReader {
+
+    public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled();
+
+    /**
+     * The scalar record carries the amount by which to increment.
+     * @param tableName Table name
+     * @param scalar The scalar record holding the counter cell
+     * @param amount Amount to increment by
+     * @throws HBaseException
+     */
+    public static final long idGenerationByAutoIncr(final String tableName,
+        final RecordScalar scalar, final long amount ) throws HBaseException {
+
+        HBaseFacade facade = null;
+        HTableWrapper table = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            long incrementedValue = table.incrementColumnValue(
+                scalar.pk, scalar.kv.family, scalar.kv.name, amount);
+            return incrementedValue;
+        } catch (Exception ex) {
+            throw new HBaseException("Error in idGenerationByAutoIncr :" + scalar.toString(), ex);
+        } finally {
+            if ( null != facade && null != table) facade.putTable(table);
+        }
+    }
+
+    public final static boolean exists (final String tableName, final byte[] pk) throws HBaseException {
+        HBaseFacade facade = null;
+        HTableWrapper table = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            Get getter = new Get(pk);
+            return table.exists(getter);
+        } catch (Exception ex) {
+            throw new HBaseException("Error in existence check :" + new String(pk), ex);
+        } finally {
+            if ( null != facade && null != table) facade.putTable(table);
+        }
+    }
+
+    public static List<String> getMatchingRowIds(String tableName, String rowIdPattern) throws IOException {
+
+        FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
+        RegexStringComparator regex = new RegexStringComparator(rowIdPattern);
+        RowFilter aFilter = new RowFilter(CompareOp.EQUAL, regex);
+        filters.addFilter(aFilter);
+        filters.addFilter(new KeyOnlyFilter());
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+
+        List<String> rowIds = new ArrayList<String>();
+
+        try {
+
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            scan.setFilter(filters);
+            scanner = table.getScanner(scan);
+
+            for (Result r : scanner) {
+                if (null == r) continue;
+                byte[] rowB = r.getRow();
+                if (null == rowB) continue;
+                if (rowB.length == 0) continue;
+                String row = new String(rowB);
+                rowIds.add(row);
+            }
+            return rowIds;
+
+        } catch (IOException ex) {
+            HSearchLog.l.fatal("Error while scanning table :" + tableName + " for regex, " + rowIdPattern , ex);
+            throw ex;
+        } finally {
+            if (null != scanner) scanner.close();
+            if ( null != facade && null != table) facade.putTable(table);
+        }
+    }
+
+    public static final List<NVBytes> getCompleteRow (final String tableName,
+        final byte[] pk) throws HBaseException{
+
+        return getCompleteRow (tableName, pk, null, null);
+    }
+
+    public static final List<NVBytes> getCompleteRow (final String tableName, final byte[] pk,
+        final Filter filter) throws HBaseException {
+
+        return getCompleteRow (tableName, pk, filter, null);
+    }
+
+    public final static List<NVBytes> getCompleteRow (final String tableName, final byte[] pk,
+        final Filter filter, final RowLock lock) throws HBaseException {
+
+        HBaseFacade facade = null;
+        HTableWrapper table = null;
+        Result r = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            Get getter = ( null == lock) ? new Get(pk) : new Get(pk,lock);
+            if (null != filter) getter.setFilter(filter);
+            if ( table.exists(getter) ) {
+                r = table.get(getter);
+                if ( null == r ) return null;
+                List<NVBytes> nvs = new ArrayList<NVBytes>(r.list().size());
+                for (KeyValue kv : r.list()) {
+                    NVBytes nv = new NVBytes(kv.getFamily(),kv.getQualifier(), kv.getValue());
+                    nvs.add(nv);
+                }
+                return nvs;
+            }
+            return null;
+        } catch (Exception ex) {
+            throw new HBaseException("Error in existence check :" + new String(pk), ex);
+        } finally {
+            if ( null != facade && null != table) facade.putTable(table);
+        }
+    }
+
+    public static final void getScalar (final String tableName, final RecordScalar scalar) throws HBaseException {
+        HBaseFacade facade = null;
+        HTableWrapper table = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            Get getter = new Get(scalar.pk);
+            Result result = table.get(getter);
+            if ( null == result) return;
+            byte[] val = result.getValue(scalar.kv.family, scalar.kv.name);
+            if ( null != val ) scalar.kv.data = val;
+        } catch (Exception ex) {
+            throw new HBaseException("Error in getScalar :" + scalar.toString(), ex);
+        } finally {
+            if ( null != facade && null != table) facade.putTable(table);
+        }
+    }
+
+    public static final byte[] getScalar (final String tableName,
+        final byte[] family, final byte[] col, final byte[] pk) throws HBaseException {
+
+        return getScalar(tableName,family,col,pk,null);
+    }
+
+    public static final byte[] getScalar (final String tableName,
+        final byte[] family, final byte[] col, final byte[] pk, final Filter filter) throws HBaseException {
+
+        if ( null == family || null == col || null == pk ) return null;
+
+        HBaseFacade facade = null;
+        HTableWrapper table = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            Get getter = new Get(pk);
+            if ( null != filter) getter = getter.setFilter(filter);
+            Result result = table.get(getter);
+            if ( null == result) return null;
+            return result.getValue(family, col);
+        } catch (Exception ex) {
+            StringBuilder sb = new StringBuilder();
+            sb.append("Input during exception = Table : [").append(tableName);
+            sb.append("] , Family : [").append(new String(family));
+            sb.append("] , Column : [").append(new String(col));
+            sb.append("] , Key : [").append(new String(pk));
+            sb.append(']');
+            throw new HBaseException(sb.toString(), ex);
+        } finally {
+            if ( null != facade && null != table) facade.putTable(table);
+        }
+    }
+
+    public static final void getAllValues(final String tableName, final byte[] family,
+        final byte[] col, final String keyPrefix, final IScanCallBack callback ) throws IOException {
+
+        Filter rowFilter = null;
+        if ( null != keyPrefix) {
+            rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL,
+                new BinaryPrefixComparator(keyPrefix.getBytes()));
+        }
+        getAllValues(tableName, family, col, rowFilter, callback);
+    }
+
+    public static final void getAllValues(final String tableName, final byte[] family,
+        final byte[] col, final Filter filter, final IScanCallBack callback ) throws IOException {
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+        try {
+
+            if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > getAllValues.");
+
+            facade = HBaseFacade.getInstance();
+
+            if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Table Facade is obtained.");
+            table = facade.getTable(tableName);
+            if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Table is obtained.");
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            scan = scan.addColumn(family, col);
+
+            if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Scanner is created.");
+
+            if ( null != filter) scan = scan.setFilter(filter);
+
+            scanner = table.getScanner(scan);
+
+            long timeS = System.currentTimeMillis();
+
+            ColumnFamName aColFamilyName = new ColumnFamName(family, col);
+            for (Result r: scanner) {
+                if ( null == r) continue;
+                if ( r.isEmpty()) continue;
+
+                byte[] storedBytes = r.getValue(family, col);
+                if ( null == storedBytes) continue;
+                callback.process(r.getRow(), aColFamilyName, storedBytes);
+            }
+
+            if ( DEBUG_ENABLED) {
+                long timeE = System.currentTimeMillis();
+                HSearchLog.l.debug("HReader.getAllValues (" + tableName + ") execution time = "
+                    + (timeE - timeS) );
+            }
+
+        } catch ( IOException ex) {
+            throw ex;
+        } finally {
+            if ( null != scanner) scanner.close();
+            if ( null != table ) facade.putTable(table);
+        }
+    }
+
+    public static final void getAllValues(final String tableName, final List<ColumnFamName> columns,
+        final String keyPrefix, final IScanCallBack callback ) throws IOException {
+
+        Filter rowFilter = null;
+        if ( null != keyPrefix) {
+            rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL,
+                new BinaryPrefixComparator(keyPrefix.getBytes()));
+        }
+        getAllValues(tableName, columns, rowFilter, callback);
+    }
+
+    public final static void getAllValues(final String tableName, final List<ColumnFamName> columns,
+        final Filter filter, final IScanCallBack callback ) throws IOException {
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            for (ColumnFamName aColFamilyName : columns) {
+                scan = scan.addColumn(aColFamilyName.family, aColFamilyName.name);
+            }
+
+            if ( null != filter) scan = scan.setFilter(filter);
+
+            scanner = table.getScanner(scan);
+
+            long timeS = System.currentTimeMillis();
+
+            for (Result r: scanner) {
+                if ( null == r) continue;
+                if ( r.isEmpty()) continue;
+
+                for (ColumnFamName aColFamilyName : columns) {
+                    byte[] storedBytes = r.getValue(aColFamilyName.family, aColFamilyName.name);
+                    if ( null == storedBytes) continue;
+                    callback.process(r.getRow(), aColFamilyName, storedBytes);
+                }
+            }
+
+            if ( DEBUG_ENABLED) {
+                long timeE = System.currentTimeMillis();
+                HSearchLog.l.debug("HReader.getAllValues (" + tableName + ") execution time = "
+                    + (timeE - timeS) );
+            }
+
+        } catch ( IOException ex) {
+            throw ex;
+        } finally {
+            if ( null != scanner) scanner.close();
+            if ( null != table ) facade.putTable(table);
+        }
+    }
+
+    /**
+     * Get all the keys of the table matching the key prefix.
+     * @param tableName Table name
+     * @param kv Key-Value column to scan
+     * @param keyPrefix Row-key prefix to match (null scans all rows)
+     * @param callback Receives each matching row key
+     * @throws HBaseException
+     */
+    public static final void getAllKeys(final String tableName, final NV kv,
+        final String keyPrefix, final IScanCallBack callback) throws HBaseException {
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            scan = scan.addColumn(kv.family, kv.name);
+
+            if ( null != keyPrefix) {
+                Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL,
+                    new BinaryPrefixComparator(keyPrefix.getBytes()));
+                scan = scan.setFilter(rowFilter);
+            }
+
+            scanner = table.getScanner(scan);
+            ColumnFamName familyName = new ColumnFamName(kv.family, kv.name);
+            for (Result r: scanner) {
+                if ( null == r) continue;
+                if ( r.isEmpty()) continue;
+                callback.process(r.getRow(), familyName, null);
+            }
+        } catch ( IOException ex) {
+            throw new HBaseException(ex);
+        } finally {
+            if ( null != scanner) scanner.close();
+            if ( null != table ) facade.putTable(table);
+        }
+    }
+
+    /**
+     * Get one page of keys from the table.
+     * @param tableName Table name
+     * @param kv Key-Value column to scan
+     * @param startKey Start row primary key
+     * @param keyPrefix Row-key prefix to exclude (null keeps all rows)
+     * @param pageSize Page size
+     * @return Record keys
+     * @throws HBaseException
+     */
+    public static final List<byte[]> getKeysForAPage(final String tableName, final NV kv,
+        final byte[] startKey, final String keyPrefix, final int pageSize) throws HBaseException {
+
+        HBaseFacade facade = null;
+        ResultScanner scanner = null;
+        HTableWrapper table = null;
+        List<byte[]> keys = ( pageSize > 0 ) ?
+            new ArrayList<byte[]>(pageSize): new ArrayList<byte[]>(1024);
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            Scan scan = new Scan();
+            scan.setCacheBlocks(true);
+            scan.setCaching(500);
+            scan.setMaxVersions(1);
+            scan = scan.addColumn(kv.family, kv.name);
+
+            // Combine the prefix and page filters; consecutive setFilter calls
+            // would otherwise overwrite each other.
+            FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
+            if( null != keyPrefix) {
+                filters.addFilter(new RowFilter(CompareFilter.CompareOp.NOT_EQUAL,
+                    new BinaryPrefixComparator(Bytes.toBytes(keyPrefix))));
+            }
+            if ( pageSize > 0) filters.addFilter(new PageFilter(pageSize));
+            if ( filters.getFilters().size() > 0 ) scan = scan.setFilter(filters);
+
+            if ( null != startKey) scan = scan.setStartRow(startKey);
+
+            scanner = table.getScanner(scan);
+
+            int counter = 0;
+            for (Result r: scanner) {
+                if ( null == r) continue;
+                if ( r.isEmpty()) continue;
+
+                if ( pageSize > 0 && ++counter > pageSize) break;
+                keys.add(r.getRow());
+            }
+            return keys;
+        } catch ( IOException ex) {
+            throw new HBaseException(ex);
+        } finally {
+            if ( null != scanner) scanner.close();
+            if ( null != table ) facade.putTable(table);
+        }
+    }
+}
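HReader streams results to the caller through the IScanCallBack hook rather than materializing them. A read sketch follows, assuming process(byte[], ColumnFamName, byte[]) is the interface's single method, as the call sites above suggest; the table and column names are invented:

    // Hypothetical: print every value of column c:v by streaming the scan.
    import java.io.IOException;
    import com.bizosys.hsearch.hbase.ColumnFamName;
    import com.bizosys.hsearch.hbase.HReader;
    import com.bizosys.hsearch.hbase.IScanCallBack;

    public class ScanSketch {
        public static void main(String[] args) throws IOException {
            IScanCallBack printer = new IScanCallBack() {
                @Override
                public void process(byte[] pk, ColumnFamName fam, byte[] storedBytes) {
                    System.out.println(new String(pk) + " -> " + storedBytes.length + " bytes");
                }
            };
            // A null keyPrefix scans the whole table; the cast picks the String overload.
            HReader.getAllValues("demo-table", "c".getBytes(), "v".getBytes(), (String) null, printer);
        }
    }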
diff --git a/src/compatibility/hadooplib_12/hbase/HTableWrapper.java b/src/compatibility/hadooplib_12/hbase/HTableWrapper.java
new file mode 100644
index 0000000..20b4904
--- /dev/null
+++ b/src/compatibility/hadooplib_12/hbase/HTableWrapper.java
@@ -0,0 +1,208 @@
+/*
+* Copyright 2010 Bizosys Technologies Limited
+*
+* Licensed to the Bizosys Technologies Limited (Bizosys) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The Bizosys licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package com.bizosys.hsearch.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.RetriesExhaustedException;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.RowLock;
+import org.apache.hadoop.hbase.client.Scan;
+
+import com.bizosys.hsearch.util.HSearchLog;
+
+/**
+ * Wraps an HBase table handle.
+ * @author karan
+ * @see org.apache.hadoop.hbase.client.HTableInterface
+ */
+public final class HTableWrapper {
+
+    private static final boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled();
+
+    /**
+     * The wrapped table interface
+     */
+    public HTableInterface tableInterface = null;
+    public HTable innerHtable = null;
+
+    /**
+     * Name of the HBase table
+     */
+    String tableName = null;
+
+    /**
+     * Constructor
+     * @param tableName The table name
+     * @param tableInterface The wrapped table interface
+     */
+    public HTableWrapper(String tableName, HTableInterface tableInterface) {
+        this.tableInterface = tableInterface;
+        this.tableName = tableName;
+    }
+
+    /**
+     * Get the table name in bytes
+     * @return table name as byte array
+     */
+    public byte[] getTableName() {
+        return tableInterface.getTableName();
+    }
+
+    /**
+     * Get the table descriptor
+     * @return table Descriptor
+     * @throws IOException
+     */
+    public HTableDescriptor getTableDescriptor() throws IOException {
+        return tableInterface.getTableDescriptor();
+    }
+
+    /**
+     * Test for the existence of columns in the table, as specified in the Get.
+     * @param get object
+     * @return True on existence
+     * @throws IOException
+     */
+    public boolean exists(Get get) throws IOException {
+        return tableInterface.exists(get);
+    }
+
+    public Result get(Get get) throws IOException{
+        return tableInterface.get(get);
+    }
+
+    public ResultScanner getScanner(Scan scan) throws IOException {
+        return tableInterface.getScanner(scan);
+    }
+
+    public ResultScanner getScanner(byte[] family) throws IOException {
+        return tableInterface.getScanner(family);
+    }
+
+    public ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException {
+        return tableInterface.getScanner(family, qualifier);
+    }
+
+    public void put(Put put) throws IOException {
+        try {
+            tableInterface.put(put);
+        } catch ( RetriesExhaustedException ex) {
+            HBaseFacade.getInstance().recycleTable(this);
+            tableInterface.put(put);
+        }
+    }
+
+    public void put(List<Put> puts) throws IOException {
+        try {
+            tableInterface.put(puts);
+        } catch ( RetriesExhaustedException ex) {
+            HBaseFacade.getInstance().recycleTable(this);
+            tableInterface.put(puts);
+        }
+    }
+
+    public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
+        byte[] value, Put put) throws IOException {
+
+        return tableInterface.checkAndPut(row, family, qualifier,value, put );
+    }
+
+    public void delete(Delete delete) throws IOException {
+        tableInterface.delete(delete );
+    }
+
+    public void delete(List<Delete> deletes) throws IOException {
+        if ( null == deletes) return;
+        if ( INFO_ENABLED) HSearchLog.l.info("HTableWrapper: Batch Deleting: " + deletes.size());
+        tableInterface.delete(deletes);
+    }
+
+    public void flushCommits() throws IOException {
+        tableInterface.flushCommits();
+    }
+
+    public void close() throws IOException {
+        tableInterface.close();
+        if ( null != innerHtable) {
+            innerHtable.close();
+            innerHtable = null;
+        }
+    }
+
+    public RowLock lockRow(byte[] row) throws IOException {
+        return tableInterface.lockRow(row);
+    }
+
+    public void unlockRow(RowLock rl) throws IOException {
+        if ( null == rl) return;
+        tableInterface.unlockRow(rl);
+    }
+
+    public long incrementColumnValue(byte[] row,
+        byte[] family, byte[] qualifier, long amount) throws IOException {
+
+        return tableInterface.incrementColumnValue(row, family, qualifier, amount, true);
+    }
+
+    public Object[] batch(List<Row> actions) throws IOException, InterruptedException {
+        return tableInterface.batch(actions);
+    }
+
+    public HRegionLocation getRegionLocation(byte[] row) throws IOException {
+
+        if ( null == innerHtable ) {
+            synchronized (this.tableName) {
+                if ( null == innerHtable) innerHtable =
+                    new HTable(tableInterface.getConfiguration(), this.tableName);
+            }
+        }
+        return innerHtable.getRegionLocation(row);
+    }
+
+    public List<HRegionLocation> getRegionLocation(List<byte[]> rows) throws IOException {
+        if ( null == rows) return null;
+        List<HRegionLocation> regions = new ArrayList<HRegionLocation>();
+
+        if ( null == innerHtable ) {
+            synchronized (this.tableName) {
+                if ( null == innerHtable) innerHtable =
+                    new HTable(tableInterface.getConfiguration(), this.tableName);
+            }
+        }
+
+        for (byte[] row : rows) {
+            regions.add(innerHtable.getRegionLocation(row));
+        }
+        return regions;
+    }
+}
\ No newline at end of file
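HTableWrapper is handed out and reclaimed by HBaseFacade, and put() transparently recycles the underlying connection once on RetriesExhaustedException. A borrow/use/return sketch, with illustrative names not taken from this patch:

    // Hypothetical write through the pooled wrapper; 0.94-style Put.add().
    import org.apache.hadoop.hbase.client.Put;
    import com.bizosys.hsearch.hbase.HBaseFacade;
    import com.bizosys.hsearch.hbase.HTableWrapper;

    public class WrapperSketch {
        public static void main(String[] args) throws Exception {
            HBaseFacade facade = HBaseFacade.getInstance();
            HTableWrapper table = facade.getTable("demo-table");
            try {
                Put put = new Put("row-1".getBytes());
                put.add("c".getBytes(), "v".getBytes(), "hello".getBytes());
                table.put(put);          // retried once after a recycle on RetriesExhaustedException
                table.flushCommits();
            } finally {
                facade.putTable(table);  // always return the handle to the pool
            }
        }
    }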
diff --git a/src/compatibility/hadooplib_12/hbase/HWriter.java b/src/compatibility/hadooplib_12/hbase/HWriter.java
new file mode 100644
index 0000000..cabed43
--- /dev/null
+++ b/src/compatibility/hadooplib_12/hbase/HWriter.java
@@ -0,0 +1,668 @@
+/*
+* Copyright 2010 Bizosys Technologies Limited
+*
+* Licensed to the Bizosys Technologies Limited (Bizosys) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The Bizosys licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package com.bizosys.hsearch.hbase;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.RowLock;
+
+import com.bizosys.hsearch.util.HSearchLog;
+
+/**
+ * All HBase write calls go through here.
+ * It supports Insert, Delete, Update and Merge operations.
+ * Merge is an operation where the read and the write happen
+ * inside a lock. This lock is never exposed to the calling function.
+ * @author karan
+ */
+public class HWriter {
+
+    private static final boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled();
+    //private boolean isBatchMode = false;
+    private static HWriter singleton = null;
+
+    /**
+     * Factory for getting the HWriter instance.
+     * HWriter can execute in a thread-safe environment with multiple
+     * writers originating from a single machine or a multi-machine
+     * environment, or from a single-threaded write environment.
+     * @param enableThreadSafety Should it run in a parallel clients mode
+     * @return HWriter instance.
+     */
+    public static HWriter getInstance(boolean enableThreadSafety ) {
+        if ( null != singleton) return singleton;
+        synchronized (HWriter.class) {
+            if ( null != singleton) return singleton;
+            singleton = new HWriter();
+        }
+        return singleton;
+    }
+
+    /**
+     * Private constructor; use getInstance().
+     */
+    private HWriter() {
+    }
+
+    /**
+     * Insert a single scalar record. If the record already exists, it is overwritten.
+     * A scalar record contains just one column.
+     * @param tableName Table name
+     * @param record A Table record
+     * @throws IOException
+     */
+    public final void insertScalar(final String tableName, final RecordScalar record) throws IOException {
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insertScalar:record " + tableName);
+
+        byte[] pk = record.pk;
+        Put update = new Put(pk);
+        NV kv = record.kv;
+        update.add(kv.family,kv.name, kv.data);
+        update.setWriteToWAL(true);
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            table.put(update);
+            table.flushCommits();
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+        }
+    }
+
+    /**
+     * Insert multiple scalar records. Existing records are overwritten.
+     * A scalar record contains just one column.
+     * @param tableName Table name
+     * @param records Table records
+     * @throws IOException
+     */
+    public final void insertScalar(final String tableName,
+        final List<RecordScalar> records) throws IOException {
+
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insertScalar:records table " + tableName);
+
+        List<Put> updates = ObjectFactory.getInstance().getPutList();
+
+        for (RecordScalar record : records) {
+            Put update = new Put(record.pk);
+            NV kv = record.kv;
+            update.add(kv.family,kv.name, kv.data);
+            update.setWriteToWAL(true);
+            updates.add(update);
+        }
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            table.put(updates);
+            table.flushCommits();
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+            if ( null != updates) ObjectFactory.getInstance().putPutsList(updates);
+        }
+    }
+
+    /**
+     * Insert a record
+     * @param tableName
+     * @param record
+     * @throws IOException
+     */
+    public final void insert(final String tableName, final Record record) throws IOException {
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert to table " + tableName);
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        try {
+            Put update = new Put(record.pk);
+            for (NV param : record.getNVs()) {
+                update.add(param.family,param.name, param.data);
+            }
+            update.setWriteToWAL(true);
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            table.put(update);
+            table.flushCommits();
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+        }
+    }
+
+    /**
+     * Insert multiple records, overwriting the values of existing records.
+     * @param tableName
+     * @param records
+     * @throws IOException
+     */
+    public final void insert(final String tableName, final List<Record> records) throws IOException {
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert:records to table " + tableName);
+
+        List<Put> updates = ObjectFactory.getInstance().getPutList();
+
+        for (Record record : records) {
+            Put update = new Put(record.pk);
+            for (NV param : record.getNVs()) {
+                update.add(param.family, param.name, param.data);
+            }
+            update.setWriteToWAL(true);
+            updates.add(update);
+        }
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert:Putting records " + updates.size());
+            table.put(updates);
+            table.flushCommits();
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+            if ( null != updates) ObjectFactory.getInstance().putPutsList(updates);
+        }
+    }
+
+    /**
+     * Update a table. The supplied callback pipe is invoked during the
+     * update for modifications such as byte merging.
+     * @param tableName
+     * @param pk
+     * @param pipe
+     * @param families
+     * @throws IOException
+     */
+    public final void update(final String tableName,
+        final byte[] pk, final IUpdatePipe pipe, final byte[][] families) throws IOException {
+
+        if ( null == tableName || null == pk) return;
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> update to table " + tableName);
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        RowLock lock = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            /**
+             * Scope down the existence-check getter so it does not mingle with the actual one.
+             */
+            Get existenceGet = new Get(pk);
+            if ( ! table.exists(existenceGet) ) return;
+
+            lock = table.lockRow(pk);
+            Get lockedGet = ( null == lock) ? new Get(pk) : new Get(pk,lock);
+            if ( null != families) {
+                for (byte[] family : families) {
+                    lockedGet = lockedGet.addFamily(family);
+                }
+            }
+
+            Put lockedUpdate = null;
+            Delete lockedDelete = null;
+
+            int familiesT = ( null == families) ? 0 : families.length;
+            int[] familyByteLen = new int[familiesT];
+
+            Result r = table.get(lockedGet);
+            if ( null == r) return;
+            if ( null == r.list()) return;
+
+            for (KeyValue kv : r.list()) {
+                byte[] curVal = kv.getValue();
+                if ( null == curVal) continue;
+                if ( 0 == curVal.length) continue;
+                byte[] modifiedB = pipe.process(kv.getFamily(), kv.getQualifier(), curVal);
+                int modifiedBLen = ( null == modifiedB) ? 0 : modifiedB.length;
+
+                /**
+                 * Count if family to be chucked out
+                 * */
+                for (int i=0; i
+
+    public final void mergeScalar(final String tableName, final List<RecordScalar> records)
+        throws IOException {
+
+        if ( null == tableName || null == records) return;
+        if (DEBUG_ENABLED)
+            HSearchLog.l.debug("HWriter: mergeScalar (" + tableName + ") , Count =" + records.size());
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        List<RowLock> locks = ObjectFactory.getInstance().getRowLockList();
+        List<Put> updates = ObjectFactory.getInstance().getPutList();
+
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            for (RecordScalar scalar : records) {
+                byte[] pk = scalar.pk;
+                if ( 0 == pk.length) continue;
+                Get getter = new Get(pk);
+                byte[] famB = scalar.kv.family;
+                byte[] nameB = scalar.kv.name;
+                RowLock lock = null;
+
+                if ( table.exists(getter) ) {
+                    lock = table.lockRow(pk);
+                    if ( null == lock) {
+                        throw new IOException("Unable to acquire lock," + new String(pk) +
+                            " for the table - " + tableName);
+                    }
+                    locks.add(lock);
+
+                    Get existingGet = (null == lock) ? new Get(pk) : new Get(pk, lock);
+                    existingGet = existingGet.addColumn(famB, nameB);
+                    Result r = table.get(existingGet);
+                    if ( ! scalar.merge(r.getValue(famB, nameB)) ) {
+                        if ( null != lock ) {
+                            table.unlockRow(lock);
+                            locks.remove(lock);
+                        }
+                        continue;
+                    }
+                }
+
+                NV kv = scalar.kv;
+                byte[] data = kv.data;
+                if ( null == data ) {
+                    try {
+                        if ( null != lock ) {
+                            table.unlockRow(lock);
+                            locks.remove(lock);
+                            lock = null;
+                        }
+
+                    } catch (Exception ex) {
+                        HSearchLog.l.warn("HWriter:mergeScalar > Ignore Unlock exp :" , ex);
+                    }
+                    continue;
+                }
+
+                Put update = ( null == lock ) ? new Put(pk) : new Put(pk, lock);
+                update.add(famB,nameB, data);
+                update.setWriteToWAL(true);
+                updates.add(update);
+            }
+
+            table.put(updates);
+            table.flushCommits();
+
+        } finally {
+            boolean goodTable = true;
+            for (RowLock lock: locks) {
+                try {
+                    if ( null != lock ) table.unlockRow(lock);
+                } catch (Exception ex) {
+                    reportUnlockException(ex);
+                    goodTable = false;
+                }
+            }
+
+            if ( null != facade && null != table && goodTable) {
+                facade.putTable(table);
+            }
+
+            if ( null != locks ) ObjectFactory.getInstance().putRowLockList(locks);
+            if ( null != updates ) ObjectFactory.getInstance().putPutsList(updates);
+        }
+    }
+
+    /**
+     * Merge a record, reading the existing value under a row lock.
+     * @param tableName Table name
+     * @param record A record
+     * @throws IOException
+     */
+    public final void merge(final String tableName, final Record record)
+        throws IOException {
+
+        if ( null == tableName || null == record) return;
+        if (DEBUG_ENABLED)
+            HSearchLog.l.debug("HWriter:merge Record (" + tableName + ")") ;
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        RowLock lock = null;
+
+        try {
+            byte[] pk = record.pk;
+
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            //Step 0 : If the row does not exist, there is nothing to merge; just insert.
+            Get existsCheck = new Get(pk);
+            if ( ! table.exists(existsCheck) ) {
+                insert(tableName, record);
+                return;
+            }
+
+            //Step 1 : Acquire a lock before merging
+            if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Locking Row " );
+            lock = table.lockRow(pk);
+            if ( null == lock) {
+                throw new IOException("HWriter:merge Unable to acquire lock," + new String(pk) +
+                    " for the table - " + tableName);
+            }
+            Get existingGet = ( null == lock) ? new Get(pk) : new Get(pk, lock);
+            for (NV nv : record.getBlankNVs()) {
+                existingGet = existingGet.addColumn(nv.family, nv.name);
+            }
+
+            //Step 2 : Merge data with existing values
+            Result r = table.get(existingGet);
+            if ( null != r) {
+                if ( null != r.list()) {
+                    for (KeyValue kv : r.list()) {
+                        byte[] existingB = kv.getValue();
+                        if ( null == existingB) continue;
+                        if ( 0 == existingB.length) continue;
+                        record.merge(kv.getFamily(),kv.getQualifier(), existingB);
+                    }
+                }
+            }
+
+            //Step 3 : Only add values which have changed.
+            Put update = ( null == lock ) ? new Put(pk) : new Put(pk, lock);
+            int totalCols = 0;
+            for (NV nv : record.getNVs()) {
+                byte[] data = nv.data;
+                if ( nv.isDataUnchanged) continue;
+                if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> data Size " + data.length);
+                update = update.add(nv.family, nv.name, data);
+                totalCols++;
+            }
+
+            //Step 4 : If nothing changed, there is nothing to do.
+            if ( totalCols == 0 ) return;
+
+            //Step 5 : Write the changes.
+            update.setWriteToWAL(true);
+            if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Committing Updates" );
+            table.put(update);
+            table.flushCommits();
+
+        } finally {
+
+            boolean goodTable = true;
+            if ( null != lock ) {
+                if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Unlocking Row " );
+                try { table.unlockRow(lock); } catch (Exception ex) {
+                    reportUnlockException(ex);
+                    goodTable = false;
+                }
+            }
+            if ( null != facade && null != table && goodTable) {
+                facade.putTable(table);
+            }
+        }
+    }
+
+    private final void reportUnlockException(final Exception ex) {
+        Runtime runTime = Runtime.getRuntime();
+        String errorMsg = "Max Mem: " + runTime.maxMemory()/1024;
+        errorMsg = errorMsg + ", Total Mem: " + runTime.totalMemory()/1024;
+        errorMsg = errorMsg + ", Free Mem: " + runTime.freeMemory()/1024;
+        HSearchLog.l.warn("HWriter:reportUnlockException > Ignoring unlock exception. Possibly a memory issue \n" + errorMsg, ex);
+    }
+
+    /**
+     * Compare byte values
+     * @param offset Starting position in the input array
+     * @param inputBytes Compare with bytes
+     * @param compareBytes Compare to bytes
+     * @return True if matches
+     */
+    private final boolean compareBytes(final int offset,
+        final byte[] inputBytes, final byte[] compareBytes) {
+
+        int inputBytesT = inputBytes.length;
+        int compareBytesT = compareBytes.length;
+        if ( compareBytesT != inputBytesT - offset) return false;
+
+        if ( compareBytes[0] != inputBytes[offset]) return false;
+        if ( compareBytes[compareBytesT - 1] != inputBytes[compareBytesT + offset - 1] ) return false;
+
+        switch (compareBytesT)
+        {
+            case 3:
+                return compareBytes[1] == inputBytes[1 + offset];
+            case 4:
+                return compareBytes[1] == inputBytes[1 + offset] &&
+                    compareBytes[2] == inputBytes[2 + offset];
+            case 5:
+                return compareBytes[1] == inputBytes[1+ offset] &&
+                    compareBytes[2] == inputBytes[2+ offset] &&
+                    compareBytes[3] == inputBytes[3+ offset];
+            case 6:
+                return compareBytes[1] == inputBytes[1+ offset] &&
+                    compareBytes[3] == inputBytes[3+ offset] &&
+                    compareBytes[2] == inputBytes[2+ offset] &&
+                    compareBytes[4] == inputBytes[4+ offset];
+            case 7:
+            case 8:
+            case 9:
+            case 10:
+            case 11:
+            case 12:
+            case 13:
+            case 14:
+            case 15:
+            case 16:
+            case 17:
+            case 18:
+            case 19:
+            case 20:
+            case 21:
+            case 22:
+            case 23:
+            case 24:
+            case 25:
+            case 26:
+            case 27:
+            case 28:
+            case 29:
+            case 30:
+                // First and last bytes are already verified above.
+                for ( int i=1; i< compareBytesT - 1; i++) {
+                    if ( compareBytes[i] != inputBytes[offset + i]) return false;
+                }
+                break;
+
+            case 31:
+
+                for ( int a = 1; a <= 6; a++) {
+                    if ( !
+                        (compareBytes[a] == inputBytes[a+offset] &&
+                        compareBytes[a+6] == inputBytes[a+6+offset] &&
+                        compareBytes[a+12] == inputBytes[a+12+offset] &&
+                        compareBytes[a+18] == inputBytes[a+18+offset] &&
+                        compareBytes[a+24] == inputBytes[a+24+offset]) ) return false;
+                }
+                break;
+            default:
+
+                for ( int i=1; i< compareBytesT - 1; i++) {
+                    if ( compareBytes[i] != inputBytes[offset + i]) return false;
+                }
+        }
+        return true;
+    }
+
+    /**
+     * Compare two byte arrays
+     * @param inputBytes Compare with bytes
+     * @param compareBytes Compare to bytes
+     * @return True if matches
+     */
+    private final boolean compareBytes(final byte[] inputBytes, final byte[] compareBytes) {
+        return compareBytes(0,inputBytes,compareBytes);
+    }
+
+}
\ No newline at end of file
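HWriter distinguishes plain inserts from lock-guarded merges. A write sketch under stated assumptions: the NV(family, name, data) and RecordScalar(pk, nv) constructor shapes are inferred from the field accesses in this patch and may differ in the real classes:

    // Hypothetical scalar insert; constructor signatures are assumptions.
    import com.bizosys.hsearch.hbase.HWriter;
    import com.bizosys.hsearch.hbase.NV;
    import com.bizosys.hsearch.hbase.RecordScalar;

    public class WriterSketch {
        public static void main(String[] args) throws Exception {
            NV cell = new NV("c".getBytes(), "v".getBytes(), "42".getBytes());
            RecordScalar rec = new RecordScalar("row-1".getBytes(), cell);
            HWriter.getInstance(true).insertScalar("demo-table", rec); // overwrites if present
        }
    }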
+*/ + +package com.bizosys.hsearch.hbase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Hashtable; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Stack; + +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.RowLock; + +public class ObjectFactory { + + private static int MINIMUM_CACHE = 10; + private static int MAXIMUM_CACHE = 4096; + + private static ObjectFactory thisInstance = new ObjectFactory(); + public static ObjectFactory getInstance() { + return thisInstance; + } + + Stack> putsLists = new Stack>(); + Stack> locksLists = new Stack>(); + Stack> byteArrLists = new Stack>(); + Stack> blockMap = new Stack>(); + Stack> blockMaps = new Stack>(); + Stack> nvLists = new Stack>(); + Stack> stringSets = new Stack>(); + Stack> stringLists = new Stack>(); + Stack> shortLists = new Stack>(); + Stack> longSets = new Stack>(); + Stack> integerLists = new Stack>(); + + + public final List getRowLockList() { + List locks = null; + if (locksLists.size() > MINIMUM_CACHE ) locks = locksLists.pop(); + if ( null != locks ) return locks; + return new ArrayList(256); + } + + public final void putRowLockList(final List locks ) { + if ( null == locks) return; + locks.clear(); + if (locksLists.size() > MAXIMUM_CACHE ) return; + if ( locksLists.contains(locks)) return; + locksLists.push(locks); + } + + public final List getPutList() { + List puts = null; + if (putsLists.size() > MINIMUM_CACHE ) puts = putsLists.pop(); + if ( null != puts ) return puts; + return new ArrayList(256); + } + + public final void putPutsList(List puts ) { + if ( null == puts) return; + puts.clear(); + if (putsLists.size() > MAXIMUM_CACHE ) return; + if ( putsLists.contains(puts)) return; + putsLists.push(puts); + } + + public final List getByteArrList() { + List bytesA = null; + if (byteArrLists.size() > MINIMUM_CACHE ) bytesA = byteArrLists.pop(); + if ( null != bytesA ) return bytesA; + return new ArrayList(32); + } + + public final void putByteArrList(final List bytesA ) { + if ( null == bytesA) return; + bytesA.clear(); + if (byteArrLists.size() > MAXIMUM_CACHE ) return; + if ( byteArrLists.contains(bytesA)) return; + byteArrLists.push(bytesA); + } + + public final List getNVList() { + List nvs = null; + if (nvLists.size() > MINIMUM_CACHE ) nvs = nvLists.pop(); + if ( null != nvs ) return nvs; + return new ArrayList(); + } + + public final void putNVList(final List nvs ) { + if ( null == nvs) return; + nvs.clear(); + if (nvLists.size() > MAXIMUM_CACHE ) return; + if ( nvLists.contains(nvs)) return; + nvLists.push(nvs); + } + + public final Map getBytesList() { + Map lstB = null; + if (blockMap.size() > MINIMUM_CACHE ) lstB = blockMap.pop(); + if ( null != lstB ) return lstB; + return new HashMap(); + } + + public final void putBytesList(final Map lstB ) { + if ( null == lstB) return; + lstB.clear(); + if (blockMap.size() > MAXIMUM_CACHE ) return; + if ( blockMap.contains(lstB)) return; + blockMap.push(lstB); + } + + + public final Map getByteBlockMap(){ + Map obj = null; + if (blockMaps.size() > MINIMUM_CACHE ) obj = blockMaps.pop(); + if ( null != obj ) return obj; + return new Hashtable(); + } + + public final void putByteBlockMap(final Map obj){ + if ( null == obj) return; + obj.clear(); + if (blockMaps.size() > MAXIMUM_CACHE ) return; + if ( blockMaps.contains(obj)) return; + blockMaps.push(obj); + } + + public final Set getStringSet(){ + Set obj = null; + if (stringSets.size() > MINIMUM_CACHE ) 
+	public final Set<String> getStringSet(){
+		Set<String> obj = null;
+		if (stringSets.size() > MINIMUM_CACHE ) obj = stringSets.pop();
+		if ( null != obj ) return obj;
+		return new HashSet<String>();
+	}
+
+	public final void putStringSet(final Set<String> obj){
+		if ( null == obj) return;
+		obj.clear();
+		if (stringSets.size() > MAXIMUM_CACHE ) return;
+		if ( stringSets.contains(obj)) return;
+		stringSets.push(obj);
+	}
+
+	public final List<String> getStringList(){
+		List<String> obj = null;
+		if (stringLists.size() > MINIMUM_CACHE ) obj = stringLists.pop();
+		if ( null != obj ) return obj;
+		return new ArrayList<String>();
+	}
+
+	public final void putStringList(final List<String> obj){
+		if ( null == obj) return;
+		obj.clear();
+		if (stringLists.size() > MAXIMUM_CACHE ) return;
+		if ( stringLists.contains(obj)) return;
+		stringLists.push(obj);
+	}
+
+	public final List<Short> getShortList(){
+		List<Short> obj = null;
+		if (shortLists.size() > MINIMUM_CACHE ) obj = shortLists.pop();
+		if ( null != obj ) return obj;
+		return new ArrayList<Short>();
+	}
+
+	public final void putShortList(final List<Short> obj){
+		if ( null == obj) return;
+		obj.clear();
+		if (shortLists.size() > MAXIMUM_CACHE ) return;
+		if ( shortLists.contains(obj)) return;
+		shortLists.push(obj);
+	}
+
+	public final List<Integer> getIntegerList(){
+		List<Integer> obj = null;
+		if (integerLists.size() > MINIMUM_CACHE ) obj = integerLists.pop();
+		if ( null != obj ) return obj;
+		return new ArrayList<Integer>();
+	}
+
+	public final void putIntegerList(final List<Integer> obj){
+		if ( null == obj) return;
+		obj.clear();
+		if (integerLists.size() > MAXIMUM_CACHE ) return;
+		if ( integerLists.contains(obj)) return;
+		integerLists.push(obj);
+	}
+
+
+	public final Set<Long> getLongSet(){
+		Set<Long> obj = null;
+		if (longSets.size() > MINIMUM_CACHE ) obj = longSets.pop();
+		if ( null != obj ) return obj;
+		return new HashSet<Long>();
+	}
+
+	public final void putLongSet(final Set<Long> obj){
+		if ( null == obj) return;
+		obj.clear();
+		if (longSets.size() > MAXIMUM_CACHE ) return;
+		if ( longSets.contains(obj)) return;
+		longSets.push(obj);
+	}
+
+	public final String getStatus() {
+		StringBuilder sb = new StringBuilder(476);
+		sb.append("");
+		sb.append("blockMap:").append(blockMap.size()).append('|');
+		sb.append("blockMaps:").append(blockMaps.size()).append('|');
+		sb.append("nvLists:").append(nvLists.size()).append('|');
+		sb.append("stringSets:").append(stringSets.size()).append('|');
+		sb.append("stringLists:").append(stringLists.size()).append('|');
+		sb.append("shortLists:").append(shortLists.size()).append('|');
+		sb.append("longSets:").append(longSets.size()).append('|');
+		sb.append("integerLists:").append(integerLists.size());
+		sb.append("");
+		return sb.toString();
+	}
+
+}
diff --git a/src/compatibility/hadooplib_12/protobuf/HSearchCoprocessorProtos.java b/src/compatibility/hadooplib_12/protobuf/HSearchCoprocessorProtos.java
new file mode 100644
index 0000000..e00a04e
--- /dev/null
+++ b/src/compatibility/hadooplib_12/protobuf/HSearchCoprocessorProtos.java
@@ -0,0 +1,4 @@
+package com.bizosys.hsearch.treetable.storage.protobuf.generated;
+
+public final class HSearchCoprocessorProtos {
+}
diff --git a/src/compatibility/hadooplib_12/protobuf/HSearchFilterProtos.java b/src/compatibility/hadooplib_12/protobuf/HSearchFilterProtos.java
new file mode 100644
index 0000000..12f9081
--- /dev/null
+++ b/src/compatibility/hadooplib_12/protobuf/HSearchFilterProtos.java
@@ -0,0 +1,4 @@
+package com.bizosys.hsearch.treetable.storage.protobuf.generated;
+
+public final class HSearchFilterProtos {
+}
diff --git a/src/compatibility/hadooplib_12/storage/CacheStorage.java b/src/compatibility/hadooplib_12/storage/CacheStorage.java
new file mode 100644
index
0000000..ec8268a --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/CacheStorage.java @@ -0,0 +1,57 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; + +import com.bizosys.hsearch.hbase.HDML; + + +public class CacheStorage { + + public static String TABLE_NAME = "hsearch-cache"; + public static final String CACHE_COLUMN = "c"; + public static final byte[] CACHE_COLUMN_BYTES = CACHE_COLUMN.getBytes(); + private static CacheStorage singleton = null; + + public static CacheStorage getInstance() throws IOException { + + if ( null == singleton ) { + synchronized (CacheStorage.class.getName()) { + if ( null == singleton ) { + singleton = new CacheStorage(); + } + } + } + return singleton; + } + + + private CacheStorage() throws IOException { + + HColumnDescriptor col = new HColumnDescriptor( CACHE_COLUMN.getBytes()) ; + + col.setMinVersions(1); + col.setMaxVersions(1); + col.setKeepDeletedCells(false); + col.setCompressionType(Compression.Algorithm.NONE); + col.setEncodeOnDisk(false); + col.setDataBlockEncoding(DataBlockEncoding.NONE); + col.setInMemory(false); + col.setBlockCacheEnabled(true); + col.setTimeToLive(HConstants.FOREVER); + col.setBloomFilterType(BloomType.NONE); + col.setScope(HConstants.REPLICATION_SCOPE_GLOBAL); + + List colFamilies = new ArrayList(); + colFamilies.add(col); + HDML.create(TABLE_NAME, colFamilies); + + } +} diff --git a/src/compatibility/hadooplib_12/storage/HBaseTableSchemaCreator.java b/src/compatibility/hadooplib_12/storage/HBaseTableSchemaCreator.java new file mode 100644 index 0000000..fbf3e04 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HBaseTableSchemaCreator.java @@ -0,0 +1,148 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/
+
+package com.bizosys.hsearch.treetable.storage;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.Compression;
+import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
+import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.log4j.Logger;
+
+import com.bizosys.hsearch.hbase.HDML;
+import com.bizosys.hsearch.util.HSearchConfig;
+import com.bizosys.hsearch.util.conf.Configuration;
+
+public final class HBaseTableSchemaCreator {
+
+	private static HBaseTableSchemaCreator instance = null;
+	public static Logger l = Logger.getLogger(HBaseTableSchemaCreator.class.getName());
+
+	Configuration config = HSearchConfig.getInstance().getConfiguration();
+
+	public Algorithm compression = Compression.Algorithm.NONE;
+	public int partitionBlockSize = config.getInt("partition.block.size", 13035596);
+	public int partitionRepMode = HConstants.REPLICATION_SCOPE_GLOBAL;
+	public DataBlockEncoding dataBlockEncoding = DataBlockEncoding.NONE;
+	public BloomType bloomType = BloomType.NONE;
+	public boolean inMemory = false;
+	public boolean blockCacheEnabled = config.getBoolean("block.cache.enabled", true);
+
+	public static final HBaseTableSchemaCreator getInstance() {
+		if ( null != instance) return instance;
+		synchronized (HBaseTableSchemaCreator.class) {
+			if ( null != instance) return instance;
+			instance = new HBaseTableSchemaCreator();
+		}
+		return instance;
+	}
+
+	/**
+	 * Default constructor
+	 */
+	public HBaseTableSchemaCreator(){
+	}
+
+	/**
+	 * Checks and creates all necessary tables required for the HSearch index.
+	 */
+	public final boolean init(String tableName) {
+
+		try {
+
+			List<HColumnDescriptor> colFamilies = new ArrayList<HColumnDescriptor>();
+
+			HBaseTableSchemaDefn def = HBaseTableSchemaDefn.getInstance(tableName);
+
+			System.out.println("Compression : " + this.compression.getName());
+			System.out.println("Partition Block Size : " + this.partitionBlockSize);
+			System.out.println("Partition Rep Mode : " + this.partitionRepMode);
+			System.out.println("Partition Block Encoding : " + this.dataBlockEncoding.name());
+			System.out.println("Bloom Type : " + this.bloomType.name());
+			System.out.println("In Memory Table: " + this.inMemory);
+			System.out.println("Block Caching: " + this.blockCacheEnabled);
+
+			for (String familyName : def.columnPartions.keySet()) {
+
+				//Partitioned
+				List<String> partitionNames = def.columnPartions.get(familyName).getPartitionNames();
+				for (String partition : partitionNames) {
+					HColumnDescriptor rangeCols = new HColumnDescriptor( (familyName + "_" + partition ).getBytes());
+					configColumn(rangeCols);
+					colFamilies.add(rangeCols);
+				}
+
+				//No Partition
+				if ( partitionNames.size() == 0 ) {
+					HColumnDescriptor rangeCols = new HColumnDescriptor( familyName.getBytes());
+					configColumn(rangeCols);
+					colFamilies.add(rangeCols);
+				}
+			}
+
+			HDML.create(tableName, colFamilies);
+			return true;
+
+		} catch (Exception sf) {
+			sf.printStackTrace(System.err);
+			l.fatal(sf);
+			return false;
+		}
+	}
+
+	/**
+	 * Maps a compression method name to the HBase compression code.
+ * @param methodName + * @return + */ + public static final String resolveCompression(final String methodName) { + String compClazz = Compression.Algorithm.GZ.getName(); + if ("gz".equals(methodName)) { + compClazz = Compression.Algorithm.GZ.getName(); + } else if ("lzo".equals(methodName)) { + compClazz = Compression.Algorithm.LZO.getName(); + } else if ("none".equals(methodName)) { + compClazz = Compression.Algorithm.NONE.getName(); + } + return compClazz; + } + + public final void configColumn(final HColumnDescriptor col) { + col.setMinVersions(1); + col.setMaxVersions(1); + col.setKeepDeletedCells(false); + col.setCompressionType(compression); + col.setEncodeOnDisk(false); + col.setDataBlockEncoding(dataBlockEncoding); + col.setInMemory(inMemory); + col.setBlockCacheEnabled(blockCacheEnabled); + col.setBlocksize(partitionBlockSize); + col.setTimeToLive(HConstants.FOREVER); + col.setBloomFilterType(bloomType); + col.setScope(partitionRepMode); + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_12/storage/HBaseTableSchemaDefn.java b/src/compatibility/hadooplib_12/storage/HBaseTableSchemaDefn.java new file mode 100644 index 0000000..d06ca0e --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HBaseTableSchemaDefn.java @@ -0,0 +1,65 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.util.HashMap; +import java.util.Map; + +import com.bizosys.hsearch.treetable.client.partition.IPartition; + +public final class HBaseTableSchemaDefn { + + private static Map repositories = new HashMap(); + + public static HBaseTableSchemaDefn getInstance(String tableName) { + if ( repositories.containsKey(tableName)) return repositories.get(tableName); + else { + synchronized (HBaseTableSchemaDefn.class.getName()) { + if ( repositories.containsKey(tableName)) return repositories.get(tableName); + repositories.put(tableName, new HBaseTableSchemaDefn(tableName)); + } + } + return repositories.get(tableName); + } + + private HBaseTableSchemaDefn(String tableName) { + this.tableName = tableName; + } + + private String tableName = "htable"; + + //FamilyName_partition is how the column families are created. 
+ public Map columnPartions = new HashMap(); + + public final static char getColumnName() { + return getColumnName(1); + } + + public final static char getColumnName(int token) { + String tokenStr = new Integer(token).toString(); + return tokenStr.charAt(tokenStr.length() - 1); + } + + public String getTableName() { + return this.tableName; + } + + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_12/storage/HSearchBytesCoProcessorProxy.java b/src/compatibility/hadooplib_12/storage/HSearchBytesCoProcessorProxy.java new file mode 100644 index 0000000..392ffb0 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchBytesCoProcessorProxy.java @@ -0,0 +1,72 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.util.HSearchLog; + +public final class HSearchBytesCoProcessorProxy { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + HSearchBytesFilter filter = null; + byte[][] families = null; + byte[][] cols = null; + + public HSearchBytesCoProcessorProxy(final List family_cols , final HSearchBytesFilter filter) throws IOException { + this.filter = filter; + + if (null == family_cols) throw new IOException("Please provide family details. Scan on all cols are not allowed"); + this.families = new byte[family_cols.size()][]; + this.cols = new byte[family_cols.size()][]; + + int seq = -1; + for (ColumnFamName columnFamName : family_cols) { + seq++; + this.families[seq] = columnFamName.family; + this.cols[seq] = columnFamName.name; + } + + } + + public final Map execCoprocessorRows(final HTableWrapper table) throws IOException, Throwable { + + Map output = table.tableInterface.coprocessorExec( + HSearchBytesCoprocessorI.class, null, null, + + + new Batch.Call() { + @Override + public final byte[] call(HSearchBytesCoprocessorI counter) throws IOException { + return counter.getRows(families, cols, filter); + } + } ); + + return output; + } +} diff --git a/src/compatibility/hadooplib_12/storage/HSearchBytesCoprocessor.java b/src/compatibility/hadooplib_12/storage/HSearchBytesCoprocessor.java new file mode 100644 index 0000000..9bf3449 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchBytesCoprocessor.java @@ -0,0 +1,104 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. 
See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The Bizosys licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package com.bizosys.hsearch.treetable.storage;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+
+import com.bizosys.hsearch.util.HSearchConfig;
+import com.bizosys.hsearch.util.HSearchLog;
+import com.bizosys.hsearch.util.conf.Configuration;
+
+public final class HSearchBytesCoprocessor extends BaseEndpointCoprocessor implements HSearchBytesCoprocessorI {
+
+	public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled();
+
+	public boolean scannerBlockCaching = true;
+	public int scannerBlockCachingLimit = 1;
+
+	public HSearchBytesCoprocessor() {
+		Configuration config = HSearchConfig.getInstance().getConfiguration();
+		this.scannerBlockCaching = config.getBoolean("scanner.block.caching", true);
+		this.scannerBlockCachingLimit = config.getInt("scanner.block.caching.amount", 1);
+	}
+
+	/**
+	 * Get matching rows.
+	 * @param filter
+	 * @return
+	 * @throws IOException
+	 */
+	@Override
+	public byte[] getRows(final byte[][] families, final byte[][] cols, final HSearchBytesFilter filter) throws IOException {
+		if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows");
+		InternalScanner scanner = null;
+
+		try {
+			Scan scan = new Scan();
+			scan.setCacheBlocks(scannerBlockCaching);
+			scan.setCaching(scannerBlockCachingLimit);
+			scan.setMaxVersions(1);
+			int familiesT = families.length;
+
+			for (int i=0; i<familiesT; i++) {
+				scan = scan.addColumn(families[i], cols[i]);
+			}
+
+			FilterList filterL = filter.getFilters();
+			if ( null != filterL) scan = scan.setFilter(filterL);
+			else scan = scan.setFilter(filter);
+
+			RegionCoprocessorEnvironment environment = (RegionCoprocessorEnvironment) getEnvironment();
+			scanner = environment.getRegion().getScanner(scan);
+
+			List<KeyValue> curVals = new ArrayList<KeyValue>();
+			boolean done = false;
+			do {
+				done = scanner.next(curVals);
+			} while (done);
+
+			byte[] data = filter.processRows();
+			return data;
+
+		} finally {
+			if ( null != scanner) {
+				try {
+					scanner.close();
+				} catch (Exception ex) {
+					ex.printStackTrace(System.err);
+				}
+			}
+		}
+	}
+}
diff --git a/src/compatibility/hadooplib_12/storage/HSearchBytesCoprocessorI.java b/src/compatibility/hadooplib_12/storage/HSearchBytesCoprocessorI.java
new file mode 100644
index 0000000..8d4ae9e
--- /dev/null
+++ b/src/compatibility/hadooplib_12/storage/HSearchBytesCoprocessorI.java
@@ -0,0 +1,11 @@
+package com.bizosys.hsearch.treetable.storage;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
+
+public interface HSearchBytesCoprocessorI extends CoprocessorProtocol {
+	byte[] getRows(final byte[][] families, final byte[][] cols,
+		final HSearchBytesFilter filter) throws IOException;
+
+}
diff --git a/src/compatibility/hadooplib_12/storage/HSearchBytesFilter.java b/src/compatibility/hadooplib_12/storage/HSearchBytesFilter.java
new file
mode 100644 index 0000000..382330d --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchBytesFilter.java @@ -0,0 +1,146 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.util.HSearchLog; + +/** + * @author abinash + * + */ +public abstract class HSearchBytesFilter implements Filter { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + protected byte[] state = null; + + public HSearchBytesFilter(){ + } + + public HSearchBytesFilter(final byte[] state){ + this.state = state; + } + + @Override + public final void write(final DataOutput out) throws IOException { + out.writeInt(state.length); + out.write(state); + } + + @Override + public final void readFields(final DataInput in) throws IOException { + try { + int length = in.readInt(); + if ( 0 == length) throw new IOException("Invalid Input"); + + state = new byte[length]; + in.readFully(state, 0, length); + + } catch (Exception ex) { + HSearchLog.l.fatal("Error at deserialization of filter:" + ex.getMessage() , ex); + throw new IOException(ex); + } + } + + @Override + public final void filterRow(final List kvL) { + if ( null == kvL) return; + int kvT = kvL.size(); + if ( 0 == kvT) return; + + try { + for (KeyValue kv : kvL) { + if ( null == kv) continue; + + byte[] inputData = kv.getValue(); + if ( null == inputData) continue; + + processColumn(kv); + } + + processRow(kvL); + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(System.err); + } + } + + public abstract void processColumn(KeyValue cell) throws IOException; + public abstract void processRow(List row) throws IOException; + public abstract byte[] processRows() throws IOException; + + + @Override + public void reset() { + } + + @Override + public boolean hasFilterRow() { + return true; + } + + @Override + public KeyValue getNextKeyHint(final KeyValue arg0) { + return null; + } + + @Override + public boolean filterRowKey(final byte[] rowKey, final int offset, final int length) { + return false; + } + + @Override + public boolean filterAllRemaining() { + return false; + } + + @Override + public boolean filterRow() { + return false; + } + + @Override + public ReturnCode filterKeyValue(final KeyValue arg0) { + return ReturnCode.INCLUDE; + } + + /** + * Version 0.94 FIX + */ + @Override + public KeyValue transform(final KeyValue arg0) { + 
return arg0; + } + + public FilterList getFilters() { + return null; + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_12/storage/HSearchGenericCoProcessorFactory.java b/src/compatibility/hadooplib_12/storage/HSearchGenericCoProcessorFactory.java new file mode 100644 index 0000000..978b896 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchGenericCoProcessorFactory.java @@ -0,0 +1,110 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.cache.CacheService; +import com.bizosys.hsearch.util.HSearchLog; + +public final class HSearchGenericCoProcessorFactory { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + boolean cacheEnabled = false; + + HSearchGenericFilter filter = null; + byte[][] families = null; + byte[][] cols = null; + + public HSearchGenericCoProcessorFactory(final List family_cols , final HSearchGenericFilter filter) throws IOException { + this.filter = filter; + this.cacheEnabled = CacheService.getInstance().isCacheEnable(); + if ( INFO_ENABLED) { + HSearchLog.l.info("Cache Storage Enablement :" + cacheEnabled ); + } + + if (null == family_cols) throw new IOException("Please provide family details. 
Scan on all cols are not allowed"); + this.families = new byte[family_cols.size()][]; + this.cols = new byte[family_cols.size()][]; + + int seq = -1; + for (ColumnFamName columnFamName : family_cols) { + seq++; + this.families[seq] = columnFamName.family; + this.cols[seq] = columnFamName.name; + } + + } + + public final Collection execCoprocessorRows(final HTableWrapper table) throws IOException, Throwable { + + String singleQuery = null; + + /** + * Check for already cached result + */ + if ( null != filter) { + if ( filter.clientSideAPI_IsSingleQuery() ) { + singleQuery = filter.clientSideAPI_getSingleQueryWithScope(); + if ( cacheEnabled ) { + byte[] singleQueryResultB = CacheService.getInstance().get(singleQuery); + if( null != singleQueryResultB) { + return SortedBytesArray.getInstance().parse(singleQueryResultB).values(); + } + } + } + } + + Map output = table.tableInterface.coprocessorExec( + HSearchGenericCoprocessor.class, null, null, + + + new Batch.Call() { + @Override + public final byte[] call(HSearchGenericCoprocessor counter) throws IOException { + return counter.getRows(families, cols, filter); + } + } ); + + Collection result = output.values(); + + try { + if ( null != singleQuery) { + if ( cacheEnabled ) { + byte[] dataPack = SortedBytesArray.getInstance().toBytes(result); + CacheService.getInstance().put(singleQuery, dataPack); + } + } + } catch (Exception ex) { + HSearchLog.l.warn("Cache Service Failure.", ex); + } + + return result; + } +} diff --git a/src/compatibility/hadooplib_12/storage/HSearchGenericCoprocessor.java b/src/compatibility/hadooplib_12/storage/HSearchGenericCoprocessor.java new file mode 100644 index 0000000..f820aa3 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchGenericCoprocessor.java @@ -0,0 +1,29 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; + +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; + +public interface HSearchGenericCoprocessor extends CoprocessorProtocol { + byte[] getRows(byte[][] families, byte[][] cols, HSearchGenericFilter filter) throws IOException; +} diff --git a/src/compatibility/hadooplib_12/storage/HSearchGenericCoprocessorImpl.java b/src/compatibility/hadooplib_12/storage/HSearchGenericCoprocessorImpl.java new file mode 100644 index 0000000..15fc806 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchGenericCoprocessorImpl.java @@ -0,0 +1,150 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. 
See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The Bizosys licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package com.bizosys.hsearch.treetable.storage;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+
+import com.bizosys.hsearch.byteutils.SortedBytesArray;
+import com.bizosys.hsearch.functions.HSearchReducer;
+import com.bizosys.hsearch.util.HSearchConfig;
+import com.bizosys.hsearch.util.HSearchLog;
+import com.bizosys.hsearch.util.conf.Configuration;
+
+public final class HSearchGenericCoprocessorImpl extends BaseEndpointCoprocessor
+	implements HSearchGenericCoprocessor {
+
+	public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled();
+	public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled();
+
+	private Configuration config = HSearchConfig.getInstance().getConfiguration();
+
+	private boolean internalScannerBlockCaching = true;
+	private int internalScannerBlockCachingAmount = 1;
+
+	public HSearchGenericCoprocessorImpl() {
+		this.internalScannerBlockCaching = config.getBoolean("internal.scanner.block.caching", true);
+		this.internalScannerBlockCachingAmount = config.getInt("internal.scanner.block.caching.amount", 1);
+	}
+
+	/**
+	 * Get matching rows.
+	 * @param filter
+	 * @return
+	 * @throws IOException
+	 */
+	public byte[] getRows(final byte[][] families, final byte[][] cols, final HSearchGenericFilter filter) throws IOException {
+		if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows");
+		InternalScanner scanner = null;
+		long monitorStartTime = 0L;
+		long overallStartTime = System.currentTimeMillis();
+
+		try {
+			Scan scan = new Scan();
+			scan.setCacheBlocks(internalScannerBlockCaching);
+			scan.setCaching(internalScannerBlockCachingAmount);
+			scan.setMaxVersions(1);
+			int familiesT = families.length;
+
+			for (int i=0; i<familiesT; i++) {
+				scan = scan.addColumn(families[i], cols[i]);
+			}
+
+			FilterList filterL = filter.getFilters();
+			if ( null != filterL) scan = scan.setFilter(filterL);
+			else scan = scan.setFilter(filter);
+
+			RegionCoprocessorEnvironment environment = (RegionCoprocessorEnvironment) getEnvironment();
+			scanner = environment.getRegion().getScanner(scan);
+
+			List<KeyValue> curVals = new ArrayList<KeyValue>();
+			boolean done = false;
+
+			Collection<byte[]> finalOutput = new ArrayList<byte[]>();
+			Collection<byte[]> partOutput = new ArrayList<byte[]>();
+
+			HSearchReducer reducer = filter.getReducer();
+			filter.configure();
+			do {
+				curVals.clear();
+				partOutput.clear();
+
+				done = scanner.next(curVals);
+				for (KeyValue kv : curVals) {
+					byte[] input = kv.getValue();
+					if ( null == input) continue;
+
+					if ( null != reducer) {
+						filter.deserialize(input, partOutput);
+
+						if ( INFO_ENABLED ) {
+							monitorStartTime = System.currentTimeMillis();
+						}
+
+						reducer.appendRows(kv.getRow(), finalOutput, partOutput);
+
+						if ( INFO_ENABLED ) {
+							filter.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime;
+						}
+
+					}
+				}
+
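+				// scanner.next() returns true while the region holds more rows;
+				// each pass deserializes one row's column parts and folds them
+				// into finalOutput through the plugin's reducer.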
} while (done); + + if ( INFO_ENABLED ) HSearchLog.l.info( + "**** Time spent on Overall : Scanner : Plugin Code = " + + ( System.currentTimeMillis() - overallStartTime) + ":" + + filter.overallExecutionTime + ":" + + filter.pluginExecutionTime + " in ms."); + + byte[] data = SortedBytesArray.getInstance().toBytes(finalOutput); + + return data; + + } finally { + if ( null != filter) filter.close(); + + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_12/storage/HSearchGenericFilter.java b/src/compatibility/hadooplib_12/storage/HSearchGenericFilter.java new file mode 100644 index 0000000..b0f8eef --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchGenericFilter.java @@ -0,0 +1,618 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.StringTokenizer; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.byteutils.SortedBytesBase.Reference; +import com.bizosys.hsearch.federate.BitSetOrSet; +import com.bizosys.hsearch.federate.QueryPart; +import com.bizosys.hsearch.functions.HSearchReducer; +import com.bizosys.hsearch.functions.StatementWithOutput; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchTableMultiQueryExecutor; +import com.bizosys.hsearch.treetable.client.HSearchTableParts; +import com.bizosys.hsearch.treetable.client.IHSearchPlugin; +import com.bizosys.hsearch.util.HSearchLog; + +/** + * @author abinash + * + */ +public abstract class HSearchGenericFilter implements Filter { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + String name = null; + public String getName() { + if ( null == name) { + name = this.getClass().getName(); + } + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + + /** + * Client side variables + */ + Map queryFilters = null; + + /** + * Input Variables + */ + String multiQuery = null; + IHSearchPlugin plugin = null; + Map queryPayload = new HashMap(3); + Map colIdWithType = new HashMap(3); + + public long pluginExecutionTime = 0L; + public long 
overallExecutionTime = 0L;
+
+
+	HSearchProcessingInstruction inputMapperInstructions = new HSearchProcessingInstruction();
+	byte[] inputRowsToIncludeB = null;
+	List<byte[]> inputRowsList = null;
+	SortedBytesArray rowsToInclude = null;
+
+	Map<String, HSearchTableParts> queryIdWithParts = new HashMap<String, HSearchTableParts>();
+	Map<String, HSearchTableParts> colNamesWithPartitionBytes = new HashMap<String, HSearchTableParts>();
+	List<byte[]> columnsOfOneRowAfterJoin = new ArrayList<byte[]>();
+	List<Collection<byte[]>> stmtOutputContainers = new LinkedList<Collection<byte[]>>();
+	SortedBytesArray rowBytesPacker = SortedBytesArray.getInstanceArr();
+
+	HSearchTableMultiQueryExecutor intersector = null;
+
+	public HSearchGenericFilter(){
+	}
+
+	public HSearchGenericFilter(final HSearchProcessingInstruction outputType,
+			final String query, final Map<String, String> details) {
+		this(outputType, query, details, null);
+	}
+
+	public HSearchGenericFilter(final HSearchProcessingInstruction outputType,
+			final String query, final Map<String, String> details, List<byte[]> scopedToRows) {
+
+		this.multiQuery = query;
+		this.queryFilters = details;
+		this.inputMapperInstructions = outputType;
+		this.inputRowsList = scopedToRows;
+	}
+
+	public void setScopedToRows(List<byte[]> scopedToRows) {
+		this.inputRowsList = scopedToRows;
+	}
+
+	public boolean clientSideAPI_IsSingleQuery() throws IOException {
+		if ( null == this.queryFilters) throw new IOException("Generic Filter is not initialized");
+		if ( 1 == this.queryFilters.size()) return true;
+		return false;
+	}
+
+	public String clientSideAPI_getSingleQueryWithScope() throws IOException {
+		if ( null == this.queryFilters) throw new IOException("Generic Filter is not initialized");
+		if ( 1 != this.queryFilters.size()) throw new IOException("Generic Filter has multiple queries");
+		return getName() + "/" + this.queryFilters.values().iterator().next();
+	}
+
+	/**
+	 * output type
+	 * structured:A OR unstructured:B
+	 * structured:A=f|1|1|1|c|*|*
+	 * unstructured:B=*|*|*|*|*|*
+	 */
+	@Override
+	public final void write(final DataOutput out) throws IOException {
+
+		if ( null != inputRowsList) {
+			if ( inputRowsList.size() > 0 ) {
+				inputRowsToIncludeB = SortedBytesArray.getInstanceArr().toBytes(inputRowsList);
+			}
+		}
+
+		StringBuilder querySection = new StringBuilder();
+		querySection.append(inputMapperInstructions.toString()).append('\n');
+		querySection.append(this.multiQuery);
+		if ( null != queryFilters) {
+			for (String queryP : queryFilters.keySet()) {
+				String input = queryFilters.get(queryP);
+				querySection.append('\n').append(queryP).append('=').append(input);
+			}
+		}
+
+		if ( DEBUG_ENABLED ) {
+			HSearchLog.l.debug("Sending to HBase : " + querySection.toString() + ", Rows to include:" + inputRowsToIncludeB);
+		}
+
+		SortedBytesArray sendToRSData = SortedBytesArray.getInstanceArr();
+		byte[] ser = ( null == inputRowsToIncludeB) ?
+			sendToRSData.toBytes(querySection.toString().getBytes())
+			:
+			sendToRSData.toBytes( querySection.toString().getBytes(), inputRowsToIncludeB);
+
+		out.writeInt(ser.length);
+		out.write(ser);
+	}
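+	/*
+	 * The payload built above is packed with SortedBytesArray into one or two
+	 * sections: section 0 is always the query text, section 1 (present only
+	 * when a row scope was set) is the packed list of row keys. A sketch of
+	 * that round trip, with illustrative values:
+	 *
+	 *   SortedBytesArray packer = SortedBytesArray.getInstanceArr();
+	 *   byte[] wire = packer.toBytes(queryText.getBytes(), rowScopeBytes);
+	 *   // readFields() below checks getSize(): 2 sections => row scope present.
+	 */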
+	/**
+	 * output type
+	 * structured:A OR unstructured:B
+	 * structured:A=f|1|1|1|c|*|*
+	 * unstructured:B=*|*|*|*|*|*
+	 * TODO:// Replace with Fast Split.
+	 */
+	@Override
+	public final void readFields(final DataInput in) throws IOException {
+		try {
+			int length = in.readInt();
+			if ( 0 == length) throw new IOException("Invalid Query");
+
+			byte[] deser = new byte[length];
+			in.readFully(deser, 0, length);
+
+			if ( DEBUG_ENABLED) {
+				HSearchLog.l.debug("Total bytes Received @ Generic Filter:" + length);
+			}
+
+			SortedBytesArray receiveRSData = SortedBytesArray.getInstanceArr();
+			receiveRSData.parse(deser);
+
+			int packedDataSectionsT = receiveRSData.getSize();
+			if ( DEBUG_ENABLED) {
+				HSearchLog.l.debug("Reading bytes sections of total :" + packedDataSectionsT);
+			}
+			if ( packedDataSectionsT == 0 ) {
+				throw new IOException("Unknown number of fields :" + packedDataSectionsT);
+			}
+
+			//Filter Row Section
+			if ( packedDataSectionsT == 2) {
+				Reference ref = new Reference();
+				receiveRSData.getValueAtReference(1, ref);
+				rowsToInclude = SortedBytesArray.getInstanceArr();
+				rowsToInclude.parse(deser, ref.offset, ref.length);
+			}
+
+			//Query Section
+			Reference ref = new Reference();
+			receiveRSData.getValueAtReference(0, ref);
+			StringTokenizer stk = new StringTokenizer(new String(deser, ref.offset, ref.length), "\n");
+
+			int lineNo = -1;
+			while ( stk.hasMoreTokens() ) {
+
+				lineNo++;
+
+				switch ( lineNo ) {
+					case 0:
+						String output = stk.nextToken();
+						if (output.length() == 0 ) throw new IOException("Unknown result output type.");
+						this.inputMapperInstructions = new HSearchProcessingInstruction(output);
+						break;
+
+					case 1:
+						this.multiQuery = stk.nextToken();
+
+						if ( DEBUG_ENABLED ) {
+							HSearchLog.l.debug("HBase Region Server: Multi Query" + this.multiQuery);
+						}
+						break;
+
+					default:
+						String line = stk.nextToken();
+						int splitIndex = line.indexOf('=');
+						if ( -1 == splitIndex) throw new IOException("Expecting [=] in line " + line);
+
+						String colNameQuolonId = line.substring(0,splitIndex);
+						String filtersPipeSeparated = line.substring(splitIndex+1);
+
+						int colNameAndQIdSplitIndex = colNameQuolonId.indexOf(':');
+						if ( -1 == colNameAndQIdSplitIndex || colNameQuolonId.length() - 1 == colNameAndQIdSplitIndex) {
+							throw new IOException("Sub queries expected as X:Y eg.\n" +
+								"family1:A OR family2:B\nfamily1:A=f|1|1|1|c|*|*\nfamily2:B=*|*|*|*|*|*");
+						}
+						String colName = colNameQuolonId.substring(0,colNameAndQIdSplitIndex);
+						String qId = colNameQuolonId.substring(colNameAndQIdSplitIndex+1);
+
+						if ( DEBUG_ENABLED ) {
+							HSearchLog.l.debug("colName:qId = " + colName + "/" + qId);
+						}
+
+						colIdWithType.put(qId, colName);
+
+						this.plugin = createPlugIn(colName) ;
+						plugin.setOutputType(this.inputMapperInstructions);
+
+						this.queryPayload.put(
+							colNameQuolonId, new QueryPart(filtersPipeSeparated,
+								HSearchTableMultiQueryExecutor.PLUGIN, plugin) );
+
+						if ( DEBUG_ENABLED ) {
+							HSearchLog.l.debug("HBase Region Server: Query Payload " + line);
+						}
+						break;
+				}
+			}
+
+			for (int i=0; i<this.queryPayload.size(); i++) {
+				this.stmtOutputContainers.add( new ArrayList<byte[]>() );
+			}
+
+		} catch (Exception ex) {
+			HSearchLog.l.fatal(ex);
+			throw new IOException(ex);
+		}
+	}
+
+	/**
+	 * TODO:
+	 * If we have a query as FieldA OR FieldB
+	 * FieldA, tableparts should only contain byte[] of family FieldA_*
+	 * and FieldB byte[] of family FieldB_*
+	 */
+	@Override
+	public final void filterRow(final List<KeyValue> kvL) {
+		if ( null == kvL) return;
+		int kvT = kvL.size();
+		if ( 0 == kvT) return;
+
+		if ( DEBUG_ENABLED ) {
+			HSearchLog.l.debug("Processing @ Region Server : filterRow" );
+		}
+
+		try {
+
+			byte[] row = null;
+			byte[] firstFamily = null;
+			byte[] firstCol = null;
+
+			//colParts.put("structured:A", bytes);
colNamesWithPartitionBytes.clear(); + + //HBase Family Name = schema column name + "_" + partition + String columnNameWithParition = null; + String colName = null; + + for (KeyValue kv : kvL) { + if ( null == kv) continue; + + byte[] inputData = kv.getValue(); + if ( null == inputData) continue; + columnNameWithParition = new String(kv.getFamily()); + + int partitionIndex = columnNameWithParition.indexOf('_'); + colName = ( partitionIndex == -1 ) ? columnNameWithParition : + columnNameWithParition.substring(0, partitionIndex); + + HSearchTableParts tableParts = null; + if ( colNamesWithPartitionBytes.containsKey(colName)) { + tableParts = colNamesWithPartitionBytes.get(colName); + } else { + tableParts = new HSearchTableParts(); + colNamesWithPartitionBytes.put(colName, tableParts); + } + tableParts.put(inputData); + + if ( null == row ) { + firstFamily = kv.getFamily(); + firstCol = kv.getQualifier(); + row = kv.getRow(); + } + } + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("queryData HSearchTableParts creation. "); + } + + queryIdWithParts.clear(); + + for (String queryId : colIdWithType.keySet()) { //A + String queryType = colIdWithType.get(queryId); //structured + HSearchTableParts parts = colNamesWithPartitionBytes.get(queryType); + + String queryTypeWithId = queryType + ":" + queryId; + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug(queryTypeWithId); + HSearchLog.l.debug("Query Parts for " + queryTypeWithId); + } + + queryIdWithParts.put(queryTypeWithId, parts); + } + colNamesWithPartitionBytes.clear(); + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchGenericFilter: Filteration Starts"); + + long monitorStartTime = 0L; + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + if ( null == intersector ) intersector = createExecutor(); + this.plugin.setMergeId(row); + BitSetOrSet intersectedIds = federatedQueryExec(row, intersector, queryIdWithParts); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + kvL.clear(); //Clear all data + byte[] value = getOneRowBytes(intersectedIds, this.queryPayload); + kvL.add(new KeyValue(row, firstFamily, firstCol, value) ); + + + } catch (Exception ex) { + ex.printStackTrace(System.err); + HSearchLog.l.fatal(ex); + } + } + + private final BitSetOrSet federatedQueryExec(final byte[] row, + final HSearchTableMultiQueryExecutor intersector, + final Map queryData) throws Exception, IOException { + + BitSetOrSet intersectedIds = intersector.execute( + queryData, this.multiQuery, this.queryPayload, inputMapperInstructions); + + if ( DEBUG_ENABLED ) { + boolean hasMatchingIds = false; + hasMatchingIds = ( null != intersectedIds && intersectedIds.size() > 0 ); + HSearchLog.l.debug("Generaic filter hasMatchingIds :" + hasMatchingIds + " objectid=" + intersectedIds.hashCode()); + if ( hasMatchingIds ) HSearchLog.l.debug( new String(row) + " has ids of :" + intersectedIds.size()); + } + + return intersectedIds; + } + + + @Override + public final void reset() { + } + + @Override + public final boolean hasFilterRow() { + return true; + } + + @Override + public final KeyValue getNextKeyHint(final KeyValue arg0) { + return null; + } + + @Override + public final boolean filterRowKey(final byte[] rowKey, final int offset, final int length) { + + if (DEBUG_ENABLED) { + int scopeToTheseRowsT = ( null == rowsToInclude) ? 
0 : rowsToInclude.getSize();
+			HSearchLog.l.debug("Analyzing row for processing: " + new String(rowKey) + " , From a matching set of " + scopeToTheseRowsT);
+		}
+
+		if ( null == rowsToInclude) return false;
+
+		byte[] exactRowBytes = new byte[length];
+		try {
+			System.arraycopy(rowKey, offset, exactRowBytes, 0, length);
+			if ( rowsToInclude.getEqualToIndex(exactRowBytes) == -1) return true;
+			return false;
+
+		} catch (IOException ex) {
+			int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize();
+			String rowKeyStr = ( null == rowKey) ? "Null row key" : new String(rowKey);
+			String errMsg = "Error while finding filtration criteria for the row , " + rowKeyStr +
+				"\n" + ex.getMessage() + "\n" +
+				"With search scope inside id count : " + scopeToTheseRowsT;
+			System.err.println(errMsg);
+			HSearchLog.l.fatal(errMsg, ex);
+
+			return false;
+		}
+	}
+
+	@Override
+	public final boolean filterAllRemaining() {
+		return false;
+	}
+
+	@Override
+	public final boolean filterRow() {
+		return false;
+	}
+
+	@Override
+	public final ReturnCode filterKeyValue(final KeyValue arg0) {
+		return ReturnCode.INCLUDE;
+	}
+
+	/**
+	 * Version 0.94 FIX
+	 */
+	@Override
+	public final KeyValue transform(final KeyValue arg0) {
+		return arg0;
+	}
+
+	/**
+	 *******************************************************************************************
+	 * COMPUTATIONS
+	 * Step 1 - HSearch Table merge
+	 *******************************************************************************************
+	 */
+
+	/**
+	 * *|*|architect|age
+	 * AND
+	 * *|*|developer|age
+	 *
+	 * @param matchedIds
+	 * @param queryPayload
+	 * @return
+	 * @throws IOException
+	 */
+	public final byte[] getOneRowBytes( final BitSetOrSet matchedIds, final Map<String, QueryPart> queryPayload) throws IOException {
+
+		if ( DEBUG_ENABLED ) {
+			int matchedIdsT = ( null == matchedIds) ? 0 : matchedIds.size();
+			HSearchLog.l.debug("HSearchGenericFilter:serialize : with matchedIds " + matchedIdsT + ", Object:" + matchedIds.hashCode());
+			if ( null != matchedIds.getDocumentIds()) {
+				HSearchLog.l.debug("HSearchGenericFilter: DocumentIds size " + matchedIds.getDocumentIds().size() + " and matchedId size " + matchedIds.size());
+			} else if ( null != matchedIds.getDocumentSequences()) {
+				HSearchLog.l.debug("HSearchGenericFilter: DocumentSequences cardinality " + matchedIds.getDocumentSequences().cardinality());
+			}
+		}
+
+		/**
+		 * - Iterate through all the parts and find the values.
+ * - Collect the data for multiple queries + */ + HSearchReducer reducer = getReducer(); + int totalQueries = queryPayload.size(); + + columnsOfOneRowAfterJoin.clear(); + long monitorStartTime = 0L; + + if ( totalQueries == 1) { + + Object pluginO = queryPayload.values().iterator().next().getParams().get( + HSearchTableMultiQueryExecutor.PLUGIN); + IHSearchPlugin plugin = (IHSearchPlugin) pluginO; + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + plugin.getResultSingleQuery(columnsOfOneRowAfterJoin); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + + } else { + StatementWithOutput[] stmtWithOutputs = new StatementWithOutput[totalQueries]; + int seq = 0; + + for (QueryPart part : queryPayload.values()) { + + Object pluginO = part.getParams().get(HSearchTableMultiQueryExecutor.PLUGIN); + IHSearchPlugin plugin = (IHSearchPlugin) pluginO; + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + Collection queryOutput = this.stmtOutputContainers.get(seq); + queryOutput.clear(); //Clear to reuse + plugin.getResultMultiQuery(matchedIds, queryOutput); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + stmtWithOutputs[seq] = new StatementWithOutput(part.aStmtOrValue, queryOutput); + seq++; + } + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + reducer.appendQueries(columnsOfOneRowAfterJoin, stmtWithOutputs); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + for (StatementWithOutput stmtWithOutput : stmtWithOutputs) { + if ( null != stmtWithOutput.cells ) stmtWithOutput.cells.clear(); + } + } + + //Put it to Bytes + byte[] processedRowBytes = rowBytesPacker.toBytes(columnsOfOneRowAfterJoin); + columnsOfOneRowAfterJoin.clear(); + + return processedRowBytes; + } + + public final void deserialize(final byte[] input, final Collection output) throws IOException { + SortedBytesArray.getInstance().parse(input).values(output); + } + + public abstract HSearchTableMultiQueryExecutor createExecutor(); + public abstract IHSearchPlugin createPlugIn(String type) throws IOException ; + public abstract HSearchReducer getReducer(); + + /** + * Override this method if you want to set more filters in processing. + * + FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RowFilter filter1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-22")) ); + list.addFilter(filter1); + list.addFilter(this); + return list; + + * @return + */ + public final FilterList getFilters() { + return null; + } + + /** + * Any information to be configured before starting the filtration process. + */ + public final void configure() { + } + + /** + * At the end release the resources. 
+ */ + public final void close() { + if ( null != queryFilters) queryFilters.clear(); + if ( null != queryPayload) queryPayload.clear(); + if ( null != colIdWithType) colIdWithType.clear(); + if ( null != queryIdWithParts) queryIdWithParts.clear(); + if ( null != colNamesWithPartitionBytes) colNamesWithPartitionBytes.clear(); + if ( null != columnsOfOneRowAfterJoin) columnsOfOneRowAfterJoin.clear(); + if ( null != stmtOutputContainers) stmtOutputContainers.clear(); + } + + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoProcessorProxy.java b/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoProcessorProxy.java new file mode 100644 index 0000000..181a90a --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoProcessorProxy.java @@ -0,0 +1,77 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.BytesSection; +import com.bizosys.hsearch.treetable.Cell2; +import com.bizosys.hsearch.treetable.CellKeyValue; +import com.bizosys.hsearch.util.HSearchLog; + +public final class HSearchMultiGetCoProcessorProxy { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + HSearchScalarFilter filter = null; + byte[][] families = null; + byte[][] cols = null; + byte[][] rows = null; + + public HSearchMultiGetCoProcessorProxy(final ColumnFamName columnFamName , + final HSearchScalarFilter filter, byte[][] rows) throws IOException { + + this.filter = filter; + if (null == columnFamName) throw new IOException("Please provide family details. 
Scan on all cols are not allowed"); + this.families = new byte[][]{columnFamName.family}; + this.cols = new byte[][]{columnFamName.name}; + this.rows = rows; + } + + public final void execCoprocessorRows( Map kvs, + final HTableWrapper table, final byte[] row) throws IOException, Throwable { + + Map output = table.tableInterface.coprocessorExec( + HSearchMultiGetCoprocessorI.class, row, row, + + new Batch.Call() { + @Override + public final byte[] call(HSearchMultiGetCoprocessorI counter) throws IOException { + return counter.getRows(families, cols, filter, rows); + } + } ); + + + for (byte[] bs : output.keySet()) { + Cell2 cell2 = new Cell2(byte[].class, byte[].class); + cell2.data = new BytesSection(output.get(bs) ); + cell2.parseElements(); + for (CellKeyValue kv: cell2.sortedList) { + kvs.put(new String(kv.getKey()), kv.getValue()); + } + } + } +} diff --git a/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoprocessor.java b/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoprocessor.java new file mode 100644 index 0000000..fd2b078 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoprocessor.java @@ -0,0 +1,129 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/
+
+package com.bizosys.hsearch.treetable.storage;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+
+import com.bizosys.hsearch.treetable.Cell2;
+import com.bizosys.hsearch.util.HSearchConfig;
+import com.bizosys.hsearch.util.HSearchLog;
+import com.bizosys.hsearch.util.conf.Configuration;
+
+public final class HSearchMultiGetCoprocessor extends BaseEndpointCoprocessor implements HSearchMultiGetCoprocessorI {
+
+	public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled();
+
+	public boolean scannerBlockCaching = true;
+	public int scannerBlockCachingLimit = 1;
+
+	public HSearchMultiGetCoprocessor() {
+		Configuration config = HSearchConfig.getInstance().getConfiguration();
+		this.scannerBlockCaching = config.getBoolean("scanner.block.caching", true);
+		this.scannerBlockCachingLimit = config.getInt("scanner.block.caching.amount", 1);
+	}
+
+	/**
+	 * Get matching rows.
+	 * @param filter
+	 * @return
+	 * @throws IOException
+	 */
+	@Override
+	public byte[] getRows(final byte[][] families, final byte[][] cols, final Filter filter, final byte[][] rows) throws IOException {
+		if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows");
+		InternalScanner scanner = null;
+
+		try {
+			Scan scan = new Scan();
+			scan.setCacheBlocks(scannerBlockCaching);
+			scan.setCaching(scannerBlockCachingLimit);
+			scan.setMaxVersions(1);
+			int familiesT = families.length;
+
+			for (int i=0; i<familiesT; i++) {
+				scan = scan.addColumn(families[i], cols[i]);
+			}
+
+			RegionCoprocessorEnvironment environment = (RegionCoprocessorEnvironment) getEnvironment();
+
+			List<KeyValue> finalVals = new ArrayList<KeyValue>();
+			List<KeyValue> curVals = new ArrayList<KeyValue>();
+
+			if ( null != filter) {
+				scan = scan.setFilter(filter);
+			}
+
+			boolean done = false;
+			for (byte[] row : rows) {
+				done = false;
+				scan.setStartRow(row);
+				scan.setStopRow(row);
+
+				scanner = environment.getRegion().getScanner(scan);
+				do {
+					curVals.clear();
+					done = scanner.next(curVals);
+					if ( curVals.size() == 0 ) continue;
+					finalVals.addAll(curVals);
+
+					KeyValue keyValue = curVals.get(0);
+					System.out.println( new String ( keyValue.getRow() ));
+				} while (done);
+				scanner.close();
+				scanner = null;
+			}
+
+			Cell2<byte[], byte[]> container = new Cell2<byte[], byte[]>(byte[].class, byte[].class);
+			for (KeyValue keyValue : finalVals) {
+				byte[] key = keyValue.getRow();
+				byte[] val = keyValue.getValue();
+
+				if ( null == key || null == val) continue;
+				if ( key.length == 0 || val.length == 0 ) continue;
+				container.add(key, val);
+			}
+
+			byte[] data = container.toBytesOnSortedData();
+
+			return data;
+
+		} finally {
+			if ( null != scanner) {
+				try {
+					scanner.close();
+				} catch (Exception ex) {
+					ex.printStackTrace(System.err);
+				}
+			}
+		}
+	}
+}
diff --git a/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoprocessorI.java b/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoprocessorI.java
new file mode 100644
index 0000000..053ea8c
--- /dev/null
+++ b/src/compatibility/hadooplib_12/storage/HSearchMultiGetCoprocessorI.java
@@ -0,0 +1,12 @@
+package com.bizosys.hsearch.treetable.storage;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
+
+
+public interface HSearchMultiGetCoprocessorI extends CoprocessorProtocol {
+	byte[] getRows(final byte[][] families, final
byte[][] cols, + final Filter filter, final byte[][] rows) throws IOException; +} diff --git a/src/compatibility/hadooplib_12/storage/HSearchScalarFilter.java b/src/compatibility/hadooplib_12/storage/HSearchScalarFilter.java new file mode 100644 index 0000000..ebe3f74 --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchScalarFilter.java @@ -0,0 +1,377 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.StringTokenizer; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.byteutils.SortedBytesBase.Reference; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchQuery; +import com.bizosys.hsearch.treetable.client.IHSearchPlugin; +import com.bizosys.hsearch.treetable.client.IHSearchTable; +import com.bizosys.hsearch.util.HSearchLog; + +/** + * @author abinash + * + */ +public abstract class HSearchScalarFilter implements Filter { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + String name = null; + public String getName() { + if ( null == name) { + name = this.getClass().getName(); + } + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + + /** + * Input Variables + */ + String multiQuery = null; + + public long pluginExecutionTime = 0L; + public long overallExecutionTime = 0L; + + + protected HSearchProcessingInstruction inputMapperInstructions = new HSearchProcessingInstruction(); + byte[] inputRowsToIncludeB = null; + List inputRowsList = null; + SortedBytesArray rowsToInclude = null; + byte[] matchingIds = null; + + public HSearchScalarFilter(){ + } + + public HSearchScalarFilter(final HSearchProcessingInstruction outputType,final String query) { + this.multiQuery = query; + this.inputMapperInstructions = outputType; + } + + public void setMatchingRows(List inputRowsList) { + this.inputRowsList = inputRowsList; + } + + public void setMatchingIds(byte[] matchingIds) { + this.matchingIds = matchingIds; + } + + @Override + public final void write(final DataOutput out) throws IOException { + + if ( null != inputRowsList) { + if ( inputRowsList.size() > 0 ) { + inputRowsToIncludeB = SortedBytesArray.getInstanceArr().toBytes(inputRowsList); + } + } + + SortedBytesArray 
sendToRSData = SortedBytesArray.getInstanceArr(); + String querySection = this.inputMapperInstructions.toString() + "\n" + this.multiQuery; + + List values = new ArrayList(3); + values.add(querySection.getBytes()); + + if(null != matchingIds) + values.add(matchingIds); + else + values.add(new byte[0]); + + if(null != inputRowsToIncludeB) + values.add(inputRowsToIncludeB); + + byte[] ser = sendToRSData.toBytes( values ); + + out.writeInt(ser.length); + out.write(ser); + } + + HSearchQuery query = null; + IHSearchTable table = null; + IHSearchPlugin plugin = null; + boolean skipFiltering = true; + Collection dataCarrier = new ArrayList(); + + @Override + public final void readFields(final DataInput in) throws IOException { + try { + int length = in.readInt(); + if ( 0 == length) throw new IOException("Invalid Query"); + + byte[] deser = new byte[length]; + in.readFully(deser, 0, length); + + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Total bytes Received @ Generic Filter:" + length); + } + + SortedBytesArray receiveRSData = SortedBytesArray.getInstanceArr(); + receiveRSData.parse(deser); + + int packedDataSectionsT = receiveRSData.getSize(); + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Reading bytes sections of total :" + packedDataSectionsT); + } + if ( packedDataSectionsT == 0 ) { + throw new IOException("Unknown number of fields :" + packedDataSectionsT); + } + + Reference ref = new Reference(); + //Filter Row Section + if ( packedDataSectionsT == 3) { + receiveRSData.getValueAtReference(2, ref); + rowsToInclude = SortedBytesArray.getInstanceArr(); + rowsToInclude.parse(deser, ref.offset, ref.length); + } + + //matching ids + receiveRSData.getValueAtReference(1, ref); + this.matchingIds = new byte[ref.length]; + System.arraycopy(deser, ref.offset, this.matchingIds, 0, ref.length); + + //Query Section + receiveRSData.getValueAtReference(0, ref); + StringTokenizer stk = new StringTokenizer(new String(deser, ref.offset, ref.length), "\n"); + + int lineNo = -1; + while ( stk.hasMoreTokens() ) { + + lineNo++; + + switch ( lineNo ) { + case 0: + String output = stk.nextToken(); + if (output.length() == 0 ) throw new IOException("Unknown result output type."); + this.inputMapperInstructions = new HSearchProcessingInstruction(output); + break; + + case 1: + this.multiQuery = stk.nextToken(); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("HBase Region Server: Multi Query" + this.multiQuery); + } + break; + } + } + + if ( null != this.multiQuery ) { + if ( 0 != this.multiQuery.trim().length() ) + query = new HSearchQuery(this.multiQuery); + + this.table = createTable(); + if ( null != table) { + this.plugin =createPlugIn(); + if ( null != this.plugin) { + this.plugin.setOutputType(this.inputMapperInstructions); + if(0 != this.matchingIds.length) + this.plugin.setMergeId(this.matchingIds); + skipFiltering = false; + } + } + } + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(); + throw new IOException(ex); + } + } + + @Override + public final void filterRow(final List kvL) { + if ( skipFiltering ) return; + + if ( null == kvL) return; + int kvT = kvL.size(); + if ( 0 == kvT) return; + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Processing @ Region Server : filterRow" ); + } + + try { + + List kvLFiltered = new ArrayList(); + + for (KeyValue kv : kvL) { + if ( null == kv) continue; + + byte[] inputData = kv.getValue(); + if ( null == inputData) continue; + + switch ( this.inputMapperInstructions.getCallbackType()) { + case 
HSearchProcessingInstruction.PLUGIN_CALLBACK_COLS: + table.get(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_ID: + table.keySet(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_VAL: + table.values(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_IDVAL: + table.keyValues(inputData, this.query, plugin); + break; + default: + throw new IOException("Unknown output type:" + this.inputMapperInstructions.getCallbackType()); + } + + plugin.getResultSingleQuery(dataCarrier); + + kvLFiltered.add(new KeyValue(kv.getKey(), kv.getFamily(), kv.getQualifier(), + SortedBytesArray.getInstance().toBytes(dataCarrier)) ); + dataCarrier.clear(); + } + kvL.clear(); + kvL.addAll(kvLFiltered); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("queryData HSearchTableParts creation. "); + } + + + + } catch (Exception ex) { + ex.printStackTrace(System.err); + HSearchLog.l.fatal(ex); + } + } + + @Override + public final void reset() { + } + + @Override + public final boolean hasFilterRow() { + return true; + } + + @Override + public final KeyValue getNextKeyHint(final KeyValue arg0) { + return null; + } + + @Override + public final boolean filterRowKey(final byte[] rowKey, final int offset, final int length) { + + if ( null == rowsToInclude) return false; + byte[] exactRowBytes = new byte[length]; + try { + System.arraycopy(rowKey, offset, exactRowBytes, 0, length); + if ( rowsToInclude.getEqualToIndex(exactRowBytes) >= 0 ) { + //System.out.println("Allow row:" + new String(exactRowBytes)); + return false; + } else { + //System.out.println("Disallow row:" + new String(exactRowBytes)); + return true; + } + + } catch (IOException ex) { + int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize(); + String rowKeyStr = ( null == rowKey) ? "Null row key" : new String(rowKey); + String errMsg = "Error while finding fileration criteria for the row , " + rowKeyStr + + "\n" + ex.getMessage() + "\n" + + "With search scope inside id count : " + scopeToTheseRowsT; + System.err.println(errMsg); + HSearchLog.l.fatal(errMsg, ex); + + return false; + } + } + + @Override + public final boolean filterAllRemaining() { + return false; + } + + @Override + public final boolean filterRow() { + return false; + } + + @Override + public final ReturnCode filterKeyValue(final KeyValue arg0) { + return ReturnCode.INCLUDE; + } + + /** + * Version 0.94 FIX + */ + @Override + public final KeyValue transform(final KeyValue arg0) { + return arg0; + } + + + public final void deserialize(final byte[] input, final Collection output) throws IOException { + SortedBytesArray.getInstance().parse(input).values(output); + } + + public abstract IHSearchPlugin createPlugIn() throws IOException ; + public abstract IHSearchTable createTable(); + + + /** + * Override this method if you want to set more filters in processing. + * + FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RowFilter filter1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-22")) ); + list.addFilter(filter1); + list.addFilter(this); + return list; + + * @return + */ + public final FilterList getFilters() { + return null; + } + + /** + * Any information to be configured before starting the filtration process. + */ + public final void configure() { + } + + /** + * At the end release the resources. 
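HSearchScalarFilter leaves two hooks abstract, createTable() and createPlugIn(), so each column schema supplies its own IHSearchTable codec and IHSearchPlugin result collector; everything else (query serialization in write(), deserialization in readFields(), and the per-KeyValue dispatch in filterRow()) is inherited. A minimal concrete subclass sketch; ExampleTable and ExamplePlugin are hypothetical implementations:

```java
// Sketch: a concrete scalar filter (hypothetical ExampleTable / ExamplePlugin).
public class ExampleScalarFilter extends HSearchScalarFilter {

    // The no-arg constructor is required: HBase re-instantiates the filter on the
    // region server by reflection and then calls readFields() to restore its state.
    public ExampleScalarFilter() {
    }

    public ExampleScalarFilter(HSearchProcessingInstruction outputType, String query) {
        super(outputType, query);
    }

    @Override
    public IHSearchTable createTable() {
        return new ExampleTable();   // schema-specific codec
    }

    @Override
    public IHSearchPlugin createPlugIn() throws IOException {
        return new ExamplePlugin();  // schema-specific result collector
    }
}
```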
+ */ + public final void close() { + } +} + \ No newline at end of file diff --git a/src/compatibility/hadooplib_12/storage/HSearchTableReader.java b/src/compatibility/hadooplib_12/storage/HSearchTableReader.java new file mode 100644 index 0000000..777b6bd --- /dev/null +++ b/src/compatibility/hadooplib_12/storage/HSearchTableReader.java @@ -0,0 +1,125 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HBaseFacade; +import com.bizosys.hsearch.hbase.HReader; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.hbase.IScanCallBack; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchQuery; +import com.bizosys.hsearch.util.HSearchLog; + +public abstract class HSearchTableReader implements IScanCallBack { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + //public static ParallelHReader parallelReader = new ParallelHReader(10); + + public abstract HSearchGenericFilter getFilter(String multiQuery, Map multiQueryParts, HSearchProcessingInstruction outputType); + + public abstract void rows(Collection results, HSearchProcessingInstruction rowType); + + + @SuppressWarnings("unchecked") + public void setPartionsFamilies(String tableName, String colName, String range, Set uniqueFamilies) + throws ParseException, IOException { + + HSearchQuery query = new HSearchQuery(range); + HBaseTableSchemaDefn.getInstance(tableName).columnPartions.get(colName). + getMatchingFamilies(query, uniqueFamilies); + } + + public IScanCallBack getResultCollector() { + return this; + } + + @Override + public void process(byte[] pk, ColumnFamName fn, byte[] storedBytes) throws IOException { + + int length = ( null == storedBytes ) ? 
0 : storedBytes.length; + if ( length == 0 ) return; + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("Found Primary Key :" + new String(pk) + "/" + length); + } + + + + public void read( String tableName, String multiQuery, Map multiQueryParts, + HSearchProcessingInstruction outputType, boolean isPartitioned, boolean isParallel) + throws IOException, ParseException { + + HSearchGenericFilter filter = getFilter(multiQuery, multiQueryParts, outputType); + + Set uniqueFamilies = new HashSet(3); + + for ( String colNameQuolonId : multiQueryParts.keySet() ) { + + int colNameAndQIdSplitIndex = colNameQuolonId.indexOf(':'); + if ( -1 == colNameAndQIdSplitIndex || colNameQuolonId.length() - 1 == colNameAndQIdSplitIndex) { + throw new IOException("Sub queries expected as X:Y eg.\n" + + "structured:A OR unstructured:B\nstructured:A=f|1|1|1|c|*|*\nunstructured:B=*|*|*|*|*|*"); + } + String colName = colNameQuolonId.substring(0,colNameAndQIdSplitIndex); + setPartionsFamilies(tableName, colName, multiQueryParts.get(colNameQuolonId),uniqueFamilies); + } + + List families = new ArrayList(); + for (String family : uniqueFamilies) { + if ( INFO_ENABLED ) HSearchLog.l.info("HSearchTableReader > Adding Family: " + family); + families.add(new ColumnFamName( family.getBytes(), + new String( new char[] {HBaseTableSchemaDefn.getColumnName()}).getBytes() ) ); + } + + IScanCallBack recordsCollector = getResultCollector(); + + if ( isParallel ) { + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchTableReader > Searching in parallel."); + /** + * OLD Version + * parallelReader.getAllValues(tableName, families, filter, recordsCollector); + */ + HTableWrapper table = HBaseFacade.getInstance().getTable(tableName); + + try { + rows(new HSearchGenericCoProcessorFactory( + families, filter).execCoprocessorRows(table), outputType ); + } catch (Throwable th) { + throw new IOException(th); + } + + } else { + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchTableReader > Searching in Sequential."); + HReader.getAllValues(tableName,families, filter, recordsCollector); + } + } +} + diff --git a/src/compatibility/hadooplib_94/hbase/HDML.java b/src/compatibility/hadooplib_94/hbase/HDML.java new file mode 100644 index 0000000..f7cac1f --- /dev/null +++ b/src/compatibility/hadooplib_94/hbase/HDML.java @@ -0,0 +1,224 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
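The read() method above expects every key of multiQueryParts in the colName:queryId form that its IOException message documents. A sketch of composing a call, with the query values taken from that message; the table name and the concrete reader instance are illustrative:

```java
// Sketch: composing a multi-query in the documented "X:Y" convention.
String multiQuery = "structured:A OR unstructured:B";
Map<String, String> multiQueryParts = new HashMap<String, String>();
multiQueryParts.put("structured:A", "f|1|1|1|c|*|*");   // example from the error message
multiQueryParts.put("unstructured:B", "*|*|*|*|*|*");
HSearchProcessingInstruction outputType = new HSearchProcessingInstruction();
reader.read("demo-table", multiQuery, multiQueryParts, outputType,
        true /* isPartitioned */, true /* isParallel: coprocessor path */);
```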
+*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.TableExistsException; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.RowFilter; + +import com.bizosys.hsearch.util.HSearchLog; + +public class HDML { + private static final boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + private static final boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + /** + * Creates the table if not existing before + * @param tableName + * @param cols + * @throws IOException + */ + public static final boolean create(final String tableName, final List cols) throws HBaseException { + + if (DEBUG_ENABLED) + HSearchLog.l.debug("Creating HBase Table - " + tableName); + + try { + if (DEBUG_ENABLED) + HSearchLog.l.debug("Checking for table existance : " + tableName); + HBaseAdmin admin = HBaseFacade.getInstance().getAdmin(); + if ( admin.tableExists(tableName)) { + + if (INFO_ENABLED) + HSearchLog.l.info("Ignoring creation. Table already exists - " + tableName); + return false; + } else { + HTableDescriptor tableMeta = new HTableDescriptor(tableName); + for (HColumnDescriptor col : cols) tableMeta.addFamily(col); + admin.createTable(tableMeta); + if (INFO_ENABLED ) HSearchLog.l.info("Table Created - " + tableName); + return true; + } + + } catch (TableExistsException ex) { + HSearchLog.l.warn("Ignoring creation. Table already exists - " + tableName, ex); + throw new HBaseException("Failed Table Creation : " + tableName, ex); + } catch (MasterNotRunningException mnre) { + throw new HBaseException("Failed Table Creation : " + tableName, mnre); + } catch (IOException ioex) { + throw new HBaseException("Failed Table Creation : " + tableName, ioex); + } + } + + + /** + * Drop a table. This may take significantly large time as things + * are disabled first and then gets deleted. + * @param tableName + * @throws IOException + */ + public final static void drop(final String tableName) throws HBaseException { + + if (DEBUG_ENABLED) + HSearchLog.l.debug("Checking for table existance"); + + try { + HBaseAdmin admin = HBaseFacade.getInstance().getAdmin(); + byte[] bytesTableName = tableName.getBytes(); + if ( admin.tableExists(bytesTableName)) { + if ( ! 
admin.isTableDisabled(bytesTableName) ) + admin.disableTable(bytesTableName); + if ( admin.isTableDisabled(bytesTableName) ) + admin.deleteTable(bytesTableName); + if (INFO_ENABLED ) HSearchLog.l.info (tableName + " Table is deleted."); + } else { + HSearchLog.l.warn( tableName + " table is not found during drop operation."); + throw new HBaseException("Table does not exist"); + } + } catch (IOException ioex) { + throw new HBaseException("Table Drop Failed : " + tableName, ioex); + } + } + + public final static void truncate(final String tableName, final NV kv) throws IOException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List matched = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(kv.family, kv.name); + scanner = table.getScanner(scan); + + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + Delete delete = new Delete(r.getRow()); + delete = delete.deleteColumns(kv.family, kv.name); + table.delete(delete); + } + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) { + table.flushCommits(); + facade.putTable(table); + } + if ( null != matched) matched.clear(); + } + } + + public static final void truncateBatch(final String tableName, final String keyPrefix) throws IOException { + + if (INFO_ENABLED) HSearchLog.l.info( + "Deleting from " + tableName + " with prefix " + keyPrefix); + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List<Delete> deletes = new ArrayList<Delete>(256); + + int batchSize = 0; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + if ( null != keyPrefix) { + Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes()) ); + scan = scan.setFilter(rowFilter); + } + scanner = table.getScanner(scan); + + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + Delete delete = new Delete(r.getRow()); + deletes.add(delete); + + batchSize++; + if ( batchSize > 1000) { + if ( deletes.size() > 0 ) { + table.delete(deletes); + deletes.clear(); + } + batchSize = 0; + } + } + if ( deletes.size() > 0 ) table.delete(deletes); + + } finally { + if ( null != table ) table.flushCommits(); + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + if ( null != deletes) deletes.clear(); + } + } + + public final static void truncateBatch(final String tableName, final List<byte[]> rows) throws IOException { + + if ( null == rows) return; + if ( rows.size() == 0) return; + + HBaseFacade facade = null; + HTableWrapper table = null; + List<Delete> deletes = new ArrayList<Delete>(rows.size()); + + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + for (byte[] row : rows) { + Delete delete = new Delete(row); + deletes.add(delete); + } + table.delete(deletes); + + } finally { + if ( null != table ) table.flushCommits(); + if ( null != table ) facade.putTable(table); + if ( null != deletes) deletes.clear(); + } + } +} diff --git a/src/compatibility/hadooplib_94/hbase/HReader.java b/src/compatibility/hadooplib_94/hbase/HReader.java new file mode 100644 index 0000000..a018359 --- /dev/null +++ b/src/compatibility/hadooplib_94/hbase/HReader.java @@ -0,0 +1,464 @@ +/* +*
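Taken together, HDML covers the table lifecycle: create() is a checked no-op when the table exists, truncateBatch() deletes scanned rows in batches of about 1000 to bound client memory, and drop() disables the table before deleting it. A usage sketch with an illustrative table and family name:

```java
// Sketch: HDML lifecycle (illustrative names).
List<HColumnDescriptor> cols = new ArrayList<HColumnDescriptor>();
cols.add(new HColumnDescriptor("f".getBytes()));

HDML.create("demo-table", cols);            // returns false if the table already exists
HDML.truncateBatch("demo-table", "user-");  // delete every row whose key starts with "user-"
HDML.drop("demo-table");                    // disable, then delete
```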
Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.RowLock; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.KeyOnlyFilter; +import org.apache.hadoop.hbase.filter.PageFilter; +import org.apache.hadoop.hbase.filter.RegexStringComparator; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hbase.util.Bytes; + +import com.bizosys.hsearch.util.HSearchLog; + +public class HReader { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + /** + * Scalar data will contain the amount to increase + * @param tableName + * @param scalar + * @throws SystemFault + */ + public static final long idGenerationByAutoIncr(final String tableName, + final RecordScalar scalar, final long amount ) throws HBaseException { + + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + long incrementedValue = table.incrementColumnValue( + scalar.pk, scalar.kv.family, scalar.kv.name, amount); + return incrementedValue; + } catch (Exception ex) { + throw new HBaseException("Error in getScalar :" + scalar.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public final static boolean exists (final String tableName, final byte[] pk) throws HBaseException { + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(pk); + return table.exists(getter); + } catch (Exception ex) { + throw new HBaseException("Error in existance checking :" + pk.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static List getMatchingRowIds(String tableName, String rowIdPattern) throws IOException { + + FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RegexStringComparator regex = new RegexStringComparator(rowIdPattern); + RowFilter aFilter = new RowFilter(CompareOp.EQUAL, regex); + filters.addFilter(aFilter); + filters.addFilter(new 
KeyOnlyFilter()); + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + + List rowIds = new ArrayList(); + + try { + + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan.setFilter(filters); + scanner = table.getScanner(scan); + + for (Result r : scanner) { + if (null == r) continue; + byte[] rowB = r.getRow(); + if (null == rowB) continue; + if (rowB.length == 0) continue; + String row = new String(rowB); + rowIds.add(row); + } + return rowIds; + + } catch (IOException ex) { + HSearchLog.l.fatal("Error while looking table :" + tableName + " for regex, " + rowIdPattern , ex); + throw ex; + } finally { + if (null != scanner) scanner.close(); + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final List getCompleteRow (final String tableName, + final byte[] pk) throws HBaseException{ + + return getCompleteRow (tableName, pk, null, null); + } + + public static final List getCompleteRow (final String tableName, final byte[] pk, + final Filter filter) throws HBaseException { + + return getCompleteRow (tableName, pk, filter, null); + } + public final static List getCompleteRow (final String tableName, final byte[] pk, + final Filter filter, final RowLock lock) throws HBaseException { + + HBaseFacade facade = null; + HTableWrapper table = null; + Result r = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = ( null == lock) ? new Get(pk) : new Get(pk,lock); + if (null != filter) getter.setFilter(filter); + if ( table.exists(getter) ) { + r = table.get(getter); + if ( null == r ) return null; + List nvs = new ArrayList(r.list().size()); + for (KeyValue kv : r.list()) { + NVBytes nv = new NVBytes(kv.getFamily(),kv.getQualifier(), kv.getValue()); + nvs.add(nv); + } + return nvs; + } + return null; + } catch (Exception ex) { + throw new HBaseException("Error in existance checking :" + pk.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final void getScalar (final String tableName, final RecordScalar scalar) throws HBaseException { + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(scalar.pk); + Result result = table.get(getter); + if ( null == result) return; + byte[] val = result.getValue(scalar.kv.family, scalar.kv.name); + if ( null != val ) scalar.kv.data = val; + } catch (Exception ex) { + throw new HBaseException("Error in getScalar :" + scalar.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final byte[] getScalar (final String tableName, + final byte[] family, final byte[] col, final byte[] pk) throws HBaseException { + + return getScalar(tableName,family,col,pk,null); + } + + + public static final byte[] getScalar (final String tableName, + final byte[] family, final byte[] col, final byte[] pk, final Filter filter) throws HBaseException { + + if ( null == family || null == col || null == pk ) return null; + + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(pk); + if ( null != filter) getter = getter.setFilter(filter); + Result result = table.get(getter); + if 
( null == result) return null; + return result.getValue(family, col); + } catch (Exception ex) { + StringBuilder sb = new StringBuilder(); + sb.append("Input during exception = Table : [").append(tableName); + sb.append("] , Family : [").append(new String(family)); + sb.append("] , Column : [").append(new String(col)); + sb.append("] , Key : [").append(new String(pk)); + sb.append(']'); + throw new HBaseException(sb.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final void getAllValues(final String tableName, final byte[] family, + final byte[] col, final String keyPrefix, final IScanCallBack callback ) throws IOException { + + Filter rowFilter = null; + if ( null != keyPrefix) { + rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes())); + } + getAllValues(tableName, family, col, rowFilter, callback); + + } + + public static final void getAllValues(final String tableName, final byte[] family, + final byte[] col, final Filter filter, final IScanCallBack callback ) throws IOException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List matched = null; + try { + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > getAllValues."); + + facade = HBaseFacade.getInstance(); + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Table Facade is obtained."); + table = facade.getTable(tableName); + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Table is obtained."); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(family, col); + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Scanner is created."); + + if ( null != filter) scan = scan.setFilter(filter); + + scanner = table.getScanner(scan); + + long timeS = System.currentTimeMillis(); + + ColumnFamName aColFamilyName = new ColumnFamName(family, col); + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + + byte[] storedBytes = r.getValue(family, col); + if ( null == storedBytes) continue; + callback.process(r.getRow(), aColFamilyName, storedBytes); + } + + if ( DEBUG_ENABLED) { + long timeE = System.currentTimeMillis(); + HSearchLog.l.debug("HReader.getAllValues (" + tableName + ") execution time = " + + (timeE - timeS) ); + } + + } catch ( IOException ex) { + throw ex; + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + if ( null != matched) matched.clear(); + } + } + + + public static final void getAllValues(final String tableName, final List columns, + final String keyPrefix, final IScanCallBack callback ) throws IOException { + + Filter rowFilter = null; + if ( null != keyPrefix) { + rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes())); + } + getAllValues(tableName, columns, rowFilter, callback); + + } + + public final static void getAllValues(final String tableName, final List columns, + final Filter filter, final IScanCallBack callback ) throws IOException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List matched = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + for (ColumnFamName aColFamilyName : columns) { + scan = 
scan.addColumn(aColFamilyName.family, aColFamilyName.name); + } + + if ( null != filter) scan = scan.setFilter(filter); + + scanner = table.getScanner(scan); + + long timeS = System.currentTimeMillis(); + + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + + for (ColumnFamName aColFamilyName : columns) { + byte[] storedBytes = r.getValue(aColFamilyName.family, aColFamilyName.name); + if ( null == storedBytes) continue; + callback.process(r.getRow(), aColFamilyName, storedBytes); + } + } + + if ( DEBUG_ENABLED) { + long timeE = System.currentTimeMillis(); + HSearchLog.l.debug("HReader.getAllValues (" + tableName + ") execution time = " + + (timeE - timeS) ); + } + + } catch ( IOException ex) { + throw ex; + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + if ( null != matched) matched.clear(); + } + } + + + /** + * Get all the keys of the table cutting the keyPrefix. + * @param tableName Table name + * @param kv Key-Value + * @param startKey Start Row Primary Key + * @param pageSize Page size + * @return Record Keys + * @throws SystemFault + */ + public static final void getAllKeys(final String tableName, final NV kv, + final String keyPrefix, final IScanCallBack callback) throws HBaseException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(kv.family, kv.name); + + if ( null != keyPrefix) { + Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes())); + scan = scan.setFilter(rowFilter); + } + + scanner = table.getScanner(scan); + ColumnFamName familyName = new ColumnFamName(kv.family, kv.name); + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + callback.process(r.getRow(), familyName, null); + } + } catch ( IOException ex) { + throw new HBaseException(ex); + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + } + } + + /** + * Get the keys of the table + * @param tableName Table name + * @param kv Key-Value + * @param startKey Start Row Primary Key + * @param pageSize Page size + * @return Record Keys + * @throws SystemFault + */ + public static final List getKeysForAPage(final String tableName, final NV kv, + final byte[] startKey, final String keyPrefix, final int pageSize) throws HBaseException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List keys = ( pageSize > 0 ) ? 
+ new ArrayList(pageSize): new ArrayList(1024); + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(kv.family, kv.name); + + if( null != keyPrefix) { + Filter rowFilter = new RowFilter(CompareFilter.CompareOp.NOT_EQUAL, + new BinaryPrefixComparator(Bytes.toBytes(keyPrefix))); + scan = scan.setFilter(rowFilter); + } + + if ( pageSize > 0) { + PageFilter pageFilter = new PageFilter(pageSize); + scan = scan.setFilter(pageFilter); + } + + if ( null != startKey) scan = scan.setStartRow(startKey); + + scanner = table.getScanner(scan); + + int counter = 0; + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + + if ( counter++ > pageSize) break; + keys.add(r.getRow()); + } + return keys; + } catch ( IOException ex) { + throw new HBaseException(ex); + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + } + } +} diff --git a/src/compatibility/hadooplib_94/hbase/HTableWrapper.java b/src/compatibility/hadooplib_94/hbase/HTableWrapper.java new file mode 100644 index 0000000..20b4904 --- /dev/null +++ b/src/compatibility/hadooplib_94/hbase/HTableWrapper.java @@ -0,0 +1,208 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.RetriesExhaustedException; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.RowLock; +import org.apache.hadoop.hbase.client.Scan; + +import com.bizosys.hsearch.util.HSearchLog; + +/** + * Wraps an HBase tableInterface object. 
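One caveat in getKeysForAPage above: Scan.setFilter replaces whatever filter was set before, so when both a key prefix and a positive page size are supplied, the PageFilter silently discards the RowFilter. Wrapping both in a FilterList, as getMatchingRowIds already does, applies them together; a sketch of the fix inside that method:

```java
// Sketch: apply the prefix filter and the page filter together,
// replacing the two separate setFilter calls.
FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
if (null != keyPrefix) {
    filters.addFilter(new RowFilter(CompareFilter.CompareOp.NOT_EQUAL,
            new BinaryPrefixComparator(Bytes.toBytes(keyPrefix))));
}
if (pageSize > 0) {
    filters.addFilter(new PageFilter(pageSize));
}
scan = scan.setFilter(filters);
```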
+ * @author karan + *@see org.apache.hadoop.hbase.client.HTableInterface + */ +public final class HTableWrapper { + + private static final boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + /** + * The tableInterface interface + */ + public HTableInterface tableInterface = null; + public HTable innerHtable = null; + + /** + * Name of HBase tableInterface + */ + String tableName = null; + + /** + * Constructor + * @param tableName The tableInterface name + * @param tableInterface tableInterface interface + */ + public HTableWrapper(String tableName, HTableInterface tableInterface) { + this.tableInterface = tableInterface; + this.tableName = tableName; + } + + /** + * Get the tableInterface name in bytes + * @return tableInterface name as byte array + */ + public byte[] getTableName() { + return tableInterface.getTableName(); + } + + /** + * Get tableInterface description + * @return tableInterface Descriptor + * @throws IOException + */ + public HTableDescriptor getTableDescriptor() throws IOException { + return tableInterface.getTableDescriptor(); + } + + /** + * Test for the existence of columns in the tableInterface, as specified in the Get. + * @param get object + * @return True on existence + * @throws IOException + */ + public boolean exists(Get get) throws IOException { + return tableInterface.exists(get); + } + + public Result get(Get get) throws IOException{ + return tableInterface.get(get); + } + + public ResultScanner getScanner(Scan scan) throws IOException { + return tableInterface.getScanner(scan); + } + + public ResultScanner getScanner(byte[] family) throws IOException { + return tableInterface.getScanner(family); + } + + public ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException { + return tableInterface.getScanner(family, qualifier); + } + + public void put(Put put) throws IOException { + try { + tableInterface.put(put); + } catch ( RetriesExhaustedException ex) { + HBaseFacade.getInstance().recycleTable(this); + tableInterface.put(put); + } + } + + public void put(List puts) throws IOException { + try { + tableInterface.put(puts); + } catch ( RetriesExhaustedException ex) { + HBaseFacade.getInstance().recycleTable(this); + tableInterface.put(puts); + } + } + + public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, + byte[] value, Put put) throws IOException { + + return tableInterface.checkAndPut(row, family, qualifier,value, put ); + } + + public void delete(Delete delete) throws IOException { + tableInterface.delete(delete ); + } + + public void delete(List deletes) throws IOException { + if ( null == deletes) return; + if ( INFO_ENABLED) HSearchLog.l.info("HTableWrapper: Batch Deleting: " + deletes.size()); + tableInterface.delete(deletes); + } + + public void flushCommits() throws IOException { + tableInterface.flushCommits(); + } + + public void close() throws IOException { + tableInterface.close(); + if ( null != innerHtable) { + innerHtable.close(); + innerHtable = null; + } + } + + public RowLock lockRow(byte[] row) throws IOException { + return tableInterface.lockRow(row); + } + + public void unlockRow(RowLock rl) throws IOException { + if ( null == rl) return; + tableInterface.unlockRow(rl); + } + + public long incrementColumnValue(byte[] row, + byte[] family, byte[] qualifier, long amount) throws IOException { + + return tableInterface.incrementColumnValue(row, family, qualifier, amount, true); + } + + public Object[] batch(List actions) throws IOException, InterruptedException { + return 
tableInterface.batch(actions); + } + + public HRegionLocation getRegionLocation(byte[] row) throws IOException { + + if ( null == innerHtable ) { + synchronized (this.tableName) { + if ( null == innerHtable) innerHtable = + new HTable(tableInterface.getConfiguration(), this.tableName); + } + } + return innerHtable.getRegionLocation(row); + } + + public List<HRegionLocation> getRegionLocation(List<byte[]> rows) throws IOException { + if ( null == rows) return null; + List<HRegionLocation> regions = new ArrayList<HRegionLocation>(); + + if ( null == innerHtable ) { + synchronized (this.tableName) { + if ( null == innerHtable) innerHtable = + new HTable(tableInterface.getConfiguration(), this.tableName); + } + } + + for (byte[] row : rows) { + regions.add(innerHtable.getRegionLocation(row)); + } + return regions; + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/hbase/HWriter.java b/src/compatibility/hadooplib_94/hbase/HWriter.java new file mode 100644 index 0000000..cabed43 --- /dev/null +++ b/src/compatibility/hadooplib_94/hbase/HWriter.java @@ -0,0 +1,668 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.RowLock; + +import com.bizosys.hsearch.util.HSearchLog; + + +/** + * All HBase write calls go through here. + * It supports Insert, Delete, Update and Merge operations. + * Merge is an operation where the read and the write happen inside + * a lock. This lock is never exposed to the caller function. + * @author karan + * + */ +public class HWriter { + + private static final boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + //private boolean isBatchMode = false; + private static HWriter singleton = null; + + /** + * Factory for getting the HWriter instance. + * Currently HWriter can execute in a thread-safe environment, with + * multiple writers originating from a single-machine or multi-machine + * environment, or out of a single-threaded write environment. + * @param enableThreadSafety Should it run in a parallel clients mode + * @return HWriter instance. + */ + public static HWriter getInstance(boolean enableThreadSafety ) { + if ( null != singleton) return singleton; + synchronized (HWriter.class) { + if ( null != singleton) return singleton; + singleton = new HWriter(); + } + return singleton; + } + + /** + * Default constructor. + * Do not use directly; obtain an instance via getInstance(). + */ + private HWriter() { + } + + /** + * Insert just a single scalar record. If the record already exists, it is overwritten.
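A small concurrency note on getInstance above: it double-checks a non-volatile static field, which the Java memory model does not guarantee safe for lazily initialized state. HWriter carries no instance state, so the risk here is minimal, but the initialization-on-demand holder idiom avoids the pattern entirely; a sketch:

```java
// Sketch: holder idiom as an alternative to double-checked locking.
public class HWriterHolderExample {

    private HWriterHolderExample() {
    }

    private static class Holder {
        // Initialized on first access; class initialization is thread-safe per the JLS.
        static final HWriterHolderExample INSTANCE = new HWriterHolderExample();
    }

    public static HWriterHolderExample getInstance() {
        return Holder.INSTANCE;
    }
}
```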
+ * A scalar record contains just one column. + * @param tableName Table name + * @param record A Table record + * @throws IOException + */ + public final void insertScalar(final String tableName, final RecordScalar record) throws IOException { + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insertScalar:record " + tableName); + + byte[] pk = record.pk; + Put update = new Put(pk); + NV kv = record.kv; + update.add(kv.family,kv.name, kv.data); + update.setWriteToWAL(true); + + HTableWrapper table = null; + HBaseFacade facade = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + table.put(update); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + } + } + + /** + * Insert multiple scalar records. If records exist, it overrides + * A scalar record contains just one column. + * @param tableName Table name + * @param records Table records + * @throws IOException + */ + public final void insertScalar(final String tableName, + final List records) throws IOException { + + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insertScalar:records table " + tableName); + + List updates = ObjectFactory.getInstance().getPutList(); + + for (RecordScalar record : records) { + Put update = new Put(record.pk); + NV kv = record.kv; + update.add(kv.family,kv.name, kv.data); + update.setWriteToWAL(true); + updates.add(update); + } + HTableWrapper table = null; + HBaseFacade facade = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + table.put(updates); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + if ( null != updates) ObjectFactory.getInstance().putPutsList(updates); + } + } + + /** + * Insert a record + * @param tableName + * @param record + * @throws IOException + */ + public final void insert(final String tableName, final Record record) throws IOException { + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert to table " + tableName); + + HTableWrapper table = null; + HBaseFacade facade = null; + try { + Put update = new Put(record.pk); + for (NV param : record.getNVs()) { + update.add(param.family,param.name, param.data); + } + update.setWriteToWAL(true); + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + table.put(update); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + } + } + + /** + * Inserting multiple records. It overrides the values of existing records. + * from the time we have read.. 
+ * @param tableName + * @param records + * @throws IOException + */ + public final void insert(final String tableName, final List records) throws IOException { + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert:records to table " + tableName); + + List updates = ObjectFactory.getInstance().getPutList(); + + for (Record record : records) { + Put update = new Put(record.pk); + for (NV param : record.getNVs()) { + update.add(param.family, param.name, param.data); + } + update.setWriteToWAL(true); + updates.add(update); + } + HTableWrapper table = null; + HBaseFacade facade = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert:Putting records " + updates.size()); + table.put(updates); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + if ( null != updates) ObjectFactory.getInstance().putPutsList(updates); + } + } + + /** + * Update a table. It calls back the update call back function for + * various modifications during update operations as bytes merging. + * @param tableName + * @param pk + * @param pipe + * @param families + * @throws IOException + */ + public final void update(final String tableName, + final byte[] pk, final IUpdatePipe pipe, final byte[][] families) throws IOException { + + if ( null == tableName || null == pk) return; + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> update to table " + tableName); + + HTableWrapper table = null; + HBaseFacade facade = null; + RowLock lock = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + /** + * Scope down the existance check getter, not to mingle with actual one. + */ + Get existanceGet = new Get(pk); + if ( ! table.exists(existanceGet) ) return; + + lock = table.lockRow(pk); + Get lockedGet = ( null == lock) ? new Get(pk) : new Get(pk,lock); + if ( null != families) { + for (byte[] family : families) { + lockedGet = lockedGet.addFamily(family); + } + } + + Put lockedUpdate = null; + Delete lockedDelete = null; + + int familiesT = ( null == families) ? 0 : families.length; + int[] familyByteLen = new int[familiesT]; + + Result r = table.get(lockedGet); + if ( null == r) return; + if ( null == r.list()) return; + + for (KeyValue kv : r.list()) { + byte[] curVal = kv.getValue(); + if ( null == curVal) continue; + if ( 0 == curVal.length) continue; + byte[] modifiedB = pipe.process(kv.getFamily(), kv.getQualifier(), curVal); + int modifiedBLen = ( null == modifiedB) ? 
0 : modifiedB.length; + + /** + * Count whether the family is to be dropped + */ + for (int i=0; i records) + throws IOException { + + if ( null == tableName || null == records) return; + if (DEBUG_ENABLED) + HSearchLog.l.debug("HWriter: mergeScalar (" + tableName + "), Count = " + records.size()); + + HTableWrapper table = null; + HBaseFacade facade = null; + List<RowLock> locks = ObjectFactory.getInstance().getRowLockList(); + List<Put> updates = ObjectFactory.getInstance().getPutList(); + + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + for (RecordScalar scalar : records) { + byte[] pk = scalar.pk; + if ( 0 == pk.length) continue; + Get getter = new Get(pk); + byte[] famB = scalar.kv.family; + byte[] nameB = scalar.kv.name; + RowLock lock = null; + + if ( table.exists(getter) ) { + lock = table.lockRow(pk); + if ( null == lock) { + throw new IOException("Unable to acquire lock," + new String(pk) + + " for the table - " + tableName); + } + locks.add(lock); + + Get existingGet = (null == lock) ? new Get(pk) : new Get(pk, lock); + existingGet = existingGet.addColumn(famB, nameB); + Result r = table.get(existingGet); + if ( ! scalar.merge(r.getValue(famB, nameB)) ) { + if ( null != lock ) { + table.unlockRow(lock); + locks.remove(lock); + } + continue; + } + } + + NV kv = scalar.kv; + byte[] data = kv.data; + if ( null == data ) { + try { + if ( null != lock ) { + table.unlockRow(lock); + locks.remove(lock); + lock = null; + } + + } catch (Exception ex) { + HSearchLog.l.warn("HWriter:mergeScalar > Ignoring unlock exception :" , ex); + } + continue; + } + + Put update = ( null == lock ) ? new Put(pk) : new Put(pk, lock); + update.add(famB,nameB, data); + update.setWriteToWAL(true); + updates.add(update); + } + + table.put(updates); + table.flushCommits(); + + } finally { + boolean goodTable = true; + for (RowLock lock: locks) { + try { + if ( null != lock ) table.unlockRow(lock); + } catch (Exception ex) { + reportUnlockException(ex); + goodTable = false; + } + } + + if ( null != facade && null != table && goodTable) { + facade.putTable(table); + } + + if ( null != locks ) ObjectFactory.getInstance().putRowLockList(locks); + if ( null != updates ) ObjectFactory.getInstance().putPutsList(updates); + } + } + + /** + * Merge a record, accessing the existing value. + * It happens under the locking mechanism. + * @param tableName Table name + * @param record A record + * @throws IOException + */ + public final void merge(final String tableName, final Record record) + throws IOException { + + if ( null == tableName || null == record) return; + if (DEBUG_ENABLED) + HSearchLog.l.debug("HWriter:merge Record (" + tableName + ")"); + + HTableWrapper table = null; + HBaseFacade facade = null; + RowLock lock = null; + + try { + byte[] pk = record.pk; + + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + //Step 0 : If it does not exist, there is nothing to merge. Just insert. + Get existsCheck = new Get(pk); + if ( ! table.exists(existsCheck) ) { + insert(tableName, record); + return; + } + + //Step 1 : Acquire a lock before merging + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Locking Row " ); + lock = table.lockRow(pk); + if ( null == lock) { + throw new IOException("HWriter:merge Unable to acquire lock," + new String(pk) + + " for the table - " + tableName); + } + Get existingGet = ( null == lock) ?
new Get(pk) : new Get(pk, lock); + for (NV nv : record.getBlankNVs()) { + existingGet = existingGet.addColumn(nv.family, nv.name); + } + + //Step 2 : Merge data with existing values + Result r = table.get(existingGet); + if ( null != r) { + if ( null != r.list()) { + for (KeyValue kv : r.list()) { + byte[] existingB = kv.getValue(); + if ( null == existingB) continue; + if ( 0 == existingB.length)continue; + record.merge(kv.getFamily(),kv.getQualifier(), existingB); + } + } + } + + //Step 3 : Only add values which have changed. + Put update = ( null == lock ) ? new Put(pk) : new Put(pk, lock); + int totalCols = 0; + for (NV nv : record.getNVs()) { + byte[] data = nv.data; + if ( nv.isDataUnchanged) continue; + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> data Size " + data.length); + update = update.add(nv.family, nv.name, data); + totalCols++; + } + + //Step 4 : If no change.. Nothing to do. + if ( totalCols == 0 ) return; + + + //Step 5 : Write the changes. + update.setWriteToWAL(true); + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Committing Updates" ); + table.put(update); + table.flushCommits(); + + } finally { + + boolean goodTable = true; + if ( null != lock ) { + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Un Locking Row " ); + try { table.unlockRow(lock); } catch (Exception ex) { + reportUnlockException(ex); + goodTable = false; + } + } + if ( null != facade && null != table && goodTable) { + facade.putTable(table); + } + } + } + + private final void reportUnlockException(final Exception ex) { + Runtime runTime = Runtime.getRuntime(); + String errorMsg = "Max Mem: " + runTime.maxMemory()/1024; + errorMsg = errorMsg + ", Total Mem: " + runTime.totalMemory()/1024; + errorMsg = errorMsg + ", Free Mem: " + runTime.freeMemory()/1024; + HSearchLog.l.warn("HWriter:reportUnlockException > Ignoring Unlock exp. May be memory Issue \n" + errorMsg, ex); + } + + + /** + * Compare byte values + * @param offset Starting position of compare with Byte Array + * @param inputBytes Compare with Bytes + * @param compareBytes Compare to Bytes + * @return True if matches + */ + private final boolean compareBytes(final int offset, + final byte[] inputBytes, final byte[] compareBytes) { + + int inputBytesT = inputBytes.length; + int compareBytesT = compareBytes.length; + if ( compareBytesT != inputBytesT - offset) return false; + + if ( compareBytes[0] != inputBytes[offset]) return false; + if ( compareBytes[compareBytesT - 1] != inputBytes[compareBytesT + offset - 1] ) return false; + + switch (compareBytesT) + { + case 3: + return compareBytes[1] == inputBytes[1 + offset]; + case 4: + return compareBytes[1] == inputBytes[1 + offset] && + compareBytes[2] == inputBytes[2 + offset]; + case 5: + return compareBytes[1] == inputBytes[1+ offset] && + compareBytes[2] == inputBytes[2+ offset] && + compareBytes[3] == inputBytes[3+ offset]; + case 6: + return compareBytes[1] == inputBytes[1+ offset] && + compareBytes[3] == inputBytes[3+ offset] && + compareBytes[2] == inputBytes[2+ offset] && + compareBytes[4] == inputBytes[4+ offset]; + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + case 16: + case 17: + case 18: + case 19: + case 20: + case 21: + case 22: + case 23: + case 24: + case 25: + case 26: + case 27: + case 28: + case 29: + case 30: + for ( int i=offset; i< compareBytesT - 1; i++) { + if ( compareBytes[i] != inputBytes[offset + i]) return false; + } + break; + + case 31: + + for ( int a = 1; a <= 6; a++) { + if ( ! 
+ (compareBytes[a] == inputBytes[a+offset] && + compareBytes[a+6] == inputBytes[a+6+offset] && + compareBytes[a+12] == inputBytes[a+12+offset] && + compareBytes[a+18] == inputBytes[a+18+offset] && + compareBytes[a+24] == inputBytes[a+24+offset]) ) return false; + } + break; + default: + + for ( int i=offset; i< compareBytesT - 1; i++) { + if ( compareBytes[i] != inputBytes[offset + i]) return false; + } + } + return true; + } + + /** + * Compare two bytes + * @param inputBytes Compare with Bytes + * @param compareBytes Compare to Bytes + * @return True if matches + */ + private final boolean compareBytes(final byte[] inputBytes, final byte[] compareBytes) { + return compareBytes(0,inputBytes,compareBytes); + } + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/hbase/ObjectFactory.java b/src/compatibility/hadooplib_94/hbase/ObjectFactory.java new file mode 100644 index 0000000..1d04865 --- /dev/null +++ b/src/compatibility/hadooplib_94/hbase/ObjectFactory.java @@ -0,0 +1,240 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
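The HWriter merge paths above all follow the same HBase 0.94-era read-modify-write shape: take an explicit RowLock on the row, re-read the current cell under that lock, merge, put under the same lock, and release in a finally block. A minimal, self-contained sketch of that sequence, assuming a plain HTable and hypothetical family/qualifier names (the RowLock API existed only through HBase 0.94 and was removed in 0.96):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.RowLock;

    public class ReadMergeWriteSketch {
        // Sketch of the lock -> read -> merge -> put sequence used by HWriter.merge
        // above. Table, family ("f") and qualifier ("ctr") names are hypothetical.
        public static void mergeCounter(HTable table, byte[] pk) throws IOException {
            RowLock lock = table.lockRow(pk);            // serialize writers on this row
            try {
                Get get = new Get(pk, lock);             // read under the lock
                get.addColumn("f".getBytes(), "ctr".getBytes());
                Result r = table.get(get);
                byte[] cur = r.getValue("f".getBytes(), "ctr".getBytes());
                long old = (cur == null) ? 0L : Long.parseLong(new String(cur));

                Put put = new Put(pk, lock);             // write under the same lock
                put.add("f".getBytes(), "ctr".getBytes(), Long.toString(old + 1).getBytes());
                put.setWriteToWAL(true);
                table.put(put);
                table.flushCommits();
            } finally {
                table.unlockRow(lock);                   // always release, as the finally blocks above do
            }
        }
    }

The scalar and record variants above layer pooled lock/put lists and batched flushCommits on top of this same core loop.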
+*/ + +package com.bizosys.hsearch.hbase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Hashtable; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Stack; + +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.RowLock; + +public class ObjectFactory { + + private static int MINIMUM_CACHE = 10; + private static int MAXIMUM_CACHE = 4096; + + private static ObjectFactory thisInstance = new ObjectFactory(); + public static ObjectFactory getInstance() { + return thisInstance; + } + + Stack> putsLists = new Stack>(); + Stack> locksLists = new Stack>(); + Stack> byteArrLists = new Stack>(); + Stack> blockMap = new Stack>(); + Stack> blockMaps = new Stack>(); + Stack> nvLists = new Stack>(); + Stack> stringSets = new Stack>(); + Stack> stringLists = new Stack>(); + Stack> shortLists = new Stack>(); + Stack> longSets = new Stack>(); + Stack> integerLists = new Stack>(); + + + public final List getRowLockList() { + List locks = null; + if (locksLists.size() > MINIMUM_CACHE ) locks = locksLists.pop(); + if ( null != locks ) return locks; + return new ArrayList(256); + } + + public final void putRowLockList(final List locks ) { + if ( null == locks) return; + locks.clear(); + if (locksLists.size() > MAXIMUM_CACHE ) return; + if ( locksLists.contains(locks)) return; + locksLists.push(locks); + } + + public final List getPutList() { + List puts = null; + if (putsLists.size() > MINIMUM_CACHE ) puts = putsLists.pop(); + if ( null != puts ) return puts; + return new ArrayList(256); + } + + public final void putPutsList(List puts ) { + if ( null == puts) return; + puts.clear(); + if (putsLists.size() > MAXIMUM_CACHE ) return; + if ( putsLists.contains(puts)) return; + putsLists.push(puts); + } + + public final List getByteArrList() { + List bytesA = null; + if (byteArrLists.size() > MINIMUM_CACHE ) bytesA = byteArrLists.pop(); + if ( null != bytesA ) return bytesA; + return new ArrayList(32); + } + + public final void putByteArrList(final List bytesA ) { + if ( null == bytesA) return; + bytesA.clear(); + if (byteArrLists.size() > MAXIMUM_CACHE ) return; + if ( byteArrLists.contains(bytesA)) return; + byteArrLists.push(bytesA); + } + + public final List getNVList() { + List nvs = null; + if (nvLists.size() > MINIMUM_CACHE ) nvs = nvLists.pop(); + if ( null != nvs ) return nvs; + return new ArrayList(); + } + + public final void putNVList(final List nvs ) { + if ( null == nvs) return; + nvs.clear(); + if (nvLists.size() > MAXIMUM_CACHE ) return; + if ( nvLists.contains(nvs)) return; + nvLists.push(nvs); + } + + public final Map getBytesList() { + Map lstB = null; + if (blockMap.size() > MINIMUM_CACHE ) lstB = blockMap.pop(); + if ( null != lstB ) return lstB; + return new HashMap(); + } + + public final void putBytesList(final Map lstB ) { + if ( null == lstB) return; + lstB.clear(); + if (blockMap.size() > MAXIMUM_CACHE ) return; + if ( blockMap.contains(lstB)) return; + blockMap.push(lstB); + } + + + public final Map getByteBlockMap(){ + Map obj = null; + if (blockMaps.size() > MINIMUM_CACHE ) obj = blockMaps.pop(); + if ( null != obj ) return obj; + return new Hashtable(); + } + + public final void putByteBlockMap(final Map obj){ + if ( null == obj) return; + obj.clear(); + if (blockMaps.size() > MAXIMUM_CACHE ) return; + if ( blockMaps.contains(obj)) return; + blockMaps.push(obj); + } + + public final Set getStringSet(){ + Set obj = null; + if (stringSets.size() > MINIMUM_CACHE ) 
obj = stringSets.pop(); + if ( null != obj ) return obj; + return new HashSet(); + } + + public final void putStringSet(final Set obj){ + if ( null == obj) return; + obj.clear(); + if (stringSets.size() > MAXIMUM_CACHE ) return; + if ( stringSets.contains(obj)) return; + stringSets.push(obj); + } + + public final List getStringList(){ + List obj = null; + if (stringLists.size() > MINIMUM_CACHE ) obj = stringLists.pop(); + if ( null != obj ) return obj; + return new ArrayList(); + } + + public final void putStringList(final List obj){ + if ( null == obj) return; + obj.clear(); + if (stringLists.size() > MAXIMUM_CACHE ) return; + if ( stringLists.contains(obj)) return; + stringLists.push(obj); + } + + public final List getShortList(){ + List obj = null; + if (shortLists.size() > MINIMUM_CACHE ) obj = shortLists.pop(); + if ( null != obj ) return obj; + return new ArrayList(); + } + + public final void putShortList(final List obj){ + if ( null == obj) return; + obj.clear(); + if (shortLists.size() > MAXIMUM_CACHE ) return; + if ( shortLists.contains(obj)) return; + shortLists.push(obj); + } + + public final List getIntegerList(){ + List obj = null; + if (integerLists.size() > MINIMUM_CACHE ) obj = integerLists.pop(); + if ( null != obj ) return obj; + return new ArrayList(); + } + + public final void putIntegerList(final List obj){ + if ( null == obj) return; + obj.clear(); + if (integerLists.size() > MAXIMUM_CACHE ) return; + if ( integerLists.contains(obj)) return; + integerLists.push(obj); + } + + + public final Set getLongSet(){ + Set obj = null; + if (longSets.size() > MINIMUM_CACHE ) obj = longSets.pop(); + if ( null != obj ) return obj; + return new HashSet(); + } + + public final void putLongSet(final Set obj){ + if ( null == obj) return; + obj.clear(); + if (longSets.size() > MAXIMUM_CACHE ) return; + if ( longSets.contains(obj)) return; + longSets.push(obj); + } + + public final String getStatus() { + StringBuilder sb = new StringBuilder(476); + sb.append(""); + sb.append("blockMap:").append(blockMap.size()).append('|'); + sb.append("blockMaps:").append(blockMaps.size()).append('|'); + sb.append("nvLists:").append(nvLists.size()).append('|'); + sb.append("stringSets:").append(stringSets.size()).append('|'); + sb.append("stringLists:").append(stringLists.size()).append('|'); + sb.append("shortLists:").append(shortLists.size()).append('|'); + sb.append("longSets:").append(longSets.size()).append('|'); + sb.append("integerLists:").append(integerLists.size()); + sb.append(""); + return sb.toString(); + } + +} diff --git a/src/compatibility/hadooplib_94/protobuf/HSearchCoprocessorProtos.java b/src/compatibility/hadooplib_94/protobuf/HSearchCoprocessorProtos.java new file mode 100644 index 0000000..e00a04e --- /dev/null +++ b/src/compatibility/hadooplib_94/protobuf/HSearchCoprocessorProtos.java @@ -0,0 +1,4 @@ +package com.bizosys.hsearch.treetable.storage.protobuf.generated; + +public final class HSearchCoprocessorProtos { +} diff --git a/src/compatibility/hadooplib_94/protobuf/HSearchFilterProtos.java b/src/compatibility/hadooplib_94/protobuf/HSearchFilterProtos.java new file mode 100644 index 0000000..12f9081 --- /dev/null +++ b/src/compatibility/hadooplib_94/protobuf/HSearchFilterProtos.java @@ -0,0 +1,4 @@ +package com.bizosys.hsearch.treetable.storage.protobuf.generated; + +public final class HSearchFilterProtos { +} diff --git a/src/compatibility/hadooplib_94/storage/CacheStorage.java b/src/compatibility/hadooplib_94/storage/CacheStorage.java new file mode 100644 index 
0000000..ec8268a --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/CacheStorage.java @@ -0,0 +1,57 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; + +import com.bizosys.hsearch.hbase.HDML; + + +public class CacheStorage { + + public static String TABLE_NAME = "hsearch-cache"; + public static final String CACHE_COLUMN = "c"; + public static final byte[] CACHE_COLUMN_BYTES = CACHE_COLUMN.getBytes(); + private static CacheStorage singleton = null; + + public static CacheStorage getInstance() throws IOException { + + if ( null == singleton ) { + synchronized (CacheStorage.class.getName()) { + if ( null == singleton ) { + singleton = new CacheStorage(); + } + } + } + return singleton; + } + + + private CacheStorage() throws IOException { + + HColumnDescriptor col = new HColumnDescriptor( CACHE_COLUMN.getBytes()) ; + + col.setMinVersions(1); + col.setMaxVersions(1); + col.setKeepDeletedCells(false); + col.setCompressionType(Compression.Algorithm.NONE); + col.setEncodeOnDisk(false); + col.setDataBlockEncoding(DataBlockEncoding.NONE); + col.setInMemory(false); + col.setBlockCacheEnabled(true); + col.setTimeToLive(HConstants.FOREVER); + col.setBloomFilterType(BloomType.NONE); + col.setScope(HConstants.REPLICATION_SCOPE_GLOBAL); + + List colFamilies = new ArrayList(); + colFamilies.add(col); + HDML.create(TABLE_NAME, colFamilies); + + } +} diff --git a/src/compatibility/hadooplib_94/storage/HBaseTableSchemaCreator.java b/src/compatibility/hadooplib_94/storage/HBaseTableSchemaCreator.java new file mode 100644 index 0000000..fbf3e04 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HBaseTableSchemaCreator.java @@ -0,0 +1,148 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
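CacheStorage.getInstance() above is double-checked locking on a field that is not volatile, synchronized on the interned String CacheStorage.class.getName() rather than on the class object itself. Under the Java memory model the non-volatile read can observe a partially constructed instance, and locking an interned string literal can contend with any unrelated code that locks the same string. A safer variant of the same lazy-init idea, as a sketch:

    public final class LazySingletonSketch {
        private static volatile LazySingletonSketch instance = null;

        public static LazySingletonSketch getInstance() {
            LazySingletonSketch local = instance;            // one volatile read on the fast path
            if (local == null) {
                synchronized (LazySingletonSketch.class) {   // lock the Class, not its name String
                    local = instance;
                    if (local == null) {
                        instance = local = new LazySingletonSketch();
                    }
                }
            }
            return local;
        }

        private LazySingletonSketch() { }
    }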
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; +import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; +import org.apache.log4j.Logger; + +import com.bizosys.hsearch.hbase.HDML; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.conf.Configuration; + +public final class HBaseTableSchemaCreator { + + private static HBaseTableSchemaCreator instance = null; + public static Logger l = Logger.getLogger(HBaseTableSchemaCreator.class.getName()); + + Configuration config = HSearchConfig.getInstance().getConfiguration(); + + public Algorithm compression = Compression.Algorithm.NONE; + public int partitionBlockSize = config.getInt("partition.block.size", 13035596); + public int partitionRepMode = HConstants.REPLICATION_SCOPE_GLOBAL; + public DataBlockEncoding dataBlockEncoding = DataBlockEncoding.NONE; + public BloomType bloomType = BloomType.NONE; + public boolean inMemory = false; + public boolean blockCacheEnabled = config.getBoolean("block.cache.enabled", true);; + + public static final HBaseTableSchemaCreator getInstance() { + if ( null != instance) return instance; + synchronized (HBaseTableSchemaCreator.class) { + if ( null != instance) return instance; + instance = new HBaseTableSchemaCreator(); + } + return instance; + } + + /** + * Default constructor + * + */ + public HBaseTableSchemaCreator(){ + } + + /** + * Checks and Creates all necessary tables required for HSearch index. + */ + public final boolean init(String tableName) { + + try { + + List colFamilies = new ArrayList(); + + HBaseTableSchemaDefn def = HBaseTableSchemaDefn.getInstance(tableName); + + System.out.println("Compression : " + this.compression.getName()); + System.out.println("Partition Block Size : " + this.partitionBlockSize); + System.out.println("Partition Rep Mode : " + this.partitionRepMode); + System.out.println("Partition Block Size : " + this.partitionBlockSize); + System.out.println("Partition Block Encoding : " + this.dataBlockEncoding.name()); + System.out.println("Bloom Type : " + this.bloomType.name()); + System.out.println("In Memory Table: " + this.inMemory); + System.out.println("Block Caching: " + this.blockCacheEnabled); + + for (String familyName : def.columnPartions.keySet()) { + + //Partitioned + List partitionNames = def.columnPartions.get(familyName).getPartitionNames(); + for (String partition : partitionNames) { + HColumnDescriptor rangeCols = new HColumnDescriptor( (familyName + "_" + partition ).getBytes()); + configColumn(rangeCols); + colFamilies.add(rangeCols); + } + + //No Partition + if ( partitionNames.size() == 0 ) { + HColumnDescriptor rangeCols = new HColumnDescriptor( familyName.getBytes()); + configColumn(rangeCols); + colFamilies.add(rangeCols); + } + } + + HDML.create(tableName, colFamilies); + return true; + + } catch (Exception sf) { + sf.printStackTrace(System.err); + l.fatal(sf); + return false; + } + } + + /** + * Compression method to HBase compression code. 
+ * @param methodName + * @return + */ + public static final String resolveCompression(final String methodName) { + String compClazz = Compression.Algorithm.GZ.getName(); + if ("gz".equals(methodName)) { + compClazz = Compression.Algorithm.GZ.getName(); + } else if ("lzo".equals(methodName)) { + compClazz = Compression.Algorithm.LZO.getName(); + } else if ("none".equals(methodName)) { + compClazz = Compression.Algorithm.NONE.getName(); + } + return compClazz; + } + + public final void configColumn(final HColumnDescriptor col) { + col.setMinVersions(1); + col.setMaxVersions(1); + col.setKeepDeletedCells(false); + col.setCompressionType(compression); + col.setEncodeOnDisk(false); + col.setDataBlockEncoding(dataBlockEncoding); + col.setInMemory(inMemory); + col.setBlockCacheEnabled(blockCacheEnabled); + col.setBlocksize(partitionBlockSize); + col.setTimeToLive(HConstants.FOREVER); + col.setBloomFilterType(bloomType); + col.setScope(partitionRepMode); + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/storage/HBaseTableSchemaDefn.java b/src/compatibility/hadooplib_94/storage/HBaseTableSchemaDefn.java new file mode 100644 index 0000000..d06ca0e --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HBaseTableSchemaDefn.java @@ -0,0 +1,65 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.util.HashMap; +import java.util.Map; + +import com.bizosys.hsearch.treetable.client.partition.IPartition; + +public final class HBaseTableSchemaDefn { + + private static Map repositories = new HashMap(); + + public static HBaseTableSchemaDefn getInstance(String tableName) { + if ( repositories.containsKey(tableName)) return repositories.get(tableName); + else { + synchronized (HBaseTableSchemaDefn.class.getName()) { + if ( repositories.containsKey(tableName)) return repositories.get(tableName); + repositories.put(tableName, new HBaseTableSchemaDefn(tableName)); + } + } + return repositories.get(tableName); + } + + private HBaseTableSchemaDefn(String tableName) { + this.tableName = tableName; + } + + private String tableName = "htable"; + + //FamilyName_partition is how the column families are created. 
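For illustration: with a schema family "price" partitioned as "2012" and "2013", the init() loop above registers the HBase column families price_2012 and price_2013, and falls back to a plain price family when no partitions are declared. Note also the getColumnName(int) helper just below, which keeps only the last decimal digit of its token, so distinct tokens can map to the same qualifier character. A tiny check (values hypothetical):

    public class ColumnNameDigitCheck {
        // Mirrors HBaseTableSchemaDefn.getColumnName(int): keep only the last
        // decimal digit of the token as the single-character column qualifier.
        static char lastDigit(int token) {
            String s = Integer.toString(token);
            return s.charAt(s.length() - 1);
        }

        public static void main(String[] args) {
            System.out.println(lastDigit(1));   // '1'
            System.out.println(lastDigit(21));  // '1'  -- tokens 1, 11, 21 collide
            System.out.println(lastDigit(30));  // '0'
        }
    }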
+ public Map columnPartions = new HashMap(); + + public final static char getColumnName() { + return getColumnName(1); + } + + public final static char getColumnName(int token) { + String tokenStr = new Integer(token).toString(); + return tokenStr.charAt(tokenStr.length() - 1); + } + + public String getTableName() { + return this.tableName; + } + + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/storage/HSearchBytesCoProcessorProxy.java b/src/compatibility/hadooplib_94/storage/HSearchBytesCoProcessorProxy.java new file mode 100644 index 0000000..392ffb0 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchBytesCoProcessorProxy.java @@ -0,0 +1,72 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.util.HSearchLog; + +public final class HSearchBytesCoProcessorProxy { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + HSearchBytesFilter filter = null; + byte[][] families = null; + byte[][] cols = null; + + public HSearchBytesCoProcessorProxy(final List family_cols , final HSearchBytesFilter filter) throws IOException { + this.filter = filter; + + if (null == family_cols) throw new IOException("Please provide family details. Scan on all cols are not allowed"); + this.families = new byte[family_cols.size()][]; + this.cols = new byte[family_cols.size()][]; + + int seq = -1; + for (ColumnFamName columnFamName : family_cols) { + seq++; + this.families[seq] = columnFamName.family; + this.cols[seq] = columnFamName.name; + } + + } + + public final Map execCoprocessorRows(final HTableWrapper table) throws IOException, Throwable { + + Map output = table.tableInterface.coprocessorExec( + HSearchBytesCoprocessorI.class, null, null, + + + new Batch.Call() { + @Override + public final byte[] call(HSearchBytesCoprocessorI counter) throws IOException { + return counter.getRows(families, cols, filter); + } + } ); + + return output; + } +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchBytesCoprocessor.java b/src/compatibility/hadooplib_94/storage/HSearchBytesCoprocessor.java new file mode 100644 index 0000000..9bf3449 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchBytesCoprocessor.java @@ -0,0 +1,104 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. 
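HSearchBytesCoProcessorProxy above fans a filter out to every region through the 0.94 CoprocessorProtocol endpoint API: HTable's coprocessorExec(protocolClass, startRow, endRow, Batch.Call) invokes the callback once per region in range and returns the per-region results as a Map keyed by region name. A minimal client-side sketch of the same call shape, against a hypothetical SumProtocol endpoint:

    import java.io.IOException;
    import java.util.Map;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.coprocessor.Batch;
    import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;

    // Hypothetical endpoint interface; HSearchBytesCoprocessorI has the same shape.
    interface SumProtocol extends CoprocessorProtocol {
        long sum(byte[] family, byte[] qualifier) throws IOException;
    }

    public class EndpointCallSketch {
        public static long totalAcrossRegions(HTable table) throws Throwable {
            // null start/end row = every region, exactly as the proxy above does.
            Map<byte[], Long> perRegion = table.coprocessorExec(
                SumProtocol.class, null, null,
                new Batch.Call<SumProtocol, Long>() {
                    @Override
                    public Long call(SumProtocol endpoint) throws IOException {
                        return endpoint.sum("f".getBytes(), "q".getBytes());
                    }
                });
            long total = 0;
            for (Long part : perRegion.values()) total += part;  // client-side reduce
            return total;
        }
    }

The proxy above returns the raw per-region byte[] map and leaves merging to its caller; this sketch folds the per-region values on the client instead.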
See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.HSearchLog; +import com.bizosys.hsearch.util.conf.Configuration; + +public final class HSearchBytesCoprocessor extends BaseEndpointCoprocessor implements HSearchBytesCoprocessorI { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + public boolean scannerBlockCaching = true; + public int scannerBlockCachingLimit = 1; + + public HSearchBytesCoprocessor() { + Configuration config = HSearchConfig.getInstance().getConfiguration(); + this.scannerBlockCaching = config.getBoolean("scanner.block.caching", true); + this.scannerBlockCachingLimit = config.getInt("scanner.block.caching.amount", 1); + } + + /** + * Get Matching rows + * @param filter + * @return + * @throws IOException + */ + @Override + public byte[] getRows(final byte[][] families, final byte[][] cols, final HSearchBytesFilter filter) throws IOException { + if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows"); + InternalScanner scanner = null; + + try { + Scan scan = new Scan(); + scan.setCacheBlocks(scannerBlockCaching); + scan.setCaching(scannerBlockCachingLimit); + scan.setMaxVersions(1); + int familiesT = families.length; + + for (int i=0; i curVals = new ArrayList(); + boolean done = false; + do { + done = scanner.next(curVals); + } while (done); + + byte[] data = filter.processRows(); + return data; + + } finally { + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchBytesCoprocessorI.java b/src/compatibility/hadooplib_94/storage/HSearchBytesCoprocessorI.java new file mode 100644 index 0000000..8d4ae9e --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchBytesCoprocessorI.java @@ -0,0 +1,11 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; + +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; + +public interface HSearchBytesCoprocessorI extends CoprocessorProtocol { + byte[] getRows(final byte[][] families, final byte[][] cols, + final HSearchBytesFilter filter) throws IOException; + +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchBytesFilter.java b/src/compatibility/hadooplib_94/storage/HSearchBytesFilter.java new file 
mode 100644 index 0000000..382330d --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchBytesFilter.java @@ -0,0 +1,146 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.util.HSearchLog; + +/** + * @author abinash + * + */ +public abstract class HSearchBytesFilter implements Filter { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + protected byte[] state = null; + + public HSearchBytesFilter(){ + } + + public HSearchBytesFilter(final byte[] state){ + this.state = state; + } + + @Override + public final void write(final DataOutput out) throws IOException { + out.writeInt(state.length); + out.write(state); + } + + @Override + public final void readFields(final DataInput in) throws IOException { + try { + int length = in.readInt(); + if ( 0 == length) throw new IOException("Invalid Input"); + + state = new byte[length]; + in.readFully(state, 0, length); + + } catch (Exception ex) { + HSearchLog.l.fatal("Error at deserialization of filter:" + ex.getMessage() , ex); + throw new IOException(ex); + } + } + + @Override + public final void filterRow(final List kvL) { + if ( null == kvL) return; + int kvT = kvL.size(); + if ( 0 == kvT) return; + + try { + for (KeyValue kv : kvL) { + if ( null == kv) continue; + + byte[] inputData = kv.getValue(); + if ( null == inputData) continue; + + processColumn(kv); + } + + processRow(kvL); + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(System.err); + } + } + + public abstract void processColumn(KeyValue cell) throws IOException; + public abstract void processRow(List row) throws IOException; + public abstract byte[] processRows() throws IOException; + + + @Override + public void reset() { + } + + @Override + public boolean hasFilterRow() { + return true; + } + + @Override + public KeyValue getNextKeyHint(final KeyValue arg0) { + return null; + } + + @Override + public boolean filterRowKey(final byte[] rowKey, final int offset, final int length) { + return false; + } + + @Override + public boolean filterAllRemaining() { + return false; + } + + @Override + public boolean filterRow() { + return false; + } + + @Override + public ReturnCode filterKeyValue(final KeyValue arg0) { + return ReturnCode.INCLUDE; + } + + /** + * Version 0.94 FIX + */ + @Override + public KeyValue transform(final KeyValue arg0) { + 
return arg0; + } + + public FilterList getFilters() { + return null; + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/storage/HSearchGenericCoProcessorFactory.java b/src/compatibility/hadooplib_94/storage/HSearchGenericCoProcessorFactory.java new file mode 100644 index 0000000..978b896 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchGenericCoProcessorFactory.java @@ -0,0 +1,110 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.cache.CacheService; +import com.bizosys.hsearch.util.HSearchLog; + +public final class HSearchGenericCoProcessorFactory { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + boolean cacheEnabled = false; + + HSearchGenericFilter filter = null; + byte[][] families = null; + byte[][] cols = null; + + public HSearchGenericCoProcessorFactory(final List family_cols , final HSearchGenericFilter filter) throws IOException { + this.filter = filter; + this.cacheEnabled = CacheService.getInstance().isCacheEnable(); + if ( INFO_ENABLED) { + HSearchLog.l.info("Cache Storage Enablement :" + cacheEnabled ); + } + + if (null == family_cols) throw new IOException("Please provide family details. 
Scan on all cols are not allowed"); + this.families = new byte[family_cols.size()][]; + this.cols = new byte[family_cols.size()][]; + + int seq = -1; + for (ColumnFamName columnFamName : family_cols) { + seq++; + this.families[seq] = columnFamName.family; + this.cols[seq] = columnFamName.name; + } + + } + + public final Collection execCoprocessorRows(final HTableWrapper table) throws IOException, Throwable { + + String singleQuery = null; + + /** + * Check for already cached result + */ + if ( null != filter) { + if ( filter.clientSideAPI_IsSingleQuery() ) { + singleQuery = filter.clientSideAPI_getSingleQueryWithScope(); + if ( cacheEnabled ) { + byte[] singleQueryResultB = CacheService.getInstance().get(singleQuery); + if( null != singleQueryResultB) { + return SortedBytesArray.getInstance().parse(singleQueryResultB).values(); + } + } + } + } + + Map output = table.tableInterface.coprocessorExec( + HSearchGenericCoprocessor.class, null, null, + + + new Batch.Call() { + @Override + public final byte[] call(HSearchGenericCoprocessor counter) throws IOException { + return counter.getRows(families, cols, filter); + } + } ); + + Collection result = output.values(); + + try { + if ( null != singleQuery) { + if ( cacheEnabled ) { + byte[] dataPack = SortedBytesArray.getInstance().toBytes(result); + CacheService.getInstance().put(singleQuery, dataPack); + } + } + } catch (Exception ex) { + HSearchLog.l.warn("Cache Service Failure.", ex); + } + + return result; + } +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchGenericCoprocessor.java b/src/compatibility/hadooplib_94/storage/HSearchGenericCoprocessor.java new file mode 100644 index 0000000..f820aa3 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchGenericCoprocessor.java @@ -0,0 +1,29 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; + +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; + +public interface HSearchGenericCoprocessor extends CoprocessorProtocol { + byte[] getRows(byte[][] families, byte[][] cols, HSearchGenericFilter filter) throws IOException; +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchGenericCoprocessorImpl.java b/src/compatibility/hadooplib_94/storage/HSearchGenericCoprocessorImpl.java new file mode 100644 index 0000000..15fc806 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchGenericCoprocessorImpl.java @@ -0,0 +1,150 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. 
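execCoprocessorRows in HSearchGenericCoProcessorFactory above is a cache-aside wrapper around that same fan-out: for single-query filters it first probes CacheService under a filter-name-plus-query key, runs the coprocessor only on a miss, then best-effort writes the packed result back, logging and swallowing cache failures so a bad cache never loses a scan result. The same control flow in isolation (sketch; the map-backed cache and the expensive call are stand-ins):

    import java.util.concurrent.Callable;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class CacheAsideSketch {
        private final ConcurrentMap<String, byte[]> cache = new ConcurrentHashMap<String, byte[]>();

        // Probe the cache first; on a miss run the expensive call, then best-effort populate.
        public byte[] fetch(String key, Callable<byte[]> expensiveScan) throws Exception {
            byte[] hit = cache.get(key);
            if (hit != null) return hit;

            byte[] result = expensiveScan.call();   // e.g. the coprocessor fan-out
            try {
                cache.put(key, result);             // a failure here must not lose the result
            } catch (RuntimeException ignored) {
                // mirror the factory above: log-and-continue on cache failure
            }
            return result;
        }
    }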
See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.functions.HSearchReducer; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.HSearchLog; +import com.bizosys.hsearch.util.conf.Configuration; + +public final class HSearchGenericCoprocessorImpl extends BaseEndpointCoprocessor + implements HSearchGenericCoprocessor { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + private Configuration config = HSearchConfig.getInstance().getConfiguration(); + + private boolean internalScannerBlockCaching = true; + private int internalScannerBlockCachingAmount = 1; + + public HSearchGenericCoprocessorImpl() { + this.internalScannerBlockCaching = config.getBoolean("internal.scanner.block.caching", true); + this.internalScannerBlockCachingAmount = config.getInt("internal.scanner.block.caching.amount", 1); + } + + /** + * Get Matching rows + * @param filter + * @return + * @throws IOException + */ + public byte[] getRows(final byte[][] families, final byte[][] cols, final HSearchGenericFilter filter) throws IOException { + if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows"); + InternalScanner scanner = null; + long monitorStartTime = 0L; + long overallStartTime = System.currentTimeMillis(); + + try { + Scan scan = new Scan(); + scan.setCacheBlocks(internalScannerBlockCaching); + scan.setCaching(internalScannerBlockCachingAmount); + scan.setMaxVersions(1); + int familiesT = families.length; + + for (int i=0; i curVals = new ArrayList(); + boolean done = false; + + Collection finalOutput = new ArrayList(); + Collection partOutput = new ArrayList(); + + HSearchReducer reducer = filter.getReducer(); + filter.configure(); + do { + curVals.clear(); + partOutput.clear(); + + done = scanner.next(curVals); + for (KeyValue kv : curVals) { + byte[] input = kv.getValue(); + if ( null == input) continue; + + if ( null != reducer) { + filter.deserialize(input, partOutput); + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + reducer.appendRows(kv.getRow(), finalOutput, partOutput); + + if ( INFO_ENABLED ) { + filter.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + } + } + + 
} while (done); + + if ( INFO_ENABLED ) HSearchLog.l.info( + "**** Time spent on Overall : Scanner : Plugin Code = " + + ( System.currentTimeMillis() - overallStartTime) + ":" + + filter.overallExecutionTime + ":" + + filter.pluginExecutionTime + " in ms."); + + byte[] data = SortedBytesArray.getInstance().toBytes(finalOutput); + + return data; + + } finally { + if ( null != filter) filter.close(); + + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchGenericFilter.java b/src/compatibility/hadooplib_94/storage/HSearchGenericFilter.java new file mode 100644 index 0000000..b0f8eef --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchGenericFilter.java @@ -0,0 +1,618 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.StringTokenizer; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.byteutils.SortedBytesBase.Reference; +import com.bizosys.hsearch.federate.BitSetOrSet; +import com.bizosys.hsearch.federate.QueryPart; +import com.bizosys.hsearch.functions.HSearchReducer; +import com.bizosys.hsearch.functions.StatementWithOutput; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchTableMultiQueryExecutor; +import com.bizosys.hsearch.treetable.client.HSearchTableParts; +import com.bizosys.hsearch.treetable.client.IHSearchPlugin; +import com.bizosys.hsearch.util.HSearchLog; + +/** + * @author abinash + * + */ +public abstract class HSearchGenericFilter implements Filter { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + String name = null; + public String getName() { + if ( null == name) { + name = this.getClass().getName(); + } + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + + /** + * Client side variables + */ + Map queryFilters = null; + + /** + * Input Variables + */ + String multiQuery = null; + IHSearchPlugin plugin = null; + Map queryPayload = new HashMap(3); + Map colIdWithType = new HashMap(3); + + public long pluginExecutionTime = 0L; + public long 
overallExecutionTime = 0L; + + + HSearchProcessingInstruction inputMapperInstructions = new HSearchProcessingInstruction(); + byte[] inputRowsToIncludeB = null; + List inputRowsList = null; + SortedBytesArray rowsToInclude = null; + + Map queryIdWithParts = new HashMap(); + Map colNamesWithPartitionBytes = new HashMap(); + List columnsOfOneRowAfterJoin = new ArrayList(); + List> stmtOutputContainers = new LinkedList>(); + SortedBytesArray rowBytesPacker = SortedBytesArray.getInstanceArr(); + + HSearchTableMultiQueryExecutor intersector = null; + + public HSearchGenericFilter(){ + } + + public HSearchGenericFilter(final HSearchProcessingInstruction outputType, + final String query, final Map details) { + this(outputType, query, details, null); + } + + public HSearchGenericFilter(final HSearchProcessingInstruction outputType, + final String query, final Map details, List scopedToRows) { + + this.multiQuery = query; + this.queryFilters = details; + this.inputMapperInstructions = outputType; + this.inputRowsList = scopedToRows; + } + + public void setScopedToRows(List scopedToRows) { + this.inputRowsList = scopedToRows; + } + + public boolean clientSideAPI_IsSingleQuery() throws IOException { + if ( null == this.queryFilters) throw new IOException("Genric Filter is not initalized"); + if ( 1 == this.queryFilters.size()) return true; + return false; + } + + public String clientSideAPI_getSingleQueryWithScope() throws IOException { + if ( null == this.queryFilters) throw new IOException("Genric Filter is not initalized"); + if ( 1 != this.queryFilters.size()) throw new IOException("Genric Filter has multi queries"); + return getName() + "/" + this.queryFilters.values().iterator().next(); + } + + + /** + * output type + * structured:A OR unstructured:B + * structured:A=f|1|1|1|c|*|* + * unstructured:B=*|*|*|*|*|* + */ + @Override + public final void write(final DataOutput out) throws IOException { + + if ( null != inputRowsList) { + if ( inputRowsList.size() > 0 ) { + inputRowsToIncludeB = SortedBytesArray.getInstanceArr().toBytes(inputRowsList); + } + } + + StringBuilder querySection = new StringBuilder(); + querySection.append(inputMapperInstructions.toString()).append('\n'); + querySection.append(this.multiQuery); + if ( null != queryFilters) { + for (String queryP : queryFilters.keySet()) { + String input = queryFilters.get(queryP); + querySection.append('\n').append(queryP).append('=').append(input.toString()); + } + } + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Sending to HBase : " + querySection.toString() + ", Rows to include:" + inputRowsToIncludeB); + } + SortedBytesArray sendToRSData = SortedBytesArray.getInstanceArr(); + byte[] ser = ( null == inputRowsToIncludeB) ? + sendToRSData.toBytes(querySection.toString().getBytes()) + : + sendToRSData.toBytes( querySection.toString().getBytes(), inputRowsToIncludeB); + + out.writeInt(ser.length); + out.write(ser); + } + + /** + * output type + * structured:A OR unstructured:B + * structured:A=f|1|1|1|c|*|* + * unstructured:B=*|*|*|*|*|* + * TODO:// Replace with Fast Split. 
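write() above, and the readFields() that follows, frame the whole request as one text block: the first line carries the processing instruction, the second the multi-query expression (e.g. structured:A OR unstructured:B), and each remaining line one queryId=filter-spec pair; the block is then packed by SortedBytesArray next to an optional row-scope section. A hedged round-trip sketch of just that text framing (names hypothetical; a plain split stands in for the fast split the TODO above asks for):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class QuerySectionSketch {
        // Build the text block: instruction, multi-query, then id=filter lines.
        static String pack(String instruction, String multiQuery, Map<String, String> filters) {
            StringBuilder sb = new StringBuilder();
            sb.append(instruction).append('\n').append(multiQuery);
            for (Map.Entry<String, String> e : filters.entrySet())
                sb.append('\n').append(e.getKey()).append('=').append(e.getValue());
            return sb.toString();
        }

        // Parse it back line by line, as readFields below does with a StringTokenizer.
        static void unpack(String block) {
            String[] lines = block.split("\n");
            String instruction = lines[0];
            String multiQuery = lines[1];
            Map<String, String> filters = new LinkedHashMap<String, String>();
            for (int i = 2; i < lines.length; i++) {
                int eq = lines[i].indexOf('=');       // family:queryId = filter spec
                filters.put(lines[i].substring(0, eq), lines[i].substring(eq + 1));
            }
            System.out.println(instruction + " | " + multiQuery + " | " + filters);
        }

        public static void main(String[] args) {
            Map<String, String> f = new LinkedHashMap<String, String>();
            f.put("structured:A", "f|1|1|1|c|*|*");
            f.put("unstructured:B", "*|*|*|*|*|*");
            unpack(pack("output-type", "structured:A OR unstructured:B", f));
        }
    }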
+ */ + @Override + public final void readFields(final DataInput in) throws IOException { + try { + int length = in.readInt(); + if ( 0 == length) throw new IOException("Invalid Query"); + + byte[] deser = new byte[length]; + in.readFully(deser, 0, length); + + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Total bytes Received @ Generic Filter:" + length); + } + + SortedBytesArray receiveRSData = SortedBytesArray.getInstanceArr(); + receiveRSData.parse(deser); + + int packedDataSectionsT = receiveRSData.getSize(); + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Reading bytes sections of total :" + packedDataSectionsT); + } + if ( packedDataSectionsT == 0 ) { + throw new IOException("Unknown number of fields :" + packedDataSectionsT); + } + + //Filter Row Section + if ( packedDataSectionsT == 2) { + Reference ref = new Reference(); + receiveRSData.getValueAtReference(1, ref); + rowsToInclude = SortedBytesArray.getInstanceArr(); + rowsToInclude.parse(deser, ref.offset, ref.length); + } + + //Query Section + Reference ref = new Reference(); + receiveRSData.getValueAtReference(0, ref); + StringTokenizer stk = new StringTokenizer(new String(deser, ref.offset, ref.length), "\n"); + + int lineNo = -1; + while ( stk.hasMoreTokens() ) { + + lineNo++; + + switch ( lineNo ) { + case 0: + String output = stk.nextToken(); + if (output.length() == 0 ) throw new IOException("Unknown result output type."); + this.inputMapperInstructions = new HSearchProcessingInstruction(output); + break; + + case 1: + this.multiQuery = stk.nextToken(); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("HBase Region Server: Multi Query" + this.multiQuery); + } + break; + + default: + String line = stk.nextToken(); + int splitIndex = line.indexOf('='); + if ( -1 == splitIndex) throw new IOException("Expecting [=] in line " + line); + + String colNameQuolonId = line.substring(0,splitIndex); + String filtersPipeSeparated = line.substring(splitIndex+1); + + int colNameAndQIdSplitIndex = colNameQuolonId.indexOf(':'); + if ( -1 == colNameAndQIdSplitIndex || colNameQuolonId.length() - 1 == colNameAndQIdSplitIndex) { + throw new IOException("Sub queries expected as X:Y eg.\n" + + "family1:A OR family2:B\nfamily1:A=f|1|1|1|c|*|*\nfamily2:B=*|*|*|*|*|*"); + } + String colName = colNameQuolonId.substring(0,colNameAndQIdSplitIndex); + String qId = colNameQuolonId.substring(colNameAndQIdSplitIndex+1); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("colName:qId = " + colName + "/" + qId); + } + + colIdWithType.put(qId, colName); + + this.plugin = createPlugIn(colName) ; + plugin.setOutputType(this.inputMapperInstructions); + + this.queryPayload.put( + colNameQuolonId, new QueryPart(filtersPipeSeparated, + HSearchTableMultiQueryExecutor.PLUGIN, plugin) ); + + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("HBase Region Server: Query Payload " + line); + } + break; + } + } + for (int i=0; i() ); + } + + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + throw new IOException(ex); + } + } + + /** + * TODO: + * If we have a query as FieldA OR FieldB + * FieldA, tableparts should only contain byte[] of family FieldA_* + * and FieldB byte[] of family FieldB_* + */ + @Override + public final void filterRow(final List kvL) { + if ( null == kvL) return; + int kvT = kvL.size(); + if ( 0 == kvT) return; + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Processing @ Region Server : filterRow" ); + } + + try { + + byte[] row = null; + byte[] firstFamily = null; + byte[] firstCol = null; + + //colParts.put("structured:A", bytes); + 
colNamesWithPartitionBytes.clear(); + + //HBase Family Name = schema column name + "_" + partition + String columnNameWithParition = null; + String colName = null; + + for (KeyValue kv : kvL) { + if ( null == kv) continue; + + byte[] inputData = kv.getValue(); + if ( null == inputData) continue; + columnNameWithParition = new String(kv.getFamily()); + + int partitionIndex = columnNameWithParition.indexOf('_'); + colName = ( partitionIndex == -1 ) ? columnNameWithParition : + columnNameWithParition.substring(0, partitionIndex); + + HSearchTableParts tableParts = null; + if ( colNamesWithPartitionBytes.containsKey(colName)) { + tableParts = colNamesWithPartitionBytes.get(colName); + } else { + tableParts = new HSearchTableParts(); + colNamesWithPartitionBytes.put(colName, tableParts); + } + tableParts.put(inputData); + + if ( null == row ) { + firstFamily = kv.getFamily(); + firstCol = kv.getQualifier(); + row = kv.getRow(); + } + } + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("queryData HSearchTableParts creation. "); + } + + queryIdWithParts.clear(); + + for (String queryId : colIdWithType.keySet()) { //A + String queryType = colIdWithType.get(queryId); //structured + HSearchTableParts parts = colNamesWithPartitionBytes.get(queryType); + + String queryTypeWithId = queryType + ":" + queryId; + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug(queryTypeWithId); + HSearchLog.l.debug("Query Parts for " + queryTypeWithId); + } + + queryIdWithParts.put(queryTypeWithId, parts); + } + colNamesWithPartitionBytes.clear(); + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchGenericFilter: Filteration Starts"); + + long monitorStartTime = 0L; + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + if ( null == intersector ) intersector = createExecutor(); + this.plugin.setMergeId(row); + BitSetOrSet intersectedIds = federatedQueryExec(row, intersector, queryIdWithParts); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + kvL.clear(); //Clear all data + byte[] value = getOneRowBytes(intersectedIds, this.queryPayload); + kvL.add(new KeyValue(row, firstFamily, firstCol, value) ); + + + } catch (Exception ex) { + ex.printStackTrace(System.err); + HSearchLog.l.fatal(ex); + } + } + + private final BitSetOrSet federatedQueryExec(final byte[] row, + final HSearchTableMultiQueryExecutor intersector, + final Map queryData) throws Exception, IOException { + + BitSetOrSet intersectedIds = intersector.execute( + queryData, this.multiQuery, this.queryPayload, inputMapperInstructions); + + if ( DEBUG_ENABLED ) { + boolean hasMatchingIds = false; + hasMatchingIds = ( null != intersectedIds && intersectedIds.size() > 0 ); + HSearchLog.l.debug("Generaic filter hasMatchingIds :" + hasMatchingIds + " objectid=" + intersectedIds.hashCode()); + if ( hasMatchingIds ) HSearchLog.l.debug( new String(row) + " has ids of :" + intersectedIds.size()); + } + + return intersectedIds; + } + + + @Override + public final void reset() { + } + + @Override + public final boolean hasFilterRow() { + return true; + } + + @Override + public final KeyValue getNextKeyHint(final KeyValue arg0) { + return null; + } + + @Override + public final boolean filterRowKey(final byte[] rowKey, final int offset, final int length) { + + if (DEBUG_ENABLED) { + int scopeToTheseRowsT = ( null == rowsToInclude) ? 
0 : rowsToInclude.getSize(); + HSearchLog.l.debug("Analyzing row for processing: " + new String(rowKey + " , From a matching set of " + scopeToTheseRowsT)); + } + + if ( null == rowsToInclude) return false; + + byte[] exactRowBytes = new byte[length]; + try { + System.arraycopy(rowKey, offset, exactRowBytes, 0, length); + if ( rowsToInclude.getEqualToIndex(exactRowBytes) == -1) return true; + return false; + + } catch (IOException ex) { + int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize(); + String rowKeyStr = ( null == rowKey) ? "Null row key" : new String(rowKey); + String errMsg = "Error while finding fileration criteria for the row , " + rowKeyStr + + "\n" + ex.getMessage() + "\n" + + "With search scope inside id count : " + scopeToTheseRowsT; + System.err.println(errMsg); + HSearchLog.l.fatal(errMsg, ex); + + return false; + } + } + + @Override + public final boolean filterAllRemaining() { + return false; + } + + @Override + public final boolean filterRow() { + return false; + } + + @Override + public final ReturnCode filterKeyValue(final KeyValue arg0) { + return ReturnCode.INCLUDE; + } + + /** + * Version 0.94 FIX + */ + @Override + public final KeyValue transform(final KeyValue arg0) { + return arg0; + } + + /** + ******************************************************************************************* + * COMPUTATIONS + * Step 1 - HSearch Table merge + ******************************************************************************************* + */ + + /** + * *|*|architect|age + * AND + * *|*|developer|age + * + * @param matchedIds + * @param queryPayload + * @param inputMapperInstructions + * @return + * @throws IOException + */ + public final byte[] getOneRowBytes( final BitSetOrSet matchedIds, final Map queryPayload) throws IOException { + + if ( DEBUG_ENABLED ) { + int matchedIdsT = ( null == matchedIds) ? 0 : matchedIds.size(); + HSearchLog.l.debug("HSearchGenericFilter:serialize : with matchedIds " + matchedIdsT + ", Object:" + matchedIds.hashCode()); + if ( null != matchedIds.getDocumentIds()) { + HSearchLog.l.debug("HSearchGenericFilter: DocumentIds size " + matchedIds.getDocumentIds().size() + " and matchedId size " + matchedIds.size()); + } else if ( null != matchedIds.getDocumentSequences()) { + HSearchLog.l.debug("HSearchGenericFilter: DocumentSequences cardinality " + matchedIds.getDocumentSequences().cardinality()); + } + } + + /** + * - Iterate through all the parts and find the values. 
+ * - Collect the data for multiple queries + */ + HSearchReducer reducer = getReducer(); + int totalQueries = queryPayload.size(); + + columnsOfOneRowAfterJoin.clear(); + long monitorStartTime = 0L; + + if ( totalQueries == 1) { + + Object pluginO = queryPayload.values().iterator().next().getParams().get( + HSearchTableMultiQueryExecutor.PLUGIN); + IHSearchPlugin plugin = (IHSearchPlugin) pluginO; + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + plugin.getResultSingleQuery(columnsOfOneRowAfterJoin); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + + } else { + StatementWithOutput[] stmtWithOutputs = new StatementWithOutput[totalQueries]; + int seq = 0; + + for (QueryPart part : queryPayload.values()) { + + Object pluginO = part.getParams().get(HSearchTableMultiQueryExecutor.PLUGIN); + IHSearchPlugin plugin = (IHSearchPlugin) pluginO; + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + Collection queryOutput = this.stmtOutputContainers.get(seq); + queryOutput.clear(); //Clear to reuse + plugin.getResultMultiQuery(matchedIds, queryOutput); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + stmtWithOutputs[seq] = new StatementWithOutput(part.aStmtOrValue, queryOutput); + seq++; + } + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + reducer.appendQueries(columnsOfOneRowAfterJoin, stmtWithOutputs); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + for (StatementWithOutput stmtWithOutput : stmtWithOutputs) { + if ( null != stmtWithOutput.cells ) stmtWithOutput.cells.clear(); + } + } + + //Put it to Bytes + byte[] processedRowBytes = rowBytesPacker.toBytes(columnsOfOneRowAfterJoin); + columnsOfOneRowAfterJoin.clear(); + + return processedRowBytes; + } + + public final void deserialize(final byte[] input, final Collection output) throws IOException { + SortedBytesArray.getInstance().parse(input).values(output); + } + + public abstract HSearchTableMultiQueryExecutor createExecutor(); + public abstract IHSearchPlugin createPlugIn(String type) throws IOException ; + public abstract HSearchReducer getReducer(); + + /** + * Override this method if you want to set more filters in processing. + * + FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RowFilter filter1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-22")) ); + list.addFilter(filter1); + list.addFilter(this); + return list; + + * @return + */ + public final FilterList getFilters() { + return null; + } + + /** + * Any information to be configured before starting the filtration process. + */ + public final void configure() { + } + + /** + * At the end release the resources. 
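Farther up in this filter, filterRowKey implements the optional row scoping: when the client shipped a rows-to-include set, any row whose exact key is not found in the SortedBytesArray is skipped (returning true from filterRowKey tells HBase to drop the row). The same membership test in miniature, with a plain sorted array standing in for SortedBytesArray:

    import java.util.Arrays;
    import java.util.Comparator;

    public class RowScopeSketch {
        static final Comparator<byte[]> LEX = new Comparator<byte[]>() {
            @Override
            public int compare(byte[] a, byte[] b) {
                int n = Math.min(a.length, b.length);
                for (int i = 0; i < n; i++) {
                    int d = (a[i] & 0xff) - (b[i] & 0xff);   // unsigned byte order
                    if (d != 0) return d;
                }
                return a.length - b.length;
            }
        };

        private final byte[][] scope;   // sorted row keys to include; null = no scoping

        RowScopeSketch(byte[][] scope) {
            this.scope = scope;
            if (scope != null) Arrays.sort(scope, LEX);
        }

        // true = drop this row, mirroring Filter.filterRowKey semantics above.
        boolean filterRowKey(byte[] rowKey, int offset, int length) {
            if (scope == null) return false;                   // no scope: keep everything
            byte[] exact = new byte[length];
            System.arraycopy(rowKey, offset, exact, 0, length);
            return Arrays.binarySearch(scope, exact, LEX) < 0; // absent -> filter out
        }
    }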
+ */ + public final void close() { + if ( null != queryFilters) queryFilters.clear(); + if ( null != queryPayload) queryPayload.clear(); + if ( null != colIdWithType) colIdWithType.clear(); + if ( null != queryIdWithParts) queryIdWithParts.clear(); + if ( null != colNamesWithPartitionBytes) colNamesWithPartitionBytes.clear(); + if ( null != columnsOfOneRowAfterJoin) columnsOfOneRowAfterJoin.clear(); + if ( null != stmtOutputContainers) stmtOutputContainers.clear(); + } + + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoProcessorProxy.java b/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoProcessorProxy.java new file mode 100644 index 0000000..181a90a --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoProcessorProxy.java @@ -0,0 +1,77 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.BytesSection; +import com.bizosys.hsearch.treetable.Cell2; +import com.bizosys.hsearch.treetable.CellKeyValue; +import com.bizosys.hsearch.util.HSearchLog; + +public final class HSearchMultiGetCoProcessorProxy { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + HSearchScalarFilter filter = null; + byte[][] families = null; + byte[][] cols = null; + byte[][] rows = null; + + public HSearchMultiGetCoProcessorProxy(final ColumnFamName columnFamName , + final HSearchScalarFilter filter, byte[][] rows) throws IOException { + + this.filter = filter; + if (null == columnFamName) throw new IOException("Please provide family details. 
Scan on all cols are not allowed"); + this.families = new byte[][]{columnFamName.family}; + this.cols = new byte[][]{columnFamName.name}; + this.rows = rows; + } + + public final void execCoprocessorRows( Map kvs, + final HTableWrapper table, final byte[] row) throws IOException, Throwable { + + Map output = table.tableInterface.coprocessorExec( + HSearchMultiGetCoprocessorI.class, row, row, + + new Batch.Call() { + @Override + public final byte[] call(HSearchMultiGetCoprocessorI counter) throws IOException { + return counter.getRows(families, cols, filter, rows); + } + } ); + + + for (byte[] bs : output.keySet()) { + Cell2 cell2 = new Cell2(byte[].class, byte[].class); + cell2.data = new BytesSection(output.get(bs) ); + cell2.parseElements(); + for (CellKeyValue kv: cell2.sortedList) { + kvs.put(new String(kv.getKey()), kv.getValue()); + } + } + } +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoprocessor.java b/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoprocessor.java new file mode 100644 index 0000000..fd2b078 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoprocessor.java @@ -0,0 +1,129 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
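A usage sketch for the multi-get proxy above, assuming a pooled HTableWrapper and a concrete HSearchScalarFilter subclass obtained elsewhere in this codebase; the family/column names are illustrative. Each call fans out to the region hosting the row via coprocessorExec and flattens the returned Cell2 pairs into the shared map:

public static Map<String, byte[]> multiGet(HTableWrapper table,
        HSearchScalarFilter scalarFilter, byte[][] rows) throws Throwable {

    // Restrict the endpoint to one family/column pair.
    ColumnFamName cf = new ColumnFamName("cf".getBytes(), "col".getBytes());
    HSearchMultiGetCoProcessorProxy proxy =
        new HSearchMultiGetCoProcessorProxy(cf, scalarFilter, rows);

    Map<String, byte[]> kvs = new HashMap<String, byte[]>();
    for (byte[] row : rows) {
        // One endpoint invocation per row key; results accumulate as rowKey -> value.
        proxy.execCoprocessorRows(kvs, table, row);
    }
    return kvs;
}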
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +import com.bizosys.hsearch.treetable.Cell2; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.HSearchLog; +import com.bizosys.hsearch.util.conf.Configuration; + +public final class HSearchMultiGetCoprocessor extends BaseEndpointCoprocessor implements HSearchMultiGetCoprocessorI { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + public boolean scannerBlockCaching = true; + public int scannerBlockCachingLimit = 1; + + public HSearchMultiGetCoprocessor() { + Configuration config = HSearchConfig.getInstance().getConfiguration(); + this.scannerBlockCaching = config.getBoolean("scanner.block.caching", true); + this.scannerBlockCachingLimit = config.getInt("scanner.block.caching.amount", 1); + } + + /** + * Get Matching rows + * @param filter + * @return + * @throws IOException + */ + @Override + public byte[] getRows(final byte[][] families, final byte[][] cols, final Filter filter, final byte[][] rows) throws IOException { + if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows"); + InternalScanner scanner = null; + + try { + Scan scan = new Scan(); + scan.setCacheBlocks(scannerBlockCaching); + scan.setCaching(scannerBlockCachingLimit); + scan.setMaxVersions(1); + int familiesT = families.length; + + for (int i=0; i finalVals = new ArrayList(); + List curVals = new ArrayList(); + + if ( null != filter) { + scan = scan.setFilter(filter); + } + + boolean done = false; + for (byte[] row : rows) { + done = false; + scan.setStartRow(row); + scan.setStopRow(row); + + scanner = environment.getRegion().getScanner(scan); + do { + curVals.clear(); + done = scanner.next(curVals); + if ( curVals.size() == 0 ) continue; + finalVals.addAll(curVals); + + KeyValue keyValue = curVals.get(0); + System.out.println( new String ( keyValue.getRow() )); + } while (done); + scanner.close(); + scanner = null; + } + + Cell2 container = new Cell2(byte[].class, byte[].class); + for (KeyValue keyValue : finalVals) { + byte[] key = keyValue.getRow(); + byte[] val = keyValue.getValue(); + + if ( null == key || null == val) continue; + if ( key.length == 0 || val.length == 0 ) continue; + container.add(key, val); + } + + byte[] data = container.toBytesOnSortedData(); + + + return data; + + } finally { + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoprocessorI.java b/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoprocessorI.java new file mode 100644 index 0000000..053ea8c --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchMultiGetCoprocessorI.java @@ -0,0 +1,12 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; + +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; + + +public interface HSearchMultiGetCoprocessorI extends CoprocessorProtocol { + byte[] getRows(final byte[][] families, final 
byte[][] cols, + final Filter filter, final byte[][] rows) throws IOException; +} diff --git a/src/compatibility/hadooplib_94/storage/HSearchScalarFilter.java b/src/compatibility/hadooplib_94/storage/HSearchScalarFilter.java new file mode 100644 index 0000000..ebe3f74 --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchScalarFilter.java @@ -0,0 +1,377 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.StringTokenizer; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.byteutils.SortedBytesBase.Reference; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchQuery; +import com.bizosys.hsearch.treetable.client.IHSearchPlugin; +import com.bizosys.hsearch.treetable.client.IHSearchTable; +import com.bizosys.hsearch.util.HSearchLog; + +/** + * @author abinash + * + */ +public abstract class HSearchScalarFilter implements Filter { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + String name = null; + public String getName() { + if ( null == name) { + name = this.getClass().getName(); + } + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + + /** + * Input Variables + */ + String multiQuery = null; + + public long pluginExecutionTime = 0L; + public long overallExecutionTime = 0L; + + + protected HSearchProcessingInstruction inputMapperInstructions = new HSearchProcessingInstruction(); + byte[] inputRowsToIncludeB = null; + List inputRowsList = null; + SortedBytesArray rowsToInclude = null; + byte[] matchingIds = null; + + public HSearchScalarFilter(){ + } + + public HSearchScalarFilter(final HSearchProcessingInstruction outputType,final String query) { + this.multiQuery = query; + this.inputMapperInstructions = outputType; + } + + public void setMatchingRows(List inputRowsList) { + this.inputRowsList = inputRowsList; + } + + public void setMatchingIds(byte[] matchingIds) { + this.matchingIds = matchingIds; + } + + @Override + public final void write(final DataOutput out) throws IOException { + + if ( null != inputRowsList) { + if ( inputRowsList.size() > 0 ) { + inputRowsToIncludeB = SortedBytesArray.getInstanceArr().toBytes(inputRowsList); + } + } + + SortedBytesArray 
sendToRSData = SortedBytesArray.getInstanceArr(); + String querySection = this.inputMapperInstructions.toString() + "\n" + this.multiQuery; + + List values = new ArrayList(3); + values.add(querySection.getBytes()); + + if(null != matchingIds) + values.add(matchingIds); + else + values.add(new byte[0]); + + if(null != inputRowsToIncludeB) + values.add(inputRowsToIncludeB); + + byte[] ser = sendToRSData.toBytes( values ); + + out.writeInt(ser.length); + out.write(ser); + } + + HSearchQuery query = null; + IHSearchTable table = null; + IHSearchPlugin plugin = null; + boolean skipFiltering = true; + Collection dataCarrier = new ArrayList(); + + @Override + public final void readFields(final DataInput in) throws IOException { + try { + int length = in.readInt(); + if ( 0 == length) throw new IOException("Invalid Query"); + + byte[] deser = new byte[length]; + in.readFully(deser, 0, length); + + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Total bytes Received @ Generic Filter:" + length); + } + + SortedBytesArray receiveRSData = SortedBytesArray.getInstanceArr(); + receiveRSData.parse(deser); + + int packedDataSectionsT = receiveRSData.getSize(); + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Reading bytes sections of total :" + packedDataSectionsT); + } + if ( packedDataSectionsT == 0 ) { + throw new IOException("Unknown number of fields :" + packedDataSectionsT); + } + + Reference ref = new Reference(); + //Filter Row Section + if ( packedDataSectionsT == 3) { + receiveRSData.getValueAtReference(2, ref); + rowsToInclude = SortedBytesArray.getInstanceArr(); + rowsToInclude.parse(deser, ref.offset, ref.length); + } + + //matching ids + receiveRSData.getValueAtReference(1, ref); + this.matchingIds = new byte[ref.length]; + System.arraycopy(deser, ref.offset, this.matchingIds, 0, ref.length); + + //Query Section + receiveRSData.getValueAtReference(0, ref); + StringTokenizer stk = new StringTokenizer(new String(deser, ref.offset, ref.length), "\n"); + + int lineNo = -1; + while ( stk.hasMoreTokens() ) { + + lineNo++; + + switch ( lineNo ) { + case 0: + String output = stk.nextToken(); + if (output.length() == 0 ) throw new IOException("Unknown result output type."); + this.inputMapperInstructions = new HSearchProcessingInstruction(output); + break; + + case 1: + this.multiQuery = stk.nextToken(); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("HBase Region Server: Multi Query" + this.multiQuery); + } + break; + } + } + + if ( null != this.multiQuery ) { + if ( 0 != this.multiQuery.trim().length() ) + query = new HSearchQuery(this.multiQuery); + + this.table = createTable(); + if ( null != table) { + this.plugin =createPlugIn(); + if ( null != this.plugin) { + this.plugin.setOutputType(this.inputMapperInstructions); + if(0 != this.matchingIds.length) + this.plugin.setMergeId(this.matchingIds); + skipFiltering = false; + } + } + } + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(); + throw new IOException(ex); + } + } + + @Override + public final void filterRow(final List kvL) { + if ( skipFiltering ) return; + + if ( null == kvL) return; + int kvT = kvL.size(); + if ( 0 == kvT) return; + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Processing @ Region Server : filterRow" ); + } + + try { + + List kvLFiltered = new ArrayList(); + + for (KeyValue kv : kvL) { + if ( null == kv) continue; + + byte[] inputData = kv.getValue(); + if ( null == inputData) continue; + + switch ( this.inputMapperInstructions.getCallbackType()) { + case 
HSearchProcessingInstruction.PLUGIN_CALLBACK_COLS: + table.get(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_ID: + table.keySet(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_VAL: + table.values(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_IDVAL: + table.keyValues(inputData, this.query, plugin); + break; + default: + throw new IOException("Unknown output type:" + this.inputMapperInstructions.getCallbackType()); + } + + plugin.getResultSingleQuery(dataCarrier); + + kvLFiltered.add(new KeyValue(kv.getKey(), kv.getFamily(), kv.getQualifier(), + SortedBytesArray.getInstance().toBytes(dataCarrier)) ); + dataCarrier.clear(); + } + kvL.clear(); + kvL.addAll(kvLFiltered); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("queryData HSearchTableParts creation. "); + } + + + + } catch (Exception ex) { + ex.printStackTrace(System.err); + HSearchLog.l.fatal(ex); + } + } + + @Override + public final void reset() { + } + + @Override + public final boolean hasFilterRow() { + return true; + } + + @Override + public final KeyValue getNextKeyHint(final KeyValue arg0) { + return null; + } + + @Override + public final boolean filterRowKey(final byte[] rowKey, final int offset, final int length) { + + if ( null == rowsToInclude) return false; + byte[] exactRowBytes = new byte[length]; + try { + System.arraycopy(rowKey, offset, exactRowBytes, 0, length); + if ( rowsToInclude.getEqualToIndex(exactRowBytes) >= 0 ) { + //System.out.println("Allow row:" + new String(exactRowBytes)); + return false; + } else { + //System.out.println("Disallow row:" + new String(exactRowBytes)); + return true; + } + + } catch (IOException ex) { + int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize(); + String rowKeyStr = ( null == rowKey) ? "Null row key" : new String(rowKey); + String errMsg = "Error while finding fileration criteria for the row , " + rowKeyStr + + "\n" + ex.getMessage() + "\n" + + "With search scope inside id count : " + scopeToTheseRowsT; + System.err.println(errMsg); + HSearchLog.l.fatal(errMsg, ex); + + return false; + } + } + + @Override + public final boolean filterAllRemaining() { + return false; + } + + @Override + public final boolean filterRow() { + return false; + } + + @Override + public final ReturnCode filterKeyValue(final KeyValue arg0) { + return ReturnCode.INCLUDE; + } + + /** + * Version 0.94 FIX + */ + @Override + public final KeyValue transform(final KeyValue arg0) { + return arg0; + } + + + public final void deserialize(final byte[] input, final Collection output) throws IOException { + SortedBytesArray.getInstance().parse(input).values(output); + } + + public abstract IHSearchPlugin createPlugIn() throws IOException ; + public abstract IHSearchTable createTable(); + + + /** + * Override this method if you want to set more filters in processing. + * + FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RowFilter filter1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-22")) ); + list.addFilter(filter1); + list.addFilter(this); + return list; + + * @return + */ + public final FilterList getFilters() { + return null; + } + + /** + * Any information to be configured before starting the filtration process. + */ + public final void configure() { + } + + /** + * At the end release the resources. 
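The write()/readFields() pair above ships three byte[] sections to the region server: the query section (instruction + "\n" + multiQuery), the matching ids (possibly empty), and an optional packed row scope. A self-contained sketch of equivalent length-prefixed framing; it uses plain DataOutput framing for illustration and is not byte-compatible with the actual SortedBytesArray encoding:

import java.io.*;
import java.util.*;

public class WireFormatSketch {
    static byte[] pack(List<byte[]> sections) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeInt(sections.size());                    // section count
        for (byte[] s : sections) {
            out.writeInt(s.length);                       // per-section length prefix
            out.write(s);
        }
        return bos.toByteArray();
    }

    static List<byte[]> unpack(byte[] wire) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(wire));
        int n = in.readInt();
        List<byte[]> sections = new ArrayList<byte[]>(n);
        for (int i = 0; i < n; i++) {
            byte[] s = new byte[in.readInt()];
            in.readFully(s);
            sections.add(s);
        }
        return sections;
    }

    public static void main(String[] args) throws IOException {
        byte[] query = "outputInstruction\n*|*|architect|age".getBytes();
        byte[] matchingIds = new byte[0];                 // empty when no merge ids
        byte[] wire = pack(Arrays.asList(query, matchingIds));
        System.out.println(unpack(wire).size());          // 2 sections, row scope omitted
    }
}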
+ */ + public final void close() { + } +} + \ No newline at end of file diff --git a/src/compatibility/hadooplib_94/storage/HSearchTableReader.java b/src/compatibility/hadooplib_94/storage/HSearchTableReader.java new file mode 100644 index 0000000..777b6bd --- /dev/null +++ b/src/compatibility/hadooplib_94/storage/HSearchTableReader.java @@ -0,0 +1,125 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HBaseFacade; +import com.bizosys.hsearch.hbase.HReader; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.hbase.IScanCallBack; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchQuery; +import com.bizosys.hsearch.util.HSearchLog; + +public abstract class HSearchTableReader implements IScanCallBack { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + //public static ParallelHReader parallelReader = new ParallelHReader(10); + + public abstract HSearchGenericFilter getFilter(String multiQuery, Map multiQueryParts, HSearchProcessingInstruction outputType); + + public abstract void rows(Collection results, HSearchProcessingInstruction rowType); + + + @SuppressWarnings("unchecked") + public void setPartionsFamilies(String tableName, String colName, String range, Set uniqueFamilies) + throws ParseException, IOException { + + HSearchQuery query = new HSearchQuery(range); + HBaseTableSchemaDefn.getInstance(tableName).columnPartions.get(colName). + getMatchingFamilies(query, uniqueFamilies); + } + + public IScanCallBack getResultCollector() { + return this; + } + + @Override + public void process(byte[] pk, ColumnFamName fn, byte[] storedBytes) throws IOException { + + int length = ( null == storedBytes ) ? 
0 : storedBytes.length; + if ( length == 0 ) return; + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("Found Primary Key :" + new String(pk) + "/" + length); + } + + + + public void read( String tableName, String multiQuery, Map multiQueryParts, + HSearchProcessingInstruction outputType, boolean isPartitioned, boolean isParallel) + throws IOException, ParseException { + + HSearchGenericFilter filter = getFilter(multiQuery, multiQueryParts, outputType); + + Set uniqueFamilies = new HashSet(3); + + for ( String colNameQuolonId : multiQueryParts.keySet() ) { + + int colNameAndQIdSplitIndex = colNameQuolonId.indexOf(':'); + if ( -1 == colNameAndQIdSplitIndex || colNameQuolonId.length() - 1 == colNameAndQIdSplitIndex) { + throw new IOException("Sub queries expected as X:Y eg.\n" + + "structured:A OR unstructured:B\nstructured:A=f|1|1|1|c|*|*\nunstructured:B=*|*|*|*|*|*"); + } + String colName = colNameQuolonId.substring(0,colNameAndQIdSplitIndex); + setPartionsFamilies(tableName, colName, multiQueryParts.get(colNameQuolonId),uniqueFamilies); + } + + List families = new ArrayList(); + for (String family : uniqueFamilies) { + if ( INFO_ENABLED ) HSearchLog.l.info("HSearchTableReader > Adding Family: " + family); + families.add(new ColumnFamName( family.getBytes(), + new String( new char[] {HBaseTableSchemaDefn.getColumnName()}).getBytes() ) ); + } + + IScanCallBack recordsCollector = getResultCollector(); + + if ( isParallel ) { + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchTableReader > Searching in parallel."); + /** + * OLD Version + * parallelReader.getAllValues(tableName, families, filter, recordsCollector); + */ + HTableWrapper table = HBaseFacade.getInstance().getTable(tableName); + + try { + rows(new HSearchGenericCoProcessorFactory( + families, filter).execCoprocessorRows(table), outputType ); + } catch (Throwable th) { + throw new IOException(th); + } + + } else { + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchTableReader > Searching in Sequential."); + HReader.getAllValues(tableName,families, filter, recordsCollector); + } + } +} + diff --git a/src/compatibility/hadooplib_96/hbase/HDML.java b/src/compatibility/hadooplib_96/hbase/HDML.java new file mode 100644 index 0000000..d3694de --- /dev/null +++ b/src/compatibility/hadooplib_96/hbase/HDML.java @@ -0,0 +1,225 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
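The read() method above expects every sub-query key in "colName:queryId" form: the text before the colon selects the column and its partitions, while the full key names the statement inside multiQuery. A sketch of a well-formed request, using the syntax from the error message; the table name and flags are illustrative:

Map<String, String> multiQueryParts = new HashMap<String, String>();
multiQueryParts.put("structured:A", "f|1|1|1|c|*|*");   // colName "structured", query id "A"
multiQueryParts.put("unstructured:B", "*|*|*|*|*|*");
String multiQuery = "structured:A OR unstructured:B";

// reader is a concrete HSearchTableReader subclass:
// reader.read("example-table", multiQuery, multiQueryParts, outputType, true, true);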
+*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.TableExistsException; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.RowFilter; + +import com.bizosys.hsearch.util.HSearchLog; + +public class HDML { + private static final boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + private static final boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + /** + * Creates the table if not existing before + * @param tableName + * @param cols + * @throws IOException + */ + public static final boolean create(final String tableName, final List cols) throws HBaseException { + + if (DEBUG_ENABLED) + HSearchLog.l.debug("Creating HBase Table - " + tableName); + + try { + if (DEBUG_ENABLED) + HSearchLog.l.debug("Checking for table existance : " + tableName); + HBaseAdmin admin = HBaseFacade.getInstance().getAdmin(); + if ( admin.tableExists(tableName)) { + + if (INFO_ENABLED) + HSearchLog.l.info("Ignoring creation. Table already exists - " + tableName); + return false; + } else { + HTableDescriptor tableMeta = new HTableDescriptor(TableName.valueOf(tableName)); + for (HColumnDescriptor col : cols) tableMeta.addFamily(col); + admin.createTable(tableMeta); + if (INFO_ENABLED ) HSearchLog.l.info("Table Created - " + tableName); + return true; + } + + } catch (TableExistsException ex) { + HSearchLog.l.warn("Ignoring creation. Table already exists - " + tableName, ex); + throw new HBaseException("Failed Table Creation : " + tableName, ex); + } catch (MasterNotRunningException mnre) { + throw new HBaseException("Failed Table Creation : " + tableName, mnre); + } catch (IOException ioex) { + throw new HBaseException("Failed Table Creation : " + tableName, ioex); + } + } + + + /** + * Drop a table. This may take significantly large time as things + * are disabled first and then gets deleted. + * @param tableName + * @throws IOException + */ + public final static void drop(final String tableName) throws HBaseException { + + if (DEBUG_ENABLED) + HSearchLog.l.debug("Checking for table existance"); + + try { + HBaseAdmin admin = HBaseFacade.getInstance().getAdmin(); + byte[] bytesTableName = tableName.getBytes(); + if ( admin.tableExists(bytesTableName)) { + if ( ! 
admin.isTableDisabled(bytesTableName) ) + admin.disableTable(bytesTableName); + if ( admin.isTableDisabled(bytesTableName) ) + admin.deleteTable(bytesTableName); + if (INFO_ENABLED ) HSearchLog.l.info (tableName + " Table is deleted."); + } else { + HSearchLog.l.warn( tableName + " table is not found during drop operation."); + throw new HBaseException("Table does not exist"); + } + } catch (IOException ioex) { + throw new HBaseException("Table Drop Failed : " + tableName, ioex); + } + } + + public final static void truncate(final String tableName, final NV kv) throws IOException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List matched = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(kv.family, kv.name); + scanner = table.getScanner(scan); + + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + Delete delete = new Delete(r.getRow()); + delete = delete.deleteColumns(kv.family, kv.name); + table.delete(delete); + } + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) { + table.flushCommits(); + facade.putTable(table); + } + if ( null != matched) matched.clear(); + } + } + + public static final void truncateBatch(final String tableName, final String keyPrefix) throws IOException { + + if (INFO_ENABLED) HSearchLog.l.info( + "Deleted from " + tableName + " with prefix " + keyPrefix); + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List deletes = new ArrayList(256); + + int batchSize = 0; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + if ( null != keyPrefix) { + Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes()) ); + scan = scan.setFilter(rowFilter); + } + scanner = table.getScanner(scan); + + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + Delete delete = new Delete(r.getRow()); + deletes.add(delete); + + batchSize++; + if ( batchSize > 1000) { + if ( deletes.size() > 0 ) { + table.delete(deletes); + deletes.clear(); + } + batchSize = 0; + } + } + if ( deletes.size() > 0 ) table.delete(deletes); + + } finally { + table.flushCommits(); + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + if ( null != deletes) deletes.clear(); + } + } + + public final static void truncateBatch(final String tableName, final List rows) throws IOException { + + if ( null == rows) return; + if ( rows.size() == 0) return; + + HBaseFacade facade = null; + HTableWrapper table = null; + List deletes = new ArrayList(rows.size()); + + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + for (byte[] row : rows) { + Delete delete = new Delete(row); + deletes.add(delete); + } + table.delete(deletes); + + } finally { + table.flushCommits(); + if ( null != table ) facade.putTable(table); + if ( null != deletes) deletes.clear(); + } + } +} diff --git a/src/compatibility/hadooplib_96/hbase/HReader.java b/src/compatibility/hadooplib_96/hbase/HReader.java new file mode 100644 index 0000000..571af19 --- /dev/null +++ b/src/compatibility/hadooplib_96/hbase/HReader.java @@ -0,0 +1,459 @@ +/* +* 
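A usage sketch for HDML, assuming illustrative table and family names; create() returns false when the table already exists, and the prefix flavour of truncateBatch() issues deletes in batches of roughly a thousand:

List<HColumnDescriptor> cols = new ArrayList<HColumnDescriptor>();
cols.add(new HColumnDescriptor("cf"));
boolean created = HDML.create("example-table", cols);   // no-op if already present
HDML.truncateBatch("example-table", "user-");           // delete rows whose key starts with "user-"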
Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.KeyOnlyFilter; +import org.apache.hadoop.hbase.filter.PageFilter; +import org.apache.hadoop.hbase.filter.RegexStringComparator; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hbase.util.Bytes; + +import com.bizosys.hsearch.util.HSearchLog; + +public class HReader { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + /** + * Scalar data will contain the amount to increase + * @param tableName + * @param scalar + * @throws SystemFault + */ + public static final long idGenerationByAutoIncr(final String tableName, + final RecordScalar scalar, final long amount ) throws HBaseException { + + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + long incrementedValue = table.incrementColumnValue( + scalar.pk, scalar.kv.family, scalar.kv.name, amount); + return incrementedValue; + } catch (Exception ex) { + throw new HBaseException("Error in getScalar :" + scalar.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public final static boolean exists (final String tableName, final byte[] pk) throws HBaseException { + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(pk); + return table.exists(getter); + } catch (Exception ex) { + throw new HBaseException("Error in existance checking :" + pk.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static List getMatchingRowIds(String tableName, String rowIdPattern) throws IOException { + + FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RegexStringComparator regex = new RegexStringComparator(rowIdPattern); + RowFilter aFilter = new RowFilter(CompareOp.EQUAL, regex); + filters.addFilter(aFilter); + filters.addFilter(new 
KeyOnlyFilter()); + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + + List rowIds = new ArrayList(); + + try { + + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan.setFilter(filters); + scanner = table.getScanner(scan); + + for (Result r : scanner) { + if (null == r) continue; + byte[] rowB = r.getRow(); + if (null == rowB) continue; + if (rowB.length == 0) continue; + String row = new String(rowB); + rowIds.add(row); + } + return rowIds; + + } catch (IOException ex) { + HSearchLog.l.fatal("Error while looking table :" + tableName + " for regex, " + rowIdPattern , ex); + throw ex; + } finally { + if (null != scanner) scanner.close(); + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final List getCompleteRow (final String tableName, + final byte[] pk) throws HBaseException{ + + return getCompleteRow (tableName, pk, null); + } + + public static final List getCompleteRow (final String tableName, final byte[] pk, + final Filter filter) throws HBaseException { + + HBaseFacade facade = null; + HTableWrapper table = null; + Result r = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(pk); + if (null != filter) getter.setFilter(filter); + if ( table.exists(getter) ) { + r = table.get(getter); + if ( null == r ) return null; + List nvs = new ArrayList(r.listCells().size()); + for (Cell cell : r.listCells()) { + NVBytes nv = new NVBytes(CellUtil.cloneFamily(cell),CellUtil.cloneQualifier(cell), CellUtil.cloneValue(cell)); + nvs.add(nv); + } + return nvs; + } + return null; + } catch (Exception ex) { + throw new HBaseException("Error in existance checking :" + pk.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final void getScalar (final String tableName, final RecordScalar scalar) throws HBaseException { + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(scalar.pk); + Result result = table.get(getter); + if ( null == result) return; + byte[] val = result.getValue(scalar.kv.family, scalar.kv.name); + if ( null != val ) scalar.kv.data = val; + } catch (Exception ex) { + throw new HBaseException("Error in getScalar :" + scalar.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final byte[] getScalar (final String tableName, + final byte[] family, final byte[] col, final byte[] pk) throws HBaseException { + + return getScalar(tableName,family,col,pk,null); + } + + + public static final byte[] getScalar (final String tableName, + final byte[] family, final byte[] col, final byte[] pk, final Filter filter) throws HBaseException { + + if ( null == family || null == col || null == pk ) return null; + + HBaseFacade facade = null; + HTableWrapper table = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + Get getter = new Get(pk); + if ( null != filter) getter = getter.setFilter(filter); + Result result = table.get(getter); + if ( null == result) return null; + return result.getValue(family, col); + } catch (Exception ex) { + StringBuilder sb = new StringBuilder(); + sb.append("Input during exception = Table : [").append(tableName); + 
sb.append("] , Family : [").append(new String(family)); + sb.append("] , Column : [").append(new String(col)); + sb.append("] , Key : [").append(new String(pk)); + sb.append(']'); + throw new HBaseException(sb.toString(), ex); + } finally { + if ( null != facade && null != table) facade.putTable(table); + } + } + + public static final void getAllValues(final String tableName, final byte[] family, + final byte[] col, final String keyPrefix, final IScanCallBack callback ) throws IOException { + + Filter rowFilter = null; + if ( null != keyPrefix) { + rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes())); + } + getAllValues(tableName, family, col, rowFilter, callback); + + } + + public static final void getAllValues(final String tableName, final byte[] family, + final byte[] col, final Filter filter, final IScanCallBack callback ) throws IOException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List matched = null; + try { + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > getAllValues."); + + facade = HBaseFacade.getInstance(); + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Table Facade is obtained."); + table = facade.getTable(tableName); + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Table is obtained."); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(family, col); + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HReader > Scanner is created."); + + if ( null != filter) scan = scan.setFilter(filter); + + scanner = table.getScanner(scan); + + long timeS = System.currentTimeMillis(); + + ColumnFamName aColFamilyName = new ColumnFamName(family, col); + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + + byte[] storedBytes = r.getValue(family, col); + if ( null == storedBytes) continue; + callback.process(r.getRow(), aColFamilyName, storedBytes); + } + + if ( DEBUG_ENABLED) { + long timeE = System.currentTimeMillis(); + HSearchLog.l.debug("HReader.getAllValues (" + tableName + ") execution time = " + + (timeE - timeS) ); + } + + } catch ( IOException ex) { + throw ex; + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + if ( null != matched) matched.clear(); + } + } + + + public static final void getAllValues(final String tableName, final List columns, + final String keyPrefix, final IScanCallBack callback ) throws IOException { + + Filter rowFilter = null; + if ( null != keyPrefix) { + rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes())); + } + getAllValues(tableName, columns, rowFilter, callback); + + } + + public final static void getAllValues(final String tableName, final List columns, + final Filter filter, final IScanCallBack callback ) throws IOException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List matched = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + for (ColumnFamName aColFamilyName : columns) { + scan = scan.addColumn(aColFamilyName.family, aColFamilyName.name); + } + + if ( null != filter) scan = scan.setFilter(filter); + + scanner = table.getScanner(scan); + + long timeS = System.currentTimeMillis(); + + for (Result r: scanner) { + 
if ( null == r) continue; + if ( r.isEmpty()) continue; + + for (ColumnFamName aColFamilyName : columns) { + byte[] storedBytes = r.getValue(aColFamilyName.family, aColFamilyName.name); + if ( null == storedBytes) continue; + callback.process(r.getRow(), aColFamilyName, storedBytes); + } + } + + if ( DEBUG_ENABLED) { + long timeE = System.currentTimeMillis(); + HSearchLog.l.debug("HReader.getAllValues (" + tableName + ") execution time = " + + (timeE - timeS) ); + } + + } catch ( IOException ex) { + throw ex; + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + if ( null != matched) matched.clear(); + } + } + + + /** + * Get all the keys of the table cutting the keyPrefix. + * @param tableName Table name + * @param kv Key-Value + * @param startKey Start Row Primary Key + * @param pageSize Page size + * @return Record Keys + * @throws SystemFault + */ + public static final void getAllKeys(final String tableName, final NV kv, + final String keyPrefix, final IScanCallBack callback) throws HBaseException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(kv.family, kv.name); + + if ( null != keyPrefix) { + Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, + new BinaryPrefixComparator(keyPrefix.getBytes())); + scan = scan.setFilter(rowFilter); + } + + scanner = table.getScanner(scan); + ColumnFamName familyName = new ColumnFamName(kv.family, kv.name); + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + callback.process(r.getRow(), familyName, null); + } + } catch ( IOException ex) { + throw new HBaseException(ex); + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + } + } + + /** + * Get the keys of the table + * @param tableName Table name + * @param kv Key-Value + * @param startKey Start Row Primary Key + * @param pageSize Page size + * @return Record Keys + * @throws SystemFault + */ + public static final List getKeysForAPage(final String tableName, final NV kv, + final byte[] startKey, final String keyPrefix, final int pageSize) throws HBaseException { + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + List keys = ( pageSize > 0 ) ? 
+ new ArrayList(pageSize): new ArrayList(1024); + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addColumn(kv.family, kv.name); + + if( null != keyPrefix) { + Filter rowFilter = new RowFilter(CompareFilter.CompareOp.NOT_EQUAL, + new BinaryPrefixComparator(Bytes.toBytes(keyPrefix))); + scan = scan.setFilter(rowFilter); + } + + if ( pageSize > 0) { + PageFilter pageFilter = new PageFilter(pageSize); + scan = scan.setFilter(pageFilter); + } + + if ( null != startKey) scan = scan.setStartRow(startKey); + + scanner = table.getScanner(scan); + + int counter = 0; + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + + if ( counter++ > pageSize) break; + keys.add(r.getRow()); + } + return keys; + } catch ( IOException ex) { + throw new HBaseException(ex); + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + } + } +} diff --git a/src/compatibility/hadooplib_96/hbase/HTableWrapper.java b/src/compatibility/hadooplib_96/hbase/HTableWrapper.java new file mode 100644 index 0000000..ae9cde5 --- /dev/null +++ b/src/compatibility/hadooplib_96/hbase/HTableWrapper.java @@ -0,0 +1,199 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.RetriesExhaustedException; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.Scan; + +import com.bizosys.hsearch.util.HSearchLog; + +/** + * Wraps an HBase table object. 
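HReader streams results through the IScanCallBack interface rather than materialising result lists; a minimal collector, assuming the single-column getAllValues overload shown above and illustrative table/column names:

public class ValueCollector implements IScanCallBack {
    public final List<byte[]> values = new ArrayList<byte[]>();

    @Override
    public void process(byte[] pk, ColumnFamName fn, byte[] storedBytes) throws IOException {
        values.add(storedBytes);   // row key arrives in pk, column coordinates in fn
    }
}

// ValueCollector collector = new ValueCollector();
// HReader.getAllValues("example-table", "cf".getBytes(), "col".getBytes(), "user-", collector);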
+ * @author karan + *@see org.apache.hadoop.hbase.client.HTableInterface + */ +public final class HTableWrapper { + + private static final boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + /** + * The table interface + */ + public HTableInterface tableInterface = null; + public HTable innerHtable = null; + + /** + * Name of HBase table + */ + String tableName = null; + + /** + * Constructor + * @param tableName The table name + * @param table Table interface + */ + public HTableWrapper(String tableName, HTableInterface table) { + this.tableInterface = table; + this.tableName = tableName; + } + + /** + * Get the table name in bytes + * @return Table name as byte array + */ + public byte[] getTableName() { + return tableInterface.getTableName(); + } + + /** + * Get table description + * @return Table Descriptor + * @throws IOException + */ + public HTableDescriptor getTableDescriptor() throws IOException { + return tableInterface.getTableDescriptor(); + } + + /** + * Test for the existence of columns in the table, as specified in the Get. + * @param get object + * @return True on existence + * @throws IOException + */ + public boolean exists(Get get) throws IOException { + return tableInterface.exists(get); + } + + public Result get(Get get) throws IOException{ + return tableInterface.get(get); + } + + public ResultScanner getScanner(Scan scan) throws IOException { + return tableInterface.getScanner(scan); + } + + public ResultScanner getScanner(byte[] family) throws IOException { + return tableInterface.getScanner(family); + } + + public ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException { + return tableInterface.getScanner(family, qualifier); + } + + public void put(Put put) throws IOException { + try { + tableInterface.put(put); + } catch ( RetriesExhaustedException ex) { + HBaseFacade.getInstance().recycleTable(this); + tableInterface.put(put); + } + } + + public void put(List puts) throws IOException { + try { + tableInterface.put(puts); + } catch ( RetriesExhaustedException ex) { + HBaseFacade.getInstance().recycleTable(this); + tableInterface.put(puts); + } + } + + public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, + byte[] value, Put put) throws IOException { + + return tableInterface.checkAndPut(row, family, qualifier,value, put ); + } + + public void delete(Delete delete) throws IOException { + tableInterface.delete(delete ); + } + + public void delete(List deletes) throws IOException { + if ( null == deletes) return; + if ( INFO_ENABLED) HSearchLog.l.info("HTableWrapper: Batch Deleting: " + deletes.size()); + tableInterface.delete(deletes); + } + + public void flushCommits() throws IOException { + tableInterface.flushCommits(); + } + + public void close() throws IOException { + tableInterface.close(); + if ( null != innerHtable) { + innerHtable.close(); + innerHtable = null; + } + } + + public long incrementColumnValue(byte[] row, + byte[] family, byte[] qualifier, long amount) throws IOException { + + return tableInterface.incrementColumnValue(row, family, qualifier, amount, Durability.SYNC_WAL); + } + + public Object[] batch(List actions) throws IOException, InterruptedException { + return tableInterface.batch(actions); + } + + public HRegionLocation getRegionLocation(byte[] row) throws IOException { + + + if ( null == innerHtable ) { + synchronized (this.tableName) { + if ( null == innerHtable) innerHtable = + new HTable(tableInterface.getConfiguration(), this.tableName); + } + } + return innerHtable.getRegionLocation(row); 
+ } + + public List getRegionLocation(List rows) throws IOException { + if ( null == rows) return null; + List regions = new ArrayList(); + + if ( null == innerHtable ) { + synchronized (this.tableName) { + if ( null == innerHtable) innerHtable = + new HTable(tableInterface.getConfiguration(), this.tableName); + } + } + + for (byte[] row : rows) { + regions.add(innerHtable.getRegionLocation(row)); + } + return regions; + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_96/hbase/HWriter.java b/src/compatibility/hadooplib_96/hbase/HWriter.java new file mode 100644 index 0000000..38a59b9 --- /dev/null +++ b/src/compatibility/hadooplib_96/hbase/HWriter.java @@ -0,0 +1,609 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.hbase; + +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; + +import com.bizosys.hsearch.util.HSearchLog; + + +/** + * All HBase write calls go through here. + * It supports Insert, Delete, Update and Merge operations. + * Merge is an operation where the read and the write happen inside + * a lock. This lock is never exposed to the caller. + * @author karan + * + */ +public class HWriter { + + private static final boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + //private boolean isBatchMode = false; + private static HWriter singleton = null; + + /** + * Factory for getting the HWriter instance. + * HWriter can execute safely with multiple parallel writers, whether they + * originate from a single machine or from a multi-machine environment, + * as well as in a single-threaded write environment. + * @param enableThreadSafety Should it run in a parallel clients mode + * @return HWriter instance. + */ + public static HWriter getInstance(boolean enableThreadSafety ) { + if ( null != singleton) return singleton; + synchronized (HWriter.class) { + if ( null != singleton) return singleton; + singleton = new HWriter(); + } + return singleton; + } + + /** + * Private constructor; use getInstance() instead. + */ + private HWriter() { + } + + /** + * Inserts a single scalar record, overwriting any existing value. + * A scalar record contains just one column.
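A usage sketch for the scalar write path, assuming RecordScalar(byte[] pk, NV kv) and NV(byte[] family, byte[] name, byte[] data) constructors whose definitions sit outside this diff; table and column names are illustrative:

NV cell = new NV("cf".getBytes(), "counter".getBytes(), Bytes.toBytes(0L));
RecordScalar record = new RecordScalar("row-1".getBytes(), cell);
HWriter.getInstance(true).insertScalar("example-table", record);

// The same cell can then serve as an id sequence via the reader side:
long next = HReader.idGenerationByAutoIncr("example-table", record, 1L);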
+ * @param tableName Table name + * @param record A Table record + * @throws IOException + */ + public final void insertScalar(final String tableName, final RecordScalar record) throws IOException { + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insertScalar:record " + tableName); + + byte[] pk = record.pk; + Put update = new Put(pk); + NV kv = record.kv; + update.add(kv.family,kv.name, kv.data); + update.setDurability(Durability.SYNC_WAL); + + HTableWrapper table = null; + HBaseFacade facade = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + table.put(update); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + } + } + + /** + * Insert multiple scalar records. If records exist, it overrides + * A scalar record contains just one column. + * @param tableName Table name + * @param records Table records + * @throws IOException + */ + public final void insertScalar(final String tableName, + final List records) throws IOException { + + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insertScalar:records table " + tableName); + + List updates = ObjectFactory.getInstance().getPutList(); + + for (RecordScalar record : records) { + Put update = new Put(record.pk); + NV kv = record.kv; + update.add(kv.family,kv.name, kv.data); + update.setDurability(Durability.SYNC_WAL); + updates.add(update); + } + HTableWrapper table = null; + HBaseFacade facade = null; + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + table.put(updates); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + if ( null != updates) ObjectFactory.getInstance().putPutsList(updates); + } + } + + /** + * Insert a record + * @param tableName + * @param record + * @throws IOException + */ + public final void insert(final String tableName, final Record record) throws IOException { + if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert to table " + tableName); + + HTableWrapper table = null; + HBaseFacade facade = null; + try { + Put update = new Put(record.pk); + for (NV param : record.getNVs()) { + update.add(param.family,param.name, param.data); + } + update.setDurability(Durability.SYNC_WAL); + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + table.put(update); + table.flushCommits(); + } finally { + if ( null != facade && null != table) { + facade.putTable(table); + } + } + } + + /** + * Inserting multiple records. It overrides the values of existing records. + * from the time we have read.. 
+     * @param tableName
+     * @param records
+     * @throws IOException
+     */
+    public final void insert(final String tableName, final List<Record> records) throws IOException {
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert:records to table " + tableName);
+
+        List<Put> updates = ObjectFactory.getInstance().getPutList();
+
+        for (Record record : records) {
+            Put update = new Put(record.pk);
+            for (NV param : record.getNVs()) {
+                update.add(param.family, param.name, param.data);
+            }
+            update.setDurability(Durability.SYNC_WAL);
+            updates.add(update);
+        }
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+            if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> insert:Putting records " + updates.size());
+            table.put(updates);
+            table.flushCommits();
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+            if ( null != updates) ObjectFactory.getInstance().putPutsList(updates);
+        }
+    }
+
+    /**
+     * Update a table. During the update it calls back the supplied
+     * update pipe for modifications such as byte merging.
+     * @param tableName
+     * @param pk
+     * @param pipe
+     * @param families
+     * @throws IOException
+     */
+    public final void update(final String tableName,
+        final byte[] pk, final IUpdatePipe pipe, final byte[][] families) throws IOException {
+
+        if ( null == tableName || null == pk) return;
+        if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> update to table " + tableName);
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            /**
+             * Scope the existence-check getter down, so that it does not
+             * mingle with the actual getter.
+             */
+            Get existenceGet = new Get(pk);
+            if ( ! table.exists(existenceGet) ) return;
+
+            if ( null != families) {
+                for (byte[] family : families) {
+                    existenceGet = existenceGet.addFamily(family);
+                }
+            }
+
+            Put update = null;
+            Delete delete = null;
+
+            int familiesT = ( null == families) ? 0 : families.length;
+            int[] familyByteLen = new int[familiesT];
+
+            Result r = table.get(existenceGet);
+            if ( null == r) return;
+            if ( null == r.listCells()) return;
+
+            for (Cell cell : r.listCells()) {
+                byte[] curVal = CellUtil.cloneValue(cell);
+                if ( null == curVal) continue;
+                if ( 0 == curVal.length) continue;
+                byte[] modifiedB = pipe.process(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), curVal);
+                int modifiedBLen = ( null == modifiedB) ? 0 : modifiedB.length;
+
+                /**
+                 * Count whether the family is to be chucked out
+                 */
+                for (int i=0; i
+
+    public final void mergeScalar(final String tableName, final List<RecordScalar> records)
+        throws IOException {
+
+        if ( null == tableName || null == records) return;
+        if (DEBUG_ENABLED)
+            HSearchLog.l.debug("HWriter: mergeScalar (" + tableName + ") , Count =" + records.size());
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+        List<Put> updates = ObjectFactory.getInstance().getPutList();
+
+        try {
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            for (RecordScalar scalar : records) {
+                byte[] pk = scalar.pk;
+                if ( 0 == pk.length) continue;
+                Get getter = new Get(pk);
+                byte[] famB = scalar.kv.family;
+                byte[] nameB = scalar.kv.name;
+
+                if ( table.exists(getter) ) {
+                    Get existingGet = new Get(pk);
+                    existingGet = existingGet.addColumn(famB, nameB);
+                    Result r = table.get(existingGet);
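+                    // Contract (as used here): scalar.merge(...) folds the stored
+                    // bytes into the record and returns false when nothing changed,
+                    // so unchanged records are skipped below.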
+                    if ( ! scalar.merge(r.getValue(famB, nameB)) ) {
+                        continue;
+                    }
+                }
+
+                NV kv = scalar.kv;
+                byte[] data = kv.data;
+                if ( null == data ) {
+                    continue;
+                }
+
+                Put update = new Put(pk);
+                update.add(famB, nameB, data);
+                update.setDurability(Durability.SYNC_WAL);
+                updates.add(update);
+            }
+
+            table.put(updates);
+            table.flushCommits();
+
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+
+            if ( null != updates ) ObjectFactory.getInstance().putPutsList(updates);
+        }
+    }
+
+    /**
+     * Merge a record, accessing the existing value.
+     * This happens under the locking mechanism.
+     * @param tableName Table name
+     * @param record A record
+     * @throws IOException
+     */
+    public final void merge(final String tableName, final Record record)
+        throws IOException {
+
+        if ( null == tableName || null == record) return;
+        if (DEBUG_ENABLED)
+            HSearchLog.l.debug("HWriter:merge Record (" + tableName + ")");
+
+        HTableWrapper table = null;
+        HBaseFacade facade = null;
+
+        try {
+            byte[] pk = record.pk;
+
+            facade = HBaseFacade.getInstance();
+            table = facade.getTable(tableName);
+
+            // Step 0: If the row does not exist, there is no need to merge. Just insert.
+            Get existsCheck = new Get(pk);
+            if ( ! table.exists(existsCheck) ) {
+                insert(tableName, record);
+                return;
+            }
+
+            // Step 1: Acquire a lock before merging.
+            if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Locking Row ");
+
+            Get existingGet = new Get(pk);
+            for (NV nv : record.getBlankNVs()) {
+                existingGet = existingGet.addColumn(nv.family, nv.name);
+            }
+
+            // Step 2: Merge the data with the existing values.
+            Result r = table.get(existingGet);
+            if ( null != r) {
+                if ( null != r.listCells()) {
+                    for (Cell cell : r.listCells()) {
+                        byte[] existingB = CellUtil.cloneValue(cell);
+                        if ( null == existingB) continue;
+                        if ( 0 == existingB.length) continue;
+                        record.merge(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), existingB);
+                    }
+                }
+            }
+
+            // Step 3: Only add values which have changed.
+            Put update = new Put(pk);
+            int totalCols = 0;
+            for (NV nv : record.getNVs()) {
+                byte[] data = nv.data;
+                if ( nv.isDataUnchanged) continue;
+                if ( null == data) continue; // guard: a null value cannot be written
+                if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> data Size " + data.length);
+                update = update.add(nv.family, nv.name, data);
+                totalCols++;
+            }
+
+            // Step 4: If nothing changed, there is nothing to do.
+            if ( totalCols == 0 ) return;
+
+            // Step 5: Write the changes.
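+            // Durability.SYNC_WAL asks HBase to sync the write-ahead log before
+            // acknowledging the write, trading a little latency for durability.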
+            update.setDurability(Durability.SYNC_WAL);
+            if (DEBUG_ENABLED) HSearchLog.l.debug("HWriter> Committing Updates");
+            table.put(update);
+            table.flushCommits();
+
+        } finally {
+            if ( null != facade && null != table) {
+                facade.putTable(table);
+            }
+        }
+    }
+
+
+    /**
+     * Compare byte arrays for equality, starting at an offset in the input array.
+     * @param offset Starting position in the input byte array
+     * @param inputBytes Bytes to compare with
+     * @param compareBytes Bytes to compare to
+     * @return True if they match
+     */
+    private final boolean compareBytes(final int offset,
+        final byte[] inputBytes, final byte[] compareBytes) {
+
+        int inputBytesT = inputBytes.length;
+        int compareBytesT = compareBytes.length;
+        if ( compareBytesT != inputBytesT - offset) return false;
+
+        if ( compareBytes[0] != inputBytes[offset]) return false;
+        if ( compareBytes[compareBytesT - 1] != inputBytes[compareBytesT + offset - 1] ) return false;
+
+        switch (compareBytesT)
+        {
+            case 3:
+                return compareBytes[1] == inputBytes[1 + offset];
+            case 4:
+                return compareBytes[1] == inputBytes[1 + offset] &&
+                    compareBytes[2] == inputBytes[2 + offset];
+            case 5:
+                return compareBytes[1] == inputBytes[1 + offset] &&
+                    compareBytes[2] == inputBytes[2 + offset] &&
+                    compareBytes[3] == inputBytes[3 + offset];
+            case 6:
+                return compareBytes[1] == inputBytes[1 + offset] &&
+                    compareBytes[3] == inputBytes[3 + offset] &&
+                    compareBytes[2] == inputBytes[2 + offset] &&
+                    compareBytes[4] == inputBytes[4 + offset];
+            case 31:
+                for ( int a = 1; a <= 6; a++) {
+                    if ( !
+                        (compareBytes[a] == inputBytes[a + offset] &&
+                        compareBytes[a + 6] == inputBytes[a + 6 + offset] &&
+                        compareBytes[a + 12] == inputBytes[a + 12 + offset] &&
+                        compareBytes[a + 18] == inputBytes[a + 18 + offset] &&
+                        compareBytes[a + 24] == inputBytes[a + 24 + offset]) ) return false;
+                }
+                break;
+            default:
+                // Lengths 7 through 30 previously had identical case bodies;
+                // they share this element-wise scan.
+                for ( int i = offset; i < compareBytesT - 1; i++) {
+                    if ( compareBytes[i] != inputBytes[offset + i]) return false;
+                }
+        }
+        return true;
+    }
+
+    /**
+     * Compare two byte arrays from the beginning.
+     * @param inputBytes Bytes to compare with
+     * @param compareBytes Bytes to compare to
+     * @return True if they match
+     */
+    private final boolean compareBytes(final byte[] inputBytes, final byte[] compareBytes) {
+        return compareBytes(0, inputBytes, compareBytes);
+    }
+
+}
\ No newline at end of file
diff --git a/src/compatibility/hadooplib_96/hbase/ObjectFactory.java b/src/compatibility/hadooplib_96/hbase/ObjectFactory.java
new file mode 100644
index 0000000..a152514
--- /dev/null
+++ b/src/compatibility/hadooplib_96/hbase/ObjectFactory.java
@@ -0,0 +1,223 @@
+/*
+* Copyright 2010 Bizosys Technologies Limited
+*
+* Licensed to the Bizosys Technologies Limited (Bizosys) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The Bizosys licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package com.bizosys.hsearch.hbase;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Stack;
+
+import org.apache.hadoop.hbase.client.Put;
+
+public class ObjectFactory {
+
+    private static int MINIMUM_CACHE = 10;
+    private static int MAXIMUM_CACHE = 4096;
+
+    private static ObjectFactory thisInstance = new ObjectFactory();
+    public static ObjectFactory getInstance() {
+        return thisInstance;
+    }
+
+    Stack<List<Put>> putsLists = new Stack<List<Put>>();
+    Stack<List<byte[]>> byteArrLists = new Stack<List<byte[]>>();
+    Stack<Map> blockMap = new Stack<Map>();
+    Stack<Map> blockMaps = new Stack<Map>();
+    Stack<List<NV>> nvLists = new Stack<List<NV>>();
+    Stack<Set<String>> stringSets = new Stack<Set<String>>();
+    Stack<List<String>> stringLists = new Stack<List<String>>();
+    Stack<List<Short>> shortLists = new Stack<List<Short>>();
+    Stack<Set<Long>> longSets = new Stack<Set<Long>>();
+    Stack<List<Integer>> integerLists = new Stack<List<Integer>>();
+
+
+    public final List<Put> getPutList() {
+        List<Put> puts = null;
+        if (putsLists.size() > MINIMUM_CACHE ) puts = putsLists.pop();
+        if ( null != puts ) return puts;
+        return new ArrayList<Put>(256);
+    }
+
+    public final void putPutsList(List<Put> puts ) {
+        if ( null == puts) return;
+        puts.clear();
+        if (putsLists.size() > MAXIMUM_CACHE ) return;
+        if ( putsLists.contains(puts)) return;
+        putsLists.push(puts);
+    }
+
+    public final List<byte[]> getByteArrList() {
+        List<byte[]> bytesA = null;
+        if (byteArrLists.size() > MINIMUM_CACHE ) bytesA = byteArrLists.pop();
+        if ( null != bytesA ) return bytesA;
+        return new ArrayList<byte[]>(32);
+    }
+
+    public final void putByteArrList(final List<byte[]> bytesA ) {
+        if ( null == bytesA) return;
+        bytesA.clear();
+        if (byteArrLists.size() > MAXIMUM_CACHE ) return;
+        if ( byteArrLists.contains(bytesA)) return;
+        byteArrLists.push(bytesA);
+    }
+
+    public final List<NV> getNVList() {
+        List<NV> nvs = null;
+        if (nvLists.size() > MINIMUM_CACHE ) nvs = nvLists.pop();
+        if ( null != nvs ) return nvs;
+        return new ArrayList<NV>();
+    }
+
+    public final void putNVList(final List<NV> nvs ) {
+        if ( null == nvs) return;
+        nvs.clear();
+        if (nvLists.size() > MAXIMUM_CACHE ) return;
+        if ( nvLists.contains(nvs)) return;
+        nvLists.push(nvs);
+    }
+
+    public final Map getBytesList() {
+        Map lstB = null;
+        if (blockMap.size() > MINIMUM_CACHE ) lstB = blockMap.pop();
+        if ( null != lstB ) return lstB;
+        return new HashMap();
+    }
+
+    public final void putBytesList(final Map lstB ) {
+        if ( null == lstB) return;
+        lstB.clear();
+        if (blockMap.size() > MAXIMUM_CACHE ) return;
+        if ( blockMap.contains(lstB)) return;
+        blockMap.push(lstB);
+    }
+
+
+    public final Map getByteBlockMap(){
+        Map obj = null;
+        if (blockMaps.size() > MINIMUM_CACHE ) obj = blockMaps.pop();
+        if ( null != obj ) return obj;
+        return new Hashtable();
+    }
+
+    public final void putByteBlockMap(final Map obj){
+        if ( null == obj) return;
+        obj.clear();
+        if (blockMaps.size() > MAXIMUM_CACHE ) return;
+        if ( blockMaps.contains(obj)) return;
+        blockMaps.push(obj);
+    }
+
+    public final Set<String> getStringSet(){
+        Set<String> obj = null;
+        if (stringSets.size() > MINIMUM_CACHE ) obj = stringSets.pop();
+        if ( null != obj ) return obj;
+        return new HashSet<String>();
+    }
+
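+    // Illustrative round-trip through this pool (a sketch):
+    //   List<Put> puts = ObjectFactory.getInstance().getPutList();
+    //   try { /* fill puts and hand them to the table */ }
+    //   finally { ObjectFactory.getInstance().putPutsList(puts); }
+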
+    public final void putStringSet(final Set<String> obj){
+        if ( null == obj) return;
+        obj.clear();
+        if (stringSets.size() > MAXIMUM_CACHE ) return;
+        if ( stringSets.contains(obj)) return;
+        stringSets.push(obj);
+    }
+
+    public final List<String> getStringList(){
+        List<String> obj = null;
+        if (stringLists.size() > MINIMUM_CACHE ) obj = stringLists.pop();
+        if ( null != obj ) return obj;
+        return new ArrayList<String>();
+    }
+
+    public final void putStringList(final List<String> obj){
+        if ( null == obj) return;
+        obj.clear();
+        if (stringLists.size() > MAXIMUM_CACHE ) return;
+        if ( stringLists.contains(obj)) return;
+        stringLists.push(obj);
+    }
+
+    public final List<Short> getShortList(){
+        List<Short> obj = null;
+        if (shortLists.size() > MINIMUM_CACHE ) obj = shortLists.pop();
+        if ( null != obj ) return obj;
+        return new ArrayList<Short>();
+    }
+
+    public final void putShortList(final List<Short> obj){
+        if ( null == obj) return;
+        obj.clear();
+        if (shortLists.size() > MAXIMUM_CACHE ) return;
+        if ( shortLists.contains(obj)) return;
+        shortLists.push(obj);
+    }
+
+    public final List<Integer> getIntegerList(){
+        List<Integer> obj = null;
+        if (integerLists.size() > MINIMUM_CACHE ) obj = integerLists.pop();
+        if ( null != obj ) return obj;
+        return new ArrayList<Integer>();
+    }
+
+    public final void putIntegerList(final List<Integer> obj){
+        if ( null == obj) return;
+        obj.clear();
+        if (integerLists.size() > MAXIMUM_CACHE ) return;
+        if ( integerLists.contains(obj)) return;
+        integerLists.push(obj);
+    }
+
+
+    public final Set<Long> getLongSet(){
+        Set<Long> obj = null;
+        if (longSets.size() > MINIMUM_CACHE ) obj = longSets.pop();
+        if ( null != obj ) return obj;
+        return new HashSet<Long>();
+    }
+
+    public final void putLongSet(final Set<Long> obj){
+        if ( null == obj) return;
+        obj.clear();
+        if (longSets.size() > MAXIMUM_CACHE ) return;
+        if ( longSets.contains(obj)) return;
+        longSets.push(obj);
+    }
+
+    public final String getStatus() {
+        StringBuilder sb = new StringBuilder(476);
+        sb.append("");
+        sb.append("blockMap:").append(blockMap.size()).append('|');
+        sb.append("blockMaps:").append(blockMaps.size()).append('|');
+        sb.append("nvLists:").append(nvLists.size()).append('|');
+        sb.append("stringSets:").append(stringSets.size()).append('|');
+        sb.append("stringLists:").append(stringLists.size()).append('|');
+        sb.append("shortLists:").append(shortLists.size()).append('|');
+        sb.append("longSets:").append(longSets.size()).append('|');
+        sb.append("integerLists:").append(integerLists.size());
+        sb.append("");
+        return sb.toString();
+    }
+
+}
diff --git a/src/compatibility/hadooplib_96/protobuf/HSearchCoprocessorProtos.java b/src/compatibility/hadooplib_96/protobuf/HSearchCoprocessorProtos.java
new file mode 100644
index 0000000..94deaba
--- /dev/null
+++ b/src/compatibility/hadooplib_96/protobuf/HSearchCoprocessorProtos.java
@@ -0,0 +1,4613 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: HSearchCoprocessor.proto + +package com.bizosys.hsearch.treetable.storage.protobuf.generated; + +public final class HSearchCoprocessorProtos { + private HSearchCoprocessorProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ColumnOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + /** + * required bytes family = 1; + */ + boolean hasFamily(); + /** + * required bytes family = 1; + */ + com.google.protobuf.ByteString getFamily(); + + // required bytes qualifier = 2; + /** + * required bytes qualifier = 2; + */ + boolean hasQualifier(); + /** + * required bytes qualifier = 2; + */ + com.google.protobuf.ByteString getQualifier(); + } + /** + * Protobuf type {@code Column} + * + *
+   * <pre>
+   **
+   * Container for a list of column qualifier names of a family.
+   * </pre>
+ */ + public static final class Column extends + com.google.protobuf.GeneratedMessage + implements ColumnOrBuilder { + // Use Column.newBuilder() to construct. + private Column(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Column defaultInstance; + public static Column getDefaultInstance() { + return defaultInstance; + } + + public Column getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Column( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + qualifier_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_Column_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Column parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Column(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family = 1; + */ + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required bytes qualifier = 
2; + public static final int QUALIFIER_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString qualifier_; + /** + * required bytes qualifier = 2; + */ + public boolean hasQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes qualifier = 2; + */ + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, qualifier_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, qualifier_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code Column} + * + *
+     * <pre>
+     **
+     * Container for a list of column qualifier names of a family.
+     * </pre>
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_Column_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_Column_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column)other); + } 
else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasQualifier()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family = 1; + */ + public com.google.protobuf.ByteString getFamily() { + return family_; + } + /** + * required bytes family = 1; + */ + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + /** + * required bytes family = 1; + */ + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required bytes qualifier = 2; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes qualifier = 2; + */ + public boolean hasQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required bytes qualifier = 2; + */ + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + /** + * required bytes qualifier = 2; + */ + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + qualifier_ = value; + onChanged(); + return this; + } + /** + * required bytes qualifier = 2; + */ + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Column) + } + + static { + defaultInstance = new Column(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Column) + } + + public interface RowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Column familyWithQualifier = 1; + /** + * repeated .Column familyWithQualifier = 1; + */ + java.util.List + getFamilyWithQualifierList(); + /** + * repeated .Column 
familyWithQualifier = 1; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(int index); + /** + * repeated .Column familyWithQualifier = 1; + */ + int getFamilyWithQualifierCount(); + /** + * repeated .Column familyWithQualifier = 1; + */ + java.util.List + getFamilyWithQualifierOrBuilderList(); + /** + * repeated .Column familyWithQualifier = 1; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder( + int index); + + // optional .HSearchGenericFilterMessage filter = 2; + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + boolean hasFilter(); + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage getFilter(); + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder getFilterOrBuilder(); + } + /** + * Protobuf type {@code RowRequest} + */ + public static final class RowRequest extends + com.google.protobuf.GeneratedMessage + implements RowRequestOrBuilder { + // Use RowRequest.newBuilder() to construct. + private RowRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RowRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RowRequest defaultInstance; + public static RowRequest getDefaultInstance() { + return defaultInstance; + } + + public RowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + familyWithQualifier_.add(input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.PARSER, extensionRegistry)); + break; + } + case 18: { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = java.util.Collections.unmodifiableList(familyWithQualifier_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RowRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // repeated .Column familyWithQualifier = 1; + public static final int FAMILYWITHQUALIFIER_FIELD_NUMBER = 1; + private java.util.List familyWithQualifier_; + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List getFamilyWithQualifierList() { + return familyWithQualifier_; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List + getFamilyWithQualifierOrBuilderList() { + return familyWithQualifier_; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public int getFamilyWithQualifierCount() { + return familyWithQualifier_.size(); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(int index) { + return familyWithQualifier_.get(index); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder( + int index) { + return familyWithQualifier_.get(index); + } + + // optional .HSearchGenericFilterMessage filter = 2; + public static final int FILTER_FIELD_NUMBER = 2; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage filter_; + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public boolean hasFilter() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage getFilter() { + return filter_; + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder getFilterOrBuilder() { + return filter_; + } + + private void initFields() { + familyWithQualifier_ = java.util.Collections.emptyList(); + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getFamilyWithQualifierCount(); i++) { + if (!getFamilyWithQualifier(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < familyWithQualifier_.size(); i++) { + output.writeMessage(1, familyWithQualifier_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(2, filter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < familyWithQualifier_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, familyWithQualifier_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, filter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest) obj; + + boolean result = true; + result = result && getFamilyWithQualifierList() + .equals(other.getFamilyWithQualifierList()); + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFamilyWithQualifierCount() > 0) { + hash = (37 * hash) + FAMILYWITHQUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getFamilyWithQualifierList().hashCode(); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code RowRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFamilyWithQualifierFieldBuilder(); + getFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + familyWithQualifierBuilder_.clear(); + } + if (filterBuilder_ == null) { + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowRequest_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (familyWithQualifierBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = java.util.Collections.unmodifiableList(familyWithQualifier_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.familyWithQualifier_ = familyWithQualifier_; + } else { + result.familyWithQualifier_ = 
familyWithQualifierBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.getDefaultInstance()) return this; + if (familyWithQualifierBuilder_ == null) { + if (!other.familyWithQualifier_.isEmpty()) { + if (familyWithQualifier_.isEmpty()) { + familyWithQualifier_ = other.familyWithQualifier_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.addAll(other.familyWithQualifier_); + } + onChanged(); + } + } else { + if (!other.familyWithQualifier_.isEmpty()) { + if (familyWithQualifierBuilder_.isEmpty()) { + familyWithQualifierBuilder_.dispose(); + familyWithQualifierBuilder_ = null; + familyWithQualifier_ = other.familyWithQualifier_; + bitField0_ = (bitField0_ & ~0x00000001); + familyWithQualifierBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyWithQualifierFieldBuilder() : null; + } else { + familyWithQualifierBuilder_.addAllMessages(other.familyWithQualifier_); + } + } + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getFamilyWithQualifierCount(); i++) { + if (!getFamilyWithQualifier(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .Column familyWithQualifier = 1; + private java.util.List familyWithQualifier_ = + java.util.Collections.emptyList(); + private void ensureFamilyWithQualifierIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = new java.util.ArrayList(familyWithQualifier_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> familyWithQualifierBuilder_; + + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List getFamilyWithQualifierList() { + if (familyWithQualifierBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyWithQualifier_); + } else { + return familyWithQualifierBuilder_.getMessageList(); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public int getFamilyWithQualifierCount() { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_.size(); + } else { + return familyWithQualifierBuilder_.getCount(); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(int index) { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_.get(index); + } else { + return familyWithQualifierBuilder_.getMessage(index); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder setFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.set(index, value); + onChanged(); + } else { + familyWithQualifierBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder setFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.set(index, builderForValue.build()); + onChanged(); + } else { + familyWithQualifierBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(value); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(index, value); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(builderForValue.build()); + onChanged(); + } else { + 
familyWithQualifierBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(index, builderForValue.build()); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addAllFamilyWithQualifier( + java.lang.Iterable values) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + super.addAll(values, familyWithQualifier_); + onChanged(); + } else { + familyWithQualifierBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder clearFamilyWithQualifier() { + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + familyWithQualifierBuilder_.clear(); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder removeFamilyWithQualifier(int index) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.remove(index); + onChanged(); + } else { + familyWithQualifierBuilder_.remove(index); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder getFamilyWithQualifierBuilder( + int index) { + return getFamilyWithQualifierFieldBuilder().getBuilder(index); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder( + int index) { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_.get(index); } else { + return familyWithQualifierBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List + getFamilyWithQualifierOrBuilderList() { + if (familyWithQualifierBuilder_ != null) { + return familyWithQualifierBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyWithQualifier_); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder addFamilyWithQualifierBuilder() { + return getFamilyWithQualifierFieldBuilder().addBuilder( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance()); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder addFamilyWithQualifierBuilder( + int index) { + return getFamilyWithQualifierFieldBuilder().addBuilder( + index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance()); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List + getFamilyWithQualifierBuilderList() { + return getFamilyWithQualifierFieldBuilder().getBuilderList(); + } + 
private com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> + getFamilyWithQualifierFieldBuilder() { + if (familyWithQualifierBuilder_ == null) { + familyWithQualifierBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder>( + familyWithQualifier_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + familyWithQualifier_ = null; + } + return familyWithQualifierBuilder_; + } + + // optional .HSearchGenericFilterMessage filter = 2; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder> filterBuilder_; + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public boolean hasFilter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public Builder setFilter(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public Builder setFilter( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public Builder mergeFilter(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + filter_ != com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance()) { + filter_ = + 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder getFilterBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + /** + * optional .HSearchGenericFilterMessage filter = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RowRequest) + } + + static { + defaultInstance = new RowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowRequest) + } + + public interface RowResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes result = 1; + /** + * required bytes result = 1; + */ + boolean hasResult(); + /** + * required bytes result = 1; + */ + com.google.protobuf.ByteString getResult(); + } + /** + * Protobuf type {@code RowResponse} + */ + public static final class RowResponse extends + com.google.protobuf.GeneratedMessage + implements RowResponseOrBuilder { + // Use RowResponse.newBuilder() to construct. 
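/*
 * Illustrative usage sketch (not emitted by protoc, kept in a comment so the
 * generated class is unchanged): round-tripping the RowRequest message whose
 * builder is generated above. Per the generated isInitialized() logic, both
 * fields of RowRequest may be left empty, so an empty request builds cleanly;
 * the variable names (req, wire, parsed) are assumptions for this example.
 *
 *   RowRequest req = RowRequest.newBuilder().build(); // valid: repeated field empty, filter unset
 *   byte[] wire = req.toByteArray();                  // standard protobuf serialization
 *   RowRequest parsed = RowRequest.parseFrom(wire);
 *   int n = parsed.getFamilyWithQualifierCount();     // 0 for this empty request
 *
 * A populated request would call addFamilyWithQualifier(column) with a fully
 * initialized Column (defined earlier in this file) and, optionally,
 * setFilter(...) with an initialized HSearchGenericFilterMessage.
 */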
+ private RowResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RowResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RowResponse defaultInstance; + public static RowResponse getDefaultInstance() { + return defaultInstance; + } + + public RowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RowResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + result_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RowResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RowResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString result_; + /** + * required bytes result = 1; + */ + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes result = 1; + */ + public com.google.protobuf.ByteString getResult() { + return result_; + } + + private void initFields() { + result_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if 
(isInitialized != -1) return isInitialized == 1; + + if (!hasResult()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, result_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, result_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code RowResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + 
result_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_RowResponse_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.result_ = result_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + setResult(other.getResult()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasResult()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes result = 1; + private com.google.protobuf.ByteString result_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes result = 1; + */ + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes result = 1; + */ + public com.google.protobuf.ByteString getResult() { + return result_; 
+ } + /** + * required bytes result = 1; + */ + public Builder setResult(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + result_ = value; + onChanged(); + return this; + } + /** + * required bytes result = 1; + */ + public Builder clearResult() { + bitField0_ = (bitField0_ & ~0x00000001); + result_ = getDefaultInstance().getResult(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RowResponse) + } + + static { + defaultInstance = new RowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowResponse) + } + + public interface MultiRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .Column familyWithQualifier = 1; + /** + * required .Column familyWithQualifier = 1; + */ + boolean hasFamilyWithQualifier(); + /** + * required .Column familyWithQualifier = 1; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(); + /** + * required .Column familyWithQualifier = 1; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder(); + + // optional .HSearchScalarFilterMessage filter = 2; + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + boolean hasFilter(); + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage getFilter(); + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder getFilterOrBuilder(); + + // repeated bytes rows = 3; + /** + * repeated bytes rows = 3; + */ + java.util.List getRowsList(); + /** + * repeated bytes rows = 3; + */ + int getRowsCount(); + /** + * repeated bytes rows = 3; + */ + com.google.protobuf.ByteString getRows(int index); + } + /** + * Protobuf type {@code MultiRowRequest} + */ + public static final class MultiRowRequest extends + com.google.protobuf.GeneratedMessage + implements MultiRowRequestOrBuilder { + // Use MultiRowRequest.newBuilder() to construct. 
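/*
 * Illustrative usage sketch (not emitted by protoc) for the RowResponse
 * message completed above. Its single field "result" is required, so it must
 * be set before build(); every call below appears in the generated API.
 *
 *   RowResponse resp = RowResponse.newBuilder()
 *       .setResult(com.google.protobuf.ByteString.copyFromUtf8("payload")) // required bytes result = 1
 *       .build();                                     // throws if result were unset
 *   RowResponse parsed = RowResponse.parseFrom(resp.toByteArray());
 *   boolean ok = parsed.hasResult();                  // true after the round trip
 */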
+ private MultiRowRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MultiRowRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MultiRowRequest defaultInstance; + public static MultiRowRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiRowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = familyWithQualifier_.toBuilder(); + } + familyWithQualifier_ = input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(familyWithQualifier_); + familyWithQualifier_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + rows_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + rows_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + rows_ = java.util.Collections.unmodifiableList(rows_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_MultiRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_MultiRowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiRowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRowRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .Column familyWithQualifier = 1; + public static final int FAMILYWITHQUALIFIER_FIELD_NUMBER = 1; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column familyWithQualifier_; + /** + * required .Column familyWithQualifier = 1; + */ + public boolean hasFamilyWithQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier() { + return familyWithQualifier_; + } + /** + * required .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder() { + return familyWithQualifier_; + } + + // optional .HSearchScalarFilterMessage filter = 2; + public static final int FILTER_FIELD_NUMBER = 2; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage filter_; + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public boolean hasFilter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage getFilter() { + return filter_; + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder getFilterOrBuilder() { + return filter_; + } + + // repeated bytes rows = 3; + public static final int ROWS_FIELD_NUMBER = 3; + private java.util.List rows_; + /** + * repeated bytes rows = 3; + */ + public java.util.List + getRowsList() { + return rows_; + } + /** + * repeated bytes rows = 3; + */ + public int getRowsCount() { + return rows_.size(); + } + /** + * repeated bytes rows = 3; + */ + public com.google.protobuf.ByteString getRows(int index) { + return rows_.get(index); + } + + private void initFields() { + familyWithQualifier_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance(); + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance(); + rows_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + 
if (!hasFamilyWithQualifier()) { + memoizedIsInitialized = 0; + return false; + } + if (!getFamilyWithQualifier().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, familyWithQualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, filter_); + } + for (int i = 0; i < rows_.size(); i++) { + output.writeBytes(3, rows_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, familyWithQualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, filter_); + } + { + int dataSize = 0; + for (int i = 0; i < rows_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(rows_.get(i)); + } + size += dataSize; + size += 1 * getRowsList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest) obj; + + boolean result = true; + result = result && (hasFamilyWithQualifier() == other.hasFamilyWithQualifier()); + if (hasFamilyWithQualifier()) { + result = result && getFamilyWithQualifier() + .equals(other.getFamilyWithQualifier()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && getRowsList() + .equals(other.getRowsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamilyWithQualifier()) { + hash = (37 * hash) + FAMILYWITHQUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getFamilyWithQualifier().hashCode(); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (getRowsCount() > 0) { + hash = (37 * hash) + ROWS_FIELD_NUMBER; + hash = (53 * hash) + getRowsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MultiRowRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_MultiRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_MultiRowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFamilyWithQualifierFieldBuilder(); + getFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance(); + } else { + familyWithQualifierBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (filterBuilder_ == null) { + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + rows_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_MultiRowRequest_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if 
(familyWithQualifierBuilder_ == null) { + result.familyWithQualifier_ = familyWithQualifier_; + } else { + result.familyWithQualifier_ = familyWithQualifierBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + rows_ = java.util.Collections.unmodifiableList(rows_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.rows_ = rows_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.getDefaultInstance()) return this; + if (other.hasFamilyWithQualifier()) { + mergeFamilyWithQualifier(other.getFamilyWithQualifier()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (!other.rows_.isEmpty()) { + if (rows_.isEmpty()) { + rows_ = other.rows_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureRowsIsMutable(); + rows_.addAll(other.rows_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamilyWithQualifier()) { + + return false; + } + if (!getFamilyWithQualifier().isInitialized()) { + + return false; + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .Column familyWithQualifier = 1; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column familyWithQualifier_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> familyWithQualifierBuilder_; + /** + * required .Column familyWithQualifier = 1; + */ + public boolean hasFamilyWithQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + 
/** + * required .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier() { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_; + } else { + return familyWithQualifierBuilder_.getMessage(); + } + } + /** + * required .Column familyWithQualifier = 1; + */ + public Builder setFamilyWithQualifier(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + familyWithQualifier_ = value; + onChanged(); + } else { + familyWithQualifierBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .Column familyWithQualifier = 1; + */ + public Builder setFamilyWithQualifier( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = builderForValue.build(); + onChanged(); + } else { + familyWithQualifierBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .Column familyWithQualifier = 1; + */ + public Builder mergeFamilyWithQualifier(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + familyWithQualifier_ != com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance()) { + familyWithQualifier_ = + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.newBuilder(familyWithQualifier_).mergeFrom(value).buildPartial(); + } else { + familyWithQualifier_ = value; + } + onChanged(); + } else { + familyWithQualifierBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .Column familyWithQualifier = 1; + */ + public Builder clearFamilyWithQualifier() { + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance(); + onChanged(); + } else { + familyWithQualifierBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder getFamilyWithQualifierBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFamilyWithQualifierFieldBuilder().getBuilder(); + } + /** + * required .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder() { + if (familyWithQualifierBuilder_ != null) { + return familyWithQualifierBuilder_.getMessageOrBuilder(); + } else { + return familyWithQualifier_; + } + } + /** + * required .Column familyWithQualifier = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> + getFamilyWithQualifierFieldBuilder() { + if 
(familyWithQualifierBuilder_ == null) { + familyWithQualifierBuilder_ = new com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder>( + familyWithQualifier_, + getParentForChildren(), + isClean()); + familyWithQualifier_ = null; + } + return familyWithQualifierBuilder_; + } + + // optional .HSearchScalarFilterMessage filter = 2; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder> filterBuilder_; + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public boolean hasFilter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public Builder setFilter(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public Builder setFilter( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public Builder mergeFilter(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + filter_ != com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance()) { + filter_ = + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder getFilterBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + /** + * optional .HSearchScalarFilterMessage filter = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // repeated bytes rows = 3; + private java.util.List rows_ = java.util.Collections.emptyList(); + private void ensureRowsIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + rows_ = new java.util.ArrayList(rows_); + bitField0_ |= 0x00000004; + } + } + /** + * repeated bytes rows = 3; + */ + public java.util.List + getRowsList() { + return java.util.Collections.unmodifiableList(rows_); + } + /** + * repeated bytes rows = 3; + */ + public int getRowsCount() { + return rows_.size(); + } + /** + * repeated bytes rows = 3; + */ + public com.google.protobuf.ByteString getRows(int index) { + return rows_.get(index); + } + /** + * repeated bytes rows = 3; + */ + public Builder setRows( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowsIsMutable(); + rows_.set(index, value); + onChanged(); + return this; + } + /** + * repeated bytes rows = 3; + */ + public Builder addRows(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowsIsMutable(); + rows_.add(value); + onChanged(); + return this; + } + /** + * repeated bytes rows = 3; + */ + public Builder addAllRows( + java.lang.Iterable values) { + ensureRowsIsMutable(); + super.addAll(values, rows_); + onChanged(); + return this; + } + /** + * repeated bytes rows = 3; + */ + public Builder clearRows() { + rows_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MultiRowRequest) + } + + static { + defaultInstance = new MultiRowRequest(true); + 
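/*
 * Illustrative usage sketch (not emitted by protoc) for the MultiRowRequest
 * builder completed above. Unlike RowRequest, familyWithQualifier is a
 * required .Column here, so build() only succeeds once it is set with a fully
 * initialized Column; "b" and the row keys are assumptions for this example.
 *
 *   MultiRowRequest.Builder b = MultiRowRequest.newBuilder()
 *       .addRows(com.google.protobuf.ByteString.copyFromUtf8("row-1"))  // repeated bytes rows = 3
 *       .addRows(com.google.protobuf.ByteString.copyFromUtf8("row-2"));
 *   boolean ready = b.isInitialized();  // false until setFamilyWithQualifier(col) is called
 *   // After b.setFamilyWithQualifier(col) with an initialized Column,
 *   // b.build() yields a message that parseFrom(byte[]) can reconstruct.
 */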
defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiRowRequest) + } + + public interface BytesRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Column familyWithQualifier = 1; + /** + * repeated .Column familyWithQualifier = 1; + */ + java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column> + getFamilyWithQualifierList(); + /** + * repeated .Column familyWithQualifier = 1; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(int index); + /** + * repeated .Column familyWithQualifier = 1; + */ + int getFamilyWithQualifierCount(); + /** + * repeated .Column familyWithQualifier = 1; + */ + java.util.List<? extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> + getFamilyWithQualifierOrBuilderList(); + /** + * repeated .Column familyWithQualifier = 1; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder( + int index); + + // optional .HSearchBytesFilterMessage filter = 2; + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + boolean hasFilter(); + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage getFilter(); + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder getFilterOrBuilder(); + } + /** + * Protobuf type {@code BytesRowRequest} + */ + public static final class BytesRowRequest extends + com.google.protobuf.GeneratedMessage + implements BytesRowRequestOrBuilder { + // Use BytesRowRequest.newBuilder() to construct. + private BytesRowRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private BytesRowRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final BytesRowRequest defaultInstance; + public static BytesRowRequest getDefaultInstance() { + return defaultInstance; + } + + public BytesRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BytesRowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = new java.util.ArrayList<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column>(); + mutable_bitField0_ |= 0x00000001; + } + familyWithQualifier_.add(input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.PARSER, extensionRegistry)); + break; + } + case 18: { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) ==
0x00000001)) { + subBuilder = filter_.toBuilder(); + } + filter_ = input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(filter_); + filter_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = java.util.Collections.unmodifiableList(familyWithQualifier_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_BytesRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_BytesRowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.Builder.class); + } + + public static com.google.protobuf.Parser<BytesRowRequest> PARSER = + new com.google.protobuf.AbstractParser<BytesRowRequest>() { + public BytesRowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BytesRowRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<BytesRowRequest> getParserForType() { + return PARSER; + } + + private int bitField0_; + // repeated .Column familyWithQualifier = 1; + public static final int FAMILYWITHQUALIFIER_FIELD_NUMBER = 1; + private java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column> familyWithQualifier_; + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column> getFamilyWithQualifierList() { + return familyWithQualifier_; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List<? extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> + getFamilyWithQualifierOrBuilderList() { + return familyWithQualifier_; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public int getFamilyWithQualifierCount() { + return familyWithQualifier_.size(); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(int index) { + return familyWithQualifier_.get(index); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder( + int index) { + return familyWithQualifier_.get(index); + } + + // optional .HSearchBytesFilterMessage filter = 2; + public static final int FILTER_FIELD_NUMBER = 2; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage filter_; + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public boolean hasFilter() { + return ((bitField0_
& 0x00000001) == 0x00000001); + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage getFilter() { + return filter_; + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder getFilterOrBuilder() { + return filter_; + } + + private void initFields() { + familyWithQualifier_ = java.util.Collections.emptyList(); + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getFamilyWithQualifierCount(); i++) { + if (!getFamilyWithQualifier(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < familyWithQualifier_.size(); i++) { + output.writeMessage(1, familyWithQualifier_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(2, filter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < familyWithQualifier_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, familyWithQualifier_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, filter_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest) obj; + + boolean result = true; + result = result && getFamilyWithQualifierList() + .equals(other.getFamilyWithQualifierList()); + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFamilyWithQualifierCount() > 0) { + hash = (37 * hash) + FAMILYWITHQUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + 
getFamilyWithQualifierList().hashCode(); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code BytesRowRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_BytesRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_BytesRowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFamilyWithQualifierFieldBuilder(); + getFilterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + familyWithQualifierBuilder_.clear(); + } + if (filterBuilder_ == null) { + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.internal_static_BytesRowRequest_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest(this); + int from_bitField0_ = bitField0_; + int
to_bitField0_ = 0; + if (familyWithQualifierBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = java.util.Collections.unmodifiableList(familyWithQualifier_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.familyWithQualifier_ = familyWithQualifier_; + } else { + result.familyWithQualifier_ = familyWithQualifierBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.getDefaultInstance()) return this; + if (familyWithQualifierBuilder_ == null) { + if (!other.familyWithQualifier_.isEmpty()) { + if (familyWithQualifier_.isEmpty()) { + familyWithQualifier_ = other.familyWithQualifier_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.addAll(other.familyWithQualifier_); + } + onChanged(); + } + } else { + if (!other.familyWithQualifier_.isEmpty()) { + if (familyWithQualifierBuilder_.isEmpty()) { + familyWithQualifierBuilder_.dispose(); + familyWithQualifierBuilder_ = null; + familyWithQualifier_ = other.familyWithQualifier_; + bitField0_ = (bitField0_ & ~0x00000001); + familyWithQualifierBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getFamilyWithQualifierFieldBuilder() : null; + } else { + familyWithQualifierBuilder_.addAllMessages(other.familyWithQualifier_); + } + } + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getFamilyWithQualifierCount(); i++) { + if (!getFamilyWithQualifier(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .Column familyWithQualifier = 1; + private java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column> familyWithQualifier_ = + java.util.Collections.emptyList(); + private void ensureFamilyWithQualifierIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + familyWithQualifier_ = new java.util.ArrayList<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column>(familyWithQualifier_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> familyWithQualifierBuilder_; + + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column> getFamilyWithQualifierList() { + if (familyWithQualifierBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyWithQualifier_); + } else { + return familyWithQualifierBuilder_.getMessageList(); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public int getFamilyWithQualifierCount() { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_.size(); + } else { + return familyWithQualifierBuilder_.getCount(); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column getFamilyWithQualifier(int index) { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_.get(index); + } else { + return familyWithQualifierBuilder_.getMessage(index); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder setFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.set(index, value); + onChanged(); + } else { + familyWithQualifierBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder setFamilyWithQualifier( + int index,
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.set(index, builderForValue.build()); + onChanged(); + } else { + familyWithQualifierBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(value); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column value) { + if (familyWithQualifierBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(index, value); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(builderForValue.build()); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addFamilyWithQualifier( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder builderForValue) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.add(index, builderForValue.build()); + onChanged(); + } else { + familyWithQualifierBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder addAllFamilyWithQualifier( + java.lang.Iterable<? extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column> values) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + super.addAll(values, familyWithQualifier_); + onChanged(); + } else { + familyWithQualifierBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder clearFamilyWithQualifier() { + if (familyWithQualifierBuilder_ == null) { + familyWithQualifier_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + familyWithQualifierBuilder_.clear(); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public Builder removeFamilyWithQualifier(int index) { + if (familyWithQualifierBuilder_ == null) { + ensureFamilyWithQualifierIsMutable(); + familyWithQualifier_.remove(index); + onChanged(); + } else { + familyWithQualifierBuilder_.remove(index); + } + return this; + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder
getFamilyWithQualifierBuilder( + int index) { + return getFamilyWithQualifierFieldBuilder().getBuilder(index); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder getFamilyWithQualifierOrBuilder( + int index) { + if (familyWithQualifierBuilder_ == null) { + return familyWithQualifier_.get(index); + } else { + return familyWithQualifierBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List<? extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> + getFamilyWithQualifierOrBuilderList() { + if (familyWithQualifierBuilder_ != null) { + return familyWithQualifierBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyWithQualifier_); + } + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder addFamilyWithQualifierBuilder() { + return getFamilyWithQualifierFieldBuilder().addBuilder( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance()); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder addFamilyWithQualifierBuilder( + int index) { + return getFamilyWithQualifierFieldBuilder().addBuilder( + index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.getDefaultInstance()); + } + /** + * repeated .Column familyWithQualifier = 1; + */ + public java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder> + getFamilyWithQualifierBuilderList() { + return getFamilyWithQualifierFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder> + getFamilyWithQualifierFieldBuilder() { + if (familyWithQualifierBuilder_ == null) { + familyWithQualifierBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.ColumnOrBuilder>( + familyWithQualifier_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + familyWithQualifier_ = null; + } + return familyWithQualifierBuilder_; + } + + // optional .HSearchBytesFilterMessage filter = 2; + private com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder> filterBuilder_; + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public boolean hasFilter() {
return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public Builder setFilter(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public Builder setFilter( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public Builder mergeFilter(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + filter_ != com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance()) { + filter_ = + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder getFilterBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + /** + * optional .HSearchBytesFilterMessage filter = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:BytesRowRequest) + } + + static { + defaultInstance = new BytesRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BytesRowRequest) + } + + /** + * Protobuf service {@code HSearchGenericCoprocessorService} + */ + public static abstract class HSearchGenericCoprocessorService + implements com.google.protobuf.Service { + protected HSearchGenericCoprocessorService() {} + + public interface Interface { + /** + * rpc getRows(.RowRequest) returns (.RowResponse); + */ + public abstract void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new HSearchGenericCoprocessorService() { + @java.lang.Override + public void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done) { + impl.getRows(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRows(controller, (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } +
switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc getRows(.RowRequest) returns (.RowResponse); + */ + public abstract void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRows(controller, (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest)request, + com.google.protobuf.RpcUtil.<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse>specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchGenericCoprocessorService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done) { + channel.callMethod( +
getDescriptor().getMethods().get(0), + controller, + request, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.class, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest request) + throws com.google.protobuf.ServiceException { + return (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:HSearchGenericCoprocessorService) + } + + /** + * Protobuf service {@code HSearchMultiGetCoprocessorProxyService} + */ + public static abstract class HSearchMultiGetCoprocessorProxyService + implements com.google.protobuf.Service { + protected HSearchMultiGetCoprocessorProxyService() {} + + public interface Interface { + /** + * rpc getRows(.MultiRowRequest) returns (.RowResponse); + */ + public abstract void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new HSearchMultiGetCoprocessorProxyService() { + @java.lang.Override + public void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done) { + impl.getRows(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService()
!= getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRows(controller, (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc getRows(.MultiRowRequest) returns (.RowResponse); + */ + public abstract void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.getDescriptor().getServices().get(1); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRows(controller, (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest)request, + com.google.protobuf.RpcUtil.<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse>specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final
com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchMultiGetCoprocessorProxyService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.class, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest request) + throws com.google.protobuf.ServiceException { + return (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:HSearchMultiGetCoprocessorProxyService) + } + + /** + * Protobuf service {@code HSearchBytesCoprocessorProxyService} + */ + public static abstract class HSearchBytesCoprocessorProxyService + implements com.google.protobuf.Service { + protected HSearchBytesCoprocessorProxyService() {} + + public interface Interface { + /** + * rpc
getRows(.BytesRowRequest) returns (.RowResponse); + */ + public abstract void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new HSearchBytesCoprocessorProxyService() { + @java.lang.Override + public void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done) { + impl.getRows(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRows(controller, (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc getRows(.BytesRowRequest) returns (.RowResponse); + */ + public abstract void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.getDescriptor().getServices().get(2); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void
callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRows(controller, (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest)request, + com.google.protobuf.RpcUtil.<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse>specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchBytesCoprocessorProxyService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest request, + com.google.protobuf.RpcCallback<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.class, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest request) +
throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse getRows( + com.google.protobuf.RpcController controller, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest request) + throws com.google.protobuf.ServiceException { + return (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:HSearchBytesCoprocessorProxyService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Column_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Column_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RowResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BytesRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BytesRowRequest_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\030HSearchCoprocessor.proto\032\023HSearchFilte" + + "r.proto\"+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tqua" + + "lifier\030\002 \002(\014\"`\n\nRowRequest\022$\n\023familyWith" + + "Qualifier\030\001 \003(\0132\007.Column\022,\n\006filter\030\002 \001(\013" + + "2\034.HSearchGenericFilterMessage\"\035\n\013RowRes" + + "ponse\022\016\n\006result\030\001 \002(\014\"r\n\017MultiRowRequest" + + "\022$\n\023familyWithQualifier\030\001 \002(\0132\007.Column\022+" + + "\n\006filter\030\002 \001(\0132\033.HSearchScalarFilterMess" + + "age\022\014\n\004rows\030\003 \003(\014\"c\n\017BytesRowRequest\022$\n\023" + + "familyWithQualifier\030\001 \003(\0132\007.Column\022*\n\006fi", + "lter\030\002 \001(\0132\032.HSearchBytesFilterMessage2H" + + "\n HSearchGenericCoprocessorService\022$\n\007ge" + + "tRows\022\013.RowRequest\032\014.RowResponse2S\n&HSea" + + "rchMultiGetCoprocessorProxyService\022)\n\007ge" + + "tRows\022\020.MultiRowRequest\032\014.RowResponse2P\n" + + "#HSearchBytesCoprocessorProxyService\022)\n\007" + + "getRows\022\020.BytesRowRequest\032\014.RowResponseB" + + 
"\\\n8com.bizosys.hsearch.treetable.storage" + + ".protobuf.generatedB\030HSearchCoprocessorP" + + "rotosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_Column_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Column_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Column_descriptor, + new java.lang.String[] { "Family", "Qualifier", }); + internal_static_RowRequest_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_RowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RowRequest_descriptor, + new java.lang.String[] { "FamilyWithQualifier", "Filter", }); + internal_static_RowResponse_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_RowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RowResponse_descriptor, + new java.lang.String[] { "Result", }); + internal_static_MultiRowRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_MultiRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiRowRequest_descriptor, + new java.lang.String[] { "FamilyWithQualifier", "Filter", "Rows", }); + internal_static_BytesRowRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_BytesRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BytesRowRequest_descriptor, + new java.lang.String[] { "FamilyWithQualifier", "Filter", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/src/compatibility/hadooplib_96/protobuf/HSearchFilterProtos.java b/src/compatibility/hadooplib_96/protobuf/HSearchFilterProtos.java new file mode 100644 index 0000000..f75a402 --- /dev/null +++ b/src/compatibility/hadooplib_96/protobuf/HSearchFilterProtos.java @@ -0,0 +1,3772 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: HSearchFilter.proto + +package com.bizosys.hsearch.treetable.storage.protobuf.generated; + +public final class HSearchFilterProtos { + private HSearchFilterProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface HSearchGenericFilterMessageOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string filterClassName = 1; + /** + * required string filterClassName = 1; + */ + boolean hasFilterClassName(); + /** + * required string filterClassName = 1; + */ + java.lang.String getFilterClassName(); + /** + * required string filterClassName = 1; + */ + com.google.protobuf.ByteString + getFilterClassNameBytes(); + + // required string inputMapperInstructions = 2; + /** + * required string inputMapperInstructions = 2; + */ + boolean hasInputMapperInstructions(); + /** + * required string inputMapperInstructions = 2; + */ + java.lang.String getInputMapperInstructions(); + /** + * required string inputMapperInstructions = 2; + */ + com.google.protobuf.ByteString + getInputMapperInstructionsBytes(); + + // required string multiQuery = 3; + /** + * required string multiQuery = 3; + */ + boolean hasMultiQuery(); + /** + * required string multiQuery = 3; + */ + java.lang.String getMultiQuery(); + /** + * required string multiQuery = 3; + */ + com.google.protobuf.ByteString + getMultiQueryBytes(); + + // optional bytes inputRowsToIncludeB = 4; + /** + * optional bytes inputRowsToIncludeB = 4; + */ + boolean hasInputRowsToIncludeB(); + /** + * optional bytes inputRowsToIncludeB = 4; + */ + com.google.protobuf.ByteString getInputRowsToIncludeB(); + + // repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair> + getQueryFiltersList(); + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair getQueryFilters(int index); + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + int getQueryFiltersCount(); + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + java.util.List<? extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder> + getQueryFiltersOrBuilderList(); + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder getQueryFiltersOrBuilder( + int index); + } + /** + * Protobuf type {@code HSearchGenericFilterMessage} + */ + public static final class HSearchGenericFilterMessage extends + com.google.protobuf.GeneratedMessage + implements HSearchGenericFilterMessageOrBuilder { + // Use HSearchGenericFilterMessage.newBuilder() to construct.
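+// Editor's sketch (not generated code): building and round-tripping the filter message
+// defined below. The literal values are placeholders, and addQueryFilters(...) is the
+// standard repeated-field accessor protoc emits alongside the setQueryFilters(...) shown
+// further down; every other method used here appears verbatim in this file.
+/*
+ * HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair pair =
+ *     HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.newBuilder()
+ *         .setKey("query-1")                       // placeholder query id
+ *         .setValue("*|*|*|example")               // placeholder filter expression
+ *         .build();
+ * HSearchFilterProtos.HSearchGenericFilterMessage msg =
+ *     HSearchFilterProtos.HSearchGenericFilterMessage.newBuilder()
+ *         .setFilterClassName("com.example.MyHSearchFilter")  // placeholder class name
+ *         .setInputMapperInstructions("")
+ *         .setMultiQuery("query-1")
+ *         .addQueryFilters(pair)
+ *         .build();                                // throws if a required field is unset
+ * byte[] wire = msg.toByteArray();
+ * HSearchFilterProtos.HSearchGenericFilterMessage parsed =
+ *     HSearchFilterProtos.HSearchGenericFilterMessage.parseFrom(wire);
+ */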
+ private HSearchGenericFilterMessage(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private HSearchGenericFilterMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final HSearchGenericFilterMessage defaultInstance; + public static HSearchGenericFilterMessage getDefaultInstance() { + return defaultInstance; + } + + public HSearchGenericFilterMessage getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private HSearchGenericFilterMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + filterClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + inputMapperInstructions_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + multiQuery_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + inputRowsToIncludeB_ = input.readBytes(); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + queryFilters_ = new java.util.ArrayList<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair>(); + mutable_bitField0_ |= 0x00000010; + } + queryFilters_.add(input.readMessage(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + queryFilters_ = java.util.Collections.unmodifiableList(queryFilters_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder.class); + } + + public static com.google.protobuf.Parser<HSearchGenericFilterMessage> PARSER = + new com.google.protobuf.AbstractParser<HSearchGenericFilterMessage>() { + public HSearchGenericFilterMessage parsePartialFrom( +
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new HSearchGenericFilterMessage(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<HSearchGenericFilterMessage> getParserForType() { + return PARSER; + } + + public interface QueryFiltersPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string key = 1; + /** + * required string key = 1; + */ + boolean hasKey(); + /** + * required string key = 1; + */ + java.lang.String getKey(); + /** + * required string key = 1; + */ + com.google.protobuf.ByteString + getKeyBytes(); + + // required string value = 2; + /** + * required string value = 2; + */ + boolean hasValue(); + /** + * required string value = 2; + */ + java.lang.String getValue(); + /** + * required string value = 2; + */ + com.google.protobuf.ByteString + getValueBytes(); + } + /** + * Protobuf type {@code HSearchGenericFilterMessage.QueryFiltersPair} + */ + public static final class QueryFiltersPair extends + com.google.protobuf.GeneratedMessage + implements QueryFiltersPairOrBuilder { + // Use QueryFiltersPair.newBuilder() to construct. + private QueryFiltersPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private QueryFiltersPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final QueryFiltersPair defaultInstance; + public static QueryFiltersPair getDefaultInstance() { + return defaultInstance; + } + + public QueryFiltersPair getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private QueryFiltersPair( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + key_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_QueryFiltersPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_QueryFiltersPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder.class); + } + + public static com.google.protobuf.Parser<QueryFiltersPair> PARSER = + new com.google.protobuf.AbstractParser<QueryFiltersPair>() { + public QueryFiltersPair parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new QueryFiltersPair(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<QueryFiltersPair> getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string key = 1; + public static final int KEY_FIELD_NUMBER = 1; + private java.lang.Object key_; + /** + * required string key = 1; + */ + public boolean hasKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string key = 1; + */ + public java.lang.String getKey() { + java.lang.Object ref = key_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + key_ = s; + } + return s; + } + } + /** + * required string key = 1; + */ + public com.google.protobuf.ByteString + getKeyBytes() { + java.lang.Object ref = key_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + key_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private java.lang.Object value_; + /** + * required string value = 2; + */ + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string value = 2; + */ + public java.lang.String getValue() { + java.lang.Object ref = value_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + value_ = s; + } + return s; + } + } + /** + * required string value = 2; + */ + public com.google.protobuf.ByteString + getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + key_ = ""; + value_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasKey()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ &
0x00000001) == 0x00000001)) { + output.writeBytes(1, getKeyBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getValueBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getKeyBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getValueBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair) obj; + + boolean result = true; + result = result && (hasKey() == other.hasKey()); + if (hasKey()) { + result = result && getKey() + .equals(other.getKey()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasKey()) { + hash = (37 * hash) + KEY_FIELD_NUMBER; + hash = (53 * hash) + getKey().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code HSearchGenericFilterMessage.QueryFiltersPair} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_QueryFiltersPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_QueryFiltersPair_fieldAccessorTable + .ensureFieldAccessorsInitialized( +
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + key_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_QueryFiltersPair_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.key_ = key_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair other) { + if (other == 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.getDefaultInstance()) return this; + if (other.hasKey()) { + bitField0_ |= 0x00000001; + key_ = other.key_; + onChanged(); + } + if (other.hasValue()) { + bitField0_ |= 0x00000002; + value_ = other.value_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasKey()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string key = 1; + private java.lang.Object key_ = ""; + /** + * required string key = 1; + */ + public boolean hasKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string key = 1; + */ + public java.lang.String getKey() { + java.lang.Object ref = key_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + key_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string key = 1; + */ + public com.google.protobuf.ByteString + getKeyBytes() { + java.lang.Object ref = key_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + key_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string key = 1; + */ + public Builder setKey( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + key_ = value; + onChanged(); + return this; + } + /** + * required string key = 1; + */ + public Builder clearKey() { + bitField0_ = (bitField0_ & ~0x00000001); + key_ = getDefaultInstance().getKey(); + onChanged(); + return this; + } + /** + * required string key = 1; + */ + public Builder setKeyBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + key_ = value; + onChanged(); + return this; + } + + // required string value = 2; + private java.lang.Object value_ = ""; + /** + * required string value = 2; + */ + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string value = 2; + */ + public java.lang.String getValue() { + java.lang.Object ref = value_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + value_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string value = 2; + */ + public com.google.protobuf.ByteString + getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + 
com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string value = 2; + */ + public Builder setValue( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + /** + * required string value = 2; + */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + /** + * required string value = 2; + */ + public Builder setValueBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:HSearchGenericFilterMessage.QueryFiltersPair) + } + + static { + defaultInstance = new QueryFiltersPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:HSearchGenericFilterMessage.QueryFiltersPair) + } + + private int bitField0_; + // required string filterClassName = 1; + public static final int FILTERCLASSNAME_FIELD_NUMBER = 1; + private java.lang.Object filterClassName_; + /** + * required string filterClassName = 1; + */ + public boolean hasFilterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string filterClassName = 1; + */ + public java.lang.String getFilterClassName() { + java.lang.Object ref = filterClassName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + filterClassName_ = s; + } + return s; + } + } + /** + * required string filterClassName = 1; + */ + public com.google.protobuf.ByteString + getFilterClassNameBytes() { + java.lang.Object ref = filterClassName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + filterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string inputMapperInstructions = 2; + public static final int INPUTMAPPERINSTRUCTIONS_FIELD_NUMBER = 2; + private java.lang.Object inputMapperInstructions_; + /** + * required string inputMapperInstructions = 2; + */ + public boolean hasInputMapperInstructions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string inputMapperInstructions = 2; + */ + public java.lang.String getInputMapperInstructions() { + java.lang.Object ref = inputMapperInstructions_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + inputMapperInstructions_ = s; + } + return s; + } + } + /** + * required string inputMapperInstructions = 2; + */ + public com.google.protobuf.ByteString + getInputMapperInstructionsBytes() { + java.lang.Object ref = inputMapperInstructions_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + inputMapperInstructions_ = b; + return b; + } else { + return 
(com.google.protobuf.ByteString) ref; + } + } + + // required string multiQuery = 3; + public static final int MULTIQUERY_FIELD_NUMBER = 3; + private java.lang.Object multiQuery_; + /** + * required string multiQuery = 3; + */ + public boolean hasMultiQuery() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required string multiQuery = 3; + */ + public java.lang.String getMultiQuery() { + java.lang.Object ref = multiQuery_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + multiQuery_ = s; + } + return s; + } + } + /** + * required string multiQuery = 3; + */ + public com.google.protobuf.ByteString + getMultiQueryBytes() { + java.lang.Object ref = multiQuery_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + multiQuery_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes inputRowsToIncludeB = 4; + public static final int INPUTROWSTOINCLUDEB_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString inputRowsToIncludeB_; + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public boolean hasInputRowsToIncludeB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public com.google.protobuf.ByteString getInputRowsToIncludeB() { + return inputRowsToIncludeB_; + } + + // repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + public static final int QUERYFILTERS_FIELD_NUMBER = 5; + private java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair> queryFilters_; + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public java.util.List<com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair> getQueryFiltersList() { + return queryFilters_; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public java.util.List<? extends com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder> + getQueryFiltersOrBuilderList() { + return queryFilters_; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public int getQueryFiltersCount() { + return queryFilters_.size(); + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair getQueryFilters(int index) { + return queryFilters_.get(index); + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder getQueryFiltersOrBuilder( + int index) { + return queryFilters_.get(index); + } + + private void initFields() { + filterClassName_ = ""; + inputMapperInstructions_ = ""; + multiQuery_ = ""; + inputRowsToIncludeB_ = com.google.protobuf.ByteString.EMPTY; + queryFilters_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFilterClassName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasInputMapperInstructions()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMultiQuery()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0;
i < getQueryFiltersCount(); i++) { + if (!getQueryFilters(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getFilterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getInputMapperInstructionsBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMultiQueryBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, inputRowsToIncludeB_); + } + for (int i = 0; i < queryFilters_.size(); i++) { + output.writeMessage(5, queryFilters_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getFilterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getInputMapperInstructionsBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMultiQueryBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, inputRowsToIncludeB_); + } + for (int i = 0; i < queryFilters_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, queryFilters_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage) obj; + + boolean result = true; + result = result && (hasFilterClassName() == other.hasFilterClassName()); + if (hasFilterClassName()) { + result = result && getFilterClassName() + .equals(other.getFilterClassName()); + } + result = result && (hasInputMapperInstructions() == other.hasInputMapperInstructions()); + if (hasInputMapperInstructions()) { + result = result && getInputMapperInstructions() + .equals(other.getInputMapperInstructions()); + } + result = result && (hasMultiQuery() == other.hasMultiQuery()); + if (hasMultiQuery()) { + result = result && getMultiQuery() + .equals(other.getMultiQuery()); + } + result = result && (hasInputRowsToIncludeB() == other.hasInputRowsToIncludeB()); + if (hasInputRowsToIncludeB()) { + result = result && getInputRowsToIncludeB() + .equals(other.getInputRowsToIncludeB()); + } + result = result && getQueryFiltersList() + .equals(other.getQueryFiltersList()); + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFilterClassName()) { + hash = (37 * hash) + FILTERCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getFilterClassName().hashCode(); + } + if (hasInputMapperInstructions()) { + hash = (37 * hash) + INPUTMAPPERINSTRUCTIONS_FIELD_NUMBER; + hash = (53 * hash) + getInputMapperInstructions().hashCode(); + } + if (hasMultiQuery()) { + hash = (37 * hash) + MULTIQUERY_FIELD_NUMBER; + hash = (53 * hash) + getMultiQuery().hashCode(); + } + if (hasInputRowsToIncludeB()) { + hash = (37 * hash) + INPUTROWSTOINCLUDEB_FIELD_NUMBER; + hash = (53 * hash) + getInputRowsToIncludeB().hashCode(); + } + if (getQueryFiltersCount() > 0) { + hash = (37 * hash) + QUERYFILTERS_FIELD_NUMBER; + hash = (53 * hash) + getQueryFiltersList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code HSearchGenericFilterMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getQueryFiltersFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + filterClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + inputMapperInstructions_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + multiQuery_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + inputRowsToIncludeB_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + if (queryFiltersBuilder_ == null) { + queryFilters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + queryFiltersBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchGenericFilterMessage_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.filterClassName_ = filterClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.inputMapperInstructions_ = inputMapperInstructions_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.multiQuery_ = multiQuery_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.inputRowsToIncludeB_ = inputRowsToIncludeB_; + if (queryFiltersBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + queryFilters_ = java.util.Collections.unmodifiableList(queryFilters_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.queryFilters_ = queryFilters_; + } else { + result.queryFilters_ = queryFiltersBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.getDefaultInstance()) return this; + if (other.hasFilterClassName()) { + bitField0_ |= 0x00000001; + filterClassName_ = other.filterClassName_; + onChanged(); + } + if (other.hasInputMapperInstructions()) { + bitField0_ |= 0x00000002; + inputMapperInstructions_ = other.inputMapperInstructions_; + onChanged(); + } + if (other.hasMultiQuery()) { + bitField0_ |= 0x00000004; + multiQuery_ = other.multiQuery_; + onChanged(); + } + if (other.hasInputRowsToIncludeB()) { + setInputRowsToIncludeB(other.getInputRowsToIncludeB()); + } + if (queryFiltersBuilder_ == null) { + if (!other.queryFilters_.isEmpty()) { + if (queryFilters_.isEmpty()) { + queryFilters_ = other.queryFilters_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { 
+ ensureQueryFiltersIsMutable(); + queryFilters_.addAll(other.queryFilters_); + } + onChanged(); + } + } else { + if (!other.queryFilters_.isEmpty()) { + if (queryFiltersBuilder_.isEmpty()) { + queryFiltersBuilder_.dispose(); + queryFiltersBuilder_ = null; + queryFilters_ = other.queryFilters_; + bitField0_ = (bitField0_ & ~0x00000010); + queryFiltersBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getQueryFiltersFieldBuilder() : null; + } else { + queryFiltersBuilder_.addAllMessages(other.queryFilters_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFilterClassName()) { + + return false; + } + if (!hasInputMapperInstructions()) { + + return false; + } + if (!hasMultiQuery()) { + + return false; + } + for (int i = 0; i < getQueryFiltersCount(); i++) { + if (!getQueryFilters(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string filterClassName = 1; + private java.lang.Object filterClassName_ = ""; + /** + * required string filterClassName = 1; + */ + public boolean hasFilterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string filterClassName = 1; + */ + public java.lang.String getFilterClassName() { + java.lang.Object ref = filterClassName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + filterClassName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string filterClassName = 1; + */ + public com.google.protobuf.ByteString + getFilterClassNameBytes() { + java.lang.Object ref = filterClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + filterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string filterClassName = 1; + */ + public Builder setFilterClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + filterClassName_ = value; + onChanged(); + return this; + } + /** + * required string filterClassName = 1; + */ + public Builder clearFilterClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + filterClassName_ = getDefaultInstance().getFilterClassName(); + onChanged(); + return this; + } + /** + * required string filterClassName = 1; + */ + public Builder setFilterClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + filterClassName_ = value; + onChanged(); + return this; + } + + // required string inputMapperInstructions = 
2; + private java.lang.Object inputMapperInstructions_ = ""; + /** + * required string inputMapperInstructions = 2; + */ + public boolean hasInputMapperInstructions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string inputMapperInstructions = 2; + */ + public java.lang.String getInputMapperInstructions() { + java.lang.Object ref = inputMapperInstructions_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + inputMapperInstructions_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string inputMapperInstructions = 2; + */ + public com.google.protobuf.ByteString + getInputMapperInstructionsBytes() { + java.lang.Object ref = inputMapperInstructions_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + inputMapperInstructions_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string inputMapperInstructions = 2; + */ + public Builder setInputMapperInstructions( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + inputMapperInstructions_ = value; + onChanged(); + return this; + } + /** + * required string inputMapperInstructions = 2; + */ + public Builder clearInputMapperInstructions() { + bitField0_ = (bitField0_ & ~0x00000002); + inputMapperInstructions_ = getDefaultInstance().getInputMapperInstructions(); + onChanged(); + return this; + } + /** + * required string inputMapperInstructions = 2; + */ + public Builder setInputMapperInstructionsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + inputMapperInstructions_ = value; + onChanged(); + return this; + } + + // required string multiQuery = 3; + private java.lang.Object multiQuery_ = ""; + /** + * required string multiQuery = 3; + */ + public boolean hasMultiQuery() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required string multiQuery = 3; + */ + public java.lang.String getMultiQuery() { + java.lang.Object ref = multiQuery_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + multiQuery_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string multiQuery = 3; + */ + public com.google.protobuf.ByteString + getMultiQueryBytes() { + java.lang.Object ref = multiQuery_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + multiQuery_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string multiQuery = 3; + */ + public Builder setMultiQuery( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + multiQuery_ = value; + onChanged(); + return this; + } + /** + * required string multiQuery = 3; + */ + public Builder clearMultiQuery() { + bitField0_ = (bitField0_ & ~0x00000004); + multiQuery_ = getDefaultInstance().getMultiQuery(); + onChanged(); + return this; + } + /** + * required string multiQuery = 3; + */ + public Builder setMultiQueryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 
0x00000004; + multiQuery_ = value; + onChanged(); + return this; + } + + // optional bytes inputRowsToIncludeB = 4; + private com.google.protobuf.ByteString inputRowsToIncludeB_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public boolean hasInputRowsToIncludeB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public com.google.protobuf.ByteString getInputRowsToIncludeB() { + return inputRowsToIncludeB_; + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public Builder setInputRowsToIncludeB(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + inputRowsToIncludeB_ = value; + onChanged(); + return this; + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public Builder clearInputRowsToIncludeB() { + bitField0_ = (bitField0_ & ~0x00000008); + inputRowsToIncludeB_ = getDefaultInstance().getInputRowsToIncludeB(); + onChanged(); + return this; + } + + // repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + private java.util.List queryFilters_ = + java.util.Collections.emptyList(); + private void ensureQueryFiltersIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + queryFilters_ = new java.util.ArrayList(queryFilters_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder> queryFiltersBuilder_; + + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public java.util.List getQueryFiltersList() { + if (queryFiltersBuilder_ == null) { + return java.util.Collections.unmodifiableList(queryFilters_); + } else { + return queryFiltersBuilder_.getMessageList(); + } + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public int getQueryFiltersCount() { + if (queryFiltersBuilder_ == null) { + return queryFilters_.size(); + } else { + return queryFiltersBuilder_.getCount(); + } + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair getQueryFilters(int index) { + if (queryFiltersBuilder_ == null) { + return queryFilters_.get(index); + } else { + return queryFiltersBuilder_.getMessage(index); + } + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder setQueryFilters( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair value) { + if (queryFiltersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQueryFiltersIsMutable(); + queryFilters_.set(index, value); + onChanged(); + } else { + queryFiltersBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder setQueryFilters( + int index, 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder builderForValue) { + if (queryFiltersBuilder_ == null) { + ensureQueryFiltersIsMutable(); + queryFilters_.set(index, builderForValue.build()); + onChanged(); + } else { + queryFiltersBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder addQueryFilters(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair value) { + if (queryFiltersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQueryFiltersIsMutable(); + queryFilters_.add(value); + onChanged(); + } else { + queryFiltersBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder addQueryFilters( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair value) { + if (queryFiltersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQueryFiltersIsMutable(); + queryFilters_.add(index, value); + onChanged(); + } else { + queryFiltersBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder addQueryFilters( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder builderForValue) { + if (queryFiltersBuilder_ == null) { + ensureQueryFiltersIsMutable(); + queryFilters_.add(builderForValue.build()); + onChanged(); + } else { + queryFiltersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder addQueryFilters( + int index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder builderForValue) { + if (queryFiltersBuilder_ == null) { + ensureQueryFiltersIsMutable(); + queryFilters_.add(index, builderForValue.build()); + onChanged(); + } else { + queryFiltersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder addAllQueryFilters( + java.lang.Iterable values) { + if (queryFiltersBuilder_ == null) { + ensureQueryFiltersIsMutable(); + super.addAll(values, queryFilters_); + onChanged(); + } else { + queryFiltersBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder clearQueryFilters() { + if (queryFiltersBuilder_ == null) { + queryFilters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + queryFiltersBuilder_.clear(); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public Builder removeQueryFilters(int index) { + if (queryFiltersBuilder_ == null) { + ensureQueryFiltersIsMutable(); + queryFilters_.remove(index); + onChanged(); + } else { + queryFiltersBuilder_.remove(index); + } + return this; + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair 
queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder getQueryFiltersBuilder( + int index) { + return getQueryFiltersFieldBuilder().getBuilder(index); + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder getQueryFiltersOrBuilder( + int index) { + if (queryFiltersBuilder_ == null) { + return queryFilters_.get(index); } else { + return queryFiltersBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public java.util.List + getQueryFiltersOrBuilderList() { + if (queryFiltersBuilder_ != null) { + return queryFiltersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(queryFilters_); + } + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder addQueryFiltersBuilder() { + return getQueryFiltersFieldBuilder().addBuilder( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.getDefaultInstance()); + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder addQueryFiltersBuilder( + int index) { + return getQueryFiltersFieldBuilder().addBuilder( + index, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.getDefaultInstance()); + } + /** + * repeated .HSearchGenericFilterMessage.QueryFiltersPair queryFilters = 5; + */ + public java.util.List + getQueryFiltersBuilderList() { + return getQueryFiltersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder> + getQueryFiltersFieldBuilder() { + if (queryFiltersBuilder_ == null) { + queryFiltersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair.Builder, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPairOrBuilder>( + queryFilters_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + queryFilters_ = null; + } + return queryFiltersBuilder_; + } + + // @@protoc_insertion_point(builder_scope:HSearchGenericFilterMessage) + } + + static { + defaultInstance = new HSearchGenericFilterMessage(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:HSearchGenericFilterMessage) + } + + public 
interface HSearchBytesFilterMessageOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string filterClassName = 1; + /** + * required string filterClassName = 1; + */ + boolean hasFilterClassName(); + /** + * required string filterClassName = 1; + */ + java.lang.String getFilterClassName(); + /** + * required string filterClassName = 1; + */ + com.google.protobuf.ByteString + getFilterClassNameBytes(); + + // optional bytes state = 2; + /** + * optional bytes state = 2; + */ + boolean hasState(); + /** + * optional bytes state = 2; + */ + com.google.protobuf.ByteString getState(); + } + /** + * Protobuf type {@code HSearchBytesFilterMessage} + */ + public static final class HSearchBytesFilterMessage extends + com.google.protobuf.GeneratedMessage + implements HSearchBytesFilterMessageOrBuilder { + // Use HSearchBytesFilterMessage.newBuilder() to construct. + private HSearchBytesFilterMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private HSearchBytesFilterMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final HSearchBytesFilterMessage defaultInstance; + public static HSearchBytesFilterMessage getDefaultInstance() { + return defaultInstance; + } + + public HSearchBytesFilterMessage getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private HSearchBytesFilterMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + filterClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + state_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchBytesFilterMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchBytesFilterMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder.class); + } 
+
+    public static com.google.protobuf.Parser<HSearchBytesFilterMessage> PARSER =
+        new com.google.protobuf.AbstractParser<HSearchBytesFilterMessage>() {
+      public HSearchBytesFilterMessage parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new HSearchBytesFilterMessage(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<HSearchBytesFilterMessage> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string filterClassName = 1;
+    public static final int FILTERCLASSNAME_FIELD_NUMBER = 1;
+    private java.lang.Object filterClassName_;
+    /**
+     * required string filterClassName = 1;
+     */
+    public boolean hasFilterClassName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * required string filterClassName = 1;
+     */
+    public java.lang.String getFilterClassName() {
+      java.lang.Object ref = filterClassName_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          filterClassName_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * required string filterClassName = 1;
+     */
+    public com.google.protobuf.ByteString
+        getFilterClassNameBytes() {
+      java.lang.Object ref = filterClassName_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        filterClassName_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional bytes state = 2;
+    public static final int STATE_FIELD_NUMBER = 2;
+    private com.google.protobuf.ByteString state_;
+    /**
+     * optional bytes state = 2;
+     */
+    public boolean hasState() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * optional bytes state = 2;
+     */
+    public com.google.protobuf.ByteString getState() {
+      return state_;
+    }
+
+    private void initFields() {
+      filterClassName_ = "";
+      state_ = com.google.protobuf.ByteString.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasFilterClassName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getFilterClassNameBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, state_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+            .computeBytesSize(1, getFilterClassNameBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+            .computeBytesSize(2, state_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage)) {
+        return super.equals(obj);
+      }
+      com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage) obj;
+
+      boolean result = true;
+      result = result && (hasFilterClassName() == other.hasFilterClassName());
+      if (hasFilterClassName()) {
+        result = result && getFilterClassName()
+            .equals(other.getFilterClassName());
+      }
+      result = result && (hasState() == other.hasState());
+      if (hasState()) {
+        result = result && getState()
+            .equals(other.getState());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasFilterClassName()) {
+        hash = (37 * hash) + FILTERCLASSNAME_FIELD_NUMBER;
+        hash = (53 * hash) + getFilterClassName().hashCode();
+      }
+      if (hasState()) {
+        hash = (37 * hash) + STATE_FIELD_NUMBER;
+        hash = (53 * hash) + getState().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code HSearchBytesFilterMessage}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessageOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchBytesFilterMessage_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchBytesFilterMessage_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.Builder.class);
+      }
+
+      // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        filterClassName_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        state_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchBytesFilterMessage_descriptor;
+ } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.filterClassName_ = filterClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.state_ = state_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage.getDefaultInstance()) return this; + if (other.hasFilterClassName()) { + bitField0_ |= 0x00000001; + filterClassName_ = other.filterClassName_; + onChanged(); + } + if (other.hasState()) { + setState(other.getState()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFilterClassName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string filterClassName = 1; + private java.lang.Object filterClassName_ = ""; + /** + * required string filterClassName = 1; + */ + public boolean hasFilterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string filterClassName = 1; + */ + public java.lang.String getFilterClassName() { + java.lang.Object ref = 
filterClassName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + filterClassName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string filterClassName = 1; + */ + public com.google.protobuf.ByteString + getFilterClassNameBytes() { + java.lang.Object ref = filterClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + filterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string filterClassName = 1; + */ + public Builder setFilterClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + filterClassName_ = value; + onChanged(); + return this; + } + /** + * required string filterClassName = 1; + */ + public Builder clearFilterClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + filterClassName_ = getDefaultInstance().getFilterClassName(); + onChanged(); + return this; + } + /** + * required string filterClassName = 1; + */ + public Builder setFilterClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + filterClassName_ = value; + onChanged(); + return this; + } + + // optional bytes state = 2; + private com.google.protobuf.ByteString state_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes state = 2; + */ + public boolean hasState() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes state = 2; + */ + public com.google.protobuf.ByteString getState() { + return state_; + } + /** + * optional bytes state = 2; + */ + public Builder setState(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + state_ = value; + onChanged(); + return this; + } + /** + * optional bytes state = 2; + */ + public Builder clearState() { + bitField0_ = (bitField0_ & ~0x00000002); + state_ = getDefaultInstance().getState(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:HSearchBytesFilterMessage) + } + + static { + defaultInstance = new HSearchBytesFilterMessage(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:HSearchBytesFilterMessage) + } + + public interface HSearchScalarFilterMessageOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string filterClassName = 1; + /** + * required string filterClassName = 1; + */ + boolean hasFilterClassName(); + /** + * required string filterClassName = 1; + */ + java.lang.String getFilterClassName(); + /** + * required string filterClassName = 1; + */ + com.google.protobuf.ByteString + getFilterClassNameBytes(); + + // required string inputMapperInstructions = 2; + /** + * required string inputMapperInstructions = 2; + */ + boolean hasInputMapperInstructions(); + /** + * required string inputMapperInstructions = 2; + */ + java.lang.String getInputMapperInstructions(); + /** + * required string inputMapperInstructions = 2; + */ + com.google.protobuf.ByteString + getInputMapperInstructionsBytes(); + + // optional string multiQuery = 3; + /** + * optional string multiQuery = 3; + */ + boolean hasMultiQuery(); + /** + * optional string multiQuery = 3; + */ + java.lang.String getMultiQuery(); + /** + * optional string 
multiQuery = 3; + */ + com.google.protobuf.ByteString + getMultiQueryBytes(); + + // optional bytes inputRowsToIncludeB = 4; + /** + * optional bytes inputRowsToIncludeB = 4; + */ + boolean hasInputRowsToIncludeB(); + /** + * optional bytes inputRowsToIncludeB = 4; + */ + com.google.protobuf.ByteString getInputRowsToIncludeB(); + + // optional bytes matchingIds = 5; + /** + * optional bytes matchingIds = 5; + */ + boolean hasMatchingIds(); + /** + * optional bytes matchingIds = 5; + */ + com.google.protobuf.ByteString getMatchingIds(); + } + /** + * Protobuf type {@code HSearchScalarFilterMessage} + */ + public static final class HSearchScalarFilterMessage extends + com.google.protobuf.GeneratedMessage + implements HSearchScalarFilterMessageOrBuilder { + // Use HSearchScalarFilterMessage.newBuilder() to construct. + private HSearchScalarFilterMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private HSearchScalarFilterMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final HSearchScalarFilterMessage defaultInstance; + public static HSearchScalarFilterMessage getDefaultInstance() { + return defaultInstance; + } + + public HSearchScalarFilterMessage getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private HSearchScalarFilterMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + filterClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + inputMapperInstructions_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + multiQuery_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + inputRowsToIncludeB_ = input.readBytes(); + break; + } + case 42: { + bitField0_ |= 0x00000010; + matchingIds_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchScalarFilterMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchScalarFilterMessage_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public HSearchScalarFilterMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new HSearchScalarFilterMessage(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string filterClassName = 1; + public static final int FILTERCLASSNAME_FIELD_NUMBER = 1; + private java.lang.Object filterClassName_; + /** + * required string filterClassName = 1; + */ + public boolean hasFilterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string filterClassName = 1; + */ + public java.lang.String getFilterClassName() { + java.lang.Object ref = filterClassName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + filterClassName_ = s; + } + return s; + } + } + /** + * required string filterClassName = 1; + */ + public com.google.protobuf.ByteString + getFilterClassNameBytes() { + java.lang.Object ref = filterClassName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + filterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string inputMapperInstructions = 2; + public static final int INPUTMAPPERINSTRUCTIONS_FIELD_NUMBER = 2; + private java.lang.Object inputMapperInstructions_; + /** + * required string inputMapperInstructions = 2; + */ + public boolean hasInputMapperInstructions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string inputMapperInstructions = 2; + */ + public java.lang.String getInputMapperInstructions() { + java.lang.Object ref = inputMapperInstructions_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + inputMapperInstructions_ = s; + } + return s; + } + } + /** + * required string inputMapperInstructions = 2; + */ + public com.google.protobuf.ByteString + getInputMapperInstructionsBytes() { + java.lang.Object ref = inputMapperInstructions_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + inputMapperInstructions_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string multiQuery = 3; + public static final int MULTIQUERY_FIELD_NUMBER = 3; + private java.lang.Object multiQuery_; + /** + * optional string multiQuery = 3; + */ + public boolean hasMultiQuery() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string multiQuery = 3; + */ + public java.lang.String getMultiQuery() { + 
java.lang.Object ref = multiQuery_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + multiQuery_ = s; + } + return s; + } + } + /** + * optional string multiQuery = 3; + */ + public com.google.protobuf.ByteString + getMultiQueryBytes() { + java.lang.Object ref = multiQuery_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + multiQuery_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes inputRowsToIncludeB = 4; + public static final int INPUTROWSTOINCLUDEB_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString inputRowsToIncludeB_; + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public boolean hasInputRowsToIncludeB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public com.google.protobuf.ByteString getInputRowsToIncludeB() { + return inputRowsToIncludeB_; + } + + // optional bytes matchingIds = 5; + public static final int MATCHINGIDS_FIELD_NUMBER = 5; + private com.google.protobuf.ByteString matchingIds_; + /** + * optional bytes matchingIds = 5; + */ + public boolean hasMatchingIds() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bytes matchingIds = 5; + */ + public com.google.protobuf.ByteString getMatchingIds() { + return matchingIds_; + } + + private void initFields() { + filterClassName_ = ""; + inputMapperInstructions_ = ""; + multiQuery_ = ""; + inputRowsToIncludeB_ = com.google.protobuf.ByteString.EMPTY; + matchingIds_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFilterClassName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasInputMapperInstructions()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getFilterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getInputMapperInstructionsBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMultiQueryBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, inputRowsToIncludeB_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBytes(5, matchingIds_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getFilterClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getInputMapperInstructionsBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMultiQueryBytes()); 
+ } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, inputRowsToIncludeB_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, matchingIds_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage)) { + return super.equals(obj); + } + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage other = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage) obj; + + boolean result = true; + result = result && (hasFilterClassName() == other.hasFilterClassName()); + if (hasFilterClassName()) { + result = result && getFilterClassName() + .equals(other.getFilterClassName()); + } + result = result && (hasInputMapperInstructions() == other.hasInputMapperInstructions()); + if (hasInputMapperInstructions()) { + result = result && getInputMapperInstructions() + .equals(other.getInputMapperInstructions()); + } + result = result && (hasMultiQuery() == other.hasMultiQuery()); + if (hasMultiQuery()) { + result = result && getMultiQuery() + .equals(other.getMultiQuery()); + } + result = result && (hasInputRowsToIncludeB() == other.hasInputRowsToIncludeB()); + if (hasInputRowsToIncludeB()) { + result = result && getInputRowsToIncludeB() + .equals(other.getInputRowsToIncludeB()); + } + result = result && (hasMatchingIds() == other.hasMatchingIds()); + if (hasMatchingIds()) { + result = result && getMatchingIds() + .equals(other.getMatchingIds()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFilterClassName()) { + hash = (37 * hash) + FILTERCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getFilterClassName().hashCode(); + } + if (hasInputMapperInstructions()) { + hash = (37 * hash) + INPUTMAPPERINSTRUCTIONS_FIELD_NUMBER; + hash = (53 * hash) + getInputMapperInstructions().hashCode(); + } + if (hasMultiQuery()) { + hash = (37 * hash) + MULTIQUERY_FIELD_NUMBER; + hash = (53 * hash) + getMultiQuery().hashCode(); + } + if (hasInputRowsToIncludeB()) { + hash = (37 * hash) + INPUTROWSTOINCLUDEB_FIELD_NUMBER; + hash = (53 * hash) + getInputRowsToIncludeB().hashCode(); + } + if (hasMatchingIds()) { + hash = (37 * hash) + MATCHINGIDS_FIELD_NUMBER; + hash = (53 * hash) + getMatchingIds().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code HSearchScalarFilterMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchScalarFilterMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchScalarFilterMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.class, com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.Builder.class); + } + + // Construct using com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + filterClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + inputMapperInstructions_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + multiQuery_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + inputRowsToIncludeB_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + matchingIds_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.internal_static_HSearchScalarFilterMessage_descriptor; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage getDefaultInstanceForType() { + return com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance(); + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage build() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage buildPartial() { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage result = new com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.filterClassName_ = filterClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.inputMapperInstructions_ = inputMapperInstructions_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.multiQuery_ = multiQuery_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; 
+ } + result.inputRowsToIncludeB_ = inputRowsToIncludeB_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.matchingIds_ = matchingIds_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage) { + return mergeFrom((com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage other) { + if (other == com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage.getDefaultInstance()) return this; + if (other.hasFilterClassName()) { + bitField0_ |= 0x00000001; + filterClassName_ = other.filterClassName_; + onChanged(); + } + if (other.hasInputMapperInstructions()) { + bitField0_ |= 0x00000002; + inputMapperInstructions_ = other.inputMapperInstructions_; + onChanged(); + } + if (other.hasMultiQuery()) { + bitField0_ |= 0x00000004; + multiQuery_ = other.multiQuery_; + onChanged(); + } + if (other.hasInputRowsToIncludeB()) { + setInputRowsToIncludeB(other.getInputRowsToIncludeB()); + } + if (other.hasMatchingIds()) { + setMatchingIds(other.getMatchingIds()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFilterClassName()) { + + return false; + } + if (!hasInputMapperInstructions()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string filterClassName = 1; + private java.lang.Object filterClassName_ = ""; + /** + * required string filterClassName = 1; + */ + public boolean hasFilterClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string filterClassName = 1; + */ + public java.lang.String getFilterClassName() { + java.lang.Object ref = filterClassName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + filterClassName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string filterClassName = 1; + */ + public com.google.protobuf.ByteString + getFilterClassNameBytes() { + java.lang.Object ref = filterClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + filterClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string filterClassName = 1; + */ + 
public Builder setFilterClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + filterClassName_ = value; + onChanged(); + return this; + } + /** + * required string filterClassName = 1; + */ + public Builder clearFilterClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + filterClassName_ = getDefaultInstance().getFilterClassName(); + onChanged(); + return this; + } + /** + * required string filterClassName = 1; + */ + public Builder setFilterClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + filterClassName_ = value; + onChanged(); + return this; + } + + // required string inputMapperInstructions = 2; + private java.lang.Object inputMapperInstructions_ = ""; + /** + * required string inputMapperInstructions = 2; + */ + public boolean hasInputMapperInstructions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string inputMapperInstructions = 2; + */ + public java.lang.String getInputMapperInstructions() { + java.lang.Object ref = inputMapperInstructions_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + inputMapperInstructions_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string inputMapperInstructions = 2; + */ + public com.google.protobuf.ByteString + getInputMapperInstructionsBytes() { + java.lang.Object ref = inputMapperInstructions_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + inputMapperInstructions_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string inputMapperInstructions = 2; + */ + public Builder setInputMapperInstructions( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + inputMapperInstructions_ = value; + onChanged(); + return this; + } + /** + * required string inputMapperInstructions = 2; + */ + public Builder clearInputMapperInstructions() { + bitField0_ = (bitField0_ & ~0x00000002); + inputMapperInstructions_ = getDefaultInstance().getInputMapperInstructions(); + onChanged(); + return this; + } + /** + * required string inputMapperInstructions = 2; + */ + public Builder setInputMapperInstructionsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + inputMapperInstructions_ = value; + onChanged(); + return this; + } + + // optional string multiQuery = 3; + private java.lang.Object multiQuery_ = ""; + /** + * optional string multiQuery = 3; + */ + public boolean hasMultiQuery() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string multiQuery = 3; + */ + public java.lang.String getMultiQuery() { + java.lang.Object ref = multiQuery_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + multiQuery_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string multiQuery = 3; + */ + public com.google.protobuf.ByteString + getMultiQueryBytes() { + java.lang.Object ref = multiQuery_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + 
(java.lang.String) ref); + multiQuery_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string multiQuery = 3; + */ + public Builder setMultiQuery( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + multiQuery_ = value; + onChanged(); + return this; + } + /** + * optional string multiQuery = 3; + */ + public Builder clearMultiQuery() { + bitField0_ = (bitField0_ & ~0x00000004); + multiQuery_ = getDefaultInstance().getMultiQuery(); + onChanged(); + return this; + } + /** + * optional string multiQuery = 3; + */ + public Builder setMultiQueryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + multiQuery_ = value; + onChanged(); + return this; + } + + // optional bytes inputRowsToIncludeB = 4; + private com.google.protobuf.ByteString inputRowsToIncludeB_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public boolean hasInputRowsToIncludeB() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public com.google.protobuf.ByteString getInputRowsToIncludeB() { + return inputRowsToIncludeB_; + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public Builder setInputRowsToIncludeB(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + inputRowsToIncludeB_ = value; + onChanged(); + return this; + } + /** + * optional bytes inputRowsToIncludeB = 4; + */ + public Builder clearInputRowsToIncludeB() { + bitField0_ = (bitField0_ & ~0x00000008); + inputRowsToIncludeB_ = getDefaultInstance().getInputRowsToIncludeB(); + onChanged(); + return this; + } + + // optional bytes matchingIds = 5; + private com.google.protobuf.ByteString matchingIds_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes matchingIds = 5; + */ + public boolean hasMatchingIds() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bytes matchingIds = 5; + */ + public com.google.protobuf.ByteString getMatchingIds() { + return matchingIds_; + } + /** + * optional bytes matchingIds = 5; + */ + public Builder setMatchingIds(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + matchingIds_ = value; + onChanged(); + return this; + } + /** + * optional bytes matchingIds = 5; + */ + public Builder clearMatchingIds() { + bitField0_ = (bitField0_ & ~0x00000010); + matchingIds_ = getDefaultInstance().getMatchingIds(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:HSearchScalarFilterMessage) + } + + static { + defaultInstance = new HSearchScalarFilterMessage(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:HSearchScalarFilterMessage) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_HSearchGenericFilterMessage_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_HSearchGenericFilterMessage_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_HSearchGenericFilterMessage_QueryFiltersPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_HSearchGenericFilterMessage_QueryFiltersPair_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_HSearchBytesFilterMessage_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_HSearchBytesFilterMessage_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_HSearchScalarFilterMessage_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_HSearchScalarFilterMessage_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\023HSearchFilter.proto\"\375\001\n\033HSearchGeneric" + + "FilterMessage\022\027\n\017filterClassName\030\001 \002(\t\022\037" + + "\n\027inputMapperInstructions\030\002 \002(\t\022\022\n\nmulti" + + "Query\030\003 \002(\t\022\033\n\023inputRowsToIncludeB\030\004 \001(\014" + + "\022C\n\014queryFilters\030\005 \003(\0132-.HSearchGenericF" + + "ilterMessage.QueryFiltersPair\032.\n\020QueryFi" + + "ltersPair\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\"C\n" + + "\031HSearchBytesFilterMessage\022\027\n\017filterClas" + + "sName\030\001 \002(\t\022\r\n\005state\030\002 \001(\014\"\234\001\n\032HSearchSc" + + "alarFilterMessage\022\027\n\017filterClassName\030\001 \002", + "(\t\022\037\n\027inputMapperInstructions\030\002 \002(\t\022\022\n\nm" + + "ultiQuery\030\003 \001(\t\022\033\n\023inputRowsToIncludeB\030\004" + + " \001(\014\022\023\n\013matchingIds\030\005 \001(\014BW\n8com.bizosys" + + ".hsearch.treetable.storage.protobuf.gene" + + "ratedB\023HSearchFilterProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_HSearchGenericFilterMessage_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_HSearchGenericFilterMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HSearchGenericFilterMessage_descriptor, + new java.lang.String[] { "FilterClassName", "InputMapperInstructions", "MultiQuery", "InputRowsToIncludeB", "QueryFilters", }); + internal_static_HSearchGenericFilterMessage_QueryFiltersPair_descriptor = + internal_static_HSearchGenericFilterMessage_descriptor.getNestedTypes().get(0); + internal_static_HSearchGenericFilterMessage_QueryFiltersPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HSearchGenericFilterMessage_QueryFiltersPair_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_HSearchBytesFilterMessage_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_HSearchBytesFilterMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HSearchBytesFilterMessage_descriptor, + new java.lang.String[] { "FilterClassName", "State", }); + internal_static_HSearchScalarFilterMessage_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_HSearchScalarFilterMessage_fieldAccessorTable = new 
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HSearchScalarFilterMessage_descriptor, + new java.lang.String[] { "FilterClassName", "InputMapperInstructions", "MultiQuery", "InputRowsToIncludeB", "MatchingIds", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/src/compatibility/hadooplib_96/storage/CacheStorage.java b/src/compatibility/hadooplib_96/storage/CacheStorage.java new file mode 100644 index 0000000..8bfdf57 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/CacheStorage.java @@ -0,0 +1,57 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.regionserver.BloomType; + +import com.bizosys.hsearch.hbase.HDML; + + +public class CacheStorage { + + public static String TABLE_NAME = "hsearch-cache"; + public static final String CACHE_COLUMN = "c"; + public static final byte[] CACHE_COLUMN_BYTES = CACHE_COLUMN.getBytes(); + private static CacheStorage singleton = null; + + public static CacheStorage getInstance() throws IOException { + + if ( null == singleton ) { + synchronized (CacheStorage.class.getName()) { + if ( null == singleton ) { + singleton = new CacheStorage(); + } + } + } + return singleton; + } + + + private CacheStorage() throws IOException { + + HColumnDescriptor col = new HColumnDescriptor( CACHE_COLUMN.getBytes()) ; + + col.setMinVersions(1); + col.setMaxVersions(1); + col.setKeepDeletedCells(false); + col.setCompressionType(Compression.Algorithm.NONE); + col.setEncodeOnDisk(false); + col.setDataBlockEncoding(DataBlockEncoding.NONE); + col.setInMemory(false); + col.setBlockCacheEnabled(true); + col.setTimeToLive(HConstants.FOREVER); + col.setBloomFilterType(BloomType.NONE); + col.setScope(HConstants.REPLICATION_SCOPE_GLOBAL); + + List<HColumnDescriptor> colFamilies = new ArrayList<HColumnDescriptor>(); + colFamilies.add(col); + HDML.create(TABLE_NAME, colFamilies); + + } +} diff --git a/src/compatibility/hadooplib_96/storage/HBaseTableSchemaCreator.java b/src/compatibility/hadooplib_96/storage/HBaseTableSchemaCreator.java new file mode 100644 index 0000000..8109ef5 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HBaseTableSchemaCreator.java @@ -0,0 +1,148 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License.
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.log4j.Logger; + +import com.bizosys.hsearch.hbase.HDML; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.conf.Configuration; + +public final class HBaseTableSchemaCreator { + + private static HBaseTableSchemaCreator instance = null; + public static Logger l = Logger.getLogger(HBaseTableSchemaCreator.class.getName()); + + Configuration config = HSearchConfig.getInstance().getConfiguration(); + + public Algorithm compression = Compression.Algorithm.NONE; + public int partitionBlockSize = config.getInt("partition.block.size", 13035596); + public int partitionRepMode = HConstants.REPLICATION_SCOPE_GLOBAL; + public DataBlockEncoding dataBlockEncoding = DataBlockEncoding.NONE; + public BloomType bloomType = BloomType.NONE; + public boolean inMemory = false; + public boolean blockCacheEnabled = config.getBoolean("block.cache.enabled", true); + + public static final HBaseTableSchemaCreator getInstance() { + if ( null != instance) return instance; + synchronized (HBaseTableSchemaCreator.class) { + if ( null != instance) return instance; + instance = new HBaseTableSchemaCreator(); + } + return instance; + } + + /** + * Default constructor + * + */ + public HBaseTableSchemaCreator(){ + } + + /** + * Checks and Creates all necessary tables required for HSearch index. + */ + public final boolean init(String tableName) { + + try { + + List<HColumnDescriptor> colFamilies = new ArrayList<HColumnDescriptor>(); + + HBaseTableSchemaDefn def = HBaseTableSchemaDefn.getInstance(tableName); + + System.out.println("Compression : " + this.compression.getName()); + System.out.println("Partition Block Size : " + this.partitionBlockSize); + System.out.println("Partition Rep Mode : " + this.partitionRepMode); + System.out.println("Partition Block Encoding : " + this.dataBlockEncoding.name()); + System.out.println("Bloom Type : " + this.bloomType.name()); + System.out.println("In Memory Table: " + this.inMemory); + System.out.println("Block Caching: " + this.blockCacheEnabled); + + for (String familyName : def.columnPartions.keySet()) { + + //Partitioned + List<String> partitionNames = def.columnPartions.get(familyName).getPartitionNames(); + for (String partition : partitionNames) { + HColumnDescriptor rangeCols = new HColumnDescriptor( (familyName + "_" + partition ).getBytes()); + configColumn(rangeCols); + colFamilies.add(rangeCols); + } + + //No Partition + if ( partitionNames.size() == 0 ) { + HColumnDescriptor rangeCols = new HColumnDescriptor( familyName.getBytes()); + configColumn(rangeCols); + colFamilies.add(rangeCols); + } + } + + HDML.create(tableName, colFamilies); + return true; + + } catch (Exception sf) { + sf.printStackTrace(System.err); + l.fatal(sf); + return false; + } + } + + /** + * Compression method to HBase compression code.
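+ * Maps a configured codec name ("gz", "lzo" or "none") to the matching HBase Compression.Algorithm name; anything unrecognized falls back to GZ, as the body below shows.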
+ * @param methodName + * @return + */ + public static final String resolveCompression(final String methodName) { + String compClazz = Compression.Algorithm.GZ.getName(); + if ("gz".equals(methodName)) { + compClazz = Compression.Algorithm.GZ.getName(); + } else if ("lzo".equals(methodName)) { + compClazz = Compression.Algorithm.LZO.getName(); + } else if ("none".equals(methodName)) { + compClazz = Compression.Algorithm.NONE.getName(); + } + return compClazz; + } + + public final void configColumn(final HColumnDescriptor col) { + col.setMinVersions(1); + col.setMaxVersions(1); + col.setKeepDeletedCells(false); + col.setCompressionType(compression); + col.setEncodeOnDisk(false); + col.setDataBlockEncoding(dataBlockEncoding); + col.setInMemory(inMemory); + col.setBlockCacheEnabled(blockCacheEnabled); + col.setBlocksize(partitionBlockSize); + col.setTimeToLive(HConstants.FOREVER); + col.setBloomFilterType(bloomType); + col.setScope(partitionRepMode); + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_96/storage/HBaseTableSchemaDefn.java b/src/compatibility/hadooplib_96/storage/HBaseTableSchemaDefn.java new file mode 100644 index 0000000..d06ca0e --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HBaseTableSchemaDefn.java @@ -0,0 +1,65 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.util.HashMap; +import java.util.Map; + +import com.bizosys.hsearch.treetable.client.partition.IPartition; + +public final class HBaseTableSchemaDefn { + + private static Map<String, HBaseTableSchemaDefn> repositories = new HashMap<String, HBaseTableSchemaDefn>(); + + public static HBaseTableSchemaDefn getInstance(String tableName) { + if ( repositories.containsKey(tableName)) return repositories.get(tableName); + else { + synchronized (HBaseTableSchemaDefn.class.getName()) { + if ( repositories.containsKey(tableName)) return repositories.get(tableName); + repositories.put(tableName, new HBaseTableSchemaDefn(tableName)); + } + } + return repositories.get(tableName); + } + + private HBaseTableSchemaDefn(String tableName) { + this.tableName = tableName; + } + + private String tableName = "htable"; + + //FamilyName_partition is how the column families are created.
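+ // e.g. a schema family "structured" with partitions "a" and "b" is stored as HBase column families "structured_a" and "structured_b"; a family with no partitions keeps its bare name (see HBaseTableSchemaCreator.init).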
+ public Map<String, IPartition> columnPartions = new HashMap<String, IPartition>(); + + public final static char getColumnName() { + return getColumnName(1); + } + + public final static char getColumnName(int token) { + String tokenStr = Integer.toString(token); + return tokenStr.charAt(tokenStr.length() - 1); + } + + public String getTableName() { + return this.tableName; + } + + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_96/storage/HSearchBytesCoProcessorProxy.java b/src/compatibility/hadooplib_96/storage/HSearchBytesCoProcessorProxy.java new file mode 100644 index 0000000..07ba877 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchBytesCoProcessorProxy.java @@ -0,0 +1,98 @@ +/* + * Copyright 2010 Bizosys Technologies Limited + * + * Licensed to the Bizosys Technologies Limited (Bizosys) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The Bizosys licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchBytesCoprocessorProxyService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage; +import com.bizosys.hsearch.util.HSearchLog; +import com.google.protobuf.ByteString; + +public final class HSearchBytesCoProcessorProxy { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + HSearchBytesFilter filter = null; + List<ColumnFamName> family_cols = null; + + public HSearchBytesCoProcessorProxy(final List<ColumnFamName> family_cols , final HSearchBytesFilter filter) throws IOException { + this.filter = filter; + if (null == family_cols) throw new IOException("Please provide family details. 
Scan on all columns is not allowed"); + this.family_cols = family_cols; + } + + public final Map<byte[], byte[]> execCoprocessorRows(final HTableWrapper table) throws IOException, Throwable { + + BytesRowRequest.Builder requestBuilder = BytesRowRequest.newBuilder(); + for (ColumnFamName familyNqualifier : this.family_cols) { + Column.Builder columnBuilder = Column.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(familyNqualifier.family)); + columnBuilder.setQualifier(ByteString.copyFrom(familyNqualifier.name)); + Column column = columnBuilder.build(); + requestBuilder.addFamilyWithQualifier(column); + } + if(null != filter){ + HSearchBytesFilterMessage filterMessage = HSearchBytesFilter.getBytesFilterMessage(filter); + requestBuilder.setFilter(filterMessage); + } + + final BytesRowRequest request = requestBuilder.build(); + + Batch.Call<HSearchBytesCoprocessorProxyService, byte[]> callable = + new Batch.Call<HSearchBytesCoprocessorProxyService, byte[]>() { + + @Override + public final byte[] call(HSearchBytesCoprocessorProxyService instance) throws IOException { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<RowResponse> rpcCallback = new BlockingRpcCallback<RowResponse>(); + instance.getRows(controller, request, rpcCallback); + RowResponse response = rpcCallback.get(); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + + ByteString result = response.getResult(); + if (!result.isEmpty()) { + return result.toByteArray(); + } + + return null; + } + }; + + Map<byte[], byte[]> output = table.tableInterface.coprocessorService( + HSearchBytesCoprocessorProxyService.class,null,null,callable); + + return output; + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_96/storage/HSearchBytesCoprocessor.java b/src/compatibility/hadooplib_96/storage/HSearchBytesCoprocessor.java new file mode 100644 index 0000000..4c0ec4d --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchBytesCoprocessor.java @@ -0,0 +1,156 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.CoprocessorException; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.BytesRowRequest; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchBytesCoprocessorProxyService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.HSearchLog; +import com.bizosys.hsearch.util.conf.Configuration; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +public final class HSearchBytesCoprocessor extends HSearchBytesCoprocessorProxyService +implements CoprocessorService, Coprocessor { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + public boolean scannerBlockCaching = true; + public int scannerBlockCachingLimit = 1; + private RegionCoprocessorEnvironment env = null; + + public HSearchBytesCoprocessor() { + Configuration config = HSearchConfig.getInstance().getConfiguration(); + this.scannerBlockCaching = config.getBoolean("scanner.block.caching", true); + this.scannerBlockCachingLimit = config.getInt("scanner.block.caching.amount", 1); + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + if (env instanceof RegionCoprocessorEnvironment) { + this.env = (RegionCoprocessorEnvironment)env; + } else { + throw new CoprocessorException("Must be loaded on a table region!"); + } + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + } + + @Override + public Service getService() { + return this; + } + + + /** + * Get matching rows for this region. + */ + @Override + public void getRows(RpcController controller, BytesRowRequest request, RpcCallback<RowResponse> callBack) { + if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows"); + + InternalScanner scanner = null; + RowResponse response = null; + HSearchBytesFilter filter = null; + + try { + Scan scan = new Scan(); + scan.setCacheBlocks(scannerBlockCaching); + scan.setCaching(scannerBlockCachingLimit); + scan.setMaxVersions(1); + + List<Column> familyWithQualifierL = request.getFamilyWithQualifierList(); + for (Column column : familyWithQualifierL) { + if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + + " @ adding family " + new String(column.getFamily().toByteArray()) + "_" + new String(column.getQualifier().toByteArray())); + scan.addColumn(column.getFamily().toByteArray(), column.getQualifier().toByteArray()); + } +
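+ // The filter rides inside the protobuf request; getBytesFilter() below rebuilds it by class name via reflection, so the concrete HSearchBytesFilter subclass must be deployed on the region server classpath. + 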
HSearchBytesFilterMessage filterMessage = request.getFilter(); + filter = HSearchBytesFilter.getBytesFilter(filterMessage); + + if ( null != filter) { + FilterList filterL = filter.getFilters(); + if ( null != filterL) scan = scan.setFilter(filterL); + else scan = scan.setFilter(filter); + } + + RegionCoprocessorEnvironment environment = env; + scanner = environment.getRegion().getScanner(scan); + + List<Cell> curVals = new ArrayList<Cell>(); + boolean hasMoreRows = false; + do { + hasMoreRows = scanner.next(curVals); + } while (hasMoreRows); + + byte[] data = filter.processRows(); + if (data != null) { + RowResponse.Builder builder = RowResponse.newBuilder(); + builder.setResult(ByteString.copyFrom(data)); + response = builder.build(); + + if(DEBUG_ENABLED) + HSearchLog.l.debug("Row response length from " + env.getRegion().getRegionNameAsString() + ": " + data.length); + } + + callBack.run(response); + + } catch (Exception e) { + + HSearchLog.l.fatal("Error fetching rows in coprocessor : " + e.getLocalizedMessage()); + ResponseConverter.setControllerException(controller, new IOException(e)); + + } finally { + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchBytesCoprocessorI.java b/src/compatibility/hadooplib_96/storage/HSearchBytesCoprocessorI.java new file mode 100644 index 0000000..b089227 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchBytesCoprocessorI.java @@ -0,0 +1,9 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; + +public interface HSearchBytesCoprocessorI { + byte[] getRows(final byte[][] families, final byte[][] cols, + final HSearchBytesFilter filter) throws IOException; + +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchBytesFilter.java b/src/compatibility/hadooplib_96/storage/HSearchBytesFilter.java new file mode 100644 index 0000000..4e34f07 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchBytesFilter.java @@ -0,0 +1,158 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.text.ParseException; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.filter.FilterBase; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchBytesFilterMessage; +import com.bizosys.hsearch.util.HSearchLog; +import com.google.protobuf.ByteString; + +/** + * @author abinash + * + */ +public abstract class HSearchBytesFilter extends FilterBase { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + protected byte[] state = null; + + public HSearchBytesFilter(final byte[] state){ + this.state = state; + } + + /** + * @return The filter serialized using pb + * @throws IOException + */ + @Override + public byte [] toByteArray() throws IOException { + + HSearchBytesFilterMessage filterMessage = getBytesFilterMessage(this); + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Sending to HBase : " + filterMessage.toString()); + } + + return filterMessage.toByteArray(); + } + + public static HSearchBytesFilterMessage getBytesFilterMessage(HSearchBytesFilter instance) + throws IOException { + + HSearchBytesFilterMessage.Builder builder = HSearchBytesFilterMessage.newBuilder(); + + builder.setFilterClassName(instance.getClass().getName()); + builder.setState(ByteString.copyFrom(instance.state)); + return builder.build(); + } + + /** + * @param pbBytes A pb serialized {@link HSearchBytesFilter} instance + * @return An instance of {@link HSearchBytesFilter} made from bytes + * @throws org.apache.hadoop.hbase.exceptions.DeserializationException + * + */ + public static HSearchBytesFilter parseFrom(final byte [] pbBytes) throws DeserializationException { + + HSearchBytesFilter bytesFilter = null; + + try { + + int length = null == pbBytes ? 
0 : pbBytes.length; + if(0 == length) + throw new IOException("Invalid Query"); + + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Total bytes Received @ HSearchBytesFilter:" + length); + } + + HSearchBytesFilterMessage filterMessage = HSearchBytesFilterMessage.parseFrom(pbBytes); + bytesFilter = getBytesFilter(filterMessage); + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(); + throw new DeserializationException(ex); + } + + return bytesFilter; + } + + public static HSearchBytesFilter getBytesFilter(HSearchBytesFilterMessage filterMessage) + throws InstantiationException, IllegalAccessException, + ClassNotFoundException, IOException, ParseException { + + String className = filterMessage.getFilterClassName(); + //TODO:Create factory for Generic filter instead of instantiating using reflection + HSearchBytesFilter bytesFilter = (HSearchBytesFilter) Class.forName(className).newInstance(); + + bytesFilter.state = filterMessage.getState().toByteArray(); + return bytesFilter; + } + + @Override + public final void filterRowCells(final List<Cell> cellL) { + if ( null == cellL) return; + int cellT = cellL.size(); + if ( 0 == cellT) return; + + try { + for (Cell cell : cellL) { + if ( null == cell) continue; + + byte[] inputData = CellUtil.cloneValue(cell); + if ( null == inputData) continue; + + processColumn(cell); + } + + processRow(cellL); + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(System.err); + } + } + + public abstract void processColumn(Cell cell) throws IOException; + public abstract void processRow(List<Cell> row) throws IOException; + public abstract byte[] processRows() throws IOException; + + + @Override + public boolean hasFilterRow() { + return true; + } + + public FilterList getFilters() { + return null; + } +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_96/storage/HSearchGenericCoProcessorFactory.java b/src/compatibility/hadooplib_96/storage/HSearchGenericCoProcessorFactory.java new file mode 100644 index 0000000..a33b94b --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchGenericCoProcessorFactory.java @@ -0,0 +1,145 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.cache.CacheService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchGenericCoprocessorService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage; +import com.bizosys.hsearch.util.HSearchLog; +import com.google.protobuf.ByteString; + +public final class HSearchGenericCoProcessorFactory { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + boolean cacheEnabled = false; + + HSearchGenericFilter filter = null; + List<ColumnFamName> family_cols = null; + + public HSearchGenericCoProcessorFactory(final List<ColumnFamName> family_cols , final HSearchGenericFilter filter) throws IOException { + + if (null == family_cols) throw new IOException("Please provide family details. Scan on all columns is not allowed"); + this.family_cols = family_cols; + this.filter = filter; + this.cacheEnabled = CacheService.getInstance().isCacheEnable(); + if ( INFO_ENABLED) { + HSearchLog.l.info("Cache Storage Enablement :" + cacheEnabled ); + } + } + + public final Collection<byte[]> execCoprocessorRows(final HTableWrapper table) throws IOException, Throwable { + + String singleQuery = null; + + /** + * Check for already cached result + */ + if ( null != filter) { + if ( filter.clientSideAPI_IsSingleQuery() ) { + singleQuery = filter.clientSideAPI_getSingleQueryWithScope(); + if ( cacheEnabled ) { + byte[] singleQueryResultB = CacheService.getInstance().get(singleQuery); + if( null != singleQueryResultB) { + return SortedBytesArray.getInstance().parse(singleQueryResultB).values(); + } + } + } + } + + RowRequest.Builder requestBuilder = RowRequest.newBuilder(); + + for (ColumnFamName familyNqualifier : this.family_cols) { + Column.Builder columnBuilder = Column.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(familyNqualifier.family)); + columnBuilder.setQualifier(ByteString.copyFrom(familyNqualifier.name)); + Column column = columnBuilder.build(); + requestBuilder.addFamilyWithQualifier(column); + } + + if(null != filter){ + HSearchGenericFilterMessage filterMessage = HSearchGenericFilter.getGenericFilterMessage(filter); + requestBuilder.setFilter(filterMessage); + } + + final RowRequest request = requestBuilder.build(); + + Batch.Call<HSearchGenericCoprocessorService, byte[]> onComplete = new Batch.Call<HSearchGenericCoprocessorService, byte[]>(){ + + @Override + public byte[] call(HSearchGenericCoprocessorService instance) throws IOException { + + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<RowResponse> rpcCallback = new BlockingRpcCallback<RowResponse>(); + + instance.getRows(controller, request, rpcCallback); + RowResponse response = rpcCallback.get(); + + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + + ByteString result = 
response.hasResult() ? response.getResult() : null; + + if (null != result) { + return result.toByteArray(); + } + + return null; + } + }; + + Map<byte[], byte[]> output = table.tableInterface.coprocessorService( + HSearchGenericCoprocessorService.class,//coprocessor protobuf service class + null,//Start Row Key + null,//End Row key + onComplete);//CallBack + + Collection<byte[]> result = output.values(); + + try { + if ( null != singleQuery) { + if ( cacheEnabled ) { + byte[] dataPack = SortedBytesArray.getInstance().toBytes(result); + CacheService.getInstance().put(singleQuery, dataPack); + } + } + } catch (Exception ex) { + HSearchLog.l.warn("Cache Service Failure.", ex); + } + + return result; + } +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchGenericCoprocessor.java b/src/compatibility/hadooplib_96/storage/HSearchGenericCoprocessor.java new file mode 100644 index 0000000..4d14da3 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchGenericCoprocessor.java @@ -0,0 +1,25 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + + +public interface HSearchGenericCoprocessor { +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchGenericCoprocessorImpl.java b/src/compatibility/hadooplib_96/storage/HSearchGenericCoprocessorImpl.java new file mode 100644 index 0000000..c6b12f1 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchGenericCoprocessorImpl.java @@ -0,0 +1,207 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.CoprocessorException; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.functions.HSearchReducer; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchGenericCoprocessorService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowRequest; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.HSearchLog; +import com.bizosys.hsearch.util.conf.Configuration; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +public final class HSearchGenericCoprocessorImpl extends HSearchGenericCoprocessorService +implements CoprocessorService, Coprocessor { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + private Configuration config = HSearchConfig.getInstance().getConfiguration(); + + private boolean internalScannerBlockCaching = true; + private int internalScannerBlockCachingAmount = 1; + private RegionCoprocessorEnvironment env = null; + + public HSearchGenericCoprocessorImpl() { + this.internalScannerBlockCaching = config.getBoolean("internal.scanner.block.caching", true); + this.internalScannerBlockCachingAmount = config.getInt("internal.scanner.block.caching.amount", 1); + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + if (env instanceof RegionCoprocessorEnvironment) { + this.env = (RegionCoprocessorEnvironment)env; + } else { + throw new CoprocessorException("Must be loaded on a table region!"); + } + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + } + + @Override + public Service getService() { + return this; + } + + /** + * Get matching rows for this region. + */ + @Override + public void getRows(RpcController controller, RowRequest request, RpcCallback<RowResponse> callBack) { + + if ( DEBUG_ENABLED ) + HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows"); + + InternalScanner scanner = null; + RowResponse response = null; + + long monitorStartTime = 0L; + long overallStartTime = System.currentTimeMillis(); + HSearchGenericFilter filter = null; + + try { + + Scan scan = new Scan(); + scan.setCacheBlocks(internalScannerBlockCaching); + scan.setCaching(internalScannerBlockCachingAmount);
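+ // Scan tuning is config driven: internal.scanner.block.caching toggles HBase block-cache use for this scan, and internal.scanner.block.caching.amount is the caching hint (both read in the constructor above). + 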
scan.setMaxVersions(1); + + List<Column> familyWithQualifierL = request.getFamilyWithQualifierList(); + for (Column column : familyWithQualifierL) { + if ( DEBUG_ENABLED ) + HSearchLog.l.debug( Thread.currentThread().getName() + + " @ adding family " + new String(column.getFamily().toByteArray()) + "_" + new String(column.getQualifier().toByteArray())); + scan.addColumn(column.getFamily().toByteArray(), column.getQualifier().toByteArray()); + } + + HSearchGenericFilterMessage filterMessage = request.getFilter(); + filter = HSearchGenericFilter.getGenericFilter(filterMessage); + + if ( null != filter) { + FilterList filterL = filter.getFilters(); + if ( null != filterL) scan = scan.setFilter(filterL); + else scan = scan.setFilter(filter); + } + + RegionCoprocessorEnvironment environment = env; + + scanner = environment.getRegion().getScanner(scan); + List<Cell> curVals = new ArrayList<Cell>(); + boolean hasMoreRows = false; + + Collection<byte[]> finalOutput = new ArrayList<byte[]>(); + Collection<byte[]> partOutput = new ArrayList<byte[]>(); + + HSearchReducer reducer = filter.getReducer(); + filter.configure(); + do { + + curVals.clear(); + partOutput.clear(); + hasMoreRows = scanner.next(curVals); + + for (Cell cell : curVals) { + byte[] input = CellUtil.cloneValue(cell); + if ( null == input) continue; + + if ( null != reducer) { + filter.deserialize(input, partOutput); + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + reducer.appendRows(cell.getRowArray(), finalOutput, partOutput); + + if ( INFO_ENABLED ) { + filter.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + } + } + + } while (hasMoreRows); + + if ( INFO_ENABLED ) HSearchLog.l.info( + "**** Time spent on Overall : Scanner : Plugin Code = " + + ( System.currentTimeMillis() - overallStartTime) + ":" + + filter.overallExecutionTime + ":" + + filter.pluginExecutionTime + " in ms."); + + byte[] data = SortedBytesArray.getInstance().toBytes(finalOutput); + if (data != null) { + + RowResponse.Builder builder = RowResponse.newBuilder(); + builder.setResult(ByteString.copyFrom(data)); + response = builder.build(); + + if(DEBUG_ENABLED) + HSearchLog.l.debug("Row response length from region " + env.getRegion().getRegionNameAsString() + " is of data size : " + data.length + " bytes"); + } + + callBack.run(response); + + } catch (Exception e) { + e.printStackTrace(); + HSearchLog.l.fatal("Error fetching rows in coprocessor : " + e.getMessage()); + ResponseConverter.setControllerException(controller, new IOException(e)); + + } finally { + if ( null != filter) filter.close(); + + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchGenericFilter.java b/src/compatibility/hadooplib_96/storage/HSearchGenericFilter.java new file mode 100644 index 0000000..1e1ef51 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchGenericFilter.java @@ -0,0 +1,586 @@ +/* + * Copyright 2010 Bizosys Technologies Limited + * + * Licensed to the Bizosys Technologies Limited (Bizosys) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The Bizosys licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.filter.FilterBase; +import org.apache.hadoop.hbase.filter.FilterList; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.federate.BitSetOrSet; +import com.bizosys.hsearch.federate.QueryPart; +import com.bizosys.hsearch.functions.HSearchReducer; +import com.bizosys.hsearch.functions.StatementWithOutput; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchTableMultiQueryExecutor; +import com.bizosys.hsearch.treetable.client.HSearchTableParts; +import com.bizosys.hsearch.treetable.client.IHSearchPlugin; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchGenericFilterMessage.QueryFiltersPair; +import com.bizosys.hsearch.util.HSearchLog; +import com.google.protobuf.ByteString; + +/** + * @author abinash + * + */ +public abstract class HSearchGenericFilter extends FilterBase { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + String name = null; + public String getName() { + if ( null == name) { + name = this.getClass().getName(); + } + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + + /** + * Client side variables + */ + Map<String, String> queryFilters = null; + + /** + * Server Side Variables + */ + String multiQuery = null; + IHSearchPlugin plugin = null; + Map<String, QueryPart> queryPayload = new HashMap<String, QueryPart>(3); + Map<String, String> colIdWithType = new HashMap<String, String>(3); + + public long pluginExecutionTime = 0L; + public long overallExecutionTime = 0L; + + + HSearchProcessingInstruction inputMapperInstructions = new HSearchProcessingInstruction(); + byte[] inputRowsToIncludeB = null; + List<byte[]> inputRowsList = null; + SortedBytesArray rowsToInclude = null; + + Map<String, HSearchTableParts> queryIdWithParts = new HashMap<String, HSearchTableParts>(); + Map<String, HSearchTableParts> colNamesWithPartitionBytes = new HashMap<String, HSearchTableParts>(); + List<Cell> columnsOfOneRowAfterJoin = new ArrayList<Cell>(); + List<Collection<byte[]>> stmtOutputContainers = new LinkedList<Collection<byte[]>>(); + SortedBytesArray rowBytesPacker = SortedBytesArray.getInstanceArr(); + + HSearchTableMultiQueryExecutor intersector = null; + + public HSearchGenericFilter(){ + } + + public HSearchGenericFilter(final HSearchProcessingInstruction outputType, + final String query, final Map<String, String> details) { + this(outputType, query, details, null); + } + + public HSearchGenericFilter(final HSearchProcessingInstruction outputType, + final String query, final Map<String, String> details, List<byte[]> 
scopedToRows) { + + this.multiQuery = query; + this.queryFilters = details; + this.inputMapperInstructions = outputType; + this.inputRowsList = scopedToRows; + this.name = this.getClass().getSimpleName(); + } + + public void setScopedToRows(List<byte[]> scopedToRows) { + this.inputRowsList = scopedToRows; + } + + public boolean clientSideAPI_IsSingleQuery() throws IOException { + if ( null == this.queryFilters) throw new IOException("Generic Filter is not initialized"); + if ( 1 == this.queryFilters.size()) return true; + return false; + } + + public String clientSideAPI_getSingleQueryWithScope() throws IOException { + if ( null == this.queryFilters) throw new IOException("Generic Filter is not initialized"); + if ( 1 != this.queryFilters.size()) throw new IOException("Generic Filter has multiple queries"); + return getName() + "/" + this.queryFilters.values().iterator().next(); + } + + + /** + * @return The filter serialized using pb + * @throws IOException + */ + @Override + public byte [] toByteArray() throws IOException { + + HSearchGenericFilterMessage filterMessage = getGenericFilterMessage(this); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Sending to HBase : " + filterMessage.toString()); + } + + return filterMessage.toByteArray(); + } + + public static HSearchGenericFilterMessage getGenericFilterMessage(HSearchGenericFilter instance) + throws IOException { + + HSearchGenericFilterMessage.Builder builder = HSearchGenericFilterMessage.newBuilder(); + builder.setFilterClassName(instance.getClass().getName()); + builder.setInputMapperInstructions(instance.inputMapperInstructions.toString()); + builder.setMultiQuery(instance.multiQuery); + + if ( null != instance.inputRowsList) { + if ( instance.inputRowsList.size() > 0 ) { + instance.inputRowsToIncludeB = SortedBytesArray.getInstanceArr().toBytes(instance.inputRowsList); + builder.setInputRowsToIncludeB(ByteString.copyFrom(instance.inputRowsToIncludeB)); + } + } + + if ( null != instance.queryFilters) { + for (String key : instance.queryFilters.keySet()) { + String value = instance.queryFilters.get(key); + QueryFiltersPair.Builder queryBuilder = QueryFiltersPair.newBuilder() + .setKey(key) + .setValue(value); + builder.addQueryFilters(queryBuilder); + } + } + return builder.build(); + } + + /** + * @param pbBytes A pb serialized {@link HSearchGenericFilter} instance + * @return An instance of {@link HSearchGenericFilter} made from bytes + * @throws org.apache.hadoop.hbase.exceptions.DeserializationException + * @see #toByteArray + */ + public static HSearchGenericFilter parseFrom(final byte [] pbBytes) throws DeserializationException { + HSearchGenericFilter genericFilter = null; + try { + + int length = null == pbBytes ? 
+
+	/**
+	 * @param pbBytes A pb serialized {@link PrefixFilter} instance
+	 * @return An instance of {@link PrefixFilter} made from bytes
+	 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+	 * @see #toByteArray
+	 */
+	public static HSearchGenericFilter parseFrom(final byte [] pbBytes) throws DeserializationException {
+		HSearchGenericFilter genericFilter = null;
+		try {
+
+			int length = null == pbBytes ? 0 : pbBytes.length;
+			if(0 == length)
+				throw new IOException("Invalid Query");
+
+			if ( DEBUG_ENABLED) {
+				HSearchLog.l.debug("Total bytes Received @ Generic Filter:" + length);
+			}
+
+			HSearchGenericFilterMessage filterMessage = HSearchGenericFilterMessage.parseFrom(pbBytes);
+			genericFilter = getGenericFilter(filterMessage);
+
+		} catch (Exception ex) {
+			HSearchLog.l.fatal(ex);
+			throw new DeserializationException(ex);
+		}
+
+		return genericFilter;
+	}
+
+	public static HSearchGenericFilter getGenericFilter(HSearchGenericFilterMessage filterMessage)
+		throws InstantiationException, IllegalAccessException,
+		ClassNotFoundException, IOException, ParseException {
+
+		HSearchGenericFilter genericFilter;
+		String className = filterMessage.getFilterClassName();
+		//TODO: Create a factory for the generic filter instead of instantiating via reflection
+		genericFilter = (HSearchGenericFilter) Class.forName(className).newInstance();
+
+		if(filterMessage.hasInputRowsToIncludeB()){
+			genericFilter.rowsToInclude = SortedBytesArray.getInstanceArr();
+			genericFilter.rowsToInclude.parse(filterMessage.getInputRowsToIncludeB().toByteArray());
+		}
+
+		genericFilter.multiQuery = filterMessage.getMultiQuery();
+		if ( DEBUG_ENABLED ) {
+			HSearchLog.l.debug("HBase Region Server: Multi Query " + genericFilter.multiQuery);
+		}
+
+		String instruction = filterMessage.getInputMapperInstructions();
+		if (instruction.length() == 0 ) throw new IOException("Unknown result output type.");
+
+		genericFilter.inputMapperInstructions = new HSearchProcessingInstruction(instruction);
+
+		String colNameQuolonId = null;
+		String filtersPipeSeparated = null;
+		int colNameAndQIdSplitIndex = -1;
+		String colName = null;
+		String qId = null;
+
+		List<QueryFiltersPair> filterPairs = filterMessage.getQueryFiltersList();
+		for (QueryFiltersPair filterPair : filterPairs) {
+
+			colNameQuolonId = filterPair.getKey();
+			filtersPipeSeparated = filterPair.getValue();
+
+			colNameAndQIdSplitIndex = colNameQuolonId.indexOf(':');
+			if ( -1 == colNameAndQIdSplitIndex || colNameQuolonId.length() - 1 == colNameAndQIdSplitIndex) {
+				throw new IOException("Sub queries expected as X:Y e.g.\n" +
+					"family1:A OR family2:B\nfamily1:A=f|1|1|1|c|*|*\nfamily2:B=*|*|*|*|*|*");
+			}
+
+			colName = colNameQuolonId.substring(0,colNameAndQIdSplitIndex);
+			qId = colNameQuolonId.substring(colNameAndQIdSplitIndex+1);
+
+			genericFilter.colIdWithType.put(qId, colName);
+
+			if ( DEBUG_ENABLED ) {
+				HSearchLog.l.debug("colName:qId = " + colName + ":" + qId);
+			}
+
+			genericFilter.plugin = genericFilter.createPlugIn(colName) ;
+			genericFilter.plugin.setOutputType(genericFilter.inputMapperInstructions);
+			genericFilter.queryPayload.put(colNameQuolonId,
+				new QueryPart(filtersPipeSeparated, HSearchTableMultiQueryExecutor.PLUGIN, genericFilter.plugin) );
+
+			if ( DEBUG_ENABLED ) {
+				HSearchLog.l.debug("HBase Region Server: Query Payload added for " + colName);
+			}
+		}
+
+		for (int i = 0 ; i < genericFilter.queryPayload.size() ; i++) {
+			genericFilter.stmtOutputContainers.add( new ArrayList<byte[]>() );
+		}
+		return genericFilter;
+	}
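+
+	/*
+	 * Key convention recap: every queryFilters key is "family:queryId" and every
+	 * value is a pipe-separated filter expression, e.g.
+	 *   "structured:A" -> "f|1|1|1|c|*|*"
+	 * getGenericFilter above splits each key on ':' to pick the plugin family.
+	 */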
+
+	@Override
+	public final void filterRowCells(final List<Cell> cellL) {
+		if ( null == cellL) return;
+		int cellT = cellL.size();
+		if ( 0 == cellT) return;
+
+		if ( DEBUG_ENABLED ) {
+			HSearchLog.l.debug("Processing @ Region Server : filterRow" );
+		}
+
+		try {
+
+			byte[] row = null;
+			byte[] firstFamily = null;
+			byte[] firstCol = null;
+
+			//colParts.put("structured:A", bytes);
+			colNamesWithPartitionBytes.clear();
+
+			//HBase Family Name = schema column name + "_" + partition
+			String columnNameWithParition = null;
+			String colName = null;
+
+			for (Cell cell : cellL) {
+				if ( null == cell) continue;
+
+				byte[] inputData = CellUtil.cloneValue(cell);
+				if ( null == inputData) continue;
+				columnNameWithParition = new String(CellUtil.cloneFamily(cell));
+
+				int partitionIndex = columnNameWithParition.indexOf('_');
+				colName = ( partitionIndex == -1 ) ? columnNameWithParition :
+					columnNameWithParition.substring(0, partitionIndex);
+
+				HSearchTableParts tableParts = null;
+				if ( colNamesWithPartitionBytes.containsKey(colName)) {
+					tableParts = colNamesWithPartitionBytes.get(colName);
+				} else {
+					tableParts = new HSearchTableParts();
+					colNamesWithPartitionBytes.put(colName, tableParts);
+				}
+				tableParts.put(inputData);
+
+				if ( null == row ) {
+					firstFamily = CellUtil.cloneFamily(cell);
+					firstCol = CellUtil.cloneQualifier(cell);
+					row = CellUtil.cloneRow(cell);
+				}
+			}
+
+			if ( DEBUG_ENABLED ) {
+				HSearchLog.l.debug("queryData HSearchTableParts creation. ");
+			}
+
+			queryIdWithParts.clear();
+
+			for (String queryId : colIdWithType.keySet()) { //A
+				String queryType = colIdWithType.get(queryId); //structured
+				HSearchTableParts parts = colNamesWithPartitionBytes.get(queryType);
+
+				String queryTypeWithId = queryType + ":" + queryId;
+
+				if ( DEBUG_ENABLED ) {
+					HSearchLog.l.debug(queryTypeWithId);
+					HSearchLog.l.debug("Query Parts for " + queryTypeWithId);
+				}
+
+				queryIdWithParts.put(queryTypeWithId, parts);
+			}
+			colNamesWithPartitionBytes.clear();
+
+			if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchGenericFilter: Filtration Starts");
+
+			long monitorStartTime = 0L;
+			if ( INFO_ENABLED ) {
+				monitorStartTime = System.currentTimeMillis();
+			}
+
+			if ( null == intersector ) intersector = createExecutor();
+			this.plugin.setMergeId(row);
+			BitSetOrSet intersectedIds = federatedQueryExec(row, intersector, queryIdWithParts);
+
+			if ( INFO_ENABLED ) {
+				this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime;
+			}
+
+			cellL.clear(); //Clear all data
+			byte[] value = getOneRowBytes(intersectedIds, this.queryPayload);
+			cellL.add(new KeyValue(row, firstFamily, firstCol, value) );
+
+		} catch (Exception ex) {
+			ex.printStackTrace(System.err);
+			HSearchLog.l.fatal(ex);
+		}
+	}
+
+	private final BitSetOrSet federatedQueryExec(final byte[] row,
+		final HSearchTableMultiQueryExecutor intersector,
+		final Map<String, HSearchTableParts> queryData) throws Exception {
+
+		BitSetOrSet intersectedIds = intersector.execute(
+			queryData, this.multiQuery, this.queryPayload, inputMapperInstructions);
+
+		if ( DEBUG_ENABLED ) {
+			boolean hasMatchingIds = ( null != intersectedIds && intersectedIds.size() > 0 );
+			HSearchLog.l.debug("Generic filter hasMatchingIds :" + hasMatchingIds + " objectid=" + intersectedIds.hashCode());
+			if ( hasMatchingIds ) HSearchLog.l.debug( new String(row) + " has ids of :" + intersectedIds.size());
+		}
+
+		return intersectedIds;
+	}
+
+	@Override
+	public final boolean hasFilterRow() {
+		return true;
+	}
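+
+	// filterRowKey contract: returning true SKIPS the row, false keeps it;
+	// a null rowsToInclude (no row scoping) therefore keeps every row.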
+
+	@Override
+	public final boolean filterRowKey(final byte[] rowKey, final int offset, final int length) {
+
+		if (DEBUG_ENABLED) {
+			int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize();
+			HSearchLog.l.debug("Analyzing row for processing: " + new String(rowKey) + " , From a matching set of " + scopeToTheseRowsT);
+		}
+
+		if ( null == rowsToInclude) return false;
+
+		byte[] exactRowBytes = new byte[length];
+		try {
+			System.arraycopy(rowKey, offset, exactRowBytes, 0, length);
+			if ( rowsToInclude.getEqualToIndex(exactRowBytes) == -1) return true;
+			return false;
+
+		} catch (IOException ex) {
+			int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize();
+			String rowKeyStr = ( null == rowKey) ? "Null row key" : new String(rowKey);
+			String errMsg = "Error while finding filtration criteria for the row , " + rowKeyStr +
+				"\n" + ex.getMessage() + "\n" +
+				"With search scope inside id count : " + scopeToTheseRowsT;
+			System.err.println(errMsg);
+			HSearchLog.l.fatal(errMsg, ex);
+
+			return false;
+		}
+	}
+
+	/**
+	 *******************************************************************************************
+	 * COMPUTATIONS
+	 * Step 1 - HSearch Table merge
+	 *******************************************************************************************
+	 */
+
+	/**
+	 * *|*|architect|age
+	 * AND
+	 * *|*|developer|age
+	 *
+	 * @param matchedIds
+	 * @param queryPayload
+	 * @return
+	 * @throws IOException
+	 */
+	public final byte[] getOneRowBytes( final BitSetOrSet matchedIds, final Map<String, QueryPart> queryPayload) throws IOException {
+
+		if ( DEBUG_ENABLED ) {
+			int matchedIdsT = ( null == matchedIds) ? 0 : matchedIds.size();
+			HSearchLog.l.debug("HSearchGenericFilter:serialize : with matchedIds " + matchedIdsT + ", Object:" + (( null == matchedIds) ? 0 : matchedIds.hashCode()));
+			if ( null != matchedIds && null != matchedIds.getDocumentIds()) {
+				HSearchLog.l.debug("HSearchGenericFilter: DocumentIds size " + matchedIds.getDocumentIds().size() + " and matchedId size " + matchedIds.size());
+			} else if ( null != matchedIds && null != matchedIds.getDocumentSequences()) {
+				HSearchLog.l.debug("HSearchGenericFilter: DocumentSequences cardinality " + matchedIds.getDocumentSequences().cardinality());
+			}
+		}
+
+		/**
+		 * - Iterate through all the parts and find the values.
+ * - Collect the data for multiple queries + */ + HSearchReducer reducer = getReducer(); + int totalQueries = queryPayload.size(); + + columnsOfOneRowAfterJoin.clear(); + long monitorStartTime = 0L; + + if ( totalQueries == 1) { + + if(DEBUG_ENABLED) + HSearchLog.l.debug("HSearchGenericFilter: processing single query."); + + Object pluginO = queryPayload.values().iterator().next().getParams().get( + HSearchTableMultiQueryExecutor.PLUGIN); + IHSearchPlugin plugin = (IHSearchPlugin) pluginO; + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + plugin.getResultSingleQuery(columnsOfOneRowAfterJoin); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + + } else { + + if(DEBUG_ENABLED) + HSearchLog.l.debug("HSearchGenericFilter: processing multiple query."); + + StatementWithOutput[] stmtWithOutputs = new StatementWithOutput[totalQueries]; + int seq = 0; + + for (QueryPart part : queryPayload.values()) { + + Object pluginO = part.getParams().get(HSearchTableMultiQueryExecutor.PLUGIN); + IHSearchPlugin plugin = (IHSearchPlugin) pluginO; + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + Collection queryOutput = this.stmtOutputContainers.get(seq); + queryOutput.clear(); //Clear to reuse + + if(DEBUG_ENABLED) + HSearchLog.l.debug("HSearchGenericFilter: Calling getResultMultiQuery for " + part.aStmtOrValue); + + plugin.getResultMultiQuery(matchedIds, queryOutput); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + stmtWithOutputs[seq] = new StatementWithOutput(part.aStmtOrValue, queryOutput); + seq++; + } + + if ( INFO_ENABLED ) { + monitorStartTime = System.currentTimeMillis(); + } + + if(DEBUG_ENABLED) + HSearchLog.l.debug("HSearchGenericFilter: Calling reducer.appendQueries "); + + reducer.appendQueries(columnsOfOneRowAfterJoin, stmtWithOutputs); + + if ( INFO_ENABLED ) { + this.pluginExecutionTime += System.currentTimeMillis() - monitorStartTime; + } + + for (StatementWithOutput stmtWithOutput : stmtWithOutputs) { + if ( null != stmtWithOutput.cells ) stmtWithOutput.cells.clear(); + } + } + + //Put it to Bytes + byte[] processedRowBytes = rowBytesPacker.toBytes(columnsOfOneRowAfterJoin); + columnsOfOneRowAfterJoin.clear(); + + return processedRowBytes; + } + + public final void deserialize(final byte[] input, final Collection output) throws IOException { + SortedBytesArray.getInstance().parse(input).values(output); + } + + public abstract HSearchTableMultiQueryExecutor createExecutor(); + public abstract IHSearchPlugin createPlugIn(String type) throws IOException ; + public abstract HSearchReducer getReducer(); + + /** + * Override this method if you want to set more filters in processing. + * + FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL); + RowFilter filter1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-22")) ); + list.addFilter(filter1); + list.addFilter(this); + return list; + + * @return + */ + public final FilterList getFilters() { + return null; + } + + /** + * Any information to be configured before starting the filtration process. + */ + public final void configure() { + } + + /** + * At the end release the resources. 
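+	 * Clears all client- and server-side containers so the instance can be reused.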
+ */ + public final void close() { + if ( null != queryFilters) queryFilters.clear(); + if ( null != queryPayload) queryPayload.clear(); + if ( null != colIdWithType) colIdWithType.clear(); + if ( null != queryIdWithParts) queryIdWithParts.clear(); + if ( null != colNamesWithPartitionBytes) colNamesWithPartitionBytes.clear(); + if ( null != columnsOfOneRowAfterJoin) columnsOfOneRowAfterJoin.clear(); + if ( null != stmtOutputContainers) stmtOutputContainers.clear(); + } + + +} \ No newline at end of file diff --git a/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoProcessorProxy.java b/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoProcessorProxy.java new file mode 100644 index 0000000..0941890 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoProcessorProxy.java @@ -0,0 +1,119 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.treetable.BytesSection; +import com.bizosys.hsearch.treetable.Cell2; +import com.bizosys.hsearch.treetable.CellKeyValue; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchMultiGetCoprocessorProxyService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage; +import com.bizosys.hsearch.util.HSearchLog; +import com.google.protobuf.ByteString; + +public final class HSearchMultiGetCoProcessorProxy { + + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + HSearchScalarFilter filter = null; + ColumnFamName columnFamName = null; + byte[][] rows = null; + + public HSearchMultiGetCoProcessorProxy(final ColumnFamName columnFamName , + final HSearchScalarFilter filter, byte[][] rows) throws IOException { + + this.filter = filter; + if (null == columnFamName) throw new IOException("Please provide family details. 
Scan on all cols are not allowed"); + this.columnFamName = columnFamName; + this.rows = rows; + } + + public final void execCoprocessorRows( Map kvs, + final HTableWrapper table, final byte[] row) throws IOException, Throwable { + + MultiRowRequest.Builder requestBuilder = MultiRowRequest.newBuilder(); + Column.Builder columnBuilder = Column.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(columnFamName.family)); + columnBuilder.setQualifier(ByteString.copyFrom(columnFamName.name)); + Column column = columnBuilder.build(); + requestBuilder.setFamilyWithQualifier(column); + + int rowsT = this.rows.length; + for(int i = 0; i < rowsT; i++){ + requestBuilder.addRows(ByteString.copyFrom(this.rows[i])); + } + + if(null != filter){ + HSearchScalarFilterMessage filterMessage = HSearchScalarFilter.getScalarFilterMessage(filter); + requestBuilder.setFilter(filterMessage); + } + + final MultiRowRequest request = requestBuilder.build(); + Batch.Call callable = + new Batch.Call(){ + + @Override + public byte[] call(HSearchMultiGetCoprocessorProxyService instance)throws IOException { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback rpcCallback = new BlockingRpcCallback(); + instance.getRows(controller, request, rpcCallback); + RowResponse response = rpcCallback.get(); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + + ByteString result = response.getResult(); + if (!result.isEmpty()) { + return result.toByteArray(); + } + + return null; + } + }; + + Map output = table.tableInterface.coprocessorService( + HSearchMultiGetCoprocessorProxyService.class,//coprocessor pf service class + row,//Start Row Key + row,//End Row key + callable);//CallBack + + + for (byte[] bs : output.keySet()) { + Cell2 cell2 = new Cell2(byte[].class, byte[].class); + cell2.data = new BytesSection(output.get(bs) ); + cell2.parseElements(); + for (CellKeyValue kv: cell2.sortedList) { + kvs.put(new String(kv.getKey()), kv.getValue()); + } + } + } +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoprocessor.java b/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoprocessor.java new file mode 100644 index 0000000..4f5ee7c --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoprocessor.java @@ -0,0 +1,182 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.CoprocessorException; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +import com.bizosys.hsearch.treetable.Cell2; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.Column; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.HSearchMultiGetCoprocessorProxyService; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.MultiRowRequest; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchCoprocessorProtos.RowResponse; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage; +import com.bizosys.hsearch.util.HSearchConfig; +import com.bizosys.hsearch.util.HSearchLog; +import com.bizosys.hsearch.util.conf.Configuration; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +public final class HSearchMultiGetCoprocessor extends HSearchMultiGetCoprocessorProxyService +implements CoprocessorService, Coprocessor { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + + public boolean scannerBlockCaching = true; + public int scannerBlockCachingLimit = 1; + private RegionCoprocessorEnvironment env = null; + + public HSearchMultiGetCoprocessor() { + Configuration config = HSearchConfig.getInstance().getConfiguration(); + this.scannerBlockCaching = config.getBoolean("scanner.block.caching", true); + this.scannerBlockCachingLimit = config.getInt("scanner.block.caching.amount", 1); + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + if (env instanceof RegionCoprocessorEnvironment) { + this.env = (RegionCoprocessorEnvironment)env; + } else { + throw new CoprocessorException("Must be loaded on a table region!"); + } + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + } + + @Override + public Service getService() { + return this; + } + + /** + * Get Matching rows + * @param filter + * @return + */ + @Override + public void getRows(RpcController controller, MultiRowRequest request, RpcCallback callBack) { + if ( DEBUG_ENABLED ) HSearchLog.l.debug( Thread.currentThread().getName() + " @ coprocessor : getRows"); + + InternalScanner scanner = null; + HSearchScalarFilter filter = null; + RowResponse response = null; + + try { + Scan scan = new Scan(); + scan.setCacheBlocks(scannerBlockCaching); + scan.setCaching(scannerBlockCachingLimit); + scan.setMaxVersions(1); + + Column column = request.getFamilyWithQualifier(); + scan.addColumn(column.getFamily().toByteArray(), column.getQualifier().toByteArray()); + + HSearchScalarFilterMessage filterMessage = request.getFilter(); + filter = HSearchScalarFilter.getScalarFilter(filterMessage); + + if ( null != filter) { + 
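+				// Prefer the composite FilterList when the subclass supplies one;
+				// otherwise install the scalar filter itself on the scan.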
FilterList filterL = filter.getFilters(); + if ( null != filterL) scan = scan.setFilter(filterL); + else scan = scan.setFilter(filter); + } + + RegionCoprocessorEnvironment environment = env; + List finalVals = new ArrayList(); + List curVals = new ArrayList(); + + List rows = request.getRowsList(); + byte[] row = null; + boolean hasMoreRows = false; + + for (ByteString rowBS : rows) { + row = rowBS.toByteArray(); + hasMoreRows = false; + scan.setStartRow(row); + scan.setStopRow(row); + + scanner = environment.getRegion().getScanner(scan); + do { + + curVals.clear(); + hasMoreRows = scanner.next(curVals); + if ( curVals.size() == 0 ) continue; + finalVals.addAll(curVals); + + } while (hasMoreRows); + scanner.close(); + scanner = null; + } + + Cell2 container = new Cell2(byte[].class, byte[].class); + for (Cell cell : finalVals) { + byte[] key = cell.getRowArray(); + byte[] val = CellUtil.cloneValue(cell); + + if ( null == key || null == val) continue; + if ( key.length == 0 || val.length == 0 ) continue; + + container.add(key, val); + } + + byte[] data = container.toBytesOnSortedData(); + if (data != null) { + RowResponse.Builder builder = RowResponse.newBuilder(); + builder.setResult(ByteString.copyFrom(data)); + response = builder.build(); + + if(DEBUG_ENABLED) + HSearchLog.l.debug("Row response length from " + env.getRegion().getRegionNameAsString() + ": " + data.length); + } + + callBack.run(response); + + } catch (Exception e) { + + HSearchLog.l.fatal("Error fetching rows in coprocessor : " + e.getLocalizedMessage()); + ResponseConverter.setControllerException(controller, new IOException(e)); + + } finally { + if ( null != scanner) { + try { + scanner.close(); + } catch (Exception ex) { + ex.printStackTrace(System.err); + } + } + } + } +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoprocessorI.java b/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoprocessorI.java new file mode 100644 index 0000000..fd26cbc --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchMultiGetCoprocessorI.java @@ -0,0 +1,11 @@ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; + +import org.apache.hadoop.hbase.filter.Filter; + + +public interface HSearchMultiGetCoprocessorI{ + byte[] getRows(final byte[][] families, final byte[][] cols, + final Filter filter, final byte[][] rows) throws IOException; +} diff --git a/src/compatibility/hadooplib_96/storage/HSearchScalarFilter.java b/src/compatibility/hadooplib_96/storage/HSearchScalarFilter.java new file mode 100644 index 0000000..5828ed1 --- /dev/null +++ b/src/compatibility/hadooplib_96/storage/HSearchScalarFilter.java @@ -0,0 +1,343 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.filter.FilterBase; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.PrefixFilter; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchQuery; +import com.bizosys.hsearch.treetable.client.IHSearchPlugin; +import com.bizosys.hsearch.treetable.client.IHSearchTable; +import com.bizosys.hsearch.treetable.storage.protobuf.generated.HSearchFilterProtos.HSearchScalarFilterMessage; +import com.bizosys.hsearch.util.HSearchLog; +import com.google.protobuf.ByteString; + +/** + * @author abinash + * + */ +public abstract class HSearchScalarFilter extends FilterBase { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + /** + * Input Variables + */ + String multiQuery = null; + String name = null; + + public long pluginExecutionTime = 0L; + public long overallExecutionTime = 0L; + + protected HSearchProcessingInstruction inputMapperInstructions = new HSearchProcessingInstruction(); + byte[] inputRowsToIncludeB = null; + List inputRowsList = null; + SortedBytesArray rowsToInclude = null; + byte[] matchingIds = null; + + /** + * Output Variables + */ + + HSearchQuery query = null; + IHSearchTable table = null; + IHSearchPlugin plugin = null; + boolean skipFiltering = true; + Collection dataCarrier = new ArrayList(); + + + public HSearchScalarFilter(){} + + public HSearchScalarFilter(final HSearchProcessingInstruction outputType,final String query) { + this.multiQuery = query; + this.inputMapperInstructions = outputType; + } + + public void setMatchingRows(List inputRowsList) { + this.inputRowsList = inputRowsList; + } + + public void setMatchingIds(byte[] matchingIds) { + this.matchingIds = matchingIds; + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + if ( null == name) { + name = this.getClass().getName(); + } + return this.name; + } + + /** + * @return The filter serialized using pb + * @throws IOException + */ + @Override + public byte [] toByteArray() throws IOException { + + HSearchScalarFilterMessage filterMessage = getScalarFilterMessage(this); + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Sending to HBase : " + filterMessage.toString()); + } + + return filterMessage.toByteArray(); + } + + public static HSearchScalarFilterMessage getScalarFilterMessage(HSearchScalarFilter instance) + throws IOException { + + HSearchScalarFilterMessage.Builder builder = HSearchScalarFilterMessage.newBuilder(); + + builder.setFilterClassName(instance.getClass().getName()); + builder.setInputMapperInstructions(instance.inputMapperInstructions.toString()); + builder.setMultiQuery(instance.multiQuery); + + if(null != instance.matchingIds) + builder.setMatchingIds(ByteString.copyFrom(instance.matchingIds)); + else + builder.setMatchingIds(ByteString.copyFrom(new byte[0])); + + if ( null != instance.inputRowsList) { + if ( instance.inputRowsList.size() > 0 ) { + instance.inputRowsToIncludeB = 
SortedBytesArray.getInstanceArr().toBytes(instance.inputRowsList); + builder.setInputRowsToIncludeB(ByteString.copyFrom(instance.inputRowsToIncludeB)); + } + } + return builder.build(); + } + + /** + * @param pbBytes A pb serialized {@link PrefixFilter} instance + * @return An instance of {@link PrefixFilter} made from bytes + * @throws org.apache.hadoop.hbase.exceptions.DeserializationException + * + */ + public static HSearchScalarFilter parseFrom(final byte [] pbBytes)throws DeserializationException { + + HSearchScalarFilter scalarFilter = null; + + try { + + int length = null == pbBytes ? 0 : pbBytes.length; + if(0 == length) + throw new IOException("Invalid Query"); + + + if ( DEBUG_ENABLED) { + HSearchLog.l.debug("Total bytes Received @ HSearchScalarFilter:" + length); + } + + HSearchScalarFilterMessage filterMessage = HSearchScalarFilterMessage.parseFrom(pbBytes); + scalarFilter = getScalarFilter(filterMessage); + + } catch (Exception ex) { + HSearchLog.l.fatal(ex); + ex.printStackTrace(); + throw new DeserializationException(ex); + } + + return scalarFilter; + } + + public static HSearchScalarFilter getScalarFilter(HSearchScalarFilterMessage filterMessage) + throws InstantiationException, IllegalAccessException, + ClassNotFoundException, IOException, ParseException { + + String className = filterMessage.getFilterClassName(); + //TODO:Create factory for Generic filter instead of instantiating using reflection + HSearchScalarFilter scalarFilter = (HSearchScalarFilter) Class.forName(className).newInstance(); + + + if(filterMessage.hasInputRowsToIncludeB()){ + scalarFilter.rowsToInclude = SortedBytesArray.getInstanceArr(); + scalarFilter.rowsToInclude.parse(filterMessage.getInputRowsToIncludeB().toByteArray()); + } + + scalarFilter.multiQuery = filterMessage.getMultiQuery(); + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("HBase Region Server: Multi Query" + scalarFilter.multiQuery); + } + + String instruction = filterMessage.getInputMapperInstructions(); + if (instruction.length() == 0 ) throw new IOException("Unknown result output type."); + + scalarFilter.inputMapperInstructions = new HSearchProcessingInstruction(instruction); + + //matching ids + scalarFilter.matchingIds = filterMessage.getMatchingIds().toByteArray(); + + scalarFilter.query = new HSearchQuery(scalarFilter.multiQuery); + scalarFilter.table = scalarFilter.createTable(); + if ( null != scalarFilter.table) { + scalarFilter.plugin = scalarFilter.createPlugIn(); + if ( null != scalarFilter.plugin) { + scalarFilter.plugin.setOutputType(scalarFilter.inputMapperInstructions); + if(0 != scalarFilter.matchingIds.length) + scalarFilter.plugin.setMergeId(scalarFilter.matchingIds); + scalarFilter.skipFiltering = false; + } + } + + return scalarFilter; + } + + @Override + public void filterRowCells(List cellL) throws IOException { + if ( skipFiltering ) return; + + if ( null == cellL) return; + int cellT = cellL.size(); + if ( 0 == cellT) return; + + if ( DEBUG_ENABLED ) { + HSearchLog.l.debug("Processing @ Region Server : filterRow" ); + } + + try { + + List cellLFiltered = new ArrayList(); + + for (Cell cell : cellL) { + if ( null == cell) continue; + + byte[] inputData = CellUtil.cloneValue(cell); + if ( null == inputData) continue; + + switch ( this.inputMapperInstructions.getCallbackType()) { + case HSearchProcessingInstruction.PLUGIN_CALLBACK_COLS: + table.get(inputData, this.query, plugin); + break; + case HSearchProcessingInstruction.PLUGIN_CALLBACK_ID: + table.keySet(inputData, this.query, plugin); + break; + case 
HSearchProcessingInstruction.PLUGIN_CALLBACK_VAL:
+						table.values(inputData, this.query, plugin);
+						break;
+					case HSearchProcessingInstruction.PLUGIN_CALLBACK_IDVAL:
+						table.keyValues(inputData, this.query, plugin);
+						break;
+					default:
+						throw new IOException("Unknown output type:" + this.inputMapperInstructions.getCallbackType());
+				}
+				//Collect the plugin output first, then serialize it into the replacement cell.
+				plugin.getResultSingleQuery(dataCarrier);
+				Cell newCell = CellUtil.createCell(CellUtil.cloneRow(cell), CellUtil.cloneFamily(cell),
+					CellUtil.cloneQualifier(cell), cell.getTimestamp(), cell.getTypeByte(),
+					SortedBytesArray.getInstance().toBytes(dataCarrier));
+				cellLFiltered.add(newCell);
+				dataCarrier.clear();
+			}
+			cellL.clear();
+			cellL.addAll(cellLFiltered);
+
+			if ( DEBUG_ENABLED ) {
+				HSearchLog.l.debug("queryData HSearchTableParts creation. ");
+			}
+
+		} catch (Exception ex) {
+			ex.printStackTrace(System.err);
+			HSearchLog.l.fatal(ex);
+		}
+	}
+
+	@Override
+	public final boolean hasFilterRow() {
+		return true;
+	}
+
+	@Override
+	public final boolean filterRowKey(final byte[] rowKey, final int offset, final int length) {
+
+		if ( null == rowsToInclude) return false;
+		byte[] exactRowBytes = new byte[length];
+		try {
+			System.arraycopy(rowKey, offset, exactRowBytes, 0, length);
+			if ( rowsToInclude.getEqualToIndex(exactRowBytes) >= 0 ) {
+				//System.out.println("Allow row:" + new String(exactRowBytes));
+				return false;
+			} else {
+				//System.out.println("Disallow row:" + new String(exactRowBytes));
+				return true;
+			}
+
+		} catch (IOException ex) {
+			int scopeToTheseRowsT = ( null == rowsToInclude) ? 0 : rowsToInclude.getSize();
+			String rowKeyStr = ( null == rowKey) ? "Null row key" : new String(rowKey);
+			String errMsg = "Error while finding filtration criteria for the row , " + rowKeyStr +
+				"\n" + ex.getMessage() + "\n" +
+				"With search scope inside id count : " + scopeToTheseRowsT;
+			System.err.println(errMsg);
+			HSearchLog.l.fatal(errMsg, ex);
+
+			return false;
+		}
+	}
+
+	public final void deserialize(final byte[] input, final Collection<byte[]> output) throws IOException {
+		SortedBytesArray.getInstance().parse(input).values(output);
+	}
+
+	public abstract IHSearchPlugin createPlugIn() throws IOException ;
+	public abstract IHSearchTable createTable();
+
+	/**
+	 * Override this method if you want to set more filters in processing.
+	 *
+	 FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL);
+	 RowFilter filter1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-22")) );
+	 list.addFilter(filter1);
+	 list.addFilter(this);
+	 return list;
+
+	 * @return
+	 */
+	public final FilterList getFilters() {
+		return null;
+	}
+
+	/**
+	 * Any information to be configured before starting the filtration process.
+	 */
+	public final void configure() {
+	}
+
+	/**
+	 * At the end release the resources.
+	 */
+	public final void close() {
+	}
+}
\ No newline at end of file
diff --git a/src/compatibility/hadooplib_96/storage/HSearchTableReader.java b/src/compatibility/hadooplib_96/storage/HSearchTableReader.java
new file mode 100644
index 0000000..f0e5ed5
--- /dev/null
+++ b/src/compatibility/hadooplib_96/storage/HSearchTableReader.java
@@ -0,0 +1,124 @@
+/*
+* Copyright 2010 Bizosys Technologies Limited
+*
+* Licensed to the Bizosys Technologies Limited (Bizosys) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.
The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable.storage; + +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.bizosys.hsearch.hbase.ColumnFamName; +import com.bizosys.hsearch.hbase.HBaseFacade; +import com.bizosys.hsearch.hbase.HReader; +import com.bizosys.hsearch.hbase.HTableWrapper; +import com.bizosys.hsearch.hbase.IScanCallBack; +import com.bizosys.hsearch.treetable.client.HSearchProcessingInstruction; +import com.bizosys.hsearch.treetable.client.HSearchQuery; +import com.bizosys.hsearch.util.HSearchLog; + +public abstract class HSearchTableReader implements IScanCallBack { + + public static boolean DEBUG_ENABLED = HSearchLog.l.isDebugEnabled(); + public static boolean INFO_ENABLED = HSearchLog.l.isInfoEnabled(); + + //public static ParallelHReader parallelReader = new ParallelHReader(10); + + public abstract HSearchGenericFilter getFilter(String multiQuery, Map multiQueryParts, HSearchProcessingInstruction outputType); + + public abstract void rows(Collection results, HSearchProcessingInstruction rowType); + + + @SuppressWarnings("unchecked") + public void setPartionsFamilies(String tableName, String colName, String range, Set uniqueFamilies) + throws ParseException, IOException { + + HSearchQuery query = new HSearchQuery(range); + HBaseTableSchemaDefn.getInstance(tableName).columnPartions.get(colName). + getMatchingFamilies(query, uniqueFamilies); + } + + public IScanCallBack getResultCollector() { + return this; + } + + @Override + public void process(byte[] pk, ColumnFamName fn, byte[] storedBytes) throws IOException { + + int length = ( null == storedBytes ) ? 
0 : storedBytes.length; + if ( length == 0 ) return; + + if ( DEBUG_ENABLED ) HSearchLog.l.debug("Found Primary Key :" + new String(pk) + "/" + length); + } + + + + public void read( String tableName, String multiQuery, Map multiQueryParts, + HSearchProcessingInstruction outputType, boolean isPartitioned, boolean isParallel) + throws IOException, ParseException { + + HSearchGenericFilter filter = getFilter(multiQuery, multiQueryParts, outputType); + + Set uniqueFamilies = new HashSet(3); + + for ( String colNameQuolonId : multiQueryParts.keySet() ) { + + int colNameAndQIdSplitIndex = colNameQuolonId.indexOf(':'); + if ( -1 == colNameAndQIdSplitIndex || colNameQuolonId.length() - 1 == colNameAndQIdSplitIndex) { + throw new IOException("Sub queries expected as X:Y eg.\n" + + "structured:A OR unstructured:B\nstructured:A=f|1|1|1|c|*|*\nunstructured:B=*|*|*|*|*|*"); + } + String colName = colNameQuolonId.substring(0,colNameAndQIdSplitIndex); + setPartionsFamilies(tableName, colName, multiQueryParts.get(colNameQuolonId),uniqueFamilies); + } + + List families = new ArrayList(); + for (String family : uniqueFamilies) { + if ( INFO_ENABLED ) HSearchLog.l.info("HSearchTableReader > Adding Family: " + family); + families.add(new ColumnFamName( family.getBytes(), + new String( new char[] {HBaseTableSchemaDefn.getColumnName()}).getBytes() ) ); + } + + IScanCallBack recordsCollector = getResultCollector(); + + if ( isParallel ) { + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchTableReader > Searching in parallel."); + HTableWrapper table = HBaseFacade.getInstance().getTable(tableName); + + try { + + HSearchGenericCoProcessorFactory factory = new HSearchGenericCoProcessorFactory(families, filter); + Collection output = factory.execCoprocessorRows(table); + rows(output, outputType ); + + } catch (Throwable th) { + throw new IOException(th); + } + + } else { + if ( DEBUG_ENABLED ) HSearchLog.l.debug("HSearchTableReader > Searching in Sequential."); + HReader.getAllValues(tableName,families, filter, recordsCollector); + } + } +} + diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesBase.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesBase.java new file mode 100644 index 0000000..1b9c187 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesBase.java @@ -0,0 +1,303 @@ +package com.bizosys.hsearch.byteutils.vs; +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import com.bizosys.hsearch.byteutils.ISortedByte;
+
+public abstract class PositionalBytesBase<T> implements ISortedByte<T> {
+
+	public static final class Reference {
+		public int offset;
+		public int length;
+
+		public Reference() {
+		}
+
+		public Reference(final int offset, final int length ) {
+			this.offset = offset;
+			this.length = length;
+		}
+
+		public void set(final int offset, final int length ) {
+			this.offset = offset;
+			this.length = length;
+		}
+
+	}
+
+	protected byte[] inputBytes = null;
+	protected int offset = 0;
+	protected int length = -1;
+	protected int dataSize = -1;
+
+	public T defaultValue = null;
+	public byte[] defaultValueB = null;
+
+	public abstract int compare(byte[] inputB, int offset, T matchNo);
+	public abstract boolean isEqual(T firstValue, T secondValue);
+
+	@Override
+	public ISortedByte<T> parse(final byte[] bytes) throws IOException {
+		this.inputBytes = bytes;
+		this.offset = 0;
+		this.length = ( null == bytes) ? 0 : bytes.length;
+		return this;
+	}
+
+	@Override
+	public ISortedByte<T> parse(final byte[] bytes, final int offset, final int length) throws IOException {
+		this.inputBytes = bytes;
+		this.offset = offset;
+		this.length = length;
+		return this;
+	}
+
+	@Override
+	public int getSize() throws IOException {
+		if ( null == this.inputBytes) return 0;
+		int total = this.length / dataSize;
+		if ( total < 0 ) throw new IOException("Invalid size, offset is out of range. Length, " +
+			this.length + " , Offset " + this.offset + " , Size " + dataSize);
+		return total;
+	}
+
+	@Override
+	public void addAll(Collection<T> vals) throws IOException {
+		if ( null == this.inputBytes ) return;
+		int total = getSize();
+		T value = null;
+		for ( int index=0; index<total; index++) {
+			value = getValueAt(index);
+			vals.add(value);
+		}
+	}
+
+	@Override
+	public Collection<T> values() throws IOException {
+		List<T> vals = new ArrayList<T>();
+		return values(vals);
+	}
+
+	@Override
+	public Collection<T> values(Collection<T> vals) throws IOException {
+		if ( null == this.inputBytes ) return vals;
+		int total = getSize();
+		T value = null;
+
+		for ( int index=0; index<total; index++) {
+			value = getValueAt(index);
+			vals.add(value);
+		}
+		return vals;
+	}
+
+	@Override
+	public Collection<Integer> getEqualToIndexes(final T matchNo) throws IOException {
+		Collection<Integer> matchingPos = new ArrayList<Integer>();
+		getEqualToIndexes(matchNo, matchingPos);
+		return matchingPos;
+	}
+
+	@Override
+	public void getEqualToIndexes(final T matchingNo, final Collection<Integer> matchings) throws IOException {
+
+		int intBT = getSize();
+		for (int i = 0; i < intBT; i++) {
+			if(compare(inputBytes, offset + i * dataSize, matchingNo) == 0)
+				matchings.add(i);
+		}
+	}
+
+	@Override
+	public void getNotEqualToIndexes(final T matchingNo, final Collection<Integer> matchings) throws IOException {
+
+		int intBT = getSize();
+		for (int i = 0; i < intBT; i++) {
+
+			if(compare(inputBytes, offset + i * dataSize, matchingNo) == 0)
+				continue;
+
+			matchings.add(i);
+		}
+	}
+
+	@Override
+	public Collection<Integer> getGreaterThanIndexes(T matchNo) throws IOException {
+		Collection<Integer> matchingPos = new ArrayList<Integer>();
+		getGreaterThanIndexes(matchNo, matchingPos);
+		return matchingPos;
+	}
+
+	@Override
+	public void getGreaterThanIndexes(T matchNo, Collection<Integer> matchingPos) throws IOException {
+		this.computeGTGTEQIndexes(matchNo, matchingPos, false);
+	}
+
+	@Override
+	public Collection<Integer> getGreaterThanEqualToIndexes(T matchNo) throws IOException {
+		Collection<Integer> matchingPos = new ArrayList<Integer>();
+		getGreaterThanEqualToIndexes(matchNo, matchingPos);
+		return matchingPos;
+	}
+
+	@Override
+	public void getGreaterThanEqualToIndexes(T matchNo, Collection<Integer> matchingPos) throws IOException {
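+		// isEqualCheck=true makes the comparison inclusive, keeping exact matches too.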
+ this.computeGTGTEQIndexes(matchNo, matchingPos, true); + } + + protected final void computeGTGTEQIndexes(final T matchingNo, final Collection matchingPos, final boolean isEqualCheck) throws IOException { + + int totalSize = getSize(); + if ( totalSize <= 0 ) return; + int isSame = -1; + boolean includeMacthing = false; + + for (int i = 0; i < totalSize; i++) { + isSame = compare(inputBytes, offset + i * dataSize, matchingNo); + includeMacthing = (isEqualCheck) ? isSame >= 0 : isSame > 0; + if(includeMacthing) + matchingPos.add(i); + } + } + + @Override + public Collection getLessThanIndexes(T matchingNo) throws IOException { + Collection matchingPos = new ArrayList(); + getLessThanIndexes(matchingNo,matchingPos) ; + return matchingPos; + } + + @Override + public void getLessThanIndexes(T matchingNo, Collection matchingPos ) throws IOException { + computeLTLTEQIndexes(matchingNo, matchingPos, false); + } + + @Override + public Collection getLessThanEqualToIndexes(T matchingNo) throws IOException { + Collection matchingPos = new ArrayList(); + getLessThanEqualToIndexes(matchingNo,matchingPos) ; + return matchingPos; + } + + @Override + public void getLessThanEqualToIndexes(T matchingNo, Collection matchingPos) throws IOException { + computeLTLTEQIndexes(matchingNo, matchingPos, true); + } + + protected final void computeLTLTEQIndexes(final T matchingNo, + final Collection matchingPos, final boolean isEqualCheck) throws IOException { + + int totalSize = getSize(); + if ( totalSize <= 0 ) return; + + int isSame = -1; + boolean includeMacthing = false; + + for (int i = 0; i < totalSize; i++) { + isSame = compare(inputBytes, offset + i * dataSize, matchingNo); + includeMacthing = (isEqualCheck) ? isSame <= 0 : isSame < 0; + if(includeMacthing) + matchingPos.add(i); + } + } + + + @Override + public Collection getRangeIndexes(final T matchNoStart, final T matchNoEnd) throws IOException { + Collection matchingPos = new ArrayList(); + getRangeIndexes(matchNoStart, matchNoEnd, matchingPos); + return matchingPos; + + } + + @Override + public void getRangeIndexes(final T matchNoStart, final T matchNoEnd, final Collection matchings) throws IOException { + computeRangeIndexes(matchNoStart, matchNoEnd, false, false, matchings); + } + + @Override + public Collection getRangeIndexesInclusive(final T matchNoStart, final T matchNoEnd) throws IOException { + Collection matchingPos = new ArrayList(); + getRangeIndexesInclusive(matchNoStart, matchNoEnd, matchingPos); + return matchingPos; + } + + public void getRangeIndexesInclusive(final T matchNoStart, final T matchNoEnd, final Collection matchings) throws IOException { + computeRangeIndexes(matchNoStart, matchNoEnd, true, true, matchings); + } + + @Override + public Collection getRangeIndexesInclusive(final T matchNoStart, + final boolean startMatch, final T matchNoEnd, final boolean endMatch) throws IOException { + Collection matchingPos = new ArrayList(); + getRangeIndexesInclusive(matchNoStart, startMatch, matchNoEnd, endMatch, matchingPos); + return matchingPos; + } + + @Override + public void getRangeIndexesInclusive(final T matchNoStart, final boolean startMatch, + final T matchNoEnd, final boolean endMatch, final Collection matchings) throws IOException { + computeRangeIndexes(matchNoStart, matchNoEnd, startMatch, endMatch, matchings); + } + + protected void computeRangeIndexes(final T matchingValS, final T matchingValE, + final boolean isStartInclusive, final boolean isEndInclusive, final Collection matchingPos) throws IOException { + + int 
totalSize = getSize(); + if ( totalSize <= 0 ) return; + + int isSameS = -1; + int isSameE = -1; + boolean includeMacthingS = false; + boolean includeMacthingE = false; + + for (int i = 0; i < totalSize; i++) { + isSameS = compare(inputBytes, offset + i * dataSize, matchingValS); + isSameE = compare(inputBytes, offset + i * dataSize, matchingValE); + includeMacthingS = (isStartInclusive) ? isSameS >= 0 : isSameS > 0; + includeMacthingE = (isEndInclusive) ? isSameE <= 0 : isSameE < 0; + if(includeMacthingS && includeMacthingE) + matchingPos.add(i); + } + } + +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesByte.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesByte.java new file mode 100644 index 0000000..4433027 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesByte.java @@ -0,0 +1,75 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Collection; + +import com.bizosys.hsearch.byteutils.ISortedByte; + +public final class PositionalBytesByte extends PositionalBytesBase{ + + public static final ISortedByte getInstance(byte defaultValue) { + return new PositionalBytesByte(defaultValue); + } + + private PositionalBytesByte(byte defaultValue){ + this.dataSize = 1; + this.defaultValue = defaultValue; + this.defaultValueB = new byte[]{defaultValue}; + } + + @Override + public byte[] toBytes(Collection sortedCollection)throws IOException { + + int maxIndex = sortedCollection.size(); + int length = maxIndex * dataSize; + inputBytes = new byte[length]; + int index = 0; + for (Byte input : sortedCollection) { + if(null == input) + inputBytes[index++] = defaultValueB[0]; + else + inputBytes[index++] = input; + } + + return inputBytes; + } + + + @Override + public Byte getValueAt(int pos) throws IndexOutOfBoundsException { + return inputBytes[this.offset + pos]; + } + + @Override + public int compare(byte[] inputB, int offset, Byte matchNo) { + byte val = inputB[offset]; + if ( val == matchNo.byteValue()) return 0; + if (val > matchNo.byteValue()) return 1; + return -1; + } + + @Override + public boolean isEqual(Byte firstValue, Byte secondValue) { + return firstValue.byteValue() == secondValue.byteValue(); + } + +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesDouble.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesDouble.java new file mode 100644 index 0000000..a1e3ae8 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesDouble.java @@ -0,0 +1,76 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more 
contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Collection; + +import com.bizosys.hsearch.byteutils.ByteUtil; +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.Storable; + +public final class PositionalBytesDouble extends PositionalBytesBase{ + + public static final ISortedByte getInstance(double defaultValue) { + return new PositionalBytesDouble(defaultValue); + } + + private PositionalBytesDouble(double defaultValue){ + this.dataSize = 8; + this.defaultValue = defaultValue; + this.defaultValueB = ByteUtil.toBytes(defaultValue); + } + + @Override + public byte[] toBytes(Collection sortedCollection)throws IOException { + + int maxIndex = sortedCollection.size(); + int length = maxIndex * dataSize; + inputBytes = new byte[length]; + int index = 0; + for (Double input : sortedCollection) { + if(null == input) + System.arraycopy(defaultValueB, 0, inputBytes, index, dataSize); + else + System.arraycopy(ByteUtil.toBytes(input), 0, inputBytes, index, dataSize); + + index = index + dataSize; + } + + return inputBytes; + } + + + @Override + public Double getValueAt(int pos) throws IndexOutOfBoundsException { + return Storable.getDouble(this.offset + pos*dataSize, inputBytes); + } + + @Override + public int compare(byte[] inputB, int offset, Double matchNo) { + double val = Storable.getDouble(offset, inputB); + return Double.compare(val, matchNo); + } + + @Override + public boolean isEqual(Double firstNumber, Double secondNumber) { + return firstNumber.doubleValue() == secondNumber.doubleValue(); + } +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesFloat.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesFloat.java new file mode 100644 index 0000000..80c10e6 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesFloat.java @@ -0,0 +1,77 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Collection; + +import com.bizosys.hsearch.byteutils.ByteUtil; +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.Storable; + +public final class PositionalBytesFloat extends PositionalBytesBase{ + + public static final ISortedByte getInstance(float defaultValue) { + return new PositionalBytesFloat(defaultValue); + } + + private PositionalBytesFloat(float defaultValue){ + this.dataSize = 4; + this.defaultValue = defaultValue; + this.defaultValueB = ByteUtil.toBytes(defaultValue); + } + + @Override + public byte[] toBytes(Collection sortedCollection)throws IOException { + + int maxIndex = sortedCollection.size(); + int length = maxIndex * dataSize; + inputBytes = new byte[length]; + int index = 0; + for (Float input : sortedCollection) { + if(null == input) + System.arraycopy(defaultValueB, 0, inputBytes, index, dataSize); + else + System.arraycopy(ByteUtil.toBytes(input), 0, inputBytes, index, dataSize); + + index = index + dataSize; + } + + return inputBytes; + } + + + @Override + public Float getValueAt(int pos) throws IndexOutOfBoundsException { + return Storable.getFloat(this.offset + pos*dataSize, inputBytes); + } + + @Override + public int compare(byte[] inputB, int offset, Float matchNo) { + float val = Storable.getFloat(offset, inputB); + return Float.compare(val, matchNo); + } + + @Override + public boolean isEqual(Float firstNumber, Float secondNumber) { + return firstNumber.floatValue() == secondNumber.floatValue(); + } + +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesInteger.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesInteger.java new file mode 100644 index 0000000..1843535 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesInteger.java @@ -0,0 +1,79 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Collection; + +import com.bizosys.hsearch.byteutils.ByteUtil; +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.Storable; + +public final class PositionalBytesInteger extends PositionalBytesBase{ + + public static final ISortedByte getInstance(int defaultValue) { + return new PositionalBytesInteger(defaultValue); + } + + private PositionalBytesInteger(int defaultValue){ + this.dataSize = 4; + this.defaultValue = defaultValue; + this.defaultValueB = ByteUtil.toBytes(defaultValue); + } + + @Override + public byte[] toBytes(Collection sortedCollection)throws IOException { + + int maxIndex = sortedCollection.size(); + int length = maxIndex * dataSize; + inputBytes = new byte[length]; + int index = 0; + for (Integer input : sortedCollection) { + if(null == input){ + System.arraycopy(defaultValueB, 0, inputBytes, index, dataSize); + } + else{ + System.arraycopy(ByteUtil.toBytes(input), 0, inputBytes, index, dataSize); + } + index = index + dataSize; + } + + return inputBytes; + } + + + @Override + public Integer getValueAt(int pos) throws IndexOutOfBoundsException { + return Storable.getInt(this.offset + pos*dataSize, inputBytes); + } + + @Override + public int compare(byte[] inputB, int offset, Integer matchNo) { + int val = Storable.getInt(offset, inputB); + if ( val == matchNo) return 0; + if (val > matchNo) return 1; + return -1; + } + + @Override + public boolean isEqual(Integer firstNumber, Integer secondNumber) { + return firstNumber.intValue() == secondNumber.intValue(); + } +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesLong.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesLong.java new file mode 100644 index 0000000..005458c --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesLong.java @@ -0,0 +1,78 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
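PositionalBytesInteger above spells the three-way comparison out as an ==/>/< cascade, presumably because the codebase predates reliance on Java 7. The wrapper classes offer the same contract, so where Java 7 is available the cascade collapses to Integer.compare (an equivalent, not the committed code); a quick check of the equivalence:

```java
public class CompareSketch {
    // The hand-rolled cascade from the patch, extracted for comparison.
    static int manual(int val, int matchNo) {
        if (val == matchNo) return 0;
        if (val > matchNo) return 1;
        return -1;
    }

    public static void main(String[] args) {
        // Integer.compare (Java 7+) has the identical -1/0/1 contract.
        for (int[] p : new int[][] {{3, 5}, {5, 5}, {7, 5}}) {
            assert manual(p[0], p[1]) == Integer.compare(p[0], p[1]);
            System.out.println(p[0] + " vs " + p[1] + " -> " + Integer.compare(p[0], p[1]));
        }
    }
}
```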
+*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Collection; + +import com.bizosys.hsearch.byteutils.ByteUtil; +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.Storable; + +public final class PositionalBytesLong extends PositionalBytesBase{ + + public static final ISortedByte getInstance(long defaultValue) { + return new PositionalBytesLong(defaultValue); + } + + private PositionalBytesLong(long defaultValue){ + this.dataSize = 8; + this.defaultValue = defaultValue; + this.defaultValueB = ByteUtil.toBytes(defaultValue); + } + + @Override + public byte[] toBytes(Collection sortedCollection)throws IOException { + + int maxIndex = sortedCollection.size(); + int length = maxIndex * dataSize; + inputBytes = new byte[length]; + int index = 0; + for (Long input : sortedCollection) { + if(null == input) + System.arraycopy(defaultValueB, 0, inputBytes, index, dataSize); + else + System.arraycopy(ByteUtil.toBytes(input), 0, inputBytes, index, dataSize); + + index = index + dataSize; + } + + return inputBytes; + } + + + @Override + public Long getValueAt(int pos) throws IndexOutOfBoundsException { + return Storable.getLong(this.offset + pos*dataSize, inputBytes); + } + + @Override + public int compare(byte[] inputB, int offset, Long matchNo) { + long val = Storable.getLong(offset, inputB); + if ( val == matchNo) return 0; + if (val > matchNo) return 1; + return -1; + } + + @Override + public boolean isEqual(Long firstNumber, Long secondNumber) { + return firstNumber.longValue() == secondNumber.longValue(); + } +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesShort.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesShort.java new file mode 100644 index 0000000..f123158 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesShort.java @@ -0,0 +1,79 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
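The Storable.getLong helper used by PositionalBytesLong above is not part of this patch. Assuming the usual Hadoop-style big-endian byte order, it would be equivalent to the following sketch (an assumption about the helper, not its verified source):

```java
public class BigEndianSketch {
    // Assumed behavior of Storable.getLong: big-endian read of 8 bytes.
    static long getLong(int offset, byte[] b) {
        long v = 0;
        for (int i = 0; i < 8; i++) {
            v = (v << 8) | (b[offset + i] & 0xFFL);
        }
        return v;
    }

    public static void main(String[] args) {
        byte[] b = java.nio.ByteBuffer.allocate(8).putLong(123456789L).array();
        System.out.println(getLong(0, b)); // 123456789
    }
}
```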
+*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Collection; + +import com.bizosys.hsearch.byteutils.ByteUtil; +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.Storable; + +public final class PositionalBytesShort extends PositionalBytesBase{ + + public static final ISortedByte getInstance(short defaultValue) { + return new PositionalBytesShort(defaultValue); + } + + private PositionalBytesShort(short defaultValue){ + this.dataSize = 2; + this.defaultValue = defaultValue; + this.defaultValueB = ByteUtil.toBytes(defaultValue); + } + + @Override + public byte[] toBytes(Collection sortedCollection)throws IOException { + + int maxIndex = sortedCollection.size(); + int length = maxIndex * dataSize; + inputBytes = new byte[length]; + int index = 0; + for (Short input : sortedCollection) { + if(null == input) + System.arraycopy(defaultValueB, 0, inputBytes, index, dataSize); + else + System.arraycopy(ByteUtil.toBytes(input), 0, inputBytes, index, dataSize); + + index = index + dataSize; + } + + return inputBytes; + } + + + @Override + public Short getValueAt(int pos) throws IndexOutOfBoundsException { + return Storable.getShort(this.offset + pos*dataSize, inputBytes); + } + + @Override + public int compare(byte[] inputB, int offset, Short matchNo) { + short val = Storable.getShort(offset, inputB); + if ( val == matchNo) return 0; + if (val > matchNo) return 1; + return -1; + } + + @Override + public boolean isEqual(Short firstNumber, Short secondNumber) { + return firstNumber.shortValue() == secondNumber.shortValue(); + } + +} diff --git a/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesString.java b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesString.java new file mode 100644 index 0000000..95cc6f6 --- /dev/null +++ b/src/java/com/bizosys/hsearch/byteutils/vs/PositionalBytesString.java @@ -0,0 +1,327 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
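The next file, PositionalBytesString, handles variable-length values, so it cannot use a fixed slot width. Instead it writes a header holding the element count and one running byte offset per element (plus the total payload length), followed by the concatenated string bytes; nulls are again replaced by the default value. A toy rendering of that layout, derived from the toBytes/getValueAt code below (plain JDK, illustrative names):

```java
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

public class StringSlotsSketch {
    // Layout: [count][start_0 .. start_{n-1}][totalLen][payload bytes]
    static byte[] toBytes(List<String> vals, String dflt) throws Exception {
        ByteBuffer header = ByteBuffer.allocate(4 + vals.size() * 4 + 4);
        ByteArrayOutputStream payload = new ByteArrayOutputStream();
        header.putInt(vals.size());
        for (String v : vals) {
            header.putInt(payload.size());               // running start offset
            payload.write((v == null ? dflt : v).getBytes());
        }
        header.putInt(payload.size());                   // total payload length
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(header.array());
        payload.writeTo(out);
        return out.toByteArray();
    }

    static String valueAt(byte[] b, int pos) {
        ByteBuffer buf = ByteBuffer.wrap(b);
        int n = buf.getInt(0);
        int start = buf.getInt(4 + pos * 4);
        int end = buf.getInt(4 + pos * 4 + 4);           // next offset, or totalLen
        int headerLen = 8 + n * 4;
        return new String(b, headerLen + start, end - start);
    }

    public static void main(String[] args) throws Exception {
        byte[] b = toBytes(Arrays.asList("ab", null, "xyz"), "-");
        System.out.println(valueAt(b, 0) + " " + valueAt(b, 1) + " " + valueAt(b, 2)); // ab - xyz
    }
}
```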
+*/ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import com.bizosys.hsearch.byteutils.ByteUtil; +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.Storable; +import com.bizosys.hsearch.hbase.ObjectFactory; + +public final class PositionalBytesString extends PositionalBytesBase{ + + public static ISortedByte getInstance(String defaultValue) { + return new PositionalBytesString(defaultValue); + } + + private PositionalBytesString(String defaultValue) { + this.defaultValue = defaultValue; + this.defaultValueB = ByteUtil.toBytes(defaultValue); + } + + @Override + public final int getSize() { + if ( null == this.inputBytes) return 0; + return Storable.getInt(offset, this.inputBytes); + } + + /** + * 4 bytes - total entities + * 4 bytes - outputBytesLen ( total * ( 4 + string length) ) + * Each element bytes length + * Each element bytes + */ + @Override + public final byte[] toBytes(final Collection sortedCollection) throws IOException { + + int totalEntities = sortedCollection.size(); + int seek = 0; + byte[] headerBytes = new byte[4 + totalEntities * 4 + 4] ; + System.arraycopy(Storable.putInt(totalEntities), 0, headerBytes, seek, 4); + seek += 4; //4 is added for collection size + + int outputBytesLen = 0; + int index = 0; + byte[][] bytes = new byte[totalEntities][]; + byte[] valueB = null; + for (String value : sortedCollection) { + + + System.arraycopy(Storable.putInt(outputBytesLen), 0, headerBytes, seek, 4); + seek = seek + 4; + if(null == value){ + + bytes[index++] = defaultValueB; + outputBytesLen += defaultValueB.length; + + } else{ + valueB = value.getBytes(); + bytes[index++] = valueB; + outputBytesLen += valueB.length; + } + + } + System.arraycopy(Storable.putInt(outputBytesLen), 0, headerBytes, seek, 4); + + outputBytesLen = outputBytesLen + headerBytes.length; + byte[] outputBytes = new byte[outputBytesLen]; + System.arraycopy(headerBytes, 0, outputBytes, 0, headerBytes.length); + seek = headerBytes.length; + + int byteSize = 0; + for (byte[] byteArr : bytes) { + byteSize = byteArr.length; + System.arraycopy(byteArr, 0, outputBytes, seek, byteSize); + seek = seek + byteSize; + } + return outputBytes; + } + + @Override + public final void addAll(final Collection vals) throws IOException { + + int collectionSize = getSize(); + + List seeks = ObjectFactory.getInstance().getIntegerList(); + int seek = offset + 4; + + for ( int i=0; i values(final Collection vals) throws IOException { + + if ( null == this.inputBytes ) return vals; + int total = Storable.getInt(offset, this.inputBytes); + + byte[] aElem = new byte[65536]; + Arrays.fill(aElem, (byte)0); + + int elemSizeOffset = 0; + int elemStartOffset = 0; + int elemEndOffset = 0; + int elemLen = 0; + int headerOffset = 0; + + String EMPTY = ""; + String temp = null; + + for ( int index=0; index 65536) { + byte[] aElemBig = new byte[elemLen]; + System.arraycopy(inputBytes, headerOffset + elemStartOffset, aElemBig, 0, elemLen); + temp = new String(aElemBig); + if(this.defaultValue.equalsIgnoreCase(temp)) + continue; + + vals.add( temp ); + + } else { + System.arraycopy(inputBytes, headerOffset + elemStartOffset, aElem, 0, elemLen); + temp = new String(aElem, 0, elemLen ); + if(this.defaultValue.equalsIgnoreCase(temp)) + continue; + + vals.add( temp ); + } + } + return vals; + } + + @Override + public final String getValueAt(final int pos) throws IndexOutOfBoundsException { + + int 
collectionSize = getSize(); + if ( pos >= collectionSize) throw new IndexOutOfBoundsException( + "Maximum position in array is " + collectionSize + " and accessed " + pos ); + + int elemSizeOffset = (offset + 4 + pos * 4); + int elemStartOffset = Storable.getInt( elemSizeOffset, inputBytes); + int elemEndOffset = Storable.getInt( elemSizeOffset + 4, inputBytes); + //System.out.println(elemEndOffset + "-" + elemStartOffset); + int elemLen = elemEndOffset - elemStartOffset; + + int headerOffset = (offset + 8 + collectionSize * 4); + if ( 0 == elemLen) return ""; + byte[] aElem = new byte[elemLen]; + + System.arraycopy(inputBytes, headerOffset + elemStartOffset, aElem, 0, elemLen); + return new String(aElem); + } + + @Override + public final int getEqualToIndex(final String matchVal) throws IOException { + + int collectionSize = getSize(); + if ( 0 == collectionSize) return -1; + + int seek = offset; + List seekPositions = ObjectFactory.getInstance().getIntegerList(); + + seek = seek + 4; + + for ( int i=0; i matchings) throws IOException { + getEqualOrNorEqualToIndexes(false, matchVal, matchings); + } + + @Override + public final void getNotEqualToIndexes(final String matchVal, final Collection matchings) throws IOException { + getEqualOrNorEqualToIndexes(true, matchVal, matchings); + } + + /** + * Find total entieis - 4 bytes + * Find the end bytes position to read + * Iterate to find String positions + * Read each string + */ + private final void getEqualOrNorEqualToIndexes(final boolean isNot, final String matchVal, + final Collection matchings) throws IOException { + + int collectionSize = getSize(); + if ( 0 == collectionSize) return; + + int headerLen = 4 + (collectionSize * 4); + + if (inputBytes.length < headerLen) throw new IOException( + "Corrupted bytes : collectionSize( " + collectionSize + "), header lengh=" + headerLen + + " , actual length = " + inputBytes.length); + + List seeks = ObjectFactory.getInstance().getIntegerList(); + + int seek = this.offset + 4; + for ( int i=0; i> <> [JUMP_STEP] [FROM_TIME yyyy:MM:dd:HH:mm:ss] [TO_TIME yyyy:MM:dd:HH:mm:ss]"); + return; + } + + String tableName = args[0]; + String familyName = args[1]; + int jumpStep = 1; + Date startTime = null; + Date endTime = new Date(); + + if (args.length > 2) { + jumpStep = Integer.parseInt(args[2]); + } + + if (args.length > 3) { + startTime = dateFormatter.parse(args[3]); + } + + if (args.length > 4) { + endTime = dateFormatter.parse(args[4]); + } + + HBaseFacade facade = null; + ResultScanner scanner = null; + HTableWrapper table = null; + + try { + facade = HBaseFacade.getInstance(); + table = facade.getTable(tableName); + + Scan scan = new Scan(); + scan.setCacheBlocks(true); + scan.setCaching(500); + scan.setMaxVersions(1); + scan = scan.addFamily(familyName.getBytes()); + if ( startTime != null) { + scan = scan.setTimeRange(startTime.getTime(), endTime.getTime()); + } + + scanner = table.getScanner(scan); + + for (Result r: scanner) { + if ( null == r) continue; + if ( r.isEmpty()) continue; + + for (KeyValue kv : r.list()) { + System.out.println(new String(r.getRow()) + "\t" + new String(kv.getQualifier()) + "\t" + kv.getValueLength()); + } + } + } finally { + if ( null != scanner) scanner.close(); + if ( null != table ) facade.putTable(table); + } + + } + +} diff --git a/src/java/com/bizosys/hsearch/treetable/CellV2.java b/src/java/com/bizosys/hsearch/treetable/CellV2.java new file mode 100644 index 0000000..131c8e7 --- /dev/null +++ b/src/java/com/bizosys/hsearch/treetable/CellV2.java @@ -0,0 
+1,481 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package com.bizosys.hsearch.treetable; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.bizosys.hsearch.byteutils.ISortedByte; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesBase; +import com.bizosys.hsearch.hbase.ObjectFactory; + +public class CellV2 { + + public ISortedByte vSorter = null; + public List sortedList = null; + public BytesSection data = null; + + public CellV2(final ISortedByte vSorter) { + this.vSorter = vSorter; + } + + public CellV2(final ISortedByte vSorter, final List sortedList) { + this(vSorter); + this.sortedList = sortedList; + } + + + public CellV2 (final ISortedByte vSorter, final byte[] data) { + this(vSorter); + int dataLen = ( null == data) ? 0 : data.length; + this.data = new BytesSection(data, 0, dataLen); + } + + public CellV2 (final ISortedByte vSorter, final BytesSection sectionData) { + this(vSorter); + this.data = sectionData; + } + + public final void add(final Integer k, final V v) { + if ( null == sortedList) sortedList = new ArrayList(); + + int size = sortedList.size(); + int askedSize = k.intValue(); + if ( askedSize == size) { + sortedList.add(k,v ); + } else if (askedSize < size) { //13 < 10 + sortedList.set(k, v); + } else { + //Iterate and fill the blanks with default value + for ( int i=size; i visitor) throws IOException{ + + PositionalBytesBase baseSorter = (PositionalBytesBase) vSorter.parse(data.data, data.offset, data.length); + int sizeV = baseSorter.getSize(); + V temp = null; + + for ( int i=0; i visitor) throws IOException { + + PositionalBytesBase baseSorter = (PositionalBytesBase) vSorter.parse(data.data, data.offset, data.length); + int sizeV = baseSorter.getSize(); + + if ( null != exactValue || null != minimumValue || null != maximumValue) { + findMatchingPositionsVsorterInitialized( + exactValue, minimumValue, maximumValue, new CellV2FoundIndex(vSorter, visitor)); + } else { + V temp = null; + + for ( int i=0; i visitor) throws IOException { + if ( null != exactValue ) { + findNonMatchingPositionsVsorterInitialized( exactValue, + new CellV2FoundIndex(vSorter, visitor) ); + } else { + throw new IOException("Not queries are not yet supported for ranges."); + } + } + + + public final List getMap(final byte[] data) throws IOException { + int dataLen = ( null == data) ? 
0 : data.length; + this.data = new BytesSection(data, 0, dataLen) ; + parseElements(); + return sortedList; + } + + public final List getMap() throws IOException { + if ( null != sortedList) return sortedList; + if ( null != this.data) { + parseElements(); + return sortedList; + } + throw new IOException("Cell2 is not initialized"); + } + + public final void getMap(final List valueContainer) throws IOException { + if ( null != sortedList) { + valueContainer.addAll(sortedList); + return; + } + + this.sortedList = valueContainer; + if ( null != this.data) { + parseElements(); + return; + } + throw new IOException("Cell2 is not initialized"); + } + + public final void getMap(final List kContainer, final List vContainer) throws IOException{ + keySet(kContainer); + values(vContainer); + } + + public final void getMap(final V exactValue, final V minimumValue, final V maximumValue, + final List reusableFoundPosArray, final List kContainer, final List vContainer) throws IOException { + + List foundPositions = reusableFoundPosArray; + byte[] allValsB = findMatchingPositions(exactValue, minimumValue, maximumValue, foundPositions); + + PositionalBytesBase baseSorter = (PositionalBytesBase) vSorter.parse(allValsB); + V temp = null; + for (int position : foundPositions) { + temp = baseSorter.getValueAt(position); + if(baseSorter.isEqual(baseSorter.defaultValue, temp)) + continue; + + kContainer.add(position); + vContainer.add(temp); + } + } + + public final void populate(final Map map) throws IOException { + PositionalBytesBase baseSorter = (PositionalBytesBase) vSorter.parse(data.data, data.offset, data.length); + int sizeV = baseSorter.getSize(); + V temp = null; + + for ( int i=0; i vComp) throws IOException { + + if ( sortedList.size() == 0 ) return null; + + List values = new ArrayList(); + int i=0; + for (V entry : sortedList) { + i++; + if ( leftInclusize ) if ( vComp.compare(entry, minValue) < 0 ) continue; + else if ( vComp.compare(entry, minValue) <= 0 ) continue; + + if ( rightInclusize ) if ( vComp.compare(entry, maximumValue) >= 0 ) continue; + else if ( vComp.compare(entry, maximumValue) > 0 ) continue; + + values.add(i,entry); + } + + if ( i == 0 ) return null; + + byte[] cellB = vSorter.toBytes(values); + values.clear(); + return cellB; + } + + public final Collection indexOf(final V exactValue) throws IOException { + List foundPositions = new ArrayList(); + findMatchingPositions(exactValue, null, null, foundPositions); + return foundPositions; + } + + public final Collection indexOf(final V minimumValue, final V maximumValue) throws IOException { + List foundPositions = new ArrayList(); + findMatchingPositions(null, minimumValue, maximumValue, foundPositions); + return foundPositions; + } + + public final Set keySet(final V exactValue) throws IOException { + Set keys = new HashSet(); + keySet(exactValue, null, null, keys); + return keys; + } + + public final void keySet(final V exactValue, final Collection keys) throws IOException { + keySet(exactValue, null, null, keys); + } + + public final Set keySet(final V minimumValue, final V maximumValue) throws IOException { + Set keys = new HashSet(); + keySet(minimumValue, maximumValue, keys); + return keys; + } + + public final void keySet(final V minimumValue, final V maximumValue, final Collection keys) throws IOException { + keySet(null, minimumValue, maximumValue, keys); + } + + private final void keySet( final V exactValue, final V minimumValue, final V maximumValue, final Collection foundKeys) throws IOException { + + if ( null == 
data) { + System.err.println("Null Data - It should be a warning"); + return; + } + findMatchingPositions(exactValue, minimumValue, maximumValue, foundKeys); + } + + public final Collection<V> values(final V exactValue) throws IOException { + Collection<V> values = new ArrayList<V>(); + matchValues(exactValue, null, null, values); + return values; + } + + public final Collection<V> values(final V minimumValue, final V maximumValue) throws IOException { + Collection<V> values = new ArrayList<V>(); + matchValues(null, minimumValue, maximumValue, values); + return values; + } + + public final void values(final V exactValue, final Collection<V> foundValues) throws IOException { + matchValues(exactValue, null, null, foundValues); + } + + public final void values(final V minimumValue, final V maximumValue, final Collection<V> foundValues) throws IOException { + matchValues(null, minimumValue, maximumValue, foundValues); + } + + private final void matchValues(final V exactValue, final V minimumValue, final V maximumValue, final Collection<V> foundValues) throws IOException { + + if ( null == data) { + System.err.println("Null Data - It should be a warning"); + return; + } + + List<Integer> foundPositions = ObjectFactory.getInstance().getIntegerList(); + + findMatchingPositions(exactValue, minimumValue, maximumValue, foundPositions); + + //vSorter is already parsed on the right data section by findMatchingPositions; re-parsing the raw bytes here would ignore the section offset. + for (int position : foundPositions) { + foundValues.add( vSorter.getValueAt(position) ); + } + + ObjectFactory.getInstance().putIntegerList(foundPositions); + } + + public final Collection<V> values() throws IOException { + + Collection<V> values = new ArrayList<V>(); + values(values); + return values; + } + + public final void values( final Collection<V> values) throws IOException { + + if ( null == data) { + System.err.println("Null Data - It should be a warning"); + return; + } + + PositionalBytesBase<V> baseSorter = (PositionalBytesBase<V>) vSorter.parse(data.data, data.offset, data.length); + V temp = null; + int size = baseSorter.getSize(); + for ( int i=0; i < size; i++) { + temp = baseSorter.getValueAt(i); + if(baseSorter.isEqual(baseSorter.defaultValue, temp)) continue; + values.add(temp); + } + } + + public final Collection<V> valuesAt(final Collection<Integer> foundPositions) throws IOException { + List<V> foundValues = new ArrayList<V>(); + valuesAt(foundValues, foundPositions ); + return foundValues; + } + + public final void valuesAt(final Collection<V> foundValues, final Collection<Integer> foundPositions) throws IOException { + + if ( null == data.data) return; + PositionalBytesBase<V> baseSorter = (PositionalBytesBase<V>) vSorter.parse(data.data, data.offset, data.length); + V temp = null; + for (int position : foundPositions) { + temp = baseSorter.getValueAt(position); + if(baseSorter.isEqual(baseSorter.defaultValue, temp)) continue; + foundValues.add(temp); + } + } + + private final byte[] findMatchingPositions( final V exactValue, final V minimumValue, final V maximumValue, final Collection<Integer> foundPositions) throws IOException { + + if ( null == data) { + System.err.println("Null Data - It should be a warning"); + return null; + } + + if ( null == data.data) return null; + vSorter.parse(data.data, data.offset, data.length); + findMatchingPositionsVsorterInitialized(exactValue, minimumValue, maximumValue, foundPositions); + return data.data; + }
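findMatchingPositionsVsorterInitialized, next, routes the three nullable arguments to the narrowest index scan: an exact match wins, then a closed range, then the two open-ended bounds. In practice a caller selects the query shape purely by which arguments it leaves null. A hypothetical usage fragment (local names invented for the example; it mirrors the tests later in this patch):

```java
// Hypothetical usage of the keySet overloads; 'data' would be bytes
// produced earlier by cell.toBytes().
byte[] data = /* serialized cell */ null;
CellV2<Integer> cell = new CellV2<Integer>(
        PositionalBytesInteger.getInstance(Integer.MIN_VALUE), data);
List<Integer> keys = new ArrayList<Integer>();

cell.keySet(42, keys);                 // exact        -> getEqualToIndexes
cell.keySet(5, 10, keys);              // min and max  -> getRangeIndexesInclusive
cell.keySet(5, (Integer) null, keys);  // min only     -> getGreaterThanEqualToIndexes
cell.keySet((Integer) null, 10, keys); // max only     -> getLessThanEqualToIndexes
```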
+ private final void findMatchingPositionsVsorterInitialized( final V exactValue, final V minimumValue, + final V maximumValue, final Collection<Integer> foundPositions) throws IOException { + + if ( null != exactValue || null != minimumValue || null != maximumValue ) { + + if ( null != exactValue ) { + vSorter.getEqualToIndexes(exactValue, foundPositions); + } else { + if ( null != minimumValue && null != maximumValue ) { + vSorter.getRangeIndexesInclusive(minimumValue, maximumValue, foundPositions); + } else if ( null != minimumValue) { + vSorter.getGreaterThanEqualToIndexes(minimumValue, foundPositions); + } else { + vSorter.getLessThanEqualToIndexes(maximumValue, foundPositions); + } + } + } + } + + private final void findNonMatchingPositionsVsorterInitialized( final V exactValue, + final Collection<Integer> foundPositions) throws IOException { + + if ( null != exactValue) { + vSorter.getNotEqualToIndexes(exactValue, foundPositions); + } + } + + public final void processIn(final V[] inValues, final Cell2Visitor<Integer, V> visitor) throws IOException { + + if ( null == this.data ) return; + if ( null == this.data.data ) return; + + PositionalBytesBase<V> baseSorter = (PositionalBytesBase<V>) vSorter.parse(data.data, data.offset, data.length); + int size = baseSorter.getSize(); + if ( 0 != size) { + findInMatchingPositionsVsorterInitialized( inValues, + new CellV2FoundIndex<V>(vSorter, visitor) ); + } else { + throw new IOException("Size of the in elements is zero."); + } + } + + private final void findInMatchingPositionsVsorterInitialized( final V[] inValues, + final Collection<Integer> foundPositions) throws IOException { + int size = inValues.length; + if ( 0 != size) { + for(int i = 0; i < size; i++){ + vSorter.getEqualToIndexes(inValues[i], foundPositions); + } + } + } + + public final Collection<Integer> keySet() throws IOException { + List<Integer> keys = new ArrayList<Integer>(); + keySet(keys); + return keys; + } + + public final void keySet( final Collection<Integer> keys) throws IOException { + + if ( null == this.data ) return; + if ( null == this.data.data ) return; + + PositionalBytesBase<V> baseSorter = (PositionalBytesBase<V>) vSorter.parse(data.data, data.offset, data.length); + int size = baseSorter.getSize(); + for ( int i=0; i<size; i++) keys.add(i); + } + + public final void parseElements() throws IOException { + if ( null == this.sortedList) this.sortedList = new ArrayList<V>(); + else this.sortedList.clear(); + values(this.sortedList); + } + + public final void remove(final Integer key) { + if ( null == this.sortedList) return; + int elemIndex = this.sortedList.indexOf(key); + if ( -1 == elemIndex) return; + this.sortedList.remove(elemIndex); + } + + public void clear() { + if ( null != sortedList) sortedList.clear(); + this.data = null; + } + + @Override + public String toString() { + if ( null == sortedList) try {parseElements();} catch (Exception e) {return e.getMessage();}; + return sortedList.toString(); + } +} diff --git a/src/java/com/bizosys/hsearch/treetable/CellV2FoundIndex.java b/src/java/com/bizosys/hsearch/treetable/CellV2FoundIndex.java new file mode 100644 index 0000000..01ea724 --- /dev/null +++ b/src/java/com/bizosys/hsearch/treetable/CellV2FoundIndex.java @@ -0,0 +1,102 @@ +/* +* Copyright 2010 Bizosys Technologies Limited +* +* Licensed to the Bizosys Technologies Limited (Bizosys) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The Bizosys licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/
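The file that follows, CellV2FoundIndex.java, is the glue between the index scans and the visitor API: it implements Collection<Integer>, but only add() does real work, forwarding each matched position and its decoded value straight to a Cell2Visitor instead of buffering a result list. The trick in isolation (a minimal sketch, independent of the HSearch types; Java 8 syntax used for brevity):

```java
import java.util.AbstractCollection;
import java.util.Collections;
import java.util.Iterator;
import java.util.function.IntConsumer;

// A write-only Collection: add() streams to a callback, nothing is stored.
// AbstractCollection keeps this stub smaller than implementing Collection
// from scratch, as the patch does.
public class StreamingSink extends AbstractCollection<Integer> {
    private final IntConsumer callback;

    public StreamingSink(IntConsumer callback) { this.callback = callback; }

    @Override public boolean add(Integer i) { callback.accept(i); return true; }
    @Override public Iterator<Integer> iterator() { return Collections.emptyIterator(); }
    @Override public int size() { return 0; }

    public static void main(String[] args) {
        StreamingSink sink = new StreamingSink(i -> System.out.println("match at " + i));
        sink.add(3); // handled immediately; no list is materialized
        sink.add(7);
    }
}
```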
+ +package com.bizosys.hsearch.treetable; + +import java.util.Collection; +import java.util.Iterator; + +import com.bizosys.hsearch.byteutils.ISortedByte; + +public class CellV2FoundIndex<V> implements Collection<Integer>{ + + private ISortedByte<V> vSorter = null; + private Cell2Visitor<Integer, V> visitor = null; + + public CellV2FoundIndex(ISortedByte<V> vSorter, Cell2Visitor<Integer, V> visitor) { + this.vSorter = vSorter; + this.visitor = visitor; + } + + //Only add() is meaningful; the remaining Collection methods are inert stubs. + @Override + public int size() { + return 0; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public boolean contains(Object o) { + return false; + } + + @Override + public Iterator<Integer> iterator() { + return null; + } + + @Override + public Object[] toArray() { + return null; + } + + @Override + public <T> T[] toArray(T[] a) { + return null; + } + + @Override + public final boolean add(Integer i) { + visitor.visit(i, vSorter.getValueAt(i)); + return true; + } + + @Override + public boolean remove(Object o) { + return false; + } + + @Override + public boolean containsAll(Collection<?> c) { + return false; + } + + @Override + public boolean addAll(Collection<? extends Integer> c) { + return false; + } + + @Override + public boolean removeAll(Collection<?> c) { + return false; + } + + @Override + public boolean retainAll(Collection<?> c) { + return false; + } + + @Override + public void clear() { + } +} diff --git a/src/java/com/bizosys/hsearch/treetable/storage/protobuf/HSearchCoprocessor.proto b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/HSearchCoprocessor.proto new file mode 100644 index 0000000..7a916d9 --- /dev/null +++ b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/HSearchCoprocessor.proto @@ -0,0 +1,47 @@ +option java_package = "com.bizosys.hsearch.treetable.storage.protobuf.generated"; +option java_outer_classname = "HSearchCoprocessorProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "HSearchFilter.proto"; + +/** + * Container for a list of column qualifier names of a family.
+ */ +message Column { + required bytes family = 1; + required bytes qualifier = 2; +} + +message RowRequest { + repeated Column familyWithQualifier = 1; + optional HSearchGenericFilterMessage filter = 2; +} + +message RowResponse { + required bytes result = 1; +} + +service HSearchGenericCoprocessorService { + rpc getRows(RowRequest) returns(RowResponse); +} + +message MultiRowRequest { + required Column familyWithQualifier = 1; + optional HSearchScalarFilterMessage filter = 2; + repeated bytes rows = 3; +} + +service HSearchMultiGetCoprocessorProxyService { + rpc getRows(MultiRowRequest) returns(RowResponse); +} + +message BytesRowRequest { + repeated Column familyWithQualifier = 1; + optional HSearchBytesFilterMessage filter = 2; +} + +service HSearchBytesCoprocessorProxyService { + rpc getRows(BytesRowRequest) returns(RowResponse); +} diff --git a/src/java/com/bizosys/hsearch/treetable/storage/protobuf/HSearchFilter.proto b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/HSearchFilter.proto new file mode 100644 index 0000000..8285ba0 --- /dev/null +++ b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/HSearchFilter.proto @@ -0,0 +1,31 @@ +option java_package = "com.bizosys.hsearch.treetable.storage.protobuf.generated"; +option java_outer_classname = "HSearchFilterProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message HSearchGenericFilterMessage { + required string filterClassName = 1; + required string inputMapperInstructions = 2; + required string multiQuery = 3; + optional bytes inputRowsToIncludeB = 4; + repeated QueryFiltersPair queryFilters = 5; + + message QueryFiltersPair { + required string key = 1; + required string value = 2; + } +} + +message HSearchBytesFilterMessage { + required string filterClassName = 1; + optional bytes state = 2; +} + +message HSearchScalarFilterMessage { + required string filterClassName = 1; + required string inputMapperInstructions = 2; + optional string multiQuery = 3; + optional bytes inputRowsToIncludeB = 4; + optional bytes matchingIds = 5; +} \ No newline at end of file diff --git a/src/java/com/bizosys/hsearch/treetable/storage/protobuf/generated/HSearchCoprocessorProtos.java b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/generated/HSearchCoprocessorProtos.java new file mode 100644 index 0000000..e00a04e --- /dev/null +++ b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/generated/HSearchCoprocessorProtos.java @@ -0,0 +1,4 @@ +package com.bizosys.hsearch.treetable.storage.protobuf.generated; + +public final class HSearchCoprocessorProtos { +} diff --git a/src/java/com/bizosys/hsearch/treetable/storage/protobuf/generated/HSearchFilterProtos.java b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/generated/HSearchFilterProtos.java new file mode 100644 index 0000000..12f9081 --- /dev/null +++ b/src/java/com/bizosys/hsearch/treetable/storage/protobuf/generated/HSearchFilterProtos.java @@ -0,0 +1,4 @@ +package com.bizosys.hsearch.treetable.storage.protobuf.generated; + +public final class HSearchFilterProtos { +} diff --git a/src/test/com/bizosys/hsearch/byteutils/vs/PositionalBytesIntegerTest.java b/src/test/com/bizosys/hsearch/byteutils/vs/PositionalBytesIntegerTest.java new file mode 100644 index 0000000..2885d2e --- /dev/null +++ b/src/test/com/bizosys/hsearch/byteutils/vs/PositionalBytesIntegerTest.java @@ -0,0 +1,292 @@ +package com.bizosys.hsearch.byteutils.vs; + +import java.io.IOException; +import java.util.ArrayList; 
+import java.util.Collections; +import java.util.List; + +import junit.framework.TestCase; +import junit.framework.TestFerrari; + +import com.oneline.ferrari.TestAll; + +public class PositionalBytesIntegerTest extends TestCase { + + public static String[] modes = new String[] { "all", "random", "method"}; + public static String mode = modes[2]; + + public static void main(String[] args) throws Exception { + PositionalBytesIntegerTest t = new PositionalBytesIntegerTest(); + + if ( modes[0].equals(mode) ) { + TestAll.run(new TestCase[]{t}); + } else if ( modes[1].equals(mode) ) { + TestFerrari.testRandom(t); + + } else if ( modes[2].equals(mode) ) { + t.setUp(); + t.testRangeCheck(); + t.tearDown(); + } + } + + @Override + protected void setUp() throws Exception { + } + + @Override + protected void tearDown() throws Exception { + } + + public void testEqual() throws Exception { + List sortedList = new ArrayList(); + sortedList.add(new Integer(0)); + sortedList.add(new Integer(2)); + sortedList.add(new Integer(12)); + sortedList.add(new Integer(10)); + sortedList.add(new Integer(10)); + sortedList.add(new Integer(15)); + sortedList.add(new Integer(18)); + + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + List positions = new ArrayList(); + pos.parse(bytes).getEqualToIndexes(10, positions); + assertNotNull(positions); + for (int position : positions) { + assertEquals(pos.parse(bytes).getValueAt(position).intValue(), 10 ); + } + assertEquals(3, positions.size()); + } + + public void testEqualWithOffset() throws Exception { + List sortedList = new ArrayList(); + sortedList.add(new Integer(-1)); + sortedList.add(new Integer(2)); + sortedList.add(new Integer(12)); + sortedList.add(new Integer(10)); + sortedList.add(new Integer(10)); + sortedList.add(new Integer(15)); + sortedList.add(new Integer(18)); + + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + byte[] finalbytes = new byte[bytes.length + 300]; + System.arraycopy(bytes, 0, finalbytes, 300, bytes.length); + + int foundLoc = pos.parse(finalbytes,300,finalbytes.length - 300).getEqualToIndex(10); + + assertTrue(foundLoc != -1); + int foundVal = pos.parse(finalbytes,300,finalbytes.length - 300).getValueAt(foundLoc); + assertEquals(10, foundVal ); + } + + public void testLessthan() throws Exception { + List sortedList = new ArrayList(); + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + List positions = new ArrayList(); + pos.parse(bytes).getLessThanIndexes(130, positions); + + //System.out.println(positions.toString()); + assertNotNull(positions); + assertTrue(! 
positions.contains(130) ); + assertTrue(!positions.contains(999) ); + assertTrue(positions.contains(129) ); + assertTrue(positions.contains(0) ); + assertTrue(!positions.contains(-1) ); + + assertEquals(130, positions.size()); + + for (int position : positions) { + assertTrue( (pos.parse(bytes).getValueAt(position) < 130) ); + } + } + + public void testLessthanMultiValue() throws Exception { + List sortedList = new ArrayList(); + sortedList.add(new Integer(0)); + sortedList.add(new Integer(5)); + sortedList.add(new Integer(2)); + sortedList.add(new Integer(12)); + sortedList.add(new Integer(10)); + sortedList.add(new Integer(10)); + sortedList.add(new Integer(15)); + sortedList.add(new Integer(18)); + + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + List positions = new ArrayList(); + pos.parse(bytes).getLessThanIndexes(10, positions); + + assertNotNull(positions); + + for (int position : positions) { + assertTrue( (pos.parse(bytes).getValueAt(position) < 10) ); + } + assertEquals(13, positions.size()); + } + + public void testLessthanEqual() throws Exception { + List sortedList = new ArrayList(); + sortedList.add(32); + sortedList.add(82); + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + List positions = new ArrayList(); + pos.parse(bytes).getLessThanEqualToIndexes(700, positions); + + assertNotNull(positions); + assertEquals(703, positions.size()); + for (int position : positions) { + assertTrue( (pos.parse(bytes).getValueAt(position) <= 700) ); + } + } + + public void testGreaterthan() throws Exception { + List sortedList = new ArrayList(); + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + List positions = new ArrayList(); + pos.parse(bytes).getGreaterThanEqualToIndexes(121, positions); + + assertNotNull(positions); + assertTrue(positions.contains(498) ); + assertTrue(positions.contains(499) ); + assertTrue(positions.contains(500) ); + assertTrue(positions.contains(999) ); + assertTrue(positions.contains(121) ); + assertTrue(!positions.contains(120) ); + assertEquals(1000-121, positions.size()); + for (int position : positions) { + assertTrue( (pos.parse(bytes).getValueAt(position) >= 121) ); + } + } + + public void testGreaterthanEqual() throws Exception { + List sortedList = new ArrayList(); + for ( int i=0; i<1000; i++) { + sortedList.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + List positions = new ArrayList(); + pos.parse(bytes).getGreaterThanIndexes(800, positions); + + assertNotNull(positions); + assertTrue(positions.contains(801) ); + assertTrue(positions.contains(999)); + assertTrue(!positions.contains(800)); + assertEquals(1000-800-1, positions.size()); + for (int position : positions) { + assertTrue( (pos.parse(bytes).getValueAt(position) > 800) ); + } + } + + public void testRandomOperationsGTCheck(Integer checkNo, + Integer input1, Integer input2, Integer input3, Integer input4,Integer input5, + Integer input6, Integer input7, Integer input8, Integer input9,Integer 
input10 + ) throws Exception { + + List sortedList = new ArrayList(); + sortedList.add(input1); + sortedList.add(input2); + sortedList.add(input3); + sortedList.add(input4); + sortedList.add(input5); + sortedList.add(input6); + sortedList.add(input7); + sortedList.add(input8); + sortedList.add(input9); + sortedList.add(input10); + Collections.sort(sortedList); + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + for (Integer aInput : sortedList) { + List positions = new ArrayList(); + pos.parse(bytes).getGreaterThanIndexes(aInput, positions); + + for (Integer bInput : positions) { + if ( aInput > bInput) assertTrue(positions.contains(bInput)); + } + + } + } + + public void testRandomOperationsLTCheck(Integer checkNo, + Integer input1, Integer input2, Integer input3, Integer input4,Integer input5, + Integer input6, Integer input7, Integer input8, Integer input9,Integer input10 + ) throws Exception { + + List sortedList = new ArrayList(); + sortedList.add(input1); + sortedList.add(input2); + sortedList.add(input3); + sortedList.add(input4); + sortedList.add(input5); + sortedList.add(input6); + sortedList.add(input7); + sortedList.add(input8); + sortedList.add(input9); + sortedList.add(input10); + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(sortedList); + for (Integer aInput : sortedList) { + List positions = new ArrayList(); + pos.parse(bytes).getLessThanIndexes(aInput, positions); + + for (Integer bInput : positions) { + if ( aInput < bInput) assertTrue(positions.contains(bInput)); + } + + } + } + + public void testRangeCheck() throws IOException{ + List list = new ArrayList(); + list.add(10); + list.add(12); + list.add(1); + list.add(3); + list.add(8); + + for(int i = 0; i < 1000; i++){ + list.add(i); + } + + PositionalBytesInteger pos = (PositionalBytesInteger) PositionalBytesInteger.getInstance(Integer.MIN_VALUE); + byte[] bytes = pos.toBytes(list); + List positions = new ArrayList(); + pos.parse(bytes).getRangeIndexesInclusive(5, 10, positions); + for (Integer position : positions) { + System.out.println(list.get(position)); + } + } +} diff --git a/src/test/com/bizosys/hsearch/treetable/CellV2Test.java b/src/test/com/bizosys/hsearch/treetable/CellV2Test.java new file mode 100644 index 0000000..656b996 --- /dev/null +++ b/src/test/com/bizosys/hsearch/treetable/CellV2Test.java @@ -0,0 +1,691 @@ +package com.bizosys.hsearch.treetable; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import junit.framework.TestCase; +import junit.framework.TestFerrari; + +import com.bizosys.hsearch.byteutils.SortedBytesArray; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesByte; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesFloat; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesInteger; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesLong; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesShort; +import com.bizosys.hsearch.byteutils.vs.PositionalBytesString; +import com.oneline.ferrari.TestAll; + +public class CellV2Test extends TestCase { + + public static String[] modes = new String[] { "all", "random", "method"}; + public static String mode = modes[2]; + + public static void main(String[] args) throws Exception { + CellV2Test t = new CellV2Test(); + + if ( modes[0].equals(mode) ) { + 
TestAll.run(new TestCase[]{t}); + } else if ( modes[1].equals(mode) ) { + TestFerrari.testRandom(t); + + } else if ( modes[2].equals(mode) ) { + t.setUp(); + t.testGetMap(); + t.tearDown(); + } + } + + @Override + protected void setUp() throws Exception { + } + + @Override + protected void tearDown() throws Exception { + } + + public void testInitialization() throws Exception { + + { + CellV2 ser = new CellV2(PositionalBytesInteger.getInstance(Integer.MIN_VALUE)); + ser.add(10, 1); + ser.toBytes(); + } + + { + CellV2 ser = new CellV2(PositionalBytesFloat.getInstance(Float.MIN_VALUE)); + ser.add(268, 3.5f); + ser.add(151, 2.800000000000000f); + ser.add(72, -2147483648f); + ser.add(10, 2.500000000000000f); + ser.add(86, 2.600000000000000f); + byte[] data = ser.toBytes(); + CellV2 deser = new CellV2(PositionalBytesFloat.getInstance(Float.MIN_VALUE),data); + System.out.println(deser.getMap()); + } + + { + CellV2 ser = new CellV2(PositionalBytesByte.getInstance(Byte.MIN_VALUE)); + ser.add(11, (byte)1); + ser.toBytes(); + } + + } + + public void testNonUnique() throws Exception { + CellV2 ser = new CellV2(PositionalBytesFloat.getInstance(Float.MIN_VALUE)); + for ( int i=0; i<10; i++) { + ser.add(i, (float)i); + } + + byte[] data = ser.toBytes(); + CellV2 deser = new CellV2(PositionalBytesFloat.getInstance(Float.MIN_VALUE),data); + + final Cell2Visitor visitor = new Cell2Visitor() { + @Override + public final void visit(final Integer k, final Float v) { + System.out.println(k + "-" + v); + } + }; + + long start = System.currentTimeMillis(); + deser.process(visitor); + long end = System.currentTimeMillis(); + System.out.println(end - start); + } + + public void testOneRow() throws Exception { + CellV2 ser = new CellV2(PositionalBytesString.getInstance("-")); + ser.add(100, "AAAAAAAAAA"); + + + CellV2 deser = new CellV2(PositionalBytesString.getInstance("-"),ser.toBytes()); + HashMap out = new HashMap(); + deser.populate(out); + System.out.println(out.toString()); + } + + public void testMultiRow() throws Exception { + + List merged = new ArrayList(); + for ( int i=0; i<3; i++ ) { + CellV2 ser = new CellV2(PositionalBytesString.getInstance("-")); + ser.add(100, "AAAAAAAAAA"); + merged.add(ser.toBytes()); + } + + SortedBytesArray sbaSet = SortedBytesArray.getInstanceArr(); + byte[] mergedData = sbaSet.toBytes(merged); + + SortedBytesArray sbaDeser = SortedBytesArray.getInstanceArr(); + sbaDeser.parse(mergedData); + int size = sbaDeser.getSize(); + System.out.println(size); + + SortedBytesArray.Reference ref = new SortedBytesArray.Reference(); + + for ( int i=0; i CellV2 = new CellV2(PositionalBytesString.getInstance("-"), + new BytesSection(mergedData, ref.offset, ref.length)); + Map elems = new HashMap(); + CellV2.populate(elems); + System.out.println("CellV2 :" + elems.toString()); + } + + } + + public void testSorterOnlyConsrtuctor() throws Exception { + + CellV2 ser = new CellV2(PositionalBytesShort.getInstance(Short.MIN_VALUE)); + + ser.add(1, (short) 1); + ser.add(2, (short) 2); + + //Test Parsing + CellV2 deser = new CellV2(PositionalBytesShort.getInstance(Short.MIN_VALUE),ser.toBytes()); + + deser.parseElements(); + assertEquals(2, deser.getMap().size()); + for (Short cell : deser.getMap()) { + assertTrue( ( cell.shortValue() == 1) || ( cell.shortValue() == 2)); + } + } + + public void testBytesSectionConsrtuctor() throws Exception { + + CellV2 ser = new CellV2(PositionalBytesInteger.getInstance(Integer.MIN_VALUE)); + + for ( int i=1; i< 100; i++ ) ser.add( i, i); + + byte[] data = 
ser.toBytes(); + byte[] appendedData = new byte[data.length + 12]; + Arrays.fill(appendedData, (byte) 0); + System.arraycopy(data, 0, appendedData, 0, data.length); + + //Test Parsing + CellV2 deser = new CellV2(PositionalBytesInteger.getInstance(Integer.MIN_VALUE), + new BytesSection(appendedData, 0, data.length)); + + deser.parseElements(); + assertEquals(99, deser.getMap().size()); + + } + + public void testGetMap() throws Exception { + + CellV2 ser = new CellV2(PositionalBytesLong.getInstance(Long.MIN_VALUE)); + + for ( int i=1; i< 100; i++ ) ser.add(i, (long) i); + + byte[] data = ser.toBytes(); + + //Test Parsing + CellV2 deser = new CellV2(PositionalBytesLong.getInstance(Long.MIN_VALUE)); + + List values = deser.getMap(data); + + assertEquals(values.size(), 99); + } + +// public void testGetMapWithContainer() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesDouble.getInstance(), SortedBytesFloat.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add((double)i, (float) i); +// ser.sort (new CellComparator.FloatComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesDouble.getInstance(), SortedBytesFloat.getInstance(), dataSection); +// +// List> values = new ArrayList>(); +// deser.getMap(values); +// assertEquals(values.size(), 99); +// +// for (CellKeyValue cell : values) { +// assertTrue( ( cell.key > 0 && cell.key < 100)); +// assertTrue( cell.value.intValue() == cell.key.intValue()); +// } +// } +// +// public void testGetMapWithKVContainer() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesString.getInstance(), SortedBytesString.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add("k" + i, "v" + i); +// ser.sort (new CellComparator.StringComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesString.getInstance(), SortedBytesString.getInstance(), dataSection); +// +// List keys = new ArrayList(); +// List values = new ArrayList(); +// deser.getMap(keys, values); +// +// assertEquals(keys.size(), 99); +// assertEquals(values.size(), 99); +// +// assertTrue( keys.get(0).equals("k1")); +// assertTrue( values.get(0).equals("v1")); +// assertTrue( keys.get(98).equals("k99")); +// assertTrue( values.get(98).equals("v99")); +// } +// +// public void testFilterMapWithKVContainer() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add(i, (float) i); +// ser.sort (new CellComparator.FloatComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesInteger.getInstance(), 
SortedBytesFloat.getInstance(), dataSection); +// +// List positions = new ArrayList(); +// List keys = new ArrayList(); +// List values = new ArrayList(); +// deser.getMap(null, new Float(50), new Float(50), positions, keys, values); +// +// assertEquals(keys.size(), 1); +// assertEquals(values.size(), 1); +// +// assertTrue( keys.get(0).intValue() == 50); +// assertTrue( values.get(0).floatValue() == 50); +// } +// +// public void testFilterPopulate() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesInteger.getInstance(), SortedBytesString.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add(i, "v" + i); +// ser.sort (new CellComparator.StringComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesInteger.getInstance(), SortedBytesString.getInstance(), dataSection); +// +// Map elems = new HashMap(); +// deser.populate(elems); +// +// assertEquals(elems.size(), 99); +// Iterator itr = elems.keySet().iterator(); +// assertEquals(itr.next().intValue() , 1); +// assertEquals(elems.get(1) , "v1"); +// +// assertEquals(itr.next().intValue() , 2); +// assertEquals(elems.get(2) , "v2"); +// } +// +// public void testIndexOf() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesArray.getInstance(), SortedBytesString.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add( Storable.putInt(i), "v" + i); +// ser.sort (new CellComparator.StringComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesArray.getInstance(), SortedBytesString.getInstance(), dataSection); +// +// Collection positions = deser.indexOf("v50"); +// assertEquals(1, positions.size()); +// Collection foundVals = deser.valuesAt(positions); +// assertEquals(1, foundVals.size()); +// +// assertEquals("v50", foundVals.iterator().next()); +// } +// +// +// public void testIndexRange() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesString.getInstance(), SortedBytesLong.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add( "k" + i, (long) i); +// ser.sort (new CellComparator.LongComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesString.getInstance(), SortedBytesLong.getInstance(), dataSection); +// +// Collection positions = deser.indexOf((long)23, (long)25); +// assertEquals(3, positions.size()); +// Collection foundVals = deser.valuesAt(positions); +// assertEquals(3, foundVals.size()); +// +// Iterator v = foundVals.iterator(); +// assertEquals((long) 25, v.next().longValue()); +// assertEquals((long) 24, v.next().longValue()); +// assertEquals( (long) 23, v.next().longValue()); +// } +// +// public void testKeySet() 
throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesLong.getInstance(), SortedBytesLong.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add( (long) i, (long) i); +// ser.sort (new CellComparator.LongComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesLong.getInstance(), SortedBytesLong.getInstance(), dataSection); +// +// Collection keys = deser.keySet( (long)23); +// +// assertEquals(1, keys.size()); +// assertEquals((long) 23, keys.iterator().next().longValue()); +// } +// +// public void testKeySetWithCollection() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesLong.getInstance(), SortedBytesLong.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add( (long) i, (long) i); +// ser.sort (new CellComparator.LongComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesLong.getInstance(), SortedBytesLong.getInstance(), dataSection); +// +// +// Collection keys = new ArrayList(); +// deser.keySet( (long)99, keys); +// +// assertEquals(1, keys.size()); +// assertEquals((long) 99, keys.iterator().next().longValue()); +// +// keys.clear(); +// deser.keySet( (long)100, keys); +// assertEquals(0, keys.size()); +// +// keys.clear(); +// deser.keySet( (long)98, (long)101, keys); +// assertEquals(2, keys.size()); +// Iterator itr = keys.iterator(); +// assertEquals((long) 98, itr.next().longValue()); +// assertEquals((long) 99, itr.next().longValue()); +// } +// +// public void testValuesWithCollection() throws Exception { +// +// CellV2 ser = new CellV2( +// SortedBytesDouble.getInstance(), SortedBytesDouble.getInstance()); +// +// for ( int i=1; i< 100; i++ ) ser.add( (double) i, (double) i); +// ser.sort (new CellComparator.DoubleComparator()); +// +// byte[] data = ser.toBytesOnSortedData(); +// byte[] appendedData = new byte[12 + data.length + 12]; +// Arrays.fill(appendedData, (byte) 0); +// System.arraycopy(data, 0, appendedData, 12, data.length); +// BytesSection dataSection = new BytesSection(appendedData, 12, data.length); +// +// //Test Parsing +// CellV2 deser = new CellV2( +// SortedBytesDouble.getInstance(), SortedBytesDouble.getInstance(), dataSection); +// +// Collection vals = deser.values(); +// assertEquals(vals.size(), 99); +// assertEquals(vals.iterator().next().doubleValue(), (double) 1); +// +// vals = deser.values((double)1, (double)9); +// assertEquals(vals.size(), 9); +// +// Iterator itr = vals.iterator(); +// for ( int i=9; i<0; i++ ) assertEquals(itr.next().doubleValue(), (double) i); +// +// vals = new ArrayList(); +// deser.values((double) 1, vals); +// +// assertEquals(vals.size(), 1); +// assertEquals(vals.iterator().next().doubleValue(), (double) 1); +// +// vals = new ArrayList(); +// deser.values((double) 1, (double) 200, vals); +// +// assertEquals(vals.size(), 99); +// itr = vals.iterator(); +// for ( int i=99; i<0; i++ ) assertEquals(itr.next().doubleValue(), (double) i); +// } +// +// 
+//  public void testValueAt() throws Exception {
+//
+//    CellV2 ser = new CellV2(
+//      SortedBytesFloat.getInstance(), SortedBytesFloat.getInstance());
+//
+//    for ( int i=1; i< 100; i++ ) ser.add( (float) i, (float) i);
+//    ser.sort (new CellComparator.FloatComparator());
+//
+//    byte[] data = ser.toBytesOnSortedData();
+//    byte[] appendedData = new byte[12 + data.length + 12];
+//    Arrays.fill(appendedData, (byte) 0);
+//    System.arraycopy(data, 0, appendedData, 12, data.length);
+//    BytesSection dataSection = new BytesSection(appendedData, 12, data.length);
+//
+//    //Test Parsing
+//    CellV2 deser = new CellV2(
+//      SortedBytesFloat.getInstance(), SortedBytesFloat.getInstance(), dataSection);
+//
+//    Collection foundPositions = new ArrayList();
+//    foundPositions.add(0);
+//
+//    Collection vals = deser.valuesAt(foundPositions);
+//    assertEquals(1, vals.size());
+//    assertEquals((float) 1, vals.iterator().next().floatValue());
+//  }
+//
+//
+//  public void testAddOnEmptySet(Long aLong, Float aFloat) throws Exception {
+//    if ( null == aLong || null == aFloat ) return;
+//
+//    CellV2 tc = new CellV2(
+//      SortedBytesLong.getInstance(), SortedBytesFloat.getInstance());
+//
+//    tc.add(aLong, aFloat);
+//    tc.sort (new CellComparator.FloatComparator());
+//    byte[] data = tc.toBytesOnSortedData();
+//    byte[] appendedData = new byte[12 + data.length + 12];
+//    Arrays.fill(appendedData, (byte) 0);
+//    System.arraycopy(data, 0, appendedData, 12, data.length);
+//    BytesSection dataSection = new BytesSection(appendedData, 12, data.length);
+//
+//    //Test Parsing
+//    CellV2 tcNewParsing = new CellV2(
+//      SortedBytesLong.getInstance(), SortedBytesFloat.getInstance(), dataSection);
+//
+//    tcNewParsing.parseElements();
+//    assertEquals(1, tcNewParsing.getMap().size());
+//
+//    for (CellKeyValue cell : tcNewParsing.getMap()) {
+//      assertEquals(cell.key.longValue(), aLong.longValue());
+//      assertEquals(cell.value.floatValue(), aFloat.floatValue());
+//    }
+//
+//    //Find Matching
+//    CellV2 tcNewFinding = new CellV2(
+//      SortedBytesLong.getInstance(), SortedBytesFloat.getInstance(), data);
+//
+//    Set all = tcNewFinding.keySet(aFloat);
+//
+//    assertEquals(1, all.size());
+//    assertEquals(all.iterator().next().longValue(), aLong.longValue());
+//
+//  }
+//
+//  public void testSubsequentAdd(Integer aInt, String aString) throws Exception {
+//    if ( null == aString || null == aInt ) return;
+//
+//    CellV2 tc = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesArray.getInstance());
+//
+//    tc.add(aInt, aString.getBytes());
+//    tc.add(46, "new46".getBytes());
+//
+//    tc.sort (new CellComparator.BytesComparator());
+//    byte[] data = tc.toBytesOnSortedData();
+//    byte[] appendedData = new byte[12 + data.length + 12];
+//    Arrays.fill(appendedData, (byte) 0);
+//    System.arraycopy(data, 0, appendedData, 12, data.length);
+//    BytesSection dataSection = new BytesSection(appendedData, 12, data.length);
+//
+//    CellV2 tcNewFinding = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesArray.getInstance(), dataSection);
+//
+//    List foundValues = new ArrayList();
+//    tcNewFinding.values("new46".getBytes(), foundValues);
+//    assertEquals(1, foundValues.size());
+//    for (byte[] bs : foundValues) {
+//      assertEquals("new46" , new String(bs));
+//    }
+//
+//    Collection foundKeys = tcNewFinding.keySet("new46".getBytes() );
+//    assertEquals(1, foundKeys.size());
+//    for (Integer i : foundKeys) {
+//      assertEquals( 46, i.intValue());
+//    }
+//
+//    tcNewFinding.parseElements();
+//
+//    for (CellKeyValue elem : tcNewFinding.getMap()) {
+//      assertTrue(elem.key == aInt.intValue() || elem.key == 46);
+//      assertTrue( "new46".equals(new String(elem.value)) || aString.equals(new String(elem.value)) );
+//    }
+//
+//  }
+//
+//  public void testCallback() throws Exception {
+//    CellV2Visitor visitor = new CellV2Visitor() {
+//      @Override
+//      public void visit(Integer k, Float v) {
+//        System.out.println(k.toString() + "/" + v.toString());
+//        if ( k.intValue() == 23) {
+//          assertEquals(23.1F, v.floatValue());
+//        } else if ( k.intValue() == 24) {
+//          assertEquals(24.1F, v.floatValue());
+//        } else {
+//          assertTrue( k != 23 && k != 24);
+//        }
+//      }
+//    };
+//
+//    CellV2 table = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance());
+//    table.add(23, 23.1F);
+//    table.add(24, 24.1F);
+//
+//    byte[] data = table.toBytesOnSortedData();
+//    byte[] appendedData = new byte[12 + data.length + 12];
+//    Arrays.fill(appendedData, (byte) 0);
+//    System.arraycopy(data, 0, appendedData, 12, data.length);
+//    BytesSection dataSection = new BytesSection(appendedData, 12, data.length);
+//
+//
+//    CellV2 tableNew = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance(), dataSection);
+//    tableNew.process(visitor);
+//  }
+//
+//  public void testCallbackResponse() throws Exception {
+//    CellV2Visitor visitor = new CellV2Visitor() {
+//      @Override public final void visit(final Integer k, final Float v) {}};
+//
+//    CellV2 table = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance());
+//    for ( int i=0; i<50; i++) { table.add(i, (float) (i + .1)); }
+//    byte[] ser = table.toBytesOnSortedData();
+//    System.out.println("Serialization Length :" + ser.length);
+//
+//    long start = System.currentTimeMillis();
+//    for ( int i=0; i<10; i++) {
+//      CellV2 tableNew = new CellV2(
+//        SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance(), ser);
+//      tableNew.process(visitor);
+//    }
+//    long end = System.currentTimeMillis();
+//    System.out.println("Response time ms :" + (end - start));
+//  }
+//
+//  public void testCallbackFiltering() throws Exception {
+//
+//    CellV2Visitor visitor = new CellV2Visitor() {
+//      @Override
+//      public void visit(Integer k, Float v) {
+//        System.out.println(k.toString() + "/" + v.toString());
+//      }
+//    };
+//
+//    CellV2 table = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance());
+//    for ( int i=0; i<10; i++) {
+//      table.add(i, i + .1F);
+//    }
+//    byte[] data = table.toBytesOnSortedData();
+//
+//    long st = System.currentTimeMillis();
+//    CellV2 tableNew = new CellV2(
+//      SortedBytesInteger.getInstance(), SortedBytesFloat.getInstance(), data);
+//    tableNew.process(null, 10F, 20F, visitor);
+//    long ed = System.currentTimeMillis();
+//    System.out.println(ed - st);
+//  }
+//
+//  public void testBooleanBoolean() throws Exception {
+//    CellV2 tc = new CellV2(
+//      SortedBytesBoolean.getInstance(), SortedBytesBoolean.getInstance());
+//
+//    tc.add(true, true);
+//    tc.add(false, true);
+//
+//    tc.sort (new CellComparator.BooleanComparator());
+//    byte[] data = tc.toBytesOnSortedData();
+//
+//    CellV2 tcNewFinding = new CellV2(
+//      SortedBytesBoolean.getInstance(), SortedBytesBoolean.getInstance(), data);
+//
+//    List foundValues = new ArrayList();
+//    tcNewFinding.keySet(true, foundValues);
+//
+//    for (Boolean bs : foundValues) {
+//      System.out.println(bs);
+//    }
+//  }
+//
+//  public void toBytesOnSortedDataWithMapTest() throws Exception {
+//    Map cellL = new HashMap();
+//    for ( int i=0; i< 100; i++) {
+//      cellL.put("key" + i, "value" + i);
+//    }
+//    CellV2 cellMap = new CellV2(
+//      SortedBytesString.getInstance(), SortedBytesString.getInstance() );
+//    byte[] ser = cellMap.toBytesOnSortedData(cellL);
+//
+//    Map deserL = new HashMap();
+//
+//    new CellV2(
+//      SortedBytesString.getInstance(), SortedBytesString.getInstance(), ser ).
+//      populate(deserL);
+//    System.out.println(deserL.toString());
+//
+//  }
+
+}
diff --git a/src/test/com/bizosys/hsearch/treetable/example/DataIndexer.java b/src/test/com/bizosys/hsearch/treetable/example/DataIndexer.java
new file mode 100644
index 0000000..6f6a0f5
--- /dev/null
+++ b/src/test/com/bizosys/hsearch/treetable/example/DataIndexer.java
@@ -0,0 +1,90 @@
+package com.bizosys.hsearch.treetable.example;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+
+import com.bizosys.hsearch.hbase.HBaseFacade;
+import com.bizosys.hsearch.hbase.HDML;
+import com.bizosys.hsearch.hbase.HWriter;
+import com.bizosys.hsearch.hbase.NV;
+import com.bizosys.hsearch.hbase.RecordScalar;
+import com.bizosys.hsearch.treetable.client.partition.IPartition;
+import com.bizosys.hsearch.treetable.example.impl.donotmodify.HBaseTableSchema;
+import com.bizosys.hsearch.treetable.example.impl.donotmodify.HSearchTableExamResult;
+import com.bizosys.hsearch.treetable.storage.HBaseTableSchemaDefn;
+
+public class DataIndexer {
+  /**
+   * Range Query - [1:7432]
+   * Match All - *
+   * Exact match - m
+   */
+
+  @SuppressWarnings("unchecked")
+  public static void main(String[] args) throws Exception {
+
+    HBaseTableSchemaDefn schema = HBaseTableSchema.getInstance().getSchema();
+    String tableName = schema.getTableName();
+    String colFamily = schema.columnPartions.keySet().iterator().next();
+
+    HBaseFacade facade = HBaseFacade.getInstance();
+    HBaseAdmin admin = facade.getAdmin();
+    IPartition partition = schema.columnPartions.get(colFamily);
+    if ( admin.tableExists(tableName) ) {
+      for ( String famExt : partition.getPartitionNames() ) {
+        HDML.truncate(tableName, new NV( (colFamily + "_" + famExt).getBytes(), "1".getBytes() ));
+      }
+    } else {
+      List cols = new ArrayList();
+      for ( String famExt : partition.getPartitionNames() ) {
+        cols.add(new HColumnDescriptor( colFamily + "_" + famExt ));
+      }
+      HDML.create(tableName, cols);
+    }
+
+    Map tableParts = new HashMap();
+
+    int [] ages = new int[] {22,23,24,25,26};
+    int agesCounter = 0;
+
+    String [] role = new String[] {"scout","monitor","captain","student"};
+    int roleCounter = 0;
+
+    for ( int i=0; i<101; i++) {
+
+      HSearchTableExamResult part = null;
+
+      String family = partition.getColumnFamily( (double) i/10);
+      if (tableParts.containsKey(family)) {
+        part = tableParts.get(family);
+      } else {
+        part = new HSearchTableExamResult();
+        tableParts.put(family, part);
+      }
+
+      part.put( ages[agesCounter], role[roleCounter], "classx", i, (float) i/10 );
+
+      agesCounter++;
+      if ( agesCounter > 4) agesCounter = 0;
+
+      roleCounter++;
+      if ( roleCounter > 3) roleCounter = 0;
+    }
+
+
+    List records = new ArrayList();
+    for (String family : tableParts.keySet()) {
+      System.out.println("Writing to Family :" + family);
+      records.add( new RecordScalar( "row1".getBytes(),
+        new NV( family.getBytes(), "1".getBytes(), tableParts.get(family).toBytes() ) )
+      );
+    }
+
+    HWriter.getInstance(true).insertScalar(tableName, records);
+  }
+}
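
The DataIndexer above routes each of its 101 synthetic exam records to a column-family partition via IPartition.getColumnFamily, then flushes every partition as one cell of the single HBase row "row1". Below is a minimal, standalone sketch of that routing step; the partition names and the cut-off in getColumnFamily are hypothetical stand-ins for whatever HBaseTableSchema actually configures, and the HBase write is reduced to a println.

    import java.util.HashMap;
    import java.util.Map;

    public class PartitionRoutingSketch {

        // Hypothetical stand-in for IPartition.getColumnFamily: route a marks
        // value to a named range partition. Real names and ranges come from
        // the generated schema, not from this sketch.
        static String getColumnFamily(double marks) {
            return (marks < 5.0) ? "ExamResult_low" : "ExamResult_high";
        }

        public static void main(String[] args) {
            Map<String, Integer> buckets = new HashMap<String, Integer>();

            // Same driving loop as DataIndexer: marks = i/10 for i in [0, 100].
            for (int i = 0; i < 101; i++) {
                String family = getColumnFamily((double) i / 10);
                Integer seen = buckets.get(family);
                buckets.put(family, seen == null ? 1 : seen + 1);
            }

            // In DataIndexer each bucket becomes one NV cell under row key
            // "row1"; here we just report how many records landed where.
            for (Map.Entry<String, Integer> e : buckets.entrySet()) {
                System.out.println(e.getKey() + " -> " + e.getValue() + " records");
            }
        }
    }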