
Able to read/write tables using the Java API for HBase but the tables won't show up in the hbase shell

Question asked by karthik.dondapati on Dec 7, 2015
Latest reply on Dec 7, 2015 by keysbotzum
Hi,

I am able to read and write tables using the HBase Java API. However, when I query the hbase shell, those tables don't show up. The tables created from the API are stored in HDFS under /hbase/data/default. I tried to query HBase using the commands list and list '/hbase/data/default' and still cannot see the tables in the shell. Are there any configuration settings that I am missing? Any help is appreciated.
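
For what it's worth, this is roughly how I check what the Java client sees (a minimal sketch using the same old-style HBaseAdmin API as the full program further down; the class name ListTables is just for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class ListTables {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            // Print every table this client/configuration can see
            for (HTableDescriptor desc : admin.listTables()) {
                System.out.println(desc.getNameAsString());
            }
        } finally {
            admin.close();
        }
    }
}

The output of this is what I am comparing against the shell's list.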

I also tried the reverse: I created tables in the hbase shell and tried to read those tables from the Java API, and I am not able to read them. The storage folders for the tables created from the Java API and from the hbase shell are different: the tables created from the hbase shell are stored under /tables/tablename.
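
The read attempt from the Java API was roughly along these lines (a minimal sketch only; the table path /tables/mytable, the column family cf and the row key row1 are placeholders, and I am assuming the MapR client accepts a full path as the table name, as the "always prefix /" comment in my program suggests):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class ReadShellTable {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Open the shell-created table by its MapR-FS path
        // ("/tables/mytable" is a placeholder)
        HTable table = new HTable(conf, "/tables/mytable");
        try {
            // Fetch a single row ("row1" and column family "cf" are placeholders)
            Get get = new Get(Bytes.toBytes("row1"));
            Result result = table.get(get);
            System.out.println("Columns in cf: "
                    + result.getFamilyMap(Bytes.toBytes("cf")));
        } finally {
            table.close();
        }
    }
}

The full program I am using to create the table from the API is below: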


package hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTable {

    private static Configuration conf = null;

    static {
        conf = HBaseConfiguration.create();
        // Map HBase table names to MapR-DB table paths in MapR-FS
        conf.set("hbase.table.namespace.mappings",
                "t1:/,t15:/tables,t3:./,t20:/goose/tables");
        // HTable implementation to use (MapR's)
        conf.set("mapr.htable.impl", "com.mapr.fs.MapRHTable");
        // Default file system: MapR-FS on MyServer
        conf.set("fs.default.name", "maprfs://MyServer:7222");
        // FileSystem implementation class for the maprfs:// scheme
        conf.set("fs.maprfs.impl", "com.mapr.fs.MapRFileSystem");
        /*
         * conf.set("hadoop.spoofed.user.uid","501");
         * conf.set("hadoop.spoofed.user.gid","501");
         * conf.set("hadoop.spoofed.user.username","mapr");
         */
    }

    // Create a table with a single column family
    public static void getAllRecord() {
        try {
            String tablePath = "ShoppingCart";
            // Note: this builds a fresh Configuration from hbase-site.xml;
            // the settings from the static block above are not applied here
            Configuration conf = HBaseConfiguration.create();
            System.out.println("Creating table ...");
            HBaseAdmin admin = new HBaseAdmin(conf);
            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tablePath));
            HColumnDescriptor coldef = new HColumnDescriptor("mdf");
            desc.addFamily(coldef);
            admin.createTable(desc);
            System.out.println("Table Created ...");
            
        }

        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        try {
            // To access M7 tables, always prefix the table name with /
            CreateTable.getAllRecord();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
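
For reference, the variant that follows the "prefix with /" hint, i.e. giving the table name as a full path, would look roughly like this (a sketch only; /tables/ShoppingCart is my guess based on where the shell-created tables end up, and I am assuming the MapR client accepts a path-style name here):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTableByPath {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            // Table name given as a full MapR-FS path instead of a plain name;
            // "/tables/ShoppingCart" is a guess based on where the
            // shell-created tables show up
            HTableDescriptor desc =
                    new HTableDescriptor(TableName.valueOf("/tables/ShoppingCart"));
            desc.addFamily(new HColumnDescriptor("mdf"));
            admin.createTable(desc);
        } finally {
            admin.close();
        }
    }
}

I have not been able to confirm whether this is equivalent to what the shell does when it creates a table under /tables.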
