Installation of HBase on Centos7 and configuration of Eclipse+Maven

I. configuration process of Eclipse+Maven
1. Install and configure JDK
Configure the JDK environment on Windows
2. Install Eclipse
3. Install Maven
Extract the Maven package and put the extracted folder (apache-maven-3.6.0) in the Eclipse directory.
Configure Maven's environment variables: add Maven's bin directory to the Path variable, then run mvn -v at the cmd command line to check whether the installation succeeded.
4. Eclipse configuration Maven


5. Add pom.xml dependency
Copy the following dependencies into the &lt;dependencies&gt; section of pom.xml; after saving, the Maven Dependencies library will be generated automatically.

<dependencies>
  <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>2.7.3</version>
  </dependency>
  <dependency>  
      <groupId>org.apache.hadoop</groupId>  
      <artifactId>hadoop-client</artifactId>  
      <version>2.7.3</version>  
  </dependency> 
  <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.7.3</version>
  </dependency>
</dependencies>


Note: an internet connection is required so that Maven can download the dependencies.


II. HBase installation on Centos7
1. Download the compressed package, upload the compressed package, and decompress the package (here I use hbase-1.4.9-bin.tar.gz)
2. Configure environment variables
Add the HBase installation path to the /etc/profile file, then use the source command to make the configuration take effect. After extraction the directory is hbase-1.4.9, which I renamed to hbase, so my HBase installation path is /opt/module/hbase.

Configure hbase-env.sh : set Java installation path

Set the configuration file path of HBase (/opt/module/hbase/conf)

Use HBase's built-in ZooKeeper by setting HBASE_MANAGES_ZK=true in hbase-env.sh

Configure hbase-site.xml

<!--hbase Shared directory, persistent hbase data-->
<!--Set this to match fs.defaultFS in core-site.xml -->
<property>
        <name>hbase.rootdir</name>
        <value>hdfs://bigdata128:9000/hbase</value>
</property>
<!--Distributed operation mode, false(Default) is stand-alone mode-->
<property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
</property>

<!--Zookeeper Address list of cluster, pseudo distributed with default localhost-->
<property>
        <name>hbase.zookeeper.quorum</name>
        <value>localhost</value>
</property>


3. Start and run HBase: start HBase
4. Use Eclipse to create hbase database:

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.log4j.BasicConfigurator;
/**
 * HBase client demo: create/delete a table and insert, delete and query rows.
 *
 * <p>Every public operation opens its own connection via {@link #init()} and
 * releases it via {@link #close()}, so each call is self-contained. Execute
 * the statements in {@code main} one at a time by uncommenting only the call
 * you want to run.
 */
public class ExampleForHbase{
    public static Configuration configuration;  // client configuration (hbase.rootdir etc.)
    public static Connection connection;        // opened by init(), released by close()
    public static Admin admin;                  // DDL handle obtained from the connection

    //Execute the statements in main one by one: uncomment the call you want to
    //run and keep the others commented out (e.g. comment everything else while
    //executing insertRow).
    public static void main(String[] args)throws IOException{
    	BasicConfigurator.configure();
        //Create a table named Score with column families sname and course.
//        createTable("Score",new String[]{"sname","course"});

        //Insert a row into Score with row key 95001 and sname=Mary (the fourth
        //parameter is empty because the sname column family has no child column).
        //Equivalent shell command: put 'Score','95001','sname','Mary'
//        insertRow("Score", "95001", "sname", "", "Mary");
        //Insert a row into Score: row key 95001, course:Math = 88 (course is
        //the column family, Math is the qualifier under course).
        //Equivalent shell command: put 'Score','95001','course:Math','88'
//        insertRow("Score", "95001", "course", "Math", "88");
        //Insert a row into Score: row key 95001, course:English = 85.
        //Equivalent shell command: put 'Score','95001','course:English','85'
//        insertRow("Score", "95001", "course", "English", "85");

        //1. Delete one column: row key 95001, family course, qualifier Math.
        //Before running, uncomment the addColumn(...) line inside deleteRow.
        //Equivalent shell command: delete 'Score','95001','course:Math'
        //deleteRow("Score", "95001", "course", "Math");

        //2. Delete a whole column family: row key 95001, family course (both
        //the Math and English values of 95001 are removed).
        //Before running, uncomment the addFamily(...) line inside deleteRow.
        //Equivalent shell command: delete 'Score','95001','course'
        //deleteRow("Score", "95001", "course", "");

        //3. Delete the entire row with row key 95001.
        //Before running, keep both addColumn(...) and addFamily(...) commented
        //out inside deleteRow so the Delete covers the whole row.
        //Equivalent shell command: deleteall 'Score','95001'
        //deleteRow("Score", "95001", "", "");

        //Query Score: row key 95001, family course, qualifier Math.
        getData("Score", "95001", "course", "Math");
        //Query Score: row key 95001, family sname (the fourth parameter is
        //empty because sname has no child column).
        //getData("Score", "95001", "sname", "");

        //Drop the Score table.
        //deleteTable("Score");
    }

    /**
     * Opens the HBase connection and admin handle. Called at the start of
     * every operation; pair with {@link #close()}.
     */
    public static void init(){
    	BasicConfigurator.configure();
        configuration  = HBaseConfiguration.create();
        // NOTE(review): assumes the HDFS NameNode runs at 192.168.21.10:9000 —
        // adjust this address to your own cluster before running.
        configuration.set("hbase.rootdir","hdfs://192.168.21.10:9000/hbase");
        try{
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        }catch (IOException e){
            e.printStackTrace();
        }
    }

    /** Closes the admin handle and the connection, tolerating null handles. */
    public static void close(){
        try{
            if(admin != null){
                admin.close();
            }
            if(connection != null){
                connection.close();
            }
        }catch (IOException e){
            e.printStackTrace();
        }
    }

    /**
     * Creates a table. HBase supplies the row key implicitly (it is the first
     * value after the table name in a shell put), so no id column is declared.
     *
     * @param myTableName table name
     * @param colFamily   column family names
     * @throws IOException if the table cannot be created
     */
    public static void createTable(String myTableName,String[] colFamily) throws IOException {
        init();
        try {
            TableName tableName = TableName.valueOf(myTableName);
            if(admin.tableExists(tableName)){
                System.out.println("table already exists!");
            }else {
                HTableDescriptor hTableDescriptor = new HTableDescriptor(tableName);
                for(String str : colFamily){
                    hTableDescriptor.addFamily(new HColumnDescriptor(str));
                }
                admin.createTable(hTableDescriptor);
                System.out.println("create table success");
            }
        } finally {
            // release the connection even when createTable throws
            close();
        }
    }

    /**
     * Disables and drops the named table if it exists.
     *
     * @param tableName table name
     * @throws IOException if the table cannot be deleted
     */
    public static void deleteTable(String tableName) throws IOException {
        init();
        try {
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                admin.disableTable(tn);  // a table must be disabled before deletion
                admin.deleteTable(tn);
            }
        } finally {
            close();
        }
    }

    /**
     * Prints the names of all existing tables.
     *
     * @throws IOException if the table list cannot be fetched
     */
    public static void listTables() throws IOException {
        init();
        try {
            for(HTableDescriptor hTableDescriptor : admin.listTables()){
                System.out.println(hTableDescriptor.getNameAsString());
            }
        } finally {
            close();
        }
    }

    /**
     * Inserts one value into a row's column.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param colFamily column family name
     * @param col       column qualifier (may be empty when the family has no child column)
     * @param val       value to store
     * @throws IOException if the put fails
     */
    public static void insertRow(String tableName,String rowKey,String colFamily,String col,String val) throws IOException {
        init();
        // try-with-resources closes the Table; finally closes the connection
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Put put = new Put(rowKey.getBytes(StandardCharsets.UTF_8));
            put.addColumn(colFamily.getBytes(StandardCharsets.UTF_8),
                    col.getBytes(StandardCharsets.UTF_8),
                    val.getBytes(StandardCharsets.UTF_8));
            table.put(put);
        } finally {
            close();
        }
    }

    /**
     * Deletes data for a row. With both lines below commented out the whole
     * row is deleted; uncomment one to narrow the scope.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param colFamily column family name
     * @param col       column qualifier
     * @throws IOException if the delete fails
     */
    public static void deleteRow(String tableName,String rowKey,String colFamily,String col) throws IOException {
        init();
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Delete delete = new Delete(rowKey.getBytes(StandardCharsets.UTF_8));
            //Uncomment to delete all data of the specified column family:
            //delete.addFamily(colFamily.getBytes(StandardCharsets.UTF_8));
            //Uncomment to delete only the specified column:
            //delete.addColumn(colFamily.getBytes(StandardCharsets.UTF_8), col.getBytes(StandardCharsets.UTF_8));
            table.delete(delete);
        } finally {
            close();
        }
    }

    /**
     * Fetches one cell by row key, family and qualifier, and prints it.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param colFamily column family name
     * @param col       column qualifier
     * @throws IOException if the get fails
     */
    public static void getData(String tableName,String rowKey,String colFamily,String col)throws  IOException{
        init();
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Get get = new Get(rowKey.getBytes(StandardCharsets.UTF_8));
            get.addColumn(colFamily.getBytes(StandardCharsets.UTF_8), col.getBytes(StandardCharsets.UTF_8));
            showCell(table.get(get));
        } finally {
            close();
        }
    }

    /**
     * Prints every cell of a query result: row key, timestamp, column family,
     * column qualifier and value.
     *
     * @param result the query result to print
     */
    public static void showCell(Result result){
        for(Cell cell : result.rawCells()){
            System.out.println("RowName:"+new String(CellUtil.cloneRow(cell), StandardCharsets.UTF_8)+" ");
            System.out.println("Timestamp:"+cell.getTimestamp()+" ");
            System.out.println("column Family:"+new String(CellUtil.cloneFamily(cell), StandardCharsets.UTF_8)+" ");
            System.out.println("column Name:"+new String(CellUtil.cloneQualifier(cell), StandardCharsets.UTF_8)+" ");
            System.out.println("value:"+new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8)+" ");
        }
    }
}

Tags: HBase Hadoop Maven Apache

Posted on Wed, 06 Nov 2019 11:59:54 -0500 by moneytree