HBase Local Testing with IDEA
You need standalone ZooKeeper, HBase, and Hadoop configured locally. When I ran this, I did not start a standalone ZooKeeper; I used the ZooKeeper service bundled with HBase. In the hbase-env.cmd file under HBase's conf folder there is the following setting; true means HBase manages the bundled ZooKeeper itself:
HBASE_MANAGES_ZK=true
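For reference, a minimal hbase-site.xml (in the same conf folder) for this kind of local setup might look like the sketch below. The hdfs://localhost:9000 root directory is an assumption matching the default fs.defaultFS of a local Hadoop install; adjust it to your own setup:

<configuration>
    <!-- Where HBase stores its data; must point at the local HDFS started in step 1. -->
    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://localhost:9000/hbase</value>
    </property>
    <!-- Client port of the bundled ZooKeeper; matches the Java client code below. -->
    <property>
        <name>hbase.zookeeper.property.clientPort</name>
        <value>2181</value>
    </property>
</configuration>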
1. Start Hadoop: hadoop/sbin/start-dfs.cmd
2. Start HBase: hbase/bin/start-hbase.cmd
3. In the hbase/bin directory, hold Shift, right-click, choose "Open command window here", and type hbase shell.
After the project has finished running, you can use the list command to see the created tables and scan 'tableName' to inspect their contents. The full code is below; I am a beginner, so if anything is lacking please leave a comment and we can learn from each other.
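For example, once the demo below has run, a quick sanity check from the shell looks like this (both are standard shell commands; the output depends on your data, so it is omitted here):

hbase shell
list          # should now include the 'h2' table created by the demo
scan 'h2'     # prints every row and every cell in the table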
The pom file is as follows:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com</groupId>
    <artifactId>HbaseTest</artifactId>
    <version>1.0.0</version>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <spring.version>4.2.2.RELEASE</spring.version>
        <slf4j.version>1.7.7</slf4j.version>
        <xdcs.release.version>0.0.9-SNAPSHOT</xdcs.release.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.7.1</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>jcl-over-slf4j</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-it</artifactId>
            <version>1.4.6</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.4.6</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-common -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-common</artifactId>
            <version>1.4.6</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-server -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>1.4.6</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.6.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
The HbaseTest class is as follows (the original used HBaseAdmin and HTablePool, which are gone from the 1.4.6 client API, so it is written here against the Connection/Admin/Table API instead):
/**
 * FileName: HbaseTest
 * Author: johnny
 * Date: 2019/1/28 15:06
 * Description: Demo
 */
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

/**
 * Demo: create a table, insert a row, scan it, and delete a row
 * against a local HBase instance.
 *
 * @author johnny
 * @create 2019/1/28
 * @since 1.0.0
 */
public class HbaseTest {

    public static Configuration configuration;

    static {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.property.clientPort", "2181");
        configuration.set("hbase.zookeeper.quorum", "localhost");
        // Clients locate the master through ZooKeeper, so hbase.master
        // does not need to be set here.
    }

    public static void main(String[] args) {
        createTable("h2");
        insertData("h2");
        //queryAll("h2");
        //deleteByCondition("h2", "11223344");
    }

    public static void createTable(String tableName) {
        System.out.println("------> Start Create Table <------");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Admin admin = connection.getAdmin()) {
            TableName name = TableName.valueOf(tableName);
            if (admin.tableExists(name)) {
                // Drop the old table first so it can be recreated from scratch.
                admin.disableTable(name);
                admin.deleteTable(name);
                System.out.println(tableName + " -----> already existed, deleted <-----");
            }
            HTableDescriptor hTableDescriptor = new HTableDescriptor(name);
            hTableDescriptor.addFamily(new HColumnDescriptor("c1"));
            hTableDescriptor.addFamily(new HColumnDescriptor("c2"));
            hTableDescriptor.addFamily(new HColumnDescriptor("c3"));
            admin.createTable(hTableDescriptor);
            System.out.println("------> Create Table Successfully <------");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void insertData(String tableName) {
        System.out.println("------> Start Insert Data <------");
        Put put = new Put(Bytes.toBytes("11223344"));
        // One value per column family; the qualifier is left as null (empty).
        put.addColumn(Bytes.toBytes("c1"), null, Bytes.toBytes("aaa"));
        put.addColumn(Bytes.toBytes("c2"), null, Bytes.toBytes("bbb"));
        put.addColumn(Bytes.toBytes("c3"), null, Bytes.toBytes("ccc"));
        // HTablePool is gone from the 1.x client API; a shared Connection
        // pools resources internally and hands out lightweight Tables.
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf(tableName))) {
            table.put(put);
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("------> End Insert Data <------");
    }

    public static void deleteByCondition(String tableName, String rowkey) {
        // Delete an entire row by its row key.
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf(tableName))) {
            table.delete(new Delete(Bytes.toBytes(rowkey)));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void queryAll(String tableName) {
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf(tableName));
             ResultScanner rs = table.getScanner(new Scan())) {
            for (Result r : rs) {
                System.out.println("rowKey: " + Bytes.toString(r.getRow()));
                for (Cell cell : r.rawCells()) {
                    System.out.println("family: " + Bytes.toString(CellUtil.cloneFamily(cell))
                            + " ------> value: " + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
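Besides scanning the whole table, a single row can be fetched with a Get. A minimal sketch, reusing the imports, configuration, table name, and row key from the demo above:

// Fetch one row by key and print each of its cells.
try (Connection connection = ConnectionFactory.createConnection(HbaseTest.configuration);
     Table table = connection.getTable(TableName.valueOf("h2"))) {
    Result result = table.get(new Get(Bytes.toBytes("11223344")));
    for (Cell cell : result.rawCells()) {
        System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)) + " = "
                + Bytes.toString(CellUtil.cloneValue(cell)));
    }
} catch (IOException e) {
    e.printStackTrace();
}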
This next class tests HBase under write load (a simple bulk-insert timing test):
/**
 * FileName: InsertTest
 * Author: johnny
 * Date: 2019/1/28 20:27
 * Description: Simple bulk-insert timing test.
 */
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Times how long it takes to insert 200,000 rows of roughly 1 KB each
 * into the 'h2' table.
 *
 * @author johnny
 * @create 2019/1/28
 * @since 1.0.0
 */
public class InsertTest {

    public static long startTime;
    public static long endTime;

    public static void main(String[] args) throws IOException {
        startTime = System.currentTimeMillis();
        System.out.println("start time = " + startTime);
        insert_one();
        endTime = System.currentTimeMillis();
        long costTime = endTime - startTime;
        System.out.println("end time = " + endTime);
        System.out.println("cost time = " + costTime * 1.0 / 1000 + "s");
    }

    public static void insert_one() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        List<Put> list = new ArrayList<Put>();
        int count1 = 200000;
        for (int i = 0; i < count1; i++) {
            String rowname = "row" + i;
            Put p = new Put(Bytes.toBytes(rowname));
            // Roughly 1 KB payload per row, stored under column c1:data.
            p.addColumn(Bytes.toBytes("c1"), Bytes.toBytes("data"), Bytes.toBytes(
                    "XXXXXXXXXXXXXXXXJJJJJJJJSSSSSSSSRRRRRRRRFFFFFFFFQQQQQQQQKKKKKKKKQQQQQQQQBBBBBBBBJJJJJJJJDDDDDDDDXXXXXXXXWWWWWWWWJJJJJJJJZZZZZZZ"
                    + "ZUUUUUUUUBBBBBBBBEEEEEEEEBBBBBBBBLLLLLLLLFFFFFFFFHHHHHHHHXXXXXXXXCCCCCCCCFFFFFFFFPPPPPPPPGGGGGGGGTTTTTTTTKKKKKKKKPPPPPPPPIIIIIIIIXXXXXXXXUUUUUUUUPPPPPPPPDDDDDDDDEEEEEEEEIIIIIII"
                    + "IJJJJJJJJOOOOOOOONNNNNNNNEEEEEEEEBBBBBBBBIIIIIIIIVVVVVVVVPPPPPPPPTTTTTTTTZZZZZZZZWWWWWWWWXXXXXXXXFFFFFFFFKKKKKKKKOOOOOOOONNNNNNNNNNNNNNNNYYYYYYYYEEEEEEEEUUUUUUUURRRRRRRRDDDDDDD"
                    + "DWWWWWWWWIIIIIIIIPPPPPPPPJJJJJJJJPPPPPPPPPPPPPPPPJJJJJJJJYYYYYYYYJJJJJJJJHHHHHHHHLLLLLLLLZZZZZZZZMMMMMMMMMMMMMMMMLLLLLLLLZZZZZZZZHHHHHHHHKKKKKKKKAAAAAAAAZZZZZZZZFFFFFFFFTTTTTTT"
                    + "TSSSSSSSSCCCCCCCCOOOOOOOOFFFFFFFFEEEEEEEEUUUUUUUUNNNNNNNNEEEEEEEENNNNNNNNSSSSSSSSWWWWWWWWTTTTTTTTPPPPPPPPZZZZZZZZAAAAAAAAZZZZZZZZKKKKKKKKHHHHHHHHDDDDDDDDWWWWWWWWOOOOOOOODDDDDDD"
                    + "DBBBBBBBBMMMMMMMMAAAAAAAADDDDDDDDVVVVVVVVUUUUUUUUYYYYYYYYZZZZZZZZPPPPPPPPJJJJJJJJPPPPPPPPXXXXXXXXFFFFFFFFHHHHHHHHGGGGGGGGHHHHHHHHMMMMMMMMCCCCCCCCEEEEEEEEBBBBBBBBXXXXXXXX"));
            list.add(p);
        }
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("h2"))) {
            // Table.put(List<Put>) sends the whole batch and flushes synchronously,
            // so no explicit flushCommits() is needed.
            table.put(list);
        }
        System.out.println("Rows inserted: " + count1);
    }
}
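For batches this large, one alternative worth trying is BufferedMutator, which buffers puts client-side and flushes them in chunks instead of sending the whole list in one call. A minimal sketch that could replace the table.put(list) block in insert_one() above, reusing its conf and list (the 4 MB buffer size is an arbitrary example; BufferedMutator and BufferedMutatorParams come from org.apache.hadoop.hbase.client):

BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf("h2"))
        .writeBufferSize(4 * 1024 * 1024); // flush roughly every 4 MB of buffered puts
try (Connection connection = ConnectionFactory.createConnection(conf);
     BufferedMutator mutator = connection.getBufferedMutator(params)) {
    for (Put p : list) {
        mutator.mutate(p); // buffered locally and sent as the buffer fills
    }
    mutator.flush(); // push out anything still buffered before closing
}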