Level 1
package step1;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * HBase 2.0 version of ExampleClient that uses {@code Connection},
 * {@code Admin}, and {@code Table}.
 */
public class Task {
    public void createTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        try {
            // Create the tables
            Admin admin = connection.getAdmin();
            try {
                TableName tableName = TableName.valueOf("dept");
                // New API: a TableDescriptor is assembled through TableDescriptorBuilder
                TableDescriptorBuilder tableDescriptor =
                        TableDescriptorBuilder.newBuilder(tableName);
                ColumnFamilyDescriptor family =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
                tableDescriptor.setColumnFamily(family); // attach the column family
                admin.createTable(tableDescriptor.build()); // create the table
                TableName emp = TableName.valueOf("emp");
                // Same pattern for the second table
                TableDescriptorBuilder empDescriptor =
                        TableDescriptorBuilder.newBuilder(emp);
                ColumnFamilyDescriptor empFamily =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("emp")).build(); // build the column family descriptor
                empDescriptor.setColumnFamily(empFamily); // attach the column family
                admin.createTable(empDescriptor.build()); // create the table
            } finally {
                admin.close();
            }
        } finally {
            connection.close();
        }
        /********* End *********/
    }
}
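Connection and Admin both implement Closeable, so the nested try/finally above can be written more compactly with try-with-resources on Java 7+. A sketch of the equivalent structure, using the same classes as above:

try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
     Admin admin = connection.getAdmin()) {
    // build the descriptors and call admin.createTable(...) exactly as above;
    // both resources are closed automatically, even if createTable throws
}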
# Start HDFS
start-dfs.sh
# Start HBase
start-hbase.sh
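To run the level outside the grader, a minimal driver like the one below works. The Main class is not part of the exercise; it assumes hbase-site.xml is on the classpath so that HBaseConfiguration.create() can locate the cluster.

package step1;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class Main {
    public static void main(String[] args) throws Exception {
        new Task().createTable();
        // Verify that both tables were created
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            System.out.println("dept exists: " + admin.tableExists(TableName.valueOf("dept")));
            System.out.println("emp exists: " + admin.tableExists(TableName.valueOf("emp")));
        }
    }
}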
Level 2
package step2;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
    public void insertInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("tb_step2");
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
        tableDescriptor.setColumnFamily(family); // attach the column family
        admin.createTable(tableDescriptor.build()); // create the table
        // Insert data
        byte[] row1 = Bytes.toBytes("row1");
        Put put1 = new Put(row1);
        byte[] columnFamily1 = Bytes.toBytes("data"); // column family
        byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // column qualifier
        byte[] value1 = Bytes.toBytes("张三丰"); // cell value
        put1.addColumn(columnFamily1, qualifier1, value1);
        byte[] row2 = Bytes.toBytes("row2");
        Put put2 = new Put(row2);
        byte[] columnFamily2 = Bytes.toBytes("data"); // column family
        byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // column qualifier
        byte[] value2 = Bytes.toBytes("张无忌"); // cell value
        put2.addColumn(columnFamily2, qualifier2, value2);
        Table table = connection.getTable(tableName);
        table.put(put1);
        table.put(put2);
        // Release resources
        table.close();
        admin.close();
        connection.close();
        /********* End *********/
    }
}
# Start HDFS
start-dfs.sh
# Start HBase
start-hbase.sh
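As a quick sanity check, the two inserted cells can be read back with Get. The lines below are a sketch, not required by the grader; they are meant to be appended inside insertInfo() just before the cleanup calls, where the table variable is still in scope.

// Hypothetical verification lines; reuse the in-scope table variable
Result r1 = table.get(new Get(Bytes.toBytes("row1")));
System.out.println(Bytes.toString(r1.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")))); // prints 张三丰
Result r2 = table.get(new Get(Bytes.toBytes("row2")));
System.out.println(Bytes.toString(r2.getValue(Bytes.toBytes("data"), Bytes.toBytes("2")))); // prints 张无忌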
Level 3
package step3;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
    public void queryTableInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("t_step3");
        Table table = connection.getTable(tableName);
        // Single-row read
        Get get = new Get(Bytes.toBytes("row1")); // build the Get object
        Result result = table.get(get); // fetch the row through the Table object
        // Often only the cell value is needed; this reads the value of data:1
        byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // returned as a byte array
        // Convert the bytes to a string
        String valueStr = new String(valueBytes, "UTF-8");
        System.out.println("value:" + valueStr);
        TableName tableStep3Name = TableName.valueOf("table_step3");
        Table step3Table = connection.getTable(tableStep3Name);
        // Batch read: scan the whole table
        Scan scan = new Scan();
        ResultScanner scanner = step3Table.getScanner(scan);
        try {
            for (Result scannerResult : scanner) {
                byte[] row = scannerResult.getRow();
                System.out.println("rowName:" + new String(row, "UTF-8"));
            }
        } finally {
            scanner.close();
        }
        // Release resources
        table.close();
        step3Table.close();
        admin.close();
        connection.close();
        /********* End *********/
    }
}
# Start HDFS
start-dfs.sh
# Start HBase
start-hbase.sh
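A plain new Scan() walks every column of every row. When only one column matters, the Scan can be narrowed before it is handed to getScanner. The sketch below reuses step3Table from the code above; the data:1 column and the row cap are assumptions chosen for illustration (setLimit is available since HBase 2.0):

Scan narrow = new Scan();
narrow.addColumn(Bytes.toBytes("data"), Bytes.toBytes("1")); // fetch only data:1
narrow.setLimit(10);                                         // stop after 10 rows
try (ResultScanner rs = step3Table.getScanner(narrow)) {
    for (Result r : rs) {
        System.out.println(Bytes.toString(r.getRow()) + " => "
                + Bytes.toString(r.getValue(Bytes.toBytes("data"), Bytes.toBytes("1"))));
    }
}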
Level 4
package step4;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
    public void deleteTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("t_step4");
        admin.disableTable(tableName); // a table must be disabled before it can be deleted
        admin.deleteTable(tableName);
        // Release resources
        admin.close();
        connection.close();
        /********* End *********/
    }
}
# Start HDFS
start-dfs.sh
# Start HBase
start-hbase.sh
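Note that disableTable throws TableNotEnabledException if the table is already disabled, and deleteTable throws TableNotFoundException if the table does not exist. A defensive sketch of the same deleteTable body guards both calls:

TableName tableName = TableName.valueOf("t_step4");
if (admin.tableExists(tableName)) {
    if (!admin.isTableDisabled(tableName)) {
        admin.disableTable(tableName); // disable first; required before deletion
    }
    admin.deleteTable(tableName);
}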