Preface:
1. HBase
HBase is a distributed, column-oriented database built on top of the Hadoop Distributed File System (HDFS). Its original prototype dates back to 2007, so it has a long history. So what is Hadoop? Hadoop is a distributed environment for storing and processing big data, and it uses the MapReduce programming model to analyze that data.
Typical scenarios:
- Detailed logs of monitoring data
- Transaction and order detail data (Taobao and the like)
- Facebook news feed details
HBase provides BigTable-like capabilities on top of Hadoop. Unlike common relational databases, HBase is suited to storing unstructured data, and unlike row-oriented databases, it uses a column-based storage model.
HBase is a column-oriented database in which rows are kept sorted by row key.
- A table is a collection of rows.
- A row is a collection of column families. A column family groups key-value pairs; it is identified by its name, which forms part of each cell's key, and can hold an effectively unlimited number of columns.
- A column family is a set of columns. Columns within a family are stored contiguously, and every cell carries a timestamp.
- Columns themselves are stored as key-value pairs.
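A common way to picture this model is as a nested, sorted map: row key, then column family:qualifier, then timestamp, then value. The snippet below only illustrates that idea with plain Java collections (the row key, column, and value are invented); it does not use any HBase API:

import java.util.Collections;
import java.util.SortedMap;
import java.util.TreeMap;

public class LogicalModelSketch {
    public static void main(String[] args) {
        // rowKey -> (columnFamily:qualifier -> (timestamp -> value))
        SortedMap<String, SortedMap<String, SortedMap<Long, String>>> table = new TreeMap<>();

        // Each cell keeps multiple timestamped versions, newest first
        SortedMap<Long, String> nameVersions = new TreeMap<>(Collections.reverseOrder());
        nameVersions.put(1700000000L, "Alice");

        SortedMap<String, SortedMap<Long, String>> row = new TreeMap<>();
        row.put("info:name", nameVersions);

        // Rows are kept sorted by row key
        table.put("user001", row);

        System.out.println(table);
    }
}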
The biggest difference from row-oriented databases is that large tables can be designed around columns, which makes HBase well suited to online analytical processing (OLAP). There are also several differences from a relational database (RDBMS):
- HBase uses wide tables and scales horizontally; an RDBMS uses small tables and is hard to scale out.
- HBase has no transactions.
- HBase holds no normalized data, only key-value pairs.
2. Usage and configuration
2.1 Native Java client
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
public class HBaseOperation {

    public Configuration conf = null;

    /**
     * init
     */
    @Before
    public void init() {
        conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "hadoop1:2181"); // Specify the zk (ZooKeeper) address
    }

    /**
     * Delete a table
     * @throws Exception
     */
    public void drop() throws Exception {
        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.disableTable("");                 // "" is a placeholder for the table name
        admin.deleteTable("");
        admin.close();
    }

    /**
     * Add data
     * @throws Exception
     */
    public void put() throws Exception {
        HTable table = new HTable(conf, "");    // "" is a placeholder for the table name
        List<Put> puts = new ArrayList<Put>();
        Put name = new Put("".getBytes());      // Create a row ("" is a placeholder for the row key)
        name.add("".getBytes(), "".getBytes(), "".getBytes());      // family, qualifier, value placeholders
        Put address = new Put("".getBytes());   // Create another row
        address.add("".getBytes(), "".getBytes(), "".getBytes());
        puts.add(name);
        puts.add(address);
        table.put(puts);
        table.close();
    }

    /**
     * Query a single row
     * @throws Exception
     */
    public void get() throws Exception {
        HTable table = new HTable(conf, "");
        Get get = new Get("".getBytes());       // "" is a placeholder for the row key
        get.setMaxVersions(3);
        Result result = table.get(get);
        for (KeyValue kv : result.list()) {
            String family = new String(kv.getFamily());
            String qualifier = new String(kv.getQualifier());
            String value = new String(kv.getValue());
            System.out.println(family + "\t" + qualifier + "\t" + value);
        }
        table.close();
    }

    /**
     * Delete data
     * @throws Exception
     */
    public void del() throws Exception {
        HTable table = new HTable(conf, "");
        Delete del = new Delete("".getBytes()); // "" is a placeholder for the row key
        del.deleteColumn("".getBytes(), "".getBytes());              // family and qualifier placeholders
        table.delete(del);
        table.close();
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "");                      // "" is a placeholder for the zk address
        HBaseAdmin hAdmin = new HBaseAdmin(conf);
        HTableDescriptor tDescriptor = new HTableDescriptor("".getBytes());    // table name placeholder
        HColumnDescriptor cDescriptor = new HColumnDescriptor("".getBytes());  // column family name placeholder
        cDescriptor.setMaxVersions(10);
        tDescriptor.addFamily(cDescriptor);
        hAdmin.createTable(tDescriptor);
        hAdmin.close();
    }
}
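The class above covers table creation, put, get, and delete. A range scan follows the same pattern; the method below is only a sketch that could sit alongside the methods above. It uses the same, older client API as the rest of the class, the table name and row keys are placeholders, and it additionally needs imports for org.apache.hadoop.hbase.client.Scan and org.apache.hadoop.hbase.client.ResultScanner.

    /**
     * Scan a range of rows (sketch; the "" placeholders stand for the table name, start row, and stop row)
     * @throws Exception
     */
    public void scan() throws Exception {
        HTable table = new HTable(conf, "");
        Scan scan = new Scan("".getBytes(), "".getBytes()); // start row (inclusive), stop row (exclusive)
        ResultScanner scanner = table.getScanner(scan);
        try {
            for (Result result : scanner) {
                for (KeyValue kv : result.list()) {
                    System.out.println(new String(kv.getRow()) + "\t"
                            + new String(kv.getFamily()) + ":" + new String(kv.getQualifier())
                            + "\t" + new String(kv.getValue()));
                }
            }
        } finally {
            scanner.close();
            table.close();
        }
    }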
2.2 Spring Boot integration
<!-- pom.xml -->
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>1.0.0-cdh5.4.4</version>
    <exclusions>
        <exclusion>
            <groupId>javax.servlet</groupId>
            <artifactId>servlet-api</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<!-- Spring Boot HBase dependency -->
<dependency>
    <groupId>com.spring4all</groupId>
    <artifactId>spring-boot-starter-hbase</artifactId>
    <version>${hbase-spring-boot}</version>
</dependency>
# HBase configuration
# Specify the HBase zk (ZooKeeper) address
spring.data.hbase.quorum=xxx
# Specify the HBase storage path on HDFS
spring.data.hbase.rootDir=xxx
# Specify the HBase root znode in ZooKeeper
spring.data.hbase.nodeParent=xxx
@Data
public class City {

    /** City id */
    private Long id;

    /** Age */
    private Integer age;

    /** City name */
    private String cityName;
}
// Implement RowMapper to convert a column family's cells into a City object
public class CityRowMapper implements RowMapper<City> {

    private static byte[] COLUMN_FAMILY = "f".getBytes();
    private static byte[] NAME = "name".getBytes();
    private static byte[] AGE = "age".getBytes();

    @Override
    public City mapRow(Result result, int rowNum) throws Exception {
        String name = Bytes.toString(result.getValue(COLUMN_FAMILY, NAME));
        int age = Bytes.toInt(result.getValue(COLUMN_FAMILY, AGE));
        City dto = new City();
        dto.setCityName(name);
        dto.setAge(age);
        return dto;
    }
}
// HbaseTemplate offers an API style similar to RedisTemplate
@Service
public class CityServiceImpl implements CityService {

    @Autowired
    private HbaseTemplate hbaseTemplate;

    // Range scan based on the start and stop row keys
    public List<City> query(String startRow, String stopRow) {
        Scan scan = new Scan(Bytes.toBytes(startRow), Bytes.toBytes(stopRow));
        scan.setCaching(5000);
        List<City> dtos = this.hbaseTemplate.find("people_table", scan, new CityRowMapper());
        return dtos;
    }

    // Query a single row
    public City query(String row) {
        City dto = this.hbaseTemplate.get("people_table", row, new CityRowMapper());
        return dto;
    }

    // Insert or update
    public void saveOrUpdate() {
        List<Mutation> saveOrUpdates = new ArrayList<Mutation>();
        Put put = new Put(Bytes.toBytes("135xxxxxx"));
        put.addColumn(Bytes.toBytes("people"), Bytes.toBytes("name"), Bytes.toBytes("test"));
        saveOrUpdates.add(put);
        this.hbaseTemplate.saveOrUpdates("people_table", saveOrUpdates);
    }
}
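One minimal way to exercise this service is from a REST controller. The sketch below is illustrative only: the controller class, its request mappings, and the assumption that the CityService interface declares the three methods shown above are not part of the original project.

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class CityController {

    @Autowired
    private CityService cityService;

    // Range scan: /cities?startRow=...&stopRow=...
    @GetMapping("/cities")
    public List<City> range(@RequestParam String startRow, @RequestParam String stopRow) {
        return cityService.query(startRow, stopRow);
    }

    // Single-row lookup: /city?row=...
    @GetMapping("/city")
    public City one(@RequestParam String row) {
        return cityService.query(row);
    }
}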
For a reference project, see: github.com/SpringForAl…