Import dependencies and configure them

Maven dependencies

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <version>1.16.10</version>
</dependency>

The application.properties configuration

# es cluster name; it must match the cluster.name set in the server's config file
spring.data.elasticsearch.cluster-name=elasticsearch
# multiple nodes can be separated by commas
spring.data.elasticsearch.cluster-nodes=127.0.0.1:9300
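For reference, the name above must match the server side. A minimal excerpt of the corresponding setting in the server's elasticsearch.yml:

# elasticsearch.yml on the es server
cluster.name: elasticsearch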

Write the es entity class

If you need word segmentation, download the IK analyzer as needed. The IK analyzer has two modes: ik_smart (coarse-grained splitting) and ik_max_word (fine-grained splitting). After downloading it, unzip it directly into the plugins directory of the ES installation. (A sketch of checking how IK splits a piece of text follows the entity class below.)

package com.tcm.elastic.entity;

import java.sql.Timestamp;

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import lombok.Data;

// es entity class
// If you want to sort by a field, that field's type cannot be String
// @Document marks this class as a document object; indexName is the index library (either a custom one or an existing one). Defining an index library means defining where this data is stored in the ES database.
@Document(indexName = "search", type = "elastic")
@Data
public class ElasticWhole {

	// Document primary key, unique
	@Id
	private Long uuid;
	// Stored and segmented with the ik_smart analyzer (word segmentation is enabled by default); the type stays text, and queries are also segmented with ik_smart
	@Field(store = true, analyzer = "ik_smart", searchAnalyzer = "ik_smart", type = FieldType.Text)
	private String title;
	// Stored and segmented with the ik_smart analyzer; the type stays text, and queries are also segmented with ik_smart
	@Field(store = true, analyzer = "ik_smart", searchAnalyzer = "ik_smart", type = FieldType.Text)
	private String content;

	private String img_url;

	private Timestamp add_time;

	private String zhaiyao;

	private String link_url;
}
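To check how IK splits a piece of text, you can call the analyze API through the transport client that ElasticsearchTemplate exposes. A minimal sketch, assuming the IK plugin is installed; the sample text and the method name are illustrative:

import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;

// Call from any bean with an autowired ElasticsearchTemplate
public void printIkTokens(ElasticsearchTemplate elasticsearchTemplate) {
	AnalyzeResponse resp = elasticsearchTemplate.getClient().admin().indices()
			.prepareAnalyze("中华人民共和国")   // text to segment
			.setAnalyzer("ik_smart")           // or "ik_max_word" for fine-grained splitting
			.get();
	for (AnalyzeResponse.AnalyzeToken token : resp.getTokens()) {
		System.out.println(token.getTerm());
	}
}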

Search condition entity class

package com.tcm.elastic.entity;

import lombok.Data;
/**
 * Search parameters
 * @author XiaoRenPing
 */
@Data
public class ESArticleSearchRequest {

	// Search keyword
	private String keyword;
	// Current page number (PageRequest.of treats it as zero-based: 0 is the first page)
	private int pageNum;
	// Number of records per page
	private int pageSize;
}

Write the Dao layer

package com.tcm.elastic.mapper;

import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.stereotype.Repository;

import com.tcm.elastic.entity.ElasticWhole;

@Repository
// ElasticsearchRepository already provides the basic add, delete, update, and query operations for es
public interface ElasticWholeRepository extends ElasticsearchRepository<ElasticWhole, Long> {
}
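Beyond the built-in CRUD, Spring Data can also derive queries from method names. A hedged sketch of how the same repository could be extended; findByTitle is an illustrative addition, not part of the original project:

package com.tcm.elastic.mapper;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.stereotype.Repository;

import com.tcm.elastic.entity.ElasticWhole;

@Repository
public interface ElasticWholeRepository extends ElasticsearchRepository<ElasticWhole, Long> {
	// Hypothetical derived query: Spring Data parses the method name into a match query on title
	Page<ElasticWhole> findByTitle(String title, Pageable pageable);
}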

Write the controller

package com.tcm.elastic.controller;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.DisMaxQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Field;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.SearchResultMapper;
import org.springframework.data.elasticsearch.core.aggregation.AggregatedPage;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import com.alibaba.fastjson.JSONObject;
import com.tcm.common.util.IdGenerator;
import com.tcm.elastic.entity.ESArticleSearchRequest;
import com.tcm.elastic.entity.ElasticWhole;
import com.tcm.elastic.mapper.ElasticWholeRepository;

@RestController
@RequestMapping("/elasticWhole")
public class ElasticWholeController {

	@Autowired
	ElasticWholeRepository elasticWholeRepository;

	// Primary key (uuid) generator; the utility class is defined later in this post
	IdGenerator idWorker = new IdGenerator(0, 0);

	@Autowired
	private ElasticsearchTemplate elasticsearchTemplate;

	// Query by id
	@RequestMapping("/findById")
	public Optional<ElasticWhole> findById(@RequestBody ElasticWhole elasticWhole) {
		return elasticWholeRepository.findById(elasticWhole.getUuid());
	}

	// Add
	@RequestMapping("/insert")
	public String insert(@RequestBody ElasticWhole elasticWhole) {
    	// Generate insert UUID
    	elasticWhole.setUuid(idWorker.nextId());
    	elasticWhole.setAdd_time(new Timestamp(new Date().getTime()));
    	System.out.println(elasticWhole.getAdd_time());
    	elasticWholeRepository.save(elasticWhole);
    	return "Success";
	}

	// Delete
	@RequestMapping("/delete")
	public String delete(@RequestBody ElasticWhole elasticWhole) {
    	elasticWholeRepository.deleteById(elasticWhole.getUuid());
    	return "Success";
	}
	
	/**
	 * matchQuery: single-field query
	 * matchAllQuery: match all
	 * multiMatchQuery: multiple fields matching one value
	 * wildcardQuery: fuzzy (wildcard) query
	 * boost: sets the weight; the larger the value, the greater the weight
	 * Mixed search
	 * @param articleSearchRequest
	 * @return
	 */
	// Paging word-segmentation query without highlighting
	//	@RequestMapping("/search")
	//	public Page<ElasticWhole> querySearch(@RequestBody ESArticleSearchRequest articleSearchRequest) {
	//		// Paging (and optional sort)
	//		//Sort sort = new Sort(Direction.DESC, "add_time");
	//		Pageable pageable = PageRequest.of(articleSearchRequest.getPageNum(), articleSearchRequest.getPageSize());
	//		// Query
	//		DisMaxQueryBuilder disMaxQueryBuilder = QueryBuilders.disMaxQuery();
	//		// Single-field query with a boosted weight
	//		QueryBuilder ikTypeQuery = QueryBuilders.matchQuery("title", articleSearchRequest.getKeyword()).boost(2f);
	//		// Pinyin query
	//		QueryBuilder pinyinTypeQuery = QueryBuilders.matchQuery("title.pinyin", articleSearchRequest.getKeyword());
	//		// Multiple fields matching one value: QueryBuilders.multiMatchQuery("value", "field1", "field2", ...)
	//		QueryBuilder multiCodeQuery = QueryBuilders.multiMatchQuery(articleSearchRequest.getKeyword(), "title");
	//		disMaxQueryBuilder.add(ikTypeQuery);
	//		disMaxQueryBuilder.add(pinyinTypeQuery);
	//		disMaxQueryBuilder.add(multiCodeQuery);
	//		SearchQuery searchQuery = new NativeSearchQueryBuilder()
	//				.withQuery(disMaxQueryBuilder)
	//				// Highlighted field
	//				.withHighlightFields(new HighlightBuilder.Field("title").preTags("").postTags(""))
	//				// Paging
	//				.withPageable(pageable).build();
	//		searchQuery.addSort(new Sort(Direction.DESC, "add_time"));
	//		Page<ElasticWhole> search = elasticWholeRepository.search(searchQuery);
	//		return search;
	//	}
	
	// Paging word-segmentation query with keyword highlighting
	@RequestMapping("/search")
	public Page<ElasticWhole> findAnswerByTitle(@RequestBody ESArticleSearchRequest articleSearchRequest) {
		Page<ElasticWhole> search = null;
		// Define the highlighted field
		Field titleField = new HighlightBuilder.Field("title")
				// Wrap highlighted keywords in opening and closing tags
				.preTags("<span class='gl'>").postTags("</span>");
		//Field contentField = new HighlightBuilder.Field("content").preTags("<span>").postTags("</span>");
		Pageable pageable = PageRequest.of(articleSearchRequest.getPageNum(), articleSearchRequest.getPageSize());
		// Build the query content
		QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(articleSearchRequest.getKeyword());
		// Fields the query matches against
		//queryBuilder.field("title").field("content");
		queryBuilder.field("title");
		
		SearchQuery searchQuery = new NativeSearchQueryBuilder()
				.withQuery(queryBuilder)
				// Field highlight
				.withHighlightFields(titleField)
				// Paging
				.withPageable(pageable)
				.build();
		// Sorting
		searchQuery.addSort(new Sort(Direction.DESC, "add_time"));
		search = elasticWholeRepository.search(searchQuery);
		// If there is no data, return directly
		if (search.getContent().size() == 0) {
			return search;
		}
		// Re-run the query, mapping results so the highlight fragments replace the raw fields
		search = elasticsearchTemplate.queryForPage(searchQuery, ElasticWhole.class,
				new SearchResultMapper() {
	 
					@Override
					public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
						List<ElasticWhole> list = new ArrayList<ElasticWhole>();
						for (SearchHit searchHit : response.getHits()) {
							if (response.getHits().getHits().length <= 0) {
								return null;
							}
							ElasticWhole elasticWhole = JSONObject.parseObject(searchHit.getSourceAsString(), ElasticWhole.class);
							Map<String, HighlightField> highlightFields = searchHit.getHighlightFields();
							// Match the information in the title field
							HighlightField titleHighlight = highlightFields.get("title");
							if (titleHighlight != null) {
								Text[] fragments = titleHighlight.fragments();
								String fragmentString = fragments[0].string();
								elasticWhole.setTitle(fragmentString);
							}
							// Highlighted fragment for the content field
//							HighlightField contentHighlight = highlightFields.get("content");
//							if (contentHighlight != null) {
//								Text[] fragments = contentHighlight.fragments();
//								String fragmentString = fragments[0].string();
//								elasticWhole.setContent(fragmentString);
//							}
							list.add(elasticWhole);
	 
						}
						if (list.size() > 0) {
							AggregatedPage<T> result = new AggregatedPageImpl<T>((List<T>) list, pageable,
									response.getHits().getTotalHits());

							return result;
						}
						return null;
					}
				});
		return search;
	}
}
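A hedged smoke test for the /search endpoint (host, port, keyword, and the class name are assumptions; with spring-boot-starter-web on the classpath, RestTemplate serializes the request body to JSON):

import org.springframework.web.client.RestTemplate;

import com.tcm.elastic.entity.ESArticleSearchRequest;

public class SearchSmokeTest {
	public static void main(String[] args) {
		ESArticleSearchRequest req = new ESArticleSearchRequest();
		req.setKeyword("中医");
		req.setPageNum(0);   // zero-based: 0 requests the first page
		req.setPageSize(10);
		String json = new RestTemplate().postForObject(
				"http://localhost:8080/elasticWhole/search", req, String.class);
		System.out.println(json);
	}
}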

Primary key generation utility class

package com.tcm.common.util;


public class IdGenerator {

	/** Start timestamp (2018-09-01, in milliseconds) */
	private final long twepoch = 1535731200000L;
	/** The number of bits occupied by the machine ID */
	private final long workerIdBits = 5L;

	/** The number of bits occupied by the data center ID */
	private final long datacenterIdBits = 5L;

	/** The maximum machine ID supported: 31 (this shift trick quickly computes the largest decimal number representable in the given number of binary digits) */
	private final long maxWorkerId = -1L ^ (-1L << workerIdBits);

	/** The maximum data center ID supported, also 31 */
	private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);

	/** The number of bits occupied by the sequence */
	private final long sequenceBits = 12L;

	/** The machine ID is shifted 12 bits to the left */
	private final long workerIdShift = sequenceBits;

	/** The data center ID is shifted 17 bits to the left (12+5) */
	private final long datacenterIdShift = sequenceBits + workerIdBits;

	/** The timestamp is shifted 22 bits to the left (5+5+12) */
	private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;

	/** Mask for the sequence: 4095 (0b111111111111 = 0xFFF = 4095) */
	private final long sequenceMask = -1L ^ (-1L << sequenceBits);

	/** Working machine ID (0~31) */
	private long workerId;

	/** Data center ID (0~31) */
	private long datacenterId;

	/** Sequence within the same millisecond (0~4095) */
	private long sequence = 0L;

	/** The last time an ID was generated */
	private long lastTimestamp = -1L;
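	// Resulting 64-bit ID layout (high to low):
	// 1 unused sign bit | 41-bit timestamp delta | 5-bit datacenter ID | 5-bit worker ID | 12-bit sequence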

	// ==============================Constructors=====================================

	/**
	 * Constructor
	 * @param workerId worker ID (0~31)
	 * @param datacenterId data center ID (0~31)
	 */
	public IdGenerator(long workerId, long datacenterId) {
		if (workerId > maxWorkerId || workerId < 0) {
			throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
		}
		if (datacenterId > maxDatacenterId || datacenterId < 0) {
			throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
		}
		this.workerId = workerId;
		this.datacenterId = datacenterId;
	}

	// ==============================Methods==========================================

	/**
	 * Get the next ID (this method is thread-safe)
	 * @return SnowflakeId
	 */
	public synchronized long nextId() {
		long timestamp = timeGen();

		// If the current time is less than the last timestamp generated by the ID, an exception should be thrown when the system clock is rolled back
		if (timestamp < lastTimestamp) {
			throw new RuntimeException(String.format(
					"Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
		}

		// If generated within the same millisecond, advance the sequence
		if (lastTimestamp == timestamp) {
			sequence = (sequence + 1) & sequenceMask;
			// Sequence overflow within this millisecond
			if (sequence == 0) {
				// Block until the next millisecond to get a new timestamp
				timestamp = tilNextMillis(lastTimestamp);
			}
		}
		// The timestamp changed; reset the sequence for the new millisecond
		else {
			sequence = 0L;
		}

		// The last time the ID was generated
		lastTimestamp = timestamp;

		// Shift and put together by or to form a 64-bit ID
		return ((timestamp - twepoch) << timestampLeftShift) //
				| (datacenterId << datacenterIdShift) //
				| (workerId << workerIdShift) //
				| sequence;
	}

	/**
	 * Blocks until the next millisecond to obtain a new timestamp
	 * @param lastTimestamp the last time an ID was generated
	 * @return current timestamp
	 */
	protected long tilNextMillis(long lastTimestamp) {
		long timestamp = timeGen();
		while (timestamp <= lastTimestamp) {
			timestamp = timeGen();
		}
		return timestamp;
	}

	/**
	 * Returns the current time in milliseconds
	 * @return current time (ms)
	 */
	protected long timeGen() {
		return System.currentTimeMillis();
	}
}
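A minimal usage sketch of the generator (IdGeneratorDemo is illustrative, not part of the project): consecutive calls on one instance yield strictly increasing IDs.

import com.tcm.common.util.IdGenerator;

public class IdGeneratorDemo {
	public static void main(String[] args) {
		IdGenerator idWorker = new IdGenerator(0, 0);
		long previous = -1L;
		for (int i = 0; i < 5; i++) {
			long id = idWorker.nextId();
			// Each ID from a single instance is strictly greater than the last
			System.out.println(id + "  increasing: " + (id > previous));
			previous = id;
		}
	}
}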

If the project also uses a Redis cache, startup will fail: the Redis client and the ES transport client both try to set Netty's availableProcessors. Two solutions found online are combined below.

// Solution 1: add the following line to the main method
System.setProperty("es.set.netty.runtime.available.processors", "false");
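In context, solution 1 looks like this in the startup class (a sketch; the class name is illustrative). The property has to be set before the ES client loads any Netty classes:

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class Application {
	public static void main(String[] args) {
		// Set before SpringApplication.run so it takes effect before Netty initializes
		System.setProperty("es.set.netty.runtime.available.processors", "false");
		SpringApplication.run(Application.class, args);
	}
}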
// Solution 2: write a configuration class
package com.tcm.common.config;

import javax.annotation.PostConstruct;

import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;

/**
 * @author Tangyuewei
 * <p>
 * Description: works around the Netty "availableProcessors is already set" startup error
 * </p>
 * @date 2020/4/9
 * @see com.tangyuewei.user.common.es
 */
@Configuration
public class ElasticSearchConfig {
	@PostConstruct
	void init() {
		System.setProperty("es.set.netty.runtime.available.processors", "false");
	}
}

If you have a lot of data, you will need to increase the ES heap size: in /elasticsearch/config/jvm.options, adjust -Xms and -Xmx (for example -Xms4g -Xmx4g). ES defaults to 1 GB of heap.
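For reference, the corresponding lines in /elasticsearch/config/jvm.options (4g matches the example above; keep -Xms and -Xmx equal):

-Xms4g
-Xmx4g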