Импортировать зависимости и выполнить настройку
maven-зависимости
<!-- Spring Boot + Elasticsearch integration starter -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<!-- Lombok: provides @Data to generate getters/setters for entity classes -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.16.10</version>
</dependency>
Конфигурация application.properties
# Name of the ES cluster (must match cluster.name in the server's config file)
spring.data.elasticsearch.cluster-name=elasticsearch
# Available ES nodes; separate multiple nodes with commas
spring.data.elasticsearch.cluster-nodes=127.0.0.1:9300
Напишите класс сущности es
(Если нужна сегментация слов, вы можете скачать ik-токенизатор. Существует два режима токенизатора ik: ik_smart (грубое разделение) и ik_max_word (мелкое разделение). После загрузки токенизатора разархивируйте его в папку plugins в каталоге установки es.)
package com.tcm.elastic.entity;

import java.sql.Timestamp;

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
// FIX: @Field and FieldType are used below but were never imported,
// so this snippet did not compile as originally written.
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

import lombok.Data;

/**
 * Elasticsearch document entity mapped to index "search", type "elastic".
 * The index name can be freely chosen or mirror an existing database;
 * defining it simply creates a store for documents in ES.
 *
 * NOTE: if a field needs to be sortable, its mapped type must not be a
 * plain String (the controller sorts on add_time).
 */
@Document(indexName = "search", type = "elastic")
@Data
public class ElasticWhole {

    // Document primary key; must be unique.
    @Id
    private Long uuid;

    // Stored text field, analyzed with ik_smart both at index time and at
    // search time (analysis is enabled by default for text fields).
    @Field(store = true, analyzer = "ik_smart", searchAnalyzer = "ik_smart", type = FieldType.Text)
    private String title;

    // Same mapping as title: stored, ik_smart at index and search time.
    @Field(store = true, analyzer = "ik_smart", searchAnalyzer = "ik_smart", type = FieldType.Text)
    private String content;

    private String img_url;
    // Creation timestamp; used as the sort key in the search endpoints.
    private Timestamp add_time;
    // Summary/abstract of the article ("zhaiyao" = 摘要).
    private String zhaiyao;
    private String link_url;
}
Критерии поиска Класс сущности
package com.tcm.elastic.entity;

import lombok.Data;

/**
 * Search request parameters for the article search endpoints.
 *
 * @author XiaoRenPing
 */
@Data
public class ESArticleSearchRequest {
    // Search keyword matched against the indexed fields.
    private String keyword;
    // Current page; passed straight to PageRequest.of, which is zero-based.
    private int pageNum;
    // Number of records per page.
    private int pageSize;
}
Напишите слой Дао
// FIX: the package declaration was missing; the controller imports this
// interface as com.tcm.elastic.mapper.ElasticWholeRepository.
package com.tcm.elastic.mapper;

import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.stereotype.Repository;

import com.tcm.elastic.entity.ElasticWhole;

/**
 * DAO for ElasticWhole documents. ElasticsearchRepository already supplies
 * the CRUD and search operations (save, findById, deleteById, search, ...),
 * so no extra methods are needed here.
 */
@Repository
public interface ElasticWholeRepository extends ElasticsearchRepository<ElasticWhole, Long> {
}
контроллер записи
package com.tcm.elastic.controller;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.DisMaxQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Field;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.SearchResultMapper;
import org.springframework.data.elasticsearch.core.aggregation.AggregatedPage;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.alibaba.fastjson.JSONObject;
import com.tcm.common.util.IdGenerator;
import com.tcm.elastic.entity.ESArticleSearchRequest;
import com.tcm.elastic.entity.ElasticWhole;
import com.tcm.elastic.mapper.ElasticWholeRepository;
// REST controller exposing CRUD and highlighted full-text search endpoints
// for ElasticWhole documents stored in Elasticsearch.
@RestController
@RequestMapping("/elasticWhole")
public class ElasticWholeController {

    @Autowired
    ElasticWholeRepository elasticWholeRepository;

    // Snowflake-style generator for unique primary keys (worker 0, datacenter 0).
    IdGenerator idWorker = new IdGenerator(0, 0);

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    // Fetch a single document by its primary key.
    @RequestMapping("/findById")
    public Optional<ElasticWhole> findById(@RequestBody ElasticWhole elasticWhole) {
        return elasticWholeRepository.findById(elasticWhole.getUuid());
    }

    // Insert a document; uuid and add_time are generated server-side.
    @RequestMapping("/insert")
    public String insert(@RequestBody ElasticWhole elasticWhole) {
        // Generate the unique primary key for the new document.
        elasticWhole.setUuid(idWorker.nextId());
        elasticWhole.setAdd_time(new Timestamp(new Date().getTime()));
        System.out.println(elasticWhole.getAdd_time());
        elasticWholeRepository.save(elasticWhole);
        return "成功";
    }

    // Delete a document by its primary key.
    @RequestMapping("/delete")
    public String delete(@RequestBody ElasticWhole elasticWhole) {
        elasticWholeRepository.deleteById(elasticWhole.getUuid());
        return "成功";
    }

    /**
     * Query-builder cheat sheet:
     * matchQuery      : query a single field
     * matchAllQuery   : match all documents
     * multiMatchQuery : match one value against several fields
     * wildcardQuery   : fuzzy/wildcard query
     * boost           : weight; the larger the value, the higher the weight
     * Combined search.
     * @param content
     * @return
     */
    // Paged, analyzed search WITHOUT highlighting (kept for reference).
    // @RequestMapping("/search")
    // public Page<ElasticWhole> querySearch(@RequestBody ESArticleSearchRequest articleSearchRequest){
    //     // Paging and sorting
    //     //Sort sort = new Sort(Direction.DESC, "add_time");
    //     Pageable pageable = PageRequest.of(articleSearchRequest.getPageNum(), articleSearchRequest.getPageSize());
    //     // Query
    //     DisMaxQueryBuilder disMaxQueryBuilder = QueryBuilders.disMaxQuery();
    //     // Single-field query with a boost
    //     QueryBuilder ikTypeQuery = QueryBuilders.matchQuery("title", articleSearchRequest.getKeyword()).boost(2f);
    //     // Pinyin query
    //     QueryBuilder pinyinTypeQuery = QueryBuilders.matchQuery("title.pinyin", articleSearchRequest.getKeyword());
    //     // Match one value against several fields: QueryBuilders.multiMatchQuery("value","field1","field2","xxx")
    //     QueryBuilder multiCodeQuery = QueryBuilders.multiMatchQuery(articleSearchRequest.getKeyword(),"title");
    //     disMaxQueryBuilder.add(ikTypeQuery);
    //     disMaxQueryBuilder.add(pinyinTypeQuery);
    //     disMaxQueryBuilder.add(multiCodeQuery);
    //     SearchQuery searchQuery = new NativeSearchQueryBuilder()
    //             .withQuery(disMaxQueryBuilder)
    //             // Highlighted field
    //             .withHighlightFields(new HighlightBuilder.Field("title").preTags("<span>").postTags("</span>"))
    //             // Paging
    //             .withPageable(pageable).build();
    //     searchQuery.addSort(new Sort(Direction.DESC, "add_time"));
    //     Page<ElasticWhole> search = elasticWholeRepository.search(searchQuery);
    //     return search;
    // }

    // Paged, analyzed keyword search with highlighting on the title field.
    @RequestMapping("/search")
    public Page<ElasticWhole> findAnswerByTitle(@RequestBody ESArticleSearchRequest articleSearchRequest) {
        Page<ElasticWhole> search = null;
        // Define the highlighted field, wrapping matched keywords in
        // <span class='gl'>...</span> tags for the front end.
        Field titleField = new HighlightBuilder.Field("title")
                .preTags("<span class='gl'>").postTags("</span>");
        //Field contentField = new HighlightBuilder.Field("content").preTags("<span>").postTags("</span>");
        Pageable pageable = PageRequest.of(articleSearchRequest.getPageNum(), articleSearchRequest.getPageSize());
        // Build the query from the raw keyword string.
        QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(articleSearchRequest.getKeyword());
        // Fields the query is matched against.
        //queryBuilder.field("title").field("content");
        queryBuilder.field("title");
        SearchQuery searchQuery = new NativeSearchQueryBuilder()
                .withQuery(queryBuilder)
                // Field highlighting
                .withHighlightFields(titleField)
                // Paging
                .withPageable(pageable)
                .build();
        // Sort newest-first by creation time.
        searchQuery.addSort(new Sort(Direction.DESC, "add_time"));
        search = elasticWholeRepository.search(searchQuery);
        // Short-circuit when there are no hits.
        if (search.getContent().size() == 0) {
            return search;
        }
        // NOTE(review): the index is queried twice — once via the repository
        // (above, only to detect the empty case) and once via the template
        // (below, to map highlight fragments). Consider collapsing into a
        // single queryForPage call.
        search = elasticsearchTemplate.queryForPage(searchQuery, ElasticWhole.class,
                new SearchResultMapper() {
                    @Override
                    public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz,
                            Pageable pageable) {
                        List<ElasticWhole> list = new ArrayList<ElasticWhole>();
                        for (SearchHit searchHit : response.getHits()) {
                            // NOTE(review): this guard can only be reached when
                            // there is at least one hit, so it never fires.
                            if (response.getHits().getHits().length <= 0) {
                                return null;
                            }
                            ElasticWhole elasticWhole = JSONObject.parseObject(searchHit.getSourceAsString(), ElasticWhole.class);
                            Map<String, HighlightField> highlightFields = searchHit.getHighlightFields();
                            // Replace the title with its highlighted fragment, if any.
                            HighlightField titleHighlight = highlightFields.get("title");
                            if (titleHighlight != null) {
                                Text[] fragments = titleHighlight.fragments();
                                String fragmentString = fragments[0].string();
                                elasticWhole.setTitle(fragmentString);
                            }
                            // Highlighted fragment for the content field (disabled).
                            // HighlightField contentHighlight = highlightFields.get("content");
                            // if (contentHighlight != null) {
                            //     Text[] fragments = contentHighlight.fragments();
                            //     String fragmentString = fragments[0].string();
                            //     elasticWhole.setContent(fragmentString);
                            // }
                            list.add(elasticWhole);
                        }
                        if (list.size() > 0) {
                            AggregatedPage<T> result = new AggregatedPageImpl<T>((List<T>) list, pageable,
                                    response.getHits().getTotalHits());
                            return result;
                        }
                        return null;
                    }
                });
        return search;
    }
}
Класс инструмента генерации первичного ключа
package com.tcm.common.util;
public class IdGenerator {
/* 开始时间戳 (2018-09-01) */
private final long twepoch = 1535731200L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间戳向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间戳
*/
private long lastTimestamp = -1L;
// ==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public IdGenerator(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
// 如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(String.format(
"Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
// 如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
// 毫秒内序列溢出
if (sequence == 0) {
// 阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
// 时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
// 上次生成ID的时间戳
lastTimestamp = timestamp;
// 移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间戳
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
return System.currentTimeMillis();
}
}
Если в проекте одновременно используется кеш Redis, запуск приложения завершится ошибкой. Ниже объединены два решения, найденные в интернете.
// Option 1: add the following line in main()
System.setProperty("es.set.netty.runtime.available.processors", "false");
// Option 2: define a configuration class
package com.tcm.common.config;
import javax.annotation.PostConstruct;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;
/**
 * Works around the startup failure that occurs when Redis and Elasticsearch
 * are used in the same Spring Boot application (reportedly both configure
 * Netty's available-processors setting — TODO confirm against the ES client
 * version in use).
 *
 * @author tangyuewei
 * @date 2020/4/9
 * @see com.tangyuewei.user.common.es
 */
@Configuration
public class ElasticSearchConfig {
    // Set the property as early as possible, before the ES client initializes.
    @PostConstruct
    void init() {
        System.setProperty("es.set.netty.runtime.available.processors", "false");
    }
}
Если данных слишком много, нужно увеличить размер памяти JVM для es: в файле /elasticsearch/config/jvm.options задайте -Xms4g -Xmx4g (память по умолчанию — 1 ГБ).