Note:
1. The version used here is 7.5.1. The plugins installed later depend on the ES version, so pick a matching version if you need a different one.
2. ES is best run in Docker; for convenience, this demo uses the Windows distribution directly.
Once ES has started successfully, you can issue commands against it through Postman.
1. Add or update an index and its documents
Method 1 (recommended): PUT /{index}/{type}/{id}. The id is required; if no document with that id exists the data is inserted, otherwise it is updated (if only the index is supplied, the index is created).
Method 2: POST /{index}/{type}/{id}. The id may be omitted; if it is not supplied, ES generates one.
2. Get all documents
GET /{index}/{type}/_search
e.g. http://127.0.0.1:9200/newindex/newdoc/_search
3. Get a document by id
GET /{index}/{type}/{id}
e.g. http://127.0.0.1:9200/newindex/newdoc/1
4. Fuzzy search
GET /{index}/{type}/_search?q=*keyword*
e.g. http://127.0.0.1:9200/newindex/newdoc/_search?q=*王*
5. Delete a document
DELETE /{index}/{type}/{id}
e.g. http://127.0.0.1:9200/newindex/newdoc/1
More statements are covered in the official documentation.
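For example, a minimal sketch of adding (or updating) a document with id 1 via Postman; the field names below are only illustrative, chosen to match the Student entity used later:
PUT http://127.0.0.1:9200/newindex/newdoc/1
{
    "name": "王二狗",
    "age": 30,
    "info": "無產階級"
}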
To install the elasticsearch-head plugin (a web front end for browsing the cluster), first clone it:
git clone https://github.com/mobz/elasticsearch-head.git
npm install -g grunt-cli
cd elasticsearch-head/
npm install
Then enable cross-origin requests in the ES configuration so that head can connect to it:
vim ../elasticsearch-7.5.1/config/elasticsearch.yml
http.cors.enabled: true
http.cors.allow-origin: "*"
cd -    # back to the elasticsearch-head root directory
grunt server
Download the IK analyzer
To extend the analyzer with your own dictionary entries:
Create a new file custom.dic under the \elasticsearch-7.5.1\plugins\ik\config directory;
add your custom terms to that file;
modify IKAnalyzer.cfg.xml in the same directory so that the <entry key="ext_dict"> entry points to the custom dictionary.
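Assuming the dictionary file is named custom.dic as above, the relevant line in IKAnalyzer.cfg.xml would look roughly like this:
<entry key="ext_dict">custom.dic</entry>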
Spring Data ElasticSearch
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
spring:
  data:
    elasticsearch:
      cluster-nodes: 127.0.0.1:9300
@Data
@Accessors(chain = true)
@Document(indexName = "school", type = "student") // indexName is the ES index name, type is the document type name
public class Student implements Serializable {
    // @Id marks the document id field
    // index = true means the field is indexed (the default)
    // type sets the field type
    // analyzer = "ik_max_word" controls how the text is tokenized for matching (IK's finest-grained mode)
    // searchAnalyzer = "ik_max_word" is the analyzer applied to the search input
    @Id
    private String id;
    // FieldType.Text (rather than Keyword) so the name is actually analyzed by IK
    @Field(type = FieldType.Text, analyzer = "ik_max_word", searchAnalyzer = "ik_max_word")
    private String name;
    private Integer age;
    @Field(type = FieldType.Double)
    private Double score;
    @Field(type = FieldType.Text, analyzer = "ik_max_word")
    private String info;
}
@Data
@Accessors(chain = true)
public class QueryPage {
    /** current page number */
    private Integer current;
    /** records per page */
    private Integer size;
}
public interface EsRepository extends ElasticsearchRepository<Student, String> {
    /** Fuzzy search by student name or info */
    Page<Student> findByNameOrInfoLike(String name, String info, Pageable pageable);
}
public interface EsService {
    /** Insert a single student */
    void add(Student student);
    /** Batch insert */
    void addAll(List<Student> students);
    /** Paged fuzzy search */
    Page<Student> search(String keyword, QueryPage queryPage);
}
@Service
public class EsServiceImpl implements EsService {
    @Autowired
    private EsRepository esRepository;

    @Override
    public void add(Student student) {
        esRepository.save(student);
    }

    @Override
    public void addAll(List<Student> students) {
        esRepository.saveAll(students);
    }

    @Override
    public Page<Student> search(String keyword, QueryPage queryPage) {
        // ES pages are 0-based, while the incoming page number is 1-based
        PageRequest pageRequest = PageRequest.of(queryPage.getCurrent() - 1, queryPage.getSize());
        return esRepository.findByNameOrInfoLike(keyword, keyword, pageRequest);
    }
}
@SpringBootTest
public class EsServiceImplTest {
    @Autowired
    private EsService esService;

    @Test
    public void insert() {
        List<Student> students = new ArrayList<>();
        for (int i = 10; i <= 12; i++) {
            Student student = new Student();
            student.setId(i + "").setAge(10 + i).setName("王二狗" + i).setScore(72.5 + i).setInfo("大王派我來巡山" + i);
            students.add(student);
        }
        esService.addAll(students);
    }

    @Test
    public void fuzzySearch() {
        QueryPage queryPage = new QueryPage();
        queryPage.setCurrent(1).setSize(5);
        Page<Student> list = esService.search("二狗2", queryPage);
        list.forEach(System.out::println);
    }
}
Take \logstash-7.5.1\config\logstash-sample.conf and, in the same directory, rename it to logstash.conf, then edit it as follows:
# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.
input {
  jdbc {
    # MySQL connection settings
    jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/springboot_es?characterEncoding=UTF8"
    jdbc_user => "root"
    jdbc_password => "1234"
    jdbc_driver_library => "D:\Develop_Tools_Others\logstash-7.5.1\mysql-connector-java-5.1.26.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_paging_enabled => "true"
    jdbc_page_size => "50000"
    # SQL statement whose result set is imported into Elasticsearch
    statement => "select id,name,age,score,info from t_student"
    # Cron-style schedule: minute hour day month weekday; all * means run every minute
    schedule => "* * * * *"
  }
}
output {
  elasticsearch {
    hosts => "localhost:9200"
    # Index name
    index => "school"
    # Document type name
    document_type => "student"
    # Use the id column as the document id
    document_id => "%{id}"
  }
  stdout {
    # Output in JSON format
    codec => json_lines
  }
}
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for t_student
-- ----------------------------
DROP TABLE IF EXISTS `t_student`;
CREATE TABLE `t_student` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主鍵',
`name` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '學生姓名',
`age` int(11) NULL DEFAULT NULL COMMENT '年齡',
`score` double(255, 0) NULL DEFAULT NULL COMMENT '成績',
`info` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '信息',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 4 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t_student
-- ----------------------------
INSERT INTO `t_student` VALUES (1, '小明', 18, 88, '好好學習');
INSERT INTO `t_student` VALUES (2, '小紅', 17, 85, '每天向上');
INSERT INTO `t_student` VALUES (3, '王二狗', 30, 59, '無產階級');
SET FOREIGN_KEY_CHECKS = 1;
D:\Develop_Tools_Others\logstash-7.5.1>.\bin\logstash.bat -f .\config\logstash.conf