feat: project adjustments

pull/19/head
dunwu 2022-02-23 16:24:46 +08:00
parent 0e9ded6812
commit ad820baf2b
120 changed files with 14436 additions and 3032 deletions

View File

@ -19,7 +19,7 @@ insert_final_newline = true
[*.{bat, cmd}]
end_of_line = crlf
[*.{java, gradle, groovy, kt, sh}]
[*.{java, gradle, groovy, kt, sh, xml}]
indent_size = 4
[*.md]

View File

@ -1,57 +1,53 @@
<?xml version="1.0"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-h2</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>1.8</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.6.3</version>
</parent>
<junit.version>4.13.1</junit.version>
</properties>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-h2</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<!-- db begin -->
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
<!-- db end -->
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-rest</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<!-- test end -->
</dependencies>
<!-- db begin -->
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>2.1.210</version>
</dependency>
<!-- db end -->
</dependencies>
<dependencyManagement>
<dependencies>
<!-- database begin -->
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>2.0.206</version>
<scope>test</scope>
</dependency>
<!-- database end -->
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- test end -->
</dependencies>
</dependencyManagement>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,56 @@
package io.github.dunwu.javadb.h2.springboot;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
@SpringBootApplication
public class SpringBootDataJpaApplication implements CommandLineRunner {
private final Logger log = LoggerFactory.getLogger(this.getClass());
private final DataSource dataSource;
public SpringBootDataJpaApplication(DataSource dataSource) {
this.dataSource = dataSource;
}
public static void main(String[] args) {
SpringApplication.run(SpringBootDataJpaApplication.class, args);
}
@Override
public void run(String... args) throws Exception {
if (dataSource != null) {
printDataSourceInfo(dataSource);
log.info("Connect to datasource success.");
} else {
log.error("Connect to datasource failed!");
}
}
private void printDataSourceInfo(DataSource dataSource) throws SQLException {
Connection connection;
if (dataSource != null) {
connection = dataSource.getConnection();
} else {
log.error("Get dataSource failed!");
return;
}
if (connection != null) {
log.info("DataSource Url: {}", connection.getMetaData().getURL());
} else {
log.error("Connect to datasource failed!");
}
}
}

View File

@ -0,0 +1,69 @@
package io.github.dunwu.javadb.h2.springboot;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import java.util.Objects;
/**
* User entity
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-11-18
*/
@Entity
@Data
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class User {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
private String name;
private Integer age;
private String address;
private String email;
public User(String name, Integer age, String address, String email) {
this.name = name;
this.age = age;
this.address = address;
this.email = email;
}
@Override
public int hashCode() {
return Objects.hash(id, name);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof User)) {
return false;
}
User user = (User) o;
if (id != null && id.equals(user.id)) {
return true;
}
return name.equals(user.name);
}
}

View File

@ -0,0 +1,41 @@
package io.github.dunwu.javadb.h2.springboot;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.data.rest.core.annotation.RepositoryRestResource;
/**
* JPA REST repository for the user table
* <p>
* Once the Application is running, visit http://<host:ip>/user
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-10-12
*/
@RepositoryRestResource(collectionResourceRel = "user", path = "user")
public interface UserRepository extends JpaRepository<User, Long> {
/**
* Find a user by name
* <p>
* http://localhost:8080/user/search/findByName?name=lisi
* @param name user name
* @return {@link User}
*/
User findByName(@Param("name") String name);
/**
* Find a user by email
* @param email user email
* @return {@link User}
*/
@Query("from User u where u.email=:email")
User findByEmail(@Param("email") String email);
/**
* Delete users by name
* @param name user name
*/
void deleteByName(@Param("name") String name);
}
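As a usage illustration for the search endpoint documented above: a minimal sketch using Spring's RestTemplate, assuming the application is running locally on port 8080 and the rows seeded by data-h2.sql are present (the class name and URL are illustrative assumptions, not part of this commit).

import org.springframework.web.client.RestTemplate;

public class UserSearchClientExample {

    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();
        // Calls the derived query exposed by Spring Data REST at /user/search/findByName
        String body = restTemplate.getForObject(
            "http://localhost:8080/user/search/findByName?name={name}", String.class, "张三");
        // The response is a HAL+JSON representation of the matching User
        System.out.println(body);
    }
}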

View File

@ -0,0 +1,6 @@
spring.datasource.url = jdbc:h2:mem:test
spring.datasource.driver-class-name = org.h2.Driver
spring.datasource.username = sa
spring.datasource.password =
spring.datasource.schema = classpath:sql/schema-h2.sql
spring.datasource.data = classpath:sql/data-h2.sql
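With these properties Spring Boot configures an in-memory H2 DataSource and, on Boot versions that still honor the legacy spring.datasource.schema/spring.datasource.data keys, runs the referenced scripts at startup. Below is a minimal sketch of checking the seeded rows with JdbcTemplate, assuming the usual spring-boot-starter-test setup (the test class name is illustrative).

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.jdbc.core.JdbcTemplate;

import static org.assertj.core.api.Assertions.assertThat;

@SpringBootTest
public class DataSourceInitializationTest {

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Test
    public void seededRowsArePresent() {
        // data-h2.sql inserts two rows into the user table created by schema-h2.sql
        Integer count = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM user", Integer.class);
        assertThat(count).isEqualTo(2);
    }
}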

View File

@ -0,0 +1,12 @@
${AnsiColor.BRIGHT_YELLOW}${AnsiStyle.BOLD}
________ ___ ___ ________ ___ __ ___ ___
|\ ___ \|\ \|\ \|\ ___ \|\ \ |\ \|\ \|\ \
\ \ \_|\ \ \ \\\ \ \ \\ \ \ \ \ \ \ \ \ \\\ \
\ \ \ \\ \ \ \\\ \ \ \\ \ \ \ \ __\ \ \ \ \\\ \
\ \ \_\\ \ \ \\\ \ \ \\ \ \ \ \|\__\_\ \ \ \\\ \
\ \_______\ \_______\ \__\\ \__\ \____________\ \_______\
\|_______|\|_______|\|__| \|__|\|____________|\|_______|
${AnsiColor.CYAN}${AnsiStyle.BOLD}
:: Java :: (v${java.version})
:: Spring Boot :: (v${spring-boot.version})
${AnsiStyle.NORMAL}

View File

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" ?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%boldYellow(%thread)] [%highlight(%-5level)] %boldGreen(%c{36}.%M) - %boldBlue(%m%n)
</pattern>
</encoder>
</appender>
<logger name="io.github.dunwu.spring" level="INFO" />
<root level="INFO">
<appender-ref ref="CONSOLE" />
</root>
</configuration>

View File

@ -0,0 +1,10 @@
-- -------------------------------------------------------------------
-- Initialization DML script for this project
-- For H2 reference notes, see:
-- https://dunwu.github.io/db-tutorial/#/sql/h2
-- -------------------------------------------------------------------
INSERT INTO user (name, age, address, email)
VALUES ('张三', 18, '北京', 'xxx@163.com');
INSERT INTO user (name, age, address, email)
VALUES ('李四', 19, '上海', 'xxx@163.com');

View File

@ -0,0 +1,13 @@
-- -------------------------------------------------------------------
-- Initialization DDL script for this project
-- For H2 reference notes, see:
-- https://dunwu.github.io/db-tutorial/#/sql/h2
-- -------------------------------------------------------------------
CREATE TABLE user (
id INT NOT NULL AUTO_INCREMENT,
name VARCHAR(100),
age INT,
address VARCHAR(50),
email VARCHAR(50),
PRIMARY KEY (id)
);

View File

@ -1,15 +1,14 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.h2;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.sql.*;
import java.util.UUID;
@SuppressWarnings("all")
public class H2JdbcTest01 {
public class H2JdbcTest {
// Database connection URL; this connects to the test database under C:\Users\Administrator (i.e., the test database in the user's home directory)
private static final String JDBC_URL = "jdbc:h2:~/test";
@ -33,15 +32,15 @@ public class H2JdbcTest01 {
private static Statement STATEMENT = null;
@BeforeClass
@BeforeAll
public static void beforeClass() {
try {
// Load the H2 database driver
Class.forName(DRIVER_CLASS);
// Obtain a database connection from the URL, user name and password (note how the different URLs behave differently)
// CONNECTION = DriverManager.getConnection(JDBC_URL, USER, PASSWORD);
CONNECTION = DriverManager.getConnection(JDBC_URL, USER, PASSWORD);
// CONNECTION = DriverManager.getConnection(JDBC_URL2, USER, PASSWORD);
CONNECTION = DriverManager.getConnection(JDBC_URL3, USER, PASSWORD);
// CONNECTION = DriverManager.getConnection(JDBC_URL3, USER, PASSWORD);
// Create a SQL Statement
STATEMENT = CONNECTION.createStatement();
} catch (ClassNotFoundException | SQLException e) {
@ -49,7 +48,7 @@ public class H2JdbcTest01 {
}
}
@AfterClass
@AfterAll
public static void afterClass() {
try {
// Release resources
@ -62,30 +61,26 @@ public class H2JdbcTest01 {
}
@Test
public void test() {
try {
// Drop the USER_INFO table if it already exists
STATEMENT.execute("DROP TABLE IF EXISTS user_info");
// Create the USER_INFO table
STATEMENT.execute("CREATE TABLE user_info(id VARCHAR(36) PRIMARY KEY,name VARCHAR(100),sex VARCHAR(4))");
// Insert rows
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','带头大哥','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','萧峰','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','段誉','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','虚竹','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','王语嫣','女')");
// Delete a row
STATEMENT.executeUpdate("DELETE FROM user_info WHERE name='带头大哥'");
// Update a row
STATEMENT.executeUpdate("UPDATE user_info SET name='大轮明王' WHERE name='鸠摩智'");
// Query
ResultSet rs = STATEMENT.executeQuery("SELECT * FROM user_info");
// Iterate over the result set
while (rs.next()) {
System.out.println(rs.getString("id") + "," + rs.getString("name") + "," + rs.getString("sex"));
}
} catch (SQLException e) {
Assert.assertTrue(e.getMessage(), true);
public void test() throws SQLException {
// Drop the USER_INFO table if it already exists
STATEMENT.execute("DROP TABLE IF EXISTS user_info");
// Create the USER_INFO table
STATEMENT.execute("CREATE TABLE user_info(id VARCHAR(36) PRIMARY KEY,name VARCHAR(100),sex VARCHAR(4))");
// Insert rows
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','带头大哥','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','萧峰','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','段誉','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','虚竹','男')");
STATEMENT.executeUpdate("INSERT INTO USER_INFO VALUES('" + UUID.randomUUID() + "','王语嫣','女')");
// Delete a row
STATEMENT.executeUpdate("DELETE FROM user_info WHERE name='带头大哥'");
// Update a row
STATEMENT.executeUpdate("UPDATE user_info SET name='大轮明王' WHERE name='鸠摩智'");
// Query
ResultSet rs = STATEMENT.executeQuery("SELECT * FROM user_info");
// Iterate over the result set
while (rs.next()) {
System.out.println(rs.getString("id") + "," + rs.getString("name") + "," + rs.getString("sex"));
}
}
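Only JDBC_URL is visible in this hunk; the JDBC_URL2/JDBC_URL3 constants referenced above are elided. For orientation, the common H2 connection URL forms are shown in the sketch below; the values are illustrative and are not claims about the elided constants.

import java.sql.Connection;
import java.sql.DriverManager;

public class H2UrlExamples {

    // Embedded file database stored in the user's home directory (same form as JDBC_URL above)
    static final String FILE_URL = "jdbc:h2:~/test";
    // Pure in-memory database; it disappears when the last connection closes
    static final String MEMORY_URL = "jdbc:h2:mem:test";
    // Client/server mode against a running H2 TCP server
    static final String TCP_URL = "jdbc:h2:tcp://localhost/~/test";

    public static void main(String[] args) throws Exception {
        // The in-memory form needs no server process and leaves no files on disk
        try (Connection connection = DriverManager.getConnection(MEMORY_URL, "sa", "")) {
            System.out.println(connection.getMetaData().getURL());
        }
    }
}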

View File

@ -0,0 +1,123 @@
package io.github.dunwu.javadb.h2.springboot;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Spring Boot + JPA REST CRUD tests
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-10-12
*/
@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles({"test"})
public class SpringBootJpaRestTest {
@Autowired
private MockMvc mockMvc;
@Autowired
private UserRepository userRepository;
@Autowired
private ObjectMapper objectMapper;
@BeforeEach
public void deleteAllBeforeTests() {
userRepository.deleteAll();
}
@Test
public void shouldCreateEntity() throws Exception {
User user = new User("张三", 18, "北京", "user1@163.com");
mockMvc.perform(post("/user").content(objectMapper.writeValueAsString(user))).andExpect(status().isCreated())
.andExpect(header().string("Location", containsString("user/")));
}
@Test
public void shouldDeleteEntity() throws Exception {
User user = new User("张三", 18, "北京", "user1@163.com");
MvcResult mvcResult = mockMvc.perform(post("/user").content(objectMapper.writeValueAsString(user)))
.andExpect(status().isCreated()).andReturn();
String location = mvcResult.getResponse().getHeader("Location");
assertThat(location).isNotNull();
mockMvc.perform(delete(location)).andExpect(status().isNoContent());
mockMvc.perform(get(location)).andExpect(status().isNotFound());
}
@Test
public void shouldPartiallyUpdateEntity() throws Exception {
User user = new User("张三", 18, "北京", "user1@163.com");
User user2 = new User("李四", 19, "上海", "user2@163.com");
MvcResult mvcResult = mockMvc.perform(post("/user").content(objectMapper.writeValueAsString(user)))
.andExpect(status().isCreated()).andReturn();
String location = mvcResult.getResponse().getHeader("Location");
assertThat(location).isNotNull();
mockMvc.perform(patch(location).content(objectMapper.writeValueAsString(user2)))
.andExpect(status().isNoContent());
mockMvc.perform(get(location)).andExpect(status().isOk()).andExpect(jsonPath("$.username").value("李四"))
.andExpect(jsonPath("$.password").value("123456")).andExpect(jsonPath("$.email").value("user2@163.com"));
}
@Test
public void shouldQueryEntity() throws Exception {
User user = new User("张三", 18, "北京", "user1@163.com");
mockMvc.perform(post("/user").content(objectMapper.writeValueAsString(user))).andExpect(status().isCreated());
mockMvc.perform(get("/user/search/findByEmail?email={email}", "user1@163.com")).andExpect(status().isOk());
}
@Test
public void shouldRetrieveEntity() throws Exception {
User user = new User("张三", 18, "北京", "user1@163.com");
MvcResult mvcResult = mockMvc.perform(post("/user").content(objectMapper.writeValueAsString(user)))
.andExpect(status().isCreated()).andReturn();
String location = mvcResult.getResponse().getHeader("Location");
assertThat(location).isNotNull();
mockMvc.perform(get(location)).andExpect(status().isOk()).andExpect(jsonPath("$.username").value("张三"))
.andExpect(jsonPath("$.email").value("user1@163.com"));
}
@Test
public void shouldReturnRepositoryIndex() throws Exception {
mockMvc.perform(get("/")).andDo(print()).andExpect(status().isOk())
.andExpect(jsonPath("$._links.user").exists());
}
@Test
public void shouldUpdateEntity() throws Exception {
User user = new User("张三", 18, "北京", "user1@163.com");
User user2 = new User("李四", 19, "上海", "user2@163.com");
MvcResult mvcResult = mockMvc.perform(post("/user").content(objectMapper.writeValueAsString(user)))
.andExpect(status().isCreated()).andReturn();
String location = mvcResult.getResponse().getHeader("Location");
assertThat(location).isNotNull();
mockMvc.perform(put(location).content(objectMapper.writeValueAsString(user2)))
.andExpect(status().isNoContent());
mockMvc.perform(get(location)).andExpect(status().isOk()).andExpect(jsonPath("$.username").value("李四"))
.andExpect(jsonPath("$.password").value("123456"));
}
}

View File

@ -0,0 +1,115 @@
package io.github.dunwu.javadb.h2.springboot;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.test.context.ActiveProfiles;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Spring Boot + JPA CRUD tests
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-10-12
*/
@DataJpaTest
@ActiveProfiles({"test"})
public class SpringBootJpaTest {
private final Logger log = LoggerFactory.getLogger(this.getClass());
@Autowired
private UserRepository repository;
@BeforeEach
public void before() {
repository.deleteAll();
}
@Test
public void insert() {
User user = new User("张三", 18, "北京", "user1@163.com");
repository.save(user);
Optional<User> optional = repository.findById(user.getId());
assertThat(optional).isNotNull();
assertThat(optional.isPresent()).isTrue();
}
@Test
public void batchInsert() {
List<User> users = new ArrayList<>();
users.add(new User("张三", 18, "北京", "user1@163.com"));
users.add(new User("李四", 19, "上海", "user1@163.com"));
users.add(new User("王五", 18, "南京", "user1@163.com"));
users.add(new User("赵六", 20, "武汉", "user1@163.com"));
repository.saveAll(users);
long count = repository.count();
assertThat(count).isEqualTo(4);
List<User> list = repository.findAll();
assertThat(list).isNotEmpty().hasSize(4);
list.forEach(this::accept);
}
private void accept(User user) {log.info(user.toString());}
@Test
public void delete() {
List<User> users = new ArrayList<>();
users.add(new User("张三", 18, "北京", "user1@163.com"));
users.add(new User("李四", 19, "上海", "user1@163.com"));
users.add(new User("王五", 18, "南京", "user1@163.com"));
users.add(new User("赵六", 20, "武汉", "user1@163.com"));
repository.saveAll(users);
repository.deleteByName("张三");
assertThat(repository.findByName("张三")).isNull();
repository.deleteAll();
List<User> list = repository.findAll();
assertThat(list).isEmpty();
}
@Test
public void findAllInPage() {
List<User> users = new ArrayList<>();
users.add(new User("张三", 18, "北京", "user1@163.com"));
users.add(new User("李四", 19, "上海", "user1@163.com"));
users.add(new User("王五", 18, "南京", "user1@163.com"));
users.add(new User("赵六", 20, "武汉", "user1@163.com"));
repository.saveAll(users);
PageRequest pageRequest = PageRequest.of(1, 2);
Page<User> page = repository.findAll(pageRequest);
assertThat(page).isNotNull();
assertThat(page.isEmpty()).isFalse();
assertThat(page.getTotalElements()).isEqualTo(4);
assertThat(page.getTotalPages()).isEqualTo(2);
List<User> list = page.get().collect(Collectors.toList());
System.out.println("user list: ");
list.forEach(System.out::println);
}
@Test
public void update() {
User oldUser = new User("张三", 18, "北京", "user1@163.com");
oldUser.setName("张三丰");
repository.save(oldUser);
User newUser = repository.findByName("张三丰");
assertThat(newUser).isNotNull();
}
}

View File

@ -1,63 +1,63 @@
<?xml version="1.0"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-hbase</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-hbase</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>1.8</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>1.8</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<hbase.version>1.3.1</hbase.version>
<junit.version>4.13.1</junit.version>
<dunwu.version>0.5.7</dunwu.version>
</properties>
<hbase.version>1.3.1</hbase.version>
<junit.version>4.13.1</junit.version>
<dunwu.version>0.5.7</dunwu.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
</dependency>
<dependency>
<groupId>io.github.dunwu</groupId>
<artifactId>dunwu-tool-core</artifactId>
</dependency>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
</dependency>
<dependency>
<groupId>io.github.dunwu</groupId>
<artifactId>dunwu-tool-core</artifactId>
</dependency>
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<!-- test end -->
</dependencies>
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<!-- test end -->
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>io.github.dunwu</groupId>
<artifactId>dunwu-tool-core</artifactId>
<version>${dunwu.version}</version>
</dependency>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>io.github.dunwu</groupId>
<artifactId>dunwu-tool-core</artifactId>
<version>${dunwu.version}</version>
</dependency>
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- test end -->
</dependencies>
</dependencyManagement>
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- test end -->
</dependencies>
</dependencyManagement>
</project>

View File

@ -1,24 +0,0 @@
package io.github.dunwu.javadb;
public enum HBaseConstant {
HBASE_ZOOKEEPER_QUORUM("hbase.zookeeper.quorum"), HBASE_ENABLE("hbase.enable"), HBASE_MASTER(
"hbase.master"), HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT(
"hbase.zookeeper.property.clientPort"), HBASE_HCONNECTION_THREADS_MAX(
"hbase.hconnection.threads.max"), HBASE_HCONNECTION_THREADS_CORE(
"hbase.hconnection.threads.core"), ZOOKEEPER_ZNODE_PARENT(
"zookeeper.znode.parent"), HBASE_COLUMN_FAMILY(
"hbase.column.family"), HBASE_EXECUTOR_NUM(
"hbase.executor.num"), HBASE_IPC_POOL_SIZE(
"hbase.client.ipc.pool.size");
private String key;
HBaseConstant(String key) {
this.key = key;
}
public String key() {
return key;
}
}

View File

@ -1,85 +0,0 @@
package io.github.dunwu.javadb;
/**
* HBase Cell
*
* @author Zhang Peng
* @since 2019-03-04
*/
public class HbaseCellEntity {
private String table;
private String row;
private String colFamily;
private String col;
private String val;
public HbaseCellEntity() {
}
public HbaseCellEntity(String row, String colFamily, String col, String val) {
this.row = row;
this.colFamily = colFamily;
this.col = col;
this.val = val;
}
public HbaseCellEntity(String table, String row, String colFamily, String col, String val) {
this.table = table;
this.row = row;
this.colFamily = colFamily;
this.col = col;
this.val = val;
}
public String getTable() {
return table;
}
public void setTable(String table) {
this.table = table;
}
public String getRow() {
return row;
}
public void setRow(String row) {
this.row = row;
}
public String getColFamily() {
return colFamily;
}
public void setColFamily(String colFamily) {
this.colFamily = colFamily;
}
public String getCol() {
return col;
}
public void setCol(String col) {
this.col = col;
}
public String getVal() {
return val;
}
public void setVal(String val) {
this.val = val;
}
@Override
public String toString() {
return "HbaseCellEntity{" + "table='" + table + '\'' + ", row='" + row + '\'' + ", colFamily='" + colFamily
+ '\'' + ", col='" + col + '\'' + ", val='" + val + '\'' + '}';
}
}

View File

@ -1,370 +0,0 @@
package io.github.dunwu.javadb;
import io.github.dunwu.tool.util.PropertiesUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
* HBase
*
* @author Zhang Peng
* @since 2019-03-01
*/
public class HbaseHelper {
private static final String FIRST_CONFIG = "classpath://config//hbase.properties";
private static final String SECOND_CONFIG = "classpath://application.properties";
private HbaseProperties hbaseProperties;
private Connection connection;
public HbaseHelper() throws Exception {
// Initialize configuration parameters
Properties properties = loadConfigFile();
if (properties == null) {
throw new Exception("读取 Hbase 配置失败,无法建立连接");
}
Boolean enable = PropertiesUtil.getBoolean(properties, HBaseConstant.HBASE_ENABLE.key(), true);
if (!enable) {
return;
}
String quorum = PropertiesUtil.getString(properties, HBaseConstant.HBASE_ZOOKEEPER_QUORUM.key(), "");
String hbaseMaster = PropertiesUtil.getString(properties, HBaseConstant.HBASE_MASTER.key(), "");
String clientPort = PropertiesUtil.getString(properties,
HBaseConstant.HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT.key(), "");
String znodeParent = PropertiesUtil.getString(properties, HBaseConstant.ZOOKEEPER_ZNODE_PARENT.key(), "");
String maxThreads = PropertiesUtil.getString(properties, HBaseConstant.HBASE_HCONNECTION_THREADS_MAX.key(), "");
String coreThreads = PropertiesUtil.getString(properties, HBaseConstant.HBASE_HCONNECTION_THREADS_CORE.key(),
"");
String columnFamily = PropertiesUtil.getString(properties, HBaseConstant.HBASE_COLUMN_FAMILY.key(), "");
String hbaseExecutorsNum = PropertiesUtil.getString(properties, HBaseConstant.HBASE_EXECUTOR_NUM.key(), "10");
String ipcPoolSize = PropertiesUtil.getString(properties, HBaseConstant.HBASE_IPC_POOL_SIZE.key(), "1");
hbaseProperties = new HbaseProperties(hbaseMaster, quorum, clientPort, znodeParent, maxThreads, coreThreads,
columnFamily, hbaseExecutorsNum, ipcPoolSize);
init(hbaseProperties);
}
private Properties loadConfigFile() {
Properties properties = null;
try {
properties = PropertiesUtil.loadFromFile(FIRST_CONFIG);
} catch (Exception e) {
e.printStackTrace();
}
if (properties == null) {
try {
properties = PropertiesUtil.loadFromFile(SECOND_CONFIG);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
return properties;
}
private void init(HbaseProperties hbaseProperties) throws Exception {
try {
// @formatter:off
Configuration configuration = HBaseConfiguration.create();
configuration.set(HBaseConstant.HBASE_ZOOKEEPER_QUORUM.key(), hbaseProperties.getQuorum());
configuration.set(HBaseConstant.HBASE_MASTER.key(), hbaseProperties.getHbaseMaster());
configuration.set(HBaseConstant.HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT.key(),
hbaseProperties.getClientPort());
configuration.set(HBaseConstant.HBASE_HCONNECTION_THREADS_MAX.key(),
hbaseProperties.getMaxThreads());
configuration.set(HBaseConstant.HBASE_HCONNECTION_THREADS_CORE.key(),
hbaseProperties.getCoreThreads());
configuration.set(HBaseConstant.ZOOKEEPER_ZNODE_PARENT.key(), hbaseProperties.getZnodeParent());
configuration.set(HBaseConstant.HBASE_COLUMN_FAMILY.key(), hbaseProperties.getColumnFamily());
configuration.set(HBaseConstant.HBASE_IPC_POOL_SIZE.key(), hbaseProperties.getIpcPoolSize());
// @formatter:on
connection = ConnectionFactory.createConnection(configuration);
} catch (Exception e) {
throw new Exception("hbase链接未创建", e);
}
}
public HbaseHelper(HbaseProperties hbaseProperties) throws Exception {
this.hbaseProperties = hbaseProperties;
init(hbaseProperties);
}
public void destory() {
if (connection != null) {
try {
connection.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public HTableDescriptor[] listTables() throws Exception {
return listTables(null);
}
public HTableDescriptor[] listTables(String tableName) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
HTableDescriptor[] hTableDescriptors = new HTableDescriptor[0];
try {
if (StringUtils.isEmpty(tableName)) {
hTableDescriptors = connection.getAdmin().listTables();
} else {
hTableDescriptors = connection.getAdmin().listTables(tableName);
}
} catch (IOException e) {
throw new Exception("执行失败", e);
}
return hTableDescriptors;
}
/**
* Create a table using the default column family
* <p>
* Equivalent HBase shell command:
* <ul>
* <li>create 'tablename','family1','family2','family3'...</li>
* </ul>
*/
public void createTable(String tableName) throws Exception {
createTable(tableName, new String[] {hbaseProperties.getColumnFamily()});
}
/**
* Create a table with the given column families
* <p>
* Equivalent HBase shell command:
* <ul>
* <li>create 'tablename','family1','family2','family3'...</li>
* </ul>
*/
public void createTable(String tableName, String[] colFamilies) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
try {
TableName tablename = TableName.valueOf(tableName);
// If the table already exists, drop it first
if (connection.getAdmin().isTableAvailable(tablename)) {
dropTable(tableName);
}
HTableDescriptor tableDescriptor = new HTableDescriptor(tablename);
for (String famliy : colFamilies) {
tableDescriptor.addFamily(new HColumnDescriptor(famliy));
}
connection.getAdmin().createTable(tableDescriptor);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Drop a table
* <p>
* Equivalent HBase shell commands:
* <ul>
* <li>disable 'tablename'</li>
* <li>drop 't1'</li>
* </ul>
*
* @param name table name
*/
public void dropTable(String name) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
Admin admin = null;
try {
admin = connection.getAdmin();
TableName tableName = TableName.valueOf(name);
// If the table exists, delete it
if (admin.isTableAvailable(tableName)) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private Put toPut(HbaseCellEntity hBaseTableDTO) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
Put put = new Put(Bytes.toBytes(hBaseTableDTO.getRow()));
put.addColumn(Bytes.toBytes(hBaseTableDTO.getColFamily()), Bytes.toBytes(hBaseTableDTO.getCol()),
Bytes.toBytes(hBaseTableDTO.getVal()));
return put;
}
public void delete(String tableName, String rowKey) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
Table table = null;
try {
table = connection.getTable(TableName.valueOf(tableName));
Delete delete = new Delete(Bytes.toBytes(rowKey));
table.delete(delete);
} catch (IOException e) {
e.printStackTrace();
throw new Exception("delete失败");
}
}
public String resultToString(Result result) {
if (result == null) {
return null;
}
Cell[] cells = result.rawCells();
StringBuilder sb = new StringBuilder();
for (Cell cell : cells) {
sb.append("{ ");
sb.append("RowName -> ").append(new String(CellUtil.cloneRow(cell)));
sb.append(", Timetamp -> ").append(cell.getTimestamp());
sb.append(", Column Family -> ").append(new String(CellUtil.cloneFamily(cell)));
sb.append(", Row Name -> ").append(new String(CellUtil.cloneQualifier(cell)));
sb.append(", value -> ").append(new String(CellUtil.cloneValue(cell)));
sb.append(" }\n");
}
return sb.toString();
}
public Result get(String tableName, String rowKey) throws Exception {
return get(tableName, rowKey, null, null);
}
public Result get(String tableName, String rowKey, String colFamily, String qualifier) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
if (connection.isClosed()) {
throw new Exception("hbase 连接已关闭");
}
if (StringUtils.isEmpty(tableName) || StringUtils.isEmpty(rowKey)) {
return null;
}
Result result = null;
try {
Table table = connection.getTable(TableName.valueOf(tableName));
Get get = new Get(Bytes.toBytes(rowKey));
if (StringUtils.isNotEmpty(colFamily)) {
if (StringUtils.isNotEmpty(qualifier)) {
get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(qualifier));
} else {
get.addFamily(Bytes.toBytes(colFamily));
}
}
result = table.get(get);
} catch (IOException e) {
throw new Exception("查询时发生异常");
}
return result;
}
public Result get(String tableName, String rowKey, String colFamily) throws Exception {
return get(tableName, rowKey, colFamily, null);
}
public Result[] scan(String tableName) throws Exception {
return scan(tableName, null, null, null, null);
}
public Result[] scan(String tableName, String colFamily, String qualifier, String startRow, String stopRow)
throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
if (StringUtils.isEmpty(tableName)) {
return null;
}
ResultScanner resultScanner = null;
List<Result> list = new ArrayList<>();
try {
Table table = connection.getTable(TableName.valueOf(tableName));
Scan scan = new Scan();
if (StringUtils.isNotEmpty(colFamily)) {
if (StringUtils.isNotEmpty(qualifier)) {
scan.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(qualifier));
}
scan.addFamily(Bytes.toBytes(colFamily));
}
if (StringUtils.isNotEmpty(startRow)) {
scan.setStartRow(Bytes.toBytes(startRow));
}
if (StringUtils.isNotEmpty(stopRow)) {
scan.setStopRow(Bytes.toBytes(stopRow));
}
resultScanner = table.getScanner(scan);
Result result = resultScanner.next();
while (result != null) {
list.add(result);
result = resultScanner.next();
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (resultScanner != null) {
resultScanner.close();
}
}
return list.toArray(new Result[0]);
}
public Result[] scan(String tableName, String colFamily) throws Exception {
return scan(tableName, colFamily, null, null, null);
}
public Result[] scan(String tableName, String colFamily, String qualifier) throws Exception {
return scan(tableName, colFamily, qualifier, null, null);
}
private List<Result> resultScannerToResults(ResultScanner resultScanner) {
if (resultScanner == null) {
return null;
}
List<Result> list = new ArrayList<>();
Result result = null;
try {
result = resultScanner.next();
while (result != null) {
list.add(result);
result = resultScanner.next();
}
} catch (IOException e) {
e.printStackTrace();
}
return list;
}
public HbaseProperties getHbaseProperties() {
return hbaseProperties;
}
}

View File

@ -1,128 +0,0 @@
package io.github.dunwu.javadb;
import java.io.Serializable;
/**
* Hbase
*
* @author Zhang Peng
*/
public class HbaseProperties implements Serializable {
private static final long serialVersionUID = 2930639554689310736L;
private String hbaseMaster;
private String quorum;
private String clientPort;
private String znodeParent;
private String maxThreads;
private String coreThreads;
private String columnFamily;
private String hbaseExecutorsNum = "10";
private String ipcPoolSize;
public HbaseProperties() {
}
public HbaseProperties(String hbaseMaster, String quorum, String clientPort, String znodeParent, String maxThreads,
String coreThreads, String columnFamily, String hbaseExecutorsNum, String ipcPoolSize) {
this.hbaseMaster = hbaseMaster;
this.quorum = quorum;
this.clientPort = clientPort;
this.znodeParent = znodeParent;
this.maxThreads = maxThreads;
this.coreThreads = coreThreads;
this.columnFamily = columnFamily;
this.hbaseExecutorsNum = hbaseExecutorsNum;
this.ipcPoolSize = ipcPoolSize;
}
public String getHbaseMaster() {
return hbaseMaster;
}
public void setHbaseMaster(String hbaseMaster) {
this.hbaseMaster = hbaseMaster;
}
public String getQuorum() {
return quorum;
}
public void setQuorum(String quorum) {
this.quorum = quorum;
}
public String getClientPort() {
return clientPort;
}
public void setClientPort(String clientPort) {
this.clientPort = clientPort;
}
public String getZnodeParent() {
return znodeParent;
}
public void setZnodeParent(String znodeParent) {
this.znodeParent = znodeParent;
}
public String getMaxThreads() {
return maxThreads;
}
public void setMaxThreads(String maxThreads) {
this.maxThreads = maxThreads;
}
public String getCoreThreads() {
return coreThreads;
}
public void setCoreThreads(String coreThreads) {
this.coreThreads = coreThreads;
}
public String getColumnFamily() {
return columnFamily;
}
public void setColumnFamily(String columnFamily) {
this.columnFamily = columnFamily;
}
public String getHbaseExecutorsNum() {
return hbaseExecutorsNum;
}
public void setHbaseExecutorsNum(String hbaseExecutorsNum) {
this.hbaseExecutorsNum = hbaseExecutorsNum;
}
public String getIpcPoolSize() {
return ipcPoolSize;
}
public void setIpcPoolSize(String ipcPoolSize) {
this.ipcPoolSize = ipcPoolSize;
}
@Override
public String toString() {
return "HbaseProperties{" + "quorum='" + quorum + '\'' + ", clientPort='" + clientPort + '\''
+ ", znodeParent='" + znodeParent + '\'' + ", maxThreads='" + maxThreads + '\'' + ", coreThreads='"
+ coreThreads + '\'' + ", columnFamily='" + columnFamily + '\'' + ", hbaseExecutorsNum='"
+ hbaseExecutorsNum + '\'' + '}';
}
}

View File

@ -0,0 +1,25 @@
package io.github.dunwu.javadb.hbase;
public enum HBaseConstant {
HBASE_ZOOKEEPER_QUORUM("hbase.zookeeper.quorum"),
HBASE_ENABLE("hbase.enable"),
HBASE_MASTER("hbase.master"),
HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT("hbase.zookeeper.property.clientPort"),
HBASE_HCONNECTION_THREADS_MAX("hbase.hconnection.threads.max"),
HBASE_HCONNECTION_THREADS_CORE("hbase.hconnection.threads.core"),
ZOOKEEPER_ZNODE_PARENT("zookeeper.znode.parent"),
HBASE_COLUMN_FAMILY("hbase.column.family"),
HBASE_EXECUTOR_NUM("hbase.executor.num"),
HBASE_IPC_POOL_SIZE("hbase.client.ipc.pool.size");
private String key;
HBaseConstant(String key) {
this.key = key;
}
public String key() {
return key;
}
}

View File

@ -0,0 +1,84 @@
package io.github.dunwu.javadb.hbase;
/**
* HBase Cell
* @author Zhang Peng
* @since 2019-03-04
*/
public class HbaseCellEntity {
private String table;
private String row;
private String colFamily;
private String col;
private String val;
public HbaseCellEntity() {
}
public HbaseCellEntity(String row, String colFamily, String col, String val) {
this.row = row;
this.colFamily = colFamily;
this.col = col;
this.val = val;
}
public HbaseCellEntity(String table, String row, String colFamily, String col, String val) {
this.table = table;
this.row = row;
this.colFamily = colFamily;
this.col = col;
this.val = val;
}
public String getTable() {
return table;
}
public void setTable(String table) {
this.table = table;
}
public String getRow() {
return row;
}
public void setRow(String row) {
this.row = row;
}
public String getColFamily() {
return colFamily;
}
public void setColFamily(String colFamily) {
this.colFamily = colFamily;
}
public String getCol() {
return col;
}
public void setCol(String col) {
this.col = col;
}
public String getVal() {
return val;
}
public void setVal(String val) {
this.val = val;
}
@Override
public String toString() {
return "HbaseCellEntity{" + "table='" + table + '\'' + ", row='" + row + '\'' + ", colFamily='" + colFamily
+ '\'' + ", col='" + col + '\'' + ", val='" + val + '\'' + '}';
}
}

View File

@ -0,0 +1,369 @@
package io.github.dunwu.javadb.hbase;
import io.github.dunwu.tool.util.PropertiesUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
* HBase
* @author Zhang Peng
* @since 2019-03-01
*/
public class HbaseHelper {
private static final String FIRST_CONFIG = "classpath://config//hbase.properties";
private static final String SECOND_CONFIG = "classpath://application.properties";
private HbaseProperties hbaseProperties;
private Connection connection;
public HbaseHelper() throws Exception {
// Initialize configuration parameters
Properties properties = loadConfigFile();
if (properties == null) {
throw new Exception("读取 Hbase 配置失败,无法建立连接");
}
Boolean enable = PropertiesUtil.getBoolean(properties, HBaseConstant.HBASE_ENABLE.key(), true);
if (!enable) {
return;
}
String quorum = PropertiesUtil.getString(properties, HBaseConstant.HBASE_ZOOKEEPER_QUORUM.key(), "");
String hbaseMaster = PropertiesUtil.getString(properties, HBaseConstant.HBASE_MASTER.key(), "");
String clientPort =
PropertiesUtil.getString(properties, HBaseConstant.HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT.key(), "");
String znodeParent = PropertiesUtil.getString(properties, HBaseConstant.ZOOKEEPER_ZNODE_PARENT.key(), "");
String maxThreads = PropertiesUtil.getString(properties, HBaseConstant.HBASE_HCONNECTION_THREADS_MAX.key(), "");
String coreThreads =
PropertiesUtil.getString(properties, HBaseConstant.HBASE_HCONNECTION_THREADS_CORE.key(), "");
String columnFamily = PropertiesUtil.getString(properties, HBaseConstant.HBASE_COLUMN_FAMILY.key(), "");
String hbaseExecutorsNum = PropertiesUtil.getString(properties, HBaseConstant.HBASE_EXECUTOR_NUM.key(), "10");
String ipcPoolSize = PropertiesUtil.getString(properties, HBaseConstant.HBASE_IPC_POOL_SIZE.key(), "1");
hbaseProperties =
new HbaseProperties(hbaseMaster, quorum, clientPort, znodeParent, maxThreads, coreThreads, columnFamily,
hbaseExecutorsNum, ipcPoolSize);
init(hbaseProperties);
}
private Properties loadConfigFile() {
Properties properties = null;
try {
properties = PropertiesUtil.loadFromFile(FIRST_CONFIG);
} catch (Exception e) {
e.printStackTrace();
}
if (properties == null) {
try {
properties = PropertiesUtil.loadFromFile(SECOND_CONFIG);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
return properties;
}
private void init(HbaseProperties hbaseProperties) throws Exception {
try {
// @formatter:off
Configuration configuration = HBaseConfiguration.create();
configuration.set(HBaseConstant.HBASE_ZOOKEEPER_QUORUM.key(), hbaseProperties.getQuorum());
configuration.set(HBaseConstant.HBASE_MASTER.key(), hbaseProperties.getHbaseMaster());
configuration.set(HBaseConstant.HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT.key(),
hbaseProperties.getClientPort());
configuration.set(HBaseConstant.HBASE_HCONNECTION_THREADS_MAX.key(),
hbaseProperties.getMaxThreads());
configuration.set(HBaseConstant.HBASE_HCONNECTION_THREADS_CORE.key(),
hbaseProperties.getCoreThreads());
configuration.set(HBaseConstant.ZOOKEEPER_ZNODE_PARENT.key(), hbaseProperties.getZnodeParent());
configuration.set(HBaseConstant.HBASE_COLUMN_FAMILY.key(), hbaseProperties.getColumnFamily());
configuration.set(HBaseConstant.HBASE_IPC_POOL_SIZE.key(), hbaseProperties.getIpcPoolSize());
// @formatter:on
connection = ConnectionFactory.createConnection(configuration);
} catch (Exception e) {
throw new Exception("hbase链接未创建", e);
}
}
public HbaseHelper(HbaseProperties hbaseProperties) throws Exception {
this.hbaseProperties = hbaseProperties;
init(hbaseProperties);
}
public void destory() {
if (connection != null) {
try {
connection.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public HTableDescriptor[] listTables() throws Exception {
return listTables(null);
}
public HTableDescriptor[] listTables(String tableName) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
HTableDescriptor[] hTableDescriptors = new HTableDescriptor[0];
try {
if (StringUtils.isEmpty(tableName)) {
hTableDescriptors = connection.getAdmin().listTables();
} else {
hTableDescriptors = connection.getAdmin().listTables(tableName);
}
} catch (IOException e) {
throw new Exception("执行失败", e);
}
return hTableDescriptors;
}
/**
* Create a table using the default column family
* <p>
* Equivalent HBase shell command:
* <ul>
* <li>create 'tablename','family1','family2','family3'...</li>
* </ul>
*/
public void createTable(String tableName) throws Exception {
createTable(tableName, new String[] {hbaseProperties.getColumnFamily()});
}
/**
* Create a table with the given column families
* <p>
* Equivalent HBase shell command:
* <ul>
* <li>create 'tablename','family1','family2','family3'...</li>
* </ul>
*/
public void createTable(String tableName, String[] colFamilies) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
try {
TableName tablename = TableName.valueOf(tableName);
// If the table already exists, drop it first
if (connection.getAdmin().isTableAvailable(tablename)) {
dropTable(tableName);
}
HTableDescriptor tableDescriptor = new HTableDescriptor(tablename);
for (String famliy : colFamilies) {
tableDescriptor.addFamily(new HColumnDescriptor(famliy));
}
connection.getAdmin().createTable(tableDescriptor);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Drop a table
* <p>
* Equivalent HBase shell commands:
* <ul>
* <li>disable 'tablename'</li>
* <li>drop 't1'</li>
* </ul>
* @param name table name
*/
public void dropTable(String name) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
Admin admin = null;
try {
admin = connection.getAdmin();
TableName tableName = TableName.valueOf(name);
// If the table exists, delete it
if (admin.isTableAvailable(tableName)) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private Put toPut(HbaseCellEntity hBaseTableDTO) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
Put put = new Put(Bytes.toBytes(hBaseTableDTO.getRow()));
put.addColumn(Bytes.toBytes(hBaseTableDTO.getColFamily()), Bytes.toBytes(hBaseTableDTO.getCol()),
Bytes.toBytes(hBaseTableDTO.getVal()));
return put;
}
public void delete(String tableName, String rowKey) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
Table table = null;
try {
table = connection.getTable(TableName.valueOf(tableName));
Delete delete = new Delete(Bytes.toBytes(rowKey));
table.delete(delete);
} catch (IOException e) {
e.printStackTrace();
throw new Exception("delete失败");
}
}
public String resultToString(Result result) {
if (result == null) {
return null;
}
Cell[] cells = result.rawCells();
StringBuilder sb = new StringBuilder();
for (Cell cell : cells) {
sb.append("{ ");
sb.append("RowName -> ").append(new String(CellUtil.cloneRow(cell)));
sb.append(", Timetamp -> ").append(cell.getTimestamp());
sb.append(", Column Family -> ").append(new String(CellUtil.cloneFamily(cell)));
sb.append(", Row Name -> ").append(new String(CellUtil.cloneQualifier(cell)));
sb.append(", value -> ").append(new String(CellUtil.cloneValue(cell)));
sb.append(" }\n");
}
return sb.toString();
}
public Result get(String tableName, String rowKey) throws Exception {
return get(tableName, rowKey, null, null);
}
public Result get(String tableName, String rowKey, String colFamily, String qualifier) throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
if (connection.isClosed()) {
throw new Exception("hbase 连接已关闭");
}
if (StringUtils.isEmpty(tableName) || StringUtils.isEmpty(rowKey)) {
return null;
}
Result result = null;
try {
Table table = connection.getTable(TableName.valueOf(tableName));
Get get = new Get(Bytes.toBytes(rowKey));
if (StringUtils.isNotEmpty(colFamily)) {
if (StringUtils.isNotEmpty(qualifier)) {
get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(qualifier));
} else {
get.addFamily(Bytes.toBytes(colFamily));
}
}
result = table.get(get);
} catch (IOException e) {
throw new Exception("查询时发生异常");
}
return result;
}
public Result get(String tableName, String rowKey, String colFamily) throws Exception {
return get(tableName, rowKey, colFamily, null);
}
public Result[] scan(String tableName) throws Exception {
return scan(tableName, null, null, null, null);
}
public Result[] scan(String tableName, String colFamily, String qualifier, String startRow, String stopRow)
throws Exception {
if (connection == null) {
throw new Exception("hbase链接未创建");
}
if (StringUtils.isEmpty(tableName)) {
return null;
}
ResultScanner resultScanner = null;
List<Result> list = new ArrayList<>();
try {
Table table = connection.getTable(TableName.valueOf(tableName));
Scan scan = new Scan();
if (StringUtils.isNotEmpty(colFamily)) {
if (StringUtils.isNotEmpty(qualifier)) {
scan.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(qualifier));
}
scan.addFamily(Bytes.toBytes(colFamily));
}
if (StringUtils.isNotEmpty(startRow)) {
scan.setStartRow(Bytes.toBytes(startRow));
}
if (StringUtils.isNotEmpty(stopRow)) {
scan.setStopRow(Bytes.toBytes(stopRow));
}
resultScanner = table.getScanner(scan);
Result result = resultScanner.next();
while (result != null) {
list.add(result);
result = resultScanner.next();
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (resultScanner != null) {
resultScanner.close();
}
}
return list.toArray(new Result[0]);
}
public Result[] scan(String tableName, String colFamily) throws Exception {
return scan(tableName, colFamily, null, null, null);
}
public Result[] scan(String tableName, String colFamily, String qualifier) throws Exception {
return scan(tableName, colFamily, qualifier, null, null);
}
private List<Result> resultScannerToResults(ResultScanner resultScanner) {
if (resultScanner == null) {
return null;
}
List<Result> list = new ArrayList<>();
Result result = null;
try {
result = resultScanner.next();
while (result != null) {
list.add(result);
result = resultScanner.next();
}
} catch (IOException e) {
e.printStackTrace();
}
return list;
}
public HbaseProperties getHbaseProperties() {
return hbaseProperties;
}
}

View File

@ -0,0 +1,127 @@
package io.github.dunwu.javadb.hbase;
import java.io.Serializable;
/**
* Hbase
* @author Zhang Peng
*/
public class HbaseProperties implements Serializable {
private static final long serialVersionUID = 2930639554689310736L;
private String hbaseMaster;
private String quorum;
private String clientPort;
private String znodeParent;
private String maxThreads;
private String coreThreads;
private String columnFamily;
private String hbaseExecutorsNum = "10";
private String ipcPoolSize;
public HbaseProperties() {
}
public HbaseProperties(String hbaseMaster, String quorum, String clientPort, String znodeParent, String maxThreads,
String coreThreads, String columnFamily, String hbaseExecutorsNum, String ipcPoolSize) {
this.hbaseMaster = hbaseMaster;
this.quorum = quorum;
this.clientPort = clientPort;
this.znodeParent = znodeParent;
this.maxThreads = maxThreads;
this.coreThreads = coreThreads;
this.columnFamily = columnFamily;
this.hbaseExecutorsNum = hbaseExecutorsNum;
this.ipcPoolSize = ipcPoolSize;
}
public String getHbaseMaster() {
return hbaseMaster;
}
public void setHbaseMaster(String hbaseMaster) {
this.hbaseMaster = hbaseMaster;
}
public String getQuorum() {
return quorum;
}
public void setQuorum(String quorum) {
this.quorum = quorum;
}
public String getClientPort() {
return clientPort;
}
public void setClientPort(String clientPort) {
this.clientPort = clientPort;
}
public String getZnodeParent() {
return znodeParent;
}
public void setZnodeParent(String znodeParent) {
this.znodeParent = znodeParent;
}
public String getMaxThreads() {
return maxThreads;
}
public void setMaxThreads(String maxThreads) {
this.maxThreads = maxThreads;
}
public String getCoreThreads() {
return coreThreads;
}
public void setCoreThreads(String coreThreads) {
this.coreThreads = coreThreads;
}
public String getColumnFamily() {
return columnFamily;
}
public void setColumnFamily(String columnFamily) {
this.columnFamily = columnFamily;
}
public String getHbaseExecutorsNum() {
return hbaseExecutorsNum;
}
public void setHbaseExecutorsNum(String hbaseExecutorsNum) {
this.hbaseExecutorsNum = hbaseExecutorsNum;
}
public String getIpcPoolSize() {
return ipcPoolSize;
}
public void setIpcPoolSize(String ipcPoolSize) {
this.ipcPoolSize = ipcPoolSize;
}
@Override
public String toString() {
return "HbaseProperties{" + "quorum='" + quorum + '\'' + ", clientPort='" + clientPort + '\''
+ ", znodeParent='" + znodeParent + '\'' + ", maxThreads='" + maxThreads + '\'' + ", coreThreads='"
+ coreThreads + '\'' + ", columnFamily='" + columnFamily + '\'' + ", hbaseExecutorsNum='"
+ hbaseExecutorsNum + '\'' + '}';
}
}

View File

@ -1,109 +0,0 @@
package io.github.dunwu.javadb;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Result;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* @author Zhang Peng
* @since 2019-03-29
*/
public class HbaseHelperTest {
private static HbaseHelper hbaseHelper;
@BeforeClass
public static void BeforeClass() {
try {
hbaseHelper = new HbaseHelper();
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void listTable() throws Exception {
HTableDescriptor[] hTableDescriptors = hbaseHelper.listTables();
if (hTableDescriptors == null || hTableDescriptors.length <= 0) {
Assert.fail();
}
System.out.println("Tables");
for (HTableDescriptor item : hTableDescriptors) {
System.out.println(item.getTableName());
}
}
@Test
public void createTable() throws Exception {
hbaseHelper.createTable("table1", new String[] {"columnFamliy1", "columnFamliy2"});
HTableDescriptor[] table1s = hbaseHelper.listTables("table1");
if (table1s == null || table1s.length <= 0) {
Assert.fail();
}
hbaseHelper.createTable("table2", new String[] {"columnFamliy1", "columnFamliy2"});
table1s = hbaseHelper.listTables("table2");
if (table1s == null || table1s.length <= 0) {
Assert.fail();
}
}
@Test
public void dropTable() throws Exception {
hbaseHelper.dropTable("table1");
HTableDescriptor[] table1s = hbaseHelper.listTables("table1");
if (table1s != null && table1s.length > 0) {
Assert.fail();
}
}
@Test
public void get() throws Exception {
Result result = hbaseHelper.get("table1", "row1");
System.out.println(hbaseHelper.resultToString(result));
result = hbaseHelper.get("table1", "row2", "columnFamliy1");
System.out.println(hbaseHelper.resultToString(result));
}
@Test
public void scan() throws Exception {
Result[] results = hbaseHelper.scan("table1");
System.out.println("HbaseUtil.scan(\"table1\") result: ");
if (results.length > 0) {
for (Result r : results) {
System.out.println(hbaseHelper.resultToString(r));
}
}
results = hbaseHelper.scan("table1", "columnFamliy1");
System.out.println("HbaseUtil.scan(\"table1\", \"columnFamliy1\" result: ");
if (results.length > 0) {
for (Result r : results) {
System.out.println(hbaseHelper.resultToString(r));
}
}
results = hbaseHelper.scan("table1", "columnFamliy1", "a");
System.out.println("HbaseUtil.scan(\"table1\", \"columnFamliy1\", \"a\") result: ");
if (results.length > 0) {
for (Result r : results) {
System.out.println(hbaseHelper.resultToString(r));
}
}
}
@Test
public void delete() throws Exception {
Result result = hbaseHelper.get("table1", "row1");
System.out.println(result.toString());
hbaseHelper.delete("table1", "row1");
result = hbaseHelper.get("table1", "row1");
System.out.println(result.toString());
}
}

View File

@ -0,0 +1,110 @@
package io.github.dunwu.javadb.hbase;
import io.github.dunwu.javadb.hbase.HbaseHelper;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Result;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* @author Zhang Peng
* @since 2019-03-29
*/
public class HbaseHelperTest {
private static HbaseHelper hbaseHelper;
@BeforeClass
public static void BeforeClass() {
try {
hbaseHelper = new HbaseHelper();
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void listTable() throws Exception {
HTableDescriptor[] hTableDescriptors = hbaseHelper.listTables();
if (hTableDescriptors == null || hTableDescriptors.length <= 0) {
Assert.fail();
}
System.out.println("Tables");
for (HTableDescriptor item : hTableDescriptors) {
System.out.println(item.getTableName());
}
}
@Test
public void createTable() throws Exception {
hbaseHelper.createTable("table1", new String[] {"columnFamliy1", "columnFamliy2"});
HTableDescriptor[] table1s = hbaseHelper.listTables("table1");
if (table1s == null || table1s.length <= 0) {
Assert.fail();
}
hbaseHelper.createTable("table2", new String[] {"columnFamily1", "columnFamily2"});
table1s = hbaseHelper.listTables("table2");
if (table1s == null || table1s.length <= 0) {
Assert.fail();
}
}
@Test
public void dropTable() throws Exception {
hbaseHelper.dropTable("table1");
HTableDescriptor[] table1s = hbaseHelper.listTables("table1");
if (table1s != null && table1s.length > 0) {
Assert.fail();
}
}
@Test
public void get() throws Exception {
Result result = hbaseHelper.get("table1", "row1");
System.out.println(hbaseHelper.resultToString(result));
result = hbaseHelper.get("table1", "row2", "columnFamily1");
System.out.println(hbaseHelper.resultToString(result));
}
@Test
public void scan() throws Exception {
Result[] results = hbaseHelper.scan("table1");
System.out.println("HbaseUtil.scan(\"table1\") result: ");
if (results.length > 0) {
for (Result r : results) {
System.out.println(hbaseHelper.resultToString(r));
}
}
results = hbaseHelper.scan("table1", "columnFamily1");
System.out.println("HbaseUtil.scan(\"table1\", \"columnFamily1\") result: ");
if (results.length > 0) {
for (Result r : results) {
System.out.println(hbaseHelper.resultToString(r));
}
}
results = hbaseHelper.scan("table1", "columnFamily1", "a");
System.out.println("HbaseUtil.scan(\"table1\", \"columnFamily1\", \"a\") result: ");
if (results.length > 0) {
for (Result r : results) {
System.out.println(hbaseHelper.resultToString(r));
}
}
}
@Test
public void delete() throws Exception {
Result result = hbaseHelper.get("table1", "row1");
System.out.println(result.toString());
hbaseHelper.delete("table1", "row1");
result = hbaseHelper.get("table1", "row1");
System.out.println(result.toString());
}
}


@ -1,67 +1,73 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.3.3.RELEASE</version>
</parent>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-mongodb</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-json</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>5.4.1</version>
</dependency>
<dependency>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-mongodb</artifactId>
<exclusions>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<version>3.3.9.RELEASE</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.6.3</version>
</parent>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-mongodb</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-json</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>5.7.20</version>
</dependency>
<dependency>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-mongodb</artifactId>
<exclusions>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<version>3.4.14</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>


@ -1,6 +1,6 @@
package io.github.dunwu.springboot;
package io.github.dunwu.javadb.mongodb.springboot;
import io.github.dunwu.springboot.mongodb.customer.CustomerRepository;
import io.github.dunwu.javadb.mongodb.springboot.customer.CustomerRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;


@ -13,17 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.advanced;
package io.github.dunwu.javadb.mongodb.springboot.advanced;
import io.github.dunwu.springboot.mongodb.customer.Customer;
import io.github.dunwu.springboot.mongodb.customer.CustomerRepository;
import io.github.dunwu.javadb.mongodb.springboot.customer.Customer;
import io.github.dunwu.javadb.mongodb.springboot.customer.CustomerRepository;
import org.springframework.data.mongodb.repository.Meta;
import java.util.List;
/**
* Repository interface to manage {@link Customer} instances.
*
* @author Christoph Strobl
*/
public interface AdvancedRepository extends CustomerRepository {
@ -39,7 +38,6 @@ public interface AdvancedRepository extends CustomerRepository {
* db['system.profile'].find({'query.$comment':'s2gx-2014-rocks!'})
* </code>
* </pre>
*
* @param firstname
* @return
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.advanced;
package io.github.dunwu.javadb.mongodb.springboot.advanced;
import org.bson.Document;
import org.springframework.beans.factory.annotation.Autowired;
@ -25,7 +25,6 @@ import javax.annotation.PreDestroy;
/**
* Test configuration to connect to a MongoDB named "test" and using a {@code MongoClient} with profiling enabled.
*
* @author Christoph Strobl
*/
@SpringBootApplication


@ -13,14 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Test configuration to connect to a MongoDB named "test" and using a {@link com.mongodb.client.MongoClient}. Also
* enables Spring Data repositories for MongoDB.
*
* @author Oliver Gierke
*/
@SpringBootApplication


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import lombok.Value;
@ -21,7 +21,6 @@ import java.util.List;
/**
* A DTO to represent invoices.
*
* @author Thomas Darimont
* @author Oliver Gierke
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import lombok.Data;
import lombok.RequiredArgsConstructor;
@ -21,7 +21,6 @@ import org.springframework.data.annotation.PersistenceConstructor;
/**
* A line item.
*
* @author Thomas Darimont
* @author Oliver Gierke
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import lombok.AllArgsConstructor;
import lombok.Data;
@ -27,7 +27,6 @@ import java.util.List;
/**
* An entity representing an {@link Order}. Note how we don't need any MongoDB mapping annotations as {@code id} is
* recognized as the id property by default.
*
* @author Thomas Darimont
* @author Oliver Gierke
* @author Mark Paluch
@ -44,7 +43,6 @@ public class Order {
/**
* Creates a new {@link Order} for the given customer id and order date.
*
* @param customerId
* @param orderDate
*/
@ -54,7 +52,6 @@ public class Order {
/**
* Adds a {@link LineItem} to the {@link Order}.
*
* @param item
* @return
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.repository.Aggregation;
@ -23,7 +23,6 @@ import java.util.List;
/**
* A repository interface assembling CRUD functionality as well as the API to invoke the methods implemented manually.
*
* @author Thomas Darimont
* @author Oliver Gierke
* @author Christoph Strobl
@ -33,7 +32,7 @@ public interface OrderRepository extends CrudRepository<Order, String>, OrderRep
@Aggregation("{ $group : { _id : $customerId, total : { $sum : 1 } } }")
List<OrdersPerCustomer> totalOrdersPerCustomer(Sort sort);
@Aggregation(pipeline = { "{ $match : { customerId : ?0 } }", "{ $count : total }" })
@Aggregation(pipeline = {"{ $match : { customerId : ?0 } }", "{ $count : total }"})
Long totalOrdersForCustomer(String customerId);
}


@ -13,18 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
/**
* The interface for repository functionality that will be implemented manually.
*
* @author Oliver Gierke
*/
interface OrderRepositoryCustom {
/**
* Creates an {@link Invoice} for the given {@link Order}.
*
* @param order must not be {@literal null}.
* @return
*/


@ -0,0 +1,80 @@
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import lombok.RequiredArgsConstructor;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.where;
/**
* The manual implementation parts for {@link OrderRepository}. This will automatically be picked up by the Spring Data
* infrastructure as we follow the naming convention of extending the core repository interface's name with {@code Impl}.
* @author Thomas Darimont
* @author Oliver Gierke
*/
@RequiredArgsConstructor
class OrderRepositoryImpl implements OrderRepositoryCustom {
private final MongoOperations operations;
private double taxRate = 0.19;
/**
* The implementation uses the MongoDB aggregation framework support Spring Data provides as well as SpEL
* expressions to define arithmetical expressions. Note how we work with property names only and don't have to
* mitigate the nested {@code $_id} fields MongoDB usually requires.
* @see example.springdata.mongodb.aggregation.OrderRepositoryCustom#getInvoiceFor(example.springdata.mongodb.aggregation.Order)
*/
@Override
public Invoice getInvoiceFor(Order order) {
AggregationResults<Invoice> results = operations.aggregate(newAggregation(Order.class, //
    match(where("id").is(order.getId())), //
    unwind("items"), //
    project("id", "customerId", "items") //
        .andExpression("'$items.price' * '$items.quantity'").as("lineTotal"), //
    group("id") //
        .sum("lineTotal").as("netAmount") //
        .addToSet("items").as("items"), //
    project("id", "items", "netAmount") //
        .and("orderId").previousOperation() //
        .andExpression("netAmount * [0]", taxRate).as("taxAmount") //
        .andExpression("netAmount * (1 + [0])", taxRate).as("totalAmount") //
), Invoice.class);
return results.getUniqueMappedResult();
}
}
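
For orientation, the aggregation implemented above is invoked through OrderRepository#getInvoiceFor. A minimal usage sketch, assuming an injected OrderRepository and the Order, LineItem and Invoice types from this commit (the wrapping method is only for illustration):

import java.util.Date;

// Sketch only: mirrors the setup used by OrderRepositoryIntegrationTests later in this commit.
Invoice createInvoice(OrderRepository repository) {
    Order order = new Order("c42", new Date())
        .addItem(new LineItem("p1", 1.23))
        .addItem(new LineItem("p2", 0.87, 2));
    order = repository.save(order);
    // The pipeline unwinds the line items, sums price * quantity into netAmount,
    // then derives taxAmount (netAmount * taxRate) and totalAmount.
    return repository.getInvoiceFor(order);
}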


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import lombok.Value;
import org.springframework.data.annotation.Id;


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.customer;
package io.github.dunwu.javadb.mongodb.springboot.customer;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@ -21,7 +21,6 @@ import org.springframework.data.geo.Point;
/**
* A domain object to capture addresses.
*
* @author Oliver Gierke
*/
@Getter


@ -13,14 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.customer;
package io.github.dunwu.javadb.mongodb.springboot.customer;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Test configuration to connect to a MongoDB named "test" and using a {@code MongoClient}. Also enables Spring Data
* repositories for MongoDB.
*
* @author Oliver Gierke
*/
@SpringBootApplication


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.customer;
package io.github.dunwu.javadb.mongodb.springboot.customer;
import lombok.Data;
import org.springframework.data.mongodb.core.mapping.Document;
@ -21,7 +21,6 @@ import org.springframework.util.Assert;
/**
* An entity to represent a customer.
*
* @author Oliver Gierke
*/
@Data
@ -33,9 +32,8 @@ public class Customer {
/**
* Creates a new {@link Customer} with the given firstname and lastname.
*
* @param firstname must not be {@literal null} or empty.
* @param lastname must not be {@literal null} or empty.
* @param lastname must not be {@literal null} or empty.
*/
public Customer(String firstname, String lastname) {


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.customer;
package io.github.dunwu.javadb.mongodb.springboot.customer;
import org.springframework.data.domain.Sort;
import org.springframework.data.geo.Distance;
@ -25,14 +25,12 @@ import java.util.List;
/**
* Repository interface to manage {@link Customer} instances.
*
* @author Oliver Gierke
*/
public interface CustomerRepository extends CrudRepository<Customer, String> {
/**
* Derived query using dynamic sort information.
*
* @param lastname
* @param sort
* @return
@ -41,7 +39,6 @@ public interface CustomerRepository extends CrudRepository<Customer, String> {
/**
* Show case for a repository query using geo-spatial functionality.
*
* @param point
* @param distance
* @return


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.immutable;
package io.github.dunwu.javadb.mongodb.springboot.immutable;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
@ -24,7 +24,6 @@ import java.util.concurrent.ThreadLocalRandom;
/**
* Test configuration to connect to a MongoDB named "test" using a {@code MongoClient}. <br /> Also enables Spring Data
* repositories for MongoDB.
*
* @author Mark Paluch
* @author Christoph Strobl
*/
@ -35,7 +34,6 @@ class ApplicationConfiguration {
* Register the {@link BeforeConvertCallback} used to update an {@link ImmutablePerson} before handing over the
* newly created instance to the actual mapping layer performing the conversion into the store native {@link
* org.bson.Document} representation.
*
* @return a {@link BeforeConvertCallback} for {@link ImmutablePerson}.
*/
@Bean
@ -45,7 +43,8 @@ class ApplicationConfiguration {
int randomNumber = ThreadLocalRandom.current().nextInt(1, 100);
// withRandomNumber is a so called wither method returning a new instance of the entity with a new value assigned
// withRandomNumber is a so called wither method returning a new instance of the entity with a new value
// assigned
return immutablePerson.withRandomNumber(randomNumber);
};
}


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.immutable;
package io.github.dunwu.javadb.mongodb.springboot.immutable;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@ -22,7 +22,6 @@ import org.bson.types.ObjectId;
/**
* Immutable object.
*
* @author Mark Paluch
*/
@With


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.projections;
package io.github.dunwu.javadb.mongodb.springboot.projections;
import lombok.Data;
import org.bson.types.ObjectId;


@ -13,13 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.projections;
package io.github.dunwu.javadb.mongodb.springboot.projections;
import lombok.Value;
/**
* A sample DTO only containing the firstname.
*
* @author Oliver Gierke
*/
@Value


@ -13,11 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.projections;
package io.github.dunwu.javadb.mongodb.springboot.projections;
/**
* An example projection interface containing only the firstname.
*
* @author Oliver Gierke
*/
interface CustomerProjection {


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.projections;
package io.github.dunwu.javadb.mongodb.springboot.projections;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.annotation.Value;
@ -25,7 +25,6 @@ import java.util.Collection;
/**
* Sample repository managing customers to show projecting functionality of Spring Data MongoDB.
*
* @author Oliver Gierke
*/
interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
@ -33,7 +32,6 @@ interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
/**
* Uses a projection interface to indicate the fields to be returned. As the projection doesn't use any dynamic
* fields, the query execution will be restricted to only the fields needed by the projection.
*
* @return
*/
Collection<CustomerProjection> findAllProjectedBy();
@ -42,7 +40,6 @@ interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
* When a projection is used that contains dynamic properties (i.e. SpEL expressions in an {@link Value}
* annotation), the normal target entity will be loaded but dynamically projected so that the target can be referred
* to in the expression.
*
* @return
*/
Collection<CustomerSummary> findAllSummarizedBy();
@ -50,14 +47,12 @@ interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
/**
* Uses a concrete DTO type to indicate the fields to be returned. This will cause the original object being loaded
* and the properties copied over into the DTO.
*
* @return
*/
Collection<CustomerDto> findAllDtoedBy();
/**
* Passes in the projection type dynamically (either interface or DTO).
*
* @param firstname
* @param projection
* @return
@ -66,7 +61,6 @@ interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
/**
* Projection for a single entity.
*
* @param id
* @return
*/
@ -74,7 +68,6 @@ interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
/**
* Dynamic projection for a single entity.
*
* @param id
* @param projection
* @return
@ -83,7 +76,6 @@ interface CustomerRepository extends CrudRepository<Customer, ObjectId> {
/**
* Projections used with pagination.
*
* @param pageable
* @return
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.projections;
package io.github.dunwu.javadb.mongodb.springboot.projections;
import org.springframework.beans.factory.annotation.Value;


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.springframework.boot.autoconfigure.SpringBootApplication;


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import lombok.EqualsAndHashCode;
import lombok.Getter;


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.bson.types.ObjectId;
import org.springframework.data.repository.CrudRepository;
@ -21,7 +21,6 @@ import org.springframework.data.repository.query.QueryByExampleExecutor;
/**
* Repository interface for {@link Contact} and sub-types.
*
* @author Oliver Gierke
*/
public interface ContactRepository extends CrudRepository<Contact, ObjectId>, QueryByExampleExecutor<Contact> {}


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@ -21,7 +21,6 @@ import org.springframework.data.mongodb.core.mapping.Document;
/**
* Sample user class.
*
* @author Mark Paluch
* @author Oliver Gierke
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@ -21,7 +21,6 @@ import org.springframework.data.mongodb.core.mapping.Document;
/**
* Sample contact class.
*
* @author Mark Paluch
* @author Oliver Gierke
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.QueryByExampleExecutor;
@ -21,7 +21,6 @@ import org.springframework.data.repository.query.QueryByExampleExecutor;
/**
* Simple repository interface for {@link Relative} instances. The interface implements {@link QueryByExampleExecutor}
* and allows execution of methods accepting {@link org.springframework.data.domain.Example}.
*
* @author Mark Paluch
*/
public interface RelativeRepository extends CrudRepository<Relative, Long>, QueryByExampleExecutor<Relative> {}


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.QueryByExampleExecutor;
@ -21,7 +21,6 @@ import org.springframework.data.repository.query.QueryByExampleExecutor;
/**
* Simple repository interface for {@link Person} instances. The interface implements {@link QueryByExampleExecutor} and
* allows execution of methods accepting {@link org.springframework.data.domain.Example}.
*
* @author Mark Paluch
*/
public interface UserRepository extends CrudRepository<Person, Long>, QueryByExampleExecutor<Person> {}


@ -16,7 +16,6 @@
/**
* Sample showing Query-by-Example related features of Spring Data MongoDB.
*
* @author Mark Paluch
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch;
package io.github.dunwu.javadb.mongodb.springboot.textsearch;
import lombok.Data;
import org.springframework.data.annotation.Id;
@ -25,7 +25,6 @@ import java.util.List;
/**
* Document representation of a {@link BlogPost} carrying annotation based information for text indexes.
*
* @author Christoph Strobl
* @author Oliver Gierke
*/


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch;
package io.github.dunwu.javadb.mongodb.springboot.textsearch;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.repository.CrudRepository;


@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch;
package io.github.dunwu.javadb.mongodb.springboot.textsearch;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@ -44,7 +44,7 @@ public class MongoTestConfiguration {
Jackson2RepositoryPopulatorFactoryBean repositoryPopulator() {
Jackson2RepositoryPopulatorFactoryBean factoryBean = new Jackson2RepositoryPopulatorFactoryBean();
factoryBean.setResources(new Resource[] { new ClassPathResource("spring-blog.atom.json") });
factoryBean.setResources(new Resource[] {new ClassPathResource("spring-blog.atom.json")});
return factoryBean;
}
@ -58,7 +58,6 @@ public class MongoTestConfiguration {
/**
* Clean up after execution by dropping used test db instance.
*
* @throws Exception
*/
@PreDestroy


@ -1,67 +0,0 @@
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
import lombok.RequiredArgsConstructor;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.where;
/**
* The manual implementation parts for {@link OrderRepository}. This will automatically be picked up by the Spring Data
* infrastructure as we follow the naming convention of extending the core repository interface's name with {@code Impl}.
*
* @author Thomas Darimont
* @author Oliver Gierke
*/
@RequiredArgsConstructor
class OrderRepositoryImpl implements OrderRepositoryCustom {
private final MongoOperations operations;
private double taxRate = 0.19;
/**
* The implementation uses the MongoDB aggregation framework support Spring Data provides as well as SpEL
* expressions to define arithmetical expressions. Note how we work with property names only and don't have to
* mitigate the nested {@code $_id} fields MongoDB usually requires.
*
* @see example.springdata.mongodb.aggregation.OrderRepositoryCustom#getInvoiceFor(example.springdata.mongodb.aggregation.Order)
*/
@Override
public Invoice getInvoiceFor(Order order) {
AggregationResults<Invoice> results = operations.aggregate(newAggregation(Order.class, //
match(where("id").is(order.getId())), //
unwind("items"), //
project("id", "customerId", "items") //
.andExpression("'$items.price' * '$items.quantity'").as("lineTotal"), //
group("id") //
.sum("lineTotal").as("netAmount") //
.addToSet("items").as("items"), //
project("id", "items", "netAmount") //
.and("orderId").previousOperation() //
.andExpression("netAmount * [0]", taxRate).as("taxAmount") //
.andExpression("netAmount * (1 + [0])", taxRate).as("totalAmount") //
), Invoice.class);
return results.getUniqueMappedResult();
}
}

File diff suppressed because it is too large


@ -1,11 +1,204 @@
{ "_id" : "ac3", "name" : "AC3 Phone", "brand" : "ACME", "type" : "phone", "price" : 200, "rating" : 3.8,"warranty_years" : 1, "available" : true }
{ "_id" : "ac7", "name" : "AC7 Phone", "brand" : "ACME", "type" : "phone", "price" : 320, "rating" : 4,"warranty_years" : 1, "available" : false }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bf9" }, "name" : "AC3 Series Charger", "type" : [ "accessory", "charger" ], "price" : 19, "rating" : 2.8,"warranty_years" : 0.25, "for" : [ "ac3", "ac7", "ac9" ] }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfa" }, "name" : "AC3 Case Green", "type" : [ "accessory", "case" ], "color" : "green", "price" : 12, "rating" : 1,"warranty_years" : 0 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfb" }, "name" : "Phone Extended Warranty", "type" : "warranty", "price" : 38, "rating" : 5,"warranty_years" : 2, "for" : [ "ac3", "ac7", "ac9", "qp7", "qp8", "qp9" ] }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfc" }, "name" : "AC3 Case Black", "type" : [ "accessory", "case" ], "color" : "black", "price" : 12.5, "rating" : 2,"warranty_years" : 0.25, "available" : false, "for" : "ac3" }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfd" }, "name" : "AC3 Case Red", "type" : [ "accessory", "case" ], "color" : "red", "price" : 12, "rating" : 4,"warranty_years" : 0.25, "available" : true, "for" : "ac3" }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfe" }, "name" : "Phone Service Basic Plan", "type" : "service", "monthly_price" : 40,"rating" : 3, "limits" : { "voice" : { "units" : "minutes", "n" : 400, "over_rate" : 0.05 }, "data" : { "units" : "gigabytes", "n" : 20, "over_rate" : 1 }, "sms" : { "units" : "texts sent", "n" : 100, "over_rate" : 0.001 } }, "term_years" : 2 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bff" }, "name" : "Phone Service Core Plan", "type" : "service", "monthly_price" : 60, "rating" : 3, "limits" : { "voice" : { "units" : "minutes", "n" : 1000, "over_rate" : 0.05 }, "data" : { "n" : "unlimited", "over_rate" : 0 }, "sms" : { "n" : "unlimited", "over_rate" : 0 } }, "term_years" : 1 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15c00" }, "name" : "Phone Service Family Plan", "type" : "service", "monthly_price" : 90,"rating" : 4, "limits" : { "voice" : { "units" : "minutes", "n" : 1200, "over_rate" : 0.05 }, "data" : { "n" : "unlimited", "over_rate" : 0 }, "sms" : { "n" : "unlimited", "over_rate" : 0 } }, "sales_tax" : true, "term_years" : 2 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15c01" }, "name" : "Cable TV Basic Service Package", "type" : "tv", "monthly_price" : 50, "rating" : 3.9,"term_years" : 2, "cancel_penalty" : 25, "sales_tax" : true, "additional_tarriffs" : [ { "kind" : "federal tarriff", "amount" : { "percent_of_service" : 0.06 } }, { "kind" : "misc tarriff", "amount" : 2.25 } ] }
{
"_id": "ac3",
"name": "AC3 Phone",
"brand": "ACME",
"type": "phone",
"price": 200,
"rating": 3.8,
"warranty_years": 1,
"available": true
}
{
"_id": "ac7",
"name": "AC7 Phone",
"brand": "ACME",
"type": "phone",
"price": 320,
"rating": 4,
"warranty_years": 1,
"available": false
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bf9"
},
"name": "AC3 Series Charger",
"type": [
"accessory",
"charger"
],
"price": 19,
"rating": 2.8,
"warranty_years": 0.25,
"for": [
"ac3",
"ac7",
"ac9"
]
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bfa"
},
"name": "AC3 Case Green",
"type": [
"accessory",
"case"
],
"color": "green",
"price": 12,
"rating": 1,
"warranty_years": 0
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bfb"
},
"name": "Phone Extended Warranty",
"type": "warranty",
"price": 38,
"rating": 5,
"warranty_years": 2,
"for": [
"ac3",
"ac7",
"ac9",
"qp7",
"qp8",
"qp9"
]
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bfc"
},
"name": "AC3 Case Black",
"type": [
"accessory",
"case"
],
"color": "black",
"price": 12.5,
"rating": 2,
"warranty_years": 0.25,
"available": false,
"for": "ac3"
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bfd"
},
"name": "AC3 Case Red",
"type": [
"accessory",
"case"
],
"color": "red",
"price": 12,
"rating": 4,
"warranty_years": 0.25,
"available": true,
"for": "ac3"
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bfe"
},
"name": "Phone Service Basic Plan",
"type": "service",
"monthly_price": 40,
"rating": 3,
"limits": {
"voice": {
"units": "minutes",
"n": 400,
"over_rate": 0.05
},
"data": {
"units": "gigabytes",
"n": 20,
"over_rate": 1
},
"sms": {
"units": "texts sent",
"n": 100,
"over_rate": 0.001
}
},
"term_years": 2
}
{
"_id": {
"$oid": "507d95d5719dbef170f15bff"
},
"name": "Phone Service Core Plan",
"type": "service",
"monthly_price": 60,
"rating": 3,
"limits": {
"voice": {
"units": "minutes",
"n": 1000,
"over_rate": 0.05
},
"data": {
"n": "unlimited",
"over_rate": 0
},
"sms": {
"n": "unlimited",
"over_rate": 0
}
},
"term_years": 1
}
{
"_id": {
"$oid": "507d95d5719dbef170f15c00"
},
"name": "Phone Service Family Plan",
"type": "service",
"monthly_price": 90,
"rating": 4,
"limits": {
"voice": {
"units": "minutes",
"n": 1200,
"over_rate": 0.05
},
"data": {
"n": "unlimited",
"over_rate": 0
},
"sms": {
"n": "unlimited",
"over_rate": 0
}
},
"sales_tax": true,
"term_years": 2
}
{
"_id": {
"$oid": "507d95d5719dbef170f15c01"
},
"name": "Cable TV Basic Service Package",
"type": "tv",
"monthly_price": 50,
"rating": 3.9,
"term_years": 2,
"cancel_penalty": 25,
"sales_tax": true,
"additional_tarriffs": [
{
"kind": "federal tarriff",
"amount": {
"percent_of_service": 0.06
}
},
{
"kind": "misc tarriff",
"amount": 2.25
}
]
}

File diff suppressed because it is too large


@ -1,15 +1,16 @@
<?xml version="1.0" encoding="UTF-8" ?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%boldYellow(%thread)] [%highlight(%-5level)] %boldGreen(%c{36}.%M) - %boldBlue(%m%n)
</pattern>
</encoder>
</appender>
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%boldYellow(%thread)] [%highlight(%-5level)] %boldGreen(%c{36}.%M) -
%boldBlue(%m%n)
</pattern>
</encoder>
</appender>
<logger name="io.github.dunwu.springboot" level="INFO" />
<logger name="io.github.dunwu.springboot" level="INFO"/>
<root level="WARN">
<appender-ref ref="CONSOLE" />
</root>
<root level="WARN">
<appender-ref ref="CONSOLE"/>
</root>
</configuration>


@ -0,0 +1,80 @@
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.javadb.mongodb.springboot.advanced;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
import io.github.dunwu.javadb.mongodb.springboot.customer.Customer;
import org.bson.Document;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Meta;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Christoph Strobl
* @author Oliver Gierke
*/
@SpringBootTest
public class AdvancedIntegrationTests {
@Autowired
AdvancedRepository repository;
@Autowired
MongoOperations operations;
Customer dave, oliver, carter;
@BeforeEach
public void setUp() {
repository.deleteAll();
dave = repository.save(new Customer("Dave", "Matthews"));
oliver = repository.save(new Customer("Oliver August", "Matthews"));
carter = repository.save(new Customer("Carter", "Beauford"));
}
/**
* This test demonstrates {@link Meta} usage with a {@code $comment}. One can also enable profiling using
* {@code --profile=2} when starting {@literal mongod}.
* <p>
* <strong>NOTE</strong>: Requires MongoDB v. 2.6.4+
*/
@Test
public void findByFirstnameUsingMetaAttributes() {
// execute derived finder method just to get the comment in the profile log
repository.findByFirstname(dave.getFirstname());
// execute another finder without meta attributes that should not be picked up
repository.findByLastname(dave.getLastname(), Sort.by("firstname"));
FindIterable<Document> cursor = operations.getCollection(ApplicationConfiguration.SYSTEM_PROFILE_DB).find(
new BasicDBObject("query.$comment", AdvancedRepository.META_COMMENT));
for (Document document : cursor) {
Document query = (Document) document.get("query");
assertThat(query).containsKey("foo");
}
}
}
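
The test above drives a derived finder that carries a {@code $comment} meta attribute. A hedged sketch of that declaration, reconstructed from the AdvancedRepository hunks earlier in this commit (the constant value is inferred from that repository's javadoc and may differ in the actual file):

import io.github.dunwu.javadb.mongodb.springboot.customer.Customer;
import io.github.dunwu.javadb.mongodb.springboot.customer.CustomerRepository;
import org.springframework.data.mongodb.repository.Meta;

import java.util.List;

// Sketch only: illustrates how the finder exercised above attaches its comment.
public interface AdvancedRepository extends CustomerRepository {

    // Assumed value, taken from the sample query shown in the repository's javadoc.
    String META_COMMENT = "s2gx-2014-rocks!";

    // The comment travels with the query, so db['system.profile'] entries can be
    // filtered on 'query.$comment' when mongod runs with --profile=2.
    @Meta(comment = META_COMMENT)
    List<Customer> findByFirstname(String firstname);
}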


@ -1,5 +1,5 @@
/**
* Package showing usage of Spring Data abstractions for special (advanced) MongoDB operations.
*/
package io.github.dunwu.springboot.mongodb.advanced;
package io.github.dunwu.javadb.mongodb.springboot.advanced;


@ -13,15 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Sort;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.Date;
@ -30,13 +28,11 @@ import static org.assertj.core.data.Offset.offset;
/**
* Integration tests for {@link OrderRepository}.
*
* @author Thomas Darimont
* @author Oliver Gierke
* @author Christoph Strobl
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@SpringBootTest(classes = {ApplicationConfiguration.class})
public class OrderRepositoryIntegrationTests {
@Autowired
@ -46,7 +42,7 @@ public class OrderRepositoryIntegrationTests {
private final static LineItem product2 = new LineItem("p2", 0.87, 2);
private final static LineItem product3 = new LineItem("p3", 5.33);
@Before
@BeforeEach
public void setup() {
repository.deleteAll();
}
@ -55,7 +51,7 @@ public class OrderRepositoryIntegrationTests {
public void createsInvoiceViaAggregation() {
Order order = new Order("c42", new Date()).//
addItem(product1).addItem(product2).addItem(product3);
addItem(product1).addItem(product2).addItem(product3);
order = repository.save(order);
Invoice invoice = repository.getInvoiceFor(order);
@ -78,9 +74,14 @@ public class OrderRepositoryIntegrationTests {
repository.save(new Order("b12", new Date()).addItem(product1));
assertThat(repository.totalOrdersPerCustomer(Sort.by(Sort.Order.desc("total")))) //
.containsExactly( //
new OrdersPerCustomer("c42", 3L), new OrdersPerCustomer("b12", 2L) //
);
    .containsExactly( //
        new OrdersPerCustomer("c42", 3L), //
        new OrdersPerCustomer("b12", 2L) //
    );
}
@Test


@ -0,0 +1,342 @@
/*
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.javadb.mongodb.springboot.aggregation;
import lombok.Getter;
import lombok.Value;
import org.assertj.core.util.Files;
import org.bson.Document;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.aggregation.ArrayOperators;
import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
/**
* Examples for Spring Books using the MongoDB Aggregation Framework. Data originates from Google's Book search.
* @author Mark Paluch
* @author Oliver Gierke
* @see <a href=
* "https://www.googleapis.com/books/v1/volumes?q=intitle:spring+framework">https://www.googleapis
* .com/books/v1/volumes?q=intitle:spring+framework</a>
* @see <a href="/books.json">books.json</a>
*/
@SpringBootTest
public class SpringBooksIntegrationTests {
@Autowired
MongoOperations operations;
@SuppressWarnings("unchecked")
@BeforeEach
public void before() throws Exception {
if (operations.count(new Query(), "books") == 0) {
File file = new ClassPathResource("books.json").getFile();
String content = Files.contentOf(file, StandardCharsets.UTF_8);
Document wrapper = Document.parse("{wrapper: " + content + "}");
List<Object> books = wrapper.getList("wrapper", Object.class);
operations.insert(books, "books");
}
}
/**
* Project Book titles.
*/
@Test
public void shouldRetrieveOrderedBookTitles() {
Aggregation aggregation = newAggregation( //
sort(Direction.ASC, "volumeInfo.title"), //
project().and("volumeInfo.title").as("title"));
AggregationResults<BookTitle> result = operations.aggregate(aggregation, "books", BookTitle.class);
assertThat(result.getMappedResults())//
.extracting("title")//
.containsSequence("Aprende a Desarrollar con Spring Framework",
"Beginning Spring", "Beginning Spring 2");
}
/**
* Get number of books that were published by the particular publisher.
*/
@Test
public void shouldRetrieveBooksPerPublisher() {
Aggregation aggregation = newAggregation( //
group("volumeInfo.publisher") //
.count().as("count"), //
sort(Direction.DESC, "count"), //
project("count").and("_id").as("publisher"));
AggregationResults<BooksPerPublisher> result =
operations.aggregate(aggregation, "books", BooksPerPublisher.class);
assertThat(result).hasSize(27);
assertThat(result).extracting("publisher").containsSequence("Apress", "Packt Publishing Ltd");
assertThat(result).extracting("count").containsSequence(26, 22, 11);
}
/**
* Get number of books that were published by the particular publisher with their titles.
*/
@Test
public void shouldRetrieveBooksPerPublisherWithTitles() {
Aggregation aggregation = newAggregation( //
group("volumeInfo.publisher") //
.count().as("count") //
    .addToSet("volumeInfo.title").as("titles"), //
sort(Direction.DESC, "count"), //
project("count", "titles").and("_id").as("publisher"));
AggregationResults<BooksPerPublisher> result =
operations.aggregate(aggregation, "books", BooksPerPublisher.class);
BooksPerPublisher booksPerPublisher = result.getMappedResults().get(0);
assertThat(booksPerPublisher.getPublisher()).isEqualTo("Apress");
assertThat(booksPerPublisher.getCount()).isEqualTo(26);
assertThat(booksPerPublisher.getTitles()).contains("Expert Spring MVC and Web Flow", "Pro Spring Boot");
}
/**
* Filter for Data-related books in their title and output the title and authors.
*/
@Test
public void shouldRetrieveDataRelatedBooks() {
Aggregation aggregation = newAggregation( //
match(Criteria.where("volumeInfo.title").regex("data", "i")), //
replaceRoot("volumeInfo"), //
project("title", "authors"), //
sort(Direction.ASC, "title"));
AggregationResults<BookAndAuthors> result = operations.aggregate(aggregation, "books", BookAndAuthors.class);
BookAndAuthors bookAndAuthors = result.getMappedResults().get(1);
assertThat(bookAndAuthors.getTitle()).isEqualTo("Spring Data");
assertThat(bookAndAuthors.getAuthors()).contains("Mark Pollack", "Oliver Gierke", "Thomas Risberg",
"Jon Brisbin", "Michael Hunger");
}
/**
* Retrieve the number of pages per author (and divide the number of pages by the number of authors).
*/
@Test
public void shouldRetrievePagesPerAuthor() {
Aggregation aggregation = newAggregation( //
match(Criteria.where("volumeInfo.authors").exists(true)), //
replaceRoot("volumeInfo"), //
    project("authors", "pageCount") //
        .and(ArithmeticOperators.valueOf("pageCount") //
            .divideBy(ArrayOperators.arrayOf("authors").length())) //
        .as("pagesPerAuthor"),
unwind("authors"), //
group("authors") //
.sum("pageCount").as("totalPageCount") //
.sum("pagesPerAuthor").as("approxWritten"), //
sort(Direction.DESC, "totalPageCount"));
AggregationResults<PagesPerAuthor> result = operations.aggregate(aggregation, "books", PagesPerAuthor.class);
PagesPerAuthor pagesPerAuthor = result.getMappedResults().get(0);
assertThat(pagesPerAuthor.getAuthor()).isEqualTo("Josh Long");
assertThat(pagesPerAuthor.getTotalPageCount()).isEqualTo(1892);
assertThat(pagesPerAuthor.getApproxWritten()).isEqualTo(573);
}
/**
* Categorize books by their page count into buckets.
*/
@Test
public void shouldCategorizeBooksInBuckets() {
Aggregation aggregation = newAggregation( //
replaceRoot("volumeInfo"), //
match(Criteria.where("pageCount").exists(true)),
    bucketAuto("pageCount", 10) //
        .withGranularity(Granularities.SERIES_1_2_5) //
.andOutput("title").push().as("titles") //
.andOutput("titles").count().as("count"));
AggregationResults<BookFacetPerPage> result =
operations.aggregate(aggregation, "books", BookFacetPerPage.class);
List<BookFacetPerPage> mappedResults = result.getMappedResults();
BookFacetPerPage facet_20_to_100_pages = mappedResults.get(0);
assertThat(facet_20_to_100_pages.getId().getMin()).isEqualTo(20);
assertThat(facet_20_to_100_pages.getId().getMax()).isEqualTo(100);
assertThat(facet_20_to_100_pages.getCount()).isEqualTo(12);
BookFacetPerPage facet_100_to_500_pages = mappedResults.get(1);
assertThat(facet_100_to_500_pages.getId().getMin()).isEqualTo(100);
assertThat(facet_100_to_500_pages.getId().getMax()).isEqualTo(500);
assertThat(facet_100_to_500_pages.getCount()).isEqualTo(63);
assertThat(facet_100_to_500_pages.getTitles()).contains("Spring Data");
}
/**
* Run a multi-faceted aggregation to get buckets by price (1-10, 10-50, 50-100 EURO) and by the first letter of the
* author name.
*/
@Test
@SuppressWarnings("unchecked")
public void shouldCategorizeInMultipleFacetsByPriceAndAuthor() {
Aggregation aggregation = newAggregation( //
    match(Criteria.where("volumeInfo.authors").exists(true).and("volumeInfo.publisher").exists(true)), //
    facet() //
        .and(match(Criteria.where("saleInfo.listPrice").exists(true)), //
            replaceRoot("saleInfo"), //
            bucket("listPrice.amount").withBoundaries(1, 10, 50, 100)).as("prices") //
        .and(unwind("volumeInfo.authors"), //
            replaceRoot("volumeInfo"), //
            match(Criteria.where("authors").not().size(0)), //
            project() //
                .andExpression("substrCP(authors, 0, 1)").as("startsWith") //
                .and("authors").as("author"), //
            bucketAuto("startsWith", 10) //
                .andOutput("author").push().as("authors") //
        ).as("authors"));
AggregationResults<Document> result = operations.aggregate(aggregation, "books", Document.class);
Document uniqueMappedResult = result.getUniqueMappedResult();
assertThat((List<Object>) uniqueMappedResult.get("prices")).hasSize(3);
assertThat((List<Object>) uniqueMappedResult.get("authors")).hasSize(8);
}
@Value
@Getter
static class BookTitle {
String title;
}
@Value
@Getter
static class BooksPerPublisher {
String publisher;
int count;
List<String> titles;
}
@Value
@Getter
static class BookAndAuthors {
String title;
List<String> authors;
}
@Value
@Getter
static class PagesPerAuthor {
@Id
String author;
int totalPageCount;
int approxWritten;
}
@Value
@Getter
static class BookFacetPerPage {
BookFacetPerPageId id;
int count;
List<String> titles;
}
@Value
@Getter
static class BookFacetPerPageId {
int min;
int max;
}
}


@ -13,11 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.customer;
package io.github.dunwu.javadb.mongodb.springboot.customer;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.geo.Distance;
@ -27,17 +26,14 @@ import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.GeospatialIndex;
import org.springframework.data.querydsl.QSort;
import org.springframework.test.context.junit4.SpringRunner;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
/**
* Integration test for {@link CustomerRepository}.
*
* @author Oliver Gierke
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class CustomerRepositoryIntegrationTest {
@ -48,7 +44,7 @@ public class CustomerRepositoryIntegrationTest {
Customer dave, oliver, carter;
@Before
@BeforeEach
public void setUp() {
repository.deleteAll();


@ -1,5 +1,5 @@
/**
* Package showing basic usage of Spring Data MongoDB Repositories.
*/
package io.github.dunwu.springboot.mongodb.customer;
package io.github.dunwu.javadb.mongodb.springboot.customer;


@ -13,32 +13,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.immutable;
package io.github.dunwu.javadb.mongodb.springboot.immutable;
import static org.assertj.core.api.Assertions.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Integration test for {@link ImmutablePerson} showing features around immutable object support.
*
* @author Mark Paluch
* @author Christoph Strobl
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class ImmutableEntityIntegrationTest {
@Autowired
MongoOperations operations;
@Before
@BeforeEach
public void setUp() {
operations.dropCollection(ImmutablePerson.class);
}


@ -13,11 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.projections;
package io.github.dunwu.javadb.mongodb.springboot.projections;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
@ -27,7 +26,6 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.projection.TargetAware;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.Collection;
@ -36,10 +34,8 @@ import static org.hamcrest.Matchers.*;
/**
* Integration tests for {@link CustomerRepository} to show projection capabilities.
*
* @author Oliver Gierke
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class CustomerRepositoryIntegrationTest {
@ -47,12 +43,13 @@ public class CustomerRepositoryIntegrationTest {
@EnableAutoConfiguration
static class Config {}
@Autowired
CustomerRepository customers;
Customer dave, carter;
@Before
@BeforeEach
public void setUp() {
customers.deleteAll();
this.dave = customers.save(new Customer("Dave", "Matthews"));
@ -111,8 +108,8 @@ public class CustomerRepositoryIntegrationTest {
@Test
public void supportsProjectionInCombinationWithPagination() {
Page<CustomerProjection> page = customers
.findPagedProjectedBy(PageRequest.of(0, 1, Sort.by(Direction.ASC, "lastname")));
Page<CustomerProjection> page =
customers.findPagedProjectedBy(PageRequest.of(0, 1, Sort.by(Direction.ASC, "lastname")));
assertThat(page.getContent().get(0).getFirstname(), is("Carter"));
}


@ -13,17 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.test.context.junit4.SpringRunner;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
@ -34,13 +32,11 @@ import static org.springframework.data.domain.ExampleMatcher.matching;
/**
* Integration test showing the usage of MongoDB Query-by-Example support through Spring Data repositories for a case
* where two domain types are stored in one collection.
*
* @author Mark Paluch
* @author Oliver Gierke
* @soundtrack Paul van Dyk - VONYC Sessions Episode 496 with guest Armin van Buuren
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@SpringBootTest(classes = {ApplicationConfiguration.class})
public class ContactRepositoryIntegrationTests {
@Autowired
@ -53,7 +49,7 @@ public class ContactRepositoryIntegrationTests {
Person skyler, walter, flynn;
Relative marie, hank;
@Before
@BeforeEach
public void setUp() {
contactRepository.deleteAll();
@ -76,8 +72,8 @@ public class ContactRepositoryIntegrationTests {
@Test
public void findAllPersonsBySimpleExample() {
Example<Person> example = Example.of(new Person(".*", null, null),
matching().withStringMatcher(StringMatcher.REGEX));
Example<Person> example =
Example.of(new Person(".*", null, null), matching().withStringMatcher(StringMatcher.REGEX));
assertThat(userRepository.findAll(example), containsInAnyOrder(skyler, walter, flynn));
assertThat(userRepository.findAll(example), not(containsInAnyOrder(hank, marie)));
@ -86,8 +82,8 @@ public class ContactRepositoryIntegrationTests {
@Test
public void findAllRelativesBySimpleExample() {
Example<Relative> example = Example.of(new Relative(".*", null, null),
matching().withStringMatcher(StringMatcher.REGEX));
Example<Relative> example =
Example.of(new Relative(".*", null, null), matching().withStringMatcher(StringMatcher.REGEX));
assertThat(contactRepository.findAll(example), containsInAnyOrder(hank, marie));
assertThat(contactRepository.findAll(example), not(containsInAnyOrder(skyler, walter, flynn)));

View File

@ -0,0 +1,153 @@
/*
* Copyright 2016-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import java.util.Optional;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers.ignoreCase;
import static org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers.startsWith;
import static org.springframework.data.domain.ExampleMatcher.matching;
import static org.springframework.data.mongodb.core.query.Criteria.byExample;
import static org.springframework.data.mongodb.core.query.Query.query;
/**
* Integration test showing the usage of MongoDB Query-by-Example support through Spring Data repositories.
* @author Mark Paluch
* @author Oliver Gierke
*/
@SuppressWarnings("unused")
@SpringBootTest(classes = {ApplicationConfiguration.class})
public class MongoOperationsIntegrationTests {
@Autowired
MongoOperations operations;
Person skyler, walter, flynn, marie, hank;
@BeforeEach
public void setUp() {
operations.remove(new Query(), Person.class);
this.skyler = new Person("Skyler", "White", 45);
this.walter = new Person("Walter", "White", 50);
this.flynn = new Person("Walter Jr. (Flynn)", "White", 17);
this.marie = new Person("Marie", "Schrader", 38);
this.hank = new Person("Hank", "Schrader", 43);
operations.save(this.skyler);
operations.save(this.walter);
operations.save(this.flynn);
operations.save(this.marie);
operations.save(this.hank);
}
/**
* @see #153
*/
@Test
public void ignoreNullProperties() {
Query query = query(byExample(new Person(null, null, 17)));
assertThat(operations.find(query, Person.class), hasItems(flynn));
}
/**
* @see #153
*/
@Test
public void substringMatching() {
Example<Person> example = Example.of(new Person("er", null, null), matching().//
withStringMatcher(
StringMatcher.ENDING));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(skyler, walter));
}
/**
* @see #154
*/
@Test
public void regexMatching() {
Example<Person> example = Example.of(new Person("(Skyl|Walt)er", null, null), matching().//
withMatcher(
"firstname", matcher -> matcher.regex()));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(skyler, walter));
}
/**
* @see #153
*/
@Test
public void matchStartingStringsIgnoreCase() {
Example<Person> example = Example.of(new Person("Walter", "WHITE", null), matching(). //
withIgnorePaths(
"age").//
withMatcher("firstname", startsWith()).//
withMatcher("lastname", ignoreCase()));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(flynn, walter));
}
/**
* @see #153
*/
@Test
public void configuringMatchersUsingLambdas() {
Example<Person> example = Example.of(new Person("Walter", "WHITE", null), matching().//
withIgnorePaths(
"age"). //
withMatcher("firstname", matcher -> matcher.startsWith()). //
withMatcher("lastname",
matcher -> matcher.ignoreCase()));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(flynn, walter));
}
/**
* @see #153
*/
@Test
public void valueTransformer() {
Example<Person> example = Example.of(new Person(null, "White", 99), matching(). //
withMatcher("age",
matcher -> matcher.transform(
value -> Optional.of(
Integer.valueOf(
50)))));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(walter));
}
}

View File

@ -13,16 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
package io.github.dunwu.javadb.mongodb.springboot.querybyexample;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.Optional;
@ -33,12 +31,10 @@ import static org.springframework.data.domain.ExampleMatcher.matching;
/**
* Integration test showing the usage of MongoDB Query-by-Example support through Spring Data repositories.
*
* @author Mark Paluch
* @author Oliver Gierke
* @author Jens Schauder
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class UserRepositoryIntegrationTests {
@ -47,7 +43,7 @@ public class UserRepositoryIntegrationTests {
Person skyler, walter, flynn, marie, hank;
@Before
@BeforeEach
public void setUp() {
repository.deleteAll();
@ -77,7 +73,7 @@ public class UserRepositoryIntegrationTests {
public void ignorePropertiesAndMatchByAge() {
Example<Person> example = Example.of(flynn, matching(). //
withIgnorePaths("firstname", "lastname"));
withIgnorePaths("firstname", "lastname"));
assertThat(repository.findOne(example)).contains(flynn);
}
@ -89,7 +85,8 @@ public class UserRepositoryIntegrationTests {
public void substringMatching() {
Example<Person> example = Example.of(new Person("er", null, null), matching(). //
withStringMatcher(StringMatcher.ENDING));
withStringMatcher(
StringMatcher.ENDING));
assertThat(repository.findAll(example)).containsExactlyInAnyOrder(skyler, walter);
}
@ -101,7 +98,8 @@ public class UserRepositoryIntegrationTests {
public void regexMatching() {
Example<Person> example = Example.of(new Person("(Skyl|Walt)er", null, null), matching(). //
withMatcher("firstname", matcher -> matcher.regex()));
withMatcher(
"firstname", matcher -> matcher.regex()));
assertThat(repository.findAll(example)).contains(skyler, walter);
}
@ -113,9 +111,10 @@ public class UserRepositoryIntegrationTests {
public void matchStartingStringsIgnoreCase() {
Example<Person> example = Example.of(new Person("Walter", "WHITE", null), matching(). //
withIgnorePaths("age"). //
withMatcher("firstname", startsWith()). //
withMatcher("lastname", ignoreCase()));
withIgnorePaths(
"age"). //
withMatcher("firstname", startsWith()). //
withMatcher("lastname", ignoreCase()));
assertThat(repository.findAll(example)).containsExactlyInAnyOrder(flynn, walter);
}
@ -127,9 +126,11 @@ public class UserRepositoryIntegrationTests {
public void configuringMatchersUsingLambdas() {
Example<Person> example = Example.of(new Person("Walter", "WHITE", null), matching(). //
withIgnorePaths("age"). //
withMatcher("firstname", matcher -> matcher.startsWith()). //
withMatcher("lastname", matcher -> matcher.ignoreCase()));
withIgnorePaths(
"age"). //
withMatcher("firstname", matcher -> matcher.startsWith()). //
withMatcher("lastname",
matcher -> matcher.ignoreCase()));
assertThat(repository.findAll(example)).containsExactlyInAnyOrder(flynn, walter);
}
@ -141,7 +142,11 @@ public class UserRepositoryIntegrationTests {
public void valueTransformer() {
Example<Person> example = Example.of(new Person(null, "White", 99), matching(). //
withMatcher("age", matcher -> matcher.transform(value -> Optional.of(Integer.valueOf(50)))));
withMatcher("age",
matcher -> matcher.transform(
value -> Optional.of(
Integer.valueOf(
50)))));
assertThat(repository.findAll(example)).containsExactlyInAnyOrder(walter);
}
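The findOne(example) and findAll(example) calls in the hunks above come from Spring Data's QueryByExampleExecutor contract, which MongoRepository already extends, so Query-by-Example needs no extra query methods. The repository interface itself is not shown in this diff; a hypothetical sketch of what it presumably looks like:

import org.springframework.data.mongodb.repository.MongoRepository;

// Hypothetical sketch - the actual interface in the sample may declare additional finder methods.
public interface PersonRepository extends MongoRepository<Person, String> {
}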

View File

@ -13,28 +13,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch;
package io.github.dunwu.javadb.mongodb.springboot.textsearch;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.mongodb.core.mapping.TextScore;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.List;
import static io.github.dunwu.springboot.mongodb.textsearch.util.ConsoleResultPrinter.printResult;
import static io.github.dunwu.javadb.mongodb.springboot.textsearch.util.ConsoleResultPrinter.printResult;
/**
* Integration tests showing the text search functionality using repositories.
*
* @author Christoph Strobl
* @author Oliver Gierke
* @author Thomas Darimont
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class TextSearchRepositoryTests {

View File

@ -0,0 +1,72 @@
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.javadb.mongodb.springboot.textsearch;
import io.github.dunwu.javadb.mongodb.springboot.SpringBootDataMongodbApplication;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.mongodb.core.query.TextQuery;
import java.util.List;
import static io.github.dunwu.javadb.mongodb.springboot.textsearch.util.ConsoleResultPrinter.printResult;
import static org.springframework.data.mongodb.core.query.Query.query;
/**
* @author Christoph Strobl
* @author Thomas Darimont
*/
@SpringBootTest(classes = {MongoTestConfiguration.class})
public class TextSearchTemplateTests {
@Autowired
MongoOperations operations;
/**
* Show how to do simple matching. Note that text search is case insensitive and will also find entries like
* {@literal releases}.
*/
@Test
public void findAllBlogPostsWithRelease() {
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("release");
List<BlogPost> blogPosts = operations.find(query(criteria), BlogPost.class);
printResult(blogPosts, criteria);
}
/**
* Sort by relevance relying on the value marked with
* {@link org.springframework.data.mongodb.core.mapping.TextScore}.
*/
@Test
public void findAllBlogPostsByPhraseSortByScore() {
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingPhrase("release");
TextQuery query = new TextQuery(criteria);
query.setScoreFieldName("score");
query.sortByScore();
List<BlogPost> blogPosts = operations.find(query, BlogPost.class);
printResult(blogPosts, criteria);
}
}
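These template-based text searches only work against a collection that has a MongoDB text index; index creation is not part of this diff (presumably it is handled by MongoTestConfiguration or an initializer). A sketch of creating such an index via Spring Data's IndexOperations, with field names and weights assumed for illustration:

import io.github.dunwu.javadb.mongodb.springboot.textsearch.BlogPost;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.TextIndexDefinition;

class BlogPostIndexSetup {

    // Hypothetical helper - the field names below are assumptions, not taken from this diff.
    static void ensureTextIndex(MongoOperations operations) {
        TextIndexDefinition textIndex = new TextIndexDefinition.TextIndexDefinitionBuilder()
                .onField("title", 2F)   // weight the title higher than the body
                .onField("content")
                .build();
        operations.indexOps(BlogPost.class).ensureIndex(textIndex);
    }
}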

View File

@ -13,9 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch.util;
package io.github.dunwu.javadb.mongodb.springboot.textsearch.util;
import io.github.dunwu.springboot.mongodb.textsearch.BlogPost;
import io.github.dunwu.javadb.mongodb.springboot.textsearch.BlogPost;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.mongodb.core.MongoOperations;
@ -24,7 +24,6 @@ import org.springframework.util.Assert;
/**
* Component to initialize {@link BlogPost}s by accessing the latest ones from the Spring blog.
*
* @author Christoph Strobl
* @author Oliver Gierke
*/
@ -35,7 +34,6 @@ public enum BlogPostInitializer {
/**
* Initializes the given {@link MongoOperations} with {@link BlogPost}s from the Spring Blog.
*
* @param operations must not be {@literal null}.
* @throws Exception
*/
@ -45,7 +43,7 @@ public enum BlogPostInitializer {
loadFromClasspathSource(operations);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@SuppressWarnings({"unchecked", "rawtypes"})
private void loadFromClasspathSource(MongoOperations operations) throws Exception {
Jackson2ResourceReader reader = new Jackson2ResourceReader();

View File

@ -13,24 +13,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch.util;
package io.github.dunwu.javadb.mongodb.springboot.textsearch.util;
import io.github.dunwu.springboot.mongodb.textsearch.BlogPost;
import io.github.dunwu.javadb.mongodb.springboot.textsearch.BlogPost;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import java.util.Collection;
/**
* Just a little helper for showing {@link BlogPost}s output on the console.
*
* @author Christoph Strobl
*/
public class ConsoleResultPrinter {
public static void printResult(Collection<BlogPost> blogPosts, CriteriaDefinition criteria) {
System.out.println(String.format("XXXXXXXXXXXX -- Found %s blogPosts matching '%s' --XXXXXXXXXXXX",
blogPosts.size(), criteria != null ? criteria.getCriteriaObject() : ""));
System.out.println(
String.format("XXXXXXXXXXXX -- Found %s blogPosts matching '%s' --XXXXXXXXXXXX", blogPosts.size(),
criteria != null ? criteria.getCriteriaObject() : ""));
for (BlogPost blogPost : blogPosts) {
System.out.println(blogPost);

View File

@ -1,83 +0,0 @@
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.advanced;
import static org.assertj.core.api.Assertions.*;
import io.github.dunwu.springboot.mongodb.customer.Customer;
import org.bson.Document;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.test.context.junit4.SpringRunner;
import com.mongodb.BasicDBObject;
import com.mongodb.client.FindIterable;
/**
* @author Christoph Strobl
* @author Oliver Gierke
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class AdvancedIntegrationTests {
@Autowired AdvancedRepository repository;
@Autowired MongoOperations operations;
Customer dave, oliver, carter;
@Before
public void setUp() {
repository.deleteAll();
dave = repository.save(new Customer("Dave", "Matthews"));
oliver = repository.save(new Customer("Oliver August", "Matthews"));
carter = repository.save(new Customer("Carter", "Beauford"));
}
/**
* This test demonstrates {@code $comment} {@link Meta} usage. One can also enable profiling using
* {@code --profile=2} when starting {@literal mongod}.
* <p>
* <strong>NOTE</strong>: Requires MongoDB v. 2.6.4+
*/
@Test
public void findByFirstnameUsingMetaAttributes() {
// execute derived finder method just to get the comment in the profile log
repository.findByFirstname(dave.getFirstname());
// execute another finder without meta attributes that should not be picked up
repository.findByLastname(dave.getLastname(), Sort.by("firstname"));
FindIterable<Document> cursor = operations.getCollection(ApplicationConfiguration.SYSTEM_PROFILE_DB)
.find(new BasicDBObject("query.$comment", AdvancedRepository.META_COMMENT));
for (Document document : cursor) {
Document query = (Document) document.get("query");
assertThat(query).containsKey("foo");
}
}
}

View File

@ -1,294 +0,0 @@
/*
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.aggregation;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import lombok.Getter;
import lombok.Value;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.assertj.core.util.Files;
import org.bson.Document;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.aggregation.ArrayOperators;
import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.test.context.junit4.SpringRunner;
/**
* Examples for Spring Books using the MongoDB Aggregation Framework. Data originates from Google's Book search.
*
* @author Mark Paluch
* @author Oliver Gierke
* @see <a href=
* "https://www.googleapis.com/books/v1/volumes?q=intitle:spring+framework">https://www.googleapis.com/books/v1/volumes?q=intitle:spring+framework</a>
* @see <a href="/books.json">books.json</a>
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringBooksIntegrationTests {
@Autowired MongoOperations operations;
@SuppressWarnings("unchecked")
@Before
public void before() throws Exception {
if (operations.count(new Query(), "books") == 0) {
File file = new ClassPathResource("books.json").getFile();
String content = Files.contentOf(file, StandardCharsets.UTF_8);
Document wrapper = Document.parse("{wrapper: " + content + "}");
List<Object> books = wrapper.getList("wrapper", Object.class);
operations.insert(books, "books");
}
}
/**
* Project Book titles.
*/
@Test
public void shouldRetrieveOrderedBookTitles() {
Aggregation aggregation = newAggregation( //
sort(Direction.ASC, "volumeInfo.title"), //
project().and("volumeInfo.title").as("title"));
AggregationResults<BookTitle> result = operations.aggregate(aggregation, "books", BookTitle.class);
assertThat(result.getMappedResults())//
.extracting("title")//
.containsSequence("Aprende a Desarrollar con Spring Framework", "Beginning Spring", "Beginning Spring 2");
}
/**
* Get number of books that were published by the particular publisher.
*/
@Test
public void shouldRetrieveBooksPerPublisher() {
Aggregation aggregation = newAggregation( //
group("volumeInfo.publisher") //
.count().as("count"), //
sort(Direction.DESC, "count"), //
project("count").and("_id").as("publisher"));
AggregationResults<BooksPerPublisher> result = operations.aggregate(aggregation, "books", BooksPerPublisher.class);
assertThat(result).hasSize(27);
assertThat(result).extracting("publisher").containsSequence("Apress", "Packt Publishing Ltd");
assertThat(result).extracting("count").containsSequence(26, 22, 11);
}
/**
* Get number of books that were published by the particular publisher with their titles.
*/
@Test
public void shouldRetrieveBooksPerPublisherWithTitles() {
Aggregation aggregation = newAggregation( //
group("volumeInfo.publisher") //
.count().as("count") //
.addToSet("volumeInfo.title").as("titles"), //
sort(Direction.DESC, "count"), //
project("count", "titles").and("_id").as("publisher"));
AggregationResults<BooksPerPublisher> result = operations.aggregate(aggregation, "books", BooksPerPublisher.class);
BooksPerPublisher booksPerPublisher = result.getMappedResults().get(0);
assertThat(booksPerPublisher.getPublisher()).isEqualTo("Apress");
assertThat(booksPerPublisher.getCount()).isEqualTo(26);
assertThat(booksPerPublisher.getTitles()).contains("Expert Spring MVC and Web Flow", "Pro Spring Boot");
}
/**
* Filter for Data-related books in their title and output the title and authors.
*/
@Test
public void shouldRetrieveDataRelatedBooks() {
Aggregation aggregation = newAggregation( //
match(Criteria.where("volumeInfo.title").regex("data", "i")), //
replaceRoot("volumeInfo"), //
project("title", "authors"), //
sort(Direction.ASC, "title"));
AggregationResults<BookAndAuthors> result = operations.aggregate(aggregation, "books", BookAndAuthors.class);
BookAndAuthors bookAndAuthors = result.getMappedResults().get(1);
assertThat(bookAndAuthors.getTitle()).isEqualTo("Spring Data");
assertThat(bookAndAuthors.getAuthors()).contains("Mark Pollack", "Oliver Gierke", "Thomas Risberg", "Jon Brisbin",
"Michael Hunger");
}
/**
* Retrieve the number of pages per author (and divide the number of pages by the number of authors).
*/
@Test
public void shouldRetrievePagesPerAuthor() {
Aggregation aggregation = newAggregation( //
match(Criteria.where("volumeInfo.authors").exists(true)), //
replaceRoot("volumeInfo"), //
project("authors", "pageCount") //
.and(ArithmeticOperators.valueOf("pageCount") //
.divideBy(ArrayOperators.arrayOf("authors").length()))
.as("pagesPerAuthor"),
unwind("authors"), //
group("authors") //
.sum("pageCount").as("totalPageCount") //
.sum("pagesPerAuthor").as("approxWritten"), //
sort(Direction.DESC, "totalPageCount"));
AggregationResults<PagesPerAuthor> result = operations.aggregate(aggregation, "books", PagesPerAuthor.class);
PagesPerAuthor pagesPerAuthor = result.getMappedResults().get(0);
assertThat(pagesPerAuthor.getAuthor()).isEqualTo("Josh Long");
assertThat(pagesPerAuthor.getTotalPageCount()).isEqualTo(1892);
assertThat(pagesPerAuthor.getApproxWritten()).isEqualTo(573);
}
/**
* Categorize books by their page count into buckets.
*/
@Test
public void shouldCategorizeBooksInBuckets() {
Aggregation aggregation = newAggregation( //
replaceRoot("volumeInfo"), //
match(Criteria.where("pageCount").exists(true)),
bucketAuto("pageCount", 10) //
.withGranularity(Granularities.SERIES_1_2_5) //
.andOutput("title").push().as("titles") //
.andOutput("titles").count().as("count"));
AggregationResults<BookFacetPerPage> result = operations.aggregate(aggregation, "books", BookFacetPerPage.class);
List<BookFacetPerPage> mappedResults = result.getMappedResults();
BookFacetPerPage facet_20_to_100_pages = mappedResults.get(0);
assertThat(facet_20_to_100_pages.getId().getMin()).isEqualTo(20);
assertThat(facet_20_to_100_pages.getId().getMax()).isEqualTo(100);
assertThat(facet_20_to_100_pages.getCount()).isEqualTo(12);
BookFacetPerPage facet_100_to_500_pages = mappedResults.get(1);
assertThat(facet_100_to_500_pages.getId().getMin()).isEqualTo(100);
assertThat(facet_100_to_500_pages.getId().getMax()).isEqualTo(500);
assertThat(facet_100_to_500_pages.getCount()).isEqualTo(63);
assertThat(facet_100_to_500_pages.getTitles()).contains("Spring Data");
}
/**
* Run a multi-faceted aggregation to get buckets by price (1-10, 10-50, 50-100 EURO) and by the first letter of the
* author name.
*/
@Test
@SuppressWarnings("unchecked")
public void shouldCategorizeInMultipleFacetsByPriceAndAuthor() {
Aggregation aggregation = newAggregation( //
match(Criteria.where("volumeInfo.authors").exists(true).and("volumeInfo.publisher").exists(true)),
facet() //
.and(match(Criteria.where("saleInfo.listPrice").exists(true)), //
replaceRoot("saleInfo"), //
bucket("listPrice.amount") //
.withBoundaries(1, 10, 50, 100))
.as("prices") //
.and(unwind("volumeInfo.authors"), //
replaceRoot("volumeInfo"), //
match(Criteria.where("authors").not().size(0)), //
project() //
.andExpression("substrCP(authors, 0, 1)").as("startsWith") //
.and("authors").as("author"), //
bucketAuto("startsWith", 10) //
.andOutput("author").push().as("authors") //
).as("authors"));
AggregationResults<Document> result = operations.aggregate(aggregation, "books", Document.class);
Document uniqueMappedResult = result.getUniqueMappedResult();
assertThat((List<Object>) uniqueMappedResult.get("prices")).hasSize(3);
assertThat((List<Object>) uniqueMappedResult.get("authors")).hasSize(8);
}
@Value
@Getter
static class BookTitle {
String title;
}
@Value
@Getter
static class BooksPerPublisher {
String publisher;
int count;
List<String> titles;
}
@Value
@Getter
static class BookAndAuthors {
String title;
List<String> authors;
}
@Value
@Getter
static class PagesPerAuthor {
@Id String author;
int totalPageCount;
int approxWritten;
}
@Value
@Getter
static class BookFacetPerPage {
BookFacetPerPageId id;
int count;
List<String> titles;
}
@Value
@Getter
static class BookFacetPerPageId {
int min;
int max;
}
}

View File

@ -1,148 +0,0 @@
/*
* Copyright 2016-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.querybyexample;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.junit.Assert.*;
import static org.springframework.data.domain.ExampleMatcher.*;
import static org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers.*;
import static org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers.startsWith;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.test.context.junit4.SpringRunner;
/**
* Integration test showing the usage of MongoDB Query-by-Example support through Spring Data repositories.
*
* @author Mark Paluch
* @author Oliver Gierke
*/
@SuppressWarnings("unused")
@RunWith(SpringRunner.class)
@SpringBootTest
public class MongoOperationsIntegrationTests {
@Autowired MongoOperations operations;
Person skyler, walter, flynn, marie, hank;
@Before
public void setUp() {
operations.remove(new Query(), Person.class);
this.skyler = new Person("Skyler", "White", 45);
this.walter = new Person("Walter", "White", 50);
this.flynn = new Person("Walter Jr. (Flynn)", "White", 17);
this.marie = new Person("Marie", "Schrader", 38);
this.hank = new Person("Hank", "Schrader", 43);
operations.save(this.skyler);
operations.save(this.walter);
operations.save(this.flynn);
operations.save(this.marie);
operations.save(this.hank);
}
/**
* @see #153
*/
@Test
public void ignoreNullProperties() {
Query query = query(byExample(new Person(null, null, 17)));
assertThat(operations.find(query, Person.class), hasItems(flynn));
}
/**
* @see #153
*/
@Test
public void substringMatching() {
Example<Person> example = Example.of(new Person("er", null, null), matching().//
withStringMatcher(StringMatcher.ENDING));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(skyler, walter));
}
/**
* @see #154
*/
@Test
public void regexMatching() {
Example<Person> example = Example.of(new Person("(Skyl|Walt)er", null, null), matching().//
withMatcher("firstname", matcher -> matcher.regex()));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(skyler, walter));
}
/**
* @see #153
*/
@Test
public void matchStartingStringsIgnoreCase() {
Example<Person> example = Example.of(new Person("Walter", "WHITE", null), matching(). //
withIgnorePaths("age").//
withMatcher("firstname", startsWith()).//
withMatcher("lastname", ignoreCase()));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(flynn, walter));
}
/**
* @see #153
*/
@Test
public void configuringMatchersUsingLambdas() {
Example<Person> example = Example.of(new Person("Walter", "WHITE", null), matching().//
withIgnorePaths("age"). //
withMatcher("firstname", matcher -> matcher.startsWith()). //
withMatcher("lastname", matcher -> matcher.ignoreCase()));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(flynn, walter));
}
/**
* @see #153
*/
@Test
public void valueTransformer() {
Example<Person> example = Example.of(new Person(null, "White", 99), matching(). //
withMatcher("age", matcher -> matcher.transform(value -> Optional.of(Integer.valueOf(50)))));
assertThat(operations.find(query(byExample(example)), Person.class), hasItems(walter));
}
}

View File

@ -1,84 +0,0 @@
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.dunwu.springboot.mongodb.textsearch;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.mongodb.core.query.TextQuery;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.List;
import static io.github.dunwu.springboot.mongodb.textsearch.util.ConsoleResultPrinter.printResult;
import static org.springframework.data.mongodb.core.query.Query.query;
/**
* @author Christoph Strobl
* @author Thomas Darimont
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class TextSearchTemplateTests {
@Autowired MongoOperations operations;
// @Before
// public void setUp() throws Exception {
//
// MongoProperties properties = new MongoProperties();
//
// operations = new MongoTemplate(properties.createMongoClient(null), properties.getMongoClientDatabase());
// operations.dropCollection(BlogPost.class);
//
// createIndex();
//
// BlogPostInitializer.INSTANCE.initialize(this.operations);
// }
/**
* Show how to do simple matching. Note that text search is case insensitive and will also find entries like
* {@literal releases}.
*/
@Test
public void findAllBlogPostsWithRelease() {
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("release");
List<BlogPost> blogPosts = operations.find(query(criteria), BlogPost.class);
printResult(blogPosts, criteria);
}
/**
* Sort by relevance relying on the value marked with {@link org.springframework.data.mongodb.core.mapping.TextScore}.
*/
@Test
public void findAllBlogPostsByPhraseSortByScore() {
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingPhrase("release");
TextQuery query = new TextQuery(criteria);
query.setScoreFieldName("score");
query.sortByScore();
List<BlogPost> blogPosts = operations.find(query, BlogPost.class);
printResult(blogPosts, criteria);
}
}

File diff suppressed because one or more lines are too long

View File

@ -1,46 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.3.3.RELEASE</version>
<relativePath />
</parent>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-mysql</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.6.3</version>
</parent>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-mysql</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -1,4 +1,4 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.CommandLineRunner;
@ -6,8 +6,8 @@ import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.Connection;
import javax.sql.DataSource;
import java.sql.Connection;
@Slf4j
@SpringBootApplication

View File

@ -1,4 +1,4 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import lombok.AllArgsConstructor;
import lombok.Data;
@ -9,7 +9,6 @@ import java.util.Objects;
/**
* user
*
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-11-18
*/

View File

@ -1,4 +1,4 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import org.springframework.jdbc.core.JdbcTemplate;
@ -6,7 +6,6 @@ import java.util.List;
/**
* user Dao
*
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-11-18
*/

View File

@ -1,10 +1,10 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
import javax.annotation.PostConstruct;
import java.util.List;
/**
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>

View File

@ -1,4 +1,4 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
@ -11,7 +11,6 @@ import java.util.List;
/**
* user Dao
*
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-11-18
*/
@ -26,8 +25,8 @@ public class UserDaoImpl implements UserDao {
@Override
public void insert(User user) {
jdbcTemplate.update("INSERT INTO user(name, age, address, email) VALUES(?, ?, ?, ?)",
user.getName(), user.getAge(), user.getAddress(), user.getEmail());
jdbcTemplate.update("INSERT INTO user(name, age, address, email) VALUES(?, ?, ?, ?)", user.getName(),
user.getAge(), user.getAddress(), user.getEmail());
}
@Override
@ -38,7 +37,7 @@ public class UserDaoImpl implements UserDao {
List<Object[]> params = new ArrayList<>();
users.forEach(user -> {
params.add(new Object[] { user.getName(), user.getAge(), user.getAddress(), user.getEmail() });
params.add(new Object[] {user.getName(), user.getAge(), user.getAddress(), user.getEmail()});
});
jdbcTemplate.batchUpdate(sql, params);
}
@ -56,8 +55,8 @@ public class UserDaoImpl implements UserDao {
@Override
public void update(User user) {
jdbcTemplate.update("UPDATE user SET name=?, age=?, address=?, email=? WHERE id=?",
user.getName(), user.getAge(), user.getAddress(), user.getEmail(), user.getId());
jdbcTemplate.update("UPDATE user SET name=?, age=?, address=?, email=? WHERE id=?", user.getName(),
user.getAge(), user.getAddress(), user.getEmail(), user.getId());
}
@Override
@ -78,7 +77,7 @@ public class UserDaoImpl implements UserDao {
public User queryByName(String name) {
try {
return jdbcTemplate.queryForObject("SELECT * FROM user WHERE name = ?",
new BeanPropertyRowMapper<>(User.class), name);
new BeanPropertyRowMapper<>(User.class), name);
} catch (EmptyResultDataAccessException e) {
return null;
}
@ -99,15 +98,12 @@ public class UserDaoImpl implements UserDao {
jdbcTemplate.execute("DROP TABLE IF EXISTS user");
String sqlStatement =
"CREATE TABLE user (\n"
+ " id BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'ID',\n"
"CREATE TABLE user (\n" + " id BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'ID',\n"
+ " name VARCHAR(255) NOT NULL DEFAULT '' COMMENT '用户名',\n"
+ " age INT(3) NOT NULL DEFAULT 0 COMMENT '年龄',\n"
+ " address VARCHAR(255) NOT NULL DEFAULT '' COMMENT '地址',\n"
+ " email VARCHAR(255) NOT NULL DEFAULT '' COMMENT '邮件',\n"
+ " PRIMARY KEY (id),\n"
+ " UNIQUE (name)\n"
+ ");";
+ " email VARCHAR(255) NOT NULL DEFAULT '' COMMENT '邮件',\n" + " PRIMARY KEY (id),\n"
+ " UNIQUE (name)\n" + ");";
jdbcTemplate.execute(sqlStatement);
}

View File

@ -1,15 +1,16 @@
<?xml version="1.0" encoding="UTF-8" ?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%boldYellow(%thread)] [%highlight(%-5level)] %boldGreen(%c{36}.%M) - %boldBlue(%m%n)
</pattern>
</encoder>
</appender>
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%boldYellow(%thread)] [%highlight(%-5level)] %boldGreen(%c{36}.%M) -
%boldBlue(%m%n)
</pattern>
</encoder>
</appender>
<logger name="io.github.dunwu.javadb" level="INFO" />
<logger name="io.github.dunwu.javadb" level="INFO"/>
<root level="INFO">
<appender-ref ref="CONSOLE" />
</root>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
</root>
</configuration>

View File

@ -7,12 +7,12 @@
-- Create the user table
DROP TABLE IF EXISTS `user`;
CREATE TABLE `user` (
`id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'ID',
`name` VARCHAR(255) NOT NULL DEFAULT '' COMMENT '用户名',
`age` INT(3) NOT NULL DEFAULT 0 COMMENT '年龄',
`address` VARCHAR(255) NOT NULL DEFAULT '' COMMENT '地址',
`email` VARCHAR(255) NOT NULL DEFAULT '' COMMENT '邮件',
PRIMARY KEY (`id`),
UNIQUE (`name`)
`id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'ID',
`name` VARCHAR(255) NOT NULL DEFAULT '' COMMENT '用户名',
`age` INT(3) NOT NULL DEFAULT 0 COMMENT '年龄',
`address` VARCHAR(255) NOT NULL DEFAULT '' COMMENT '地址',
`email` VARCHAR(255) NOT NULL DEFAULT '' COMMENT '邮件',
PRIMARY KEY (`id`),
UNIQUE (`name`)
);

View File

@ -1,8 +1,8 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -10,7 +10,6 @@ import java.sql.*;
/**
* Mysql
*
* @author Zhang Peng
* @see https://dev.mysql.com/doc/connector-j/5.1/en/
*/
@ -32,7 +31,7 @@ public class MysqlDemoTest {
private static Connection connection;
@BeforeClass
@BeforeAll
public static void beforeClass() {
try {
final String DB_URL = String.format("jdbc:mysql://%s:%s/%s", DB_HOST, DB_PORT, DB_SCHEMA);
@ -43,7 +42,7 @@ public class MysqlDemoTest {
}
}
@AfterClass
@AfterAll
public static void afterClass() {
try {
if (connection != null) {

View File

@ -1,14 +1,12 @@
package io.github.dunwu.javadb;
package io.github.dunwu.javadb.mysql.springboot;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
@ -16,8 +14,7 @@ import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
@Rollback
@RunWith(SpringRunner.class)
@SpringBootTest(classes = { SpringBootDataJdbcApplication.class })
@SpringBootTest(classes = {SpringBootDataJdbcApplication.class})
public class SpringBootDataJdbcTest {
private static final Logger log = LoggerFactory.getLogger(SpringBootDataJdbcTest.class);
@ -25,7 +22,7 @@ public class SpringBootDataJdbcTest {
@Autowired
private UserDao userDAO;
@Before
@BeforeEach
public void before() {
userDAO.truncate();
}

View File

@ -1,127 +1,78 @@
<?xml version="1.0"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-redis</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>1.8</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.6.3</version>
</parent>
<spring.version>5.2.9.RELEASE</spring.version>
<logback.version>1.2.3</logback.version>
<jedis.version>2.9.0</jedis.version>
<redisson.version>3.7.2</redisson.version>
<junit.version>4.13.1</junit.version>
</properties>
<groupId>io.github.dunwu</groupId>
<artifactId>javadb-redis</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<!-- database begin -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
</dependency>
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson</artifactId>
</dependency>
<!-- database end -->
<properties>
<redisson.version>3.7.2</redisson.version>
</properties>
<!-- log start -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
<!-- log end -->
<!-- spring begin -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<!-- spring end -->
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<!-- test end -->
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-framework-bom</artifactId>
<version>${spring.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-json</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<!-- database begin -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>${jedis.version}</version>
</dependency>
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson</artifactId>
<version>${redisson.version}</version>
</dependency>
<!-- database end -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- log begin -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-parent</artifactId>
<version>${logback.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<!-- log end -->
<!-- database begin -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
</dependency>
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson</artifactId>
<version>3.16.8</version>
</dependency>
<!-- database end -->
<!-- test begin -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- test end -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>${project.artifactId}</finalName>
<resources>
<resource>
<filtering>true</filtering>
<directory>src/main/resources</directory>
<includes>
<include>logback.xml</include>
</includes>
</resource>
</resources>
</build>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson</artifactId>
<version>${redisson.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,78 @@
package io.github.dunwu.javadb.redis.springboot;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.*;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-10-14
*/
@Configuration
public class RedisAutoConfiguration {
@Autowired
private ObjectMapper objectMapper;
@Bean
public HashOperations<String, String, Object> hashOperations(RedisTemplate<String, Object> redisTemplate) {
return redisTemplate.opsForHash();
}
@Bean
public ListOperations<String, Object> listOperations(RedisTemplate<String, Object> redisTemplate) {
return redisTemplate.opsForList();
}
@Bean
@Primary
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
// Serialize all fields and getters/setters; visibility ANY covers both private and public members
objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
// Record type information for non-final classes only; final classes such as String or Integer would throw an exception
objectMapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
// Use Jackson2JsonRedisSerializer for Redis values (the default is JDK serialization)
Jackson2JsonRedisSerializer serializer = new Jackson2JsonRedisSerializer(Object.class);
serializer.setObjectMapper(objectMapper);
RedisTemplate<String, Object> template = new RedisTemplate<>();
// Set the connection factory
template.setConnectionFactory(factory);
// Serialize values as JSON
template.setValueSerializer(serializer);
// Use StringRedisSerializer for Redis keys
template.setKeySerializer(new StringRedisSerializer());
// Configure hash key and hash value serializers
template.setHashKeySerializer(new StringRedisSerializer());
template.setHashValueSerializer(serializer);
template.afterPropertiesSet();
return template;
}
@Bean
public SetOperations<String, Object> setOperations(RedisTemplate<String, Object> redisTemplate) {
return redisTemplate.opsForSet();
}
@Bean
public ValueOperations<String, Object> valueOperations(RedisTemplate<String, Object> redisTemplate) {
return redisTemplate.opsForValue();
}
@Bean
public ZSetOperations<String, Object> zsetOperations(RedisTemplate<String, Object> redisTemplate) {
return redisTemplate.opsForZSet();
}
}
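For reference, a minimal usage sketch of the template configured above: keys go through StringRedisSerializer, values are written as Jackson JSON with type information, so a value read back as Object can be cast to its original type. The service below is illustrative and not part of this diff:

import io.github.dunwu.javadb.redis.springboot.data.User;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.stereotype.Service;

@Service
class CacheDemo {

    private final ValueOperations<String, Object> valueOperations;

    CacheDemo(ValueOperations<String, Object> valueOperations) {
        this.valueOperations = valueOperations;
    }

    void roundTrip() {
        // stored as JSON (with type info) under a plain String key
        valueOperations.set("spring-boot:demo", new User(3L, "demo", 20, "demo address", "demo@163.com"));
        User cached = (User) valueOperations.get("spring-boot:demo");
    }
}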

View File

@ -0,0 +1,41 @@
package io.github.dunwu.javadb.redis.springboot;
import io.github.dunwu.javadb.redis.springboot.data.User;
import io.github.dunwu.javadb.redis.springboot.data.UserService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SpringBootDataRedisApplication implements CommandLineRunner {
private static final Logger log = LoggerFactory.getLogger(SpringBootDataRedisApplication.class);
private final UserService userService;
public SpringBootDataRedisApplication(UserService userService) {
this.userService = userService;
}
public static void main(String[] args) {
SpringApplication.run(SpringBootDataRedisApplication.class, args);
}
@Override
public void run(String... args) throws Exception {
User user = new User(1L, "张三", 21, "南京", "xxx@163.com");
User user2 = new User(2L, "李四", 28, "上海", "xxx@163.com");
userService.setUser(user);
userService.setUser(user2);
User result = userService.getUser(user.getId());
User result2 = userService.getUser(user2.getId());
log.info(result.toString());
log.info(result2.toString());
}
}

View File

@ -0,0 +1,42 @@
package io.github.dunwu.javadb.redis.springboot.data;
import lombok.Data;
import lombok.ToString;
import java.io.Serializable;
@Data
@ToString
public class User implements Serializable {
private static final long serialVersionUID = 4142994984277644695L;
private Long id;
private String name;
private Integer age;
private String address;
private String email;
public User() {
}
public User(String name, Integer age, String address, String email) {
this.name = name;
this.age = age;
this.address = address;
this.email = email;
}
public User(Long id, String name, Integer age, String address, String email) {
this.id = id;
this.name = name;
this.age = age;
this.address = address;
this.email = email;
}
}

View File

@ -0,0 +1,13 @@
package io.github.dunwu.javadb.redis.springboot.data;
/**
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-10-14
*/
public interface UserService {
User getUser(Long id);
void setUser(User user);
}

View File

@ -0,0 +1,31 @@
package io.github.dunwu.javadb.redis.springboot.data;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
/**
* @author <a href="mailto:forbreak@163.com">Zhang Peng</a>
* @since 2019-10-14
*/
@Service
public class UserServiceImpl implements UserService {
public static final String DEFAULT_KEY = "spring-boot:user";
private final RedisTemplate redisTemplate;
public UserServiceImpl(RedisTemplate redisTemplate) {
this.redisTemplate = redisTemplate;
}
@Override
public User getUser(Long id) {
return (User) redisTemplate.opsForHash().get(DEFAULT_KEY, id.toString());
}
@Override
public void setUser(User user) {
redisTemplate.opsForHash().put(DEFAULT_KEY, user.getId().toString(), user);
}
}

View File

@ -0,0 +1,8 @@
spring.redis.database = 0
spring.redis.host = localhost
spring.redis.port = 6379
spring.redis.password =
spring.redis.jedis.pool.max-active = 8
spring.redis.jedis.pool.max-wait = -1
spring.redis.jedis.pool.max-idle = 8
spring.redis.jedis.pool.min-idle = 0
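These spring.redis.* properties are consumed by Spring Boot's Redis auto-configuration when building the RedisConnectionFactory; the jedis.pool.* entries only take effect when the Jedis client declared in the pom above is the one actually selected. A quick way to sanity-check the resulting connection, as a sketch rather than part of this diff:

import org.springframework.data.redis.connection.RedisConnectionFactory;

class RedisPing {

    // Sends PING over the connection configured by the spring.redis.* properties.
    static String ping(RedisConnectionFactory factory) {
        return factory.getConnection().ping();
    }
}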

Some files were not shown because too many files have changed in this diff.