mirror of https://github.com/dunwu/db-tutorial.git
reformat codes
parent 25d9f5cf13
commit aba34051fa
@@ -1,32 +1,25 @@
-# EditorConfig helps developers define and maintain consistent
-# coding styles between different editors and IDEs
-# http://editorconfig.org
-# 所有文件换行使用 Unix like 风格(LF),bat 文件使用 win 风格(CRLF)
-# 缩进 java 4 个空格,其他所有文件 2 个空格
+# EditorConfig 用于在 IDE 中检查代码的基本 Code Style
+# @see: https://editorconfig.org/
+# 配置说明:
+# 所有文件换行使用 Unix 风格(LF),*.bat 文件使用 Windows 风格(CRLF)
+# java / sh 文件缩进 4 个空格,其他所有文件缩进 2 个空格
 
 root = true
 
 [*]
-# Unix-style newlines with a newline ending every file
 end_of_line = lf
 
-# Change these settings to your own preference
 indent_size = 2
-indent_style = space
+indent_style = tab
 max_line_length = 120
 
-# We recommend you to keep these unchanged
 charset = utf-8
 trim_trailing_whitespace = true
 insert_final_newline = true
 
-[*.bat]
+[*.{bat, cmd}]
 end_of_line = crlf
 
-[*.java]
-indent_size = 4
-
-[*.sql]
+[*.{java, groovy, kt, sh}]
 indent_size = 4
 
 [*.md]
@@ -1,56 +1,57 @@
 <?xml version="1.0"?>
 <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+xmlns="http://maven.apache.org/POM/4.0.0">
 <modelVersion>4.0.0</modelVersion>
 <groupId>io.github.dunwu</groupId>
 <artifactId>javadb-h2</artifactId>
 <version>1.0.0</version>
 <packaging>jar</packaging>
 
 <properties>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <java.version>1.8</java.version>
 <maven.compiler.source>${java.version}</maven.compiler.source>
 <maven.compiler.target>${java.version}</maven.compiler.target>
 
 <junit.version>4.12</junit.version>
 </properties>
 
 <dependencies>
 <!-- db begin -->
 <dependency>
 <groupId>com.h2database</groupId>
 <artifactId>h2</artifactId>
 </dependency>
 <!-- db end -->
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 </dependency>
 <!-- test end -->
 </dependencies>
 
 <dependencyManagement>
 <dependencies>
 <!-- database begin -->
 <dependency>
 <groupId>com.h2database</groupId>
 <artifactId>h2</artifactId>
 <version>1.4.197</version>
 <scope>test</scope>
 </dependency>
 <!-- database end -->
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 <version>${junit.version}</version>
 <scope>test</scope>
 </dependency>
 <!-- test end -->
 </dependencies>
 </dependencyManagement>
 </project>
@@ -43,8 +43,7 @@ public class H2JdbcTest01 {
 CONNECTION = DriverManager.getConnection(JDBC_URL3, USER, PASSWORD);
 // 创建sql声明
 STATEMENT = CONNECTION.createStatement();
-}
-catch (ClassNotFoundException | SQLException e) {
+} catch (ClassNotFoundException | SQLException e) {
 e.printStackTrace();
 }
 }
@@ -56,8 +55,7 @@ public class H2JdbcTest01 {
 STATEMENT.close();
 // 关闭连接
 CONNECTION.close();
-}
-catch (SQLException e) {
+} catch (SQLException e) {
 e.printStackTrace();
 }
 }
@@ -85,8 +83,7 @@ public class H2JdbcTest01 {
 while (rs.next()) {
 System.out.println(rs.getString("id") + "," + rs.getString("name") + "," + rs.getString("sex"));
 }
-}
-catch (SQLException e) {
+} catch (SQLException e) {
 Assert.assertTrue(e.getMessage(), true);
 }
 }
@@ -1,62 +1,63 @@
 <?xml version="1.0"?>
 <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+xmlns="http://maven.apache.org/POM/4.0.0">
 <modelVersion>4.0.0</modelVersion>
 <groupId>io.github.dunwu</groupId>
 <artifactId>javadb-hbase</artifactId>
 <version>1.0.0</version>
 <packaging>jar</packaging>
 
 <properties>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <java.version>1.8</java.version>
 <maven.compiler.source>${java.version}</maven.compiler.source>
 <maven.compiler.target>${java.version}</maven.compiler.target>
 
 <hbase.version>1.3.1</hbase.version>
 <junit.version>4.12</junit.version>
 <dunwu.version>0.4.1</dunwu.version>
 </properties>
 
 <dependencies>
 <dependency>
 <groupId>org.apache.hbase</groupId>
 <artifactId>hbase-client</artifactId>
 </dependency>
 <dependency>
 <groupId>io.github.dunwu</groupId>
 <artifactId>dunwu-common</artifactId>
 </dependency>
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 </dependency>
 <!-- test end -->
 </dependencies>
 
 <dependencyManagement>
 <dependencies>
 <dependency>
 <groupId>org.apache.hbase</groupId>
 <artifactId>hbase-client</artifactId>
 <version>${hbase.version}</version>
 </dependency>
 <dependency>
 <groupId>io.github.dunwu</groupId>
 <artifactId>dunwu-common</artifactId>
 <version>${dunwu.version}</version>
 </dependency>
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 <version>${junit.version}</version>
 <scope>test</scope>
 </dependency>
 <!-- test end -->
 </dependencies>
 </dependencyManagement>
 </project>
@@ -3,14 +3,14 @@ package io.github.dunwu.javadb;
 public enum HBaseConstant {
 
 HBASE_ZOOKEEPER_QUORUM("hbase.zookeeper.quorum"), HBASE_ENABLE("hbase.enable"), HBASE_MASTER(
 "hbase.master"), HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT(
 "hbase.zookeeper.property.clientPort"), HBASE_HCONNECTION_THREADS_MAX(
 "hbase.hconnection.threads.max"), HBASE_HCONNECTION_THREADS_CORE(
 "hbase.hconnection.threads.core"), ZOOKEEPER_ZNODE_PARENT(
 "zookeeper.znode.parent"), HBASE_COLUMN_FAMILY(
 "hbase.column.family"), HBASE_EXECUTOR_NUM(
 "hbase.executor.num"), HBASE_IPC_POOL_SIZE(
 "hbase.client.ipc.pool.size");
 
 private String key;
 
@@ -21,5 +21,4 @@ public enum HBaseConstant {
 public String key() {
 return key;
 }
-
 }
@@ -4,7 +4,7 @@ package io.github.dunwu.javadb;
 * HBase Cell 实体
 *
 * @author Zhang Peng
-* @date 2019-03-04
+* @since 2019-03-04
 */
 public class HbaseCellEntity {
 
@@ -79,7 +79,7 @@ public class HbaseCellEntity {
 @Override
 public String toString() {
 return "HbaseCellEntity{" + "table='" + table + '\'' + ", row='" + row + '\'' + ", colFamily='" + colFamily
 + '\'' + ", col='" + col + '\'' + ", val='" + val + '\'' + '}';
 }
 
 }
@@ -16,13 +16,16 @@ import java.util.Properties;
 * HBase 服务实现类
 *
 * @author Zhang Peng
-* @date 2019-03-01
+* @since 2019-03-01
 */
 public class HbaseHelper {
 
 private static final String FIRST_CONFIG = "classpath://config//hbase.properties";
+
 private static final String SECOND_CONFIG = "classpath://application.properties";
+
 private HbaseProperties hbaseProperties;
+
 private Connection connection;
 
 public HbaseHelper() throws Exception {
@@ -38,22 +41,17 @@ public class HbaseHelper {
 String quorum = PropertiesUtil.getString(properties, HBaseConstant.HBASE_ZOOKEEPER_QUORUM.key(), "");
 String hbaseMaster = PropertiesUtil.getString(properties, HBaseConstant.HBASE_MASTER.key(), "");
 String clientPort = PropertiesUtil.getString(properties,
 HBaseConstant.HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT.key(), "");
 String znodeParent = PropertiesUtil.getString(properties, HBaseConstant.ZOOKEEPER_ZNODE_PARENT.key(), "");
 String maxThreads = PropertiesUtil.getString(properties, HBaseConstant.HBASE_HCONNECTION_THREADS_MAX.key(), "");
 String coreThreads = PropertiesUtil.getString(properties, HBaseConstant.HBASE_HCONNECTION_THREADS_CORE.key(),
 "");
 String columnFamily = PropertiesUtil.getString(properties, HBaseConstant.HBASE_COLUMN_FAMILY.key(), "");
 String hbaseExecutorsNum = PropertiesUtil.getString(properties, HBaseConstant.HBASE_EXECUTOR_NUM.key(), "10");
 String ipcPoolSize = PropertiesUtil.getString(properties, HBaseConstant.HBASE_IPC_POOL_SIZE.key(), "1");
 
 hbaseProperties = new HbaseProperties(hbaseMaster, quorum, clientPort, znodeParent, maxThreads, coreThreads,
 columnFamily, hbaseExecutorsNum, ipcPoolSize);
-init(hbaseProperties);
-}
-
-public HbaseHelper(HbaseProperties hbaseProperties) throws Exception {
-this.hbaseProperties = hbaseProperties;
 init(hbaseProperties);
 }
 
@@ -61,16 +59,14 @@ public class HbaseHelper {
 Properties properties = null;
 try {
 properties = PropertiesUtil.loadFromFile(FIRST_CONFIG);
-}
-catch (Exception e) {
+} catch (Exception e) {
 e.printStackTrace();
 }
 
 if (properties == null) {
 try {
 properties = PropertiesUtil.loadFromFile(SECOND_CONFIG);
-}
-catch (Exception e) {
+} catch (Exception e) {
 e.printStackTrace();
 return null;
 }
@@ -81,32 +77,35 @@ public class HbaseHelper {
 private void init(HbaseProperties hbaseProperties) throws Exception {
 try {
 // @formatter:off
 Configuration configuration = HBaseConfiguration.create();
 configuration.set(HBaseConstant.HBASE_ZOOKEEPER_QUORUM.key(), hbaseProperties.getQuorum());
 configuration.set(HBaseConstant.HBASE_MASTER.key(), hbaseProperties.getHbaseMaster());
 configuration.set(HBaseConstant.HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT.key(),
 hbaseProperties.getClientPort());
 configuration.set(HBaseConstant.HBASE_HCONNECTION_THREADS_MAX.key(),
 hbaseProperties.getMaxThreads());
 configuration.set(HBaseConstant.HBASE_HCONNECTION_THREADS_CORE.key(),
 hbaseProperties.getCoreThreads());
 configuration.set(HBaseConstant.ZOOKEEPER_ZNODE_PARENT.key(), hbaseProperties.getZnodeParent());
 configuration.set(HBaseConstant.HBASE_COLUMN_FAMILY.key(), hbaseProperties.getColumnFamily());
 configuration.set(HBaseConstant.HBASE_IPC_POOL_SIZE.key(), hbaseProperties.getIpcPoolSize());
 // @formatter:on
 connection = ConnectionFactory.createConnection(configuration);
-}
-catch (Exception e) {
+} catch (Exception e) {
 throw new Exception("hbase链接未创建", e);
 }
 }
 
+public HbaseHelper(HbaseProperties hbaseProperties) throws Exception {
+this.hbaseProperties = hbaseProperties;
+init(hbaseProperties);
+}
+
 public void destory() {
 if (connection != null) {
 try {
 connection.close();
-}
-catch (IOException e) {
+} catch (IOException e) {
 e.printStackTrace();
 }
 }
@@ -125,12 +124,10 @@ public class HbaseHelper {
 try {
 if (StringUtils.isEmpty(tableName)) {
 hTableDescriptors = connection.getAdmin().listTables();
-}
-else {
+} else {
 hTableDescriptors = connection.getAdmin().listTables(tableName);
 }
-}
-catch (IOException e) {
+} catch (IOException e) {
 throw new Exception("执行失败", e);
 }
 return hTableDescriptors;
@@ -145,7 +142,7 @@ public class HbaseHelper {
 * </ul>
 */
 public void createTable(String tableName) throws Exception {
-createTable(tableName, new String[] { hbaseProperties.getColumnFamily() });
+createTable(tableName, new String[] {hbaseProperties.getColumnFamily()});
 }
 
 /**
@@ -173,8 +170,7 @@ public class HbaseHelper {
 }
 
 connection.getAdmin().createTable(tableDescriptor);
-}
-catch (IOException e) {
+} catch (IOException e) {
 e.printStackTrace();
 }
 }
@@ -187,6 +183,7 @@ public class HbaseHelper {
 * <li>disable 'tablename'</li>
 * <li>drop 't1'</li>
 * </ul>
+*
 * @param name
 */
 public void dropTable(String name) throws Exception {
@@ -203,8 +200,7 @@ public class HbaseHelper {
 admin.disableTable(tableName);
 admin.deleteTable(tableName);
 }
-}
-catch (IOException e) {
+} catch (IOException e) {
 e.printStackTrace();
 }
 }
@@ -216,7 +212,7 @@ public class HbaseHelper {
 
 Put put = new Put(Bytes.toBytes(hBaseTableDTO.getRow()));
 put.addColumn(Bytes.toBytes(hBaseTableDTO.getColFamily()), Bytes.toBytes(hBaseTableDTO.getCol()),
 Bytes.toBytes(hBaseTableDTO.getVal()));
 return put;
 }
 
@@ -230,8 +226,7 @@ public class HbaseHelper {
 table = connection.getTable(TableName.valueOf(tableName));
 Delete delete = new Delete(Bytes.toBytes(rowKey));
 table.delete(delete);
-}
-catch (IOException e) {
+} catch (IOException e) {
 e.printStackTrace();
 throw new Exception("delete失败");
 }
@@ -279,14 +274,12 @@ public class HbaseHelper {
 if (StringUtils.isNotEmpty(colFamily)) {
 if (StringUtils.isNotEmpty(qualifier)) {
 get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(qualifier));
-}
-else {
+} else {
 get.addFamily(Bytes.toBytes(colFamily));
 }
 }
 result = table.get(get);
-}
-catch (IOException e) {
+} catch (IOException e) {
 throw new Exception("查询时发生异常");
 }
 return result;
@@ -301,7 +294,7 @@ public class HbaseHelper {
 }
 
 public Result[] scan(String tableName, String colFamily, String qualifier, String startRow, String stopRow)
 throws Exception {
 if (connection == null) {
 throw new Exception("hbase链接未创建");
 }
@@ -333,11 +326,9 @@ public class HbaseHelper {
 list.add(result);
 result = resultScanner.next();
 }
-}
-catch (IOException e) {
+} catch (IOException e) {
 e.printStackTrace();
-}
-finally {
+} finally {
 if (resultScanner != null) {
 resultScanner.close();
 }
@@ -366,8 +357,7 @@ public class HbaseHelper {
 list.add(result);
 result = resultScanner.next();
 }
-}
-catch (IOException e) {
+} catch (IOException e) {
 e.printStackTrace();
 }
 return list;
@@ -33,7 +33,7 @@ public class HbaseProperties implements Serializable {
 }
 
 public HbaseProperties(String hbaseMaster, String quorum, String clientPort, String znodeParent, String maxThreads,
 String coreThreads, String columnFamily, String hbaseExecutorsNum, String ipcPoolSize) {
 this.hbaseMaster = hbaseMaster;
 this.quorum = quorum;
 this.clientPort = clientPort;
@@ -120,9 +120,9 @@ public class HbaseProperties implements Serializable {
 @Override
 public String toString() {
 return "HbaseProperties{" + "quorum='" + quorum + '\'' + ", clientPort='" + clientPort + '\''
 + ", znodeParent='" + znodeParent + '\'' + ", maxThreads='" + maxThreads + '\'' + ", coreThreads='"
 + coreThreads + '\'' + ", columnFamily='" + columnFamily + '\'' + ", hbaseExecutorsNum='"
 + hbaseExecutorsNum + '\'' + '}';
 }
 
 }
@@ -8,7 +8,7 @@ import org.junit.Test;
 
 /**
 * @author Zhang Peng
-* @date 2019-03-29
+* @since 2019-03-29
 */
 public class HbaseHelperTest {
 
@@ -18,8 +18,7 @@ public class HbaseHelperTest {
 public static void BeforeClass() {
 try {
 hbaseHelper = new HbaseHelper();
-}
-catch (Exception e) {
+} catch (Exception e) {
 e.printStackTrace();
 }
 }
@@ -39,13 +38,13 @@ public class HbaseHelperTest {
 
 @Test
 public void createTable() throws Exception {
-hbaseHelper.createTable("table1", new String[] { "columnFamliy1", "columnFamliy2" });
+hbaseHelper.createTable("table1", new String[] {"columnFamliy1", "columnFamliy2"});
 HTableDescriptor[] table1s = hbaseHelper.listTables("table1");
 if (table1s == null || table1s.length <= 0) {
 Assert.fail();
 }
 
-hbaseHelper.createTable("table2", new String[] { "columnFamliy1", "columnFamliy2" });
+hbaseHelper.createTable("table2", new String[] {"columnFamliy1", "columnFamliy2"});
 table1s = hbaseHelper.listTables("table2");
 if (table1s == null || table1s.length <= 0) {
 Assert.fail();
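For orientation, here is a minimal usage sketch of the HbaseHelper API exercised by the hunks above (constructors, createTable, listTables, dropTable, destory). It is an assumption-laden sketch, not code from the commit: it presumes the class and its HBase configuration files are on the classpath.

```java
import org.apache.hadoop.hbase.HTableDescriptor;

// Hypothetical driver class; only the HbaseHelper calls below appear in the diffed sources.
public class HbaseHelperUsageSketch {
    public static void main(String[] args) throws Exception {
        // No-arg constructor loads hbase.properties / application.properties (see the init hunks above).
        HbaseHelper hbaseHelper = new HbaseHelper();

        // Create a table with two column families, then list it back, mirroring HbaseHelperTest.
        hbaseHelper.createTable("table1", new String[] {"columnFamliy1", "columnFamliy2"});
        HTableDescriptor[] tables = hbaseHelper.listTables("table1");
        System.out.println("tables found: " + (tables == null ? 0 : tables.length));

        // Clean up: drop the table and close the underlying HBase connection.
        hbaseHelper.dropTable("table1");
        hbaseHelper.destory();
    }
}
```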
@@ -1,113 +1,114 @@
 <?xml version="1.0"?>
 <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+xmlns="http://maven.apache.org/POM/4.0.0">
 <modelVersion>4.0.0</modelVersion>
 <groupId>io.github.dunwu</groupId>
 <artifactId>javadb-mysql</artifactId>
 <version>1.0.0</version>
 <packaging>jar</packaging>
 
 <properties>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <java.version>1.8</java.version>
 <maven.compiler.source>${java.version}</maven.compiler.source>
 <maven.compiler.target>${java.version}</maven.compiler.target>
 
 <spring.version>4.3.13.RELEASE</spring.version>
 <logback.version>1.2.3</logback.version>
 <junit.version>4.12</junit.version>
 </properties>
 
 <dependencies>
 <!-- db begin -->
 <dependency>
 <groupId>mysql</groupId>
 <artifactId>mysql-connector-java</artifactId>
 <version>5.1.45</version>
 </dependency>
 <dependency>
 <groupId>org.apache.commons</groupId>
 <artifactId>commons-pool2</artifactId>
 <version>2.5.0</version>
 </dependency>
 <!-- db end -->
 
 <!-- log start -->
 <dependency>
 <groupId>ch.qos.logback</groupId>
 <artifactId>logback-classic</artifactId>
 </dependency>
 <!-- log end -->
 
 <!-- spring begin -->
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-context-support</artifactId>
 </dependency>
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-test</artifactId>
 <scope>test</scope>
 </dependency>
 <!-- spring end -->
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 </dependency>
 <!-- test end -->
 </dependencies>
 
 <dependencyManagement>
 <dependencies>
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-framework-bom</artifactId>
 <version>${spring.version}</version>
 <type>pom</type>
 <scope>import</scope>
 </dependency>
 
 <!-- database begin -->
 <dependency>
 <groupId>redis.clients</groupId>
 <artifactId>jedis</artifactId>
 <version>${jedis.version}</version>
 </dependency>
 <!-- database end -->
 
 <!-- log begin -->
 <dependency>
 <groupId>ch.qos.logback</groupId>
 <artifactId>logback-parent</artifactId>
 <version>${logback.version}</version>
 <type>pom</type>
 <scope>import</scope>
 </dependency>
 <!-- log end -->
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 <version>${junit.version}</version>
 <scope>test</scope>
 </dependency>
 <!-- test end -->
 </dependencies>
 </dependencyManagement>
 
 <build>
 <finalName>${project.artifactId}</finalName>
 <resources>
 <resource>
 <filtering>true</filtering>
 <directory>src/main/resources</directory>
 <includes>
 <include>logback.xml</include>
 </includes>
 </resource>
 </resources>
 </build>
 </project>
@@ -6,8 +6,6 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.sql.*;
-
 /**
 * Mysql 测试例
 *
@@ -17,11 +15,17 @@ import java.sql.*;
 public class MysqlDemoTest {
 
 private static final String DB_HOST = "localhost";
 
 private static final String DB_PORT = "3306";
 
 private static final String DB_SCHEMA = "sakila";
 
 private static final String DB_USER = "root";
 
 private static final String DB_PASSWORD = "root";
 
 private static Logger logger = LoggerFactory.getLogger(MysqlDemoTest.class);
 
 private static Statement statement;
 
 private static Connection connection;
@@ -35,8 +39,7 @@ public class MysqlDemoTest {
 // DriverManager.getConnection("jdbc:mysql://localhost:3306/sakila?" +
 // "user=root&password=root");
 statement = connection.createStatement();
-}
-catch (SQLException e) {
+} catch (SQLException e) {
 e.printStackTrace();
 }
 }
@@ -47,8 +50,7 @@ public class MysqlDemoTest {
 if (connection != null) {
 connection.close();
 }
-}
-catch (SQLException e) {
+} catch (SQLException e) {
 e.printStackTrace();
 }
 }
@@ -67,10 +69,9 @@ public class MysqlDemoTest {
 Date lastUpdate = rs.getDate("last_update");
 // 输出数据
 logger.debug("actor_id: {}, first_name: {}, last_name: {}, last_update: {}", id, firstName, lastName,
 lastUpdate.toLocalDate());
 }
-}
-catch (SQLException e) {
+} catch (SQLException e) {
 e.printStackTrace();
 }
 }
@@ -3,43 +3,43 @@
 <!-- logback中一共有5种有效级别,分别是TRACE、DEBUG、INFO、WARN、ERROR,优先级依次从低到高 -->
 <configuration scan="true" scanPeriod="60 seconds" debug="false">
 
-<property name="FILE_NAME" value="javadb"/>
+<property name="FILE_NAME" value="javadb" />
 
 <!-- 将记录日志打印到控制台 -->
 <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
 <encoder>
 <pattern>%d{HH:mm:ss.SSS} [%thread] [%-5p] %c{36}.%M - %m%n</pattern>
 </encoder>
 </appender>
 
 <!-- RollingFileAppender begin -->
 <appender name="ALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
 <!-- 根据时间来制定滚动策略 -->
 <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
 <fileNamePattern>${user.dir}/logs/${FILE_NAME}-all.%d{yyyy-MM-dd}.log</fileNamePattern>
 <maxHistory>30</maxHistory>
 </rollingPolicy>
 
 <!-- 根据文件大小来制定滚动策略 -->
 <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
 <maxFileSize>30MB</maxFileSize>
 </triggeringPolicy>
 
 <encoder>
 <pattern>%d{HH:mm:ss.SSS} [%thread] [%-5p] %c{36}.%M - %m%n</pattern>
 </encoder>
 </appender>
 <!-- RollingFileAppender end -->
 
 <!-- logger begin -->
 <!-- 本项目的日志记录,分级打印 -->
 <logger name="io.github.dunwu" level="TRACE">
-<appender-ref ref="ALL"/>
+<appender-ref ref="ALL" />
 </logger>
 
 <root level="TRACE">
-<appender-ref ref="STDOUT"/>
+<appender-ref ref="STDOUT" />
 </root>
 <!-- logger end -->
 
 </configuration>
@@ -1,126 +1,127 @@
 <?xml version="1.0"?>
 <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+xmlns="http://maven.apache.org/POM/4.0.0">
 <modelVersion>4.0.0</modelVersion>
 <groupId>io.github.dunwu</groupId>
 <artifactId>javadb-redis</artifactId>
 <version>1.0.0</version>
 <packaging>jar</packaging>
 
 <properties>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <java.version>1.8</java.version>
 <maven.compiler.source>${java.version}</maven.compiler.source>
 <maven.compiler.target>${java.version}</maven.compiler.target>
 
 <spring.version>4.3.13.RELEASE</spring.version>
 <logback.version>1.2.3</logback.version>
 <jedis.version>2.9.0</jedis.version>
 <redisson.version>3.7.2</redisson.version>
 <junit.version>4.12</junit.version>
 </properties>
 
 <dependencies>
 <!-- database begin -->
 <dependency>
 <groupId>redis.clients</groupId>
 <artifactId>jedis</artifactId>
 </dependency>
 <dependency>
 <groupId>org.redisson</groupId>
 <artifactId>redisson</artifactId>
 </dependency>
 <!-- database end -->
 
 <!-- log start -->
 <dependency>
 <groupId>ch.qos.logback</groupId>
 <artifactId>logback-classic</artifactId>
 </dependency>
 <!-- log end -->
 
 <!-- spring begin -->
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-beans</artifactId>
 </dependency>
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-context-support</artifactId>
 </dependency>
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-core</artifactId>
 </dependency>
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-test</artifactId>
 <scope>test</scope>
 </dependency>
 <!-- spring end -->
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 </dependency>
 <!-- test end -->
 </dependencies>
 
 <dependencyManagement>
 <dependencies>
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-framework-bom</artifactId>
 <version>${spring.version}</version>
 <type>pom</type>
 <scope>import</scope>
 </dependency>
 
 <!-- database begin -->
 <dependency>
 <groupId>redis.clients</groupId>
 <artifactId>jedis</artifactId>
 <version>${jedis.version}</version>
 </dependency>
 <dependency>
 <groupId>org.redisson</groupId>
 <artifactId>redisson</artifactId>
 <version>${redisson.version}</version>
 </dependency>
 <!-- database end -->
 
 <!-- log begin -->
 <dependency>
 <groupId>ch.qos.logback</groupId>
 <artifactId>logback-parent</artifactId>
 <version>${logback.version}</version>
 <type>pom</type>
 <scope>import</scope>
 </dependency>
 <!-- log end -->
 
 <!-- test begin -->
 <dependency>
 <groupId>junit</groupId>
 <artifactId>junit</artifactId>
 <version>${junit.version}</version>
 <scope>test</scope>
 </dependency>
 <!-- test end -->
 </dependencies>
 </dependencyManagement>
 
 <build>
 <finalName>${project.artifactId}</finalName>
 <resources>
 <resource>
 <filtering>true</filtering>
 <directory>src/main/resources</directory>
 <includes>
 <include>logback.xml</include>
 </includes>
 </resource>
 </resources>
 </build>
 </project>
@@ -38,8 +38,7 @@ public class JedisDemoTest {
 try {
 jedis.ping();
 logger.debug("jedis 连接成功。");
-}
-catch (JedisConnectionException e) {
+} catch (JedisConnectionException e) {
 e.printStackTrace();
 }
 }
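As context for the hunk above, a minimal, self-contained sketch of the same connectivity check with the Jedis client, assuming a local Redis server on the default port 6379 (the host, port, and class name here are assumptions, not part of the commit):

```java
import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;

public class JedisPingSketch {
    public static void main(String[] args) {
        // Connect to a locally running Redis instance.
        Jedis jedis = new Jedis("localhost", 6379);
        try {
            // ping() returns "PONG" when the server is reachable.
            System.out.println("ping -> " + jedis.ping());
        } catch (JedisConnectionException e) {
            // Connection-level failures surface as JedisConnectionException, as in the test above.
            e.printStackTrace();
        } finally {
            jedis.close();
        }
    }
}
```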
@@ -20,7 +20,7 @@ import java.util.Set;
 */
 @ActiveProfiles("test")
 @RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration(locations = { "classpath:/applicationContext.xml" })
+@ContextConfiguration(locations = {"classpath:/applicationContext.xml"})
 public class JedisPoolDemoTest {
 
 private static Logger logger = LoggerFactory.getLogger(JedisPoolDemoTest.class);
@@ -7,7 +7,7 @@ import org.springframework.context.support.ClassPathXmlApplicationContext;
 
 /**
 * @author Zhang Peng
-* @date 2018/6/19
+* @since 2018/6/19
 */
 public class RedissonStandaloneTest {
 
@@ -1,12 +1,12 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 xmlns="http://www.springframework.org/schema/beans"
 xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"
 default-lazy-init="false">
 
 <description>Spring基础配置</description>
 
-<import resource="classpath:/config.xml"/>
-<import resource="classpath:/redis.xml"/>
+<import resource="classpath:/config.xml" />
+<import resource="classpath:/redis.xml" />
 
 </beans>
@@ -1,19 +1,20 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
-xmlns="http://www.springframework.org/schema/beans"
-xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+<beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xmlns:context="http://www.springframework.org/schema/context"
+xmlns="http://www.springframework.org/schema/beans"
+xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
 http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd">
 
 <!-- 开发环境配置文件 -->
 <beans profile="dev">
 <context:property-placeholder ignore-resource-not-found="true" location="classpath*:/properties/application.properties,
-classpath*:/properties/application-dev.properties"/>
+classpath*:/properties/application-dev.properties" />
 </beans>
 
 <!-- 测试环境配置文件 -->
 <beans profile="test">
 <context:property-placeholder ignore-resource-not-found="true" location="classpath*:/properties/application.properties,
-classpath*:/properties/application-test.properties"/>
+classpath*:/properties/application-test.properties" />
 </beans>
 
 </beans>
@@ -3,43 +3,43 @@
<!-- logback defines 5 effective levels: TRACE, DEBUG, INFO, WARN and ERROR, in ascending order of priority -->
<configuration scan="true" scanPeriod="60 seconds" debug="false">

    <property name="FILE_NAME" value="javadb" />

    <!-- Print log records to the console -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{HH:mm:ss.SSS} [%thread] [%-5p] %c{36}.%M - %m%n</pattern>
        </encoder>
    </appender>

    <!-- RollingFileAppender begin -->
    <appender name="ALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Roll over based on time -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${user.dir}/logs/${FILE_NAME}.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>30</maxHistory>
        </rollingPolicy>

        <!-- Trigger roll-over based on file size -->
        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
            <maxFileSize>30MB</maxFileSize>
        </triggeringPolicy>

        <encoder>
            <pattern>%d{HH:mm:ss.SSS} [%thread] [%-5p] %c{36}.%M - %m%n</pattern>
        </encoder>
    </appender>
    <!-- RollingFileAppender end -->

    <!-- logger begin -->
    <!-- Logging for this project, printed by level -->
    <logger name="io.github.dunwu" level="TRACE">
        <appender-ref ref="ALL" />
    </logger>

    <root level="TRACE">
        <appender-ref ref="STDOUT" />
    </root>
    <!-- logger end -->

</configuration>
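A minimal usage sketch (illustrative, not part of this commit): with the configuration above on the classpath, any SLF4J logger picks up these appenders. The class and messages below are assumptions; it presumes slf4j-api and logback-classic as dependencies.

package io.github.dunwu.db;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {

    private static final Logger log = LoggerFactory.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        // The logger name starts with io.github.dunwu, so records go to the ALL rolling-file appender
        // (via the named logger) and to the STDOUT console appender (via the root logger).
        // %c{36}.%M in the pattern renders the abbreviated logger name plus the calling method.
        log.trace("trace message");
        log.info("answer = {}", 42);
    }
}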
@@ -4,5 +4,4 @@ redis.port = 6379
redis.timeout = 3000
redis.password = zp
redis.database = 0
log.path = ./
@@ -4,5 +4,4 @@ redis.port = 6379
redis.timeout = 3000
redis.password = zp
redis.database = 0
log.path = /home/zp/log
@@ -1,21 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.springframework.org/schema/beans"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">

    <description>redis configuration</description>

    <!-- Redis configuration -->
    <bean id="jedisPoolConfig" class="redis.clients.jedis.JedisPoolConfig">
        <property name="maxTotal" value="${jedis.pool.maxTotal}" />
        <property name="maxIdle" value="${jedis.pool.maxIdle}" />
        <property name="maxWaitMillis" value="${jedis.pool.maxWaitMillis}" />
        <property name="testOnBorrow" value="${jedis.pool.testOnBorrow}" />
    </bean>

    <!-- Jedis pool configuration -->
    <bean id="jedisPool" class="redis.clients.jedis.JedisPool" destroy-method="destroy" depends-on="jedisPoolConfig">
        <constructor-arg ref="jedisPoolConfig" />
        <constructor-arg type="java.lang.String" value="${redis.host}" />
        <constructor-arg type="int" value="${redis.port}" />
    </bean>
</beans>
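For reference, a rough sketch of how the jedisPool bean wired above might be used from Java (not part of this commit). The literal host, port and pool sizes stand in for the ${jedis.pool.*} and ${redis.*} properties, and it assumes a Jedis version in which Jedis implements Closeable.

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

public class JedisPoolSketch {

    public static void main(String[] args) {
        // Mirrors the XML wiring above, but built programmatically.
        JedisPoolConfig config = new JedisPoolConfig();
        config.setMaxTotal(100);
        config.setMaxIdle(10);
        JedisPool pool = new JedisPool(config, "localhost", 6379);

        // Borrow a connection from the pool, use it, and return it automatically.
        try (Jedis jedis = pool.getResource()) {
            jedis.set("greeting", "hello");
            System.out.println(jedis.get("greeting"));
        }
        pool.destroy();
    }
}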
@@ -1,21 +1,21 @@
<beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:redisson="http://redisson.org/schema/redisson"
       xmlns="http://www.springframework.org/schema/beans"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
       http://www.springframework.org/schema/beans/spring-beans.xsd
       http://redisson.org/schema/redisson
       http://redisson.org/schema/redisson/redisson.xsd">
    <bean id="stringCodec" class="org.redisson.client.codec.StringCodec" />
    <redisson:client id="standalone"
                     name="aliasName1,aliasName2"
                     codec-ref="stringCodec">
        <redisson:single-server address="redis://127.0.0.1:6379"
                                connection-pool-size="500"
                                idle-connection-timeout="10000"
                                connect-timeout="10000"
                                timeout="3000"
                                ping-timeout="30000"
                                reconnection-timeout="30000"
                                database="0" />
    </redisson:client>
</beans>
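A rough programmatic counterpart of the redisson:client definition above, useful when no Spring context is available (a sketch, not part of this commit; the timeouts and pool size mirror the attributes above, the key name is made up, and it assumes Redisson 3.x).

import org.redisson.Redisson;
import org.redisson.api.RBucket;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;

public class RedissonSketch {

    public static void main(String[] args) {
        // Programmatic equivalent of the <redisson:single-server> attributes above.
        Config config = new Config();
        config.useSingleServer()
              .setAddress("redis://127.0.0.1:6379")
              .setConnectionPoolSize(500)
              .setIdleConnectionTimeout(10000)
              .setConnectTimeout(10000)
              .setTimeout(3000)
              .setDatabase(0);
        RedissonClient redisson = Redisson.create(config);

        // Simple string bucket round-trip.
        RBucket<String> bucket = redisson.getBucket("greeting");
        bucket.set("hello");
        System.out.println(bucket.get());

        redisson.shutdown();
    }
}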
@@ -1,60 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.1.9.RELEASE</version>
    </parent>

    <groupId>io.github.dunwu</groupId>
    <artifactId>javadb-sqlite</artifactId>
    <version>1.0.0</version>
    <packaging>jar</packaging>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.xerial</groupId>
            <artifactId>sqlite-jdbc</artifactId>
            <version>3.25.2</version>
        </dependency>
    </dependencies>

    <build>
        <finalName>${project.artifactId}</finalName>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <mainClass>io.github.dunwu.db.SqliteApplication</mainClass>
                </configuration>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
@@ -5,7 +5,7 @@ import org.springframework.boot.builder.SpringApplicationBuilder;

/**
 * @author Zhang Peng
 * @since 2019-03-05
 */
public class SqliteApplication implements CommandLineRunner {

@@ -7,28 +7,19 @@ import java.sql.Statement;

/**
 * @author Zhang Peng
 * @since 2019-03-05
 */
public class SqliteDemo {

    public static void main(String[] args) {
        SqliteDemo.dropTable();
        SqliteDemo.createTable();
        SqliteDemo.insert();
        SqliteDemo.select();
        SqliteDemo.delete();
        SqliteDemo.select();
        SqliteDemo.update();
        SqliteDemo.select();
    }

    public static void dropTable() {
@@ -41,14 +32,32 @@
            statement.executeUpdate(sql);
            statement.close();
            connection.close();
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            System.exit(0);
        }
        System.out.println("Drop table successfully.");
    }

    public static void createTable() {
        try {
            Class.forName("org.sqlite.JDBC");
            Connection connection = DriverManager.getConnection("jdbc:sqlite:test.db");

            Statement statement = connection.createStatement();
            String sql = new StringBuilder().append("CREATE TABLE COMPANY ").append("(ID INT PRIMARY KEY NOT NULL,")
                .append(" NAME TEXT NOT NULL, ").append(" AGE INT NOT NULL, ")
                .append(" ADDRESS CHAR(50), ").append(" SALARY REAL)").toString();
            statement.executeUpdate(sql);
            statement.close();
            connection.close();
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            System.exit(0);
        }
        System.out.println("Create table successfully.");
    }

    public static void insert() {
        try {
            Class.forName("org.sqlite.JDBC");
@@ -57,7 +66,7 @@ public class SqliteDemo {

            Statement statement = connection.createStatement();
            String sql = "INSERT INTO COMPANY (ID,NAME,AGE,ADDRESS,SALARY) "
                + "VALUES (1, 'Paul', 32, 'California', 20000.00 );";
            statement.executeUpdate(sql);

            sql = "INSERT INTO COMPANY (ID,NAME,AGE,ADDRESS,SALARY) " + "VALUES (2, 'Allen', 25, 'Texas', 15000.00 );";
@@ -67,20 +76,46 @@ public class SqliteDemo {
            statement.executeUpdate(sql);

            sql = "INSERT INTO COMPANY (ID,NAME,AGE,ADDRESS,SALARY) "
                + "VALUES (4, 'Mark', 25, 'Rich-Mond ', 65000.00 );";
            statement.executeUpdate(sql);

            statement.close();
            connection.commit();
            connection.close();
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            System.exit(0);
        }
        System.out.println("Insert table successfully.");
    }

    public static void select() {
        try {
            Class.forName("org.sqlite.JDBC");
            Connection connection = DriverManager.getConnection("jdbc:sqlite:test.db");
            connection.setAutoCommit(false);

            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery("SELECT * FROM COMPANY;");
            while (resultSet.next()) {
                int id = resultSet.getInt("id");
                String name = resultSet.getString("name");
                int age = resultSet.getInt("age");
                String address = resultSet.getString("address");
                float salary = resultSet.getFloat("salary");
                String format = String.format("ID = %s, NAME = %s, AGE = %d, ADDRESS = %s, SALARY = %f", id, name, age,
                    address, salary);
                System.out.println(format);
            }
            resultSet.close();
            statement.close();
            connection.close();
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            System.exit(0);
        }
    }

    public static void delete() {
        try {
            Class.forName("org.sqlite.JDBC");
@@ -100,8 +135,7 @@ public class SqliteDemo {

            statement.close();
            connection.close();
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            System.exit(0);
        }
@@ -121,51 +155,11 @@ public class SqliteDemo {

            statement.close();
            connection.close();
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            System.exit(0);
        }
        System.out.println("Update table successfully.");
    }

}
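SqliteDemo above opens and closes its JDBC resources by hand in every method. A possible variant (a sketch, not part of this commit; the query and the age threshold are made up) uses try-with-resources and a PreparedStatement so that the connection, statement and result set are closed even when a query fails:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class SqliteQuerySketch {

    public static void main(String[] args) {
        String sql = "SELECT ID, NAME, SALARY FROM COMPANY WHERE AGE > ?";
        // try-with-resources closes everything even if executeQuery throws;
        // with a JDBC 4 driver such as sqlite-jdbc, Class.forName is not needed.
        try (Connection connection = DriverManager.getConnection("jdbc:sqlite:test.db");
             PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.setInt(1, 25);
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    System.out.printf("ID = %d, NAME = %s, SALARY = %.2f%n",
                        resultSet.getInt("ID"), resultSet.getString("NAME"), resultSet.getDouble("SALARY"));
                }
            }
        } catch (SQLException e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
        }
    }
}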
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <include resource="org/springframework/boot/logging/logback/base.xml" />
    <logger name="io.github.dunwu" level="DEBUG" />
</configuration>
@@ -1,18 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>io.github.dunwu</groupId>
    <artifactId>javadb</artifactId>
    <version>1.0.0</version>
    <packaging>pom</packaging>

    <modules>
        <module>javadb-h2</module>
        <module>javadb-hbase</module>
        <module>javadb-mysql</module>
        <module>javadb-redis</module>
        <module>javadb-sqlite</module>
    </modules>
</project>
@@ -1,53 +1,54 @@
<?xml version="1.0"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
         xmlns="http://maven.apache.org/POM/4.0.0">
    <modelVersion>4.0.0</modelVersion>
    <groupId>io.github.dunwu</groupId>
    <artifactId>db-middleware-flyway</artifactId>
    <version>1.0.0</version>
    <packaging>jar</packaging>
    <name>DB :: Middleware :: Flyway</name>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <java.version>1.8</java.version>
        <maven.compiler.source>${java.version}</maven.compiler.source>
        <maven.compiler.target>${java.version}</maven.compiler.target>
    </properties>

    <dependencies>
        <!-- db begin -->
        <dependency>
            <groupId>org.flywaydb</groupId>
            <artifactId>flyway-core</artifactId>
            <version>5.1.4</version>
        </dependency>
        <dependency>
            <groupId>com.h2database</groupId>
            <artifactId>h2</artifactId>
            <version>1.4.197</version>
        </dependency>
        <!-- db end -->
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.flywaydb</groupId>
                <artifactId>flyway-maven-plugin</artifactId>
                <version>5.1.4</version>
                <configuration>
                    <url>jdbc:h2:file:./target/io/github/dunwu/db/middleware</url>
                    <user>sa</user>
                </configuration>
                <dependencies>
                    <dependency>
                        <groupId>com.h2database</groupId>
                        <artifactId>h2</artifactId>
                        <version>1.4.197</version>
                    </dependency>
                </dependencies>
            </plugin>
        </plugins>
    </build>
</project>
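Besides the flyway-maven-plugin configured above (typically run with mvn flyway:migrate), migrations can also be triggered from code via flyway-core. A minimal sketch against the same H2 URL and user (not part of this commit; it assumes versioned SQL scripts such as V1__init.sql under the default classpath:db/migration location and the Flyway 5.x API):

import org.flywaydb.core.Flyway;

public class FlywayMigrateSketch {

    public static void main(String[] args) {
        // Same coordinates as the flyway-maven-plugin <configuration> above.
        Flyway flyway = new Flyway();
        flyway.setDataSource("jdbc:h2:file:./target/io/github/dunwu/db/middleware", "sa", "");
        // Applies any pending versioned migrations found on the default db/migration location.
        flyway.migrate();
    }
}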
@@ -124,23 +124,23 @@ VOTE_SCORE = 432


def article_vote(conn, user, article):
    # Compute the voting cutoff time for the article.
    cutoff = time.time() - ONE_WEEK_IN_SECONDS

    # Check whether the article can still be voted on.
    # (The publish time could also be read from the hash,
    # but the sorted set returns it as a float,
    # so it can be used directly without conversion.)
    if conn.zscore('time:', article) < cutoff:
        return

    # Extract the article ID from the article:id identifier.
    article_id = article.partition(':')[-1]

    # If this is the user's first vote on this article, increment its vote count and score.
    if conn.sadd('voted:' + article_id, user):
        conn.zincrby('score:', article, VOTE_SCORE)
        conn.hincrby(article, 'votes', 1)


# <end id="upvote-code"/>
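The same vote-once-per-user pattern can be written with Jedis for the Java modules in this repository. The sketch below is an illustration, not part of this commit, and simply mirrors the Python logic above (SADD as the first-vote check, then ZINCRBY/HINCRBY); class and method names are assumptions.

import redis.clients.jedis.Jedis;

public class ArticleVoteSketch {

    private static final int VOTE_SCORE = 432;
    private static final long ONE_WEEK_IN_SECONDS = 7 * 86400L;

    public static void articleVote(Jedis conn, String user, String article) {
        // Reject votes on articles older than one week, as in the cutoff check above.
        double cutoff = System.currentTimeMillis() / 1000.0 - ONE_WEEK_IN_SECONDS;
        Double posted = conn.zscore("time:", article);
        if (posted == null || posted < cutoff) {
            return;
        }
        String articleId = article.substring(article.indexOf(':') + 1);
        // SADD returns 1 only for a first-time voter, so the score is incremented at most once per user.
        if (conn.sadd("voted:" + articleId, user) == 1) {
            conn.zincrby("score:", VOTE_SCORE, article);
            conn.hincrBy(article, "votes", 1);
        }
    }
}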
@@ -149,31 +149,31 @@ def article_vote(conn, user, article):
# Listing 1-7
# <start id="post-article-code"/>
def post_article(conn, user, title, link):
    # Generate a new article ID.
    article_id = str(conn.incr('article:'))

    voted = 'voted:' + article_id
    # Add the posting user to the article's voted set,
    # then set that set to expire after one week (chapter 3 covers expiration in more detail).
    conn.sadd(voted, user)
    conn.expire(voted, ONE_WEEK_IN_SECONDS)

    now = time.time()
    article = 'article:' + article_id
    # Store the article information in a hash.
    conn.hmset(article, {
        'title': title,
        'link': link,
        'poster': user,
        'time': now,
        'votes': 1,
    })

    # Add the article to the time-ordered and score-ordered sorted sets.
    conn.zadd('score:', article, now + VOTE_SCORE)
    conn.zadd('time:', article, now)

    return article_id


# <end id="post-article-code"/>
@@ -185,20 +185,20 @@ ARTICLES_PER_PAGE = 25


def get_articles(conn, page, order='score:'):
    # Set the start and end indexes for fetching articles.
    start = (page - 1) * ARTICLES_PER_PAGE
    end = start + ARTICLES_PER_PAGE - 1

    # Fetch the article IDs.
    ids = conn.zrevrange(order, start, end)
    articles = []
    # Fetch the detailed information of each article by its ID.
    for id in ids:
        article_data = conn.hgetall(id)
        article_data['id'] = id
        articles.append(article_data)

    return articles


# <end id="fetch-articles-code"/>
@@ -207,14 +207,14 @@ def get_articles(conn, page, order='score:'):
# Listing 1-9
# <start id="add-remove-groups"/>
def add_remove_groups(conn, article_id, to_add=[], to_remove=[]):
    # Build the key name that stores the article information.
    article = 'article:' + article_id
    for group in to_add:
        # Add the article to the groups it belongs to.
        conn.sadd('group:' + group, article)
    for group in to_remove:
        # Remove the article from the group.
        conn.srem('group:' + group, article)


# <end id="add-remove-groups"/>
@@ -223,19 +223,19 @@ def add_remove_groups(conn, article_id, to_add=[], to_remove=[]):
# Listing 1-10
# <start id="fetch-articles-group"/>
def get_group_articles(conn, group, page, order='score:'):
    # Create a key for each ordering of each group.
    key = order + group
    # Check whether a cached sort result exists; if not, sort now.
    if not conn.exists(key):
        # Sort the group's articles by score or by publish time.
        conn.zinterstore(key,
                         ['group:' + group, order],
                         aggregate='max',
                         )
        # Have Redis delete this sorted set automatically after 60 seconds.
        conn.expire(key, 60)
    # Call the previously defined get_articles() to paginate and fetch the article data.
    return get_articles(conn, page, key)


# <end id="fetch-articles-group"/>
@ -243,58 +243,58 @@ def get_group_articles(conn, group, page, order='score:'):
|
||||||
# --------------- 以下是用于测试代码的辅助函数 --------------------------------
|
# --------------- 以下是用于测试代码的辅助函数 --------------------------------
|
||||||
|
|
||||||
class TestCh01(unittest.TestCase):
|
class TestCh01(unittest.TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
import redis
|
import redis
|
||||||
self.conn = redis.Redis(db=15)
|
self.conn = redis.Redis(db=15)
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
del self.conn
|
del self.conn
|
||||||
print
|
print
|
||||||
print
|
print
|
||||||
|
|
||||||
def test_article_functionality(self):
|
def test_article_functionality(self):
|
||||||
conn = self.conn
|
conn = self.conn
|
||||||
import pprint
|
import pprint
|
||||||
|
|
||||||
article_id = str(post_article(conn, 'username', 'A title', 'http://www.google.com'))
|
article_id = str(post_article(conn, 'username', 'A title', 'http://www.google.com'))
|
||||||
print "We posted a new article with id:", article_id
|
print "We posted a new article with id:", article_id
|
||||||
print
|
print
|
||||||
self.assertTrue(article_id)
|
self.assertTrue(article_id)
|
||||||
|
|
||||||
print "Its HASH looks like:"
|
print "Its HASH looks like:"
|
||||||
r = conn.hgetall('article:' + article_id)
|
r = conn.hgetall('article:' + article_id)
|
||||||
print r
|
print r
|
||||||
print
|
print
|
||||||
self.assertTrue(r)
|
self.assertTrue(r)
|
||||||
|
|
||||||
article_vote(conn, 'other_user', 'article:' + article_id)
|
article_vote(conn, 'other_user', 'article:' + article_id)
|
||||||
print "We voted for the article, it now has votes:",
|
print "We voted for the article, it now has votes:",
|
||||||
v = int(conn.hget('article:' + article_id, 'votes'))
|
v = int(conn.hget('article:' + article_id, 'votes'))
|
||||||
print v
|
print v
|
||||||
print
|
print
|
||||||
self.assertTrue(v > 1)
|
self.assertTrue(v > 1)
|
||||||
|
|
||||||
print "The currently highest-scoring articles are:"
|
print "The currently highest-scoring articles are:"
|
||||||
articles = get_articles(conn, 1)
|
articles = get_articles(conn, 1)
|
||||||
pprint.pprint(articles)
|
pprint.pprint(articles)
|
||||||
print
|
print
|
||||||
|
|
||||||
self.assertTrue(len(articles) >= 1)
|
self.assertTrue(len(articles) >= 1)
|
||||||
|
|
||||||
add_remove_groups(conn, article_id, ['new-group'])
|
add_remove_groups(conn, article_id, ['new-group'])
|
||||||
print "We added the article to a new group, other articles include:"
|
print "We added the article to a new group, other articles include:"
|
||||||
articles = get_group_articles(conn, 'new-group', 1)
|
articles = get_group_articles(conn, 'new-group', 1)
|
||||||
pprint.pprint(articles)
|
pprint.pprint(articles)
|
||||||
print
|
print
|
||||||
self.assertTrue(len(articles) >= 1)
|
self.assertTrue(len(articles) >= 1)
|
||||||
|
|
||||||
to_del = (
|
to_del = (
|
||||||
conn.keys('time:*') + conn.keys('voted:*') + conn.keys('score:*') +
|
conn.keys('time:*') + conn.keys('voted:*') + conn.keys('score:*') +
|
||||||
conn.keys('article:*') + conn.keys('group:*')
|
conn.keys('article:*') + conn.keys('group:*')
|
||||||
)
|
)
|
||||||
if to_del:
|
if to_del:
|
||||||
conn.delete(*to_del)
|
conn.delete(*to_del)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|
|
@@ -13,7 +13,7 @@ QUIT = False
# Listing 2-1
# <start id="_1311_14471_8266"/>
def check_token(conn, token):
    return conn.hget('login:', token)  # Try to fetch and return the user for the given token.


# <end id="_1311_14471_8266"/>
@@ -22,17 +22,17 @@ def check_token(conn, token):
# Listing 2-2
# <start id="_1311_14471_8265"/>
def update_token(conn, token, user, item=None):
    # Get the current timestamp.
    timestamp = time.time()
    # Maintain the mapping between the token and the logged-in user.
    conn.hset('login:', token, user)
    # Record when the token was last seen.
    conn.zadd('recent:', token, timestamp)
    if item:
        # Record the item the user viewed.
        conn.zadd('viewed:' + token, item, timestamp)
        # Trim old records, keeping only the 25 most recently viewed items.
        conn.zremrangebyrank('viewed:' + token, 0, -26)


# <end id="_1311_14471_8265"/>
@@ -45,27 +45,27 @@ LIMIT = 10000000


def clean_sessions(conn):
    while not QUIT:
        # Find out how many tokens currently exist.
        size = conn.zcard('recent:')
        # If the token count is under the limit, sleep and check again later.
        if size <= LIMIT:
            time.sleep(1)
            continue

        # Fetch the token IDs that should be removed.
        end_index = min(size - LIMIT, 100)
        tokens = conn.zrange('recent:', 0, end_index - 1)

        # Build the key names for the tokens that are about to be deleted.
        session_keys = []
        for token in tokens:
            session_keys.append('viewed:' + token)

        # Remove the oldest tokens.
        conn.delete(*session_keys)
        conn.hdel('login:', *tokens)
        conn.zrem('recent:', *tokens)


# <end id="_1311_14471_8270"/>
@@ -74,35 +74,35 @@ def clean_sessions(conn):
# Listing 2-4
# <start id="_1311_14471_8279"/>
def add_to_cart(conn, session, item, count):
    if count <= 0:
        # Remove the specified item from the shopping cart.
        conn.hrem('cart:' + session, item)
    else:
        # Add the specified item to the shopping cart.
        conn.hset('cart:' + session, item, count)
# <end id="_1311_14471_8279"/>


# Listing 2-5
# <start id="_1311_14471_8271"/>
def clean_full_sessions(conn):
    while not QUIT:
        size = conn.zcard('recent:')
        if size <= LIMIT:
            time.sleep(1)
            continue

        end_index = min(size - LIMIT, 100)
        sessions = conn.zrange('recent:', 0, end_index - 1)

        session_keys = []
        for sess in sessions:
            session_keys.append('viewed:' + sess)
            session_keys.append('cart:' + sess)  # This newly added line also deletes the shopping cart of the old session's user.

        conn.delete(*session_keys)
        conn.hdel('login:', *sessions)
        conn.zrem('recent:', *sessions)


# <end id="_1311_14471_8271"/>
@@ -111,23 +111,23 @@ def clean_full_sessions(conn):
# Listing 2-6
# <start id="_1311_14471_8291"/>
def cache_request(conn, request, callback):
    # For requests that cannot be cached, call the callback directly.
    if not can_cache(conn, request):
        return callback(request)

    # Convert the request into a simple string key for later lookups.
    page_key = 'cache:' + hash_request(request)
    # Try to find the cached page.
    content = conn.get(page_key)

    if not content:
        # If the page has not been cached yet, generate it.
        content = callback(request)
        # Put the newly generated page into the cache.
        conn.setex(page_key, content, 300)

    # Return the page.
    return content


# <end id="_1311_14471_8291"/>
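cache_request above is a classic cache-aside pattern. A rough Jedis equivalent for the Java modules in this repository (an illustration, not part of this commit; the can_cache check is omitted and the key derivation is simplified, so every name below is an assumption):

import java.util.function.Function;

import redis.clients.jedis.Jedis;

public class CacheRequestSketch {

    // Cache-aside: return the cached page if present, otherwise generate it and cache it for 300 seconds.
    public static String cacheRequest(Jedis conn, String request, Function<String, String> callback) {
        String pageKey = "cache:" + Integer.toHexString(request.hashCode());
        String content = conn.get(pageKey);
        if (content == null) {
            content = callback.apply(request);
            conn.setex(pageKey, 300, content);
        }
        return content;
    }

    public static void main(String[] args) {
        try (Jedis conn = new Jedis("localhost", 6379)) {
            System.out.println(cacheRequest(conn, "http://test.com/?item=itemX", r -> "content for " + r));
        }
    }
}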
@@ -136,10 +136,10 @@ def cache_request(conn, request, callback):
# Listing 2-7
# <start id="_1311_14471_8287"/>
def schedule_row_cache(conn, row_id, delay):
    # First set the delay value for the data row.
    conn.zadd('delay:', row_id, delay)
    # Schedule the row to be cached immediately.
    conn.zadd('schedule:', row_id, time.time())


# <end id="_1311_14471_8287"/>
@@ -148,44 +148,44 @@ def schedule_row_cache(conn, row_id, delay):
# Listing 2-8
# <start id="_1311_14471_8292"/>
def cache_rows(conn):
    while not QUIT:
        # Try to get the next row that should be cached, together with its scheduled timestamp;
        # the command returns a list containing zero or one tuple.
        next = conn.zrange('schedule:', 0, 0, withscores=True)
        now = time.time()
        if not next or next[0][1] > now:
            # No row needs to be cached right now, so sleep for 50 milliseconds and retry.
            time.sleep(.05)
            continue

        row_id = next[0][0]
        # Get the delay before the next scheduling.
        delay = conn.zscore('delay:', row_id)
        if delay <= 0:
            # The row no longer needs to be cached; remove it from the cache.
            conn.zrem('delay:', row_id)
            conn.zrem('schedule:', row_id)
            conn.delete('inv:' + row_id)
            continue

        # Read the data row.
        row = Inventory.get(row_id)
        # Update the schedule time and set the cached value.
        conn.zadd('schedule:', row_id, now + delay)
        conn.set('inv:' + row_id, json.dumps(row.to_dict()))
# <end id="_1311_14471_8292"/>


# Listing 2-9
# <start id="_1311_14471_8298"/>
def update_token(conn, token, user, item=None):
    timestamp = time.time()
    conn.hset('login:', token, user)
    conn.zadd('recent:', token, timestamp)
    if item:
        conn.zadd('viewed:' + token, item, timestamp)
        conn.zremrangebyrank('viewed:' + token, 0, -26)
        conn.zincrby('viewed:', item, -1)  # This line is newly added.


# <end id="_1311_14471_8298"/>
@@ -194,28 +194,28 @@ def update_token(conn, token, user, item=None):
# Listing 2-10
# <start id="_1311_14471_8288"/>
def rescale_viewed(conn):
    while not QUIT:
        # Remove all items ranked beyond 20,000.
        conn.zremrangebyrank('viewed:', 20000, -1)
        # Halve the recorded view counts.
        conn.zinterstore('viewed:', {'viewed:': .5})
        # Do this again in 5 minutes.
        time.sleep(300)
# <end id="_1311_14471_8288"/>


# Listing 2-11
# <start id="_1311_14471_8289"/>
def can_cache(conn, request):
    # Try to extract the item ID from the page.
    item_id = extract_item_id(request)
    # Check whether the page can be cached and whether it is an item page.
    if not item_id or is_dynamic(request):
        return False
    # Get the item's view-count rank.
    rank = conn.zrank('viewed:', item_id)
    # Decide whether to cache the page based on the item's rank.
    return rank is not None and rank < 10000


# <end id="_1311_14471_8289"/>
@ -224,190 +224,190 @@ def can_cache(conn, request):
|
||||||
# --------------- 以下是用于测试代码的辅助函数 --------------------------------
|
# --------------- 以下是用于测试代码的辅助函数 --------------------------------
|
||||||
|
|
||||||
def extract_item_id(request):
|
def extract_item_id(request):
|
||||||
parsed = urlparse.urlparse(request)
|
parsed = urlparse.urlparse(request)
|
||||||
query = urlparse.parse_qs(parsed.query)
|
query = urlparse.parse_qs(parsed.query)
|
||||||
return (query.get('item') or [None])[0]
|
return (query.get('item') or [None])[0]
|
||||||
|
|
||||||
|
|
||||||
def is_dynamic(request):
|
def is_dynamic(request):
|
||||||
parsed = urlparse.urlparse(request)
|
parsed = urlparse.urlparse(request)
|
||||||
query = urlparse.parse_qs(parsed.query)
|
query = urlparse.parse_qs(parsed.query)
|
||||||
return '_' in query
|
return '_' in query
|
||||||
|
|
||||||
|
|
||||||
def hash_request(request):
|
def hash_request(request):
|
||||||
return str(hash(request))
|
return str(hash(request))
|
||||||
|
|
||||||
|
|
||||||
class Inventory(object):
|
class Inventory(object):
|
||||||
def __init__(self, id):
|
def __init__(self, id):
|
||||||
self.id = id
|
self.id = id
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get(cls, id):
|
def get(cls, id):
|
||||||
return Inventory(id)
|
return Inventory(id)
|
||||||
|
|
||||||
def to_dict(self):
|
def to_dict(self):
|
||||||
return {'id': self.id, 'data': 'data to cache...', 'cached': time.time()}
|
return {'id': self.id, 'data': 'data to cache...', 'cached': time.time()}
|
||||||
|
|
||||||
|
|
||||||
class TestCh02(unittest.TestCase):
|
class TestCh02(unittest.TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
import redis
|
import redis
|
||||||
self.conn = redis.Redis(db=15)
|
self.conn = redis.Redis(db=15)
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
conn = self.conn
|
conn = self.conn
|
||||||
to_del = (
|
to_del = (
|
||||||
conn.keys('login:*') + conn.keys('recent:*') + conn.keys('viewed:*') +
|
conn.keys('login:*') + conn.keys('recent:*') + conn.keys('viewed:*') +
|
||||||
conn.keys('cart:*') + conn.keys('cache:*') + conn.keys('delay:*') +
|
conn.keys('cart:*') + conn.keys('cache:*') + conn.keys('delay:*') +
|
||||||
conn.keys('schedule:*') + conn.keys('inv:*'))
|
conn.keys('schedule:*') + conn.keys('inv:*'))
|
||||||
if to_del:
|
if to_del:
|
||||||
self.conn.delete(*to_del)
|
self.conn.delete(*to_del)
|
||||||
del self.conn
|
del self.conn
|
||||||
global QUIT, LIMIT
|
global QUIT, LIMIT
|
||||||
QUIT = False
|
QUIT = False
|
||||||
LIMIT = 10000000
|
LIMIT = 10000000
|
||||||
print
|
print
|
||||||
print
|
print
|
||||||
|
|
||||||
def test_login_cookies(self):
|
def test_login_cookies(self):
|
||||||
conn = self.conn
|
conn = self.conn
|
||||||
global LIMIT, QUIT
|
global LIMIT, QUIT
|
||||||
token = str(uuid.uuid4())
|
token = str(uuid.uuid4())
|
||||||
|
|
||||||
update_token(conn, token, 'username', 'itemX')
|
update_token(conn, token, 'username', 'itemX')
|
||||||
print "We just logged-in/updated token:", token
|
print "We just logged-in/updated token:", token
|
||||||
print "For user:", 'username'
|
print "For user:", 'username'
|
||||||
print
|
print
|
||||||
|
|
||||||
print "What username do we get when we look-up that token?"
|
print "What username do we get when we look-up that token?"
|
||||||
r = check_token(conn, token)
|
r = check_token(conn, token)
|
||||||
print r
|
print r
|
||||||
print
|
print
|
||||||
self.assertTrue(r)
|
self.assertTrue(r)
|
||||||
|
|
||||||
print "Let's drop the maximum number of cookies to 0 to clean them out"
|
print "Let's drop the maximum number of cookies to 0 to clean them out"
|
||||||
print "We will start a thread to do the cleaning, while we stop it later"
|
print "We will start a thread to do the cleaning, while we stop it later"
|
||||||
|
|
||||||
LIMIT = 0
|
LIMIT = 0
|
||||||
t = threading.Thread(target=clean_sessions, args=(conn,))
|
t = threading.Thread(target=clean_sessions, args=(conn,))
|
||||||
t.setDaemon(1) # to make sure it dies if we ctrl+C quit
|
t.setDaemon(1) # to make sure it dies if we ctrl+C quit
|
||||||
t.start()
|
t.start()
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
QUIT = True
|
QUIT = True
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
if t.isAlive():
|
if t.isAlive():
|
||||||
raise Exception("The clean sessions thread is still alive?!?")
|
raise Exception("The clean sessions thread is still alive?!?")
|
||||||
|
|
||||||
s = conn.hlen('login:')
|
s = conn.hlen('login:')
|
||||||
print "The current number of sessions still available is:", s
|
print "The current number of sessions still available is:", s
|
||||||
self.assertFalse(s)
|
self.assertFalse(s)
|
||||||
|
|
||||||
def test_shoppping_cart_cookies(self):
|
def test_shoppping_cart_cookies(self):
|
||||||
conn = self.conn
|
conn = self.conn
|
||||||
global LIMIT, QUIT
|
global LIMIT, QUIT
|
||||||
token = str(uuid.uuid4())
|
token = str(uuid.uuid4())
|
||||||
|
|
||||||
print "We'll refresh our session..."
|
print "We'll refresh our session..."
|
||||||
update_token(conn, token, 'username', 'itemX')
|
update_token(conn, token, 'username', 'itemX')
|
||||||
print "And add an item to the shopping cart"
|
print "And add an item to the shopping cart"
|
||||||
add_to_cart(conn, token, "itemY", 3)
|
add_to_cart(conn, token, "itemY", 3)
|
||||||
r = conn.hgetall('cart:' + token)
|
r = conn.hgetall('cart:' + token)
|
||||||
print "Our shopping cart currently has:", r
|
print "Our shopping cart currently has:", r
|
||||||
print
|
print
|
||||||
|
|
||||||
self.assertTrue(len(r) >= 1)
|
self.assertTrue(len(r) >= 1)
|
||||||
|
|
||||||
print "Let's clean out our sessions and carts"
|
print "Let's clean out our sessions and carts"
|
||||||
LIMIT = 0
|
LIMIT = 0
|
||||||
t = threading.Thread(target=clean_full_sessions, args=(conn,))
|
t = threading.Thread(target=clean_full_sessions, args=(conn,))
|
||||||
t.setDaemon(1) # to make sure it dies if we ctrl+C quit
|
t.setDaemon(1) # to make sure it dies if we ctrl+C quit
|
||||||
t.start()
|
t.start()
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
QUIT = True
|
QUIT = True
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
if t.isAlive():
|
if t.isAlive():
|
||||||
raise Exception("The clean sessions thread is still alive?!?")
|
raise Exception("The clean sessions thread is still alive?!?")
|
||||||
|
|
||||||
r = conn.hgetall('cart:' + token)
|
r = conn.hgetall('cart:' + token)
|
||||||
print "Our shopping cart now contains:", r
|
print "Our shopping cart now contains:", r
|
||||||
|
|
||||||
self.assertFalse(r)
|
self.assertFalse(r)
|
||||||
|
|
||||||
def test_cache_request(self):
|
def test_cache_request(self):
|
||||||
conn = self.conn
|
conn = self.conn
|
||||||
token = str(uuid.uuid4())
|
token = str(uuid.uuid4())
|
||||||
|
|
||||||
def callback(request):
|
def callback(request):
|
||||||
return "content for " + request
|
return "content for " + request
|
||||||
|
|
||||||
update_token(conn, token, 'username', 'itemX')
|
update_token(conn, token, 'username', 'itemX')
|
||||||
url = 'http://test.com/?item=itemX'
|
url = 'http://test.com/?item=itemX'
|
||||||
print "We are going to cache a simple request against", url
|
print "We are going to cache a simple request against", url
|
||||||
result = cache_request(conn, url, callback)
|
result = cache_request(conn, url, callback)
|
||||||
print "We got initial content:", repr(result)
|
print "We got initial content:", repr(result)
|
||||||
print
|
print
|
||||||
|
|
||||||
self.assertTrue(result)
|
self.assertTrue(result)
|
||||||
|
|
||||||
print "To test that we've cached the request, we'll pass a bad callback"
|
print "To test that we've cached the request, we'll pass a bad callback"
|
||||||
result2 = cache_request(conn, url, None)
|
result2 = cache_request(conn, url, None)
|
||||||
print "We ended up getting the same response!", repr(result2)
|
print "We ended up getting the same response!", repr(result2)
|
||||||
|
|
||||||
self.assertEquals(result, result2)
|
self.assertEquals(result, result2)
|
||||||
|
|
||||||
self.assertFalse(can_cache(conn, 'http://test.com/'))
|
self.assertFalse(can_cache(conn, 'http://test.com/'))
|
||||||
self.assertFalse(can_cache(conn, 'http://test.com/?item=itemX&_=1234536'))
|
self.assertFalse(can_cache(conn, 'http://test.com/?item=itemX&_=1234536'))
|
||||||
|
|
||||||
def test_cache_rows(self):
|
def test_cache_rows(self):
|
||||||
import pprint
|
import pprint
|
||||||
conn = self.conn
|
conn = self.conn
|
||||||
global QUIT
|
global QUIT
|
||||||
|
|
||||||
print "First, let's schedule caching of itemX every 5 seconds"
|
print "First, let's schedule caching of itemX every 5 seconds"
|
||||||
schedule_row_cache(conn, 'itemX', 5)
|
schedule_row_cache(conn, 'itemX', 5)
|
||||||
print "Our schedule looks like:"
|
print "Our schedule looks like:"
|
||||||
s = conn.zrange('schedule:', 0, -1, withscores=True)
|
s = conn.zrange('schedule:', 0, -1, withscores=True)
|
||||||
pprint.pprint(s)
|
pprint.pprint(s)
|
||||||
self.assertTrue(s)
|
self.assertTrue(s)
|
||||||
|
|
||||||
print "We'll start a caching thread that will cache the data..."
|
print "We'll start a caching thread that will cache the data..."
|
||||||
t = threading.Thread(target=cache_rows, args=(conn,))
|
t = threading.Thread(target=cache_rows, args=(conn,))
|
||||||
t.setDaemon(1)
|
t.setDaemon(1)
|
||||||
t.start()
|
t.start()
|
||||||
|
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
print "Our cached data looks like:"
|
print "Our cached data looks like:"
|
||||||
r = conn.get('inv:itemX')
|
r = conn.get('inv:itemX')
|
||||||
print repr(r)
|
print repr(r)
|
||||||
self.assertTrue(r)
|
self.assertTrue(r)
|
||||||
print
|
print
|
||||||
print "We'll check again in 5 seconds..."
|
print "We'll check again in 5 seconds..."
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
print "Notice that the data has changed..."
|
print "Notice that the data has changed..."
|
||||||
r2 = conn.get('inv:itemX')
|
r2 = conn.get('inv:itemX')
|
||||||
print repr(r2)
|
print repr(r2)
|
||||||
print
|
print
|
||||||
self.assertTrue(r2)
|
self.assertTrue(r2)
|
||||||
self.assertTrue(r != r2)
|
self.assertTrue(r != r2)
|
||||||
|
|
||||||
print "Let's force un-caching"
|
print "Let's force un-caching"
|
||||||
schedule_row_cache(conn, 'itemX', -1)
|
schedule_row_cache(conn, 'itemX', -1)
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
r = conn.get('inv:itemX')
|
r = conn.get('inv:itemX')
|
||||||
print "The cache was cleared?", not r
|
print "The cache was cleared?", not r
|
||||||
print
|
print
|
||||||
self.assertFalse(r)
|
self.assertFalse(r)
|
||||||
|
|
||||||
QUIT = True
|
QUIT = True
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
if t.isAlive():
|
if t.isAlive():
|
||||||
raise Exception("The database caching thread is still alive?!?")
|
raise Exception("The database caching thread is still alive?!?")
|
||||||
|
|
||||||
# We aren't going to bother with the top 10k requests are cached, as
|
# We aren't going to bother with the top 10k requests are cached, as
|
||||||
# we already tested it as part of the cached requests test.
|
# we already tested it as part of the cached requests test.
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|
|
@@ -114,18 +114,18 @@ True #

# <start id="exercise-update-token"/>
def update_token(conn, token, user, item=None):
    timestamp = time.time()
    conn.hset('login:', token, user)
    conn.zadd('recent:', token, timestamp)
    if item:
        key = 'viewed:' + token
        # Remove the item from the list if it is already there.
        conn.lrem(key, item)
        # Push the item onto the right end of the list so ZRANGE and LRANGE give the same result.
        conn.rpush(key, item)
        # Trim the list so that it keeps at most 25 items.
        conn.ltrim(key, -25, -1)
        conn.zincrby('viewed:', item, -1)


# <end id="exercise-update-token"/>
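The exercise above swaps the per-token viewed: sorted set for a list. A rough Jedis version of the same bookkeeping, for the Java modules in this repository (an illustration, not part of this commit; key names follow the Python code above):

import redis.clients.jedis.Jedis;

public class RecentlyViewedSketch {

    // Keep a per-token list of the 25 most recently viewed items, as in the exercise above.
    public static void recordView(Jedis conn, String token, String item) {
        String key = "viewed:" + token;
        conn.lrem(key, 0, item);            // remove the item if it is already in the list
        conn.rpush(key, item);              // push it onto the right end
        conn.ltrim(key, -25, -1);           // keep only the 25 newest entries
        conn.zincrby("viewed:", -1, item);  // global popularity ranking; lower score means more views
    }
}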
@@ -247,24 +247,24 @@ True #


def publisher(n):
    time.sleep(1)
    for i in xrange(n):
        conn.publish('channel', i)
        time.sleep(1)


def run_pubsub():
    threading.Thread(target=publisher, args=(3,)).start()
    pubsub = conn.pubsub()
    pubsub.subscribe(['channel'])
    count = 0
    for item in pubsub.listen():
        print item
        count += 1
        if count == 4:
            pubsub.unsubscribe()
        if count == 5:
            break


# Listing 3-11
@@ -380,23 +380,23 @@ def run_pubsub():

# <start id="exercise-fix-article-vote"/>
def article_vote(conn, user, article):
    # 在进行投票之前,先检查这篇文章是否仍然处于可投票的时间之内
    cutoff = time.time() - ONE_WEEK_IN_SECONDS
    posted = conn.zscore('time:', article)
    if posted < cutoff:
        return

    article_id = article.partition(':')[-1]
    pipeline = conn.pipeline()
    pipeline.sadd('voted:' + article_id, user)
    # 为文章的投票设置过期时间
    pipeline.expire('voted:' + article_id, int(posted - cutoff))
    if pipeline.execute()[0]:
        # 因为客户端可能会在执行 SADD/EXPIRE 之间或者执行 ZINCRBY/HINCRBY 之间掉线
        # 所以投票可能会不被计数,但这总比在执行 ZINCRBY/HINCRBY 之间失败并导致不完整的计数要好
        pipeline.zincrby('score:', article, VOTE_SCORE)
        pipeline.hincrby(article, 'votes', 1)
        pipeline.execute()


# <end id="exercise-fix-article-vote"/>
@@ -406,48 +406,48 @@ def article_vote(conn, user, article):

# 这段代码里面用到了本书第 4 章才会介绍的技术

def article_vote(conn, user, article):
    cutoff = time.time() - ONE_WEEK_IN_SECONDS
    posted = conn.zscore('time:', article)
    article_id = article.partition(':')[-1]
    voted = 'voted:' + article_id

    pipeline = conn.pipeline()
    while posted > cutoff:
        try:
            pipeline.watch(voted)
            if not pipeline.sismember(voted, user):
                pipeline.multi()
                pipeline.sadd(voted, user)
                pipeline.expire(voted, int(posted - cutoff))
                pipeline.zincrby('score:', article, VOTE_SCORE)
                pipeline.hincrby(article, 'votes', 1)
                pipeline.execute()
            else:
                pipeline.unwatch()
            return
        except redis.exceptions.WatchError:
            cutoff = time.time() - ONE_WEEK_IN_SECONDS


# <start id="exercise-fix-get_articles"/>
def get_articles(conn, page, order='score:'):
    start = max(page - 1, 0) * ARTICLES_PER_PAGE
    end = start + ARTICLES_PER_PAGE - 1

    ids = conn.zrevrangebyscore(order, start, end)

    pipeline = conn.pipeline()
    # 将等待执行的多个 HGETALL 调用放入流水线
    map(pipeline.hgetall, ids)  # A

    articles = []
    # 执行被流水线包含的多个 HGETALL 命令,
    # 并将执行所得的多个 id 添加到 articles 变量里面
    for id, article_data in zip(ids, pipeline.execute()):  # B
        article_data['id'] = id
        articles.append(article_data)

    return articles


# <end id="exercise-fix-get_articles"/>
@@ -477,31 +477,31 @@ THIRTY_DAYS = 30 * 86400


def check_token(conn, token):
    # 为了能够对登录令牌进行过期,我们将把它存储为字符串值
    return conn.get('login:' + token)


def update_token(conn, token, user, item=None):
    # 在一次命令调用里面,同时为字符串键设置值和过期时间
    conn.setex('login:' + token, user, THIRTY_DAYS)
    key = 'viewed:' + token
    if item:
        conn.lrem(key, item)
        conn.rpush(key, item)
        conn.ltrim(key, -25, -1)
        # 跟字符串不一样,Redis 并没有提供能够在操作列表的同时,
        # 为列表设置过期时间的命令,
        # 所以我们需要在这里调用 EXPIRE 命令来为列表设置过期时间
        conn.expire(key, THIRTY_DAYS)
        conn.zincrby('viewed:', item, -1)


def add_to_cart(conn, session, item, count):
    key = 'cart:' + session
    if count <= 0:
        conn.hrem(key, item)
    else:
        conn.hset(key, item, count)
    # 散列也和列表一样,需要通过调用 EXPIRE 命令来设置过期时间
    conn.expire(key, THIRTY_DAYS)
# <end id="exercise-no-recent-zset"/>
@@ -30,56 +30,56 @@ dir ./ # 共享选项,这个选项决定了快照

# 这个回调函数接受一个Redis连接和一个日志行作为参数,
# 并通过调用流水线对象的方法来执行Redis命令。
def process_logs(conn, path, callback):
    # 获取文件当前的处理进度。
    current_file, offset = conn.mget(
        'progress:file', 'progress:position')

    pipe = conn.pipeline()

    # 通过使用闭包(closure)来减少重复代码
    def update_progress():
        # 更新正在处理的日志文件的名字和偏移量。
        pipe.mset({
            'progress:file': fname,
            'progress:position': offset
        })
        # 这个语句负责执行实际的日志更新操作,
        # 并将日志文件的名字和目前的处理进度记录到Redis里面。
        pipe.execute()

    # 有序地遍历各个日志文件。
    for fname in sorted(os.listdir(path)):
        # 略过所有已处理的日志文件。
        if fname < current_file:
            continue

        inp = open(os.path.join(path, fname), 'rb')
        # 在接着处理一个因为系统崩溃而未能完成处理的日志文件时,略过已处理的内容。
        if fname == current_file:
            inp.seek(int(offset, 10))
        else:
            offset = 0

        current_file = None

        # 枚举函数遍历一个由文件行组成的序列,
        # 并返回任意多个二元组,
        # 每个二元组包含了行号lno和行数据line,
        # 其中行号从0开始。
        for lno, line in enumerate(inp):
            # 处理日志行。
            callback(pipe, line)
            # 更新已处理内容的偏移量。
            offset += int(offset) + len(line)

            # 每当处理完1000个日志行或者处理完整个日志文件的时候,
            # 都更新一次文件的处理进度。
            if not (lno + 1) % 1000:
                update_progress()

        update_progress()

        inp.close()


# <end id="process-logs-progress"/>
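# --- Illustrative sketch (not part of the original listing): a minimal callback that
# --- process_logs() could drive. It assumes whitespace-separated log lines whose first
# --- field is an IP address; the function name and the 'ip:count' key are made up.
def example_count_ip_callback(pipe, line):
    # Queue one ZINCRBY per log line; process_logs() executes the pipeline in batches.
    parts = line.split()
    if parts:
        pipe.zincrby('ip:count', parts[0], 1)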
@@ -88,29 +88,29 @@ def process_logs(conn, path, callback):

# 代码清单 4-3
# <start id="wait-for-sync"/>
def wait_for_sync(mconn, sconn):
    identifier = str(uuid.uuid4())
    # 将令牌添加至主服务器。
    mconn.zadd('sync:wait', identifier, time.time())

    # 如果有必要的话,等待从服务器完成同步。
    while sconn.info()['master_link_status'] != 'up':
        time.sleep(.001)

    # 等待从服务器接收数据更新。
    while not sconn.zscore('sync:wait', identifier):
        time.sleep(.001)

    # 最多只等待一秒钟。
    deadline = time.time() + 1.01
    while time.time() < deadline:
        # 检查数据更新是否已经被同步到了磁盘。
        if sconn.info()['aof_pending_bio_fsync'] == 0:
            break
        time.sleep(.001)

    # 清理刚刚创建的新令牌以及之前可能留下的旧令牌。
    mconn.zrem('sync:wait', identifier)
    mconn.zremrangebyscore('sync:wait', 0, time.time() - 900)


# <end id="wait-for-sync"/>
@@ -153,35 +153,35 @@ user@vpn-master ~:$

# 代码清单 4-5
# <start id="_1313_14472_8342"/>
def list_item(conn, itemid, sellerid, price):
    inventory = "inventory:%s" % sellerid
    item = "%s.%s" % (itemid, sellerid)
    end = time.time() + 5
    pipe = conn.pipeline()

    while time.time() < end:
        try:
            # 监视用户包裹发生的变化。
            pipe.watch(inventory)
            # 验证用户是否仍然持有指定的物品。
            if not pipe.sismember(inventory, itemid):
                # 如果指定的物品不在用户的包裹里面,
                # 那么停止对包裹键的监视并返回一个空值。
                pipe.unwatch()
                return None

            # 将指定的物品添加到物品买卖市场里面。
            pipe.multi()
            pipe.zadd("market:", item, price)
            pipe.srem(inventory, itemid)
            # 如果执行execute方法没有引发WatchError异常,
            # 那么说明事务执行成功,
            # 并且对包裹键的监视也已经结束。
            pipe.execute()
            return True
        # 用户的包裹已经发生了变化;重试。
        except redis.exceptions.WatchError:
            pass
    return False


# <end id="_1313_14472_8342"/>
@@ -190,39 +190,39 @@ def list_item(conn, itemid, sellerid, price):

# 代码清单 4-6
# <start id="_1313_14472_8353"/>
def purchase_item(conn, buyerid, itemid, sellerid, lprice):
    buyer = "users:%s" % buyerid
    seller = "users:%s" % sellerid
    item = "%s.%s" % (itemid, sellerid)
    inventory = "inventory:%s" % buyerid
    end = time.time() + 10
    pipe = conn.pipeline()

    while time.time() < end:
        try:
            # 对物品买卖市场以及买家账号信息的变化进行监视。
            pipe.watch("market:", buyer)

            # 检查指定物品的价格是否出现了变化,
            # 以及买家是否有足够的钱来购买指定的物品。
            price = pipe.zscore("market:", item)
            funds = int(pipe.hget(buyer, "funds"))
            if price != lprice or price > funds:
                pipe.unwatch()
                return None

            # 将买家支付的货款转移给卖家,并将卖家出售的物品移交给买家。
            pipe.multi()
            pipe.hincrby(seller, "funds", int(price))
            pipe.hincrby(buyer, "funds", int(-price))
            pipe.sadd(inventory, itemid)
            pipe.zrem("market:", item)
            pipe.execute()
            return True
        # 如果买家的账号或者物品买卖市场出现了变化,那么进行重试。
        except redis.exceptions.WatchError:
            pass

    return False


# <end id="_1313_14472_8353"/>
@@ -231,36 +231,36 @@ def purchase_item(conn, buyerid, itemid, sellerid, lprice):

# 代码清单 4-7
# <start id="update-token"/>
def update_token(conn, token, user, item=None):
    # 获取时间戳。
    timestamp = time.time()
    # 创建令牌与已登录用户之间的映射。
    conn.hset('login:', token, user)
    # 记录令牌最后一次出现的时间。
    conn.zadd('recent:', token, timestamp)
    if item:
        # 把用户浏览过的商品记录起来。
        conn.zadd('viewed:' + token, item, timestamp)
        # 移除旧商品,只记录最新浏览的25件商品。
        conn.zremrangebyrank('viewed:' + token, 0, -26)
        # 更新给定商品的被浏览次数。
        conn.zincrby('viewed:', item, -1)
# <end id="update-token"/>


# 代码清单 4-8
# <start id="update-token-pipeline"/>
def update_token_pipeline(conn, token, user, item=None):
    timestamp = time.time()
    # 设置流水线。
    pipe = conn.pipeline(False)  # A
    pipe.hset('login:', token, user)
    pipe.zadd('recent:', token, timestamp)
    if item:
        pipe.zadd('viewed:' + token, item, timestamp)
        pipe.zremrangebyrank('viewed:' + token, 0, -26)
        pipe.zincrby('viewed:', item, -1)
    # 执行那些被流水线包裹的命令。
    pipe.execute()  # B


# <end id="update-token-pipeline"/>
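# --- Illustrative usage sketch (not part of the original listing), assuming a local
# --- Redis server: both functions queue the same five commands, but the pipelined
# --- version sends them in a single network round trip instead of one per command.
def example_compare_round_trips(conn):
    update_token(conn, 'token-x', 'user-x', 'item-x')           # up to 5 round trips
    update_token_pipeline(conn, 'token-x', 'user-x', 'item-x')  # 1 round trip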
@@ -269,20 +269,20 @@ def update_token_pipeline(conn, token, user, item=None):

# 代码清单 4-9
# <start id="simple-pipeline-benchmark-code"/>
def benchmark_update_token(conn, duration):
    # 测试会分别执行update_token()函数和update_token_pipeline()函数。
    for function in (update_token, update_token_pipeline):
        # 设置计数器以及测试结束的条件。
        count = 0  # B
        start = time.time()  # B
        end = start + duration  # B
        while time.time() < end:
            count += 1
            # 调用两个函数的其中一个。
            function(conn, 'token', 'user', 'item')  # C
        # 计算函数的执行时长。
        delta = time.time() - start  # D
        # 打印测试结果。
        print function.__name__, count, delta, count / delta  # E


# <end id="simple-pipeline-benchmark-code"/>
@@ -316,75 +316,75 @@ LRANGE (first 600 elements): 9041.59 requests per second

# --------------- 以下是用于测试代码的辅助函数 --------------------------------

class TestCh04(unittest.TestCase):
    def setUp(self):
        import redis
        self.conn = redis.Redis(db=15)
        self.conn.flushdb()

    def tearDown(self):
        self.conn.flushdb()
        del self.conn
        print
        print

    # We can't test process_logs, as that would require writing to disk, which
    # we don't want to do.

    # We also can't test wait_for_sync, as we can't guarantee that there are
    # multiple Redis servers running with the proper configuration

    def test_list_item(self):
        import pprint
        conn = self.conn

        print "We need to set up just enough state so that a user can list an item"
        seller = 'userX'
        item = 'itemX'
        conn.sadd('inventory:' + seller, item)
        i = conn.smembers('inventory:' + seller)
        print "The user's inventory has:", i
        self.assertTrue(i)
        print

        print "Listing the item..."
        l = list_item(conn, item, seller, 10)
        print "Listing the item succeeded?", l
        self.assertTrue(l)
        r = conn.zrange('market:', 0, -1, withscores=True)
        print "The market contains:"
        pprint.pprint(r)
        self.assertTrue(r)
        self.assertTrue(any(x[0] == 'itemX.userX' for x in r))

    def test_purchase_item(self):
        self.test_list_item()
        conn = self.conn

        print "We need to set up just enough state so a user can buy an item"
        buyer = 'userY'
        conn.hset('users:userY', 'funds', 125)
        r = conn.hgetall('users:userY')
        print "The user has some money:", r
        self.assertTrue(r)
        self.assertTrue(r.get('funds'))
        print

        print "Let's purchase an item"
        p = purchase_item(conn, 'userY', 'itemX', 'userX', 10)
        print "Purchasing an item succeeded?", p
        self.assertTrue(p)
        r = conn.hgetall('users:userY')
        print "Their money is now:", r
        self.assertTrue(r)
        i = conn.smembers('inventory:' + buyer)
        print "Their inventory is now:", i
        self.assertTrue(i)
        self.assertTrue('itemX' in i)
        self.assertEquals(conn.zscore('market:', 'itemX.userX'), None)

    def test_benchmark_update_token(self):
        benchmark_update_token(self.conn, 5)


if __name__ == '__main__':
    unittest.main()
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -12,14 +12,14 @@ from datetime import date, timedelta


def readblocks(conn, key, blocksize=2 ** 17):
    lb = blocksize
    pos = 0
    while lb == blocksize:  # A
        block = conn.substr(key, pos, pos + blocksize - 1)  # B
        yield block  # C
        lb = len(block)  # C
        pos += lb  # C
    yield ''


# 代码清单 9-1
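# --- Illustrative usage sketch (not part of the original listing): stream a large
# --- string key in 128KB blocks and total its length without loading it all at once;
# --- the key name is an arbitrary example.
def example_total_length(conn, key='location:0'):
    total = 0
    for block in readblocks(conn, key):
        total += len(block)
    return total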
@@ -92,27 +92,27 @@ set-max-intset-entries 512 # 集合使用整数集合表示的限制条件

# <start id="rpoplpush-benchmark"/>
# 为了以不同的方式进行性能测试,函数需要对所有测试指标进行参数化处理。
def long_ziplist_performance(conn, key, length, passes, psize):
    # 删除指定的键,确保被测试数据的准确性。
    conn.delete(key)
    # 通过从右端推入指定数量的元素来对列表进行初始化。
    conn.rpush(key, *range(length))
    # 通过流水线来降低网络通信给测试带来的影响。
    pipeline = conn.pipeline(False)

    # 启动计时器。
    t = time.time()
    # 根据 passes 参数来决定流水线操作的执行次数。
    for p in xrange(passes):
        # 每个流水线操作都包含了 psize 次 RPOPLPUSH 命令调用。
        for pi in xrange(psize):
            # 每个 rpoplpush() 函数调用都会将列表最右端的元素弹出,
            # 并将它推入到同一个列表的左端。
            pipeline.rpoplpush(key, key)
        # 执行 psize 次 RPOPLPUSH 命令。
        pipeline.execute()

    # 计算每秒钟执行的 RPOPLPUSH 调用数量。
    return (passes * psize) / (time.time() - t or .001)


# <end id="rpoplpush-benchmark"/>
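# --- Illustrative usage sketch (not part of the original listing): compare RPOPLPUSH
# --- throughput for a short list against a much longer one to see where the ziplist
# --- encoding stops paying off; the sizes below are arbitrary example values.
def example_compare_ziplist_sizes(conn):
    small = long_ziplist_performance(conn, 'bench:small', 100, 10, 100)
    large = long_ziplist_performance(conn, 'bench:large', 100000, 10, 100)
    return small, large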
@@ -138,31 +138,31 @@ def long_ziplist_performance(conn, key, length, passes, psize):


def long_ziplist_index(conn, key, length, passes, psize):  # A
    conn.delete(key)  # B
    conn.rpush(key, *range(length))  # C
    length >>= 1
    pipeline = conn.pipeline(False)  # D
    t = time.time()  # E
    for p in xrange(passes):  # F
        for pi in xrange(psize):  # G
            pipeline.lindex(key, length)  # H
        pipeline.execute()  # I
    return (passes * psize) / (time.time() - t or .001)  # J


def long_intset_performance(conn, key, length, passes, psize):  # A
    conn.delete(key)  # B
    conn.sadd(key, *range(1000000, 1000000 + length))  # C
    cur = 1000000 - 1
    pipeline = conn.pipeline(False)  # D
    t = time.time()  # E
    for p in xrange(passes):  # F
        for pi in xrange(psize):  # G
            pipeline.spop(key)  # H
            pipeline.sadd(key, cur)
            cur -= 1
        pipeline.execute()  # I
    return (passes * psize) / (time.time() - t or .001)  # J


# 代码清单 9-7
@@ -170,24 +170,24 @@ def long_intset_performance(conn, key, length, passes, psize):  # A

# 在调用 shard_key() 函数时,
# 用户需要给定基础散列的名字、将要被储存到分片散列里面的键、预计的元素总数量以及请求的分片数量。
def shard_key(base, key, total_elements, shard_size):
    # 如果值是一个整数或者一个看上去像是整数的字符串,
    # 那么它将被直接用于计算分片 ID 。
    if isinstance(key, (int, long)) or key.isdigit():
        # 整数键将被程序假定为连续指派的 ID ,
        # 并基于这个整数 ID 的二进制位的高位来选择分片 ID 。
        # 此外,程序在进行整数转换的时候还使用了显式的基数(以及 str() 函数),
        # 使得键 010 可以被转换为 10 ,而不是 8 。
        shard_id = int(str(key), 10) // shard_size
    else:
        # 对于不是整数的键,
        # 程序将基于预计的元素总数量以及请求的分片数量,
        # 计算出实际所需的分片总数量。
        shards = 2 * total_elements // shard_size
        # 在得知了分片的数量之后,
        # 程序就可以通过计算键的散列值与分片数量之间的模数来得到分片 ID 。
        shard_id = binascii.crc32(key) % shards
    # 最后,程序会把基础键和分片 ID 组合在一起,得出分片键。
    return "%s:%s" % (base, shard_id)


# <end id="calculate-shard-key"/>
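# --- Illustrative sketch (not part of the original listing): how shard_key() maps
# --- keys to shard names. With shard_size=1024, sequential integer IDs land in
# --- consecutive shards, while non-numeric keys are spread by their CRC32 checksum.
def example_shard_keys():
    print shard_key('users', 10, 1000000, 1024)           # -> 'users:0'
    print shard_key('users', 2500, 1000000, 1024)         # -> 'users:2'
    print shard_key('users', 'someuser', 1000000, 1024)   # shard chosen by CRC32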
@@ -196,17 +196,17 @@ def shard_key(base, key, total_elements, shard_size):

# 代码清单 9-8
# <start id="sharded-hset-hget"/>
def shard_hset(conn, base, key, value, total_elements, shard_size):
    # 计算出应该由哪个分片来储存值。
    shard = shard_key(base, key, total_elements, shard_size)
    # 将值储存到分片里面。
    return conn.hset(shard, key, value)


def shard_hget(conn, base, key, total_elements, shard_size):
    # 计算出值可能被储存到了哪个分片里面。
    shard = shard_key(base, key, total_elements, shard_size)
    # 取得储存在分片里面的值。
    return conn.hget(shard, key)


# <end id="sharded-hset-hget"/>
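# --- Illustrative usage sketch (not part of the original listing): write and read one
# --- value through the sharded hash helpers; the base name, expected element count
# --- and shard size are arbitrary example numbers.
def example_sharded_hash(conn):
    shard_hset(conn, 'example', 'user:17', 'hello', 1000, 512)
    return shard_hget(conn, 'example', 'user:17', 1000, 512)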
@@ -237,12 +237,12 @@ def find_city_by_ip(conn, ip_address):

# 代码清单 9-10
# <start id="sharded-sadd"/>
def shard_sadd(conn, base, member, total_elements, shard_size):
    shard = shard_key(base,
                      # 计算成员应该被储存到哪个分片集合里面;
                      # 因为成员并非连续 ID ,所以程序在计算成员所属的分片之前,会先将成员转换为字符串。
                      'x' + str(member), total_elements, shard_size)
    # 将成员储存到分片里面。
    return conn.sadd(shard, member)


# <end id="sharded-sadd"/>
@@ -255,19 +255,19 @@ SHARD_SIZE = 512


def count_visit(conn, session_id):
    # 取得当天的日期,并生成唯一访客计数器的键。
    today = date.today()
    key = 'unique:%s' % today.isoformat()
    # 计算或者获取当天的预计唯一访客人数。
    expected = get_expected(conn, key, today)

    # 根据 128 位的 UUID ,计算出一个 56 位的 ID 。
    id = int(session_id.replace('-', '')[:15], 16)
    # 将 ID 添加到分片集合里面。
    if shard_sadd(conn, key, id, expected, SHARD_SIZE):
        # 如果 ID 在分片集合里面并不存在,那么对唯一访客计数器执行加一操作。
        conn.incr(key)
# <end id="unique-visitor-count"/>


# 代码清单 9-12
@@ -279,35 +279,35 @@ EXPECTED = {}


def get_expected(conn, key, today):
    # 如果程序已经计算出或者获取到了当日的预计访客人数,
    # 那么直接使用已计算出的数字。
    if key in EXPECTED:
        return EXPECTED[key]

    exkey = key + ':expected'
    # 如果其他客户端已经计算出了当日的预计访客人数,
    # 那么直接使用已计算出的数字。
    expected = conn.get(exkey)

    if not expected:
        # 获取昨天的唯一访客人数,如果该数值不存在就使用默认值一百万。
        yesterday = (today - timedelta(days=1)).isoformat()
        expected = conn.get('unique:%s' % yesterday)
        expected = int(expected or DAILY_EXPECTED)

        # 基于“明天的访客人数至少会比今天的访客人数多 50%”这一假设,
        # 给昨天的访客人数加上 50% ,然后向上舍入至下一个底数为 2 的幂。
        expected = 2 ** int(math.ceil(math.log(expected * 1.5, 2)))
        # 将计算出的预计访客人数写入到 Redis 里面,以便其他程序在有需要时使用。
        if not conn.setnx(exkey, expected):
            # 如果在我们之前,
            # 已经有其他客户端储存了当日的预计访客人数,
            # 那么直接使用已储存的数字。
            expected = conn.get(exkey)

    # 将当日的预计访客人数记录到本地副本里面,并将它返回给调用者。
    EXPECTED[key] = int(expected)
    return EXPECTED[key]


# <end id="expected-viewer-count"/>
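# --- Illustrative sketch (not part of the original listing): the sizing rule used
# --- above. If yesterday saw 1000 unique visitors, the estimate is 1000 * 1.5 = 1500,
# --- rounded up to the next power of two, 2048, so the sharded set is sized generously.
def example_expected_rounding(yesterday_count=1000):
    return 2 ** int(math.ceil(math.log(yesterday_count * 1.5, 2)))  # -> 2048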
@@ -335,10 +335,10 @@ TCD TGO THA TJK TKL TKM TLS TON TTO TUN TUR TUV TWN TZA UGA UKR UMI URY

USA UZB VAT VCT VEN VGB VIR VNM VUT WLF WSM YEM ZAF ZMB ZWE'''.split()

STATES = {
    # 加拿大的省信息和属地信息。
    'CAN': '''AB BC MB NB NL NS NT NU ON PE QC SK YT'''.split(),
    # 美国各个州的信息。
    'USA': '''AA AE AK AL AP AR AS AZ CA CO CT DC DE FL FM GA GU HI IA ID
IL IN KS KY LA MA MD ME MH MI MN MO MP MS MT NC ND NE NH NJ NM NV NY OH
OK OR PA PR PW RI SC SD TN TX UT VA VI VT WA WI WV WY'''.split(),
}
@@ -350,31 +350,31 @@ OK OR PA PR PW RI SC SD TN TX UT VA VI VT WA WI WV WY'''.split(),

# 代码清单 9-14
# <start id="location-to-code"/>
def get_code(country, state):
    # 寻找国家对应的偏移量。
    cindex = bisect.bisect_left(COUNTRIES, country)
    # 没有找到指定的国家时,将索引设置为 -1 。
    if cindex > len(COUNTRIES) or COUNTRIES[cindex] != country:
        cindex = -1
    # 因为 Redis 里面的未初始化数据在返回时会被转换为空值,
    # 所以我们要将“未找到指定国家”时的返回值改为 0 ,
    # 并将第一个国家的索引变为 1 ,以此类推。
    cindex += 1

    sindex = -1
    if state and country in STATES:
        # 尝试取出国家对应的州信息。
        states = STATES[country]
        # 寻找州对应的偏移量。
        sindex = bisect.bisect_left(states, state)
        # 像处理“未找到指定国家”时的情况一样,处理“未找到指定州”的情况。
        if sindex > len(states) or states[sindex] != state:
            sindex = -1
    # 如果没有找到指定的州,那么索引为 0 ;
    # 如果找到了指定的州,那么索引大于 0 。
    sindex += 1

    # chr() 函数会将介于 0 至 255 之间的整数值转换为对应的 ASCII 字符。
    return chr(cindex) + chr(sindex)


# <end id="location-to-code"/>
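# --- Illustrative sketch (not part of the original listing): get_code() packs a
# --- location into two bytes, one for the country offset and one for the state offset.
def example_location_codes():
    print repr(get_code('USA', 'CA'))  # two one-byte indexes, both greater than 0
    print repr(get_code('???', ''))    # -> '\x00\x00', the "not found" encoding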
@@ -387,26 +387,26 @@ USERS_PER_SHARD = 2 ** 20


def set_location(conn, user_id, country, state):
    # 取得用户所在位置的编码。
    code = get_code(country, state)

    # 查找分片 ID 以及用户在指定分片中的位置(position)。
    shard_id, position = divmod(user_id, USERS_PER_SHARD)
    # 计算用户数据的偏移量。
    offset = position * 2

    pipe = conn.pipeline(False)
    # 将用户的位置信息储存到分片后的位置表格里面。
    pipe.setrange('location:%s' % shard_id, offset, code)

    # 对记录目前已知最大用户 ID 的有序集合进行更新。
    tkey = str(uuid.uuid4())
    pipe.zadd(tkey, 'max', user_id)
    pipe.zunionstore('location:max',
                     [tkey, 'location:max'], aggregate='max')
    pipe.delete(tkey)

    pipe.execute()


# <end id="set-location-information"/>
@@ -415,29 +415,29 @@ def set_location(conn, user_id, country, state):

# 代码清单 9-16
# <start id="aggregate-population"/>
def aggregate_location(conn):
    # 初始化两个特殊结构,
    # 以便快速地对已存在的计数器以及缺失的计数器进行更新。
    countries = defaultdict(int)
    states = defaultdict(lambda: defaultdict(int))

    # 获取目前已知的最大用户 ID ,
    # 并使用它来计算出程序需要访问的最大分片 ID 。
    max_id = int(conn.zscore('location:max', 'max'))
    max_block = max_id // USERS_PER_SHARD

    # 按顺序地处理每个分片……
    for shard_id in xrange(max_block + 1):
        # 读取每个块……
        for block in readblocks(conn, 'location:%s' % shard_id):
            # 从块里面提取出每个编码,
            # 并根据编码查找原始的位置信息,
            # 然后对这些位置信息进行聚合计算。
            for offset in xrange(0, len(block) - 1, 2):
                code = block[offset:offset + 2]
                # 对聚合数据进行更新。
                update_aggregates(countries, states, [code])

    return countries, states


# <end id="aggregate-population"/>
@@ -446,145 +446,145 @@ def aggregate_location(conn):

# 代码清单 9-17
# <start id="code-to-location"/>
def update_aggregates(countries, states, codes):
    for code in codes:
        # 只对合法的编码进行查找。
        if len(code) != 2:
            continue

        # 计算出国家和州在查找表格中的实际偏移量。
        country = ord(code[0]) - 1
        state = ord(code[1]) - 1

        # 如果国家所处的偏移量不在合法范围之内,那么跳过这个编码。
        if country < 0 or country >= len(COUNTRIES):
            continue

        # 获取 ISO3 国家编码。
        country = COUNTRIES[country]
        # 在对国家信息进行解码之后,
        # 把用户计入到这个国家对应的计数器里面。
        countries[country] += 1

        # 如果程序没有找到指定的州信息,
        # 或者查找州信息时的偏移量不在合法的范围之内,
        # 那么跳过这个编码。
        if country not in STATES:
            continue
        if state < 0 or state >= len(STATES[country]):
            continue

        # 根据编码获取州名。
        state = STATES[country][state]
        # 对州计数器执行加一操作。
        states[country][state] += 1
# <end id="code-to-location"/>


# 代码清单 9-18
# <start id="aggregate-limited"/>
def aggregate_location_list(conn, user_ids):
    # 设置流水线,减少操作执行过程中与 Redis 的通信往返次数。
    pipe = conn.pipeline(False)
    # 和之前一样,设置好基本的聚合数据。
    countries = defaultdict(int)
    states = defaultdict(lambda: defaultdict(int))

    for i, user_id in enumerate(user_ids):
        # 查找用户位置信息所在分片的 ID ,以及信息在分片中的偏移量。
        shard_id, position = divmod(user_id, USERS_PER_SHARD)
        offset = position * 2

        # 发送另一个被流水线包裹的命令,获取用户的位置信息。
        pipe.substr('location:%s' % shard_id, offset, offset + 1)

        # 每处理 1000 个请求,
        # 程序就会调用之前定义的辅助函数对聚合数据进行一次更新。
        if (i + 1) % 1000 == 0:
            update_aggregates(countries, states, pipe.execute())

    # 对遍历余下的最后一批用户进行处理。
    update_aggregates(countries, states, pipe.execute())

    # 返回聚合数据。
    return countries, states


# <end id="aggregate-limited"/>

class TestCh09(unittest.TestCase):
    def setUp(self):
        self.conn = redis.Redis(db=15)
        self.conn.flushdb()

    def tearDown(self):
        self.conn.flushdb()

    def test_long_ziplist_performance(self):
        long_ziplist_performance(self.conn, 'test', 5, 10, 10)
        self.assertEquals(self.conn.llen('test'), 5)

    def test_shard_key(self):
        base = 'test'
        self.assertEquals(shard_key(base, 1, 2, 2), 'test:0')
        self.assertEquals(shard_key(base, '1', 2, 2), 'test:0')
        self.assertEquals(shard_key(base, 125, 1000, 100), 'test:1')
        self.assertEquals(shard_key(base, '125', 1000, 100), 'test:1')

        for i in xrange(50):
            self.assertTrue(0 <= int(shard_key(base, 'hello:%s' % i, 1000, 100).partition(':')[-1]) < 20)
            self.assertTrue(0 <= int(shard_key(base, i, 1000, 100).partition(':')[-1]) < 10)

    def test_sharded_hash(self):
        for i in xrange(50):
            shard_hset(self.conn, 'test', 'keyname:%s' % i, i, 1000, 100)
            self.assertEquals(shard_hget(self.conn, 'test', 'keyname:%s' % i, 1000, 100), str(i))
            shard_hset(self.conn, 'test2', i, i, 1000, 100)
            self.assertEquals(shard_hget(self.conn, 'test2', i, 1000, 100), str(i))

    def test_sharded_sadd(self):
        for i in xrange(50):
            shard_sadd(self.conn, 'testx', i, 50, 50)
        self.assertEquals(self.conn.scard('testx:0') + self.conn.scard('testx:1'), 50)

    def test_unique_visitors(self):
        global DAILY_EXPECTED
        DAILY_EXPECTED = 10000

        for i in xrange(179):
            count_visit(self.conn, str(uuid.uuid4()))
        self.assertEquals(self.conn.get('unique:%s' % (date.today().isoformat())), '179')

        self.conn.flushdb()
        self.conn.set('unique:%s' % ((date.today() - timedelta(days=1)).isoformat()), 1000)
        for i in xrange(183):
            count_visit(self.conn, str(uuid.uuid4()))
        self.assertEquals(self.conn.get('unique:%s' % (date.today().isoformat())), '183')

    def test_user_location(self):
        i = 0
        for country in COUNTRIES:
            if country in STATES:
                for state in STATES[country]:
                    set_location(self.conn, i, country, state)
                    i += 1
            else:
                set_location(self.conn, i, country, '')
                i += 1

        _countries, _states = aggregate_location(self.conn)
        countries, states = aggregate_location_list(self.conn, range(i + 1))

        self.assertEquals(_countries, countries)
        self.assertEquals(_states, states)

        for c in countries:
            if c in STATES:
                self.assertEquals(len(STATES[c]), countries[c])
                for s in STATES[c]:
                    self.assertEquals(states[c][s], 1)
            else:
                self.assertEquals(countries[c], 1)


if __name__ == '__main__':
    unittest.main()
File diff suppressed because it is too large
@@ -12,42 +12,42 @@ import uuid

# 代码清单 11-1
# <start id="script-load"/>
def script_load(script):
    # 将 SCRIPT LOAD 命令返回的已缓存脚本 SHA1 校验和储存到一个列表里面,
    # 以便之后在 call() 函数内部对其进行修改。
    sha = [None]

    # 在调用已载入脚本的时候,
    # 用户需要将 Redis 连接、脚本要处理的键以及脚本的其他参数传递给脚本。
    def call(conn, keys=[], args=[], force_eval=False):
        if not force_eval:
            # 程序只会在 SHA1 校验和未被缓存的情况下尝试载入脚本。
            if not sha[0]:
                # 如果 SHA1 校验和未被缓存,那么载入给定的脚本
                sha[0] = conn.execute_command(
                    "SCRIPT", "LOAD", script, parse="LOAD")

            try:
                # 使用已缓存的 SHA1 校验和执行命令。
                return conn.execute_command(
                    "EVALSHA", sha[0], len(keys), *(keys + args))

            except redis.exceptions.ResponseError as msg:
                # 如果错误与脚本缺失无关,那么重新抛出异常。
                if not msg.args[0].startswith("NOSCRIPT"):
                    raise

        # 当程序接收到脚本错误的时候,
        # 又或者程序需要强制执行脚本的时候,
        # 它会使用 EVAL 命令直接执行给定的脚本。
        # EVAL 命令在执行完脚本之后,
        # 会自动地把脚本缓存起来,
        # 而缓存产生的 SHA1 校验和跟使用 EVALSHA 命令缓存脚本产生的 SHA1 校验和是完全相同的。
        return conn.execute_command(
            "EVAL", script, len(keys), *(keys + args))

    # 返回一个函数,这个函数在被调用的时候会自动载入并执行脚本。
    return call


# <end id="script-load"/>
|
||||||
# 代码清单 11-2
|
# 代码清单 11-2
|
||||||
# <start id="ch08-post-status"/>
|
# <start id="ch08-post-status"/>
|
||||||
def create_status(conn, uid, message, **data):
|
def create_status(conn, uid, message, **data):
|
||||||
pipeline = conn.pipeline(True)
|
pipeline = conn.pipeline(True)
|
||||||
# 根据用户 ID 获取用户的用户名。
|
# 根据用户 ID 获取用户的用户名。
|
||||||
pipeline.hget('user:%s' % uid, 'login')
|
pipeline.hget('user:%s' % uid, 'login')
|
||||||
# 为这条状态消息创建一个新的 ID 。
|
# 为这条状态消息创建一个新的 ID 。
|
||||||
pipeline.incr('status:id:')
|
pipeline.incr('status:id:')
|
||||||
login, id = pipeline.execute()
|
login, id = pipeline.execute()
|
||||||
|
|
||||||
# 在发布状态消息之前,先检查用户的账号是否存在。
|
# 在发布状态消息之前,先检查用户的账号是否存在。
|
||||||
if not login:
|
if not login:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# 准备并设置状态消息的各项信息。
|
# 准备并设置状态消息的各项信息。
|
||||||
data.update({
|
data.update({
|
||||||
'message': message,
|
'message': message,
|
||||||
'posted': time.time(),
|
'posted': time.time(),
|
||||||
'id': id,
|
'id': id,
|
||||||
'uid': uid,
|
'uid': uid,
|
||||||
'login': login,
|
'login': login,
|
||||||
})
|
})
|
||||||
pipeline.hmset('status:%s' % id, data)
|
pipeline.hmset('status:%s' % id, data)
|
||||||
# 更新用户的已发送状态消息数量。
|
# 更新用户的已发送状态消息数量。
|
||||||
pipeline.hincrby('user:%s' % uid, 'posts')
|
pipeline.hincrby('user:%s' % uid, 'posts')
|
||||||
pipeline.execute()
|
pipeline.execute()
|
||||||
# 返回新创建的状态消息的 ID 。
|
# 返回新创建的状态消息的 ID 。
|
||||||
return id
|
return id
|
||||||
|
|
||||||
|
|
||||||
# <end id="ch08-post-status"/>
|
# <end id="ch08-post-status"/>
|
||||||
|
@@ -102,18 +102,18 @@ _create_status = create_status

# <start id="post-status-lua"/>
# 这个函数接受的参数和原版消息发布函数接受的参数一样。
def create_status(conn, uid, message, **data):
    # 准备好对状态消息进行设置所需的各个参数和属性。
    args = [
        'message', message,
        'posted', time.time(),
        'uid', uid,
    ]
    for key, value in data.iteritems():
        args.append(key)
        args.append(value)

    return create_status_lua(
        conn, ['user:%s' % uid, 'status:id:'], args)


create_status_lua = script_load('''
@@ -149,26 +149,26 @@ return id

# Listing 11-4
# <start id="old-lock"/>
def acquire_lock_with_timeout(
        conn, lockname, acquire_timeout=10, lock_timeout=10):
    # A 128-bit random identifier.
    identifier = str(uuid.uuid4())
    lockname = 'lock:' + lockname
    # Make sure that the value passed to EXPIRE is an integer.
    lock_timeout = int(math.ceil(lock_timeout))

    end = time.time() + acquire_timeout
    while time.time() < end:
        # Acquire the lock and set its expiration time.
        if conn.setnx(lockname, identifier):
            conn.expire(lockname, lock_timeout)
            return identifier
        # Check the expiration time, and update it if necessary.
        elif not conn.ttl(lockname):
            conn.expire(lockname, lock_timeout)

        time.sleep(.001)

    return False


# <end id="old-lock"/>

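# --- Added example (not in the original listing) ---
# A minimal sketch of how the lock helpers might be used together. It assumes
# a reachable Redis server; release_lock() is defined further down in this
# listing, and the lock name 'market:' is only illustrative.
def _example_locking():
    conn = redis.Redis(db=15)
    identifier = acquire_lock_with_timeout(conn, 'market:', acquire_timeout=1, lock_timeout=5)
    if not identifier:
        return False
    try:
        # Critical section: operate on the protected data here.
        pass
    finally:
        release_lock(conn, 'market:', identifier)
    return True
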
@@ -180,21 +180,21 @@ _acquire_lock_with_timeout = acquire_lock_with_timeout

# Listing 11-5
# <start id="lock-in-lua"/>
def acquire_lock_with_timeout(
        conn, lockname, acquire_timeout=10, lock_timeout=10):
    identifier = str(uuid.uuid4())
    lockname = 'lock:' + lockname
    lock_timeout = int(math.ceil(lock_timeout))

    acquired = False
    end = time.time() + acquire_timeout
    while time.time() < end and not acquired:
        # Perform the actual lock acquisition, checking to make sure
        # that the Lua call completed successfully.
        acquired = acquire_lock_with_timeout_lua(
            conn, [lockname], [lock_timeout, identifier]) == 'OK'

        time.sleep(.001 * (not acquired))

    return acquired and identifier


acquire_lock_with_timeout_lua = script_load('''
@@ -210,25 +210,25 @@ end


def release_lock(conn, lockname, identifier):
    pipe = conn.pipeline(True)
    lockname = 'lock:' + lockname

    while True:
        try:
            pipe.watch(lockname)                     # A
            if pipe.get(lockname) == identifier:     # A
                pipe.multi()                         # B
                pipe.delete(lockname)                # B
                pipe.execute()                       # B
                return True                          # B

            pipe.unwatch()
            break

        except redis.exceptions.WatchError:          # C
            pass                                     # C

    return False                                     # D


_release_lock = release_lock

@@ -237,9 +237,9 @@ _release_lock = release_lock

# Listing 11-6
# <start id="release-lock-in-lua"/>
def release_lock(conn, lockname, identifier):
    lockname = 'lock:' + lockname
    # Call the Lua function that releases the lock.
    return release_lock_lua(conn, [lockname], [identifier])


release_lock_lua = script_load('''
@@ -256,23 +256,23 @@ end

# Listing 11-7
# <start id="old-acquire-semaphore"/>
def acquire_semaphore(conn, semname, limit, timeout=10):
    # A 128-bit random identifier.
    identifier = str(uuid.uuid4())
    now = time.time()

    pipeline = conn.pipeline(True)
    # Clean out any expired semaphore holders.
    pipeline.zremrangebyscore(semname, '-inf', now - timeout)
    # Try to acquire the semaphore.
    pipeline.zadd(semname, identifier, now)
    # Check whether the semaphore was acquired.
    pipeline.zrank(semname, identifier)
    if pipeline.execute()[-1] < limit:
        return identifier

    # The semaphore was not acquired; remove the identifier that was added.
    conn.zrem(semname, identifier)
    return None


# <end id="old-acquire-semaphore"/>

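# --- Added example (not in the original listing) ---
# A minimal sketch of the acquire/refresh/release cycle for the semaphore
# helpers. It assumes a reachable Redis server; release_semaphore() and
# refresh_semaphore() are defined further down in this listing, and the
# semaphore name and limit are illustrative only.
def _example_semaphore():
    conn = redis.Redis(db=15)
    identifier = acquire_semaphore(conn, 'sem:demo', 5, timeout=10)
    if identifier is None:
        return False
    try:
        # While holding the semaphore, refresh it periodically so that it
        # does not time out mid-task.
        refresh_semaphore(conn, 'sem:demo', identifier)
    finally:
        release_semaphore(conn, 'sem:demo', identifier)
    return True
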
@@ -284,11 +284,11 @@ _acquire_semaphore = acquire_semaphore

# Listing 11-8
# <start id="acquire-semaphore-lua"/>
def acquire_semaphore(conn, semname, limit, timeout=10):
    # Get the current timestamp, which is used to handle timed-out semaphores.
    now = time.time()
    # Pass all of the required arguments to the Lua function to actually
    # perform the semaphore acquisition.
    return acquire_semaphore_lua(conn, [semname],
        [now - timeout, limit, now, str(uuid.uuid4())])


acquire_semaphore_lua = script_load('''

@@ -307,16 +307,16 @@ end

# <end id="acquire-semaphore-lua"/>

def release_semaphore(conn, semname, identifier):
    return conn.zrem(semname, identifier)


# Listing 11-9
# <start id="refresh-semaphore-lua"/>
def refresh_semaphore(conn, semname, identifier):
    return refresh_semaphore_lua(conn, [semname],
        # If the semaphore was not refreshed, the Lua script returns nil,
        # which Python converts into None for the caller.
        [identifier, time.time()]) != None


refresh_semaphore_lua = script_load('''
@@ -331,45 +331,45 @@ valid_characters = '`abcdefghijklmnopqrstuvwxyz{'


def find_prefix_range(prefix):
    posn = bisect.bisect_left(valid_characters, prefix[-1:])
    suffix = valid_characters[(posn or 1) - 1]
    return prefix[:-1] + suffix + '{', prefix + '{'


# Listing 11-10
# <start id="old-autocomplete-code"/>
def autocomplete_on_prefix(conn, guild, prefix):
    # Compute the start and end points of the search range from the given prefix.
    start, end = find_prefix_range(prefix)
    identifier = str(uuid.uuid4())
    start += identifier
    end += identifier
    zset_name = 'members:' + guild

    # Add the start and end elements of the range to the sorted set.
    conn.zadd(zset_name, start, 0, end, 0)
    pipeline = conn.pipeline(True)
    while 1:
        try:
            pipeline.watch(zset_name)
            # Find the ranks of the two inserted elements in the sorted set.
            sindex = pipeline.zrank(zset_name, start)
            eindex = pipeline.zrank(zset_name, end)
            erange = min(sindex + 9, eindex - 2)
            pipeline.multi()
            # Fetch the values in the range, then remove the previously
            # inserted start and end elements.
            pipeline.zrem(zset_name, start, end)
            pipeline.zrange(zset_name, sindex, erange)
            items = pipeline.execute()[-1]
            break
        # If the autocomplete sorted set was modified by another client,
        # retry the operation.
        except redis.exceptions.WatchError:
            continue

    # If other autocomplete operations are in progress, remove their start
    # and end elements from the fetched results.
    return [item for item in items if '{' not in item]


# <end id="old-autocomplete-code"/>

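# --- Added example (not in the original listing) ---
# A minimal sketch of how the prefix autocomplete above might be exercised.
# It assumes a reachable Redis server and an illustrative guild name; members
# live in the sorted set members:<guild> with score 0, mirroring the tests
# at the bottom of this listing.
def _example_autocomplete():
    conn = redis.Redis(db=15)
    for name in ('jack', 'jane', 'jeff', 'jenny'):
        conn.zadd('members:demo', name, 0)
    # Should return the members whose names start with 'je'.
    return autocomplete_on_prefix(conn, 'demo', 'je')
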
@@ -381,17 +381,17 @@ _autocomplete_on_prefix = autocomplete_on_prefix

# Listing 11-11
# <start id="autocomplete-on-prefix-lua"/>
def autocomplete_on_prefix(conn, guild, prefix):
    # Get the range and the identifier.
    start, end = find_prefix_range(prefix)
    identifier = str(uuid.uuid4())

    # Fetch the data from Redis with the Lua script.
    items = autocomplete_on_prefix_lua(conn,
        ['members:' + guild],
        [start + identifier, end + identifier])

    # Filter out all of the unwanted elements.
    return [item for item in items if '{' not in item]


autocomplete_on_prefix_lua = script_load('''
@@ -416,49 +416,49 @@ return redis.call('zrange', KEYS[1], sindex, eindex)

# Listing 11-12
# <start id="ch06-purchase-item-with-lock"/>
def purchase_item_with_lock(conn, buyerid, itemid, sellerid):
    buyer = "users:%s" % buyerid
    seller = "users:%s" % sellerid
    item = "%s.%s" % (itemid, sellerid)
    inventory = "inventory:%s" % buyerid

    # Try to acquire the lock.
    locked = acquire_lock(conn, 'market:')
    if not locked:
        return False

    pipe = conn.pipeline(True)
    try:
        # Check whether the item is still for sale and whether the buyer
        # has enough money to pay for it.
        pipe.zscore("market:", item)
        pipe.hget(buyer, 'funds')
        price, funds = pipe.execute()
        if price is None or price > funds:
            return None

        # Transfer the buyer's payment to the seller, and the sold item
        # to the buyer.
        pipe.hincrby(seller, 'funds', int(price))
        pipe.hincrby(buyer, 'funds', int(-price))
        pipe.sadd(inventory, itemid)
        pipe.zrem("market:", item)
        pipe.execute()
        return True
    finally:
        # Release the lock.
        release_lock(conn, 'market:', locked)
# <end id="ch06-purchase-item-with-lock"/>

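# --- Added example (not in the original listing) ---
# A minimal sketch of the marketplace flow using the helpers in this listing.
# It assumes a reachable Redis server; list_item() and the Lua-based
# purchase_item() are defined further down, and the user/item IDs and the
# price are illustrative only.
def _example_marketplace():
    conn = redis.Redis(db=15)
    conn.sadd('inventory:1', '1')          # seller 1 owns item 1
    conn.hset('users:2', 'funds', 5)       # buyer 2 has 5 units of currency
    list_item(conn, 1, 1, 3)               # seller 1 lists item 1 for 3
    return purchase_item(conn, 2, '1', 1)  # buyer 2 buys item 1 from seller 1
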
# Listing 11-13
# <start id="purchase-item-lua"/>
def purchase_item(conn, buyerid, itemid, sellerid):
    # Prepare all of the keys and arguments needed by the Lua script.
    buyer = "users:%s" % buyerid
    seller = "users:%s" % sellerid
    item = "%s.%s" % (itemid, sellerid)
    inventory = "inventory:%s" % buyerid

    return purchase_item_lua(conn,
        ['market:', buyer, seller, inventory], [item, itemid])


purchase_item_lua = script_load('''
@@ -481,9 +481,9 @@ end

# <end id="purchase-item-lua"/>

def list_item(conn, itemid, sellerid, price):
    inv = "inventory:%s" % sellerid
    item = "%s.%s" % (itemid, sellerid)
    return list_item_lua(conn, [inv, 'market:'], [itemid, item, price])


list_item_lua = script_load('''

@@ -498,35 +498,35 @@ end

# Listing 11-14
# <start id="sharded-list-push"/>
def sharded_push_helper(conn, key, *items, **kwargs):
    # Convert the sequence of elements into a list.
    items = list(items)
    total = 0
    # While there are still elements to push...
    while items:
        # ...push them onto the sharded list by calling the Lua script.
        pushed = sharded_push_lua(conn,
            [key + ':', key + ':first', key + ':last'],
            # This code pushes at most 64 elements at a time; adjust this
            # number to match your ziplist maximum length.
            [kwargs['cmd']] + items[:64])
        # Count the number of elements that were pushed.
        total += pushed
        # Remove the elements that have been pushed onto the sharded list.
        del items[:pushed]
    # Return the total number of elements pushed.
    return total


def sharded_lpush(conn, key, *items):
    # Call sharded_push_helper(), telling it through the extra argument
    # whether to perform a left push or a right push.
    return sharded_push_helper(conn, key, *items, cmd='lpush')


def sharded_rpush(conn, key, *items):
    # Call sharded_push_helper(), telling it through the extra argument
    # whether to perform a left push or a right push.
    return sharded_push_helper(conn, key, *items, cmd='rpush')


sharded_push_lua = script_load('''
@@ -560,7 +560,7 @@ end

# <end id="sharded-list-push"/>

def sharded_llen(conn, key):
    return sharded_llen_lua(conn, [key + ':', key + ':first', key + ':last'])


sharded_llen_lua = script_load('''

@@ -584,13 +584,13 @@ return total

# Listing 11-15
# <start id="sharded-list-pop-lua"/>
def sharded_lpop(conn, key):
    return sharded_list_pop_lua(
        conn, [key + ':', key + ':first', key + ':last'], ['lpop'])


def sharded_rpop(conn, key):
    return sharded_list_pop_lua(
        conn, [key + ':', key + ':first', key + ':last'], ['rpop'])


sharded_list_pop_lua = script_load('''
@@ -639,49 +639,49 @@ DUMMY = str(uuid.uuid4())

# Define a helper function that performs the actual pop for both the
# blocking left-pop and the blocking right-pop operations.
def sharded_bpop_helper(conn, key, timeout, pop, bpop, endp, push):
    # Prepare the pipeline and the timeout information.
    pipe = conn.pipeline(False)
    timeout = max(timeout, 0) or 2 ** 64
    end = time.time() + timeout

    while time.time() < end:
        # Try a non-blocking pop; if it produces a value that is not the
        # dummy element, return that value.
        result = pop(conn, key)
        if result not in (None, DUMMY):
            return result

        # Get the shard that the code believes it should pop from.
        shard = conn.get(key + endp) or '0'
        # Run the Lua helper, which pushes a dummy element onto the shard
        # if the code tries to pop from the wrong shard.
        sharded_bpop_helper_lua(pipe, [key + ':', key + endp],
            # Because a potentially failing EVALSHA call cannot run inside a
            # pipeline, the force_eval argument is used here to make sure
            # that EVAL is called instead of EVALSHA.
            [shard, push, DUMMY], force_eval=True)
        # Use the caller-supplied BLPOP or BRPOP command to perform the
        # blocking pop on the list.
        getattr(pipe, bpop)(key + ':' + shard, 1)

        # If the command returned an element, the work is done; otherwise, retry.
        result = (pipe.execute()[-1] or [None])[-1]
        if result not in (None, DUMMY):
            return result

# This function calls the underlying blocking pop operation.


def sharded_blpop(conn, key, timeout=0):
    return sharded_bpop_helper(
        conn, key, timeout, sharded_lpop, 'blpop', ':first', 'lpush')


# This function calls the underlying blocking pop operation.
def sharded_brpop(conn, key, timeout=0):
    return sharded_bpop_helper(
        conn, key, timeout, sharded_rpop, 'brpop', ':last', 'rpush')


sharded_bpop_helper_lua = script_load('''

@@ -697,102 +697,102 @@ end

# <end id="sharded-blocking-list-pop"/>

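# --- Added example (not in the original listing) ---
# A minimal sketch exercising the sharded list helpers above. It assumes a
# reachable Redis server; the key name 'lst:demo' is illustrative only, and
# the exact return types may vary with the client version.
def _example_sharded_list():
    conn = redis.Redis(db=15)
    sharded_rpush(conn, 'lst:demo', *range(10))
    assert sharded_llen(conn, 'lst:demo') == 10
    first = sharded_lpop(conn, 'lst:demo')
    last = sharded_rpop(conn, 'lst:demo')
    # A blocking pop returns the next available element, or None on timeout.
    return first, last, sharded_blpop(conn, 'lst:demo', timeout=1)
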
class TestCh11(unittest.TestCase):
    def setUp(self):
        self.conn = redis.Redis(db=15)
        self.conn.flushdb()

    def tearDown(self):
        self.conn.flushdb()

    def test_load_script(self):
        self.assertEquals(script_load("return 1")(self.conn), 1)

    def test_create_status(self):
        self.conn.hset('user:1', 'login', 'test')
        sid = _create_status(self.conn, 1, 'hello')
        sid2 = create_status(self.conn, 1, 'hello')

        self.assertEquals(self.conn.hget('user:1', 'posts'), '2')
        data = self.conn.hgetall('status:%s' % sid)
        data2 = self.conn.hgetall('status:%s' % sid2)
        data.pop('posted')
        data.pop('id')
        data2.pop('posted')
        data2.pop('id')
        self.assertEquals(data, data2)

    def test_locking(self):
        identifier = acquire_lock_with_timeout(self.conn, 'test', 1, 5)
        self.assertTrue(identifier)
        self.assertFalse(acquire_lock_with_timeout(self.conn, 'test', 1, 5))
        release_lock(self.conn, 'test', identifier)
        self.assertTrue(acquire_lock_with_timeout(self.conn, 'test', 1, 5))

    def test_semaphore(self):
        ids = []
        for i in xrange(5):
            ids.append(acquire_semaphore(self.conn, 'test', 5, timeout=1))
        self.assertTrue(None not in ids)
        self.assertFalse(acquire_semaphore(self.conn, 'test', 5, timeout=1))
        time.sleep(.01)
        id = acquire_semaphore(self.conn, 'test', 5, timeout=0)
        self.assertTrue(id)
        self.assertFalse(refresh_semaphore(self.conn, 'test', ids[-1]))
        self.assertFalse(release_semaphore(self.conn, 'test', ids[-1]))

        self.assertTrue(refresh_semaphore(self.conn, 'test', id))
        self.assertTrue(release_semaphore(self.conn, 'test', id))
        self.assertFalse(release_semaphore(self.conn, 'test', id))

    def test_autocomplet_on_prefix(self):
        for word in 'these are some words that we will be autocompleting on'.split():
            self.conn.zadd('members:test', word, 0)

        self.assertEquals(autocomplete_on_prefix(self.conn, 'test', 'th'), ['that', 'these'])
        self.assertEquals(autocomplete_on_prefix(self.conn, 'test', 'w'), ['we', 'will', 'words'])
        self.assertEquals(autocomplete_on_prefix(self.conn, 'test', 'autocompleting'), ['autocompleting'])

    def test_marketplace(self):
        self.conn.sadd('inventory:1', '1')
        self.conn.hset('users:2', 'funds', 5)
        self.assertFalse(list_item(self.conn, 2, 1, 10))
        self.assertTrue(list_item(self.conn, 1, 1, 10))
        self.assertFalse(purchase_item(self.conn, 2, '1', 1))
        self.conn.zadd('market:', '1.1', 4)
        self.assertTrue(purchase_item(self.conn, 2, '1', 1))

    def test_sharded_list(self):
        self.assertEquals(sharded_lpush(self.conn, 'lst', *range(100)), 100)
        self.assertEquals(sharded_llen(self.conn, 'lst'), 100)

        self.assertEquals(sharded_lpush(self.conn, 'lst2', *range(1000)), 1000)
        self.assertEquals(sharded_llen(self.conn, 'lst2'), 1000)
        self.assertEquals(sharded_rpush(self.conn, 'lst2', *range(-1, -1001, -1)), 1000)
        self.assertEquals(sharded_llen(self.conn, 'lst2'), 2000)

        self.assertEquals(sharded_lpop(self.conn, 'lst2'), '999')
        self.assertEquals(sharded_rpop(self.conn, 'lst2'), '-1000')

        for i in xrange(999):
            r = sharded_lpop(self.conn, 'lst2')
        self.assertEquals(r, '0')

        results = []

        def pop_some(conn, fcn, lst, count, timeout):
            for i in xrange(count):
                results.append(sharded_blpop(conn, lst, timeout))

        t = threading.Thread(target=pop_some, args=(self.conn, sharded_blpop, 'lst3', 10, 1))
        t.setDaemon(1)
        t.start()

        self.assertEquals(sharded_rpush(self.conn, 'lst3', *range(4)), 4)
        time.sleep(2)
        self.assertEquals(sharded_rpush(self.conn, 'lst3', *range(4, 8)), 4)
        time.sleep(2)
        self.assertEquals(results, ['0', '1', '2', '3', None, '4', '5', '6', '7', None])


if __name__ == '__main__':
    unittest.main()

@@ -1,65 +1,66 @@
<?xml version="1.0"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+    xmlns="http://maven.apache.org/POM/4.0.0">
    <modelVersion>4.0.0</modelVersion>
    <groupId>io.github.dunwu</groupId>
    <artifactId>redis-in-action</artifactId>
    <version>1.0.0</version>
    <packaging>jar</packaging>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <java.version>1.8</java.version>
        <maven.compiler.source>${java.version}</maven.compiler.source>
        <maven.compiler.target>${java.version}</maven.compiler.target>

        <logback.version>1.2.3</logback.version>
        <jedis.version>2.9.0</jedis.version>
        <junit.version>4.12</junit.version>
    </properties>

    <dependencies>
        <!-- database begin -->
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>${jedis.version}</version>
        </dependency>
        <!-- database end -->

        <!-- log begin -->
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-parent</artifactId>
            <version>${logback.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
        <!-- log end -->

        <!-- test begin -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>${junit.version}</version>
            <scope>test</scope>
        </dependency>
        <!-- test end -->

        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-csv</artifactId>
            <version>1.5</version>
        </dependency>
        <dependency>
            <groupId>org.javatuples</groupId>
            <artifactId>javatuples</artifactId>
            <version>1.1</version>
        </dependency>
    </dependencies>
</project>

@@ -40,38 +40,13 @@ public class Chapter01 {
        printArticles(articles);
        assert articles.size() >= 1;

-        addRemoveGroups(conn, articleId, new String[] { "new-group" }, new String[] {});
+        addRemoveGroups(conn, articleId, new String[] {"new-group"}, new String[] {});
        System.out.println("We added the article to a new group, other articles include:");
        articles = getGroupArticles(conn, "new-group", 1);
        printArticles(articles);
        assert articles.size() >= 1;
    }

-    /**
-     * Listing 1-6: voting on an article
-     */
-    public void articleVote(Jedis conn, String user, String article) {
-        // Compute the voting cutoff time for the article.
-        long cutoff = (System.currentTimeMillis() / 1000) - ONE_WEEK_IN_SECONDS;
-
-        // Check whether the article can still be voted on (the publish time could
-        // also be read from the hash, but the sorted set returns it as a
-        // floating-point number that can be used without conversion).
-        if (conn.zscore("time:", article) < cutoff) {
-            return;
-        }
-
-        // Extract the article ID from the article:id identifier.
-        String articleId = article.substring(article.indexOf(':') + 1);
-
-        // If this is the first time the user has voted on this article,
-        // increase the article's vote count and score.
-        if (conn.sadd("voted:" + articleId, user) == 1) {
-            conn.zincrby("score:", VOTE_SCORE, article);
-            conn.hincrBy(article, "votes", 1);
-        }
-    }
-
    /**
     * Listing 1-7: posting an article
     */
@@ -103,10 +78,67 @@ public class Chapter01 {
        return articleId;
    }

+    /**
+     * Listing 1-6: voting on an article
+     */
+    public void articleVote(Jedis conn, String user, String article) {
+        // Compute the voting cutoff time for the article.
+        long cutoff = (System.currentTimeMillis() / 1000) - ONE_WEEK_IN_SECONDS;
+
+        // Check whether the article can still be voted on (the publish time could
+        // also be read from the hash, but the sorted set returns it as a
+        // floating-point number that can be used without conversion).
+        if (conn.zscore("time:", article) < cutoff) {
+            return;
+        }
+
+        // Extract the article ID from the article:id identifier.
+        String articleId = article.substring(article.indexOf(':') + 1);
+
+        // If this is the first time the user has voted on this article,
+        // increase the article's vote count and score.
+        if (conn.sadd("voted:" + articleId, user) == 1) {
+            conn.zincrby("score:", VOTE_SCORE, article);
+            conn.hincrBy(article, "votes", 1);
+        }
+    }
+
    public List<Map<String, String>> getArticles(Jedis conn, int page) {
        return getArticles(conn, page, "score:");
    }

+    private void printArticles(List<Map<String, String>> articles) {
+        for (Map<String, String> article : articles) {
+            System.out.println(" id: " + article.get("id"));
+            for (Map.Entry<String, String> entry : article.entrySet()) {
+                if (entry.getKey().equals("id")) {
+                    continue;
+                }
+                System.out.println(" " + entry.getKey() + ": " + entry.getValue());
+            }
+        }
+    }
+
+    /**
+     * Listing 1-9
+     */
+    public void addRemoveGroups(Jedis conn, String articleId, String[] toAdd, String[] toRemove) {
+        // Build the key that stores the article's information.
+        String article = "article:" + articleId;
+        // Add the article to the groups that it belongs to.
+        for (String group : toAdd) {
+            conn.sadd("group:" + group, article);
+        }
+        // Remove the article from groups.
+        for (String group : toRemove) {
+            conn.srem("group:" + group, article);
+        }
+    }
+
+    public List<Map<String, String>> getGroupArticles(Jedis conn, String group, int page) {
+        return getGroupArticles(conn, group, page, "score:");
+    }
+
    /**
     * Listing 1-8: fetching articles
     */
@@ -128,26 +160,6 @@ public class Chapter01 {
        return articles;
    }

-    /**
-     * Listing 1-9
-     */
-    public void addRemoveGroups(Jedis conn, String articleId, String[] toAdd, String[] toRemove) {
-        // Build the key that stores the article's information.
-        String article = "article:" + articleId;
-        // Add the article to the groups that it belongs to.
-        for (String group : toAdd) {
-            conn.sadd("group:" + group, article);
-        }
-        // Remove the article from groups.
-        for (String group : toRemove) {
-            conn.srem("group:" + group, article);
-        }
-    }
-
-    public List<Map<String, String>> getGroupArticles(Jedis conn, String group, int page) {
-        return getGroupArticles(conn, group, page, "score:");
-    }
-
    /**
     * Listing 1-10: fetching articles from a group
     */

@@ -166,16 +178,4 @@ public class Chapter01 {
        return getArticles(conn, page, key);
    }

-    private void printArticles(List<Map<String, String>> articles) {
-        for (Map<String, String> article : articles) {
-            System.out.println(" id: " + article.get("id"));
-            for (Map.Entry<String, String> entry : article.entrySet()) {
-                if (entry.getKey().equals("id")) {
-                    continue;
-                }
-                System.out.println(" " + entry.getKey() + ": " + entry.getValue());
-            }
-        }
-    }
-
}

@@ -193,8 +193,7 @@ public class Chapter02 {
        if (count <= 0) {
            // Remove the specified item from the shopping cart.
            conn.hdel("cart:" + session, item);
-        }
-        else {
+        } else {
            // Add the specified item to the shopping cart.
            conn.hset("cart:" + session, item, String.valueOf(count));
        }

@@ -259,8 +258,7 @@ public class Chapter02 {
            Long rank = conn.zrank("viewed:", itemId);
            // Use the item's view-count rank to decide whether the page should be cached.
            return rank != null && rank < 10000;
-        }
-        catch (MalformedURLException mue) {
+        } catch (MalformedURLException mue) {
            return false;
        }
    }

@@ -283,7 +281,6 @@ public class Chapter02 {

    }

-
    public static class Inventory {

        private String id;

@@ -304,7 +301,6 @@ public class Chapter02 {

    }

-
    /**
     * Listing 2-3
     */

@@ -335,8 +331,7 @@ public class Chapter02 {
                if (size <= limit) {
                    try {
                        sleep(1000);
-                    }
-                    catch (InterruptedException ie) {
+                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                    }
                    continue;

@@ -362,7 +357,6 @@ public class Chapter02 {

    }

-
    /**
     * Listing 2-5
     */

@@ -391,8 +385,7 @@ public class Chapter02 {
                if (size <= limit) {
                    try {
                        sleep(1000);
-                    }
-                    catch (InterruptedException ie) {
+                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                    }
                    continue;

@@ -417,7 +410,6 @@ public class Chapter02 {

    }

-
    /**
     * Listing 2-8
     */

@@ -449,8 +441,7 @@ public class Chapter02 {
                    try {
                        // No rows need to be cached right now; sleep 50 milliseconds and retry.
                        sleep(50);
-                    }
-                    catch (InterruptedException ie) {
+                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                    }
                    continue;

@@ -160,9 +160,9 @@ public class Chapter04 {
    public void benchmarkUpdateToken(Jedis conn, int duration) {
        try {
            @SuppressWarnings("rawtypes")
-            Class[] args = new Class[] { Jedis.class, String.class, String.class, String.class };
-            Method[] methods = new Method[] { this.getClass().getDeclaredMethod("updateToken", args),
-                this.getClass().getDeclaredMethod("updateTokenPipeline", args), };
+            Class[] args = new Class[] {Jedis.class, String.class, String.class, String.class};
+            Method[] methods = new Method[] {this.getClass().getDeclaredMethod("updateToken", args),
+                this.getClass().getDeclaredMethod("updateTokenPipeline", args),};
            for (Method method : methods) {
                int count = 0;
                long start = System.currentTimeMillis();

@@ -173,10 +173,9 @@ public class Chapter04 {
                }
                long delta = System.currentTimeMillis() - start;
                System.out.println(
                    method.getName() + ' ' + count + ' ' + (delta / 1000) + ' ' + (count / (delta / 1000)));
            }
-        }
-        catch (Exception e) {
+        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

@@ -281,7 +281,7 @@
    // updateCounter(conn, name, count, System.currentTimeMillis() / 1000);
    // }
    //
    // public static final int[] PRECISION = new int[]{1, 5, 60, 300, 3600, 18000, 86400};
    // public void updateCounter(Jedis conn, String name, int count, long now){
    //     Transaction trans = conn.multi();
    //     for (int prec : PRECISION) {

@@ -5,8 +5,6 @@ import redis.clients.jedis.Transaction;
import redis.clients.jedis.Tuple;
import redis.clients.jedis.ZParams;

-import java.io.*;
-import java.util.*;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

@@ -92,7 +90,7 @@ public class Chapter06 {
        System.out.println();

        System.out.println("Let's add a few people to the guild");
-        for (String name : new String[] { "jeff", "jenny", "jack", "jennifer" }) {
+        for (String name : new String[] {"jeff", "jenny", "jack", "jennifer"}) {
            joinGuild(conn, "test", name);
        }
        System.out.println();

@@ -172,7 +170,7 @@ public class Chapter06 {
        System.out.println("\n----- testDelayedTasks -----");
        conn.del("queue:tqueue", "delayed:");
        System.out.println("Let's start some regular and delayed tasks...");
-        for (long delay : new long[] { 0, 500, 0, 1500 }) {
+        for (long delay : new long[] {0, 500, 0, 1500}) {
            assert executeLater(conn, "tqueue", "testfn", new ArrayList<String>(), delay) != null;
        }
        long r = conn.llen("queue:tqueue");

@@ -341,7 +339,7 @@ public class Chapter06 {
        String start = prefix.substring(0, prefix.length() - 1) + suffix + '{';
        String end = prefix + '{';
        // Return the range.
-        return new String[] { start, end };
+        return new String[] {start, end};
    }

    public void joinGuild(Jedis conn, String guild, String user) {

@@ -394,7 +392,7 @@ public class Chapter06 {

        // If other autocomplete operations are in progress,
        // remove their start and end elements from the results.
-        for (Iterator<String> iterator = items.iterator(); iterator.hasNext();) {
+        for (Iterator<String> iterator = items.iterator(); iterator.hasNext(); ) {
            if (iterator.next().indexOf('{') != -1) {
                iterator.remove();
            }

@@ -422,8 +420,7 @@ public class Chapter06 {

            try {
                Thread.sleep(1);
-            }
-            catch (InterruptedException ie) {
+            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
            }
        }

@@ -452,8 +449,7 @@ public class Chapter06 {

            try {
                Thread.sleep(1);
-            }
-            catch (InterruptedException ie) {
+            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
            }
        }

@@ -540,11 +536,10 @@ public class Chapter06 {
        Gson gson = new Gson();
        String identifier = UUID.randomUUID().toString();
        String itemArgs = gson.toJson(args);
-        String item = gson.toJson(new String[] { identifier, queue, name, itemArgs });
+        String item = gson.toJson(new String[] {identifier, queue, name, itemArgs});
        if (delay > 0) {
            conn.zadd("delayed:", System.currentTimeMillis() + delay, item);
-        }
-        else {
+        } else {
            conn.rpush("queue:" + queue, item);
        }
        return identifier;

@@ -582,8 +577,7 @@ public class Chapter06 {
            values.put("message", message);
            String packed = new Gson().toJson(values);
            conn.zadd("msgs:" + chatId, messageId, packed);
-        }
-        finally {
+        } finally {
            releaseLock(conn, "chat:" + chatId, identifier);
        }
        return chatId;

@@ -621,8 +615,8 @@ public class Chapter06 {
            List<Map<String, Object>> messages = new ArrayList<Map<String, Object>>();
            for (String messageJson : messageStrings) {
                Map<String, Object> message = (Map<String, Object>) gson.fromJson(messageJson,
                    new TypeToken<Map<String, Object>>() {
                    }.getType());
                int messageId = ((Double) message.get("id")).intValue();
                if (messageId > seenId) {
                    seenId = messageId;

@@ -632,11 +626,11 @@ public class Chapter06 {
            }

            conn.zadd("chat:" + chatId, seenId, recipient);
-            seenUpdates.add(new Object[] { "seen:" + recipient, seenId, chatId });
+            seenUpdates.add(new Object[] {"seen:" + recipient, seenId, chatId});

            Set<Tuple> minIdSet = conn.zrangeWithScores("chat:" + chatId, 0, 0);
            if (minIdSet.size() > 0) {
-                msgRemoves.add(new Object[] { "msgs:" + chatId, minIdSet.iterator().next().getScore() });
+                msgRemoves.add(new Object[] {"msgs:" + chatId, minIdSet.iterator().next().getScore()});
            }
            chatMessages.add(new ChatMessages(chatId, messages));
        }

@@ -654,7 +648,7 @@ public class Chapter06 {
    }

    public void processLogsFromRedis(Jedis conn, String id, Callback callback)
        throws InterruptedException, IOException {
        while (true) {
            List<ChatMessages> fdata = fetchPendingMessages(conn, id);

@@ -681,8 +675,7 @@ public class Chapter06 {
                    callback.callback(line);
                }
                callback.callback(null);
-            }
-            finally {
+            } finally {
                reader.close();
            }

@@ -702,10 +695,10 @@ public class Chapter06 {

    }

    public class TestCallback implements Callback {

        public List<Integer> counts = new ArrayList<Integer>();

        private int index;

        public void callback(String line) {

@@ -721,7 +714,6 @@ public class Chapter06 {

    }

-
    public class RedisInputStream extends InputStream {

        private Jedis conn;

@@ -769,7 +761,6 @@ public class Chapter06 {

    }

-
    public class ChatMessages {

        public String chatId;

@@ -815,8 +806,7 @@ public class Chapter06 {
            if (item == null || item.getScore() > System.currentTimeMillis()) {
                try {
                    sleep(10);
-                }
-                catch (InterruptedException ie) {
+                } catch (InterruptedException ie) {
                    Thread.interrupted();
                }
                continue;

@@ -884,12 +874,10 @@ public class Chapter06 {
                long cleaned = clean(waiting, count);
                if (cleaned != 0) {
                    bytesInRedis -= cleaned;
-                }
-                else {
+                } else {
                    try {
                        sleep(250);
-                    }
-                    catch (InterruptedException ie) {
+                    } catch (InterruptedException ie) {
                        Thread.interrupted();
                    }
                }

@@ -905,21 +893,17 @@ public class Chapter06 {
                        byte[] bytes = new byte[read];
                        System.arraycopy(buffer, 0, bytes, 0, read);
                        conn.append((channel + logFile).getBytes(), bytes);
-                    }
-                    else {
+                    } else {
                        conn.append((channel + logFile).getBytes(), buffer);
                    }
                }
-            }
-            catch (IOException ioe) {
+            } catch (IOException ioe) {
                ioe.printStackTrace();
                throw new RuntimeException(ioe);
-            }
-            finally {
+            } finally {
                try {
                    in.close();
-                }
-                catch (Exception ignore) {
+                } catch (Exception ignore) {
                }
            }

@@ -935,12 +919,10 @@ public class Chapter06 {
                long cleaned = clean(waiting, count);
                if (cleaned != 0) {
                    bytesInRedis -= cleaned;
-                }
-                else {
+                } else {
                    try {
                        sleep(250);
-                    }
-                    catch (InterruptedException ie) {
+                    } catch (InterruptedException ie) {
                        Thread.interrupted();
                    }
                }

@ -267,7 +267,7 @@
|
||||||
// indexAd(conn, "1", new String[]{"USA", "CA"}, CONTENT, Ecpm.CPC, .25);
|
// indexAd(conn, "1", new String[]{"USA", "CA"}, CONTENT, Ecpm.CPC, .25);
|
||||||
// indexAd(conn, "2", new String[]{"USA", "VA"}, CONTENT + " wooooo", Ecpm.CPC, .125);
|
// indexAd(conn, "2", new String[]{"USA", "VA"}, CONTENT + " wooooo", Ecpm.CPC, .125);
|
||||||
//
|
//
|
||||||
// String[] usa = new String[]{"USA"};
|
// String[] usa = new String[]{"USA"}
|
||||||
// for (int i = 0; i < 100; i++) {
|
// for (int i = 0; i < 100; i++) {
|
||||||
// targetAds(conn, usa, CONTENT);
|
// targetAds(conn, usa, CONTENT);
|
||||||
// }
|
// }
|
||||||
|
@ -526,7 +526,7 @@
|
||||||
// int updateWeight = weights.containsKey("update") ? weights.get("update") : 1;
|
// int updateWeight = weights.containsKey("update") ? weights.get("update") : 1;
|
||||||
// int voteWeight = weights.containsKey("vote") ? weights.get("vote") : 0;
|
// int voteWeight = weights.containsKey("vote") ? weights.get("vote") : 0;
|
||||||
//
|
//
|
||||||
// String[] keys = new String[]{id, "sort:update", "sort:votes"};
|
// String[] keys = new String[]{id, "sort:update", "sort:votes"}
|
||||||
// Transaction trans = conn.multi();
|
// Transaction trans = conn.multi();
|
||||||
// id = zintersect(
|
// id = zintersect(
|
||||||
// trans, ttl, new ZParams().weights(0, updateWeight, voteWeight), keys);
|
// trans, ttl, new ZParams().weights(0, updateWeight, voteWeight), keys);
|
||||||
|
|
|
@@ -145,15 +145,13 @@ public class Chapter08 {
             if (conn.setnx(lockName, id) >= 1) {
                 conn.expire(lockName, lockTimeout);
                 return id;
-            }
-            else if (conn.ttl(lockName) <= 0) {
+            } else if (conn.ttl(lockName) <= 0) {
                 conn.expire(lockName, lockTimeout);
             }

             try {
                 Thread.sleep(1);
-            }
-            catch (InterruptedException ie) {
+            } catch (InterruptedException ie) {
                 Thread.interrupted();
             }
         }
@@ -340,7 +338,7 @@ public class Chapter08 {

     public void syndicateStatus(Jedis conn, long uid, long postId, long postTime, double start) {
         Set<Tuple> followers = conn.zrangeByScoreWithScores("followers:" + uid, String.valueOf(start), "inf", 0,
             POSTS_PER_PASS);

         Transaction trans = conn.multi();
         for (Tuple tuple : followers) {
@@ -355,10 +353,9 @@ public class Chapter08 {
         if (followers.size() >= POSTS_PER_PASS) {
             try {
                 Method method = getClass().getDeclaredMethod("syndicateStatus", Jedis.class, Long.TYPE, Long.TYPE,
                     Long.TYPE, Double.TYPE);
                 executeLater("default", method, uid, postId, postTime, start);
-            }
-            catch (Exception e) {
+            } catch (Exception e) {
                 throw new RuntimeException(e);
             }
         }
@@ -384,8 +381,7 @@ public class Chapter08 {
                 trans.exec();

                 return true;
-            }
-            finally {
+            } finally {
                 releaseLock(conn, key, lock);
             }
         }
@@ -453,10 +449,9 @@ public class Chapter08 {
         if (users.size() >= REFILL_USERS_STEP) {
             try {
                 Method method = getClass().getDeclaredMethod("refillTimeline", Jedis.class, String.class, String.class,
                     Double.TYPE);
                 executeLater("default", method, incoming, timeline, start);
-            }
-            catch (Exception e) {
+            } catch (Exception e) {
                 throw new RuntimeException(e);
             }
         }
@@ -486,17 +481,14 @@ public class Chapter08 {
         Method method = null;
         try {
             method = getClass().getDeclaredMethod("cleanTimelines", Jedis.class, Long.TYPE, Long.TYPE, Double.TYPE,
                 Boolean.TYPE);
-        }
-        catch (Exception e) {
+        } catch (Exception e) {
             throw new RuntimeException(e);
         }

         if (followers.size() >= POSTS_PER_PASS) {
             executeLater("default", method, uid, statusId, start, onLists);
-        }
-        else if (!onLists) {
+        } else if (!onLists) {
             executeLater("default", method, uid, statusId, 0, true);
         }
     }
@@ -530,8 +522,7 @@ public class Chapter08 {

             try {
                 method.invoke(instance, args);
-            }
-            catch (Exception e) {
+            } catch (Exception e) {
                 throw new RuntimeException(e);
             }
         }
@@ -10,7 +10,8 @@ import java.util.zip.CRC32;

 public class Chapter09 {

-    private static final String[] COUNTRIES = ("ABW AFG AGO AIA ALA ALB AND ARE ARG ARM ASM ATA ATF ATG AUS AUT AZE BDI "
+    private static final String[] COUNTRIES =
+        ("ABW AFG AGO AIA ALA ALB AND ARE ARG ARM ASM ATA ATF ATG AUS AUT AZE BDI "
         + "BEL BEN BES BFA BGD BGR BHR BHS BIH BLM BLR BLZ BMU BOL BRA BRB BRN BTN "
         + "BVT BWA CAF CAN CCK CHE CHL CHN CIV CMR COD COG COK COL COM CPV CRI CUB "
         + "CUW CXR CYM CYP CZE DEU DJI DMA DNK DOM DZA ECU EGY ERI ESH ESP EST ETH "
@@ -26,20 +27,27 @@ public class Chapter09 {
         + "USA UZB VAT VCT VEN VGB VIR VNM VUT WLF WSM YEM ZAF ZMB ZWE").split(" ");

     private static final Map<String, String[]> STATES = new HashMap<String, String[]>();

     private static final SimpleDateFormat ISO_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:00:00");

     static {
         STATES.put("CAN", "AB BC MB NB NL NS NT NU ON PE QC SK YT".split(" "));
         STATES.put("USA",
             ("AA AE AK AL AP AR AS AZ CA CO CT DC DE FL FM GA GU HI IA ID IL IN "
                 + "KS KY LA MA MD ME MH MI MN MO MP MS MT NC ND NE NH NJ NM NV NY OH "
                 + "OK OR PA PR PW RI SC SD TN TX UT VA VI VT WA WI WV WY").split(" "));
     }

     static {
         ISO_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
     }

     private int SHARD_SIZE = 512;

     private long DAILY_EXPECTED = 1000000;

     private Map<String, Long> EXPECTED = new HashMap<String, Long>();

     private long USERS_PER_SHARD = (long) Math.pow(2, 20);

     public static final void main(String[] args) {
@@ -135,8 +143,7 @@ public class Chapter09 {
                     setLocation(conn, i, country, state);
                     i++;
                 }
-            }
-            else {
+            } else {
                 setLocation(conn, i, country, "");
                 i++;
             }
@@ -160,8 +167,7 @@ public class Chapter09 {
                 for (String state : STATES.get(country)) {
                     assert states.get(country).get(state) == 1;
                 }
-            }
-            else {
+            } else {
                 assert countries.get(country) == 1;
             }
         }
@@ -189,8 +195,7 @@ public class Chapter09 {
         long shardId = 0;
         if (isDigit(key)) {
             shardId = Integer.parseInt(key, 10) / shardSize;
-        }
-        else {
+        } else {
             CRC32 crc = new CRC32();
             crc.update(key.getBytes());
             long shards = 2 * totalElements / shardSize;
@@ -241,8 +246,7 @@ public class Chapter09 {
                 expectedStr = conn.get(exkey);
                 expected = Integer.parseInt(expectedStr);
             }
-        }
-        else {
+        } else {
             expected = Long.parseLong(expectedStr);
         }

@@ -287,15 +291,12 @@ public class Chapter09 {
                     updateAggregates(countries, states, code);
                 }
             }
-        }
-        catch (IOException ioe) {
+        } catch (IOException ioe) {
             throw new RuntimeException(ioe);
-        }
-        finally {
+        } finally {
             try {
                 in.close();
-            }
-            catch (Exception e) {
+            } catch (Exception e) {
                 // ignore
             }
         }
@@ -328,7 +329,7 @@ public class Chapter09 {
     }

     public void updateAggregates(Map<String, Long> countries, Map<String, Map<String, Long>> states,
         List<Object> codes) {
         for (Object code : codes) {
             updateAggregates(countries, states, (String) code);
         }
@@ -390,7 +391,7 @@ public class Chapter09 {
         }
         sindex++;

-        return new String(new char[] { (char) cindex, (char) sindex });
+        return new String(new char[] {(char) cindex, (char) sindex});
     }

     private int bisectLeft(String[] values, String key) {