
Commit 3c1a542

Java: MultiDataSource no longer caches APIJSON configuration tables in Redis

1 parent 060a10e commit 3c1a542

File tree

1 file changed
+131 -127 lines changed

APIJSON-Java-Server/APIJSONBoot-MultiDataSource/src/main/java/apijson/demo/DemoSQLExecutor.java

+131-127
@@ -45,159 +45,163 @@
 import static apijson.framework.APIJSONConstant.USER_;
 
 
-/**SQL executor, supports connection pools and multiple data sources
+/**
+ * SQL executor, supports connection pools and multiple data sources
  * See https://github.com/Tencent/APIJSON/issues/151 for details
+ *
  * @author Lemon
  */
 public class DemoSQLExecutor extends APIJSONSQLExecutor {
-    public static final String TAG = "DemoSQLExecutor";
-
-    // Redis cache <<<<<<<<<<<<<<<<<<<<<<<
-    public static final RedisTemplate<String, String> REDIS_TEMPLATE;
-    static {
-        REDIS_TEMPLATE = new RedisTemplate<>();
-        REDIS_TEMPLATE.setConnectionFactory(new JedisConnectionFactory(new RedisStandaloneConfiguration("127.0.0.1", 6379)));
-        REDIS_TEMPLATE.setKeySerializer(new StringRedisSerializer());
-        REDIS_TEMPLATE.setHashValueSerializer(new GenericToStringSerializer<>(Serializable.class));
-        REDIS_TEMPLATE.setValueSerializer(new GenericToStringSerializer<>(Serializable.class));
-        // REDIS_TEMPLATE.setValueSerializer(new FastJsonRedisSerializer<List<JSONObject>>(List.class));
-        REDIS_TEMPLATE.afterPropertiesSet();
-    }
-
-    // The following methods can be overridden to support a single-node global cache or a distributed cache such as Redis
-    @Override
-    public List<JSONObject> getCache(String sql, SQLConfig config) {
-        List<JSONObject> list = super.getCache(sql, config);
-        if (list == null) {
-            list = JSON.parseArray(REDIS_TEMPLATE.opsForValue().get(sql), JSONObject.class);
-        }
-        return list;
-    }
-    @Override
-    public synchronized void putCache(String sql, List<JSONObject> list, SQLConfig config) {
-        super.putCache(sql, list, config);
-        if (config != null && config.isMain()) {
-            if (config.isExplain() || RequestMethod.isHeadMethod(config.getMethod(), true)) {
-                REDIS_TEMPLATE.opsForValue().set(sql, JSON.toJSONString(list), 10*60, TimeUnit.SECONDS);
-            } else {
-                String table = config.getTable();
-                REDIS_TEMPLATE.opsForValue().set(sql, JSON.toJSONString(list), USER_.equals(table) || PRIVACY_.equals(table) ? 10*60 : 60, TimeUnit.SECONDS);
-            }
+    public static final String TAG = "DemoSQLExecutor";
+
+    // Redis cache <<<<<<<<<<<<<<<<<<<<<<<
+    public static final RedisTemplate<String, String> REDIS_TEMPLATE;
+    static {
+        REDIS_TEMPLATE = new RedisTemplate<>();
+        REDIS_TEMPLATE.setConnectionFactory(new JedisConnectionFactory(new RedisStandaloneConfiguration("127.0.0.1", 6379)));
+        REDIS_TEMPLATE.setKeySerializer(new StringRedisSerializer());
+        REDIS_TEMPLATE.setHashValueSerializer(new GenericToStringSerializer<>(Serializable.class));
+        REDIS_TEMPLATE.setValueSerializer(new GenericToStringSerializer<>(Serializable.class));
+        // REDIS_TEMPLATE.setValueSerializer(new FastJsonRedisSerializer<List<JSONObject>>(List.class));
+        REDIS_TEMPLATE.afterPropertiesSet();
+    }
+
+    // The following methods can be overridden to support a single-node global cache or a distributed cache such as Redis
+    @Override
+    public List<JSONObject> getCache(String sql, SQLConfig config) {
+        List<JSONObject> list = super.getCache(sql, config);
+        if (list == null) {
+            list = JSON.parseArray(REDIS_TEMPLATE.opsForValue().get(sql), JSONObject.class);
+        }
+        return list;
+    }
+
+    @Override
+    public synchronized void putCache(String sql, List<JSONObject> list, SQLConfig config) {
+        super.putCache(sql, list, config);
+
+        String table = config != null && config.isMain() ? config.getTable() : null;
+        if (table != null && DemoSQLConfig.CONFIG_TABLE_LIST.contains(table) == false) {
+            if (config.isExplain() || RequestMethod.isHeadMethod(config.getMethod(), true)) {
+                REDIS_TEMPLATE.opsForValue().set(sql, JSON.toJSONString(list), 10 * 60, TimeUnit.SECONDS);
+            } else {
+                REDIS_TEMPLATE.opsForValue().set(sql, JSON.toJSONString(list), USER_.equals(table) || PRIVACY_.equals(table) ? 10 * 60 : 60, TimeUnit.SECONDS);
+            }
         }
-    }
-    @Override
-    public synchronized void removeCache(String sql, SQLConfig config) {
-        super.removeCache(sql, config);
+    }
+
+    @Override
+    public synchronized void removeCache(String sql, SQLConfig config) {
+        super.removeCache(sql, config);
         if (config.getMethod() == RequestMethod.DELETE) { // avoid cache breakdown
-            REDIS_TEMPLATE.expire(sql, 60, TimeUnit.SECONDS);
+            REDIS_TEMPLATE.expire(sql, 60, TimeUnit.SECONDS);
         } else {
-            REDIS_TEMPLATE.delete(sql);
+            REDIS_TEMPLATE.delete(sql);
         }
-    }
+    }
 
-    @Override
-    public JSONObject execute(SQLConfig config, boolean unknownType) throws Exception {
-        JSONObject result = super.execute(config, unknownType);
-        RequestMethod method = config.getMethod();
-        if (method == RequestMethod.POST) { // not necessary, just query it directly
+    @Override
+    public JSONObject execute(SQLConfig config, boolean unknownType) throws Exception {
+        JSONObject result = super.execute(config, unknownType);
+        RequestMethod method = config.getMethod();
+        if (method == RequestMethod.POST) { // not necessary, just query it directly
             // Object id = result.get(config.getIdKey());
             // Object idIn = result.get(config.getIdKey() + "[]");
             // SQLConfig cacheConfig = APIJSONRouterApplication.DEFAULT_APIJSON_CREATOR.createSQLConfig();
             // cacheConfig.setMethod(RequestMethod.GET);
             //
+        } else if (method == RequestMethod.PUT || method == RequestMethod.DELETE) { // RequestMethod.isQueryMethod(method) == false) {
+            config.setMethod(RequestMethod.GET);
+            boolean isPrepared = config.isPrepared();
+            removeCache(config.getSQL(false), config);
+            config.setPrepared(isPrepared);
+            config.setMethod(method);
+        }
+        return result;
     }
-        else if (method == RequestMethod.PUT || method == RequestMethod.DELETE) { // RequestMethod.isQueryMethod(method) == false) {
-            config.setMethod(RequestMethod.GET);
-            boolean isPrepared = config.isPrepared();
-            removeCache(config.getSQL(false), config);
-            config.setPrepared(isPrepared);
-            config.setMethod(method);
-        }
-        return result;
-    }
 
-    // Redis cache >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+    // Redis cache >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
 
-    // Adapts connection pools: if a valid pooled Connection can be obtained here, SQLConfig does not need dbVersion, dbUri, dbAccount, dbPassword
-    @Override
-    public Connection getConnection(SQLConfig config) throws Exception {
-        if ("NEBULA".equals(config.getDatabase())) { // 3.0.0 and below must connect this way
-            String uri = config.getDBUri();
+    // Adapts connection pools: if a valid pooled Connection can be obtained here, SQLConfig does not need dbVersion, dbUri, dbAccount, dbPassword
+    @Override
+    public Connection getConnection(SQLConfig config) throws Exception {
+        if ("NEBULA".equals(config.getDatabase())) { // 3.0.0 and below must connect this way
+            String uri = config.getDBUri();
 
-            int start = uri.indexOf("://");
-            String prefix = uri.substring(0, start);
+            int start = uri.indexOf("://");
+            String prefix = uri.substring(0, start);
 
-            uri = uri.substring(start + "://".length());
-            int end = uri.indexOf("/");
-            String space = uri.substring(end + 1);
+            uri = uri.substring(start + "://".length());
+            int end = uri.indexOf("/");
+            String space = uri.substring(end + 1);
 
-            Properties props = new Properties();
-            props.put("url", prefix + "://" + space);
-            props.put("graphSpace", space);
+            Properties props = new Properties();
+            props.put("url", prefix + "://" + space);
+            props.put("graphSpace", space);
 
-            NebulaDriver driver = new NebulaDriver(uri.substring(0, end));
-            return driver.connect(prefix + "://" + space, props);
-            // return DriverManager.getConnection("jdbc:nebula://JDBC_TEST_SPACE", "root", "nebula");
-        }
+            NebulaDriver driver = new NebulaDriver(uri.substring(0, end));
+            return driver.connect(prefix + "://" + space, props);
+            // return DriverManager.getConnection("jdbc:nebula://JDBC_TEST_SPACE", "root", "nebula");
+        }
 
-        String datasource = config.getDatasource();
-        Log.d(TAG, "getConnection config.getDatasource() = " + datasource);
-
-        String key = datasource + "-" + config.getDatabase();
-        Connection c = connectionMap.get(key);
-        if (datasource != null && (c == null || c.isClosed())) {
-            try {
-                DataSource ds;
-                switch (datasource) {
-                case "HIKARICP":
-                    ds = DemoApplication.getApplicationContext().getBean(HikariDataSource.class);
-                    // Another way is to have DemoDataSourceConfig assign the DataSource it obtains during initialization to the static variable DATA_SOURCE_HIKARICP: ds = DemoDataSourceConfig.DATA_SOURCE_HIKARICP.getConnection();
-                    break;
-                default:
-                    Map<String, DruidDataSource> dsMap = DemoApplication.getApplicationContext().getBeansOfType(DruidDataSource.class);
-                    // Another way is to have DemoDataSourceConfig assign the DataSource it obtains during initialization to the static variable DATA_SOURCE_DRUID: ds = DemoDataSourceConfig.DATA_SOURCE_DRUID.getConnection();
-                    switch (datasource) {
-                    case "DRUID-TEST":
-                        ds = dsMap.get("druidTestDataSource");
-                        break;
-                    case "DRUID-ONLINE":
-                        ds = dsMap.get("druidOnlineDataSource");
-                        break;
-                    case "DRUID":
-                        ds = dsMap.get("druidDataSource");
-                        break;
-                    default:
-                        ds = null;
-                        break;
+        String datasource = config.getDatasource();
+        Log.d(TAG, "getConnection config.getDatasource() = " + datasource);
+
+        String key = datasource + "-" + config.getDatabase();
+        Connection c = connectionMap.get(key);
+        if (datasource != null && (c == null || c.isClosed())) {
+            try {
+                DataSource ds;
+                switch (datasource) {
+                case "HIKARICP":
+                    ds = DemoApplication.getApplicationContext().getBean(HikariDataSource.class);
+                    // Another way is to have DemoDataSourceConfig assign the DataSource it obtains during initialization to the static variable DATA_SOURCE_HIKARICP: ds = DemoDataSourceConfig.DATA_SOURCE_HIKARICP.getConnection();
+                    break;
+                default:
+                    Map<String, DruidDataSource> dsMap = DemoApplication.getApplicationContext().getBeansOfType(DruidDataSource.class);
+                    // Another way is to have DemoDataSourceConfig assign the DataSource it obtains during initialization to the static variable DATA_SOURCE_DRUID: ds = DemoDataSourceConfig.DATA_SOURCE_DRUID.getConnection();
+                    switch (datasource) {
+                    case "DRUID-TEST":
+                        ds = dsMap.get("druidTestDataSource");
+                        break;
+                    case "DRUID-ONLINE":
+                        ds = dsMap.get("druidOnlineDataSource");
+                        break;
+                    case "DRUID":
+                        ds = dsMap.get("druidDataSource");
+                        break;
+                    default:
+                        ds = null;
+                        break;
+                    }
+                    break;
+                }
+
+                connectionMap.put(key, ds == null ? null : ds.getConnection());
+            } catch (Exception e) {
+                Log.e(TAG, "getConnection try { "
+                        + "DataSource ds = DemoApplication.getApplicationContext().getBean(DataSource.class); .."
+                        + "} catch (Exception e) = " + e.getMessage());
             }
-                    break;
         }
 
-                connectionMap.put(key, ds == null ? null : ds.getConnection());
-            } catch (Exception e) {
-                Log.e(TAG, "getConnection try { "
-                        + "DataSource ds = DemoApplication.getApplicationContext().getBean(DataSource.class); .."
-                        + "} catch (Exception e) = " + e.getMessage());
-            }
+        // The super method must be executed last, because it also contains transaction-related handling.
+        // If this returned c instead, then when inserting/updating/deleting multiple objects only the first would commit, i.e. only the first object would actually be written to the database table
+        return super.getConnection(config);
     }
 
-        // The super method must be executed last, because it also contains transaction-related handling.
-        // If this returned c instead, then when inserting/updating/deleting multiple objects only the first would commit, i.e. only the first object would actually be written to the database table
-        return super.getConnection(config);
-    }
-
-    // Uncomment to support !key inverse column selection and column name mapping; requires the plugin https://github.com/APIJSON/apijson-column as a dependency first
-    // @Override
-    // protected String getKey(SQLConfig config, ResultSet rs, ResultSetMetaData rsmd, int tablePosition, JSONObject table,
-    //         int columnIndex, Map<String, JSONObject> childMap) throws Exception {
-    //     return ColumnUtil.compatOutputKey(super.getKey(config, rs, rsmd, tablePosition, table, columnIndex, childMap), config.getTable(), config.getMethod());
-    // }
-
-    // When the hidden-column feature is not needed, uncomment to improve performance
-    // @Override
-    // protected boolean isHideColumn(SQLConfig config, ResultSet rs, ResultSetMetaData rsmd, int tablePosition,
-    //         JSONObject table, int columnIndex, Map<String, JSONObject> childMap) throws SQLException {
-    //     return false;
-    // }
+    // Uncomment to support !key inverse column selection and column name mapping; requires the plugin https://github.com/APIJSON/apijson-column as a dependency first
+    // @Override
+    // protected String getKey(SQLConfig config, ResultSet rs, ResultSetMetaData rsmd, int tablePosition, JSONObject table,
+    //         int columnIndex, Map<String, JSONObject> childMap) throws Exception {
+    //     return ColumnUtil.compatOutputKey(super.getKey(config, rs, rsmd, tablePosition, table, columnIndex, childMap), config.getTable(), config.getMethod());
+    // }
+
+    // When the hidden-column feature is not needed, uncomment to improve performance
+    // @Override
+    // protected boolean isHideColumn(SQLConfig config, ResultSet rs, ResultSetMetaData rsmd, int tablePosition,
+    //         JSONObject table, int columnIndex, Map<String, JSONObject> childMap) throws SQLException {
+    //     return false;
+    // }
 
 }
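The functional change in this commit is in putCache: results for tables listed in DemoSQLConfig.CONFIG_TABLE_LIST are no longer written to Redis, so edits to APIJSON configuration tables take effect without waiting for a cache TTL to expire. That list is defined outside this diff; below is a minimal sketch of what such a constant could look like, assuming the usual APIJSON configuration table names (Access, Function, Request). The actual definition in DemoSQLConfig may use different names or a different collection type.

// Hypothetical sketch only — the real CONFIG_TABLE_LIST lives in DemoSQLConfig elsewhere in this repo.
import java.util.Arrays;
import java.util.List;

public class DemoSQLConfig /* extends APIJSONSQLConfig in the real project */ {
    // APIJSON configuration tables; per this commit, queries against them skip the Redis cache,
    // so configuration edits are picked up immediately instead of after a TTL expires.
    public static final List<String> CONFIG_TABLE_LIST = Arrays.asList(
            "Access", "Function", "Request" // assumed config table names
    );
}

With a list like this in place, the new `CONFIG_TABLE_LIST.contains(table) == false` check in putCache keeps ordinary business tables cached in Redis while configuration tables always hit the database directly.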
