A detailed explanation of using Java to prevent duplicate data insertion under high concurrency in a distributed environment
- 2021-07-13 05:09:19
- OfStack
java Solves the Problem of Repeated Data Insertion in High Concurrent Environment in Distributed Environment
Preface
Reason: Duplicate requests received by the server at the same time
Phenomenon: Repeated data insertion/modification operations
Solution: Distributed locks
Solution outline: generate a digest (summary) of each request message, then use Redis to implement a distributed lock keyed on that digest
Tool class
Application of Distributed Lock
package com.nursling.web.filter.context;

import com.nursling.nosql.redis.RedisUtil;
import com.nursling.sign.SignType;
import com.nursling.sign.SignUtil;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.params.SetParams;

import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;

import java.util.HashMap;
import java.util.Map;
/**
* Concurrent interception
* Under high concurrency Filter out Tools with the same request
* @author Yang .
*
*/
public class ContextLJ {
private static final Integer JD = 0;
/**
* Lock Use redis For distributed projects Lock
* @param sign
* @param tiD
* @return
* @throws Exception
*/
public static boolean lock(String sign, String tiD) {
synchronized (JD) { // Lock
Jedis jedis = RedisUtil.getJedis();
String uTid = jedis.get(sign);
if (uTid == null) {
jedis.set(sign, tiD);
jedis.expire(sign, 36);
return true;
}
return false;
}
}
/**
* Lock verification
* @param sign
* @param tiD
* @return
*/
public static boolean checklock(String sign, String tiD){
Jedis jedis = RedisUtil.getJedis();
String uTid = jedis.get(sign);
return tiD.equals(uTid);
}
/**
* Remove the lock
* @param sign
* @param tiD
*/
public static void clent (String sign, String tiD){
if (checklock(sign, tiD)) {
Jedis jedis = RedisUtil.getJedis();
jedis.del(sign);
}
}
/**
* Get a summary
* @param request
* @return
*/
public static String getSign(ServletRequest request){
// This tool is the request Content requested in Assemble into key=value&key=value2 Form of Source code on-line surface
Map<String, String> map = SignUtil.getRequstMap((HttpServletRequest) request);
String sign = null;
try {
// Use here md5 Method generates summary SignUtil.getRequstMap The method source code will not be posted
sign = SignUtil.buildRequest(map, SignType.MD5);
} catch (Exception e) {
e.printStackTrace();
}
return sign;
}
}
public static Map<String, String> getRequstMap(HttpServletRequest req){
Map<String,String> params = new HashMap<String,String>();
Map<String, String[]> requestParams = req.getParameterMap();
for (Iterator<String> iter = requestParams.keySet().iterator(); iter.hasNext();) {
String name = (String) iter.next();
String[] values = (String[]) requestParams.get(name);
String valueStr = "";
for (int i = 0; i < values.length; i++) {
valueStr = (i == values.length - 1) ? valueStr + values[i]
: valueStr + values[i] + ",";
}
params.put(name, valueStr);
}
return params;
}
Here's the filter code
Utilization of distributed locks
package com.nursling.web.filter.transaction;
import com.google.gson.Gson;
import com.nursling.common.RandomUtil;
import com.nursling.dao.util.TransactionUtils;
import com.nursling.model.ApiResult;
import com.nursling.model.ApiRtnCode;
import com.nursling.web.filter.context.ContextLJ;
import org.apache.log4j.Logger;
import javax.servlet.*;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* Take control of things And Avoid interface Direct omission of abnormal information
* And filter frequent requests
* Created by yangchao on 2016/11/4.
*/
public class TransactionFilter implements Filter {
Logger log = Logger.getLogger(this.getClass());
@Override
public void init(FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(ServletRequest request, ServletResponse myResp, FilterChain chain) throws IOException, ServletException {
String sign = "sign_" + ContextLJ.getSign(request); // Generate summary
String tiD = RandomUtil.getRandomString(3) + "_" + Thread.currentThread().getId(); // The identity of the current thread
try {
if (!ContextLJ.lock(sign, tiD)) {
log.warn(" Abandon the same Concurrent request " + sign);
frequentlyError(myResp);
return;
}
if (!ContextLJ.checklock(sign, tiD)) {
log.warn(" Lock verification failed " + sign + " " + tiD);
frequentlyError(myResp);
return;
}
chain.doFilter(request, myResp); // Release
} catch (Exception e) { // Exception caught Perform anomaly filtering
log.error("", e);
retrunErrorInfo(myResp);
} finally {
ContextLJ.clent(sign, tiD);
}
}
/**
* Frequent requests
* @param myResp
*/
private void frequentlyError(ServletResponse myResp) throws IOException {
ApiResult<Object> re = new ApiResult<>();
((HttpServletResponse) myResp).setHeader("Content-type", "text/html;charset=UTF-8");
re.setMsg(" Be calm and don't ask frequently ");
re.setCode(ApiRtnCode.API_VERIFY_FAIL);
myResp.getWriter().write(new Gson().toJson(re));
}
/**
* Return exception information
* @param myResp
*/
private void retrunErrorInfo(ServletResponse myResp) throws IOException {
ApiResult<Object> re = new ApiResult<>();
re.setMsg("server error");
// Don't bother here
re.setCode(ApiRtnCode.SERVICE_ERROR);
myResp.getWriter().write(new Gson().toJson(re));
}
@Override
public void destroy() {
}
}
The program itself still has room for improvement, but after a period of testing — thorough concurrency tests plus running in production — this solution has shown no duplicate-request problems.
It remains very dependable in all but extreme cases.