Implementing database read/write separation with Spring + MyBatis

  • 2020-06-01 09:42:53
  • OfStack

Four schemes are described below; the fourth is the recommended one.

Scheme 1

Separate reads and writes by configuration: define two SqlSessionFactoryBean objects, one per DataSource, and point each one's mapperLocations property at its own set of mapper files. All read statements go into the read mapper files and all write statements into the write mapper files.

Advantages: simple to implement

Disadvantages: hard to maintain; the existing mapper XML files have to be reorganized; multiple read data sources are not supported; not easy to extend

Implementation


<bean id="abstractDataSource" abstract="true" class="com.alibaba.druid.pool.DruidDataSource" init-method="init"
 destroy-method="close">
 <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
 <!-- Maximum time to wait for a connection, in milliseconds -->
 <property name="maxWait" value="60000"/>
 <!-- How often the eviction thread checks for idle connections to close, in milliseconds -->
 <property name="timeBetweenEvictionRunsMillis" value="60000"/>
 <!-- Minimum time a connection must sit idle in the pool before it can be evicted, in milliseconds -->
 <property name="minEvictableIdleTimeMillis" value="300000"/>
 <property name="validationQuery" value="SELECT 'x'"/>
 <property name="testWhileIdle" value="true"/>
 <property name="testOnBorrow" value="false"/>
 <property name="testOnReturn" value="false"/>
 <!-- Enable PSCache and set its size per connection -->
 <property name="poolPreparedStatements" value="true"/>
 <property name="maxPoolPreparedStatementPerConnectionSize" value="20"/>
 <property name="filters" value="config"/>
 <property name="connectionProperties" value="config.decrypt=true" />
</bean>
<bean id="readDataSource" parent="abstractDataSource">
 <!-- Basic connection attributes: url, username, password -->
 <property name="url" value="${read.jdbc.url}"/>
 <property name="username" value="${read.jdbc.user}"/>
 <property name="password" value="${read.jdbc.password}"/>
 <!-- Initial, minimum, and maximum pool sizes -->
 <property name="initialSize" value="${read.jdbc.initPoolSize}"/>
 <property name="minIdle" value="10"/>
 <property name="maxActive" value="${read.jdbc.maxPoolSize}"/>
</bean>
<bean id="writeDataSource" parent="abstractDataSource">
 <!-- Basic connection attributes: url, username, password -->
 <property name="url" value="${write.jdbc.url}"/>
 <property name="username" value="${write.jdbc.user}"/>
 <property name="password" value="${write.jdbc.password}"/>
 <!-- Initial, minimum, and maximum pool sizes -->
 <property name="initialSize" value="${write.jdbc.initPoolSize}"/>
 <property name="minIdle" value="10"/>
 <property name="maxActive" value="${write.jdbc.maxPoolSize}"/>
</bean>
<bean id="readSqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
 <!-- The SqlSessionFactory uses the data source configured above and the read-only mapper files -->
 <property name="dataSource" ref="readDataSource"/>
 <property name="mapperLocations" value="classpath:mapper/read/*.xml"/>
</bean>
<bean id="writeSqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
 <!-- The SqlSessionFactory uses the data source configured above and the write mapper files -->
 <property name="dataSource" ref="writeDataSource"/>
 <property name="mapperLocations" value="classpath:mapper/write/*.xml"/>
</bean>
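
The configuration above only defines the two factories; the original article does not show how DAOs are bound to them. One way, sketched here as an assumption (bean names and packages are illustrative), is to give each group of mappers its own SqlSessionTemplate and scanner:

<!-- Hypothetical wiring: read DAOs use the read factory, write DAOs use the write factory -->
<bean id="readSqlSessionTemplate" class="org.mybatis.spring.SqlSessionTemplate">
 <constructor-arg ref="readSqlSessionFactory"/>
</bean>
<bean id="writeSqlSessionTemplate" class="org.mybatis.spring.SqlSessionTemplate">
 <constructor-arg ref="writeSqlSessionFactory"/>
</bean>
<!-- Each scanner covers a separate package and injects the matching template -->
<bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
 <property name="basePackage" value="com.example.dao.read"/>
 <property name="sqlSessionTemplateBeanName" value="readSqlSessionTemplate"/>
</bean>
<bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
 <property name="basePackage" value="com.example.dao.write"/>
 <property name="sqlSessionTemplateBeanName" value="writeSqlSessionTemplate"/>
</bean>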

Scheme 2

Use Spring AOP at the service layer to achieve read/write separation: define an aspect that runs before DAO-layer methods are invoked, and use Spring's AbstractRoutingDataSource to manage the multiple data sources and select the right one dynamically.

Advantages: the data source is declared per DAO method with an annotation, so the existing code needs only small changes; the approach is easy to extend and supports multiple read data sources

Disadvantages: an annotation has to be added to every DAO method; this manual bookkeeping is error-prone

Implementation


// Defines an enumeration type, read and write 
public enum DynamicDataSourceGlobal {
 READ, WRITE;
}

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Custom annotation used to mark which data source a DAO method should use.
 * RetentionPolicy.RUNTIME: the annotation is recorded in the class file and retained by the
 * VM at runtime, so it can be read reflectively.
 * @author shma1664
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface DataSource {
 public DynamicDataSourceGlobal value() default DynamicDataSourceGlobal.READ;
}
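
A DAO method is then marked with the annotation to choose its data source. For example (the UserDao interface and User class below are illustrative, not from the original article):

// Illustrative mapper interface: each method declares which data source it needs
public interface UserDao {
 // routed to one of the read data sources by the aspect defined further below
 @DataSource(DynamicDataSourceGlobal.READ)
 User selectUserById(Long id);
 // routed to the write data source
 @DataSource(DynamicDataSourceGlobal.WRITE)
 int insertUser(User user);
}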

/**
 * Created by IDEA
 * User: mashaohua
 * Date: 2016-07-07 13:35
 * Desc: Stores and reads the selected data source type for the current thread (ThreadLocal).
 */
public class DynamicDataSourceHolder {
 private static final ThreadLocal<DynamicDataSourceGlobal> holder = new ThreadLocal<DynamicDataSourceGlobal>();
 public static void putDataSource(DynamicDataSourceGlobal dataSource){
 holder.set(dataSource);
 }
 public static DynamicDataSourceGlobal getDataSource(){
 return holder.get();
 }
 public static void clearDataSource() {
 holder.remove();
 }
}

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Created by IDEA
 * User: mashaohua
 * Date: 2016-07-14 10:56
 * Desc:  Dynamic data sources for read and write separation 
 */
public class DynamicDataSource extends AbstractRoutingDataSource {
 private Object writeDataSource; // the write data source
 private List<Object> readDataSources; // one or more read data sources
 private int readDataSourceSize; // number of read data sources
 private int readDataSourcePollPattern = 0; // read selection strategy: 0 = random, 1 = round-robin polling
 private AtomicLong counter = new AtomicLong(0);
 private static final Long MAX_POOL = Long.MAX_VALUE;
 private final Lock lock = new ReentrantLock();
 @Override
 public void afterPropertiesSet() {
 if (this.writeDataSource == null) {
  throw new IllegalArgumentException("Property 'writeDataSource' is required");
 }
 setDefaultTargetDataSource(writeDataSource);
 Map<Object, Object> targetDataSources = new HashMap<>();
 targetDataSources.put(DynamicDataSourceGlobal.WRITE.name(), writeDataSource);
 if (this.readDataSources == null) {
  readDataSourceSize = 0;
 } else {
  for(int i=0; i<readDataSources.size(); i++) {
  targetDataSources.put(DynamicDataSourceGlobal.READ.name() + i, readDataSources.get(i));
  }
  readDataSourceSize = readDataSources.size();
 }
 setTargetDataSources(targetDataSources);
 super.afterPropertiesSet();
 }
 @Override
 protected Object determineCurrentLookupKey() {
 DynamicDataSourceGlobal dynamicDataSourceGlobal = DynamicDataSourceHolder.getDataSource();
 if(dynamicDataSourceGlobal == null
  || dynamicDataSourceGlobal == DynamicDataSourceGlobal.WRITE
  || readDataSourceSize <= 0) {
  return DynamicDataSourceGlobal.WRITE.name();
 }
 int index = 1;
 if(readDataSourcePollPattern == 1) {
  // Polling mode 
  long currValue = counter.incrementAndGet();
  if((currValue + 1) >= MAX_POOL) {
  try {
   lock.lock();
   if((currValue + 1) >= MAX_POOL) {
   counter.set(0);
   }
  } finally {
   lock.unlock();
  }
  }
  index = (int) (currValue % readDataSourceSize);
  } else {
   // Random selection
   index = ThreadLocalRandom.current().nextInt(0, readDataSourceSize);
 }
 return dynamicDataSourceGlobal.name() + index;
 }
 public void setWriteDataSource(Object writeDataSource) {
 this.writeDataSource = writeDataSource;
 }
 public void setReadDataSources(List<Object> readDataSources) {
 this.readDataSources = readDataSources;
 }
 public void setReadDataSourcePollPattern(int readDataSourcePollPattern) {
 this.readDataSourcePollPattern = readDataSourcePollPattern;
 }
}

import org.apache.log4j.Logger;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.reflect.MethodSignature;
import java.lang.reflect.Method;
/**
 * Created by IDEA
 * User: mashaohua
 * Date: 2016-07-07 13:39
 * Desc: Aspect that selects the data source before a DAO method executes
 */
public class DynamicDataSourceAspect {
 private static final Logger logger = Logger.getLogger(DynamicDataSourceAspect.class);
 public void pointCut(){}
 public void before(JoinPoint point)
 {
 Object target = point.getTarget();
 String methodName = point.getSignature().getName();
 Class<?>[] clazz = target.getClass().getInterfaces();
 Class<?>[] parameterTypes = ((MethodSignature) point.getSignature()).getMethod().getParameterTypes();
 try {
  Method method = clazz[0].getMethod(methodName, parameterTypes);
  if (method != null && method.isAnnotationPresent(DataSource.class)) {
  DataSource data = method.getAnnotation(DataSource.class);
  DynamicDataSourceHolder.putDataSource(data.value());
  }
 } catch (Exception e) {
  logger.error(String.format("Choose DataSource error, method:%s, msg:%s", methodName, e.getMessage()));
 }
 }
 public void after(JoinPoint point) {
 DynamicDataSourceHolder.clearDataSource();
 }
}

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.1.xsd
http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-4.1.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-4.1.xsd">
 <bean id="abstractDataSource" abstract="true" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
 <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
  <!-- Maximum time to wait for a connection, in milliseconds -->
  <property name="maxWait" value="60000"/>
  <!-- How often the eviction thread checks for idle connections to close, in milliseconds -->
  <property name="timeBetweenEvictionRunsMillis" value="60000"/>
  <!-- Minimum time a connection must sit idle in the pool before it can be evicted, in milliseconds -->
  <property name="minEvictableIdleTimeMillis" value="300000"/>
 <property name="validationQuery" value="SELECT 'x'"/>
 <property name="testWhileIdle" value="true"/>
 <property name="testOnBorrow" value="false"/>
 <property name="testOnReturn" value="false"/>
  <!-- Enable PSCache and set its size per connection -->
 <property name="poolPreparedStatements" value="true"/>
 <property name="maxPoolPreparedStatementPerConnectionSize" value="20"/>
 <property name="filters" value="config"/>
 <property name="connectionProperties" value="config.decrypt=true" />
 </bean>
 <bean id="dataSourceRead1" parent="abstractDataSource">
 <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
  <!-- Basic connection attributes: url, username, password -->
  <property name="url" value="${read1.jdbc.url}"/>
  <property name="username" value="${read1.jdbc.user}"/>
  <property name="password" value="${read1.jdbc.password}"/>
  <!-- Initial, minimum, and maximum pool sizes -->
 <property name="initialSize" value="${read1.jdbc.initPoolSize}"/>
 <property name="minIdle" value="${read1.jdbc.minPoolSize}"/>
 <property name="maxActive" value="${read1.jdbc.maxPoolSize}"/>
 </bean>
 <bean id="dataSourceRead2" parent="abstractDataSource">
 <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
  <!-- Basic connection attributes: url, username, password -->
  <property name="url" value="${read2.jdbc.url}"/>
  <property name="username" value="${read2.jdbc.user}"/>
  <property name="password" value="${read2.jdbc.password}"/>
  <!-- Initial, minimum, and maximum pool sizes -->
 <property name="initialSize" value="${read2.jdbc.initPoolSize}"/>
 <property name="minIdle" value="${read2.jdbc.minPoolSize}"/>
 <property name="maxActive" value="${read2.jdbc.maxPoolSize}"/>
 </bean>
 <bean id="dataSourceWrite" parent="abstractDataSource">
 <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
  <!-- Basic connection attributes: url, username, password -->
  <property name="url" value="${write.jdbc.url}"/>
  <property name="username" value="${write.jdbc.user}"/>
  <property name="password" value="${write.jdbc.password}"/>
  <!-- Initial, minimum, and maximum pool sizes -->
 <property name="initialSize" value="${write.jdbc.initPoolSize}"/>
 <property name="minIdle" value="${write.jdbc.minPoolSize}"/>
 <property name="maxActive" value="${write.jdbc.maxPoolSize}"/>
 </bean>
 <bean id="dataSource" class="com.test.api.dao.datasource.DynamicDataSource">
 <property name="writeDataSource" ref="dataSourceWrite" />
 <property name="readDataSources">
  <list>
  <ref bean="dataSourceRead1" />
  <ref bean="dataSourceRead2" />
  </list>
 </property>
 <!-- Polling mode -->
 <property name="readDataSourcePollPattern" value="1" />
 <property name="defaultTargetDataSource" ref="dataSourceWrite"/>
 </bean>
 <tx:annotation-driven transaction-manager="transactionManager"/>
 <bean id="transactionManager" class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
 <property name="dataSource" ref="dataSource"/>
 </bean>
 <!-- MyBatis configuration -->
 <!-- Configure the SqlSessionFactory -->
 <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
 <!-- The SqlSessionFactory uses the data source configured above and the SQL mapper files -->
 <property name="dataSource" ref="dataSource"/>
 <property name="mapperLocations" value="classpath:mapper/*.xml"/>
 </bean>
 <!-- Configure the mapper scanner -->
 <bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
 <!-- Scan this package and its sub-packages for mapper interface classes -->
 <property name="basePackage" value="com.test.api.dao.inte"/>
 <property name="sqlSessionFactoryBeanName" value="sqlSessionFactory"/>
 </bean>
 <!-- AOP configuration for data source selection -->
 <bean id="dynamicDataSourceAspect" class="com.test.api.dao.datasource.DynamicDataSourceAspect" />
 <aop:config>
 <aop:aspect id="c" ref="dynamicDataSourceAspect">
  <aop:pointcut id="tx" expression="execution(* com.test.api.dao.inte..*.*(..))"/>
  <aop:before pointcut-ref="tx" method="before"/>
  <aop:after pointcut-ref="tx" method="after"/>
 </aop:aspect>
 </aop:config>
</beans>

Scheme 3

Read/write separation is implemented through a MyBatis plugin: an interceptor selects the real data source before MyBatis creates the Statement object, deciding between read and write according to the kind of statement being executed (select vs. insert/update/delete).

Advantages: no changes to the existing code, multiple read data sources supported, easy to extend

Disadvantages:

Implementation


import java.sql.Connection;
/**
 * Created by IDEA
 * User: mashaohua
 * Date: 2016-07-19 15:40
 * Desc: Proxy interface over Connection that routes to the real read or write connection.
 */
public interface ConnectionProxy extends Connection {
 /**
  * Routes to the correct underlying connection based on the given read/write key.
  * @param key data source key (read or write)
  * @return the target Connection
  */
 Connection getTargetConnection(String key);
}

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import javax.sql.DataSource;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.jdbc.datasource.AbstractDataSource;
import org.springframework.jdbc.datasource.lookup.DataSourceLookup;
import org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup;
import org.springframework.util.Assert;
public abstract class AbstractDynamicDataSourceProxy extends AbstractDataSource implements InitializingBean {
 private List<Object> readDataSources;
 private List<DataSource> resolvedReadDataSources;
 private Object writeDataSource;
 private DataSource resolvedWriteDataSource;
 private int readDataSourcePollPattern = 0;
 private int readDsSize;
 private boolean defaultAutoCommit = true;
 private int defaultTransactionIsolation = Connection.TRANSACTION_READ_COMMITTED;
 public static final String READ = "read";
 public static final String WRITE = "write";
 private DataSourceLookup dataSourceLookup = new JndiDataSourceLookup();
 @Override
 public Connection getConnection() throws SQLException {
 return (Connection) Proxy.newProxyInstance(
   com.autohome.api.dealer.tuan.dao.rwmybatis.ConnectionProxy.class.getClassLoader(),
  new Class[] {com.autohome.api.dealer.tuan.dao.rwmybatis.ConnectionProxy.class},
  new RWConnectionInvocationHandler());
 }
 @Override
 public Connection getConnection(String username, String password)
  throws SQLException {
 return (Connection) Proxy.newProxyInstance( com.autohome.api.dealer.tuan.dao.rwmybatis.ConnectionProxy.class.getClassLoader(),
  new Class[] {com.autohome.api.dealer.tuan.dao.rwmybatis.ConnectionProxy.class},
  new RWConnectionInvocationHandler(username,password));
 }
 public int getReadDsSize(){
 return readDsSize;
 }
 public List<DataSource> getResolvedReadDataSources() {
 return resolvedReadDataSources;
 }
 public void afterPropertiesSet() throws Exception {
 if(writeDataSource == null){
  throw new IllegalArgumentException("Property 'writeDataSource' is required");
 }
 this.resolvedWriteDataSource = resolveSpecifiedDataSource(writeDataSource);
 resolvedReadDataSources = new ArrayList<DataSource>(readDataSources.size());
 for(Object item : readDataSources){
  resolvedReadDataSources.add(resolveSpecifiedDataSource(item));
 }
 readDsSize = readDataSources.size();
 }
 protected DataSource determineTargetDataSource(String key) {
 Assert.notNull(this.resolvedReadDataSources, "DataSource router not initialized");
 if(WRITE.equals(key)){
  return resolvedWriteDataSource;
 }else{
  return loadReadDataSource();
 }
 }
 public Logger getParentLogger() {
 // NOOP Just ignore
 return null;
 }
 /**
 *  Get real data source
 * @param dataSource (jndi | real data source)
 * @return
 * @throws IllegalArgumentException
 */
 protected DataSource resolveSpecifiedDataSource(Object dataSource) throws IllegalArgumentException {
 if (dataSource instanceof DataSource) {
  return (DataSource) dataSource;
 }
 else if (dataSource instanceof String) {
  return this.dataSourceLookup.getDataSource((String) dataSource);
 }
 else {
  throw new IllegalArgumentException(
   "Illegal data source value - only [javax.sql.DataSource] and String supported: " + dataSource);
 }
 }
 protected abstract DataSource loadReadDataSource();
 public void setReadDsSize(int readDsSize) {
 this.readDsSize = readDsSize;
 }
 public List<Object> getReadDataSources() {
 return readDataSources;
 }
 public void setReadDataSources(List<Object> readDataSources) {
 this.readDataSources = readDataSources;
 }
 public Object getWriteDataSource() {
 return writeDataSource;
 }
 public void setWriteDataSource(Object writeDataSource) {
 this.writeDataSource = writeDataSource;
 }
 public void setResolvedReadDataSources(List<DataSource> resolvedReadDataSources) {
 this.resolvedReadDataSources = resolvedReadDataSources;
 }
 public DataSource getResolvedWriteDataSource() {
 return resolvedWriteDataSource;
 }
 public void setResolvedWriteDataSource(DataSource resolvedWriteDataSource) {
 this.resolvedWriteDataSource = resolvedWriteDataSource;
 }
 public int getReadDataSourcePollPattern() {
 return readDataSourcePollPattern;
 }
 public void setReadDataSourcePollPattern(int readDataSourcePollPattern) {
 this.readDataSourcePollPattern = readDataSourcePollPattern;
 }
 /**
 * Invocation handler that defers fetching an actual JDBC Connection
 * until first creation of a Statement.
 */
 private class RWConnectionInvocationHandler implements InvocationHandler {
 private String username;
 private String password;
 private Boolean readOnly = Boolean.FALSE;
 private Integer transactionIsolation;
 private Boolean autoCommit;
 private boolean closed = false;
 private Connection target;
 public RWConnectionInvocationHandler() {
 }
 public RWConnectionInvocationHandler(String username, String password) {
  this();
  this.username = username;
  this.password = password;
 }
 public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
  // Invocation on ConnectionProxy interface coming in...
  if (method.getName().equals("equals")) {
  // We must avoid fetching a target Connection for "equals".
  // Only consider equal when proxies are identical.
  return (proxy == args[0] ? Boolean.TRUE : Boolean.FALSE);
  }
  else if (method.getName().equals("hashCode")) {
  // We must avoid fetching a target Connection for "hashCode",
  // and we must return the same hash code even when the target
  // Connection has been fetched: use hashCode of Connection proxy.
  return new Integer(System.identityHashCode(proxy));
  }
  else if (method.getName().equals("getTargetConnection")) {
  // Handle getTargetConnection method: return underlying connection.
  return getTargetConnection(method,args);
  }
  if (!hasTargetConnection()) {
  // No physical target Connection kept yet ->
  // resolve transaction demarcation methods without fetching
  // a physical JDBC Connection until absolutely necessary.
  if (method.getName().equals("toString")) {
   return "RW Routing DataSource Proxy";
  }
  else if (method.getName().equals("isReadOnly")) {
   return this.readOnly;
  }
  else if (method.getName().equals("setReadOnly")) {
   this.readOnly = (Boolean) args[0];
   return null;
  }
  else if (method.getName().equals("getTransactionIsolation")) {
   if (this.transactionIsolation != null) {
   return this.transactionIsolation;
   }
   return defaultTransactionIsolation;
   // Else fetch actual Connection and check there,
   // because we didn't have a default specified.
  }
  else if (method.getName().equals("setTransactionIsolation")) {
   this.transactionIsolation = (Integer) args[0];
   return null;
  }
  else if (method.getName().equals("getAutoCommit")) {
   if (this.autoCommit != null)
   return this.autoCommit;
   return defaultAutoCommit;
   // Else fetch actual Connection and check there,
   // because we didn't have a default specified.
  }
  else if (method.getName().equals("setAutoCommit")) {
   this.autoCommit = (Boolean) args[0];
   return null;
  }
  else if (method.getName().equals("commit")) {
   // Ignore: no statements created yet.
   return null;
  }
  else if (method.getName().equals("rollback")) {
   // Ignore: no statements created yet.
   return null;
  }
  else if (method.getName().equals("getWarnings")) {
   return null;
  }
  else if (method.getName().equals("clearWarnings")) {
   return null;
  }
  else if (method.getName().equals("isClosed")) {
   return (this.closed ? Boolean.TRUE : Boolean.FALSE);
  }
  else if (method.getName().equals("close")) {
   // Ignore: no target connection yet.
   this.closed = true;
   return null;
  }
  else if (this.closed) {
   // Connection proxy closed, without ever having fetched a
   // physical JDBC Connection: throw corresponding SQLException.
   throw new SQLException("Illegal operation: connection is closed");
  }
  }
  // Target Connection already fetched,
  // or target Connection necessary for current operation ->
  // invoke method on target connection.
  try {
  return method.invoke(target, args);
  }
  catch (InvocationTargetException ex) {
  throw ex.getTargetException();
  }
 }
 /**
  * Return whether the proxy currently holds a target Connection.
  */
 private boolean hasTargetConnection() {
  return (this.target != null);
 }
 /**
  * Return the target Connection, fetching it and initializing it if necessary.
  */
 private Connection getTargetConnection(Method operation,Object[] args) throws SQLException {
  if (this.target == null) {
  String key = (String) args[0];
  // No target Connection held -> fetch one.
  if (logger.isDebugEnabled()) {
   logger.debug("Connecting to database for operation '" + operation.getName() + "'");
  }
  // Fetch physical Connection from DataSource.
  this.target = (this.username != null) ?
   determineTargetDataSource(key).getConnection(this.username, this.password) :
   determineTargetDataSource(key).getConnection();
  // If we still lack default connection properties, check them now.
  //checkDefaultConnectionProperties(this.target);
  // Apply kept transaction settings, if any.
  if (this.readOnly.booleanValue()) {
   this.target.setReadOnly(this.readOnly.booleanValue());
  }
  if (this.transactionIsolation != null) {
    this.target.setTransactionIsolation(this.transactionIsolation.intValue());
  }
  if (this.autoCommit != null && this.autoCommit.booleanValue() != this.target.getAutoCommit()) {
   this.target.setAutoCommit(this.autoCommit.booleanValue());
  }
  }
  else {
  // Target Connection already held -> return it.
  if (logger.isDebugEnabled()) {
   logger.debug("Using existing database connection for operation '" + operation.getName() + "'");
  }
  }
  return this.target;
 }
 }
}

import javax.sql.DataSource;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Created by IDEA
 * User: mashaohua
 * Date: 2016-07-19 16:04
 * Desc:
 */
public class DynamicRoutingDataSourceProxy extends AbstractDynamicDataSourceProxy {
 private AtomicLong counter = new AtomicLong(0);
 private static final Long MAX_POOL = Long.MAX_VALUE;
 private final Lock lock = new ReentrantLock();
 @Override
 protected DataSource loadReadDataSource() {
 int index = 1;
 if(getReadDataSourcePollPattern() == 1) {
  // Polling mode 
  long currValue = counter.incrementAndGet();
  if((currValue + 1) >= MAX_POOL) {
  try {
   lock.lock();
   if((currValue + 1) >= MAX_POOL) {
   counter.set(0);
   }
  } finally {
   lock.unlock();
  }
  }
  index = (int) (currValue % getReadDsSize());
  } else {
   // Random selection
   index = ThreadLocalRandom.current().nextInt(0, getReadDsSize());
 }
 return getResolvedReadDataSources().get(index);
 }
}
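
The interceptor that actually picks the data source before the Statement is created is not included in the listing above. The following is a minimal sketch of what it might look like; it is an assumption, not the article's code. It assumes MyBatis 3.4+, where StatementHandler.prepare takes a Connection and an Integer timeout, and that the class lives in the same package as ConnectionProxy and AbstractDynamicDataSourceProxy.

import org.apache.ibatis.executor.statement.StatementHandler;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.plugin.Intercepts;
import org.apache.ibatis.plugin.Invocation;
import org.apache.ibatis.plugin.Plugin;
import org.apache.ibatis.plugin.Signature;
import java.sql.Connection;
import java.util.Properties;
/**
 * Hypothetical routing interceptor for Scheme 3 (not part of the original article).
 * Runs before MyBatis prepares a Statement and swaps the lazy ConnectionProxy
 * for the real read or write connection.
 */
@Intercepts({@Signature(type = StatementHandler.class, method = "prepare",
  args = {Connection.class, Integer.class})})
public class RoutingStatementInterceptor implements Interceptor {
 @Override
 public Object intercept(Invocation invocation) throws Throwable {
  Connection conn = (Connection) invocation.getArgs()[0];
  if (conn instanceof ConnectionProxy) {
   StatementHandler handler = (StatementHandler) invocation.getTarget();
   String sql = handler.getBoundSql().getSql().trim().toLowerCase();
   // Plain SELECT statements go to a read data source, everything else to the write data source
   String key = sql.startsWith("select")
     ? AbstractDynamicDataSourceProxy.READ
     : AbstractDynamicDataSourceProxy.WRITE;
   // Replace the lazy proxy with the real connection chosen by the routing data source
   invocation.getArgs()[0] = ((ConnectionProxy) conn).getTargetConnection(key);
  }
  return invocation.proceed();
 }
 @Override
 public Object plugin(Object target) {
  return (target instanceof StatementHandler) ? Plugin.wrap(target, this) : target;
 }
 @Override
 public void setProperties(Properties properties) {
  // no properties needed
 }
}

Such an interceptor would then be registered in the mybatis-plugin-config.xml referenced by the Spring configuration below.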


<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="http://www.springframework.org/schema/beans  http://www.springframework.org/schema/beans/spring-beans-4.1.xsd
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx-4.1.xsd
http://www.springframework.org/schema/aop  http://www.springframework.org/schema/aop/spring-aop-4.1.xsd">
 <bean id="abstractDataSource" abstract="true" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
  <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
   <!-- Maximum time to wait for a connection, in milliseconds -->
   <property name="maxWait" value="60000"/>
   <!-- How often the eviction thread checks for idle connections to close, in milliseconds -->
   <property name="timeBetweenEvictionRunsMillis" value="60000"/>
   <!-- Minimum time a connection must sit idle in the pool before it can be evicted, in milliseconds -->
   <property name="minEvictableIdleTimeMillis" value="300000"/>
  <property name="validationQuery" value="SELECT 'x'"/>
  <property name="testWhileIdle" value="true"/>
  <property name="testOnBorrow" value="false"/>
  <property name="testOnReturn" value="false"/>
   <!-- Enable PSCache and set its size per connection -->
  <property name="poolPreparedStatements" value="true"/>
  <property name="maxPoolPreparedStatementPerConnectionSize" value="20"/>
  <property name="filters" value="config"/>
  <property name="connectionProperties" value="config.decrypt=true" />
 </bean>
 <bean id="dataSourceRead1" parent="abstractDataSource">
  <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
   <!-- Basic connection attributes: url, username, password -->
   <property name="url" value="${read1.jdbc.url}"/>
   <property name="username" value="${read1.jdbc.user}"/>
   <property name="password" value="${read1.jdbc.password}"/>
   <!-- Initial, minimum, and maximum pool sizes -->
  <property name="initialSize" value="${read1.jdbc.initPoolSize}"/>
  <property name="minIdle" value="${read1.jdbc.minPoolSize}"/>
  <property name="maxActive" value="${read1.jdbc.maxPoolSize}"/>
 </bean>
 <bean id="dataSourceRead2" parent="abstractDataSource">
  <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
   <!-- Basic connection attributes: url, username, password -->
   <property name="url" value="${read2.jdbc.url}"/>
   <property name="username" value="${read2.jdbc.user}"/>
   <property name="password" value="${read2.jdbc.password}"/>
   <!-- Initial, minimum, and maximum pool sizes -->
  <property name="initialSize" value="${read2.jdbc.initPoolSize}"/>
  <property name="minIdle" value="${read2.jdbc.minPoolSize}"/>
  <property name="maxActive" value="${read2.jdbc.maxPoolSize}"/>
 </bean>
 <bean id="dataSourceWrite" parent="abstractDataSource">
  <property name="driverClassName" value="com.microsoft.sqlserver.jdbc.SQLServerDriver"/>
   <!-- Basic connection attributes: url, username, password -->
   <property name="url" value="${write.jdbc.url}"/>
   <property name="username" value="${write.jdbc.user}"/>
   <property name="password" value="${write.jdbc.password}"/>
   <!-- Initial, minimum, and maximum pool sizes -->
  <property name="initialSize" value="${write.jdbc.initPoolSize}"/>
  <property name="minIdle" value="${write.jdbc.minPoolSize}"/>
  <property name="maxActive" value="${write.jdbc.maxPoolSize}"/>
 </bean>
 <bean id="dataSource" class="com.test.api.dao.datasource.DynamicRoutingDataSourceProxy">
  <property name="writeDataSource" ref="dataSourceWrite" />
  <property name="readDataSources">
   <list>
    <ref bean="dataSourceRead1" />
    <ref bean="dataSourceRead2" />
   </list>
  </property>
  <!-- Polling mode -->
  <property name="readDataSourcePollPattern" value="1" />
 </bean>
 <tx:annotation-driven transaction-manager="transactionManager"/>
 <bean id="transactionManager" class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
  <property name="dataSource" ref="dataSource"/>
 </bean>
 <!-- MyBatis configuration -->
 <!-- Configure the SqlSessionFactory -->
 <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
  <!-- The SqlSessionFactory uses the data source configured above and the SQL mapper files -->
  <property name="dataSource" ref="dataSource"/>
  <property name="mapperLocations" value="classpath:mapper/*.xml"/>
  <property name="configLocation" value="classpath:mybatis-plugin-config.xml" />
 </bean>
 <bean id="sqlSessionTemplate" class="org.mybatis.spring.SqlSessionTemplate">
  <constructor-arg ref="sqlSessionFactory" />
 </bean>
 <!-- Scan the package for all mapper interfaces and generate a proxy for each based on its mapper.xml -->
 <bean id="mapper" class="org.mybatis.spring.mapper.MapperScannerConfigurer">
  <property name="basePackage" value="com.test.api.dao.inte" />
  <property name="sqlSessionTemplate" ref="sqlSessionTemplate"></property>
 </bean>
</beans>
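
The mybatis-plugin-config.xml referenced by configLocation above is not included in the article. Assuming it only needs to register the routing interceptor sketched earlier (the class name and package below are hypothetical), it could be as simple as:

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration PUBLIC "-//mybatis.org//DTD Config 3.0//EN"
 "http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
 <plugins>
  <!-- hypothetical interceptor class from the sketch above -->
  <plugin interceptor="com.test.api.dao.datasource.RoutingStatementInterceptor"/>
 </plugins>
</configuration>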

Scheme 4

If your back end is built on Spring + MyBatis, you can implement read/write separation cleanly by combining Spring's AbstractRoutingDataSource with a MyBatis plugin (interceptor), without changing any existing code; this is the recommended scheme. The DynamicDataSourceGlobal, DynamicDataSourceHolder, and DynamicDataSource classes from Scheme 2 are reused unchanged, but instead of annotating DAO methods, the plugin below inspects each statement and puts READ or WRITE into DynamicDataSourceHolder.



import org.apache.ibatis.executor.Executor;
import org.apache.ibatis.executor.keygen.SelectKeyGenerator;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.SqlCommandType;
import org.apache.ibatis.plugin.*;
import org.apache.ibatis.session.ResultHandler;
import org.apache.ibatis.session.RowBounds;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Created by IDEA
 * User: mashaohua
 * Date: 2016-08-10 11:09
 * Desc:
 */
@Intercepts({
@Signature(type = Executor.class, method = "update", args = {
  MappedStatement.class, Object.class }),
@Signature(type = Executor.class, method = "query", args = {
  MappedStatement.class, Object.class, RowBounds.class,
  ResultHandler.class }) })
public class DynamicPlugin implements Interceptor {
 protected static final Logger logger = LoggerFactory.getLogger(DynamicPlugin.class);
 private static final String REGEX = ".*insert\\u0020.*|.*delete\\u0020.*|.*update\\u0020.*";
 private static final Map<String, DynamicDataSourceGlobal> cacheMap = new ConcurrentHashMap<>();
 @Override
 public Object intercept(Invocation invocation) throws Throwable {
  boolean synchronizationActive = TransactionSynchronizationManager.isSynchronizationActive();
  if(!synchronizationActive) {
   Object[] objects = invocation.getArgs();
   MappedStatement ms = (MappedStatement) objects[0];
   DynamicDataSourceGlobal dynamicDataSourceGlobal = null;
    if((dynamicDataSourceGlobal = cacheMap.get(ms.getId())) == null) {
     // SELECT statements
     if(ms.getSqlCommandType().equals(SqlCommandType.SELECT)) {
      // !selectKey: statements that fetch the generated key (e.g. SELECT LAST_INSERT_ID()) must use the write (master) data source
     if(ms.getId().contains(SelectKeyGenerator.SELECT_KEY_SUFFIX)) {
      dynamicDataSourceGlobal = DynamicDataSourceGlobal.WRITE;
     } else {
      BoundSql boundSql = ms.getSqlSource().getBoundSql(objects[1]);
      String sql = boundSql.getSql().toLowerCase(Locale.CHINA).replaceAll("[\\t\\n\\r]", " ");
      if(sql.matches(REGEX)) {
       dynamicDataSourceGlobal = DynamicDataSourceGlobal.WRITE;
      } else {
       dynamicDataSourceGlobal = DynamicDataSourceGlobal.READ;
      }
     }
    }else{
     dynamicDataSourceGlobal = DynamicDataSourceGlobal.WRITE;
    }
    logger.warn(" Set the method [{}] use [{}] Strategy, SqlCommandType [{}]..", ms.getId(), dynamicDataSourceGlobal.name(), ms.getSqlCommandType().name());
    cacheMap.put(ms.getId(), dynamicDataSourceGlobal);
   }
   DynamicDataSourceHolder.putDataSource(dynamicDataSourceGlobal);
  }
  return invocation.proceed();
 }
 @Override
 public Object plugin(Object target) {
  if (target instanceof Executor) {
   return Plugin.wrap(target, this);
  } else {
   return target;
  }
 }
 @Override
 public void setProperties(Properties properties) {
  //
 }
}
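
For the plugin to take effect it has to be registered with MyBatis. The article does not reproduce the Spring wiring for this scheme; presumably it reuses Scheme 2's DynamicDataSource bean and points the SqlSessionFactoryBean's configLocation at a MyBatis configuration file that registers DynamicPlugin, as in Scheme 3. A minimal sketch (bean layout, file name, and package are assumptions):

<bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
 <!-- ref "dataSource" is the DynamicDataSource bean from Scheme 2 -->
 <property name="dataSource" ref="dataSource"/>
 <property name="mapperLocations" value="classpath:mapper/*.xml"/>
 <property name="configLocation" value="classpath:mybatis-plugin-config.xml"/>
</bean>

<!-- inside mybatis-plugin-config.xml -->
<plugins>
 <plugin interceptor="com.test.api.dao.datasource.DynamicPlugin"/>
</plugins>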
