package cn.stylefeng.guns.base.db.context;

import cn.stylefeng.guns.base.db.dao.DataBaseInfoDao;
import cn.stylefeng.guns.base.db.factory.AtomikosFactory;
import cn.stylefeng.roses.core.config.properties.DruidProperties;

import javax.sql.DataSource;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Context container for data sources (singleton)
 *
 * @author fengshuonan
 * @date 2019-06-12-13:37
 */
public class DataSourceContext {

    /**
     * Name of the master data source
     */
    public static final String MASTER_DATASOURCE_NAME = "master";

    /**
     * Container of data sources
     */
    private static Map<String, DataSource> DATA_SOURCES = new ConcurrentHashMap<>();

    /**
     * Container of data source configurations
     */
    private static Map<String, DruidProperties> DATA_SOURCES_CONF = new ConcurrentHashMap<>();

    /**
     * Initialize all data sources
     *
     * @author fengshuonan
     * @Date 2019-06-12 13:48
     */
    public static void initDataSource(DruidProperties masterDataSourceProperties, DataSource dataSourcePrimary) {

        // Clear the master data source record in the database
        new DataBaseInfoDao(masterDataSourceProperties).deleteMasterDatabaseInfo();

        // Re-create the master data source record
        new DataBaseInfoDao(masterDataSourceProperties).createMasterDatabaseInfo();

        // Load all data source records from the database
        DataBaseInfoDao dataBaseInfoDao = new DataBaseInfoDao(masterDataSourceProperties);
        Map<String, DruidProperties> allDataBaseInfo = dataBaseInfoDao.getAllDataBaseInfo();

        // Store them in the global configuration container
        DATA_SOURCES_CONF = allDataBaseInfo;

        // Create a DataSource for every configuration found
        for (Map.Entry<String, DruidProperties> entry : allDataBaseInfo.entrySet()) {

            String dbName = entry.getKey();
            DruidProperties druidProperties = entry.getValue();

            // The master data source is already built, so reuse it; every other data source is created from its properties
            if (dbName.equalsIgnoreCase(MASTER_DATASOURCE_NAME)) {
                DATA_SOURCES_CONF.put(dbName, druidProperties);
                DATA_SOURCES.put(dbName, dataSourcePrimary);
            } else {
                DataSource dataSource = createDataSource(dbName, druidProperties);
                DATA_SOURCES.put(dbName, dataSource);
            }
        }
    }

    /**
     * Add a data source
     *
     * @author fengshuonan
     * @Date 2019-06-12 14:51
     */
    public static void addDataSource(String dbName, DataSource dataSource) {
        DATA_SOURCES.put(dbName, dataSource);
    }

    /**
     * Get all data sources
     *
     * @author fengshuonan
     * @Date 2019-06-12 13:50
     */
    public static Map<String, DataSource> getDataSources() {
        return DATA_SOURCES;
    }

    /**
     * Get all data source configurations
     *
     * @author fengshuonan
     * @Date 2019-06-18 19:26
     */
    public static Map<String, DruidProperties> getDataSourcesConfs() {
        return DATA_SOURCES_CONF;
    }

    /**
     * Template method for creating a data source
     */
    public static DataSource createDataSource(String dataSourceName, DruidProperties druidProperties) {

        // Register the configuration in the global container
        DATA_SOURCES_CONF.put(dataSourceName, druidProperties);

        return AtomikosFactory.create(dataSourceName, druidProperties);
    }

}
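
/*
 * Usage sketch (illustrative only): how a caller might drive this class at startup
 * and later look up a pool by name. The variable names masterDruidProperties and
 * primaryDataSource are assumptions, not part of this class; the actual wiring in
 * Guns may differ.
 *
 *     // at application startup: register the master record, then build every
 *     // DataSource configured in the database
 *     DataSourceContext.initDataSource(masterDruidProperties, primaryDataSource);
 *
 *     // later, e.g. from a dynamic-datasource router: fetch a pool by its name
 *     DataSource master = DataSourceContext.getDataSources()
 *             .get(DataSourceContext.MASTER_DATASOURCE_NAME);
 */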