待编码

This commit is contained in:
chenlw 2016-11-19 17:40:47 +08:00
parent ac8e453aa1
commit b00d19ce1a
18 changed files with 505 additions and 123 deletions

View File

@ -47,8 +47,11 @@ jdbc.minEvictableIdleTimeMillis=300000
#==============================================================================================================
table-suffix=_20152016
extract-log-localtion=/home/web_manage/log/
extract-standard-log-localtion=/home/web_manage/log2/
#extract-log-localtion=/home/web_manage/log/
#extract-standard-log-localtion=/home/web_manage/log2/
extract-log-localtion=D:\\test\\log\\
extract-standard-log-localtion=D:\\test\\log2\\
gather-tablespace-name=TS_TTSSS
@ -84,31 +87,31 @@ oracle-psw=oracle
#=============================================================================================================
# 文件上传下载
#=============================================================================================================
#file_upload_path=D:\\test\\
#
#file_download_path=D:\\test\\export.xlsx
#
#package_download_path=D:\\test\\
#
#package_name=sql_script_standard
#
#sql_script_path_last=D:\\test\\sql_script_last\\
#
#sql_script_path_standard=D:\\test\\sql_script_standard\\
file_upload_path=D:\\test\\
file_download_path=D:\\test\\export.xlsx
package_download_path=D:\\test\\
package_name=sql_script_standard
sql_script_path_last=D:\\test\\sql_script_last\\
sql_script_path_standard=D:\\test\\sql_script_standard\\
#=============================================================================================================
# 文件上传下载
#=============================================================================================================
file_upload_path=/excel_import_dir/
file_download_path=/excel_export_dir/export.xlsx
package_download_path=/
package_name=DefaultDescription
sql_script_path_last=/DefaultDescription_last/
sql_script_path_standard=/DefaultDescription/
#file_upload_path=/excel_import_dir/
#
#file_download_path=/excel_export_dir/export.xlsx
#
#package_download_path=/
#
#package_name=DefaultDescription
#
#sql_script_path_last=/DefaultDescription_last/
#
#sql_script_path_standard=/DefaultDescription/

View File

@ -13,6 +13,12 @@
<typeAlias alias="RegionalismEntity" type="com.platform.entities.RegionalismEntity"/>
<typeAlias alias="oracleForm" type="com.platform.entities.oracleForm"/>
</typeAliases>
<plugins>
<plugin interceptor="com.platform.utils.page.PageInterceptor">
</plugin>
</plugins>
<mappers>
<mapper resource="com/dao/mapper/data-details-mapper.xml" />
<mapper resource="com/dao/mapper/config-details-mapper.xml"/>
@ -22,4 +28,6 @@
<mapper resource="com/dao/mapper/SystemCodeMapper.xml"/>
<mapper resource="com/dao/mapper/VolumeMapper.xml"/>
</mappers>
</configuration>

View File

@ -65,30 +65,30 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
</resultMap>
<sql id="conditionsFilters">
<if test="dataType!=null">
AND data_details.data_type=#{dataType}
AND data_details.data_type=#{dataType,jdbcType=VARCHAR}
</if>
<if test="mark!=null and mark != ''">
AND data_details.mark=#{mark}
AND data_details.mark=#{mark,jdbcType=VARCHAR}
</if>
<if test="submittedBatch!=null">
AND
data_details.submitted_batch=#{submittedBatch}
data_details.submitted_batch=#{submittedBatch,jdbcType=VARCHAR}
</if>
<if test="cityName!=null">
AND data_details.city_name=#{cityName}
AND data_details.city_name=#{cityName,jdbcType=VARCHAR}
</if>
<if test="districtName!=null">
AND
data_details.district_name=#{districtName}
data_details.district_name=#{districtName,jdbcType=VARCHAR}
</if>
<if test="dataVersion !=null">
AND data_details.data_version=#{dataVersion}
AND data_details.data_version=#{dataVersion,jdbcType=INTEGER}
</if>
<if test="systemName !=null">
AND data_details.system_name=#{systemName}
AND data_details.system_name=#{systemName,jdbcType=VARCHAR}
</if>
<if test="dataYear !=null">
AND data_details.data_year=#{dataYear}
AND data_details.data_year=#{dataYear,jdbcType=VARCHAR}
</if>
<if test="array !=null and array.length > 0">
<foreach collection="array" item="item" index="index">
@ -120,6 +120,17 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
LIMIT #{limit}
</if>
</select>
<!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="getLimitedDataInfoByPage" parameterType="com.platform.entities.PagerOptions"
resultMap="getEntityByText">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_flag,mark
FROM data_details
<where>
<include refid="conditionsFilters" />
</where>
ORDER BY data_details.id
</select>
<!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="findAll" resultMap="getEntityByText">

View File

@ -106,8 +106,9 @@ public class DataModelController extends BaseController {
.append("}").toString());
PagerOptions pagerOptions = (PagerOptions) UtilsHelper
.newObjAndSetAttrsByClass(PagerOptions.class, params);
pagerOptions.setCurrentPageNum(Integer.valueOf(params.get("currentPageNum")));
//冷热区查询字段mark
pagerOptions.setMark(pagerOptions.getVolumeType());
pagerOptions.setMark(pagerOptions.getVolumeType().trim());
return dfs.getPagerTableData(pagerOptions);
}
@ -327,9 +328,9 @@ public class DataModelController extends BaseController {
HttpServletResponse req, @RequestBody DataInfoEntity move)
throws Exception {
log.debug("---------/task/transfer/save-----------------------");
int result = dfs.save(move);
// int result = dfs.save(move);
req.setStatus(200);
return result;
return 1;
}
@RequestMapping(value = "/code/list", method = RequestMethod.POST)

View File

@ -23,6 +23,7 @@ import com.platform.entities.oracleForm;
import com.platform.service.ILogRead;
import com.platform.service.IOracleExtractService;
import com.platform.service.thread.ThreadExtractOracle;
import com.platform.service.thread.ThreadExtractStandardOracle;
import com.platform.service.thread.ThreadGainOracleConnect;
import com.platform.utils.Configs;
import com.platform.utils.Constant;
@ -43,10 +44,10 @@ public class OracleController extends BaseController {
@Resource(name = "gatherOracleDao")
private GatherOracleDao gatherOracleDao;
@RequestMapping(value = "/oracle/{id}/StandardExtract", method = RequestMethod.POST)
@RequestMapping(value = "/oracle/standardextract/{name}/extract", method = RequestMethod.POST)
public void oracleStandardExtract(HttpServletRequest res, HttpServletResponse req,
@RequestBody oracleForm form) throws Exception {
Configs.CONSOLE_LOGGER.info("/oracle//StandardExtract");
Configs.CONSOLE_LOGGER.info("/oracle/standardextract/{name}/extract");
// res.setCharacterEncoding("UTF-8");
Configs.CONSOLE_LOGGER.info(form.getInneed().get(0).getName());
boolean isConnect = false;
@ -91,7 +92,7 @@ public class OracleController extends BaseController {
req.setStatus(500);
// 开始抽取数据到汇总库
if (isConnect && null != form.getInneed() && form.getInneed().size() > 0) {
ThreadExtractOracle thExtra = new ThreadExtractOracle(form, OracleExtract);
ThreadExtractStandardOracle thExtra = new ThreadExtractStandardOracle(form, OracleExtract);
thExtra.start();
// OracleExtract.extractOracle(form.getName(), form.getInneed(),
// form.getTarget());
@ -105,11 +106,11 @@ public class OracleController extends BaseController {
* @return
* @throws Exception
*/
@RequestMapping(value = "/StandardExtract/log", method = RequestMethod.POST)
@RequestMapping(value = "/standardextract/log", method = RequestMethod.POST)
@ResponseBody
public Object getStandardExtractLog(@RequestParam("rcName") String name,
HttpServletRequest res, HttpServletResponse req) throws Exception {
Configs.CONSOLE_LOGGER.info("/oracle/StandardExtract/log");
Configs.CONSOLE_LOGGER.info("/StandardExtract/log");
String result = logReadService.readStandardLog(name);
// StringBuilder sb = new StringBuilder();
// sb.append("查看相应日志").append("\n").append("看到了");

View File

@ -17,6 +17,8 @@ public interface DataInfoDao {
int getLimitedBeginId(PagerOptions pagerOptions);
List<DataInfoEntity> getLimitedDataInfoEntities(PagerOptions pagerOptions);
List<DataInfoEntity> getLimitedDataInfoByPage(PagerOptions pagerOptions);
List<String> getIdIsExist(List<Integer> list)throws Exception;

View File

@ -1,8 +1,16 @@
package com.platform.entities;
import com.platform.utils.page.Page;
public class PagerOptions {
private Integer currentPageNum = 1; //当前页码
public class PagerOptions extends Page{
// private Integer currentPageNum = 1; //当前页码
// private Integer totalLimit; //当前页前面已有多少条数据
private Integer priorTableSize; //前一次操作一页显示的数据条数
// private Integer limit; //一次查询返回记录条数
private String dataType; //数据类型
@ -17,15 +25,9 @@ public class PagerOptions {
private String systemName; //系统名称
private String dataYear; // 数据年度
private Integer limit; //一次查询返回记录条数
private Integer offset; // 查询偏移量起始id
private Integer totalLimit; //当前页前面已有多少条数据
private Integer priorTableSize; //前一次操作一页显示的数据条数
private String keyQuery;
//模糊查询字段
private String[] array;
@ -33,14 +35,6 @@ public class PagerOptions {
private String volumeType;
//冷热区字段
private String mark;
public Integer getCurrentPageNum() {
return currentPageNum;
}
public void setCurrentPageNum(Integer currentPageNum) {
this.currentPageNum = currentPageNum;
}
public String getDataType() {
return dataType;
@ -98,14 +92,6 @@ public class PagerOptions {
this.dataYear = dataYear;
}
public Integer getLimit() {
return limit;
}
public void setLimit(Integer limit) {
this.limit = limit;
}
public Integer getOffset() {
return offset;
}
@ -114,14 +100,6 @@ public class PagerOptions {
this.offset = offset;
}
public Integer getTotalLimit() {
return totalLimit;
}
public void setTotalLimit(Integer totalLimit) {
this.totalLimit = totalLimit;
}
public Integer getPriorTableSize() {
return priorTableSize;
}

View File

@ -1,6 +1,6 @@
package com.platform.entities;
public class SqlFileInfoEntity{
public class SqlFileInfoEntity implements Comparable<SqlFileInfoEntity> {
private int id;
@ -375,4 +375,14 @@ public class SqlFileInfoEntity{
this.ckIndicateStatusStandardModified = ckIndicateStatusStandardModified;
}
/** Orders entities by sysStatus in descending order (higher status first). */
@Override
public int compareTo(SqlFileInfoEntity arg0) {
// Integer.compare with swapped operands gives descending order and,
// unlike the original (which never returned 0 and returned -1 for equal
// values from both sides), satisfies the Comparable contract:
// sgn(x.compareTo(y)) == -sgn(y.compareTo(x)). The original could make
// Collections.sort/TimSort throw
// "Comparison method violates its general contract!".
return Integer.compare(arg0.getSysStatus(), this.getSysStatus());
}
}

View File

@ -54,7 +54,9 @@ public class OracleConnector {
public synchronized static ResultSet getSQLExecResultSet(Connection conn, String sql, String filePath) {
ResultSet resultSet = null;
filePath = filePath.replace(".log", "");
if (null != filePath) {
filePath = filePath.replace(".log", "");
}
try {
Statement statement = conn
.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
@ -89,7 +91,9 @@ public class OracleConnector {
* @return true:执行的不返回集合数据的sql成功 是否执行成功
*/
public synchronized static boolean execOracleSQL(Connection conn, String sql, String filePath) {
filePath = filePath.replace(".log", "");
if (null != filePath) {
filePath = filePath.replace(".log", "");
}
boolean flag = false;
try {
Statement statement = conn.createStatement();
@ -100,7 +104,7 @@ public class OracleConnector {
} catch (SQLException e) {
flag = false;
FileOperateHelper
.fileWrite(filePath, sql+ "\r\n"+e.getMessage()+"\r\n");
.fileWrite(filePath+".log", sql+ "\r\n"+e.getMessage()+"\r\n");
new CustomException(Custom4exception.OracleSQL_Except, e);
}
return flag;
@ -112,7 +116,9 @@ public class OracleConnector {
* @return true:执行结果大于1即有数据 是否执行成功
*/
public synchronized static boolean execUpdateOracleSQL(Connection conn, String sql, String filePath) {
filePath = filePath.replace(".log", "");
if (null != filePath) {
filePath = filePath.replace(".log", "");
}
boolean flag = false;
try {
Statement statement = conn.createStatement();
@ -125,7 +131,7 @@ public class OracleConnector {
} catch (SQLException e) {
flag = false;
FileOperateHelper
.fileWrite(filePath, sql+ "\r\n"+e.getMessage()+"\r\n");
.fileWrite(filePath+".log", sql+ "\r\n"+e.getMessage()+"\r\n");
new CustomException(Custom4exception.OracleSQL_Except, e, sql);
}
return flag;

View File

@ -268,11 +268,11 @@ public class OracleExtractHelper {
public void extractStandardPayTable(Connection conn, OracleConnectorParams oc, GatherOracleInfo totalOracle) {
String strTUser = Configs.GATHER_STANDARD_USER_NAME;
createPay(conn, oc);
String sql = "insert into " + strTUser + "." + Configs.GATHER_STANDARD_PAY_TABLE_NAME + "select * from "
+ strTUser + "." + Configs.GATHER_STANDARD_PAY_TABLE_NAME +"'@LINKTO"
String sql = "insert into " + strTUser + "." + Configs.GATHER_STANDARD_PAY_TABLE_NAME + " select * from "
+ strTUser + "." + Configs.GATHER_STANDARD_PAY_TABLE_NAME +"@LINKTO"
+ oc.getName();
String resultSql = sql.replace(";", "");
OracleConnector.execUpdateOracleSQL(conn, resultSql, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
OracleConnector.execOracleSQL(conn, resultSql, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
}
/** 执行抽取操作--执行表
@ -285,11 +285,11 @@ public class OracleExtractHelper {
createExec(conn, collectOracle);
String strTUser = Configs.GATHER_STANDARD_USER_NAME;
String sql = "insert into " + strTUser + "." + Configs.GATHER_STANDARD_EXEC_TABLE_NAME + "select * from "
+ strTUser + "." + Configs.GATHER_STANDARD_EXEC_TABLE_NAME +"'@LINKTO"
String sql = "insert into " + strTUser + "." + Configs.GATHER_STANDARD_EXEC_TABLE_NAME + " select * from "
+ strTUser + "." + Configs.GATHER_STANDARD_EXEC_TABLE_NAME +"@LINKTO"
+ collectOracle.getName();
String resultSql = sql.replace(";", "");
OracleConnector.execUpdateOracleSQL(conn, resultSql, Configs.EXTRACT_STANDARD_LOG_LOCALTION + collectOracle.getName());
OracleConnector.execOracleSQL(conn, resultSql, Configs.EXTRACT_STANDARD_LOG_LOCALTION + collectOracle.getName());
}
@ -301,7 +301,11 @@ public class OracleExtractHelper {
+ "SKRZHZH Varchar(255),FKZHCODE Varchar(255),FKZHNAME Varchar(255),FKYHCODE Varchar(255),FKYHNAME Varchar(255),QSZHCODE Varchar(255),"
+ "QSZHNAME Varchar(255),QSYHCODE Varchar(255),QSYHNAME Varchar(255),JE Numeric(18,2), SFTK Varchar(255),NIAN Varchar(255),ZY Varchar(255))";
OracleConnector.execOracleSQL(conn, payCmd, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
try {
OracleConnector.execOracleSQL(conn, payCmd, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
} catch (Exception e) {
// TODO: handle exception
}
}
private void createExec(Connection conn, OracleConnectorParams oc) {
@ -312,6 +316,10 @@ public class OracleExtractHelper {
+ "JJBZ Varchar(255),CGBZ Varchar(255),ZFFSCODE Varchar(255),ZFFSNAME Varchar(255),JZZFBZ Varchar(255),ZBJE Numeric(18,2),ZBTJJE Numeric(18,2),ZBDJJE Numeric(18,2),"
+ "ZBKYJE Numeric(18,2),ZYZFBZ Varchar(255),BZ Varchar(255))";
OracleConnector.execOracleSQL(conn, execCmd, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
try {
OracleConnector.execOracleSQL(conn, execCmd, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
} catch (Exception e) {
// TODO: handle exception
}
}
}

View File

@ -428,7 +428,7 @@ public class CheckoutServiceImpl implements ICheckoutService {
ck.setCheckResult("");
if ("y".equals(pay) || "Y".equals(pay)) {
String payFilePath = FileOperateHelper.addLastSeparator(ck.getPath())+Constant.standard_pay
+ ck.getAreaCode().toLowerCase()+ck.getSysCode()+".sql";
+ ck.getAreaCode().toLowerCase()+"_"+ck.getSysCode()+".sql";
File f = new File(payFilePath);
if(f.exists()){
ck.setPayResultLast(Constant.CHECKOUT_STATUS_THREE);
@ -444,7 +444,7 @@ public class CheckoutServiceImpl implements ICheckoutService {
}
if ("y".equals(exec) || "Y".equals(exec)) {
String execFilePath = FileOperateHelper.addLastSeparator(ck.getPath())+Constant.standard_indicate
+ ck.getAreaCode().toLowerCase()+ck.getSysCode()+".sql";
+ ck.getAreaCode().toLowerCase()+"_"+ck.getSysCode()+".sql";
File f = new File(execFilePath);
if (f.exists()) {
ck.setExecResultLast(Constant.CHECKOUT_STATUS_THREE);
@ -540,7 +540,7 @@ public class CheckoutServiceImpl implements ICheckoutService {
}
if(Constant.CHECKOUT_STATUS_THREE.equals(checkoutEntity.getExecResultLast())){
isTract = true;
checkoutEntity.setPayResultLast(Constant.CHECKOUT_STATUS_FIVE);
checkoutEntity.setExecResultLast(Constant.CHECKOUT_STATUS_FIVE);
}
if (isTract) {
Extractlist.add(checkoutEntity);

View File

@ -77,28 +77,31 @@ public class DataInfoServiceImp implements DataInfoService {
if (arrays.length > 0) {
pagerOptions.setArray(arrays);
}
int count = dfdDao.getLimitedDataCount(pagerOptions); //获取总记录条数
log.info("total colume " + count);
int offset = 0;
if (pagerOptions.getCurrentPageNum() > 1) {
pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1)
* pagerOptions.getPriorTableSize());
offset = dfdDao.getLimitedBeginId(pagerOptions); //获取起始查询id
log.info(offset);
}
pagerOptions.setOffset(offset + 1);
if (null !=pagerOptions.getSubmittedBatch() && !"".equals(pagerOptions.getSubmittedBatch()) ) {
pagerOptions.setSubmittedBatch("批次"+pagerOptions.getSubmittedBatch());
}
List<DataInfoEntity> result = dfdDao
.getLimitedDataInfoEntities(pagerOptions);
if (null != result) {
for (DataInfoEntity dataInfoEntity : result) {
dataInfoEntity.setVolumeType(dataInfoEntity.getMark());
}
}
List<DataInfoEntity> result = dfdDao.getLimitedDataInfoByPage(pagerOptions);
int count = result.size();
// int count = dfdDao.getLimitedDataCount(pagerOptions); //获取总记录条数
// log.info("total colume " + count);
// int offset = 0;
// if (pagerOptions.getCurrentPageNum() > 1) {
// pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1)
// * pagerOptions.getPriorTableSize());
// offset = dfdDao.getLimitedBeginId(pagerOptions); //获取起始查询id
// log.info(offset);
// }
// pagerOptions.setOffset(offset + 1);
// List<DataInfoEntity> result = dfdDao
// .getLimitedDataInfoEntities(pagerOptions);
// if (null != result) {
// for (DataInfoEntity dataInfoEntity : result) {
// dataInfoEntity.setVolumeType(dataInfoEntity.getMark());
// }
// }
modelMap.addAttribute("page", pagerOptions);
modelMap.addAttribute("data", result);
modelMap.addAttribute("length", count);
modelMap.addAttribute("length", pagerOptions.getTotleSize());
} catch (Exception e) {
new CustomException();
}

View File

@ -146,7 +146,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
dataInfoDao.update(data);
String replicasName = collectOracle.getName();
collectOracle.setName("CQ" + collectOracle.getName().replace("-", "_"));
String cmd = "kubectl label --overwrite rc "
String cmd = "kubectl annotate --overwrite rc "
+ replicasName + " standardExtractStatus=1";
List<String> rList = Constant.ganymedSSH
.execCmdWaitAcquiescent(cmd);
@ -164,14 +164,16 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
|| Constant.CHECKOUT_STATUS_FIVE.equals(tmpdata.getPayResultLast())
|| Constant.CHECKOUT_STATUS_SIX.equals(tmpdata.getPayResultLast())) {
oracleExtract.extractStandardPayTable(conn, collectOracle, oracleModel);//执行抽取
data.setPayResultLast(Constant.CHECKOUT_STATUS_SEVEN);
}
if (Constant.CHECKOUT_STATUS_THREE.equals(tmpdata.getExecResultLast())
|| Constant.CHECKOUT_STATUS_FIVE.equals(tmpdata.getExecResultLast())
|| Constant.CHECKOUT_STATUS_SIX.equals(tmpdata.getExecResultLast())) {
oracleExtract.extractStandardExecTable(conn, collectOracle, oracleModel);//执行抽取
data.setExecResultLast(Constant.CHECKOUT_STATUS_SEVEN);
}
// client.updateOrAddReplicasLabelById(collectOracle.getName(), "isExtract", "2"); //更新oracle汇总状态0标示为未汇总1标示汇总中2标示汇总完成
cmd = "kubectl label --overwrite rc "
cmd = "kubectl annotate --overwrite rc "
+ replicasName + " standardExtractStatus=2";
rList = Constant.ganymedSSH.execCmdWaitAcquiescent(cmd);
sb = new StringBuffer();

View File

@ -362,9 +362,24 @@ public class ScriptMakeService implements IScriptMakeService {
myfile.setSysStatus(getTotalStatus(myfile));
fileEntitys.add(myfile);
}
Compare4SqlFilesEntity com = new Compare4SqlFilesEntity();
Collections.sort(fileEntitys, com);
return fileEntitys;
// Compare4SqlFilesEntity com = new Compare4SqlFilesEntity();
// System.setProperty("java.util.Arrays.useLegacyMergeSort", "true");
// Collections.sort(fileEntitys, com);
// Collections.sort(fileEntitys);
List<ArrayList<SqlFileInfoEntity>> tmpList = new ArrayList<ArrayList<SqlFileInfoEntity>>();
for (int i = 0; i < 4; i++) {
tmpList.add(new ArrayList<SqlFileInfoEntity>());
}
for (SqlFileInfoEntity sqlFileInfoEntity : fileEntitys) {
tmpList.get(sqlFileInfoEntity.getSysStatus()).add(sqlFileInfoEntity);
}
ArrayList<SqlFileInfoEntity> result = new ArrayList<SqlFileInfoEntity>();
for (int i = (tmpList.size()-1); i >= 0; i--) {
result.addAll(tmpList.get(i));
}
return result;
}
/**

View File

@ -53,6 +53,17 @@ public class ThreadCheckoutStandardOracle extends Thread {
}
String[] taskNamekeys = taskNames.toArray(new String[lengs]);
for (String key : taskNamekeys) {
String cmd3 = "kubectl annotate --overwrite rc " + key
+ " checkoutFlag=0";
List<String> rList3 = Constant.ganymedSSH
.execCmdWaitAcquiescent(cmd3);
StringBuffer sb3 = new StringBuffer();
for (String str : rList3)
sb3.append(str).append("\n");
Configs.CONSOLE_LOGGER.info("更新replicationController标签 "
+ key + "\t[标签更新为: 失败]");
Configs.CONSOLE_LOGGER.info(sb3.toString());
// 获得 kuber的 pod
Pod tmpPod = filterPod(key);
if (null == tmpPod) {
@ -71,6 +82,29 @@ public class ThreadCheckoutStandardOracle extends Thread {
sb.append(str).append("\n");
Configs.CONSOLE_LOGGER.info("更新replicationController标签 "
+ key + "\t[标签更新为: 失败]");
Configs.CONSOLE_LOGGER.info(sb.toString());
String cmd2 = "kubectl annotate --overwrite rc " + key
+ " checkoutFlag=0";
List<String> rList2 = Constant.ganymedSSH
.execCmdWaitAcquiescent(cmd2);
StringBuffer sb2 = new StringBuffer();
for (String str : rList2)
sb2.append(str).append("\n");
Configs.CONSOLE_LOGGER.info("更新replicationController标签 "
+ key + "\t[标签更新为: 失败]");
Configs.CONSOLE_LOGGER.info(sb2.toString());
//更新数据库CheckoutEntity tmp = CacheOracleCheckoutEntity.getCheck(key);
CheckoutEntity tmp = CacheOracleCheckoutEntity.getCheck(key);
tmp.setCheckoutFlag(Constant.CHECKOUTFLAG_ZERO);
tmp.setPayResultLast(Constant.CHECKOUT_STATUS_ZERO);
tmp.setExecResultLast(Constant.CHECKOUT_STATUS_ZERO);
try {
this.updateDataInfo(tmp);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
try {
@ -162,7 +196,7 @@ public class ThreadCheckoutStandardOracle extends Thread {
try {
Connection conn = OracleConnector.ConnectionBuilder(url, Configs.ORACLE_USER, Configs.ORACLE_PSW, oc);
//支付--校验
if (Constant.CHECKOUT_STATUS_TWO.equals(tmp.getPayResultLast())) {
if (!Constant.CHECKOUT_STATUS_ONE.equals(tmp.getPayResultLast())) {
String paySql = "select * from dba_tables where owner = '"+Configs.COLLECT_STANDARD_TABLE_USER.toUpperCase()
+ "' and table_name = '"+Configs.COLLECT_PAY_TABLE.toUpperCase() +"'";
if(OracleConnector.execUpdateOracleSQL(conn, paySql,
@ -174,7 +208,7 @@ public class ThreadCheckoutStandardOracle extends Thread {
}
}
//可执行-- 校验
if (Constant.CHECKOUT_STATUS_TWO.equals(tmp.getExecResultLast())) {
if (!Constant.CHECKOUT_STATUS_ONE.equals(tmp.getExecResultLast())) {
String execSql = "select * from dba_tables where owner = '"+Configs.COLLECT_STANDARD_TABLE_USER.toUpperCase()
+ "' and table_name = '"+Configs.COLLECT_EXEC_TABLE.toUpperCase() +"'";
if(OracleConnector.execUpdateOracleSQL(conn, execSql,
@ -195,7 +229,7 @@ public class ThreadCheckoutStandardOracle extends Thread {
// client.updateOrAddReplicasLabelById(taskNSyame,
// "status", "2");
rList = Constant.ganymedSSH
.execCmdWaitAcquiescent(cmd);
.execCmdWaitAcquiescent(cmd2);
sb = new StringBuffer();
for (String str : rList)
sb.append(str).append("\n");

View File

@ -54,9 +54,10 @@ public class ThreadExtractStandardSqlServer extends Thread{
//校验结果成功或失 0未校验1不需校验2正在校验3校验成功4校验失败5待抽取6正在抽取7抽取完成
//TODO 开始抽取
//如果校验成功的 进行 抽取----支付标准表
if (Constant.CHECKOUT_STATUS_FIVE.equals(element.getPayResultLast())) {
if (Constant.CHECKOUT_STATUS_FIVE.equals(element.getPayResultLast())
|| Constant.CHECKOUT_STATUS_SIX.equals(element.getPayResultLast())) {
String payFilePath = FileOperateHelper.addLastSeparator(element.getPath())+Constant.standard_pay
+ element.getAreaCode().toLowerCase() + element.getSysCode()+".sql";
+ element.getAreaCode().toLowerCase()+"_" + element.getSysCode()+".sql";
DataInfoEntity data = new DataInfoEntity();
data.setId(element.getDataId());
data.setPayResultLast(Constant.CHECKOUT_STATUS_SIX);
@ -66,8 +67,8 @@ public class ThreadExtractStandardSqlServer extends Thread{
// TODO Auto-generated catch block
e.printStackTrace();
}
File paySql = new File(payFilePath);
try {
File paySql = new File(payFilePath);
//获取连接
Connection conn = OracleConnector.ConnectionBuilder("jdbc:oracle:thin:@" + oracleConnect.getIp() + ":" + oracleConnect.getPort() + ":"
+ oracleConnect.getDatabaseName(), oracleConnect.getUser(), oracleConnect.getPassword(), null);
@ -77,6 +78,7 @@ public class ThreadExtractStandardSqlServer extends Thread{
// 创建表空间 创建 抽取标准表的 用户并授权
oracleExtract.createTableSpace(conn, collectOracle , oracleConnect); //
oracleExtract.createOnlyUser(conn, collectOracle, oracleConnect);//
createPay(conn, collectOracle);
//读取文件流
FileInputStream fis = new FileInputStream(paySql);
@ -113,9 +115,10 @@ public class ThreadExtractStandardSqlServer extends Thread{
}
}
//如果校验成功的 进行 抽取----执行标准表
if (Constant.CHECKOUT_STATUS_FIVE.equals(element.getExecResultLast())){
if (Constant.CHECKOUT_STATUS_FIVE.equals(element.getExecResultLast())
|| Constant.CHECKOUT_STATUS_SIX.equals(element.getExecResultLast())){
String execFilePath = FileOperateHelper.addLastSeparator(element.getPath())+Constant.standard_indicate
+ element.getAreaCode().toLowerCase() + element.getSysCode()+".sql";
+ element.getAreaCode().toLowerCase() +"_" + element.getSysCode()+".sql";
DataInfoEntity data = new DataInfoEntity();
data.setId(element.getDataId());
data.setExecResultLast(Constant.CHECKOUT_STATUS_SIX);
@ -125,13 +128,21 @@ public class ThreadExtractStandardSqlServer extends Thread{
// TODO Auto-generated catch block
e.printStackTrace();
}
File execSql = new File(execFilePath);
try {
File execSql = new File(execFilePath);
Connection conn = OracleConnector.ConnectionBuilder("jdbc:oracle:thin:@" + oracleConnect.getIp() + ":" + oracleConnect.getPort() + ":"
+ oracleConnect.getDatabaseName(), oracleConnect.getUser(), oracleConnect.getPassword(), null);
//设置 日志 文件名
OracleConnectorParams collectOracle = new OracleConnectorParams();
collectOracle.setName("CQ"+ element.getAreaCode().toLowerCase()+"_"+element.getSysCode()+"_"+element.getDataVersion());
// 创建表空间 创建 抽取标准表的 用户并授权
oracleExtract.createTableSpace(conn, collectOracle , oracleConnect); //
oracleExtract.createOnlyUser(conn, collectOracle, oracleConnect);//
createExec(conn, collectOracle);
FileInputStream fis = new FileInputStream(execSql);
BufferedReader br = new BufferedReader(new InputStreamReader(fis,"GBK"));
String sql = br.readLine();
Connection conn = OracleConnector.ConnectionBuilder("jdbc:oracle:thin:@" + oracleConnect.getIp() + ":" + oracleConnect.getPort() + ":"
+ oracleConnect.getDatabaseName(), oracleConnect.getUser(), oracleConnect.getPassword(), null);
while (sql != null) {
//TODO 执行sql
//TODO 执行sql
@ -167,6 +178,26 @@ public class ThreadExtractStandardSqlServer extends Thread{
}
}
/**
 * Creates the standard payment table (u_bzbjy.zfxxb) on the given Oracle
 * connection, so the subsequent standard-extract SQL script has a target
 * table to insert into.
 *
 * @param conn open connection to the target Oracle instance
 * @param oc   connector parameters; only the name is used here, to select
 *             the extract log file passed to execOracleSQL
 */
private void createPay(Connection conn, OracleConnectorParams oc) {
// DDL for the standard payment-details table: business fields are all
// Varchar(255); the amount column JE is Numeric(18,2).
String payCmd = "CREATE TABLE u_bzbjy.zfxxb(XZQHDM Varchar(255),XZQHMC Varchar(255),PZBH Varchar(255),LYZBKZH Varchar(255),"
+ "ZFDATE Varchar(255),YSDWCODE Varchar(255),YSDWNAME Varchar(255),YWGKCS Varchar(255),XMCODE Varchar(255),XMNAME Varchar(255),"
+"XMLBCODE Varchar(255),XMLBNAME Varchar(255),ZB_NO Varchar(255),GNFLCODE Varchar(255),GNFLNAME Varchar(255),JJFLCODE Varchar(255),"
+"JJFLNAME Varchar(255),ZJXZCODE Varchar(255),ZJXZNAME Varchar(255),JSBFFSNAME Varchar(255),SKR Varchar(255),SKRYH Varchar(255),"
+ "SKRZHZH Varchar(255),FKZHCODE Varchar(255),FKZHNAME Varchar(255),FKYHCODE Varchar(255),FKYHNAME Varchar(255),QSZHCODE Varchar(255),"
+ "QSZHNAME Varchar(255),QSYHCODE Varchar(255),QSYHNAME Varchar(255),JE Numeric(18,2), SFTK Varchar(255),NIAN Varchar(255),ZY Varchar(255))";
// NOTE(review): if the table already exists this CREATE fails; the failure
// is presumably only written to the extract log by execOracleSQL — confirm
// that a pre-existing table is acceptable here.
OracleConnector.execOracleSQL(conn, payCmd, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
}
/**
 * Creates the standard indicator/executable table (u_bzbjy.kzxzb) on the
 * given Oracle connection, so the subsequent standard-extract SQL script
 * has a target table to insert into.
 *
 * @param conn open connection to the target Oracle instance
 * @param oc   connector parameters; only the name is used here, to select
 *             the extract log file passed to execOracleSQL
 */
private void createExec(Connection conn, OracleConnectorParams oc) {
// DDL for the standard indicator table: business fields are Varchar(255);
// the amount columns (ZBJE, ZBTJJE, ZBDJJE, ZBKYJE) are Numeric(18,2).
String execCmd = "CREATE TABLE u_bzbjy.kzxzb(XZQHDM Varchar(255),XZQHMC Varchar(255),YSND Varchar(255),ZBCODE Varchar(255),ZB_ID Varchar(255),ZB_NO Varchar(255),"
+ "ZBDJLXCODE Varchar(255),ZBDJLXNAME Varchar(255),ZBLXNAME Varchar(255),DOCNO Varchar(255),ZBSM Varchar(255),ZBFWDATE Varchar(255),ZBYSLXCODE Varchar(255),"
+ "ZBYSLXNAME Varchar(255),ZBYSLYNAME Varchar(255),YSDWCODE Varchar(255),YSDWNAME Varchar(255),GNFLCODE Varchar(255),GNFLNAME Varchar(255),JJFLCODE Varchar(255),"
+ "JJFLNAME Varchar(255),ZBGLCSNAME Varchar(255),SZGLCODE Varchar(255),SZGLNAME Varchar(255),XMCODE Varchar(255),XMNAME Varchar(255),GZBZ Varchar(255),"
+ "JJBZ Varchar(255),CGBZ Varchar(255),ZFFSCODE Varchar(255),ZFFSNAME Varchar(255),JZZFBZ Varchar(255),ZBJE Numeric(18,2),ZBTJJE Numeric(18,2),ZBDJJE Numeric(18,2),"
+ "ZBKYJE Numeric(18,2),ZYZFBZ Varchar(255),BZ Varchar(255))";
// NOTE(review): like createPay, a pre-existing table makes this CREATE
// fail; the error is presumably only logged by execOracleSQL — confirm.
OracleConnector.execOracleSQL(conn, execCmd, Configs.EXTRACT_STANDARD_LOG_LOCALTION + oc.getName());
}
}

View File

@ -0,0 +1,75 @@
package com.platform.utils.page;
/**
 * Pagination state shared between the web layer and the MyBatis page
 * interceptor: which page is requested, the page size, and (filled in by
 * the interceptor) the total row and page counts.
 */
public class Page {

    /** Requested page number, 1-based. */
    private int currentPageNum = 1;

    /** Maximum number of rows returned per page. */
    private int limit = 20;

    /** Total number of rows matching the query (set by the interceptor). */
    private int totleSize;

    /** Total number of pages (set by the interceptor). */
    private int totlePage;

    /** @return the 1-based requested page number */
    public int getCurrentPageNum() {
        return currentPageNum;
    }

    /** @param currentPageNum the 1-based page number to request */
    public void setCurrentPageNum(int currentPageNum) {
        this.currentPageNum = currentPageNum;
    }

    /** @return the maximum number of rows per page */
    public int getLimit() {
        return limit;
    }

    /** @param limit the maximum number of rows per page */
    public void setLimit(int limit) {
        this.limit = limit;
    }

    /** @return the total number of matching rows */
    public int getTotleSize() {
        return totleSize;
    }

    /** @param totleSize the total number of matching rows */
    public void setTotleSize(int totleSize) {
        this.totleSize = totleSize;
    }

    /** @return the total number of pages */
    public int getTotlePage() {
        return totlePage;
    }

    /** @param totlePage the total number of pages */
    public void setTotlePage(int totlePage) {
        this.totlePage = totlePage;
    }
}

View File

@ -0,0 +1,194 @@
package com.platform.utils.page;
import java.util.Map;
import java.util.Properties;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.plugin.Invocation;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import org.apache.ibatis.executor.resultset.ResultSetHandler;
import org.apache.ibatis.executor.statement.StatementHandler;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.plugin.Intercepts;
import org.apache.ibatis.plugin.Invocation;
import org.apache.ibatis.plugin.Plugin;
import org.apache.ibatis.plugin.Signature;
import org.apache.ibatis.reflection.MetaObject;
import org.apache.ibatis.reflection.SystemMetaObject;
import com.platform.utils.Bean2MapUtils;
@Intercepts({
    @Signature(type = StatementHandler.class, method = "prepare", args = {Connection.class}),
    @Signature(type = ResultSetHandler.class, method = "handleResultSets", args = {Statement.class})
})
public class PageInterceptor implements Interceptor {

    /**
     * MyBatis plugin adding MySQL-style pagination to any mapped statement
     * whose simple id ends with {@link #SELECT_ID} (case-insensitive). For a
     * matching statement it runs a derived COUNT(*) query, stores the totals
     * on the {@link Page} parameter object, and rewrites the bound SQL with a
     * LIMIT clause.
     */
    private static final String SELECT_ID = "page";

    /**
     * Intercepts StatementHandler.prepare: paginates the bound SQL when the
     * statement id ends with {@code "page"}; otherwise proceeds untouched.
     *
     * @param invocation the intercepted call; args[0] is the JDBC Connection
     * @return the result of proceeding with the (possibly rewritten) call
     */
    @Override
    public Object intercept(Invocation invocation) throws Throwable {
        if (invocation.getTarget() instanceof StatementHandler) {
            StatementHandler statementHandler = (StatementHandler) invocation.getTarget();
            MetaObject metaStatementHandler = SystemMetaObject.forObject(statementHandler);
            MappedStatement mappedStatement =
                    (MappedStatement) metaStatementHandler.getValue("delegate.mappedStatement");
            String selectId = mappedStatement.getId();
            // Only statements whose simple id ends with "page" are paginated.
            if (selectId.substring(selectId.lastIndexOf(".") + 1).toLowerCase().endsWith(SELECT_ID)) {
                BoundSql boundSql = (BoundSql) metaStatementHandler.getValue("delegate.boundSql");
                String sql = boundSql.getSql();
                // The pagination settings travel as the statement's parameter
                // object, which must be (a subclass of) Page here.
                Page co = (Page) boundSql.getParameterObject();
                @SuppressWarnings("unchecked")
                Map<String, Object> map = Bean2MapUtils.convertBean(boundSql.getParameterObject());
                if (null == co) {
                    co = new Page();
                }
                // Build the COUNT query, inlining any '?' placeholders with
                // the matching bean property values.
                String countSql = inlinePlaceholders(concatCountSql(sql), map);
                Connection connection = (Connection) invocation.getArgs()[0];
                int totalCount = queryTotalCount(connection, countSql);
                // Rewrite the statement that will actually execute. Note the
                // LIMIT offset uses the page number as requested, before any
                // clamping below (preserved from the original behavior).
                metaStatementHandler.setValue("delegate.boundSql.sql", concatPageMySql(sql, co));
                co.setTotleSize(totalCount);
                // Ceiling division: a partial final page still counts as a
                // page. The original used truncating division and reported
                // one page too few whenever totalCount % limit != 0.
                co.setTotlePage((totalCount + co.getLimit() - 1) / co.getLimit());
                // Only clamp against a positive page count; the original
                // could set currentPageNum to 0 when there were no rows.
                if (co.getTotlePage() > 0 && co.getCurrentPageNum() > co.getTotlePage()) {
                    co.setCurrentPageNum(co.getTotlePage());
                }
            }
        }
        return invocation.proceed();
    }

    /**
     * Runs {@code countSql} and returns the first column of the first row,
     * or 0 when the query fails (matching the original lenient behavior).
     * try-with-resources replaces the original finally block, which threw a
     * NullPointerException whenever prepareStatement itself failed.
     */
    private int queryTotalCount(Connection connection, String countSql) {
        try (PreparedStatement countStmt = connection.prepareStatement(countSql);
                ResultSet rs = countStmt.executeQuery()) {
            return rs.next() ? rs.getInt(1) : 0;
        } catch (Exception e) {
            e.printStackTrace();
            return 0;
        }
    }

    /**
     * Replaces each '?' in {@code countSql} with the value of the bean
     * property named immediately before it (e.g. "t.col=?" uses "col").
     * On any error the SQL is returned unchanged, as in the original.
     *
     * NOTE(review): this inlines raw values into SQL text, so it is only
     * safe for trusted parameter values, and it cannot handle literals that
     * themselves contain '?'. Kept for compatibility with the original.
     */
    private String inlinePlaceholders(String countSql, Map<String, Object> map) {
        if (!countSql.contains("?")) {
            return countSql;
        }
        try {
            StringBuffer sbr = new StringBuffer();
            String[] item = countSql.split("\\?");
            for (int i = 0; i < item.length; i++) {
                sbr.append(item[i]);
                String[] s = item[i].split("\\.");
                String[] re = s[s.length - 1].split("\\=");
                if (map.keySet().contains(re[re.length - 1])) {
                    sbr.append(map.get(re[re.length - 1]));
                } else {
                    sbr.append(" ");
                }
            }
            return sbr.toString();
        } catch (Exception e) {
            e.printStackTrace();
            return countSql;
        }
    }

    /**
     * Derives a COUNT(*) statement from {@code sql} by taking everything
     * after the first FROM and stripping a trailing ORDER BY clause.
     * NOTE: the whole statement is lowercased first, so case-sensitive
     * identifiers or string literals would be altered — preserved from the
     * original implementation.
     *
     * @param sql the original SELECT statement
     * @return the corresponding "select count(*) from ..." statement
     */
    public String concatCountSql(String sql) {
        StringBuffer sb = new StringBuffer("select count(*) from ");
        sql = sql.toLowerCase();
        if (sql.lastIndexOf("order") > sql.lastIndexOf(")")) {
            // ORDER BY is outermost (appears after the last ')') — drop it.
            sb.append(sql.substring(sql.indexOf("from") + 4, sql.lastIndexOf("order")));
        } else {
            sb.append(sql.substring(sql.indexOf("from") + 4));
        }
        return sb.toString();
    }

    /**
     * Appends a MySQL LIMIT clause selecting the page described by
     * {@code page}: offset (currentPageNum - 1) * limit, length limit.
     *
     * @param sql  the statement to paginate
     * @param page the page number and size to select
     * @return the statement with a trailing " limit offset , size"
     */
    public String concatPageMySql(String sql, Page page) {
        StringBuffer sb = new StringBuffer(sql);
        int size = page.getLimit();
        int index = page.getCurrentPageNum();
        if (index > 1) {
            sb.append(" limit ").append(size * (index - 1)).append(" , ").append(size);
        } else {
            sb.append(" limit ").append(0).append(" , ").append(size);
        }
        return sb.toString();
    }

    /**
     * Wraps StatementHandler targets in this plugin; other targets pass
     * through unchanged.
     */
    @Override
    public Object plugin(Object target) {
        if (target instanceof StatementHandler) {
            return Plugin.wrap(target, this);
        } else {
            return target;
        }
    }

    /** No configurable properties; required by the Interceptor interface. */
    @Override
    public void setProperties(Properties properties) {
    }

    /** Unused; retained for interface compatibility with existing callers. */
    public void setPageCount() {
    }
}