Data records are now written into the single data_info table; migration of cold-zone data into the hot zone reworked; hot-zone data is temporarily non-migratable.

web_backend_develope
chenlw 9 years ago
parent 8ebf310700
commit 65afcb67e9

@ -30,7 +30,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<result property="volumePath" column="volume_path"/> <result property="volumePath" column="volume_path"/>
<result property="dataBaseType" column="data_base_type"/> <result property="dataBaseType" column="data_base_type"/>
<result property="execResultLast" column="checkout_indicate_last"/> <result property="execResultLast" column="checkout_indicate_last"/>
<result property="srcId" column="src_id"/>
<result property="payResultLast" column="checkout_pay_last"/> <result property="payResultLast" column="checkout_pay_last"/>
<result property="standardExtractStatus" column="standard_extract_status"/> <result property="standardExtractStatus" column="standard_extract_status"/>
<result property="checkoutFlag" column="checkout_flag"/> <result property="checkoutFlag" column="checkout_flag"/>
@ -125,7 +125,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<!-- 获取数据符合筛选条件的全部记录信息 --> <!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="findById" resultMap="getEntityByText" parameterType="java.lang.Integer"> <select id="findById" resultMap="getEntityByText" parameterType="java.lang.Integer">
SELECT SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_indicate_last,checkout_pay_last,checkout_flag,mark id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_indicate_last,checkout_pay_last,checkout_flag,mark,src_id
FROM data_details FROM data_details
WHERE id =#{id} WHERE id =#{id}
</select> </select>
@ -224,6 +224,9 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="dataBaseType != null and dataBaseType != ''"> <if test="dataBaseType != null and dataBaseType != ''">
data_base_type, data_base_type,
</if> </if>
<if test="srcId != null ">
src_id,
</if>
</trim> </trim>
) )
VALUES( VALUES(
@ -291,6 +294,9 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="dataBaseType != null and dataBaseType != ''"> <if test="dataBaseType != null and dataBaseType != ''">
#{dataBaseType}, #{dataBaseType},
</if> </if>
<if test="srcId != null ">
#{srcId},
</if>
</trim> </trim>
) )
</insert> </insert>
@ -305,6 +311,17 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
and remove ='0' and remove ='0'
</select> </select>
<select id="getExistBySrcIds" parameterType="java.util.List" resultMap="getEntityByText">
SELECT
id,regionalism_code,system_code,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_flag,src_id
FROM data_info
where src_id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
and remove ='0'
</select>
<update id="removes" parameterType="java.util.List"> <update id="removes" parameterType="java.util.List">
UPDATE UPDATE
data_info data_info
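The mapper changes above keep hot-zone copies in the same data_info table: the new src_id column points each copy back at its cold-zone original, and getExistBySrcIds looks those copies up by their source ids. A minimal sketch of how the query can be used to flag cold-zone rows that already have a hot-zone copy, assuming the project's DataInfoDao and DataInfoEntity plus the java.util collections; the helper name flagHotCopies is hypothetical (DataInfoServiceImp further down does the same thing inline):

    private void flagHotCopies(DataInfoDao dataInfoDao, List<DataInfoEntity> coldRows) throws Exception {
        if (coldRows.isEmpty()) {
            return; // an empty id list would yield an invalid "IN ()" clause
        }
        List<Integer> coldIds = new ArrayList<Integer>();
        for (DataInfoEntity cold : coldRows) {
            coldIds.add(cold.getId());
        }
        // hot-zone copies whose src_id references one of the cold-zone rows
        Set<Integer> srcIds = new HashSet<Integer>();
        for (DataInfoEntity hotCopy : dataInfoDao.getExistBySrcIds(coldIds)) {
            srcIds.add(hotCopy.getSrcId());
        }
        for (DataInfoEntity cold : coldRows) {
            if (srcIds.contains(cold.getId())) {
                cold.setExistHotData(1); // a hot-zone copy of this record exists
            }
        }
    }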

@ -49,7 +49,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
regionalism_code,system_code,dst_path,lasttime,fkid,dst_volume_ip,dst_volume_path regionalism_code,system_code,dst_path,lasttime,fkid,dst_volume_ip,dst_volume_path,add_data
</sql> </sql>
<!-- 获取数据全部记录信息 --> <!-- 获取数据全部记录信息 -->
@ -57,7 +57,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
SELECT SELECT
a.id id,a.regionalism_code regionalismCode,a.dst_path dstPath,a.complete_status completeStatus, a.id id,a.regionalism_code regionalismCode,a.dst_path dstPath,a.complete_status completeStatus,
a.rate rate, a.lasttime lastTime, a.system_code systemCode,a.dst_volume_ip dstVolumeIp,a.dst_volume_path dstVolumePath, a.rate rate, a.lasttime lastTime, a.system_code systemCode,a.dst_volume_ip dstVolumeIp,a.dst_volume_path dstVolumePath,
b.city_name cityName,b.district_name districtName,b.system_name systemName,b.data_type dataType, a.add_data addData,b.city_name cityName,b.district_name districtName,b.system_name systemName,b.data_type dataType,
b.data_version dataVersion,b.submitted_batch submittedBatch,b.data_path dataPath,b.data_charset charset, b.data_version dataVersion,b.submitted_batch submittedBatch,b.data_path dataPath,b.data_charset charset,
b.collection_time collectingTime,b.collector_name collectorName,b.collector_contacts collectorContacts, b.collection_time collectingTime,b.collector_name collectorName,b.collector_contacts collectorContacts,
b.data_year year,b.extract_status extractStatus,b.start_year startYear,b.end_year endYear, b.data_year year,b.extract_status extractStatus,b.start_year startYear,b.end_year endYear,
@ -73,7 +73,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<select id="findById" parameterType ="java.lang.Integer" resultType="com.platform.entities.DataInfoEntityMoveTmp"> <select id="findById" parameterType ="java.lang.Integer" resultType="com.platform.entities.DataInfoEntityMoveTmp">
SELECT SELECT
a.id id,a.regionalism_code regionalismCode,a.dst_path dstPath,a.complete_status completeStatus, a.id id,a.regionalism_code regionalismCode,a.dst_path dstPath,a.complete_status completeStatus,
a.rate rate, a.lasttime lastTime, a.system_code systemCode,a.dst_volume_ip dstVolumeIp,a.dst_volume_path dstVolumePath a.rate rate, a.lasttime lastTime, a.system_code systemCode,a.dst_volume_ip dstVolumeIp,a.dst_volume_path dstVolumePath a.add_data addData
FROM FROM
move_data_tmp a move_data_tmp a
<where> <where>
@ -140,6 +140,9 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="dstVolumePath != null and dstVolumePath != ''"> <if test="dstVolumePath != null and dstVolumePath != ''">
dst_volume_path, dst_volume_path,
</if> </if>
<if test="addData != null and addData != ''">
add_data,
</if>
</trim> </trim>
) )
VALUES( VALUES(
@ -174,6 +177,9 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="dstVolumePath != null and dstVolumePath != ''"> <if test="dstVolumePath != null and dstVolumePath != ''">
#{dstVolumePath}, #{dstVolumePath},
</if> </if>
<if test="addData != null and addData != ''">
#{addData},
</if>
</trim> </trim>
) )
</insert> </insert>
@ -182,7 +188,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
INSERT INTO move_data_tmp ( <include refid="Base_Column_List" /> ) INSERT INTO move_data_tmp ( <include refid="Base_Column_List" /> )
VALUES VALUES
<foreach collection="list" item="item" index="index" separator=","> <foreach collection="list" item="item" index="index" separator=",">
(#{item.regionalismCode,jdbcType=VARCHAR},#{item.systemCode,jdbcType=INTEGER},#{item.dstPath,jdbcType=VARCHAR},#{item.lastTime,jdbcType=VARCHAR},#{item.fkid},#{item.dstVolumeIp},#{item.dstVolumePath}) (#{item.regionalismCode,jdbcType=VARCHAR},#{item.systemCode,jdbcType=INTEGER},#{item.dstPath,jdbcType=VARCHAR},#{item.lastTime,jdbcType=VARCHAR},#{item.fkid},#{item.dstVolumeIp},#{item.dstVolumePath},#{item.addData})
</foreach> </foreach>
</select> </select>

@ -1,357 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
"http://ibatis.apache.org/dtd/ibatis-3-mapper.dtd">
<mapper namespace="com.platform.dao.IHotDataInfoDao">
<resultMap id="getEntityByText" type="com.platform.entities.DataInfoEntity">
<id property="id" column="id"/>
<result property="regionalismCode" column="regionalism_code"/>
<result property="cityName" column="city_name"/>
<result property="districtName" column="district_name"/>
<result property="systemCode" column="system_code"/>
<result property="systemName" column="system_name"/>
<result property="dataType" column="data_type"/>
<result property="dataVersion" column="data_version"/>
<result property="submittedBatch" column="submitted_batch"/>
<result property="dataPath" column="data_path"/>
<result property="charset" column="data_charset"/>
<result property="collectingTime" column="collection_time"/>
<result property="collectorContacts" column="collector_contacts"/>
<result property="collectorName" column="collector_name"/>
<result property="extractStatus" column="extract_status"/>
<result property="year" column="data_year"/>
<result property="startYear" column="start_year"/>
<result property="endYear" column="end_year"/>
<result property="volumeIp" column="volume_ip"/>
<result property="volumePath" column="volume_path"/>
<result property="mark" column="mark"/>
<result property="volumePath" column="volume_path"/>
<result property="dataBaseType" column="data_base_type"/>
<result property="execResultLast" column="checkout_indicate_last"/>
<result property="payResultLast" column="checkout_pay_last"/>
<result property="standardExtractStatus" column="standard_extract_status"/>
<result property="checkoutFlag" column="checkout_flag"/>
<result property="mark" column="mark"/>
</resultMap>
<sql id="conditionsFilters">
<if test="dataType!=null">
AND data_details.data_type=#{dataType}
</if>
<if test="mark!=null and mark != ''">
AND data_details.mark=#{mark}
</if>
<if test="submittedBatch!=null">
AND
data_details.submitted_batch=#{submittedBatch}
</if>
<if test="cityName!=null">
AND data_details.city_name=#{cityName}
</if>
<if test="districtName!=null">
AND
data_details.district_name=#{districtName}
</if>
<if test="dataVersion !=null">
AND data_details.data_version=#{dataVersion}
</if>
<if test="systemName !=null">
AND data_details.system_name=#{systemName}
</if>
<if test="dataYear !=null">
AND data_details.data_year=#{dataYear}
</if>
<if test="dataBaseType !=null and dataBaseType !=''">
AND data_details.data_base_type=#{dataBaseType}
</if>
<if test="array !=null and array.size() != 0">
<foreach collection="array" item="item" index="index">
AND CONCAT(regionalism_code,system_code,city_name,district_name,system_name,data_year) LIKE CONCAT('%',CONCAT('${item}','%'))
</foreach>
</if>
</sql>
<!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="getLimitedDataInfoEntities" parameterType="com.platform.form.PagerOptions"
resultMap="getEntityByText">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_flag,mark
FROM data_details
<where>
<include refid="conditionsFilters" />
<choose>
<when test="offset > 0">
and data_details.id>= #{offset}
</when>
<otherwise>
and data_details.id>=0
</otherwise>
</choose>
</where>
ORDER BY data_details.id
<if test="limit > 0">
LIMIT #{limit}
</if>
</select>
<!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="getLimitedDataInfoByPage" parameterType="com.platform.form.PagerOptions"
resultMap="getEntityByText">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_flag,mark
FROM data_details
<where>
<include refid="conditionsFilters" />
</where>
ORDER BY data_details.id
</select>
<!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="findAll" resultMap="getEntityByText">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_flag,mark
FROM data_details
ORDER BY data_details.id
</select>
<select id="countByDataPath" resultType="java.lang.Integer" parameterType="com.platform.entities.DataInfoEntity">
SELECT count(id)
FROM data_info
where data_path=#{dataPath} and remove='0'
</select>
<!-- 获取数据符合筛选条件的全部记录信息 -->
<select id="findById" resultMap="getEntityByText" parameterType="java.lang.Integer">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path,data_base_type,standard_extract_status,checkout_indicate_last,checkout_pay_last,checkout_flag,mark
FROM data_details
WHERE id =#{id}
</select>
<!-- 获取数据 条件:采集时间 -->
<select id="findByParam" resultType="com.platform.entities.DataInfoEntity" parameterType="com.platform.entities.DataInfoEntity">
SELECT
regionalism_code regionalismCode,system_code systemCode,checkout_indicate execResult,checkout_pay payResult
FROM data_details
WHERE
collection_time > #{collectingTime}
ORDER BY data_details.id
</select>
<!-- 获取数据符合筛选条件的总记录条数 -->
<select id="getLimitedDataCount" resultType="java.lang.Integer"
parameterType="com.platform.form.PagerOptions">
SELECT COUNT(id) FROM data_details
<where>
<include refid="conditionsFilters" />
</where>
</select>
<!-- 获取数据查询的起始di -->
<select id="getLimitedBeginId" resultType="java.lang.Integer"
parameterType="com.platform.form.PagerOptions">
SELECT MAX(idx) FROM (SELECT id idx FROM data_details
ORDER BY id LIMIT 0,#{totalLimit}) AS TEMP
</select>
<insert id="save" parameterType="com.platform.entities.DataInfoEntity">
INSERT INTO
data_info(
<trim suffixOverrides=",">
<if test="regionalismCode != null and regionalismCode != ''">
regionalism_code,
</if>
<if test="systemCode != null">
system_code,
</if>
<if test="dataType != null and dataType != ''">
data_type,
</if>
<if test="dataVersion != null">
data_version,
</if>
<if test="submittedBatch != null and submittedBatch != ''">
submitted_batch,
</if>
<if test="dataPath != null and dataPath != ''">
data_path,
</if>
<if test="collectingTime != null">
collection_time,
</if>
<if test="collectorName != null and collectorName != ''">
collector_name,
</if>
<if test="collectorContacts != null and collectorContacts != ''">
collector_contacts,
</if>
<if test="charset != null and charset != ''">
data_charset,
</if>
<if test="year != null and year != ''">
data_year,
</if>
<if test="startYear != null and startYear != ''">
start_year,
</if>
<if test="endYear != null and endYear != ''">
end_year,
</if>
<if test="volumeIp != null and volumeIp != ''">
volume_ip,
</if>
<if test="volumePath != null and volumePath != ''">
volume_path,
</if>
<if test="extractStatus > 0">
extract_status,
</if>
<if test="payResult != null and payResult != ''">
checkout_pay,
</if>
<if test="execResult != null and execResult != ''">
checkout_indicate,
</if>
<if test="startMonth != null and startMonth != ''">
start_month,
</if>
<if test="endMonth != null and endMonth != ''">
end_month,
</if>
<if test="dataBaseType != null and dataBaseType != ''">
data_base_type,
</if>
</trim>
)
VALUES(
<trim suffixOverrides=",">
<if test="regionalismCode != null and regionalismCode != ''">
#{regionalismCode},
</if>
<if test="systemCode != null">
#{systemCode},
</if>
<if test="dataType != null and dataType != ''">
#{dataType},
</if>
<if test="dataVersion != null">
#{dataVersion},
</if>
<if test="submittedBatch != null and submittedBatch != ''">
#{submittedBatch},
</if>
<if test="dataPath != null and dataPath != ''">
#{dataPath},
</if>
<if test="collectingTime != null">
#{collectingTime},
</if>
<if test="collectorName != null and collectorName != ''">
#{collectorName},
</if>
<if test="collectorContacts != null and collectorContacts != ''">
#{collectorContacts},
</if>
<if test="charset != null and charset != ''">
#{charset},
</if>
<if test="year != null and year != ''">
#{year},
</if>
<if test="startYear != null and startYear != ''">
#{startYear},
</if>
<if test="endYear != null and endYear != ''">
#{endYear},
</if>
<if test="volumeIp != null and volumeIp != ''">
#{volumeIp},
</if>
<if test="volumePath != null and volumePath != ''">
#{volumePath},
</if>
<if test="extractStatus > 0">
#{extractStatus},
</if>
<if test="payResult != null and payResult != ''">
#{payResult},
</if>
<if test="execResult != null and execResult != ''">
#{execResult},
</if>
<if test="startMonth != null and startMonth != ''">
#{startMonth},
</if>
<if test="endMonth != null and endMonth != ''">
#{endMonth},
</if>
<if test="dataBaseType != null and dataBaseType != ''">
#{dataBaseType},
</if>
</trim>
)
</insert>
<select id="getIdIsExist" parameterType="java.util.List" resultType="java.lang.String">
select data_path
from data_info
where id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
and remove ='0'
</select>
<update id="removes" parameterType="java.util.List">
UPDATE
data_info
set remove = '1'
where id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
and remove ='0'
</update>
<update id="updateExtract" parameterType="com.platform.entities.DataInfoEntity">
UPDATE
data_info
set extract_status = #{extractStatus}
<where>
id = #{id}
AND remove ='0'
</where>
</update>
<update id="update" parameterType="com.platform.entities.DataInfoEntity">
UPDATE
data_info
<set>
<trim suffixOverrides=",">
<if test="dataPath != null and dataPath != ''">
data_path= #{dataPath},
</if>
<if test="extractStatus != null and extractStatus != ''">
extract_status= #{extractStatus},
</if>
<if test="payResultLast != null and payResultLast != ''">
checkout_pay_last= #{payResultLast},
</if>
<if test="execResultLast != null and execResultLast != ''">
checkout_indicate_last= #{execResultLast},
</if>
<if test="checkoutFlag != null and checkoutFlag != ''">
checkout_flag= #{checkoutFlag},
</if>
</trim>
</set>
<where>
id = #{id}
AND remove ='0'
</where>
</update>
</mapper>

@ -196,18 +196,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
SELECT SELECT
<include refid="find_city_Column_List" /> <include refid="find_city_Column_List" />
FROM standard_data_details a left join collection_data_description l on a.area_code=l.regionalism_code AND a.sys_code=l.sys_code FROM standard_data_details a left join collection_data_description l on a.area_code=l.regionalism_code AND a.sys_code=l.sys_code
left join data_details d on a.area_code=d.regionalism_code AND a.sys_code=d.system_code left join data_details d on a.area_code=d.regionalism_code AND a.sys_code=d.system_code AND d.collection_time>#{collectingTime}
<where>
a.is_collection='是' AND a.city_name=#{cityName}
</where>
ORDER BY a.area_code,a.sys_code,d.data_version
</select>
<select id="findHotDataByCity" resultType="com.platform.entities.CheckoutEntity" parameterType="com.platform.entities.PreDataInfo">
SELECT
<include refid="find_city_Column_List" />
FROM standard_data_details a left join collection_data_description l on a.area_code=l.regionalism_code AND a.sys_code=l.sys_code
left join hot_data_details d on a.area_code=d.regionalism_code AND a.sys_code=d.system_code
<where> <where>
a.is_collection='是' AND a.city_name=#{cityName} a.is_collection='是' AND a.city_name=#{cityName}
</where> </where>

@ -107,12 +107,12 @@ public class DataModelController extends BaseController {
* @param res * @param res
* @param req * @param req
* @return * @return
* @throws UnsupportedEncodingException * @throws Exception
*/ */
@RequestMapping("/data.json") @RequestMapping("/data.json")
@ResponseBody @ResponseBody
public ModelMap getAllDataToJson(HttpServletRequest res, public ModelMap getAllDataToJson(HttpServletRequest res,
HttpServletResponse req) throws UnsupportedEncodingException { HttpServletResponse req) throws Exception {
log.info("-----------/data.json-----------"); log.info("-----------/data.json-----------");
res.setCharacterEncoding("UTF-8"); res.setCharacterEncoding("UTF-8");
Map<String, String[]> paramMap = res.getParameterMap(); Map<String, String[]> paramMap = res.getParameterMap();

@ -32,11 +32,6 @@ public class VolumeController extends BaseController{
@Resource(name = "volumeService") @Resource(name = "volumeService")
private IVolumeService volumeService; private IVolumeService volumeService;
/**
* volumeClient
*/
SetVolume volumeClient = new SetVolume();
/** volume /** volume
* @param res * @param res
* @param req * @param req
@ -80,7 +75,7 @@ public class VolumeController extends BaseController{
else { else {
req.setStatus(500); req.setStatus(500);
} }
String result = volumeClient.getAllvolume(); String result = volumeService.getAllvolume();
new ThreadVolumeImm("ThreadVolumeImm-in-VolumeController-delete").start(); new ThreadVolumeImm("ThreadVolumeImm-in-VolumeController-delete").start();
return result; return result;
} }

@ -22,6 +22,8 @@ public interface DataInfoDao {
List<String> getIdIsExist(List<Integer> list)throws Exception; List<String> getIdIsExist(List<Integer> list)throws Exception;
List<DataInfoEntity> getExistBySrcIds(List<Integer> list)throws Exception;
List<DataInfoEntity> findAll()throws Exception; List<DataInfoEntity> findAll()throws Exception;
DataInfoEntity findById(int id)throws Exception; DataInfoEntity findById(int id)throws Exception;

@ -1,39 +0,0 @@
package com.platform.dao;
import java.util.List;
import org.springframework.stereotype.Repository;
import com.platform.entities.DataInfoEntity;
import com.platform.form.PagerOptions;
@Repository(value = "hotDataInfoDao")
public interface IHotDataInfoDao {
int getLimitedDataCount(PagerOptions pagerOptions);
int getLimitedBeginId(PagerOptions pagerOptions);
List<DataInfoEntity> getLimitedDataInfoEntities(PagerOptions pagerOptions);
List<DataInfoEntity> getLimitedDataInfoByPage(PagerOptions pagerOptions);
List<String> getIdIsExist(List<Integer> list)throws Exception;
List<DataInfoEntity> findAll()throws Exception;
DataInfoEntity findById(int id)throws Exception;
List<DataInfoEntity> findByParam(DataInfoEntity data)throws Exception;
int countByDataPath(DataInfoEntity data)throws Exception;
int removes(List<Integer> list)throws Exception;
int save(DataInfoEntity data) throws Exception;
int update(DataInfoEntity data) throws Exception;
int updateExtract(DataInfoEntity data) throws Exception;
}

@ -25,8 +25,6 @@ public interface PreDataInfoDao {
List<CheckoutEntity> findByCity(PreDataInfo pre)throws Exception; List<CheckoutEntity> findByCity(PreDataInfo pre)throws Exception;
List<CheckoutEntity> findHotDataByCity(PreDataInfo pre)throws Exception;
void insertBatch(List<PreDataInfo> list) throws Exception; void insertBatch(List<PreDataInfo> list) throws Exception;
int update(PreDataInfo data) throws Exception; int update(PreDataInfo data) throws Exception;

@ -66,6 +66,16 @@ public class DataInfoEntity {
/** 是否进行了抽取 */ /** 是否进行了抽取 */
private String standardExtractStatus; private String standardExtractStatus;
/**
* id of the cold-zone source record that this row was migrated from
*/
private int srcId;
/**
* 0: no hot-zone copy exists; 1: a hot-zone copy exists
*/
private int existHotData;
public DataInfoEntity() { public DataInfoEntity() {
} }
@ -436,6 +446,34 @@ public class DataInfoEntity {
this.endMonth = endMonth; this.endMonth = endMonth;
} }
/**
* @return the srcId
*/
public int getSrcId() {
return srcId;
}
/**
* @param srcId the srcId to set
*/
public void setSrcId(int srcId) {
this.srcId = srcId;
}
/**
* @return the existHotData
*/
public int getExistHotData() {
return existHotData;
}
/**
* @param existHotData the existHotData to set
*/
public void setExistHotData(int existHotData) {
this.existHotData = existHotData;
}
@Override @Override
public String toString() { public String toString() {
return "id=" + this.id + " ,regionalismCode=" + this.regionalismCode return "id=" + this.id + " ,regionalismCode=" + this.regionalismCode

@ -19,6 +19,8 @@ public class DataInfoEntityMoveTmp extends DataInfoEntity {
private int fkid; private int fkid;
private String addData;
public DataInfoEntityMoveTmp() { public DataInfoEntityMoveTmp() {
// TODO Auto-generated constructor stub // TODO Auto-generated constructor stub
} }
@ -121,4 +123,18 @@ public class DataInfoEntityMoveTmp extends DataInfoEntity {
this.dstVolumePath = dstVolumePath; this.dstVolumePath = dstVolumePath;
} }
/**
* @return the addData
*/
public String getAddData() {
return addData;
}
/**
* @param addData the addData to set
*/
public void setAddData(String addData) {
this.addData = addData;
}
} }

@ -47,6 +47,11 @@ public class VolumeDataEntity {
/** volume的 块 */ /** volume的 块 */
private List<Brick> brick = new ArrayList<Brick>(); private List<Brick> brick = new ArrayList<Brick>();
/**
* zone mark of the volume: "0" hot zone, "1" cold zone
*/
private String hot;
/** /**
* @return the allSize * @return the allSize
@ -168,5 +173,18 @@ public class VolumeDataEntity {
this.brick = brick; this.brick = brick;
} }
/**
* @return the hot
*/
public String getHot() {
return hot;
}
/**
* @param hot the hot to set
*/
public void setHot(String hot) {
this.hot = hot;
}
} }

@ -18,7 +18,7 @@ public interface DataInfoService {
* @param pagerOptions * @param pagerOptions
* @return * @return
*/ */
public ModelMap getPagerTableData(PagerOptions pagerOptions); public ModelMap getPagerTableData(PagerOptions pagerOptions)throws Exception;
/** /**
* @param id * @param id

@ -129,74 +129,53 @@ public class CheckoutServiceImpl implements ICheckoutService {
cksql.setCityName(city); cksql.setCityName(city);
Map<String,CheckoutEntity> resul = new HashMap<String,CheckoutEntity>(); Map<String,CheckoutEntity> resul = new HashMap<String,CheckoutEntity>();
Map<String,CheckoutEntity> nodata = new HashMap<String,CheckoutEntity>(); // Map<String,CheckoutEntity> nodata = new HashMap<String,CheckoutEntity>();
//冷区数据 //冷区数据
List<CheckoutEntity> list = preDataInfoDao.findByCity(cksql); List<CheckoutEntity> totalList = preDataInfoDao.findByCity(cksql);
Map<String, List<CheckoutEntity>> coolDataMap = new HashMap<String, List<CheckoutEntity>>(); List<CheckoutEntity> list = new ArrayList<CheckoutEntity>();
List<CheckoutEntity> hotDatalist = new ArrayList<CheckoutEntity>();
for (CheckoutEntity checkoutEntity : totalList) {
// 如果是热区的数据
if ("0".equals(checkoutEntity.getMark())) {
hotDatalist.add(checkoutEntity);
}
else {
list.add(checkoutEntity);
}
}
// 对于 冷区热区都存在的相同数据 (地区,系统,版本,采集时间相同的 认为是同一份数据)
for (CheckoutEntity checkoutEntity : list) { for (CheckoutEntity checkoutEntity : list) {
if(null == checkoutEntity.getCollectingTime() if(null == checkoutEntity.getCollectingTime()){
|| c2.getTime().after(DateForm.string2DateByDay(checkoutEntity.getCollectingTime()))){ resul.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_0_0",checkoutEntity);
continue; continue;
} }
List<CheckoutEntity> tmpList = coolDataMap.get(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()); resul.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_"
if (null == tmpList) { +checkoutEntity.getDataVersion()+"_"+checkoutEntity.getCollectingTime(),checkoutEntity);
tmpList = new ArrayList<CheckoutEntity>();
coolDataMap.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode(),tmpList);
}
// 带数据的 系统 // 带数据的 系统
tmpList.add(checkoutEntity);
} }
//热区数据 //热区数据
List<CheckoutEntity> hotDatalist = preDataInfoDao.findHotDataByCity(cksql);
for (CheckoutEntity checkoutEntity : hotDatalist) { for (CheckoutEntity checkoutEntity : hotDatalist) {
if (null != checkoutEntity.getCollectingTime()) { //一段时间内的数据(半年)
//一段时间内的数据(半年) // 带数据的 系统
if (c2.getTime().before(DateForm.string2DateByDay(checkoutEntity.getCollectingTime()))) { resul.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_"
// 带数据的 系统 +checkoutEntity.getDataVersion()+"_"+checkoutEntity.getCollectingTime(), checkoutEntity);
resul.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_"+checkoutEntity.getDataVersion(), checkoutEntity);
}
// 半年之前的数据
else {
checkoutEntity.setDataId(0);
checkoutEntity.setPath(null);
checkoutEntity.setDataVersion(0);
checkoutEntity.setExecResult(null);
checkoutEntity.setExecResultLast(null);
checkoutEntity.setPayResult(null);
checkoutEntity.setPayResultLast(null);
checkoutEntity.setCheckoutFlag(null);
checkoutEntity.setCollection("否");
nodata.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_"+checkoutEntity.getDataVersion(), checkoutEntity);
}
}
else {
// DataVersion = 0
checkoutEntity.setCollection("否");
nodata.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_"+checkoutEntity.getDataVersion(), checkoutEntity);
}
} }
// 将 nodata中的 无热区数据的 系统,在冷区中查询,如果有的话,将冷区数据放入 resul中 而没有数据的 系统 的 key放入 keyList // 将 nodata中的 无热区数据的 系统,在冷区中查询,如果有的话,将冷区数据放入 resul中 而没有数据的 系统 的 key放入 keyList
List<String> noDataKeyList = new ArrayList<String>();
for ( String noDataKey : nodata.keySet()) {
CheckoutEntity checkData = nodata.get(noDataKey);
String coolDataMapKey = checkData.getAreaCode()+"_"+checkData.getSysCode();
if (coolDataMap.containsKey(coolDataMapKey)) {
List<CheckoutEntity> coolDatas = coolDataMap.get(coolDataMapKey);
for (CheckoutEntity checkoutEntity : coolDatas) {
resul.put(checkoutEntity.getAreaCode()+"_"+checkoutEntity.getSysCode()+"_"+ checkoutEntity.getDataVersion(), checkoutEntity);
}
}
else {
//记录 不存在数据 在 nodata的 key值
noDataKeyList.add(noDataKey);
}
}
List<CheckoutEntity> resultList = new ArrayList<CheckoutEntity>(); List<CheckoutEntity> resultList = new ArrayList<CheckoutEntity>();
List<CheckoutEntity> noDataList = new ArrayList<CheckoutEntity>();
//带数据的 //带数据的
for (String key : resul.keySet()) { for (String key : resul.keySet()) {
//如果有 校验失败的结果则 为否 //如果有 校验失败的结果则 为否
CheckoutEntity tmp = resul.get(key); CheckoutEntity tmp = resul.get(key);
if (null == tmp.getCollectingTime()) {
//无数据的 系统
tmp.setCollection("否");
noDataList.add(tmp);
continue;
}
resultList.add(tmp);
if(!Constant.CHECKOUT_STATUS_ZERO.equals(tmp.getPayResultLast()) && !Constant.CHECKOUT_STATUS_ZERO.equals(tmp.getExecResultLast())){ if(!Constant.CHECKOUT_STATUS_ZERO.equals(tmp.getPayResultLast()) && !Constant.CHECKOUT_STATUS_ZERO.equals(tmp.getExecResultLast())){
if (Constant.CHECKOUT_STATUS_FOUR.equals(tmp.getPayResultLast()) || Constant.CHECKOUT_STATUS_FOUR.equals(tmp.getExecResultLast())) { if (Constant.CHECKOUT_STATUS_FOUR.equals(tmp.getPayResultLast()) || Constant.CHECKOUT_STATUS_FOUR.equals(tmp.getExecResultLast())) {
tmp.setCheckResult(Constant.ORACLE_CHECK_REULT_ONE); tmp.setCheckResult(Constant.ORACLE_CHECK_REULT_ONE);
@ -205,16 +184,8 @@ public class CheckoutServiceImpl implements ICheckoutService {
tmp.setCheckResult(Constant.ORACLE_CHECK_REULT_ZERO); tmp.setCheckResult(Constant.ORACLE_CHECK_REULT_ZERO);
} }
} }
resultList.add(resul.get(key));
}
//无数据的 系统
for (String nodatakey : noDataKeyList) {
if (nodata.keySet().contains(nodatakey)) {
CheckoutEntity checkoutEntity = nodata.get(nodatakey);
checkoutEntity.setCollection("否");
resultList.add(checkoutEntity);
}
} }
resultList.addAll(noDataList);
return resultList; return resultList;
} }

@ -2,7 +2,9 @@ package com.platform.service.impl;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@ -15,7 +17,6 @@ import org.springframework.ui.ModelMap;
import com.base.Custom4exception; import com.base.Custom4exception;
import com.base.CustomException; import com.base.CustomException;
import com.platform.dao.DataInfoDao; import com.platform.dao.DataInfoDao;
import com.platform.dao.IHotDataInfoDao;
import com.platform.entities.DataInfoEntity; import com.platform.entities.DataInfoEntity;
import com.platform.form.PagerOptions; import com.platform.form.PagerOptions;
import com.platform.http.gfs.RemoveData; import com.platform.http.gfs.RemoveData;
@ -31,9 +32,6 @@ public class DataInfoServiceImp implements DataInfoService {
@Resource(name = "dataInfoDao") @Resource(name = "dataInfoDao")
private DataInfoDao dfdDao; private DataInfoDao dfdDao;
@Resource(name = "hotDataInfoDao")
private IHotDataInfoDao hotDataInfoDao;
private RemoveData removedata = new RemoveData(); private RemoveData removedata = new RemoveData();
public void setDfdDao(DataInfoDao dfdDao) { public void setDfdDao(DataInfoDao dfdDao) {
@ -41,93 +39,87 @@ public class DataInfoServiceImp implements DataInfoService {
} }
@Override @Override
public ModelMap getPagerTableData(PagerOptions pagerOptions) { public ModelMap getPagerTableData(PagerOptions pagerOptions) throws Exception{
ModelMap modelMap = new ModelMap(); ModelMap modelMap = new ModelMap();
String querystr = pagerOptions.getKeyQuery(); String querystr = pagerOptions.getKeyQuery();
String[] querys = null; String[] querys = null;
try { List<String> removelist = new ArrayList<String>();
List<String> removelist = new ArrayList<String>(); List<String> alllist = new ArrayList<String>();
List<String> alllist = new ArrayList<String>(); List<String> list = new ArrayList<String>();
List<String> list = new ArrayList<String>(); //如果有查询数据库类型的
//如果有查询数据库类型的 //去掉版本字段
//去掉版本字段 Pattern pattern = Pattern.compile("^版本\\d+");
Pattern pattern = Pattern.compile("^版本\\d+"); if (null != querystr && !"".equals(querystr)) {
if (null != querystr && !"".equals(querystr)) { if (querystr.toLowerCase().contains("oracle")) {
if (querystr.toLowerCase().contains("oracle")) { pagerOptions.setDataBaseType("ORACLE");
pagerOptions.setDataBaseType("ORACLE"); querystr = querystr.toUpperCase().replaceAll("ORACLE", "");
querystr = querystr.toUpperCase().replaceAll("ORACLE", ""); }else if(querystr.toLowerCase().contains("sql server")){
}else if(querystr.toLowerCase().contains("sql server")){ pagerOptions.setDataBaseType("SQL SERVER");
pagerOptions.setDataBaseType("SQL SERVER"); querystr = querystr.toUpperCase().replaceAll("SQL SERVER", "");
querystr = querystr.toUpperCase().replaceAll("SQL SERVER", "");
}
querys = querystr.trim().split(" ");
list = Arrays.asList(querys);
}
//遍历 list
for (String ss : list) {
ss = ss.trim();
if (!"".equals(ss)) {
alllist.add(ss);
}
} }
for (String ss : alllist) { querys = querystr.trim().split(" ");
Matcher matcher2 = pattern.matcher(ss); list = Arrays.asList(querys);
// 去掉 最后 的 / 符合 }
if (matcher2.find()) { //遍历 list
String s2 = matcher2.group(); for (String ss : list) {
removelist.add(ss); ss = ss.trim();
} if (!"".equals(ss)) {
} alllist.add(ss);
alllist.removeAll(removelist);
Object[] strs = alllist.toArray();
int length = strs.length;
List<String> arrays = new ArrayList<String>();
for (int i = 0; i < length; i++) {
arrays.add(strs[i].toString().trim());
}
for (String version : removelist) {
pagerOptions.setDataVersion(Integer.valueOf(version.replace("版本", "")));
}
if (arrays.size() > 0) {
pagerOptions.setArray(arrays);
} }
if (null !=pagerOptions.getSubmittedBatch() && !"".equals(pagerOptions.getSubmittedBatch()) ) { }
pagerOptions.setSubmittedBatch("批次"+pagerOptions.getSubmittedBatch()); for (String ss : alllist) {
Matcher matcher2 = pattern.matcher(ss);
// 去掉 最后 的 / 符合
if (matcher2.find()) {
String s2 = matcher2.group();
removelist.add(ss);
} }
List<DataInfoEntity> result = null; }
try{ alllist.removeAll(removelist);
if ("1".equals(pagerOptions.getMark())) { Object[] strs = alllist.toArray();
result = dfdDao.getLimitedDataInfoByPage(pagerOptions); int length = strs.length;
List<String> arrays = new ArrayList<String>();
for (int i = 0; i < length; i++) {
arrays.add(strs[i].toString().trim());
}
for (String version : removelist) {
pagerOptions.setDataVersion(Integer.valueOf(version.replace("版本", "")));
}
if (arrays.size() > 0) {
pagerOptions.setArray(arrays);
}
if (null !=pagerOptions.getSubmittedBatch() && !"".equals(pagerOptions.getSubmittedBatch()) ) {
pagerOptions.setSubmittedBatch("批次"+pagerOptions.getSubmittedBatch());
}
List<DataInfoEntity> result = null;
try{
result = dfdDao.getLimitedDataInfoByPage(pagerOptions);
if ("1".equals(pagerOptions.getMark())) {
List<Integer> ids = new ArrayList<Integer>();
for (DataInfoEntity dataInfoEntity : result) {
ids.add(dataInfoEntity.getId());
} }
else { Map<Integer, Object> srcIds = new HashMap<Integer, Object>();
result = hotDataInfoDao.getLimitedDataInfoByPage(pagerOptions); List<DataInfoEntity> hotData = dfdDao.getExistBySrcIds(ids);
if (hotData.size() > 0) {
for (DataInfoEntity dataInfoEntity : hotData) {
srcIds.put(dataInfoEntity.getSrcId(), null);
}
for (DataInfoEntity dataInfoEntity : result) {
//存在 对应 的热区数据
if (srcIds.keySet().contains(dataInfoEntity.getId())) {
dataInfoEntity.setExistHotData(1);
}
}
} }
}catch(Exception e){
log.error(e.getMessage());
} }
// int count = dfdDao.getLimitedDataCount(pagerOptions); //获取总记录条数
// log.info("total colume " + count); }catch(Exception e){
// int offset = 0; log.error(e.getMessage());
// if (pagerOptions.getCurrentPageNum() > 1) {
// pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1)
// * pagerOptions.getPriorTableSize());
// offset = dfdDao.getLimitedBeginId(pagerOptions); //获取起始查询id
// log.info(offset);
// }
// pagerOptions.setOffset(offset + 1);
// List<DataInfoEntity> result = dfdDao
// .getLimitedDataInfoEntities(pagerOptions);
// if (null != result) {
// for (DataInfoEntity dataInfoEntity : result) {
// dataInfoEntity.setVolumeType(dataInfoEntity.getMark());
// }
// }
modelMap.addAttribute("page", pagerOptions);
modelMap.addAttribute("data", result);
modelMap.addAttribute("length", pagerOptions.getTotleSize());
} catch (Exception e) {
new CustomException();
} }
modelMap.addAttribute("page", pagerOptions);
modelMap.addAttribute("data", result);
modelMap.addAttribute("length", pagerOptions.getTotleSize());
return modelMap; return modelMap;
} }
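A worked example of the keyword parsing in getPagerTableData above, assuming a hypothetical search string "oracle 版本2 成都": the "oracle" token selects the database type, the token matching ^版本\d+ supplies the data version and is removed, and the remaining tokens feed the CONCAT ... LIKE filter. A standalone fragment (java.util and java.util.regex assumed):

    Pattern versionPattern = Pattern.compile("^版本\\d+");
    String query = "oracle 版本2 成都";                     // hypothetical user input
    String dataBaseType = null;
    if (query.toLowerCase().contains("oracle")) {
        dataBaseType = "ORACLE";                             // -> pagerOptions.setDataBaseType(...)
        query = query.toUpperCase().replaceAll("ORACLE", "");
    }
    Integer dataVersion = null;
    List<String> array = new ArrayList<String>();
    for (String token : query.trim().split(" ")) {
        token = token.trim();
        if ("".equals(token)) {
            continue;
        }
        if (versionPattern.matcher(token).find()) {
            dataVersion = Integer.valueOf(token.replace("版本", ""));  // 2
        } else {
            array.add(token);                                // ["成都"] -> pagerOptions.setArray(...)
        }
    }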

@ -20,9 +20,8 @@ import com.platform.entities.DataInfoEntity;
import com.platform.entities.DataInfoEntityMoveTmp; import com.platform.entities.DataInfoEntityMoveTmp;
import com.platform.entities.FolderNode; import com.platform.entities.FolderNode;
import com.platform.entities.VolumeInitEntity; import com.platform.entities.VolumeInitEntity;
import com.platform.glusterfs.CheckoutMD5;
import com.platform.http.gfs.RemoveData;
import com.platform.glusterfs.ShowData; import com.platform.glusterfs.ShowData;
import com.platform.http.gfs.RemoveData;
import com.platform.service.IMoveDataService; import com.platform.service.IMoveDataService;
import com.platform.utils.DateForm; import com.platform.utils.DateForm;
@ -138,7 +137,7 @@ public class MoveDataServiceImpl implements IMoveDataService {
dataMove.setFkid(dataInfoEntity.getId()); dataMove.setFkid(dataInfoEntity.getId());
// 末尾 含有 / // 末尾 含有 /
Matcher matcher3 = pattern2.matcher(node.getPath()); Matcher matcher3 = pattern2.matcher(node.getPath());
// 去掉 最后 的 / 符合 // 加上最后 的 / 符合
String volumePath = ""; String volumePath = "";
if (!matcher3.find()) { if (!matcher3.find()) {
volumePath = node.getPath()+"/"; volumePath = node.getPath()+"/";
@ -150,6 +149,7 @@ public class MoveDataServiceImpl implements IMoveDataService {
String ip = ""; String ip = "";
for ( VolumeInitEntity ve : listVolume) { for ( VolumeInitEntity ve : listVolume) {
String tmpPath = ve.getPath(); String tmpPath = ve.getPath();
// volumePath : 迁移目的 volume
if (volumePath.contains(tmpPath)) { if (volumePath.contains(tmpPath)) {
if (tmpPath.length() > path.length()) { if (tmpPath.length() > path.length()) {
path = tmpPath; path = tmpPath;
@ -165,7 +165,11 @@ public class MoveDataServiceImpl implements IMoveDataService {
dataMove.setDstVolumeIp(ip); dataMove.setDstVolumeIp(ip);
} }
dataMove.setCompleteStatus("0"); dataMove.setCompleteStatus("0");
moveList.add(dataMove); //只 允许 冷区的数据迁移 mark=1表示冷区
if ("1".equals(dataInfoEntity.getMark())) {
dataMove.setAddData("1");
moveList.add(dataMove);
}
} }
if (moveList.size() > 0) { if (moveList.size() > 0) {
dataInfoMoveTmpDao.insertBatch(moveList); dataInfoMoveTmpDao.insertBatch(moveList);

@ -12,7 +12,6 @@ import org.springframework.stereotype.Service;
import com.base.Custom4exception; import com.base.Custom4exception;
import com.base.CustomException; import com.base.CustomException;
import com.platform.dao.DataInfoDao; import com.platform.dao.DataInfoDao;
import com.platform.dao.IHotDataInfoDao;
import com.platform.entities.DataInfoEntity; import com.platform.entities.DataInfoEntity;
import com.platform.entities.GatherOracleInfo; import com.platform.entities.GatherOracleInfo;
import com.platform.entities.OracleConnectorParams; import com.platform.entities.OracleConnectorParams;
@ -34,8 +33,8 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
public final static Logger log = Logger public final static Logger log = Logger
.getLogger(OracleExtractServiceImpl.class); .getLogger(OracleExtractServiceImpl.class);
@Resource(name = "hotDataInfoDao") @Resource(name = "dataInfoDao")
private IHotDataInfoDao hotDataInfoDao; private DataInfoDao dataInfoDao;
/** /**
* kubernetes client * kubernetes client
@ -84,7 +83,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
&& !"".equals(collectOracle.getDataId())) { && !"".equals(collectOracle.getDataId())) {
data.setId(Integer.valueOf(collectOracle.getDataId())); data.setId(Integer.valueOf(collectOracle.getDataId()));
data.setExtractStatus(1); data.setExtractStatus(1);
hotDataInfoDao.updateExtract(data); dataInfoDao.updateExtract(data);
collectOracle.setName("J" collectOracle.setName("J"
+ collectOracle.getName().replace("-", "_")); + collectOracle.getName().replace("-", "_"));
String cmd = "kubectl label --overwrite rc " String cmd = "kubectl label --overwrite rc "
@ -126,7 +125,10 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
sb.append(string).append("\n"); sb.append(string).append("\n");
Configs.CONSOLE_LOGGER.info(sb.toString()); Configs.CONSOLE_LOGGER.info(sb.toString());
data.setExtractStatus(2); data.setExtractStatus(2);
hotDataInfoDao.updateExtract(data); dataInfoDao.updateExtract(data);
DataInfoEntity tmpdata = dataInfoDao.findById(data.getId());
data.setId(tmpdata.getSrcId());
dataInfoDao.updateExtract(data);
} }
} catch (Exception e) { } catch (Exception e) {
// sql日志记录时间 // sql日志记录时间
@ -140,7 +142,10 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
+ " isExtract=0"; + " isExtract=0";
Constant.ganymedSSH.execCmdWaitAcquiescent(cmd); Constant.ganymedSSH.execCmdWaitAcquiescent(cmd);
data.setExtractStatus(0); data.setExtractStatus(0);
hotDataInfoDao.updateExtract(data); dataInfoDao.updateExtract(data);
DataInfoEntity tmpdata = dataInfoDao.findById(data.getId());
data.setId(tmpdata.getSrcId());
dataInfoDao.updateExtract(data);
log.error(Custom4exception.OracleSQL_Except, e); log.error(Custom4exception.OracleSQL_Except, e);
} }
} }
@ -208,7 +213,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
// 设置为 标准表 抽取中 // 设置为 标准表 抽取中
data.setCheckoutFlag(Constant.CHECKOUTFLAG_SIX); data.setCheckoutFlag(Constant.CHECKOUTFLAG_SIX);
data.setStandardExtractStatus("1"); data.setStandardExtractStatus("1");
hotDataInfoDao.update(data); dataInfoDao.update(data);
collectOracle.setName("CQ" collectOracle.setName("CQ"
+ collectOracle.getName().replace("-", "_")); + collectOracle.getName().replace("-", "_"));
String cmd = "kubectl annotate --overwrite rc " String cmd = "kubectl annotate --overwrite rc "
@ -237,7 +242,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
oracleModel); // 创建表空间 oracleModel); // 创建表空间
oracleExtract.createOnlyUser(conn, collectOracle, oracleExtract.createOnlyUser(conn, collectOracle,
oracleModel);// 创建 抽取标准表的 用户并授权 oracleModel);// 创建 抽取标准表的 用户并授权
DataInfoEntity tmpdata = hotDataInfoDao.findById(data DataInfoEntity tmpdata = dataInfoDao.findById(data
.getId()); .getId());
if (null != tmpdata) { if (null != tmpdata) {
if (Constant.CHECKOUT_STATUS_THREE.equals(tmpdata if (Constant.CHECKOUT_STATUS_THREE.equals(tmpdata
@ -248,7 +253,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
.equals(tmpdata.getPayResultLast())) { .equals(tmpdata.getPayResultLast())) {
// 抽取中 // 抽取中
data.setPayResultLast(Constant.CHECKOUT_STATUS_SIX); data.setPayResultLast(Constant.CHECKOUT_STATUS_SIX);
hotDataInfoDao.update(data); dataInfoDao.update(data);
boolean isExtrac = true; boolean isExtrac = true;
try { try {
oracleExtract.extractStandardPayTable(conn, oracleExtract.extractStandardPayTable(conn,
@ -256,13 +261,13 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
} catch (Exception e) { } catch (Exception e) {
// 改回 校验存在的状态 // 改回 校验存在的状态
data.setPayResultLast(Constant.CHECKOUT_STATUS_THREE); data.setPayResultLast(Constant.CHECKOUT_STATUS_THREE);
hotDataInfoDao.update(data); dataInfoDao.update(data);
isExtrac = false; isExtrac = false;
} }
if (isExtrac) { if (isExtrac) {
// 抽取成功 // 抽取成功
data.setPayResultLast(Constant.CHECKOUT_STATUS_SEVEN); data.setPayResultLast(Constant.CHECKOUT_STATUS_SEVEN);
hotDataInfoDao.update(data); dataInfoDao.update(data);
} }
} }
if (Constant.CHECKOUT_STATUS_THREE.equals(tmpdata if (Constant.CHECKOUT_STATUS_THREE.equals(tmpdata
@ -273,7 +278,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
.equals(tmpdata.getExecResultLast())) { .equals(tmpdata.getExecResultLast())) {
// 抽取中 // 抽取中
data.setExecResultLast(Constant.CHECKOUT_STATUS_SIX); data.setExecResultLast(Constant.CHECKOUT_STATUS_SIX);
hotDataInfoDao.update(data); dataInfoDao.update(data);
boolean isExtrac = true; boolean isExtrac = true;
try { try {
oracleExtract.extractStandardExecTable( oracleExtract.extractStandardExecTable(
@ -281,12 +286,12 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
} catch (Exception e) { } catch (Exception e) {
// 改回 校验存在的状态 // 改回 校验存在的状态
data.setExecResultLast(Constant.CHECKOUT_STATUS_THREE); data.setExecResultLast(Constant.CHECKOUT_STATUS_THREE);
hotDataInfoDao.update(data); dataInfoDao.update(data);
isExtrac = false; isExtrac = false;
} }
if (isExtrac) { if (isExtrac) {
data.setExecResultLast(Constant.CHECKOUT_STATUS_SEVEN); data.setExecResultLast(Constant.CHECKOUT_STATUS_SEVEN);
hotDataInfoDao.update(data); dataInfoDao.update(data);
} }
} }
// client.updateOrAddReplicasLabelById(collectOracle.getName(), // client.updateOrAddReplicasLabelById(collectOracle.getName(),
@ -302,7 +307,10 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
Configs.CONSOLE_LOGGER.info(sb.toString()); Configs.CONSOLE_LOGGER.info(sb.toString());
data.setCheckoutFlag(Constant.CHECKOUTFLAG_SEVEN); data.setCheckoutFlag(Constant.CHECKOUTFLAG_SEVEN);
data.setStandardExtractStatus("2"); data.setStandardExtractStatus("2");
hotDataInfoDao.update(data); dataInfoDao.update(data);
DataInfoEntity tmpSrcData = dataInfoDao.findById(data.getId());
data.setId(tmpSrcData.getSrcId());
dataInfoDao.update(data);
} }
} }
} catch (Exception e) { } catch (Exception e) {
@ -317,7 +325,10 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
+ replicasName + " standardExtractStatus=0"; + replicasName + " standardExtractStatus=0";
Constant.ganymedSSH.execCmdWaitAcquiescent(cmd); Constant.ganymedSSH.execCmdWaitAcquiescent(cmd);
data.setStandardExtractStatus("0"); data.setStandardExtractStatus("0");
hotDataInfoDao.update(data); dataInfoDao.update(data);
DataInfoEntity tmpSrcData = dataInfoDao.findById(data.getId());
data.setId(tmpSrcData.getSrcId());
dataInfoDao.update(data);
log.error(Custom4exception.OracleSQL_Except, e); log.error(Custom4exception.OracleSQL_Except, e);
} }
} }
@ -336,7 +347,7 @@ public class OracleExtractServiceImpl implements IOracleExtractService {
data.setCheckoutFlag(Constant.CHECKOUTFLAG_SIX); data.setCheckoutFlag(Constant.CHECKOUTFLAG_SIX);
data.setExtractStatus(status); data.setExtractStatus(status);
try { try {
hotDataInfoDao.update(data); dataInfoDao.update(data);
} catch (Exception e) { } catch (Exception e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
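The pattern introduced above, and repeated below in ThreadCheckoutStandardOracle and ThreadExtractStandardSqlServer, is to write every extract and checkout status change twice: once to the hot-zone row being processed and once to its cold-zone original found through src_id, so the two copies stay in step. A distilled sketch with the hypothetical helper name updateBothZones (it assumes the row was created by a migration and therefore carries a valid src_id):

    private void updateBothZones(DataInfoDao dataInfoDao, DataInfoEntity data) throws Exception {
        dataInfoDao.update(data);                               // hot-zone copy
        DataInfoEntity hotRow = dataInfoDao.findById(data.getId());
        data.setId(hotRow.getSrcId());                          // switch to the cold-zone original
        dataInfoDao.update(data);                               // keep both zones consistent
    }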

@ -8,6 +8,8 @@ import java.util.Set;
import javax.annotation.Resource; import javax.annotation.Resource;
import net.sf.json.JSONArray;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@ -193,7 +195,19 @@ public class VolumeServiceImpl implements IVolumeService {
@Override @Override
public String getAllvolume() throws Exception { public String getAllvolume() throws Exception {
return setVolume.getAllvolume(); List<VolumeDataEntity> volumes = setVolume.getAllvolumeEntity();
List<VolumeInitEntity> volumeInits = volumeDao.findAll();
Map<String, VolumeInitEntity> volumMap = new HashMap<String, VolumeInitEntity>();
for (VolumeInitEntity volumeInitEntity : volumeInits) {
volumMap.put(volumeInitEntity.getName(), volumeInitEntity);
}
for (VolumeDataEntity volumeDataEntity : volumes) {
if (volumMap.keySet().contains(volumeDataEntity.getName())) {
//设置 热区 冷区类型
volumeDataEntity.setHot(volumMap.get(volumeDataEntity.getName()).getMark());
}
}
JSONArray jsonarr = JSONArray.fromObject(volumes);
return jsonarr.toString();
} }
} }
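getAllvolume() above now tags every GlusterFS volume with the zone mark stored in the volume table before serialising the list with json-lib; elsewhere in this commit mark "0" is treated as the hot zone and "1" as the cold zone. A distilled sketch of that enrichment step, with the hypothetical helper name describeVolumes:

    private String describeVolumes(List<VolumeDataEntity> volumes, List<VolumeInitEntity> volumeInits) {
        Map<String, VolumeInitEntity> byName = new HashMap<String, VolumeInitEntity>();
        for (VolumeInitEntity init : volumeInits) {
            byName.put(init.getName(), init);
        }
        for (VolumeDataEntity volume : volumes) {
            VolumeInitEntity init = byName.get(volume.getName());
            if (null != init) {
                volume.setHot(init.getMark());  // zone mark: "0" hot, "1" cold (assumed)
            }
        }
        return JSONArray.fromObject(volumes).toString();        // net.sf.json
    }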

@ -277,5 +277,8 @@ public class ThreadCheckoutStandardOracle extends Thread {
data.setExecResultLast(checkoutEntity.getExecResultLast()); data.setExecResultLast(checkoutEntity.getExecResultLast());
data.setCheckoutFlag(checkoutEntity.getCheckoutFlag()); data.setCheckoutFlag(checkoutEntity.getCheckoutFlag());
dataInfoDao.update(data); dataInfoDao.update(data);
DataInfoEntity tmpdata = dataInfoDao.findById(checkoutEntity.getDataId());
data.setId(tmpdata.getSrcId());
dataInfoDao.update(data);
} }
} }

@ -97,6 +97,9 @@ public class ThreadExtractStandardSqlServer extends Thread{
data.setPayResultLast(Constant.CHECKOUT_STATUS_SEVEN); data.setPayResultLast(Constant.CHECKOUT_STATUS_SEVEN);
try { try {
dataInfoDao.update(data); dataInfoDao.update(data);
DataInfoEntity tmpSrcData = dataInfoDao.findById(data.getId());
data.setId(tmpSrcData.getSrcId());
dataInfoDao.update(data);
} catch (Exception e) { } catch (Exception e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
@ -157,6 +160,9 @@ public class ThreadExtractStandardSqlServer extends Thread{
data.setExecResultLast(Constant.CHECKOUT_STATUS_SEVEN); data.setExecResultLast(Constant.CHECKOUT_STATUS_SEVEN);
try { try {
dataInfoDao.update(data); dataInfoDao.update(data);
DataInfoEntity tmpSrcData = dataInfoDao.findById(data.getId());
data.setId(tmpSrcData.getSrcId());
dataInfoDao.update(data);
} catch (Exception e) { } catch (Exception e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();

@ -17,7 +17,6 @@ import org.springframework.stereotype.Component;
import com.base.TaskOperateData; import com.base.TaskOperateData;
import com.platform.dao.DataInfoDao; import com.platform.dao.DataInfoDao;
import com.platform.dao.DataInfoMoveTmpDao; import com.platform.dao.DataInfoMoveTmpDao;
import com.platform.dao.IHotDataInfoDao;
import com.platform.entities.DataInfoEntity; import com.platform.entities.DataInfoEntity;
import com.platform.entities.DataInfoEntityMoveTmp; import com.platform.entities.DataInfoEntityMoveTmp;
import com.platform.glusterfs.CheckoutMD5; import com.platform.glusterfs.CheckoutMD5;
@ -37,8 +36,8 @@ public class ThreadMoveData{
public final static Logger log = Logger.getLogger(ThreadMoveData.class); public final static Logger log = Logger.getLogger(ThreadMoveData.class);
@Resource(name = "hotDataInfoDao") @Resource(name = "dataInfoDao")
private IHotDataInfoDao hotDataInfoDao; private DataInfoDao dataInfoDao;
/** /**
* *
@ -59,7 +58,7 @@ public class ThreadMoveData{
ShowData show = new ShowData(); ShowData show = new ShowData();
/** /**
* : -- * : --
*/ */
public ThreadMoveData() { public ThreadMoveData() {
} }
@ -68,118 +67,21 @@ public class ThreadMoveData{
@Scheduled(fixedDelay = 4000) @Scheduled(fixedDelay = 4000)
public void moveDataByWebGfs(){ public void moveDataByWebGfs(){
List<DataInfoEntityMoveTmp> result = new ArrayList<DataInfoEntityMoveTmp>(); List<DataInfoEntityMoveTmp> result = new ArrayList<DataInfoEntityMoveTmp>();
List<DataInfoEntityMoveTmp>[] subMove = new ArrayList[4];
for (int i = 0; i < 4; i++) {
subMove[i] = new ArrayList<DataInfoEntityMoveTmp>();
}
try { try {
result = dataInfoMoveTmpDao.findAll(); result = dataInfoMoveTmpDao.findAll();
} catch (Exception e) { } catch (Exception e) {
log.error(e); log.error(e);
} }
Map<String, TaskOperateData> taskMap = new HashMap<String, TaskOperateData>(); try {
for ( DataInfoEntityMoveTmp moveE : result) { List<DataInfoEntityMoveTmp> insertData = this.doMoveService(result);
switch (moveE.getCompleteStatus()) { if (insertData.size() > 0) {
//待迁移 for (DataInfoEntityMoveTmp dataInfoEntityMoveTmp : insertData) {
case "0": this.makeDataInfo(dataInfoEntityMoveTmp);
subMove[0].add(moveE);
break;
//正在迁移
case "1":
subMove[1].add(moveE);
break;
//迁移完成的
case "2":
subMove[2].add(moveE);
break;
//迁移失败
case "3":
subMove[3].add(moveE);
break;
default:
break;
}
}
//迁移失败---不处理 status = -1表示迁移完成校验失败,-2:表示迁移失败 -3表示删除失败
//迁移完成的--不处理 status = 3表示校验成功
//正则迁移 status = 1表示正在迁移(如果 web gfs 迁移成功 则 增加一条记录)
if(subMove[1].size() > 0){
//请求进度
List<TaskOperateData> list = new ArrayList<TaskOperateData>();
try {
list = copy.operationTask();
} catch (Exception e) {
log.error("copy.operationTask()");
log.error(e);
}
for (TaskOperateData taskOperateData : list) {
taskMap.put(FileOperateHelper.addLastLinuxSeparator(taskOperateData.getSourcePath())
+ "-" + FileOperateHelper.addLastLinuxSeparator(taskOperateData.getDestPath()),
taskOperateData);
}
for (DataInfoEntityMoveTmp moveE : subMove[1]) {
TaskOperateData taskOne = taskMap.get(FileOperateHelper.addLastLinuxSeparator(moveE.getDataPath())
+"-"+FileOperateHelper.addLastLinuxSeparator(makeDstPath(moveE.getDstPath())));
if (null == taskOne) {
long nowTime = new Date().getTime();
long timelong = nowTime - DateForm.string2DateBysecond(moveE.getLastTime()).getTime();
if (timelong > 1000*60*20) {
try {
moveE.setCompleteStatus("3");
dataInfoMoveTmpDao.update(moveE);
} catch (Exception e) {
log.error(e);
}
}
continue;
}
moveE.setRate(taskOne.getProgress());
moveE.setLastTime(DateForm.date2StringBysecond(new Date()));
if (3 == taskOne.getStatus()) {
//成功---创建数据
makeDataInfo(moveE);
}else if(taskOne.getStatus() < 0){
//失败
moveE.setCompleteStatus("3");
}
try {
dataInfoMoveTmpDao.update(moveE);
} catch (Exception e) {
log.error(e);
}
}
}
//待迁移 status = 0准备迁移则开始迁移
if(subMove[0].size() > 0){
//正则迁移的 数量
int curMoveNum = subMove[1].size();
for ( DataInfoEntityMoveTmp moveE : subMove[0]) {
if(curMoveNum <= Constant.moveFileMaxNum){
moveE.setLastTime(DateForm.date2StringBysecond(new Date()));
//请求迁移
curMoveNum++;
try {
if(1==copy.copyFolder(moveE.getDataPath(), makeDstPath(moveE.getDstPath()))){
moveE.setCompleteStatus("1");
}
else {
moveE.setCompleteStatus("3");
}
} catch (Exception e) {
log.error("copy.copyFolder()");
log.error(e);
}
try {
dataInfoMoveTmpDao.update(moveE);
} catch (Exception e) {
log.error(e);
}
} }
} }
} catch (Exception e) {
log.error(e);
} }
} }
@ -337,7 +239,7 @@ public class ThreadMoveData{
data.setPayResult(dataMove.getPayResult()); data.setPayResult(dataMove.getPayResult());
data.setExecResult(dataMove.getExecResult()); data.setExecResult(dataMove.getExecResult());
data.setId(0); data.setId(0);
hotDataInfoDao.save(data); dataInfoDao.save(data);
} }
else { else {
dataMove.setCompleteStatus("2"); dataMove.setCompleteStatus("2");
@ -356,6 +258,122 @@ public class ThreadMoveData{
} }
} }
private List<DataInfoEntityMoveTmp> doMoveService(List<DataInfoEntityMoveTmp> result) throws Exception {
List<DataInfoEntityMoveTmp> data4Insert = new ArrayList<DataInfoEntityMoveTmp>();
List<DataInfoEntityMoveTmp>[] subMove = new ArrayList[4];
for (int i = 0; i < 4; i++) {
subMove[i] = new ArrayList<DataInfoEntityMoveTmp>();
}
Map<String, TaskOperateData> taskMap = new HashMap<String, TaskOperateData>();
for ( DataInfoEntityMoveTmp moveE : result) {
switch (moveE.getCompleteStatus()) {
//待迁移
case "0":
subMove[0].add(moveE);
break;
//正在迁移
case "1":
subMove[1].add(moveE);
break;
//迁移完成的
case "2":
subMove[2].add(moveE);
break;
//迁移失败
case "3":
subMove[3].add(moveE);
break;
default:
break;
}
}
//迁移失败---不处理 status = -1表示迁移完成校验失败,-2:表示迁移失败 -3表示删除失败
//迁移完成的--不处理 status = 3表示校验成功
//正则迁移 status = 1表示正在迁移(如果 web gfs 迁移成功 则 增加一条记录)
if(subMove[1].size() > 0){
//请求进度
List<TaskOperateData> list = new ArrayList<TaskOperateData>();
try {
list = copy.operationTask();
} catch (Exception e) {
log.error("copy.operationTask()");
log.error(e);
}
for (TaskOperateData taskOperateData : list) {
taskMap.put(FileOperateHelper.addLastLinuxSeparator(taskOperateData.getSourcePath())
+ "-" + FileOperateHelper.addLastLinuxSeparator(taskOperateData.getDestPath()),
taskOperateData);
}
for (DataInfoEntityMoveTmp moveE : subMove[1]) {
TaskOperateData taskOne = taskMap.get(FileOperateHelper.addLastLinuxSeparator(moveE.getDataPath())
+"-"+FileOperateHelper.addLastLinuxSeparator(makeDstPath(moveE.getDstPath())));
if (null == taskOne) {
long nowTime = new Date().getTime();
long timelong = nowTime - DateForm.string2DateBysecond(moveE.getLastTime()).getTime();
if (timelong > 1000*60*20) {
try {
moveE.setCompleteStatus("3");
dataInfoMoveTmpDao.update(moveE);
} catch (Exception e) {
log.error(e);
}
}
continue;
}
moveE.setRate(taskOne.getProgress());
moveE.setLastTime(DateForm.date2StringBysecond(new Date()));
if (3 == taskOne.getStatus()) {
//成功---创建数据(改为 记录 到 data4Insert 中 )
// makeDataInfo(moveE);
data4Insert.add(moveE);
}else if(taskOne.getStatus() < 0){
//失败
moveE.setCompleteStatus("3");
}
try {
dataInfoMoveTmpDao.update(moveE);
} catch (Exception e) {
log.error(e);
}
}
}
//待迁移 status = 0准备迁移则开始迁移
if(subMove[0].size() > 0){
//正则迁移的 数量
int curMoveNum = subMove[1].size();
for ( DataInfoEntityMoveTmp moveE : subMove[0]) {
if(curMoveNum <= Constant.moveFileMaxNum){
moveE.setLastTime(DateForm.date2StringBysecond(new Date()));
//请求迁移
curMoveNum++;
//TODO 判断 moveE.getDataPath()的空间大小 和 makeDstPath(moveE.getDstPath())的大小
try {
if(1==copy.copyFolder(moveE.getDataPath(), makeDstPath(moveE.getDstPath()))){
moveE.setCompleteStatus("1");
}
else {
moveE.setCompleteStatus("3");
}
} catch (Exception e) {
log.error("copy.copyFolder()");
log.error(e);
}
try {
dataInfoMoveTmpDao.update(moveE);
} catch (Exception e) {
log.error(e);
}
}
}
}
return data4Insert;
}
private int makeDataInfo(DataInfoEntityMoveTmp dataMove){ private int makeDataInfo(DataInfoEntityMoveTmp dataMove){
try{ try{
// 判断 迁移数据的status是否改为 2 // 判断 迁移数据的status是否改为 2
@ -383,10 +401,11 @@ public class ThreadMoveData{
} }
data.setPayResult(dataMove.getPayResult()); data.setPayResult(dataMove.getPayResult());
data.setExecResult(dataMove.getExecResult()); data.setExecResult(dataMove.getExecResult());
data.setSrcId(dataMove.getFkid());
data.setId(0); data.setId(0);
try { try {
if (hotDataInfoDao.countByDataPath(data) == 0) { if (dataInfoDao.countByDataPath(data) == 0) {
hotDataInfoDao.save(data); dataInfoDao.save(data);
} }
} catch (Exception e) { } catch (Exception e) {
log.error(e); log.error(e);
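Once a copy reports success, makeDataInfo() above turns the move_data_tmp row into a new hot-zone data_info record, and the added setSrcId(dataMove.getFkid()) call links that record back to its cold-zone source (fkid is filled with the source row's id in MoveDataServiceImpl). A minimal sketch of that final step, with the hypothetical helper name saveHotCopy:

    private void saveHotCopy(DataInfoDao dataInfoDao, DataInfoEntityMoveTmp dataMove, DataInfoEntity data) throws Exception {
        data.setSrcId(dataMove.getFkid());              // id of the cold-zone original
        data.setId(0);                                  // let the database assign a new id
        if (dataInfoDao.countByDataPath(data) == 0) {   // skip if a row for this path already exists
            dataInfoDao.save(data);
        }
    }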
