Merge branch 'web_backend_develope' of https://git.trustie.net/fhx569287825/aggregation-platform into develope

web_backend_develope
wu ming 9 years ago
commit d4db086706

@ -16,5 +16,14 @@ public class Custom4exception {
* 001
* 001
*/
/**
* ThreadVolume volume exception code
*/
public final static String threadVolume_class_Except = "3001001001";
/**
* ThreadVolume thread exception code
*/
public final static String threadVolume_Thread_Except = "3001001002";
}

@ -4,7 +4,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
"http://ibatis.apache.org/dtd/ibatis-3-mapper.dtd">
<mapper namespace="com.platform.dao.DataInfoDao">
<resultMap id="getEntityByText" type="DataInfoEntity">
<resultMap id="getEntityByText" type="com.platform.entities.DataInfoEntity">
<id property="id" column="id" javaType="int" jdbcType="INTEGER" />
<result property="regionalismCode" column="regionalism_code"
javaType="string" jdbcType="VARCHAR" />
@ -46,43 +46,47 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
jdbcType="VARCHAR" />
</resultMap>
<sql id="conditionsFilters">
<if test="PagerOptions.dataType!=null">
and data_details.data_type=#{PagerOptions.dataType}
<if test="dataType!=null">
AND data_details.data_type=#{dataType}
</if>
<if test="PagerOptions.submittedBatch!=null">
and
data_details.submitted_batch=#{PagerOptions.submittedBatch}
<if test="submittedBatch!=null">
AND
data_details.submitted_batch=#{submittedBatch}
</if>
<if test="PagerOptions.cityName!=null">
and data_details.city_name=#{PagerOptions.cityName}
<if test="cityName!=null">
AND data_details.city_name=#{cityName}
</if>
<if test="PagerOptions.districtName!=null">
and
data_details.district_name=#{PagerOptions.districtName}
<if test="districtName!=null">
AND
data_details.district_name=#{districtName}
</if>
<if test="PagerOptions.dataVersion !=null">
and data_details.data_version=#{PagerOptions.dataVersion}
<if test="dataVersion !=null">
AND data_details.data_version=#{dataVersion}
</if>
<if test="PagerOptions.systemName !=null">
and data_details=#{PagerOptions.systemName}
<if test="systemName !=null">
AND data_details.system_name=#{systemName}
</if>
<if test="PagerOptions.dataYear !=null">
and data_details.data_year=#{PagerOptions.dataYear}
<if test="dataYear !=null">
AND data_details.data_year=#{dataYear}
</if>
<if test="array !=null and array.length > 0">
<foreach collection="array" item="item" index="index">
AND CONCAT(regionalism_code,system_code,city_name,district_name,system_name) LIKE CONCAT('%',CONCAT(#{item},'%'))
</foreach>
</if>
</sql>
<!-- Get all records matching the filter conditions -->
<select id="getLimitedDataInfoEntities" parameterType="PagerOptions"
<select id="getLimitedDataInfoEntities" parameterType="com.platform.entities.PagerOptions"
resultMap="getEntityByText">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,extract_status,data_year,start_year,end_year,volume_ip,volume_path
FROM data_details
<if test="PagerOptions!=null">
<where>
<include refid="conditionsFilters" />
<choose>
<when test="PagerOptions.offset > 0">
and data_details.id>= #{PagerOptions.offset}
<when test="offset > 0">
and data_details.id>= #{offset}
</when>
<otherwise>
and data_details.id>=0
@ -91,29 +95,26 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
</where>
ORDER BY data_details.id
<if test="PagerOptions.limit > 0">
LIMIT #{PagerOptions.limit}
<if test="limit > 0">
LIMIT #{limit}
</if>
</if>
</select>
<!-- Get the total number of records matching the filter conditions -->
<select id="getLimitedDataCount" resultType="java.lang.Integer"
parameterType="PagerOptions">
parameterType="com.platform.entities.PagerOptions">
SELECT COUNT(id) FROM data_details
<if test="PagerOptions!=null">
<where>
<include refid="conditionsFilters" />
</where>
</if>
</select>
<!-- Get the starting id for the data query -->
<select id="getLimitedBeginId" resultType="java.lang.Integer"
parameterType="PagerOptions">
parameterType="com.platform.entities.PagerOptions">
SELECT MAX(idx) FROM (SELECT id idx FROM data_details
ORDER BY id LIMIT 0,#{PagerOptions.totalLimit}) AS TEMP
ORDER BY id LIMIT 0,#{totalLimit}) AS TEMP
</select>
<insert id="save" parameterType="com.platform.entities.DataInfoEntity">

@ -1,6 +1,7 @@
package com.platform.dao;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
@ -11,11 +12,11 @@ import com.platform.entities.PagerOptions;
@Repository(value = "dataInfoDao")
public interface DataInfoDao {
int getLimitedDataCount(@Param("PagerOptions")PagerOptions pagerOptions);
int getLimitedDataCount(PagerOptions pagerOptions);
int getLimitedBeginId(@Param("PagerOptions")PagerOptions pagerOptions);
int getLimitedBeginId(PagerOptions pagerOptions);
List<DataInfoEntity> getLimitedDataInfoEntities(@Param("PagerOptions")PagerOptions pagerOptions);
List<DataInfoEntity> getLimitedDataInfoEntities(PagerOptions pagerOptions);
List<String> getIdIsExist(List<Integer> list)throws Exception;
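Note: with the @Param("PagerOptions") wrapper removed, MyBatis binds the PagerOptions bean itself as the statement parameter, which is why the mapper above now references #{dataType}, #{offset} and #{limit} directly instead of #{PagerOptions.dataType}. A minimal, hypothetical call site (the filter values, and the setLimit name, are assumptions inferred from the properties the mapper references):

PagerOptions pagerOptions = new PagerOptions();
pagerOptions.setOffset(1);                        // hypothetical paging values
pagerOptions.setLimit(20);
pagerOptions.setArray(new String[] { "320198" }); // hypothetical fuzzy-match keyword
List<DataInfoEntity> rows = dataInfoDao.getLimitedDataInfoEntities(pagerOptions);
int total = dataInfoDao.getLimitedDataCount(pagerOptions);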

@ -1,5 +1,7 @@
package com.platform.entities;
import java.util.List;
public class PagerOptions {
private Integer currentPageNum; // current page number
@ -24,7 +26,11 @@ public class PagerOptions {
private Integer totalLimit; // number of records before the current page
private Integer priorTableSize; // page size used in the previous request
private String keyQuery;
private String[] array;
public Integer getCurrentPageNum() {
return currentPageNum;
}
@ -119,6 +125,34 @@ public class PagerOptions {
public void setPriorTableSize(Integer priorTableSize) {
this.priorTableSize = priorTableSize;
}
}
/**
* @return the keyQuery
*/
public String getKeyQuery() {
return keyQuery;
}
/**
* @param keyQuery the keyQuery to set
*/
public void setKeyQuery(String keyQuery) {
this.keyQuery = keyQuery;
}
/**
* @return the array
*/
public String[] getArray() {
return array;
}
/**
* @param array the array to set
*/
public void setArray(String[] array) {
this.array = array;
}
}
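The new keyQuery/array pair backs the <foreach> filter added to conditionsFilters: the service layer splits keyQuery on spaces (see DataInfoServiceImp below) and each token is matched with a LIKE against the concatenated code and name columns. A small, hypothetical illustration:

PagerOptions opts = new PagerOptions();
opts.setKeyQuery("320198 housing");             // hypothetical search input
opts.setArray(opts.getKeyQuery().split(" "));   // normally done in DataInfoServiceImp
// each array element then contributes one SQL predicate of the form:
// AND CONCAT(regionalism_code,system_code,city_name,district_name,system_name)
//     LIKE CONCAT('%',CONCAT(?,'%'))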

@ -36,6 +36,11 @@ public class VolumeEntity {
/** mount point */
private String path;
/** status returned when the volume exists: Started, Stopped, Created */
private String status;
private String type;
/** volume folder tree */
private List<FolderNode> folder = new ArrayList<FolderNode>();
@ -98,6 +103,34 @@ public class VolumeEntity {
this.path = path;
}
/**
* @return the status
*/
public String getStatus() {
return status;
}
/**
* @param status the status to set
*/
public void setStatus(String status) {
this.status = status;
}
/**
* @return the type
*/
public String getType() {
return type;
}
/**
* @param type the type to set
*/
public void setType(String type) {
this.type = type;
}
/**
* @return the folder
*/

@ -161,7 +161,7 @@ public class VolumeInfo {
* @return
* @see [##]
*/
public Long getVolumeAvailableSize(String volumeName) {
public Long getVolumeAvailableSize(String volumeName) throws Exception{
log.info("get volume availableSize");
Long allSize = 0L;
@ -187,7 +187,7 @@ public class VolumeInfo {
* @return
* @see [##]
*/
public Long getVolumeUseSize(String volumeName) {
public Long getVolumeUseSize(String volumeName) throws Exception{
log.info("get volume used size");
Long usedSize = 0L;
if (volumeIsExists(volumeName) == false) {

@ -2,15 +2,18 @@ package com.platform.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import org.springframework.ui.ModelMap;
import com.base.CustomException;
import com.platform.dao.DataInfoDao;
import com.platform.entities.DataInfoEntity;
import com.platform.entities.PagerOptions;
import com.platform.utils.Bean2MapUtils;
@Service(value = "dataInfoService")
public class DataInfoServiceImp implements DataInfoService {
@ -23,22 +26,31 @@ public class DataInfoServiceImp implements DataInfoService {
@Override
public ModelMap getPagerTableData(PagerOptions pagerOptions) {
// TODO Auto-generated method stub
ModelMap modelMap = new ModelMap();
int count = dfdDao.getLimitedDataCount(pagerOptions); // get the total record count
System.out.println("total colume " + count);
int offset = 0;
if (pagerOptions.getCurrentPageNum() > 1) {
pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1)
* pagerOptions.getPriorTableSize());
offset = dfdDao.getLimitedBeginId(pagerOptions); // get the starting query id
System.out.println(offset);
String querystr = pagerOptions.getKeyQuery();
String[] querys = null;
try {
if (null != querystr && !"".equals(querystr)) {
querys = querystr.split(" ");
}
pagerOptions.setArray(querys);
int count = dfdDao.getLimitedDataCount(pagerOptions); // get the total record count
System.out.println("total colume " + count);
int offset = 0;
if (pagerOptions.getCurrentPageNum() > 1) {
pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1)
* pagerOptions.getPriorTableSize());
offset = dfdDao.getLimitedBeginId(pagerOptions); // get the starting query id
System.out.println(offset);
}
pagerOptions.setOffset(offset + 1);
List<DataInfoEntity> result = dfdDao
.getLimitedDataInfoEntities(pagerOptions);
modelMap.addAttribute("data", result);
modelMap.addAttribute("length", count);
} catch (Exception e) {
new CustomException();
}
pagerOptions.setOffset(offset + 1);
List<DataInfoEntity> result = dfdDao
.getLimitedDataInfoEntities(pagerOptions);
modelMap.addAttribute("data", result);
modelMap.addAttribute("length", count);
return modelMap;
}

@ -84,11 +84,16 @@ public class MoveDataServiceImpl implements IMoveDataService{
}
//TODO regex: extract the path that follows data, e.g. XXX/320198_16/1 or XXX/320122KFQ_15/1
Matcher matcher = pattern.matcher(dataInfoEntity.getDataPath());
// the first character of tailPath is a / character
// the first character of tailPath is a /; tail regex: "\\/\\d+[a-z]*[A-Z]*_\\d+\\/\\d*\\/*$"
if (matcher.find()) {
tailPath = matcher.group();
}
String finalDestPath = dstPath + tailPath;
Matcher matchertmp2 = pattern2.matcher(finalDestPath);
//append a trailing / if the path does not already end with one
if (!matchertmp2.find()) {
finalDestPath = finalDestPath + "/";
}
DataInfoEntityMoveTmp dataMove = new DataInfoEntityMoveTmp();
dataMove.setSystemCode(dataInfoEntity.getSystemCode());
dataMove.setRegionalismCode(dataInfoEntity.getRegionalismCode());
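A runnable sketch of the tail-path handling above: pattern is taken from the regex quoted in the comment, while pattern2 is not shown in this diff and is assumed here to test for a trailing slash; the paths are made up.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TailPathSketch {
    public static void main(String[] args) {
        Pattern pattern = Pattern.compile("\\/\\d+[a-z]*[A-Z]*_\\d+\\/\\d*\\/*$");
        Pattern pattern2 = Pattern.compile("/$");        // assumed trailing-slash check
        String dataPath = "/gfsdata/oracle/320198_16/1"; // hypothetical source path
        String dstPath = "/gfsdata/volume2";             // hypothetical destination volume
        String tailPath = "";
        Matcher matcher = pattern.matcher(dataPath);
        if (matcher.find()) {
            tailPath = matcher.group();                  // "/320198_16/1"
        }
        String finalDestPath = dstPath + tailPath;
        if (!pattern2.matcher(finalDestPath).find()) {
            finalDestPath = finalDestPath + "/";         // ensure a trailing "/"
        }
        System.out.println(finalDestPath);               // /gfsdata/volume2/320198_16/1/
    }
}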

@ -108,14 +108,14 @@ public class ThreadMoveData{
}
if (srcSize > 0 && dstSize > 0) {
realRate = (dstSize*100 / srcSize );
dataMove.setRate((int) realRate);
dataMove.setLastTime(DateForm.date2StringBysecond(new Date()));
}
if (srcSize == dstSize) {
realRate = 100;
}
dataMove.setRate((int) realRate);
}
if("1".equals(dataMove.getCompleteStatus()) &&dataMove.getRate() > 0){
if("1".equals(dataMove.getCompleteStatus()) && dataMove.getRate() > 0){
//transfer complete: run verification
if (realRate == 100) {
//TODO perform MD5 verification
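For reference, a minimal sketch of the progress computation shown above (the byte counts are hypothetical): the percentage is derived only when both sizes are positive, and equal sizes force 100 before the MD5 check is considered.

long srcSize = 2048L, dstSize = 1024L;    // hypothetical byte counts
long realRate = 0L;
if (srcSize > 0 && dstSize > 0) {
    realRate = (dstSize * 100 / srcSize); // integer percentage, here 50
}
if (srcSize == dstSize) {
    realRate = 100;                       // identical sizes count as complete
}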

@ -39,11 +39,11 @@ public class Constant {
/**
* volume refresh thread sleep interval (ms)
*/
public final static int get_volume_sleep_time = 300000;
public final static int get_volume_sleep_time = 180000;
/**
* dataInfo update thread sleep interval (ms)
*/
public final static int update_dataInfo_sleep_time = 3000;
public final static int update_dataInfo_sleep_time = 30000;
}

@ -6,6 +6,8 @@ import java.util.Map;
import org.springframework.stereotype.Service;
import com.base.Custom4exception;
import com.base.CustomException;
import com.platform.entities.Brick;
import com.platform.entities.FolderNode;
import com.platform.entities.VolumeEntity;
@ -42,7 +44,6 @@ public class ThreadVolume extends Thread implements Runnable{
public void run() {
super.run();
while(true){
try {
List<FolderNode> folderlist = new ArrayList<FolderNode>();
List<VolumeEntity> volumeList = new ArrayList<VolumeEntity>();
// map of brick statuses
@ -52,63 +53,71 @@ public class ThreadVolume extends Thread implements Runnable{
List<String> volumeNameList = volumeInfo.showAllVolumeName();
if (null != volumeNameList) {
for (String volumeName : volumeNameList) {
VolumeEntity volume = new VolumeEntity();
volume.setName(volumeName);
List<String> path = volumeInfo.getVolumeMountPoint(volumeName);
//load the first path by default
if (null != path && path.size() > 0) {
volume.setPath(path.get(0));
}
volume.setAllSize(volumeInfo.getVolumeAvailableSize(volumeName));
volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName));
//TODO query bricks
//returns ip:path
List<String> brickStrs = volumeInfo.getVolumeBricks(volumeName);
//used size per brick
Map<String, Double> usedSize = volumeInfo.getVolumebricksDataSize(volumeName);
Map<String, Double> availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName);
List<Brick> brickList = new ArrayList<Brick>();
for (String brickIpPath : brickStrs) {
Brick b = new Brick();
String ipAndpath[] = brickIpPath.split(":");
String brickip = ipAndpath[0];
String brickpath = ipAndpath[1];
//ip, path
b.setIp(brickip);
if(brickStatusMap==null || brickStatusMap.size()==0){
b.setStatus(false);
}else if (brickStatusMap.containsKey(brickip)) {
b.setStatus(true);
try {
VolumeEntity volume = new VolumeEntity();
volume.setName(volumeName);
List<String> path = volumeInfo.getVolumeMountPoint(volumeName);
//load the first path by default
if (null != path && path.size() > 0) {
volume.setPath(path.get(0));
}
else {
b.setStatus(false);
volume.setAllSize(volumeInfo.getVolumeAvailableSize(volumeName));
volume.setStatus(volumeInfo.getVolumeStatus(volumeName));
volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName));
volume.setType(volumeInfo.getVolumeType(volumeName));
//TODO query bricks
//returns ip:path
List<String> brickStrs = volumeInfo.getVolumeBricks(volumeName);
//used size per brick
Map<String, Double> usedSize = volumeInfo.getVolumebricksDataSize(volumeName);
Map<String, Double> availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName);
List<Brick> brickList = new ArrayList<Brick>();
for (String brickIpPath : brickStrs) {
Brick b = new Brick();
String ipAndpath[] = brickIpPath.split(":");
String brickip = ipAndpath[0];
String brickpath = ipAndpath[1];
//ip, path
b.setIp(brickip);
if(brickStatusMap==null || brickStatusMap.size()==0){
b.setStatus(false);
}else if (brickStatusMap.containsKey(brickip)) {
b.setStatus(true);
}
else {
b.setStatus(false);
}
b.setPath(brickpath);
b.setAvailableSize(availableSize.get(brickIpPath));
b.setUsedSize(usedSize.get(brickIpPath));
brickList.add(b);
}
b.setPath(brickpath);
b.setAvailableSize(availableSize.get(brickIpPath));
b.setUsedSize(usedSize.get(brickIpPath));
brickList.add(b);
}
volume.setBrick(brickList);
//load the first path by default
if (null != path && path.size() > 0) {
//add to the folder list
//query the folder tree under each volume
FolderNode foldertmp = gfsTree.getDatas(path.get(0));
folderlist.add(foldertmp);
volume.setBrick(brickList);
//load the first path by default
if (null != path && path.size() > 0) {
//add to the folder list
//query the folder tree under each volume
FolderNode foldertmp = gfsTree.getDatas(path.get(0));
folderlist.add(foldertmp);
}
volumeList.add(volume);
}catch (Exception e) {
new CustomException(Custom4exception.threadVolume_class_Except,e);
}
volumeList.add(volume);
}
}
//TODO update the folder tree
CacheTreeData.setFolders(folderlist);
CacheTreeData.setVolumeList(volumeList);
try {
Thread.sleep(Constant.get_volume_sleep_time);
} catch (InterruptedException e) {
new CustomException(Custom4exception.threadVolume_Thread_Except,e);
}
Thread.sleep(Constant.get_volume_sleep_time);
} catch (InterruptedException e) {
}
}
}
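Condensed, hypothetical sketch of the refresh loop shape introduced above: the new per-volume try/catch keeps one failing volume (for example, a getVolumeStatus or getVolumeUseSize call that now throws) from aborting the whole pass, and the sleep keeps its own catch for InterruptedException.

while (true) {
    List<VolumeEntity> volumeList = new ArrayList<VolumeEntity>();
    for (String volumeName : volumeInfo.showAllVolumeName()) {
        try {
            VolumeEntity volume = new VolumeEntity();
            volume.setName(volumeName);
            volume.setStatus(volumeInfo.getVolumeStatus(volumeName)); // may throw
            volumeList.add(volume);
        } catch (Exception e) {
            new CustomException(Custom4exception.threadVolume_class_Except, e); // failure handled per volume
        }
    }
    CacheTreeData.setVolumeList(volumeList);
    try {
        Thread.sleep(Constant.get_volume_sleep_time);
    } catch (InterruptedException e) {
        new CustomException(Custom4exception.threadVolume_Thread_Except, e);
    }
}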
