glusterfs-api
chenlw 8 years ago
parent 355b6e47a4
commit cb3af77285

@ -8,9 +8,11 @@
<typeAlias alias="DataInfoEntity" type="com.platform.entities.DataInfoEntity" />
<typeAlias alias="PagerOptions" type="com.platform.entities.PagerOptions"/>
<typeAlias alias="GatherOracleInfo" type="com.platform.entities.GatherOracleInfo"/>
<typeAlias alias="DataInfoEntityMoveTmp" type="com.platform.entities.DataInfoEntityMoveTmp"/>
</typeAliases>
<mappers>
<mapper resource="com/dao/mapper/data-details-mapper.xml" />
<mapper resource="com/dao/mapper/config-details-mapper.xml"/>
<mapper resource="com/dao/mapper/dataInfoMoveTmpmapper.xml"/>
</mappers>
</configuration>

@ -3,6 +3,7 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
xmlns:tx="http://www.springframework.org/schema/tx" xmlns:aop="http://www.springframework.org/schema/aop"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:task="http://www.springframework.org/schema/task"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/tx
@ -10,7 +11,9 @@
http://www.springframework.org/schema/aop
http://www.springframework.org/schema/aop/spring-aop-3.0.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-3.0.xsd">
http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task-3.0.xsd">
<!-- 读取db.properties中的属性值 -->
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
@ -71,4 +74,8 @@
<context:exclude-filter type="annotation"
expression="org.springframework.stereotype.Controller" />
</context:component-scan>
<!-- Enables the Spring Task @Scheduled programming model -->
<task:executor id="executor" pool-size="5" />
<task:scheduler id="scheduler" pool-size="10" />
<task:annotation-driven executor="executor" scheduler="scheduler" />
</beans>

@ -18,7 +18,7 @@
</filter>
<filter-mapping>
<filter-name>CharacterEncodingFilter</filter-name>
<url-pattern>/*</url-pattern>
<url-pattern>/</url-pattern>
</filter-mapping>
<!-- spring配置文件 -->
<context-param>

@ -79,6 +79,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
</otherwise>
</choose>
</where>
ORDER BY data_details.id
<if test="PagerOptions.limit > 0">
LIMIT #{PagerOptions.limit}
@ -105,4 +106,103 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
ORDER BY id LIMIT 0,#{PagerOptions.totalLimit}) AS TEMP
</select>
<!-- Inserts one data_info row. The column list and the VALUES list are built
     with IDENTICAL <if> tests so they always stay in lockstep; the original
     used "systemCode > 0" for the column but "systemCode != null" for the
     value (always true for a boxed int), which desynchronized the two lists
     whenever systemCode was 0 and produced invalid SQL. -->
<insert id="save" parameterType="com.platform.entities.DataInfoEntity">
	INSERT INTO
	data_info(
	<trim suffixOverrides=",">
		<if test="regionalismCode != null and regionalismCode != ''">
			regionalism_code,
		</if>
		<if test="systemCode > 0">
			system_code,
		</if>
		<if test="dataType != null and dataType != ''">
			data_type,
		</if>
		<if test="dataVersion != null">
			data_version,
		</if>
		<if test="submittedBatch != null and submittedBatch != ''">
			submitted_batch,
		</if>
		<if test="dataPath != null and dataPath != ''">
			data_path,
		</if>
		<if test="collectingTime != null">
			collection_time,
		</if>
		<if test="collectorName != null and collectorName != ''">
			collector_name,
		</if>
		<if test="collectorContacts != null and collectorContacts != ''">
			collector_contacts,
		</if>
		<if test="charset != null and charset != ''">
			data_charset,
		</if>
		<if test="year != null and year != ''">
			data_year
		</if>
	</trim>
	)
	VALUES(
	<trim suffixOverrides=",">
		<if test="regionalismCode != null and regionalismCode != ''">
			#{regionalismCode},
		</if>
		<if test="systemCode > 0">
			#{systemCode},
		</if>
		<if test="dataType != null and dataType != ''">
			#{dataType},
		</if>
		<if test="dataVersion != null">
			#{dataVersion},
		</if>
		<if test="submittedBatch != null and submittedBatch != ''">
			#{submittedBatch},
		</if>
		<if test="dataPath != null and dataPath != ''">
			#{dataPath},
		</if>
		<if test="collectingTime != null">
			#{collectingTime},
		</if>
		<if test="collectorName != null and collectorName != ''">
			#{collectorName},
		</if>
		<if test="collectorContacts != null and collectorContacts != ''">
			#{collectorContacts},
		</if>
		<if test="charset != null and charset != ''">
			#{charset},
		</if>
		<if test="year != null and year != ''">
			#{year}
		</if>
	</trim>
	)
</insert>
<!-- Returns data_path for every still-present row (remove = '0') whose id is
     in the given list; used to check which records physically exist before a
     delete. Despite the name, the result is a list of paths, not a flag. -->
<select id="getIdIsExist" parameterType="java.util.List" resultType="java.lang.String">
select data_path
from data_info
where id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
and remove ='0'
</select>
<!-- Soft delete: marks the given ids as removed (remove = '1') instead of
     physically deleting the rows; already-removed rows are left untouched. -->
<update id="removes" parameterType="java.util.List">
UPDATE
data_info
set remove = '1'
where id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
and remove ='0'
</update>
</mapper>

@ -41,16 +41,26 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
jdbcType="VARCHAR" />
<result property="rate" column="rate" javaType="int"
jdbcType="INTEGER" />
<result property="lastTime" column="lasttime"
javaType="string" jdbcType="VARCHAR" />
</resultMap>
<sql id="Base_Column_List">
regionalism_code,system_code,dst_path,lasttime
</sql>
<!-- 获取数据全部记录信息 -->
<select id="findAll" parameterType="" resultMap="getEntityByText">
<select id="findAll" resultType="com.platform.entities.DataInfoEntityMoveTmp">
SELECT
a.id,a.regionalism_code,b.city_name,b.district_name, a.system_code,b.system_name,b.data_type,b.data_version,b.submitted_batch,
b.data_path,b.data_charset,b.collection_time,b.collector_name,b.collector_contacts,b.data_year,a.dst_path,a.complete_status,a.rate
a.id id,a.regionalism_code regionalismCode,b.city_name cityName,b.district_name districtName,
a.system_code systemCode,b.system_name systemName,b.data_type dataType,b.data_version dataVersion,
b.submitted_batch submittedBatch,b.data_path dataPath,b.data_charset charset,b.collection_time collectionTime,
b.collector_name collectorName,b.collector_contacts collectorContacts,b.data_year dataYear,a.dst_path dstPath,
a.complete_status completeStatus,a.rate rate, a.lasttime lastTime
FROM
move_data_tmp a LEFT JOIN data_details b
ON a.system_code = b.system_code AND a.regionalism_code = b.regionalism_code;
ON a.system_code = b.system_code AND a.regionalism_code = b.regionalism_code
ORDER BY a.id
</select>
<update id="update" parameterType="com.platform.entities.DataInfoEntityMoveTmp">
@ -58,19 +68,18 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
move_data_tmp
<set >
<trim suffixOverrides=",">
<if test="regionalismCode != null and regionalismCode != ''">
regionalism_code = #{regionalismCode},
</if>
<if test="systemCode != null and systemCode != ''">
system_code= #{systemCode},
</if>
<if test="dstPath != null and dstPath != ''">
dst_path= #{dstPath},
</if>
<if test="completeStatus != null and completeStatus != ''">
complete_status= #{completeStatus},
</if>
rate= #{rate}
<if test="rate > 0">
rate= #{rate},
</if>
<if test="lastTime != null and lastTime != ''">
lasttime= #{lastTime},
</if>
</trim>
</set>
<where>
@ -78,7 +87,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
</where>
</update>
<insert id="save" parameterType="">
<insert id="save" parameterType="com.platform.entities.DataInfoEntityMoveTmp">
INSERT INTO
move_data_tmp(
<trim suffixOverrides=",">
@ -88,7 +97,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="regionalismCode != null and regionalismCode != ''">
regionalism_code,
</if>
<if test="systemCode != null and systemCode != ''">
<if test="systemCode != null">
system_code,
</if>
<if test="dstPath != null and dstPath != ''">
@ -97,7 +106,12 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="completeStatus != null and completeStatus != ''">
complete_status,
</if>
rate
<if test="rate > 0">
rate,
</if>
<if test="lastTime != null and lastTime != ''">
lasttime,
</if>
</trim>
)
VALUES(
@ -117,19 +131,32 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
<if test="completeStatus != null and completeStatus != ''">
#{completeStatus},
</if>
#{rate}
<if test="rate > 0">
#{rate}
</if>
<if test="lastTime != null and lastTime != ''">
#{lastTime},
</if>
</trim>
)
</insert>
<delete id="remove" parameterType="java.lang.INTEGER">
<!-- Batch-inserts move tasks into move_data_tmp. This statement executes an
     INSERT and therefore must be declared with <insert> — the original
     <select> declaration made MyBatis treat it as a query, which breaks the
     statement type and the executor's update handling. -->
<insert id="insertBatch" parameterType="java.util.List">
	INSERT INTO move_data_tmp ( <include refid="Base_Column_List" /> )
	VALUES
	<foreach collection="list" item="item" index="index" separator=",">
		(#{item.regionalismCode,jdbcType=VARCHAR},#{item.systemCode,jdbcType=INTEGER},#{item.dstPath,jdbcType=VARCHAR},#{item.lastTime,jdbcType=VARCHAR})
	</foreach>
</insert>
<!-- Physically deletes a single move task by primary key. -->
<delete id="remove" parameterType="java.lang.Integer">
DELETE FROM
move_data_tmp
WHERE
id = #{id}
</delete>
<!-- 获取数据符合筛选条件的总记录条数 -->
<!-- 获取数据符合筛选条件的总记录条数 -->
<!--
<select id="getLimitedDataCount" resultType="java.lang.Integer"
parameterType="PagerOptions">
SELECT COUNT(id) FROM move_data_tmp
@ -141,11 +168,11 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
</select>
<!-- 获取数据查询的起始di -->
获取数据查询的起始di
<select id="getLimitedBeginId" resultType="java.lang.Integer"
parameterType="PagerOptions">
SELECT MAX(idx) FROM (SELECT id idx FROM move_data_tmp
ORDER BY id LIMIT 0,#{PagerOptions.totalLimit}) AS TEMP
</select>
</select> -->
</mapper>

@ -1,5 +1,6 @@
package com.platform.controller;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
@ -24,6 +25,7 @@ import org.springframework.web.bind.annotation.ResponseBody;
import com.base.BaseController;
import com.platform.entities.DataInfoEntity;
import com.platform.entities.DataInfoEntityMoveTmp;
import com.platform.entities.FolderNode;
import com.platform.entities.GatherOracleInfo;
import com.platform.entities.OracleConnectorParams;
@ -31,6 +33,7 @@ import com.platform.entities.PagerOptions;
import com.platform.entities.VolumeEntity;
import com.platform.service.DataInfoService;
import com.platform.service.IGfsService;
import com.platform.service.IMoveDataService;
import com.platform.service.IMySqlService;
import com.platform.service.IOracleExtractService;
import com.platform.service.OracleExtractHelper;
@ -39,6 +42,7 @@ import com.platform.service.impl.MySqlServiceImpl;
import com.platform.test.Brick;
import com.platform.test.FolderReader;
import com.platform.test.Volume;
import com.platform.utils.Bean2MapUtils;
import com.platform.utils.Configs;
import com.platform.utils.UtilsHelper;
@ -56,6 +60,9 @@ public class DataModelController extends BaseController{
@Resource(name = "OracleExtract")
private IOracleExtractService OracleExtract;
@Resource(name = "moveDataService")
private IMoveDataService moveDataService;
public void setDfsImp(DataInfoService dfs) {
this.dfs = dfs;
}
@ -63,11 +70,12 @@ public class DataModelController extends BaseController{
@RequestMapping("/data.json")
@ResponseBody
public ModelMap getAllDataToJson(HttpServletRequest res,
HttpServletResponse req) {
HttpServletResponse req) throws UnsupportedEncodingException {
res.setCharacterEncoding("UTF-8");
Map<String, String[]> paramMap = res.getParameterMap();
Set<String> keySet = paramMap.keySet();
Map<String, String> params = new HashMap<String, String>();
StringBuffer sb = new StringBuffer().append("当前的请求参数:{");
StringBuffer sb = new StringBuffer().append("当前的请求参数:{");
for (String str : keySet) {
String value = paramMap.get(str)[0];
if (StringUtils.isNotEmpty(value)) {
@ -85,15 +93,17 @@ public class DataModelController extends BaseController{
return dfs.getPagerTableData(pagerOptions);
}
@RequestMapping("/delete/data")
public void deleteData(HttpServletRequest res, HttpServletResponse req) {
@RequestMapping(value="/delete/data", method= RequestMethod.POST)
public void deleteData(HttpServletRequest res, HttpServletResponse req) throws Exception {
res.setCharacterEncoding("UTF-8");
Map<String, String[]> paramMap = res.getParameterMap();
String[] data = paramMap.get("data");
dfs.deleteData(data);
}
@RequestMapping("/connectOracle")
public void connectOracle(HttpServletRequest res, HttpServletResponse req) {
public void connectOracle(HttpServletRequest res, HttpServletResponse req) throws UnsupportedEncodingException {
res.setCharacterEncoding("UTF-8");
Map<String, String[]> paramMap = res.getParameterMap();
String[] oraclesName = paramMap.get("oracleName");
if (oraclesName != null)
@ -105,21 +115,23 @@ public class DataModelController extends BaseController{
@RequestMapping("/cancelOracleConection")
public void cancelOracleConnection(HttpServletRequest res,
HttpServletResponse req) {
HttpServletResponse req) throws UnsupportedEncodingException {
res.setCharacterEncoding("UTF-8");
Map<String, String[]> paramMap = res.getParameterMap();
String[] oraclesName = paramMap.get("oracleName");
String operate = paramMap.get("operation")[0];
if (null != oraclesName) {
for (String rcName : oraclesName) {
Configs.CONSOLE_LOGGER.info("连接成功:\t" + rcName);
Configs.CONSOLE_LOGGER.info("取消连接:\t" + rcName);
new OracleStatusService().cancelToOracle(rcName, operate);
}
}
}
@RequestMapping("/oracle/{name}/extract")
@RequestMapping(value="/oracle/{name}/extract", method= RequestMethod.POST)
public void extractOracleData(HttpServletRequest res,
HttpServletResponse req, String name) throws Exception {
res.setCharacterEncoding("UTF-8");
System.out.println("------extract-------");
System.out.println(name);
Map<String, String[]> paramMap = res.getParameterMap();
@ -144,7 +156,7 @@ public class DataModelController extends BaseController{
}
@RequestMapping("/volume/list")
@RequestMapping(value="/volume/list", method= RequestMethod.POST)
@ResponseBody
public List<VolumeEntity> getFolder(HttpServletRequest res, HttpServletResponse req) throws Exception {
System.out.println("get Request");
@ -161,6 +173,7 @@ public class DataModelController extends BaseController{
@ResponseBody
public Object move(HttpServletRequest res, HttpServletResponse req, String name,
FolderNode selectNode, DataInfoEntity selectItems) throws Exception {
res.setCharacterEncoding("UTF-8");
System.out.println("get Request");
Map<String, String[]> paramMap = res.getParameterMap();
// System.out.println(paramMap);
@ -174,22 +187,23 @@ public class DataModelController extends BaseController{
List<String> listItemPath = new ArrayList<String>();
String[] items = paramMap.get("selectItems");
System.out.println("selectItems");
List<DataInfoEntity> datas = new ArrayList<DataInfoEntity>();
for (String string : items) {
System.out.println(string);
JSONObject jsobj = JSONObject.fromObject(string);
Map itemmap = jsobj;
listItemPath.add((String)itemmap.get("dataPath"));
DataInfoEntity data = (DataInfoEntity) Bean2MapUtils.convertMap(DataInfoEntity.class, itemmap);
datas.add(data);
}
System.out.println("------/volume/{name}/move--------");
boolean result = false ;
if (datas.size() >0) {
result = moveDataService.moveData(datas, (String)map.get("path"));
}
System.out.println(name);
System.out.println(nodes);
System.out.println(selectItems);
System.out.println("--------------");
int result = gfsService.copyFolder(listItemPath, (String)map.get("path"), "app");
return result;
}
@RequestMapping("/oracle/list")
@RequestMapping(value="/oracle/list", method= RequestMethod.POST)
@ResponseBody
public List<GatherOracleInfo> getOracleInfo(HttpServletRequest res,
HttpServletResponse req) throws Exception {
@ -199,30 +213,48 @@ public class DataModelController extends BaseController{
return result;
}
@RequestMapping("/oracle/{id}/delete")
@RequestMapping(value="/oracle/{id}/delete", method= RequestMethod.POST)
@ResponseBody
public void deleteOracleInfo(HttpServletRequest res,
HttpServletResponse req, int id) throws Exception {
res.setCharacterEncoding("UTF-8");
mySqlService.deleteMySql(id);
System.out.println("----------deleteOracleInfo-----------------------");
}
@RequestMapping("/oracle/{id}/insert")
@RequestMapping(value="/oracle/{id}/insert", method= RequestMethod.POST)
@ResponseBody
public void insertOracleInfo(HttpServletRequest res,
HttpServletResponse req, GatherOracleInfo oracle) throws Exception {
res.setCharacterEncoding("UTF-8");
mySqlService.insertOracle(oracle);
System.out.println("----------insertOracleInfo-----------------------");
}
@RequestMapping("/oracle/{id}/update")
@RequestMapping(value="/oracle/update", method= RequestMethod.POST)
@ResponseBody
public void updateOracleInfo(HttpServletRequest res,
HttpServletResponse req, GatherOracleInfo oracle) throws Exception {
res.setCharacterEncoding("gb2312");
System.out.println(oracle.getName());
System.out.println(oracle);
mySqlService.updateOracle(oracle);
System.out.println("----------updateOracleInfo-----------------------");
}
/**
 * Lists every data-move task currently recorded in the transfer queue.
 * The returned list is serialized to JSON by Spring via @ResponseBody.
 */
@RequestMapping(value = "/task/transfer/list", method = RequestMethod.POST)
@ResponseBody
public Object taskList() throws Exception {
	// Straight delegation: the service owns the query.
	return moveDataService.findAll();
}
/**
 * Deletes a data-move task from the transfer queue.
 *
 * NOTE(review): dataMove is always null here, so the service is invoked with
 * no task selected — presumably request-parameter binding (e.g. an id or a
 * bound DataInfoEntityMoveTmp) is still to be wired up; confirm the intended
 * contract of moveDataService.delete(null) before relying on this endpoint.
 */
@RequestMapping(value="/task/transfer/deletes", method= RequestMethod.POST)
@ResponseBody
public Object taskdeletes() throws Exception {
DataInfoEntityMoveTmp dataMove = null;
int result = moveDataService.delete(dataMove);
return result;
}
}

@ -3,10 +3,12 @@ package com.platform.dao;
import java.util.List;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import com.platform.entities.DataInfoEntity;
import com.platform.entities.PagerOptions;
@Repository(value = "dataInfoDao")
public interface DataInfoDao {
int getLimitedDataCount(@Param("PagerOptions")PagerOptions pagerOptions);
@ -15,6 +17,10 @@ public interface DataInfoDao {
List<DataInfoEntity> getLimitedDataInfoEntities(@Param("PagerOptions")PagerOptions pagerOptions);
List<String> getIdIsExist(int parseInt);
List<String> getIdIsExist(List<Integer> list)throws Exception;
int removes(List<Integer> list)throws Exception;
int save(DataInfoEntity data) throws Exception;
}

@ -23,6 +23,7 @@ public interface DataInfoMoveTmpDao {
int save(DataInfoEntityMoveTmp data) throws Exception;
int remove(DataInfoEntityMoveTmp data) throws Exception;
void insertBatch(List<DataInfoEntityMoveTmp> list) throws Exception;
int remove(int id) throws Exception;
}

@ -33,6 +33,11 @@ public class Brick {
/** 路径 */
private String path;
/**
* status flag: true/false — original comment garbled; presumably indicates brick availability (confirm)
*/
private boolean status;
/**
* @return the availableSize
*/
@ -89,5 +94,17 @@ public class Brick {
this.path = path;
}
/**
* @return the status
*/
public boolean isStatus() {
return status;
}
/**
* @param status the status to set
*/
public void setStatus(boolean status) {
this.status = status;
}
}

@ -8,6 +8,12 @@ public class DataInfoEntityMoveTmp extends DataInfoEntity {
private int rate;
private String lastTime; // 采集时间
public DataInfoEntityMoveTmp() {
// TODO Auto-generated constructor stub
}
/**
* @return the dstPath
*/
@ -50,5 +56,18 @@ public class DataInfoEntityMoveTmp extends DataInfoEntity {
this.rate = rate;
}
/**
* @return the lastTime
*/
public String getLastTime() {
return lastTime;
}
/**
* @param lastTime the lastTime to set
*/
public void setLastTime(String lastTime) {
this.lastTime = lastTime;
}
}

@ -1,12 +1,13 @@
package com.platform.entities;
import java.util.ArrayList;
import java.util.List;
public class FolderNode {
private String name;
private int isFolder; // 1 is file and other integer is folder show children number
private String path;
private List<FolderNode> childNodes;
private List<FolderNode> childNodes = new ArrayList<FolderNode>();
public FolderNode() {
// TODO Auto-generated constructor stub

@ -37,7 +37,7 @@ public class VolumeEntity {
private String path;
/** volume树形目录 */
private FolderNode folder;
private List<FolderNode> folder = new ArrayList<FolderNode>();
/** volume的 块 */
private List<Brick> brick = new ArrayList<Brick>();
@ -101,14 +101,14 @@ public class VolumeEntity {
/**
* @return the folder
*/
public FolderNode getFolder() {
public List<FolderNode> getFolder() {
return folder;
}
/**
* @param folder the folder to set
*/
public void setFolder(FolderNode folder) {
public void setFolder(List<FolderNode> folder) {
this.folder = folder;
}

@ -1,8 +1,9 @@
package com.platform.glusterfs;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
@ -46,6 +47,13 @@ public class CopyData {
*/
public int copyFolderFiles(String sourceFolderName, String destFolderName, String fileName) {
int progress=0;
// 末尾 含有 /
Pattern pattern2 = Pattern.compile("\\/$");
Matcher matcher = pattern2.matcher(sourceFolderName);
if (matcher.find()) {
sourceFolderName = sourceFolderName.substring(0, sourceFolderName.length()-1);
}
log.info("start copy " + fileName + " from " + sourceFolderName + " to " + destFolderName);
ShowData showData=new ShowData();
Map<String,String> reStrings=showData.showFolderData(destFolderName);
@ -71,6 +79,41 @@ public class CopyData {
return 1;
}
/**
 * Recursively copies the whole source folder into the destination folder by
 * dispatching a remote "cp -r" over SSH. The command is fire-and-forget
 * (execCmdNoWaitAcquiescent): the copy keeps running after this returns.
 *
 * Return codes: 1 = copy command dispatched; -2 = sourceFolderName does not
 * exist; -3 = destFolderName does not exist.
 *
 * NOTE(review): both paths are interpolated into a shell command unquoted —
 * callers must not pass untrusted or space-containing paths here.
 *
 * @param sourceFolderName absolute path of the folder to copy
 * @param destFolderName   absolute path of the existing target folder
 * @return status code as described above
 */
public int copyFolderFiles(String sourceFolderName, String destFolderName) {
int progress=0; // unused; kept for parity with the 3-arg overload
log.info("start copy " + " from " + sourceFolderName + " to " + destFolderName);
ShowData showData=new ShowData();
// Listing the destination doubles as an existence check.
Map<String,String> reStrings=showData.showFolderData(destFolderName);
if(reStrings==null){
log.info("3201 "+destFolderName+" is not exists");
return -3;
}
reStrings=showData.showFolderData(sourceFolderName);
if(reStrings==null){
log.info("3202 "+sourceFolderName+" is not exists");
return -2;
}
String command = "cp -r " + sourceFolderName+" "+destFolderName;
/*
 * RunCommand runCommand = new RunCommand();
List<String> reStrings = runCommand.runCommandWait(command);
 */
// Asynchronous dispatch: no wait, no exit-status check.
Constant.ganymedSSH.execCmdNoWaitAcquiescent(command);
log.info("copy " + sourceFolderName +"/" + " to " + destFolderName + " running");
return 1;
}
@Test
public void testCopyFolderFiles() {

@ -89,6 +89,32 @@ public class ShowData {
return data_type;
}
/**
 * Returns the size of a folder in kilobytes, measured remotely with
 * "du -k -d 0" executed over SSH.
 *
 * Return codes: >= 0 size in KB; -1 the folder does not exist;
 * -2 the command produced no output or output that is not a number.
 *
 * NOTE(review): folderPath is interpolated into a shell command unquoted —
 * callers must not pass untrusted input here.
 *
 * @param folderPath absolute path of the folder to measure
 * @return folder size in KB, or a negative error code
 */
public long getFolderSize(String folderPath) {
	log.info("get " + folderPath + " Size ");
	String command = "du -k -d 0 "+folderPath+" | grep " + folderPath + "|awk \'{print $1}\'";
	List<String> reStrings = Constant.ganymedSSH.execCmdWaitAcquiescent(command);
	if(reStrings==null || reStrings.size()==0){
		log.error("get " + folderPath + " Size error!");
		return -2;
	}
	if (reStrings.get(0).contains(Constant.noSuchFile)) {
		log.error(folderPath+" is not exists");
		return -1;
	}
	try {
		// awk prints only the size column; trim to survive stray whitespace.
		return Long.parseLong(reStrings.get(0).trim());
	} catch (NumberFormatException e) {
		// Unexpected output (e.g. a permission-denied message): report the
		// documented error code instead of propagating the runtime exception.
		log.error("get " + folderPath + " Size: unparsable output: " + reStrings.get(0));
		return -2;
	}
}
/**
*

@ -2,10 +2,14 @@ package com.platform.service;
import org.springframework.ui.ModelMap;
import com.platform.dao.DataInfoDao;
import com.platform.entities.DataInfoEntity;
import com.platform.entities.PagerOptions;
public interface DataInfoService {
public ModelMap getPagerTableData(PagerOptions pagerOptions);
void deleteData(String[] id);
void deleteData(String[] id) throws Exception;
int save(DataInfoEntity data) throws Exception;
}

@ -1,5 +1,6 @@
package com.platform.service;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
@ -25,6 +26,7 @@ public class DataInfoServiceImp implements DataInfoService {
// TODO Auto-generated method stub
ModelMap modelMap = new ModelMap();
int count = dfdDao.getLimitedDataCount(pagerOptions); //获取总记录条数
System.out.println("total colume " + count);
int offset = 0;
if (pagerOptions.getCurrentPageNum() > 1) {
pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1)
@ -41,19 +43,29 @@ public class DataInfoServiceImp implements DataInfoService {
}
@Override
public void deleteData(String[] id) {
public void deleteData(String[] id) throws Exception {
// TODO Auto-generated method stub
List<Integer> ids = new ArrayList<Integer>();
for(String idx: id){
ids.add(Integer.parseInt(idx));
}
if (ids.size() > 0) {
//数据在不在?
List<String> paths = dfdDao.getIdIsExist(Integer.parseInt(idx));
List<String> paths = dfdDao.getIdIsExist(ids);
if(paths.size()>0){
//删除文件操作
for (int i = 0; i < paths.size(); i++) {
System.out.println(paths.get(i));
}
//删除数据库记录
//dfdDao.deleteRow(idx);
dfdDao.removes(ids);
}
}
}
@Override
public int save(DataInfoEntity data) throws Exception {
int result = dfdDao.save(data);
return result;
}
}

@ -71,46 +71,51 @@ public class GfsServiceImpl implements IGfsService {
*/
@Override
public List<VolumeEntity> getAllVolumes() throws Exception {
List<VolumeEntity> volumeList = new ArrayList<>();
List<String> volumeNameList = volumeInfo.showAllVolumeName();
if (null == volumeNameList) {
return null;
List<VolumeEntity> volumeList = CacheTreeData.getVolumeList();
if (null == volumeList) {
return new ArrayList<VolumeEntity>();
}
for (String volumeName : volumeNameList) {
VolumeEntity volume = new VolumeEntity();
volume.setName(volumeName);
List<String> path = volumeInfo.getVolumeMountPoint(volumeName);
//默认加载第一个路径
if (null != path && path.size() > 0) {
volume.setPath(path.get(0));
}
volume.setAllSize(volumeInfo.getVolumeAllSize(volumeName));
volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName));
//TODO 查询brick--
//返回 ip:path
List<String> brickStrs = volumeInfo.getVolumeBricks(volumeName);
//brick已用大小
Map<String, Double> usedSize = volumeInfo.getVolumebricksDataSize(volumeName);
Map<String, Double> availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName);
List<Brick> brickList = new ArrayList<Brick>();
for (String brickIpPath : brickStrs) {
Brick b = new Brick();
String ipAndpath[] = brickIpPath.split(":");
String brickip = ipAndpath[0];
String brickpath = ipAndpath[1];
//iP , path ,
b.setIp(brickip);
b.setPath(brickpath);
b.setAvailableSize(availableSize.get(brickIpPath));
b.setUsedSize(usedSize.get(brickIpPath));
brickList.add(b);
}
volume.setBrick(brickList);
for (VolumeEntity volume : volumeList) {
// VolumeEntity volume = new VolumeEntity();
// volume.setName(volumeName);
// List<String> path = volumeInfo.getVolumeMountPoint(volumeName);
// //默认加载第一个路径
// if (null != path && path.size() > 0) {
// volume.setPath(path.get(0));
// }
// volume.setAllSize(volumeInfo.getVolumeAllSize(volumeName));
// volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName));
// //TODO 查询brick--
// //返回 ip:path
// List<String> brickStrs = volumeInfo.getVolumeBricks(volumeName);
// //brick已用大小
// Map<String, Double> usedSize = volumeInfo.getVolumebricksDataSize(volumeName);
// Map<String, Double> availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName);
//
//
// List<Brick> brickList = new ArrayList<Brick>();
// for (String brickIpPath : brickStrs) {
// Brick b = new Brick();
// String ipAndpath[] = brickIpPath.split(":");
// String brickip = ipAndpath[0];
// String brickpath = ipAndpath[1];
// //iP , path ,
// b.setIp(brickip);
// b.setPath(brickpath);
// b.setAvailableSize(availableSize.get(brickIpPath));
// b.setUsedSize(usedSize.get(brickIpPath));
// brickList.add(b);
// }
// volume.setBrick(brickList);
if (null != volume.getPath()) {
// 获得 folder 目录
volume.setFolder(getFolder(volume.getPath()));
List<FolderNode> list = new ArrayList<FolderNode>();
FolderNode currNode = getFolder(volume.getPath());
if (null != currNode && null != currNode.getChildNodes()) {
list.addAll(currNode.getChildNodes());
}
volume.setFolder(list);
}
volumeList.add(volume);
}
return volumeList;
}
@ -127,7 +132,14 @@ public class GfsServiceImpl implements IGfsService {
volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName));
// volume.setBrick(brick);
if (null != volume.getPath()) {
volume.setFolder(getFolder(path.get(0)));
List<FolderNode> list = new ArrayList<FolderNode>();
if (null != path && path.size() > 0) {
FolderNode currNode = getFolder(path.get(0));
if (null != currNode && null != currNode.getChildNodes()) {
list.addAll(currNode.getChildNodes());
}
}
volume.setFolder(list);
}
return volume;
}

@ -0,0 +1,158 @@
package com.platform.service.thread;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import com.platform.dao.DataInfoDao;
import com.platform.dao.DataInfoMoveTmpDao;
import com.platform.dao.GatherOracleDao;
import com.platform.entities.DataInfoEntity;
import com.platform.entities.DataInfoEntityMoveTmp;
import com.platform.entities.GatherOracleInfo;
import com.platform.glusterfs.CheckoutMD5;
import com.platform.glusterfs.CopyData;
import com.platform.glusterfs.ShowData;
import com.platform.service.DataInfoService;
import com.platform.service.DataInfoServiceImp;
import com.platform.service.IMoveDataService;
import com.platform.service.impl.MoveDataServiceImpl;
import com.platform.utils.Bean2MapUtils;
import com.platform.utils.Constant;
import com.platform.utils.DateForm;
@Component
public class ThreadMoveData {

	/** DAO for the main data_info table; completed migrations are saved here. */
	@Resource(name = "dataInfoDao")
	private DataInfoDao dataInfoDao;

	/** Remote folder copy (cp -r over SSH). */
	CopyData copy = new CopyData();

	/** MD5 verification of a copied folder against its source. */
	CheckoutMD5 check = new CheckoutMD5();

	/** DAO for the move_data_tmp work queue. */
	@Resource(name = "dataInfoMoveTmpDao")
	private DataInfoMoveTmpDao dataInfoMoveTmpDao;

	/** Remote folder inspection (sizes via du). */
	ShowData show = new ShowData();

	public ThreadMoveData() {
	}

	/**
	 * Polls the move_data_tmp queue every 5 seconds and drives one migration
	 * at a time. completeStatus codes: "0" waiting, "1" copying, "2" done,
	 * "3" failed.
	 *
	 * Per tick: for the in-flight task ("1") it refreshes the progress rate
	 * from remote folder sizes, fails the task after 20 minutes without a
	 * progress update, and on 100% verifies the copy by MD5 — success copies
	 * the record into data_info, failure marks "3". When nothing is in
	 * flight, the first waiting task ("0") is started.
	 */
	@Scheduled(fixedDelay = 5000)
	public void doSomething() {
		try {
			List<DataInfoEntityMoveTmp> result = dataInfoMoveTmpDao.findAll();
			if (null != result) {
				int rsize = result.size();
				boolean isNoMove = true;
				double realRate = 0.00;
				// Must visit every row — no break in this loop.
				for (int i = 0; i < rsize; i++) {
					DataInfoEntityMoveTmp dataMove = result.get(i);
					if ("1".equals(dataMove.getCompleteStatus())) {
						// Fail the task if no progress update for 20 minutes.
						long nowTime = new Date().getTime();
						long timelong = nowTime - DateForm.string2DateBysecond(dataMove.getLastTime()).getTime();
						if (timelong > 1000 * 60 * 20) {
							dataMove.setCompleteStatus("3");
							dataInfoMoveTmpDao.update(dataMove);
						}
						isNoMove = false;
						// Progress = destination size / source size.
						long srcSize = show.getFolderSize(dataMove.getDataPath());
						long dstSize = show.getFolderSize(dataMove.getDstPath());
						if (srcSize > 0 && dstSize > 0) {
							// Floating-point math: the original integer division
							// (dstSize / srcSize * 100) truncated to 0 until the
							// copy finished, so the rate never advanced.
							realRate = dstSize * 100.0 / srcSize;
							dataMove.setRate((int) realRate);
							dataMove.setLastTime(DateForm.date2StringBysecond(new Date()));
						}
					}
					if ("1".equals(dataMove.getCompleteStatus()) && dataMove.getRate() > 0) {
						// >= instead of == : exact float equality could miss 100%.
						if (realRate >= 100.0) {
							// Copy appears complete: verify with MD5.
							int resl = check.checkoutMD5Folder(dataMove.getDataPath(), dataMove.getDstPath());
							if (resl == 1) {
								// Verified: mark done and register the migrated copy
								// as a regular data_info record at its new path.
								dataMove.setCompleteStatus("2");
								dataMove.setLastTime(DateForm.date2StringBysecond(new Date()));
								dataInfoMoveTmpDao.update(dataMove);
								DataInfoEntity data = (DataInfoEntity) Bean2MapUtils.convertMap(
										DataInfoEntity.class, Bean2MapUtils.convertBean(dataMove));
								data.setDataPath(dataMove.getDstPath());
								data.setYear(dataMove.getYear() + " (迁移完成时间 " + DateForm.date2StringByMin(new Date()) + ")");
								dataInfoDao.save(data);
							}
							else {
								// MD5 mismatch: mark failed.
								dataMove.setCompleteStatus("3");
							}
						}
						dataMove.setLastTime(DateForm.date2StringBysecond(new Date()));
						dataInfoMoveTmpDao.update(dataMove);
					}
				}
				// Nothing in flight: start the next waiting task, one at a time.
				if (isNoMove) {
					result = dataInfoMoveTmpDao.findAll();
					if (null != result) {
						int tmpsize = result.size();
						for (int i = 0; i < tmpsize; i++) {
							DataInfoEntityMoveTmp next2move = result.get(i);
							if ("0".equals(next2move.getCompleteStatus())) {
								// Kick off the remote copy, then mark in-flight.
								copy.copyFolderFiles(next2move.getDataPath(), next2move.getDstPath(), "app");
								next2move.setCompleteStatus("1");
								next2move.setLastTime(DateForm.date2StringBysecond(new Date()));
								dataInfoMoveTmpDao.update(next2move);
								break;
							}
						}
					}
				}
			}
			// NOTE(review): sleeping inside a @Scheduled(fixedDelay) method
			// blocks the scheduler thread; fixedDelay alone already paces the
			// ticks — confirm whether this extra sleep is intentional.
			Thread.sleep(Constant.update_dataInfo_sleep_time);
		} catch (Exception e) {
			System.err.println(e);
		}
	}
}

@ -1,13 +1,17 @@
package com.platform.utils;
import java.util.List;
import java.util.Map;
import com.platform.entities.FolderNode;
import com.platform.entities.VolumeEntity;
public class CacheTreeData {
private static List<FolderNode> folders = null;
private static List<VolumeEntity> volumeList = null;
/**
 * Returns the cached folder tree shared by the background refresh thread
 * and the web layer.
 *
 * @return the cached list of folder nodes, or {@code null} if the cache
 *         has not been populated yet
 */
public static List<FolderNode> getFolders() {
    return CacheTreeData.folders;
}
@ -16,4 +20,20 @@ public class CacheTreeData {
CacheTreeData.folders = folders;
}
/**
 * Returns the cached gluster volume list.
 *
 * @return the cached list of volumes, or {@code null} if the cache
 *         has not been populated yet
 */
public static List<VolumeEntity> getVolumeList() {
    return CacheTreeData.volumeList;
}
/**
 * Replaces the cached gluster volume list.
 *
 * @param list the new volume list to cache (may be {@code null})
 */
public static void setVolumeList(List<VolumeEntity> list) {
    CacheTreeData.volumeList = list;
}
}

@ -5,6 +5,8 @@ import java.util.Properties;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import com.platform.service.thread.ThreadMoveData;
public class ConfigsLoader implements ServletContextListener {
private static ConfigPropertyReader cReader = null;

@ -12,13 +12,20 @@ public class Constant {
// Shell command prefix for querying gluster volume details (volume name is appended by callers).
public static String glusterVolumeInfo = "gluster volume info ";
// Disk-usage command, 1K block size.
public static String df = "df -k ";
// Marker strings matched against peer-status output to detect connected/disconnected peers.
public static String peerincluster_connected="PeerinCluster(Connected)";
public static String peerincluster_disconnected="PeerinCluster(Disconnected)";
// Volume-type keywords as they appear in gluster volume info output.
public static String distributed="distributed";
public static String replica="replica";
public static String stripe="stripe";
// Output marker when no volumes exist on the cluster.
public static String noVolume="No volumes present";
// Output marker for a missing file/directory (e.g. from ls/df).
public static String noSuchFile = "No such file or directory";
// Shared SSH session to the gluster host; hostIp/rootUser/rootPasswd/port are
// presumably declared earlier in this class — confirm against the full file.
public static GanymedSSH ganymedSSH=new GanymedSSH(hostIp, rootUser, rootPasswd, port);
// Location where the tree-walking shell script below is written/executed.
public static String fileGetTreeData="./WEB-INF/config/getTreedata.sh";
// Shell script that recursively lists files under a path, skipping "app" directories.
// The embedded "#" comment (in Chinese) notes that setting IFS to newline is required
// so filenames containing spaces are handled correctly. The literal must stay
// byte-identical: it is executed remotely.
public static String strGetTreeData = "function ergodic(){\n "
+ "for file in \\`ls \\$1\\`\n do\n if [ \"\\$file\" != \"app\" -a -d \\$1\\\"/\\\"\\$file ]\n "
+ "then\n ergodic \\$1\"/\"\\$file\n else\n local path=\\$1\"/\"\\$file\n "
+ "echo \\$path \n fi\n done\n}\n\nIFS=\\$\\'\\n\\' "
+ "#这个必须要,否则会在文件名中有空格时出错\nINIT_PATH=\".\";\nergodic \\$1\n";
/**
* volume 线

@ -1,31 +0,0 @@
package com.platform.utils;

/**
 * Legacy polling thread for the data-move workflow.
 * <p>
 * Per the original in-line comment, the loop was meant to poll the
 * {@code move_data_tmp} table, but this class only sleeps for the configured
 * interval; the real polling logic appears to live elsewhere
 * (see {@code com.platform.service.thread.ThreadMoveData}) — confirm before reuse.
 */
public class ThreadMoveData extends Thread {

	/** Creates the thread; no state to initialise. */
	public ThreadMoveData() {
	}

	/**
	 * Sleeps for {@code Constant.update_dataInfo_sleep_time} milliseconds.
	 * <p>
	 * Fix: the original code caught {@link InterruptedException} and silently
	 * discarded it, which clears the thread's interrupt status and makes the
	 * thread impossible to stop cooperatively. The interrupt flag is now
	 * re-asserted so callers (and any enclosing loop) can observe it.
	 */
	@Override
	public void run() {
		// No Runnable target is ever set, so super.run() is a no-op; kept for
		// behavioural parity with the original.
		super.run();
		try {
			Thread.sleep(Constant.update_dataInfo_sleep_time);
		} catch (InterruptedException e) {
			// Restore the interrupt status instead of swallowing it.
			Thread.currentThread().interrupt();
		}
	}
}

@ -2,9 +2,14 @@ package com.platform.utils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Service;
import com.platform.entities.Brick;
import com.platform.entities.FolderNode;
import com.platform.entities.VolumeEntity;
import com.platform.glusterfs.ClusterInfo;
import com.platform.glusterfs.GetTreeData;
import com.platform.glusterfs.VolumeInfo;
@ -18,6 +23,8 @@ public class ThreadVolume extends Thread implements Runnable{
/** Volume信息查询 */
private VolumeInfo volumeInfo = new VolumeInfo();
private ClusterInfo cluster = new ClusterInfo();
public ThreadVolume() {
// No construction-time setup needed: the collaborator fields
// (volumeInfo, cluster, ...) are initialised inline at declaration.
}
@ -35,26 +42,70 @@ public class ThreadVolume extends Thread implements Runnable{
public void run() {
super.run();
while(true){
List<FolderNode> folderlist = new ArrayList<FolderNode>();
//查询 volume name
List<String> volumeNameList = volumeInfo.showAllVolumeName();
if (null != volumeNameList) {
for (String volumeName : volumeNameList) {
VolumeEntity volume = new VolumeEntity();
volume.setName(volumeName);
List<String> path = volumeInfo.getVolumeMountPoint(volumeName);
//默认加载第一个路径
if (null != path && path.size() > 0) {
//装入 folder
//查询 每个 volume 下的 folder
FolderNode foldertmp = gfsTree.getDatas(path.get(0));
folderlist.add(foldertmp);
try {
List<FolderNode> folderlist = new ArrayList<FolderNode>();
List<VolumeEntity> volumeList = new ArrayList<VolumeEntity>();
// brick状态 map集合
Map<String, String> brickStatusMap = cluster.showClusterInfo();
//查询 volume name
List<String> volumeNameList = volumeInfo.showAllVolumeName();
if (null != volumeNameList) {
for (String volumeName : volumeNameList) {
VolumeEntity volume = new VolumeEntity();
volume.setName(volumeName);
List<String> path = volumeInfo.getVolumeMountPoint(volumeName);
//默认加载第一个路径
if (null != path && path.size() > 0) {
volume.setPath(path.get(0));
}
volume.setAllSize(volumeInfo.getVolumeAllSize(volumeName));
volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName));
//TODO 查询brick--
//返回 ip:path
List<String> brickStrs = volumeInfo.getVolumeBricks(volumeName);
//brick已用大小
Map<String, Double> usedSize = volumeInfo.getVolumebricksDataSize(volumeName);
Map<String, Double> availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName);
List<Brick> brickList = new ArrayList<Brick>();
for (String brickIpPath : brickStrs) {
Brick b = new Brick();
String ipAndpath[] = brickIpPath.split(":");
String brickip = ipAndpath[0];
String brickpath = ipAndpath[1];
//iP , path ,
b.setIp(brickip);
if(brickStatusMap==null || brickStatusMap.size()==0){
b.setStatus(false);
}else if (brickStatusMap.containsKey(brickip)) {
b.setStatus(true);
}
else {
b.setStatus(false);
}
b.setPath(brickpath);
b.setAvailableSize(availableSize.get(brickIpPath));
b.setUsedSize(usedSize.get(brickIpPath));
brickList.add(b);
}
volume.setBrick(brickList);
//默认加载第一个路径
if (null != path && path.size() > 0) {
//装入 folder
//查询 每个 volume 下的 folder
FolderNode foldertmp = gfsTree.getDatas(path.get(0));
folderlist.add(foldertmp);
}
volumeList.add(volume);
}
}
}
//TODO 更新folder 目录
CacheTreeData.setFolders(folderlist);
try {
//TODO 更新folder 目录
CacheTreeData.setFolders(folderlist);
CacheTreeData.setVolumeList(volumeList);
Thread.sleep(Constant.get_volume_sleep_time);
} catch (InterruptedException e) {
}

Loading…
Cancel
Save