diff --git a/WebContent/WEB-INF/config/mybatis-applicationConfig.xml b/WebContent/WEB-INF/config/mybatis-applicationConfig.xml index 85138096..f1679b32 100644 --- a/WebContent/WEB-INF/config/mybatis-applicationConfig.xml +++ b/WebContent/WEB-INF/config/mybatis-applicationConfig.xml @@ -8,9 +8,11 @@ + - + + \ No newline at end of file diff --git a/WebContent/WEB-INF/config/spring-applicationContext.xml b/WebContent/WEB-INF/config/spring-applicationContext.xml index 33ffe0a4..6f2a35ac 100644 --- a/WebContent/WEB-INF/config/spring-applicationContext.xml +++ b/WebContent/WEB-INF/config/spring-applicationContext.xml @@ -3,6 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:aop="http://www.springframework.org/schema/aop" xmlns:context="http://www.springframework.org/schema/context" + xmlns:task="http://www.springframework.org/schema/task" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd http://www.springframework.org/schema/tx @@ -10,7 +11,9 @@ http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.0.xsd http://www.springframework.org/schema/context - http://www.springframework.org/schema/context/spring-context-3.0.xsd"> + http://www.springframework.org/schema/context/spring-context-3.0.xsd + http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task-3.0.xsd"> @@ -71,4 +74,8 @@ + + + + \ No newline at end of file diff --git a/WebContent/WEB-INF/web.xml b/WebContent/WEB-INF/web.xml index d9935a26..cfdc2ad5 100644 --- a/WebContent/WEB-INF/web.xml +++ b/WebContent/WEB-INF/web.xml @@ -18,7 +18,7 @@ CharacterEncodingFilter - /* + / diff --git a/src/com/dao/mapper/data-details-mapper.xml b/src/com/dao/mapper/data-details-mapper.xml index 52b98767..89b9f52e 100644 --- a/src/com/dao/mapper/data-details-mapper.xml +++ b/src/com/dao/mapper/data-details-mapper.xml @@ -79,6 +79,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" + ORDER BY data_details.id LIMIT #{PagerOptions.limit} @@ -101,8 +102,107 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" + + + INSERT INTO + data_info( + + + regionalism_code, + + + system_code, + + + data_type, + + + data_version, + + + submitted_batch, + + + data_path, + + + collection_time, + + + collector_name, + + + collector_contacts, + + + data_charset, + + + data_year + + + ) + VALUES( + + + #{regionalismCode}, + + + #{systemCode}, + + + #{dataType}, + + + #{dataVersion}, + + + #{submittedBatch}, + + + #{dataPath}, + + + #{collectingTime}, + + + #{collectorName}, + + + #{collectorContacts}, + + + #{charset}, + + + #{year} + + + ) + + + + + + UPDATE + data_info + set remove = '1' + where id in + + #{item} + + and remove ='0' + \ No newline at end of file diff --git a/src/com/dao/mapper/dataInfoMoveTmpmapper.xml b/src/com/dao/mapper/dataInfoMoveTmpmapper.xml index c0545560..7d1145d6 100644 --- a/src/com/dao/mapper/dataInfoMoveTmpmapper.xml +++ b/src/com/dao/mapper/dataInfoMoveTmpmapper.xml @@ -41,16 +41,26 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" jdbcType="VARCHAR" /> + + + regionalism_code,system_code,dst_path,lasttime + + - SELECT - a.id,a.regionalism_code,b.city_name,b.district_name, a.system_code,b.system_name,b.data_type,b.data_version,b.submitted_batch, - 
b.data_path,b.data_charset,b.collection_time,b.collector_name,b.collector_contacts,b.data_year,a.dst_path,a.complete_status,a.rate + a.id id,a.regionalism_code regionalismCode,b.city_name cityName,b.district_name districtName, + a.system_code systemCode,b.system_name systemName,b.data_type dataType,b.data_version dataVersion, + b.submitted_batch submittedBatch,b.data_path dataPath,b.data_charset charset,b.collection_time collectionTime, + b.collector_name collectorName,b.collector_contacts collectorContacts,b.data_year dataYear,a.dst_path dstPath, + a.complete_status completeStatus,a.rate rate, a.lasttime lastTime FROM move_data_tmp a LEFT JOIN data_details b - ON a.system_code = b.system_code AND a.regionalism_code = b.regionalism_code; + ON a.system_code = b.system_code AND a.regionalism_code = b.regionalism_code + ORDER BY a.id @@ -58,19 +68,18 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" move_data_tmp - - regionalism_code = #{regionalismCode}, - - - system_code= #{systemCode}, - dst_path= #{dstPath}, complete_status= #{completeStatus}, - rate= #{rate} + + rate= #{rate}, + + + lasttime= #{lastTime}, + @@ -78,7 +87,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" - + INSERT INTO move_data_tmp( @@ -88,7 +97,7 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" regionalism_code, - + system_code, @@ -97,7 +106,12 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" complete_status, - rate + + rate, + + + lasttime, + ) VALUES( @@ -117,19 +131,32 @@ PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN" #{completeStatus}, - #{rate} + + #{rate} + + + #{lastTime}, + ) - + + + DELETE FROM move_data_tmp WHERE id = #{id} - - + + + 获取数据查询的起始di + --> \ No newline at end of file diff --git a/src/com/platform/controller/DataModelController.java b/src/com/platform/controller/DataModelController.java index 853651ba..7cf47830 100644 --- a/src/com/platform/controller/DataModelController.java +++ b/src/com/platform/controller/DataModelController.java @@ -1,231 +1,265 @@ - - -package com.platform.controller; - -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.annotation.Resource; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import net.sf.json.JSONArray; -import net.sf.json.JSONObject; - -import org.apache.commons.lang.StringUtils; -import org.springframework.stereotype.Controller; -import org.springframework.stereotype.Service; -import org.springframework.ui.ModelMap; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.ResponseBody; - -import com.base.BaseController; -import com.platform.entities.DataInfoEntity; -import com.platform.entities.FolderNode; -import com.platform.entities.GatherOracleInfo; -import com.platform.entities.OracleConnectorParams; -import com.platform.entities.PagerOptions; -import com.platform.entities.VolumeEntity; -import com.platform.service.DataInfoService; -import com.platform.service.IGfsService; -import com.platform.service.IMySqlService; -import com.platform.service.IOracleExtractService; -import com.platform.service.OracleExtractHelper; -import com.platform.service.OracleStatusService; -import com.platform.service.impl.MySqlServiceImpl; -import com.platform.test.Brick; -import com.platform.test.FolderReader; -import com.platform.test.Volume; -import com.platform.utils.Configs; 
-import com.platform.utils.UtilsHelper; - -@Controller -public class DataModelController extends BaseController{ - @Resource(name = "dataInfoService") - private DataInfoService dfs; - - @Resource(name = "gfsService") - private IGfsService gfsService; - - @Resource(name = "mySqlService") - private IMySqlService mySqlService; - - @Resource(name = "OracleExtract") - private IOracleExtractService OracleExtract; - - public void setDfsImp(DataInfoService dfs) { - this.dfs = dfs; - } - - @RequestMapping("/data.json") - @ResponseBody - public ModelMap getAllDataToJson(HttpServletRequest res, - HttpServletResponse req) { - Map paramMap = res.getParameterMap(); - Set keySet = paramMap.keySet(); - Map params = new HashMap(); - StringBuffer sb = new StringBuffer().append("��ǰ���������:{"); - for (String str : keySet) { - String value = paramMap.get(str)[0]; - if (StringUtils.isNotEmpty(value)) { - params.put(str, value); - sb.append(str).append(":").append(value).append(","); - } else { - sb.append(str).append(":").append("null").append(","); - } - } - Configs.CONSOLE_LOGGER.info(sb.deleteCharAt(sb.length() - 1) - .append("}").toString()); - PagerOptions pagerOptions = (PagerOptions) UtilsHelper - .newObjAndSetAttrsByClass(PagerOptions.class, params); - - return dfs.getPagerTableData(pagerOptions); - } - - @RequestMapping("/delete/data") - public void deleteData(HttpServletRequest res, HttpServletResponse req) { - Map paramMap = res.getParameterMap(); - String[] data = paramMap.get("data"); - dfs.deleteData(data); - } - - @RequestMapping("/connectOracle") - public void connectOracle(HttpServletRequest res, HttpServletResponse req) { - Map paramMap = res.getParameterMap(); - String[] oraclesName = paramMap.get("oracleName"); - if (oraclesName != null) - for (String rcName : oraclesName) { - Configs.CONSOLE_LOGGER.info("连接成功\t" + rcName); - new OracleStatusService().connectToOracle(rcName); - } - } - - @RequestMapping("/cancelOracleConection") - public void cancelOracleConnection(HttpServletRequest res, - HttpServletResponse req) { - Map paramMap = res.getParameterMap(); - String[] oraclesName = paramMap.get("oracleName"); - String operate = paramMap.get("operation")[0]; - if (null != oraclesName) { - for (String rcName : oraclesName) { - Configs.CONSOLE_LOGGER.info("连接成功:\t" + rcName); - new OracleStatusService().cancelToOracle(rcName, operate); - } - } - } - - @RequestMapping("/oracle/{name}/extract") - public void extractOracleData(HttpServletRequest res, - HttpServletResponse req, String name) throws Exception { - System.out.println("------extract-------"); - System.out.println(name); - Map paramMap = res.getParameterMap(); - //汇总库 对象信息--带有tableName- - String[] nodes = paramMap.get("target"); - Map map = null; - for (String string : nodes) { - JSONObject jsonobject = JSONObject.fromObject(string); - map = jsonobject; - System.out.println(map); - } - - // 采集库对象--(多个采集库抽取到1个汇总库的1个tableName下) - String[] inneed = paramMap.get("inneed"); - List> colleclist = new ArrayList>(); - for (String string : nodes) { - JSONObject jsonobject = JSONObject.fromObject(string); - Map inneedMap = jsonobject; - colleclist.add(inneedMap); - } - OracleExtract.extractOracle(name, colleclist, map); - - } - - @RequestMapping("/volume/list") - @ResponseBody - public List getFolder(HttpServletRequest res, HttpServletResponse req) throws Exception { - System.out.println("get Request"); -// Brick brick1 = new Brick("192.168.0.101", "D:/bootstrap"); -// Brick brick2 = new Brick("192.168.0.103", "D:\book"); -// List bricks = new 
ArrayList(); -// bricks.add(brick1); -// bricks.add(brick2); - List result = gfsService.getAllVolumes(); - return result; - } - - @RequestMapping(value = "/volume/{name}/move", method= RequestMethod.POST) - @ResponseBody - public Object move(HttpServletRequest res, HttpServletResponse req, String name, - FolderNode selectNode, DataInfoEntity selectItems) throws Exception { - System.out.println("get Request"); - Map paramMap = res.getParameterMap(); -// System.out.println(paramMap); - String[] nodes = paramMap.get("selectNode"); - Map map = null; - for (String string : nodes) { - System.out.println(string); - JSONObject jsonobject = JSONObject.fromObject(string); - map = jsonobject; - } - List listItemPath = new ArrayList(); - String[] items = paramMap.get("selectItems"); - System.out.println("selectItems"); - for (String string : items) { - System.out.println(string); - JSONObject jsobj = JSONObject.fromObject(string); - Map itemmap = jsobj; - listItemPath.add((String)itemmap.get("dataPath")); - } - - System.out.println(name); - System.out.println(nodes); - System.out.println(selectItems); - System.out.println("--------------"); - int result = gfsService.copyFolder(listItemPath, (String)map.get("path"), "app"); - return result; - } - - @RequestMapping("/oracle/list") - @ResponseBody - public List getOracleInfo(HttpServletRequest res, - HttpServletResponse req) throws Exception { - - List result = mySqlService.findAllMySql(); - System.out.println("----------getOracleInfo-----------------------"); - return result; - } - - @RequestMapping("/oracle/{id}/delete") - @ResponseBody - public void deleteOracleInfo(HttpServletRequest res, - HttpServletResponse req, int id) throws Exception { - - mySqlService.deleteMySql(id); - System.out.println("----------deleteOracleInfo-----------------------"); - } - - @RequestMapping("/oracle/{id}/insert") - @ResponseBody - public void insertOracleInfo(HttpServletRequest res, - HttpServletResponse req, GatherOracleInfo oracle) throws Exception { - - mySqlService.insertOracle(oracle); - System.out.println("----------insertOracleInfo-----------------------"); - } - - @RequestMapping("/oracle/{id}/update") - @ResponseBody - public void updateOracleInfo(HttpServletRequest res, - HttpServletResponse req, GatherOracleInfo oracle) throws Exception { - - mySqlService.updateOracle(oracle); - System.out.println("----------updateOracleInfo-----------------------"); - } -} - +package com.platform.controller; + +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import net.sf.json.JSONArray; +import net.sf.json.JSONObject; + +import org.apache.commons.lang.StringUtils; +import org.springframework.stereotype.Controller; +import org.springframework.stereotype.Service; +import org.springframework.ui.ModelMap; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.ResponseBody; + +import com.base.BaseController; +import com.platform.entities.DataInfoEntity; +import com.platform.entities.DataInfoEntityMoveTmp; +import com.platform.entities.FolderNode; +import com.platform.entities.GatherOracleInfo; +import com.platform.entities.OracleConnectorParams; +import 
com.platform.entities.PagerOptions; +import com.platform.entities.VolumeEntity; +import com.platform.service.DataInfoService; +import com.platform.service.IGfsService; +import com.platform.service.IMoveDataService; +import com.platform.service.IMySqlService; +import com.platform.service.IOracleExtractService; +import com.platform.service.OracleExtractHelper; +import com.platform.service.OracleStatusService; +import com.platform.service.impl.MySqlServiceImpl; +import com.platform.test.Brick; +import com.platform.test.FolderReader; +import com.platform.test.Volume; +import com.platform.utils.Bean2MapUtils; +import com.platform.utils.Configs; +import com.platform.utils.UtilsHelper; + +@Controller +public class DataModelController extends BaseController{ + @Resource(name = "dataInfoService") + private DataInfoService dfs; + + @Resource(name = "gfsService") + private IGfsService gfsService; + + @Resource(name = "mySqlService") + private IMySqlService mySqlService; + + @Resource(name = "OracleExtract") + private IOracleExtractService OracleExtract; + + @Resource(name = "moveDataService") + private IMoveDataService moveDataService; + + public void setDfsImp(DataInfoService dfs) { + this.dfs = dfs; + } + + @RequestMapping("/data.json") + @ResponseBody + public ModelMap getAllDataToJson(HttpServletRequest res, + HttpServletResponse req) throws UnsupportedEncodingException { + res.setCharacterEncoding("UTF-8"); + Map paramMap = res.getParameterMap(); + Set keySet = paramMap.keySet(); + Map params = new HashMap(); + StringBuffer sb = new StringBuffer().append("当前的请求参数:{"); + for (String str : keySet) { + String value = paramMap.get(str)[0]; + if (StringUtils.isNotEmpty(value)) { + params.put(str, value); + sb.append(str).append(":").append(value).append(","); + } else { + sb.append(str).append(":").append("null").append(","); + } + } + Configs.CONSOLE_LOGGER.info(sb.deleteCharAt(sb.length() - 1) + .append("}").toString()); + PagerOptions pagerOptions = (PagerOptions) UtilsHelper + .newObjAndSetAttrsByClass(PagerOptions.class, params); + + return dfs.getPagerTableData(pagerOptions); + } + + @RequestMapping(value="/delete/data", method= RequestMethod.POST) + public void deleteData(HttpServletRequest res, HttpServletResponse req) throws Exception { + res.setCharacterEncoding("UTF-8"); + Map paramMap = res.getParameterMap(); + String[] data = paramMap.get("data"); + dfs.deleteData(data); + } + + @RequestMapping("/connectOracle") + public void connectOracle(HttpServletRequest res, HttpServletResponse req) throws UnsupportedEncodingException { + res.setCharacterEncoding("UTF-8"); + Map paramMap = res.getParameterMap(); + String[] oraclesName = paramMap.get("oracleName"); + if (oraclesName != null) + for (String rcName : oraclesName) { + Configs.CONSOLE_LOGGER.info("连接成功\t" + rcName); + new OracleStatusService().connectToOracle(rcName); + } + } + + @RequestMapping("/cancelOracleConection") + public void cancelOracleConnection(HttpServletRequest res, + HttpServletResponse req) throws UnsupportedEncodingException { + res.setCharacterEncoding("UTF-8"); + Map paramMap = res.getParameterMap(); + String[] oraclesName = paramMap.get("oracleName"); + String operate = paramMap.get("operation")[0]; + if (null != oraclesName) { + for (String rcName : oraclesName) { + Configs.CONSOLE_LOGGER.info("取消连接:\t" + rcName); + new OracleStatusService().cancelToOracle(rcName, operate); + } + } + } + + @RequestMapping(value="/oracle/{name}/extract", method= RequestMethod.POST) + public void extractOracleData(HttpServletRequest 
res, + HttpServletResponse req, String name) throws Exception { + res.setCharacterEncoding("UTF-8"); + System.out.println("------extract-------"); + System.out.println(name); + Map paramMap = res.getParameterMap(); + //汇总库 对象信息--带有tableName- + String[] nodes = paramMap.get("target"); + Map map = null; + for (String string : nodes) { + JSONObject jsonobject = JSONObject.fromObject(string); + map = jsonobject; + System.out.println(map); + } + + // 采集库对象--(多个采集库抽取到1个汇总库的1个tableName下) + String[] inneed = paramMap.get("inneed"); + List> colleclist = new ArrayList>(); + for (String string : nodes) { + JSONObject jsonobject = JSONObject.fromObject(string); + Map inneedMap = jsonobject; + colleclist.add(inneedMap); + } + OracleExtract.extractOracle(name, colleclist, map); + + } + + @RequestMapping(value="/volume/list", method= RequestMethod.POST) + @ResponseBody + public List getFolder(HttpServletRequest res, HttpServletResponse req) throws Exception { + System.out.println("get Request"); +// Brick brick1 = new Brick("192.168.0.101", "D:/bootstrap"); +// Brick brick2 = new Brick("192.168.0.103", "D:\book"); +// List bricks = new ArrayList(); +// bricks.add(brick1); +// bricks.add(brick2); + List result = gfsService.getAllVolumes(); + return result; + } + + @RequestMapping(value = "/volume/{name}/move", method= RequestMethod.POST) + @ResponseBody + public Object move(HttpServletRequest res, HttpServletResponse req, String name, + FolderNode selectNode, DataInfoEntity selectItems) throws Exception { + res.setCharacterEncoding("UTF-8"); + System.out.println("get Request"); + Map paramMap = res.getParameterMap(); +// System.out.println(paramMap); + String[] nodes = paramMap.get("selectNode"); + Map map = null; + for (String string : nodes) { + System.out.println(string); + JSONObject jsonobject = JSONObject.fromObject(string); + map = jsonobject; + } + String dstVolume = (String) map.get("volume"); + + List listItemPath = new ArrayList(); + String[] items = paramMap.get("selectItems"); + System.out.println("selectItems"); + List datas = new ArrayList(); + List srcVolumes = new ArrayList(); + for (String string : items) { + System.out.println(string); + JSONObject jsobj = JSONObject.fromObject(string); + Map itemmap = jsobj; + if (null != itemmap.get("volume")) { + srcVolumes.add((String) itemmap.get("volume")); + } + DataInfoEntity data = (DataInfoEntity) Bean2MapUtils.convertMap(DataInfoEntity.class, itemmap); + datas.add(data); + } + System.out.println("------/volume/{name}/move--------"); + boolean result = false ; + if (datas.size() >0) { + result = moveDataService.moveData(datas, (String)map.get("path")); + } + return result; + } + + @RequestMapping(value="/oracle/list", method= RequestMethod.POST) + @ResponseBody + public List getOracleInfo(HttpServletRequest res, + HttpServletResponse req) throws Exception { + + List result = mySqlService.findAllMySql(); + System.out.println("----------getOracleInfo-----------------------"); + return result; + } + + @RequestMapping(value="/oracle/{id}/delete", method= RequestMethod.POST) + @ResponseBody + public void deleteOracleInfo(HttpServletRequest res, + HttpServletResponse req, int id) throws Exception { + res.setCharacterEncoding("UTF-8"); + mySqlService.deleteMySql(id); + System.out.println("----------deleteOracleInfo-----------------------"); + } + + @RequestMapping(value="/oracle/{id}/insert", method= RequestMethod.POST) + @ResponseBody + public void insertOracleInfo(HttpServletRequest res, + HttpServletResponse req, GatherOracleInfo oracle) throws 
Exception { + res.setCharacterEncoding("UTF-8"); + mySqlService.insertOracle(oracle); + System.out.println("----------insertOracleInfo-----------------------"); + } + + @RequestMapping(value="/oracle/{id}/update", method= RequestMethod.POST) + @ResponseBody + public void updateOracleInfo(HttpServletRequest res, + HttpServletResponse req, GatherOracleInfo oracle) throws Exception { + res.setCharacterEncoding("gb2312"); + System.out.println(oracle.getName()); + System.out.println(oracle); + + mySqlService.updateOracle(oracle); + System.out.println("----------updateOracleInfo-----------------------"); + } + + @RequestMapping(value="/task/transfer/list", method= RequestMethod.POST) + @ResponseBody + public Object taskList() throws Exception { + List result = moveDataService.findAll(); + return result; + } + + @RequestMapping(value="/task/transfer/delete", method= RequestMethod.POST) + @ResponseBody + public Object taskdeletes(DataInfoEntityMoveTmp move) throws Exception { + int result = moveDataService.delete(move); + return result; + } +} diff --git a/src/com/platform/controller/FolderController.java b/src/com/platform/controller/FolderController.java index 3460cec2..04254ffb 100644 --- a/src/com/platform/controller/FolderController.java +++ b/src/com/platform/controller/FolderController.java @@ -66,7 +66,8 @@ public class FolderController extends BaseController { @RequestMapping("/copyFolder") public Object copyFolder(String srcpath, String dstPath, String name) throws Exception { // -1 :error; -2: the filename is not exists ;-3 :destFolderName ; 1: right - int result = gfsService.copyFolder(srcpath, dstPath, name); + int result = 0; +// int result = gfsService.copyFolder(srcpath, dstPath, name); System.out.println(srcpath); System.out.println(dstPath); String obj = "right"; diff --git a/src/com/platform/dao/DataInfoDao.java b/src/com/platform/dao/DataInfoDao.java index f42b9f8f..f8423548 100644 --- a/src/com/platform/dao/DataInfoDao.java +++ b/src/com/platform/dao/DataInfoDao.java @@ -3,10 +3,12 @@ package com.platform.dao; import java.util.List; import org.apache.ibatis.annotations.Param; +import org.springframework.stereotype.Repository; import com.platform.entities.DataInfoEntity; import com.platform.entities.PagerOptions; +@Repository(value = "dataInfoDao") public interface DataInfoDao { int getLimitedDataCount(@Param("PagerOptions")PagerOptions pagerOptions); @@ -15,6 +17,10 @@ public interface DataInfoDao { List getLimitedDataInfoEntities(@Param("PagerOptions")PagerOptions pagerOptions); - List getIdIsExist(int parseInt); + List getIdIsExist(List list)throws Exception; + + int removes(List list)throws Exception; + + int save(DataInfoEntity data) throws Exception; } diff --git a/src/com/platform/dao/DataInfoMoveTmpDao.java b/src/com/platform/dao/DataInfoMoveTmpDao.java index 6d6a1d6a..096d8938 100644 --- a/src/com/platform/dao/DataInfoMoveTmpDao.java +++ b/src/com/platform/dao/DataInfoMoveTmpDao.java @@ -23,6 +23,7 @@ public interface DataInfoMoveTmpDao { int save(DataInfoEntityMoveTmp data) throws Exception; - int remove(DataInfoEntityMoveTmp data) throws Exception; + void insertBatch(List list) throws Exception; + int remove(int id) throws Exception; } diff --git a/src/com/platform/entities/Brick.java b/src/com/platform/entities/Brick.java index 5b74b5f1..e6701dab 100644 --- a/src/com/platform/entities/Brick.java +++ b/src/com/platform/entities/Brick.java @@ -1,5 +1,4 @@ - /** * 文件名 : Brick.java * 版权 : <版权/公司名> @@ -33,6 +32,11 @@ public class Brick { /** 路径 */ private String path; + 
+ /** + * true 有连接, false: 失去连接 + */ + private boolean status; /** * @return the availableSize @@ -90,6 +94,17 @@ public class Brick { this.path = path; } - -} + /** + * @return the status + */ + public boolean isStatus() { + return status; + } + /** + * @param status the status to set + */ + public void setStatus(boolean status) { + this.status = status; + } +} diff --git a/src/com/platform/entities/DataInfoEntityMoveTmp.java b/src/com/platform/entities/DataInfoEntityMoveTmp.java index 74be9e5d..0fb469e5 100644 --- a/src/com/platform/entities/DataInfoEntityMoveTmp.java +++ b/src/com/platform/entities/DataInfoEntityMoveTmp.java @@ -7,6 +7,12 @@ public class DataInfoEntityMoveTmp extends DataInfoEntity { private String completeStatus; private int rate; + + private String lastTime; // 采集时间 + + public DataInfoEntityMoveTmp() { + // TODO Auto-generated constructor stub + } /** * @return the dstPath @@ -49,6 +55,19 @@ public class DataInfoEntityMoveTmp extends DataInfoEntity { public void setRate(int rate) { this.rate = rate; } - + + /** + * @return the lastTime + */ + public String getLastTime() { + return lastTime; + } + + /** + * @param lastTime the lastTime to set + */ + public void setLastTime(String lastTime) { + this.lastTime = lastTime; + } } diff --git a/src/com/platform/entities/FolderNode.java b/src/com/platform/entities/FolderNode.java index eea0e75e..5a9a56b7 100644 --- a/src/com/platform/entities/FolderNode.java +++ b/src/com/platform/entities/FolderNode.java @@ -1,12 +1,13 @@ package com.platform.entities; +import java.util.ArrayList; import java.util.List; public class FolderNode { private String name; private int isFolder; // 1 is file and other integer is folder show children number private String path; - private List childNodes; + private List childNodes = new ArrayList(); public FolderNode() { diff --git a/src/com/platform/entities/VolumeEntity.java b/src/com/platform/entities/VolumeEntity.java index 84455003..f66670d4 100644 --- a/src/com/platform/entities/VolumeEntity.java +++ b/src/com/platform/entities/VolumeEntity.java @@ -37,7 +37,7 @@ public class VolumeEntity { private String path; /** volume树形目录 */ - private FolderNode folder; + private List folder = new ArrayList(); /** volume的 块 */ private List brick = new ArrayList(); @@ -101,14 +101,14 @@ public class VolumeEntity { /** * @return the folder */ - public FolderNode getFolder() { + public List getFolder() { return folder; } /** * @param folder the folder to set */ - public void setFolder(FolderNode folder) { + public void setFolder(List folder) { this.folder = folder; } diff --git a/src/com/platform/glusterfs/CopyData.java b/src/com/platform/glusterfs/CopyData.java index 694aec06..05594621 100644 --- a/src/com/platform/glusterfs/CopyData.java +++ b/src/com/platform/glusterfs/CopyData.java @@ -1,4 +1,5 @@ + package com.platform.glusterfs; import java.util.List; @@ -67,7 +68,7 @@ public class CopyData { log.info("copy " + sourceFolderName +"/" + fileName+ " to " + destFolderName + " running"); return 1; } - + /** * 将sourceFolderName拷贝到destFolderName * 如果拷贝正常返回1,如果sourceFolderName不存在返回-2 ,如果destFolderName不存在返回-3 @@ -114,19 +115,19 @@ public class CopyData { List reStrings=Constant.ganymedSSH.execCmdWaitAcquiescent(cmd); } return 1; - } + } @Test public void testcreateFolders() { createFolders("/aaa/vvv/ddd/www/rrrr"); - } + } - //@Test - public void testCopyFolderFiles() { - - copyFolderFiles("/home", "/home/ubuntu", "system_data"); - } -} + //@Test + public void testCopyFolderFiles() { + + copyFolderFiles("/home", 
"/home/ubuntu", "system_data"); + } +} diff --git a/src/com/platform/glusterfs/ShowData.java b/src/com/platform/glusterfs/ShowData.java index b20afba4..ff4180ff 100644 --- a/src/com/platform/glusterfs/ShowData.java +++ b/src/com/platform/glusterfs/ShowData.java @@ -1,4 +1,3 @@ - package com.platform.glusterfs; import java.util.HashMap; @@ -8,50 +7,52 @@ import java.util.Map; import org.apache.log4j.Logger; + import org.apache.log4j.PropertyConfigurator; import org.junit.Test; import com.platform.utils.Constant; -public class ShowData { - public static Logger log = Logger.getLogger(ShowData.class); +public class ShowData { + + public static Logger log = Logger.getLogger ( ShowData.class); /** - * get the data of volumeName Map s1 is data name and - * s2 is type file or folder <功能详细描述> - * + * get the data of volumeName Map s1 is data name and s2 is type file or folder + * <功能详细描述> * @param volumeName * @return * @see [类、类#方法、类#成员] */ - public Map showVolumeFiles(String volumeName) { - log.info("start show the data"); - Map data_type = new HashMap(); + public Map showVolumeFiles(String volumeName){ + log.info("start show the data"); + Map data_type=new HashMap(); /** * get mount point of volumeName */ - String folderName = volumeName; - - data_type = showFolderData(volumeName); + String folderName=volumeName; + + data_type=showFolderData(volumeName); return data_type; - } - +} /** - * get the data of folder name Map is folder name and type 1 - * is file and others is folder - * - * + * get the data of folder name + * Map is folder name and type 1 is file and others is folder + + * @param FolderName * @return */ - public Map showFolderData(String folderName) { - log.info(" start get " + folderName + " data"); + public Map showFolderData(String folderName){ + log.info(" start get "+folderName+" data"); - Map data_type = new HashMap(); - String command = "ls -l " + folderName; + Map data_type=new HashMap(); + String command="ls -l "+folderName; + /* +<<<<<<< HEAD * RunCommand runCommand=new RunCommand(); List * reStrings=runCommand.runCommandWait(command); */ @@ -60,35 +61,35 @@ public class ShowData { log.error("2101 command get result is null"); return null; } - if (reStrings.size() == 0) { + if(reStrings.size()==0){ log.info("2102 the folder is empty"); return data_type; } - if (reStrings.get(0).contains("No such file or directory")) { - log.info("2103 the " + folderName + " is not exists"); + if(reStrings.get(0).contains("No such file or directory")){ + log.info("2103 the "+folderName+" is not exists"); return null; } /** * remove first line total number */ reStrings.remove(0); - - for (Iterator it2 = reStrings.iterator(); it2.hasNext();) { - String line = (String) it2.next(); - line = line.replaceAll(" +", " "); - String keyValue[] = line.split(" "); - if (keyValue.length < 9) { - log.error("2104 " + line + " length is short"); + + for(Iterator it2 = reStrings.iterator();it2.hasNext();){ + String line=(String)it2.next(); + line=line.replaceAll(" +", " "); + String keyValue[]=line.split(" "); + if(keyValue.length<9){ + log.error("2104 "+line+" length is short"); continue; } - + data_type.put(keyValue[8], keyValue[1]); - + } - log.info(" get " + folderName + " data successed"); + log.info(" get "+folderName+" data successed"); return data_type; } - + /** * 返回folder的大小字节表示 * -2表示获取大小出错,-1表示folder不存在,其他表示folder的大小 @@ -114,19 +115,18 @@ public class ShowData { return size; } - - //@Test - public void testShowData() { - - System.out.println(showFolderData("/home")); - - } + /** + * + * <一句话功能简述> 
+ * <功能详细描述> + * @see [类、类#方法、类#成员] + */ @Test - public void test_getFolderSize() { - - System.out.println(getFolderSize("/home/ubuntu")); + public void testShowData(){ + + System.out.println(showFolderData("/home")); } } \ No newline at end of file diff --git a/src/com/platform/service/DataInfoService.java b/src/com/platform/service/DataInfoService.java index f5102301..c08d0850 100644 --- a/src/com/platform/service/DataInfoService.java +++ b/src/com/platform/service/DataInfoService.java @@ -3,10 +3,14 @@ package com.platform.service; import org.springframework.ui.ModelMap; +import com.platform.dao.DataInfoDao; +import com.platform.entities.DataInfoEntity; import com.platform.entities.PagerOptions; public interface DataInfoService { public ModelMap getPagerTableData(PagerOptions pagerOptions); - void deleteData(String[] id); + void deleteData(String[] id) throws Exception; + + int save(DataInfoEntity data) throws Exception; } diff --git a/src/com/platform/service/DataInfoServiceImp.java b/src/com/platform/service/DataInfoServiceImp.java index bcbe94fb..4da74444 100644 --- a/src/com/platform/service/DataInfoServiceImp.java +++ b/src/com/platform/service/DataInfoServiceImp.java @@ -1,5 +1,6 @@ package com.platform.service; +import java.util.ArrayList; import java.util.List; import javax.annotation.Resource; @@ -24,7 +25,8 @@ public class DataInfoServiceImp implements DataInfoService { public ModelMap getPagerTableData(PagerOptions pagerOptions) { // TODO Auto-generated method stub ModelMap modelMap = new ModelMap(); - int count = dfdDao.getLimitedDataCount(pagerOptions); //获取总记录条数 + int count = dfdDao.getLimitedDataCount(pagerOptions); //获取总记录条数 + System.out.println("total colume " + count); int offset = 0; if (pagerOptions.getCurrentPageNum() > 1) { pagerOptions.setTotalLimit((pagerOptions.getCurrentPageNum() - 1) @@ -41,19 +43,29 @@ public class DataInfoServiceImp implements DataInfoService { } @Override - public void deleteData(String[] id) { + public void deleteData(String[] id) throws Exception { // TODO Auto-generated method stub + List ids = new ArrayList(); for(String idx: id){ + ids.add(Integer.parseInt(idx)); + } + if (ids.size() > 0) { //数据在不在? 
- List paths = dfdDao.getIdIsExist(Integer.parseInt(idx)); + List paths = dfdDao.getIdIsExist(ids); if(paths.size()>0){ //删除文件操作 for (int i = 0; i < paths.size(); i++) { System.out.println(paths.get(i)); } //删除数据库记录 - //dfdDao.deleteRow(idx); + dfdDao.removes(ids); } } } + + @Override + public int save(DataInfoEntity data) throws Exception { + int result = dfdDao.save(data); + return result; + } } diff --git a/src/com/platform/service/IMoveDataService.java b/src/com/platform/service/IMoveDataService.java new file mode 100644 index 00000000..bddb1e04 --- /dev/null +++ b/src/com/platform/service/IMoveDataService.java @@ -0,0 +1,40 @@ +package com.platform.service; + +import java.util.List; + +import com.platform.entities.DataInfoEntity; +import com.platform.entities.DataInfoEntityMoveTmp; + +/** 数据迁移 + * @author chen + * + */ +public interface IMoveDataService { + + /** 迁移(新增) + * @param a dataInfo实体 + * @param dstPath volume下的某个folder路径(需要补齐路径,eg: XXX/320198_16/1,or XXX/320122KFQ_15/1) + * @return + * @throws Exception + */ + public boolean moveData(List data, String dstPath) throws Exception; + + /** 查询所有进度 + * @return 返回所有的实体 + * @throws Exception + */ + public List findAll() throws Exception; + + /** 删除 + * @return + * @throws Exception + */ + public int delete(DataInfoEntityMoveTmp dataMove) throws Exception; + + public int update(DataInfoEntityMoveTmp data) throws Exception; + + public int save(DataInfoEntityMoveTmp data) throws Exception; + + public int insertBatch(List list) throws Exception; + +} diff --git a/src/com/platform/service/impl/GfsServiceImpl.java b/src/com/platform/service/impl/GfsServiceImpl.java index 63fb8bf4..39ea77d2 100644 --- a/src/com/platform/service/impl/GfsServiceImpl.java +++ b/src/com/platform/service/impl/GfsServiceImpl.java @@ -1,5 +1,4 @@ - /** * 文件名 : GfsServiceImpl.java * 版权 : <版权/公司名> @@ -61,7 +60,7 @@ public class GfsServiceImpl implements IGfsService { if (null != srcFolders) { for (String string : srcFolders) { - status = copydata.copyFolderFiles(string, dstFolder, name); + status = copydata.copyFolderFilesAnyway(string, dstFolder, name); } } return status; @@ -72,46 +71,51 @@ public class GfsServiceImpl implements IGfsService { */ @Override public List getAllVolumes() throws Exception { - List volumeList = new ArrayList<>(); - List volumeNameList = volumeInfo.showAllVolumeName(); - if (null == volumeNameList) { - return null; + List volumeList = CacheTreeData.getVolumeList(); + if (null == volumeList) { + return new ArrayList(); } - for (String volumeName : volumeNameList) { - VolumeEntity volume = new VolumeEntity(); - volume.setName(volumeName); - List path = volumeInfo.getVolumeMountPoint(volumeName); - //默认加载第一个路径 - if (null != path && path.size() > 0) { - volume.setPath(path.get(0)); - } - volume.setAllSize(volumeInfo.getVolumeAllSize(volumeName)); - volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName)); - //TODO 查询brick-- - //返回 ip:path - List brickStrs = volumeInfo.getVolumeBricks(volumeName); - //brick已用大小: - Map usedSize = volumeInfo.getVolumebricksDataSize(volumeName); - Map availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName); - List brickList = new ArrayList(); - for (String brickIpPath : brickStrs) { - Brick b = new Brick(); - String ipAndpath[] = brickIpPath.split(":"); - String brickip = ipAndpath[0]; - String brickpath = ipAndpath[1]; - //iP , path , - b.setIp(brickip); - b.setPath(brickpath); - b.setAvailableSize(availableSize.get(brickIpPath)); - b.setUsedSize(usedSize.get(brickIpPath)); - brickList.add(b); - 
} - volume.setBrick(brickList); + for (VolumeEntity volume : volumeList) { +// VolumeEntity volume = new VolumeEntity(); +// volume.setName(volumeName); +// List path = volumeInfo.getVolumeMountPoint(volumeName); +// //默认加载第一个路径 +// if (null != path && path.size() > 0) { +// volume.setPath(path.get(0)); +// } +// volume.setAllSize(volumeInfo.getVolumeAllSize(volumeName)); +// volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName)); +// //TODO 查询brick-- +// //返回 ip:path +// List brickStrs = volumeInfo.getVolumeBricks(volumeName); +// //brick已用大小: +// Map usedSize = volumeInfo.getVolumebricksDataSize(volumeName); +// Map availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName); +// +// +// List brickList = new ArrayList(); +// for (String brickIpPath : brickStrs) { +// Brick b = new Brick(); +// String ipAndpath[] = brickIpPath.split(":"); +// String brickip = ipAndpath[0]; +// String brickpath = ipAndpath[1]; +// //iP , path , +// b.setIp(brickip); +// b.setPath(brickpath); +// b.setAvailableSize(availableSize.get(brickIpPath)); +// b.setUsedSize(usedSize.get(brickIpPath)); +// brickList.add(b); +// } +// volume.setBrick(brickList); if (null != volume.getPath()) { // 获得 folder 目录 - volume.setFolder(getFolder(volume.getPath())); + List list = new ArrayList(); + FolderNode currNode = getFolder(volume.getPath()); + if (null != currNode && null != currNode.getChildNodes()) { + list.addAll(currNode.getChildNodes()); + } + volume.setFolder(list); } - volumeList.add(volume); } return volumeList; } @@ -124,11 +128,18 @@ public class GfsServiceImpl implements IGfsService { if (null != path && path.size() > 0) { volume.setPath(path.get(0)); } - volume.setAllSize(volumeInfo.getVolumeAllSize(volumeName)); + volume.setAllSize(volumeInfo.getVolumeAvailableSize(volumeName)); volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName)); // volume.setBrick(brick); if (null != volume.getPath()) { - volume.setFolder(getFolder(path.get(0))); + List list = new ArrayList(); + if (null != path && path.size() > 0) { + FolderNode currNode = getFolder(path.get(0)); + if (null != currNode && null != currNode.getChildNodes()) { + list.addAll(currNode.getChildNodes()); + } + } + volume.setFolder(list); } return volume; } @@ -137,7 +148,7 @@ public class GfsServiceImpl implements IGfsService { public int moveData(String volumeName, String srcPath, String dstPath) throws Exception { - int result = copydata.copyFolderFiles(srcPath, dstPath, "app"); + int result = copydata.copyFolderFilesAnyway(srcPath, dstPath, "app"); return result; } @@ -148,4 +159,3 @@ public class GfsServiceImpl implements IGfsService { } } - diff --git a/src/com/platform/service/impl/MoveDataServiceImpl.java b/src/com/platform/service/impl/MoveDataServiceImpl.java new file mode 100644 index 00000000..511676a7 --- /dev/null +++ b/src/com/platform/service/impl/MoveDataServiceImpl.java @@ -0,0 +1,141 @@ +package com.platform.service.impl; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.annotation.Resource; + +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; +import org.springframework.stereotype.Service; + +import com.platform.dao.DataInfoDao; +import com.platform.dao.DataInfoMoveTmpDao; +import com.platform.entities.DataInfoEntity; +import com.platform.entities.DataInfoEntityMoveTmp; +import com.platform.glusterfs.CheckoutMD5; +import com.platform.glusterfs.CopyData; 
+import com.platform.glusterfs.ShowData; +import com.platform.service.IMoveDataService; +import com.platform.utils.Bean2MapUtils; +import com.platform.utils.Constant; +import com.platform.utils.DateForm; + +@Component +@Service(value = "moveDataService") +public class MoveDataServiceImpl implements IMoveDataService{ + + @Resource(name = "dataInfoDao") + private DataInfoDao dataInfoDao; + + /** + * 迁移 + */ + CopyData copy = new CopyData(); + + /** + * MD5校验 + */ + CheckoutMD5 check = new CheckoutMD5(); + + @Resource(name = "dataInfoMoveTmpDao") + private DataInfoMoveTmpDao dataInfoMoveTmpDao; + + /** + * 查看数据 + */ + ShowData show = new ShowData(); + + @Override + public boolean moveData(List data, String dstPath) throws Exception { + boolean isSuccess = false; + String tailPath = ""; + if (null != data) { + //XXX/320198_16/1,or XXX/320122KFQ_15/1 ---> /320198_16/1, or /320122KFQ_15/1 + List exist = dataInfoMoveTmpDao.findAll(); + List existIds = new ArrayList(); + if (null != exist) { + for (DataInfoEntityMoveTmp dataInfoEntityMoveTmp : exist) { + if ("0".equals(dataInfoEntityMoveTmp.getCompleteStatus()) || "1".equals(dataInfoEntityMoveTmp.getCompleteStatus())) { + if (null != dataInfoEntityMoveTmp.getDataPath()) { + existIds.add(dataInfoEntityMoveTmp.getDataPath()); + } + } + } + } + Pattern pattern = Pattern.compile("\\/\\d+[a-z]*[A-Z]*_\\d+\\/\\d*\\/*$"); + // 末尾 含有 / + Pattern pattern2 = Pattern.compile("\\/$"); + Matcher matcher2 = pattern2.matcher(dstPath); + //去掉 最后 的 / 符合 + if (matcher2.find()) { + dstPath = dstPath.substring(0, dstPath.length()-1); + } + List moveList = new ArrayList(); + for (DataInfoEntity dataInfoEntity : data) { + if (existIds.contains(dataInfoEntity.getDataPath())) { + continue; + } + //TODO 正则:取出 data 的后面 的 路径,eg: XXX/320198_16/1,or XXX/320122KFQ_15/1) + Matcher matcher = pattern.matcher(dataInfoEntity.getDataPath()); + // tailPath 第一个字符是 / 符号 + if (matcher.find()) { + tailPath = matcher.group(); + } + String finalDestPath = dstPath + tailPath; + DataInfoEntityMoveTmp dataMove = new DataInfoEntityMoveTmp(); + dataMove.setSystemCode(dataInfoEntity.getSystemCode()); + dataMove.setRegionalismCode(dataInfoEntity.getRegionalismCode()); + dataMove.setDstPath(finalDestPath); + dataMove.setLastTime(DateForm.date2StringBysecond(new Date())); + moveList.add(dataMove); + } + if (moveList.size() > 0) { + dataInfoMoveTmpDao.insertBatch(moveList); + isSuccess = true; + } + } + return isSuccess; + } + + @Override + public List findAll() throws Exception { + List result = new ArrayList(); + try { + result = dataInfoMoveTmpDao.findAll(); + + } catch (Exception e) { + System.err.println(e); + } + return result; + } + + @Override + public int delete(DataInfoEntityMoveTmp dataMove) throws Exception { + int result = dataInfoMoveTmpDao.remove(dataMove.getId()); + return result; + } + + + @Override + public int save(DataInfoEntityMoveTmp data) throws Exception { + dataInfoMoveTmpDao.save(data); + return 0; + } + + @Override + public int insertBatch(List list) throws Exception { + // TODO Auto-generated method stub + return 0; + } + + @Override + public int update(DataInfoEntityMoveTmp data) throws Exception { + int result = dataInfoMoveTmpDao.update(data); + return result; + } + +} diff --git a/src/com/platform/service/thread/ThreadMoveData.java b/src/com/platform/service/thread/ThreadMoveData.java new file mode 100644 index 00000000..b7e2f867 --- /dev/null +++ b/src/com/platform/service/thread/ThreadMoveData.java @@ -0,0 +1,158 @@ +package com.platform.service.thread; + +import 
java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.annotation.Resource; + +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; +import org.springframework.stereotype.Service; + +import com.platform.dao.DataInfoDao; +import com.platform.dao.DataInfoMoveTmpDao; +import com.platform.dao.GatherOracleDao; +import com.platform.entities.DataInfoEntity; +import com.platform.entities.DataInfoEntityMoveTmp; +import com.platform.entities.GatherOracleInfo; +import com.platform.glusterfs.CheckoutMD5; +import com.platform.glusterfs.CopyData; +import com.platform.glusterfs.ShowData; +import com.platform.service.DataInfoService; +import com.platform.service.DataInfoServiceImp; +import com.platform.service.IMoveDataService; +import com.platform.service.impl.MoveDataServiceImpl; +import com.platform.utils.Bean2MapUtils; +import com.platform.utils.Constant; +import com.platform.utils.DateForm; + +@Component +public class ThreadMoveData{ + + @Resource(name = "dataInfoDao") + private DataInfoDao dataInfoDao; + + /** + * 迁移 + */ + CopyData copy = new CopyData(); + + /** + * MD5校验 + */ + CheckoutMD5 check = new CheckoutMD5(); + + @Resource(name = "dataInfoMoveTmpDao") + private DataInfoMoveTmpDao dataInfoMoveTmpDao; + + /** + * 查看数据 + */ + ShowData show = new ShowData(); + + /** + * : 实时更新数据库--根据查询到的 正则迁移的数据 + */ + public ThreadMoveData() { + } + + //5秒 + @Scheduled(fixedDelay = 5000) + public void doSomething() { + + try { + List result = null; + //查询 表 move_data_tmp + result = dataInfoMoveTmpDao.findAll(); + if (null != result) { + //gfs 获取size, + int rsize = result.size(); + boolean isNoMove = true; + //该循环必须 循环每个,不能有 break; + // rate:大小:假的,待换成真实比例 + double realRate = 0.00; + for (int i = 0; i < rsize; i++) { + DataInfoEntityMoveTmp dataMove = result.get(i); + //如果拷贝进度超过20分钟未进行-- 判断为 迁移失败。 + // "1" :正在上传,0:等待 迁移, 2:成功 3:失败 + if ("1".equals(dataMove.getCompleteStatus())) { + long nowTime = new Date().getTime(); + long timelong = nowTime - DateForm.string2DateBysecond(dataMove.getLastTime()).getTime(); + if (timelong > 1000*60*20) { + dataMove.setCompleteStatus("3"); + dataInfoMoveTmpDao.update(dataMove); + } + isNoMove = false; + // 查询大小:。//gfs 获取size, + long srcSize = show.getFolderSize(dataMove.getDataPath()); + long dstSize = show.getFolderSize(dataMove.getDstPath()); + if (srcSize > 0 && dstSize > 0) { + realRate = dstSize / srcSize * 100; + dataMove.setRate((int) realRate); + dataMove.setLastTime(DateForm.date2StringBysecond(new Date())); + } + + } + if("1".equals(dataMove.getCompleteStatus()) &&dataMove.getRate() > 0){ + //传输完毕:进行校验 + if (realRate == 100) { + //TODO 进行MD5校验 + int resl = check.checkoutMD5Folder(dataMove.getDataPath(), dataMove.getDstPath()); + //TODO 校验成功--则删除数据库记录 + if(resl == 1){ + //校验成功--修改 数据库记录-- + dataMove.setCompleteStatus("2"); + dataMove.setLastTime(DateForm.date2StringBysecond(new Date())); + dataInfoMoveTmpDao.update(dataMove); + //TODO 新增 一条数据-到-dataInfo + DataInfoEntity data = (DataInfoEntity) Bean2MapUtils.convertMap( + DataInfoEntity.class, Bean2MapUtils.convertBean(dataMove)); + data.setDataPath(dataMove.getDstPath()); + data.setYear(dataMove.getYear()+" (迁移完成时间 " + DateForm.date2StringByMin(new Date())+")"); + dataInfoDao.save(data); + } + else { + // 3:表示 迁移失败 + dataMove.setCompleteStatus("3"); + } + } + dataMove.setLastTime(DateForm.date2StringBysecond(new Date())); + dataInfoMoveTmpDao.update(dataMove); + } + } + //循环 完了, 确定没有上传的 ,没有正在上传的 + if (isNoMove) { 
+ //查询 表 move_data_tmp + result = dataInfoMoveTmpDao.findAll(); + if (null != result) { + + int tmpsize = result.size(); + // 上传下一个后 + for (int i = 0; i < tmpsize; i++) { + DataInfoEntityMoveTmp next2move = result.get(i); + //如果是 待 迁移状态的 + if ("0".equals(next2move.getCompleteStatus())) { + //待迁移的数据 -- 开始迁移 + copy.copyFolderFilesAnyway(next2move.getDataPath(), next2move.getDstPath(), "app"); + // "1" :正在上传,0:等待 迁移, 2:成功 3:失败 + next2move.setCompleteStatus("1"); + next2move.setLastTime(DateForm.date2StringBysecond(new Date())); + //更新sql + dataInfoMoveTmpDao.update(next2move); + break; + } + } + } + } + } + + Thread.sleep(Constant.update_dataInfo_sleep_time); + } catch (Exception e) { + System.err.println(e); + } + + } + +} diff --git a/src/com/platform/service/thread/TreadMoveData2Start.java b/src/com/platform/service/thread/TreadMoveData2Start.java new file mode 100644 index 00000000..91f9bb92 --- /dev/null +++ b/src/com/platform/service/thread/TreadMoveData2Start.java @@ -0,0 +1,47 @@ +package com.platform.service.thread; + +import java.util.List; + +import javax.annotation.Resource; + +import org.springframework.stereotype.Service; + +import com.platform.dao.DataInfoMoveTmpDao; +import com.platform.entities.DataInfoEntityMoveTmp; +import com.platform.service.IMoveDataService; +import com.platform.service.impl.MoveDataServiceImpl; +import com.platform.utils.Constant; + +public class TreadMoveData2Start extends Thread{ + + + private IMoveDataService dataInfoMove= new MoveDataServiceImpl(); + + public TreadMoveData2Start() { + } + + @Override + public void run() { + boolean isBreak = false; + while(true){ + if (isBreak) { + break; + } + try { + List resultlist = dataInfoMove.findAll(); + for (DataInfoEntityMoveTmp moveEntity : resultlist) { + if ("1".equals(moveEntity.getCompleteStatus())) { + moveEntity.setCompleteStatus("3"); + dataInfoMove.update(moveEntity); + } + } + isBreak = true; + Thread.sleep(Constant.update_dataInfo_sleep_time); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + +} diff --git a/src/com/platform/utils/CacheTreeData.java b/src/com/platform/utils/CacheTreeData.java index fc9f0c5e..6d6caa1b 100644 --- a/src/com/platform/utils/CacheTreeData.java +++ b/src/com/platform/utils/CacheTreeData.java @@ -1,13 +1,16 @@ - package com.platform.utils; import java.util.List; +import java.util.Map; import com.platform.entities.FolderNode; +import com.platform.entities.VolumeEntity; public class CacheTreeData { private static List folders = null; + + private static List volumeList = null; public static List getFolders() { return folders; @@ -16,6 +19,21 @@ public class CacheTreeData { public static void setFolders(List folders) { CacheTreeData.folders = folders; } + + /** + * @return the volumeList + */ + public static List getVolumeList() { + return volumeList; + } + + /** + * @param volumeList the volumeList to set + */ + public static void setVolumeList(List volumeList) { + CacheTreeData.volumeList = volumeList; + } } + diff --git a/src/com/platform/utils/ConfigsLoader.java b/src/com/platform/utils/ConfigsLoader.java index 4d524d60..9f3880df 100644 --- a/src/com/platform/utils/ConfigsLoader.java +++ b/src/com/platform/utils/ConfigsLoader.java @@ -1,4 +1,3 @@ - package com.platform.utils; import java.util.Properties; @@ -6,6 +5,8 @@ import java.util.Properties; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; +import com.platform.service.thread.ThreadMoveData; + public class ConfigsLoader 
implements ServletContextListener { private static ConfigPropertyReader cReader = null; @@ -73,4 +74,3 @@ public class ConfigsLoader implements ServletContextListener { } } - diff --git a/src/com/platform/utils/DateForm.java b/src/com/platform/utils/DateForm.java new file mode 100644 index 00000000..c6531466 --- /dev/null +++ b/src/com/platform/utils/DateForm.java @@ -0,0 +1,137 @@ +package com.platform.utils; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; + +public class DateForm { + + private static final String date_format_second = "yyyy-MM-dd HH:mm:ss"; + + private static final String date_format_second_non = "yyyy-MM-dd_HH-mm-ss"; + + private static final String data_format_min = "yyyy-MM-dd HH:mm"; + + private static final String data_format_day = "yyyy-MM-dd"; + + private static ThreadLocal threadLocal_second = new ThreadLocal(); + + private static ThreadLocal threadLocal_second_non = new ThreadLocal(); + + private static ThreadLocal threadLocal_min = new ThreadLocal(); + + private static ThreadLocal threadLocal_day = new ThreadLocal(); + + public static String date2StringBysecond(Date date) { + if (date == null) { + return null; + } + DateFormat format = threadLocal_second.get(); + if(format == null){ + format = new SimpleDateFormat(date_format_second); + threadLocal_second.set(format); + } + return format.format(date); + } + + public static String date2StringBysecondNon(Date date) { + if (date == null) { + return null; + } + DateFormat format = threadLocal_second_non.get(); + if(format == null){ + format = new SimpleDateFormat(date_format_second_non); + threadLocal_second_non.set(format); + } + return format.format(date); + } + + public static String date2StringByMin(Date date) { + if (date == null) { + return null; + } + DateFormat format = threadLocal_min.get(); + if(format == null){ + format = new SimpleDateFormat(data_format_min); + threadLocal_min.set(format); + } + return format.format(date); + } + + public static String date2StringByDay(Date date) { + if (date == null) { + return null; + } + DateFormat format = threadLocal_day.get(); + if(format == null){ + format = new SimpleDateFormat(data_format_day); + threadLocal_day.set(format); + } + return format.format(date); + } + + public static Date string2DateBysecond(String date) { + if (date == null) { + return null; + } + date = date.trim(); + if (date.isEmpty()) { + return null; + } + DateFormat format = threadLocal_second.get(); + try { + if(format == null){ + format = new SimpleDateFormat(date_format_second); + threadLocal_second.set(format); + } + return format.parse(date); + } catch (ParseException e) { + e.printStackTrace(); + } + return null; + } + + public static Date string2DateByMin(String date) { + if (date == null) { + return null; + } + date = date.trim(); + if (date.isEmpty()) { + return null; + } + DateFormat format = threadLocal_min.get(); + try { + if(format == null){ + format = new SimpleDateFormat(data_format_min); + threadLocal_min.set(format); + } + return format.parse(date); + } catch (ParseException e) { + e.printStackTrace(); + } + return null; + } + + public static Date string2DateByDay(String date) { + if (date == null) { + return null; + } + date = date.trim(); + if (date.isEmpty()) { + return null; + } + DateFormat format = threadLocal_day.get(); + try { + if(format == null){ + format = new SimpleDateFormat(data_format_day); + threadLocal_day.set(format); + } + return format.parse(date); + } catch (ParseException e) { + 
e.printStackTrace(); + } + return null; + } + +} diff --git a/src/com/platform/utils/ThreadMoveData.java b/src/com/platform/utils/ThreadMoveData.java deleted file mode 100644 index e05c07b1..00000000 --- a/src/com/platform/utils/ThreadMoveData.java +++ /dev/null @@ -1,31 +0,0 @@ -package com.platform.utils; - -public class ThreadMoveData extends Thread { - - /** - * : 实时更新数据库--根据查询到的 正则迁移的数据 - */ - public ThreadMoveData() { - // TODO Auto-generated constructor stub - } - - /* (non-Javadoc) - * @see java.lang.Thread#run() - * - */ - @Override - public void run() { - // TODO Auto-generated method stub - super.run(); - //查询 表 move_data_tmp - - - - - try { - Thread.sleep(Constant.update_dataInfo_sleep_time); - } catch (InterruptedException e) { - } - } - -} diff --git a/src/com/platform/utils/ThreadVolume.java b/src/com/platform/utils/ThreadVolume.java index b7d49086..1170c58d 100644 --- a/src/com/platform/utils/ThreadVolume.java +++ b/src/com/platform/utils/ThreadVolume.java @@ -2,9 +2,14 @@ package com.platform.utils; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import org.springframework.stereotype.Service; + +import com.platform.entities.Brick; import com.platform.entities.FolderNode; import com.platform.entities.VolumeEntity; +import com.platform.glusterfs.ClusterInfo; import com.platform.glusterfs.GetTreeData; import com.platform.glusterfs.VolumeInfo; @@ -18,6 +23,8 @@ public class ThreadVolume extends Thread implements Runnable{ /** Volume信息查询 */ private VolumeInfo volumeInfo = new VolumeInfo(); + private ClusterInfo cluster = new ClusterInfo(); + public ThreadVolume() { // TODO Auto-generated constructor stub } @@ -35,26 +42,70 @@ public class ThreadVolume extends Thread implements Runnable{ public void run() { super.run(); while(true){ - List folderlist = new ArrayList(); - //查询 volume name - List volumeNameList = volumeInfo.showAllVolumeName(); - if (null != volumeNameList) { - for (String volumeName : volumeNameList) { - VolumeEntity volume = new VolumeEntity(); - volume.setName(volumeName); - List path = volumeInfo.getVolumeMountPoint(volumeName); - //默认加载第一个路径 - if (null != path && path.size() > 0) { - //装入 folder: - //查询 每个 volume 下的 folder - FolderNode foldertmp = gfsTree.getDatas(path.get(0)); - folderlist.add(foldertmp); + try { + List folderlist = new ArrayList(); + List volumeList = new ArrayList(); + // brick状态 map集合 + Map brickStatusMap = cluster.showClusterInfo(); + + //查询 volume name + List volumeNameList = volumeInfo.showAllVolumeName(); + if (null != volumeNameList) { + for (String volumeName : volumeNameList) { + VolumeEntity volume = new VolumeEntity(); + volume.setName(volumeName); + List path = volumeInfo.getVolumeMountPoint(volumeName); + //默认加载第一个路径 + if (null != path && path.size() > 0) { + volume.setPath(path.get(0)); + } + volume.setAllSize(volumeInfo.getVolumeAvailableSize(volumeName)); + volume.setUsedSize(volumeInfo.getVolumeUseSize(volumeName)); + //TODO 查询brick-- + //返回 ip:path + List brickStrs = volumeInfo.getVolumeBricks(volumeName); + //brick已用大小: + Map usedSize = volumeInfo.getVolumebricksDataSize(volumeName); + Map availableSize = volumeInfo.getVolumebricksAvailableSize(volumeName); + + List brickList = new ArrayList(); + for (String brickIpPath : brickStrs) { + Brick b = new Brick(); + String ipAndpath[] = brickIpPath.split(":"); + String brickip = ipAndpath[0]; + String brickpath = ipAndpath[1]; + //iP , path , + b.setIp(brickip); + if(brickStatusMap==null || brickStatusMap.size()==0){ + b.setStatus(false); + }else if 
(brickStatusMap.containsKey(brickip)) { + b.setStatus(true); + } + else { + b.setStatus(false); + } + b.setPath(brickpath); + b.setAvailableSize(availableSize.get(brickIpPath)); + b.setUsedSize(usedSize.get(brickIpPath)); + brickList.add(b); + } + volume.setBrick(brickList); + + + //默认加载第一个路径 + if (null != path && path.size() > 0) { + //装入 folder: + //查询 每个 volume 下的 folder + FolderNode foldertmp = gfsTree.getDatas(path.get(0)); + folderlist.add(foldertmp); + } + volumeList.add(volume); } } - } - //TODO 更新folder 目录 - CacheTreeData.setFolders(folderlist); - try { + //TODO 更新folder 目录 + CacheTreeData.setFolders(folderlist); + CacheTreeData.setVolumeList(volumeList); + Thread.sleep(Constant.get_volume_sleep_time); } catch (InterruptedException e) { }
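
Note on the scheduling configuration: the spring-applicationContext.xml hunk above registers the http://www.springframework.org/schema/task namespace and its 3.0 schema, but the element bodies of the changed config files did not survive in this copy of the patch. The new com.platform.service.thread.ThreadMoveData bean polls move_data_tmp via @Scheduled(fixedDelay = 5000), which only fires when annotation-driven task support is enabled and the class is picked up as a component. A minimal sketch of what that context entry typically looks like follows; the executor/scheduler ids, pool sizes, and the scanned base package are illustrative assumptions, not taken from this commit (the existing context most likely already declares component scanning for the @Controller/@Service beans):

    <!-- sketch only: enables @Scheduled on ThreadMoveData; ids and pool sizes are assumed -->
    <task:annotation-driven executor="taskExecutor" scheduler="taskScheduler"/>
    <task:executor id="taskExecutor" pool-size="5"/>
    <task:scheduler id="taskScheduler" pool-size="5"/>
    <context:component-scan base-package="com.platform"/>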