From cdd85cf1fa566dc8c6ef7563a23d6f264d4a59fc Mon Sep 17 00:00:00 2001
From: keyeqing <1057516690@qq.com>
Date: Mon, 28 Oct 2013 11:23:33 +0800
Subject: [PATCH] hdfs path

---
 src/main/java/haflow/service/HdfsService.java |  51 +++-
 .../haflow/ui/controller/HdfsController.java  |  83 ++++++-
 .../haflow/ui/controller/HomeController.java  |   4 +
 .../ui/controller/MainLogonController.java    |  20 ++
 .../java/haflow/ui/helper/HdfsHelper.java     |  16 ++
 .../java/haflow/ui/model/RenameModel.java     |  30 +++
 src/main/resources/servlet-context.xml        |   1 +
 src/main/webapp/WEB-INF/views/hive.jsp        |  15 ++
 src/main/webapp/WEB-INF/web.xml               |   2 +-
 src/main/webapp/script/haflow.main.js         | 224 +++++++++---------
 10 files changed, 325 insertions(+), 121 deletions(-)
 create mode 100644 src/main/java/haflow/ui/model/RenameModel.java
 create mode 100644 src/main/webapp/WEB-INF/views/hive.jsp

diff --git a/src/main/java/haflow/service/HdfsService.java b/src/main/java/haflow/service/HdfsService.java
index 694e184..0435ed0 100644
--- a/src/main/java/haflow/service/HdfsService.java
+++ b/src/main/java/haflow/service/HdfsService.java
@@ -4,6 +4,7 @@ import haflow.util.ClusterConfiguration;
 
 import java.io.BufferedInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -56,6 +57,8 @@ public class HdfsService {
 	public boolean createDirectory(String remotePath, String directoryname) {
 		try {
 			FileSystem fs = this.getFileSystem();
+			System.out
+					.println("service:"+(fs==null));
 			fs.mkdirs(new Path(remotePath+"/"+directoryname));
 			fs.close();
 			return true;
@@ -110,6 +113,18 @@ public class HdfsService {
 			return null;
 		}
 	}
+
+	public FSDataInputStream readPicture(String remotePath) {
+		try {
+			FileSystem fs = this.getFileSystem();
+			FSDataInputStream hdfsInStream = fs.open(new Path(remotePath));
+			return hdfsInStream;
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		}
+	}
+
 	public boolean appendFile(String content, String remotePath) {
 		try {
@@ -150,7 +165,7 @@ public class HdfsService {
 	public boolean renameFile(String fromPath, String toPath) {
 		try {
 			FileSystem fs = this.getFileSystem();
-			return fs.rename(new Path(fromPath), new Path(toPath));
+			return fs.rename(new Path(fromPath), new Path(toPath));
 		} catch (Exception e) {
 			e.printStackTrace();
 			return false;
@@ -167,5 +182,39 @@
 			return null;
 		}
 	}
+
+	public boolean movefile(String fromPath, String toPath, String filename) {
+		FileSystem fs;
+		try {
+			fs = this.getFileSystem();
+		} catch (IOException e) {
+			e.printStackTrace();
+			return false;
+		}
+		String localPath = "/home/tmp";
+		File dirPath = new File(localPath);
+		if (!dirPath.exists()) {
+			dirPath.mkdirs();
+			System.out.print(localPath);
+		}
+		else
+			System.out.print(localPath);
+		Path fromhdfsPath = new Path(fromPath);
+		Path tmpPath = new Path(localPath);
+		Path tohdfsPath = new Path(toPath);
+		try {
+			fs.moveToLocalFile(fromhdfsPath, tmpPath);
+		} catch (IOException e) {
+			e.printStackTrace();
+			return false;
+		}
+		try {
+			fs.moveFromLocalFile(tmpPath, tohdfsPath);
+		} catch (IOException e) {
+			e.printStackTrace();
+			return false;
+		}
+		return true;
+	}
 }
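The new HdfsService.movefile above stages data on the local disk: it creates /home/tmp, then calls moveToLocalFile followed by moveFromLocalFile. A move between two HDFS paths can usually stay inside HDFS; the sketch below shows one such alternative under stated assumptions (the class name HdfsMoveSketch and the passed-in Configuration are illustrative, not part of this patch):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    public class HdfsMoveSketch {
        // Moves fromPath to toPath without a local staging directory.
        public static boolean moveWithinHdfs(FileSystem fs, Configuration conf,
                String fromPath, String toPath) {
            try {
                Path src = new Path(fromPath);
                Path dst = new Path(toPath);
                // Fast path: rename() works when both paths live on the same FileSystem.
                if (fs.rename(src, dst)) {
                    return true;
                }
                // Fallback: copy, then delete the source (deleteSource = true).
                return FileUtil.copy(fs, src, fs, dst, true, conf);
            } catch (IOException e) {
                e.printStackTrace();
                return false;
            }
        }
    }
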
diff --git a/src/main/java/haflow/ui/controller/HdfsController.java b/src/main/java/haflow/ui/controller/HdfsController.java
index e40bef4..6afd36b 100644
--- a/src/main/java/haflow/ui/controller/HdfsController.java
+++ b/src/main/java/haflow/ui/controller/HdfsController.java
@@ -13,6 +13,7 @@ import javax.servlet.http.HttpServletResponse;
 import haflow.ui.helper.HdfsHelper;
 import haflow.ui.model.HdfsFileListModel;
 import haflow.ui.model.HdfsFileModel;
+import haflow.ui.model.RenameModel;
 import haflow.ui.model.UploadFileModel;
 import haflow.ui.model.RemoveHdfsFileModel;
 import haflow.ui.model.CreateDirectoryModel;
@@ -50,7 +51,16 @@ public class HdfsController {
 	public UploadFileModel upload(MultipartHttpServletRequest request,@RequestParam(value = "remotePath", required = true) String remotepath){
 		System.out.println("begin to upload");
 		UploadFileModel model=new UploadFileModel();
-		MultipartFile file =(MultipartFile)request.getFile("file");
+		MultipartFile file=null;
+		try{
+			file =(MultipartFile)request.getFile("file");
+		}catch(Exception e){
+			System.out
+					.println("got exception:"+e.getMessage());
+			e.printStackTrace();
+		}
+
+
 		try{
 			byte[] bytes=file.getBytes();
 			String uploadDir = "c:\\uploadFile";
@@ -205,4 +215,75 @@ public class HdfsController {
 		String out_fileName = new String(in_fileName.getBytes("iso-8859-1"),"UTF-8");
 		return this.getHdfsHelper().getFile(out_path, out_fileName);
 	}
+
+	@RequestMapping(value = "/picture", method = RequestMethod.GET)
+	@ResponseBody
+	public ResponseEntity getpicture(HttpServletResponse response,
+			@RequestParam(value = "path", required = true) String path,
+			@RequestParam(value = "fileName", required = true) String fileName) throws UnsupportedEncodingException {
+		response.setContentType("image/jpeg");
+		String in_path=path;
+		String out_path = new String(in_path.getBytes("iso-8859-1"),"UTF-8");
+		String in_fileName=fileName;
+		String out_fileName = new String(in_fileName.getBytes("iso-8859-1"),"UTF-8");
+//		String new_path=out_path + "/" + out_fileName;
+		try{
+			BufferedOutputStream bos = new BufferedOutputStream(response.getOutputStream());
+			BufferedInputStream bis = new BufferedInputStream(this.hdfsHelper.getPicture(out_path, out_fileName));
+			byte[] buf = new byte[1024];
+			int read;
+			while((read=bis.read(buf))!=-1){
+				bos.write(buf,0,read);
+			}
+			bos.close();
+			bis.close();
+		}
+		catch (Exception e) {
+			e.printStackTrace();
+		}
+		return null;
+	}
+
+	@RequestMapping(value = "/rename", method = RequestMethod.GET)
+	@ResponseBody
+	public RenameModel rename(
+			@RequestParam("path") String path,@RequestParam("newpath") String newpath) throws UnsupportedEncodingException{
+		String in_path=path;
+		String out_path = new String(in_path.getBytes("iso-8859-1"),"UTF-8");
+		String in_newpath=newpath;
+		String out_newpath = new String(in_newpath.getBytes("iso-8859-1"),"UTF-8");
+		RenameModel model=new RenameModel();
+		System.out.println("out_path:"+out_path);
+		System.out.println("out_newpath:"+out_newpath);
+		if(this.getHdfsHelper().rename(out_path,out_newpath))
+		{
+			model.setSuccess(true);
+			model.setMessage("Succeed to rename");
+			System.out.println("Succeed to rename");
+		}
+		else
+		{
+			model.setSuccess(false);
+			model.setMessage("Fail to rename");
+		}
+		return model;
+
+	}
+
+	@RequestMapping(value = "/movefile", method = RequestMethod.GET)
+	@ResponseBody
+	public boolean movefile(
+			@RequestParam("frompath") String frompath,@RequestParam("topath") String topath,@RequestParam("filename") String filename) throws UnsupportedEncodingException{
+		System.out.println("out_path:");
+		String in_frompath=frompath;
+		String out_frompath = new String(in_frompath.getBytes("iso-8859-1"),"UTF-8");
+		String in_topath=topath;
+		String out_topath = new String(in_topath.getBytes("iso-8859-1"),"UTF-8");
+		String in_filename=filename;
+		String out_filename = new String(in_filename.getBytes("iso-8859-1"),"UTF-8");
+		System.out.println("out_path:"+out_frompath);
+		System.out.println("out_newpath:"+out_topath);
+		return this.getHdfsHelper().movefile(out_frompath,out_topath,out_filename);
+	}
 }
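Every handler above re-decodes its query parameters with new String(value.getBytes("iso-8859-1"), "UTF-8"). A small helper keeps that conversion in one place; the ParamCodec name below is an illustrative assumption, not part of the patch, and the conversion becomes unnecessary if the servlet container is configured to decode request URIs as UTF-8 (for example Tomcat's URIEncoding connector attribute):

    import java.io.UnsupportedEncodingException;

    public final class ParamCodec {
        private ParamCodec() {
        }

        // Re-decodes a query parameter that the container decoded as ISO-8859-1.
        public static String toUtf8(String raw) throws UnsupportedEncodingException {
            return raw == null ? null : new String(raw.getBytes("iso-8859-1"), "UTF-8");
        }
    }

With this in place, a line such as String out_path = ParamCodec.toUtf8(path); replaces each repeated two-line pattern in the controller.
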
diff --git a/src/main/java/haflow/ui/controller/HomeController.java b/src/main/java/haflow/ui/controller/HomeController.java
index 6bd03af..8cd0d7f 100644
--- a/src/main/java/haflow/ui/controller/HomeController.java
+++ b/src/main/java/haflow/ui/controller/HomeController.java
@@ -26,4 +26,8 @@ public class HomeController {
 	public ModelAndView oozie() {
 		return new ModelAndView("oozie");
 	}
+	@RequestMapping({ "/hive" })
+	public ModelAndView hive() {
+		return new ModelAndView("hive");
+	}
 }
diff --git a/src/main/java/haflow/ui/controller/MainLogonController.java b/src/main/java/haflow/ui/controller/MainLogonController.java
index 48bfa43..6983c38 100644
--- a/src/main/java/haflow/ui/controller/MainLogonController.java
+++ b/src/main/java/haflow/ui/controller/MainLogonController.java
@@ -1,8 +1,10 @@
 package haflow.ui.controller;
+
 
 import javax.servlet.http.HttpServletRequest;
 
 import haflow.module.util.Md5Util;
+import haflow.ui.helper.HdfsHelper;
 import haflow.ui.helper.UserHelper;
 
 import org.springframework.beans.factory.annotation.Autowired;
@@ -16,7 +18,16 @@ import org.springframework.web.servlet.mvc.support.RedirectAttributes;
 @Controller
 public class MainLogonController {
 	private UserHelper userHelper;
+	private HdfsHelper hdfsHelper;
 
+	private HdfsHelper getHdfsHelper() {
+		return hdfsHelper;
+	}
+
+	@Autowired
+	private void setHdfsHelper(HdfsHelper hdfsHelper) {
+		this.hdfsHelper = hdfsHelper;
+	}
 	private UserHelper getUserHelper() {
 		return userHelper;
 	}
@@ -40,8 +51,17 @@ public class MainLogonController {
 		password=Md5Util.getMd5Hex(password);
 		if (this.getUserHelper().saveUser(username, password,email, mora)) {
 			//System.out.println("successful return main");
+//			System.out
+//					.println("controller:"+(this.getHdfsHelper()==null));
+			if(this.getHdfsHelper().createdirectory("hdfs://133.133.2.150:9000/user/root",username))
+			{
 			redirectAttributes.addFlashAttribute("message", "Registration succeeded");
 			return "redirect:/";
+			}
+			else{
+				redirectAttributes.addFlashAttribute("message", "Failed to allocate the user's HDFS space!");
+				return "redirect:/registration";
+			}
 		} else {
 			redirectAttributes.addFlashAttribute("message", "Username or email already exists");
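The registration flow above hardcodes the namenode URI "hdfs://133.133.2.150:9000/user/root" when it creates the per-user directory. The sketch below shows the same operation with the base URI taken from configuration; the UserHdfsSpace class and its constructor argument are illustrative assumptions, not part of the patch:

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class UserHdfsSpace {
        // Assumed to come from a properties file or ClusterConfiguration in the real project.
        private final String baseUri;

        public UserHdfsSpace(String baseUri) {
            this.baseUri = baseUri; // e.g. "hdfs://namenode:9000/user/root"
        }

        // Creates <baseUri>/<username>, returning false on any failure.
        public boolean createUserDirectory(String username) {
            try (FileSystem fs = FileSystem.get(URI.create(baseUri), new Configuration())) {
                return fs.mkdirs(new Path(baseUri + "/" + username));
            } catch (IOException e) {
                e.printStackTrace();
                return false;
            }
        }
    }
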
diff --git a/src/main/java/haflow/ui/helper/HdfsHelper.java b/src/main/java/haflow/ui/helper/HdfsHelper.java
index f4907d6..19163da 100644
--- a/src/main/java/haflow/ui/helper/HdfsHelper.java
+++ b/src/main/java/haflow/ui/helper/HdfsHelper.java
@@ -8,6 +8,7 @@ import haflow.ui.model.HdfsFileListModel;
 import haflow.ui.model.HdfsFileModel;
 
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
@@ -54,6 +55,11 @@ public class HdfsHelper {
 		return model;
 	}
 
+	public FSDataInputStream getPicture(String path, String fileName) {
+		String filePath = path + "/" + fileName;
+		return this.getHdfsService().readPicture(filePath);
+	}
+
 	public Boolean uploadFile(String localpath,String remotePath) {
 		Boolean ret = this.getHdfsService().uploadFile(localpath,remotePath);
 		return ret;
@@ -75,4 +81,14 @@ public class HdfsHelper {
 		boolean ret=this.getHdfsService().deleteFile(remotePath);
 		return ret;
 	}
+	public boolean rename(String path,String newpath) {
+		boolean ret=this.getHdfsService().renameFile(path,newpath);
+		System.out.println("helper:"+ret);
+		return ret;
+	}
+	public boolean movefile(String fromPath,String toPath,String filename) {
+		boolean ret=this.getHdfsService().movefile(fromPath,toPath,filename);
+		System.out.println("movefile:"+ret);
+		return ret;
+	}
 }
diff --git a/src/main/java/haflow/ui/model/RenameModel.java b/src/main/java/haflow/ui/model/RenameModel.java
new file mode 100644
index 0000000..0a35a1c
--- /dev/null
+++ b/src/main/java/haflow/ui/model/RenameModel.java
@@ -0,0 +1,30 @@
+package haflow.ui.model;
+
+import javax.xml.bind.annotation.XmlElement;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "RenameModel")
+public class RenameModel {
+	private boolean success;
+	private String message;
+
+	@XmlElement(name = "success")
+	public boolean isSuccess() {
+		return success;
+	}
+
+	public void setSuccess(boolean success) {
+		this.success = success;
+	}
+
+	@XmlElement(name = "message")
+	public String getMessage() {
+		return message;
+	}
+
+	public void setMessage(String message) {
+		this.message = message;
+	}
+
+}
diff --git a/src/main/resources/servlet-context.xml b/src/main/resources/servlet-context.xml
index 693ddbf..269571f 100644
--- a/src/main/resources/servlet-context.xml
+++ b/src/main/resources/servlet-context.xml
@@ -21,4 +21,5 @@
 	class="org.springframework.web.multipart.commons.CommonsMultipartResolver">
 
+
diff --git a/src/main/webapp/WEB-INF/views/hive.jsp b/src/main/webapp/WEB-INF/views/hive.jsp
new file mode 100644
index 0000000..3996ba5
--- /dev/null
+++ b/src/main/webapp/WEB-INF/views/hive.jsp
@@ -0,0 +1,15 @@
+<%@ page language="java" contentType="text/html; charset=utf-8"
+	pageEncoding="utf-8" isELIgnored ="false"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
+
+
+
+
+
+ozzie
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index 93406e4..264ce84 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -41,5 +41,5 @@
 	dispatcherServlet
 	/
+
-
\ No newline at end of file
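RenameModel is what the /hdfs/rename handler returns, and it carries JAXB annotations. The throwaway main method below shows the XML shape those annotations produce; it is illustrative test code, not part of the patch, and it assumes JAXB (javax.xml.bind) is available, which the annotations in RenameModel already require:

    import java.io.StringWriter;

    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.Marshaller;

    import haflow.ui.model.RenameModel;

    public class RenameModelXmlDemo {
        public static void main(String[] args) throws Exception {
            RenameModel model = new RenameModel();
            model.setSuccess(true);
            model.setMessage("Succeed to rename");

            Marshaller marshaller = JAXBContext.newInstance(RenameModel.class).createMarshaller();
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);

            StringWriter out = new StringWriter();
            marshaller.marshal(model, out);
            // Expected shape: <RenameModel><success>true</success><message>...</message></RenameModel>
            System.out.println(out);
        }
    }
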
", style : "width:400px" }); @@ -1471,20 +1470,7 @@ HAFlow.Main.prototype.initHdfsFileListTree = function() { } dialog.destroy(); }); - } - else - HAFlow.showDialog("Upload", "It's a file.Can't upload to it."); - - - }// 锟结交失锟斤拷 - }); - } else { - HAFlow.showDialog("Upload", - "File exits."); - } - dialog.destroy(); - }); - } else + }else HAFlow.showDialog("Upload", "It's a file.Can't upload to it."); }); @@ -1616,104 +1602,99 @@ HAFlow.Main.prototype.initHdfsFileListTree = function() { }); dojo - .connect( - this.menu.treeMenu.CreateMenuItem, - "onClick", - function() { - var tn = dijit.byNode(this.getParent().currentTarget); - var path = tn.item.path; - var isDirectory = tn.item.isDirectory; - if (isDirectory == true) { - HAFlow - .showDialog( - "create new directory", - "
" - + "new name: " - + "
"); - dojo - .connect( - dojo.byId("create_btn"), - "onclick", - function() { - var directoryname = document - .getElementById("directoryname").value; - var result = _currentInstance.hdfsFileListStore - .query({ - path : path - + "/" - + directoryname - }); - if (result.total == 0) { - $ - .ajax({ - url : _currentInstance.basePath - + "hdfs/createdirectory?remotepath=" - + path - + "&directoryname=" - + dojo - .byId("directoryname").value, - type : "GET", - dataType : "json", - contentType : "application/json", - data : JSON - .stringify({}), - success : function( - data, - status) { - if (data.success = true) { - HAFlow - .showDialog( - "Create HdfsFile Directory", - "HdfsFile Directory created."); - _currentInstance.hdfsFileListStore - .put({ - id : path - + "/" - + data.directoryname, - name : data.directoryname, - isDirectory : true, - path : path - + "/" - + data.directoryname, - parentPath : path, - }); + .connect( + this.menu.treeMenu.CreateMenuItem, + "onClick", + function() { + var tn = dijit.byNode(this.getParent().currentTarget); + var path = tn.item.path; + var isDirectory = tn.item.isDirectory; + if (isDirectory == true) { + HAFlow + .showDialog( + "create new directory", + "
" + + "new name: " + + "
"); + dojo + .connect( + dojo.byId("create_btn"), + "onclick", + function() { + var directoryname = document + .getElementById("directoryname").value; + var result = _currentInstance.hdfsFileListStore + .query({ + path : path + + "/" + + directoryname + }); + if (result.total == 0) { + $ + .ajax({ + url : _currentInstance.basePath + + "hdfs/createdirectory?remotepath=" + + path + + "&directoryname=" + + dojo + .byId("directoryname").value, + type : "GET", + dataType : "json", + contentType : "application/json", + data : JSON + .stringify({}), + success : function( + data, + status) { + if (data.success = true) { + HAFlow + .showDialog( + "Create HdfsFile Directory", + "HdfsFile Directory created."); + _currentInstance.hdfsFileListStore + .put({ + id : path + + "/" + + data.directoryname, + name : data.directoryname, + isDirectory : true, + path : path + + "/" + + data.directoryname, + parentPath : path, + }); - } else - HAFlow - .showDialog( - "Create HdfsFile Directory", - "HdfsFile Directory can't be created."); - }, - error : function( - request, - status, - error) { - HAFlow - .showDialog( - "Error", - "An error occurred while removing HdfsFile Directory: " - + error); - } - }); - } else { - HAFlow - .showDialog( - "Create HdfsFile Directory", - "HdfsFile Directory exits."); - } + } else + HAFlow + .showDialog( + "Create HdfsFile Directory", + "HdfsFile Directory can't be created."); + }, + error : function( + request, + status, + error) { + HAFlow + .showDialog( + "Error", + "An error occurred while removing HdfsFile Directory: " + + error); + } + }); + } else { + HAFlow + .showDialog( + "Create HdfsFile Directory", + "HdfsFile Directory exits."); + } + + }); + } else { + HAFlow + .showDialog("Create HdfsFile Directory", + "It's a file.HdfsFile Directory can't be created in it."); + } - }); - else{ - HAFlow.showDialog("Create HdfsFile Directory", "HdfsFile Directory exits."); - } - - }); - } - else - { - HAFlow.showDialog("Create HdfsFile Directory", "It's a file.HdfsFile Directory can't be created in it."); - } - }); dojo.connect( this.menu.treeMenu.DownloadMenuItem, @@ -2322,10 +2303,10 @@ HAFlow.Main.prototype.onNodeClicked = function(instance, flowId, nodeId) { + module.configurations[i].key; var divId = textBoxId + "_container"; var hdfspathButtonId = textBoxId + "_hdfspathButton"; - if (dijit.byId(textBoxId) != null) { - dijit.registry.remove(textBoxId); - } if (module.configurations[i].type == "BOOLEAN") { + if (dijit.byId(textBoxId) != null) { + dijit.registry.remove(textBoxId); + } var configtype_true = new dijit.form.CheckBox({ id : textBoxId, checked : (instance.getConfigurationValue(instance, flowId, @@ -2335,19 +2316,26 @@ HAFlow.Main.prototype.onNodeClicked = function(instance, flowId, nodeId) { configtype_true.placeAt(dojo.byId(divId)); configtype_true.startup(); } else { + if (dijit.byId(textBoxId) != null) { + dijit.registry.remove(textBoxId); + } var configurationTextBox = new dijit.form.TextBox({ - id : textBoxId + "_textbox", + id : textBoxId, value : instance.getConfigurationValue(instance, flowId, nodeId, module.configurations[i].key), style : "width:600px;" }); configurationTextBox.placeAt(dojo.byId(divId)); configurationTextBox.startup(); + if (dijit.byId(textBoxId+"_hdfspath") != null) { + dijit.registry.remove(textBoxId+"_hdfspath"); + } + var a="_hdfspath"; var hdfspathButton = new dijit.form.Button({ - id : textBoxId, + id : textBoxId+a, label : "Hdfs Path", onClick : function() { - dijit.byId(this.id + "_textbox").set("value", hdfspath); + 
dijit.byId(this.id.replace(a,"")).set("value", hdfspath); } }); hdfspathButton.placeAt(dojo.byId(hdfspathButtonId));