hdfs path

This commit is contained in:
keyeqing 2013-10-28 11:23:33 +08:00
parent 9312ac6b14
commit cdd85cf1fa
10 changed files with 325 additions and 121 deletions

View File

@ -4,6 +4,7 @@ import haflow.util.ClusterConfiguration;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@ -56,6 +57,8 @@ public class HdfsService {
public boolean createDirectory(String remotePath, String directoryname) {
try {
FileSystem fs = this.getFileSystem();
System.out.println("service:"+(fs==null));
fs.mkdirs(new Path(remotePath+"/"+directoryname));
fs.close();
return true;
@ -110,6 +113,18 @@ public class HdfsService {
return null;
}
}
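// Returns an open stream backed by the FileSystem; the caller is responsible for closing it.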
public FSDataInputStream readPicture(String remotePath) {
try {
FileSystem fs = this.getFileSystem();
FSDataInputStream hdfsInStream = fs.open(new Path(remotePath));
return hdfsInStream;
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
public boolean appendFile(String content, String remotePath) {
try {
@ -150,7 +165,7 @@ public class HdfsService {
public boolean renameFile(String fromPath, String toPath) {
try {
FileSystem fs = this.getFileSystem();
return fs.rename(new Path(fromPath), new Path(toPath));
return fs.rename(new Path(fromPath), new Path(toPath));
} catch (Exception e) {
e.printStackTrace();
return false;
@ -167,5 +182,39 @@ public class HdfsService {
return null;
}
}
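// Moves an HDFS file by staging it through the local directory /home/tmp; the filename parameter is currently unused.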
public boolean movefile(String fromPath,String toPath,String filename){
FileSystem fs;
try {
fs = this.getFileSystem();
} catch (IOException e) {
e.printStackTrace();
return false;
}
String localPath = "/home/tmp";
File dirPath = new File(localPath);
if (!dirPath.exists()) {
dirPath.mkdirs();
System.out.print(localPath);
}
else
System.out.print(localPath);
Path fromhdfsPath = new Path(fromPath);
Path tmpPath = new Path(localPath);
Path tohdfsPath = new Path(toPath);
try {
fs.moveToLocalFile(fromhdfsPath, tmpPath);
} catch (IOException e) {
e.printStackTrace();
return false;
}
try {
fs.moveFromLocalFile(tmpPath, tohdfsPath);
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
}
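For reference only: the movefile above stages the data through the local /home/tmp directory with moveToLocalFile and moveFromLocalFile. When both paths live on the same HDFS FileSystem, a move can instead be expressed as a single rename, which updates the namespace without copying data. A minimal sketch under that assumption (the method name moveWithinHdfs is invented here and would sit next to movefile in HdfsService):

// Sketch only, not the committed code: a move that stays inside HDFS.
// Assumes fromPath and toPath are on the same FileSystem; moveWithinHdfs is a made-up name.
public boolean moveWithinHdfs(String fromPath, String toPath) {
    try {
        FileSystem fs = this.getFileSystem();
        // rename() is how HDFS performs a move: a namespace update, no data copy.
        return fs.rename(new Path(fromPath), new Path(toPath));
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}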

View File

@ -13,6 +13,7 @@ import javax.servlet.http.HttpServletResponse;
import haflow.ui.helper.HdfsHelper;
import haflow.ui.model.HdfsFileListModel;
import haflow.ui.model.HdfsFileModel;
import haflow.ui.model.RenameModel;
import haflow.ui.model.UploadFileModel;
import haflow.ui.model.RemoveHdfsFileModel;
import haflow.ui.model.CreateDirectoryModel;
@ -50,7 +51,16 @@ public class HdfsController {
public UploadFileModel upload(MultipartHttpServletRequest request,@RequestParam(value = "remotePath", required = true) String remotepath){
System.out.println("begin to upload");
UploadFileModel model=new UploadFileModel();
MultipartFile file =(MultipartFile)request.getFile("file");
MultipartFile file=null;
try{
file =(MultipartFile)request.getFile("file");
}catch(Exception e){
System.out.println("got exception:"+e.getMessage());
e.printStackTrace();
}
try{
byte[] bytes=file.getBytes();
String uploadDir = "c:\\uploadFile";
@ -205,4 +215,75 @@ public class HdfsController {
String out_fileName = new String(in_fileName.getBytes("iso-8859-1"),"UTF-8");
return this.getHdfsHelper().getFile(out_path, out_fileName);
}
@RequestMapping(value = "/picture", method = RequestMethod.GET)
@ResponseBody
public ResponseEntity<byte[]> getpicture(HttpServletResponse response,
@RequestParam(value = "path", required = true) String path,
@RequestParam(value = "fileName", required = true) String fileName) throws UnsupportedEncodingException {
response.setContentType("image/jpeg");
String in_path=path;
String out_path = new String(in_path.getBytes("iso-8859-1"),"UTF-8");
String in_fileName=fileName;
String out_fileName = new String(in_fileName.getBytes("iso-8859-1"),"UTF-8");
// String new_path=out_path + "/" + out_fileName;
try{
BufferedOutputStream bos = new BufferedOutputStream(response.getOutputStream());
BufferedInputStream bis = new BufferedInputStream(this.hdfsHelper.getPicture(out_path, out_fileName));
byte[] buf = new byte[1024];
int read;
while((read=bis.read(buf))!=-1){
bos.write(buf,0,read);
}
bos.close();
bis.close();
}
catch (Exception e) {
e.printStackTrace();
}
return null;
}
@RequestMapping(value = "/rename", method = RequestMethod.GET)
@ResponseBody
public RenameModel rename(
@RequestParam("path") String path,@RequestParam("newpath") String newpath) throws UnsupportedEncodingException{
String in_path=path;
String out_path = new String(in_path.getBytes("iso-8859-1"),"UTF-8");
String in_newpath=newpath;
String out_newpath = new String(in_newpath.getBytes("iso-8859-1"),"UTF-8");
RenameModel model=new RenameModel();
System.out.println("out_path:"+out_path);
System.out.println("out_newpath:"+out_newpath);
if(this.getHdfsHelper().rename(out_path,out_newpath))
{
model.setSuccess(true);
model.setMessage("Rename succeeded");
System.out.println("Rename succeeded");
}
else
{
model.setSuccess(false);
model.setMessage("Rename failed");
}
return model;
}
@RequestMapping(value = "/movefile", method = RequestMethod.GET)
@ResponseBody
public boolean movefile(
@RequestParam("frompath") String frompath,@RequestParam("topath") String topath,@RequestParam("filename") String filename) throws UnsupportedEncodingException{
System.out.println("out_path:");
String in_frompath=frompath;
String out_frompath = new String(in_frompath.getBytes("iso-8859-1"),"UTF-8");
String in_topath=topath;
String out_topath = new String(in_topath.getBytes("iso-8859-1"),"UTF-8");
String in_filename=filename;
String out_filename = new String(in_filename.getBytes("iso-8859-1"),"UTF-8");
System.out.println("out_path:"+out_frompath);
System.out.println("out_newpath:"+out_topath);
return this.getHdfsHelper().movefile(out_frompath,out_topath,out_filename);
}
}
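For reference only: getpicture above streams the bytes directly to the servlet response and then returns null, so its declared ResponseEntity&lt;byte[]&gt; is never populated. A hedged sketch of the same idea returning the body through Spring is shown below; the /picture2 mapping and the method name are invented, and the iso-8859-1 re-decoding of the parameters is omitted for brevity:

// Sketch only: the "/picture2" mapping and method name are invented for illustration.
// Assumes imports: java.io.ByteArrayOutputStream, java.io.IOException, java.io.InputStream,
// org.springframework.http.HttpHeaders, org.springframework.http.HttpStatus,
// org.springframework.http.MediaType (ResponseEntity is already used above).
@RequestMapping(value = "/picture2", method = RequestMethod.GET)
@ResponseBody
public ResponseEntity<byte[]> getPictureAsEntity(
        @RequestParam("path") String path,
        @RequestParam("fileName") String fileName) throws IOException {
    InputStream in = this.hdfsHelper.getPicture(path, fileName);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buf = new byte[1024];
    int read;
    while ((read = in.read(buf)) != -1) {
        out.write(buf, 0, read);
    }
    in.close();
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.IMAGE_JPEG);
    // Spring writes the byte[] body and headers; no direct response handling needed.
    return new ResponseEntity<byte[]>(out.toByteArray(), headers, HttpStatus.OK);
}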

View File

@ -26,4 +26,8 @@ public class HomeController {
public ModelAndView oozie() {
return new ModelAndView("oozie");
}
@RequestMapping({ "/hive" })
public ModelAndView hive() {
return new ModelAndView("hive");
}
}

View File

@ -1,8 +1,10 @@
package haflow.ui.controller;
import javax.servlet.http.HttpServletRequest;
import haflow.module.util.Md5Util;
import haflow.ui.helper.HdfsHelper;
import haflow.ui.helper.UserHelper;
import org.springframework.beans.factory.annotation.Autowired;
@ -16,7 +18,16 @@ import org.springframework.web.servlet.mvc.support.RedirectAttributes;
@Controller
public class MainLogonController {
private UserHelper userHelper;
private HdfsHelper hdfsHelper;
private HdfsHelper getHdfsHelper() {
return hdfsHelper;
}
@Autowired
private void setHdfsHelper(HdfsHelper hdfsHelper) {
this.hdfsHelper = hdfsHelper;
}
private UserHelper getUserHelper() {
return userHelper;
}
@ -40,8 +51,17 @@ public class MainLogonController {
password=Md5Util.getMd5Hex(password);
if (this.getUserHelper().saveUser(username, password,email, mora)) {
//System.out.println("successful return main");
// System.out.println("controller:"+(this.getHdfsHelper()==null));
if(this.getHdfsHelper().createdirectory("hdfs://133.133.2.150:9000/user/root",username))
{
redirectAttributes.addFlashAttribute("message", "Registration successful");
return "redirect:/";
}
else{
redirectAttributes.addFlashAttribute("message", "Failed to allocate the user's HDFS space!");
return "redirect:/registration";
}
} else {
redirectAttributes.addFlashAttribute("message", "Username or email already exists");

View File

@ -8,6 +8,7 @@ import haflow.ui.model.HdfsFileListModel;
import haflow.ui.model.HdfsFileModel;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FSDataInputStream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@ -54,6 +55,11 @@ public class HdfsHelper {
return model;
}
public FSDataInputStream getPicture(String path, String fileName) {
String filePath = path + "/" + fileName;
return this.getHdfsService().readPicture(filePath);
}
public Boolean uploadFile(String localpath,String remotePath) {
Boolean ret = this.getHdfsService().uploadFile(localpath,remotePath);
return ret;
@ -75,4 +81,14 @@ public class HdfsHelper {
boolean ret=this.getHdfsService().deleteFile(remotePath);
return ret;
}
public boolean rename(String path,String newpath) {
boolean ret=this.getHdfsService().renameFile(path,newpath);
System.out.println("helper:"+ret);
return ret;
}
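// Delegates to HdfsService.renameFile (a rename is a move within HDFS); the filename argument is not used.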
public boolean movefile(String fromPath,String toPath,String filename) {
boolean ret=this.getHdfsService().renameFile(fromPath,toPath);
System.out.println("movefile:"+ret);
return ret;
}
}

View File

@ -0,0 +1,30 @@
package haflow.ui.model;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "RenameModel")
public class RenameModel {
private boolean success;
private String message;
@XmlElement(name = "success")
public boolean isSuccess() {
return success;
}
public void setSuccess(boolean success) {
this.success = success;
}
@XmlElement(name = "message")
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
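Because the rename endpoint returns RenameModel through @ResponseBody, the JAXB annotations above determine its XML form. A small standalone check, assuming the root-element name is corrected to RenameModel as above, could look like this:

// Sketch: marshal a RenameModel with plain JAXB to inspect the XML it produces.
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import haflow.ui.model.RenameModel;

public class RenameModelXmlDemo {
    public static void main(String[] args) throws Exception {
        RenameModel model = new RenameModel();
        model.setSuccess(true);
        model.setMessage("Rename succeeded");
        Marshaller m = JAXBContext.newInstance(RenameModel.class).createMarshaller();
        m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        // Writes the XML form of the model to stdout.
        m.marshal(model, System.out);
    }
}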

View File

@ -21,4 +21,5 @@
class="org.springframework.web.multipart.commons.CommonsMultipartResolver">
<property name="maxUploadSize" value="10000000" />
</bean>
</beans>

View File

@ -0,0 +1,15 @@
<%@ page language="java" contentType="text/html; charset=utf-8"
pageEncoding="utf-8" isELIgnored ="false"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>hive</title>
</head>
<body>
<iframe src="http://localhost:8011/javaHiveAdimin/" height="570px" width="1080px" frameborder="0" align="middle" ></iframe>
</body>
</html>

View File

@ -41,5 +41,5 @@
<servlet-name>dispatcherServlet</servlet-name>
<url-pattern>/</url-pattern>
</servlet-mapping>
</web-app>
</web-app>

View File

@ -1183,7 +1183,7 @@ HAFlow.Main.prototype.initFlowMenu = function() {
userRealTextBox.set("value", data.realname);
userEmailTextBox.set("value", data.email);
if(data.realname==null) tmp="blank";
else tmp=data.realname
else tmp=data.realname;
dojo.byId("user_real_text_box").innerHTML=tmp;
dojo.byId("user_email_text_box").innerHTML=data.email;
dojo.byId("user_space_text_box").innerHTML=data.space;
@ -1429,7 +1429,6 @@ HAFlow.Main.prototype.initHdfsFileListTree = function() {
+ "\""
+ path
+ "\">"
+ " <button type=\"button\" id=\"upload_btn\">submit</button></form><div id=\"debug\"><div>",
style : "width:400px"
});
@ -1471,20 +1470,7 @@ HAFlow.Main.prototype.initHdfsFileListTree = function() {
}
dialog.destroy();
});
}
else
HAFlow.showDialog("Upload", "It's a file.Can't upload to it.");
}// submit failed
});
} else {
HAFlow.showDialog("Upload",
"File exits.");
}
dialog.destroy();
});
} else
}else
HAFlow.showDialog("Upload",
"It's a file.Can't upload to it.");
});
@ -1616,104 +1602,99 @@ HAFlow.Main.prototype.initHdfsFileListTree = function() {
});
dojo
.connect(
this.menu.treeMenu.CreateMenuItem,
"onClick",
function() {
var tn = dijit.byNode(this.getParent().currentTarget);
var path = tn.item.path;
var isDirectory = tn.item.isDirectory;
if (isDirectory == true) {
HAFlow
.showDialog(
"create new directory",
"<html><body><form id=\"hdfsfilepath\" method=\"post\">"
+ "new name:<input type=\"text\" id=\"directoryname\" name=\"directoryname\"> </input>"
+ " <button type=\"button\" id=\"create_btn\">submit</button></form></body></html>");
dojo
.connect(
dojo.byId("create_btn"),
"onclick",
function() {
var directoryname = document
.getElementById("directoryname").value;
var result = _currentInstance.hdfsFileListStore
.query({
path : path
+ "/"
+ directoryname
});
if (result.total == 0) {
$
.ajax({
url : _currentInstance.basePath
+ "hdfs/createdirectory?remotepath="
+ path
+ "&directoryname="
+ dojo
.byId("directoryname").value,
type : "GET",
dataType : "json",
contentType : "application/json",
data : JSON
.stringify({}),
success : function(
data,
status) {
if (data.success == true) {
HAFlow
.showDialog(
"Create HdfsFile Directory",
"HdfsFile Directory created.");
_currentInstance.hdfsFileListStore
.put({
id : path
+ "/"
+ data.directoryname,
name : data.directoryname,
isDirectory : true,
path : path
+ "/"
+ data.directoryname,
parentPath : path,
});
.connect(
this.menu.treeMenu.CreateMenuItem,
"onClick",
function() {
var tn = dijit.byNode(this.getParent().currentTarget);
var path = tn.item.path;
var isDirectory = tn.item.isDirectory;
if (isDirectory == true) {
HAFlow
.showDialog(
"create new directory",
"<html><body><form id=\"hdfsfilepath\" method=\"post\">"
+ "new name:<input type=\"text\" id=\"directoryname\" name=\"directoryname\"> </input>"
+ " <button type=\"button\" id=\"create_btn\">submit</button></form></body></html>");
dojo
.connect(
dojo.byId("create_btn"),
"onclick",
function() {
var directoryname = document
.getElementById("directoryname").value;
var result = _currentInstance.hdfsFileListStore
.query({
path : path
+ "/"
+ directoryname
});
if (result.total == 0) {
$
.ajax({
url : _currentInstance.basePath
+ "hdfs/createdirectory?remotepath="
+ path
+ "&directoryname="
+ dojo
.byId("directoryname").value,
type : "GET",
dataType : "json",
contentType : "application/json",
data : JSON
.stringify({}),
success : function(
data,
status) {
if (data.success == true) {
HAFlow
.showDialog(
"Create HdfsFile Directory",
"HdfsFile Directory created.");
_currentInstance.hdfsFileListStore
.put({
id : path
+ "/"
+ data.directoryname,
name : data.directoryname,
isDirectory : true,
path : path
+ "/"
+ data.directoryname,
parentPath : path,
});
} else
HAFlow
.showDialog(
"Create HdfsFile Directory",
"HdfsFile Directory can't be created.");
},
error : function(
request,
status,
error) {
HAFlow
.showDialog(
"Error",
"An error occurred while removing HdfsFile Directory: "
+ error);
}
});
} else {
HAFlow
.showDialog(
"Create HdfsFile Directory",
"HdfsFile Directory exits.");
}
} else
HAFlow
.showDialog(
"Create HdfsFile Directory",
"HdfsFile Directory can't be created.");
},
error : function(
request,
status,
error) {
HAFlow
.showDialog(
"Error",
"An error occurred while removing HdfsFile Directory: "
+ error);
}
});
} else {
HAFlow
.showDialog(
"Create HdfsFile Directory",
"HdfsFile Directory exits.");
}
});
} else {
HAFlow
.showDialog("Create HdfsFile Directory",
"It's a file.HdfsFile Directory can't be created in it.");
}
});
else{
HAFlow.showDialog("Create HdfsFile Directory", "HdfsFile Directory exits.");
}
});
}
else
{
HAFlow.showDialog("Create HdfsFile Directory", "It's a file.HdfsFile Directory can't be created in it.");
}
});
dojo.connect(
this.menu.treeMenu.DownloadMenuItem,
@ -2322,10 +2303,10 @@ HAFlow.Main.prototype.onNodeClicked = function(instance, flowId, nodeId) {
+ module.configurations[i].key;
var divId = textBoxId + "_container";
var hdfspathButtonId = textBoxId + "_hdfspathButton";
if (dijit.byId(textBoxId) != null) {
dijit.registry.remove(textBoxId);
}
if (module.configurations[i].type == "BOOLEAN") {
if (dijit.byId(textBoxId) != null) {
dijit.registry.remove(textBoxId);
}
var configtype_true = new dijit.form.CheckBox({
id : textBoxId,
checked : (instance.getConfigurationValue(instance, flowId,
@ -2335,19 +2316,26 @@ HAFlow.Main.prototype.onNodeClicked = function(instance, flowId, nodeId) {
configtype_true.placeAt(dojo.byId(divId));
configtype_true.startup();
} else {
if (dijit.byId(textBoxId) != null) {
dijit.registry.remove(textBoxId);
}
var configurationTextBox = new dijit.form.TextBox({
id : textBoxId + "_textbox",
id : textBoxId,
value : instance.getConfigurationValue(instance, flowId,
nodeId, module.configurations[i].key),
style : "width:600px;"
});
configurationTextBox.placeAt(dojo.byId(divId));
configurationTextBox.startup();
if (dijit.byId(textBoxId+"_hdfspath") != null) {
dijit.registry.remove(textBoxId+"_hdfspath");
}
var a="_hdfspath";
var hdfspathButton = new dijit.form.Button({
id : textBoxId,
id : textBoxId+a,
label : "Hdfs Path",
onClick : function() {
dijit.byId(this.id + "_textbox").set("value", hdfspath);
dijit.byId(this.id.replace(a,"")).set("value", hdfspath);
}
});
hdfspathButton.placeAt(dojo.byId(hdfspathButtonId));