Conflicts:
	src/main/java/haflow/module/util/ModuleUtil.java
commit a9afe150c4 by dawncx, 2013-11-06 16:55:22 +08:00
20 changed files with 3307 additions and 3261 deletions

pom.xml (224 changed lines: every line is removed and re-added unchanged, so this is evidently a whitespace or line-ending-only change; the file is shown once below)

@@ -1,112 +1,112 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>cn.ac.iscas</groupId>
<artifactId>haflow</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>war</packaging>
<name>haflow</name>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>3.2.3.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>3.2.3.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>3.2.3.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>3.2.3.RELEASE</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>1.9.12</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.25</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>4.3.0.Beta3</version>
</dependency>
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Draft-16</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>1.1.2</version>
</dependency>
<dependency>
<groupId>com.yahoo.oozie</groupId>
<artifactId>oozie-client</artifactId>
<version>3.3.2</version>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.3</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>0.9.0</version>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>3.2.3.RELEASE</version>
</dependency>
</dependencies>
<profiles></profiles>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<version>9.0.3.v20130506</version>
</plugin>
</plugins>
<finalName>haflow</finalName>
</build>
</project>

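Review note: with <skip>true</skip> on maven-surefire-plugin, mvn package assembles haflow.war without running the test suite (the ModuleUtilTest change at the bottom of this commit only shows up when tests are run explicitly), and the jetty-maven-plugin presumably backs local development via mvn jetty:run.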

@@ -38,12 +38,12 @@ public class HiveModuleGenerator extends OozieXmlGenerator {
String ok = outputs.get("ok").getName();
String error = outputs.get("error").getName();
- String main_class = "haflow.module.zrace.HiveJdbcClient";
+ String main_class = "haflow.module.hive.HiveJdbcClient";
String uri = configurations.get("oozie.hive.connection.url");
String xml = "<action name=\"" + name + "\">" +
- "<java xmlns=\"uri:oozie:hive-action:0.2\">" +
+ "<java>" +
"<job-tracker>" + job_tracker + "</job-tracker>" + "\n"
+ "<name-node>" + name_node + "</name-node>" + "\n"
+ "<configuration>" + "\n" + "<property>" + "\n"

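Review note: dropping the uri:oozie:hive-action:0.2 namespace turns the generated element into a plain <java> action, so Oozie launches the relocated haflow.module.hive.HiveJdbcClient through its main() rather than through the Hive action executor. Judging from the client shown further down, the action needs at least the JDBC URI and the SQL string as arguments; a sketch with values taken from this commit:

// Hypothetical equivalent of what the generated <java> action runs:
// args[0] = configurations.get("oozie.hive.connection.url")
// args[1] = the SQL script, which execSql splits on ';'
HiveJdbcClient.main(new String[] {
        "jdbc:hive://m150:10000/default",
        "drop table testHiveDriverTable; create table testHiveDriverTable (key int, value string)"
});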

@@ -24,7 +24,8 @@ public class OozieService {
conf.setProperty("jobTracker", "m150:9001");
conf.setProperty("queueName", "default");
conf.setProperty("examplesRoot", "examples");
- conf.setProperty("oozie.use.system.libpath", "true");
+ // conf.setProperty("oozie.use.system.libpath", "true");
+ conf.setProperty("oozie.libpath", "share/lib/hive");
String jobId = null;
try {

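Review note: swapping oozie.use.system.libpath=true for an explicit oozie.libpath of share/lib/hive points the job at the Hive jars in the Oozie sharelib, which the plain <java> action above needs on its classpath. A minimal submission sketch, assuming the stock oozie-client 3.3.2 API and the hosts that appear elsewhere in this commit (the application path is hypothetical):

import java.util.Properties;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;

public class OozieSubmitSketch {
    public static void main(String[] args) throws OozieClientException {
        OozieClient client = new OozieClient("http://133.133.2.150:11000/oozie");
        Properties conf = client.createConfiguration();
        conf.setProperty(OozieClient.APP_PATH, "hdfs://m150:9000/user/root/flows/demo"); // hypothetical path
        conf.setProperty("jobTracker", "m150:9001");
        conf.setProperty("queueName", "default");
        conf.setProperty("oozie.libpath", "share/lib/hive"); // explicit sharelib, as in this commit
        String jobId = client.run(conf); // submit and start the workflow
        System.out.println("Submitted " + jobId);
    }
}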

@@ -1,3 +1,4 @@
package haflow.module.util;
import haflow.module.AbstractModule;


@@ -1,132 +0,0 @@
package haflow.module.zrace;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
public class HiveJdbcClient {
private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
public static void execQuery(String uri, String sql, String separator, boolean printHead) throws SQLException{
try {
Class.forName(driverName);
} catch (ClassNotFoundException e) {
e.printStackTrace();
System.exit(1);
}
Connection con = DriverManager.getConnection(uri, "", "");
Statement stmt = con.createStatement();
ResultSet res = stmt.executeQuery(sql);
ResultSetMetaData resultSetMetaData = res.getMetaData();
int columnCount = resultSetMetaData.getColumnCount();
if( printHead){
for( int i = 1; i <= columnCount; i++){
System.out.print(resultSetMetaData.getColumnName(i) + separator);
}
}
while (res.next()) {
for( int i = 1; i <= columnCount; i++){
System.out.print(res.getString(i) + separator);
}
System.out.println();
}
con.close();
}
public static boolean execSql(String uri, String[] sqls) throws SQLException{
try {
Class.forName(driverName);
} catch (ClassNotFoundException e) {
e.printStackTrace();
System.exit(1);
}
Connection con = DriverManager.getConnection(uri, "", "");
Statement stmt = con.createStatement();
for( String sql : sqls){
stmt.execute(sql);
}
con.close();
return true;
}
public static void main(String[] args) {
if( args.length < 2){
System.out.println("Invalid arguments!");
System.exit(1);
}
String uri = args[0];
String sql = args[1];
try {
// execQuery(uri, sql, ",", true);
execSql(uri, sql.split(";"));
} catch (SQLException e) {
e.printStackTrace();
}
// try {
// test(null);
// } catch (SQLException e) {
// e.printStackTrace();
// }
}
public static void test(String[] args) throws SQLException {
try {
Class.forName(driverName);
} catch (ClassNotFoundException e) {
e.printStackTrace();
System.exit(1);
}
Connection con = DriverManager.getConnection(
"jdbc:hive://m150:10000/default", "", "");
Statement stmt = con.createStatement();
String tableName = "testHiveDriverTable";
stmt.executeQuery("drop table " + tableName);
ResultSet res = stmt.executeQuery("create table " + tableName
+ " (key int, value string) row format delimited fields terminated by ','");
// show tables
String sql = "show tables '" + tableName + "'";
System.out.println("Running: " + sql);
res = stmt.executeQuery(sql);
if (res.next()) {
System.out.println(res.getString(1));
}
// describe table
sql = "describe " + tableName;
System.out.println("Running: " + sql);
res = stmt.executeQuery(sql);
while (res.next()) {
System.out.println(res.getString(1) + "\t" + res.getString(2));
}
// load data into table
// NOTE: filepath has to be local to the hive server
// NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
String filepath = "/opt/zptest/a.txt";
sql = "load data local inpath '" + filepath + "' into table "
+ tableName;
System.out.println("Running: " + sql);
res = stmt.executeQuery(sql);
// select * query
sql = "select * from " + tableName;
System.out.println("Running: " + sql);
res = stmt.executeQuery(sql);
while (res.next()) {
System.out.println(String.valueOf(res.getInt(1)) + "\t"
+ res.getString(2));
}
// regular hive query
sql = "select count(1) from " + tableName;
System.out.println("Running: " + sql);
res = stmt.executeQuery(sql);
while (res.next()) {
System.out.println(res.getString(1));
}
}
}

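Review note: this zrace copy is deleted and, per the generator change above, presumably reappears as haflow.module.hive.HiveJdbcClient. While it is visible: test() pushes DDL through executeQuery(), which JDBC drivers may reject for statements that return no ResultSet; execSql()'s Statement.execute() is the safer call for mixed scripts, e.g.:

// Safer pattern for DDL/DML scripts, as execSql already does (throws SQLException):
String uri = "jdbc:hive://m150:10000/default";
HiveJdbcClient.execSql(uri, "drop table testHiveDriverTable; create table testHiveDriverTable (key int, value string)".split(";"));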

@@ -1,41 +0,0 @@
package haflow.module.zrace;
import haflow.module.AbstractHiveModule;
import haflow.module.DataType;
import haflow.module.Module;
import haflow.module.ModuleConfiguration;
import haflow.module.ModuleConfigurationType;
import haflow.module.ModuleEndpoint;
import haflow.module.ModuleType;
import java.util.Map;
@Module(id = "add600a8-aa63-8901-ca46-aaffa0e0bd2f", name = "Hive", category = "Basic", type = ModuleType.HIVE, configurations = {
@ModuleConfiguration(key = "sql", displayName = "Sql Command", pattern = "^(.*)$", type = ModuleConfigurationType.PLAIN_TEXT),
@ModuleConfiguration(key = "output_dir", displayName = "Output Directory", pattern = "^(.*)$", type = ModuleConfigurationType.PLAIN_TEXT), }, inputs = { @ModuleEndpoint(name = "from", minNumber = 1, maxNumber = 1, dataType = DataType.PlainText) }, outputs = {
@ModuleEndpoint(name = "ok", minNumber = 1, maxNumber = 1, dataType = DataType.PlainText),
@ModuleEndpoint(name = "error", minNumber = 1, maxNumber = 1, dataType = DataType.PlainText) })
public class HiveModule extends AbstractHiveModule {
@Override
public boolean validate(Map<String, String> configurations,
Map<String, String> inputs, Map<String, String> outputs) {
// TODO Auto-generated method stub
return false;
}
public static void main(String[] args) {
System.out.println("Demo Java Main");
System.out.println("# Arguments: " + args.length);
for (int i = 0; i < args.length; i++) {
System.out.println("Argument[" + i + "]: " + args[i]);
}
}
@Override
public String getSQL(Map<String, String> configurations) {
// TODO Auto-generated method stub
return configurations.get("sql");
}
}

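Review note: deleting HiveModule also removes its @Module metadata (the sql and output_dir configurations and the from/ok/error endpoints) from the zrace package; HiveModuleGenerator still resolves the ok and error endpoints by name, so a counterpart module under haflow.module.hive is presumably introduced elsewhere in this commit.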

@@ -3,10 +3,12 @@ package haflow.service;
import haflow.util.ClusterConfiguration;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
@@ -17,6 +19,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.tools.ant.filters.StringInputStream;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -124,6 +128,48 @@ public class HdfsService {
return null;
}
}
public String readCsvFile(String remotePath) {
try {
FileSystem fs = this.getFileSystem();
FSDataInputStream hdfsInStream = fs.open(new Path(remotePath));
BufferedReader d=new BufferedReader(new InputStreamReader(hdfsInStream));
String s;
String[] col=new String[12];
String[] value=new String[12];
JSONArray arr=new JSONArray();
JSONObject obj=new JSONObject();
obj.put("length",value.length);
arr.put(obj);
if ((s=d.readLine())!=null)
{
value=s.split(",");
JSONObject jobj=new JSONObject();
for(int i=0;i<value.length;i++){
col[i]=value[i];
String s1=""+i;
jobj.put(s1,col[i]);
}
arr.put(jobj);
}
int line=0;
while (((s=d.readLine())!=null)&&(line<=9)){
line++;
value=s.split(",");
JSONObject jobj=new JSONObject();
for(int j=0;j<value.length;j++){
jobj.put(col[j], value[j]);
}
arr.put(jobj);
}
d.close();
System.out.println(arr.toString());
return arr.toString();
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
public boolean appendFile(String content, String remotePath) {

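Review note: readCsvFile emits a JSON array shaped [ {"length": n}, header map, up to 10 data rows ], but it sizes col and value at a fixed 12 and records value.length before any line is read, so "length" is always 12 and a file with more than 12 columns fails with ArrayIndexOutOfBoundsException. A sketch that derives the count from the header instead (same org.json types and surrounding helpers; not the committed code):

public String readCsvPreview(String remotePath) {
    try {
        FileSystem fs = this.getFileSystem();
        BufferedReader in = new BufferedReader(
                new InputStreamReader(fs.open(new Path(remotePath))));
        JSONArray arr = new JSONArray();
        String header = in.readLine();
        if (header == null) { in.close(); return arr.toString(); }
        String[] cols = header.split(",");
        arr.put(new JSONObject().put("length", cols.length)); // real column count
        JSONObject names = new JSONObject();
        for (int i = 0; i < cols.length; i++) {
            names.put(String.valueOf(i), cols[i]); // index -> column name, as the grid JSP below expects
        }
        arr.put(names);
        String line;
        for (int n = 0; n < 10 && (line = in.readLine()) != null; n++) {
            String[] cells = line.split(",");
            JSONObject row = new JSONObject();
            for (int j = 0; j < cells.length && j < cols.length; j++) {
                row.put(cols[j], cells[j]); // column name -> cell value
            }
            arr.put(row);
        }
        in.close();
        return arr.toString();
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}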

@@ -29,6 +29,7 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
+ import org.springframework.web.servlet.ModelAndView;
//import org.springframework.web.servlet.ModelAndView;
import org.springframework.util.FileCopyUtils;
@@ -92,6 +93,7 @@ public class HdfsController {
@ResponseBody
public ResponseEntity<byte[]> download(HttpServletResponse response,@RequestParam(value = "remotepath", required = true) String remotepath,@RequestParam(value = "filename", required = true) String filename){
response.setContentType("application/x-download");
response.setHeader("content-disposition","attachment;filename="+filename);
try {
String downloadDir = "c:\\downloadFile";
File dirPath = new File(downloadDir);
@@ -118,10 +120,6 @@ public class HdfsController {
return null;
}
@RequestMapping(value = "/createdirectory", method = RequestMethod.GET)
@ResponseBody
public CreateDirectoryModel createdirectory(
@@ -146,9 +144,6 @@ public class HdfsController {
}
@RequestMapping(value = "/deletedirectory", method = RequestMethod.GET)
@ResponseBody
public RemoveHdfsFileModel deletedirectory(
@@ -216,6 +211,8 @@ public class HdfsController {
return this.getHdfsHelper().getFile(out_path, out_fileName);
}
@RequestMapping(value = "/picture", method = RequestMethod.GET)
@ResponseBody
public ResponseEntity<byte[]> getpicture(HttpServletResponse response,
@@ -226,7 +223,6 @@ public class HdfsController {
String out_path = new String(in_path.getBytes("iso-8859-1"),"UTF-8");
String in_fileName=fileName;
String out_fileName = new String(in_fileName.getBytes("iso-8859-1"),"UTF-8");
// String new_path=out_path + "/" + out_fileName;
try{
BufferedOutputStream bos = new BufferedOutputStream(response.getOutputStream());
BufferedInputStream bis = new BufferedInputStream(this.hdfsHelper.getPicture(out_path, out_fileName));
@@ -244,6 +240,21 @@ public class HdfsController {
return null;
}
@RequestMapping(value = "/cvs_file", method = RequestMethod.GET)
@ResponseBody
public ModelAndView getcvsfile(
@RequestParam(value = "path", required = true) String path) {
String in_path=path;
ModelAndView mv=new ModelAndView("cvs");
try {
String out_path = new String(in_path.getBytes("iso-8859-1"),"UTF-8");
mv.addObject("content",this.getHdfsHelper().getCsvFile(out_path).getContent());
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
return mv;
}
@RequestMapping(value = "/rename", method = RequestMethod.GET)
@ResponseBody
public RenameModel rename(

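Review note: the new mapping and view name spell csv as cvs ("/cvs_file", ModelAndView("cvs")); the grid JSP added below follows the same spelling, so the feature works, but the typo is baked into the URL. The iso-8859-1 to UTF-8 round trip in getcvsfile and getpicture assumes the servlet container decoded the query string as Latin-1 (Tomcat's historical default):

// Re-decoding a UTF-8 query parameter that the container read as Latin-1:
String fixed = new String(path.getBytes("iso-8859-1"), "UTF-8");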

@@ -30,12 +30,23 @@ public class RunHistoryController {
this.runHistoryHelper = runHistoryHelper;
}
- @RequestMapping(value = "/{flowId}", method = RequestMethod.GET)
+ @RequestMapping(value = "/get/{flowId}", method = RequestMethod.GET)
public ModelAndView get(@PathVariable UUID flowId,
HttpServletRequest request, HttpServletResponse response) {
FlowRunHistoryListModel fhlm = this.getRunHistoryHelper()
.getFlowRunHistoryList(flowId);
request.setAttribute("flowHistory", fhlm);
request.setAttribute("flowIdOfHistory", flowId);
return new ModelAndView("run-history");
}
@RequestMapping(value = "/refresh/{flowId}", method = RequestMethod.GET)
public ModelAndView refresh(@PathVariable UUID flowId,
HttpServletRequest request, HttpServletResponse response) {
FlowRunHistoryListModel fhlm = this.getRunHistoryHelper()
.getFlowRunHistoryList(flowId);
request.setAttribute("flowHistory", fhlm);
request.setAttribute("flowIdOfHistory", flowId);
return new ModelAndView("run-history");
}
}

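Review note: get and refresh are identical line for line; only the URL differs, presumably so the client can tell an initial load from a re-poll. If the duplication is unwanted, Spring accepts multiple paths on one handler (a sketch, not the committed code):

@RequestMapping(value = { "/get/{flowId}", "/refresh/{flowId}" }, method = RequestMethod.GET)
public ModelAndView get(@PathVariable UUID flowId,
        HttpServletRequest request, HttpServletResponse response) {
    FlowRunHistoryListModel fhlm = this.getRunHistoryHelper()
            .getFlowRunHistoryList(flowId);
    request.setAttribute("flowHistory", fhlm);
    request.setAttribute("flowIdOfHistory", flowId);
    return new ModelAndView("run-history");
}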

@@ -1,5 +1,7 @@
package haflow.ui.helper;
import java.sql.Timestamp;
import java.util.ArrayList;
import haflow.service.HdfsService;
@@ -35,6 +37,7 @@ public class HdfsHelper {
file.setName(stat.getPath().getName());
file.setLength(stat.getLen());
file.setDirectory(stat.isDir());
file.setTime(new Timestamp(stat.getModificationTime()).toString());
model.getFiles().add(file);
}
}
@@ -60,6 +63,17 @@ public class HdfsHelper {
return this.getHdfsService().readPicture(filePath);
}
public HdfsFileModel getCsvFile(String path) {
HdfsFileModel model = new HdfsFileModel();
String ret = this.getHdfsService().readCsvFile(path);
if (ret != null) {
model.setContent(ret);
model.setLength(ret.length());
}
model.setPath(path);
return model;
}
public Boolean uploadFile(String localpath,String remotePath) {
Boolean ret = this.getHdfsService().uploadFile(localpath,remotePath);
return ret;


@@ -8,6 +8,9 @@ public class HdfsFileListItemModel {
private String name;
private boolean isDirectory;
private long length;
private String type;
private String time;
@XmlElement
public String getName() {
@@ -35,5 +38,24 @@ public class HdfsFileListItemModel {
public void setLength(long length) {
this.length = length;
}
@XmlElement
public String getTime() {
return time;
}
public void setTime(String time) {
this.time = time;
}
@XmlElement
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}

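Review note: the new time field is filled in by HdfsHelper above via the FileStatus modification time, but nothing in the hunks shown sets the new type field, so it will serialize as null until a writer is added.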

@@ -20,6 +20,8 @@
<mapping class="haflow.dto.entity.Node" />
<mapping class="haflow.dto.entity.MainUser"/>
<mapping class="haflow.dto.entity.FlowRunHistory" />
<mapping class="haflow.dto.entity.TestType"/>
<mapping class="haflow.dto.entity.Test"/>
<mapping class="haflow.dto.profile.NodeAppearance" />


@@ -0,0 +1,75 @@
<%@ page language="java" contentType="text/html; charset=utf-8"
pageEncoding="utf-8" isELIgnored="false"%>
<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet"
href="http://dojotoolkit.org/reference-guide/1.9/_static/js/dojo/../dijit/themes/claro/claro.css">
<style type="text/css">
@import
"http://dojotoolkit.org/reference-guide/1.9/_static/js/dojo/../dojox/grid/resources/claroGrid.css"
;
/*Grid needs an explicit height by default*/
#gridDiv {
height: 20em;
}
</style>
<script>
dojoConfig = {
async : true,
parseOnLoad : false
}
</script>
<script type="text/javascript"
src='http://dojotoolkit.org/reference-guide/1.9/_static/js/dojo/dojo.js'></script>
<script>
require([ 'dojo/_base/lang', 'dojox/grid/DataGrid',
'dojo/data/ItemFileWriteStore', 'dojox/grid/cells/dijit',
'dojo/dom', 'dojo/domReady!' ],
function(lang, DataGrid,
ItemFileWriteStore, cells, dom) {
var data = {
identifier : "id",
items : []
};
var data_list=JSON.parse('${content}');
for(var i = 2 ; i < data_list.length; i++){
data.items.push(lang.mixin({ id: i+1 }, data_list[i]));
}
var store = new ItemFileWriteStore({
data : data
});
var length=eval(data_list)[0]["length"];
var layout=[];
for(i=0;i<length;i++)
{
layout.push({
'name' :eval(data_list)[1][i],
'field' : eval(data_list)[1][i],
'width' : '80px',
type : dojox.grid.cells.CheckBox,
styles : 'text-align: center;'
});
}
var grid = new DataGrid({
id : 'grid',
store : store,
structure : layout,
rowSelector : '20px'
});
grid.placeAt("gridDiv");
grid.startup();
});
</script>
</head>
<body class="claro">
<p class="info">iris</p>
<div id="gridDiv"></div>
</body>
</html>

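Review note: the grid consumes the JSON contract produced by readCsvFile: element 0 carries the column count under "length", element 1 maps stringified indexes to column names, and elements 2..n are the data rows, which is why the row loop starts at i = 2. A hypothetical payload for a two-column file (the committed readCsvFile always reports length 12; see the HdfsService note above):

[ {"length": 2},
  {"0": "key", "1": "value"},
  {"key": "1", "value": "a"},
  {"key": "2", "value": "b"} ]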

@@ -10,6 +10,6 @@
<title>ozzie</title>
</head>
<body>
- <iframe src="http://localhost:8011/javaHiveAdimin/" height="570px" width="1080px" frameborder="0" align="middle" ></iframe>
+ <iframe src="http://133.133.133.89:8011/javaHiveAdimin/" height="570px" width="1080px" frameborder="0" align="middle" ></iframe>
</body>
</html>


@@ -10,6 +10,6 @@
<title>ozzie</title>
</head>
<body>
- <iframe src="http://133.133.2.150:11000/oozie/" height="570px" width="1080px" frameborder="0" align="middle" ></iframe>
+ <iframe src="http://133.133.2.150:11000/oozie/" height="100%" width="100%" frameborder="0" align="middle" ></iframe>
</body>
</html>


@@ -1,4 +1,4 @@
- <%@page import="haflow.ui.model.*"%>
+ <%@ page import="haflow.ui.model.*" %>
<%@ page language="java" contentType="text/html; charset=ISO-8859-1"
pageEncoding="ISO-8859-1"%>
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
@@ -12,12 +12,23 @@
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>HaFlow: Flow Run Histories</title>
<link rel="stylesheet" href="<%=basePath%>/style/site.css">
<style>
table
{
border-collapse:collapse;
}
table, th, td{
border : 1px solid #C0C0C0;
padding-left : 5px;
padding-right : 5px;
}
</style>
</head>
<body>
<h1>Flow Run History</h1>
<h2>History List</h2>
<%
FlowRunHistoryListModel frhlm =
(FlowRunHistoryListModel)request.getAttribute("flowHistory");
@@ -26,7 +37,6 @@
<% if(frhlm != null && frhlm.getFlowHistorys().size() != 0){ %>
<table>
<tr>
<th>History Id</th>
<th>Oozie Job Id</th>
<th>Time</th>
<th>Commit Message</th>
@@ -36,7 +46,6 @@
FlowRunHistoryModel frhm = frhlm.getFlowHistorys().get(i);
%>
<tr>
<td><%=frhm.getId() %></td>
<td><%=frhm.getOozieJobId() %></td>
<td><%=frhm.getTimestamp() %></td>
<td><%=frhm.getCommitMessage() %></td>

File diff suppressed because one or more lines are too long


@@ -18,6 +18,7 @@ HAFlow.UI.prototype.init = function() {
this.initMainContainer();
this.initMainMenu();
this.initMainoozieContainer();
this.initOozieHiveContainer();
this.initLeadingContainer();
this.initTrailingContainer();
this.initCenterContainer();
@@ -32,6 +33,7 @@ HAFlow.UI.prototype.initId = function() {
this.mainContainerId = "main";
this.mainMenuContainerId = "mainMenu";
this.mainoozieContainerId="mainoozie";
this.oozieHiveContainerId="ooziehive";
this.bottomContainerId = "bottom";
this.leadingContainerId = "leading";
this.trailingContainerId = "trailing";
@@ -67,6 +69,15 @@ HAFlow.UI.prototype.initMainoozieContainer = function() {
this.mainoozieContainer.startup();
};
HAFlow.UI.prototype.initOozieHiveContainer= function() {
this.oozieHiveContainer = new dijit.layout.TabContainer({
id : "ooziehive",
region : "center",
splitter : "true",
});
this.oozieHiveContainer.startup();
};
HAFlow.UI.prototype.initBottomContainer = function() {
this.bottomContainer = new dijit.layout.TabContainer({
id : this.bottomContainerId,
@@ -101,12 +112,16 @@ HAFlow.UI.prototype.initCenterContainer = function() {
this.centerContainerParent = new dijit.layout.BorderContainer({
region : "center",
splitter : "true"
- });
+ },dojo.create("div", {
+ id : "centerparent"
+ }, dojo.body()));
this.centerContainer = new dijit.layout.TabContainer({
id : this.centerContainerId,
region : "center",
splitter : "true"
- });
+ }, dojo.create("div", {
+ id : this.centerContainerId
+ }, dojo.body()));
this.mainoozieContainer.addChild(this.centerContainerParent);
this.centerContainerParent.addChild(this.centerContainer);
this.centerContainerParent.startup();

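Review note: the constructors now receive a second srcNodeRef argument; dijit widgets accept (params, node), so the center containers are built over divs freshly inserted by dojo.create rather than as detached nodes. Separately, the trailing comma after splitter : "true" in initOozieHiveContainer is tolerated by modern engines but is a syntax error in old IE.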

@@ -78,3 +78,10 @@
path {
cursor: pointer;
}
tr{
/* background-color:#99bbbb; */
height:30px;
width:100px;
text-align:top;
}


@@ -37,6 +37,11 @@ public class ModuleUtilTest extends AbstractJUnit4SpringContextTests {
@Test
public void testSearchForModuleClasses() {
Map<UUID, Class<?>> map = this.getModuleUtil().searchForModuleClasses();
for(Map.Entry<UUID,Class<?>> m:map.entrySet()){
System.out.println(m.getKey());
System.out.println(m.getValue());
}
Assert.assertNotNull(map);
Assert.assertTrue(map.keySet().size() > 0);
}