Add jars that need to be installed into Maven

zhaowei8188127 2013-06-05 21:39:09 +08:00
parent eea3593eb5
commit e42da2e163
14 changed files with 393 additions and 98 deletions

Binary file not shown.


@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.otcaix.haflow</groupId>
<artifactId>java7-module</artifactId>
<packaging>jar</packaging>
<name>java7</name>
<description>java7 module</description>
<url>http://www.baidu.com</url>
<version>1.0</version>
</project>

Binary file not shown.


@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.yahoo.oozie</groupId>
<artifactId>oozie-client</artifactId>
<packaging>jar</packaging>
<name>oozie</name>
<description>oozie client 3.3.2</description>
<url>http://www.baidu.com</url>
<version>3.3.2</version>
</project>
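
These two POMs accompany jar binaries that are not published to any public repository, so they have to be installed into the local Maven repository by hand. A plausible pair of commands (the jar file names are assumptions; point -Dfile at the actual binaries checked in above):

mvn install:install-file -Dfile=java7-module-1.0.jar -DgroupId=com.otcaix.haflow -DartifactId=java7-module -Dversion=1.0 -Dpackaging=jar

mvn install:install-file -Dfile=oozie-client-3.3.2.jar -DgroupId=com.yahoo.oozie -DartifactId=oozie-client -Dversion=3.3.2 -Dpackaging=jar

Adding -DpomFile=<path-to-pom> would install the corresponding POM shown here alongside each jar.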

Binary file not shown.


@@ -0,0 +1,33 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>hadoop.tmp.dir</name>
<value>/opt/hadooptmp</value>
<description>A base for other temporary directories.</description>
</property>
<property>
<name>hadoop.log.dir</name>
<value>/opt/hadooplog</value>
</property>
<property>
<name>fs.default.name</name>
<value>hdfs://m150:9000</value>
</property>
<!-- OOZIE -->
<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>
</configuration>


@@ -0,0 +1,122 @@
package haflow.hdfs.client;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Reads the first 15 lines of a file on HDFS
 *
 * @author ZhaoWei
 *
 */
public class HdfsHelper {
public static final String filePath = "hdfs://m150:9000/ztest/split/part-r-00000";
public static final String newFile = "hdfs://m150:9000/ztest/test/part-r-00000";
private FileSystem hdfs;
public static void main(String[] args) throws IOException {
Configuration conf = new Configuration();
HdfsHelper reader = new HdfsHelper(conf);
reader.ReadFile(filePath);
}
public HdfsHelper(Configuration conf){
try {
hdfs = FileSystem.get(conf);
} catch (IOException e) {
e.printStackTrace();
}
}
public void ReadFile(String filePath) {
try {
FSDataInputStream stream = hdfs.open(new Path(filePath));
BufferedReader br = new BufferedReader(new InputStreamReader(stream,
"UTF-8"));
String line = br.readLine();
for (int i = 0; i < 15 && line != null; i++) {
System.out.println(line);
line = br.readLine();
}
br.close(); // also closes the underlying HDFS stream
} catch (IOException e) {
e.printStackTrace();
}
}
public FSDataOutputStream CreateFile(String FileName) {
try {
Path path = new Path(FileName);
FSDataOutputStream outputStream = hdfs.create(path);
return outputStream;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
public List<String[]> GetFileBlockHost(String FileName) {
try {
List<String[]> list = new ArrayList<String[]>();
Path path = new Path(FileName);
FileStatus fileStatus = hdfs.getFileStatus(path);
BlockLocation[] blkLocations = hdfs.getFileBlockLocations(
fileStatus, 0, fileStatus.getLen());
int blkCount = blkLocations.length;
for (int i = 0; i < blkCount; i++) {
String[] hosts = blkLocations[i].getHosts();
list.add(hosts);
}
return list;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
public void PutFile( String srcFile, String dstFile) {
try {
Path srcPath = new Path(srcFile);
Path dstPath = new Path(dstFile);
hdfs.copyFromLocalFile(srcPath, dstPath);
} catch (IOException e) {
e.printStackTrace();
}
}
public void ReadFile(Configuration conf, String FileName) {
try {
FileSystem hdfs = FileSystem.get(conf);
FSDataInputStream dis = hdfs.open(new Path(FileName));
IOUtils.copyBytes(dis, System.out, 4096, false);
dis.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public void closeHdfs(){
try {
if( hdfs != null)
hdfs.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
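
A minimal usage sketch for this helper. The class name HdfsHelperDemo and the local source path are hypothetical; the HDFS paths reuse the constants defined above:

package haflow.hdfs.client;

import java.util.List;

import org.apache.hadoop.conf.Configuration;

public class HdfsHelperDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration(); // picks up core-site.xml from the classpath
        HdfsHelper helper = new HdfsHelper(conf);
        helper.PutFile("D:/a.txt", "hdfs://m150:9000/ztest/test/a.txt"); // upload a local file
        List<String[]> blockHosts = helper.GetFileBlockHost(HdfsHelper.filePath);
        if (blockHosts != null) {
            System.out.println("blocks: " + blockHosts.size());
        }
        helper.ReadFile(HdfsHelper.filePath); // print the first 15 lines
        helper.closeHdfs();
    }
}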


@@ -0,0 +1,112 @@
package haflow.hdfs.client;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
public class HdfsStaticHelper {
public static void main(String[] args) {
try {
uploadTohdfs();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void uploadTohdfs() throws FileNotFoundException, IOException {
String localSrc = "D://a.txt";
String dst = "hdfs://m150:9000/ztest/test/a.txt";
InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
OutputStream out = fs.create(new Path(dst), new Progressable() {
public void progress() {
System.out.println(".");
}
});
IOUtils.copyBytes(in, out, 4096, true); // also closes both streams
System.out.println("File uploaded successfully");
}
/** Read a file from HDFS */
public static void readHdfs() throws FileNotFoundException, IOException {
String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FSDataInputStream hdfsInStream = fs.open(new Path(dst));
OutputStream out = new FileOutputStream("d:/qq-hdfs.txt");
byte[] ioBuffer = new byte[1024];
int readLen = hdfsInStream.read(ioBuffer);
while (-1 != readLen) {
out.write(ioBuffer, 0, readLen);
readLen = hdfsInStream.read(ioBuffer);
}
System.out.println("读文件成功");
out.close();
hdfsInStream.close();
fs.close();
}
/**
 * Append content to the end of a file on HDFS. Note that appends must be
 * enabled in hdfs-site.xml:
 * <property><name>dfs.support.append</name><value>true</value></property>
 */
public static void appendToHdfs() throws FileNotFoundException, IOException {
String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FSDataOutputStream out = fs.append(new Path(dst));
byte[] bytes = "zhangzk add by hdfs java api".getBytes();
out.write(bytes, 0, bytes.length); // write the whole buffer once
out.close();
fs.close();
}
/** Delete a file from HDFS */
public static void deleteFromHdfs() throws FileNotFoundException,
IOException {
String dst = "hdfs://192.168.1.11:9000/usr/yujing";
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
fs.deleteOnExit(new Path(dst)); // deleteOnExit defers deletion until fs.close() below
fs.close();
}
/** List the files and directories under an HDFS path */
public static void getDirectoryFromHdfs() throws FileNotFoundException,
IOException {
String dst = "hdfs://192.168.1.11:9000/usr/yujing";
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FileStatus fileList[] = fs.listStatus(new Path(dst));
int size = fileList.length;
for (int i = 0; i < size; i++) {
System.out.println("文件名name:" + fileList[i].getPath().getName()
+ "文件大小/t/tsize:" + fileList[i].getLen());
}
fs.close();
}
}
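
A sketch of the hdfs-site.xml entry that the appendToHdfs javadoc refers to, needed on the Hadoop versions of this era before appends work:

<property>
<name>dfs.support.append</name>
<value>true</value>
</property>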


@@ -0,0 +1,14 @@
package haflow.hdfs.client;
import haflow.module.general.JavaModule;
import java.io.File;
public class TestClassPath {
public static void main(String[] args) {
// locate the jar (or classes directory) that JavaModule was loaded from
String path = JavaModule.class.getProtectionDomain().getCodeSource().getLocation().getFile();
File jarFile = new File(path);
System.out.println(jarFile.getAbsolutePath());
}
}


@@ -1,29 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package haflow.module.general;
public class DemoJavaMain {
public static void main(String[] args) {
System.out.println("Demo Java Main");
System.out.println("# Arguments: " + args.length);
for (int i = 0; i < args.length; i++) {
System.out.println("Argument[" + i + "]: " + args[i]);
}
}
}
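
When run directly (a hypothetical invocation; under Oozie the single <arg> value arrives as one argument instead), DemoJavaMain simply echoes its arguments:

$ java haflow.module.general.DemoJavaMain -eee 1
Demo Java Main
# Arguments: 2
Argument[0]: -eee
Argument[1]: 1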


@@ -1,69 +0,0 @@
package haflow.module.general;
import haflow.module.Module;
import haflow.module.ModuleConfiguration;
import haflow.module.ModuleMetadata;
import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
@Module(id = "5da600a8-aa63-968a-ca46-9085e0e0bd2e", name = "Java", category = "General")
@ModuleConfiguration(configurationKeys = { "eee" }, configurationDisplayNames = { "fff" })
public class JavaModule implements ModuleMetadata {
public Document generate(Map<String, String> configurations) {
String name = configurations.get("name");
String eee = configurations.get("eee");
String ok = configurations.get("ok");
Class<?> moduleClass = this.getClass();
assert (moduleClass.isAnnotationPresent(haflow.module.Module.class));
// String moduleName = moduleClass.getAnnotation(haflow.module.Module.class).name();
String actionXML =
"<action name=\"" + name + "\">" + "\n" +
"<java>" + "\n" +
"<job-tracker>${jobTracker}</job-tracker>" + "\n" +
"<name-node>${nameNode}</name-node>" + "\n" +
"<configuration>" + "\n" +
"<property>" + "\n" +
"<name>mapred.job.queue.name</name>" + "\n" +
"<value>${queueName}</value>" + "\n" +
"</property>" + "\n" +
"</configuration>" + "\n" +
"<main-class>" + DemoJavaMain.class.getName() + "</main-class>" + "\n" +//TODO
"<arg>" + "-eee " + eee + "</arg>" + "\n" +
"</java>" + "\n" +
"<ok to=\"" + ok + "\"/>" + "\n" +
"<error to=\"fail\"/>" + "\n" +
"</action>";
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
try {
DocumentBuilder db = dbf.newDocumentBuilder();
StringReader sr = new StringReader(actionXML);
InputSource is = new InputSource(sr);
Document doc = db.parse(is);
return doc;
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (ParserConfigurationException e) {
e.printStackTrace();
}
return null;
}
}
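
For reference, with hypothetical configuration values name=java-demo, eee=1 and ok=end, the generate method above emits an Oozie action of this shape:

<action name="java-demo">
<java>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
</configuration>
<main-class>haflow.module.general.DemoJavaMain</main-class>
<arg>-eee 1</arg>
</java>
<ok to="end"/>
<error to="fail"/>
</action>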


@@ -0,0 +1,43 @@
package haflow.service;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import org.springframework.stereotype.Component;
@Component
public class FlowDeployService {
public long copyJarFile(File srcFile, File dstPath, String dstName){
File dst = new File(dstPath, dstName);
if(!srcFile.exists()){
System.out.println(srcFile.getAbsolutePath() + " does not exist!");
}else if( !dstPath.exists()){
System.out.println(dstPath.getAbsolutePath() + " does not exist!");
}else if(dst.exists()){
System.out.println(dst.getAbsolutePath() + " already exists!");
}else{
//start copy file
try {
@SuppressWarnings("resource")
FileChannel ifc = new FileInputStream(srcFile).getChannel();
@SuppressWarnings("resource")
FileChannel ofc = new FileOutputStream(dst).getChannel();
long inSize = ifc.size();
ifc.transferTo(0, inSize, ofc);
ifc.close();
ofc.close();
return inSize;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return 0;
}
}
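
A usage sketch with hypothetical paths; note that copyJarFile reports an error and returns 0 unless the destination directory already exists:

package haflow.service;

import java.io.File;

public class FlowDeployServiceDemo {
    public static void main(String[] args) {
        FlowDeployService service = new FlowDeployService();
        File jar = new File("D:/haflow/modules/java7-module.jar"); // hypothetical jar
        File dstPath = new File("D:/haflow/flows/demo-flow");
        dstPath.mkdirs(); // create the destination directory first
        long copied = service.copyJarFile(jar, dstPath, jar.getName());
        System.out.println("copied " + copied + " bytes");
    }
}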


@@ -7,12 +7,14 @@ import haflow.flow.entity.Topological;
import haflow.module.ModuleMetadata;
import haflow.module.basic.EndModule;
import haflow.module.basic.StartModule;
import haflow.module.general.JavaModule;
import haflow.profile.NodeConfigurationProfile;
import haflow.ui.model.RunFlowResultModel;
import haflow.utility.ModuleLoader;
import haflow.utility.SessionHelper;
import haflow.utility.XmlHelper;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -74,6 +76,17 @@ public class FlowExecuteService {
this.xmlHelper = xmlHelper;
}
private FlowDeployService flowDeployService;
public FlowDeployService getFlowDeployService() {
return flowDeployService;
}
@Autowired
public void setFlowDeployService(FlowDeployService flowDeployService) {
this.flowDeployService = flowDeployService;
}
public RunFlowResultModel runFlow(UUID flowId) {
RunFlowResultModel model = new RunFlowResultModel();
model.setFlowId(flowId);
@@ -168,6 +181,21 @@ public class FlowExecuteService {
sb.append("</workflow-app>" + "\n");
System.out.println(sb.toString());
messageBuilder.append("Generated xml : \n" + sb.toString());
//deploy workflow
String flowName = flow.getName();
for (Node node : nodes) {
Class<?> module = moduleClasses.get(node.getModuleId()
.toString());
// use the node's own module class to locate the jar it was loaded from
String path = module.getProtectionDomain().getCodeSource().getLocation().getFile();
if(path.endsWith(".jar")){
File jarFile = new File(path);
File dstPath = new File("D:/haflow/flows/" + flowName);
System.out.println(path);
this.flowDeployService.copyJarFile(jarFile, dstPath, jarFile.getName());
}
}
}
session.close();


@@ -0,0 +1,17 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>dfs.replication</name>
<value>3</value>
<description>Default is 3; should not be greater than the datanode count.</description>
</property>
<property>
<name>dfs.data.dir</name>
<value>${hadoop.tmp.dir}/dfs/data</value>
</property>
</configuration>