I. Setting Up the Programming Environment
Development uses IDEA 2018 and Maven.
(1) Start the cluster
On Windows, start the VMware virtual machines hadoop01, hadoop02, and hadoop03.
Log in to the hadoop01 VM and start the cluster by running:
start-dfs.sh
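
To confirm the HDFS daemons actually started (an optional sanity check; this assumes the NameNode runs on hadoop01, as the rest of this walkthrough does), list the Java processes and ask for a cluster report:

jps
hdfs dfsadmin -report

jps on hadoop01 should show a NameNode process, and the report should list the live DataNodes.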
(2) Check the development-environment network
In a Windows command prompt (cmd), ping the hadoop01 IP [192.168.150.128], then telnet to the hadoop01 IP [192.168.150.128] on the HDFS port [9000].
Note: Windows does not enable the Telnet client by default; you need to turn it on first in the Windows feature settings.
ping 192.168.150.128
telnet 192.168.150.128 9000
If the IP cannot be pinged, go to the computer's network settings, configure the VMnet01 adapter, and assign an IP to the Windows system (see figure). If telnet cannot reach the port, the hadoop01 VM's firewall is blocking it; either disable the firewall or open that specific port, for example as shown below.
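
For example, if the VMs run CentOS 7 with firewalld (an assumption about the guest OS; the commands differ on other distributions), stop the firewall outright:

systemctl stop firewalld

or keep it running and open only the HDFS port:

firewall-cmd --permanent --add-port=9000/tcp
firewall-cmd --reload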
II. Project Programming

1. pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.mk</groupId>
  <artifactId>hadoop-test</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>hadoop-test</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.7</maven.compiler.source>
    <maven.compiler.target>1.7</maven.compiler.target>
    <hadoop.version>2.6.0</hadoop.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
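
With this POM in place, the project builds with a standard Maven invocation from the project root (nothing project-specific assumed here):

mvn clean compile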
2. App.java

package com.mk;

import com.mk.hdfs.DirectoryOp;
import com.mk.hdfs.FileOp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.net.URI;

public class App {

    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.150.128:9000/";
        Configuration conf = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(uri), conf);

        DirectoryOp directoryOp = new DirectoryOp(conf, fileSystem);
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home
        //directoryOp.create("/mytest");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home
        //  hdfs://192.168.150.128:9000/mytest
        //directoryOp.rename("/mytest", "/my");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home
        //  hdfs://192.168.150.128:9000/my
        //directoryOp.delete("/my");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home

        FileOp fileOp = new FileOp(conf, fileSystem);
        //fileOp.create("/a.txt");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/a.txt
        //  hdfs://192.168.150.128:9000/home
        //fileOp.write("/a.txt", "你好泰山");
        //fileOp.read("/a.txt");          // 你好泰山
        //fileOp.readTextLine("/a.txt");  // 你好泰山
        //fileOp.rename("/a.txt", "b.txt");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/b.txt
        //  hdfs://192.168.150.128:9000/home
        //fileOp.delete("/b.txt");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home
        //fileOp.localToHdfs("pom.xml", "/pom.xml");
        //directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home
        //  hdfs://192.168.150.128:9000/pom.xml
        fileOp.hdfsToLocal("/pom.xml", "/pom2.xml");
        directoryOp.list("/");
        //  hdfs://192.168.150.128:9000/home
        //  hdfs://192.168.150.128:9000/pom.xml
    }
}
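
One detail App skips: the FileSystem handle is never closed. FileSystem implements Closeable, so the tail of main could release it explicitly; a minimal sketch of that variant (not in the original code):

        try {
            fileOp.hdfsToLocal("/pom.xml", "/pom2.xml");
            directoryOp.list("/");
        } finally {
            fileSystem.close(); // release the client's connection resources
        }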
3. FileOp.java

package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;

public class FileOp {

    private Configuration conf;
    private FileSystem fs;

    public FileOp(Configuration conf, FileSystem fs) {
        this.conf = conf;
        this.fs = fs;
    }

    // Create an empty file, creating parent directories as needed
    public void create(String file) throws Exception {
        Path path = new Path(file);
        Path parent = path.getParent();
        fs.mkdirs(parent);
        fs.create(path).close();
    }

    public void delete(String file) throws Exception {
        Path path = new Path(file);
        fs.delete(path, true); // true = recursive
    }

    // Rename within the same parent directory
    public void rename(String file, String name) throws Exception {
        Path path = new Path(file);
        Path parent = path.getParent();
        fs.rename(path, new Path(parent, name));
    }

    // Read the whole file into memory and print it; available() is only
    // reliable for small files
    public void read(String file) throws Exception {
        Path path = new Path(file);
        FSDataInputStream inputStream = fs.open(path);
        byte[] data = new byte[inputStream.available()];
        IOUtils.readFully(inputStream, data, 0, data.length);
        IOUtils.closeStream(inputStream);
        System.out.println(new String(data, "utf-8"));
    }

    // Read and print the file line by line
    public void readTextLine(String file) throws Exception {
        Path path = new Path(file);
        FSDataInputStream inputStream = fs.open(path);
        Text line = new Text();
        LineReader liReader = new LineReader(inputStream);
        while (liReader.readLine(line) > 0) {
            System.out.println(line);
        }
        inputStream.close();
    }

    public void write(String file, String text) throws Exception {
        Path path = new Path(file);
        FSDataOutputStream outputStream = fs.create(path);
        outputStream.write(text.getBytes("utf-8"));
        outputStream.flush();
        IOUtils.closeStream(outputStream);
    }

    public void append(String file, String text) throws Exception {
        Path path = new Path(file);
        FSDataOutputStream outputStream = fs.append(path);
        outputStream.write(text.getBytes("utf-8"));
        outputStream.flush();
        IOUtils.closeStream(outputStream);
    }

    public void localToHdfs(String localFile, String hdfsFile) throws Exception {
        Path localPath = new Path(localFile);
        Path hdfsPath = new Path(hdfsFile);
        fs.copyFromLocalFile(false, true, localPath, hdfsPath); // delSrc=false, overwrite=true
    }

    public void hdfsToLocal(String hdfsFile, String localFile) throws Exception {
        Path localPath = new Path(localFile);
        Path hdfsPath = new Path(hdfsFile);
        fs.copyToLocalFile(false, hdfsPath, localPath, true); // useRawLocalFileSystem=true
    }
}
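
Since the POM already pulls in JUnit, the round trip can also be exercised as a test. A minimal sketch, assuming the cluster from section I is reachable at 192.168.150.128:9000 (the class FileOpTest is a hypothetical addition, not part of the original project):

package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Assert;
import org.junit.Test;

import java.net.URI;

public class FileOpTest {

    @Test
    public void writeDeleteRoundTrip() throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.150.128:9000/"), conf);
        FileOp fileOp = new FileOp(conf, fs);

        fileOp.write("/junit-test.txt", "hello");  // create and write the file
        Assert.assertTrue(fs.exists(new Path("/junit-test.txt")));

        fileOp.delete("/junit-test.txt");          // clean up
        Assert.assertFalse(fs.exists(new Path("/junit-test.txt")));
        fs.close();
    }
}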
4. DirectoryOp.java

package com.mk.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

public class DirectoryOp {

    private Configuration conf;
    private FileSystem fs;

    public DirectoryOp(Configuration conf, FileSystem fs) {
        this.conf = conf;
        this.fs = fs;
    }

    public void create(String dir) throws Exception {
        Path path = new Path(dir);
        fs.mkdirs(path);
    }

    public void delete(String dir) throws Exception {
        Path path = new Path(dir);
        fs.delete(path, true);
    }

    public void rename(String dir, String name) throws Exception {
        Path path = new Path(dir);
        Path parent = path.getParent();
        fs.rename(path, new Path(parent, name));
    }

    public void list(Path path) throws Exception {
        FileStatus[] list = fs.listStatus(path);
        for (FileStatus status : list) {
            System.out.println(status.getPath());
        }
    }

    public void list(String p) throws Exception {
        FileStatus[] list = fs.listStatus(new Path(p));
        for (FileStatus status : list) {
            System.out.println(status.getPath());
        }
    }
}
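
DirectoryOp.list only prints the immediate children of a path, which is all listStatus returns. If a recursive listing is wanted, FileSystem also provides listFiles(path, true); the helper below is a hypothetical addition to DirectoryOp, not part of the original class:

    public void listRecursive(String p) throws Exception {
        // listFiles(path, true) walks the tree; it reports files only, not directories
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(p), true);
        while (it.hasNext()) {
            System.out.println(it.next().getPath());
        }
    }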