1.新建Maven工程
2.pom.xml中引入依赖
<repositories>
<repository>
<id>apache</id>
<url>https://repo.maven.apache.org/maven2</url>
</repository>
</repositories>
<properties>
<hadoopVersion>2.9.2</hadoopVersion>
</properties>
<dependencies>
<!-- Hadoop start -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoopVersion}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoopVersion}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoopVersion}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoopVersion}</version>
</dependency>
<!-- Hadoop -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<configuration>
<excludeTransitive>false</excludeTransitive>
<stripVersion>true</stripVersion>
<outputDirectory>./lib</outputDirectory>
</configuration>
</plugin>
</plugins>
</build>
3.测试HDFS的类
public class HDFSDemo {

    // Shared HDFS client handle; (re)created before each test by init().
    FileSystem fs = null;

    @Before
    public void init() throws IOException, URISyntaxException, InterruptedException {
        // Connect to the HDFS NameNode at 192.168.56.101:9000 as user "root".
        fs = FileSystem.get(new URI("hdfs://192.168.56.101:9000"), new Configuration(), "root");
    }

    // NOTE(review): requires `import org.junit.After;` alongside the existing
    // JUnit imports. The original never closed the FileSystem, leaking the
    // client connection after every test.
    @After
    public void cleanup() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    @Test
    public void testUpload() throws Exception {
        // try-with-resources guarantees both streams are closed even when
        // fs.create() or the copy throws; the original leaked `in` in that case.
        try (InputStream in = new FileInputStream("g://test.txt");
             OutputStream out = fs.create(new Path("/test.txt"))) {
            // Copy local file -> HDFS in 4096-byte chunks. Pass false so
            // copyBytes does not close the streams — the resources block does.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }

    @Test
    public void testMakedir() throws Exception {
        // Create directory /007 on HDFS; prints true on success.
        boolean mkdirs = fs.mkdirs(new Path("/007"));
        System.out.println(mkdirs);
    }

    @Test
    public void testDel() throws Exception {
        // Delete /007 non-recursively (second arg false); prints true on success.
        boolean del = fs.delete(new Path("/007"), false);
        System.out.println(del);
    }
}