大数据基础之Hadoop——Java API hdfs读写
pom文件(完整的多行版本见下方):
pom文件:
<!-- Maven dependencies for the HDFS Java API examples below:
     junit for the test methods, hadoop-common + hadoop-hdfs for
     Configuration/FileSystem/Path.
     NOTE(review): junit 4.11 and Hadoop 2.6.0 are very old releases;
     consider upgrading if the target cluster allows it. -->
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.6.0</version>
</dependency>
</dependencies>
Java API 创建文件夹:
/**
 * Creates the directory {@code /user/java} on the remote HDFS cluster.
 *
 * @throws Exception if the NameNode is unreachable or the operation fails
 */
public void testMkdir() throws Exception {
    // Client-side Hadoop configuration (defaults; nothing overridden here)
    Configuration conf = new Configuration();
    // try-with-resources: the original called fs.close() manually, which leaked
    // the FileSystem handle whenever mkdirs threw. FileSystem is Closeable,
    // so this guarantees cleanup on every path.
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.142.150:9000"), conf, "root")) {
        fs.mkdirs(new Path("/user/java"));
    }
}
Java API 写文件:
/**
 * Uploads the local file {@code d://abc1.txt} to HDFS as
 * {@code /user/java/mobile.txt}.
 *
 * @throws Exception if the local file is missing or the HDFS write fails
 */
public void testCopyFromLocalFile() throws Exception {
    // Client-side Hadoop configuration (defaults)
    Configuration conf = new Configuration();
    // try-with-resources fixes the leak in the original: fs.close() was only
    // reached when copyFromLocalFile succeeded.
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.142.150:9000"), conf, "root")) {
        // args: (local source path, HDFS destination path)
        fs.copyFromLocalFile(new Path("d://abc1.txt"), new Path("/user/java/mobile.txt"));
    }
}
Java API 读取文件:
/**
 * Downloads {@code /user/java/mobile.txt} from HDFS to the local file
 * {@code d://abc1.txt}.
 *
 * @throws Exception if the HDFS file is missing or the local write fails
 */
public void testCopyToLocalFile() throws Exception {
    // Client-side Hadoop configuration (defaults)
    Configuration conf = new Configuration();
    // try-with-resources fixes the leak in the original: fs.close() was
    // skipped whenever copyToLocalFile threw.
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.142.150:9000"), conf, "root")) {
        // args: (HDFS source path, local destination path)
        fs.copyToLocalFile(new Path("/user/java/mobile.txt"), new Path("d://abc1.txt"));
    }
}
Java API 删除文件:
/**
 * Recursively deletes {@code /user} on HDFS.
 *
 * <p><b>WARNING:</b> the recursive flag removes the entire {@code /user}
 * tree, not just {@code /user/java} — every user's home directory on this
 * cluster is wiped. Narrow the path if that is not intended.
 *
 * @throws Exception if the NameNode is unreachable or the delete fails
 */
public void testDel() throws Exception {
    // Client-side Hadoop configuration (defaults)
    Configuration conf = new Configuration();
    // try-with-resources fixes the leak in the original: fs.close() was
    // skipped whenever delete threw.
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.142.150:9000"), conf, "root")) {
        // second argument = recursive: delete the directory and all contents
        fs.delete(new Path("/user"), true);
    }
}
更多推荐
所有评论(0)