Hadoop Lab Course - HDFS


HDFS Experiments

Operating HDFS via the Java API

Preparation

Import the HDFS JARs

1. Copy the JARs under /usr/…/share/common, /usr/…/share/common/lib, and /usr/…/share/hdfs/ into the hadoop project's lib directory.

2. In Eclipse, right-click the project and choose Build Path -> Configure Build Path -> Libraries -> Add External JARs to add them to the build path.

Writing the Code

File Upload

// Upload a local file to HDFS with copyFromLocalFile
package sugon.edu.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class putFile_HDFS {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // NameNode address of the lab cluster
        URI uri = new URI("hdfs://192.168.206.129:8020");
        // connect to HDFS as user "ttyong"
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        Path src = new Path("/home/ttyong/Public/java/wordCount.txt"); // local source file
        Path dst = new Path("/mydir/"); // target HDFS directory; the file keeps its name
        fs.copyFromLocalFile(src, dst);
        fs.close();
        System.out.println("uploaded successfully");
    }
}
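
Once the program has run, the result can be checked on the cluster with hdfs dfs -ls /mydir.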
// Stream-copy upload: pipe a local InputStream into an HDFS output stream
package sugon.edu.hadoop.hdfs;

import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class putFile_HDFS_byLiuKaoBei {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        // input stream over the local source file
        InputStream is = new FileInputStream("/home/ttyong/Public/java/salaryTotal.txt");
        // output stream to the new HDFS file
        OutputStream os = fs.create(new Path("/mydir/salaryTotal.txt"));
        // copy with a 1 KB buffer
        IOUtils.copyBytes(is, os, 1024);
        is.close();
        os.close();
        fs.close();
        System.out.println("uploaded successfully");
    }
}
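
Note that IOUtils also offers a four-argument overload, IOUtils.copyBytes(is, os, 1024, true), whose final flag closes both streams after the copy, so the explicit close() calls could be dropped.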

File Download

// Download an HDFS file to the local file system with copyToLocalFile
package sugon.edu.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class getFile_HDFS {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        Path src = new Path("/mydir/salaryTotal.txt"); // HDFS source
        Path dst = new Path("/home/ttyong/Public/java/test4.txt"); // local target
        // redundant here: FileSystem.get above already supplies the user "ttyong"
        System.setProperty("HADOOP_USER_NAME", "root");
        // on Linux; on Windows, copyToLocalFile(false, src, dst, true) avoids needing winutils
        fs.copyToLocalFile(src, dst);
        fs.close();
        System.out.println("downloaded successfully");
    }
}
// Stream-copy download: pipe an HDFS input stream into a local OutputStream
package sugon.edu.hadoop.hdfs;

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class getFile_HDFS_ByLiukaobei {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        // input stream from the HDFS file
        InputStream is = fs.open(new Path("/mydir/salaryTotal.txt"));
        // output stream to the local target file
        OutputStream os = new FileOutputStream("/home/ttyong/Public/java/test5.txt");
        IOUtils.copyBytes(is, os, 1024);
        is.close();
        os.close();
        fs.close();
        System.out.println("downloaded successfully");
    }
}
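
One practical difference from copyToLocalFile: the stream copy writes the bytes directly, while copyToLocalFile goes through Hadoop's checksummed local file system and leaves a .crc sidecar file next to the download (unless the raw local file system is requested).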

File Creation

// Create a file in HDFS and write a line of text into it
package sugon.edu.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateFile_HDFS {
    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        Path path = new Path("/mydir/test2.txt");
        FSDataOutputStream os = fs.create(path); // creates (or overwrites) the file
        os.writeBytes("hello HDFS"); // writes one byte per character; fine for ASCII text
        os.close();
        fs.close();
    }
}
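
To confirm the write, the file can be read back through fs.open; below is a minimal sketch reusing the same cluster address and path (the class name ReadFile_HDFS is illustrative, not part of the original lab):

// Illustrative sketch, not part of the original lab: read an HDFS file to stdout
package sugon.edu.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class ReadFile_HDFS {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        // open the file just created and print its contents
        FSDataInputStream is = fs.open(new Path("/mydir/test2.txt"));
        IOUtils.copyBytes(is, System.out, 1024);
        is.close();
        fs.close();
    }
}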

File Deletion

// Delete a file from HDFS
package sugon.edu.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteFile_HDFS {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        Path path = new Path("/mydir/test2.txt");
        // delete(Path) is deprecated; pass false to refuse recursive deletion
        fs.delete(path, false);
        fs.close();
    }
}
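
fs.delete returns false rather than throwing when the path is missing; a defensive variant (a sketch; DeleteDir_HDFS is an illustrative name) checks existence first and shows the recursive flag needed for non-empty directories:

// Illustrative sketch, not part of the original lab: guarded recursive delete
package sugon.edu.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteDir_HDFS {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.206.129:8020");
        FileSystem fs = FileSystem.get(uri, conf, "ttyong");
        Path path = new Path("/mydir");
        if (fs.exists(path)) {
            // true: delete the directory and everything under it
            fs.delete(path, true);
            System.out.println("deleted " + path);
        } else {
            System.out.println(path + " does not exist");
        }
        fs.close();
    }
}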

Accessing HDFS from the Shell

Administration Commands
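
A few commonly used administration commands (a non-exhaustive sketch; run them on a cluster node):

hdfs dfsadmin -report        # DataNode status and capacity summary
hdfs dfsadmin -safemode get  # query safe mode (also: enter / leave / wait)
hdfs fsck /                  # check the health of the file system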

Operation Commands
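
A few everyday file-operation commands, mirroring the Java examples above (file names are illustrative):

hdfs dfs -ls /                           # list a directory
hdfs dfs -mkdir /mydir                   # create a directory
hdfs dfs -put wordCount.txt /mydir       # upload a local file
hdfs dfs -get /mydir/salaryTotal.txt .   # download to the current directory
hdfs dfs -cat /mydir/test2.txt           # print a file's contents
hdfs dfs -rm /mydir/test2.txt            # delete a file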


Post title: Hadoop Lab Course - HDFS

Author: TTYONG

Published: April 21, 2020, 10:04

Last updated: June 4, 2023, 15:06

Original link: http://tianyong.fun/hadoop%E5%A4%A7%E6%95%B0%E6%8D%AE%E6%8A%80%E6%9C%AF%E4%B8%8E%E5%BA%94%E7%94%A8-hdfs(%E5%AE%9E%E9%AA%8C%E8%AF%BE).html

License: Please keep the original link and author attribution when reposting.
