Spring Boot + Hadoop, Part 1: File Operations on HDFS

快来打我* · 2022-01-28

I. The main classes involved in HDFS operations

Configuration: encapsulates the client-side or server-side configuration.

FileSystem: represents the file system; its methods perform the actual file operations. Obtain an instance via the static FileSystem.get method, for example: FileSystem hdfs = FileSystem.get(conf);

FSDataInputStream: the HDFS input stream, obtained from FileSystem's open method.

FSDataOutputStream: the HDFS output stream, obtained from FileSystem's create method.
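
Putting the four classes together, here is a minimal sketch that writes a file and reads it back. The URI and user name are assumptions (a NameNode at hdfs://localhost:9000, user linhaiy, matching the configuration used later); adjust them to your cluster:

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsQuickStart {
    public static void main(String[] args) throws Exception {
        // Configuration holds client-side settings such as fs.defaultFS
        Configuration conf = new Configuration();
        // URI and user are assumptions -- point them at your own cluster
        FileSystem hdfs = FileSystem.get(new URI("hdfs://localhost:9000"), conf, "linhaiy");

        Path file = new Path("/tmp/quickstart.txt");

        // FSDataOutputStream: write a file (create overwrites by default)
        try (FSDataOutputStream out = hdfs.create(file)) {
            out.write("hello hdfs".getBytes(StandardCharsets.UTF_8));
        }

        // FSDataInputStream: read the file back
        try (FSDataInputStream in = hdfs.open(file);
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            System.out.println(reader.readLine());
        }
        hdfs.close();
    }
}
```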

II. Dependency configuration (pom.xml and application.properties)

pom.xml:

```xml
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.hdfs</groupId>
    <artifactId>HadoopTest</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>HadoopTest</name>
    <url>http://maven.apache.org</url>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.0.RELEASE</version>
        <relativePath />
    </parent>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>3.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>3.1.1</version>
        </dependency>
        <dependency>
            <groupId>cn.bestwu</groupId>
            <artifactId>ik-analyzers</artifactId>
            <version>5.1.0</version>
        </dependency>
        <!-- tools.jar exists only on JDK 8 and earlier; JAVA_HOME must point at a JDK -->
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
```
application.properties:

```properties
# Tomcat max worker threads
server.tomcat.max-threads=1000
# Tomcat port
server.port=8900
# session timeout in seconds (Spring Boot 2.x property name)
server.servlet.session.timeout=60
spring.application.name=hadoop
spring.servlet.multipart.max-file-size=50MB
spring.servlet.multipart.max-request-size=50MB
hdfs.path=hdfs://localhost:9000
hdfs.username=linhaiy
logging.config=classpath:logback.xml
```
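
For completeness, a minimal Spring Boot entry point that boots the application with this configuration; the class and package names here are our assumption, not part of the original post:

```java
package com.hadoop;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Minimal boot class. Component scanning starts from com.hadoop, so the
 * com.hadoop.config and com.hadoop.hdfs.* classes shown below are picked up.
 */
@SpringBootApplication
public class HadoopTestApplication {
    public static void main(String[] args) {
        SpringApplication.run(HadoopTestApplication.class, args);
    }
}
```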

III. Developing the HDFS file-operation API

HdfsConfig.java (exposes the hdfs.path setting):

```java
package com.hadoop.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

/**
 * HDFS configuration class
 * @author linhaiy
 * @date 2019.05.18
 */
@Configuration
public class HdfsConfig {

    @Value("${hdfs.path}")
    private String path;

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }
}
```
User.java (a Writable entity, used later by the object-reading endpoint):

```java
package com.hadoop.hdfs.entity;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

/**
 * User entity
 * @author linhaiy
 * @date 2019.05.18
 */
public class User implements Writable {

    private String username;
    private Integer age;
    private String address;

    public User() {
        super();
    }

    public User(String username, Integer age, String address) {
        super();
        this.username = username;
        this.age = age;
        this.address = address;
    }

    @Override
    public void write(DataOutput output) throws IOException {
        // Serialize the object. Use writeUTF so the fields can be read back
        // with readUTF in readFields (writeChars/readUTF would not round-trip).
        output.writeUTF(username);
        output.writeInt(age);
        output.writeUTF(address);
    }

    @Override
    public void readFields(DataInput input) throws IOException {
        // Deserialize the object, reading fields in the order they were written
        username = input.readUTF();
        age = input.readInt();
        address = input.readUTF();
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    @Override
    public String toString() {
        return "User [username=" + username + ", age=" + age + ", address=" + address + "]";
    }
}
```
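
As a quick sanity check of the write/readFields symmetry, this small, self-contained sketch (class name and sample values are ours) serializes a User to a byte array and reads it back:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import com.hadoop.hdfs.entity.User;

public class WritableRoundTrip {
    public static void main(String[] args) throws Exception {
        // Serialize: Writable.write targets any DataOutput
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        new User("linhaiy", 28, "Shenzhen").write(new DataOutputStream(buffer));

        // Deserialize: readFields must consume fields in the same order
        User copy = new User();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
        System.out.println(copy); // User [username=linhaiy, age=28, address=Shenzhen]
    }
}
```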
HdfsService.java (the core wrapper around the FileSystem API):

```java
package com.hadoop.hdfs.service;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.PostConstruct;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import com.hadoop.util.JsonUtil;

@Component
public class HdfsService {

    @Value("${hdfs.path}")
    private String path;

    @Value("${hdfs.username}")
    private String username;

    private static String hdfsPath;
    private static String hdfsName;
    private static final int bufferSize = 1024 * 1024 * 64;

    /**
     * Build the HDFS client configuration
     */
    private static Configuration getConfiguration() {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", hdfsPath);
        return configuration;
    }

    /**
     * Get an HDFS FileSystem object
     */
    public static FileSystem getFileSystem() throws Exception {
        // The HDFS client operates under a user identity. By default the client
        // API reads it from the JVM (-DHADOOP_USER_NAME=hadoop); it can also be
        // passed explicitly when constructing the FileSystem, as done here.
        FileSystem fileSystem = FileSystem.get(new URI(hdfsPath), getConfiguration(), hdfsName);
        return fileSystem;
    }

    /**
     * Create a directory on HDFS
     */
    public static boolean mkdir(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        if (existFile(path)) {
            return true;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        boolean isOk = fs.mkdirs(srcPath);
        fs.close();
        return isOk;
    }

    /**
     * Check whether a path exists on HDFS
     */
    public static boolean existFile(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        FileSystem fs = getFileSystem();
        Path srcPath = new Path(path);
        boolean isExists = fs.exists(srcPath);
        fs.close();
        return isExists;
    }

    /**
     * Read the status of an HDFS directory
     */
    public static List<Map<String, Object>> readPathInfo(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path newPath = new Path(path);
        FileStatus[] statusList = fs.listStatus(newPath);
        fs.close();
        List<Map<String, Object>> list = new ArrayList<>();
        if (null != statusList && statusList.length > 0) {
            for (FileStatus fileStatus : statusList) {
                Map<String, Object> map = new HashMap<>();
                map.put("filePath", fileStatus.getPath());
                map.put("fileStatus", fileStatus.toString());
                list.add(map);
            }
            return list;
        } else {
            return null;
        }
    }

    /**
     * Create a file on HDFS from an uploaded MultipartFile
     */
    public static void createFile(String path, MultipartFile file) throws Exception {
        if (StringUtils.isEmpty(path) || null == file.getBytes()) {
            return;
        }
        String fileName = file.getOriginalFilename();
        FileSystem fs = getFileSystem();
        // the original file name is appended to the target directory
        Path newPath = new Path(path + "/" + fileName);
        // open an output stream
        FSDataOutputStream outputStream = fs.create(newPath);
        outputStream.write(file.getBytes());
        outputStream.close();
        fs.close();
    }

    /**
     * Read an HDFS file's content as a string
     */
    public static String readFile(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        FSDataInputStream inputStream = null;
        try {
            inputStream = fs.open(srcPath);
            // decode explicitly as UTF-8 to avoid garbled Chinese text
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(inputStream, StandardCharsets.UTF_8));
            String lineTxt = "";
            StringBuffer sb = new StringBuffer();
            while ((lineTxt = reader.readLine()) != null) {
                sb.append(lineTxt);
            }
            return sb.toString();
        } finally {
            IOUtils.closeStream(inputStream);
            fs.close();
        }
    }

    /**
     * List files under an HDFS path
     */
    public static List<Map<String, String>> listFile(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        // recursively find all files
        RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(srcPath, true);
        List<Map<String, String>> returnList = new ArrayList<>();
        while (filesList.hasNext()) {
            LocatedFileStatus next = filesList.next();
            String fileName = next.getPath().getName();
            Path filePath = next.getPath();
            Map<String, String> map = new HashMap<>();
            map.put("fileName", fileName);
            map.put("filePath", filePath.toString());
            returnList.add(map);
        }
        fs.close();
        return returnList;
    }

    /**
     * Rename an HDFS file
     */
    public static boolean renameFile(String oldName, String newName) throws Exception {
        if (StringUtils.isEmpty(oldName) || StringUtils.isEmpty(newName)) {
            return false;
        }
        FileSystem fs = getFileSystem();
        // original path
        Path oldPath = new Path(oldName);
        // renamed path
        Path newPath = new Path(newName);
        boolean isOk = fs.rename(oldPath, newPath);
        fs.close();
        return isOk;
    }

    /**
     * Delete an HDFS file or directory
     */
    public static boolean deleteFile(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        if (!existFile(path)) {
            return false;
        }
        FileSystem fs = getFileSystem();
        Path srcPath = new Path(path);
        // delete recursively; deleteOnExit would only remove the path when the
        // FileSystem is closed, and reports registration rather than success
        boolean isOk = fs.delete(srcPath, true);
        fs.close();
        return isOk;
    }

    /**
     * Upload a local file to HDFS
     */
    public static void uploadFile(String path, String uploadPath) throws Exception {
        if (StringUtils.isEmpty(path) || StringUtils.isEmpty(uploadPath)) {
            return;
        }
        FileSystem fs = getFileSystem();
        // local source path
        Path clientPath = new Path(path);
        // HDFS target path
        Path serverPath = new Path(uploadPath);
        // the first parameter controls whether the source is deleted
        // (true deletes it; here we keep it)
        fs.copyFromLocalFile(false, clientPath, serverPath);
        fs.close();
    }

    /**
     * Download an HDFS file to the local file system
     */
    public static void downloadFile(String path, String downloadPath) throws Exception {
        if (StringUtils.isEmpty(path) || StringUtils.isEmpty(downloadPath)) {
            return;
        }
        FileSystem fs = getFileSystem();
        // HDFS source path
        Path clientPath = new Path(path);
        // local target path
        Path serverPath = new Path(downloadPath);
        // the first parameter controls whether the source is deleted
        // (true deletes it; here we keep it)
        fs.copyToLocalFile(false, clientPath, serverPath);
        fs.close();
    }

    /**
     * Copy a file within HDFS
     */
    public static void copyFile(String sourcePath, String targetPath) throws Exception {
        if (StringUtils.isEmpty(sourcePath) || StringUtils.isEmpty(targetPath)) {
            return;
        }
        FileSystem fs = getFileSystem();
        // source path
        Path oldPath = new Path(sourcePath);
        // target path
        Path newPath = new Path(targetPath);
        FSDataInputStream inputStream = null;
        FSDataOutputStream outputStream = null;
        try {
            inputStream = fs.open(oldPath);
            outputStream = fs.create(newPath);
            // stream copy with a 64 MB buffer; the final flag tells copyBytes
            // not to close the streams itself -- we close them below
            IOUtils.copyBytes(inputStream, outputStream, bufferSize, false);
        } finally {
            IOUtils.closeStream(inputStream);
            IOUtils.closeStream(outputStream);
            fs.close();
        }
    }

    /**
     * Open an HDFS file and return its content as a byte array
     */
    public static byte[] openFileToBytes(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        try (FSDataInputStream inputStream = fs.open(srcPath)) {
            return IOUtils.readFullyToByteArray(inputStream);
        } finally {
            fs.close();
        }
    }

    /**
     * Open an HDFS file and convert it into a Java object (via JSON)
     */
    public static <T extends Object> T openFileToObject(String path, Class<T> clazz) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        String jsonStr = readFile(path);
        return JsonUtil.fromObject(jsonStr, clazz);
    }

    /**
     * Get the block locations of a file in the HDFS cluster
     */
    public static BlockLocation[] getFileBlockLocations(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        try {
            FileStatus fileStatus = fs.getFileStatus(srcPath);
            return fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        } finally {
            fs.close();
        }
    }

    @PostConstruct
    public void getPath() {
        // copy the injected instance values into statics so the static
        // utility methods above can use them
        hdfsPath = this.path;
    }

    @PostConstruct
    public void getName() {
        hdfsName = this.username;
    }

    public static String getHdfsPath() {
        return hdfsPath;
    }

    public String getUsername() {
        return username;
    }
}
```
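
Because every HdfsService method is static and reads the settings injected at startup, it can also be exercised outside the controller. A minimal sketch, assuming the boot class above; the CommandLineRunner and the /demo path are our own illustration, not part of the original project:

```java
package com.hadoop;

import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

import com.hadoop.hdfs.service.HdfsService;

/**
 * Illustrative smoke test: runs once after the context is up, i.e. after the
 * @PostConstruct methods in HdfsService have populated the static settings.
 */
@Component
public class HdfsSmokeTest implements CommandLineRunner {

    @Override
    public void run(String... args) throws Exception {
        String dir = "/demo"; // hypothetical test directory
        System.out.println("mkdir: " + HdfsService.mkdir(dir));
        System.out.println("exists: " + HdfsService.existFile(dir));
        System.out.println("files: " + HdfsService.listFile(dir));
    }
}
```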
HdfsAction.java, the REST controller (Result and JsonUtil are small utility classes from the project's com.hadoop.util package, not shown in this installment):

```java
package com.hadoop.hdfs.controller;

import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.BlockLocation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import com.hadoop.hdfs.entity.User;
import com.hadoop.hdfs.service.HdfsService;
import com.hadoop.util.Result;

@RestController
@RequestMapping("/hadoop/hdfs")
public class HdfsAction {

    private static Logger LOGGER = LoggerFactory.getLogger(HdfsAction.class);

    /**
     * Create a directory
     */
    @RequestMapping(value = "mkdir", method = RequestMethod.POST)
    @ResponseBody
    public Result mkdir(@RequestParam("path") String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            LOGGER.debug("request parameter is empty");
            return new Result(Result.FAILURE, "request parameter is empty");
        }
        // create an empty directory
        boolean isOk = HdfsService.mkdir(path);
        if (isOk) {
            LOGGER.debug("directory created");
            return new Result(Result.SUCCESS, "directory created");
        } else {
            LOGGER.debug("directory creation failed");
            return new Result(Result.FAILURE, "directory creation failed");
        }
    }

    /**
     * Read HDFS directory information
     */
    @PostMapping("/readPathInfo")
    public Result readPathInfo(@RequestParam("path") String path) throws Exception {
        List<Map<String, Object>> list = HdfsService.readPathInfo(path);
        return new Result(Result.SUCCESS, "read HDFS directory info success", list);
    }

    /**
     * Get the block locations of an HDFS file in the cluster
     */
    @PostMapping("/getFileBlockLocations")
    public Result getFileBlockLocations(@RequestParam("path") String path) throws Exception {
        BlockLocation[] blockLocations = HdfsService.getFileBlockLocations(path);
        return new Result(Result.SUCCESS, "get file block locations success", blockLocations);
    }

    /**
     * Create (upload) a file
     */
    @PostMapping("/createFile")
    public Result createFile(@RequestParam("path") String path, @RequestParam("file") MultipartFile file)
            throws Exception {
        if (StringUtils.isEmpty(path) || null == file.getBytes()) {
            return new Result(Result.FAILURE, "request parameter is empty");
        }
        HdfsService.createFile(path, file);
        return new Result(Result.SUCCESS, "create file success");
    }

    /**
     * Read the content of an HDFS file
     */
    @PostMapping("/readFile")
    public Result readFile(@RequestParam("path") String path) throws Exception {
        String content = HdfsService.readFile(path);
        return new Result(Result.SUCCESS, "read file success", content);
    }

    /**
     * Read an HDFS file as a byte array
     */
    @PostMapping("/openFileToBytes")
    public Result openFileToBytes(@RequestParam("path") String path) throws Exception {
        byte[] files = HdfsService.openFileToBytes(path);
        return new Result(Result.SUCCESS, "read file to bytes success", files);
    }

    /**
     * Read an HDFS file and convert it into a User object
     */
    @PostMapping("/openFileToUser")
    public Result openFileToUser(@RequestParam("path") String path) throws Exception {
        User user = HdfsService.openFileToObject(path, User.class);
        return new Result(Result.SUCCESS, "read file to User success", user);
    }

    /**
     * List files
     */
    @PostMapping("/listFile")
    public Result listFile(@RequestParam("path") String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return new Result(Result.FAILURE, "request parameter is empty");
        }
        List<Map<String, String>> returnList = HdfsService.listFile(path);
        return new Result(Result.SUCCESS, "list file success", returnList);
    }

    /**
     * Rename a file
     */
    @PostMapping("/renameFile")
    public Result renameFile(@RequestParam("oldName") String oldName, @RequestParam("newName") String newName)
            throws Exception {
        if (StringUtils.isEmpty(oldName) || StringUtils.isEmpty(newName)) {
            return new Result(Result.FAILURE, "request parameter is empty");
        }
        boolean isOk = HdfsService.renameFile(oldName, newName);
        if (isOk) {
            return new Result(Result.SUCCESS, "rename file success");
        } else {
            return new Result(Result.FAILURE, "rename file fail");
        }
    }

    /**
     * Delete a file
     */
    @PostMapping("/deleteFile")
    public Result deleteFile(@RequestParam("path") String path) throws Exception {
        boolean isOk = HdfsService.deleteFile(path);
        if (isOk) {
            return new Result(Result.SUCCESS, "delete file success");
        } else {
            return new Result(Result.FAILURE, "delete file fail");
        }
    }

    /**
     * Upload a local file to HDFS
     */
    @PostMapping("/uploadFile")
    public Result uploadFile(@RequestParam("path") String path, @RequestParam("uploadPath") String uploadPath)
            throws Exception {
        HdfsService.uploadFile(path, uploadPath);
        return new Result(Result.SUCCESS, "upload file success");
    }

    /**
     * Download an HDFS file to the local file system
     */
    @PostMapping("/downloadFile")
    public Result downloadFile(@RequestParam("path") String path, @RequestParam("downloadPath") String downloadPath)
            throws Exception {
        HdfsService.downloadFile(path, downloadPath);
        return new Result(Result.SUCCESS, "download file success");
    }

    /**
     * Copy a file within HDFS
     */
    @PostMapping("/copyFile")
    public Result copyFile(@RequestParam("sourcePath") String sourcePath, @RequestParam("targetPath") String targetPath)
            throws Exception {
        HdfsService.copyFile(sourcePath, targetPath);
        return new Result(Result.SUCCESS, "copy file success");
    }

    /**
     * Check whether a file exists
     */
    @PostMapping("/existFile")
    public Result existFile(@RequestParam("path") String path) throws Exception {
        boolean isExist = HdfsService.existFile(path);
        return new Result(Result.SUCCESS, "file isExist: " + isExist);
    }
}
```
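
One way to exercise the endpoints, sketched here with the JDK's HttpURLConnection (any HTTP client or curl works just as well; the port matches server.port=8900 above, and the /demo path is a hypothetical example):

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class HdfsApiClient {
    public static void main(String[] args) throws Exception {
        // POST /hadoop/hdfs/mkdir with a form-encoded "path" parameter
        URL url = new URL("http://localhost:8900/hadoop/hdfs/mkdir");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");

        String body = "path=" + URLEncoder.encode("/demo", "UTF-8");
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }

        // print the JSON Result returned by the controller
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            System.out.println(in.readLine());
        }
    }
}
```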

IV. Some test-result screenshots

(The screenshots from the original post are not reproduced here.)

