Spring Boot + Hadoop in Practice

Integrating Hadoop with Spring Boot to implement HDFS create, read, update, and delete operations.

Maven coordinates

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-streaming</artifactId>
    <version>${hadoop.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-yarn-common</artifactId>
    <version>${hadoop.version}</version>
    <exclusions>
        <exclusion>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-distcp</artifactId>
    <version>${hadoop.version}</version>
    <scope>provided</scope>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>${hadoop.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>${hadoop.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
    <version>${hadoop.version}</version>
    <scope>provided</scope>
</dependency>

<!-- Chinese word segmenter (IK Analyzer) -->
<dependency>
    <groupId>cn.bestwu</groupId>
    <artifactId>ik-analyzers</artifactId>
    <version>5.1.0</version>
</dependency>
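These dependencies reference a hadoop.version Maven property, which is assumed to be defined in the pom's properties section; set it to match your cluster's version, for example:

<properties>
    <!-- example only: use the version your cluster runs -->
    <hadoop.version>3.1.3</hadoop.version>
</properties>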

Configuration

HDFS configuration (hdfsPath is the NameNode address; hdfsName is used further below as the user to connect as):

hdfs:
  hdfsPath: hdfs://bigdata-master:8020
  hdfsName: bigdata-master

Configure the FileSystem and register it as a bean in the Spring container; note that the third argument to FileSystem.get is the user to act as:

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.FileSystem;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

@Slf4j
@Configuration
public class HadoopHDFSConfiguration {

    @Value("${hdfs.hdfsPath}")
    private String hdfsPath;
    @Value("${hdfs.hdfsName}")
    private String hdfsName;

    @Bean
    public org.apache.hadoop.conf.Configuration getConfiguration() {
        org.apache.hadoop.conf.Configuration configuration = new org.apache.hadoop.conf.Configuration();
        configuration.set("fs.defaultFS", hdfsPath);
        return configuration;
    }

    @Bean
    public FileSystem getFileSystem() {
        FileSystem fileSystem = null;
        try {
            // the third argument is the user to act as when talking to HDFS
            fileSystem = FileSystem.get(new URI(hdfsPath), getConfiguration(), hdfsName);
        } catch (IOException | InterruptedException | URISyntaxException e) {
            log.error(e.getMessage(), e);
        }
        return fileSystem;
    }

}

CRUD operations

public interface HDFSService {

    // create a directory
    boolean makeFolder(String path);

    // check whether a file exists
    boolean existFile(String path);

    List<Map<String, Object>> readCatalog(String path);

    boolean createFile(String path, MultipartFile file);

    String readFileContent(String path);

    List<Map<String, Object>> listFile(String path);

    boolean renameFile(String oldName, String newName);

    boolean deleteFile(String path);

    boolean uploadFile(String path, String uploadPath);

    boolean downloadFile(String path, String downloadPath);

    boolean copyFile(String sourcePath, String targetPath);

    byte[] openFileToBytes(String path);

    BlockLocation[] getFileBlockLocations(String path);

}
The implementation:

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Slf4j
@Service
public class HDFSServiceImpl implements HDFSService {

    private static final int bufferSize = 1024 * 1024 * 64;

    @Autowired
    private FileSystem fileSystem;

    @Override
    public boolean makeFolder(String path) {
        boolean target = false;
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        if (existFile(path)) {
            return true;
        }
        Path src = new Path(path);
        try {
            target = fileSystem.mkdirs(src);
        } catch (IOException e) {
            log.error(e.getMessage());
        }
        return target;
    }

    @Override
    public boolean existFile(String path) {
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        Path src = new Path(path);
        try {
            return fileSystem.exists(src);
        } catch (IOException e) {
            log.error(e.getMessage());
        }
        return false;
    }

    @Override
    public List<Map<String, Object>> readCatalog(String path) {
        if (StringUtils.isEmpty(path)) {
            return Collections.emptyList();
        }
        if (!existFile(path)) {
            log.error("directory does not exist: {}", path);
            return Collections.emptyList();
        }

        Path src = new Path(path);
        FileStatus[] fileStatuses;
        try {
            fileStatuses = fileSystem.listStatus(src);
        } catch (IOException e) {
            // return early instead of dereferencing a null array below
            log.error(e.getMessage());
            return Collections.emptyList();
        }

        List<Map<String, Object>> result = new ArrayList<>(fileStatuses.length);
        for (FileStatus fileStatus : fileStatuses) {
            Map<String, Object> cataLogMap = new HashMap<>();
            cataLogMap.put("filePath", fileStatus.getPath());
            cataLogMap.put("fileStatus", fileStatus);
            result.add(cataLogMap);
        }
        return result;
    }

    @Override
    public boolean createFile(String path, MultipartFile file) {
        boolean target = false;
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        // getOriginalFilename() is the uploaded file's name; getName() would
        // only return the name of the form field
        String fileName = file.getOriginalFilename();
        Path newPath = new Path(path + "/" + fileName);

        FSDataOutputStream outputStream = null;
        try {
            outputStream = fileSystem.create(newPath);
            outputStream.write(file.getBytes());
            target = true;
        } catch (IOException e) {
            log.error(e.getMessage());
        } finally {
            if (null != outputStream) {
                try {
                    outputStream.close();
                } catch (IOException e) {
                    log.error(e.getMessage());
                }
            }
        }
        return target;
    }

    @Override
    public String readFileContent(String path) {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }

        Path src = new Path(path);
        StringBuilder sb = new StringBuilder();
        // FSDataInputStream.readLine() is deprecated, so wrap the stream in a
        // BufferedReader; try-with-resources closes both streams
        try (FSDataInputStream inputStream = fileSystem.open(src);
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            String lineText;
            while ((lineText = reader.readLine()) != null) {
                sb.append(lineText);
            }
        } catch (IOException e) {
            log.error(e.getMessage());
        }
        return sb.toString();
    }

    @Override
    public List<Map<String, Object>> listFile(String path) {
        if (StringUtils.isEmpty(path)) {
            return Collections.emptyList();
        }
        if (!existFile(path)) {
            return Collections.emptyList();
        }
        List<Map<String, Object>> resultList = new ArrayList<>();

        Path src = new Path(path);
        try {
            // the second argument makes the listing recursive
            RemoteIterator<LocatedFileStatus> fileIterator = fileSystem.listFiles(src, true);
            while (fileIterator.hasNext()) {
                LocatedFileStatus next = fileIterator.next();
                Path filePath = next.getPath();
                String fileName = filePath.getName();
                Map<String, Object> map = new HashMap<>();
                map.put("fileName", fileName);
                map.put("filePath", filePath.toString());
                resultList.add(map);
            }
        } catch (IOException e) {
            log.error(e.getMessage());
        }

        return resultList;
    }

    @Override
    public boolean renameFile(String oldName, String newName) {
        boolean target = false;
        if (StringUtils.isEmpty(oldName) || StringUtils.isEmpty(newName)) {
            return false;
        }
        Path oldPath = new Path(oldName);
        Path newPath = new Path(newName);
        try {
            target = fileSystem.rename(oldPath, newPath);
        } catch (IOException e) {
            log.error(e.getMessage());
        }

        return target;
    }

    @Override
    public boolean deleteFile(String path) {
        boolean target = false;
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        if (!existFile(path)) {
            return false;
        }
        Path src = new Path(path);
        try {
            // delete(path, true) removes the file or directory immediately;
            // deleteOnExit() would only mark it for deletion when the
            // FileSystem is eventually closed
            target = fileSystem.delete(src, true);
        } catch (IOException e) {
            log.error(e.getMessage());
        }
        return target;
    }

    @Override
    public boolean uploadFile(String path, String uploadPath) {
        if (StringUtils.isEmpty(path) || StringUtils.isEmpty(uploadPath)) {
            return false;
        }

        // path is the local source file, uploadPath the HDFS target
        Path clientPath = new Path(path);
        Path serverPath = new Path(uploadPath);

        try {
            // the first argument controls whether the local source is deleted
            fileSystem.copyFromLocalFile(false, clientPath, serverPath);
            return true;
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }
        return false;
    }

    @Override
    public boolean downloadFile(String path, String downloadPath) {
        if (StringUtils.isEmpty(path) || StringUtils.isEmpty(downloadPath)) {
            return false;
        }

        // path is the HDFS source file, downloadPath the local target
        Path clientPath = new Path(path);
        Path targetPath = new Path(downloadPath);

        try {
            fileSystem.copyToLocalFile(false, clientPath, targetPath);
            return true;
        } catch (IOException e) {
            log.error(e.getMessage());
        }
        return false;
    }

    @Override
    public boolean copyFile(String sourcePath, String targetPath) {
        if (StringUtils.isEmpty(sourcePath) || StringUtils.isEmpty(targetPath)) {
            return false;
        }

        Path oldPath = new Path(sourcePath);
        Path newPath = new Path(targetPath);

        FSDataInputStream inputStream = null;
        FSDataOutputStream outputStream = null;

        try {
            inputStream = fileSystem.open(oldPath);
            outputStream = fileSystem.create(newPath);

            // close=false because the streams are closed in the finally block
            IOUtils.copyBytes(inputStream, outputStream, bufferSize, false);
            return true;
        } catch (IOException e) {
            log.error(e.getMessage());
        } finally {
            if (null != inputStream) {
                try {
                    inputStream.close();
                } catch (IOException e) {
                    log.error(e.getMessage());
                }
            }
            if (null != outputStream) {
                try {
                    outputStream.close();
                } catch (IOException e) {
                    log.error(e.getMessage());
                }
            }
        }
        return false;
    }

    @Override
    public byte[] openFileToBytes(String path) {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }

        Path src = new Path(path);
        byte[] result = null;
        FSDataInputStream inputStream = null;
        try {
            inputStream = fileSystem.open(src);
            result = IOUtils.readFullyToByteArray(inputStream);
        } catch (IOException e) {
            log.error(e.getMessage());
        } finally {
            if (null != inputStream) {
                try {
                    inputStream.close();
                } catch (IOException e) {
                    log.error(e.getMessage());
                }
            }
        }

        return result;
    }

    @Override
    public BlockLocation[] getFileBlockLocations(String path) {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        BlockLocation[] blocks = null;
        Path src = new Path(path);
        try {
            FileStatus fileStatus = fileSystem.getFileStatus(src);
            blocks = fileSystem.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        } catch (Exception e) {
            log.error(e.getMessage());
        }
        return blocks;
    }
}
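The service can then be exposed over REST. The original post doesn't show an HDFS controller, so the following is a minimal sketch with hypothetical endpoint paths (imports omitted, matching the abbreviated controller listing at the end of this post); APIResponse is the same response wrapper used by the MapReduceController below:

@Slf4j
@RestController
@RequestMapping("/api/v1/hdfs")
public class HDFSController {

    @Autowired
    private HDFSService hdfsService;

    // hypothetical endpoint: create a directory on HDFS
    @PostMapping("/mkdir")
    public APIResponse makeFolder(@RequestParam("path") String path) {
        return hdfsService.makeFolder(path)
                ? APIResponse.success()
                : APIResponse.fail("failed to create folder: " + path);
    }

    // hypothetical endpoint: upload a multipart file into an HDFS directory
    @PostMapping("/upload")
    public APIResponse createFile(@RequestParam("path") String path,
                                  @RequestParam("file") MultipartFile file) {
        return hdfsService.createFile(path, file)
                ? APIResponse.success()
                : APIResponse.fail("failed to upload file to: " + path);
    }
}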

MapReduce

package com.winterchen.hadoopdemo.reduce;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/*
 * A Reducer declares four generic type parameters for its input and output:
 * KeyIn    - the key type of the reducer's input, here a word such as "hello"
 * ValueIn  - the value type of the reducer's input, here a single occurrence count
 * KeyOut   - the key type of the reducer's output, here the same word
 * ValueOut - the value type of the reducer's output, here the total number of occurrences
 */
public class WordReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

    private IntWritable result = new IntWritable();
    private List<String> textList = new ArrayList<>();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable val : values) {
            sum += val.get();
        }
        result.set(sum);
        context.write(key, result);

        String keyStr = key.toString();

        // The mapper's tokenizer has already segmented the text, so the counts
        // can be emitted directly. Note that textList starts out empty here;
        // add words to it if you want this debug output for specific terms.
        if (textList.contains(keyStr)) {
            System.out.println("============ " + keyStr + " count: " + sum + " ============");
        }
    }
}
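The job configuration in the next listing references a WordMapper class that the original post doesn't show. Here is a minimal sketch of what it might look like, assuming the IK analyzer (the ik-analyzers dependency above) is used to segment each line into words:

package com.winterchen.hadoopdemo.mapper;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;

/*
 * Hypothetical mapper (not shown in the original post): tokenizes each input
 * line with the IK analyzer and emits a (word, 1) pair per token for the
 * reducer to sum.
 */
public class WordMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // smart mode (true) produces a coarser-grained segmentation
        IKSegmenter segmenter = new IKSegmenter(new StringReader(value.toString()), true);
        Lexeme lexeme;
        while ((lexeme = segmenter.next()) != null) {
            word.set(lexeme.getLexemeText());
            context.write(word, ONE);
        }
    }
}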
The job configuration wires the mapper and reducer together and submits the job:

package com.winterchen.hadoopdemo.configuration;

import com.winterchen.hadoopdemo.HadoopDemoApplication;
import com.winterchen.hadoopdemo.mapper.WordMapper;
import com.winterchen.hadoopdemo.reduce.WordReduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.IOException;

@Component
public class ReduceJobsConfiguration {

    @Value("${hdfs.hdfsPath}")
    private String hdfsPath;

    /**
     * Build the Hadoop configuration pointing at the cluster.
     */
    public Configuration getConfiguration() {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", hdfsPath);
        // mapred.job.tracker is a legacy MRv1 property; on a YARN cluster you
        // would set mapreduce.framework.name and the resource manager address
        // instead
        configuration.set("mapred.job.tracker", hdfsPath);
        return configuration;
    }

    /**
     * Configure and run the word-count job; waitForCompletion blocks until
     * the job finishes.
     *
     * @param jobName    name of the MapReduce job
     * @param inputPath  HDFS path to read from
     * @param outputPath HDFS path to write results to (must not exist yet)
     */
    public void getWordCountJobsConf(String jobName, String inputPath, String outputPath)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = getConfiguration();
        Job job = Job.getInstance(conf, jobName);

        job.setMapperClass(WordMapper.class);
        // the reducer doubles as a combiner for local pre-aggregation
        job.setCombinerClass(WordReduce.class);
        job.setJarByClass(HadoopDemoApplication.class);
        job.setReducerClass(WordReduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
        job.waitForCompletion(true);
    }

    public String getHdfsPath() {
        return hdfsPath;
    }
}
public interface MapReduceService {

    void wordCount(String jobName, String inputPath, String outputPath) throws Exception;

}
package com.winterchen.hadoopdemo.service.impl;

import com.winterchen.hadoopdemo.configuration.ReduceJobsConfiguration;
import com.winterchen.hadoopdemo.service.HDFSService;
import com.winterchen.hadoopdemo.service.MapReduceService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

@Service
public class MapReduceServiceImpl implements MapReduceService {

    @Autowired
    private HDFSService hdfsService;

    @Autowired
    private ReduceJobsConfiguration reduceJobsConfiguration;

    @Override
    public void wordCount(String jobName, String inputPath, String outputPath) throws Exception {
        if (StringUtils.isEmpty(jobName) || StringUtils.isEmpty(inputPath) || StringUtils.isEmpty(outputPath)) {
            return;
        }
        // If the output path already exists, delete it first so every run
        // starts fresh - MapReduce refuses to write to an existing output
        // directory.
        if (hdfsService.existFile(outputPath)) {
            hdfsService.deleteFile(outputPath);
        }
        reduceJobsConfiguration.getWordCountJobsConf(jobName, inputPath, outputPath);
    }
}
@Slf4j
@Api(tags = "map reduce api")
@RestController
@RequestMapping("/api/v1/map-reduce")
public class MapReduceController {

    @Autowired
    private MapReduceService mapReduceService;

    @ApiOperation("count word")
    @PostMapping("/word/count")
    public APIResponse wordCount(
            @ApiParam(name = "jobName", required = true)
            @RequestParam(name = "jobName", required = true)
            String jobName,
            @ApiParam(name = "inputPath", required = true)
            @RequestParam(name = "inputPath", required = true)
            String inputPath,
            @ApiParam(name = "outputPath", required = true)
            @RequestParam(name = "outputPath", required = true)
            String outputPath
    ) {
        try {
            mapReduceService.wordCount(jobName, inputPath, outputPath);
            return APIResponse.success();
        } catch (Exception e) {
            log.error(e.getMessage());
            return APIResponse.fail(e.getMessage());
        }
    }
}
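To exercise the endpoint, upload a text file to HDFS first and then trigger the job. Here is a quick smoke-test sketch using the JDK 11 HTTP client, assuming the application runs on localhost:8080; the job name and HDFS paths below are placeholders:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class WordCountClient {

    public static void main(String[] args) throws Exception {
        // hypothetical job name and HDFS paths; substitute your own
        String form = "jobName=wordcount"
                + "&inputPath=/user/input/test.txt"
                + "&outputPath=/user/output/wordcount";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/v1/map-reduce/word/count"))
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString(form))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}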

That covers the basic functionality you're likely to need in day-to-day development: HDFS CRUD operations plus MapReduce.

Source code:

WinterChenS/springboot-learning-experience
