-
Notifications
You must be signed in to change notification settings - Fork 18
/
Copy pathdocker-compose-hadoop.yml
114 lines (104 loc) · 2.13 KB
/
docker-compose-hadoop.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
version: "2.1"
services:
  # HDFS NameNode: holds the filesystem namespace; input data, map_reduce
  # code and requirements.txt are mounted in for convenience.
  namenode:
    build: ./namenode
    container_name: namenode
    volumes:
      - hadoop_namenode:/hadoop/dfs/name
      - ./data/:/hadoop-data/input
      - ./map_reduce/:/hadoop-data/map_reduce
      - ./requirements.txt:/hadoop-data/requirements.txt
    environment:
      - CLUSTER_NAME=test
    env_file:
      - ./hadoop.env
    ports:
      # Quoted so YAML cannot misread host:container pairs as numbers.
      - "9870:9870"  # NameNode web UI
      - "8020:8020"  # NameNode RPC
    networks:
      - hadoop_network
resourcemanager:
build: ./resourcemanager
container_name: resourcemanager
restart: on-failure
depends_on:
- namenode
- datanode1
- datanode2
- datanode3
env_file:
- ./hadoop.env
ports:
- 8089:8088
networks:
- hadoop_network
historyserver:
build: ./historyserver
container_name: historyserver
depends_on:
- namenode
- datanode1
- datanode2
volumes:
- hadoop_historyserver:/hadoop/yarn/timeline
env_file:
- ./hadoop.env
ports:
- 8188:8188
networks:
- hadoop_network
nodemanager1:
build: ./nodemanager
container_name: nodemanager1
depends_on:
- namenode
- datanode1
- datanode2
env_file:
- ./hadoop.env
ports:
- 8042:8042
networks:
- hadoop_network
datanode1:
build: ./datanode
container_name: datanode1
depends_on:
- namenode
volumes:
- hadoop_datanode1:/hadoop/dfs/data
env_file:
- ./hadoop.env
networks:
- hadoop_network
datanode2:
build: ./datanode
container_name: datanode2
depends_on:
- namenode
volumes:
- hadoop_datanode2:/hadoop/dfs/data
env_file:
- ./hadoop.env
networks:
- hadoop_network
# datanode3:
# build: ./datanode
# container_name: datanode3
# depends_on:
# - namenode
# volumes:
# - hadoop_datanode3:/hadoop/dfs/data
# env_file:
# - ./hadoop.env
# networks:
# - hadoop_network
volumes:
hadoop_namenode:
hadoop_datanode1:
hadoop_datanode2: # hadoop_datanode3:
hadoop_historyserver:
networks:
hadoop_network:
name: hadoop_network
external: true