-
Notifications
You must be signed in to change notification settings - Fork 0
/
spark_hdfs-compose.yml
63 lines (56 loc) · 1.28 KB
/
spark_hdfs-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
# Docker Compose stack: a standalone Spark cluster (one master, replicated
# workers) plus a single-node Hadoop container for HDFS, all on an external
# user-defined network ("spark-network").
# version: '3.5'
version: '3'
services:
  # Spark standalone master: serves the cluster RPC endpoint (7077),
  # REST submission (6066), master web UI (8080), and driver UI (4040).
  master:
    image: mjhea0/spark:2.4.1
    command: bin/spark-class org.apache.spark.deploy.master.Master -h master
    hostname: master
    environment:
      MASTER: spark://master:7077
      SPARK_CONF_DIR: /conf
      # SPARK_PUBLIC_DNS: ${EXTERNAL_IP}
    # Port mappings are quoted: YAML 1.1 parsers read unquoted
    # colon-separated digits as base-60 integers when segments are < 60,
    # so Compose docs recommend always quoting HOST:CONTAINER strings.
    ports:
      - "4040:4040"
      - "6066:6066"
      - "7077:7077"
      - "8080:8080"

  # Spark worker(s): register with the master and expose the worker UI (8081).
  worker:
    image: mjhea0/spark:2.4.1
    command: bin/spark-class org.apache.spark.deploy.worker.Worker spark://master:7077
    hostname: spark-worker
    # NOTE(review): `deploy` (replicas/restart_policy) only takes effect under
    # `docker stack deploy` (swarm mode); plain `docker-compose up` ignores it.
    deploy:
      replicas: 2
      restart_policy:
        condition: on-failure
    environment:
      SPARK_CONF_DIR: /conf
      SPARK_WORKER_CORES: 1
      SPARK_WORKER_MEMORY: 1g
      # SPARK_PUBLIC_DNS: ${EXTERNAL_IP}
    depends_on:
      - master
    ports:
      - "8081:8081"
    # command: hadoop fs -put ../step_07/script/employees.json hdfs://hadoop:8020/user/me/json

  # Single-node Hadoop (HDFS + YARN): NodeManager UI (8042), ResourceManager
  # UI (8088), JobHistory (19888), and the classic HDFS daemon ports (500xx).
  hadoop:
    image: harisekhon/hadoop:2.8
    hostname: hadoop
    ports:
      # - "8020:8020"
      - "8042:8042"
      - "8088:8088"
      # - "9000:9000"
      # - "10020:10020"
      - "19888:19888"
      - "50010:50010"
      - "50020:50020"
      - "50070:50070"
      - "50075:50075"
      - "50090:50090"

volumes:
  # NOTE(review): declared external volume, but no visible service mounts it —
  # confirm whether it is used elsewhere or can be dropped.
  jupyter-data:
    external: true

networks:
  # Attach the stack's default network to a pre-created external network so
  # other compose projects (e.g. a Jupyter container) can reach these hosts.
  default:
    external:
      name: spark-network