-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path: spark-hadoop.yml
65 lines (54 loc) · 1.45 KB
/
spark-hadoop.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
---
#
# Install basic packages and python
#
- name: install basic packages, python, and thunder
  hosts: all
  become: true
  become_user: root
  vars_files:
    - user_data/user-defines.yml
    - user_data/user-passwords.yml
  vars:
    # Contents of the public key file named by ssh_public_key (defined in vars_files)
    ssh_public_key_content: "{{ lookup('file', ssh_public_key) }}"
  pre_tasks:
    - name: Update APT cache
      # Native YAML module args (not the legacy key=value string form)
      apt:
        update_cache: true
  roles:
    - common
    - role: supervisord
      tags: supervisor
    - role: miniconda
      tags: miniconda
    # Prepend miniconda's bin so thunder installs against the custom python
    - role: thunder
      tags: thunder
      environment:
        PATH: "/usr/local/miniconda/bin:{{ ansible_env.PATH }}"
#
# Starting up spark, hadoop, and jupyterhub
#
# Note: we do this after the python installation because we want to
# use the custom python installed above instead of system python
#
- name: install and deploy spark and hdfs
  hosts: all
  become: true
  become_user: root
  environment:
    # Custom python first on PATH; PYSPARK_PYTHON pins Spark workers to it
    PATH: "/usr/local/miniconda/bin:{{ ansible_env.PATH }}"
    PYSPARK_PYTHON: "/usr/local/miniconda/bin/python"
  vars_files:
    - user_data/user-defines.yml
  tags: spark-hadoop
  roles:
    - role: spark
      tags: spark
    - role: hadoop
      tags: hadoop
- name: deploy jupyterhub
  # JupyterHub runs only on the Spark master node(s)
  hosts: spark_masters
  become: true
  become_user: root
  environment:
    # Use the python35 conda env's bin for the jupyterhub install/run
    PATH: "/usr/local/miniconda/envs/python35/bin:{{ ansible_env.PATH }}"
  tags: jupyterhub
  vars_files:
    - user_data/user-defines.yml
    - user_data/user-passwords.yml
  roles:
    - jupyterhub