#!/usr/bin/python
import os
import errno
import json
from subprocess import check_output, call
import sys

if sys.version_info >= (3, 0):
    # Python 3 check_output returns a byte string
    decode_f = lambda x: x.decode()
else:
    # In Python 2, bytes and str are the same
    decode_f = lambda x: x


def get_metadata(key):
    return decode_f(check_output(['/usr/share/google/get_metadata_value', 'attributes/{}'.format(key)]))


def mkdir_if_not_exists(path):
    try:
        os.makedirs(path)
    except OSError as e:
        # ignore "directory already exists" errors, re-raise anything else
        if e.errno != errno.EEXIST:
            raise


# get role of machine (master or worker)
role = get_metadata('dataproc-role')

if role == 'Master':
    # additional packages to install
    pkgs = [
        'numpy',
        'pandas',
        'matplotlib',
        'seaborn',
        'decorator==4.2.1',
        'parsimonious',
        'jupyter',
        'lxml',
        'jupyter-spark',
        'bokeh',
        'ipywidgets',
        'google-cloud==0.32.0',
        'cloudstorage',
        'google.appengine.api'
    ]

    # add user-requested packages
    try:
        user_pkgs = get_metadata('PKGS')
    except Exception:
        # no PKGS metadata attribute was set on the cluster
        pass
    else:
        pkgs.extend(user_pkgs.split(','))

    call('/opt/conda/bin/conda update setuptools', shell=True)
    for pkg in pkgs:
        call('/opt/conda/bin/pip install {}'.format(pkg), shell=True)

    # locate the py4j zip bundled with Spark, then copy the Hail jar and zip from GCS
    py4j = decode_f(check_output('ls /usr/lib/spark/python/lib/py4j*', shell=True).strip())
    jar_path = get_metadata('JAR')
    zip_path = get_metadata('ZIP')
    call(['gsutil', 'cp', jar_path, '/home/hail/hail.jar'])
    call(['gsutil', 'cp', zip_path, '/home/hail/hail.zip'])

    env_to_set = {
        'PYTHONHASHSEED': '0',
        'PYTHONPATH':
            '/usr/lib/spark/python/:{}:/home/hail/hail.zip'.format(py4j),
        'SPARK_HOME': '/usr/lib/spark/',
        'PYSPARK_PYTHON': '/opt/conda/bin/python',
        'PYSPARK_DRIVER_PYTHON': '/opt/conda/bin/python'
    }
    for e, value in env_to_set.items():
        call('echo "export {}={}" | tee -a /etc/environment /usr/lib/spark/conf/spark-env.sh'.format(e, value), shell=True)

    # modify custom Spark conf file to reference Hail jar and zip
    conf_to_set = [
        'spark.jars=/home/hail/hail.jar',
        'spark.executorEnv.PYTHONHASHSEED=0',
        'spark.submit.pyFiles=/home/hail/hail.zip',
        'spark.driver.extraClassPath=/home/hail/hail.jar',
        'spark.executor.extraClassPath=./hail.jar'
    ]
    for c in conf_to_set:
        call('echo "{}" >> /etc/spark/conf/spark-defaults.conf'.format(c), shell=True)

    # create Jupyter kernel spec file
    kernel = {
        'argv': [
            '/opt/conda/bin/python',
            '-m',
            'ipykernel',
            '-f',
            '{connection_file}'
        ],
        'display_name': 'Hail',
        'language': 'python',
        'env': env_to_set
    }

    # write kernel spec file to default Jupyter kernel directory
    mkdir_if_not_exists('/opt/conda/share/jupyter/kernels/hail/')
    with open('/opt/conda/share/jupyter/kernels/hail/kernel.json', 'w') as f:
        json.dump(kernel, f)
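
    # For reference (not executed): the kernel spec written above serializes
    # to roughly the following, with 'env' carrying the variables defined
    # earlier ('env' abbreviated here):
    #
    #   {"argv": ["/opt/conda/bin/python", "-m", "ipykernel", "-f", "{connection_file}"],
    #    "display_name": "Hail", "language": "python",
    #    "env": {"PYTHONHASHSEED": "0", "SPARK_HOME": "/usr/lib/spark/", ...}}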

    # create Jupyter configuration file
    mkdir_if_not_exists('/opt/conda/etc/jupyter/')
    with open('/opt/conda/etc/jupyter/jupyter_notebook_config.py', 'w') as f:
        opts = [
            'c.Application.log_level = "DEBUG"',
            'c.NotebookApp.ip = "*"',
            'c.NotebookApp.open_browser = False',
            'c.NotebookApp.port = 8123',
            'c.NotebookApp.token = ""',
            'c.NotebookApp.contents_manager_class = "jgscm.GoogleStorageContentManager"'
        ]
        f.write('\n'.join(opts) + '\n')

    # set up the jupyter-spark extension
    call(['/opt/conda/bin/jupyter', 'serverextension', 'enable', '--user', '--py', 'jupyter_spark'])
    call(['/opt/conda/bin/jupyter', 'nbextension', 'install', '--user', '--py', 'jupyter_spark'])
    call(['/opt/conda/bin/jupyter', 'nbextension', 'enable', '--user', '--py', 'jupyter_spark'])
    call(['/opt/conda/bin/jupyter', 'nbextension', 'enable', '--user', '--py', 'widgetsnbextension'])

    # create systemd service file for Jupyter notebook server process
    with open('/lib/systemd/system/jupyter.service', 'w') as f:
        opts = [
            '[Unit]',
            'Description=Jupyter Notebook',
            'After=hadoop-yarn-resourcemanager.service',
            '[Service]',
            'Type=simple',
            'User=root',
            'Group=root',
            'WorkingDirectory=/home/hail/',
            'ExecStart=/opt/conda/bin/python /opt/conda/bin/jupyter notebook --allow-root',
            'Restart=always',
            'RestartSec=1',
            '[Install]',
            'WantedBy=multi-user.target'
        ]
        f.write('\n'.join(opts) + '\n')

    # add Jupyter service to autorun and start it
    call(['systemctl', 'daemon-reload'])
    call(['systemctl', 'enable', 'jupyter'])
    call(['service', 'jupyter', 'start'])
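
# Usage sketch (an assumption about deployment, not confirmed by this file):
# the script reads the JAR, ZIP, and optional PKGS instance metadata
# attributes, so it is presumably passed to Dataproc as an initialization
# action along these lines:
#
#   gcloud dataproc clusters create <cluster-name> \
#       --initialization-actions gs://<bucket>/init_notebook.py \
#       --metadata "JAR=gs://<bucket>/hail.jar,ZIP=gs://<bucket>/hail.zip,PKGS=scipy"
#
# <cluster-name>, <bucket>, and the PKGS value are placeholders.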