forked from oppia/oppia
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathinstall_third_party.py
More file actions
470 lines (397 loc) · 18.5 KB
/
install_third_party.py
File metadata and controls
470 lines (397 loc) · 18.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Installation script for Oppia third-party libraries."""
from __future__ import annotations
import argparse
import contextlib
import io
import json
import os
import subprocess
import sys
import tarfile
import urllib
import zipfile
from core import utils
from . import common
from . import install_backend_python_libs
# Directory (relative, beside the oppia/ root) that holds developer tools.
TOOLS_DIR = os.path.join('..', 'oppia_tools')
# Directories that receive downloaded third-party libraries.
THIRD_PARTY_DIR = os.path.join('.', 'third_party')
THIRD_PARTY_STATIC_DIR = os.path.join(THIRD_PARTY_DIR, 'static')
# Manifest that lists every third-party dependency to install.
DEPENDENCIES_FILE_PATH = os.path.join(os.getcwd(), 'dependencies.json')
# Place to download zip files for temporary storage.
TMP_UNZIP_PATH = os.path.join('.', 'tmp_unzip.zip')
# Check that the current directory is correct.
common.require_cwd_to_be_oppia(allow_deploy_dir=True)
# Maps a dependency's top-level key in dependencies.json to the directory
# its files are downloaded into.
TARGET_DOWNLOAD_DIRS = {
    'proto': THIRD_PARTY_DIR,
    'frontend': THIRD_PARTY_STATIC_DIR,
    'oppiaTools': TOOLS_DIR
}
# Recognized values of a dependency's 'downloadFormat' field.
_DOWNLOAD_FORMAT_ZIP = 'zip'
_DOWNLOAD_FORMAT_TAR = 'tar'
_DOWNLOAD_FORMAT_FILES = 'files'
# For each download format: the keys every dependency dict must define,
# plus pairs of optional keys of which exactly one must be present.
DOWNLOAD_FORMATS_TO_DEPENDENCIES_KEYS = {
    'zip': {
        'mandatory_keys': ['version', 'url', 'downloadFormat'],
        'optional_key_pairs': [
            ['rootDir', 'rootDirPrefix'], ['targetDir', 'targetDirPrefix']]
    },
    'files': {
        'mandatory_keys': [
            'version', 'url', 'files',
            'targetDirPrefix', 'downloadFormat'],
        'optional_key_pairs': []
    },
    'tar': {
        'mandatory_keys': [
            'version', 'url', 'tarRootDirPrefix',
            'targetDirPrefix', 'downloadFormat'],
        'optional_key_pairs': []
    }
}
# Command-line parser; this script takes no arguments of its own.
_PARSER = argparse.ArgumentParser(
    description="""
Installation script for Oppia third-party libraries.
""")
def download_files(source_url_root, target_dir, source_filenames):
    """Downloads a group of files and saves them to a given directory.

    Each file is downloaded only if it does not already exist.

    Args:
        source_url_root: str. The URL to prepend to all the filenames.
        target_dir: str. The directory to save the files to.
        source_filenames: list(str). Each filename is appended to the
            end of the source_url_root in order to give the URL from which to
            download the file. The downloaded file is then placed in
            target_dir, and retains the same filename.
    """
    assert isinstance(source_filenames, list), (
        'Expected list of filenames, got \'%s\'' % source_filenames)
    common.ensure_directory_exists(target_dir)
    for name in source_filenames:
        destination_path = os.path.join(target_dir, name)
        # Skip anything that has already been downloaded.
        if os.path.exists(destination_path):
            continue
        print('Downloading file %s to %s ...' % (name, target_dir))
        urllib.request.urlretrieve(
            '%s/%s' % (source_url_root, name), filename=destination_path)
        print('Download of %s succeeded.' % name)
def download_and_unzip_files(
        source_url, target_parent_dir, zip_root_name, target_root_name):
    """Downloads a zip file, unzips it, and saves the result in a given dir.

    The download occurs only if the target directory that the zip file unzips
    to does not exist.

    NB: This function assumes that the root level of the zip file has exactly
    one folder.

    Args:
        source_url: str. The URL from which to download the zip file.
        target_parent_dir: str. The directory to save the contents of the zip
            file to.
        zip_root_name: str. The name of the top-level folder in the zip
            directory.
        target_root_name: str. The name that the top-level folder should be
            renamed to in the local directory.
    """
    if not os.path.exists(os.path.join(target_parent_dir, target_root_name)):
        print('Downloading and unzipping file %s to %s ...' % (
            zip_root_name, target_parent_dir))
        common.ensure_directory_exists(target_parent_dir)
        urllib.request.urlretrieve(source_url, filename=TMP_UNZIP_PATH)
        try:
            with zipfile.ZipFile(TMP_UNZIP_PATH, 'r') as zfile:
                zfile.extractall(path=target_parent_dir)
            os.remove(TMP_UNZIP_PATH)
        except Exception:
            if os.path.exists(TMP_UNZIP_PATH):
                os.remove(TMP_UNZIP_PATH)
            # Some downloads (like jqueryui-themes) may require a user-agent.
            req = urllib.request.Request(source_url, None, {})
            req.add_header('User-agent', 'python')
            # This is needed to get a seekable filestream that can be used
            # by zipfile.ZipFile. The downloaded payload is bytes, so it
            # must be wrapped in io.BytesIO -- the previous io.StringIO
            # raised a TypeError because zipfile requires a binary stream.
            file_stream = io.BytesIO(utils.url_open(req).read())
            with zipfile.ZipFile(file_stream, 'r') as zfile:
                zfile.extractall(path=target_parent_dir)
        # Rename the target directory.
        os.rename(
            os.path.join(target_parent_dir, zip_root_name),
            os.path.join(target_parent_dir, target_root_name))
        print('Download of %s succeeded.' % zip_root_name)
def download_and_untar_files(
        source_url, target_parent_dir, tar_root_name, target_root_name):
    """Downloads a gzipped tar archive and extracts it under a given dir.

    Nothing is downloaded when the final target directory already exists.

    NB: This function assumes that the root level of the tar file has exactly
    one folder.

    Args:
        source_url: str. The URL from which to download the tar file.
        target_parent_dir: str. The directory to extract the archive into.
        tar_root_name: str. The name of the top-level folder in the tar
            directory.
        target_root_name: str. The name that the top-level folder should be
            renamed to in the local directory.
    """
    if os.path.exists(os.path.join(target_parent_dir, target_root_name)):
        return
    print('Downloading and untarring file %s to %s ...' % (
        tar_root_name, target_parent_dir))
    common.ensure_directory_exists(target_parent_dir)
    urllib.request.urlretrieve(source_url, filename=TMP_UNZIP_PATH)
    with contextlib.closing(tarfile.open(
            name=TMP_UNZIP_PATH, mode='r:gz')) as archive:
        archive.extractall(target_parent_dir)
    os.remove(TMP_UNZIP_PATH)
    # Give the extracted root folder its expected local name.
    os.rename(
        os.path.join(target_parent_dir, tar_root_name),
        os.path.join(target_parent_dir, target_root_name))
    print('Download of %s succeeded.' % tar_root_name)
def get_file_contents(filepath, mode='r'):
    """Gets the contents of a file, given a relative filepath from oppia/."""
    with utils.open_file(filepath, mode) as opened_file:
        contents = opened_file.read()
    return contents
def return_json(filepath):
    """Reads a JSON file from disk and returns the decoded object.

    Args:
        filepath: str. The path to the json file.

    Returns:
        *. A parsed json object. Actual conversion is different based on input
        to json.loads. More details can be found here:
        https://docs.python.org/3/library/json.html#encoders-and-decoders.
    """
    return json.loads(get_file_contents(filepath))
def test_dependencies_syntax(dependency_type, dependency_dict):
    """This checks syntax of the dependencies.json dependencies.

    Display warning message when there is an error and terminate the program.

    Args:
        dependency_type: str. Dependency download format.
        dependency_dict: dict. A dependencies.json dependency dict.
    """
    present_keys = list(dependency_dict.keys())
    spec = DOWNLOAD_FORMATS_TO_DEPENDENCIES_KEYS[dependency_type]
    # Every mandatory key must appear verbatim in the dependency dict.
    for key in spec['mandatory_keys']:
        if key in present_keys:
            continue
        print('------------------------------------------')
        print('There is syntax error in this dependency')
        print(dependency_dict)
        print('This key is missing or misspelled: "%s".' % key)
        print('Exiting')
        sys.exit(1)
    # Each optional pair must contribute exactly one key to the dict.
    for optional_keys in spec['optional_key_pairs']:
        matched_keys = [key for key in optional_keys if key in present_keys]
        if len(matched_keys) == 1:
            continue
        print('------------------------------------------')
        print('There is syntax error in this dependency')
        print(dependency_dict)
        print(
            'Only one of these keys pair must be used: "%s".'
            % ', '.join(optional_keys))
        print('Exiting')
        sys.exit(1)
    # Checks the validity of the URL corresponding to the file format.
    dependency_url = dependency_dict['url']
    if '#' in dependency_url:
        dependency_url = dependency_url.rpartition('#')[0]
    # The URL suffix must agree with the declared format in both directions:
    # a '.zip' URL requires the zip format and vice versa (same for tar).
    url_is_zip = dependency_url.endswith('.zip')
    url_is_tar = dependency_url.endswith('.tar.gz')
    if (url_is_zip != (dependency_type == _DOWNLOAD_FORMAT_ZIP) or
            url_is_tar != (dependency_type == _DOWNLOAD_FORMAT_TAR)):
        print('------------------------------------------')
        print('There is syntax error in this dependency')
        print(dependency_dict)
        print('This url %s is invalid for %s file format.' % (
            dependency_url, dependency_type))
        print('Exiting.')
        sys.exit(1)
def validate_dependencies(filepath):
    """This validates syntax of the dependencies.json

    Args:
        filepath: str. The path to the json file.

    Raises:
        Exception. The 'downloadFormat' not specified.
    """
    dependencies = return_json(filepath)['dependencies']
    for dependency in dependencies.values():
        for dependency_contents in dependency.values():
            if 'downloadFormat' not in dependency_contents:
                raise Exception(
                    'downloadFormat not specified in %s' %
                    dependency_contents)
            test_dependencies_syntax(
                dependency_contents['downloadFormat'], dependency_contents)
def download_all_dependencies(filepath):
    """Downloads every dependency in the manifest into its target folder.

    Args:
        filepath: str. The path to the json file.
    """
    validate_dependencies(filepath)
    dependencies = return_json(filepath)['dependencies']
    for data, dependency in dependencies.items():
        # All entries under one top-level key share a download directory.
        parent_dir = TARGET_DOWNLOAD_DIRS[data]
        for dependency_contents in dependency.values():
            revision = dependency_contents['version']
            url = dependency_contents['url']
            download_format = dependency_contents['downloadFormat']
            if download_format == _DOWNLOAD_FORMAT_FILES:
                destination_dir = os.path.join(
                    parent_dir,
                    dependency_contents['targetDirPrefix'] + revision)
                download_files(
                    url, destination_dir, dependency_contents['files'])
            elif download_format == _DOWNLOAD_FORMAT_ZIP:
                # 'rootDir'/'targetDir' give explicit names; otherwise the
                # names are derived from the prefix plus the revision.
                if 'rootDir' in dependency_contents:
                    zip_root = dependency_contents['rootDir']
                else:
                    zip_root = (
                        dependency_contents['rootDirPrefix'] + revision)
                if 'targetDir' in dependency_contents:
                    target_root = dependency_contents['targetDir']
                else:
                    target_root = (
                        dependency_contents['targetDirPrefix'] + revision)
                download_and_unzip_files(
                    url, parent_dir, zip_root, target_root)
            elif download_format == _DOWNLOAD_FORMAT_TAR:
                tar_root = (
                    dependency_contents['tarRootDirPrefix'] + revision)
                target_root = (
                    dependency_contents['targetDirPrefix'] + revision)
                download_and_untar_files(
                    url, parent_dir, tar_root, target_root)
def install_elasticsearch_dev_server():
    """Installs a local ElasticSearch server into the oppia_tools directory
    for use by development servers and backend tests.

    Raises:
        Exception. The operating system is not recognized or supported.
    """
    try:
        # Probe an existing install by asking the binary for its version.
        subprocess.call(
            ['%s/bin/elasticsearch' % common.ES_PATH, '--version'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            # Set the minimum heap size to 100 MB and maximum to 500 MB.
            env={'ES_JAVA_OPTS': '-Xms100m -Xmx500m'}
        )
        print('ElasticSearch is already installed.')
        return
    except OSError:
        print('Installing ElasticSearch...')

    # Pick the archive extension and matching extraction routine per OS.
    if common.is_mac_os() or common.is_linux_os():
        file_ext = 'tar.gz'
        download_and_extract = download_and_untar_files
    elif common.is_windows_os():
        file_ext = 'zip'
        download_and_extract = download_and_unzip_files
    else:
        raise Exception('Unrecognized or unsupported operating system.')

    download_and_extract(
        'https://artifacts.elastic.co/downloads/elasticsearch/' +
        'elasticsearch-%s-%s-x86_64.%s' % (
            common.ELASTICSEARCH_VERSION,
            common.OS_NAME.lower(),
            file_ext
        ),
        TARGET_DOWNLOAD_DIRS['oppiaTools'],
        'elasticsearch-%s' % common.ELASTICSEARCH_VERSION,
        'elasticsearch-%s' % common.ELASTICSEARCH_VERSION
    )
    print('ElasticSearch installed successfully.')
def install_redis_cli():
    """This installs the redis-cli to the local oppia third_party directory so
    that development servers and backend tests can make use of a local redis
    cache. Redis-cli installed here (redis-cli-6.0.6) is different from the
    redis package installed in dependencies.json (redis-3.5.3). The redis-3.5.3
    package detailed in dependencies.json is the Python library that allows
    users to communicate with any Redis cache using Python. The redis-cli-6.0.6
    package installed in this function contains C++ scripts for the redis-cli
    and redis-server programs detailed below.

    The redis-cli program is the command line interface that serves up an
    interpreter that allows users to connect to a redis database cache and
    query the cache using the Redis CLI API. It also contains functionality to
    shutdown the redis server. We need to install redis-cli separately from the
    default installation of backend libraries since it is a system program and
    we need to build the program files after the library is untarred.

    The redis-server starts a Redis database on the local machine that can be
    queried using either the Python redis library or the redis-cli interpreter.
    """
    try:
        # Probe for an existing installation by asking the server binary for
        # its version. All streams are piped so nothing is printed; a missing
        # binary raises OSError, which triggers the install branch below.
        subprocess.call(
            [common.REDIS_SERVER_PATH, '--version'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        print('Redis-cli is already installed.')
    except OSError:
        # The redis-cli is not installed, run the script to install it.
        # NOTE: We do the installation here since we need to use make.
        print('Installing redis-cli...')

        download_and_untar_files(
            ('https://download.redis.io/releases/redis-%s.tar.gz') %
            common.REDIS_CLI_VERSION,
            TARGET_DOWNLOAD_DIRS['oppiaTools'],
            'redis-%s' % common.REDIS_CLI_VERSION,
            'redis-cli-%s' % common.REDIS_CLI_VERSION)

        # Temporarily change the working directory to redis-cli-6.0.6 so we can
        # build the source code.
        with common.CD(
            os.path.join(
                TARGET_DOWNLOAD_DIRS['oppiaTools'],
                'redis-cli-%s' % common.REDIS_CLI_VERSION)):
            # Build the scripts necessary to start the redis server.
            # The make command only builds the C++ files in the src/ folder
            # without modifying anything outside of the oppia root directory.
            # It will build the redis-cli and redis-server files so that we can
            # run the server from inside the oppia folder by executing the
            # script src/redis-cli and src/redis-server.
            subprocess.call(['make'])

        # Make the scripts executable.
        subprocess.call([
            'chmod', '+x', common.REDIS_SERVER_PATH])
        subprocess.call([
            'chmod', '+x', common.REDIS_CLI_PATH])

        print('Redis-cli installed successfully.')
def main(args=None):
    """Installs all the third party libraries.

    Args:
        args: list(str)|None. Command-line arguments to parse; defaults to
            sys.argv when None.

    Raises:
        Exception. The machine runs the Windows operating system.
    """
    # Guard clause: bail out before doing any work on Windows.
    if common.is_windows_os():
        # The redis cli is not compatible with Windows machines.
        raise Exception(
            'The redis command line interface will not be installed because '
            'your machine is on the Windows operating system.')
    # Parsing is done purely for --help / unknown-flag validation; the
    # parser defines no options of its own.
    _PARSER.parse_args(args=args)
    install_backend_python_libs.main()
    download_all_dependencies(DEPENDENCIES_FILE_PATH)
    install_redis_cli()
    install_elasticsearch_dev_server()


# The 'no coverage' pragma is used as this line is un-testable. This is because
# it will only be called when install_third_party.py is used as a script.
if __name__ == '__main__':  # pragma: no cover
    main()