forked from templateflow/python-client
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest_s3.py
More file actions
151 lines (116 loc) · 4.81 KB
/
test_s3.py
File metadata and controls
151 lines (116 loc) · 4.81 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#
# Copyright 2024 The NiPreps Developers <nipreps@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# We support and encourage derived works from this project, please read
# about our expectations at
#
# https://www.nipreps.org/community/licensing/
#
"""Check S3-type repo tooling."""
from importlib import reload
from pathlib import Path
import pytest
import requests
from templateflow import api as tf
from templateflow import conf as tfc
from .data import load_data
def test_get_skel_file(tmp_path, monkeypatch):
    """Exercise the skeleton file generation.

    Covers four paths of ``tfc._s3._get_skeleton_file()``:
    a stale local checksum (download happens), an up-to-date checksum
    (no download), an unreachable host, and a bogus URL extension.
    """
    home = (tmp_path / 's3-skel-file').resolve()
    monkeypatch.setenv('TEMPLATEFLOW_USE_DATALAD', 'off')
    monkeypatch.setenv('TEMPLATEFLOW_HOME', str(home))
    # First execution, the S3 stub is created (or datalad install)
    reload(tfc)
    local_md5 = tfc._s3.TF_SKEL_MD5
    # A made-up checksum never matches the remote one -> skeleton is fetched
    monkeypatch.setattr(tfc._s3, 'TF_SKEL_MD5', 'invent')
    new_skel = tfc._s3._get_skeleton_file()
    assert new_skel is not None
    assert Path(new_skel).exists()
    assert Path(new_skel).stat().st_size > 0
    # BUG FIX: ``allow_redirects`` was previously passed to the
    # ``TF_SKEL_URL(...)`` format call (where str.format silently ignores
    # unused kwargs) instead of to ``requests.get`` as intended.
    latest_md5 = (
        requests.get(
            tfc._s3.TF_SKEL_URL(release='master', ext='md5'),
            allow_redirects=True,
            timeout=10,
        )
        .content.decode()
        .split()[0]
    )
    # Local checksum now matches the latest remote one -> nothing to fetch
    monkeypatch.setattr(tfc._s3, 'TF_SKEL_MD5', latest_md5)
    assert tfc._s3._get_skeleton_file() is None
    monkeypatch.setattr(tfc._s3, 'TF_SKEL_MD5', local_md5)
    # Unresolvable host -> graceful failure (returns None)
    monkeypatch.setattr(tfc._s3, 'TF_SKEL_URL', 'http://weird/{release}/{ext}'.format)
    assert tfc._s3._get_skeleton_file() is None
    # Valid host but broken extension (e.g. ``md5z``) -> graceful failure
    monkeypatch.setattr(
        tfc._s3, 'TF_SKEL_URL', tfc._s3.TF_SKEL_URL(release='{release}', ext='{ext}z').format
    )
    assert tfc._s3._get_skeleton_file() is None
def test_update_s3(tmp_path, monkeypatch):
    """Exercise updating the S3 skeleton."""
    target = (tmp_path / 's3-update').resolve()
    monkeypatch.setenv('TEMPLATEFLOW_USE_DATALAD', 'off')
    monkeypatch.setenv('TEMPLATEFLOW_HOME', str(target))

    def _drop_niftis():
        # Remove the NIfTI stubs so the next update has work to do
        for nii in (target / 'tpl-MNI152NLin6Sym').glob('*.nii.gz'):
            nii.unlink()

    # Fresh home: the first update must populate it
    assert tfc._s3.update(target)
    # Nothing is missing, so a non-overwriting update is a no-op
    assert not tfc._s3.update(target, overwrite=False)
    _drop_niftis()
    assert tfc._s3.update(target, overwrite=False)
    # This should cover the remote zip file fetching
    monkeypatch.setattr(tfc._s3, 'TF_SKEL_MD5', 'invent')
    assert tfc._s3.update(target, local=False)
    assert not tfc._s3.update(target, local=False, overwrite=False)
    _drop_niftis()
    assert tfc._s3.update(target, local=False, overwrite=False)
def mock_get(*args, **kwargs):
    """Stand-in for :func:`requests.get` that always reports HTTP 400."""
    stub_cls = type('MockResponse', (), {'status_code': 400})
    return stub_cls()
def test_s3_400_error(monkeypatch):
    """Simulate a 400 error when fetching the skeleton file."""
    reload(tfc)
    reload(tf)
    # Every HTTP fetch now answers with a 400 response
    monkeypatch.setattr(requests, 'get', mock_get)
    target = (
        Path(tfc.TF_LAYOUT.root)
        / 'tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-02_T1w.nii.gz'
    )
    with pytest.raises(RuntimeError, match=r'Failed to download .* code 400'):
        tf._s3_get(target)
def test_bad_skeleton(tmp_path, monkeypatch):
    """Check that files holding an S3 error response are truncated and re-fetched."""
    home = (tmp_path / 's3-update').resolve()
    monkeypatch.setattr(tfc, 'TF_USE_DATALAD', False)
    monkeypatch.setattr(tfc, 'TF_HOME', home)
    monkeypatch.setattr(tfc, 'TF_LAYOUT', None)
    tfc._init_cache()
    tfc.init_layout()
    assert tfc.TF_LAYOUT is not None
    assert tfc.TF_LAYOUT.root == str(home)
    # Instead of reloading
    monkeypatch.setattr(tf, 'TF_LAYOUT', tfc.TF_LAYOUT)

    hits = tf.ls('MNI152NLin2009cAsym', resolution='02', suffix='T1w', desc=None)
    assert hits
    first = Path(hits[0])
    assert first.read_bytes() == b''

    # Plant an S3 XML error payload where an image stub should be
    error_xml = load_data.readable('error_response.xml').read_bytes()
    first.write_bytes(error_xml)
    # Test directly before testing through API paths
    tf._truncate_s3_errors(hits)
    assert first.read_bytes() == b''

    first.write_bytes(error_xml)
    monkeypatch.setattr(requests, 'get', mock_get)
    with pytest.raises(RuntimeError):
        tf.get('MNI152NLin2009cAsym', resolution='02', suffix='T1w', desc=None)
    # Running get clears bad files before attempting to download
    assert first.read_bytes() == b''