Commit 82cdce0

Merge pull request #3086 from pared/2896_remote
remote: test: migrate to dir helpers
2 parents 2777470 + 04912f2 commit 82cdce0

File tree

1 file changed: +25 -49 lines changed

tests/func/test_remote.py

Lines changed: 25 additions & 49 deletions
@@ -7,13 +7,14 @@
 from mock import patch
 
 from dvc.config import Config
+from dvc.exceptions import DownloadError, UploadError
 from dvc.main import main
 from dvc.path_info import PathInfo
 from dvc.remote import RemoteLOCAL, RemoteConfig
 from dvc.remote.base import RemoteBASE
 from dvc.compat import fspath
 from tests.basic_env import TestDvc
-from tests.remotes import get_local_url, get_local_storagepath
+from tests.remotes import get_local_storagepath
 
 
 class TestRemote(TestDvc):
@@ -147,7 +148,7 @@ def test(self):
         self.assertEqual(default, None)
 
 
-def test_show_default(dvc_repo, capsys):
+def test_show_default(dvc, capsys):
     assert main(["remote", "default", "foo"]) == 0
     assert main(["remote", "default"]) == 0
     out, _ = capsys.readouterr()
@@ -169,34 +170,11 @@ def test(self):
         self.assertEqual(ret, 0)
 
 
-def test_large_dir_progress(repo_dir, dvc_repo):
-    from dvc.utils import LARGE_DIR_SIZE
-    from dvc.progress import Tqdm
+def test_dir_checksum_should_be_key_order_agnostic(tmp_dir, dvc):
+    tmp_dir.gen({"data": {"1": "1 content", "2": "2 content"}})
 
-    # Create a "large dir"
-    for i in range(LARGE_DIR_SIZE + 1):
-        repo_dir.create(os.path.join("gen", "{}.txt".format(i)), str(i))
-
-    with patch.object(Tqdm, "update") as update:
-        assert not update.called
-        dvc_repo.add("gen")
-        assert update.called
-
-
-def test_dir_checksum_should_be_key_order_agnostic(dvc_repo):
-    data_dir = os.path.join(dvc_repo.root_dir, "data")
-    file1 = os.path.join(data_dir, "1")
-    file2 = os.path.join(data_dir, "2")
-
-    os.mkdir(data_dir)
-    with open(file1, "w") as fobj:
-        fobj.write("1")
-
-    with open(file2, "w") as fobj:
-        fobj.write("2")
-
-    path_info = PathInfo(data_dir)
-    with dvc_repo.state:
+    path_info = PathInfo("data")
+    with dvc.state:
         with patch.object(
             RemoteBASE,
             "_collect_dir",
@@ -205,7 +183,7 @@ def test_dir_checksum_should_be_key_order_agnostic(dvc_repo):
                 {"relpath": "2", "md5": "2"},
             ],
         ):
-            checksum1 = dvc_repo.cache.local.get_dir_checksum(path_info)
+            checksum1 = dvc.cache.local.get_dir_checksum(path_info)
 
         with patch.object(
             RemoteBASE,
@@ -215,19 +193,19 @@ def test_dir_checksum_should_be_key_order_agnostic(dvc_repo):
                 {"md5": "2", "relpath": "2"},
             ],
         ):
-            checksum2 = dvc_repo.cache.local.get_dir_checksum(path_info)
+            checksum2 = dvc.cache.local.get_dir_checksum(path_info)
 
     assert checksum1 == checksum2
 
 
-def test_partial_push_n_pull(dvc_repo, repo_dir, caplog):
-    assert main(["remote", "add", "-d", "upstream", get_local_url()]) == 0
-    # Recreate the repo to reread config
-    repo = dvc_repo.__class__(dvc_repo.root_dir)
-    remote = repo.cloud.get_remote("upstream")
+def test_partial_push_n_pull(tmp_dir, dvc, tmp_path_factory):
+    remote_config = RemoteConfig(dvc.config)
+    remote_config.add(
+        "upstream", fspath(tmp_path_factory.mktemp("upstream")), default=True
+    )
 
-    foo = repo.add(repo_dir.FOO)[0].outs[0]
-    bar = repo.add(repo_dir.BAR)[0].outs[0]
+    foo = tmp_dir.dvc_gen({"foo": "foo content"})[0].outs[0]
+    bar = tmp_dir.dvc_gen({"bar": "bar content"})[0].outs[0]
 
     # Faulty upload version, failing on foo
     original = RemoteLOCAL._upload
@@ -238,24 +216,22 @@ def unreliable_upload(self, from_file, to_info, name=None, **kwargs):
         return original(self, from_file, to_info, name, **kwargs)
 
     with patch.object(RemoteLOCAL, "_upload", unreliable_upload):
-        assert main(["push"]) == 1
-        assert str(get_last_exc(caplog)) == "1 files failed to upload"
+        with pytest.raises(UploadError) as upload_error_info:
+            dvc.push()
+        assert upload_error_info.value.amount == 1
 
+    remote = dvc.cloud.get_remote("upstream")
     assert not remote.exists(remote.checksum_to_path_info(foo.checksum))
    assert remote.exists(remote.checksum_to_path_info(bar.checksum))
 
     # Push everything and delete local cache
-    assert main(["push"]) == 0
-    shutil.rmtree(repo.cache.local.cache_dir)
+    dvc.push()
+    shutil.rmtree(dvc.cache.local.cache_dir)
 
     with patch.object(RemoteLOCAL, "_download", side_effect=Exception):
-        assert main(["pull"]) == 1
-        assert str(get_last_exc(caplog)) == "2 files failed to download"
-
-
-def get_last_exc(caplog):
-    _, exc, _ = caplog.records[-2].exc_info
-    return exc
+        with pytest.raises(DownloadError) as download_error_info:
+            dvc.pull()
+        assert download_error_info.value.amount == 2
 
 
 def test_raise_on_too_many_open_files(tmp_dir, dvc, tmp_path_factory, mocker):
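
Note on the "dir helpers" named in the commit message: the migrated tests lean on the tmp_dir and dvc pytest fixtures from DVC's test suite, where tmp_dir.gen(...) writes plain files into a temporary workspace and tmp_dir.dvc_gen(...) additionally dvc-adds them and returns the resulting stages. A minimal sketch of the pattern, assuming those fixtures are available and the test runs with the temporary workspace as the working directory; the test name and file contents below are illustrative only and not part of this commit:

import os

def test_dir_helpers_sketch(tmp_dir, dvc):
    # tmp_dir.gen creates plain files and directories in the temp workspace
    tmp_dir.gen({"data": {"1": "1 content", "2": "2 content"}})
    assert os.path.exists(os.path.join("data", "1"))

    # tmp_dir.dvc_gen also tracks the generated files with DVC and
    # returns the created stages, as used in the diff above
    foo = tmp_dir.dvc_gen({"foo": "foo content"})[0].outs[0]
    assert foo.checksum is not None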
