33
44import datetime
55import filecmp
6+ import logging
67import time
78import zipfile
89import pytest
910import icat
11+ import icat .client
1012import icat .config
11- from icat .ids import DataSelection
13+ from icat .ids import IDSClient , DataSelection
1214from icat .query import Query
1315from conftest import DummyDatafile , UtcTimezone
1416from conftest import getConfig , tmpSessionId , tmpClient
1517
# Module level logger; used by LoggingIDSClient below to trace IDS calls.
logger = logging.getLogger(__name__)
1619
17- @pytest .fixture (scope = "module" )
18- def client (setupicat ):
19- client , conf = getConfig (ids = "mandatory" )
20- client .login (conf .auth , conf .credentials )
21- yield client
22- query = "SELECT df FROM Datafile df WHERE df.location IS NOT NULL"
# One gibibyte, used as the size of the dummy datafile in test_restoreData().
GiB = 1024 ** 3
21+
class LoggingIDSClient(IDSClient):
    """IDSClient subclass that emits a debug log line for some calls.
    """
    def getStatus(self, selection):
        # Delegate to the base implementation and log the outcome.
        result = super().getStatus(selection)
        logger.debug("getStatus(%s): %s", selection, result, stacklevel=2)
        return result
29+
30+ def _delete_datafiles (client , query ):
2331 while True :
2432 try :
2533 client .deleteData (client .search (query ))
@@ -28,6 +36,45 @@ def client(setupicat):
2836 else :
2937 break
3038
@pytest.fixture(scope="module")
def cleanup(setupicat):
    """Module scoped teardown: log in as root and delete all datafiles
    having a storage location once the tests in this module are done.
    """
    root_client, root_conf = getConfig(confSection="root", ids="mandatory")
    root_client.login(root_conf.auth, root_conf.credentials)
    yield
    _delete_datafiles(
        root_client,
        "SELECT df FROM Datafile df WHERE df.location IS NOT NULL",
    )
46+
@pytest.fixture(scope="function")
def client(monkeypatch, cleanup):
    """Provide a logged in client whose IDS client logs its calls.

    The IDSClient class is patched so that the client created by
    getConfig() uses LoggingIDSClient instead.
    """
    monkeypatch.setattr(icat.client, "IDSClient", LoggingIDSClient)
    cl, conf = getConfig(ids="mandatory")
    cl.login(conf.auth, conf.credentials)
    yield cl
53+
@pytest.fixture(scope="function")
def dataset(client, cleanup_objs):
    """A dataset to be used in the test.

    The dataset will eventually be deleted after the test.
    """
    inv = client.assertedSearch(Query(client, "Investigation", conditions={
        "name": "= '10100601-ST'",
    }))[0]
    dstype = client.assertedSearch(Query(client, "DatasetType", conditions={
        "name": "= 'raw'",
    }))[0]
    dataset = client.new("Dataset",
                         name="e208343", complete=False,
                         investigation=inv, type=dstype)
    dataset.create()
    cleanup_objs.append(dataset)
    yield dataset
    # Teardown: delete any datafiles of this dataset that have a
    # storage location, so the dataset itself can be cleaned up later.
    query = Query(client, "Datafile", conditions={
        "dataset.id": "= %d" % dataset.id,
        "location": "IS NOT NULL",
    })
    _delete_datafiles(client, query)
77+
3178
3279# ============================ testdata ============================
3380
@@ -392,6 +439,55 @@ def test_restore(client, case):
392439 # outcome of the restore() call.
393440 print ("Status of dataset %s is now %s" % (case ['dsname' ], status ))
394441
@pytest.mark.parametrize(("case"), markeddatasets)
def test_restoreDataCall(client, case):
    """Test the high level call restoreData().

    This is essentially a no-op as the dataset in question will
    already be ONLINE.  It only tests that the call does not throw an
    error.
    """
    ds = getDataset(client, case)
    client.restoreData([ds])
    assert client.ids.getStatus(DataSelection([ds])) == "ONLINE"
454+
@pytest.mark.parametrize(("case"), markeddatasets)
def test_restoreDataCallSelection(client, case):
    """Test the high level call restoreData().

    Same as the previous test, but pass a DataSelection rather than a
    list of datasets as argument.
    """
    selection = DataSelection([getDataset(client, case)])
    client.restoreData(selection)
    assert client.ids.getStatus(selection) == "ONLINE"
465+
@pytest.mark.slow
def test_restoreData(tmpdirsec, client, dataset):
    """Test restoring data with the high level call restoreData().

    Archive a dataset and then call restoreData() to bring it back
    ONLINE.  The dataset is large enough that the restore takes some
    time, so we can actually observe the call waiting until restoring
    has finished.  As a result, the test is rather slow; it is marked
    as such and thus disabled by default.
    """
    if not client.ids.isTwoLevel():
        pytest.skip("This IDS does not use two levels of data storage")
    dummy = DummyDatafile(tmpdirsec, "e208343.nxs", GiB)
    fmt = client.assertedSearch(Query(client, "DatafileFormat", conditions={
        "name": "= 'NeXus'",
    }))[0]
    df = client.new("Datafile", name=dummy.fname.name,
                    dataset=dataset, datafileFormat=fmt)
    client.putData(dummy.fname, df)
    selection = DataSelection([dataset])
    client.ids.archive(selection)
    client.restoreData([dataset])
    assert client.ids.getStatus(selection) == "ONLINE"
490+
395491@pytest .mark .parametrize (("case" ), markeddatasets )
396492def test_reset (client , case ):
397493 """Call reset() on a dataset.
0 commit comments