Commit ac884a50 authored by Derek Homeier

Set data type and path to f"l{a.Level().value}_data"

parent 92ce4bd9
Pipeline #2792 failed in 4 minutes and 11 seconds
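This commit keys the record type and download path off the calibration level given in the query. A minimal sketch of that mapping, extracted from the diff below (the function name and sample strings are illustrative, not part of the commit):

    import re

    def data_type_for(query_str):
        # The REST query embeds a MongoDB-style filter such as
        # "{'description.CALIB_LEVEL':2}"; recover the level digit,
        # falling back to level 1 when no a.Level attr was given.
        cl = re.search(r"{'description.CALIB_LEVEL':(?P<level>\d+)}", query_str)
        return f"l{int(cl['level'])}_data" if cl else "l1_data"

    data_type_for("{'description.CALIB_LEVEL':2}")                 # -> 'l2_data'
    data_type_for("{'description.OBS_NAME':'gris_20160826_012'}")  # -> 'l1_data'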
@@ -5,6 +5,7 @@
 #
 import os
+import re
 import copy
 import warnings
@@ -285,21 +286,28 @@ class KISClient(BaseClient):
         queries = self._make_search(walker.create(query))
         results = []
-        for query_string in queries:
-            full_url = f"{self._BASE_URL}{query_string}"
+        for query_str in queries:
+            full_url = f"{self._BASE_URL}{query_str}"
             try:
                 response = urllib.request.urlopen(full_url)
                 obs_dict = json.loads(response.read()).get('_embedded', [])
                 if len(obs_dict) > 0 and 'links' in obs_dict[0]:
-                    l1_data = obs_dict[0]['links'].get('l1_data', [])
+                    # Check if a.Level was set in this query (defaults to 1)
+                    cl = re.compile(r"{'description.CALIB_LEVEL':(?P<level>\d+)}").search(query_str)
+                    if cl:
+                        data_type = f"l{int(cl['level'])}_data"
+                    else:
+                        data_type = "l1_data"
+                    data_ln = obs_dict[0]['links'].get(data_type, [])
                     obs_dict[0].update(obs_dict[0].pop('description', dict()))
                     for k in obs_dict[0].keys():
                         if k[:5] in converters:
                             obs_dict[0][k] = converters[k[:5]](obs_dict[0][k])
                     obs_rec = []
-                    for exp in l1_data:
+                    for exp in data_ln:
                         obs_rec.append(copy.deepcopy(obs_dict[0]))
-                        obs_rec[-1]['links']['l1_data'] = exp
+                        obs_rec[-1]['links'] = exp
+                        obs_rec[-1]['data_type'] = data_type
                     results += obs_rec
             except(HTTPError, URLError) as exc:
                 raise URLError(f'Unable to execute search "{full_url}": {exc}. Confirm that '
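With the level resolved, each matching observation is fanned out into one result row per exposure file: the row's 'links' entry now holds a single file reference, and the row remembers its data_type for the fetch step. A rough sketch of that loop with a hypothetical payload:

    import copy

    obs = {'OBS_NAME': 'gris_20160826_012',   # hypothetical API payload
           'links': {'l1_data': [{'$oid': 'a1'}, {'$oid': 'a2'}]}}
    data_type = 'l1_data'
    rows = []
    for exp in obs['links'].get(data_type, []):
        row = copy.deepcopy(obs)
        row['links'] = exp            # single file reference per row
        row['data_type'] = data_type  # reused later to build the fetch URL
        rows.append(row)
    # rows[0]['links']['$oid'] == 'a1'; rows[1]['links']['$oid'] == 'a2'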
@@ -336,11 +344,11 @@ class KISClient(BaseClient):
         for row in query_results:
             inst = row['INSTRUMENT']
-            oid = row['links']['l1_data']['$oid']
+            oid = row['links']['$oid']
             # Content-Disposition header default is "{row['_id']['$oid']}/{oid}.{ext}" (no '.json').
             # rowpath = row['_id']['$oid']
             filepath = os.path.join(row['OBS_NAME'], f"{oid}.{ext}")
-            url = f"{self._BASE_URL}{inst}_l1_data.files/{oid}{binfile}"
+            url = f"{self._BASE_URL}{inst}_{row['data_type']}.files/{oid}{binfile}"
             downloader.enqueue_file(url, filename=str(path).format(file=filepath,
                                                                    **row.response_block_map))
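On the fetch side, the stored data_type replaces the previously hard-coded l1_data segment of the GridFS URL. Roughly (the base URL and object id shown are stand-ins, not values from the commit):

    row = {'INSTRUMENT': 'gris', 'data_type': 'l2_data',
           'links': {'$oid': '5c34f624a13a72706b9f2f90'}}   # illustrative values
    binfile = '/binary'   # '' when fetching the JSON metadata record instead of FITS
    url = (f"https://sdc.example.org/{row['INSTRUMENT']}_{row['data_type']}"
           f".files/{row['links']['$oid']}{binfile}")
    # -> 'https://sdc.example.org/gris_l2_data.files/5c34f624a13a72706b9f2f90/binary'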
...
@@ -76,7 +76,7 @@ def test_docker(client):
     assert res[0]['DATE_BEG'].unix == 1398505619.000
     assert res[0]['DATE_END'].unix == 1398506021.300
-    file_ids = [ld['links']['l1_data']['$oid'] for ld in res]
+    file_ids = [ld['links']['$oid'] for ld in res]
     assert len(file_ids) == 105
     for oid in file_ids[0], file_ids[104]:
         meta = json.loads(urllib.request.urlopen(f"{_BASE_URL}gris_l1_data.files/{oid}").read())
@@ -93,7 +93,7 @@ def test_docker(client):
     binfile = ''
     ext = 'json'
     for i, ld in enumerate(res[:10]):
-        oid = ld['links']['l1_data']['$oid']
+        oid = ld['links']['$oid']
         filename = f"{oid}.{ext}"
         url = f"{_BASE_URL}{inst}_l1_data.files/{oid}{binfile}"
         assert url == f"{_BASE_URL}gris_l1_data.files/{file_ids[i]}"
@@ -102,14 +102,14 @@ def test_docker(client):
     binfile = '/binary'
     ext = 'fits'
     for ld in res[:2]:
-        oid = ld['links']['l1_data']['$oid']
+        oid = ld['links']['$oid']
         filename = f"{oid}.{ext}"
         url = f"{_BASE_URL}{inst}_l1_data.files/{oid}{binfile}"
         downloader.enqueue_file(url, filename=os.path.join(rowpath, filename), max_splits=1)
     assert downloader.queued_downloads == 12
     assert downloader.http_queue[0].keywords['url'].startswith(_BASE_URL)
-    assert res[0]['links']['l1_data']['$oid'] in downloader.http_queue[0].keywords['url']
+    assert res[0]['links']['$oid'] in downloader.http_queue[0].keywords['url']
     assert downloader.http_queue[10].keywords['url'].endswith(binfile)
     files = downloader.download()
@@ -367,6 +367,7 @@ def test_fido_fetch_2():
     desc = table.vstack(res['kis'])
     assert len(desc) == 301
+    assert all(desc['CALIB_LEVEL'] == 1)
     assert max(desc['DATE_BEG']) < date.end
     assert min(desc['DATE_END']) > date.start
@@ -399,6 +400,43 @@ def test_fido_fetch_2():
     assert len(files) == 11


+@pytest.mark.parametrize("level", ((0, 0), (1, 400), (2, 0)))
+def test_level_data(level):
+    """Test search and fetch of l{level[0]}_data - no actual data found for l0 + l2."""
+    obsname = 'gris_20160826_012'
+    query = (a.Instrument("GRIS"), a.sdc.ObsName(obsname), a.Level(level[0]))
+    if not HAS_DOCKERTEST:
+        with pytest.raises(URLError, match=rf"{_dockerexc('gris')}"
+                                           rf"{{'description.OBS_NAME':'{obsname}'}},"
+                                           rf"{{'description.CALIB_LEVEL':{level[0]}"):
+            res = Fido.search(*query)
+        return
+    res = Fido.search(*query)
+    assert len(res['kis']) == level[1]
+    if level[1] == 0:
+        pytest.xfail(f"No level {level[0]} data in observation '{obsname}'")
+    assert all(res['kis']['CALIB_LEVEL'] == level[0])
+    files = Fido.fetch(res['kis'][:10])
+    assert len(files) == min(level[1], 10)
+    for filepath in files:
+        assert dirnames(filepath)[-1] == res['kis'][0]['OBS_NAME']
+        meta = json.load(open(filepath))
+        assert meta['_id']['$oid'] == os.path.splitext(os.path.basename(filepath))[0]
+        assert meta['metadata']['header']['CAMERA'] == 'IR1024'
+        assert meta['metadata']['header']['FILENAME'].split('-')[0] in res['kis'][0]['OBS_NAME']
+    files = Fido.fetch(res['kis'][:1], binary=True)
+    assert len(files) == 1
+    assert dirnames(files[0])[-1] == res['kis'][0]['OBS_NAME']
+    hdulist = fits.open(files[0])
+    assert hdulist[0].header.get('TELESCOP') == 'GREGOR'
+    assert hdulist[0].header.get('CAMERA') == 'IR1024'
+    hdulist.close()
+
+
 @pytest.mark.parametrize("query", ((a.Instrument("GRIS") & a.Level(3)),
                                    (a.Instrument("ChroTel") & a.Physobs("perspective.vortex")),
                                    (a.Level(0) & a.Instrument("Bob")),
...
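The new test mirrors the user-facing flow. A level-specific query would look roughly like this, assuming the KIS client package has been imported so that its a.sdc attrs are registered with Fido:

    from sunpy.net import Fido, attrs as a

    res = Fido.search(a.Instrument("GRIS"),
                      a.sdc.ObsName("gris_20160826_012"),
                      a.Level(1))
    files = Fido.fetch(res['kis'][:10])                    # JSON metadata records
    fits_files = Fido.fetch(res['kis'][:1], binary=True)   # FITS binaries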