I’ve been using some of Imran Hasan’s Stack Club tutorial, but last week it failed, and now I see his code fails as well. Could it be an issue with w_2021_49?
[60] # For the rest of this tutorial, we are going to work with coadd data products
tract = 4851
patch = 29  # gen3 analog to gen2 patch id '1,4'
bandList = ["i", "r"]
filterList = bandList  # using filterList for consistency with cells following below
[61] objects = []
for filter in filterList:
    dataId = {'band': filter, 'tract': tract, 'patch': patch}
    objects.append(butler.get("deepCoadd_forced_src", dataId=dataId))
iSources, rSources = objects
KeyError Traceback (most recent call last)
/opt/lsst/software/stack/stack/miniconda3-py38_4.9.2-0.7.0/Linux64/daf_butler/22.0.1-110-g1427568b+500492d978/python/lsst/daf/butler/core/dimensions/_coordinate.py in standardize(mapping, graph, universe, defaults, **kwargs)
233 try:
→ 234 values = tuple(d[name] for name in graph.required.names)
235 except KeyError as err:
/opt/lsst/software/stack/stack/miniconda3-py38_4.9.2-0.7.0/Linux64/daf_butler/22.0.1-110-g1427568b+500492d978/python/lsst/daf/butler/core/dimensions/_coordinate.py in (.0)
233 try:
→ 234 values = tuple(d[name] for name in graph.required.names)
235 except KeyError as err:
KeyError: ‘skymap’
The above exception was the direct cause of the following exception:
KeyError Traceback (most recent call last)
/tmp/ipykernel_3588/2849544431.py in
2 for filter in filterList:
3 dataId = {‘band’:filter, ‘tract’:tract, ‘patch’:patch}
----> 4 objects.append(butler.get(“deepCoadd_forced_src”, dataId=dataId))
5 iSources, rSources = objects
/opt/lsst/software/stack/stack/miniconda3-py38_4.9.2-0.7.0/Linux64/daf_butler/22.0.1-110-g1427568b+500492d978/python/lsst/daf/butler/_butler.py in get(self, datasetRefOrType, dataId, parameters, collections, **kwargs)
1160 “”"
1161 log.debug(“Butler get: %s, dataId=%s, parameters=%s”, datasetRefOrType, dataId, parameters)
→ 1162 ref = self._findDatasetRef(datasetRefOrType, dataId, collections=collections, **kwargs)
1163 return self.getDirect(ref, parameters=parameters)
1164
/opt/lsst/software/stack/stack/miniconda3-py38_4.9.2-0.7.0/Linux64/daf_butler/22.0.1-110-g1427568b+500492d978/python/lsst/daf/butler/_butler.py in _findDatasetRef(self, datasetRefOrType, dataId, collections, allowUnresolved, **kwargs)
902 # type instead of letting registry.findDataset do it, so we get the
903 # result even if no dataset is found.
→ 904 dataId = DataCoordinate.standardize(dataId, graph=datasetType.dimensions,
905 defaults=self.registry.defaults.dataId, **kwargs)
906 # Always lookup the DatasetRef, even if one is given, to ensure it is
/opt/lsst/software/stack/stack/miniconda3-py38_4.9.2-0.7.0/Linux64/daf_butler/22.0.1-110-g1427568b+500492d978/python/lsst/daf/butler/core/dimensions/_coordinate.py in standardize(mapping, graph, universe, defaults, **kwargs)
234 values = tuple(d[name] for name in graph.required.names)
235 except KeyError as err:
→ 236 raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
237 # Some backends cannot handle numpy.int64 type which is a subclass of
238 # numbers.Integral; convert that to int.
KeyError: “No value in data ID ({‘band’: ‘i’, ‘tract’: 4851, ‘patch’: 29}) for required dimension ‘skymap’.”