Coverage for /opt/conda/envs/apienv/lib/python3.10/site-packages/daiquiri/cli/hdf5_to_json.py: 0% (21 statements)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import json
import argparse

from daiquiri.core.components.hdf5 import hdf5_to_dict


def file_and_path(string):
    """Parse a filename string following the `silx.io` convention: `filename.h5::path`."""
    if "::" in string:
        return string.split("::", 1)
    return string, None
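# Illustrative examples of file_and_path (hypothetical filenames, not from the source):
#   file_and_path("scan.h5::/entry/data")  -> ["scan.h5", "/entry/data"]
#   file_and_path("scan.h5")               -> ("scan.h5", None)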


def main():
    parser = argparse.ArgumentParser(
        description="HDF5 to JSON as provided by the hdf5 component server"
    )
    parser.add_argument(
        "input",
        type=file_and_path,
        default=None,
        help="Link to the HDF5 file and data path: filename.h5::path",
    )
    parser.add_argument(
        "--load_data",
        action="store_true",
        default=False,
        help="If set, include the data of the datasets; otherwise data is set to None",
    )

    options = parser.parse_args()
    filename, path = options.input
    result = hdf5_to_dict(filename, path, load_data=options.load_data)
    print(json.dumps(result))

    return 0


if __name__ == "__main__":
    result = main()
    sys.exit(result)
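# Example invocation (illustrative; "scan.h5" and "/entry/data" are placeholder names,
# and this assumes daiquiri is installed so the module can be run with -m):
#   python -m daiquiri.cli.hdf5_to_json scan.h5::/entry/data --load_data > output.json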