From 0550eaf48c9998289f5a2408ac71d22739a79951 Mon Sep 17 00:00:00 2001 From: Ben Bornstein Date: Tue, 5 Apr 2016 06:46:22 -0700 Subject: [PATCH] Initial import Merge OCO-3 BLISS and ECOSTRESS BLISS. --- .gitignore | 45 + .mailmap | 5 + .pylintrc | 336 ++++++ LICENSE.txt | 7 + README.md | 51 + bliss/__init__.py | 15 + bliss/core/__init__.py | 15 + bliss/core/api.py | 672 +++++++++++ bliss/core/bin/__init__.py | 13 + bliss/core/bin/bliss_bsc.py | 87 ++ bliss/core/bin/bliss_bsc_create_handler.py | 89 ++ bliss/core/bin/bliss_bsc_stop_handler.py | 54 + bliss/core/bin/bliss_cmd_send.py | 107 ++ bliss/core/bin/bliss_create_dirs.py | 235 ++++ bliss/core/bin/bliss_dict_writer.py | 86 ++ bliss/core/bin/bliss_pcap.py | 134 +++ bliss/core/bin/bliss_pcap_segment.py | 110 ++ bliss/core/bin/bliss_seq_decode.py | 73 ++ bliss/core/bin/bliss_seq_encode.py | 70 ++ bliss/core/bin/bliss_seq_print.py | 66 + bliss/core/bin/bliss_seq_send.py | 113 ++ bliss/core/bin/bliss_table_decode.py | 77 ++ bliss/core/bin/bliss_table_encode.py | 74 ++ bliss/core/bin/bliss_tlm_db_insert.py | 109 ++ bliss/core/bin/bliss_tlm_send.py | 108 ++ bliss/core/bin/bliss_yaml_validate.py | 207 ++++ bliss/core/bsc.py | 815 +++++++++++++ bliss/core/ccsds.py | 124 ++ bliss/core/cfg.py | 417 +++++++ bliss/core/cmd.py | 516 ++++++++ bliss/core/coord.py | 150 +++ bliss/core/data/cmd_schema.json | 105 ++ bliss/core/data/evr_schema.json | 27 + bliss/core/data/limits_schema.json | 69 ++ bliss/core/data/table_schema.json | 124 ++ bliss/core/data/tlm_schema.json | 133 ++ bliss/core/db.py | 106 ++ bliss/core/dmc.py | 298 +++++ bliss/core/dtype.py | 821 +++++++++++++ bliss/core/evr.py | 198 +++ bliss/core/gds.py | 314 +++++ bliss/core/geom.py | 823 +++++++++++++ bliss/core/json.py | 97 ++ bliss/core/limits.py | 226 ++++ bliss/core/log.py | 326 +++++ bliss/core/pcap.py | 557 +++++++++ bliss/core/seq.py | 737 ++++++++++++ bliss/core/table.py | 720 +++++++++++ bliss/core/test/__init__.py | 84 ++ bliss/core/test/test_bsc.py | 542 
+++++++++ bliss/core/test/test_ccsds.py | 73 ++ bliss/core/test/test_cfg.py | 278 +++++ bliss/core/test/test_cmd.py | 143 +++ bliss/core/test/test_coord.py | 82 ++ bliss/core/test/test_dmc.py | 45 + bliss/core/test/test_dtype.py | 213 ++++ bliss/core/test/test_evr.py | 73 ++ bliss/core/test/test_limits.py | 138 +++ bliss/core/test/test_log.py | 73 ++ bliss/core/test/test_pcap.py | 317 +++++ bliss/core/test/test_table.py | 223 ++++ bliss/core/test/test_tlm.py | 446 +++++++ bliss/core/test/test_util.py | 189 +++ bliss/core/test/test_val.py | 450 +++++++ bliss/core/test/testdata/testValidTable1.yaml | 45 + bliss/core/test/testdata/util/test_util.txt | 2 + .../test/testdata/val/testCmdValidator1.yaml | 31 + .../test/testdata/val/testCmdValidator2.yaml | 31 + .../test/testdata/val/testCmdValidator3.yaml | 14 + .../test/testdata/val/testCmdValidator4.yaml | 24 + .../test/testdata/val/testCmdValidator5.yaml | 24 + .../test/testdata/val/testCmdValidator6.yaml | 24 + .../test/testdata/val/testCmdValidator7.yaml | 24 + .../test/testdata/val/testCmdValidator8.yaml | 24 + .../test/testdata/val/testInvalidCmd1.yaml | 14 + .../test/testdata/val/testInvalidTlm1.yaml | 15 + .../test/testdata/val/testSchemaLoad1.json | 20 + .../test/testdata/val/testTlmValidator1.yaml | 34 + .../test/testdata/val/testTlmValidator2.yaml | 44 + .../test/testdata/val/testTlmValidator3.yaml | 44 + .../test/testdata/val/testTlmValidator4.yaml | 44 + .../test/testdata/val/testTlmValidator5.yaml | 44 + .../test/testdata/val/testTlmValidator6.yaml | 44 + .../test/testdata/val/testTlmValidator7.yaml | 15 + .../core/test/testdata/val/testValidCmd1.yaml | 85 ++ .../core/test/testdata/val/testValidTlm1.yaml | 14 + bliss/core/tlm.py | 974 +++++++++++++++ bliss/core/util.py | 452 +++++++ bliss/core/val.py | 734 +++++++++++ bliss/data/settings.yaml | 2 + build/clean.py | 21 + build/generate_changelog.py | 61 + build/pypi/make-pypi.sh | 16 + build/pypi/simple.py | 98 ++ build/update_docs_release.sh | 22 + 
data/config/bsc.yaml | 41 + data/config/ccsds_header.yaml | 239 ++++ data/config/cmd.yaml | 51 + data/config/config.yaml | 52 + data/config/evr.yaml | 23 + data/config/limits/limits.yaml | 21 + data/config/table.yaml | 64 + data/config/tlm.yaml | 88 ++ doc/Makefile | 223 ++++ doc/make.bat | 272 +++++ doc/source/_static/ccsds_prim_header.png | Bin 0 -> 6947 bytes doc/source/api_intro.rst | 107 ++ doc/source/bliss.core.api.rst | 7 + doc/source/bliss.core.bin.bliss_bsc.rst | 7 + ...liss.core.bin.bliss_bsc_create_handler.rst | 7 + .../bliss.core.bin.bliss_bsc_stop_handler.rst | 7 + doc/source/bliss.core.bin.bliss_cmd_send.rst | 7 + .../bliss.core.bin.bliss_create_dirs.rst | 7 + .../bliss.core.bin.bliss_dict_writer.rst | 7 + doc/source/bliss.core.bin.bliss_orbits.rst | 7 + doc/source/bliss.core.bin.bliss_pcap.rst | 7 + .../bliss.core.bin.bliss_pcap_segment.rst | 7 + .../bliss.core.bin.bliss_seq_decode.rst | 7 + .../bliss.core.bin.bliss_seq_encode.rst | 7 + doc/source/bliss.core.bin.bliss_seq_print.rst | 7 + doc/source/bliss.core.bin.bliss_seq_send.rst | 7 + .../bliss.core.bin.bliss_table_decode.rst | 7 + .../bliss.core.bin.bliss_table_encode.rst | 7 + .../bliss.core.bin.bliss_tlm_db_insert.rst | 7 + doc/source/bliss.core.bin.bliss_tlm_send.rst | 7 + .../bliss.core.bin.bliss_yaml_validate.rst | 7 + doc/source/bliss.core.bin.rst | 33 + doc/source/bliss.core.bsc.rst | 7 + doc/source/bliss.core.ccsds.rst | 7 + doc/source/bliss.core.cfg.rst | 7 + doc/source/bliss.core.cmd.rst | 7 + doc/source/bliss.core.coord.rst | 7 + doc/source/bliss.core.db.rst | 7 + doc/source/bliss.core.dmc.rst | 7 + doc/source/bliss.core.dtype.rst | 7 + doc/source/bliss.core.evr.rst | 7 + doc/source/bliss.core.gds.rst | 7 + doc/source/bliss.core.geom.rst | 7 + doc/source/bliss.core.json.rst | 7 + doc/source/bliss.core.limit.rst | 7 + doc/source/bliss.core.limits.rst | 7 + doc/source/bliss.core.log.rst | 7 + doc/source/bliss.core.pcap.rst | 7 + doc/source/bliss.core.rst | 45 + 
doc/source/bliss.core.seq.rst | 7 + doc/source/bliss.core.table.rst | 7 + doc/source/bliss.core.test.rst | 31 + doc/source/bliss.core.test.test_bsc.rst | 7 + doc/source/bliss.core.test.test_ccsds.rst | 7 + doc/source/bliss.core.test.test_cfg.rst | 7 + doc/source/bliss.core.test.test_cmd.rst | 7 + doc/source/bliss.core.test.test_coord.rst | 7 + doc/source/bliss.core.test.test_dmc.rst | 7 + doc/source/bliss.core.test.test_dtype.rst | 7 + doc/source/bliss.core.test.test_evr.rst | 7 + doc/source/bliss.core.test.test_limit.rst | 7 + doc/source/bliss.core.test.test_limits.rst | 7 + doc/source/bliss.core.test.test_log.rst | 7 + doc/source/bliss.core.test.test_pcap.rst | 7 + doc/source/bliss.core.test.test_table.rst | 7 + doc/source/bliss.core.test.test_tlm.rst | 7 + doc/source/bliss.core.test.test_util.rst | 7 + doc/source/bliss.core.test.test_val.rst | 7 + doc/source/bliss.core.tlm.rst | 7 + doc/source/bliss.core.util.rst | 7 + doc/source/bliss.core.val.rst | 7 + doc/source/bliss.rst | 17 + doc/source/bsc_intro.rst | 307 +++++ doc/source/c_and_dh_intro.rst | 33 + doc/source/command_intro.rst | 223 ++++ doc/source/command_line.rst | 121 ++ doc/source/conf.py | 303 +++++ doc/source/configuration_intro.rst | 167 +++ doc/source/contribute.rst | 102 ++ doc/source/dev_index.rst | 14 + doc/source/developer_info.rst | 119 ++ doc/source/evr_intro.rst | 64 + doc/source/index.rst | 37 + doc/source/installation.rst | 148 +++ doc/source/limits_intro.rst | 73 ++ doc/source/project_setup.rst | 32 + doc/source/telemetry_intro.rst | 363 ++++++ setup.cfg | 16 + setup.py | 66 + src/doc/dict/cmd/Makefile | 39 + src/doc/dict/cmd/bliss-cmddefs-tex | 135 +++ src/doc/dict/cmd/cmddict-01-defs.tex | 96 ++ src/doc/dict/cmd/cmddict-02-preamble.tex | 183 +++ src/doc/dict/cmd/cmddict-03-title.tex | 41 + src/doc/dict/cmd/cmddict-04-signatures.tex | 40 + src/doc/dict/cmd/cmddict-05-changelog.tex | 23 + src/doc/dict/cmd/cmddict-06-intro.tex | 0 src/doc/dict/cmd/cmddict-08-acronyms.tex | 24 + 
src/doc/dict/cmd/cmddict.tex | 43 + src/doc/dict/cmd/figures/jpl-logo.pdf | Bin 0 -> 8814 bytes src/doc/dict/tlm/Makefile | 39 + src/doc/dict/tlm/bliss-tlmdefs-tex | 45 + src/doc/dict/tlm/figures/jpl-logo.pdf | Bin 0 -> 8814 bytes src/doc/dict/tlm/tlmdict-01-defs.tex | 95 ++ src/doc/dict/tlm/tlmdict-02-preamble.tex | 184 +++ src/doc/dict/tlm/tlmdict-03-title.tex | 41 + src/doc/dict/tlm/tlmdict-04-signatures.tex | 40 + src/doc/dict/tlm/tlmdict-05-changelog.tex | 23 + src/doc/dict/tlm/tlmdict-06-intro.tex | 7 + src/doc/dict/tlm/tlmdict-07-tlmdefs.tex | 1069 +++++++++++++++++ src/doc/dict/tlm/tlmdict-08-acronyms.tex | 24 + src/doc/dict/tlm/tlmdict.tex | 43 + test/test_cli.sh | 42 + 208 files changed, 23233 insertions(+) create mode 100644 .gitignore create mode 100644 .mailmap create mode 100644 .pylintrc create mode 100644 LICENSE.txt create mode 100644 README.md create mode 100644 bliss/__init__.py create mode 100644 bliss/core/__init__.py create mode 100644 bliss/core/api.py create mode 100644 bliss/core/bin/__init__.py create mode 100755 bliss/core/bin/bliss_bsc.py create mode 100755 bliss/core/bin/bliss_bsc_create_handler.py create mode 100755 bliss/core/bin/bliss_bsc_stop_handler.py create mode 100755 bliss/core/bin/bliss_cmd_send.py create mode 100755 bliss/core/bin/bliss_create_dirs.py create mode 100755 bliss/core/bin/bliss_dict_writer.py create mode 100755 bliss/core/bin/bliss_pcap.py create mode 100644 bliss/core/bin/bliss_pcap_segment.py create mode 100755 bliss/core/bin/bliss_seq_decode.py create mode 100755 bliss/core/bin/bliss_seq_encode.py create mode 100755 bliss/core/bin/bliss_seq_print.py create mode 100755 bliss/core/bin/bliss_seq_send.py create mode 100755 bliss/core/bin/bliss_table_decode.py create mode 100755 bliss/core/bin/bliss_table_encode.py create mode 100755 bliss/core/bin/bliss_tlm_db_insert.py create mode 100755 bliss/core/bin/bliss_tlm_send.py create mode 100755 bliss/core/bin/bliss_yaml_validate.py create mode 100644 bliss/core/bsc.py 
create mode 100644 bliss/core/ccsds.py create mode 100644 bliss/core/cfg.py create mode 100644 bliss/core/cmd.py create mode 100644 bliss/core/coord.py create mode 100644 bliss/core/data/cmd_schema.json create mode 100644 bliss/core/data/evr_schema.json create mode 100644 bliss/core/data/limits_schema.json create mode 100644 bliss/core/data/table_schema.json create mode 100644 bliss/core/data/tlm_schema.json create mode 100644 bliss/core/db.py create mode 100644 bliss/core/dmc.py create mode 100644 bliss/core/dtype.py create mode 100644 bliss/core/evr.py create mode 100644 bliss/core/gds.py create mode 100755 bliss/core/geom.py create mode 100644 bliss/core/json.py create mode 100644 bliss/core/limits.py create mode 100644 bliss/core/log.py create mode 100644 bliss/core/pcap.py create mode 100644 bliss/core/seq.py create mode 100644 bliss/core/table.py create mode 100644 bliss/core/test/__init__.py create mode 100644 bliss/core/test/test_bsc.py create mode 100644 bliss/core/test/test_ccsds.py create mode 100644 bliss/core/test/test_cfg.py create mode 100644 bliss/core/test/test_cmd.py create mode 100644 bliss/core/test/test_coord.py create mode 100644 bliss/core/test/test_dmc.py create mode 100644 bliss/core/test/test_dtype.py create mode 100644 bliss/core/test/test_evr.py create mode 100644 bliss/core/test/test_limits.py create mode 100644 bliss/core/test/test_log.py create mode 100644 bliss/core/test/test_pcap.py create mode 100644 bliss/core/test/test_table.py create mode 100644 bliss/core/test/test_tlm.py create mode 100644 bliss/core/test/test_util.py create mode 100644 bliss/core/test/test_val.py create mode 100644 bliss/core/test/testdata/testValidTable1.yaml create mode 100644 bliss/core/test/testdata/util/test_util.txt create mode 100644 bliss/core/test/testdata/val/testCmdValidator1.yaml create mode 100644 bliss/core/test/testdata/val/testCmdValidator2.yaml create mode 100644 bliss/core/test/testdata/val/testCmdValidator3.yaml create mode 100644 
bliss/core/test/testdata/val/testCmdValidator4.yaml create mode 100644 bliss/core/test/testdata/val/testCmdValidator5.yaml create mode 100644 bliss/core/test/testdata/val/testCmdValidator6.yaml create mode 100644 bliss/core/test/testdata/val/testCmdValidator7.yaml create mode 100644 bliss/core/test/testdata/val/testCmdValidator8.yaml create mode 100644 bliss/core/test/testdata/val/testInvalidCmd1.yaml create mode 100644 bliss/core/test/testdata/val/testInvalidTlm1.yaml create mode 100644 bliss/core/test/testdata/val/testSchemaLoad1.json create mode 100644 bliss/core/test/testdata/val/testTlmValidator1.yaml create mode 100644 bliss/core/test/testdata/val/testTlmValidator2.yaml create mode 100644 bliss/core/test/testdata/val/testTlmValidator3.yaml create mode 100644 bliss/core/test/testdata/val/testTlmValidator4.yaml create mode 100644 bliss/core/test/testdata/val/testTlmValidator5.yaml create mode 100644 bliss/core/test/testdata/val/testTlmValidator6.yaml create mode 100644 bliss/core/test/testdata/val/testTlmValidator7.yaml create mode 100644 bliss/core/test/testdata/val/testValidCmd1.yaml create mode 100644 bliss/core/test/testdata/val/testValidTlm1.yaml create mode 100644 bliss/core/tlm.py create mode 100755 bliss/core/util.py create mode 100644 bliss/core/val.py create mode 100644 bliss/data/settings.yaml create mode 100755 build/clean.py create mode 100755 build/generate_changelog.py create mode 100755 build/pypi/make-pypi.sh create mode 100755 build/pypi/simple.py create mode 100755 build/update_docs_release.sh create mode 100644 data/config/bsc.yaml create mode 100644 data/config/ccsds_header.yaml create mode 100644 data/config/cmd.yaml create mode 100644 data/config/config.yaml create mode 100644 data/config/evr.yaml create mode 100644 data/config/limits/limits.yaml create mode 100644 data/config/table.yaml create mode 100644 data/config/tlm.yaml create mode 100644 doc/Makefile create mode 100644 doc/make.bat create mode 100644 
doc/source/_static/ccsds_prim_header.png create mode 100644 doc/source/api_intro.rst create mode 100644 doc/source/bliss.core.api.rst create mode 100644 doc/source/bliss.core.bin.bliss_bsc.rst create mode 100644 doc/source/bliss.core.bin.bliss_bsc_create_handler.rst create mode 100644 doc/source/bliss.core.bin.bliss_bsc_stop_handler.rst create mode 100644 doc/source/bliss.core.bin.bliss_cmd_send.rst create mode 100644 doc/source/bliss.core.bin.bliss_create_dirs.rst create mode 100644 doc/source/bliss.core.bin.bliss_dict_writer.rst create mode 100644 doc/source/bliss.core.bin.bliss_orbits.rst create mode 100644 doc/source/bliss.core.bin.bliss_pcap.rst create mode 100644 doc/source/bliss.core.bin.bliss_pcap_segment.rst create mode 100644 doc/source/bliss.core.bin.bliss_seq_decode.rst create mode 100644 doc/source/bliss.core.bin.bliss_seq_encode.rst create mode 100644 doc/source/bliss.core.bin.bliss_seq_print.rst create mode 100644 doc/source/bliss.core.bin.bliss_seq_send.rst create mode 100644 doc/source/bliss.core.bin.bliss_table_decode.rst create mode 100644 doc/source/bliss.core.bin.bliss_table_encode.rst create mode 100644 doc/source/bliss.core.bin.bliss_tlm_db_insert.rst create mode 100644 doc/source/bliss.core.bin.bliss_tlm_send.rst create mode 100644 doc/source/bliss.core.bin.bliss_yaml_validate.rst create mode 100644 doc/source/bliss.core.bin.rst create mode 100644 doc/source/bliss.core.bsc.rst create mode 100644 doc/source/bliss.core.ccsds.rst create mode 100644 doc/source/bliss.core.cfg.rst create mode 100644 doc/source/bliss.core.cmd.rst create mode 100644 doc/source/bliss.core.coord.rst create mode 100644 doc/source/bliss.core.db.rst create mode 100644 doc/source/bliss.core.dmc.rst create mode 100644 doc/source/bliss.core.dtype.rst create mode 100644 doc/source/bliss.core.evr.rst create mode 100644 doc/source/bliss.core.gds.rst create mode 100644 doc/source/bliss.core.geom.rst create mode 100644 doc/source/bliss.core.json.rst create mode 100644 
doc/source/bliss.core.limit.rst create mode 100644 doc/source/bliss.core.limits.rst create mode 100644 doc/source/bliss.core.log.rst create mode 100644 doc/source/bliss.core.pcap.rst create mode 100644 doc/source/bliss.core.rst create mode 100644 doc/source/bliss.core.seq.rst create mode 100644 doc/source/bliss.core.table.rst create mode 100644 doc/source/bliss.core.test.rst create mode 100644 doc/source/bliss.core.test.test_bsc.rst create mode 100644 doc/source/bliss.core.test.test_ccsds.rst create mode 100644 doc/source/bliss.core.test.test_cfg.rst create mode 100644 doc/source/bliss.core.test.test_cmd.rst create mode 100644 doc/source/bliss.core.test.test_coord.rst create mode 100644 doc/source/bliss.core.test.test_dmc.rst create mode 100644 doc/source/bliss.core.test.test_dtype.rst create mode 100644 doc/source/bliss.core.test.test_evr.rst create mode 100644 doc/source/bliss.core.test.test_limit.rst create mode 100644 doc/source/bliss.core.test.test_limits.rst create mode 100644 doc/source/bliss.core.test.test_log.rst create mode 100644 doc/source/bliss.core.test.test_pcap.rst create mode 100644 doc/source/bliss.core.test.test_table.rst create mode 100644 doc/source/bliss.core.test.test_tlm.rst create mode 100644 doc/source/bliss.core.test.test_util.rst create mode 100644 doc/source/bliss.core.test.test_val.rst create mode 100644 doc/source/bliss.core.tlm.rst create mode 100644 doc/source/bliss.core.util.rst create mode 100644 doc/source/bliss.core.val.rst create mode 100644 doc/source/bliss.rst create mode 100644 doc/source/bsc_intro.rst create mode 100644 doc/source/c_and_dh_intro.rst create mode 100644 doc/source/command_intro.rst create mode 100644 doc/source/command_line.rst create mode 100644 doc/source/conf.py create mode 100644 doc/source/configuration_intro.rst create mode 100644 doc/source/contribute.rst create mode 100644 doc/source/dev_index.rst create mode 100644 doc/source/developer_info.rst create mode 100644 doc/source/evr_intro.rst create mode 
100644 doc/source/index.rst create mode 100644 doc/source/installation.rst create mode 100644 doc/source/limits_intro.rst create mode 100644 doc/source/project_setup.rst create mode 100644 doc/source/telemetry_intro.rst create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 src/doc/dict/cmd/Makefile create mode 100755 src/doc/dict/cmd/bliss-cmddefs-tex create mode 100644 src/doc/dict/cmd/cmddict-01-defs.tex create mode 100644 src/doc/dict/cmd/cmddict-02-preamble.tex create mode 100644 src/doc/dict/cmd/cmddict-03-title.tex create mode 100644 src/doc/dict/cmd/cmddict-04-signatures.tex create mode 100644 src/doc/dict/cmd/cmddict-05-changelog.tex create mode 100644 src/doc/dict/cmd/cmddict-06-intro.tex create mode 100644 src/doc/dict/cmd/cmddict-08-acronyms.tex create mode 100644 src/doc/dict/cmd/cmddict.tex create mode 100644 src/doc/dict/cmd/figures/jpl-logo.pdf create mode 100644 src/doc/dict/tlm/Makefile create mode 100755 src/doc/dict/tlm/bliss-tlmdefs-tex create mode 100644 src/doc/dict/tlm/figures/jpl-logo.pdf create mode 100644 src/doc/dict/tlm/tlmdict-01-defs.tex create mode 100644 src/doc/dict/tlm/tlmdict-02-preamble.tex create mode 100644 src/doc/dict/tlm/tlmdict-03-title.tex create mode 100644 src/doc/dict/tlm/tlmdict-04-signatures.tex create mode 100644 src/doc/dict/tlm/tlmdict-05-changelog.tex create mode 100644 src/doc/dict/tlm/tlmdict-06-intro.tex create mode 100644 src/doc/dict/tlm/tlmdict-07-tlmdefs.tex create mode 100644 src/doc/dict/tlm/tlmdict-08-acronyms.tex create mode 100644 src/doc/dict/tlm/tlmdict.tex create mode 100755 test/test_cli.sh diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..d71c62a2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,45 @@ +# Distribution / Packaging / Build files +.coverage +doc/build/* +*.egg-info/ +*.egg + +# Object Files +*.o +*.pkl +*.pyc + +# Libraries +*.lib +*.a + +# Log Files +bliss-logmsgs-*.txt + +# Shared objects (inc. 
Windows DLLs) +*.dll +*.so +*.so.* +*.dylib + +# Executables +*.exe +*.out +*.app + +# LaTeX / PDF Documentation +src/doc/dict/cmd/*.aux +src/doc/dict/cmd/*.log +src/doc/dict/cmd/*.out +src/doc/dict/cmd/*.toc +src/doc/dict/cmd/*.pdf +src/doc/dict/cmd/cmddict-07-cmddefs.tex + +# Temporary files +*~ + +# Unignore (hack) +!build/clean.py + +# Nosetests ouptut file +nosetests.xml diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000..8b8ea909 --- /dev/null +++ b/.mailmap @@ -0,0 +1,5 @@ +Michael Joyce +Alice Stanboli stanboli +Alan Mazer Alan Mazer +Alan Mazer Alan S Mazer +Jordan Padams J. Padams diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000..2b92226a --- /dev/null +++ b/.pylintrc @@ -0,0 +1,336 @@ +[MASTER] + +# Specify a configuration file. +rcfile=.pylintrc + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Profiled execution. +profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +#ignore= + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# DEPRECATED +#include-ids=no + +# DEPRECATED +#symbols=no + + +[MESSAGES CONTROL] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. 
For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +#disable= + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=parseable + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=yes + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +comment=no + +# Template used to display messages. This is a python new-style format string +# used to format the message information. 
See doc for all details +msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} + + +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input,file + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Regular expression matching correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for function names +function-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for variable names +variable-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for attribute names +attr-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for argument names +argument-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for inline iteration names 
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for method names +method-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=__.*__ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + +# List of optional constructs for which whitespace checking is disabled +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. 
+ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis +ignored-modules= + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=SQLObject + +# When zope mode is activated, add a predefined set of Zope acquired attributes +# to generated-members. +zope=no + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. +generated-members=REQUEST,acl_users,aq_parent + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_$|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. +ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. 
+defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 00000000..2ce669d2 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,7 @@ +Copyright 2013 California Institute of Technology + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 00000000..d178ab32 --- /dev/null +++ b/README.md @@ -0,0 +1,51 @@ +Bespoke Links to Instruments for Surface and Space (BLISS) +========================================================== + +The Bespoke Links to Instruments for Surface and Space (BLISS) framework is a +Python-based software suite developed to handle Ground Data System (GDS), +Electronic Ground Support Equipment (EGSE), commanding, +telemetry uplink/downlink, and sequencing for JPL International Space Station +Missions. 
It is a generalization and expansion of tools developed for the +following JPL ISS projects: + +* [Orbiting Carbon Observatory 3 (OCO-3)](http://oco.jpl.nasa.gov) +* [Vehicle Cabin Atmosphere Monitor (VCAM)](http://www.nasa.gov/mission_pages/station/research/experiments/35.html) +* [ECOsystem Spaceborne Thermal Radiometer Experiment on Space Station (ECOSTRESS)](http://ecostress.jpl.nasa.gov) + + +Getting Started +--------------- + +You can read through the [Installation and Configuration Page](https://github.jpl.nasa.gov/pages/bliss/bliss-core/installation.html) for instructions on how to install BLISS Core. + +You can read through the [New Project Setup Page](https://github.jpl.nasa.gov/pages/bliss/bliss-core/project_setup.html) for +instructions on how to use BLISS on your next project. + + +Contributing +------------ + +For information on how to contribute please see the [BLISS Contributors Guides](https://github.jpl.nasa.gov/pages/bliss/bliss-core/contribute) + + +File and Directory Structure +---------------------------- + + ├── bin <-- Utilities and command line scripts + ├── doc <-- Project and API documentation + ├── src/doc/dict <-- Cmd/Tlm Dictionary definitions and documentation + ├── bliss <-- BLISS Python source tree + ├── data/config <-- Example project configuration files + + +Authors +------- + +BLISS authors (alphabetically): + + * Ben Bornstein + * Erik Hovland + * Michael Joyce + * Alan Mazer + * Jordan Padams + * Alice Stanboli diff --git a/bliss/__init__.py b/bliss/__init__.py new file mode 100644 index 00000000..948d641d --- /dev/null +++ b/bliss/__init__.py @@ -0,0 +1,15 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged.
Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +__import__('pkg_resources').declare_namespace(__name__) diff --git a/bliss/core/__init__.py b/bliss/core/__init__.py new file mode 100644 index 00000000..d06b3a66 --- /dev/null +++ b/bliss/core/__init__.py @@ -0,0 +1,15 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +import cfg diff --git a/bliss/core/api.py b/bliss/core/api.py new file mode 100644 index 00000000..a3f1bbd5 --- /dev/null +++ b/bliss/core/api.py @@ -0,0 +1,672 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. 
Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS API + +The bliss.core.api module provides an Application Programming +Interface (API) to your instrument by bringing together the core.cmd +and core.tlm modules in a complementary whole, allowing you to +script instrument interactions, e.g.: + +.. code-block:: python + + # TBA +""" + +from __future__ import absolute_import + +import gevent.monkey; gevent.monkey.patch_all() +import gevent +import gevent.event +import gevent.server +import requests + +import collections +import inspect +import json +import socket +import time + +import bliss.core +from bliss.core import cmd, gds, log, tlm + + +class APIError (Exception): + """All BLISS API exceptions are derived from this class""" + pass + + +class APITimeoutError (Exception): + """Raised when a timeout limit is exceeded""" + def __init__ (self, timeout=0, msg=None): + self._timeout = timeout + self._msg = msg + + def __str__ (self): + return self.msg + + @property + def msg(self): + s = 'APITimeoutError: Timeout (%d seconds) exceeded' % self._timeout + + if self._msg: + s += ': ' + self._msg + + return s + + @property + def timeout(self): + return self._timeout + + +class FalseWaitError (Exception): + """Raised when a 'False' boolean is passed as an argument to wait (in order to avoid infinite loop)""" + def __init__ (self, msg=None): + self._msg = msg + + def __str__ (self): + return self.msg + + @property + def msg(self): + s = 'FalseWaitError: "False" boolean passed as argument to wait. 
Ensure wait condition args are surounded by lambda or " "' + + if self._msg: + s += ': ' + self._msg + + return s + + + +class CmdAPI: + """CmdAPI + + Provides an API to send commands to your Instrument via User + Datagram Protocol (UDP) packets. + """ + def __init__ (self, destination, cmddict=None, verbose=False): + if type(destination) is int: + destination = ('127.0.0.1', destination) + + if cmddict is None: + cmddict = cmd.getDefaultCmdDict() + + self._host = destination[0] + self._port = destination[1] + self._cmddict = cmddict + self._verbose = verbose + self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + + + def send (self, command, *args, **kwargs): + """Creates, validates, and sends the given command as a UDP + packet to the destination (host, port) specified when this + CmdAPI was created. + + Returns True if the command was created, valid, and sent, + False otherwise. + """ + status = False + cmdobj = self._cmddict.create(command, *args, **kwargs) + messages = [] + + if not cmdobj.validate(messages): + for msg in messages: + log.error(msg) + else: + encoded = cmdobj.encode() + + if self._verbose: + size = len(cmdobj.name) + pad = (size - len(cmdobj.name) + 1) * ' ' + gds.hexdump(encoded, preamble=cmdobj.name + ':' + pad) + + try: + values = (self._host, self._port, cmdobj.name) + log.info('Sending to %s:%d: %s' % values) + self._socket.sendto(encoded, (self._host, self._port)) + status = True + except socket.error as e: + log.error(e.message) + except IOError as e: + log.error(e.message) + + return status + + def validate(self, command, *args, **kwargs): + if not isinstance(command, bliss.core.cmd.Cmd): + try: + command = self._cmddict.create(command, *args, **kwargs) + except TypeError as e: + log.error(e) + return False, [e] + + messages = [] + if not command.validate(messages): + for msg in messages: + log.error(msg) + return False, messages + + return True, [] + + +class GeventDeque (object): + """GeventDeque + + A Python 
collections.deque that can be used in a Gevent context. + """ + + def __init__(self, iterable=None, maxlen=None): + """Returns a new GeventDeque object initialized left-to-right + (using append()) with data from *iterable*. If *iterable* is + not specified, the new GeventDeque is empty. + + If *maxlen* is not specified or is ``None``, GeventDeques may + grow to an arbitrary length. Otherwise, the GeventDeque is + bounded to the specified maximum length. Once a bounded + length GeventDeque is full, when new items are added, a + corresponding number of items are discarded from the opposite + end. + """ + if iterable is None: + self._deque = collections.deque(maxlen=maxlen) + else: + self._deque = collections.deque(iterable, maxlen) + + self.notEmpty = gevent.event.Event() + + if len(self._deque) > 0: + self.notEmpty.set() + + def _pop(self, block=True, timeout=None, left=False): + """Removes and returns the an item from this GeventDeque. + + This is an internal method, called by the public methods + pop() and popleft(). 
+ """ + item = None + timer = None + deque = self._deque + empty = IndexError('pop from an empty deque') + + if block is False: + if len(self._deque) > 0: + item = deque.popleft() if left else deque.pop() + else: + raise empty + else: + try: + if timeout is not None: + timer = gevent.Timeout(timeout, empty) + timer.start() + + while True: + self.notEmpty.wait() + if len(deque) > 0: + item = deque.popleft() if left else deque.pop() + break + finally: + if timer is not None: + timer.cancel() + + if len(deque) == 0: + self.notEmpty.clear() + + return item + + def __copy__(self): + """Creates a new copy of this GeventDeque.""" + return GeventDeque(self._deque, self.maxlen) + + def __eq__(self, other): + """True if other is equal to this GeventDeque, False otherwise.""" + return self._deque == other + + def __getitem__(self, index): + """Returns GeventDeque[index]""" + return self._deque.__getitem__(index) + + def __iter__(self): + """Returns an iterable of items in this GeventDeque.""" + return self._deque.__iter__() + + def __len__(self): + """The number of items in this GeventDeque.""" + return len(self._deque) + + @property + def maxlen(self): + """Maximum size of this GeventDeque or None if unbounded.""" + return self.maxlen + + def append(self, item): + """Add item to the right side of the GeventDeque. + + This method does not block. Either the GeventDeque grows to + consume available memory, or if this GeventDeque has and is at + maxlen, the leftmost item is removed. + """ + self._deque.append(item) + self.notEmpty.set() + + def appendleft(self, item): + """Add item to the left side of the GeventDeque. + + This method does not block. Either the GeventDeque grows to + consume available memory, or if this GeventDeque has and is at + maxlen, the rightmost item is removed. + """ + self._deque.appendleft(item) + self.notEmpty.set() + + def clear(self): + """Remove all elements from the GeventDeque leaving it with + length 0. 
+ """ + self._deque.clear() + self.notEmpty.clear() + + def count(self, item): + """Count the number of GeventDeque elements equal to item.""" + return self._deque.count(item) + + def extend(self, iterable): + """Extend the right side of this GeventDeque by appending + elements from the iterable argument. + """ + self._deque.extend(iterable) + if len(self._deque) > 0: + self.notEmpty.set() + + def extendleft(self, iterable): + """Extend the left side of this GeventDeque by appending + elements from the iterable argument. Note, the series of left + appends results in reversing the order of elements in the + iterable argument. + """ + self._deque.extendleft(iterable) + if len(self._deque) > 0: + self.notEmpty.set() + + def pop(self, block=True, timeout=None): + """Remove and return an item from the right side of the + GeventDeque. If no elements are present, raises an IndexError. + + If optional args *block* is True and *timeout* is ``None`` + (the default), block if necessary until an item is + available. If *timeout* is a positive number, it blocks at + most *timeout* seconds and raises the :class:`IndexError` + exception if no item was available within that time. Otherwise + (*block* is False), return an item if one is immediately + available, else raise the :class:`IndexError` exception + (*timeout* is ignored in that case). + """ + return self._pop(block, timeout) + + def popleft(self, block=True, timeout=None): + """Remove and return an item from the right side of the + GeventDeque. If no elements are present, raises an IndexError. + + If optional args *block* is True and *timeout* is ``None`` + (the default), block if necessary until an item is + available. If *timeout* is a positive number, it blocks at + most *timeout* seconds and raises the :class:`IndexError` + exception if no item was available within that time. 
Otherwise + (*block* is False), return an item if one is immediately + available, else raise the :class:`IndexError` exception + (*timeout* is ignored in that case). + """ + return self._pop(block, timeout, left=True) + + def remove(item): + """Removes the first occurrence of *item*. If not found, + raises a ValueError. + + Unlike ``pop()`` and ``popleft()`` this method does not have + an option to block for a specified period of time (to wait for + item). + """ + self._deque.remove(item) + + def reverse(self): + """Reverse the elements of the deque in-place and then return + None.""" + self._deque.reverse() + + def rotate(self, n): + """Rotate the GeventDeque *n* steps to the right. If *n* is + negative, rotate to the left. Rotating one step to the right + is equivalent to: ``d.appendleft(d.pop())``. + """ + self._deque.rotate(n) + + +class PacketBuffers (dict): + def __init__(self): + super(PacketBuffers, self).__init__() + + + def __getitem__(self, key): + return dict.__getitem__(self, key) + + def create(self, name, capacity=60): + created = False + + if name not in self: + self[name] = GeventDeque(maxlen=capacity) + created = True + + return created + + + def insert(self, name, packet): + if name not in self: + self._create(name) + self[name].appendleft(packet) + + +class TlmWrapper (object): + def __init__ (self, packets): + self._packets = packets + + def __getattr__(self, name): + return self._packets[0].__getattr__(name) + + def __getitem__(self, index): + return self._packets[index] + + def __len__(self): + return len(self._packets) + + + +class TlmWrapperAttr (object): + def __init__(self, buffers): + super(TlmWrapperAttr, self).__init__() + self._buffers = buffers + + def __getattr__(self, name): + return TlmWrapper(self._buffers[name]) + + + +class UdpTelemetryServer (gevent.server.DatagramServer): + """UdpTelemetryServer + + Listens for telemetry packets delivered via User Datagram Protocol + (UDP) to a particular (host, port). 
+ """ + + def __init__ (self, listener, pktbuf, defn=None): + """Creates a new UdpTelemetryServer. + + The server listens for UDP packets matching the given + ``PacketDefinition`` *defn*. + + The *listener* is either a port on localhost, a tuple + containing ``(hostname, port)``, or a + ``gevent.socket.socket``. + + If the optional *defn* is not specified, the first + ``PacketDefinition`` (alphabetical by name) in the default + telemetry dictionary (i.e. ``tlm.getDefaultDict()``) is used. + """ + if type(listener) is int: + listener = ('127.0.0.1', listener) + + super(UdpTelemetryServer, self).__init__(listener) + self._defn = defn + self._pktbuf = pktbuf + + @property + def packets (self): + """The packet buffer.""" + return self._pktbuf + + def handle (self, data, address): + self._pktbuf.appendleft( tlm.Packet(self._defn, data) ) + + def start (self): + """Starts this UdpTelemetryServer.""" + values = self._defn.name, self.server_host, self.server_port + log.info('Listening for %s telemetry on %s:%d (UDP)' % values) + super(UdpTelemetryServer, self).start() + + + +class Instrument (object): + def __init__ (self, cmdport=3075, tlmport=3076, defn=None): + if defn is None: + tlmdict = tlm.getDefaultDict() + names = sorted( tlmdict.keys() ) + + if len(names) == 0: + msg = 'No packets defined in default TLM dictionary.' + raise TypeError(msg) + + defn = tlmdict[ names[0] ] + + self._packets = PacketBuffers() + self._cmd = CmdAPI(cmdport) + + self._packets.create(defn.name) + pktbuf = self._packets[defn.name] + self._tlm = UdpTelemetryServer(tlmport, pktbuf, defn) + self._tlm.start() + + @property + def cmd (self): + return self._cmd + + @property + def tlm (self): + return TlmWrapperAttr(self._packets) + + +def wait (cond, msg=None, _timeout=10, _raiseException=True): + """Waits either a specified number of seconds, e.g.: + + .. code-block:: python + + wait(1.2) + + or for a given condition to be True. 
Conditions may be take + several forms: Python string expression, lambda, or function, + e.g.: + + .. code-block:: python + + wait('instrument_mode == "SAFE"') + wait(lambda: instrument_mode == "SAFE") + + def isSafe(): return instrument_mode == "SAFE" + wait(isSafe) + + The default ``_timeout`` is 10 seconds. If the condition is not + satisfied before the timeout has elapsed, an + :exception:``APITimeoutError`` exception is raised. + + The :exception:``APITimeoutError`` exception may be supressed in + favor of returning ``True`` on success (i.e. condition satisfied) + and ``False`` on failure (i.e. timeout exceeded) by setting the + ``_raiseException`` parameter to ``False``. + + The :exception:``FalseWaitError`` will be thrown only if a boolean + with value "False" is passed as an argument to wait. The purpose of + this is to avoid infinite loops and catch conditional arguments are + not passed in as strings and therefore evaluated before the wait + function gets called. + + These parameters are prefixed with an underscore so they may also + be used to control exception handling when sending commands. + Since methods that generate commands take keyword arguments, we + did not want these parameter names to conflict with command + parameter names. + """ + status = False + delay = 0.25 + elapsed = 0 + + if msg is None and type(cond) is str: + msg = cond + + if type(cond) is bool: + if cond: + log.warn('Boolean passed as argument to wait. 
Make sure argument to wait is surrounded by a lambda or " "') + else: + raise FalseWaitError(msg) + + if type(cond) in (int, float): + gevent.sleep(cond) + status = True + else: + while True: + if _timeout is not None and elapsed >= _timeout: + if _raiseException: + raise APITimeoutError(_timeout, msg) + else: + status = False + break + + if type(cond) is str: + caller = inspect.stack()[1][0] + status = eval(cond, caller.f_globals, caller.f_locals) + elif callable(cond): + status = cond() + else: + status = cond + + if status: + break + + gevent.sleep(delay) + elapsed += delay + + return status + + +class UIAPI(object): + def confirm(self, msg, _timeout=-1): + ''' Send a confirm prompt to the GUI + + Arguments: + msg (string): + The message to display to the user. + + _timeout (int): + The optional amount of time for which the prompt + should be displayed to the user before a timeout occurs. + Defaults to -1 which indicates there is no timeout limit. + ''' + return self.msgBox('confirm', _timeout=_timeout, msg=msg) + + def msgBox(self, promptType, _timeout=-1, **options): + ''' Send a user prompt request to the GUI + + Arguments: + promptType (string): + The prompt type to send to the GUI. Currently + the only type supported is 'confirm'. + + _timeout (int): + The optional amount of time for which the prompt + should be displayed to the user before a timeout occurs. + Defaults to -1 which indicates there is no timeout limit. + + options (dict): + The keyword arguments that should be passed to the requested + prompt type. Check prompt specific sections below for information on what + arguments are expected to be present. + + Raises: + ValueError: + If the prompt type received is an unexpected value + + **Confirm Prompt** + + Display a message to the user and prompt them for a confirm/deny + response to the message. 
+ + Arguments: + msg (string): + The message to display to the user + + Returns: + True if the user picks 'Confirm', False if the user picks 'Deny' + + Raises: + KeyError: + If the options passed to the prompt handler doesn't contain a + `msg` attribute. + + APITimeoutError: + If the timeout value is reached without receiving a response. + ''' + if promptType == 'confirm': + return self._sendConfirmPrompt(_timeout, options) + else: + raise ValueError('Unknown prompt type: {}'.format(promptType)) + + def _sendConfirmPrompt(self, _timeout, options): + '''''' + if 'msg' not in options: + raise KeyError('Confirm prompt options does not contain a `msg` attribute') + + data = { + 'type': 'confirm', + 'options': options, + 'timeout': _timeout + } + ret = self._sendMsgBoxRequest(data) + + if ret == 'timeout': + raise APIError('Confirm request returned invalid response: {}'.format(ret)) + elif ret == 'confirm': + return True + elif ret == 'deny': + return False + + def _sendMsgBoxRequest(self, data): + host = bliss.config.get('gui.host', 'localhost') + port = bliss.config.get('gui.port', 8080) + url = 'http://{}:{}/prompt'.format(host, port) + connTimeout = data['timeout'] * 2 + + try: + if connTimeout > 0: + ret = requests.post(url, json=data, timeout=connTimeout) + else: + ret = requests.post(url, json=data) + + ret = json.loads(ret.text)['response'] + except requests.exceptions.ConnectionError as e: + log.error('User prompt request connection failed') + ret = None + except requests.exceptions.HTTPError: + log.error('User prompt request received an unsuccessful HTTP status code') + ret = None + except requests.exceptions.TooManyRedirects: + log.error('User prompt request failed due to too many redirects') + ret = None + except requests.exceptions.Timeout: + raise APITimeoutError(timeout=timeout, msg='User confirm prompt timed out') + except KeyError: + log.error('User prompt request received malformed response') + ret = None + + return ret + + +ui = UIAPI() diff --git 
a/bliss/core/bin/__init__.py b/bliss/core/bin/__init__.py new file mode 100644 index 00000000..91bbca20 --- /dev/null +++ b/bliss/core/bin/__init__.py @@ -0,0 +1,13 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. diff --git a/bliss/core/bin/bliss_bsc.py b/bliss/core/bin/bliss_bsc.py new file mode 100755 index 00000000..81a59c16 --- /dev/null +++ b/bliss/core/bin/bliss_bsc.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. 
+ +''' +Usage: bliss-bsc + +Start the bliss BSC for capturing network traffic into PCAP files +and the manager server for RESTful manipulation of active loggers. +''' + +import os +import threading +import yaml +import argparse + +import bliss +from bliss.core import bsc + + +config_file = bliss.config.bsc.filename + +def main(): + ap = argparse.ArgumentParser( + description = __doc__, + formatter_class = argparse.ArgumentDefaultsHelpFormatter + ) + args = ap.parse_args() + + if not os.path.isfile(config_file): + print "Unable to locate config. Starting up handlers with default values ..." + host = 'localhost' + port = '8080' + handler_configs = [] + root_log_dir = '/tmp' + mngr_conf = { + 'root_log_directory': root_log_dir + } + + else: + with open(config_file) as log_conf: + conf = yaml.load(log_conf) + + mngr_conf = conf['capture_manager'] + host = mngr_conf['manager_server']['host'] + port = mngr_conf['manager_server']['port'] + + handler_configs = [] + for handler_conf in conf['handlers']: + if 'path' in handler_conf: + handler_path = handler_conf.pop('path') + if not os.path.isabs(handler_path): + handler_path = os.path.join(mngr_conf['root_log_directory'], handler_path) + else: + handler_path = mngr_conf['root_log_directory'] + + handler_configs.append(( + handler_conf.pop('name'), + handler_conf.pop('address'), + handler_conf.pop('conn_type'), + handler_path, + handler_conf + )) + + lgr_mngr = bsc.StreamCaptureManager(mngr_conf, handler_configs) + manager_server = bsc.StreamCaptureManagerServer(logger_manager=lgr_mngr, host=host, port=port) + + t = threading.Thread(target=manager_server.start) + t.setDaemon(True) + t.start() + + lgr_mngr.run_socket_event_loop() + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_bsc_create_handler.py b/bliss/core/bin/bliss_bsc_create_handler.py new file mode 100755 index 00000000..b2ba464c --- /dev/null +++ b/bliss/core/bin/bliss_bsc_create_handler.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +# 
Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +''' +Usage: + bliss-bsc-create-handler [options] + +--service-host= The host for the BSC REST service connection + [default: localhost] +--service-port= The port for the BSC REST service connection + [default: 8080] +--rotate= Flag saying whether the log should be rotated + automatically [default: True] +--rotate-index= If log rotation is enabled, this determines the + frequency of a rotation. One of 'year', 'month', + 'day', 'hour', 'minutes', 'second' [default: day] +--rotate-delta= If log rotation is enabled, this determines the + delta between log creation and current time + rotate-index value needed to trigger a log + rotation [default: 1] +--file-pattern= The file pattern for the log file name. 
This can + include handler metadata values as well as strftime + format characters [default: %Y-%m-%d-%H-%M-%S-{name}.pcap] +''' + +import argparse +import requests + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + + # Add required command line arguments + parser.add_argument('name') + parser.add_argument('loc') + parser.add_argument('port', type=int) + parser.add_argument('conn_type') + + # Add optional command line arguments + parser.add_argument('--service-host', default='localhost') + parser.add_argument('--service-port', type=int, default=8080) + parser.add_argument('--rotate', type=int, default=1) + parser.add_argument('--rotate-index', choices=['year','month','day','hour','minutes','second'], default='day') + parser.add_argument('--rotate-delta', type=int, default=1) + parser.add_argument('--file-pattern', default='\%Y-\%m-\%d-\%H-\%M-\%S-{name}.pcap') + + # Get command line arguments + args = vars(parser.parse_args()) + + host = args['service-host'] + port = args['service-port'] + + handler_name = args['name'] + + handler_port = args['port'] + arguments['port'] = handler_port + + handler_conn_type = args['conn_type'] + arguments['conn_type'] = handler_conn_type + + handler_loc = args['loc'] + arguments['loc'] = handler_loc + + arguments['rotate_log'] = eval(args['rotate']) + arguments['rotate_log_index'] = args['rotate-index'] + arguments['rotate_log_delta'] = args['rotate-delta'] + arguments['file_name_pattern'] = args['file-pattern'] + + requests.post( + 'http://{}:{}/{}/start'.format(host, port, handler_name), + data=arguments + ) + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_bsc_stop_handler.py b/bliss/core/bin/bliss_bsc_stop_handler.py new file mode 100755 index 00000000..d848f39f --- /dev/null +++ b/bliss/core/bin/bliss_bsc_stop_handler.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument 
Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +''' +Usage: + bliss-bsc-stop-handler [options] + +--service-host= The host for the BSC REST service connection + [default: localhost] +--service-port= The port for the BSC REST service connection + [default: 8080] +''' + +import requests +import argparse + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + + # Add required command line arguments + parser.add_argument('name') + + # Add optional command line arguments + parser.add_argument('--service-host', default='localhost') + parser.add_argument('--service-port', type=int, default=8080) + + # Get command line arguments + args = vars(parser.parse_args()) + + host = args['service-host'] + port = args['service-port'] + + handler_name = args['name'] + + requests.delete('http://{}:{}/{}/stop'.format(host, port, handler_name)) + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_cmd_send.py b/bliss/core/bin/bliss_cmd_send.py new file mode 100755 index 00000000..a0b5a3cc --- /dev/null +++ b/bliss/core/bin/bliss_cmd_send.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# 
def main():
    """Send a single command (with arguments) to the ISS simulator via UDP."""
    log.begin()

    description = """

    Sends the given command and its arguments to the ISS simulator via UDP.

    Examples:
      $ bliss-cmd-send OCO3_CMD_START_SEQUENCE_NOW 1

    """

    # BUGFIX: build the OrderedDict from a sequence of pairs.  The
    # original passed a plain dict literal, which does not preserve
    # insertion order on older Pythons and scrambles the option order.
    arguments = OrderedDict([
        ('--port', {
            'type'    : int,
            'default' : 3075,
            'help'    : 'Port on which to send data'
        }),
        ('--verbose', {
            'action'  : 'store_true',
            'default' : False,
            'help'    : 'Hexdump of the raw command being sent.'
        })
    ])

    arguments['command'] = {
        'type' : str,
        'help' : 'Name of the command to send.'
    }

    arguments['arguments'] = {
        'type'    : util.toNumberOrStr,
        'metavar' : 'argument',
        'nargs'   : '*',
        'help'    : 'Command arguments.'
    }

    args = gds.arg_parse(arguments, description)

    host    = "127.0.0.1"
    port    = args.port
    sock    = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    verbose = args.verbose
    cmddict = cmd.getDefaultCmdDict()

    if cmddict is not None:
        name     = args.command
        cmdargs  = args.arguments
        command  = cmddict.create(name, *cmdargs)
        messages = [ ]

        if command is None:
            log.error("unrecognized command: %s" % name)
        elif not command.validate(messages):
            for msg in messages:
                log.error(msg)
        else:
            encoded = command.encode()

            if verbose:
                # Label the hexdump with the command name.  The original
                # computed pad = (size - len(command.name) + 1) * " "
                # with size == len(command.name), which is always a
                # single space — simplified to the equivalent literal.
                preamble = command.name + ": "
                gds.hexdump(encoded, preamble=preamble)

            try:
                msg = "Sending to %s:%d: %s"
                log.info(msg, host, port, command.name)
                sock.sendto(encoded, (host, port))
            except socket.error as err:  # 'as' form works on Py2.6+ and Py3
                log.error( str(err) )

    log.end()
+ +''' +Usage: + bliss-create-dirs [options] + +Arguments: + + -d DATETIME, --datetime= Create directory structure using this + ISO 8601 datetime for strftime replacement + in directory path. Default: TODAY + +Description: + + BLISS Create Directory Structure + + Based on the data paths specified in the BLISS_CONFIG, this software creates + daily directories for the GDS based on the paths and any applicable variable + substitution. + + Define the Paths + ================ + + Paths should be specified within the 'data' portion of the BLISS_CONFIG. It + should follow the following hierarchy within the YAML file: + + data: + data-type: + path: + + For example: + + data: + type_a: + path: /path/to/data/type_a + type_b: + path: /path/to/data/type_b + + Be sure to use 'path' to specify the data path so the software knows to + translate these paths as needed. You can use absolute or relative paths: + + data: + type_a: + path: to/data/type_a + type_b: + path: ~/to/data/type_b + + + Variable Substitution + ===================== + + Variable substituion is also possible using any of the default-, platform-, + or host- level attributes within the BLISS_CONFIG. To include a variable + in a path use the following syntax, `${variable}` + + For example, + + default: + mission: 'oco3' + phase: 'int' + + data: + type_a: + path: /${mission}/${phase}/data/type_a + type_b: + path: /${mission}/${phase}/data/type_b + + Will create the directories: + + /oco3/int/data/type_a + /oco3/int/data/type_b + + + Special Variables and strftime directives + ========================================= + + There are also several special variables available: + * hostname = current machine hostname + * platform = platform of the current machine (darwin, win32, etc.) + + You can also use strftime format characters + (https://docs.python.org/2/library/time.html#time.strftime). 
def createDirStruct(paths, verbose=True):
    '''Create the directory structure for each configured data path.

    Args:
        paths:
            dict mapping a data-type name to a directory path (or a
            list of directory paths) to create, e.g. the contents of
            ``bliss.config._datapaths``.
        verbose:
            When True, log each directory as it is created.

    Returns:
        True when every directory already existed or was created.

    Raises:
        OSError: re-raised for any makedirs failure other than the
            directory already existing.
    '''
    for k, path in paths.items():
        p = None
        try:
            # Each entry may be a single path or a list of paths.
            pathlist = path if isinstance(path, list) else [ path ]
            for p in pathlist:
                os.makedirs(p)
                if verbose:
                    log.info('Creating directory: ' + p)
        except OSError as e:  # 'as' form is valid on both Py2.6+ and Py3
            # An already-existing directory is not an error; anything
            # else (permissions, bad path, ...) propagates.
            if e.errno == errno.EEXIST and os.path.isdir(p):
                pass
            else:
                raise

    return True
+""", + epilog = """ + Create directories based on some set of variables in a separate YAML config + + $ bliss-create-dirs -c vars.yaml + + Create directories starting 3 days from now for 90 days + + $ bliss-create-dirs -d 2016-01-01T00:00:00Z +""", + formatter_class = argparse.RawDescriptionHelpFormatter + ) + + argparser.add_argument( + '-d', '--date', + metavar = '', + type = str, + help = 'Create directory structure using this' + + 'ISO 8610 datetime for strftime replacement' + + 'in directory path. Default: TODAY' + ) + + argparser.add_argument( + '-t', '--timedelta', + metavar = '', + type = int, + help = 'Number of days in the future you would like '+ + 'to create a directory.' + + 'Default: 0' + ) + + options = argparser.parse_args() + + log.begin() + + retcode = 0 + + try: + pathvars = { } + + if options.date: + bliss.config._datetime = time.strptime(options.date, dmc.ISO_8601_Format) + + if options.timedelta: + bliss.config._datetime = time.strptime(dmc.getUTCDatetimeDOY(days=options.timedelta), + dmc.DOY_Format) + + pathvars['year'] = bliss.config._datetime.tm_year + pathvars['doy'] = '%03d' % bliss.config._datetime.tm_yday + + # Add the updated path variables for the date + bliss.config.addPathVariables(pathvars) + + bliss.config.reload() + + # Create the directory + retcode = createDirStruct(bliss.config._datapaths) + + except Exception as e: + print e + log.error('BLISS Create Directories error: %s' % traceback.format_exc()) + + log.end() + return retcode + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_dict_writer.py b/bliss/core/bin/bliss_dict_writer.py new file mode 100755 index 00000000..c4207b9e --- /dev/null +++ b/bliss/core/bin/bliss_dict_writer.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. 
United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +''' +Usage: + bliss-dict-writer [options] (--tlm | --cmd) + +--tlm Run dictionary processor for Telemetry dictionary. +--cmd Run dictionary processor for Command dictionary. +--format= Specify output format. Possible values: csv + [Default: csv] +--path= Output file path. + + +Description: + BLISS TLM and CMD Dictionary Definitions to Specified Output Format + + Outputs BLISS TLM and CMD Dictionary Definitions in Specific output format. Currently supports: + * TLM -> CSV + + TODO + * TLM -> TeX + * CMD -> CSV + * CMD -> TeX + + Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. + U.S. Government Sponsorship acknowledged. 
def main():
    """Write the BLISS TLM (or, eventually, CMD) dictionary in the requested format."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add optional command line arguments
    parser.add_argument('--format', default='csv')
    parser.add_argument('--path', default='')

    # Exactly one dictionary type must be selected
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--tlm', action='store_true', default=False)
    group.add_argument('--cmd', action='store_true', default=False)

    # Get command line arguments
    args = vars(parser.parse_args())

    # output format for the file (renamed: 'format' shadows the builtin)
    out_format = args['format']

    # output path
    path = args['path']

    # initialize telemetry dictionary writer
    if args['tlm']:
        writer = tlm.TlmDictWriter()

    # command dictionary output is not implemented yet
    if args['cmd']:
        log.error("Not yet supported")
        sys.exit()

    # write to csv
    if out_format == 'csv':
        writer.writeToCSV(output_path=path)
    else:
        # BUGFIX: the original message read "Invalid specified." —
        # it dropped the word 'format' and the offending value.
        log.error("Invalid format '%s' specified." % out_format)
def main():
    """Command line front end for the pcap library (query / times)."""
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )

    arguments = {
        '--query': {
            'action' : 'store_true',
            'help'   : ('Creates a new file containing the data from one or '
                        'more given pcap files in a given time range. If no '
                        'output file name is given, the new file name will '
                        'be the name of the first file with the time frame '
                        'appended to the name.')
        },

        '--times': {
            'action' : 'store_true',
            'help'   : 'Lists time ranges available in pcap file(s)'
        },

        '--stime': {
            # NOTE(review): dmc.GPS_Epoch is passed to strptime() below,
            # which requires a string — confirm GPS_Epoch's type.
            'default' : dmc.GPS_Epoch,
            'help'    : ('Starting time for desired telemetry range in '
                         'ISO 8601 Format "YY-MM-DDThh:mm:SSZ"')
        },

        '--etime': {
            # NOTE(review): same concern — datetime.now() is not a string.
            'default' : datetime.datetime.now(),
            'help'    : ('Ending time for desired telemetry range in '
                         'ISO 8601 Format "YY-MM-DDThh:mm:SSZ"')
        },

        '--output': {
            'default' : None,
            'help'    : 'The name of the output file to be generated'
        },

        '--tol': {
            'type'    : int,
            'default' : 2,
            'help'    : 'Number of seconds allowed between time ranges'
        },

        'file': {
            'nargs'   : '+',
            'metavar' : '',
            'help'    : 'File or directory path containing .pcap file(s)',
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    # Expand directory arguments into the .pcap files they contain.
    pcapfiles = []
    for p in args.file:
        if os.path.isdir(p):
            pcapfiles.extend(util.listAllFiles(p, 'pcap', True))
        elif os.path.isfile(p):
            pcapfiles.append(p)
        else:
            ap.print_help()
            raise IOError("Invalid pcapfile. Check path and try again: %s" % p)

    log.begin()

    # if using pcap.query
    if args.query:
        stime  = args.stime
        etime  = args.etime
        output = args.output

        try:
            # Convert start time to datetime object
            starttime = datetime.datetime.strptime(stime, dmc.ISO_8601_Format)

            # Convert end time to datetime object
            endtime = datetime.datetime.strptime(etime, dmc.ISO_8601_Format)

        except ValueError:
            ap.print_help()
            # Two blank lines before the error, as before (print('\n')
            # emits the same output as two bare Py2 print statements).
            print('\n')
            raise ValueError("Start and end time must be formatted as YY-MM-DDThh:mm:SSZ")

        pcap.query(starttime, endtime, output, *pcapfiles)

    # if using pcap.times
    elif args.times:
        times = pcap.times(pcapfiles, args.tol)

        if len(times) == 1:
            # Only one file: no need to repeat its name on every line.
            # list() makes indexing valid on Py3 dict views too.
            for start, stop in list(times.values())[0]:
                print('%s - %s' % (start, stop))
        else:
            for filename in sorted(times.keys()):
                basename = os.path.basename(filename)
                for start, stop in times[filename]:
                    # BUGFIX: the original computed basename but then
                    # printed the full filename, leaving basename unused.
                    print('%s: %s - %s' % (basename, start, stop))
    else:
        ap.print_help()

    log.end()
User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + + +""" +Segments one or more pcap files into multiple pcap files, according to +a threshold number of bytes, packets, and/or seconds. New segment +filenames are determined based on a strftime(3) format string and +the timestamp of the first packet in the file. + +When segmenting based on time (-s, --seconds), for file naming and +interval calculation purposes ONLY, the timestamp of the first packet +in the file is rounded down to nearest even multiple of the number of +seconds. This yields nice round number timestamps for filenames. For +example: + + bliss-pcap-segment -s 3600 %Y%m%dT%H%M%S.pcap foo.pcap bar.pcap + +If the first packet written to a file has a time of 2017-11-23 +19:28:58, the file will be named: + + 20171123T190000.pcap + +And a new file will be started when a packet is written with a +timestamp that exceeds 2017-11-23 19:59:59. 
+""" + + +import argparse +import datetime +import os + +from bliss.core import log, pcap + + +def main(): + ap = argparse.ArgumentParser( + epilog=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + ap.add_argument('-n', '--dry-run', + action = 'store_true', + help = 'Dry run; do not actually write files', + ) + + ap.add_argument('-b', '--bytes', + help = 'Segment evey B bytes', + metavar = 'B', + type = int + ) + + ap.add_argument('-p', '--packets', + help = 'Segment evey P packets', + metavar = 'P', + type = int + ) + + ap.add_argument('-s', '--seconds', + help = 'Segment when first and last pcap timestamps span S seconds', + metavar = 'S', + type = int + ) + + ap.add_argument('format', + help = 'Segment filename (should include strftime(3) time format)' + ) + + ap.add_argument('file', + nargs = '+', + help = 'Packet Capture (.pcap) file(s)' + ) + + args = ap.parse_args() + + if args.bytes is None and args.packets is None and args.seconds is None: + msg = 'At least one of -b, -p, or -s is required.' + ap.error(msg) + + try: + pcap.segment(filenames = args.file, + format = args.format, + nbytes = args.bytes, + npackets = args.packets, + nseconds = args.seconds, + dryrun = args.dry_run) + + except KeyboardInterrupt: + log.info('Received Ctrl-C. Aborting pcap segmentation.') + + except IOError as e: + log.error(str(e)) + + log.end() + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_seq_decode.py b/bliss/core/bin/bliss_seq_decode.py new file mode 100755 index 00000000..00b063aa --- /dev/null +++ b/bliss/core/bin/bliss_seq_decode.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. 
def main():
    """Decode a binary relative-time command sequence file to text."""
    log.begin()

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Required positional: the binary sequence file to decode.
    ap.add_argument('filename', default=None)

    opts = vars(ap.parse_args())

    path = os.path.abspath(opts['filename'])

    # Warn (but continue) when the input lacks the expected extension.
    if os.path.splitext(path)[1].lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    # NOTE(review): this script reads sequence.messages while
    # bliss-seq-encode reads sequence.log.messages — confirm which
    # attribute seq.Seq actually exposes.
    if sequence.validate():
        msg = "Writing %s (seqid=0x%04x, version=%u)."
        log.info(msg, sequence.txtpath, sequence.seqid, sequence.version)
        sequence.writeText()
    else:
        for message in sequence.messages:
            log.error(message)

    log.end()
def main():
    """Encode a text relative-time command sequence file to binary."""
    log.begin()

    # BUGFIX: removed the unused local `defaults = { }`.
    parser = argparse.ArgumentParser(
        description     = __doc__,
        formatter_class = argparse.RawDescriptionHelpFormatter)

    # Add required command line arguments
    parser.add_argument('filename')

    # Get command line arguments
    args = vars(parser.parse_args())

    filename  = os.path.abspath(args['filename'])
    extension = os.path.splitext(filename)[1]

    # Warn (but continue) when the input lacks the expected extension.
    if extension.lower() != '.txt':
        log.warn("Filename '%s' does not have a '.txt' extension", filename)

    sequence = seq.Seq(filename)

    # NOTE(review): bliss-seq-decode reads sequence.messages while this
    # script reads sequence.log.messages — confirm which is correct.
    if not sequence.validate():
        for msg in sequence.log.messages:
            log.error(msg)
    else:
        binpath = sequence.binpath
        seqid   = sequence.seqid

        log.info("Writing %s (seqid=0x%04x).", binpath, seqid)
        sequence.writeBinary()

    log.end()
def main():
    """Print a binary relative-time command sequence file as text on stdout."""
    log.begin()

    ap = argparse.ArgumentParser(
        description     = __doc__,
        formatter_class = argparse.RawDescriptionHelpFormatter)

    # Required positional: the binary sequence file to print.
    ap.add_argument('filename')

    opts = vars(ap.parse_args())

    path = os.path.abspath(opts['filename'])
    ext  = os.path.splitext(path)[1]

    # Warn (but continue) when the input lacks the expected extension.
    if ext.lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    # Report any validation errors, but still print the sequence
    # afterwards (unlike bliss-seq-decode, which skips writing on
    # failure).  NOTE(review): confirm printing an invalid sequence
    # is intentional.
    if not sequence.validate():
        for message in sequence.messages:
            log.error(message)

    sequence.printText()

    log.end()
def system (command):
    '''Log and execute the given shell command (used for "%" meta-commands).'''
    log.info('Executing: %s' % command)
    os.system(command)


def main ():
    """Send each command in a relative-time sequence (.rts) file via UDP.

    Lines in the file are either blank/comments (skipped), shell
    meta-commands prefixed with '%', or '<delay> <name> [args...]'
    sequence commands sent to localhost after sleeping <delay> seconds.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add required command line argument
    parser.add_argument('filename', default=None)

    # Add optional command line arguments
    parser.add_argument('--port', default=3075, type=int)
    parser.add_argument('--verbose', default=0, type=int)

    # Get command line arguments
    args = vars(parser.parse_args())

    host = '127.0.0.1'
    port = args['port']
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # BUGFIX: removed `data = ' '.join(args)` — it joined the dict's
    # KEYS into a string that was never used.
    # NOTE(review): --verbose is accepted but not acted on in this
    # version (the documented hexdump is not implemented).
    verbose = args['verbose']

    cmddict  = cmd.getDefaultCmdDict()
    filename = args['filename']

    try:
        with open(filename, 'r') as stream:
            for line in stream.readlines():
                line = line.strip()

                # Skip blank lines and comments
                if len(line) == 0 or line.startswith('#'):
                    continue

                # Meta-command
                elif line.startswith('%'):
                    command = line[1:].strip()
                    system(command)

                # Sequence command
                else:
                    tokens = line.split()
                    delay  = float(tokens[0])
                    name   = tokens[1]
                    # BUGFIX: the original rebound `args` here, clobbering
                    # the parsed command-line arguments dict; renamed the
                    # per-command argument list.
                    cmdargs  = [ util.toNumber(t, t) for t in tokens[2:] ]
                    command  = cmddict.create(name, *cmdargs)
                    messages = [ ]

                    time.sleep(delay)
                    log.info(line)

                    if command is None:
                        log.error('unrecognized command: %s' % name)
                    elif command.validate(messages):
                        sock.sendto(command.encode(), (host, port))
                    else:
                        msg = 'Command validation error: %s'
                        log.error(msg, ' '.join(messages))

    except socket.error as err:  # 'as' form works on Py2.6+ and Py3
        log.error( str(err) )

    except IOError:
        log.error("Could not open '%s' for reading." % filename)

    log.end()
def main():
    """Decode a binary FSW table file to text using a table dictionary."""
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Required named arguments plus optional flags
    parser.add_argument('--binfile', default=None, required=True)
    parser.add_argument('--fswtabdict', default=None, required=True)
    parser.add_argument('--tabletype', default=None, required=True)
    parser.add_argument('--verbose', action='store_true', default=False)
    parser.add_argument('--version', default=0, type=int)

    # Get command line arguments
    args      = vars(parser.parse_args())
    binfile   = args['binfile']
    dictpath  = args['fswtabdict']
    tabletype = args['tabletype']
    verbose   = args['verbose']
    version   = args['version']

    # Prime the table dictionary cache with the given dictionary path.
    # (The original guarded on `dictpath is not None`, but the argument
    # is required=True so that branch could never be skipped.)
    dictCache = table.FSWTabDictCache(filename=dictpath)

    try:
        filename = dictCache.filename
    except IOError as e:  # 'as' form works on Py2.6+ and Py3
        # BUGFIX: report dictpath — `filename` is unbound here if the
        # attribute access above raised.
        msg = 'Could not load default table dictionary "%s": %s'
        log.error(msg, dictpath, str(e))

    fswtabdict = table.getDefaultFSWTabDict()

    # Check if the table dictionary exists
    if fswtabdict is not None:
        # Write out the table file using the table dictionary
        table.writeToText(fswtabdict, tabletype, binfile, verbose, version)

    log.end()
def main():
    """Encode a text FSW table file to binary using a table dictionary."""
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Required named arguments plus optional flags
    parser.add_argument('--fswtabdict', default=None, required=True)
    parser.add_argument('--tabfile', default=None, required=True)
    parser.add_argument('--tabletype', default=None, required=True)
    parser.add_argument('--verbose', action='store_true', default=False)

    # Get command line arguments
    args      = vars(parser.parse_args())
    dictpath  = args['fswtabdict']
    tabfile   = args['tabfile']
    # BUGFIX: the original read args['tabletyle'] (typo), which raised
    # KeyError on every run since the option's dest is 'tabletype'.
    tabletype = args['tabletype']
    verbose   = args['verbose']

    # Grab default table dictionary
    if dictpath is not None:
        dictCache = table.FSWTabDictCache(filename=dictpath)

        try:
            filename = dictCache.filename
        except IOError as e:  # 'as' form works on Py2.6+ and Py3
            # Report the requested path; `filename` is unbound here if
            # the attribute access above raised.
            msg = 'Could not load default table dictionary "%s": %s'
            log.error(msg, dictpath, str(e))

    fswtabdict = table.getDefaultFSWTabDict()

    # Check if the table dictionary exists
    if fswtabdict is not None:
        # Write out the table file using the table dictionary
        table.writeToBinary(fswtabdict, tabletype, tabfile, verbose)

    log.end()
+""" + + +import argparse +import os +import sys +import socket +import time + +import bliss +from bliss.core import db, log, tlm + + +def main(): + tlmdict = tlm.getDefaultDict() + pnames = tlmdict.keys() + ap = argparse.ArgumentParser( + description = __doc__, + formatter_class = argparse.ArgumentDefaultsHelpFormatter + ) + + arguments = { + '--packet': { + 'type' : str, + 'choices' : pnames, + 'default' : pnames[0] if len(pnames) > 0 else None, + 'help' : 'Type of packets (!Packet name in tlm.yaml) in file', + 'required': len(pnames) > 1, + }, + + '--database': { + 'default' : bliss.config.get('database.name'), + 'help' : ('Name of database in which to insert packets (may ' + 'also be specified in config.yaml database.name)'), + 'required': bliss.config.get('database.name') is None + }, + + 'file': { + 'nargs': '+', + 'help' : 'File(s) containing telemetry packets' + } + } + + for name, params in arguments.items(): + ap.add_argument(name, **params) + + args = ap.parse_args() + + log.begin() + + try: + npackets = 0 + dbconn = None + defn = tlm.getDefaultDict()[args.packet] + nbytes = defn.nbytes + + if args.database == ':memory:' or not os.path.exists(args.database): + dbconn = db.create(args.database) + else: + dbconn = db.connect(args.database) + + for filename in args.file: + log.info('Processing %s' % filename) + with dbconn: + with open(filename, 'rb') as stream: + data = stream.read(nbytes) + + while len(data) > 0: + packet = tlm.Packet(defn, data) + db.insert(dbconn, packet) + data = stream.read(nbytes) + npackets += 1 + + except KeyboardInterrupt: + log.info('Received Ctrl-C. Stopping database insert.') + + except IOError as e: + log.error(str(e)) + + finally: + if dbconn: + dbconn.close() + + values = npackets, args.packet, args.database + log.info('Inserted %d %s packets into database %s.' 
% values) + + log.end() + + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_tlm_send.py b/bliss/core/bin/bliss_tlm_send.py new file mode 100755 index 00000000..e8424985 --- /dev/null +++ b/bliss/core/bin/bliss_tlm_send.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +''' +Usage: bliss-tlm-send [options] + +Sends the telemetry contained in the given pcap file via UDP. 
+ + --port=number Port to which to send data (default: 3076) + --verbose Report every packet sent (default:False) + +Examples: + + $ bliss-tlm-send test/data/pcap/oco3fsw-iss1553-2015-04-22.pcap + +''' + + +import sys +import socket +import time +import argparse + +from bliss.core import gds, log, pcap + + +def main(): + try: + + log.begin() + + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + + # Add required command line arguments + parser.add_argument('filename') + + # Add optional command line arguments + parser.add_argument('--port', default=3076, type=int) + parser.add_argument('--verbose', action='store_true', default=False) + + # Get command line arguments + args = vars(parser.parse_args()) + + filename = args['filename'] + host = 'localhost' + port = args['port'] + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + verbose = args['verbose'] + + if not verbose: + log.info('Will only report every 10 telemetry packets') + log.info('Will only report long telemetry send delays') + + with pcap.open(filename, 'r') as stream: + npackets = 0 + prev_ts = None + + for header, packet in stream: + if prev_ts is None: + prev_ts = header.ts + + delay = header.ts - prev_ts + + if delay >= 2: + log.info('Next telemetry in %1.2f seconds' % delay) + + time.sleep(delay) + + nbytes = len(packet) + + if npackets == 0: + log.info('Sent first telemetry packet (%d bytes)' % nbytes) + elif verbose: + log.info('Sent telemetry (%d bytes)' % nbytes) + elif npackets % 10 == 0: + log.info('Sent 10 telemetry packets') + + sock.sendto(packet, (host, port)) + + npackets += 1 + prev_ts = header.ts + + except KeyboardInterrupt: + log.info('Received Ctrl-C. 
Stopping telemetry stream.') + + except Exception as e: + log.error('TLM send error: %s' % str(e)) + + log.end() + +if __name__ == '__main__': + main() diff --git a/bliss/core/bin/bliss_yaml_validate.py b/bliss/core/bin/bliss_yaml_validate.py new file mode 100755 index 00000000..861dcebe --- /dev/null +++ b/bliss/core/bin/bliss_yaml_validate.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +''' +usage: bliss-yaml-validate + +Validate YAML files with applicable schema and/or advanced +content validation for CMD and TLM dictionaries. + +YAML validation is done through a combination of JSON Schema +(http://json-schema.org/) and Python-coded content validation. +The JSON Schema is used to validate general format of the YAML, +i.e dictionaries contain the expected keys, values are the +expected type, etc. + +Why JSON Schema? All of the available YAML validators did not +meet the robustness expected for this tool. Since JSON and YAML +are stored similarly in memory, the JSON Schema became an option. +The only difference between YAML and JSON is the use of multiple +documents in the same YAML file. The val.py module handles this +implication. 
See TBD wiki page for more details on developing +JSON schema for an applicable YAML file. + +Examples: + + $ bliss-yaml-validate --cmd + $ bliss-yaml-validate --tlm + $ bliss-yaml-validate --evr + $ bliss-yaml-validate --cmd --yaml /path/to/cmd.yaml + $ bliss-yaml-validate --tlm --yaml /path/to/tlm.yaml + $ bliss-yaml-validate --yaml /path/to/yaml --schema /path/to/schema +''' + + +import argparse +import os +import sys +import textwrap + +import bliss +from bliss.core import cmd, evr, log, tlm, val, limits + + +def validate(validator, yml, schema): + msgs = [] + validator = validator(yml, schema) + valid = validator.validate(messages=msgs) + + msg = "Validation: %s: yml=%s, schema=%s" + + if valid: + log.info(msg % ('SUCCESS', yml, schema)) + return 0 + else: + log.error(msg % ('FAILED', yml, schema)) + for msg in msgs: + log.error(msg) + return 1 + + +def main(): + argparser = argparse.ArgumentParser( + description = """ +Validate YAML files with applicable schema and/or advanced +content validation for CMD and TLM dictionaries. + +YAML validation is done through a combination of JSON Schema +(http://json-schema.org/) and Python-coded content validation. The +JSON Schema is used to validate general format of the YAML, i.e +dictionaries contain the expected keys, values are the expected type, +etc. + +Why JSON Schema? All of the available YAML validators did not meet the +robustness expected for this tool. Since JSON and YAML are stored +similarly in memory, the JSON Schema became an option. The only +difference between YAML and JSON is the use of multiple documents in +the same YAML file. The val.py module handles this implication. See +TBD wiki page for more details on developing JSON schema for an +applicable YAML file. 
+""", + epilog = """ +Examples: + + $ bliss-yaml-validate.py --cmd + $ bliss-yaml-validate.py --tlm + $ bliss-yaml-validate.py --evr + $ bliss-yaml-validate.py --cmd --yaml /path/to/cmd.yaml + $ bliss-yaml-validate.py --tlm --yaml /path/to/tlm.yaml + $ bliss-yaml-validate.py --yaml /path/to/yaml --schema /path/to/schema +""", + formatter_class = argparse.RawDescriptionHelpFormatter + ) + + argparser.add_argument( + '-y', '--yaml', + metavar = '', + type = str, + help = 'Path to YAML file.' + ) + + argparser.add_argument( + '-s', '--schema', + metavar = '', + type = str, + help = 'Path to JSON schema file.' + ) + + argparser.add_argument( + '-c', '--cmd', + action = 'store_true', + default = False, + help = """Command dictionary flag. If a YAML file is not + specified, the default command dictionary and schema will be used. + """ + ) + + argparser.add_argument( + '-t', '--tlm', + action = 'store_true', + default = False, + help = """Telemetry dictionary flag. If a YAML file is not + specified, the default telemetry dictionary and schema will be used. + """ + ) + + argparser.add_argument( + '-e', '--evr', + action = 'store_true', + default = False, + help = """EVR dictionary flag. If a YAML file is not specified, + the default EVR dictionary and schema will be used. + """ + ) + + argparser.add_argument( + '-l', '--limits', + action = 'store_true', + default = False, + help = """Limits dictionary flag. If a YAML file is not specified, + the default limits dictionary and schema will be used. + """ + ) + + if len(sys.argv) < 2: + argparser.print_usage() + print 'Run with --help for detailed help.' 
+ sys.exit(2) + + options = argparser.parse_args() + + log.begin() + + # Validate specified yaml file with specified schema + if options.yaml is not None and options.schema is not None: + # Check YAML exists + if not os.path.exists(options.yaml): + raise os.error(options.yaml + " does not exist.") + + # Check schema exists + if not os.path.exists(options.schema): + raise os.error(options.schema + " does not exist.") + + validator = val.Validator + retcode = validate(validator, options.yaml, options.schema) + + else: + if options.cmd: + yml = bliss.config.cmddict.filename + schema = cmd.getDefaultSchema() + validator = val.CmdValidator + elif options.evr: + yml = bliss.config.evrdict.filename + schema = evr.getDefaultSchema() + validator = val.Validator + elif options.tlm: + yml = bliss.config.tlmdict.filename + schema = tlm.getDefaultSchema() + validator = val.TlmValidator + elif options.limits: + yml = bliss.config.limits.filename + schema = limits.getDefaultSchema() + validator = val.Validator + + if options.yaml is not None: + yml = options.yaml + + retcode = validate(validator, yml, schema) + + log.end() + return retcode + + +if __name__ == "__main__": + main() diff --git a/bliss/core/bsc.py b/bliss/core/bsc.py new file mode 100644 index 00000000..381d3bbf --- /dev/null +++ b/bliss/core/bsc.py @@ -0,0 +1,815 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. 
User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +''' +BLISS Binary Stream Capturer + +The bliss.bsc module handles logging of network data to PCAP files +along with the server definition for RESTful manipulation of running +loggers. +''' + +import calendar +import datetime +import json +import os +import socket +import time + +from bottle import request, Bottle +import gevent +import gevent.monkey +import gevent.pool +import gevent.socket + +from bliss.core import pcap, log + +gevent.monkey.patch_all() + +RAW_SOCKET_FD = None +try: + import rawsocket + RAW_SOCKET_FD = rawsocket.rawsocket_fd() +except ImportError: + log.debug( + 'The rawsocket library cannot be imported. ' + 'Defaulting to the non-rawsocket approach.' + ) +except IOError: + log.info( + 'Unable to spawn rawsocket-helper. ' + 'This may be a permissions issue (not SUID root?). ' + 'Defaulting to non-rawsocket approach.' + ) + +ETH_P_IP = 0x0800 +ETH_P_ALL = 0x0003 +ETH_PROTOCOL = ETH_P_ALL + +class SocketStreamCapturer(object): + ''' Class for logging socket data to a PCAP file. ''' + + def __init__(self, capture_handlers, address, conn_type): + ''' + Args: + capture_handlers: + A list of handler configuration dictionaries that contains the + following values + + name + A unique name for this handler + + log_dir + The directory path into which log files will be written. + This path may include format strings which reference + handler metadata (E.g., {name}) as well as + `strftime format characters + ` + + Example:: + + '/tmp/additional_dir/test/%j' + + rotate_log + *True* or *False* flag specifying whether logs should be + rotated at a regular interval. + + rotate_log_index + If **rotate_log** is *True* this controls the time frame of + log rotations. The below values are all the valid options. 
+ Each row's values are equivalent:: + + 'year', 'years', 'tm_year', + 'month', 'months', 'tm_mon', + 'day', 'days', 'tm_mday', + 'hour', 'hours', 'tm_hour', + 'minute', 'minutes', 'tm_min', + 'second', 'seconds', 'tm_sec', + + Default:: + + 'day' + + rotate_log_delta + If **rotate_log** is *True* this controls the + **rotate_log_index** delta between the current time at log + rotation check versus the time the log file was open + necessary to trigger a rotation. + + Default:: + + 1 + + file_name_pattern (optional) + The pattern to use for the log file name. This will be + joined with the **log_dir** option to generate the full + log file path. This may also include format strings like + *log_dir*. + + Example:: + + '%Y-%m-%d-randomUDPtestData-{name}.pcap' + + Default:: + + '%Y-%m-%d-%H-%M-%S-{name}.pcap' + + pre_write_transforms (optional) + A list of *callables* to be run prior to data output for + this handler. The currently captured data is passed through + each transformation in order supplied with the output of + the previous being used as the input for the next. + + address: + The address to which a socket connection should be made. What is + considered a valid address depends on the **conn_type** value. + + udp:: + + [host, port number] + + E.g., ['', 8500] + + ethernet:: + + ['interface name', protocol number] + + E.g., ['p2p2', 0] + + tcp:: + + [host, port] + + E.g., ['127.0.0.1', 8125] + + conn_type: + A string identifying the connection type. Valid options are + *udp*, *ethernet*, and *tcp*. 
+ + ''' + if not isinstance(capture_handlers, list): + capture_handlers = [capture_handlers] + + self.capture_handlers = capture_handlers + for h in self.capture_handlers: + h['reads'] = 0 + h['data_read'] = 0 + + self.conn_type = conn_type + self.address = address + + if conn_type == 'udp': + self.socket = gevent.socket.socket(gevent.socket.AF_INET, + gevent.socket.SOCK_DGRAM) + self.socket.bind((address[0], address[1])) + # TODO: Make this configurable + self._buffer_size = 65565 + elif conn_type == 'ethernet': + socket_family = getattr(gevent.socket, + 'AF_PACKET', + gevent.socket.AF_INET) + + if RAW_SOCKET_FD: + self.socket = gevent.socket.fromfd(RAW_SOCKET_FD, + socket_family, + gevent.socket.SOCK_RAW, + socket.htons(ETH_PROTOCOL)) + else: + self.socket = gevent.socket.socket(socket_family, + gevent.socket.SOCK_RAW, + socket.htons(ETH_PROTOCOL)) + + self.socket.bind((address[0], address[1])) + self._buffer_size = 1518 + elif conn_type == 'tcp': + self.socket = gevent.socket.socket(gevent.socket.AF_INET, + gevent.socket.SOCK_STREAM) + self.socket.connect((address[0], address[1])) + # TODO: Make this configurable + self._buffer_size = 65565 + + self._init_log_file_handlers() + + @property + def handler_count(self): + ''' Return the number of active capture handlers. ''' + return len(self.capture_handlers) + + def capture_packet(self): + ''' Write packet data to the logger's log file. ''' + data = self.socket.recv(self._buffer_size) + + for h in self.capture_handlers: + h['reads'] += 1 + h['data_read'] += len(data) + + d = data + if 'pre_write_transforms' in h: + for data_transform in h['pre_write_transforms']: + d = data_transform(d) + h['logger'].write(d) + + def clean_up(self): + ''' Clean up the socket and log file handles. ''' + self.socket.close() + for h in self.capture_handlers: + h['logger'].close() + + def socket_monitor_loop(self): + ''' Monitor the socket and log captured data. 
''' + try: + while True: + gevent.socket.wait_read(self.socket.fileno()) + + self._handle_log_rotations() + self.capture_packet() + finally: + self.clean_up() + + def add_handler(self, handler): + ''' Add an additional handler + + Args: + handler: + A dictionary of handler configuration for the handler + that should be added. See :func:`__init__` for details + on valid parameters. + ''' + handler['logger'] = self._get_logger(handler) + handler['reads'] = 0 + handler['data_read'] = 0 + + self.capture_handlers.append(handler) + + def remove_handler(self, name): + ''' Remove a handler given a name + + Note, if multiple handlers have the same name the last matching + instance in the handler list will be removed. + + Args: + name: + The name of the handler to remove + ''' + index = None + for i, h in enumerate(self.capture_handlers): + if h['name'] == name: + index = i + + if index is not None: + self.capture_handlers[index]['logger'].close() + del self.capture_handlers[index] + + def dump_handler_config_data(self): + ''' Return capture handler configuration data. + + Return a dictionary of capture handler configuration data of the form: + + .. code-block:: none + + [{ + 'handler': , + + 'log_file_path': , + + 'conn_type': , + + 'address': + }, ...] + + ''' + ignored_keys = ['logger', 'log_rot_time', 'reads', 'data_read'] + config_data = [] + for h in self.capture_handlers: + config_data.append({ + 'handler': { + k:v for k, v in h.iteritems() + if k not in ignored_keys + }, + 'log_file_path': h['logger']._stream.name, + 'conn_type': self.conn_type, + 'address': self.address, + }) + return config_data + + def dump_all_handler_stats(self): + ''' Return handler capture statistics + + Return a dictionary of capture handler statistics of the form: + + .. 
code-block:: none + + [{ + 'name': The handler's name, + + 'reads': The number of packet reads this handler has received + + 'data_read_length': The total length of the data received + + 'approx_data_rate': The approximate data rate for this handler + }, ...] + + ''' + stats = [] + for h in self.capture_handlers: + now = calendar.timegm(time.gmtime()) + rot_time = calendar.timegm(h['log_rot_time']) + time_delta = now - rot_time + approx_data_rate = '{} bytes/second'.format(h['data_read'] / float(time_delta)) + + stats.append({ + 'name': h['name'], + 'reads': h['reads'], + 'data_read_length': '{} bytes'.format(h['data_read']), + 'approx_data_rate': approx_data_rate + }) + + return stats + + def _handle_log_rotations(self): + ''' Rotate each handler's log file if necessary ''' + for h in self.capture_handlers: + if self._should_rotate_log(h): + self._rotate_log(h) + + def _should_rotate_log(self, handler): + ''' Determine if a log file rotation is necessary ''' + if handler['rotate_log']: + rotate_time_index = handler.get('rotate_log_index', 'day') + try: + rotate_time_index = self._decode_time_rotation_index(rotate_time_index) + except ValueError: + rotate_time_index = 2 + + rotate_time_delta = handler.get('rotate_log_delta', 1) + + cur_t = time.gmtime() + first_different_index = 9 + for i in range(9): + if cur_t[i] != handler['log_rot_time'][i]: + first_different_index = i + break + + if first_different_index < rotate_time_index: + # If the time deltas differ by a time step greater than what we + # have set for the rotation (I.e., months instead of days) we will + # automatically rotate. 
+ return True + else: + time_delta = cur_t[rotate_time_index] - handler['log_rot_time'][rotate_time_index] + return time_delta >= rotate_time_delta + + return False + + def _decode_time_rotation_index(self, time_rot_index): + ''' Return the time struct index to use for log rotation checks ''' + time_index_decode_table = { + 'year': 0, 'years': 0, 'tm_year': 0, + 'month': 1, 'months': 1, 'tm_mon': 1, + 'day': 2, 'days': 2, 'tm_mday': 2, + 'hour': 3, 'hours': 3, 'tm_hour': 3, + 'minute': 4, 'minutes': 4, 'tm_min': 4, + 'second': 5, 'seconds': 5, 'tm_sec': 5, + } + + if time_rot_index not in time_index_decode_table.keys(): + raise ValueError('Invalid time option specified for log rotation') + + return time_index_decode_table[time_rot_index] + + def _rotate_log(self, handler): + ''' Rotate a handlers log file ''' + handler['logger'].close() + handler['logger'] = self._get_logger(handler) + + def _get_log_file(self, handler): + ''' Generate log file path for a given handler + + Args: + handler: + The handler configuration dictionary for which a log file + path should be generated. 
+ ''' + if 'file_name_pattern' not in handler: + filename = '%Y-%m-%d-%H-%M-%S-{name}.pcap' + else: + filename = handler['file_name_pattern'] + + log_file = handler['log_dir'] + if 'path' in handler: + log_file = os.path.join(log_file, handler['path'], filename) + else: + log_file = os.path.join(log_file, filename) + + log_file = time.strftime(log_file, time.gmtime()) + log_file = log_file.format(**handler) + + return log_file + + def _get_logger(self, handler): + ''' Initialize a PCAP stream for logging data ''' + log_file = self._get_log_file(handler) + + if not os.path.isdir(os.path.dirname(log_file)): + os.makedirs(os.path.dirname(log_file)) + + handler['log_rot_time'] = time.gmtime() + return pcap.open(log_file, mode='a') + + def _init_log_file_handlers(self): + ''' Initialize log file handles ''' + for handler in self.capture_handlers: + handler['logger'] = self._get_logger(handler) + + +class StreamCaptureManager(object): + ''' Manage handlers for binary data capture and logging ''' + + def __init__(self, mngr_conf, lgr_conf): + ''' + Args: + mngr_conf: + Configuration dictionary for the manager. At + the minimum this should contain the following: + + .. code-block:: none + + { + 'root_log_directory': '' + } + + lgr_conf: + Configuration data for all the logger instances that + should be created by default. Additional information on + parameters that are required for logger initialization can be + found in :func:`add_logger`. Data should be of the form: + + .. code-block:: none + + [ + name, address, conn_type, log_dir_path, misc_conf_dict), + name, address, conn_type, log_dir_path, misc_conf_dict), + ] + + ''' + self._logger_data = {} + self._stream_capturers = {} + self._pool = gevent.pool.Pool(50) + self._mngr_conf = mngr_conf + + #TODO: Remove this kwargs passing if not going to add more options + #TODO: Abstract this out to a function call to handle conf parsing? 
+ for name, address, conn_type, log_dir_path, misc_conf in lgr_conf: + self.add_logger(name, address, conn_type, log_dir_path, **misc_conf) + + def add_logger(self, name, address, conn_type, log_dir_path=None, **kwargs): + ''' Add a new stream capturer to the manager. + + Add a new stream capturer to the manager with the provided configuration + details. If an existing capturer is monitoring the same address the + new handler will be added to it. + + Args: + name: + A string defining the new capturer's name. + + address: + A tuple containing address data for the capturer. Check the + :class:`SocketStreamCapturer` documentation for what is + required. + + conn_type: + A string defining the connection type. Check the + :class:`SocketStreamCapturer` documentation for a list of valid + options. + + log_dir_path: + An optional path defining the directory where the + capturer should write its files. If this isn't provided the root + log directory from the manager configuration is used. + + ''' + capture_handler_conf = kwargs + + if not log_dir_path: + log_dir_path = self._mngr_conf['root_log_directory'] + + log_dir_path = os.path.normpath(os.path.expanduser(log_dir_path)) + + capture_handler_conf['log_dir'] = log_dir_path + capture_handler_conf['name'] = name + if 'rotate_log' not in capture_handler_conf: + capture_handler_conf['rotate_log'] = True + + transforms = [] + if 'pre_write_transforms' in capture_handler_conf: + for transform in capture_handler_conf['pre_write_transforms']: + if isinstance(transform, str): + if globals().has_key(transform): + transforms.append(globals().get(transform)) + else: + msg = ( + 'Unable to load data transformation ' + '"{}" for handler "{}"' + ).format( + transform, + capture_handler_conf['name'] + ) + log.warn(msg) + elif hasattr(transform, '__call__'): + transforms.append(transform) + else: + msg = ( + 'Unable to determine how to load data transform "{}"' + ).format(transform) + log.warn(msg) + 
capture_handler_conf['pre_write_transforms'] = transforms + + address_key = str(address) + if address_key in self._stream_capturers: + capturer = self._stream_capturers[address_key][0] + capturer.add_handler(capture_handler_conf) + return + + socket_logger = SocketStreamCapturer(capture_handler_conf, + address, + conn_type) + greenlet = gevent.spawn(socket_logger.socket_monitor_loop) + + self._stream_capturers[address_key] = ( + socket_logger, + greenlet + ) + self._pool.add(greenlet) + + def stop_capture_handler(self, name): + ''' Remove all handlers with a given name + + Args: + name: + The name of the handler(s) to remove. + ''' + empty_capturers_indeces = [] + for k, sc in self._stream_capturers.iteritems(): + stream_capturer = sc[0] + stream_capturer.remove_handler(name) + + if stream_capturer.handler_count == 0: + self._pool.killone(sc[1]) + empty_capturers_indeces.append(k) + + for i in empty_capturers_indeces: + del self._stream_capturers[i] + + def stop_stream_capturer(self, address): + ''' Stop a capturer that the manager controls. + + Args: + address: + An address array of the form ['host', 'port'] or similar + depending on the connection type of the stream capturer being + terminated. The capturer for the address will be terminated + along with all handlers for that capturer if the address is + that of a managed capturer. + + Raises: + ValueError: + The provided address doesn't match a capturer that is + currently managed. + ''' + address = str(address) + if address not in self._stream_capturers: + raise ValueError('Capturer address does not match a managed capturer') + + stream_cap = self._stream_capturers[address] + self._pool.killone(stream_cap[1]) + del self._stream_capturers[address] + + def rotate_capture_handler_log(self, name): + ''' Force a rotation of a handler's log file + + Args: + name: + The name of the handler who's log file should be rotated. 
+ ''' + for sc_key, sc in self._stream_capturers.iteritems(): + for h in sc[0].capture_handlers: + if h['name'] == name: + sc[0]._rotate_log(h) + + def get_logger_data(self): + ''' Return data on managed loggers. + + Returns a dictionary of managed logger configuration data. The format + is primarily controlled by the + :func:`SocketStreamCapturer.dump_handler_config_data` function:: + + { + : + } + + ''' + return { + address : stream_capturer[0].dump_handler_config_data() + for address, stream_capturer in self._stream_capturers.iteritems() + } + + def get_handler_stats(self): + ''' Return handler read statistics + + Returns a dictionary of managed handler data read statistics. The + format is primarily controlled by the + :func:`SocketStreamCapturer.dump_all_handler_stats` function:: + + { + : + } + + ''' + return { + address : stream_capturer[0].dump_all_handler_stats() + for address, stream_capturer in self._stream_capturers.iteritems() + } + + def get_capture_handler_config_by_name(self, name): + ''' Return data for handlers of a given name. + + Args: + name: + Name of the capture handler(s) to return config data for. + + Returns: + Dictionary dump from the named capture handler as given by + the :func:`SocketStreamCapturer.dump_handler_config_data` method. + ''' + handler_confs = [] + for address, stream_capturer in self._stream_capturers.iteritems(): + handler_data = stream_capturer[0].dump_handler_config_data() + for h in handler_data: + if h['handler']['name'] == name: + handler_confs.append(h) + + return handler_confs + + def run_socket_event_loop(self): + ''' Start monitoring managed loggers. ''' + try: + while True: + self._pool.join() + + # If we have no loggers we'll sleep briefly to ensure that we + # allow other processes (I.e., the webserver) to do their work. 
+ if len(self._logger_data.keys()) == 0: + time.sleep(0.5) + + except KeyboardInterrupt: + pass + finally: + self._pool.kill() + + +class StreamCaptureManagerServer(Bottle): + ''' Webserver for management of Binary Stream Capturers. ''' + + def __init__(self, logger_manager, host, port): + ''' + Args: + logger_manager: + Instance of :class:`StreamCaptureManager` which the + server will use to manage logger instances. + + host: + The host for webserver configuration. + + port: + The port for webserver configuration. + ''' + self._host = host + self._port = port + self._logger_manager = logger_manager + self._app = Bottle() + self._route() + + def start(self): + ''' Starts the server. ''' + self._app.run(host=self._host, port=self._port) + + def _route(self): + ''' Handles server route instantiation. ''' + self._app.route('/', + method='GET', + callback=self._get_logger_list) + self._app.route('/stats', + method='GET', + callback=self._fetch_handler_stats) + self._app.route('//start', + method='POST', + callback=self._add_logger_by_name) + self._app.route('//stop', + method='DELETE', + callback=self._stop_logger_by_name) + self._app.route('//config', + method='GET', + callback=self._get_logger_conf) + self._app.route('//rotate', + method='POST', + callback=self._rotate_capturer_log) + + def _add_logger_by_name(self, name): + ''' Handles POST requests for adding a new logger. + + Expects logger configuration to be passed in the request's query string. + The logger name is included in the URL and the address components and + connection type should be included as well. The loc attribute is + defaulted to "localhost" when making the socket connection if not + defined. + + loc = IP / interface + port = port / protocol + conn_type = udp or ethernet + + Raises: + ValueError: + if the port or connection type are not supplied. 
+ ''' + data = dict(request.forms) + loc = data.pop('loc', '') + port = data.pop('port', None) + conn_type = data.pop('conn_type', None) + + if not port or not conn_type: + e = 'Port and/or conn_type not set' + raise ValueError(e) + address = [loc, int(port)] + + if 'rotate_log' in data: + data['rotate_log'] = True if data == 'true' else False + + if 'rotate_log_delta' in data: + data['rotate_log_delta'] = int(data['rotate_log_delta']) + + self._logger_manager.add_logger(name, address, conn_type, **data) + + def _stop_logger_by_name(self, name): + ''' Handles requests for termination of a handler by name ''' + self._logger_manager.stop_capture_handler(name) + + def _get_logger_list(self): + ''' Retrieves a JSON object of running handler information. + + Returns a JSON object containing config data for all the currently + running loggers. Structure of the JSON object is controlled by the + form of the dictionary returned from + :func:`StreamCaptureManager.get_logger_data` + ''' + return json.dumps(self._logger_manager.get_logger_data()) + + def _get_logger_conf(self, name): + ''' Retrieves a config for loggers matching a given name. + + Note that there isn't a requirement that capture handles have unique + names. This will return all handlers with a matching name in the event + that there is more than one. If the name doesn't match you will get + an empty JSON object. + ''' + return json.dumps(self._logger_manager.get_capture_handler_config_by_name(name)) + + def _rotate_capturer_log(self, name): + ''' Trigger log rotation for a given handler name. + + Note that if the file name pattern provided isn't sufficient for + a rotation to occur with a new unique file name you will not see + a log rotation . Be sure to timestamp your files in such a way + to ensure that this isn't the case! The default file name pattern + includes year, month, day, hours, minutes, and seconds to make sure + this works as expected. 
+ ''' + self._logger_manager.rotate_capture_handler_log(name) + + def _fetch_handler_stats(self): + ''' Retrieves a JSON object of running handler stats + + Returns a JSON object containing data read statistics for all + running handlers. Structure of the JOSN objects is controlled by + :func:`StreamCaptureManager.dump_all_handler_stats`. + ''' + return json.dumps(self._logger_manager.get_handler_stats()) + +def identity_transform(data): + '''Example data transformation function for a capture handler.''' + return data diff --git a/bliss/core/ccsds.py b/bliss/core/ccsds.py new file mode 100644 index 00000000..ece72746 --- /dev/null +++ b/bliss/core/ccsds.py @@ -0,0 +1,124 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL +# RIGHTS RESERVED. United States Government Sponsorship +# acknowledged. Any commercial use must be negotiated with the Office +# of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By +# accepting this software, the user agrees to comply with all +# applicable U.S. export laws and regulations. User has the +# responsibility to obtain export licenses, or other export authority +# as may be required before exporting such information to foreign +# countries or providing access to foreign persons. + + +""" +Consultative Committee for Space Data Systems (CCSDS) + +The bliss.core.ccsds module provides CCSDS header definitions and +datatypes. +""" + + +from bliss.core import json, tlm, util + + +class CcsdsDefinition(json.SlotSerializer, object): + """A :class:`CcsdsDefinition` is analogous to a + :class:`PacketDefinition`, except it defines the expected values + in a CCSDS header. + + :class:`CcsdsDefinition`s are most often specified in a ``ccsds:`` + block within a YAML ``!Command`` or ``!Packet`` definition. 
+ """ + + __slots__ = 'version', 'type', 'secondary', 'apid', 'seqflags', 'length' + + def __init__(self, *args, **kwargs): + self.version = kwargs.get('version' , 0) + self.type = kwargs.get('type' , 0) + self.secondary = kwargs.get('secondary', None) + self.apid = kwargs.get('apid' , 0) + self.seqflags = kwargs.get('seqflags' , 3) # No segmentation + self.length = kwargs.get('length' , 0) + + def __repr__(self): + return util.toRepr(self) + + @property + def shflag(self): + """Indicates whether a CCSDS Secondary Header is present.""" + return 1 if self.secondary else 0 + + +class CcsdsHeader(tlm.Packet): + """A :class:`CcsdsHeader` is just like any other :class:`Packet`, + except that the CCSDS (primary) header :class:`FieldDefinition`s + are already defined. That is, there is no need to pass in a + :class`PacketDefinition` at initialization, only the underlying + packet data to decode as a CCSDS header. + """ + + # NOTE: CcsdsHeader.Definition is distinct from a CcsdsDefinition. + # The former specifies how to decode the fields of a CCSDS header. + # The latter defines the expected values for those fields within a + # a particular type of packet. 
+ + Definition = tlm.PacketDefinition( + name = 'CCSDS_Header', + fields = [ + tlm.FieldDefinition( + name = 'version', + bytes = 0, + type = 'U8', + mask = 0xE0 + ), + tlm.FieldDefinition( + name = 'type', + bytes = 0, + type = 'U8', + mask = 0x10 + ), + tlm.FieldDefinition( + name = 'shflag', + bytes = 0, + type = 'U8', + mask = 0x08 + ), + tlm.FieldDefinition( + name = 'apid', + bytes = [0, 1], + type = 'MSB_U16', + mask = 0x07FF + ), + tlm.FieldDefinition( + name = 'seqflags', + bytes = 2, + type = 'U8', + mask = 0xC0, + enum = { + 0: 'Continuation Segment', + 1: 'First Segment', + 2: 'Last Segment', + 3: 'Unsegmented', + } + ), + tlm.FieldDefinition( + name = 'seqcount', + bytes = [2, 3], + type = 'MSB_U16', + mask = 0x3FFF + ), + tlm.FieldDefinition( + name = 'length', + bytes = [4, 5], + type = 'MSB_U16' + ) + ] + ) + + def __init__(self, data=None): + super(CcsdsHeader, self).__init__(CcsdsHeader.Definition, data) + self.seqflags = 3 diff --git a/bliss/core/cfg.py b/bliss/core/cfg.py new file mode 100644 index 00000000..2353458d --- /dev/null +++ b/bliss/core/cfg.py @@ -0,0 +1,417 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. 
+ +""" +BLISS Configuration + +The bliss.core.cfg module provides classes and functions to manage +(re)configurable aspects of BLISS via a YAML configuration file. + +""" + +import os +import platform +import sys +import time +import re + +import yaml + +import bliss +from bliss.core import log, util + + +DEFAULT_PATH_VARS = { + 'year': time.strftime('%Y', time.gmtime()), + 'doy' : time.strftime('%j', time.gmtime()) +} + +PATH_KEYS = 'directory', 'file', 'filename', 'path', 'pathname' + +def expandConfigPaths (config, prefix=None, datetime=None, pathvars=None, parameter_key='', *keys): + """Updates all relative configuration paths in dictionary config, + which contain a key in keys, by prepending prefix. + + If keys is omitted, it defaults to 'directory', 'file', + 'filename', 'path', 'pathname'. + + See util.expandPath(). + """ + if len(keys) == 0: + keys = PATH_KEYS + + for name, value in config.items(): + if name in keys and type(name) is str: + expanded = util.expandPath(value, prefix) + cleaned = replaceVariables(expanded, datetime=datetime, pathvars=pathvars) + + for p in cleaned: + if not os.path.exists(p): + msg = "Config parameter {}.{} specifies nonexistent path {}".format(parameter_key, name, p) + log.warn(msg) + + config[name] = cleaned[0] if len(cleaned) == 1 else cleaned + + elif type(value) is dict: + param_key = name if parameter_key == '' else parameter_key + '.' 
def replaceVariables(path, datetime=None, pathvars=None):
    """Return a list of paths with ${var} references and strftime format
    directives replaced.

    Args:
        path:     a path string or a list of path strings.
        datetime: a time.struct_time used for strftime substitution
                  (defaults to the current UTC time).
        pathvars: a map of variable name to value; a value may be a
                  string, an integer, or a list of either, in which
                  case one output path is produced per list element.

    Raises:
        TypeError: if a referenced path variable maps to a dict.

    FIX: variable expansion is now scoped to each input path.  The
    original expanded list-valued variables across the accumulated
    result list, duplicating earlier paths once per list element.
    The regex is also now a raw string (the original '\\$\\{...' form
    is an invalid escape sequence in modern Python).
    """
    if datetime is None:
        datetime = time.gmtime()

    if pathvars is None:
        pathvars = { }

    # Normalize the input to a list of paths.
    path_list = path if isinstance(path, list) else [ path ]

    # Matches ${variable} references (non-greedy).
    regex = re.compile(r'\$\{(.*?)\}')

    newpath_list = [ ]

    for p in path_list:
        # Expand this path into one or more paths, one per combination
        # of list-valued variable values.
        expanded = [ p ]

        for k in regex.findall(p):
            if k not in pathvars:
                continue

            v = pathvars[k]

            # Value must be a string, integer, or list thereof.
            if type(v) is dict:
                msg = "Path variable must refer to string, integer, or list"
                raise TypeError(msg)

            value_list = v if type(v) is list else [ v ]

            # Cross-product: every value of this variable against every
            # expansion produced so far for this path.
            expanded = [ e.replace('${%s}' % k, str(value))
                         for value in value_list
                         for e in expanded ]

        newpath_list.extend(expanded)

    # Apply strftime directives (e.g. %Y, %j) to every expanded path.
    return [ time.strftime(p, datetime) for p in newpath_list ]
+ """ + _ROOT_DIR = os.path.abspath(os.environ.get('BLISS_ROOT', os.getcwd())) + + if 'BLISS_ROOT' not in os.environ: + log.warn('BLISS_ROOT not set. Defaulting to "%s"' % _ROOT_DIR) + + def __init__ (self, filename=None, data=None, config=None, pathvars=None): + """Creates a new BlissConfig object with configuration data read from + the given YAML configuration file or passed-in via the given + config dictionary. + + If filename and data are not given, it defaults to the following in + order depending on the presence of environment variables:: + + ${BLISS_CONFIG} + + """ + self._filename = None + self._data = data + self._datetime = time.gmtime() + self._pathvars = pathvars + + if data is None and filename is None: + if 'BLISS_CONFIG' in os.environ: + filename = os.path.abspath(os.environ.get('BLISS_CONFIG')) + else: + msg = 'BLISS_CONFIG is not set. Exiting ...' + log.error(msg) + raise ValueError(msg) + + if config is None: + self.reload(filename, data) + else: + self._config = config + self._filename = filename + + def __contains__ (self, name): + """Returns True if name is in this BlissConfig, False otherwise.""" + return name in self._config + + def __eq__ (self, other): + return isinstance(other, BlissConfig) and self._config == other._config + + def __ne__ (self, other): + return not self == other + + def __getattr__ (self, name): + """Returns the attribute value BlissConfig.name.""" + if name not in self: + raise AttributeError('No attribute "%s" in BlissConfig.' % name) + return self._getattr_(name) + + def __getitem__ (self, name): + """Returns the value of BlissConfig[name].""" + if name not in self: + raise KeyError('No key "%s" in BlissConfig.' 
% name) + return self._getattr_(name) + + def __repr__ (self): + """Return a printable representation of this BlissConfig.""" + args = [ ] + + if self._filename: + args.append('filename="%s"' % self._filename) + + args.append('data=%s' % self._config) + return '%s(%s)' % (self.__class__.__name__, ', '.join(args)) + + def __str__ (self): + """Return a string representation of this BlissConfig.""" + return self.__repr__() + + def _getattr_ (self, name): + """Internal method. Used by __getattr__() and __getitem__().""" + value = self._config.get(name) + + if type(value) is dict: + value = BlissConfig(self._filename, config=value) + + return value + + @property + def _directory (self): + """The directory for this BlissConfig.""" + if self._filename is None: + return os.path.join(self._ROOT_DIR, 'config') + else: + return os.path.dirname(self._filename) + + @property + def _hostname (self): + """The hostname for this BlissConfig.""" + return platform.node().split('.')[0] + + @property + def _platform (self): + """The platform for this BlissConfig.""" + return sys.platform + + @property + def _datapaths(self): + """Returns a simple key-value map for easy access to data paths""" + paths = { } + try: + data = self._config['data'] + for k in data: + paths[k] = data[k]['path'] + except KeyError as e: + raise BlissConfigMissing(e.message) + except Exception as e: + raise BlissConfigError('Error reading data paths: %s' % e) + + return paths + + def reload (self, filename=None, data=None): + """Reloads the a BLISS configuration. + + The BLISS configuration is automatically loaded when the BLISS + package is first imported. To replace the configuration, call + reload() (defaults to the current config.filename) or + reload(new_filename). 
+ """ + if data is None and filename is None: + filename = self._filename + + self._config = loadYAML(filename, data) + self._filename = filename + + if self._config is not None: + keys = 'default', self._platform, self._hostname + self._config = flatten(self._config, *keys) + + # on reload, if pathvars have not been set, we want to start + # with the defaults, add the platform and hostname, and + # merge in all of the information provided in the config + if self._pathvars is None: + self._pathvars = self.getDefaultPathVariables() + + expandConfigPaths(self._config, + self._directory, + self._datetime, + merge(self._config, self._pathvars)) + + else: + self._config = { } + + + def get (self, name, default=None): + """Returns the attribute value *BlissConfig.name* or *default* + if name does not exist. + + The name may be a series of attributes separated periods. For + example, "foo.bar.baz". In that case, lookups are attempted + in the following order until one succeeeds: + + 1. BlissConfig['foo.bar.baz'], and + 2. BlissConfig.foo.bar.baz + 3. (If both fail, return *default*) + """ + if name in self: + return self[name] + + config = self + parts = name.split('.') + heads = parts[:-1] + tail = parts[-1] + + for part in heads: + if part in config and type(config[part]) is BlissConfig: + config = config[part] + else: + return default + + return config[tail] if tail in config else default + + + def getDefaultFilename(self): + if 'BLISS_CONFIG' in os.environ: + filename = os.path.abspath(os.environ.get('BLISS_CONFIG')) + else: + msg = 'BLISS_CONFIG not set. 
Falling back to BLISS_ROOT or CWD' + log.warn(msg) + filename = os.path.join(self._directory, 'config.yaml') + + return filename + + def getDefaultPathVariables(self): + pathvars = DEFAULT_PATH_VARS + pathvars['platform'] = self._platform + pathvars['hostname'] = self._hostname + return pathvars + + def addPathVariables(self, pathvars): + """ Adds path variables to the pathvars map property""" + if type(pathvars) is dict: + self._pathvars = merge(self._pathvars, pathvars) + + +# Create a singleton BlissConfig accessible via bliss.config +sys.modules['bliss'].config = BlissConfig() + +# Re-initialize logging now that bliss.config.logging.* parameters may exist. +log.reinit() diff --git a/bliss/core/cmd.py b/bliss/core/cmd.py new file mode 100644 index 00000000..2cda5454 --- /dev/null +++ b/bliss/core/cmd.py @@ -0,0 +1,516 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Commands + +The bliss.core.cmd module provides commands and command dictionaries. +Dictionaries contain command and argument definitions. 
+""" + +import os +import pkg_resources +import struct +import yaml + +import bliss +from bliss.core import json, log, util + + +MAX_CMD_WORDS = 54 + + +class ArgDefn(json.SlotSerializer, object): + """ArgDefn - Argument Definition + + Argument Definitions encapsulate all information required to define + a single command argument. This includes the argument name, its + description, units, type, byte position within a command, name-value + enumerations, and allowed value ranges. Name, type, and byte + position are required. All others are optional. + + A fixed argument (fixed=True) defines a fixed bit pattern in that + argument's byte position(s). + """ + __slots__ = [ + "name", "desc", "units", "_type", "bytes", "_enum", "range", + "fixed", "value" + ] + + def __init__(self, *args, **kwargs): + """Creates a new Argument Definition. + """ + for slot in ArgDefn.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, name, kwargs.get(name, None)) + + def __repr__(self): + return util.toRepr(self) + + @property + def enum(self): + """The argument enumeration.""" + return self._enum + + @enum.setter + def enum(self, value): + self._enum = None + if value is not None: + self._enum = dict(reversed(pair) for pair in value.items()) + + @property + def nbytes(self): + """The number of bytes required to encode this argument.""" + return self.type.nbytes + + @property + def type(self): + """The argument type.""" + return self._type + + @type.setter + def type(self, value): + from bliss.core import dtype + self._type = dtype.get(value) if type(value) is str else value + + @property + def startword(self): + """The argument start word in the command""" + return self.slice().start / 2 + 1 + + @property + def startbit(self): + """The argument start bit in the word""" + return self.slice().start % 2 * 8 + + def decode(self, bytes): + """Decodes the given bytes according to this BLISS Argument + Definition. 
+ """ + value = self.type.decode(bytes) + if self._enum is not None: + for name, val in self._enum.items(): + if value == val: + value = name + break + return value + + def encode(self, value): + """Encodes the given value according to this BLISS Argument + Definition. + """ + if type(value) == str and self.enum and value in self.enum: + value = self.enum[value] + return self.type.encode(value) if self.type else bytearray() + + def slice(self, offset=0): + """Returns a Python slice object (e.g. for array indexing) indicating + the start and stop byte position of this Command argument. The + start and stop positions may be translated by the optional byte + offset. + """ + if type(self.bytes) is int: + start = self.bytes + stop = start + 1 + else: + start = self.bytes[0] + stop = self.bytes[1] + 1 + + return slice(start + offset, stop + offset) + + def validate(self, value, messages=None): + """Returns True if the given Argument value is valid, False otherwise. + Validation error messages are appended to an optional messages + array. + """ + valid = True + primitive = value + + def log(msg): + if messages is not None: + messages.append(msg) + + if self.enum: + if value not in self.enum.keys(): + valid = False + args = (self.name, str(value)) + log("%s value '%s' not in allowed enumerated values." % args) + else: + primitive = int(self.enum[value]) + + if self.type: + if self.type.validate(primitive, messages, self.name) is False: + valid = False + + if self.range: + if primitive < self.range[0] or primitive > self.range[1]: + valid = False + args = (self.name, str(primitive), self.range[0], self.range[1]) + log("%s value '%s' out of range [%d, %d]." % args) + + return valid + + + +class Cmd(object): + """Cmd - Command + + Commands reference their Command Definition and may contain arguments. + """ + def __init__(self, defn, *args, **kwargs): + """Creates a new BLISS Command based on the given command + definition and command arguments. 
A Command may be created + with either positional or keyword arguments, but not both. + """ + self.defn = defn + + if len(args) > 0 and len(kwargs) > 0: + msg = 'A Cmd may be created with either positional or ' + msg += 'keyword arguments, but not both.' + raise TypeError(msg) + + if len(kwargs) > 0: + args = [ ] + for defn in self.defn.args: + if defn.name in kwargs: + value = kwargs.pop(defn.name) + else: + value = None + args.append(value) + + self.args = args + self._unrecognized = kwargs + + + def __repr__(self): + return self.defn.name + " " + " ".join([str(a) for a in self.args]) + + @property + def desc(self): + """The command description.""" + return self.defn.desc + + @property + def name(self): + """The command name.""" + return self.defn.name + + @property + def opcode(self): + """The command opcode.""" + return self.defn.opcode + + @property + def subsystem(self): + """The subsystem to which this command applies.""" + return self.defn.subsystem + + @property + def argdefns(self): + """The command argument definitions.""" + return self.defn.argdefns + + def encode(self, pad=106): + """Encodes this BLISS command to binary. + + If pad is specified, it indicates the maximum size of the encoded + command in bytes. If the encoded command is less than pad, the + remaining bytes are set to zero. + + Commands sent to ISS payloads over 1553 are limited to 64 words + (128 bytes) with 11 words (22 bytes) of CCSDS overhead (SSP + 52050J, Section 3.2.3.4). This leaves 53 words (106 bytes) for + the command itself. 
+ """ + opcode = struct.pack('>H', self.defn.opcode) + offset = len(opcode) + size = max(offset + self.defn.argsize, pad) + encoded = bytearray(size) + + encoded[0:offset] = opcode + encoded[offset] = self.defn.argsize + offset += 1 + index = 0 + + for defn in self.defn.argdefns: + if defn.fixed: + value = defn.value + else: + value = self.args[index] + index += 1 + encoded[defn.slice(offset)] = defn.encode(value) + + return encoded + + def validate(self, messages=None): + """Returns True if the given Command is valid, False otherwise. + Validation error messages are appended to an optional messages + array. + """ + return self.defn.validate(self, messages) + + + +class CmdDefn(json.SlotSerializer, object): + """CmdDefn - Command Definition + + Command Definitions encapsulate all information required to define a + single command. This includes the command name, its opcode, + subsystem, description and a list of argument definitions. Name and + opcode are required. All others are optional. + """ + __slots__ = ( 'name', '_opcode', 'subsystem', 'ccsds', 'title', 'desc', + 'argdefns' ) + + def __init__(self, *args, **kwargs): + """Creates a new Command Definition.""" + for slot in CmdDefn.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, slot, kwargs.get(name, None)) + + if self.ccsds: + import ccsds + self.ccsds = ccsds.CcsdsDefinition(**self.ccsds) + + if self.argdefns is None: + self.argdefns = [] + + + def __repr__(self): + return util.toRepr(self) + + @property + def args (self): + """The argument definitions to this command (excludes fixed + arguments). + """ + return filter(lambda a: not a.fixed, self.argdefns) + + @property + def nargs(self): + """The number of arguments to this command (excludes fixed + arguments). + """ + return len(self.args) + + @property + def nbytes(self): + """The number of bytes required to encode this command. 
+ + Encoded commands are comprised of a two byte opcode, followed by a + one byte size, and then the command argument bytes. The size + indicates the number of bytes required to represent command + arguments. + """ + return len(self.opcode) + 1 + sum(arg.nbytes for arg in self.argdefns) + + @property + def opcode(self): + """Returns the opcode for the given command.""" + return self._opcode + + @property + def argsize(self): + """The total size in bytes of all the command arguments.""" + argsize = sum(arg.nbytes for arg in self.argdefns) + return argsize if len(self.argdefns) > 0 else 0 + + def staging_required(self): + maxbytes = getMaxCmdSize() + if self.argsize > maxbytes: + msg = "Command %s larger than %d bytes. Staging required." + log.debug(msg, self.name, maxbytes) + return False + else: + return True + + def toJSON(self): + obj = super(CmdDefn, self).toJSON() + obj['arguments'] = obj.pop('argdefns') + + if self.ccsds is None: + obj.pop('ccsds', None) + + return obj + + def validate(self, cmd, messages=None): + """Returns True if the given Command is valid, False otherwise. + Validation error messages are appended to an optional messages + array. + """ + valid = True + args = [ arg for arg in cmd.args if arg is not None ] + + if self.nargs != len(args): + valid = False + if messages is not None: + msg = 'Expected %d arguments, but received %d.' + messages.append(msg % (self.nargs, len(args))) + + for defn, value in zip(self.args, cmd.args): + if value is None: + valid = False + if messages is not None: + messages.append('Argument "%s" is missing.' % defn.name) + elif defn.validate(value, messages) is False: + valid = False + + if len(cmd._unrecognized) > 0: + valid = False + if messages is not None: + for name in cmd.unrecognized: + messages.append('Argument "%s" is unrecognized.' % name) + + return valid + + + +class CmdDict(dict): + """CmdDict + + Command Dictionaries provide a Python dictionary (i.e. 
hashtable) + interface mapping Command names to Command Definitions. + """ + def __init__(self, *args, **kwargs): + """Creates a new Command Dictionary from the given command dictionary + filename. + """ + self.filename = None + self.opcodes = {} + + if len(args) == 1 and len(kwargs) == 0 and type(args[0]) == str: + dict.__init__(self) + self.load(args[0]) + else: + dict.__init__(self, *args, **kwargs) + + def add(self, defn): + """Adds the given Command Definition to this Command Dictionary.""" + self[defn.name] = defn + self.opcodes[defn._opcode] = defn + + + def create(self, name, *args, **kwargs): + """Creates a new BLISS command with the given arguments.""" + tokens = name.split() + + if len(tokens) > 1 and (len(args) > 0 or len(kwargs) > 0): + msg = 'A Cmd may be created with either positional arguments ' + msg += '(passed as a string or a Python list) or keyword ' + msg += 'arguments, but not both.' + raise TypeError(msg) + + if len(tokens) > 1: + name = tokens[0] + args = [ util.toNumber(t, t) for t in tokens[1:] ] + + defn = self.get(name, None) + + if defn is None: + raise TypeError('Unrecognized command: %s' % name) + + return createCmd(defn, *args, **kwargs) + + + def decode(self, bytes): + """Decodes the given bytes according to this BLISS Command + Definition. + """ + opcode = struct.unpack(">H", bytes[0:2])[0] + nbytes = struct.unpack("B", bytes[2:3])[0] + name = None + args = [] + + if opcode in self.opcodes: + defn = self.opcodes[opcode] + name = defn.name + stop = 3 + + for arg in defn.argdefns: + start = stop + stop = start + arg.nbytes + if arg.fixed: + pass # FIXME: Confirm fixed bytes are as expected? + else: + args.append(arg.decode(bytes[start:stop])) + + return self.create(name, *args) + + def load(self, content): + """Loads Command Definitions from the given YAML content into + into this Command Dictionary. Content may be either a + filename containing YAML content or a YAML string. 
+ + Load has no effect if this Command Dictionary was already + instantiated with a filename or YAML content. + """ + if self.filename is None: + if os.path.isfile(content): + self.filename = content + stream = open(self.filename, 'rb') + else: + stream = content + + for cmd in yaml.load(stream): + self.add(cmd) + + if type(stream) is file: + stream.close() + + def toJSON(self): + return { name: defn.toJSON() for name, defn in self.items() } + + + +def getDefaultCmdDict(reload=False): + return getDefaultDict(reload=reload) + + +def getDefaultDict(reload=False): + return util.getDefaultDict(__name__, 'cmddict', CmdDict, reload) + + +def getDefaultDictFilename(): + return bliss.config.cmddict.filename + + +def getDefaultSchema(): + return pkg_resources.resource_filename('bliss.core', 'data/cmd_schema.json') + + +def getMaxCmdSize(): + """ Returns the maximum size TReK command in bytes + + Converts from words to bytes (hence the \*2) and + removes 1 word for CCSDS header (-1) + """ + return (MAX_CMD_WORDS - 1) * 2 + + +def YAMLCtor_ArgDefn(loader, node): + fields = loader.construct_mapping(node, deep=True) + fields["fixed"] = node.tag == "!Fixed" + return createArgDefn(**fields) + + +def YAMLCtor_CmdDefn(loader, node): + fields = loader.construct_mapping(node, deep=True) + fields['argdefns'] = fields.pop('arguments', None) + return createCmdDefn(**fields) + +yaml.add_constructor('!Command' , YAMLCtor_CmdDefn) +yaml.add_constructor('!Argument', YAMLCtor_ArgDefn) +yaml.add_constructor('!Fixed' , YAMLCtor_ArgDefn) + +util.__init_extensions__(__name__, globals()) diff --git a/bliss/core/coord.py b/bliss/core/coord.py new file mode 100644 index 00000000..41aafd88 --- /dev/null +++ b/bliss/core/coord.py @@ -0,0 +1,150 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. 
United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Coordinate Functions + +The bliss.core.coord module provides various coordinate manpulation +and transformation functions. +""" + +import datetime +import math + +from bliss.core import dmc + + +class Ellipsoid (object): + """An ellipsoid is the three dimensional analogue of an ellipse, used + here to approximate the geoid. See WGS84. + + """ + def __init__ (self, a, b): + """Creates a new Ellipsoid with the given semimajor and semiminor + axes. + """ + self.a = a + self.b = b + self.a2 = a ** 2 + self.b2 = b ** 2 + self.f = (a - b) / a + self.e2 = 1 - (self.b2 / self.a2) + self.ep2 = (self.a2 - self.b2) / self.b2 + + +WGS84 = Ellipsoid(a=6378137, b=6356752.3142) + + +def cbrt (x): + """Returns the cube root of x.""" + if x >= 0: + return math.pow(x , 1.0 / 3.0) + else: + return - math.pow(abs(x), 1.0 / 3.0) + + +def ecef2geodetic (x, y, z, ellipsoid=None): + """Convert ECEF coordinates to geodetic using the given ellipsoid + (defaults to WGS84). + + J. Zhu, "Conversion of Earth-centered Earth-fixed coordinates to + geodetic coordinates," IEEE Transactions on Aerospace and Electronic + Systems, vol. 30, pp. 957-961, 1994. 
+ + See https://code.google.com/p/pysatel/source/browse/trunk/coord.py + + """ + if ellipsoid is None: + ellipsoid = WGS84 + + a = ellipsoid.a + b = ellipsoid.b + a2 = ellipsoid.a2 + b2 = ellipsoid.b2 + f = ellipsoid.f + e2 = ellipsoid.e2 + ep2 = ellipsoid.ep2 + r = math.sqrt(x * x + y * y) + F = 54 * b * b * z * z + G = r * r + (1 - e2) * z * z - e2 * (a2 - b2) + C = (e2 * e2 * F * r * r) / (math.pow(G, 3)) + S = cbrt(1 + C + math.sqrt(C * C + 2 * C)) + P = F / (3 * math.pow((S + 1 / S + 1), 2) * G * G) + Q = math.sqrt(1 + 2 * e2 * e2 * P) + r_0 = -(P * e2 * r) / (1 + Q) + math.sqrt(0.5 * a * a*(1 + 1.0 / Q) - \ + P * (1 - e2) * z * z / (Q * (1 + Q)) - 0.5 * P * r * r) + U = math.sqrt(math.pow((r - e2 * r_0), 2) + z * z) + V = math.sqrt(math.pow((r - e2 * r_0), 2) + (1 - e2) * z * z) + Z_0 = b * b * z / (a * V) + h = U * (1 - b * b / (a * V)) + lat = math.atan((z + ep2 * Z_0) / r) + lon = math.atan2(y, x) + + return lat, lon, h + + +def eci2ecef (x, y, z, gmst=None): + """Converts the given ECI coordinates to ECEF at the given Greenwich + Mean Sidereal Time (GMST) (defaults to now). + + This code was adapted from + `shashwatak/satellite-js `_ + and http://ccar.colorado.edu/ASEN5070/handouts/coordsys.doc + + """ + if gmst is None: + gmst = dmc.toGMST() + + X = (x * math.cos(gmst)) + (y * math.sin(gmst)) + Y = (x * (-math.sin(gmst))) + (y * math.cos(gmst)) + Z = z + + return X, Y, Z + + +def eci2geodetic (x, y, z, gmst=None, ellipsoid=None): + """Converts the given ECI coordinates to Geodetic coordinates at the + given Greenwich Mean Sidereal Time (GMST) (defaults to now) and with + the given ellipsoid (defaults to WGS84). 
+ + This code was adapted from + `shashwatak/satellite-js `_ + and http://www.celestrak.com/columns/v02n03/ + + """ + if gmst is None: + gmst = dmc.toGMST() + + if ellipsoid is None: + ellipsoid = WGS84 + + a = WGS84.a + b = WGS84.b + f = WGS84.f + r = math.sqrt((x * x) + (y * y)) + e2 = (2 * f) - (f * f) + lon = math.atan2(y, x) - gmst + k = 0 + kmax = 20 + lat = math.atan2(z, r) + + while (k < kmax): + slat = math.sin(lat) + C = 1 / math.sqrt( 1 - e2 * (slat * slat) ) + lat = math.atan2(z + (a * C * e2 * slat), r) + k += 1 + + z = (r / math.cos(lat)) - (a * C) + + return lat, lon, z diff --git a/bliss/core/data/cmd_schema.json b/bliss/core/data/cmd_schema.json new file mode 100644 index 00000000..9a7a2dcb --- /dev/null +++ b/bliss/core/data/cmd_schema.json @@ -0,0 +1,105 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Command Dictionary Schema", + "description": "Command Dictionary Schema", + "type": "array", + "items": { + "required": ["command", "name", "opcode"], + "additionalProperties": false, + "properties": { + "command": { + "type": "string" + }, + "name": { + "type": "string" + }, + "opcode": { + "type": "integer" + }, + "subsystem": { + "type": "string" + }, + "title": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "arguments": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "object", + "additionalProperties": false, + "required": ["argument", "name", "type", "bytes"], + "properties": { + "argument": { + "type": "string" + }, + "name": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "units": { + "type": "string" + }, + "range": { + "type": "array", + "maxItems": 2 + }, + "type": { + "type": "string" + }, + "bytes": { + "type": [ "integer", "array" ], + "description": "TODO: Need to update the min/max when NOT a list", + "maxItems": 2, + "items": { "type": "integer" } + }, + "enum": { + "type": "object", + "description": "TODO: Does not check valid enumeration" + } + } + }, + { + 
"type": "object", + "additionalProperties": false, + "required": ["fixed", "type", "bytes"], + "properties": { + "fixed": { + "type": "string" + }, + "name": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "units": { + "type": "string" + }, + "type": { + "type": "string" + }, + "bytes": { + "type": [ "integer", "array" ], + "description": "TODO: Need to update the min/max when NOT a list", + "maxItems": 2, + "items": { "type": "integer" } + }, + "value": { + "type": "number", + "description": "TODO: Does not check valid enumeration" + } + } + } + ] + } + } + } + } +} diff --git a/bliss/core/data/evr_schema.json b/bliss/core/data/evr_schema.json new file mode 100644 index 00000000..f43cbc7a --- /dev/null +++ b/bliss/core/data/evr_schema.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "EVR Dictionary Schema", + "description": "EVR Dictionary Schema. See http://json-schema.org/ for more details on how to create this schema.", + "type": "array", + "items": { + "required": ["evr", "name", "code"], + "additionalProperties": false, + "properties": { + "evr": { + "type": "string" + }, + "name": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "code": { + "type": "integer" + }, + "message": { + "type": "string" + } + } + } +} diff --git a/bliss/core/data/limits_schema.json b/bliss/core/data/limits_schema.json new file mode 100644 index 00000000..5321252a --- /dev/null +++ b/bliss/core/data/limits_schema.json @@ -0,0 +1,69 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Limit Dictionary Schema", + "description": "Limit Dictionary Schema. 
See http://json-schema.org/ for more details on how to create this schema.", + "type": "array", + "items": { + "required": ["limit", "source"], + "anyOf": [{ + "required": ["lower"] + }, { + "required": ["upper"] + }, { + "required": ["value"] + }], + "additionalProperties": false, + "properties": { + "limit": { + "type": "string" + }, + "source": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "units": { + "type": "string" + }, + "lower": { + "type": "object", + "additionalProperties": false, + "properties": { + "error": { + "type": "number" + }, + "warn": { + "type": "number" + } + } + }, + "upper": { + "type": "object", + "additionalProperties": false, + "properties": { + "error": { + "type": "number" + }, + "warn": { + "type": "number" + } + } + }, + "value": { + "type": "object", + "additionalProperties": false, + "properties": { + "error": { + "type": [ "string", "number", "array" ], + "minLength": 1 + }, + "warn": { + "type": [ "string", "number", "array" ], + "minLength": 1 + } + } + } + } + } +} diff --git a/bliss/core/data/table_schema.json b/bliss/core/data/table_schema.json new file mode 100644 index 00000000..4cfaede9 --- /dev/null +++ b/bliss/core/data/table_schema.json @@ -0,0 +1,124 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Flight Software Command Table Schema", + "description": "Flight Software Command Table Schema", + "type": "array", + "items": { + "required": ["fswtable", "name", "delimiter", "uptype", "size", "header", "columns"], + "additionalProperties": false, + "properties": { + "fswtable": { + "type": "string" + }, + "name": { + "type": "string" + }, + "delimiter": { + "type": "string" + }, + "uptype": { + "type": "integer" + }, + "size": { + "type": "integer" + }, + "header": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["fswcolumn", "name", "format", "type"], + "properties": { + "fswcolumn": { + "type": "string" + }, + "name": { + 
"type": "string" + }, + "desc": { + "type": "string" + }, + "format": { + "type": "string" + }, + "units": { + "type": "string" + }, + "type": { + "type": "string" + }, + "bytes": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "string", + "enum": ["@prev"] + + }, + { + "type": "array", + "maxItems": 2, + "items": { "type": "integer" } + } + ], + "description": "TODO: Need to update the min/max when NOT a list" + } + } + } + }, + "columns": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["fswcolumn", "name", "format", "type"], + "properties": { + "fswcolumn": { + "type": "string" + }, + "name": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "format": { + "type": "string" + }, + "units": { + "type": "string" + }, + "type": { + "type": "string" + }, + "bytes": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "string", + "enum": ["@prev"] + + }, + { + "type": "array", + "maxItems": 2, + "items": { "type": "integer" } + } + ], + "description": "TODO: Need to update the min/max when NOT a list" + }, + "enum": { + "type": "object", + "description": "TODO: Does not check valid enumeration" + } + } + } + } + } + } +} + diff --git a/bliss/core/data/tlm_schema.json b/bliss/core/data/tlm_schema.json new file mode 100644 index 00000000..5cccf2b6 --- /dev/null +++ b/bliss/core/data/tlm_schema.json @@ -0,0 +1,133 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Telemetry Dictionary Schema", + "description": "Telemetry Dictionary Schema", + "type": "array", + "items": { + "oneOf" : [{ + "required" : ["packet", "fields"] + }, { + "required" : ["include"] + }], + "additionalProperties": false, + "properties": { + "packet": { + "type": "string" + }, + "include": { + "type": "string" + }, + "ccsds": { + "type": "object", + "apid": { + "type": "string" + } + }, + "name": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "constants": { + "type": "object", + 
"description": "Key:Value pairs that define a constant and its value." + }, + "functions": { + "type": "object", + "description": "Function name to function mappings. The function name should be of the form 'R(dn)'. The function body can reference history values and constant values." + }, + "history": { + "type": "array", + "description": "A list of field names for which history values should be stored." + }, + "fields": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "oneOf" : [{ + "required" : ["name", "type"] + }, { + "required" : ["include"] + }], + "properties": { + "include": { + "type": "string" + }, + "field": { + "type": "string" + }, + "name": { + "type": "string" + }, + "title": { + "type": "string" + }, + "type": { + "type": "string" + }, + "units": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "mask": { + "type": "integer" + }, + "bytes": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "string", + "enum": ["@prev"] + + }, + { + "type": "array", + "maxItems": 2, + "items": { "type": "integer" } + } + ], + "description": "TODO: Need to update the min/max when NOT a list" + }, + "aliases": { + "type": "object" + }, + "enum": { + "type": "object", + "description": "TODO: Does not check valid enumeration" + }, + "value": { + "type": "integer", + "description": "Set value for this defn in hex" + }, + "when": { + "type": "string", + "description": "An expression defining when a !Field's value is valid." 
+ }, + "dntoeu": { + "type": "object", + "additionalProperties": false, + "required": ["equation", "units"], + "properties": { + "equation": { + "type": "string" + }, + "units": { + "type": "string" + }, + "when": { + "type": "string" + } + } + } + } + } + } + } + } +} diff --git a/bliss/core/db.py b/bliss/core/db.py new file mode 100644 index 00000000..98627949 --- /dev/null +++ b/bliss/core/db.py @@ -0,0 +1,106 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +"""BLISS Database + +The bliss.db module provides a general database storage layer for +commands and telemetry with several backends. +""" + +import importlib + +import bliss +from bliss.core import cfg, tlm + + +# Backend must implement DB-API 2.0 [PEP 249] +# (https://www.python.org/dev/peps/pep-0249/). +Backend = None + + +def connect(database): + """Returns a connection to the given database.""" + if Backend is None: + raise cfg.BlissConfigMissing('database.backend') + + return Backend.connect(database) + + +def create(database, tlmdict=None): + """Creates a new database for the given Telemetry Dictionary and + returns a connection to it. 
+ """ + if tlmdict is None: + tlmdict = tlm.getDefaultDict() + + dbconn = connect(database) + + for name, defn in tlmdict.items(): + createTable(dbconn, defn) + + return dbconn + + +def createTable(dbconn, pd): + """Creates a database table for the given PacketDefinition.""" + cols = ('%s %s' % (defn.name, getTypename(defn)) for defn in pd.fields) + sql = 'CREATE TABLE IF NOT EXISTS %s (%s)' % (pd.name, ', '.join(cols)) + + dbconn.execute(sql) + dbconn.commit() + + +def getTypename(defn): + """Returns the SQL typename required to store the given + FieldDefinition.""" + return 'REAL' if defn.type.float or defn.dntoeu else 'INTEGER' + + +def insert(dbconn, packet): + """Inserts the given packet into the connected database.""" + values = [ ] + pd = packet._defn + + for defn in pd.fields: + if defn.enum: + val = getattr(packet.raw, defn.name) + else: + val = getattr(packet, defn.name) + + if val is None and defn.name in pd.history: + val = getattr(packet.history, defn.name) + + values.append(val) + + qmark = ['?'] * len(values) + sql = 'INSERT INTO %s VALUES (%s)' % (pd.name, ', '.join(qmark)) + + dbconn.execute(sql, values) + + +def use(backend): + """Use the given database backend, e.g. 'MySQLdb', 'psycopg2', + 'MySQLdb', etc. + """ + global Backend + + try: + Backend = importlib.import_module(backend) + except ImportError: + msg = 'Could not import (load) database.backend: %s' % backend + raise cfg.BlissConfigError(msg) + + +if bliss.config.get('database.backend'): + use( bliss.config.get('database.backend') ) diff --git a/bliss/core/dmc.py b/bliss/core/dmc.py new file mode 100644 index 00000000..33c6704e --- /dev/null +++ b/bliss/core/dmc.py @@ -0,0 +1,298 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. 
Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +"""BLISS DeLorean Motor Company (DMC) + +The bliss.dmc module provides functions to represent, translate, and +manipulate time, building upon Python's datetime and timedelta data +types. Originally, this module was named bliss.time, but time.py +conflicts with Python's builtin module of the same name, which causes +all sorts of subtle import issues and conflicts. + +Many functions assume the GPS (and ISS) epoch: January 6, 1980 at +midnight. + +""" + +import calendar +import datetime +import math +import time + + + +GPS_Epoch = datetime.datetime(1980, 1, 6, 0, 0, 0) +TICs = [ ] +TwoPi = 2 * math.pi + +DOY_Format = '%Y-%jT%H:%M:%SZ' +ISO_8601_Format = '%Y-%m-%dT%H:%M:%SZ' + + +def getTimestampUTC(): + """getTimestampUTC() -> (ts_sec, ts_usec) + + Returns the current UTC time in seconds and microseconds. + """ + utc = datetime.datetime.utcnow() + ts_sec = calendar.timegm( utc.timetuple() ) + ts_usec = utc.microsecond + return ts_sec, ts_usec + + +def getUTCDatetimeDOY(days=0, hours=0, minutes=0, seconds=0): + """getUTCDatetimeDOY -> datetime + + Returns the UTC current datetime with the input timedelta arguments (days, hours, minutes, seconds) + added to current date. Returns ISO-8601 datetime format for day of year: + + YYYY-DDDTHH:mm:ssZ + + """ + return (datetime.datetime.utcnow() + + datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)).strftime(DOY_Format) + + +def tic(): + """tic() + + Records the current time for benchmarking purposes. 
See also toc(). + """ + global TICs + begin = datetime.datetime.now() + TICs.append(begin) + + +def toc(): + """toc() -> float | None + + Returns the total elapsed seconds since the most recent tic(), or + None if tic() was not called. + + Examples: + + >>> import time + + >>> tic() + >>> time.sleep(1.2) + >>> elapsed = toc() + + >>> assert abs(elapsed - 1.2) <= 1e-2 + + .. note:: The tic() and toc() functions are simplistic and may introduce + significant overhead, especially in tight loops. Their use should + be limited to one-off experiments and rough numbers. The Python + profile package (i.e. 'import profile') should be used for serious + and detailed profiling. + """ + end = datetime.datetime.now() + return totalSeconds( end - TICs.pop() ) if len(TICs) else None + + +def toGPSWeekAndSecs(timestamp=None): + """Converts the given UTC timestamp (defaults to the current time) to + a two-tuple, (GPS week number, GPS seconds within the week). + """ + if timestamp is None: + timestamp = datetime.datetime.utcnow() + + leap = getUTCtoGPSLeapSeconds(timestamp) + + secsInWeek = 604800 + delta = totalSeconds(timestamp - GPS_Epoch) + leap + seconds = delta % secsInWeek + week = int( math.floor(delta / secsInWeek) ) + + return (week, seconds) + + +def getUTCtoGPSLeapSeconds(timestamp=None): + """ Get the number of leap seconds for UTC->GPS conversion + + Args: + timestamp: + A UTC datetime object (defaults to current time) + + Returns: + Integer value specifying how many leap seconds to use for + conversion of the timestamp. + + Raises: + ValueError: + If the timestamp provided occurs before 01-01-1980. 
+ """ + if timestamp is None: + timestamp = datetime.datetime.utcnow() + + if timestamp < datetime.datetime(1980, 1, 6): + e = "The timestamp date is before the GPS epoch" + raise ValueError(e) + elif datetime.datetime(1980, 1, 6) <= timestamp < datetime.datetime(1981, 7, 1): + return 0 + elif datetime.datetime(1981, 7, 1) <= timestamp < datetime.datetime(1982, 7, 1): + return 1 + elif datetime.datetime(1982, 7, 1) <= timestamp < datetime.datetime(1983, 7, 1): + return 2 + elif datetime.datetime(1983, 7, 1) <= timestamp < datetime.datetime(1985, 7, 1): + return 3 + elif datetime.datetime(1985, 7, 1) <= timestamp < datetime.datetime(1988, 1, 1): + return 4 + elif datetime.datetime(1988, 1, 1) <= timestamp < datetime.datetime(1990, 1, 1): + return 5 + elif datetime.datetime(1990, 1, 1) <= timestamp < datetime.datetime(1991, 1, 1): + return 6 + elif datetime.datetime(1991, 1, 1) <= timestamp < datetime.datetime(1992, 7, 1): + return 7 + elif datetime.datetime(1992, 7, 1) <= timestamp < datetime.datetime(1993, 7, 1): + return 8 + elif datetime.datetime(1993, 7, 1) <= timestamp < datetime.datetime(1994, 7, 1): + return 9 + elif datetime.datetime(1994, 7, 1) <= timestamp < datetime.datetime(1996, 1, 1): + return 10 + elif datetime.datetime(1996, 1, 1) <= timestamp < datetime.datetime(1997, 7, 1): + return 11 + elif datetime.datetime(1997, 7, 1) <= timestamp < datetime.datetime(1999, 1, 1): + return 12 + elif datetime.datetime(1999, 1, 1) <= timestamp < datetime.datetime(2006, 1, 1): + return 13 + elif datetime.datetime(2006, 1, 1) <= timestamp < datetime.datetime(2009, 1, 1): + return 14 + elif datetime.datetime(2009, 1, 1) <= timestamp < datetime.datetime(2012, 7, 1): + return 15 + elif datetime.datetime(2012, 7, 1) <= timestamp < datetime.datetime(2015, 7, 1): + return 16 + elif datetime.datetime(2015, 7, 1) <= timestamp < datetime.datetime(2017, 1, 1): + return 17 + elif timestamp >= datetime.datetime(2017, 1, 1): + return 18 + + +def toGPSSeconds(timestamp): + 
"""toGPSSeconds(timestamp) -> integer + + Converts the given Python datetime object to the number of seconds + since the GPS Epoch (midnight on January 6th, 1980). + + Examples: + + >>> import datetime + + >>> toGPSSeconds( datetime.datetime(1980, 1, 6) ) + 0 + + >>> toGPSSeconds( datetime.datetime(1980, 1, 7) ) + 86400 + """ + delta = timestamp - GPS_Epoch + return (delta.days * 24 * 3600) + delta.seconds + + +def toGMST(dt=None): + """Converts the given Python datetime or Julian date (float) to + Greenwich Mean Sidereal Time (GMST) (in radians) using the formula + from D.A. Vallado (2004). + + See: + + D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 192 + http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=192&pg=PA192 + """ + if dt is None or type(dt) is datetime.datetime: + jd = toJulian(dt) + else: + jd = dt + + tUT1 = (jd - 2451545.0) / 36525.0 + gmst = 67310.54841 + (876600 * 3600 + 8640184.812866) * tUT1 + gmst += 0.093104 * tUT1**2 + gmst -= 6.2e-6 * tUT1**3 + + # Convert from seconds to degrees, i.e. + # 86400 seconds / 360 degrees = 240 seconds / degree + gmst /= 240. + + # Convert to radians + gmst = math.radians(gmst) % TwoPi + + if gmst < 0: + gmst += TwoPi + + return gmst + + +def toJulian(dt=None): + """Converts a Python datetime to a Julian date, using the formula from + Meesus (1991). This formula is reproduced in D.A. Vallado (2004). + + See: + + D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 
187 + http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=187&pg=PA187 + """ + if dt is None: + dt = datetime.datetime.utcnow() + + if dt.month < 3: + year = dt.year - 1 + month = dt.month + 12 + else: + year = dt.year + month = dt.month + + A = int(year / 100.0) + B = 2 - A + int(A / 4.0) + C = ( (dt.second / 60.0 + dt.minute) / 60.0 + dt.hour ) / 24.0 + jd = int(365.25 * (year + 4716)) + jd += int(30.6001 * (month + 1)) + dt.day + B - 1524.5 + C + + return jd + + +def toLocalTime(seconds, microseconds=0): + """toLocalTime(seconds, microseconds=0) -> datetime + + Converts the given number of seconds since the GPS Epoch (midnight + on January 6th, 1980) to this computer's local time. Returns a + Python datetime object. + + Examples: + + >>> toLocalTime(0) + datetime.datetime(1980, 1, 6, 0, 0) + + >>> toLocalTime(25 * 86400) + datetime.datetime(1980, 1, 31, 0, 0) + """ + delta = datetime.timedelta(seconds=seconds, microseconds=microseconds) + return GPS_Epoch + delta + + +def totalSeconds(td): + """totalSeconds(td) -> float + + Return the total number of seconds contained in the given Python + datetime.timedelta object. Python 2.6 and earlier do not have + timedelta.total_seconds(). + + Examples: + + >>> totalSeconds( toLocalTime(86400.123) - toLocalTime(0.003) ) + 86400.12 + """ + if hasattr(td, "total_seconds"): + ts = td.total_seconds() + else: + ts = (td.microseconds + (td.seconds + td.days * 24 * 3600.0) * 1e6) / 1e6 + + return ts diff --git a/bliss/core/dtype.py b/bliss/core/dtype.py new file mode 100644 index 00000000..c993eb04 --- /dev/null +++ b/bliss/core/dtype.py @@ -0,0 +1,821 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. 
Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +"""BLISS Primitive Data Types (PDT) + +The bliss.core.dtype module provides definitions and functions for +primitive data types used in the construction and manipulation of +OCO-3 commands and telemetry. Originally, this module was named +bliss.core.types, but types.py conflicts with Python's builtin module +of the same name, which causes all sorts of subtle import issues and +conflicts. + +Supported PrimitiveType names (which may be passed to +bliss.core.dtype.get()) are listed in bliss.core.dtype.PrimitiveTypes. + +The following code, shown via the interactive Python prompt, +demonstrates attributes and methods of the 'LSB_U16' PrimitiveType. +Several related statements are combined into a single line, forming +tuples, for succinctness: + + >>> from bliss.core import dtype + >>> t = dtype.get('LSB_U16') + + >>> t.name, t.endian, t.format, t.nbits, t.nbytes + ('LSB_U16', 'LSB', '>> t.float, t.signed + (False, False) + + >>> t.min, t.max + (0, 65535) + + >>> bytes = t.encode(42) + >>> ' '.join('0x%02x' % b for b in bytes) + '0x2a 0x00' + + >>> t.decode(bytes) + 42 + + >>> t.validate(42) + True + + # Both the array of error messages and message prefixes are optional + + >>> messages = [ ] + >>> t.validate(65536, messages, prefix='error:') + False + + >>> t.validate(1e6, messages, prefix='error:') + False + + >>> print "\\n".join(messages) + error: value '65536' out of range [0, 65535]. + error: float '1e+06' cannot be represented as an integer. 
+ +""" + +import datetime +import struct +import sys + +from bliss.core import cmd, dmc, evr, log, util + + +# PrimitiveTypes +# +# Lists PrimitiveType names. Passing these names to get() will return +# the corresponding PrimitiveType. +# +# (Populated below based on information in PrimitiveTypeFormats). +# +PrimitiveTypes = None + + +# PrimitiveTypeMap +# +# Maps typenames to PrimitiveType. Use +# bliss.core.dtype.get(typename). (Populated below based on +# information in PrimitiveTypeFormats). +# +PrimitiveTypeMap = {} + + +# PrimitiveTypeFormats +# +# Maps typenames to their corresponding Python C struct format code. +# See: +# +# https://docs.python.org/2/library/struct.html#format-characters +# +PrimitiveTypeFormats = { + "I8" : "b" , + "U8" : "B" , + "LSB_I16": "h", + "LSB_U16": "H", + "LSB_I32": "i", + "LSB_U32": "I", + "LSB_I64": "q", + "LSB_U64": "Q", + "LSB_F32": "f", + "LSB_D64": "d" +} + +class PrimitiveType(object): + """PrimitiveType + + A PrimitiveType contains a number of fields that provide information + on the details of a primitive type, including: name + (e.g. "MSB_U32"), format (Python C struct format code), endianness + ("MSB" or "LSB"), float, signed, nbits, nbytes, min, and max. + + PrimitiveTypes can validate() specific values and encode() and + decode() binary representations. + """ + + def __init__(self, name): + """PrimitiveType(name) -> PrimitiveType + + Creates a new PrimitiveType based on the given typename + (e.g. 'MSB_U16' for a big endian, 16 bit short integer). 
+ """ + self._name = name + self._format = PrimitiveTypeFormats.get(name, None) + self._endian = None + self._float = False + self._min = None + self._max = None + self._signed = False + self._string = False + + if self.name.startswith("LSB_") or self.name.startswith("MSB_"): + self._endian = self.name[0:3] + self._signed = self.name[4] != "U" + self._float = self.name[4] == "F" or self.name[4] == "D" + self._nbits = int(self.name[-2:]) + elif self.name.startswith("S"): + self._format = self.name[1:] + "s" + self._nbits = int(self.name[1:]) * 8 + self._string = True + else: + self._signed = self.name[0] != "U" + self._nbits = int(self.name[-1:]) + + self._nbytes = self.nbits / 8 + + if self.float: + self._max = +sys.float_info.max + self._min = -sys.float_info.max + elif self.signed: + self._max = 2 ** (self.nbits - 1) + self._min = -1 * (self.max - 1) + elif not self.string: + self._max = 2 ** self.nbits - 1 + self._min = 0 + + def __eq__ (self, other): + return isinstance(other, PrimitiveType) and self._name == other._name + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self.name) + + @property + def endian(self): + """Endianness of this PrimitiveType, either 'MSB' or 'LSB'.""" + return self._endian + + @property + def float(self): + """Indicates whether or not this PrimitiveType is a float or double.""" + return self._float + + @property + def format(self): + """Python C struct format code for this PrimitiveType.""" + return self._format + + @property + def name(self): + """Name of this PrimitiveType (e.g. 
'I8', 'MSB_U16', 'LSB_F32', + etc.).""" + return self._name + + @property + def nbits(self): + """Number of bits required to represent this PrimitiveType.""" + return self._nbits + + @property + def nbytes(self): + """Number of bytes required to represent this PrimitiveType.""" + return self._nbytes + + @property + def min(self): + """Minimum value for this PrimitiveType.""" + return self._min + + @property + def max(self): + """Maximum value for this PrimitiveType.""" + return self._max + + @property + def signed(self): + """Indicates whether or not this PrimitiveType is signed or unsigned.""" + return self._signed + + @property + def string(self): + """Indicates whether or not this PrimitiveType is a string.""" + return self._string + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + PrimitiveType definition. + """ + return bytearray(struct.pack(self.format, value)) + + def decode(self, bytes, raw=False): + """decode(bytearray, raw=False) -> value + + Decodes the given bytearray according to this PrimitiveType + definition. + + NOTE: The parameter ``raw`` is present to adhere to the + ``decode()`` inteface, but has no effect for PrimitiveType + definitions. + """ + return struct.unpack(self.format, buffer(bytes))[0] + + + def toJSON(self): + return self.name + + def validate(self, value, messages=None, prefix=None): + """validate(value[, messages[, prefix]]) -> True | False + + Validates the given value according to this PrimitiveType + definition. Validation error messages are appended to an optional + messages array, each with the optional message prefix. + """ + valid = False + + def log(msg): + if messages is not None: + if prefix is not None: + tok = msg.split() + msg = prefix + ' ' + tok[0].lower() + " " + " ".join(tok[1:]) + messages.append(msg) + + if self.string: + valid = type(value) is str + else: + if type(value) is str: + log("String '%s' cannot be represented as a number." 
% value) + elif type(value) not in (int, long, float): + log("Value '%s' is not a primitive type." % str(value)) + elif type(value) is float and not self.float: + log("Float '%g' cannot be represented as an integer." % value) + else: + if value < self.min or value > self.max: + args = (str(value), self.min, self.max) + log("Value '%s' out of range [%d, %d]." % args) + else: + valid = True + + return valid + +# +# Populate the PrimitiveTypeMap based on the types in +# PrimitiveTypeFormats. +# +PrimitiveTypeMap.update( + (t, PrimitiveType(t)) for t in PrimitiveTypeFormats.keys() +) + +PrimitiveTypes = sorted(PrimitiveTypeMap.keys()) + + +class ArrayType(object): + __slots__ = [ '_nelems', '_type' ] + + def __init__(self, elemType, nelems): + """Creates a new ArrayType of nelems, each of type elemType.""" + if type(elemType) is str: + elemType = get(elemType) + + if type(nelems) is not int: + raise TypeError('ArrayType(..., nelems) must be an integer') + + self._type = elemType + self._nelems = nelems + + + def __eq__(self, other): + """Returns True if two ArrayTypes are equivalent, False otherwise.""" + return (isinstance(other, ArrayType) and + self.type == other.type and self.nelems == other.nelems) + + + def _assertIndex(self, index): + """Raise TypeError or IndexError if index is not an integer or out of + range for the number of elements in this array, respectively. 
+ """ + if type(index) is not int: + raise TypeError('list indices must be integers') + if index < 0 or index >= self.nelems: + raise IndexError('list index out of range') + + + @property + def name(self): + """Name of this ArrayType.""" + return '%s[%d]' % (self.type.name, self.nelems) + + @property + def nbits(self): + """Number of bits required to represent this ArrayType.""" + return self.nelems * self.type.nbits + + @property + def nbytes(self): + """Number of bytes required to represent this ArrayType.""" + return self.nelems * self.type.nbytes + + @property + def nelems(self): + """Number of elements in this ArrayType.""" + return self._nelems + + @property + def type(self): + """Type of array elements.""" + return self._type + + + def decode(self, bytes, index=None, raw=False): + """decode(bytes[[, index], raw=False]) -> value1, ..., valueN + + Decodes the given sequence of bytes according to this Array's + element type. + + If the optional `index` parameter is an integer or slice, then + only the element(s) at the specified position(s) will be + decoded and returned. + """ + if index is None: + index = slice(0, self.nelems) + + if type(index) is slice: + step = 1 if index.step is None else index.step + indices = xrange(index.start, index.stop, step) + result = [ self.decodeElem(bytes, n, raw) for n in indices ] + else: + result = self.decodeElem(bytes, index, raw) + + return result + + + def decodeElem(self, bytes, index, raw=False): + """Decodes a single element at array[index] from a sequence bytes + that contain data for the entire array. + """ + self._assertIndex(index) + start = index * self.type.nbytes + stop = start + self.type.nbytes + + if stop > len(bytes): + msg = 'Decoding %s[%d] requires %d bytes, ' + msg += 'but the ArrayType.decode() method received only %d bytes.' 
+ raise IndexError(msg % (self.type.name, index, stop, len(bytes))) + + return self.type.decode( bytes[start:stop], raw ) + + + def encode(self, *args): + """encode(value1[, ...]) -> bytes + + Encodes the given values to a sequence of bytes according to this + Array's underlying element type + """ + if len(args) != self.nelems: + msg = 'ArrayType %s encode() requires %d values, but received %d.' + raise ValueError(msg % (self.name, self.nelems, len(args))) + + return bytearray().join(self.type.encode(arg) for arg in args) + + + @staticmethod + def parse (name): + """parse(name) -> [typename | None, nelems | None] + + Parses an ArrayType name to return the element type name and + number of elements, e.g.: + + >>> ArrayType.parse('MSB_U16[32]') + ['MSB_U16', 32] + + If typename cannot be determined, None is returned. + Similarly, if nelems is not an integer or less than one (1), + None is returned. + """ + parts = [None, None] + start = name.find('[') + + if start != -1: + stop = name.find(']', start) + if stop != -1: + try: + parts[0] = name[:start] + parts[1] = int(name[start + 1:stop]) + if parts[1] <= 0: + raise ValueError + except ValueError: + msg = 'ArrayType specification: "%s" must have an ' + msg += 'integer greater than zero in square brackets.' + raise ValueError(msg % name) + + return parts + + +class CmdType(PrimitiveType): + """CmdType + + This type is used to take a two byte opcode and return the + corresponding Command Definition (:class:`CmdDefn`). 
+ """ + BASEPDT = "MSB_U16" + + def __init__(self): + super(CmdType, self).__init__(self.BASEPDT) + + self._pdt = self.name + self._name = 'CMD16' + self._cmddict = None + + @property + def pdt(self): + """PrimitiveType base for the ComplexType""" + return self._pdt + + @property + def cmddict(self): + """PrimitiveType base for the ComplexType""" + if self._cmddict is None: + self._cmddict = cmd.getDefaultDict() + + return self._cmddict + + @cmddict.setter + def cmddict(self, value): + """PrimitiveType base for the ComplexType""" + self._cmddict = value + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + PrimitiveType definition. + """ + opcode = self.cmddict[value].opcode + return super(CmdType, self).encode(opcode) + + def decode(self, bytes, raw=False): + """decode(bytearray, raw=False) -> value + + Decodes the given bytearray and returns the corresponding + Command Definition (:class:`CmdDefn`) for the underlying + 'MSB_U16' command opcode. + + If the optional parameter ``raw`` is ``True``, the command + opcode itself will be returned instead of the Command + Definition (:class:`CmdDefn`). + """ + opcode = super(CmdType, self).decode(bytes) + result = None + + if raw: + result = opcode + elif opcode in self.cmddict.opcodes: + result = self.cmddict.opcodes[opcode] + else: + raise ValueError('Unrecognized command opcode: %d' % opcode) + + return result + + +class EVRType(PrimitiveType): + """EVRType + + This type is used to take a two byte Event Verification Record + (EVR) code and return the corresponding EVR Definition + (:class:`EVRDefn`). 
+ """ + BASEPDT = "MSB_U16" + + def __init__(self): + super(EVRType, self).__init__(self.BASEPDT) + + self._pdt = self.name + self._name = 'EVR16' + self._evrs = None + + @property + def pdt(self): + """PrimitiveType base for the ComplexType""" + return self._pdt + + @property + def evrs(self): + """Getter EVRs dictionary""" + if self._evrs is None: + self._evrs = evr.getDefaultDict() + + return self._evrs + + @evrs.setter + def evrs(self, value): + """Setter for EVRs dictionary""" + self._evrs = value + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + Complex Type definition. + """ + e = self.evrs.get(value, None) + if not e: + log.error(str(value) + " not found as EVR. Cannot encode.") + return None + else: + return super(EVRType, self).encode(e.code) + + def decode(self, bytes, raw=False): + """decode(bytearray, raw=False) -> value + + Decodes the given bytearray according the corresponding + EVR Definition (:class:`EVRDefn`) for the underlying + 'MSB_U16' EVR code. + + If the optional parameter ``raw`` is ``True``, the EVR code + itself will be returned instead of the EVR Definition + (:class:`EVRDefn`). + """ + code = super(EVRType, self).decode(bytes) + result = None + + if raw: + result = code + else: + result = self.evrs.codes[code] + + if result is None: + raise ValueError('Unrecognized EVR code: %d' % code) + + return result + + +class Time8Type(PrimitiveType): + """Time8Type + + This 8-bit time type represents the fine time in the CCSDS + secondary header. This time is calculated where the LSB of the + octet is equal to 1/256 seconds (or 2^-8), approximately 4 msec. + See SSP 41175-02H for more details on the CCSDS headers. 
+ """ + def __init__(self): + super(Time8Type, self).__init__('U8') + + self._pdt = self.name + self._name = 'TIME8' + + @property + def pdt(self): + """PrimitiveType base for the ComplexType""" + return self._pdt + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + ComplexType definition. + """ + return super(Time8Type, self).encode(value * 256) + + def decode(self, bytes, raw=False): + """decode(bytearray, raw=False) -> value + + Decodes the given bytearray and returns the number of + (fractional) seconds. + + If the optional parameter ``raw`` is ``True``, the byte (U8) + itself will be returned. + + """ + result = super(Time8Type, self).decode(bytes) + + if not raw: + result /= 256.0 + + return result + + +class Time32Type(PrimitiveType): + """Time32Type + + This four byte time represents the elapsed time in seconds since + the GPS epoch. + """ + def __init__(self): + super(Time32Type, self).__init__('MSB_U32') + + self._pdt = self.name + self._name = 'TIME32' + + @property + def pdt(self): + """PrimitiveType base for the ComplexType""" + return self._pdt + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + ComplexType definition. + """ + if type(value) is not datetime.datetime: + raise TypeError('encode() argument must be a Python datetime') + + return super(Time32Type, self).encode( dmc.toGPSSeconds(value) ) + + def decode(self, bytes, raw=False): + """decode(bytearray, raw=False) -> value + + Decodes the given bytearray containing the elapsed time in + seconds since the GPS epoch and returns the corresponding + Python :class:`datetime`. + + If the optional parameter ``raw`` is ``True``, the integral + number of seconds will be returned instead. 
+ """ + sec = super(Time32Type, self).decode(bytes) + return sec if raw else dmc.toLocalTime(sec) + + + +class Time40Type(PrimitiveType): + """Time40Type + + This five byte time is made up of four bytes of seconds and one + byte of (1 / 256) subseconds, representing the elapsed time since + the GPS epoch. + """ + def __init__(self): + super(Time40Type, self).__init__('MSB_U32') + + self._pdt = self.name + self._name = 'TIME40' + + @property + def pdt(self): + """PrimitiveType base for the ComplexType""" + return self._pdt + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + ComplexType definition. + """ + if type(value) is not datetime.datetime: + raise TypeError('encode() argument must be a Python datetime') + + coarse = Time32Type().encode(value) + fine = Time8Type() .encode(value.microsecond / 1e6) + + return coarse + fine + + def decode(self, bytes, raw=False): + """decode(bytearray, raw=False) -> value + + Decodes the given bytearray containing the elapsed time in + seconds plus 1/256 subseconds since the GPS epoch returns the + corresponding Python :class:`datetime`. + + If the optional parameter ``raw`` is ``True``, the number of + seconds and subseconds will be returned as a floating-point + number instead. + """ + coarse = Time32Type().decode(bytes[:4], raw) + fine = Time8Type() .decode(bytes[4:]) + + if not raw: + fine = datetime.timedelta(microseconds=fine * 1e6) + + return coarse + fine + + +class Time64Type(PrimitiveType): + """Time64Type + + This eight byte time is made up of four bytes of seconds and four + bytes of nanoseconds, representing the elapsed time since the GPS + epoch. 
+ """ + def __init__(self): + super(Time64Type, self).__init__('MSB_U64') + + self._pdt = self.name + self._name = 'TIME64' + + @property + def pdt(self): + """PrimitiveType base for the ComplexType""" + return self._pdt + + def encode(self, value): + """encode(value) -> bytearray + + Encodes the given value to a bytearray according to this + ComplexType definition. + """ + if type(value) is not datetime.datetime: + raise TypeError('encode() argument must be a Python datetime') + + coarse = Time32Type().encode(value) + fine = get('MSB_U32').encode(value.microsecond * 1e3) + + return coarse + fine + + def decode(self, bytes, raw=False): + """decode(bytearray, False) -> value + + Decodes the given bytearray containing the elapsed time in + seconds plus nanoseconds since the GPS epoch and and returns + the corresponding Python :class:`datetime`. NOTE: The Python + :class:`datetime` class has only microsecond resolution. + + If the optional parameter ``raw`` is ``True``, the number of + seconds and nanoseconds will be returned as a floating-point + number instead. + """ + coarse = Time32Type() .decode(bytes[:4], raw) + fine = get('MSB_U32').decode(bytes[4:]) + + if raw: + fine /= 1e9 + else: + fine = datetime.timedelta(microseconds=fine / 1e3) + + return coarse + fine + + +# ComplexTypeMap +# +# Maps typenames to Complex Types. Use bliss.core.dtype.get(typename). +# +ComplexTypeMap = { + 'CMD16' : CmdType(), + 'EVR16' : EVRType(), + 'TIME8' : Time8Type(), + 'TIME32': Time32Type(), + 'TIME40': Time40Type(), + 'TIME64': Time64Type() +} + + +def getPDT(typename): + """get(typename) -> PrimitiveType + + Returns the PrimitiveType for typename or None. + """ + if typename not in PrimitiveTypeMap and typename.startswith("S"): + PrimitiveTypeMap[typename] = PrimitiveType(typename) + + return PrimitiveTypeMap.get(typename, None) + + +def getCDT(typename): + """getCDT(typename) -> ComplexType + + Returns the ComplexType for typename or None. 
+ """ + return ComplexTypeMap.get(typename, None) + + +def get(typename): + """get(typename) -> PrimitiveType or ComplexType + + Returns the PrimitiveType or ComplexType for typename or None. + """ + dt = getPDT(typename) or getCDT(typename) + + if dt is None: + pdt, nelems = ArrayType.parse(typename) + if pdt and nelems: + dt = ArrayType(pdt, nelems) + + return dt diff --git a/bliss/core/evr.py b/bliss/core/evr.py new file mode 100644 index 00000000..c55a3528 --- /dev/null +++ b/bliss/core/evr.py @@ -0,0 +1,198 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2015, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Event Record (EVR) Reader + +The bliss.core.evr module is used to read the EVRs from a YAML file. 
+""" + +import binascii +import os +import pkg_resources +import re +import yaml + +import bliss +from bliss.core import json, log, util + + +class EVRDict(dict): + def __init__(self, *args, **kwargs): + self.filename = None + self.codes = {} + + if len(args) == 1 and len(kwargs) == 0 and type(args[0]) == str: + dict.__init__(self) + self.load(args[0]) + else: + dict.__init__(self, *args, **kwargs) + + def add(self, defn): + if defn.name not in self and defn.code not in self.codes: + self[defn.name] = defn + self.codes[defn.code] = defn + else: + msg = "EVRDict: Duplicate EVR name/code {}".format(defn) + log.error(msg) + raise util.YAMLError(msg) + + def load(self, content): + if self.filename: + log.warn('EVRDict: Skipping load() attempt after previous initialization') + return + + if os.path.isfile(content): + self.filename = content + stream = open(self.filename, 'rb') + else: + stream = content + + try: + evrs = yaml.load(stream) + except IOError, e: + msg = "Could not load EVR YAML '{}': '{}'".format(stream, str(e)) + log.error(msg) + return + + for e in evrs: + self.add(e) + + def toJSON(self): + return {code: defn.toJSON() for code, defn in self.items()} + + +def getDefaultSchema(): + return pkg_resources.resource_filename('bliss.core', 'data/evr_schema.json') + + +def getDefaultDict(reload=False): + return util.getDefaultDict(__name__, 'evrdict', EVRDict, reload) + + +def getDefaultEVRs(): + return getDefaultDict() + + +def getDefaultDictFilename(): + return bliss.config.evrdict.filename + + +class EVRDefn(json.SlotSerializer, object): + """""" + __slots__ = ["name", "code", "desc", "_message"] + + def __init__(self, *args, **kwargs): + """Creates a new EVR Definition.""" + for slot in EVRDefn.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, name, kwargs.get(name, None)) + + def __repr__(self): + return util.toRepr(self) + + def format_message(self, evr_hist_data): + ''' Format EVR message with EVR data + + Given a byte array 
of EVR data, format the EVR's message attribute + printf format strings and split the byte array into appropriately + sized chunks. + + Args: + evr_hist_data: A bytearray of EVR data. + + Example formatting:: + + # This is the character '!', string 'Foo', and int '4279317316' + bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44]) + + Returns: + The EVR's message string formatted with the EVR data or the + unformatted EVR message string if there are no valid format + strings present in it. + + Raises: + ValueError: When the bytearray cannot be fully processed with the + specified format strings. This is usually a result of the + expected data length and the byte array length not matching. + ''' + formatter_info = { + 's': (-1, str), + 'c': (1, str), + 'i': (4, lambda h: int(binascii.hexlify(h), 16)), + 'd': (4, lambda h: int(binascii.hexlify(h), 16)), + 'u': (4, lambda h: int(binascii.hexlify(h), 16)), + 'f': (4, lambda h: float(binascii.hexlify(h), 16)), + 'e': (4, lambda h: float(binascii.hexlify(h), 16)), + 'g': (4, lambda h: float(binascii.hexlify(h), 16)), + } + formatters = re.findall("%(?:\d+\$)?([cdifosuxXhlL]+)", self._message) + + cur_byte_index = 0 + data_chunks = [] + + for f in formatters: + format_size, format_func = formatter_info[f] + + try: + # Normal data chunking is the current byte index + the size + # of the relevant data type for the formatter + if format_size > 0: + end_index = cur_byte_index + format_size + + # Some formatters have an undefined data size (such as strings) + # and require additional processing to determine the length of + # the data. 
+ else: + if f == 's': + end_index = str(evr_hist_data).index('\x00', cur_byte_index) + else: + end_index = format_size + + data_chunks.append(format_func(evr_hist_data[cur_byte_index:end_index])) + except: + msg = "Unable to format EVR Message with data {}".format(evr_hist_data) + bliss.core.log.error(msg) + raise ValueError(msg) + + cur_byte_index = end_index + + # If we were formatting a string we need to add another index offset + # to exclude the null terminator. + if f == 's': + cur_byte_index += 1 + + # Format and return the EVR message if formatters were present, otherwise + # just return the EVR message as is. + if len(formatters) == 0: + return self._message + else: + return self._message % tuple(data_chunks) + + @property + def message(self): + return self._message + + @message.setter + def message(self, value): + self._message = value + +def YAMLCtor_EVRDefn(loader, node): + fields = loader.construct_mapping(node, deep=True) + fields['argdefns'] = fields.pop('arguments', None) + return createEVRDefn(**fields) + +yaml.add_constructor('!EVR' , YAMLCtor_EVRDefn) + +util.__init_extensions__(__name__, globals()) diff --git a/bliss/core/gds.py b/bliss/core/gds.py new file mode 100644 index 00000000..55dbdf94 --- /dev/null +++ b/bliss/core/gds.py @@ -0,0 +1,314 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. 
User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Ground Data System + +The bliss.core.gds module provides utility functions specific to GDS +command-line tools. +""" + +import os +import sys +import getopt +import zlib +import socket +import argparse + +from bliss.core import log, util + + +def compress (input_filename, output_filename=None, verbose=False): + """compress(input_filename, output_filename=None, verbose=False) -> integer + + Uses zlib to compress input_filename and store the result in + output_filename. The size of output_filename is returned on + success; zero is returned on failure. + + The input file is compressed in one fell swoop. The output_filename + defaults to input_filename + ".bliss-zlib". + + If verbose is True, compress() will use bliss.core.log.info() to + report compression statistics. + """ + input_size = 0 + output_size = 0 + + if output_filename is None: + output_filename = input_fillename + '.bliss-zlib' + + try: + stream = open(input_filename , 'rb') + output = open(output_filename, 'wb') + bytes = stream.read() + input_size = len(bytes) + + if verbose: + log.info("Compressing %s (%d bytes).", input_filename, input_size) + + compressed = zlib.compress(bytes, 3) + output_size = len(compressed) + output.write(compressed) + + stream.close() + output.close() + + percent = (1.0 - (output_size / float(input_size) )) * 100 + + if verbose: + log.info("Wrote %s (%d bytes).", output_filename, output_size) + log.info("Compressed %6.2f percent", percent) + + except (IOError, OSError), e: + log.error(str(e) + ".") + + return output_size + + +def exit (status=None): + """exit([status]) + + Calls bliss.core.log.end() + + Exit the interpreter by raising SystemExit(status). If the status + is omitted or None, it defaults to zero (i.e., success). 
If the + status is numeric, it will be used as the system exit status. If it + is another kind of object, it will be printed and the system exit + status will be one (i.e., failure). + """ + log.end() + sys.exit(status) + + +def hexdump (bytes, addr=None, preamble=None, printfunc=None, stepsize=16): + """hexdump(bytes[, addr[, preamble[, printfunc[, stepsize=16]]]]) + + Outputs bytes in hexdump format lines similar to the following (here + preamble='Bank1', stepsize=8, and len(bytes) == 15):: + + Bank1: 0xFD020000: 7f45 4c46 0102 0100 *.ELF....* + Bank1: 0xFD020008: 0000 0000 0000 00 *....... * + + Where stepsize controls the number of bytes per line. If addr is + omitted, the address portion of the hexdump will not be output. + Lines will be passed to printfunc for output, or Python's builtin + print, if printfunc is omitted. + + If a byte is not in the range [32, 127), a period will rendered for + the character portion of the output. + """ + if preamble is None: + preamble = "" + + bytes = bytearray(bytes) + size = len(bytes) + + for n in xrange(0, size, stepsize): + if addr is not None: + dump = preamble + "0x%04X: " % (addr + n) + else: + dump = preamble + end = min(size, n + stepsize) + dump += hexdumpLine(bytes[n:end], stepsize) + + if printfunc is None: + print dump + else: + printfunc(dump) + + +def hexdumpLine (bytes, length=None): + """hexdumpLine(bytes[, length]) + + Returns a single hexdump formatted line for bytes. If length is + greater than len(bytes), the line will be padded with ASCII space + characters to indicate no byte data is present. + + Used by hexdump(). + """ + line = "" + + if length is None: + length = len(bytes) + + for n in xrange(0, length, 2): + if n < len(bytes) - 1: + line += "%02x%02x " % (bytes[n], bytes[n + 1]) + elif n < len(bytes): + line += "%02x " % bytes[n] + else: + line += " " + + line += "*" + + for n in xrange(length): + if n < len(bytes): + if bytes[n] in xrange(32, 127): + line += "%c" % bytes[n] + else: + line += "." 
+ else: + line += " " + + line += "*" + return line + + + +def parseArgs (argv, defaults): + """parseArgs(argv, defaults) -> (dict, list) + + Parses command-line arguments according to the given defaults. For + every key in defaults, an argument of the form --key=value will be + parsed. Numeric arguments are converted from strings with errors + reported via bliss.core.log.error() and default values used instead. + + Returns a copy of defaults with parsed option values and a list of + any non-flag arguments. + """ + options = dict(defaults) + numeric = \ + [ k for k, v in options.items() if type(v) is float or type(v) is int ] + + try: + longopts = [ "%s=" % key for key in options.keys() ] + opts, args = getopt.getopt(argv, "", longopts) + + for key, value in opts: + if key.startswith("--"): + key = key[2:] + options[key] = value + except getopt.GetoptError, err: + log.error( str(err) ) + usage( exit=True ) + + for key in numeric: + value = options[key] + if type(value) is str: + options[key] = util.toNumber(value) + + if options[key] is None: + msg = "Option '%s': '%s' is not a number, using default '%s' instead." + log.error(msg, key, value, defaults[key]) + options[key] = defaults[key] + + return options, args + + +def usage (exit=False): + """usage([exit]) + + Prints the usage statement at the top of a Python program. A usage + statement is any comment at the start of a line that begins with a + double hash marks (##). The double hash marks are removed before + the usage statement is printed. If exit is True, the program is + terminated with a return code of 2 (GNU standard status code for + incorrect usage). + """ + stream = open(sys.argv[0]) + for line in stream.readlines(): + if line.startswith("##"): print line.replace("##", ""), + stream.close() + + if exit: + sys.exit(2) + + +def getip(): + """ + getip() + + Returns the IP address of the computer. 
Helpful for those hosts that might + sit behind gateways and report a hostname that is a little strange (I'm + looking at you oco3-sim1). + """ + return [(s.connect(('8.8.8.8', 80)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1] + + +def arg_parse(arguments, description=None): + """ + arg_parse() + + Parses the arguments using argparse. Returns a Namespace object. The + arguments dictionary should match the argparse expected data structure: + + .. code-block::python + + arguments = { + '--port': { + 'type' : int, + 'default' : 3075, + 'help' : 'Port on which to send data' + }, + '--verbose': { + 'action' : 'store_true', + 'default' : False, + 'help' : 'Hexdump of the raw command being sent.' + } + } + + For positional arguments, be sure to pass in an OrderedDict: + + .. code-block::python + + arguments = { + '--port': { + 'type' : int, + 'default' : 3075, + 'help' : 'Port on which to send data' + }, + '--verbose': { + 'action' : 'store_true', + 'default' : False, + 'help' : 'Hexdump of the raw command being sent.' + } + } + + arguments['command'] = { + 'type' : str, + 'help' : 'Name of the command to send.' + } + + arguments['arguments'] = { + 'type' : util.toNumberOrStr, + 'metavar' : 'argument', + 'nargs' : '*', + 'help' : 'Command arguments.' + } + + """ + if not description: + description = "" + + ap = argparse.ArgumentParser( + description = description, + formatter_class = argparse.ArgumentDefaultsHelpFormatter + ) + + for name, params in arguments.items(): + ap.add_argument(name, **params) + + args = ap.parse_args() + return args + + +def extant_file(file): + """ + 'Type' for argparse - checks that file exists but does not open. 
+ """ + if not os.path.exists(file): + # Argparse uses the ArgumentTypeError to give a rejection message like: + # error: argument input: file does not exist + raise argparse.ArgumentTypeError("{0} does not exist".format(file)) + return file diff --git a/bliss/core/geom.py b/bliss/core/geom.py new file mode 100755 index 00000000..4c5dc482 --- /dev/null +++ b/bliss/core/geom.py @@ -0,0 +1,823 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2009, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +"""BLISS 2D/3D Geometry + +This module contains basic 2D and 3D geometry classes (Point, Line, +Polygon, Rectangle) with arithmetic operator, containment, and +sequence/iterator methods. These methods allow for natural and +convenient Python expressions such as:: + + # Translate point by five units in both x and y. + point + 5 + + # Polygon hit tests + if point in polygon: + ... + + # Iteration or vertices + for vertex in polygon: + ... + +This module was originally written as a support library for AEGIS +ground processing code and its precursors (e.g. OASIS). It dates back +to at least 2009 and probably even earlier. +""" + +class Point (object): + """Point is a simple 2D Cartesian point object with public 'x' and + 'y' coordinate fields. The operators +, -, +=, -=, \*, \*=, /, /=, == + and !=. 
+ """ + + __slots__ = [ 'x', 'y', 'z' ] + + + def __init__ (self, x=0, y=0, z=None): + """Point(x=0, y=0, z=None) -> Point + Point([x, y, z]) -> Point + Point([x, y]) -> Point + """ + if isinstance(x, (list, tuple)): + if len(x) >= 2: + self.x = x[0] + self.y = x[1] + self.z = None + if len(x) == 3: + self.z = x[2] + else: + self.x = x + self.y = y + self.z = z + + + def copy (self): + """Returns a copy of this Point. + """ + return Point(self.x, self.y, self.z) + + + def __repr__ (self): + if self.z: + return 'Point(%s, %s, %s)' % (str(self.x), str(self.y), str(self.z)) + else: + return 'Point(%s, %s)' % (str(self.x), str(self.y)) + + + + def __add__ (self, other): + """Adds the (x, y) coordinates of two points or a point and a + number. Examples: + + >>> Point(1, 2) + 1 + Point(2, 3) + + >>> Point(1, 2) + Point(3, 4) + Point(4, 6) + """ + if isinstance(other, Point): + if self.z and other.z: + return Point(self.x + other.x, self.y + other.y, self.z + other.z) + else: + return Point(self.x + other.x, self.y + other.y, self.z) + else: + if self.z: + return Point(self.x + other, self.y + other, self.z + other) + else: + return Point(self.x + other, self.y + other, self.z) + + + def __radd__ (self, other): + """Adds a number to the (x, y) coordinates of a point. Examples: + + >>> 1 + Point(1, 2) + Point(2, 3) + """ + return self.__add__(other) + + + def __sub__ (self, other): + """Subtracts the (x, y) coordinates of two points or a point and a + number. 
Examples: + + >>> Point(1, 2) - 1 + Point(0, 1) + + >>> Point(1, 2) - Point(3, 4) + Point(-2, -2) + """ + if isinstance(other, Point): + if self.z and other.z: + return Point(self.x - other.x, self.y - other.y, self.z - other.z) + else: + return Point(self.x - other.x, self.y - other.y, self.z) + else: + if self.z: + return Point(self.x - other, self.y - other, self.z - other) + else: + return Point(self.x - other, self.y - other, self.z) + + + def __mul__ (self, other): + """Multiplies the (x, y) coordinates of a point by a number. + Examples: + + >>> Point(2, 3) * 0 + Point(0, 0) + + >>> Point(2, 3) * 1 + Point(2, 3) + + >>> Point(2, 3) * 2 + Point(4, 6) + """ + if self.z: + return Point(self.x * other, self.y * other, self.z * other) + else: + return Point(self.x * other, self.y * other) + + + def __rmul__(self, other): + """Multiplies the (x, y) coordinates of a point by a number. + """ + return self.__mul__(other) + + + def __div__ (self, other): + """Divides the (x, y) coordinates of a point by a number. + Examples: + + >>> Point(4, 6) / 1 + Point(4, 6) + + >>> Point(4, 6) / 2 + Point(2, 3) + + >>> Point(2, 3) / 2 + Point(1, 1) + + >>> Point(2.0, 3.0) / 2 + Point(1.0, 1.5) + """ + if self.z: + return Point(self.x / other, self.y / other, self.z / other) + else: + return Point(self.x / other, self.y / other) + + + def __iadd__ (self, other): + """Adds the (x, y) coordinates of two points or a point and a + number. Examples: + + >>> p = Point(1, 2) + >>> p += 1 + >>> p + Point(2, 3) + + >>> p = Point(1, 2) + >>> p += Point(3, 4) + >>> p + Point(4, 6) + """ + if isinstance(other, Point): + self.x += other.x + self.y += other.y + if self.z and other.z: + self.z += other.z + else: + self.x += other + self.y += other + if self.z: + self.z += other + return self + + + def __isub__ (self, other): + """Subtracts the (x, y) coordinates of two points or a point and a + number. 
Examples: + + >>> p = Point(1, 2) + >>> p -= 1 + >>> p + Point(0, 1) + + >>> p = Point(1, 2) + >>> p -= Point(3, 4) + >>> p + Point(-2, -2) + """ + if isinstance(other, Point): + self.x -= other.x + self.y -= other.y + if self.z and other.z: + self.z -= other.z + else: + self.x -= other + self.y -= other + if self.z: + self.z -= other + return self + + + def __imul__ (self, other): + """Multiplies the (x, y) coordinates of a point by a number. + Examples: + + >>> p = Point(2, 3) + >>> p *= 0 + >>> p + Point(0, 0) + + >>> p = Point(2, 3) + >>> p *= 1 + >>> p + Point(2, 3) + + >>> p = Point(2, 3) + >>> p *= 2 + >>> p + Point(4, 6) + """ + if isinstance(other, Point): + self.x *= other.x + self.y *= other.y + if self.z and other.z: + self.z *= other.z + else: + self.x *= other + self.y *= other + if self.z: + self.z *= other + return self + + + def __idiv__ (self, other): + """Divides the (x, y) coordinates of a point by a number. + Examples: + + >>> p = Point(4, 6) + >>> p /= 1 + >>> p + Point(4, 6) + + >>> p = Point(4, 6) + >>> p /= 2 + >>> p + Point(2, 3) + + >>> p = Point(2, 3) + >>> p /= 2 + >>> p + Point(1, 1) + + >>> p = Point(2.0, 3.0) + >>> p /= 2 + >>> p + Point(1.0, 1.5) + """ + if isinstance(other, Point): + self.x /= other.x + self.y /= other.y + if self.z and other.z: + self.z /= other.z + else: + self.x /= other + self.y /= other + if self.z: + self.z /= other + return self + + + def __eq__ (self, other): + """Compares the (x, y) coordinates of two points for equality. + Examples: + + >>> Point(8, 7) == Point(8, 7) + True + + >>> Point(8, 7) == Point(7, 8) + False + + >>> Point(8, 7) == None + False + """ + if isinstance(other, Point): + if self.z: + return self.x == other.x and self.y == other.y and self.z == other.z + else: + return self.x == other.x and self.y == other.y + else: + return False + + + def __ne__ (self, other): + """Compare the (x, y) coordinates of two points for inequality. 
+ Examples: + + >>> Point(8, 7) != Point(8, 7) + False + + >>> Point(8, 7) != Point(7, 8) + True + + >>> Point(8, 7) != None + True + """ + return not self.__eq__(other) + + + def __len__ (self): + """Returns the dimensionality of this Point, either 2 or 3. + """ + if self.z: + return 3 + else: + return 2 + + + def __getitem__ (self, key): + """Returns the x, y, or z (0, 1, 2) coordinate of this point. + """ + if key == 0: + return self.x + elif key == 1: + return self.y + elif key == 2 and self.z is not None: + return self.z + else: + raise IndexError('Point index out of range') + + + def __setitem__ (self, key, value): + """Sets the x, y, or z (0, 1, 2) coordinate of this point. + """ + if key == 0: + self.x = value + elif key == 1: + self.y = value + elif key == 2 and self.z is not None: + self.z = value + else: + raise IndexError('Point assignment index out of range') + + + def dot (self, other): + """dot (self, other) -> number + + Returns the dot product of this Point with another. + """ + if self.z: + return (self.x * other.x) + (self.y * other.y) + (self.z * other.z) + else: + return (self.x * other.x) + (self.y * other.y) + + + +class Line (object): + """Line segment objects contain two points. + """ + + __slots__ = [ 'p', 'q' ] + + + def __init__ (self, p, q): + """Line(Point, Point) -> Line + + Creates a new Line segment with the given endpoints. + """ + self.p = p + self.q = q + + + def __repr__ (self): + return "Line(%s, %s)" % (str(self.p), str(self.q)) + + + def slope (self): + """slope () -> float + """ + return (self.p.y - self.q.y) / (self.p.x - self.q.x) + + + def intersect (self, line): + """intersect (line) -> Point | None + + Returns the intersection point of this line segment with another. + If this line segment and the other line segment are conincident, + the first point on this line segment is returned. If the line + segments do not intersect, None is returned. 
+ + See http://local.wasp.uwa.edu.au/~pbourke/geometry/lineline2d/ + + Examples: + + >>> A = Line( Point(0.0, 0.0), Point(5.0, 5.0) ) + >>> B = Line( Point(5.0, 0.0), Point(0.0, 5.0) ) + >>> C = Line( Point(1.0, 3.0), Point(9.0, 3.0) ) + >>> D = Line( Point(0.5, 3.0), Point(6.0, 4.0) ) + >>> E = Line( Point(1.0, 1.0), Point(3.0, 8.0) ) + >>> F = Line( Point(0.5, 2.0), Point(4.0, 7.0) ) + >>> G = Line( Point(1.0, 2.0), Point(3.0, 6.0) ) + >>> H = Line( Point(2.0, 4.0), Point(4.0, 8.0) ) + >>> I = Line( Point(3.5, 9.0), Point(3.5, 0.5) ) + >>> J = Line( Point(3.0, 1.0), Point(9.0, 1.0) ) + >>> K = Line( Point(2.0, 3.0), Point(7.0, 9.0) ) + >>> L = Line( Point(1.0, 2.0), Point(5.0, 7.0) ) + + >>> A.intersect(B) + Point(2.5, 2.5) + + >>> C.intersect(D) == None + True + + >>> E.intersect(F) + Point(1.8275862069, 3.89655172414) + + >>> G.intersect(H) + Point(1.0, 2.0) + + >>> I.intersect(J) + Point(3.5, 1.0) + + >>> K.intersect(L) == None + True + """ + (x1, y1) = (self.p.x, self.p.y) + (x2, y2) = (self.q.x, self.q.y) + (x3, y3) = (line.p.x, line.p.y) + (x4, y4) = (line.q.x, line.q.y) + denom = ((y4 - y3) * (x2 - x1)) - ((x4 - x3) * (y2 - y1)) + num1 = ((x4 - x3) * (y1 - y3)) - ((y4 - y3) * (x1 - x3)) + num2 = ((x2 - x1) * (y1 - y3)) - ((y2 - y1) * (x1 - x3)) + intersect = None + + if num1 == 0 and num2 == 0 and denom == 0: # Coincident lines + intersect = self.p + elif denom != 0: # Parallel lines (denom == 0) + ua = float(num1) / denom + ub = float(num2) / denom + if ua >= 0.0 and ua <= 1.0 and ub >= 0.0 and ub <= 1.0: + x = x1 + (ua * (x2 - x1)) + y = y1 + (ua * (y2 - y1)) + intersect = Point(x, y) + + return intersect + + +class Plane (object): + """Plane objects are defined by a point and direction vector normal + to the plane. + """ + + __slots__ = [ 'p', 'n' ] + + + def __init__ (self, point, normal): + """Plane(point, normal) -> Plane + + Creates a new plane given a point and direction vector normal to + the plane. 
+ """ + self.p = point + self.n = normal + + + def __repr__ (self): + return "Plane(point=%s, normal=%s)" % (str(self.p), str(self.n)) + + + def front (self, point): + """front (point) -> True | False + + Returns True if point is in ""front"" of the Plane, False otherwise. + """ + return self.n.dot(self.p - point) > 0 + + + def intersect (self, line): + """intersect(line) -> Point | None + + Returns the point at which the line segment and Plane intersect + or None if they do not intersect. + """ + eps = 1e-8 + d = (line.q - line.p) + dn = d.dot(self.n) + point = None + + if abs(dn) >= eps: + mu = self.n.dot(self.p - line.p) / dn + if mu >= 0 and mu <= 1: + point = line.p + mu * d + + return point + + + +class Polygon (object): + """Polygon objects contain a list of points. + """ + + __slots__ = [ '_bounds', '_dirty', 'vertices' ] + + + def __init__ (self, *vertices): + """Polygon(vertices) -> Polygon + + Creates a new Polygon with no vertices. + """ + if vertices: + if len(vertices) == 1 and isinstance(vertices[0], list): + vertices = vertices[0] + else: + vertices = list(vertices) + else: + vertices = [ ] + + if len(vertices) > 0 and isinstance(vertices[0], (list, tuple)): + vertices = [ Point(v) for v in vertices ] + + self._bounds = None + self._dirty = True + self.vertices = vertices + + + def __contains__ (self, point): + """__contains__ (self, point) -> True | False + + Allows syntax: if point in polygon + """ + return self.contains(point) + + + def __len__ (self): + """__len__ () -> integer + + Returns the number of vertices in this Polygon. + + Examples: + + >>> p = Polygon() + >>> len(p) + 0 + + >>> p.vertices = [ Point(0, 0), Point(0, 1), Point(0, 2) ] + >>> len(p) + 3 + """ + return len(self.vertices) + + + def __getitem__ (self, key): + """Returns the nth vertex of this Polygon. + """ + return self.vertices[key] + + + def __setitem__ (self, key, value): + """Sets the nth vertex of this Polygon. 
+ """ + self.vertices[key] = value + self._dirty = True + + + def __iter__ (self): + return self.vertices.__iter__() + + + def __repr__ (self): + if len(self.vertices) > 4: + vertices = "" % len(self.vertices) + else: + vertices = "(" + ", ".join(str(v) for v in self.vertices) + ")" + return "Polygon%s" % vertices + + + def area (self): + """area() -> number + + Returns the area of this Polygon. + """ + area = 0.0 + + for segment in self.segments(): + area += ((segment.p.x * segment.q.y) - (segment.q.x * segment.p.y))/2 + + return area + + + def bounds (self): + """bounds() -> Rect + + Returns the bounding Rectangle for this Polygon. + """ + if self._dirty: + min = self.vertices[0].copy() + max = self.vertices[0].copy() + for point in self.vertices[1:]: + if point.x < min.x: min.x = point.x + if point.y < min.y: min.y = point.y + if point.x > max.x: max.x = point.x + if point.y > max.y: max.y = point.y + + self._bounds = Rect(min, max) + self._dirty = False + + return self._bounds + + + def center (self): + """center() -> (x, y) + + Returns the center (of mass) point of this Polygon. + + See http://en.wikipedia.org/wiki/Polygon + + Examples: + + >>> p = Polygon() + >>> p.vertices = [ Point(3, 8), Point(6, 4), Point(0, 3) ] + >>> p.center() + Point(2.89285714286, 4.82142857143) + """ + Cx = 0.0 + Cy = 0.0 + denom = 6.0 * self.area() + + for segment in self.segments(): + x = (segment.p.x + segment.q.x) + y = (segment.p.y + segment.q.y) + xy = (segment.p.x * segment.q.y) - (segment.q.x * segment.p.y) + Cx += (x * xy) + Cy += (y * xy) + + Cx /= denom + Cy /= denom + + return Point(Cx, Cy) + + + def contains (self, p): + """Returns True if point is contained inside this Polygon, False + otherwise. + + This method uses the Ray Casting algorithm. 
+ + Examples: + + >>> p = Polygon() + >>> p.vertices = [Point(1, 1), Point(1, -1), Point(-1, -1), Point(-1, 1)] + + >>> p.contains( Point(0, 0) ) + True + + >>> p.contains( Point(2, 3) ) + False + + """ + inside = False + + if p in self.bounds(): + for s in self.segments(): + if ((s.p.y > p.y) != (s.q.y > p.y) and + (p.x < (s.q.x - s.p.x) * (p.y - s.p.y) / (s.q.y - s.p.y) + s.p.x)): + inside = not inside + + return inside + + + def segments (self): + """Return the Line segments that comprise this Polygon.""" + for n in xrange(len(self.vertices) - 1): + yield Line(self.vertices[n], self.vertices[n + 1]) + + yield Line(self.vertices[-1], self.vertices[0]) + + + +class Rect (object): + """Rect + """ + + __slots__ = [ 'ul', 'lr' ] + + + def __init__ (self, ul, lr): + """Rect(Point, Point) -> Rect + + Creates a new rectangle. + """ + self.ul = Point(min(ul.x, lr.x), min(ul.y, lr.y)) + self.lr = Point(max(ul.x, lr.x), max(ul.y, lr.y)) + + + def __contains__ (self, point): + """__contains__ (self, point) -> True | False + + Allows syntax: if point in rectangle + """ + return self.contains(point) + + + def __len__ (self): + """__len__ () -> integer + + Returns the number of vertices in this Rectangle. + """ + return 4 + + + def __repr__ (self): + return "Rect(ul=%s, lr=%s)" % (str(self.ul), str(self.lr)) + + + def area (self): + """area() -> number + + Returns the area of this Rectangle. + """ + return self.width() * self.height() + + + def bounds (self): + """bounds() -> Rect + + Returns the Rectangle itself. + """ + return self + + + def center (self): + """center () -> Point + + Returns the center Point of this Rectangle. + """ + return (self.ul + self.lr) / 2 + + + def contains (self, point): + """contains(point) -> True | False + + Returns True if point is contained inside this Rectangle, False otherwise. 
+ + Examples: + + >>> r = Rect( Point(-1, -1), Point(1, 1) ) + >>> r.contains( Point(0, 0) ) + True + + >>> r.contains( Point(2, 3) ) + False + """ + return (point.x >= self.ul.x and point.x <= self.lr.x) and \ + (point.y >= self.ul.y and point.y <= self.lr.y) + + + def height (self): + """height () -> number + + Returns the height of this Rectangle. + """ + return self.lr.y - self.ul.y + + + def segments (self): + """segments () -> [ Line, Line, Line, Line ] + + Return a list of Line segments that comprise this Rectangle. + """ + ul = self.ul + lr = self.lr + ur = Point(lr.x, ul.y) + ll = Point(ul.x, lr.y) + return [ Line(ul, ur), Line(ur, lr), Line(lr, ll), Line(ll, ul) ] + + + def width (self): + """width () -> number + + Returns the width of this Rectangle. + """ + return self.lr.y - self.ul.x + + + +def runTests (): + import doctest + doctest.testmod() + + + +if __name__ == '__main__': + runTests() diff --git a/bliss/core/json.py b/bliss/core/json.py new file mode 100644 index 00000000..0674d67d --- /dev/null +++ b/bliss/core/json.py @@ -0,0 +1,97 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. 
+ +""" +BLISS Javascript Object Notation (JSON) + +The bliss.core.json module provides JSON utilities and mixin classes +for encoding and decoding between BLISS data structures and JSON. +""" + +import collections +import json + + +def slotsToJSON(obj, slots=None): + """Converts the given Python object to one suitable for Javascript + Object Notation (JSON) serialization via :func:`json.dump` or + :func:`json.dumps`. This function delegates to :func:`toJSON`. + + Specifically only attributes in the list of *slots* are converted. + If *slots* is not provided, it defaults to the object's + ``__slots__` and any inherited ``__slots__``. + + To omit certain slots from serialization, the object may define a + :meth:`__jsonOmit__(key, val)` method. When the method returns + True for any particular slot name (i.e. key) and value + combination, the slot will not serialized. + """ + if slots is None: + slots = list(obj.__slots__) if hasattr(obj, '__slots__') else [ ] + for base in obj.__class__.__bases__: + if hasattr(base, '__slots__'): + slots.extend(base.__slots__) + + testOmit = hasattr(obj, '__jsonOmit__') and callable(obj.__jsonOmit__) + result = { } + + for slot in slots: + key = slot[1:] if slot.startswith('_') else slot + val = getattr(obj, slot, None) + + if testOmit is False or obj.__jsonOmit__(key, val) is False: + result[key] = toJSON(val) + + return result + + +def toJSON (obj): + """Converts the given Python object to one suitable for Javascript + Object Notation (JSON) serialization via :func:`json.dump` or + :func:`json.dumps`. If the Python object has a :meth:`toJSON` + method, it is always given preference and will be called to peform + the conversion. + + Otherwise, plain mapping and sequence types are converted to + Python dictionaries and lists, respectively, by recursively + calling this :func:`toJSON` function on mapping keys and values or + iterable items. 
Python primitive types handled natively by the + JSON encoder (``int``, ``long``, ``float``, ``str``, ``unicode``, + and ``None``) are returned as-is. + + If no other conversion is appropriate, the Python builtin function + :func:`str` is used to convert the object. + """ + if hasattr(obj, 'toJSON') and callable(obj.toJSON): + result = obj.toJSON() + elif isinstance(obj, (int, long, float, str, unicode)) or obj is None: + result = obj + elif isinstance(obj, collections.Mapping): + result = { toJSON(key): toJSON(obj[key]) for key in obj } + elif isinstance(obj, collections.Sequence): + result = [ toJSON(item) for item in obj ] + else: + result = str(obj) + + return result + + +class SlotSerializer (object): + __slots__ = [ ] + + def __jsonOmit__(self, key, val): + return val is None or val is '' + + def toJSON(self): + return slotsToJSON(self) diff --git a/bliss/core/limits.py b/bliss/core/limits.py new file mode 100644 index 00000000..d088a5d6 --- /dev/null +++ b/bliss/core/limits.py @@ -0,0 +1,226 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Limits + +The bliss.core.limits module provides limit definitions for telemetry fields. 
+ +The expected limits.yaml should follow this schema: + +- !Limit + source: -- telemetry source for the limit. should follow format 'Packet.field_name' + desc: -- description of the limit + units: -- the units used for possible conversion depending on the units set in the + telemetry dictionary + lower: -- lower limits + error: -- trigger error if telemetry value exceeds this lower bound (exclusive) + warn: -- trigger warning if telemetry value exceeds this lower bound (exclusive) + upper: -- upper limits + error: -- trigger error if telemetry value exceeds this upper bound (exclusive) + warn: -- trigger warning if telemetry value exceeds this upper bound (exclusive) + value: -- enumerated values to trigger error/warning + error: -- trigger error if telemetry value == or in list of strings + warn: -- trigger warning if telemetry value == or in list of strings + +For example: + + - !Limit + source: 1553_HS_Packet.Voltage_A + desc: tbd + units: Kelvin + lower: + error: 5.0 + warn: 10.0 + upper: + error: 45.0 + warn: 40.0 + + + - !Limit + source: Ethernet_HS_Packet.product_type + desc: tbd + value: + error: FOOBAR + warn: + - FOO + - BAR + +""" + +import os +import pkg_resources +import yaml + +import bliss +from bliss.core import json, log, tlm, util + +class Thresholds (json.SlotSerializer, object): + def __init__ (self, **kwargs): + self._thresholds = kwargs + + + def __getattr__ (self, name): + if name in self._thresholds: + return self._thresholds[name] + else: + raise AttributeError("Limit has no such threshold '%s'" % name) + + + def __getstate__ (self): + return self.__dict__ + + + def __repr__ (self): + kwargs = [ '%s=%s' % item for item in self._thresholds.items() ] + return 'Thresholds(%s)' % ', '.join(kwargs) + + + def __setstate__ (self, state): + self.__dict__ = state + + def toJSON(self): + return self._thresholds + + + +class LimitDefinition (json.SlotSerializer, object): + """LimitDefinition + """ + + __slots__ = [ 'desc', 'lower', 'source', 'units', 
'upper', 'value' ] + + def __init__(self, *args, **kwargs): + """Creates a new LimitDefinition.""" + for slot in self.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, name, kwargs.get(name, None)) + + for name in 'lower', 'upper', 'value': + thresholds = getattr(self, name) + + if type(thresholds) is dict: + setattr(self, name, Thresholds(**thresholds)) + + def __repr__(self): + return util.toRepr(self) + + def error (self, value, units=None): + if self.units and self.units != units: + value = self.convert(value, units, self.units) + + check = False + if self.lower: + check = check or value < self.lower.error + + if self.upper: + check = check or value > self.upper.error + + if self.value: + if isinstance(self.value.error, list): + check = check or value in self.value.error + else: + check = check or value == self.value.error + + return check + + def warn (self, value, units=None): + if self.units and self.units != units: + value = self.convert(value, units, self.units) + + check = False + if self.lower: + check = check or value < self.lower.warn + + if self.upper: + check = check or value > self.upper.warn + + if self.value: + if isinstance(self.value.warn, list): + check = check or value in self.value.warn + else: + check = check or value == self.value.warn + + return check + + def convert(self, value, new_unit, old_unit): + return value + + +class LimitsDict(dict): + """LimitsDict + """ + def __init__(self, *args, **kwargs): + """Creates a new Limits Dictionary from the given limits + dictionary filename or YAML string. 
+ """ + self.filename = None + + if len(args) == 1 and len(kwargs) == 0 and type(args[0]) == str: + dict.__init__(self) + self.load(args[0]) + else: + dict.__init__(self, *args, **kwargs) + + def add(self, defn): + """Adds the given Limit Definition to this Limits Dictionary.""" + self[defn.source] = defn + + def load(self, content): + """Loads Limit Definitions from the given YAML content into this + Telemetry Dictionary. Content may be either a filename + containing YAML content or a YAML string. + + Load has no effect if this Limits Dictionary was already + instantiated with a filename or YAML content. + """ + if self.filename is None: + if os.path.isfile(content): + self.filename = content + stream = open(self.filename, 'rb') + else: + stream = content + + limits = yaml.load(stream) + + for lmt in limits: + self.add(lmt) + + if type(stream) is file: + stream.close() + + def toJSON(self): + return { name: defn.toJSON() for name, defn in self.items() } + + +def getDefaultDict(reload=False): + return util.getDefaultDict(__name__, 'limits', LimitsDict, reload) + + +def getDefaultSchema(): + return pkg_resources.resource_filename('bliss.core', 'data/limits_schema.json') + + +def getDefaultDictFilename(): + return bliss.config.limits.filename + + +def YAMLCtor_LimitDefinition(loader, node): + fields = loader.construct_mapping(node, deep=True) + return createLimitDefinition(**fields) + + +yaml.add_constructor('!Limit', YAMLCtor_LimitDefinition) + +util.__init_extensions__(__name__, globals()) diff --git a/bliss/core/log.py b/bliss/core/log.py new file mode 100644 index 00000000..6c2f8571 --- /dev/null +++ b/bliss/core/log.py @@ -0,0 +1,326 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2008, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. 
Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Logging + +The bliss.core.log module logs warnings, errors, and other information to +standard output and via syslog. +""" + +import sys +import socket +import datetime +import time +import re + +import logging +import logging.handlers + +import bliss +import bliss.core + + +NOTICE = logging.INFO + 1 +COMMAND = logging.INFO + 2 +PROGRAM = logging.INFO + 3 +logging.addLevelName(NOTICE , 'NOTICE' ) +logging.addLevelName(COMMAND, 'COMMAND') +logging.addLevelName(PROGRAM, 'PROGRAM') + + +class LogFormatter (logging.Formatter): + """LogFormatter + + Log output format is: + + YYYY-MM-DDTHH:MM:SS | levelname | message + + Where levelname is one of 'critical', 'error', 'warning', 'command', + 'info', or 'debug'. + """ + + DATEFMT = "%Y-%m-%dT%H:%M:%S" + + def __init__ (self): + """LogFormatter() + + Creates and returns a new LogFormatter. 
+ """ + format = "%(asctime)s | %(levelname)-8s | %(message)s" + datefmt = self.DATEFMT + logging.Formatter.__init__(self, format, datefmt) + + + def formatTime (self, record, datefmt=None): + """Return the creation time of the specified LogRecord as formatted + text.""" + if datefmt is None: + datefmt = '%Y-%m-%d %H:%M:%S' + + ct = self.converter(record.created) + t = time.strftime(datefmt, ct) + s = '%s.%03d' % (t, record.msecs) + + return s + + + +class SysLogFormatter (logging.Formatter): + """SysLogFormatter""" + + BSD_FMT = '%(asctime)s %(hostname)s %(name)s[%(process)d]: %(message)s' + BSD_DATEFMT = '%b %d %H:%M:%S' + + SYS_DATEFMT = '%Y-%m-%dT%H:%M:%S.%fZ' + SYSLOG_FMT = ('1 %(asctime)s %(hostname)s %(name)s %(process)d ' + '%(levelname)s - %(message)s') + + + def __init__ (self, bsd=False): + """LogFormatter([bsd=False]) + + Creates and returns a new SysLogFormatter. If BSD is True, the + sylog message is formatted according to the BSD Syslog Protocol: + + RFC 3164 - The BSD Syslog Protocol + http://tools.ietf.org/html/rfc3164 + + Otherwise, the syslog message is formatted according to the Syslog + Protocol: + + RFC 5424 - The Syslog Protocol + http://tools.ietf.org/html/rfc5424 + """ + self.bsd = bsd + self.hostname = socket.gethostname() + + if self.bsd is True: + format = self.BSD_FMT + else: + format = self.SYSLOG_FMT + + logging.Formatter.__init__(self, format) + + + def format (self, record): + """Returns the given LogRecord as formatted text.""" + record.hostname = self.hostname + return logging.Formatter.format(self, record) + + + def formatTime (self, record, datefmt=None): + """Returns the creation time of the given LogRecord as formatted text. + + NOTE: The datefmt parameter and self.converter (the time + conversion method) are ignored. BSD Syslog Protocol messages + always use local time, and by our convention, Syslog Protocol + messages use UTC. 
+ """ + if self.bsd: + lt_ts = datetime.datetime.fromtimestamp(record.created) + ts = lt_ts.strftime(self.BSD_DATEFMT) + if ts[4] == '0': + ts = ts[0:4] + ' ' + ts[5:] + else: + utc_ts = datetime.datetime.utcfromtimestamp(record.created) + ts = utc_ts.strftime(self.SYS_DATEFMT) + return ts + + + +class SysLogHandler (logging.handlers.SysLogHandler): + def __init__(self, address=None, facility=None, socktype=None): + self.bsd = False + + if address is None: + if sys.platform == 'darwin': + address = '/var/run/syslog' + self.bsd = True + else: + address = ('localhost', logging.handlers.SYSLOG_UDP_PORT) + + if facility is None: + facility = logging.handlers.SysLogHandler.LOG_USER + + logging.handlers.SysLogHandler.__init__(self, address, facility, socktype) + + self.priority_map['NOTICE'] = 'notice' + self.priority_map['COMMAND'] = 'notice' + self.priority_map['PROGRAM'] = 'notice' + + self.setFormatter( SysLogFormatter(self.bsd) ) + + +def addLocalHandlers (logger): + """Adds logging handlers to logger to log to the following local + resources: + + 1. The terminal + 2. localhost:514 (i.e. syslogd) + 3. localhost:2514 (i.e. the BLISS GUI syslog-like handler) + """ + termlog = logging.StreamHandler() + termlog.setFormatter( LogFormatter() ) + + logger.addHandler( termlog ) + logger.addHandler( SysLogHandler() ) + logger.addHandler( SysLogHandler(('localhost', 2514)) ) + + +def addRemoteHandlers (logger): + """Adds logging handlers to logger to remotely log to: + + bliss.config.logging.hostname:514 (i.e. syslogd) + + If not set or hostname cannot be resolved, this method has no + effect. + """ + try: + hostname = bliss.config.logging.hostname + + # Do not "remote" log to this host, as that's already covered + # by addLocalHandlers(). 
+ if socket.getfqdn() != hostname: + socket.gethostbyname(hostname) + logger.addHandler( SysLogHandler( (hostname, 514) ) ) + + except AttributeError: + pass # No bliss.config.logging.hostname + + except socket.gaierror: + pass # hostname cannot be resolved (e.g. no Internet) + + +def init (): + global logger, crit, debug, error, info, warn + + try: + name = bliss.config.logging.name + except AttributeError: + name = 'bliss' + + if logging.getLogger(name) == logger: + for h in logger.handlers[:]: + logger.removeHandler(h) + + logger = logging.getLogger(name) + crit = logger.critical + debug = logger.debug + error = logger.error + info = logger.info + warn = logger.warning + + logger.setLevel(logging.INFO) + + addLocalHandlers (logger) + addRemoteHandlers(logger) + +reinit = init + + +def parseSyslog(msg): + """Parses Syslog messages (RFC 5424) + + The `Syslog Message Format (RFC 5424) + `_ can be parsed with + simple whitespace tokenization:: + + SYSLOG-MSG = HEADER SP STRUCTURED-DATA [SP MSG] + HEADER = PRI VERSION SP TIMESTAMP SP HOSTNAME + SP APP-NAME SP PROCID SP MSGID + ... + NILVALUE = "-" + + This method does not return STRUCTURED-DATA. It parses NILVALUE + ("-") STRUCTURED-DATA or simple STRUCTURED-DATA which does not + contain (escaped) ']'. + + :returns: A dictionary keyed by the constituent parts of the + Syslog message. 
+ """ + tokens = msg.split(' ', 6) + result = { } + + if len(tokens) > 0: + pri = tokens[0] + start = pri.find('<') + stop = pri.find('>') + + if start != -1 and stop != -1: + result['pri'] = pri[start + 1:stop] + else: + result['pri'] = '' + + if stop != -1 and len(pri) > stop: + result['version'] = pri[stop + 1:] + else: + result['version'] = '' + + result[ 'timestamp' ] = tokens[1] if len(tokens) > 1 else '' + result[ 'hostname' ] = tokens[2] if len(tokens) > 2 else '' + result[ 'appname' ] = tokens[3] if len(tokens) > 3 else '' + result[ 'procid' ] = tokens[4] if len(tokens) > 4 else '' + result[ 'msgid' ] = tokens[5] if len(tokens) > 5 else '' + result[ 'msg' ] = '' + + if len(tokens) > 6: + # The following will work for NILVALUE STRUCTURED-DATA or + # simple STRUCTURED-DATA which does not contain ']'. + rest = tokens[6] + start = rest.find('-') + + if start == -1: + start = rest.find(']') + + if len(rest) > start: + result['msg'] = rest[start + 1:].strip() + + return result + + +def begin (): + """Command-line tools should begin logging with core.log.begin() to + log the command name and arguments. + """ + logger.log(PROGRAM, " ".join(sys.argv)) + + +def end (): + """Command-line tools should end logging with log.end() to log the + completion of the command. 
+ """ + logger.log(PROGRAM, "done.") + logging.shutdown() + +def command(*args, **kwargs): + logger.log(COMMAND, *args, **kwargs) + + +def program(*args, **kwargs): + logger.log(PROGRAM, *args, **kwargs) + + +def notice(*args, **kwargs): + logger.log(NOTICE, *args, **kwargs) + + +logger = None +crit = None +debug = None +error = None +info = None +warn = None + +init() diff --git a/bliss/core/pcap.py b/bliss/core/pcap.py new file mode 100644 index 00000000..b218e925 --- /dev/null +++ b/bliss/core/pcap.py @@ -0,0 +1,557 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +This module, pcap.py, is a library to read/write PCAP-formatted files with +simple open, read, write, close functions +""" + +import __builtin__ +import calendar +import math +import struct +import dmc +import datetime +import log + + +""" +Check the endian of the host we are currently running on. 
+""" +if struct.pack('@I', 0xA1B2C3D4) == struct.pack('>I', 0xA1B2C3D4): + EndianSwap = '<' +else: + EndianSwap = '>' + + +class PCapFileStats (object): + """Current and threshold and statistics in PCapRolloverStream""" + + __slots__ = 'nbytes', 'npackets', 'nseconds' + + def __init__ (self, nbytes=None, npackets=None, nseconds=None): + self.nbytes = nbytes + self.npackets = npackets + self.nseconds = nseconds + + +class PCapGlobalHeader: + """PCapGlobalHeader + + Represents a PCap global header. See: + + https://wiki.wireshark.org/Development/LibpcapFileFormat + """ + + def __init__ (self, stream=None): + """Creates a new PCapGlobalHeader with default values. If a stream + is given, the global header data is read from it. + """ + self._format = 'IHHiIII' + self._size = struct.calcsize(self._format) + self._swap = '@' + + if stream is None: + self.magic_number = 0xA1B2C3D4 + self.version_major = 2 + self.version_minor = 4 + self.thiszone = 0 + self.sigfigs = 0 + self.snaplen = 65535 + self.network = 147 + self._data = str(self) + else: + self.read(stream) + + + def __len__ (self): + """Returns the number of bytes in this PCapGlobalHeader.""" + return len(self._data) + + + def __str__ (self): + """Returns this PCapGlobalHeader as a binary string.""" + return struct.pack( self._format, + self.magic_number , + self.version_major, + self.version_minor, + self.thiszone , + self.sigfigs , + self.snaplen , + self.network ) + + + def incomplete (self): + """Indicates whether or not this PCapGlobalHeader is incomplete.""" + return len(self) < self._size + + + def read (self, stream): + """Reads PCapGlobalHeader data from the given stream.""" + self._data = stream.read(self._size) + + if len(self._data) >= self._size: + values = struct.unpack(self._format, self._data) + else: + values = None, None, None, None, None, None, None + + if values[0] == 0xA1B2C3D4 or values[0] == 0xA1B23C4D: + self._swap = '@' + elif values[0] == 0xD4C3B2A1 or values[0] == 0x4D3CB2A1: + self._swap 
= EndianSwap + + if values[0] is not None: + values = struct.unpack(self._swap + self._format, self._data) + + self.magic_number = values[0] + self.version_major = values[1] + self.version_minor = values[2] + self.thiszone = values[3] + self.sigfigs = values[4] + self.snaplen = values[5] + self.network = values[6] + + + +class PCapPacketHeader: + """PCapPacketHeader + + Represents a PCap packet header. See: + + https://wiki.wireshark.org/Development/LibpcapFileFormat + """ + + def __init__ (self, stream=None, swap=None, orig_len=0, maxlen=65535): + """Creates a new PCapPacketHeader with default values. If a stream is + given, the packet header data is read from it. + """ + if swap is None: + swap = '@' + + self._format = 'IIII' + self._size = struct.calcsize(self._format) + self._swap = swap + + if stream is None: + self.ts_sec, self.ts_usec = dmc.getTimestampUTC() + self.incl_len = min(orig_len, maxlen) + self.orig_len = orig_len + self._data = str(self) + else: + self.read(stream) + + + def __len__ (self): + """Returns the number of bytes in this PCapPacketHeader.""" + return len(self._data) + + + def __str__ (self): + """Returns this PCapPacketHeader as a binary string.""" + return struct.pack( self._format , + self.ts_sec , + self.ts_usec , + self.incl_len, + self.orig_len ) + + + @property + def timestamp (self): + """Packet timestamp as a Python Datetime object""" + return datetime.datetime.utcfromtimestamp( self.ts ) + + + @property + def ts (self): + """Packet timestamp as a float, a combination of ts_sec and ts_usec""" + return float(self.ts_sec) + (float(self.ts_usec) / 1e6) + + + def incomplete (self): + """Indicates whether or not this PCapGlobalHeader is incomplete.""" + return len(self) < self._size + + + def read (self, stream): + """Reads PCapPacketHeader data from the given stream.""" + self._data = stream.read(self._size) + + if len(self._data) >= self._size: + values = struct.unpack(self._swap + self._format, self._data) + else: + values = None, 
None, None, None + + self.ts_sec = values[0] + self.ts_usec = values[1] + self.incl_len = values[2] + self.orig_len = values[3] + + +class PCapRolloverStream: + """ + Wraps a PCapStream to rollover to a new filename, based on packet + times, file size, or number of packets. + """ + + def __init__(self, format, + nbytes=None, npackets=None, nseconds=None, dryrun=False): + """Creates a new :class:`PCapRolloverStream` with the given + thresholds. + + A :class:`PCapRolloverStream` behaves like a + :class:`PCapStream`, except that writing a new packet will + cause the current file to be closed and a new file to be + opened when one or more of thresholds (``nbytes``, + ``npackets``, ``nseconds``) is exceeded. + + The new filename is determined by passing the ``format`` + string through :func:`PCapPacketHeader.timestamp.strftime()` + for the first packet in the file. + + When segmenting based on time (``nseconds``), for file naming + and interval calculation purposes ONLY, the timestamp of the + first packet in the file is rounded down to nearest even + multiple of the number of seconds. This yields nice round + number timestamps for filenames. For example: + + PCapRolloverStream(format="%Y%m%dT%H%M%S.pcap", nseconds=3600) + + If the first packet written to a file has a time of 2017-11-23 + 19:28:58, the file will be named: + + 20171123T190000.pcap + + And a new file will be started when a packet is written with a + timestamp that exceeds 2017-11-23 19:59:59. + + :param format: Output filename in ``strftime(3)`` format + :param nbytes: Rollover after writing nbytes + :param npackets: Rollover after writing npackets + :param nseconds: Rollover after nseconds have elapsed between + the first and last packet timestamp in the file. + :param dryrun: Simulate file writes and output log messages. 
+ """ + self._dryrun = dryrun + self._filename = None + self._format = format + self._startTime = None + self._stream = None + self._threshold = PCapFileStats(nbytes, npackets, nseconds) + self._total = PCapFileStats(0, 0, 0) + + + @property + def rollover (self): + """Indicates whether or not its time to rollover to a new file.""" + rollover = False + + if not rollover and self._threshold.nbytes is not None: + rollover = self._total.nbytes >= self._threshold.nbytes + + if not rollover and self._threshold.npackets is not None: + rollover = self._total.npackets >= self._threshold.npackets + + if not rollover and self._threshold.nseconds is not None: + nseconds = math.ceil(self._total.nseconds) + rollover = nseconds >= self._threshold.nseconds + + return rollover + + + def write (self, bytes, header=None): + """Writes packet ``bytes`` and the optional pcap packet ``header``. + + If the pcap packet ``header`` is not specified, one will be + generated based on the number of packet ``bytes`` and current + time. 
+ """ + if header is None: + header = PCapPacketHeader(orig_len=len(bytes)) + + if self._stream is None: + if self._threshold.nseconds is not None: + # Round down to the nearest multiple of nseconds + nseconds = self._threshold.nseconds + remainder = int( math.floor( header.ts % nseconds ) ) + delta = datetime.timedelta(seconds=remainder) + timestamp = header.timestamp - delta + else: + timestamp = header.timestamp + + self._filename = timestamp.strftime(self._format) + self._startTime = calendar.timegm( + timestamp.replace(microsecond=0).timetuple() ) + + if self._dryrun: + self._stream = True + self._total.nbytes += len(PCapGlobalHeader()) + else: + self._stream = open(self._filename, 'w') + self._total.nbytes += len(self._stream.header) + + if not self._dryrun: + self._stream.write(bytes, header) + + self._total.nbytes += len(bytes) + len(header) + self._total.npackets += 1 + self._total.nseconds = header.ts - self._startTime + + if self.rollover: + self.close() + + return header.incl_len + + + def close (self): + """Closes this :class:``PCapStream`` by closing the underlying Python + stream.""" + if self._stream: + values = ( self._total.nbytes, + self._total.npackets, + int( math.ceil(self._total.nseconds) ), + self._filename ) + + if self._dryrun: + msg = 'Would write %d bytes, %d packets, %d seconds to %s.' + else: + msg = 'Wrote %d bytes, %d packets, %d seconds to %s.' + self._stream.close() + + log.info(msg % values) + + self._filename = None + self._startTime = None + self._stream = None + self._total = PCapFileStats(0, 0, 0) + + +class PCapStream: + """PCapStream + + PCapStream is the primary class of the pcap.py module. It exposes + open(), read(), write(), and close() methods to read and write + pcap-formatted files. + + See: + + https://wiki.wireshark.org/Development/LibpcapFileFormat + """ + + def __init__(self, stream, mode='rb'): + """Creates a new PCapStream, which wraps the underlying Python stream, + already opened in the given mode. 
+ """ + if mode.startswith('r'): + self.header = PCapGlobalHeader(stream) + elif mode.startswith('w') or (mode.startswith('a') and stream.tell() == 0): + self.header = PCapGlobalHeader() + stream.write( str(self.header) ) + + self._stream = stream + + + def __enter__ (self): + """A PCapStream provies a Python Context Manager interface.""" + return self + + + def __exit__ (self, type, value, traceback): + """A PCapStream provies a Python Context Manager interface.""" + self.close() + + + def __next__ (self): + """Provides Python 3 iterator compatibility. See next().""" + return self.next() + + + def __iter__ (self): + """A PCapStream provides a Python iterator interface.""" + return self + + + def next (self): + """Returns the next header and packet from this + PCapStream. See read(). + """ + header, packet = self.read() + + if packet is None: + raise StopIteration + + return header, packet + + + def read (self): + """Reads a single packet from the this pcap stream, returning a + tuple (PCapPacketHeader, packet) + """ + header = PCapPacketHeader(self._stream, self.header._swap) + packet = None + + if not header.incomplete(): + packet = self._stream.read(header.incl_len) + + return (header, packet) + + + def write (self, bytes, header=None): + """write() is meant to work like the normal file write(). It takes + two arguments, a byte array to write to the file as a single + PCAP packet, and an optional header if one already exists. + The length of the byte array should be less than 65535 bytes. + write() returns the number of bytes actually written to the file. 
+ """ + if type(bytes) is str: + bytes = bytearray(bytes) + + if not isinstance(header, PCapPacketHeader): + header = PCapPacketHeader(orig_len=len(bytes)) + + packet = bytes[0:header.incl_len] + + self._stream.write( str(header) ) + self._stream.write( packet ) + self._stream.flush() + + return header.incl_len + + + def close (self): + """Closes this PCapStream by closing the underlying Python stream.""" + self._stream.close() + + +def open (filename, mode='r', **options): + """Returns an instance of a :class:`PCapStream` class which contains + the ``read()``, ``write()``, and ``close()`` methods. Binary mode + is assumed for this module, so the "b" is not required when + calling ``open()``. + + If the optiontal ``rollover`` parameter is True, a + :class:`PCapRolloverStream` is created instead. In that case + ``filename`` is treated as a ``strftime(3)`` format string and + ``nbytes``, ``npackets``, ``nseconds``, and ``dryrun`` parameters + may also be specified. See :class:``PCapRolloverStream`` for more + information. + + NOTE: :class:`PCapRolloverStream` is always opened in write mode + ("wb") and supports only ``write()`` and ``close()``, not + ``read()``. + """ + mode = mode.replace('b', '') + 'b' + + if options.get('rollover', False): + stream = PCapRolloverStream(filename, + options.get('nbytes' , None), + options.get('npackets', None), + options.get('nseconds', None), + options.get('dryrun' , False)) + else: + stream = PCapStream( __builtin__.open(filename, mode), mode ) + + return stream + + +def query(starttime, endtime, output=None, *filenames): + '''Given a time range and input file, query creates a new file with only + that subset of data. If no outfile name is given, the new file name is the + old file name with the time range appended. + + Args: + starttime: + The datetime of the beginning time range to be extracted from the files. + endtime: + The datetime of the end of the time range to be extracted from the files. 
+ output: + Optional: The output file name. Defaults to + [first filename in filenames][starttime]-[endtime].pcap + filenames: + A tuple of one or more file names to extract data from. + ''' + + if not output: + output = (filenames[0].replace('.pcap','') + starttime.isoformat() + '-' + endtime.isoformat() + '.pcap') + else: + output = output + + with open(output,'w') as outfile: + for filename in filenames: + log.info("pcap.query: processing %s..." % filename) + with open(filename, 'r') as stream: + for header, packet in stream: + if packet is not None: + if header.timestamp >= starttime and header.timestamp <= endtime: + outfile.write(packet, header=header) + + +def segment(filenames, format, **options): + """Segment the given pcap file(s) by one or more thresholds + (``nbytes``, ``npackets``, ``nseconds``). New segment filenames + are determined based on the ``strftime(3)`` ``format`` string + and the timestamp of the first packet in the file. + + :param filenames: Single filename (string) or list of filenames + :param format: Output filename in ``strftime(3)`` format + :param nbytes: Rollover after writing N bytes + :param npackets: Rollover after writing N packets + :param nseconds: Rollover after N seconds have elapsed between + the first and last packet timestamp in the file. + :param dryrun: Simulate file writes and output log messages. + """ + output = open(format, rollover=True, **options) + + if isinstance(filenames, str): + filenames = [ filenames ] + + for filename in filenames: + with open(filename, 'r') as stream: + for header, packet in stream: + output.write(packet, header) + + output.close() + + +def times(filenames, tolerance=2): + """For the given file(s), return the time ranges available. Tolerance + sets the number of seconds between time ranges. Any gaps larger + than tolerance seconds will result in a new time range. 
+ + :param filenames: Single filename (string) or list of filenames + :param tolerance: Maximum seconds between contiguous time ranges + + :returns: A dictionary keyed by filename, with each value a list + of (start, stop) time ranges for that file. + """ + times = { } + delta = datetime.timedelta(seconds=tolerance) + + if isinstance(filenames, str): + filenames = [ filenames ] + + for filename in filenames: + with open(filename, 'r') as stream: + times[filename] = list() + header, packet = stream.read() + start , stop = header.timestamp, header.timestamp + + for header, packet in stream: + if header.timestamp - stop > delta: + times[filename].append((start, stop)) + start = header.timestamp + stop = header.timestamp + + times[filename].append((start, stop)) + + return times diff --git a/bliss/core/seq.py b/bliss/core/seq.py new file mode 100644 index 00000000..a572d9a0 --- /dev/null +++ b/bliss/core/seq.py @@ -0,0 +1,737 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Sequences + +The bliss.core.seq module provides sequences of commands. 
+""" + +from __future__ import absolute_import + +import os +import math +import struct +import sys +import time + +from bliss.core import cmd, util + + +def setBit (value, bit, bitval): + """Returns value with a specific bit position set to bitval.""" + if bitval: + return value | (1 << bit) + else: + return value & ~(1 << bit) + + + +class Seq (object): + """Seq - Sequence + + """ + Magic = 0x0C03 + + + def __init__ (self, pathname=None, cmddict=None, id=None, version=0): + """Creates a new BLISS Command Sequence + + Creates an empty sequence which will be encoded and decoded based + on the given command dictionary (default: cmd.DefaultCmdDict). If + the optional pathname is given, the command sequence (text or + binary) will be read from it. + """ + self.pathname = pathname + self.cmddict = cmddict or cmd.getDefaultCmdDict() + self.crc32 = None + self.seqid = id + self.lines = [ ] + self.header = { } + self.version = version + self.log = SeqMsgLog() + + if self.pathname is not None: + self.read() + + + def _parseHeader (self, line, lineno, log): + """Parses a sequence header line containing 'name: value' pairs.""" + if line.startswith('#') and line.find(':') > 0: + tokens = [ t.strip().lower() for t in line[1:].split(":", 1) ] + name = tokens[0] + pos = SeqPos(line, lineno) + + if name in self.header: + msg = 'Ignoring duplicate header parameter: %s' + log.warning(msg % name, pos) + else: + for expected in ['seqid', 'version']: + if name == expected: + value = util.toNumber(tokens[1], None) + if value is None: + msg = 'Parameter "%s" value "%s" is not a number.' 
+ log.error(msg % (name, tokens[1]), poss) + else: + self.header[name] = value + + + @property + def cmddict (self): + """The command dictionary used to encode and decode this sequence.""" + return self._cmddict + + + @cmddict.setter + def cmddict (self, value): + if value is None: + value = cmd.getDefaultCmdDict() + self._cmddict = value + + + @property + def commands (self): + """The ordered list of SeqCmds in this sequence.""" + return filter(lambda line: type(line) is SeqCmd, self.lines) + + + @property + def duration (self): + """The total duration of this sequence.""" + return sum(cmd.delay.delay for cmd in self.commands) + + + @property + def binpath (self): + """The full path to the binary sequence filename.""" + return self._basepath + '.bin' + + + @property + def pathname (self): + """The underlying sequence pathname.""" + return self._pathname + + + @pathname.setter + def pathname (self, pathname): + self._pathname = None + self._basepath = None + self._basename = None + + if pathname is not None: + self._pathname = pathname + self._basepath = os.path.splitext(self._pathname)[0] + self._basename = os.path.basename(self._basepath) + + + @property + def txtpath (self): + """The full path to the text sequence filename.""" + return self._basepath + '.txt' + + + def append (self, cmd, delay=0.000, attrs=None): + """Adds a new command with a relative time delay to this sequence.""" + self.lines.append( SeqCmd(cmd, delay, attrs) ) + + + def printText (self, stream=None): + """Prints a text representation of this sequence to the given stream or + standard output. 
+ """ + if stream is None: + stream = sys.stdout + + stream.write('# seqid : %u\n' % self.seqid ) + stream.write('# version : %u\n' % self.version ) + stream.write('# crc32 : 0x%04x\n' % self.crc32 ) + stream.write('# ncmds : %u\n' % len(self.commands) ) + stream.write('# duration: %.3fs\n' % self.duration ) + stream.write('\n') + + for line in self.lines: + stream.write( str(line) ) + stream.write('\n') + + + def read (self, filename=None): + """Reads a command sequence from the given filename (defaults to + self.pathname). + """ + if filename is None: + filename = self.pathname + + stream = open(filename, 'rb') + magic = struct.unpack('>H', stream.read(2))[0] + stream.close() + + if magic == Seq.Magic: + self.readBinary(filename) + else: + self.readText(filename) + + + def readBinary (self, filename=None): + """Reads a binary command sequence from the given filename (defaults to + self.pathname). + """ + if filename is None: + filename = self.pathname + + stream = open(filename, 'rb') + magic = struct.unpack('>H', stream.read(2))[0] + self.crc32 = struct.unpack('>I', stream.read(4))[0] + self.seqid = struct.unpack('>H', stream.read(2))[0] + self.version = struct.unpack('>H', stream.read(2))[0] + ncmds = struct.unpack('>H', stream.read(2))[0] + reserved = stream.read(20) + + for n in range(ncmds): + bytes = stream.read(110) + self.lines.append( SeqCmd.decode(bytes, self.cmddict) ) + + + def readText (self, filename=None): + """Reads a text command sequence from the given filename (defaults to + self.pathname). 
+ """ + if filename is None: + filename = self.pathname + + self.header = { } + inBody = False + + with open(filename, 'rt') as stream: + for (lineno, line) in enumerate(stream.readlines()): + stripped = line.strip() + if stripped == '': + continue + elif stripped.startswith('#'): + if not inBody: + self._parseHeader(line, lineno, self.log) + else: + inBody = True + self.lines.append( SeqCmd.parse(line, lineno, self.log, self.cmddict) ) + + if 'seqid' in self.header: + self.seqid = self.header['seqid'] + else: + self.log.error('No sequence id present in header.') + + if 'version' in self.header: + self.version = self.header['version'] + else: + self.log.warning('No version present in header. Defaulting to zero (0).') + self.version = 0 + + + def validate (self): + """Returns True if this Sequence is valid, False otherwise. + Validation error messages are stored in self.messages. + """ + if not os.path.isfile(self.pathname): + self.message.append('Filename "%s" does not exist.') + else: + try: + with open(self.pathname, 'r') as stream: + pass + except IOError: + self.messages.append('Could not open "%s" for reading.' % self.pathname) + + for line in self.commands: + messages = [ ] + if line.cmd and not line.cmd.validate(messages): + msg = 'error: %s: %s' % (line.cmd.name, " ".join(messages)) + self.log.messages.append(msg) + + return len(self.log.messages) == 0 + + + def writeBinary (self, filename=None): + """Writes a binary representation of this sequence to the given filename + (defaults to self.binpath). 
+ """ + if filename is None: + filename = self.binpath + + with open(filename, 'wb') as output: + output.write( struct.pack('>H', Seq.Magic ) ) + output.write( struct.pack('>I', 0 ) ) + output.write( struct.pack('>H', self.seqid ) ) + output.write( struct.pack('>H', self.version ) ) + output.write( struct.pack('>H', len(self.commands) ) ) + + pad = struct.pack('B', 0) + for n in range(20): + output.write(pad) + + for line in self.lines: + output.write( line.encode() ) + + self.crc32 = util.crc32File(filename, 6) + + with open(filename, 'r+b') as output: + output.seek(2) + output.write( struct.pack('>I', self.crc32) ) + + + def writeText (self, filename=None): + """Writes a text representation of this sequence to the given filename + (defaults to self.txtpath). + """ + if filename is None: + filename = self.txtpath + + with open(filename, 'wt') as output: + self.printText(output) + + + +class SeqPos (object): + """SeqPos - Sequence Position + + Each SeqAtom contains a SeqPos to locate the atom within the text + sequence. + """ + + def __init__ (self, line=None, lineno=1, start=1, stop=None): + """Creates a new SeqPos from the given line in the sequence and start + and stop line and character positions within the line. + """ + if line is None: + line = '' + start = 0 + stop = 0 + + self.line = line + self.lineno = lineno + self.col = slice(start, stop or len(self.line)) + + + def __str__ (self): + """Returns this SeqPos as a string.""" + return str(self.lineno) + ':' + str(self.col.start) + ':' + + + +class SeqAtom (object): + """SeqAtom - Sequence Atom + + Sequence atoms are the smallest unit of a sequence. This class + serves as a base class for specific parts of a sequence, + e.g. header, comments, commands, attributes, and meta-commands. 
+ """ + + def __init__ (self, pos=None): + """Creates a new SeqAtom with the given SeqPos.""" + self.pos = pos or SeqPos() + + + def __str__ (self): + """Returns this SeqAtom as a string.""" + result = '' + if len(self.pos.line) is not None: + result = self.pos.line[self.pos.col.start - 1:self.pos.col.stop] + return result + + + @classmethod + def decode (cls, bytes, cmddict=None): + """Decodes an SeqAtom from an array of bytes, according to the given + command dictionary, and returns a new SeqAtom. + """ + return cls() + + + def encode (self): + """Encodes this SeqAtom to binary and returns a bytearray.""" + return bytearray() + + + @classmethod + def parse (cls, line, lineno, log, cmddict=None): + """Parses the SeqAtom from a line of text, according to the given + command dictionary, and returns a new SeqAtom or None. Warning + and error messages are logged via the SeqMsgLog log. + """ + return cls(line) + + + def validate (self, log): + """Returns True if this SeqAtom is valid, False otherwise. Warning + and error messages are logged via the SeqMsgLog log. + """ + return True + + + +class SeqCmd (SeqAtom): + """SeqCmd - Sequence Command + + Each SeqCmd contains a command, a relative time delay, and command + attributes. New SeqCmds may be created with an explicit cmd.Cmd and + decimal delay (SeqCmd()), via an array of bytes (decode()), or a + line of text (parse()). 
+ """ + + def __init__ (self, cmd, delay=0.000, attrs=None, comment=None, pos=None): + """Creates a new SeqCmd.""" + super(SeqCmd, self).__init__(pos) + self.cmd = cmd + self.delay = delay + self.attrs = attrs + self.comment = comment + + + def __str__ (self): + s = '%s\t%s' % (str(self.delay), str(self.cmd)) + + if self.attrs: + s += '\t%s' % str(self.attrs) + + if self.comment: + s += '\t%s' % str(self.comment) + + return s + + + @classmethod + def decode (cls, bytes, cmddict): + """Decodes a sequence command from an array of bytes, according to the + given command dictionary, and returns a new SeqCmd. + """ + attrs = SeqCmdAttrs.decode(bytes[0:1]) + delay = SeqDelay .decode(bytes[1:4]) + cmd = cmddict .decode(bytes[4:] ) + return cls(cmd, delay, attrs) + + + def encode (self): + """Encodes this SeqCmd to binary and returns a bytearray.""" + return self.attrs.encode() + self.delay.encode() + self.cmd.encode() + + + @classmethod + def parse (cls, line, lineno, log, cmddict): + """Parses the sequence command from a line of text, according to the + given command dictionary, and returns a new SeqCmd. + """ + delay = SeqDelay .parse(line, lineno, log, cmddict) + attrs = SeqCmdAttrs.parse(line, lineno, log, cmddict) + comment = SeqComment .parse(line, lineno, log, cmddict) + stop = len(line) + + if comment: + stop = comment.pos.col.start - 1 + + if attrs and attrs.pos.col.start != -1: + stop = attrs.pos.col.start - 1 + + tokens = line[:stop].split() + name = tokens[1] + args = tokens[2:] + start = line.find(name) + pos = SeqPos(line, lineno, start + 1, stop) + + if name not in cmddict: + log.error('Unrecognized command "%s".' % name, pos) + elif cmddict[name].nargs != len(args): + msg = 'Command argument size mismatch: expected %d, but encountered %d.' 
+ log.error(msg % (cmddict[name].nargs, len(args)), pos) + + args = [ util.toNumber(a, a) for a in args ] + cmd = cmddict.create(name, *args) + + return cls(cmd, delay, attrs, comment, pos) + + + +class SeqCmdAttrs (SeqAtom): + """SeqCmdAttrs - Sequence Command Attributes + + Each sequence command may be annotated with attributes following the + command by using the following syntax: + + { name: value, ... } + + """ + + Table = [ + # Bit Name Value0 Value1 Default + # --- ------------- -------- ----------- --------- + [ 7, 'OnError' , 'Halt' , 'Continue', 'Halt' ], + [ 6, 'Attribute6', 'Value0', 'Value1' , 'Value0' ], + [ 5, 'Attribute5', 'Value0', 'Value1' , 'Value0' ], + [ 4, 'Attribute4', 'Value0', 'Value1' , 'Value0' ], + [ 3, 'Attribute3', 'Value0', 'Value1' , 'Value0' ], + [ 2, 'Attribute2', 'Value0', 'Value1' , 'Value0' ], + [ 1, 'Attribute1', 'Value0', 'Value1' , 'Value0' ], + [ 0, 'Attribute0', 'Value0', 'Value1' , 'Value0' ] + ] + + + def __init__ (self, attrs=None, pos=None): + """Creates a new SeqCmdAttrs.""" + super(SeqCmdAttrs, self).__init__(pos) + self.attrs = attrs or { } + + + def __str__ (self): + """Returns this SeqCmdAttrs as a string.""" + if len(self.attrs) > 0: + return '{ %s }' % ', '.join(': '.join(item) for item in self.attrs.items()) + else: + return '' + + + @property + def default (self): + """The default sequence command attributes (as an integer).""" + byte = 0 + for bit, name, value0, value1, default in SeqCmdAttrs.Table: + if default == value1: + byte = setBit(byte, bit, 1) + return byte + + + @classmethod + def decode (cls, bytes, cmddict=None): + """Decodes sequence command attributes from an array of bytes and + returns a new SeqCmdAttrs. 
+ """ + byte = struct.unpack('B', bytes)[0] + self = cls() + defval = self.default + + for bit, name, value0, value1, default in SeqCmdAttrs.Table: + mask = 1 << bit + bitset = mask & byte + defset = mask & defval + if bitset != defset: + if bitset: + self.attrs[name] = value1 + else: + self.attrs[name] = value0 + + return self + + + def encode (self): + """Encodes this SeqCmdAttrs to binary and returns a bytearray.""" + byte = self.default + + for bit, name, value0, value1, default in SeqCmdAttrs.Table: + if name in self.attrs: + value = self.attrs[name] + byte = setBit(byte, bit, value == value1) + + return struct.pack('B', byte) + + + @classmethod + def parse (cls, line, lineno, log, cmddict=None): + """Parses a SeqCmdAttrs from a line of text and returns it or None. + Warning and error messages are logged via the SeqMsgLog log. + """ + start = line.find('{') + stop = line.find('}') + pos = SeqPos(line, lineno, start + 1, stop) + result = cls(None, pos) + + if start >= 0 and stop >= start: + attrs = { } + pairs = line[start + 1:stop].split(',') + + for item in pairs: + ncolons = item.count(':') + if ncolons == 0: + log.error('Missing colon in command attribute "%s".' % item, pos) + elif ncolons > 1: + log.error('Too many colons in command attribute "%s".' % item, pos) + else: + name, value = (s.strip() for s in item.split(':')) + attrs[name] = value + + result = cls(attrs, pos) + + elif start != -1 or stop != -1: + log.error('Incorrect command attribute curly brace placement.', pos) + + return result + + + +class SeqComment (SeqAtom): + """SeqComment - Sequence Comment + + Sequence comments are parsed for completeness, but are ignored when + translating a sequence to its binary representation. + """ + + def __init__ (self, comment, pos=None): + """Creates a new SeqComment.""" + super(SeqComment, self).__init__(pos) + self.comment = comment + + + + @classmethod + def parse (cls, line, lineno, log, cmddict=None): + """Parses the SeqComment from a line of text. 
Warning and error + messages are logged via the SeqMsgLog log. + """ + start = line.find('#') + pos = SeqPos(line, lineno, start + 1, len(line)) + result = None + + if start >= 0: + result = cls(line[start:], pos) + + return result + + +class SeqDelay (SeqAtom): + """SeqDelay - Sequence Delay + + Sequence lines begin with a decimal relative time delay. + """ + + def __init__ (self, delay=0.000, pos=None): + """Creates a new SeqDelay with the given relative time delay.""" + super(SeqDelay, self).__init__(pos) + self.delay = delay + + + def __str__ (self): + """Returns this SeqDelay as a string.""" + return '%.3f' % self.delay + + + @classmethod + def decode (cls, bytes, cmddict=None): + """Decodes a sequence delay from an array of bytes, according to the + given command dictionary, and returns a new SeqDelay. + """ + delay_s = struct.unpack('>H', bytes[0:2])[0] + delay_ms = struct.unpack('B' , bytes[2:3])[0] + return cls(delay_s + (delay_ms / 255.0)) + + + def encode (self): + """Encodes this SeqDelay to a binary bytearray.""" + delay_s = int( math.floor(self.delay) ) + delay_ms = int( (self.delay - delay_s) * 255.0 ) + return struct.pack('>H', delay_s) + struct.pack('B', delay_ms) + + + @classmethod + def parse (cls, line, lineno, log, cmddict=None): + """Parses the SeqDelay from a line of text. Warning and error + messages are logged via the SeqMsgLog log. + """ + delay = -1 + token = line.split()[0] + start = line.find(token) + pos = SeqPos(line, lineno, start + 1, start + len(token)) + + try: + delay = float(token) + except ValueError: + msg = 'String "%s" could not be interpreted as a numeric time delay.' + log.error(msg % token, pos) + + return cls(delay, pos) + + + def validate (self, log): + """Returns True if this SeqDelay is valid, False otherwise. Warning + and error messages are logged via the SeqMsgLog log. 
+ """ + return self.delay >= 0 + + + +class SeqMetaCmd (SeqAtom): + """SeqMetaCmd - Sequence Meta-Command + + Sequence meta-commands are parsed and executed locally, but are + ignored when translating a sequence to its binary representation. + """ + + def __init__ (self, metacmd, pos=None): + """Creates a new SeqMetaCmd.""" + super(SeqMetaCmd, self).__init__(pos) + self.metacmd = metacmd + + + @classmethod + def parse (cls, line, lineno, log, cmddict=None): + """Parses the SeqMetaCmd from a line of text. Warning and error + messages are logged via the SeqMsgLog log. + """ + start = line.find('%') + pos = SeqPos(line, lineno, start + 1, len(line)) + result = None + + if start >= 0: + result = cls(line[start:], pos) + + return result + + + +class SeqMsgLog (object): + """SeqMsgLog - Sequence Message Log + + SeqMsgLog logs warning and errors encountered during sequence + parsing and validation. + """ + + def __init__ (self, filename=None): + """Creates a new SeqMsgLog pertaining to the given sequence filename.""" + self.messages = [ ] + self.filename = filename + + + def error (self, msg, pos=None): + """Logs an error message pertaining to the given SeqPos.""" + self.log(msg, 'error: ' + self.location(pos)) + + + def location (self, pos): + """Formats the location of the given SeqPos as: + + filename:line:col: + """ + result = '' + if self.filename: + result += self.filename + ':' + if pos: + result += str(pos) + return result + + + def log (self, msg, prefix=None): + """Logs a message with an optional prefix.""" + if prefix: + if not prefix.strip().endswith(':'): + prefix += ': ' + msg = prefix + msg + self.messages.append(msg) + + + def warning (self, msg, pos=None): + """Logs a warning message pertaining to the given SeqAtom.""" + self.log(msg, 'warning: ' + self.location(pos)) diff --git a/bliss/core/table.py b/bliss/core/table.py new file mode 100644 index 00000000..7996e454 --- /dev/null +++ b/bliss/core/table.py @@ -0,0 +1,720 @@ +# Advanced Multi-Mission 
class FSWColDefn (object):
    """FSWColDefn - Column Definition

    Column Definitions encapsulate all information required to define a
    single table column.  This includes the column name, its
    description, units, type, byte position within a command, name-value
    enumerations, and allowed value ranges.  Name, type, and byte
    position are required.  All others are optional.
    """
    # BUGFIX: "_values" was missing from __slots__, so the `values`
    # property below raised AttributeError on every access (slotted
    # classes have no __dict__ to fall back on).
    __slots__ = [
        "name", "_format", "_type", "_units", "_items", "_enum", "_bytes",
        "_values"
    ]

    def __init__ (self, *args, **kwargs):
        """Creates a new Column Definition.

        Each slot may be supplied as a keyword argument (without the
        leading underscore); unsupplied slots default to None and are
        normalized by the corresponding property setters.
        """
        for slot in self.__slots__:
            name = slot[1:] if slot.startswith("_") else slot
            setattr(self, name, kwargs.get(name, None))


    def __repr__ (self):
        return util.toRepr(self)

    @property
    def enum(self):
        """The column enumeration (name-value mapping), or None."""
        return self._enum

    @enum.setter
    def enum(self, value):
        self._enum = None
        if value is not None:
            self._enum = value

    @property
    def values (self):
        """The column values (empty dict when unset)."""
        return self._values

    @values.setter
    def values (self, value):
        self._values = value if value is not None else { }

    @property
    def format (self):
        """The column printf-style format (empty string when unset)."""
        return self._format

    @format.setter
    def format (self, value):
        self._format = value if value is not None else ''

    @property
    def type (self):
        """The column primitive type name (empty string when unset)."""
        return self._type

    @type.setter
    def type (self, value):
        self._type = value if value is not None else ''

    @property
    def items (self):
        """The column repeat count, or None for a non-repeated column."""
        return self._items

    @items.setter
    def items (self, value):
        self._items = None
        if value is not None:
            self._items = value

    @property
    def units (self):
        """The column units (empty string when unset)."""
        return self._units

    @units.setter
    def units (self, value):
        self._units = value if value is not None else ''

    @property
    def bytes (self):
        """The column byte position: a [first, last] list, a scalar, or None."""
        return self._bytes

    @bytes.setter
    def bytes (self, value):
        self._bytes = None
        if value is not None:
            self._bytes = value
+ """ + self.defn = defn + self.args = args + + + def __repr__ (self): + return self.defn.name + " " + " ".join([str(a) for a in self.args]) + + @property + def coldefns (self): + """The table column definitions.""" + return self.defn.coldefns + + @property + def fswheaderdefns (self): + """The table fsw header definitions.""" + return self.defn.fswheaderdefns + + +def hash_file(filename): + """"This function returns the SHA-1 hash + of the file passed into it""" + + # make a hash object + h = hashlib.sha1() + + # open file for reading in binary mode + with open(filename,'rb') as file: + + # loop till the end of the file + chunk = 0 + while chunk != b'': + # read only 1024 bytes at a time + chunk = file.read(1024) + h.update(chunk) + + # return the hex representation of digest + return h.hexdigest() + + +class FSWTabDefn (object): + """Table Definition + + FSW Table Definitions encapsulate all information required to define a + single column. This includes the column name, its opcode, + subsystem, description and a list of argument definitions. Name and + opcode are required. All others are optional. 
+ """ + __slots__ = ["name", "delimiter", "uptype", "size", "rows", "fswheaderdefns", "coldefns"] + + + def __init__ (self, *args, **kwargs): + """Creates a new Command Definition.""" + for slot in self.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, slot, kwargs.get(name, None)) + + if self.fswheaderdefns is None: + self.fswheaderdefns = [ ] + + if self.coldefns is None: + self.coldefns = [ ] + + + def __repr__ (self): + return util.toRepr(self) + + def toText(self, stream, fswtab_f, verbose, version): + out = "" + + size = os.path.getsize(stream.name) + #print "table name: " + self.name + #print "stream len: " + str(size) + + noentries = 0 + if self.name != "memory": + for fswheaderdef in enumerate(self.fswheaderdefns): + #print "header definition: " + str(fswheaderdef[1]) + fswcoldefn = fswheaderdef[1] + name = fswcoldefn.name + #print "name: " + name + colfmt = str(fswcoldefn.format) + #print "format: " + colfmt + colpk = dtype.get(fswcoldefn.type) + #print "packing: " + str(colpk) + coltype = fswcoldefn.type + #print "type: " + coltype + if isinstance(fswcoldefn.bytes,list): + nobytes = fswcoldefn.bytes[1] - fswcoldefn.bytes[0] + 1 + else: + nobytes = 1 + #print "bytes: " + str(fswcoldefn.bytes) + #print "nobytes: " + str(nobytes) + + strval = "" + if str(colpk) == "PrimitiveType('U8')" and nobytes>1: + strval = "" + for i in range(nobytes): + value = colpk.decode(stream.read(1)) + strval += str(colfmt % value) + #print(colfmt % value) + else: + value = colpk.decode(stream.read(nobytes)) + #print(colfmt % value) + if (str(colpk) != "PrimitiveType('U8')") or (str(colpk) == "PrimitiveType('U8')" and nobytes == 1): + strval = str(colfmt % value) + #print "strval: " + strval + if name == "NUMBER_ENTRIES": + noentries = strval + #print "noentries: " + strval + if self.name != "keep_out_zones" and self.name != "line_of_sight": + # Append the value to table row + out += name+': %s\n' % strval + + if verbose is not None and verbose != 0: 
+ print + print out + #fswtab_f.write(out) + + size = size - 32 + + out = "" + + if self.name.startswith("log_"): + norows = self.rows + #print "norows: " + str(norows) + else: + rowbytes = 0 + items = None + for coldef in enumerate(self.coldefns): + fswcoldefn = coldef[1] + items = fswcoldefn.items + if isinstance(fswcoldefn.bytes,list): + nobytes = fswcoldefn.bytes[1] - fswcoldefn.bytes[0] + 1 + else: + nobytes = 1 + rowbytes = rowbytes + nobytes + #print "Row bytes: " + str(rowbytes) + if items is not None: + rowbytes = rowbytes * items + norows = size / rowbytes + else: + norows = int(noentries) + + #print "norows: " + str(norows) + #print "items: " + str(items) + + if norows == 0: + idx = 1 + for i in range(size): + byte = stream.read(1) + value = binascii.hexlify(byte) + fswtab_f.write(value) + if (idx%2) == 0: + fswtab_f.write(" ") + if (idx%16) == 0: + fswtab_f.write("\n") + idx += 1 + return + + for i in range(norows): + condition = None + #this is how to step into table definitions + for coldef in enumerate(self.coldefns): + #print "column definition: " + str(coldef[1]) + fswcoldefn = coldef[1] + name = fswcoldefn.name + #print "name: " + name + colfmt = str(fswcoldefn.format) + #print "format: " + colfmt + colpk = dtype.get(fswcoldefn.type) + #print "packing: " + str(colpk) + coltype = fswcoldefn.type + #print "type: " + coltype + if isinstance(fswcoldefn.bytes,list): + nobytes = fswcoldefn.bytes[1] - fswcoldefn.bytes[0] + 1 + else: + nobytes = 1 + #print "bytes: " + str(fswcoldefn.bytes) + #print "nobytes: " + str(nobytes) + units = fswcoldefn.units + #print "units: " + units + enum = fswcoldefn.enum + + items = fswcoldefn.items + if items is not None: + #print "items: " + str(items) + for i in range(items): + value = colpk.decode(stream.read(nobytes)) + strval = str(colfmt % value) + out += strval + self.delimiter + else: + strval = "" + if str(colpk) == "PrimitiveType('U8')" and nobytes>1: + strval = "" + for i in range(nobytes): + value = 
colpk.decode(stream.read(1)) + strval += str(colfmt % value) + #print(colfmt % value) + else: + value = colpk.decode(stream.read(nobytes)) + #print(colfmt % value) + + if enum is not None: + if enum is not None: + for enumkey in enumerate(enum.keys()): + #print "enumkey: " + str(enumkey[1]) + ", enumval: " + str(enum[enumkey[1]]) + if enumkey[1] == value: + strval = str(enum[enumkey[1]]) + else: + if units != 'none': + strval = str(colfmt % value) + " " + units + #print "units: " + units + else: + if (str(colpk) != "PrimitiveType('U8')") or (str(colpk) == "PrimitiveType('U8')" and nobytes == 1): + strval = str(colfmt % value) + if self.name == "response" and "CONSTANT" in name and condition > 6: + strval = str('%d' % value) + #print "strval: " + strval + + if self.name == "response" and name == "CONDITION_TYPE": + #print "value: "+str(value) + condition = value + #print "condition: "+str(condition) + + # Append the value to table row + out += strval + self.delimiter + + out = out[:-1] + "\n" + #print + #print out + fswtab_f.write(out) + + # Once we are done appending all the columns to the row + # strip off last comma and append a \r + # Note: Since it is Access, \n alone does not work + return + + def convertValue(self, strval): + try: + return int(strval) + except ValueError: + return float(strval) + + def toBinary(self, tabfile, stream, fswbin_f, verbose, version): + #print "self.name: "+self.name + #print "stream name: "+stream.name + + size = os.path.getsize(stream.name) + #print "stream len: " + str(size) + + #print "self.size: " + str(self.size) + + no_lines = 0 + for line in stream: + no_lines += 1 + stream.seek(0) + + fsw_header = bytearray(32) + sha1 = hash_file(tabfile) + + #version = "0" + if verbose is not None and verbose != 0: + print + print "MAGIC_NUMBER: 0x0c03" + print "UPLOAD_TYPE: "+str(self.uptype) + print "VERSION: "+str(version) + print "NUMBER_ENTRIES: "+str(no_lines) + print "SHA-1: "+sha1 + + fswbin_f.write( struct.pack('>H', 0x0C03 ) ) + 
fswbin_f.write( struct.pack('B', self.uptype ) ) + fswbin_f.write( struct.pack('B', int(version,16)&255 ) ) + if self.name == "memory": + fswbin_f.write( struct.pack('>H', 0 ) ) + else: + fswbin_f.write( struct.pack('>H', no_lines ) ) + fswbin_f.write( struct.pack('>H', 0 ) ) + + data = bytearray(20) + i = 0 + tmpbytes = list(sha1) + for x in range(0, len(sha1)/2): + tmp = ((int(tmpbytes[x],16)&255)<<4) + (int(tmpbytes[x+1],16)&255) + #print "tmp: "+ str(tmp) + data[i] = tmp&0xFF + i += 1 + fswbin_f.write(data) + fswbin_f.write( struct.pack('>I', 0) ) + + for line in stream: + #print line + idx = 0 + line = line.replace("\n","") + allcols = line.split(self.delimiter) + if self.name == "memory": + for val in allcols: + if val != "": + #print "val: "+val + data = bytearray(2) + tmpbytes = list(val) + data[0] = ((int(tmpbytes[0],16)&0xF)<<4) + (int(tmpbytes[1],16)&0xF) + data[1] = ((int(tmpbytes[2],16)&0xF)<<4) + (int(tmpbytes[3],16)&0xF) + #print "tmp byte1: "+ str(int(tmpbytes[0],16)&0xF) + #print "tmp byte2: "+ str(int(tmpbytes[1],16)&0xF) + #print "tmp byte3: "+ str(int(tmpbytes[2],16)&0xF) + #print "tmp byte4: "+ str(int(tmpbytes[3],16)&0xF) + fswbin_f.write(data) + else: + idx = 0 + #this is how to step into table definitions + for coldef in enumerate(self.coldefns): + #print "column definition: " + str(coldef[1]) + fswcoldefn = coldef[1] + name = fswcoldefn.name + #print "name: " + name + colpk = dtype.get(fswcoldefn.type) + #print "packing: " + str(colpk) + units = fswcoldefn.units + #print "units: " + units + enum = fswcoldefn.enum + + if isinstance(fswcoldefn.bytes,list): + nobytes = fswcoldefn.bytes[1] - fswcoldefn.bytes[0] + 1 + else: + nobytes = 1 + #print "bytes: " + str(fswcoldefn.bytes) + #print "nobytes: " + str(nobytes) + + items = fswcoldefn.items + if items is not None: + #print "items: " + str(items) + for i in range(items): + val = allcols[i] + #print "item col val: "+str(val) + if units != 'none': + val = val.replace(units,"") + val = 
val.replace(" ","") + else: + val = val.replace(" ","") + #print "item col val: "+str(val) + fswbin_f.write(colpk.encode(self.convertValue(val))) + else: + val = allcols[idx] + val = val.replace("\n","") + #print "else col val 1: "+str(val) + if enum is not None: + if enum is not None: + for enumkey in enumerate(enum.keys()): + enumval = enum[enumkey[1]] + #print "enumkey: " + str(enumkey[1]) + ", enumval: " + str(enum[enumkey[1]]) + if enumval == val: + val = str(enumkey[1]) + #print "XXXX colpk.type: "+colpk.format + fswbin_f.write(colpk.encode(self.convertValue(val))) + else: + #print "XXXX colpk.type: "+colpk.format + #print "fswcoldefn.bytes: "+str(fswcoldefn.bytes) + if units != 'none': + val = val.replace(units,"") + val = val.replace(" ","") + #print "else col val 2a: "+str(val) + fswbin_f.write(colpk.encode(self.convertValue(val))) + #fswbin_f.write(colpk.encode(float(val))) + elif str(colpk) == "PrimitiveType('U8')" and nobytes>1: + strval = "" + for c in list(val): + tmp = (int(c,16))&255 + #print "tmp: "+str(tmp) + fswbin_f.write(colpk.encode(tmp)) + else: + val = val.replace(" ","") + #print "else col val 2b: "+str(val) + fswbin_f.write(colpk.encode(self.convertValue(val))) + #fswbin_f.write(colpk.encode(float(val))) + idx += 1 + + written = fswbin_f.tell() + #print "written: "+str(written) + + #print str(self.size) + ", " + str(written) + if self.size > written: + padding = bytearray(self.size - (written)) + fswbin_f.write(padding) + + #Now calculate and update CRC field in the FSW header + fswbin_f.close() + fname = fswbin_f.name + crc32 = util.crc32File(fname, 0) + if verbose is not None and verbose != 0: + print "CRC: %x"%crc32 + #print "fname: "+fname+", crc32: "+str(crc32) + fswbin_f = open(fname, 'r+b') + fswbin_f.seek(28) + crcbuf = bytearray(4) + crcbuf[0:4] = struct.pack('>L',crc32) + fswbin_f.write(crcbuf) + + # Once we are done appending all the columns to the row + # strip off last comma and append a \r + # Note: Since it is Access, \n 
class FSWTabDict (dict):
    """Table dictionary object

    Table Dictionaries provide a Python dictionary (i.e. hashtable)
    interface mapping table names to their Table Definitions.
    """
    def __init__ (self, *args, **kwargs):
        """Creates a new Table Dictionary, optionally loaded from the
        given YAML filename.
        """
        self.filename = None
        self.colnames = { }

        # A single string argument is treated as a YAML filename to load;
        # anything else is forwarded to dict() unchanged.
        if len(args) == 1 and len(kwargs) == 0 and type(args[0]) == str:
            dict.__init__(self)
            self.load(args[0])
        else:
            dict.__init__(self, *args, **kwargs)

    def add (self, defn):
        """Adds the given Table Definition to this Table Dictionary."""
        self[defn.name] = defn
        self.colnames[defn.name] = defn

    def create (self, name, *args):
        """Creates a new table with the given arguments (None if the
        named definition does not exist).
        """
        defn = self.get(name, None)
        return FSWTab(defn, *args) if defn else None

    def load (self, filename):
        """Loads Table Definitions from the given YAML file into this
        Table Dictionary.
        """
        if self.filename is None:
            self.filename = filename

        with open(self.filename, "rb") as stream:
            for doc in yaml.load_all(stream):
                for table in doc:
                    self.add(table)


class FSWTabDictCache (object):
    """Caches the parsed table dictionary as a pickle beside its YAML
    source, rebuilding whenever the YAML is newer.
    """
    def __init__ (self, filename=None):
        if filename is None:
            default  = os.path.join(os.path.dirname(__file__),
                                    "../../config/table.yaml")
            filename = os.path.abspath(default)

        self.filename   = filename
        self.pcklname   = os.path.splitext(filename)[0] + '.pkl'
        self.fswtabdict = None

    def dirty (self):
        """True when the pickle is missing or older than the YAML source."""
        if not os.path.exists(self.pcklname):
            return True
        return os.path.getmtime(self.filename) > os.path.getmtime(self.pcklname)

    def load (self):
        """Returns the (cached) table dictionary, parsing YAML or reading
        the pickle as appropriate.
        """
        if self.fswtabdict is not None:
            return self.fswtabdict

        if self.dirty():
            self.fswtabdict = FSWTabDict(self.filename)
            self.update()
        else:
            with open(self.pcklname, "rb") as stream:
                self.fswtabdict = cPickle.load(stream)

        return self.fswtabdict

    def update (self):
        """Writes the freshly-parsed dictionary back to the pickle."""
        msg = "Saving updates from more recent '%s' to '%s'"
        log.info(msg, self.filename, self.pcklname)
        with open(self.pcklname, "wb") as output:
            cPickle.dump(self.fswtabdict, output, -1)


_DefaultFSWTabDictCache = FSWTabDictCache()


def getDefaultFSWTabDict ():
    """Returns the default (cached) table dictionary, or None when it
    cannot be read.
    """
    fswtabdict = None
    filename   = None
    try:
        filename   = _DefaultFSWTabDictCache.filename
        fswtabdict = _DefaultFSWTabDictCache.load()
    except IOError as e:
        msg = "Could not load default command dictionary '%s': %s'"
        log.error(msg, filename, str(e))

    return fswtabdict


def YAMLCtor_FSWColDefn (loader, node):
    """YAML constructor for !FSWColumn nodes."""
    return FSWColDefn(**loader.construct_mapping(node, deep=True))


def YAMLCtor_FSWTabDefn (loader, node):
    """YAML constructor for !FSWTable nodes (maps the YAML 'header' and
    'columns' keys onto the definition's slot names).
    """
    fields = loader.construct_mapping(node, deep=True)
    fields['fswheaderdefns'] = fields.pop('header', None)
    fields['coldefns']       = fields.pop('columns', None)
    return FSWTabDefn(**fields)
def writeToText (fswtabdict, tabletype, binfile, verbose, version, outpath='../output/', messages=None):
    """Decodes the binary table file `binfile` and writes text rendition(s)
    into `outpath`.

    For tabletype "log", all four log tables (log_main, log_isr, log_evr,
    log_assert) are decoded in order from the same binary stream; any
    other tabletype decodes the single named table.

    :param fswtabdict: table dictionary mapping names to FSWTabDefn
    :param tabletype:  table name, or "log" for the log-table group
    :param binfile:    path to the binary table file to decode
    :param verbose:    when truthy, definitions also print diagnostics
    :param version:    integer version used in output filenames
    :param outpath:    output directory (created if missing)
    :param messages:   unused; kept for interface compatibility
    """
    verStr = '%02d' % version

    if not os.path.isdir(outpath):
        os.makedirs(outpath)

    stream = open(binfile, 'rb')

    if tabletype != "log":
        fswtabdefn = fswtabdict.get(tabletype)
        fswtab_f = open(outpath + '/' + tabletype + '_table' + verStr + '.txt', 'w')
        fswtabdefn.toText(stream, fswtab_f, verbose, version)
        fswtab_f.close()
    else:
        # The log tables are decoded sequentially from one stream, in
        # their on-board order.
        # BUGFIX: the original omitted the required `version` argument
        # for log_main/log_isr/log_evr, so toText() raised TypeError.
        for logname in ('log_main', 'log_isr', 'log_evr', 'log_assert'):
            fswtabdefn = fswtabdict.get(logname)
            fswtab_f = open(outpath + '/' + logname + '_table' + verStr + '.txt', 'w')
            fswtabdefn.toText(stream, fswtab_f, verbose, version)
            fswtab_f.close()

    # close input file
    stream.close()


def writeToBinary (fswtabdict, tabletype, tabfile, verbose, outpath='../output/', messages=None):
    """Encodes the text table file `tabfile` into its FSW binary form in
    `outpath`.

    :param fswtabdict: table dictionary mapping names to FSWTabDefn
    :param tabletype:  name of the table definition to use
    :param tabfile:    path to the text table (named '..._tableNN.ext')
    :param verbose:    when truthy, definitions also print diagnostics
    :param outpath:    output directory (created if missing)
    :param messages:   unused; kept for interface compatibility
    """
    fswtabdefn = fswtabdict.get(tabletype)

    if not os.path.isdir(outpath):
        os.makedirs(outpath)

    # Extract the version from e.g. ".../foo_table07.txt".
    # BUGFIX: search for the '.' only AFTER "_table" — the original used
    # index(".", 0), which broke for any path containing an earlier dot
    # (e.g. "./out/foo_table07.txt" yielded an empty version).
    start   = tabfile.index("_table") + 6
    version = tabfile[start:tabfile.index(".", start)]

    fswbin_f = open(outpath + '/' + tabletype + '_table' + str(version) + '.bin', 'wb')
    stream   = open(tabfile, 'r')

    fswtabdefn.toBinary(tabfile, stream, fswbin_f, verbose, version)

    # close input and output files
    stream.close()
    fswbin_f.close()
+ """ + logging.getLogger('bliss').setLevel(logging.INFO) + + +class TestFile: + """TestFile + + TestFile is a Python Context Manager for quickly creating test + data files that delete when a test completes, either successfully + or unsuccessfully. + + Example: + + with TestFile(data) as filename: + # filename (likely something like '/var/tmp/tmp.1.uNqbVJ') now + # contains data. + assert load(filename) + + Whether the above assert passes or throws AssertionError, filename + will be deleted. + """ + + def __init__ (self, data): + """Creates a new TestFile and writes data to a temporary file.""" + self._filename = None + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self._filename = os.tmpnam() + + with open(self._filename, 'wt') as output: + output.write(data) + + + def __enter__ (self): + """Enter the runtime context and return filename.""" + return self._filename + + + def __exit__ (self, exc_type, exc_value, traceback): + """Exit the runtime context and delete filename.""" + os.remove(self._filename) diff --git a/bliss/core/test/test_bsc.py b/bliss/core/test/test_bsc.py new file mode 100644 index 00000000..624d93c9 --- /dev/null +++ b/bliss/core/test/test_bsc.py @@ -0,0 +1,542 @@ +#!/usr/bin/env python2.7 + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. 
# User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.

import gevent.monkey
gevent.monkey.patch_all()

import datetime
import logging
import mock
import os
import platform
import socket
import time

import gevent
import nose
import nose.tools

from bliss.core import bsc, pcap


class TestSocketStreamCapturer(object):
    """Tests for bsc.SocketStreamCapturer with mocked sockets and loggers."""

    @mock.patch('gevent.socket.socket')
    def test_mocked_udp_socket(self, socket_mock):
        handler = {'name': 'name', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer([handler], ['', 9000], 'udp')
        socket_mock.assert_called_with(gevent.socket.AF_INET,
                                       gevent.socket.SOCK_DGRAM)
        assert sl.conn_type == 'udp'

    @mock.patch('gevent.socket')
    def test_mocked_eth_socket(self, socket_mock):
        socket_family = getattr(gevent.socket,
                                'AF_PACKET',
                                gevent.socket.AF_INET)
        proto = bsc.ETH_PROTOCOL
        handler = {'name': 'name', 'log_dir': '/tmp'}
        bsc.RAW_SOCKET_FD = 'foobar'
        sl = bsc.SocketStreamCapturer([handler], ['eho0', 0], 'ethernet')
        # We need to test a different load if the rawsocket package is used
        if not bsc.RAW_SOCKET_FD:
            socket_mock.socket.assert_called_with(socket_family,
                                                  gevent.socket.SOCK_RAW,
                                                  socket.htons(proto))
        else:
            socket_mock.fromfd.assert_called_with(bsc.RAW_SOCKET_FD,
                                                  socket_family,
                                                  gevent.socket.SOCK_RAW,
                                                  socket.htons(proto))
        assert sl.conn_type == 'ethernet'
        bsc.RAW_SOCKET_FD = None

    @mock.patch('gevent.socket')
    def test_mocked_eth_socket_with_rawsocket(self, socket_mock):
        socket_family = getattr(gevent.socket,
                                'AF_PACKET',
                                gevent.socket.AF_INET)

        # Force the rawsocket code path if the package is not installed,
        # restoring module state afterwards.
        rawsocket_is_installed = bool(bsc.RAW_SOCKET_FD)
        if not rawsocket_is_installed:
            rawsocket_fd = 'fake_rawsocket_fd'
            bsc.RAW_SOCKET_FD = rawsocket_fd
        else:
            rawsocket_fd = bsc.RAW_SOCKET_FD

        handler = {'name': 'name', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer([handler], ['eho0', 0], 'ethernet')
        # We need to test a different load if the rawsocket package is used
        socket_mock.fromfd.assert_called_with(rawsocket_fd,
                                              socket_family,
                                              gevent.socket.SOCK_RAW,
                                              socket.htons(bsc.ETH_PROTOCOL))
        assert sl.conn_type == 'ethernet'

        if not rawsocket_is_installed:
            bsc.RAW_SOCKET_FD = None

    @mock.patch('bliss.core.pcap.PCapStream')
    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_packet_log(self, socket_mock, pcap_open_mock, pcap_stream_mock):
        socket_mock.return_value = mock.MagicMock()
        pcap_open_mock.return_value = pcap.PCapStream()

        # Verify UDP packet log
        handler = {'name': 'name', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer([handler], ['', 9000], 'udp')
        logger = sl.capture_handlers[0]['logger']
        sl.socket.recv.return_value = 'udp_data'
        sl.capture_packet()

        sl.socket.recv.assert_called_with(sl._buffer_size)
        logger.write.assert_called_with('udp_data')

        # Verify Ethernet log
        sl = bsc.SocketStreamCapturer([handler], ['etho0', 0], 'ethernet')
        logger = sl.capture_handlers[0]['logger']
        logger.write.reset_mock()
        sl.socket.recv.return_value = 'eth_data'
        sl.capture_packet()
        logger.write.assert_called_with('eth_data')

    @mock.patch('bliss.core.pcap.PCapStream')
    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_packet_log_mutliple_handlers(self, socket_mock, pcap_open_mock,
                                          pcap_stream_mock):
        h1 = {'name': 'h1', 'log_dir': '/tmp'}
        h2 = {'name': 'h2', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer([h1, h2], ['', 9000], 'udp')

        sl.capture_handlers[0]['logger'] = mock.MagicMock()
        sl.capture_handlers[1]['logger'] = mock.MagicMock()
        logger1 = sl.capture_handlers[0]['logger']
        logger2 = sl.capture_handlers[1]['logger']
        sl.socket.recv.return_value = 'udp_data'
        sl.capture_packet()

        # Each registered handler gets exactly one write per captured packet.
        assert logger1.write.call_count == 1
        assert logger2.write.call_count == 1

    @mock.patch('bliss.core.pcap.PCapStream')
    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_capture_with_data_manip(self, socket_mock, pcap_open_mock,
                                     pcap_stream_mock):
        transform_mock = mock.Mock(side_effect=['transformed data'])
        handler = {
            'name': 'name',
            'log_dir': '/tmp',
            'pre_write_transforms': [transform_mock]
        }
        sl = bsc.SocketStreamCapturer([handler], ['', 9000], 'udp')
        logger = sl.capture_handlers[0]['logger']
        sl.socket.recv.return_value = 'udp_data'
        sl.capture_packet()

        # The transform runs and its output (not the raw data) is logged.
        assert transform_mock.called
        logger.write.assert_called_with('transformed data')

    @mock.patch('gevent.socket.socket')
    def test_logger_conf_dump(self, socket_mock):
        handler = {'name': 'name', 'log_dir': '/tmp', 'rotate_log': True}
        addr = ['', 9000]
        conn_type = 'udp'

        sl = bsc.SocketStreamCapturer(handler, addr, conn_type)
        conf_dump = sl.dump_handler_config_data()

        handler = sl.capture_handlers[0]
        expected_log_file_path = sl._get_log_file(handler)

        assert len(conf_dump) == 1
        assert conf_dump[0]['handler']['name'] == 'name'
        assert conf_dump[0]['handler']['log_dir'] == '/tmp'
        assert conf_dump[0]['handler']['rotate_log']
        assert conf_dump[0]['log_file_path'] == expected_log_file_path
        assert conf_dump[0]['conn_type'] == conn_type
        assert conf_dump[0]['address'] == addr

    @mock.patch('gevent.socket.socket')
    def test_handler_stat_dump(self, socket_mock):
        handler = {'name': 'name', 'log_dir': '/tmp', 'rotate_log': True}
        addr = ['', 9000]
        conn_type = 'udp'

        sl = bsc.SocketStreamCapturer(handler, addr, conn_type)
        handler = sl.capture_handlers[0]
        # Pretend the handler has been alive for an hour so the data rate
        # denominator is non-zero.
        new_date = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
        handler['log_rot_time'] = new_date.timetuple()

        stats = sl.dump_all_handler_stats()

        assert len(stats) == 1
        assert stats[0]['name'] == 'name'
        assert stats[0]['reads'] == 0
        assert stats[0]['data_read_length'] == '0 bytes'
        assert stats[0]['approx_data_rate'] == '0.0 bytes/second'

        handler['reads'] = 2
        handler['data_read'] = 27
        stats = sl.dump_all_handler_stats()

        assert len(stats) == 1
        assert stats[0]['name'] == 'name'
        assert stats[0]['reads'] == 2
        assert stats[0]['data_read_length'] == '27 bytes'
        data_rate = float(stats[0]['approx_data_rate'].split(' ')[0])
        assert data_rate != 0.0

    @mock.patch('gevent.socket.socket')
    def test_should_rotate_log(self, socket_mock):
        handler = {'name': 'name', 'log_dir': '/tmp', 'rotate_log': True}
        sl = bsc.SocketStreamCapturer(handler, ['', 9000], 'udp')
        h = sl.capture_handlers[0]
        assert not sl._should_rotate_log(h)

        sl = bsc.SocketStreamCapturer(handler, ['', 9000], 'udp')
        h = sl.capture_handlers[0]
        new_date = datetime.datetime.now() - datetime.timedelta(days=1)
        h['log_rot_time'] = new_date.timetuple()
        assert sl._should_rotate_log(h)

    @mock.patch('gevent.socket.socket')
    def test_custon_log_rotation(self, socket_mock):
        handler = {
            'name': 'name',
            'log_dir': '/tmp',
            'rotate_log': True,
            'rotate_log_index': 'months',
            'rotate_log_delta': 2
        }

        sl = bsc.SocketStreamCapturer(handler, ['', 9000], 'udp')
        h = sl.capture_handlers[0]

        assert not sl._should_rotate_log(h)

        # Check the default 1 day log rotation case to make sure our config
        # is being used
        new_date = datetime.datetime.now() - datetime.timedelta(days=1)
        h['log_rot_time'] = new_date.timetuple()
        assert not sl._should_rotate_log(h)

        # Check a working case for our "rotate every 2 months" use case.
        new_date = datetime.datetime.now() - datetime.timedelta(days=62)
        h['log_rot_time'] = new_date.timetuple()
        assert sl._should_rotate_log(h)

    @mock.patch('bliss.core.pcap.PCapStream')
    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_log_rotation(self, socket_mock, pcap_open_mock, pcap_stream_mock):
        pcap_open_mock.return_value = pcap.PCapStream()

        handler = {'name': 'name', 'log_dir': '/tmp', 'rotate_log': True}
        sl = bsc.SocketStreamCapturer(handler, ['', 9000], 'udp')
        handler = sl.capture_handlers[0]

        log_path = sl._get_log_file(handler)
        pcap_open_mock.assert_called_with(log_path, mode='a')

        # New name so our open call changes from above. This means we can
        # ensure that the log rotation opens a new logger as expected.
        sl_new_name = 'newname'
        handler['name'] = sl_new_name
        # We expect the rotation to set the last log rotation time. To test
        # we'll set it to None and expect it to be set after
        handler['log_rot_time'] = None

        sl._rotate_log(handler)

        # We expect the log rotation to close the existing logger
        assert pcap_stream_mock.return_value.close.call_count == 1

        # Since we change the name and rotated the log we expect this
        # updated value to be present in the new file name and the call
        # to open the new stream.
        log_path = sl._get_log_file(handler)
        assert sl_new_name in log_path
        pcap_open_mock.assert_called_with(log_path, mode='a')

        assert pcap_open_mock.call_count == 2

        # We expect the rotation to fix our None assignment on the
        # log_rot_time and replace it with a time struct object.
        assert handler['log_rot_time'] is not None
        assert isinstance(handler['log_rot_time'], type(time.gmtime()))

    @mock.patch('gevent.socket.socket')
    def test_time_rotation_index_decoding(self, socket_mock):
        handler = {'name': 'name', 'log_dir': '/tmp', 'rotate_log': True}
        sl = bsc.SocketStreamCapturer(handler, ['', 9000], 'udp')

        # We expect an error when we input a bad time index value
        nose.tools.assert_raises(
            ValueError,
            sl._decode_time_rotation_index,
            'this is not a valid value'
        )

        assert 2 == sl._decode_time_rotation_index('tm_mday')

    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_get_log_file(self, socket_mock, pcap_open_mock):
        h = {
            'name': 'name',
            'log_dir': '/tmp',
            'path': 'foobarbaz/%j/',
            'file_name_pattern': 'extrafolder/%j/%Y-%m-%d-%H-%M-randomUDPtestData-{name}.pcap'
        }
        sl = bsc.SocketStreamCapturer(h, ['', 9000], 'udp')
        handler = sl.capture_handlers[0]

        # Check log path generation with user specified handler-specific path
        # and file_name_pattern. This includes strftime substitution and
        # handler metadata substitution.
        log_path = sl._get_log_file(handler)
        expected_path = os.path.join(h['log_dir'], h['path'],
                                     h['file_name_pattern'])
        expected_path = time.strftime(expected_path, time.gmtime())
        expected_path = expected_path.format(**handler)
        assert log_path == expected_path

        h = {'name': 'name', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer(h, ['', 9000], 'udp')
        handler = sl.capture_handlers[0]

    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_get_logger(self, socket_mock, pcap_open_mock):
        handler = {'name': 'name', 'log_dir': '/tmp', 'rotate_log': True}
        sl = bsc.SocketStreamCapturer(handler, ['', 9000], 'udp')
        # We expect _get_logger to generate the file path for the PCapStream
        # and call the bliss.core.pcap.open static function to generate the
        # stream.
        handler = sl.capture_handlers[0]
        log_path = sl._get_log_file(handler)
        pcap_open_mock.assert_called_with(log_path, mode='a')

    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_add_handler(self, socket_mock, pcap_open_mock):
        h1 = {'name': 'h1', 'log_dir': '/tmp'}
        h2 = {'name': 'h2', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer(h1, ['', 9000], 'udp')

        assert len(sl.capture_handlers) == 1
        sl.add_handler(h2)
        assert len(sl.capture_handlers) == 2
        assert pcap_open_mock.call_count == 2

    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_remove_handler(self, socket_mock, pcap_open_mock):
        h1 = {'name': 'h1', 'log_dir': '/tmp'}
        h2 = {'name': 'h2', 'log_dir': '/tmp'}
        sl = bsc.SocketStreamCapturer([h1, h2], ['', 9000], 'udp')

        assert len(sl.capture_handlers) == 2
        sl.remove_handler('h2')
        assert len(sl.capture_handlers) == 1
        assert sl.capture_handlers[0]['name'] == 'h1'


class TestStreamCaptureManager(object):
    """Tests for bsc.StreamCaptureManager with a mocked SocketStreamCapturer."""

    @mock.patch('bliss.core.bsc.SocketStreamCapturer')
    def test_log_manager_init(self, socket_log_mock):
        loggers = [
            ('foo', ['', 9000], 'udp', '/tmp', {'rotate_log': True}),
            ('bar', ['', 8125], 'udp', '/tmp', {}),
        ]
        fake_mngr_conf = 'mngr_conf'
        lm = bsc.StreamCaptureManager(fake_mngr_conf, loggers)

        assert lm._mngr_conf == fake_mngr_conf
        assert len(lm._stream_capturers.keys()) == 2
        assert "['', 9000]" in lm._stream_capturers.keys()
        assert "['', 8125]" in lm._stream_capturers.keys()

    @mock.patch('os.makedirs')
    @mock.patch('bliss.core.bsc.SocketStreamCapturer')
    def test_add_logger(self, socket_log_mock, mkdirs_mock):
        mngr_conf = {'root_log_directory': '/totally/a/../fake/dir/../name'}
        # We'll use this to make sure directory paths are cleaned
        cleaned_dir_path = os.path.normpath(mngr_conf['root_log_directory'])

        lm = bsc.StreamCaptureManager(mngr_conf, [])
        lm.add_logger('foo', ['', 9000], 'udp', '/tmp')

        assert len(lm._stream_capturers.keys()) == 1
        assert "['', 9000]" in lm._stream_capturers

        # Default root_log_directory usage and normalization check
        lm.add_logger('baz', ['', 8500], 'udp')
        socket_log_mock.assert_called_with(
            {
                'log_dir': cleaned_dir_path,
                'name': 'baz',
                'rotate_log': True,
                'pre_write_transforms': [],
            }, ['', 8500], 'udp'
        )
        assert lm._pool.free_count() == 48

        # Check to make sure that home directory expansion is being done
        socket_log_mock.reset_mock()
        lm.add_logger('testlog', ['', 1234], 'udp', '~/logger_dir')
        expanded_user_path = os.path.expanduser('~/logger_dir')
        socket_log_mock.assert_called_with(
            {
                'log_dir': expanded_user_path,
                'name': 'testlog',
                'rotate_log': True,
                'pre_write_transforms': [],
            }, ['', 1234], 'udp'
        )

    @mock.patch('bliss.core.pcap.open')
    @mock.patch('os.makedirs')
    @mock.patch('gevent.socket.socket')
    def test_pre_write_transform_load(self, socket_mock, mkdirs_mock,
                                      pcap_open_mock):
        mngr_conf = {'root_log_directory': '/tmp'}
        lm = bsc.StreamCaptureManager(mngr_conf, [])

        kwargs = {
            'pre_write_transforms': [
                'identity_transform',
                lambda x: 1
            ]
        }
        lm.add_logger('testlog', ['', 9876], 'udp', '~/logger_dir', **kwargs)
        stream_capturer = lm._stream_capturers["['', 9876]"][0]
        handler = stream_capturer.capture_handlers[0]

        assert 'pre_write_transforms' in handler
        assert len(handler['pre_write_transforms']) == 2

        # Both named builtins and user callables must load as callables.
        for t in handler['pre_write_transforms']:
            assert hasattr(t, '__call__')

        assert 'identity_transform' == handler['pre_write_transforms'][0].__name__
        assert 1 == handler['pre_write_transforms'][1]('bogus input')

    @mock.patch('bliss.core.log.warn')
    @mock.patch('bliss.core.pcap.open')
    @mock.patch('os.makedirs')
    @mock.patch('gevent.socket.socket')
    def test_bad_builtin_transform_load(self, socket_mock, mkdirs_mock,
                                        open_mock, log_mock):
        logging.getLogger('bliss').setLevel(logging.INFO)

        mngr_conf = {'root_log_directory': '/tmp'}
        lm = bsc.StreamCaptureManager(mngr_conf, [])

        bad_func_name = 'this function name doesnt exist'
        kwargs = {
            'pre_write_transforms': [
                bad_func_name
            ]
        }
        lm.add_logger('testlog', ['', 9876], 'udp', '~/logger_dir', **kwargs)
        msg = 'Unable to load data transformation "{}" for handler "{}"'.format(
            bad_func_name,
            'testlog'
        )
        log_mock.assert_called_with(msg)

        logging.getLogger('bliss').setLevel(logging.CRITICAL)

    @mock.patch('bliss.core.log.warn')
    @mock.patch('bliss.core.pcap.open')
    @mock.patch('os.makedirs')
    @mock.patch('gevent.socket.socket')
    def test_bad_type_transform_load(self, socket_mock, mkdirs_mock,
                                     open_mock, log_mock):
        logging.getLogger('bliss').setLevel(logging.INFO)

        mngr_conf = {'root_log_directory': '/tmp'}
        lm = bsc.StreamCaptureManager(mngr_conf, [])

        bad_func_name = ('foobarbaz',)
        kwargs = {
            'pre_write_transforms': [
                bad_func_name
            ]
        }
        lm.add_logger('testlog', ['', 9876], 'udp', '~/logger_dir', **kwargs)
        msg = 'Unable to determine how to load data transform "{}"'.format(
            bad_func_name)
        log_mock.assert_called_with(msg)

        logging.getLogger('bliss').setLevel(logging.CRITICAL)

    @mock.patch('bliss.core.bsc.SocketStreamCapturer')
    def test_remove_logger(self, socket_log_mock):
        lm = bsc.StreamCaptureManager(None, [])
        lm.add_logger('foo', ['', 9000], 'udp', '/tmp')

        lm.stop_capture_handler('foo')
        assert mock.call().remove_handler('foo') in socket_log_mock.mock_calls

    @mock.patch('bliss.core.bsc.SocketStreamCapturer')
    def test_get_logger_data(self, socket_log_mock):
        lm = bsc.StreamCaptureManager(None, [])
        with mock.patch('os.mkdir'):
            lm.add_logger('foo', ['', 9000], 'udp', '/tmp')
            lm.add_logger('bar', ['', 8500], 'udp', '/tmp')

        logger_data = lm.get_logger_data()
        # Note we're not going to test content of the returned data because
        # that is handled by SocketStreamCapturer. There is an appropriate
        # test for that in the SocketStreamCapturer section.
        assert len(logger_data.keys()) == 2
        assert "['', 8500]" in logger_data.keys()
        assert "['', 9000]" in logger_data.keys()

    @mock.patch('bliss.core.bsc.SocketStreamCapturer')
    def test_get_logger_stats(self, socket_log_mock):
        lm = bsc.StreamCaptureManager(None, [])
        with mock.patch('os.mkdir'):
            lm.add_logger('foo', ['', 9000], 'udp', '/tmp')
            lm.add_logger('bar', ['', 8500], 'udp', '/tmp')

        logger_data = lm.get_handler_stats()
        # Note we're not going to test content of the returned data because
        # that is handled by SocketStreamCapturer. There is an appropriate
        # test for that in the SocketStreamCapturer section.
        assert len(logger_data.keys()) == 2
        assert "['', 8500]" in logger_data.keys()
        assert "['', 9000]" in logger_data.keys()

    @mock.patch('bliss.core.pcap.open')
    @mock.patch('gevent.socket.socket')
    def test_forced_log_rotation(self, socket_mock, pcap_open_mock):
        """Forcing rotation on a handler opens exactly one new log stream."""
        lm = bsc.StreamCaptureManager(None, [])
        with mock.patch('os.mkdir'):
            lm.add_logger('foo', ['', 9000], 'udp', '/tmp')
            lm.add_logger('bar', ['', 8500], 'udp', '/tmp')

        pre_rot_count = pcap_open_mock.call_count
        lm.rotate_capture_handler_log('bar')
        post_rot_count = pcap_open_mock.call_count
        assert post_rot_count - pre_rot_count == 1

# --- new file: bliss/core/test/test_ccsds.py ---
# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2017, by the California Institute of Technology. ALL
# RIGHTS RESERVED. United States Government Sponsorship
# acknowledged. Any commercial use must be negotiated with the Office
# of Technology Transfer at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws.
# By accepting this software, the user agrees to comply with all
# applicable U.S. export laws and regulations. User has the
# responsibility to obtain export licenses, or other export authority
# as may be required before exporting such information to foreign
# countries or providing access to foreign persons.


import nose

from bliss.core import ccsds


def testCcsdsDefinition():
    """A CcsdsDefinition reports its constructor arguments and defaults."""
    defn = ccsds.CcsdsDefinition(apid=42, length=128)

    assert defn.version == 0
    assert defn.type == 0
    assert defn.secondary is None
    assert defn.shflag == 0
    assert defn.apid == 42
    assert defn.seqflags == 0b11
    assert defn.length == 128


def testCcsdsHeaderDefaults():
    """A default CcsdsHeader is zeroed except seqflags (0b11)."""
    header = ccsds.CcsdsHeader()

    assert header.version == 0
    assert header.type == 0
    assert header.shflag == 0
    assert header.apid == 0
    assert header.raw.seqflags == 0b11
    assert header.seqcount == 0
    assert header.length == 0


def testCcsdsHeaderDecode():
    """Decoding a known 6-byte primary header yields the expected fields."""
    header = ccsds.CcsdsHeader([0x18, 0x2A, 0xC4, 0xD2, 0x16, 0x2E])

    assert header.version == 0
    assert header.type == 1
    assert header.shflag == 1
    assert header.apid == 42
    assert header.raw.seqflags == 0b11
    assert header.seqcount == 1234
    assert header.length == 5678


def testCcsdsHeaderEncode():
    """Setting header fields produces the expected encoded bytes."""
    header = ccsds.CcsdsHeader()

    header.version = 0
    header.type = 1
    header.shflag = 1
    header.apid = 42
    header.seqflags = 0b11
    header.seqcount = 1234
    header.length = 5678

    assert header._data == bytearray([0x18, 0x2A, 0xC4, 0xD2, 0x16, 0x2E])


if __name__ == '__main__':
    nose.main()

# --- new file: bliss/core/test/test_cfg.py ---
#!/usr/bin/env python2.7

# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2015, by the California Institute of Technology.
# ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.

import sys
import os
import time

import nose

from bliss.core import cfg

from . import TestFile


def YAML ():
    """
    # Call YAML() to return the YAML string below, customized for this
    # platform (i.e. OS) and hostname.

    default:
        ISS:
            apiport: 9090
            bticard: 0
            desc: ISS PL/MDM Simulator
            path: bin/bliss-orbits
            rtaddr: 15

        data:
            test1:
                path: /gds/%Y/%Y-%j/test1
            test2:
                path: /gds/%Y/%Y-%j/test2

    PLATFORM:
        ISS:
            bticard: 6

    HOSTNAME:
        ISS:
            apiport: 1234
    """
    # NOTE(review): this docstring is parsed as YAML at runtime; its exact
    # indentation was reconstructed from a collapsed source -- confirm it
    # matches the original file.
    s = YAML.__doc__
    return s.replace('PLATFORM', platform()).replace('HOSTNAME', hostname())


def hostname ():
    # Local import so this helper shadows neither the module-level platform()
    # helper below nor the stdlib module for the rest of the file.
    import platform
    return platform.node().split('.')[0]


def platform ():
    return sys.platform


def test_expandConfigPaths ():
    prefix = os.path.join('/', 'bliss')
    actual = {
        'desc'    : 'Test cfg.expandConfigPaths()',
        'file'    : os.path.join('bin', 'bliss-orbits'),
        'filename': os.path.join('bin', 'bliss-orbits'),
        'nested'  : {
            'desc'    : 'Test expansion of nested dictionaries too',
            'file'    : os.path.join('bin', 'bliss-cmd-send'),
            'filename': os.path.join('bin', 'bliss-cmd-send'),
        }
    }
    expected = {
        'desc'    : 'Test cfg.expandConfigPaths()',
        'file'    : os.path.join(prefix, 'bin', 'bliss-orbits'),
        'filename': os.path.join(prefix, 'bin', 'bliss-orbits'),
        'nested'  : {
            'desc'    : 'Test expansion of nested dictionaries too',
            'file'    : os.path.join(prefix, 'bin', 'bliss-cmd-send'),
            'filename': os.path.join(prefix, 'bin', 'bliss-cmd-send'),
        }
    }

    cfg.expandConfigPaths(actual, prefix, None, None, '', 'file', 'filename')
    assert actual == expected


def test_expandConfigPaths_w_variables ():
    prefix = os.path.join('/', 'bliss')
    pathvars = {
        'x': 'test-x',
        'y': 'test-y',
        'hostname': hostname()
    }
    actual = {
        'desc'    : 'Test cfg.expandConfigPaths() with variables',
        'file'    : os.path.join('bin', '${x}', 'bliss-orbits'),
        'filename': os.path.join('bin', '${y}', 'bliss-orbits')
    }
    expected = {
        'desc'    : 'Test cfg.expandConfigPaths() with variables',
        'file'    : os.path.join(prefix, 'bin', 'test-x', 'bliss-orbits'),
        'filename': os.path.join(prefix, 'bin', 'test-y', 'bliss-orbits')
    }

    cfg.expandConfigPaths(actual, prefix, None, pathvars, '', 'file', 'filename')
    assert actual == expected


def test_replaceVariables ():
    # Test expandPath with simple custom path variable
    pathvars = {
        'x' : 'test'
    }
    pathname = os.path.join('/', '${x}', 'bliss-orbits')
    expected = [ os.path.join('/', pathvars['x'], 'bliss-orbits') ]
    assert cfg.replaceVariables(pathname, pathvars=pathvars) == expected

    # Test expandPath with more complex path variable with multiple
    # permutations
    pathvars = {
        'x' : 'x1',
        'y' : ['y1', 'y2'],
        'z' : ['z1', 'z2']
    }
    pathname = os.path.join('/', '${x}', '${y}', '${z}', 'bliss-orbits')
    expected = [
        os.path.join('/', pathvars['x'], pathvars['y'][0],
                     pathvars['z'][0], 'bliss-orbits'),
        os.path.join('/', pathvars['x'], pathvars['y'][0],
                     pathvars['z'][1], 'bliss-orbits'),
        os.path.join('/', pathvars['x'], pathvars['y'][1],
                     pathvars['z'][0], 'bliss-orbits'),
        os.path.join('/', pathvars['x'], pathvars['y'][1],
                     pathvars['z'][1], 'bliss-orbits')
    ]
    assert sorted(cfg.replaceVariables(pathname, pathvars=pathvars)) == \
        sorted(expected)


def test_replaceVariables_strftime ():
    # Test replaceVariables with strftime directives
    pathname = os.path.join('/', '%Y', '%Y-%j', 'bliss-orbits')

    expected = [ os.path.join('/',
                              time.strftime('%Y', time.gmtime()),
                              time.strftime('%Y-%j', time.gmtime()),
                              'bliss-orbits') ]

    assert sorted(cfg.replaceVariables(pathname)) == sorted(expected)


def test_replaceVariables_strftime_addday ():
    # NOTE(review): this test is currently an exact duplicate of
    # test_replaceVariables_strftime and does not add a day despite its
    # name -- confirm intent and extend if replaceVariables supports it.
    pathname = os.path.join('/', '%Y', '%Y-%j', 'bliss-orbits')

    expected = [ os.path.join('/',
                              time.strftime('%Y', time.gmtime()),
                              time.strftime('%Y-%j', time.gmtime()),
                              'bliss-orbits') ]

    assert sorted(cfg.replaceVariables(pathname)) == sorted(expected)


def test_addPathVariables ():
    config = cfg.BlissConfig(data=YAML())
    before = config._pathvars
    before_len = len(before.keys())

    pathvars = {
        'x': 'test-x',
        'y': 'test-y'
    }
    config.addPathVariables(pathvars)
    after = config._pathvars
    after_len = len(after.keys())

    assert before_len < after_len
    assert 'x' in after.keys()
    assert 'y' in after.keys()


def test_datapaths ():
    """
    default:
        ISS:
            apiport: 9090
            bticard: 0
            desc: ISS PL/MDM Simulator
            path: bin/bliss-orbits
            rtaddr: 15

    """
    # check data paths work from YAML()
    config = cfg.BlissConfig(data=YAML())
    assert len(config._datapaths) > 0

    # check accessing data paths raises when the config defines none
    # (this function's docstring is the no-datapaths YAML fixture)
    config = cfg.BlissConfig(data=test_datapaths.__doc__)
    try:
        config._datapaths
        assert False, 'Expected cfg.BlissConfigMissing'
    except cfg.BlissConfigMissing:
        pass


def test_flatten ():
    d = { 'a': { 'foo': 'a' }, 'b': { 'foo': 'b' } }
    assert cfg.flatten(dict(d), 'a', 'b') == { 'foo': 'b' }
    assert cfg.flatten(dict(d), 'b', 'a') == { 'foo': 'a' }


def test_loadYAML ():
    with TestFile(data=YAML()) as filename:
        assert cfg.loadYAML(filename) == cfg.loadYAML(data=YAML())


def test_merge ():
    d = { 'foo': 'bar' }
    o = { 'foo': 'baz' }
    assert cfg.merge(d, o) == o

    d = { 'foo': 'bar' }
    o = { 'baz': 'bop' }
    assert cfg.merge(d, o) == { 'foo': 'bar', 'baz': 'bop' }


def assert_BlissConfig (config, path, filename=None):
    """Shared assertions for a BlissConfig built from the YAML() fixture."""
    assert config.ISS.apiport == 1234
    assert config.ISS.bticard == 6
    assert config.ISS.desc == 'ISS PL/MDM Simulator'
    assert config.ISS.path == os.path.join(config._directory, path)
    assert config.ISS.rtaddr == 15

    assert config._hostname == hostname()
    assert config._platform == platform()
    assert config._filename == filename

    assert config != config.ISS
    assert config.ISS == config['ISS']

    year = time.strftime('%Y', time.gmtime())
    doy = time.strftime('%j', time.gmtime())
    base = '/gds/%s/%s-%s/' % (year, year, doy)
    assert config.data.test1.path == base + 'test1'
    assert config.data.test2.path == base + 'test2'

    assert 'foo' not in config
    try:
        config.foo
        assert False, 'Expected AttributeError'
    except AttributeError:
        pass

    try:
        config['foo']
        assert False, 'Expected KeyError'
    except KeyError:
        pass

    assert type(str(config)) is str


def test_BlissConfig ():
    config = cfg.BlissConfig(data=YAML())
    path = 'bin/bliss-orbits'
    assert_BlissConfig(config, path)

    with TestFile(data=YAML()) as filename:
        config = cfg.BlissConfig(filename)
        assert_BlissConfig(config, path, filename)

        config.reload()
        assert_BlissConfig(config, path, filename)


if __name__ == '__main__':
    nose.main()

# --- new file: bliss/core/test/test_cmd.py ---
# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2013, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S.
# export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.

import nose
import struct

from bliss.core import cmd, dtype


# NOTE(review): indentation of this YAML fixture was reconstructed from a
# collapsed source -- confirm against the original file.
CMDDICT_TEST = """
- !Command
  name: SEQ_ENABLE_DISABLE
  opcode: 0x0042
  arguments:
    - !Argument
      name: sequence_id
      type: MSB_U16
      bytes: [0, 1]

    - !Argument
      name: enable
      type: U8
      bytes: 2
      enum:
        0: DISABLED
        1: ENABLED
"""


def testArgDefn ():
    """Argument definitions expose the attributes declared in the YAML."""
    name = 'SEQ_ENABLE_DISABLE'
    defn = cmd.CmdDict(CMDDICT_TEST)[name]

    arg = defn.argdefns[0]
    assert arg.bytes == [0, 1]
    assert arg.desc is None
    assert arg.enum is None
    assert arg.fixed == False
    assert arg.name == 'sequence_id'
    assert arg.nbytes == 2
    assert arg.range is None
    assert arg.slice() == slice(0, 2)
    assert arg.type == dtype.get('MSB_U16')
    assert arg.units is None
    assert arg.value is None

    assert type( repr(arg) ) is str

    arg = defn.argdefns[1]
    assert arg.bytes == 2
    assert arg.desc is None
    assert arg.enum == {'DISABLED': 0, 'ENABLED': 1}
    assert arg.fixed == False
    assert arg.name == 'enable'
    assert arg.nbytes == 1
    assert arg.range is None
    assert arg.slice() == slice(2, 3)
    assert arg.type == dtype.get('U8')
    assert arg.units is None
    assert arg.value is None

    assert type( repr(arg) ) is str


def testArgDefnDecode ():
    """Decoding maps raw bytes to values; enum values decode to names."""
    name = 'SEQ_ENABLE_DISABLE'
    defn = cmd.CmdDict(CMDDICT_TEST)[name]

    arg = defn.argdefns[0]
    assert arg.decode( struct.pack('>H', 1234) ) == 1234

    arg = defn.argdefns[1]
    assert arg.decode( struct.pack('>B', 0) ) == 'DISABLED'
    assert arg.decode( struct.pack('>B', 1) ) == 'ENABLED'
    # Values outside the enum decode to the raw integer
    assert arg.decode( struct.pack('>B', 2) ) == 2


def testArgDefnEncode ():
    """Encoding maps values to raw bytes; enum names encode to values."""
    name = 'SEQ_ENABLE_DISABLE'
    defn = cmd.CmdDict(CMDDICT_TEST)[name]

    arg = defn.argdefns[0]
    assert arg.encode(1234) == struct.pack('>H', 1234)

    arg = defn.argdefns[1]
    assert arg.encode( 'DISABLED') == struct.pack('>B', 0)
    assert arg.encode( 'ENABLED' ) == struct.pack('>B', 1)
    assert arg.encode( 2 ) == struct.pack('>B', 2)


def testArgDefnValidate ():
    """Validation enforces type, range, and enum membership."""
    name = 'SEQ_ENABLE_DISABLE'
    defn = cmd.CmdDict(CMDDICT_TEST)[name]

    arg = defn.argdefns[0]
    assert arg.validate(1) == True
    assert arg.validate(1.2) == False

    arg.range = [0, 2]
    assert arg.validate(0) == True
    assert arg.validate(1) == True
    assert arg.validate(2) == True
    assert arg.validate(3) == False

    arg = defn.argdefns[1]
    assert arg.validate('ENABLED' ) == True
    assert arg.validate('DISABLED') == True
    assert arg.validate('FOOBAR') == False

    # Failed validation appends explanatory messages when a list is given
    msgs = [ ]
    assert arg.validate('FOOBAR', msgs) == False
    assert len(msgs) > 0


def testCmdDefn ():
    name = 'SEQ_ENABLE_DISABLE'
    defn = cmd.CmdDict(CMDDICT_TEST)[name]

    assert defn.name == name
    assert defn.opcode == 0x0042
    assert defn.nargs == 2


def testGetDefaultDict ():
    cmddict = cmd.getDefaultDict()

    assert cmddict is not None
    assert isinstance(cmddict, cmd.CmdDict)


if __name__ == '__main__':
    nose.main()

# --- new file: bliss/core/test/test_coord.py ---
#!/usr/bin/env python2.7

# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2016, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations.
# User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.

import datetime
import math
import nose

from bliss.core import dmc, coord


def float_equality(a, b, rel_tol=1e-06, abs_tol=0.0):
    """Approximate float comparison (math.isclose is unavailable on 2.7)."""
    tolerance = max(rel_tol * max(abs(a), abs(b)), abs_tol)
    return abs(a - b) <= tolerance


def test_cbrt():
    # Cube root handles positive, negative, and non-perfect-cube inputs.
    for value, expected in ((64, 4), (-64, -4), (10, 2.1544346)):
        assert float_equality(coord.cbrt(value), expected)


def test_ecef2geodetic():
    lat, lon, alt = coord.ecef2geodetic(4510731, 4510731, 0)
    assert float_equality(math.degrees(lat), 0.0)
    assert float_equality(math.degrees(lon), 45.0)
    assert float_equality(alt, 999.9564)

    lat, lon, alt = coord.ecef2geodetic(2297292.93, 1016894.95, -5843939.67)
    assert float_equality(math.degrees(lat), -66.87654)
    assert float_equality(math.degrees(lon), 23.87654)
    assert float_equality(alt, 1000.1, rel_tol=1e-2)


def test_eci2ecef():
    x, y, z = -6.0744 * 1e6, -1.8289 * 1e6, 0.6685 * 1e6
    when = datetime.datetime(2010, 1, 17, 10, 20, 36)
    ecef = coord.eci2ecef(x, y, z, gmst=dmc.toGMST(when))
    assert float_equality(ecef[0], 1628340.306018)
    assert float_equality(ecef[1], -6131208.5609442)
    assert float_equality(ecef[2], 668500.0)


def test_eci2geodetic():
    x, y, z = -6.0744 * 1e6, -1.8289 * 1e6, 0.6685 * 1e6
    when = datetime.datetime(2010, 1, 17, 10, 20, 36)
    lla = list(coord.eci2geodetic(x, y, z, gmst=dmc.toGMST(when)))
    # Normalize angles into a single revolution before comparing
    lla[0] = math.fmod(lla[0], math.pi * 2)
    lla[1] = math.fmod(lla[1], math.pi * 2)
    assert float_equality(math.degrees(lla[0]), 6.0558200)
    assert float_equality(math.degrees(lla[1]), -75.1266047)
    assert float_equality(lla[2], 978.4703290)


def test_eci_conversion_equality():
    # ECI -> ECEF -> geodetic must agree with the direct ECI -> geodetic path.
    x, y, z = -6.0744 * 1e6, -1.8289 * 1e6, 0.6685 * 1e6
    gmst = dmc.toGMST(datetime.datetime(2010, 1, 17, 10, 20, 36))

    ecef = coord.eci2ecef(x, y, z, gmst=gmst)
    via_ecef = list(coord.ecef2geodetic(ecef[0], ecef[1], ecef[2]))
    direct = list(coord.eci2geodetic(x, y, z, gmst=gmst))

    for lla in (via_ecef, direct):
        lla[0] = math.fmod(lla[0], math.pi * 2)
        lla[1] = math.fmod(lla[1], math.pi * 2)

    for p, q in zip(via_ecef, direct):
        assert float_equality(p, q)


if __name__ == '__main__':
    nose.main()

# --- new file: bliss/core/test/test_dmc.py ---
#!/usr/bin/env python2.7

# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2016, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.
import time
import datetime

import nose

from bliss.core import dmc


def test_getTimestampUTC():
    """The first element of getTimestampUTC() is the current UTC epoch second."""
    expected = time.strftime('%Y-%j', time.gmtime())
    actual = time.strftime('%Y-%j', time.gmtime(dmc.getTimestampUTC()[0]))
    assert actual == expected


def test_getUTCDatetimeDOY_w_days():
    """getUTCDatetimeDOY honors a positive day offset in its YYYY-DDD prefix."""
    days = 1
    future = (datetime.datetime.utcnow() + datetime.timedelta(days=days)).timetuple()

    date_part = dmc.getUTCDatetimeDOY(days=days).split('T')[0]
    year_str, doy_str = date_part.split('-')

    assert year_str == str(future.tm_year)
    assert doy_str == '%03d' % future.tm_yday


if __name__ == '__main__':
    nose.main()
import base64
import binascii
import datetime
import struct

import nose
import nose.tools

from bliss.core import dtype


def fpeq (p, q, eps=1e-6):
    """Floating-point equality within an absolute epsilon."""
    return abs(p - q) < eps


def testLSB_D64():
    """Decode a packed 64-bit double.

    NOTE(review): the test name says LSB, but it fetches MSB_D64 and packs
    with native byte order ('d'); on a little-endian host those disagree.
    Confirm the intended type name against bliss.core.dtype.
    """
    val  = 1.2
    data = struct.pack('d', val)          # 'data', not 'bytes': avoid shadowing the builtin
    MSB_D64 = dtype.get('MSB_D64')

    assert fpeq(MSB_D64.decode(data), val)
    assert fpeq(MSB_D64.decode(data, raw=True), val)


def testLSB_F32():
    """Decode a packed 32-bit float.

    NOTE(review): same LSB-vs-MSB naming mismatch as testLSB_D64 — confirm.
    """
    val  = 5.6
    data = struct.pack('f', val)
    MSB_F32 = dtype.get('MSB_F32')

    assert fpeq(MSB_F32.decode(data), val)
    assert fpeq(MSB_F32.decode(data, raw=True), val)


def testArrayType():
    """ArrayType('MSB_U16', 3): metadata, encode/decode, indexing, and errors."""
    array  = dtype.ArrayType('MSB_U16', 3)
    bin123 = '\x00\x01\x00\x02\x00\x03'
    bin456 = '\x00\x04\x00\x05\x00\x06'

    assert array.name   == 'MSB_U16[3]'
    assert array.nbits  == 3 * 16
    assert array.nbytes == 3 * 2
    assert array.nelems == 3
    assert array.type   == dtype.PrimitiveType('MSB_U16')

    assert array.encode(1, 2, 3) == bin123
    assert array.decode(bin456) == [4, 5, 6]
    assert array.decode(bin456, 0) == 4
    assert array.decode(bin456, 1) == 5
    assert array.decode(bin456, 2) == 6
    assert array.decode(bin456, slice(1, 3)) == [5, 6]

    # Wrong element count, truncated buffer, out-of-range index, bad index
    # type, and a non-integer array length must all raise.
    with nose.tools.assert_raises(ValueError):
        array.encode(1, 2)

    with nose.tools.assert_raises(IndexError):
        array.decode(bin456[1:5])

    with nose.tools.assert_raises(IndexError):
        array.decode(bin456, 3)

    with nose.tools.assert_raises(TypeError):
        array.decode(bin456, 'foo')

    with nose.tools.assert_raises(TypeError):
        dtype.ArrayType('U8', '4')


def testArrayTime8():
    """A TIME8 array decodes each byte to a fractional second (value / 256)."""
    array = dtype.ArrayType('TIME8', 3)
    data  = '\x40\x80\xC0'

    assert array.decode(data)           == [0.25, 0.50, 0.75]
    assert array.decode(data, raw=True) == [  64,  128,  192]


def testCMD16():
    """CmdType maps a 16-bit opcode to a named command definition and back."""
    dt   = dtype.CmdType()
    code = 0x0001
    name = 'NO_OP'

    rawdata = bytearray(struct.pack('>H', code))

    assert dt.decode(rawdata).name      == name
    assert dt.decode(rawdata, raw=True) == code
    assert dt.encode(name)              == rawdata


def testEVR16():
    """EVRType maps a 16-bit code to a named EVR definition and back."""
    dt   = dtype.EVRType()
    code = 0x0001
    name = 'NO_ERROR'

    rawdata = bytearray(struct.pack('>H', code))

    assert dt.decode(rawdata).name      == name
    assert dt.decode(rawdata, raw=True) == code
    assert dt.encode(name)              == rawdata


def testTIME8():
    """TIME8 is an 8-bit fractional second: decoded value is raw / 256."""
    dt      = dtype.Time8Type()
    fine    = 17
    rawdata = bytearray(struct.pack('B', fine))

    expected = fine / 256.0

    assert dt.decode(rawdata)           == expected
    assert dt.decode(rawdata, raw=True) == fine
    assert dt.encode(expected)          == rawdata


def testTIME32():
    """TIME32 is a big-endian 32-bit seconds value that decodes to a datetime."""
    dt  = dtype.Time32Type()
    sec = 1113733097

    rawdata = bytearray(struct.pack('>I', sec))
    date    = datetime.datetime(2015, 4, 22, 10, 18, 17)

    assert dt.decode(rawdata)           == date
    assert dt.decode(rawdata, raw=True) == sec
    assert dt.encode(date)              == rawdata


def testTIME40():
    """TIME40 is 32-bit seconds plus an 8-bit fractional (1/256 s) byte."""
    dt   = dtype.Time40Type()
    sec  = 1113733097
    fine = 8

    # Assemble the 5-byte raw representation.
    rawdata = bytearray(struct.pack('>I', sec))
    rawdata.extend(struct.pack('B', fine))

    # 8/256 s == 31250 us.
    expected = datetime.datetime(2015, 4, 22, 10, 18, 17, 31250)

    assert dt.decode(rawdata)           == expected
    assert dt.decode(rawdata, raw=True) == sec + (fine / 256.0)
    assert dt.encode(expected)          == rawdata


def testTIME64():
    """TIME64 is 32-bit seconds plus 32-bit nanoseconds, both big-endian."""
    dt   = dtype.Time64Type()
    sec  = 1113733097
    nsec = 31250000

    rawdata = bytearray(struct.pack('>I', sec))
    rawdata.extend(struct.pack('>I', nsec))

    date = datetime.datetime(2015, 4, 22, 10, 18, 17, 31250)

    assert dt.decode(rawdata)           == date
    assert dt.decode(rawdata, raw=True) == sec + (nsec / 1e9)
    assert dt.encode(date)              == rawdata


def testgetdtype():
    """dtype.get returns a fully-populated Time32Type for 'TIME32'."""
    dt = dtype.get("TIME32")
    assert isinstance(dt, dtype.Time32Type)
    assert dt.name == "TIME32"
    assert dt.pdt  == "MSB_U32"
    assert dt.max  == 4294967295
import nose
import nose.tools

import bliss
from bliss.core import evr


def test_evr_load():
    """The default EVR dictionary loads and contains the expected entries."""
    evr_dicts = evr.getDefaultDict()
    assert len(evr_dicts.keys()) == 4

    assert evr_dicts.codes[1].name == "NO_ERROR"


def test_evr_message_format_single_formatter():
    """A single %c formatter is substituted from the packed data."""
    evr_dicts = evr.getDefaultDict()
    example = evr_dicts.codes[1]
    example.message = "Unexpected length for %c command."
    exclamation = bytearray([0x21])

    expected = "Unexpected length for ! command."
    result = example.format_message(exclamation)

    assert result == expected


def test_evr_message_format_multiple_formatters():
    """%c, %s, and %d formatters are each consumed in order from the data."""
    evr_dicts = evr.getDefaultDict()
    example = evr_dicts.codes[1]
    example.message = "Unexpected length for %c command %s and %d."
    input_data = bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44])

    expected = "Unexpected length for ! command Foo and 4279317316."
    result = example.format_message(input_data)

    assert result == expected


def test_evr_no_formatters_found():
    """A message with no recognized formatter is returned unchanged."""
    evr_dicts = evr.getDefaultDict()
    example = evr_dicts.codes[1]
    input_data = bytearray([0x21])
    example.message = "%M this formatter doesn't exist"
    result = example.format_message(input_data)

    assert result == example.message


def test_bad_formatter_parsing():
    """Too little data for the formatters raises a descriptive ValueError."""
    evr_dicts = evr.getDefaultDict()
    example = evr_dicts.codes[1]
    example.message = "Unexpected length for %c command %s and %d."
    input_data = bytearray([0x21])
    msg = "Unable to format EVR Message with data {}".format(input_data)

    # Use the assert_raises context manager instead of the fragile
    # try / assert False / except-assert-True pattern.
    with nose.tools.assert_raises(ValueError) as ctx:
        example.format_message(input_data)
    assert ctx.exception.message == msg


if __name__ == '__main__':
    nose.main()
import os
import csv
import struct

import nose

import bliss
from bliss.core import limits, tlm


def test_limit_range():
    """
    # test_limit_range

    - !Limit
      source: 1553_HS_Packet.Voltage_A
      desc: tbd
      lower:
        error: 5.0
        warn: 10.0
      upper:
        error: 45.0
        warn: 40.0
    """
    ldict = limits.LimitsDict(test_limit_range.__doc__)
    assert ldict['1553_HS_Packet.Voltage_A'].upper.error == 45.0
    assert ldict['1553_HS_Packet.Voltage_A'].lower.warn == 10.0


def test_limit_error_value():
    """
    # test_limit_error_value

    - !Limit
      source: CCSDS_HEADER.secondary_header_flag
      desc: tbd
      lower:
        error: Not Present
    """
    ldict = limits.LimitsDict(test_limit_error_value.__doc__)
    assert 'Not Present' in ldict['CCSDS_HEADER.secondary_header_flag'].lower.error


def test_check_upper_error():
    """
    # test_check_upper_error

    - !Limit
      source: 1553_HS_Packet.Voltage_A
      desc: tbd
      lower:
        error: 5.0
        warn: 10.0
      upper:
        error: 45.0
        warn: 40.0
    """
    ldict = limits.LimitsDict(test_check_upper_error.__doc__)
    assert ldict['1553_HS_Packet.Voltage_A'].error(46)


def test_check_lower_warn():
    """
    # test_check_lower_warn

    - !Limit
      source: 1553_HS_Packet.Voltage_A
      desc: tbd
      lower:
        error: 5.0
        warn: 10.0
      upper:
        error: 45.0
        warn: 40.0
    """
    ldict = limits.LimitsDict(test_check_lower_warn.__doc__)
    assert ldict['1553_HS_Packet.Voltage_A'].warn(6)


def test_check_value_error():
    """
    # test_check_value_error

    - !Limit
      source: Ethernet_HS_Packet.product_type
      desc: tbd
      value:
        error: TABLE_BAR
        warn: TABLE_FOO
    """
    ldict = limits.LimitsDict(test_check_value_error.__doc__)
    assert ldict['Ethernet_HS_Packet.product_type'].error('TABLE_BAR')
    assert ldict['Ethernet_HS_Packet.product_type'].warn('TABLE_FOO')


def test_check_value_list_warn():
    """
    # test_check_value_list_warn

    - !Limit
      source: Ethernet_HS_Packet.product_type
      desc: tbd
      value:
        error: FOOBAR
        warn: [ FOO, BAR ]
    """
    ldict = limits.LimitsDict(test_check_value_list_warn.__doc__)
    assert ldict['Ethernet_HS_Packet.product_type'].error('FOOBAR')
    assert ldict['Ethernet_HS_Packet.product_type'].warn('BAR')


def test_check_value_list_warn2():
    """
    # test_check_value_list_warn2

    - !Limit
      source: Ethernet_HS_Packet.product_type
      desc: tbd
      value:
        error: FOOBAR
        warn:
          - FOO
          - BAR
    """
    ldict = limits.LimitsDict(test_check_value_list_warn2.__doc__)
    assert ldict['Ethernet_HS_Packet.product_type'].warn('BAR')


if __name__ == '__main__':
    nose.main()
import unittest

import bliss
from bliss.core import log


class SysLogParserTest:
    """Base class: parse self.message and compare fields to self.expected."""
    message = ''
    expected = { }

    def run_test(self):
        """Test parsing of syslog"""
        parts = log.parseSyslog(self.message)
        for key, expected in self.expected.items():
            actual = parts.get(key, '')
            msg = 'Syslog Parsing failed for "%s" ' % key
            msg += '(expected: "%s", actual: "%s")' % (expected, actual)
            # assertEquals is a deprecated alias; assertEqual is canonical.
            self.assertEqual(expected, actual, msg)


class SysLogParserTestSuccess(SysLogParserTest, unittest.TestCase):
    """Unit test of the log.SysLogParser.parse method"""
    message = ('<14>1 2015-03-06T21:29:43.756496Z LMC-037512 bliss 12074 '
               'INFO - Waiting for BLISS telemetry on port 2514')
    expected = {
        'pri'      : '14',
        'version'  : '1',
        'timestamp': '2015-03-06T21:29:43.756496Z',
        'hostname' : 'LMC-037512',
        'appname'  : 'bliss',
        'procid'   : '12074',
        'msgid'    : 'INFO',
        'msg'      : 'Waiting for BLISS telemetry on port 2514'
    }


class SysLogParserTestMsgWithHyphen(SysLogParserTest, unittest.TestCase):
    """Unit test of the log.SysLogParser.parse method"""
    message = ('<14>1 2015-03-06T21:29:43.756496Z LMC-037512 bliss 12074 '
               'INFO - Waiting for BLISS - GUI telemetry')
    expected = {
        'pri'      : '14',
        'version'  : '1',
        'timestamp': '2015-03-06T21:29:43.756496Z',
        'hostname' : 'LMC-037512',
        'appname'  : 'bliss',
        'procid'   : '12074',
        'msgid'    : 'INFO',
        'msg'      : 'Waiting for BLISS - GUI telemetry'
    }


if __name__ == '__main__':
    log.begin()
    unittest.main(verbosity=4)
    log.end()
ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +import datetime +import os +import struct +import time +import warnings +import time + +import mock +import nose + +from bliss.core import dmc, pcap + + +TmpFilename = None + +with warnings.catch_warnings(): + warnings.simplefilter('ignore') + TmpFilename = os.tmpnam() + + +def testPCapGlobalHeader (): + header = pcap.PCapGlobalHeader() + assert header.magic_number == 0xA1B2C3D4 + assert header.version_major == 2 + assert header.version_minor == 4 + assert header.thiszone == 0 + assert header.sigfigs == 0 + assert header.snaplen == 65535 + assert header.network == 147 + assert str(header) == header._data + assert len(header) == 24 + assert header.incomplete() == False + + +def testPCapPacketHeader (): + header = pcap.PCapPacketHeader() + assert time.time() - header.ts <= 1 + assert header.incl_len == 0 + assert header.orig_len == 0 + assert str(header) == header._data + + +def testReadBigEndian (): + bytes = 'Hello World!' 
+ ts = int( time.time() ) + + # Write pcap file + with open(TmpFilename, 'wb') as stream: + stream.write( struct.pack('>IHHiIII', 0xA1B2C3D4, 2, 4, 0, 0, 65535, 147) ) + stream.write( struct.pack('>IIII', ts, 0, len(bytes), len(bytes)) ) + stream.write(bytes) + + # Read pcap using API + with pcap.open(TmpFilename, 'r') as stream: + assert stream.header.magic_number == 0xA1B2C3D4 + assert stream.header.version_major == 2 + assert stream.header.version_minor == 4 + assert stream.header.thiszone == 0 + assert stream.header.sigfigs == 0 + assert stream.header.snaplen == 65535 + assert stream.header.network == 147 + + header, packet = stream.read() + assert header.ts == ts + assert header.incl_len == len(bytes) + assert header.orig_len == len(bytes) + assert packet == bytes + + header, packet = stream.read() + assert header.incomplete() + assert packet is None + + os.unlink(TmpFilename) + + +def testReadLittleEndian (): + bytes = 'Hello World!' + ts = int( time.time() ) + + # Write pcap file + with open(TmpFilename, 'wb') as stream: + stream.write( struct.pack('= prev_ts + assert header.incl_len == len( packets[index] ) + assert header.orig_len == len( packets[index] ) + assert packet == packets[index] + + index += 1 + prev_ts = header.ts + + assert index == len(packets) + + header, packet = stream.read() + assert header.incomplete() + assert packet is None + + os.unlink(TmpFilename) + + +def testPCapPacketHeaderInit (): + header = pcap.PCapPacketHeader() + assert header._format == 'IIII' + assert header._size == 16 + assert header.incl_len == 0 + assert header.orig_len == 0 + assert header._data == str(header) + assert header._swap == '@' + + ts, usec = dmc.getTimestampUTC() + header.ts_sec, header.ts_usec = ts, usec + + float_ts = float(ts) + (float(usec) / 1e6) + assert header.ts == float_ts + assert header.timestamp == datetime.datetime.utcfromtimestamp(float_ts) + + +@mock.patch('bliss.core.log.info') +def testSegmentBytes(log_info): + try: + with 
@mock.patch('bliss.core.log.info')
def testSegmentPackets(log_info):
    """pcap.segment dryrun by packet count logs one message per segment."""
    try:
        with pcap.open(TmpFilename, 'w') as output:
            for p in range(10):
                output.write(str(p))

        pcap.segment(TmpFilename, 'foo.pcap', npackets=5, dryrun=True)
        expected = 'Would write 109 bytes, 5 packets, 1 seconds to foo.pcap.'

        # (leftover debug print removed)
        assert len(log_info.call_args_list) == 2
        for call in log_info.call_args_list:
            assert call[0][0] == expected

    finally:
        os.unlink(TmpFilename)


@mock.patch('bliss.core.log.info')
def testSegmentSeconds(log_info):
    """pcap.segment dryrun by duration logs one message per 2-second segment."""
    try:
        header = pcap.PCapPacketHeader(orig_len=1)
        with pcap.open(TmpFilename, 'w') as output:
            for p in range(10):
                header.ts_sec = p
                output.write(str(p), header)

        pcap.segment(TmpFilename, 'foo.pcap', nseconds=2, dryrun=True)
        expected = 'Would write 58 bytes, 2 packets, 2 seconds to foo.pcap.'

        assert len(log_info.call_args_list) == 5
        for call in log_info.call_args_list:
            assert call[0][0] == expected

    finally:
        os.unlink(TmpFilename)


def testTimes():
    """pcap.times finds one time segment, then two after a >2 second gap."""
    packets = "This is a nice little sentence".split()
    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)

    with pcap.open(TmpFilename, 'r') as stream:
        i = 0
        for header, packet in stream:
            # '==', not 'is': identity comparison of ints is an
            # implementation detail of the interpreter.
            if i == 0:
                exp_start = header.timestamp
            if i == 5:
                exp_end = header.timestamp
            i += 1

    times = pcap.times(TmpFilename)

    start = times[TmpFilename][0][0]
    stop  = times[TmpFilename][0][1]

    assert len(times[TmpFilename]) == 1
    assert start == exp_start
    assert stop  == exp_end

    # With two bursts separated by more than the 2-second tolerance,
    # two distinct time segments are reported.
    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)

        time.sleep(3)

        for p in packets:
            stream.write(p)

    times = pcap.times(TmpFilename, 2)
    assert len(times[TmpFilename]) == 2

    os.remove(TmpFilename)


def testQuery():
    """pcap.query copies packets within [start, end] into the result file."""
    TmpRes      = "test_pcap_res.pcap"
    TmpFilename = "test_pcap_file.pcap"
    packets     = "This is a nice little sentence".split()
    start       = datetime.datetime.now()

    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)
    end = datetime.datetime.max

    # NOTE(review): (TmpFilename) is a plain string, not a 1-tuple — confirm
    # pcap.query's expected filenames argument; use (TmpFilename,) if it
    # requires an iterable of paths.
    pcap.query(start, end, TmpRes, (TmpFilename))

    with pcap.open(TmpFilename, 'r') as stream1:
        with pcap.open(TmpRes, 'r') as stream2:
            header1, packet1 = stream1.read()
            header2, packet2 = stream2.read()
            assert str(header1) == str(header2)
            assert packet1 == packet2

    os.remove(TmpRes)
    os.remove(TmpFilename)


if __name__ == '__main__':
    nose.main()
Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +import os +import struct +import warnings + +import nose + +from bliss.core import table + +TmpFilename = None + +with warnings.catch_warnings(): + warnings.simplefilter('ignore') + TmpFilename = 'tmpfile'# os.tmpnam(); + + +class Season: + SPRING=1 + SUMMER=2 + AUTUMN=3 + WINTER=4 + + +class Color: + RED=1 + GREEN=2 + BLUE=3 + YELLOW=4 + + +# Util to create and destroy a file for testing +class TestTableWriter(object): + + # Creates a table file to be used in the tests + def writeTempFile(self): + yaml_table_test = ( + "- !FSWTable" + " name: test" + " delimiter: '-'" + " uptype: 1" + " size: 5120" + " header:" + " - !FSWColumn:" + " name: HEADER_COLUMN_ONE" + " desc: First header column" + " format: '%u'" + " units: none" + " type: U8" + " bytes: 1" + " - !FSWColumn:" + " name: HEADER_COLUMN_TWO" + " desc: Second header column" + " format: '%u'" + " units: cm" + " type: U16" + " bytes: 4" + " columns:" + " - !FSWColumn" + " name: COL_ONE" + " desc: First table column" + " format: '%u'" + " units: none" + " type: U8" + " bytes: 1" + " - !FSWColumn" + " name: COL_TWO" + " desc: Second table column" + " format: '%u'" + " units: cm" + " type: U16" + " bytes: 4" + " enum:" + " 0: SPRING" + " 1: SUMMER" + " 2: AUTUMN" + " 3: WINTER" + ) + with open(TmpFilename, 'wb') as file: + file.write(yaml_table_test) + + def 
def testTestTest():
    '''Tests the test'''
    assert 1


def testColDefn():
    '''Test single column definition'''
    column = table.FSWColDefn(
        name='test_col',
        type='U8',
        bytes=4,
        format='%u',
        units='none',
        items=4,
        enum=Season,
    )
    # Use '==' for value comparisons: 'is' on equal strings/ints relies on
    # interpreter interning and is not guaranteed.  'is' is kept only where
    # object identity is genuinely under test (the enum class).
    assert column.name == 'test_col'
    assert column.type == 'U8'
    column.type = 'U16'
    assert column.type == 'U16'
    assert column.bytes == 4
    column.bytes = 3
    assert column.bytes == 3
    assert column.format == '%u'
    column.format = '%i'
    assert column.format == '%i'
    assert column.units == 'none'
    column.units = 'cm'
    assert column.units == 'cm'
    assert column.items == 4
    column.items = 2
    assert column.items == 2
    assert column.enum is Season
    column.enum = Color
    assert column.enum is Color


def testTabDefnAndWrite():
    '''Test table definition'''
    coldefn = table.FSWColDefn(
        name='test_col',    # was the typo 'me=': the keyword is 'name'
        type='U8',
        bytes=4,
        format='%u',
        units='none',
        items=4,
        enum=Season,
    )
    coldefn2 = table.FSWColDefn(
        name='test_col2',
        type='U8',
        bytes=1,
        format='%u',
        units='none',
        items=4,
        enum=Color,
    )
    tabledefn = table.FSWTabDefn(
        name='test_table',
        delimiter='-',
        uptype=1,
        size=8000,
        rows=10,
        fswheaderdefns=None,
        coldefns=[coldefn, coldefn2],
    )
    fileholder = TestTableWriter()
    fileholder.writeTempFile()

    # Test that the table was created properly
    assert tabledefn.name == 'test_table'
    assert tabledefn.delimiter == '-'
    assert tabledefn.uptype == 1
    assert tabledefn.size == 8000
    assert tabledefn.rows == 10
    assert tabledefn.coldefns[0] is coldefn
    assert tabledefn.coldefns[1] is coldefn2

    # Write table to text file.  Modes fixed: 'rw'/'wr' are not valid open
    # modes -- read with 'r', write with 'w'.
    stream = open(TmpFilename, 'r')
    outstream = open('tempfile', 'w')

    # Test that the text file was created and did not exit with error code
    assert tabledefn.toText(stream, outstream, 1, 0.0) is None

    # Close the write to text
    stream.close()
    outstream.close()

    # Write table to binary file
    stream = open(TmpFilename, 'r')
    outstream = open('tempfileb', 'w')

    # Version in toBinary does not appear to be handled properly
    #assert tabledefn.toBinary('tempfile', stream, outstream, 1, 0.0) is None

    # Test that getDefaultFSWTabDict exits without an error code
    # and does not erroneously produce a dictionary when none exists
    assert table.getDefaultFSWTabDict() is None

    # Create a new table dictionary
    tabdict = table.FSWTabDict()
    tabdict.add(tabledefn)
    tabdict.create('test', {'colnames': tabledefn.coldefns})

    # Load a table definition to the dictionary
    tabdict.load('bliss/core/test/testdata/val/testCmdValidator6.yaml')

    # Assert writing to text does not exit with an error code
    assert table.writeToText(tabdict, 'test_table', 'tempfileb', 0, 0.0) is None

    #having trouble with getting version from TmpFilename
    #assert table.writeToBinary(tabdict,'test_table',TmpFilename,0) is None

    stream.close()
    outstream.close()

    os.unlink('tempfile')
    os.unlink('tempfileb')
    fileholder.tearDown()


if __name__ == '__main__':
    nose.main()
import os
import csv
import struct

import nose

from bliss.core import tlm


class TestTlmDictWriter(object):
    """Exercises TlmDictWriter CSV export."""
    test_yaml_file = '/tmp/test.yaml'
    test_outpath = '/tmp'

    def test_writeToCSV(self):
        """writeToCSV emits one CSV per packet containing the field metadata."""
        yaml_doc = """
        - !Packet
          name: Packet1
          fields:
            - !Field
              name: col1
              desc: test column 1
              bytes: 0
              type: MSB_U16
              mask: 0x10
              enum:
                a: testa
            - !Field
              name: SampleTime
              type: TIME64
              bytes: 1
        """

        csv_row1 = ['col1', '0', '2', '0x10', 'MSB', 'MSB_U16', 'test column 1', 'a: testa']

        with open(self.test_yaml_file, 'wb') as out:
            out.write(yaml_doc)

        tlmdict = tlm.TlmDict(self.test_yaml_file)

        writer = tlm.TlmDictWriter(tlmdict=tlmdict)
        writer.writeToCSV(self.test_outpath)

        expected_csv = os.path.join(self.test_outpath, 'Packet1.csv')
        assert os.path.isfile(expected_csv)

        with open(expected_csv, 'rb') as csvfile:
            reader = csv.reader(csvfile)
            # skip header
            reader.next()
            actual_row = reader.next()
            assert actual_row[0] == csv_row1[0]
            assert actual_row[1] == csv_row1[1]
            assert actual_row[4] == csv_row1[4]

        os.remove(self.test_yaml_file)
        os.remove(expected_csv)


class TestFieldDefinition(object):
    """Exercises !Field parsing, with and without an explicit title."""
    test_yaml_test1 = '/tmp/test_test1.yaml'

    yaml_docs_test1 = (
        '- !Packet\n'
        '  name: OCO3_1553_EHS\n'
        '  fields:\n'
        '    - !Field\n'
        '      name: field_1\n'
        '      title: Field 1\n'
        '      type: MSB_U16\n'
    )

    def setUp(self):
        with open(self.test_yaml_test1, 'wb') as out:
            out.write(self.yaml_docs_test1)

    def tearDown(self):
        os.remove(self.test_yaml_test1)

    def test_field_definition(self):
        """An explicit title is preserved on the parsed field."""
        tlmdict = tlm.TlmDict(self.test_yaml_test1)
        assert tlmdict['OCO3_1553_EHS'].fields[0].name == 'field_1'
        assert tlmdict['OCO3_1553_EHS'].fields[0].title == 'Field 1'

    def test_fld_defn_notitle(self):
        """Without a title, the field name is used as the title."""
        test_yaml_test2 = '/tmp/test_test2.yaml'
        yaml_docs_test2 = (
            '- !Packet\n'
            '  name: OCO3_1553_EHS\n'
            '  fields:\n'
            '    - !Field\n'
            '      name: field_1\n'
            '      type: MSB_U16\n'
        )

        with open(test_yaml_test2, 'wb') as out:
            out.write(yaml_docs_test2)

        tlmdict = tlm.TlmDict(test_yaml_test2)

        assert tlmdict['OCO3_1553_EHS'].fields[0].title == 'field_1'

        os.remove(test_yaml_test2)


class TestTlmConfig(object):
    """Exercises !include handling in telemetry dictionary YAML."""
    test_yaml_inc1 = '/tmp/test_inc1.yaml'
    test_yaml_inc2 = '/tmp/test_inc2.yaml'
    test_yaml_main = '/tmp/test_main.yaml'
    test_pkl_main  = '/tmp/test_main.pkl'

    yaml_docs_inc1 = (
        '- !Field\n'
        '  name: field_A\n'
        '  type: U8\n'
        '- !Field\n'
        '  name: field_B\n'
        '  type: U8\n'
    )
    yaml_docs_inc2 = (
        '- !Field\n'
        '  name: field_Y\n'
        '  type: U8\n'
        '- !Field\n'
        '  name: field_Z\n'
        '  type: U8\n'
    )

    def setUp(self):
        with open(self.test_yaml_inc1, 'wb') as out:
            out.write(self.yaml_docs_inc1)

        with open(self.test_yaml_inc2, 'wb') as out:
            out.write(self.yaml_docs_inc2)

    def tearDown(self):
        os.remove(self.test_yaml_inc1)
        os.remove(self.test_yaml_inc2)

    def test_yaml_fld_includes(self):
        """A field-level !include splices its fields into the packet."""
        yaml_docs_main = (
            '- !Packet\n'
            '  name: OCO3_1553_EHS\n'
            '  fields:\n'
            '    - !include /tmp/test_inc1.yaml\n'
            '    - !Field\n'
            '      name: field_1\n'
            '      type: MSB_U16\n'
        )

        with open(self.test_yaml_main, 'wb') as out:
            out.write(yaml_docs_main)

        tlmdict = tlm.TlmDict(self.test_yaml_main)
        assert len(tlmdict['OCO3_1553_EHS'].fields) == 3
        assert tlmdict['OCO3_1553_EHS'].fields[1].name == 'field_B'
        assert tlmdict['OCO3_1553_EHS'].fields[1].bytes == 1

        try:
            os.remove(self.test_yaml_main)
            os.remove(self.test_pkl_main)
        except OSError:
            pass  # 'pass', not a bare 'None' expression statement

    def test_yaml_fld_includesx2(self):
        """Two sibling field-level !includes splice in order."""
        yaml_docs_main = (
            '- !Packet\n'
            '  name: OCO3_1553_EHS\n'
            '  fields:\n'
            '    - !include /tmp/test_inc1.yaml\n'
            '    - !Field\n'
            '      name: field_1\n'
            '      type: MSB_U16\n'
            '    - !include /tmp/test_inc2.yaml\n'
        )

        with open(self.test_yaml_main, 'wb') as out:
            out.write(yaml_docs_main)

        tlmdict = tlm.TlmDict(self.test_yaml_main)
        assert len(tlmdict['OCO3_1553_EHS'].fields) == 5
        assert tlmdict['OCO3_1553_EHS'].fields[4].name == 'field_Z'
        assert tlmdict['OCO3_1553_EHS'].fields[4].bytes == 5

        try:
            os.remove(self.test_yaml_main)
            os.remove(self.test_pkl_main)
        except OSError:
            pass

    def test_yaml_fld_includes_nested(self):
        """An !include may itself consist of further !includes."""
        test_yaml_inc3 = '/tmp/test_inc3.yaml'
        yaml_docs_inc3 = (
            '- !include /tmp/test_inc1.yaml\n'
            '- !include /tmp/test_inc2.yaml\n'
        )

        with open(test_yaml_inc3, 'wb') as out:
            out.write(yaml_docs_inc3)

        yaml_docs_main = (
            '- !Packet\n'
            '  name: OCO3_1553_EHS\n'
            '  fields:\n'
            '    - !Field\n'
            '      name: field_1\n'
            '      type: MSB_U16\n'
            '    - !include /tmp/test_inc3.yaml\n'
        )

        with open(self.test_yaml_main, 'wb') as out:
            out.write(yaml_docs_main)

        tlmdict = tlm.TlmDict(self.test_yaml_main)
        assert len(tlmdict['OCO3_1553_EHS'].fields) == 5
        assert tlmdict['OCO3_1553_EHS'].fields[4].name == 'field_Z'
        assert tlmdict['OCO3_1553_EHS'].fields[4].bytes == 5

        try:
            os.remove(test_yaml_inc3)
            os.remove(self.test_yaml_main)
            os.remove(self.test_pkl_main)
        except OSError:
            pass

    def test_yaml_fld_includes_nestedx2(self):
        """Nested !includes mixed with inline fields resolve in order."""
        test_yaml_inc3 = '/tmp/test_inc3.yaml'
        yaml_docs_inc3 = (
            '- !include /tmp/test_inc1.yaml\n'
            '- !include /tmp/test_inc2.yaml\n'
        )

        with open(test_yaml_inc3, 'wb') as out:
            out.write(yaml_docs_inc3)

        test_yaml_inc4 = '/tmp/test_inc4.yaml'
        yaml_docs_inc4 = (
            '    - !include /tmp/test_inc3.yaml\n'
            '    - !Field\n'
            '      name: field_FOO\n'
            '      type: MSB_U16\n'
        )

        with open(test_yaml_inc4, 'wb') as out:
            out.write(yaml_docs_inc4)

        yaml_docs_main = (
            '- !Packet\n'
            '  name: OCO3_1553_EHS\n'
            '  fields:\n'
            '    - !Field\n'
            '      name: field_1\n'
            '      type: MSB_U16\n'
            '    - !include /tmp/test_inc4.yaml\n'
        )

        with open(self.test_yaml_main, 'wb') as out:
            out.write(yaml_docs_main)

        tlmdict = tlm.TlmDict(self.test_yaml_main)
        assert len(tlmdict['OCO3_1553_EHS'].fields) == 6
        assert tlmdict['OCO3_1553_EHS'].fields[5].name == 'field_FOO'
        assert tlmdict['OCO3_1553_EHS'].fields[5].bytes == [6, 7]

        try:
            os.remove(test_yaml_inc3)
            os.remove(test_yaml_inc4)
            os.remove(self.test_yaml_main)
            os.remove(self.test_pkl_main)
        except OSError:
            pass

    def test_yaml_pkt_includes(self):
        """Packet-level !includes add whole packets to the dictionary."""
        yaml_docs_inc3 = (
            '- !Packet\n'
            '  name: OCO3_TEST1\n'
            '  fields:\n'
            '    - !include /tmp/test_inc1.yaml\n'
        )

        test_yaml_inc3 = '/tmp/test_inc3.yaml'
        with open(test_yaml_inc3, 'wb') as out:
            out.write(yaml_docs_inc3)

        yaml_docs_inc4 = (
            '- !Packet\n'
            '  name: OCO3_TEST_2\n'
            '  fields:\n'
            '    - !include /tmp/test_inc2.yaml\n'
        )

        test_yaml_inc4 = '/tmp/test_inc4.yaml'
        with open(test_yaml_inc4, 'wb') as out:
            out.write(yaml_docs_inc4)

        yaml_docs_main = (
            '- !Packet\n'
            '  name: OCO3_1553_EHS\n'
            '  fields:\n'
            '    - !include /tmp/test_inc1.yaml\n'
            '    - !Field\n'
            '      name: field_1\n'
            '      type: MSB_U16\n'
            '    - !include /tmp/test_inc2.yaml\n'
            '- !include /tmp/test_inc3.yaml\n'
            '- !include /tmp/test_inc4.yaml\n'
        )

        with open(self.test_yaml_main, 'wb') as out:
            out.write(yaml_docs_main)

        tlmdict = tlm.TlmDict(self.test_yaml_main)
        assert len(tlmdict['OCO3_1553_EHS'].fields) == 5
        assert tlmdict['OCO3_1553_EHS'].fields[4].name == 'field_Z'
        assert tlmdict['OCO3_1553_EHS'].fields[4].bytes == 5

        assert len(tlmdict['OCO3_TEST1'].fields) == 2
        assert tlmdict['OCO3_TEST1'].fields[1].name == 'field_B'
        assert tlmdict['OCO3_TEST1'].fields[1].bytes == 1

        try:
            os.remove(test_yaml_inc3)
            os.remove(test_yaml_inc4)
            os.remove(self.test_yaml_main)
            os.remove(self.test_pkl_main)
        except OSError:
            pass
tlm.TlmDict(testArray.__doc__)['P'] + packet = tlm.Packet(defn, struct.pack('>HHH', 1, 2, 3)) + + assert packet.A == [1, 2, 3] + + +def testAliases(): + """ + # This test will use the following TLM dictionary definitions: + + - !Packet + name: P + fields: + - !Field + name: A + aliases: + icd: ALIAS_A + subsys: ALIAS_B + type: MSB_U16[3] + + """ + defn = tlm.TlmDict(testAliases.__doc__)['P'] + assert defn.fieldmap['A'].aliases['icd'] == 'ALIAS_A' + assert defn.fieldmap['A'].aliases['subsys'] == 'ALIAS_B' + assert len(defn.fieldmap['A'].aliases) == 2 + + +def testMask(): + """ + # This test will use the following TLM dictionary definitions. + # The mask 0x0180 singles out the two bits on either MSB_U16 + # word: + # + # 0b00000001 0b10000000 + + - !Packet + name: P + fields: + - !Field + name: M + type: MSB_U16 + mask: 0x0180 + """ + defn = tlm.TlmDict(testMask.__doc__)['P'] + packet = tlm.Packet(defn) + + assert packet.M == 0 + assert packet._data == bytearray([0x00, 0x00]) + + packet.M = 1 + assert packet.M == 1 + assert packet._data == bytearray([0x00, 0x80]) + + packet.M = 2 + assert packet.M == 2 + assert packet._data == bytearray([0x01, 0x00]) + + packet.M = 3 + assert packet.M == 3 + assert packet._data == bytearray([0x01, 0x80]) + + +def testSingleItemList(): + """ + # this test will test 1-item lists + - !Packet + name: P + fields: + - !Field + name: foo + bytes: 0 + type: U8 + - !Field + name: bar + bytes: [1] + type: U8 + - !Field + name: baz + bytes: [9,10] + type: MSB_U16 + """ + defn = tlm.TlmDict(testSingleItemList.__doc__)['P'] + assert defn.fieldmap['foo'].nbytes == 1 + assert defn.fieldmap['bar'].bytes == 1 + assert defn.fieldmap['baz'].bytes == [9, 10] + + +if __name__ == '__main__': + nose.main() diff --git a/bliss/core/test/test_util.py b/bliss/core/test/test_util.py new file mode 100644 index 00000000..64a4a1f4 --- /dev/null +++ b/bliss/core/test/test_util.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python2.7 + +# Advanced Multi-Mission Operations 
System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2014, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +import os +import unittest +import mock +import shutil + +from bliss.core import util + + +"""Specify some test file info""" +TEST_FILE_PATH = os.path.dirname(__file__) + "/testdata/util/test_util.txt" +TEST_FILE_SIZE = 117 +TEST_FILE_CRC = 3099955026 +TEST_FILE_CRC_SKIP_BYTE = 651256842 + +class Crc32FileTest(unittest.TestCase): + """Unit test of the CRC-32 generator for files""" + + def testCrc32WithTestFile(self): + """Test the CRC for a basic test file""" + crc = util.crc32File(TEST_FILE_PATH) + self.assertEqual(crc, TEST_FILE_CRC) + + def testCrc32WithTestFileAndSkip(self): + """Test the CRC-32 with a skip specified""" + crc = util.crc32File(TEST_FILE_PATH, 1) + self.assertEqual(crc, TEST_FILE_CRC_SKIP_BYTE) + +class EndianSwapU16Test(unittest.TestCase): + """Unit test of the endianSwap method""" + + def testEndianSwap(self): + """Test endian swap""" + input_array = bytearray([0x13, 0x00, 0x01, 0x00, 0x08, 0x00]) + expected_output = bytearray([0x00, 0x13, 0x00, 0x01, 0x00, 0x08]) + output_array = util.endianSwapU16(input_array) + self.assertEqual(output_array, expected_output) + +class GetFileSizeTest(unittest.TestCase): + """Unit test for finding size of file""" + + def testGetFileSize(self): + """Test the method can properly 
calculate file size + for known test data file + """ + size = util.getFileSize(TEST_FILE_PATH) + self.assertEqual(size, TEST_FILE_SIZE) + +class ToBCDTest(unittest.TestCase): + """Unit test for converting from a number to a Binary Coded + Decimal + """ + + def testToBCDWithInt(self): + """Test toBCD with integer""" + bcd = util.toBCD(25) + self.assertEqual("{0:b}".format(bcd), '100101') + +class ToFloatTest(unittest.TestCase): + """Unit test for toFloat method""" + + def testToFloat(self): + """Test toFloat with float string""" + f = util.toFloat("4.2") + self.assertIsInstance(f, float) + self.assertEqual(f, 4.2) + + def testToFloatWithDefaultSpecified(self): + """Test toFloat with new default""" + f = util.toFloat("UNDEFINED", 999.9) + self.assertIsInstance(f, float) + self.assertEqual(f, 999.9) + + def testToFloatWithDefaultReturnNone(self): + """Test toFloat with return none""" + f = util.toFloat("Foo") + self.assertIsNone(f) + +class ToNumberTest(unittest.TestCase): + """Unit test for toNumber method""" + + def testToNumberWithHex(self): + """Test toNumber with Hex specified""" + n = util.toNumber("0x2A") + self.assertIsInstance(n, int) + self.assertEqual(n, 42) + + def testToNumberWithInt(self): + """Test toNumber with int specified""" + n = util.toNumber("42") + self.assertIsInstance(n, int) + self.assertEqual(n, 42) + + def testToNumberWithFloat(self): + """Test toNumber with float specified""" + n = util.toNumber("42.0") + self.assertIsInstance(n, float) + self.assertEqual(n, 42.0) + + def testToNumberWithStringAndDefaultSpecified(self): + """Test toNumber with String and new default specified""" + n = util.toNumber("Foo", 42) + self.assertIsInstance(n, int) + self.assertEqual(n, 42) + + def testToNumberWithDefaultReturnNone(self): + """Test toNumber with String and None return""" + n = util.toNumber("Foo") + self.assertIsNone(n); + +class ToReprTest(unittest.TestCase): + """Unit test for converting Python object to + string representation + """ + + def 
testToReprWithString(self): + """TODO""" + pass + + +def test_expandPath (): + pathname = os.path.join('~', 'bin', 'bliss-orbits') + assert util.expandPath(pathname) == os.path.expanduser(pathname) + + pathname = os.path.join('/', 'bin', 'bliss-orbits') + assert util.expandPath(pathname) == pathname + + pathname = os.path.join('' , 'bin', 'bliss-orbits') + assert util.expandPath(pathname) == os.path.abspath(pathname) + + pathname = os.path.join('' , 'bin', 'bliss-orbits') + prefix = os.path.join('/', 'bliss') + expected = os.path.join(prefix, pathname) + assert util.expandPath(pathname, prefix) == expected + + +def test_listAllFiles(): + pathname = os.path.join('~','foo','bar') + directory = os.path.expanduser(pathname) + try: + os.makedirs(os.path.expanduser(directory)) + files = [ os.path.join(directory, 'test_1.txt'), os.path.join(directory, 'test_2.txt') ] + for fname in files: + with open(fname, 'wb') as file: + os.utime(fname, None) + + # test relative path + filelist = util.listAllFiles(pathname, ".txt") + assert os.path.relpath(files[0], start=directory) in filelist + + # test absolute path + filelist = util.listAllFiles(pathname, ".txt", True) + assert files[0] in filelist + finally: + shutil.rmtree(os.path.expanduser(os.path.join('~','foo'))) + + +@mock.patch('bliss.core.log.error') +def test_YAMLValidationError_exception(log_mock): + message = 'foo' + e = util.YAMLValidationError(message) + assert message == e.message + log_mock.assert_called_with(message) + +@mock.patch('bliss.core.log.error') +def test_YAMLError_exception(log_mock): + message = 'foo' + e = util.YAMLError(message) + assert message == e.message + log_mock.assert_called_with(message) + +if __name__ == '__main__': + unittest.main(verbosity=2) + nose.main() diff --git a/bliss/core/test/test_val.py b/bliss/core/test/test_val.py new file mode 100644 index 00000000..6175b783 --- /dev/null +++ b/bliss/core/test/test_val.py @@ -0,0 +1,450 @@ +#!/usr/bin/env python2.7 + +# Advanced Multi-Mission 
Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2016, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +import os +import pkg_resources +import jsonschema +import logging + +import mock +import nose + +import bliss +from bliss.core import cmd, log, tlm, val, util, evr + + +DATA_PATH = os.path.join(os.path.dirname(__file__), 'testdata', 'val') + + +class TestYAMLProcessor(object): + test_yaml_file = '/tmp/test.yaml' + + def test_yamlprocess_init(self): + yp = val.YAMLProcessor() + assert yp.loaded == False + assert yp.data == [] + assert yp.doclines == [] + assert yp._clean + assert yp.ymlfile is None + + @mock.patch('bliss.core.val.YAMLProcessor.load') + def test_ymlfile_setter(self, yaml_load_mock): + yp = val.YAMLProcessor() + assert yaml_load_mock.call_count == 0 + ymlfile = 'something that is not None' + yp.load(ymlfile) + assert yaml_load_mock.call_count == 1 + + @mock.patch('bliss.core.val.YAMLProcessor.process') + def test_yaml_load_with_clean(self, process_mock): + yp = val.YAMLProcessor() + yp.load() + assert process_mock.called + assert yp.loaded + + def test_yaml_load_without_clean(self): + yaml_docs = ( + '---\n' + 'a: hello\n' + '---\n' + 'b: goodbye\n' + ) + + with open(self.test_yaml_file, 'wb') as out: + out.write(yaml_docs) + + yp = val.YAMLProcessor(clean=False) + yp.load(self.test_yaml_file) + assert 
len(yp.data) == 2 + assert yp.data[0]['a'] == 'hello' + assert yp.data[1]['b'] == 'goodbye' + + os.remove(self.test_yaml_file) + + def test_invalid_yaml_load(self): + yaml_docs = """ + --- + a: these newlines and white space break stuff + --- + b: processing wont even get here + """ + with open(self.test_yaml_file, 'wb') as out: + out.write(yaml_docs) + + yp = val.YAMLProcessor(clean=False) + nose.tools.assert_raises( + util.YAMLError, + yp.load, self.test_yaml_file + ) + + os.remove(self.test_yaml_file) + + def test_basic_process_doc_object_name_strip(self): + yaml_docs = ( + 'a: hello\n' + '--- !foo\n' + 'b: goodbye\n' + ) + + with open(self.test_yaml_file, 'w') as out: + out.write(yaml_docs) + + yp = val.YAMLProcessor(clean=False) + out = yp.process(self.test_yaml_file) + + assert len(yp.doclines) == 2 + assert yp.doclines == [1, 3] + assert '!foo' not in out + os.remove(self.test_yaml_file) + + def test_basic_process_seq_name_strip(self): + yaml_docs = ( + ' - !bar\n' + ' - blah\n' + ) + + with open(self.test_yaml_file, 'w') as out: + out.write(yaml_docs) + + yp = val.YAMLProcessor(clean=False) + out = yp.process(self.test_yaml_file) + + assert "bar:" in out + os.remove(self.test_yaml_file) + + def test_empty_file_process(self): + open(self.test_yaml_file, 'w').close() + + yp = val.YAMLProcessor(clean=False) + nose.tools.assert_raises( + util.YAMLError, + yp.process, self.test_yaml_file + ) + + os.remove(self.test_yaml_file) + + def test_invalid_yaml_process(self): + yaml_docs = """ + --- + a: these newlines and white space break stuff + --- + b: processing wont even get here + """ + open(self.test_yaml_file, 'w').close() + + yp = val.YAMLProcessor(clean=False) + nose.tools.assert_raises( + IOError, + yp.process, '/tmp/thisFileDoesntExistAndWillCauseAnError' + ) + + os.remove(self.test_yaml_file) + + +class TestSchemaProcessor(object): + def test_schema_load(self): + """ Test variable settings from proper schema loading. 
""" + schemaproc = val.SchemaProcessor() + + # Test success + schema = os.path.join(DATA_PATH, "testSchemaLoad1.json") + schemaproc.load(schema) + assert schemaproc.data is not None + assert isinstance(schemaproc.data, dict) + assert schemaproc.loaded + assert schemaproc.schemafile == schemaproc._schemafile + + def test_schema_load_failure_bad_file(self): + """ Test Exception raise on not existent file load. """ + schemaproc = val.SchemaProcessor() + + schema = os.path.join('not', 'a', 'valid', 'path.json') + nose.tools.assert_raises( + jsonschema.SchemaError, + schemaproc.load, schema + ) + + def test_schema_load_failure_no_json_object(self): + test_file_path = '/tmp/test.json' + open(test_file_path, 'w').close() + + schemaproc = val.SchemaProcessor() + + nose.tools.assert_raises( + jsonschema.SchemaError, + schemaproc.load, test_file_path + ) + + os.remove(test_file_path) + + +class TestErrorHandler(object): + def test_error_handler_init(self): + eh = val.ErrorHandler('error', 'ymlfile', 'schemafile') + assert eh.error == 'error' + assert eh.ymlfile == 'ymlfile' + assert eh.schemafile == 'schemafile' + + def test_process_bad_root_object(self): + eh = val.ErrorHandler('error', 'ymlfile', 'schemafile') + messages = [] + error = mock.MagicMock() + error.message = "this is not of type u'object'" + eh.process(1, [1, 2], error, messages) + assert len(messages) == 1 + assert messages[0] == "Invalid root object in YAML. Check format." 
+ + @mock.patch('bliss.core.val.ErrorHandler.pretty') + def test_process_docline_docnum_mismatch(self, pretty_mock): + eh = val.ErrorHandler('error', 'ymlfile', 'schemafile') + messages = [] + error = mock.MagicMock() + error.message = "Some error message" + eh.process(1, [1, 2, 3, 4], error, messages) + assert pretty_mock.called + pretty_mock.assert_called_with(3, 4, error, messages) + + @mock.patch('bliss.core.val.ErrorHandler.pretty') + def test_procces_with_single_doc(self, pretty_mock): + eh = val.ErrorHandler('error', 'ymlfile', 'schemafile') + messages = [] + error = mock.MagicMock() + error.message = "Some error message" + eh.process(1, [1, 2], error, messages) + assert pretty_mock.called + pretty_mock.assert_called_with(1, 3, error, messages) + + +def validate(args): + msgs = [] + + validator = val.Validator(*args) + v = validator.validate(messages=msgs) + return msgs, v + + +def dispmsgs(msgs): + for msg in msgs: + print "Validation Test Error: %s \n" % msg + + +def cmdval(args): + msgs = [] + + validator = val.CmdValidator(*args) + v = validator.validate(messages=msgs) + + return msgs, v + + +def tlmval(args): + msgs = [] + + validator = val.TlmValidator(*args) + v = validator.validate(messages=msgs) + + return msgs, v + +def testYAMLProcesserLoad(): + ymlproc = val.YAMLProcessor() + + # Test bad path + try: + ymlfile = os.path.join('invalid', 'file', 'path.yaml') + ymlproc.load(ymlfile) + assert False + except IOError: + assert True + assert not ymlproc.loaded + + # Test valid yaml + ymlproc.load(os.path.join(DATA_PATH, "testValidCmd1.yaml")) + + assert ymlproc.data is not None + assert ymlproc.loaded + + +def testYAMLProcessorProcess(): + ymlproc = val.YAMLProcessor() + ymlproc.process(os.path.join(DATA_PATH, "testValidCmd1.yaml")) + + # check the document lines are correct + doclines = [85] + assert doclines == ymlproc.doclines + +def testValidatorCmd(): + msgs = [] + + # test successful validation + msgs, v = validate([os.path.join(DATA_PATH, 
"testValidCmd1.yaml"), cmd.getDefaultSchema()]) + assert v + assert len(msgs) == 0 + + # test failed validation + msgs, v = validate([os.path.join(DATA_PATH, "testInvalidCmd1.yaml"), cmd.getDefaultSchema()]) + + assert not v + assert len(msgs) == 1 + assert "Value 'BAD_OPCODE' should be of type 'integer'" in msgs[0] + +def testCmdValidator(): + # test successful cmd validation + msgs, v = cmdval([os.path.join(DATA_PATH, "testValidCmd1.yaml"), cmd.getDefaultSchema()]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + # test failed cmd validation - duplicate name + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator1.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 1 + assert "Duplicate command name" in msgs[0] + + # test failed cmd validation - duplicate opcode + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator2.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 2 + assert "Duplicate opcode" in msgs[1] + + # test failed cmd validation - bad argtype + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator3.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 1 + assert "Invalid argument type" in msgs[0] + + # test failed cmd validation - bad nbytes + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator4.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 2 + assert "Invalid argument size" in msgs[0] + + # test failed cmd validation - bad byte order + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator5.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 1 + assert "Invalid byte order" in msgs[0] + + # test failed cmd validation - bad start byte + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator6.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 1 + assert "Invalid byte order" in msgs[0] + + # test success cmd validation - ensure quoted YAML booleans in enums + msgs, v = cmdval([os.path.join(DATA_PATH, 
"testCmdValidator7.yaml"), cmd.getDefaultSchema()]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + # test failed cmd validation - YAML booleans not quoted + msgs, v = cmdval([os.path.join(DATA_PATH, "testCmdValidator8.yaml"), cmd.getDefaultSchema()]) + assert not v + assert len(msgs) == 2 + assert "Invalid enum value" in msgs[0] + +def testTlmValidator(): + # test successful tlm validation + msgs, v = tlmval([os.path.join(DATA_PATH, "testValidTlm1.yaml"), tlm.getDefaultSchema()]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + # test failed tlm validation - duplicate packet name + try: + msgs, v = tlmval([os.path.join(DATA_PATH, "testTlmValidator1.yaml"), tlm.getDefaultSchema()]) + assert False + except util.YAMLError, e: + assert "Duplicate packet name" in e.message + + # test failed tlm validation - duplicate field name + msgs, v = tlmval([os.path.join(DATA_PATH, "testTlmValidator2.yaml"), tlm.getDefaultSchema()]) + assert not v + assert len(msgs) == 1 + assert "Duplicate field name" in msgs[0] + + # test failed tlm validation - invalid field type + msgs, v = tlmval([os.path.join(DATA_PATH, "testTlmValidator3.yaml"), tlm.getDefaultSchema()]) + assert not v + assert len(msgs) == 1 + assert "Invalid field type" in msgs[0] + + # test failed tlm validation - invalid field size for field type specified + msgs, v = tlmval([os.path.join(DATA_PATH, "testTlmValidator4.yaml"), tlm.getDefaultSchema()]) + assert not v + assert len(msgs) == 2 + assert "Invalid field size" in msgs[0] + + # test failed tlm validation - un-quoted YAML special variables in enumerations + msgs, v = tlmval([os.path.join(DATA_PATH, "testTlmValidator5.yaml"), tlm.getDefaultSchema()]) + assert not v + assert len(msgs) == 2 + assert "Invalid enum value" in msgs[0] + + # test failed tlm validation - empty string for a YAML field + msgs, v = tlmval([os.path.join(DATA_PATH, "testTlmValidator6.yaml"), tlm.getDefaultSchema()]) + + assert not v + assert len(msgs) == 1 + assert "Missing value 
for desc." in msgs[0] + + +def testCmdDictValidation(): + # Validation test of current command dictionary + msgs, v = cmdval([bliss.config.cmddict.filename, cmd.getDefaultSchema()]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + +def testTlmDictValidation(): + # Validation test of current telemetry dictionary + msgs, v = tlmval([bliss.config.tlmdict.filename, tlm.getDefaultSchema()]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + +def testEvrValidation(): + # Validation test of current telemetry dictionary + yml = bliss.config.evrdict.filename + schema = os.path.join(os.path.dirname(yml), evr.getDefaultSchema()) + msgs, v = validate([yml, schema]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + +def testTableValidation(): + # Validation test of current table configuration + yml = bliss.config.table.filename + schema = pkg_resources.resource_filename('bliss.core', 'data/table_schema.json') + msgs, v = validate([yml, schema]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + +def testLimitsValidation(): + # Validation test of current table configuration + yml = bliss.config.limits.filename + schema = pkg_resources.resource_filename('bliss.core', 'data/limits_schema.json') + msgs, v = validate([yml, schema]) + dispmsgs(msgs) + assert v + assert len(msgs) == 0 + + +if __name__ == '__main__': + nose.main() diff --git a/bliss/core/test/testdata/testValidTable1.yaml b/bliss/core/test/testdata/testValidTable1.yaml new file mode 100644 index 00000000..af4b3885 --- /dev/null +++ b/bliss/core/test/testdata/testValidTable1.yaml @@ -0,0 +1,45 @@ +# Sample table for use in tests for bliss/core/table. 
+ +- !FSWTable + name: test + delimiter: '-' + uptype: 1 + size: 5120 + header: + - !FSWColumn: + name: HEADER_COLUMN_ONE + desc: First header column + format: "%u" + units: none + type: U8 + bytes: 1 + + - !FSWColumn: + name: HEADER_COLUMN_TWO + desc: Second header column + format: "%u" + units: cm + type: U16 + bytes: 4 + + columns: + - !FSWColumn + name: COL_ONE + desc: First table column + format: "%u" + units: none + type: U8 + bytes: 1 + + - !FSWColumn + name: COL_TWO + desc: Second table column + format: "%u" + units: cm + type: U16 + bytes: 4 + enum: + 0: SPRING + 1: SUMMER + 2: AUTUMN + 3: WINTER diff --git a/bliss/core/test/testdata/util/test_util.txt b/bliss/core/test/testdata/util/test_util.txt new file mode 100644 index 00000000..21a740f0 --- /dev/null +++ b/bliss/core/test/testdata/util/test_util.txt @@ -0,0 +1,2 @@ +“I choose a lazy person to do a hard job. Because a lazy person will find an easy way to do it.” + - Bill Gates diff --git a/bliss/core/test/testdata/val/testCmdValidator1.yaml b/bliss/core/test/testdata/val/testCmdValidator1.yaml new file mode 100644 index 00000000..e2af800e --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator1.yaml @@ -0,0 +1,31 @@ +- !Command + name: AIT_DUPLICATE_COMMAND + opcode: 0x1001 + subsystem: DCC + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument + desc: genericargument + units: none + type: MSB_U16 + bytes: [0,1] + + +# Following command has same name as the first +- !Command + name: AIT_DUPLICATE_COMMAND + opcode: 0x2001 + subsystem: CMD + desc: | + This command has a duplicate name as the first command + + arguments: + - !Argument + name: genericargument + desc: genericargument + units: none + type: MSB_U16 + bytes: [0,1] diff --git a/bliss/core/test/testdata/val/testCmdValidator2.yaml b/bliss/core/test/testdata/val/testCmdValidator2.yaml new file mode 100644 index 00000000..54b612d6 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator2.yaml 
@@ -0,0 +1,31 @@ +- !Command + name: AIT_DUPLICATE_COMMAND + opcode: 0x1001 + subsystem: DCC + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument + desc: genericargument + units: none + type: MSB_U16 + bytes: [0,1] + + +# Following command has same name as the first +- !Command + name: AIT_DUPLICATE_COMMAND + opcode: 0x1001 + subsystem: CMD + desc: | + This command has the same opcode as the first command + + arguments: + - !Argument + name: genericargument + desc: genericargument + units: none + type: MSB_U16 + bytes: [0,1] diff --git a/bliss/core/test/testdata/val/testCmdValidator3.yaml b/bliss/core/test/testdata/val/testCmdValidator3.yaml new file mode 100644 index 00000000..4a90adc1 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator3.yaml @@ -0,0 +1,14 @@ +- !Command + name: AIT_COMMAND + opcode: 0x1001 + subsystem: DCC + desc: | + This command has a bad type identifier + + arguments: + - !Argument + name: genericargument + desc: genericargument + units: none + type: BADTYPE + bytes: [0,1] diff --git a/bliss/core/test/testdata/val/testCmdValidator4.yaml b/bliss/core/test/testdata/val/testCmdValidator4.yaml new file mode 100644 index 00000000..dd5f66a2 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator4.yaml @@ -0,0 +1,24 @@ +- !Command + name: AIT_COMMAND + opcode: 0x1000 + subsystem: CMD + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument1 + desc: genericargument1 + units: none + type: MSB_U16 + bytes: [0,1] + + - !Argument + name: genericargument2 + desc: genericargument2 + units: none + type: U8 + bytes: [2,3] # invalid nbytes, type says 1 byte but 2 specified + enum: + 0: DISABLED + 1: ENABLED diff --git a/bliss/core/test/testdata/val/testCmdValidator5.yaml b/bliss/core/test/testdata/val/testCmdValidator5.yaml new file mode 100644 index 00000000..e4818aa2 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator5.yaml @@ -0,0 +1,24 @@ +- 
!Command + name: AIT_COMMAND + opcode: 0x1000 + subsystem: CMD + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument1 + desc: genericargument1 + units: none + type: MSB_U16 + bytes: [0,1] + + - !Argument + name: genericargument2 + desc: genericargument2 + units: none + type: U8 + bytes: 3 # invalid byte order, should be 2 + enum: + 0: DISABLED + 1: ENABLED diff --git a/bliss/core/test/testdata/val/testCmdValidator6.yaml b/bliss/core/test/testdata/val/testCmdValidator6.yaml new file mode 100644 index 00000000..be1a39e3 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator6.yaml @@ -0,0 +1,24 @@ +- !Command + name: AIT_COMMAND + opcode: 0x1000 + subsystem: CMD + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument1 + desc: genericargument1 + units: none + type: MSB_U16 + bytes: [1,2] + + - !Argument + name: genericargument2 + desc: genericargument2 + units: none + type: U8 + bytes: 3 + enum: + 0: DISABLED + 1: ENABLED diff --git a/bliss/core/test/testdata/val/testCmdValidator7.yaml b/bliss/core/test/testdata/val/testCmdValidator7.yaml new file mode 100644 index 00000000..7692b391 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator7.yaml @@ -0,0 +1,24 @@ +- !Command + name: AIT_COMMAND + opcode: 0x1000 + subsystem: CMD + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument1 + desc: genericargument1 + units: none + type: MSB_U16 + bytes: [0,1] + + - !Argument + name: genericargument2 + desc: genericargument2 + units: none + type: U8 + bytes: 2 + enum: + 0: DISABLED + 1: ENABLED diff --git a/bliss/core/test/testdata/val/testCmdValidator8.yaml b/bliss/core/test/testdata/val/testCmdValidator8.yaml new file mode 100644 index 00000000..9a44b526 --- /dev/null +++ b/bliss/core/test/testdata/val/testCmdValidator8.yaml @@ -0,0 +1,24 @@ +- !Command + name: AIT_COMMAND + opcode: 0x1000 + subsystem: CMD + desc: | + This is a generic command + + 
arguments: + - !Argument + name: genericargument1 + desc: genericargument1 + units: none + type: MSB_U16 + bytes: [0,1] + + - !Argument + name: genericargument2 + desc: genericargument2 + units: none + type: U8 + bytes: 2 + enum: + 0: True + 1: 'False' diff --git a/bliss/core/test/testdata/val/testInvalidCmd1.yaml b/bliss/core/test/testdata/val/testInvalidCmd1.yaml new file mode 100644 index 00000000..5f3631a0 --- /dev/null +++ b/bliss/core/test/testdata/val/testInvalidCmd1.yaml @@ -0,0 +1,14 @@ +- !Command + name: AIT_DUPLICATE_COMMAND + opcode: BAD_OPCODE + subsystem: DCC + desc: | + This is a generic command + + arguments: + - !Argument + name: genericargument + desc: genericargument + units: none + type: MSB_U16 + bytes: [0,1] diff --git a/bliss/core/test/testdata/val/testInvalidTlm1.yaml b/bliss/core/test/testdata/val/testInvalidTlm1.yaml new file mode 100644 index 00000000..3f1d27e4 --- /dev/null +++ b/bliss/core/test/testdata/val/testInvalidTlm1.yaml @@ -0,0 +1,15 @@ +- !Packet + name: CCSDS_Packet + type: ethernet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: test + type: U8 + mask: 0xE0 + - !Field + name: SampleTime + type: TIME64 + bytes: test diff --git a/bliss/core/test/testdata/val/testSchemaLoad1.json b/bliss/core/test/testdata/val/testSchemaLoad1.json new file mode 100644 index 00000000..80fad8ee --- /dev/null +++ b/bliss/core/test/testdata/val/testSchemaLoad1.json @@ -0,0 +1,20 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Command Dictionary Schema", + "description": "Command Dictionary Schema", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "opcode": { + "type": "integer" + }, + "subsystem": { + "type": "string" + }, + "desc": { + "type": "string" + } + } +} \ No newline at end of file diff --git a/bliss/core/test/testdata/val/testTlmValidator1.yaml b/bliss/core/test/testdata/val/testTlmValidator1.yaml new file mode 100644 index 
00000000..e678341a --- /dev/null +++ b/bliss/core/test/testdata/val/testTlmValidator1.yaml @@ -0,0 +1,34 @@ +# Test YAML for TLM Dict Validation +# +# Expected Result: Validation Failure +# Reason: Duplicate packet name +# +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: SampleTime + type: TIME64 + bytes: 1 + +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version2 + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: SampleTime2 + type: TIME64 + bytes: 1 diff --git a/bliss/core/test/testdata/val/testTlmValidator2.yaml b/bliss/core/test/testdata/val/testTlmValidator2.yaml new file mode 100644 index 00000000..20631d90 --- /dev/null +++ b/bliss/core/test/testdata/val/testTlmValidator2.yaml @@ -0,0 +1,44 @@ +# Test YAML for TLM Dict Validation +# +# Expected Result: Validation Failure +# Reason: Duplicate field name +# +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: version + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header (always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: 'Not Present' + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + bytes: [0, 1] + type: MSB_U16 + mask: 0x07FF diff --git a/bliss/core/test/testdata/val/testTlmValidator3.yaml b/bliss/core/test/testdata/val/testTlmValidator3.yaml new file mode 100644 index 00000000..f29257f8 --- /dev/null +++ 
b/bliss/core/test/testdata/val/testTlmValidator3.yaml @@ -0,0 +1,44 @@ +# Test YAML for TLM Dict Validation +# +# Expected Result: Validation Failure +# Reason: Invalid field type for field version +# +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: BAD_TYPE + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header (always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: 'Not Present' + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + bytes: [0, 1] + type: MSB_U16 + mask: 0x07FF diff --git a/bliss/core/test/testdata/val/testTlmValidator4.yaml b/bliss/core/test/testdata/val/testTlmValidator4.yaml new file mode 100644 index 00000000..4c0808c6 --- /dev/null +++ b/bliss/core/test/testdata/val/testTlmValidator4.yaml @@ -0,0 +1,44 @@ +# Test YAML for TLM Dict Validation +# +# Expected Result: Validation Failure +# Reason: Field size for apid is invalid +# +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header (always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: 'Not Present' + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + 
bytes: [0, 3] + type: MSB_U16 + mask: 0x07FF diff --git a/bliss/core/test/testdata/val/testTlmValidator5.yaml b/bliss/core/test/testdata/val/testTlmValidator5.yaml new file mode 100644 index 00000000..ff0af733 --- /dev/null +++ b/bliss/core/test/testdata/val/testTlmValidator5.yaml @@ -0,0 +1,44 @@ +# Test YAML for TLM Dict Validation +# +# Expected Result: Validation Failure +# Reason: TRUE not enclosed in quotes for secondary_header_flag enums +# +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header (always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: TRUE + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + bytes: [0, 1] + type: MSB_U16 + mask: 0x07FF diff --git a/bliss/core/test/testdata/val/testTlmValidator6.yaml b/bliss/core/test/testdata/val/testTlmValidator6.yaml new file mode 100644 index 00000000..33e2a295 --- /dev/null +++ b/bliss/core/test/testdata/val/testTlmValidator6.yaml @@ -0,0 +1,44 @@ +# Test YAML for TLM Dict Validation +# +# Expected Result: Validation Failure +# Reason: version.desc is empty string +# +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header 
(always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: TRUE + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + bytes: [0, 1] + type: MSB_U16 + mask: 0x07FF diff --git a/bliss/core/test/testdata/val/testTlmValidator7.yaml b/bliss/core/test/testdata/val/testTlmValidator7.yaml new file mode 100644 index 00000000..b02e85ef --- /dev/null +++ b/bliss/core/test/testdata/val/testTlmValidator7.yaml @@ -0,0 +1,15 @@ +- !Packet + name: Packet + fields: + - !Field + name: foo + bytes: [0] + type: U8 + - !Field + name: bar + bytes: 1 + type: U8 + - !Field + name: baz + bytes: [9,10] + type: MSB_U16 \ No newline at end of file diff --git a/bliss/core/test/testdata/val/testValidCmd1.yaml b/bliss/core/test/testdata/val/testValidCmd1.yaml new file mode 100644 index 00000000..d9d44607 --- /dev/null +++ b/bliss/core/test/testdata/val/testValidCmd1.yaml @@ -0,0 +1,85 @@ +- !Command + name: NO_OP + opcode: 0x0001 + subsystem: CORE + title: NO_OP + desc: | + Standard NO_OP command. + +- !Command + name: SEQ_START + opcode: 0x002 + subsystem: CMD + title: Start Sequence + desc: | + This command starts a specified command sequence. + + arguments: + - !Argument + name: sequence_id + desc: Sequence ID + units: none + type: MSB_U16 + bytes: [0,1] + +- !Command + name: SEQ_ENABLE_DISABLE + opcode: 0x0003 + subsystem: CMD + title: Enable/Disable Sequence + desc: | + This command enables or disabled the specified sequence. If a + sequence to be disabled is currently executing, it will be + interrupted. + + arguments: + - !Argument + name: sequence_id + desc: Sequence ID + units: none + type: MSB_U16 + bytes: [0,1] + + - !Argument + name: enable + desc: Enable + units: none + type: U8 + bytes: 2 + enum: + 0: DISABLED + 1: ENABLED + +- !Command + name: SEND_FIXED_ARG + opcode: 0x0004 + subsystem: CMD + title: Send Fixed Argument + desc: | + This command tests sending a fixed argument. 
+ + arguments: + - !Fixed + name: fixed_arg + desc: Fixed Argument + units: none + type: MSB_U16 + bytes: [0,1] + value: 5 + +- !Command + name: SEND_STR_ARG + opcode: 0x0005 + subsystem: CMD + title: Send string argument value + desc: | + This command tests sending a single string argument. + + arguments: + - !Argument + name: str_arg + desc: String Argument + units: none + type: S16 + bytes: [0,15] + diff --git a/bliss/core/test/testdata/val/testValidTlm1.yaml b/bliss/core/test/testdata/val/testValidTlm1.yaml new file mode 100644 index 00000000..d8faab98 --- /dev/null +++ b/bliss/core/test/testdata/val/testValidTlm1.yaml @@ -0,0 +1,14 @@ +- !Packet + name: CCSDS_Packet + desc: TBD + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: SampleTime + type: TIME64 + bytes: 1 diff --git a/bliss/core/tlm.py b/bliss/core/tlm.py new file mode 100644 index 00000000..282e2ad8 --- /dev/null +++ b/bliss/core/tlm.py @@ -0,0 +1,974 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2015, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Telemetry + +The bliss.core.tlm module provides telemetry fields and telemetry +dictionaries. Dictionaries contain packet, header, data, and field +definitions. 
+""" + +import collections +import os +import pkg_resources +import struct +import yaml +import csv + +import bliss +from bliss.core import dtype, json, log, util + + +class wordarray(object): + """Wordarrays are somewhat analogous to Python bytearrays, but + currently much more limited in functionality. They provide a + readonly view of a bytearray addressable and iterable as a sequence + of 16-bit words. This is convenient for telemetry processing as + packets are often more naturally addressable on word, as opposed to + byte, boundaries. + """ + + __slots__ = [ '_bytes' ] + + + def __init__(self, bytes): + """Creates a new wordarray from the given bytearray. + + The given bytearray should contain an even number of bytes. If + odd, the last byte is ignored. + """ + self._bytes = bytes + + def __getitem__(self, key): + """Returns the words in this wordarray at the given Python slice + or word at the given integer index.""" + length = len(self) + + if isinstance(key, slice): + return [self[n] for n in xrange(*key.indices(length))] + + elif isinstance(key, int): + if key < 0: + key += length + if key >= length: + msg = "wordarray index (%d) is out of range [0 %d]." + raise IndexError(msg % (key, length - 1)) + index = 2 * key + return (self._bytes[index] << 8) | self._bytes[index + 1] + + else: + raise TypeError("wordarray indices must be integers.") + + def __len__(self): + """Returns the number of words in this wordarray.""" + return len(self._bytes) / 2 + + + +class DNToEUConversion(json.SlotSerializer, object): + """DNToEUConversion + """ + + __slots__ = [ '_equation', 'units', '_when' ] + + def __init__(self, equation, units=None, when=None, terms=None): + if when: + when = PacketExpression(when) + + self._equation = PacketExpression(equation) + self.units = units + self._when = when + + + def eval(self, packet): + """Returns the result of evaluating this DNToEUConversion in the + context of the given Packet. 
+ """ + result = None + terms = None + + if self._when is None or self._when.eval(packet): + result = self._equation.eval(packet) + + return result + + + +class FieldList(collections.Sequence): + """FieldList + + FieldLists encapsulate a packet field array so that it behaves + like a Python list (or more generally a sequence) when accessed. + + A FieldList should not be created directly. It's created internally + by the private Packet field accessor _getattr(). + """ + + __slots__ = [ '_defn', '_packet', '_raw' ] + + def __init__(self, packet, defn, raw): + self._packet = packet + self._defn = defn + self._raw = raw + + def __eq__ (self, other): + return ( + isinstance(other, collections.Sequence) and + len(self) == len(other) and + all(self[n] == other[n] for n in xrange(len(self))) + ) + + def __getitem__(self, key): + return self._packet._getattr(self._defn.name, self._raw, key) + + def __len__(self): + return self._defn.type.nelems + + + +class FieldDefinition(json.SlotSerializer, object): + """FieldDefinition + + FieldDefinitions encapsulate all information required to define a + single packet field. This includes the field name, byte offset, + its format, and an optional bitmask. + + Use the get() and set() methods to extract and set a field's value + in the underlying raw packet data. 
+ + """ + + __slots__ = [ + "_bytes", "desc", "dntoeu", "enum", "expr", "mask", "name", "shift", + "_type", "units", "when", "_title", "aliases" + ] + + def __init__(self, *args, **kwargs): + """Creates a new FieldDefinition.""" + for slot in FieldDefinition.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, name, kwargs.get(name, None)) + + self.shift = 0 + + # Set the shift based on the bitmask + mask = self.mask + if mask is not None: + while mask != 0 and mask & 1 == 0: + self.shift += 1 + mask >>= 1 + + if self.dntoeu: + self.dntoeu = createDNToEUConversion(**self.dntoeu) + + if self.expr: + self.expr = createPacketExpression(self.expr) + + if self.when: + self.when = createPacketExpression(self.when) + + + def __jsonOmit__(self, key, val): + return val is None or val is '' or (key == 'shift' and val == 0) + + def __repr__(self): + return util.toRepr(self) + + + @property + def nbytes(self): + """The number of bytes required to represent this packet field.""" + if type(self.type) is not str: + return self.type.nbytes + else: + return 0 + + @property + def title(self): + """The argument title.""" + if not self._title: + return self.name + else: + return self._title + + @title.setter + def title(self, value): + self._title = value + + @property + def type(self): + """The argument type.""" + return self._type + + @type.setter + def type(self, value): + if type(value) is str and dtype.get(value) is not None: + self._type = dtype.get(value) + else: + self._type = value + log.error("Invalid field type '%s' " % value) + + @property + def bytes(self): + """The argument bytes.""" + return self._bytes + + @bytes.setter + def bytes(self, value): + if type(value) is list and len(value) == 1: + self._bytes = value[0] + else: + self._bytes = value + + + def decode(self, bytes, raw=False, index=None): + """Decodes the given bytes according to this Field Definition. 
+ + If raw is True, no enumeration substitutions will be applied + to the data returned. + + If index is an integer or slice (and the type of this + FieldDefinition is an ArrayType), then only the element(s) at + the specified position(s) will be decoded. + """ + if index is not None and isinstance(self.type, dtype.ArrayType): + value = self.type.decode( bytes[self.slice()], index, raw ) + else: + value = self.type.decode( bytes[self.slice()], raw ) + + + # Apply bit mask if needed + if self.mask is not None: + value &= self.mask + + if self.shift > 0: + value >>= self.shift + + if not raw and self.enum is not None: + value = self.enum.get(value, value) + + return value + + + def encode(self, value): + """Encodes the given value according to this FieldDefinition.""" + if type(value) == str and self.enum and value in self.enum: + value = self.enum[value] + + if type(value) == int: + if self.shift > 0: + value <<= self.shift + if self.mask is not None: + value &= self.mask + + return self.type.encode(value) if self.type else bytearray() + + + def slice(self, offset=0): + """Returns a Python slice object (e.g. for array indexing) indicating + the start and stop byte position of this Telemetry field. The + start and stop positions may be translated by the optional + byte offset. + """ + if self.bytes is None: + start = 0 + stop = start + self.nbytes + elif type(self.bytes) is int: + start = self.bytes + stop = start + self.nbytes + else: + start = self.bytes[0] + stop = self.bytes[1] + 1 + + return slice(start + offset, stop + offset) + + + def validate(self, value, messages=None): + """Returns True if the given field value is valid, False otherwise. + Validation error messages are appended to an optional messages + array. 
+ """ + valid = True + primitive = value + + def log(msg): + if messages is not None: + messages.append(msg) + + if self.enum: + if value not in self.enum.values(): + valid = False + flds = (self.name, str(value)) + log("%s value '%s' not in allowed enumerated values." % flds) + else: + primitive = int(self.enum.keys()[self.enum.values().index(value)]) + + if self.type: + if self.type.validate(primitive, messages, self.name) is False: + valid = False + + return valid + + + +class Packet(object): + """Packet + """ + def __init__(self, defn, data=None): + """Creates a new Packet based on the given Packet Definition and + binary (raw) packet data. + """ + object.__setattr__(self, '_defn', defn) + + if data is None: + data = bytearray(self.nbytes) + elif not isinstance(data, bytearray): + data = bytearray(data) + + object.__setattr__(self, '_data', data) + + if defn.history: + defn.history.add(self) + + def __repr__(self): + return self._defn.__repr__() + + + def __getattr__(self, fieldname): + """Returns the value of the given packet field name.""" + return self._getattr(fieldname) + + + def __setattr__(self, fieldname, value): + """Sets the given packet field name to value.""" + self._assertField(fieldname) + + defn = self._defn.fieldmap[fieldname] + bytes = defn.encode(value) + indices = defn.slice() + + if defn.mask is not None: + # If a mask is defined on the FieldDefinition (defn), + # defn.encode() will return the encoded value + # appropriately bit-shifted and masked. This value, which + # could span several bytes must now be integrated + # byte-by-byte into the already existing data bytes of the + # packet (self._data), taking care not to clobber any bits + # outside the mask. To accomplish this, for each byte at + # byte position b: + # + # 1. Bitwise-AND the existing value (data[b]) with the + # bitwise-COMPLEMENT of mask[b] to zero-out (clear) + # only the masked bits of the existing value, then + # + # 2. 
Bitwise-OR with the the new byte value (bytes[b]) + # to set the appropriate bits. + + data = self._data[indices] + mask = bytearray(struct.pack(defn.type.format, defn.mask)) + + for b in range( len(data) ): + bytes[b] |= (data[b] & ~mask[b]) + + self._data[indices] = bytes + + + def _assertField(self, fieldname): + """Raise AttributeError when Packet has no field with the given + name.""" + if not self._hasattr(fieldname): + values = self._defn.name, fieldname + raise AttributeError("Packet '%s' has no field '%s'" % values) + + + def _getattr (self, fieldname, raw=False, index=None): + """Returns the value of the given packet field name. + + If raw is True, the field value is only decoded. That is no + enumeration substituions or DN to EU conversions are applied. + """ + self._assertField(fieldname) + value = None + + if fieldname == 'raw': + value = createRawPacket(self) + elif fieldname == 'history': + value = self._defn.history + else: + defn = self._defn.fieldmap[fieldname] + + if isinstance(defn.type, dtype.ArrayType) and index is None: + return createFieldList(self, defn, raw) + + if defn.when is None or defn.when.eval(self): + if raw or (defn.dntoeu is None and defn.expr is None): + value = defn.decode(self._data, raw, index) + elif defn.dntoeu is not None: + value = defn.dntoeu.eval(self) + elif defn.expr is not None: + value = defn.expr.eval(self) + + return value + + + def _hasattr(self, fieldname): + """Returns True if this packet contains fieldname, False otherwise.""" + special = 'history', 'raw' + return fieldname in special or fieldname in self._defn.fieldmap + + + @property + def nbytes(self): + """The size of this packet in bytes.""" + return self._defn.nbytes + + + @property + def words(self): + """Packet data as a wordarray.""" + return wordarray(self._data) + + + def toJSON(self): + return { name: getattr(self, name) for name in self._defn.fieldmap } + + + def validate(self, messages=None): + """Returns True if the given Packet is valid, False 
otherwise. + Validation error messages are appended to an optional messages + array. + """ + return self._defn.validate(self, messages) + + + +class PacketContext(object): + """PacketContext + + A PacketContext provides a simple wrapper around a Packet so that + field accesses of the form: + + packet.fieldname + + may also be specified as: + + packet[fieldname] + + This latter syntax allows a PacketContext to be used as a symbol + table when evaluating PacketExpressions. + """ + + __slots__ = [ '_packet' ] + + + def __init__(self, packet): + """Creates a new PacketContext for the given Packet.""" + self._packet = packet + + + def __getitem__(self, name): + """Returns packet[name]""" + result = None + packet = self._packet + + if self._packet._hasattr(name): + result = self._packet._getattr(name) + else: + msg = "Packet '%s' has no field '%s'" + values = self._packet._defn.name, name + raise KeyError(msg % values) + + return result + + +class PacketDefinition(json.SlotSerializer, object): + """PacketDefinition + """ + NextUID = 1 + __slots__ = [ 'ccsds', 'constants', 'desc', 'fields', 'fieldmap', + 'functions', 'globals', 'history', 'name', 'uid' ] + + def __init__(self, *args, **kwargs): + """Creates a new PacketDefinition.""" + for slot in PacketDefinition.__slots__: + name = slot[1:] if slot.startswith("_") else slot + setattr(self, slot, kwargs.get(name, None)) + + if self.ccsds: + import ccsds + self.ccsds = ccsds.CcsdsDefinition(**self.ccsds) + + if self.fields is None: + self.fields = [ ] + self.fieldmap = { } + else: + self.fields = handle_includes(self.fields) + self.fieldmap = { defn.name: defn for defn in self.fields } + + if self.history: + self.history = PacketHistory(self, names=self.history) + + if self.ccsds: + self.uid = self.ccsds.apid + else: + self.uid = PacketDefinition.NextUID + PacketDefinition.NextUID += 1 + + self._update_globals() + self._update_bytes(self.fields) + + + def __repr__(self): + return util.toRepr(self) + + def 
__getstate__(self): + return { + name: getattr(self, name) + for name in PacketDefinition.__slots__ if name != 'globals' + } + + def __setstate__(self, state): + for s in PacketDefinition.__slots__: + setattr(self, s, state.get(s, None)) + self._update_globals() + + + def _update_bytes(self, defns, start=0): + """Updates the 'bytes' field in all FieldDefinition. + + Any FieldDefinition.bytes which is undefined (None) or '@prev' + will have its bytes field computed based on its data type size + and where the previous FieldDefinition ended (or the start + parameter in the case of very first FieldDefinition). If + bytes is set to '@prev', this has the effect of *starting* the + FieldDefinition at the same place as the *previous* + FieldDefinition. This reads well in YAML, e.g.: + + bytes: '@prev' + + Returns the end of the very last FieldDefinition in Python + slice notation, i.e. [start, stop). This would correspond to + the *start* of the next FieldDefinition, if it existed. + """ + + pos = slice(start, start) + for fd in defns: + if fd.bytes == '@prev' or fd.bytes is None: + if fd.bytes == '@prev': + fd.bytes = None + pos = fd.slice(pos.start) + elif fd.bytes is None: + pos = fd.slice(pos.stop) + if pos.start == pos.stop - 1: + fd.bytes = pos.start + else: + fd.bytes = [ pos.start, pos.stop - 1 ] + pos = fd.slice() + return pos.stop + + + def _update_globals(self): + if self.globals is None: + self.globals = { } + + if self.constants: + self.globals.update(self.constants) + + if self.functions: + for signature, body in self.functions.items(): + defn = 'def %s: return %s\n' % (signature, body) + exec(defn, self.globals) + + if self.history: + self.globals['history'] = self.history + + + @property + def nbytes(self): + """The number of bytes for this telemetry packet""" + max_byte = -1 + + for defn in self.fields: + byte = defn.bytes if type(defn.bytes) is int else max(defn.bytes) + max_byte = max(max_byte, byte) + + return max_byte + 1 + + + def validate(self, pkt, 
messages=None): + """Returns True if the given Packet is valid, False otherwise. + Validation error messages are appended to an optional messages + array. + """ + valid = True + + for f in self.fields: + try: + value = getattr(pkt, f.name) + except AttributeError: + valid = False + if messages is not None: + msg = "Telemetry field mismatch for packet '%s'. " + msg += "Unable to retrieve value for %s in Packet." + values = self.name, f.name + messages.append(msg % values) + break + + if f.validate(value, messages) is False: + valid = False + + return valid + + + def toJSON(self): + slots = ['name', 'desc', 'constants', 'functions', 'history', 'uid'] + + if self.ccsds is not None: + slots += 'ccsds' + + obj = json.slotsToJSON(self, slots) + obj['fields'] = { defn.name: defn.toJSON() for defn in self.fields } + return obj + + + +class PacketExpression(object): + """PacketExpression + + A Packet Expression is a simple mathematical expression that can + be evaluted in the context of a Packet. Names in the formula + refer to fields in the packet. + + Packet Expressions provide a convenient mechanism to express and + perform Digital Number (DN) to Engineering Unit (EU) conversions. + They can also be used to specify packet field guard conditions. + For example, a packet field may only be interpreted as a + particular housekeeping value when a corresponding mux field in + the same packet is equal to some contsant value. + + """ + + __slots__ = [ '_code', '_expr' ] + + + def __init__(self, expr): + """Creates a new PacketExpression from the given string expression.""" + self._code = compile(expr, '', mode='eval') + self._expr = expr + + + def __reduce__(self): + """Pickles and Unpickles PacketExpressions. + + Since Python code object cannot be Pickled, this method tells + Python picklers to pickle this class as a string expression + and unpickle by passing that string to the PacketExpression + constructor. 
+ return (PacketExpression, (self._expr, )) + + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, self._expr) + + + def __str__(self): + return self._expr + + + def eval(self, packet): + """Returns the result of evaluating this PacketExpression in the + context of the given Packet. + """ + try: + context = createPacketContext(packet) + result = eval(self._code, packet._defn.globals, context) + except ZeroDivisionError: + result = None + + return result + + + def toJSON(self): + return self._expr + + + +class PacketFunction(object): + """PacketFunction + """ + + __slots__ = [ '_args', '_code', '_name' ] + + def __init__(self, signature, body): + lparen = signature.find('(') + rparen = signature.find(')') + + if lparen == -1 or rparen == -1: + raise SyntaxError('Function signature "%s" has no parentheses' % signature) + + defn = 'def %s:\n return %s' % (signature, body) + self._args = signature[lparen + 1:rparen].split(',') + self._name = signature[:lparen] + self._code = compile(defn, '', mode='exec') + + +class PacketHistory(object): + """PacketHistory + """ + + __slots__ = [ '_defn', '_dict', '_names' ] + + def __init__(self, defn, names=None): + if names is None and defn.history is not None: + names = defn.history + + if names is None: + names = [ ] + + self._defn = defn + self._names = names + self._dict = { name: 0 for name in names } + + + def __contains__(self, fieldname): + """Returns True if fieldname is in this PacketHistory.""" + return fieldname in self._names + + + def __getattr__(self, fieldname): + """Returns the value of the given packet field name.""" + self._assertField(fieldname) + return self._dict.get(fieldname, None) + + + def __getitem__(self, fieldname): + """Returns packet.fieldname""" + return self._dict.get(fieldname, None) + + + def __getstate__(self): + """Serialize state, avoiding __getattr__().""" + return { s: getattr(self, s) for s in PacketHistory.__slots__ } + + + def __setstate__(self, state): + """Deserialize state, 
avoiding __getattr__().""" + for s in PacketHistory.__slots__: + setattr(self, s, state.get(s, None)) + + + def _assertField(self, name): + """Raise AttributeError when PacketHistory has no field with the given + name. + """ + if name not in self._names: + msg = 'PacketHistory "%s" has no field "%s"' + values = self._defn.name, name + raise AttributeError(msg % values) + + def add(self, packet): + """Add the given Packet to this PacketHistory.""" + for name in self._names: + value = getattr(packet, name) + if value is not None: + self._dict[name] = value + + def toJSON(self): + return self._names + + + +class RawPacket(object): + """RawPacket + + Wraps a packet such that: + + packet.raw.fieldname + + returns the value of fieldname as a raw value with no enumeration + substitutions or DN to EU conversions applied. + """ + + __slots__ = [ '_packet' ] + + + def __init__(self, packet): + """Creates a new RawPacket based on the given Packet.""" + self._packet = packet + + + def __getattr__(self, fieldname): + """Returns the value of the given packet fieldname as a raw + value with no DN to EU conversion applied. + """ + return self._packet._getattr(fieldname, raw=True) + + + +class TlmDict(dict): + """TlmDict + + Tlm Dictionaries provide a Python dictionary (i.e. hashtable) + interface mapping Packet names to Packet Definitions. + """ + def __init__(self, *args, **kwargs): + """Creates a new Telemetry Dictionary from the given telemetry + dictionary filename or YAML string. 
+ """ + self.filename = None + + if len(args) == 1 and len(kwargs) == 0 and type(args[0]) == str: + dict.__init__(self) + self.load(args[0]) + else: + dict.__init__(self, *args, **kwargs) + + def add(self, defn): + """Adds the given Packet Definition to this Telemetry Dictionary.""" + if defn.name not in self: + self[defn.name] = defn + else: + msg = "Duplicate packet name '%s'" % defn.name + log.error(msg) + raise util.YAMLError(msg) + + def create(self, name, data=None): + """Creates a new packet with the given definition and raw data. + """ + return createPacket(self[name], data) if name in self else None + + def load(self, content): + """Loads Packet Definitions from the given YAML content into this + Telemetry Dictionary. Content may be either a filename + containing YAML content or a YAML string. + + Load has no effect if this Command Dictionary was already + instantiated with a filename or YAML content. + """ + if self.filename is None: + if os.path.isfile(content): + self.filename = content + stream = open(self.filename, 'rb') + else: + stream = content + + pkts = yaml.load(stream) + pkts = handle_includes(pkts) + for pkt in pkts: + self.add(pkt) + + if type(stream) is file: + stream.close() + + def toJSON(self): + return { name: defn.toJSON() for name, defn in self.items() } + + + +class TlmDictWriter(object): + """TlmDictWriter + + Writes telemetry dictionary to a file in selected formats + """ + def __init__(self, tlmdict=None): + self.tlmdict = tlmdict or getDefaultDict() + + def writeToCSV(self, output_path=None): + '''writeToCSV - write the telemetry dictionary to csv + ''' + header = ['Name', 'First Byte', 'Last Byte', 'Bit Mask', 'Endian', + 'Type', 'Description', 'Values'] + + if output_path is None: + output_path = bliss.config._directory + + for pkt_name in self.tlmdict: + filename = os.path.join(output_path, pkt_name + '.csv') + + with open(filename, 'wb') as output: + csvwriter = csv.writer(output, quoting=csv.QUOTE_ALL) + 
csvwriter.writerow(header) + + for fld in self.tlmdict[pkt_name].fields: + # Pre-process some fields + + # Description + desc = fld.desc.replace('\n', ' ') if fld.desc is not None else "" + + # Mask + mask = hex(fld.mask) if fld.mask is not None else "" + + # Enumerations + enums = '\n'.join("%s: %s" % (k, fld.enum[k]) + for k in fld.enum) if fld.enum is not None else "" + + # Set row + row = [fld.name, fld.slice().start, fld.slice().stop, + mask, fld.type.endian, fld.type.name, desc, enums] + + csvwriter.writerow(row) + + +def getDefaultDict(reload=False): + return util.getDefaultDict(__name__, 'tlmdict', TlmDict, reload) + + +def getDefaultSchema(): + return pkg_resources.resource_filename('bliss.core', 'data/tlm_schema.json') + + +def getDefaultDictFilename(): + return bliss.config.tlmdict.filename + + +def handle_includes(defns): + '''Recursive handling of includes for any input list of defns. + The assumption here is that when an include is handled by the + pyyaml reader, it adds them as a list, which is stands apart from the rest + of the expected YAML definitions. 
+ ''' + newdefns = [] + for d in defns: + if isinstance(d,list): + newdefns.extend(handle_includes(d)) + else: + newdefns.append(d) + + return newdefns + +def YAMLCtor_PacketDefinition(loader, node): + fields = loader.construct_mapping(node, deep=True) + return createPacketDefinition(**fields) + + +def YAMLCtor_FieldDefinition(loader, node): + fields = loader.construct_mapping(node, deep=True) + return createFieldDefinition(**fields) + + +def YAMLCtor_include(loader, node): + # Get the path out of the yaml file + name = os.path.join(os.path.dirname(loader.name), node.value) + data = None + with open(name,'r') as f: + data = yaml.load(f) + return data + +yaml.add_constructor('!include', YAMLCtor_include) +yaml.add_constructor('!Packet' , YAMLCtor_PacketDefinition) +yaml.add_constructor('!Field' , YAMLCtor_FieldDefinition) + +util.__init_extensions__(__name__, globals()) diff --git a/bliss/core/util.py b/bliss/core/util.py new file mode 100755 index 00000000..86655fd6 --- /dev/null +++ b/bliss/core/util.py @@ -0,0 +1,452 @@ +#!/usr/bin/env python2.7 + +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2013, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. + +""" +BLISS Utilities + +The bliss.core.util module provides general utility functions. 
"""
BLISS Utilities

The bliss.core.util module provides general utility functions.
"""

import os
import pydoc
import stat
import sys
import time
import zlib
import types

try:
    import cPickle
except ImportError:
    # Python 3 compatibility: cPickle was folded into pickle.
    import pickle as cPickle

import bliss
from bliss.core import log


class ObjectCache (object):
    """Caches the result of an expensive loader(filename) call on disk.

    The loaded object is pickled next to *filename* (same basename with
    a '.pkl' extension) and reused on subsequent loads until *filename*
    is modified again.
    """

    def __init__(self, filename, loader):
        """Creates a new ObjectCache

        Caches the Python object returned by loader(filename), using
        Python's pickle object serialization mechanism.  An ObjectCache
        is useful when loader(filename) is slow.

        The result of loader(filename) is cached to cachename, the
        basename of filename with a '.pkl' extension.

        Use the load() method to load, either via loader(filename) or
        the pickled cache file, whichever was modified most recently.
        """
        self._loader    = loader
        self._dict      = None
        self._filename  = filename
        self._cachename = os.path.splitext(filename)[0] + '.pkl'

    @property
    def cachename(self):
        """The pickled cache filename"""
        return self._cachename

    @property
    def dirty(self):
        """True if the cache needs to be updated, False otherwise

        The cache is dirty when the pickle file does not exist or is
        older than the source file.
        """
        return not os.path.exists(self.cachename) or \
            (os.path.getmtime(self.filename) >
             os.path.getmtime(self.cachename))

    @property
    def filename(self):
        """The filename to cache via loader(filename)"""
        return self._filename

    def cache(self):
        """Caches the result of loader(filename) to cachename."""
        msg = 'Saving updates from more recent "%s" to "%s"'
        log.info(msg, self.filename, self.cachename)
        with open(self.cachename, 'wb') as output:
            # Protocol -1 selects the highest available pickle protocol.
            cPickle.dump(self._dict, output, -1)

    def load(self):
        """Loads the Python object

        Loads the Python object, either via loader(filename) or the
        pickled cache file, whichever was modified most recently.
        """
        if self._dict is None:
            if self.dirty:
                self._dict = self._loader(self.filename)
                self.cache()
            else:
                with open(self.cachename, 'rb') as stream:
                    self._dict = cPickle.load(stream)

        return self._dict


if sys.platform == 'win32':
    # On Windows, the best timer is time.clock
    timer = time.clock
else:
    # On most other platforms the best timer is time.time
    timer = time.time
def __init_extensions__(modname, modsyms):
    """Initializes a module (given its name and :func:`globals()` symbol
    table) for BLISS extensions.

    For every Python class defined in the given module, a
    ``createXXX()`` function is added to the module (where XXX is the
    classname).  By default, the function calls the ``XXX()``
    constructor and returns a new instance of that class.  However, if
    BLISS extensions are defined in ``config.yaml`` those extension
    classes are instantiated instead.  For example, with the following
    ``config.yaml``:

    .. code-block:: python

        extensions:
            bliss.core.cmd.Cmd: FooCmd

    Anywhere BLISS would create a :class:`Cmd` object (via
    :func:`createCmd()`) it will now create a ``FooCmd`` object
    instead.  Note: ``FooCmd`` should adhere to the same interface as
    :class:`bliss.core.cmd.Cmd` (and probably inherit from it).
    """

    def createFunc (cls, extname):
        """Creates and returns a new ``createXXX()`` function to instantiate
        either the given class by class object (*cls*) or extension
        class name (*extname*).

        In the case of an extension class name, the first time the
        returned ``createXXX()`` is called, it attempts to lookup and
        load the class.  Thereafter, the loaded class is cached for
        subsequent calls.
        """
        def create(*args, **kwargs):
            if create.cls is None:
                parts = extname.rsplit('.', 1)
                if len(parts) > 1:
                    modname, clsname = parts
                    module = pydoc.locate(modname)
                    if module is None:
                        # Fixed defect: original used '%d' on a string,
                        # which itself raised TypeError at raise-time.
                        raise ImportError('No module named %s' % modname)
                    create.cls = getattr(module, clsname)
                if create.cls is None:
                    raise ImportError('No class named %s' % extname)
            return create.cls(*args, **kwargs)
        create.cls = cls
        return create

    extensions = bliss.config.get('extensions', None)

    for clsname, cls in modsyms.items():
        if not isinstance(cls, type):
            continue

        extname = None

        if extensions:
            extname = extensions.get(modname + '.' + clsname, None)

        if extname:
            # Defer class resolution to first use of the create function.
            cls    = None
            values = modname, clsname, extname
            log.info('Replacing %s.%s with custom extension: %s' % values)

        modsyms['create' + clsname] = createFunc(cls, extname)


def crc32File(filename, skip=0):
    """Computes the CRC-32 of the contents of filename, optionally
    skipping a certain number of bytes at the beginning of the file.
    """
    with open(filename, 'rb') as stream:
        stream.read(skip)  # discard the first `skip` bytes
        return zlib.crc32(stream.read()) & 0xffffffff


def endianSwapU16(bytes):
    """Swaps pairs of bytes (16-bit words) in the given bytearray.

    The bytearray is modified in place and also returned; its length is
    assumed to be even (an odd trailing byte raises IndexError).
    """
    for b in range(0, len(bytes), 2):
        bytes[b], bytes[b + 1] = bytes[b + 1], bytes[b]
    return bytes


def setDictDefaults (d, defaults):
    """Sets all defaults for the given dictionary to those contained in a
    second defaults dictionary.  This convenience method calls:

        d.setdefault(key, value)

    for each key and value in the given defaults dictionary.
    """
    for key, val in defaults.items():
        d.setdefault(key, val)

    return d


def getDefaultDict(modname, config_key, loader, reload=False, filename=None):
    """Returns the default BLISS dictionary for modname

    This helper function encapsulates the core logic necessary to
    (re)load, cache (via util.ObjectCache), and return the default
    dictionary.  For example, in bliss.core.cmd:

        def getDefaultDict(reload=False):
            return bliss.util.getDefaultDict(__name__, 'cmddict', CmdDict, reload)
    """
    module  = sys.modules[modname]
    default = getattr(module, 'DefaultDict', None)

    if filename is None:
        filename = bliss.config.get('%s.filename' % config_key, None)

    if filename is not None and (default is None or reload is True):
        try:
            default = ObjectCache(filename, loader).load()
            setattr(module, 'DefaultDict', default)
        except IOError as e:
            msg = 'Could not load default %s "%s": %s'
            log.error(msg, config_key, filename, str(e))

    return default or loader()


def getFileSize(filename):
    """Returns the size of filename in bytes."""
    return os.stat(filename)[stat.ST_SIZE]


def toBCD (n):
    """Converts the number n into Binary Coded Decimal."""
    bcd  = 0
    bits = 0

    while True:
        n, r = divmod(n, 10)
        bcd |= (r << bits)
        # Fixed defect: original tested `n is 0` (identity), which is an
        # implementation detail of small-int caching; use equality.
        if n == 0:
            break
        bits += 4

    return bcd


def toFloat (str, default=None):
    """toFloat(str[, default]) -> float | default

    Converts the given string to a floating-point value.  If the
    string could not be converted, default (None) is returned.

    NOTE: This method is *significantly* more efficient than
    toNumber() as it only attempts to parse floating-point numbers,
    not integers or hexadecimal numbers.

    Examples:

    >>> f = toFloat("4.2")
    >>> assert type(f) is float and f == 4.2

    >>> f = toFloat("UNDEFINED", 999.9)
    >>> assert type(f) is float and f == 999.9

    >>> f = toFloat("Foo")
    >>> assert f is None
    """
    # NOTE: parameter name 'str' shadows the builtin but is kept for
    # interface compatibility with existing callers.
    value = default

    try:
        value = float(str)
    except ValueError:
        pass

    return value


def toNumber (str, default=None):
    """toNumber(str[, default]) -> integer | float | default

    Converts the given string to a numeric value.  The string may be a
    hexadecimal, integer, or floating number.  If string could not be
    converted, default (None) is returned.

    Examples:

    >>> n = toNumber("0x2A")
    >>> assert type(n) is int and n == 42

    >>> n = toNumber("42")
    >>> assert type(n) is int and n == 42

    >>> n = toNumber("42.0")
    >>> assert type(n) is float and n == 42.0

    >>> n = toNumber("Foo", 42)
    >>> assert type(n) is int and n == 42

    >>> n = toNumber("Foo")
    >>> assert n is None
    """
    value = default

    try:
        if str.startswith("0x"):
            value = int(str, 16)
        else:
            try:
                value = int(str)
            except ValueError:
                value = float(str)
    except ValueError:
        pass

    return value


def toNumberOrStr (str):
    """toNumberOrStr(str) -> integer | float | string

    Converts the given string to a numeric value, if possible.
    Otherwise returns the input string.
    """
    return toNumber(str, str)


def toRepr (obj):
    """toRepr(obj) -> string

    Converts the Python object to a string representation of the kind
    often returned by a class __repr__() method.  String values longer
    than 32 characters are truncated with an ellipsis.
    """
    args  = [ ]
    names = [ ]

    if hasattr(obj, "__dict__"):
        names += getattr(obj, "__dict__").keys()
    if hasattr(obj, "__slots__"):
        names += getattr(obj, "__slots__")

    for name in names:
        value = getattr(obj, name)
        if value is not None:
            if type(value) is str:
                if len(value) > 32:
                    value = value[0:32] + "..."
                value = "'" + value + "'"
            args.append("%s=%s" % (name, str(value)))

    return "%s(%s)" % (obj.__class__.__name__, ", ".join(args))


def toStringDuration (duration):
    """Returns a description of the given duration in the most appropriate
    units (e.g. seconds, ms, us, or ns).
    """
    table = (
        ('%dms'      , 1e-3, 1e3),
        (u'%d\u03BCs', 1e-6, 1e6),
        ('%dns'      , 1e-9, 1e9)
    )

    if duration > 1:
        return '%fs' % duration

    for format, threshold, factor in table:
        if duration > threshold:
            return format % int(duration * factor)

    return '%fs' % duration


def expandPath (pathname, prefix=None):
    """Return pathname as an absolute path, either expanded by the users
    home directory ("~") or with prefix prepended.

    NOTE(review): an empty pathname raises IndexError on pathname[0];
    callers are expected to pass a non-empty path.
    """
    if prefix is None:
        prefix = ''

    expanded = pathname

    if pathname[0] == '~':
        expanded = os.path.expanduser(pathname)
    elif pathname[0] != '/':
        expanded = os.path.join(prefix, pathname)

    return os.path.abspath(expanded)


def listAllFiles (directory, suffix=None, abspath=False):
    """Returns the list of all files within the input directory and
    all subdirectories.

    When suffix is given only matching filenames are returned; when
    abspath is False the returned paths are relative to directory.
    """
    files = []

    directory = expandPath(directory)

    for dirpath, dirnames, filenames in os.walk(directory, followlinks=True):
        if suffix:
            filenames = [f for f in filenames if f.endswith(suffix)]

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            if not abspath:
                filepath = os.path.relpath(filepath, start=directory)

            files.append(filepath)

    return files


class YAMLValidationError(Exception):
    """Raised when YAML content fails schema or content validation."""

    def __init__(self, arg):
        # Calling the base initializer makes str(exc) meaningful;
        # the original left Exception.args empty.
        super(YAMLValidationError, self).__init__(arg)

        # Set some exception information
        self.message = arg

        log.error(self.message)


class YAMLError(Exception):
    """Raised for malformed or unreadable YAML input."""

    def __init__(self, arg):
        super(YAMLError, self).__init__(arg)

        # Set some exception information
        self.message = arg

        log.error(self.message)


if __name__ == "__main__":
    # HACK: The 'doctest' module imports 'pdb' which imports 'cmd'.
    # Since none of these modules are using Python absolute imports, the
    # Python 'cmd' module conflicts with 'bliss.cmd' (in this directory).
    # As a workaround, temporarily remove the current directory from our
    # path prior to importing doctest.
    import sys
    saved = sys.path.pop(0)
    import doctest
    sys.path.insert(0, saved)

    (num_failed, num_tests) = doctest.testmod()
    filename = os.path.basename(__file__)

    if num_failed == 0:
        print("%-20s All %3d tests passed!" % (filename, num_tests))
# Compiled once at module scope: the originals recompiled these on every
# input line inside the processing loop.
#
# Matches document-start lines, e.g. "--- !Packet ..."
_DOC_PATTERN = re.compile('(---) (![a-z]+)(.*$)', flags=re.I)

# Matches sequence-start lines, e.g. "  - !Field ..."
_SEQ_PATTERN = re.compile('(\s*)(-+) !([a-z]+)(.*$)', flags=re.I)


class YAMLProcessor (object):
    """Loads a YAML file, optionally 'cleaning' it of YAML document tags
    so its content can be validated against a JSON Schema.
    """

    __slots__ = ["ymlfile", "data", "loaded", "doclines", "_clean"]

    def __init__(self, ymlfile=None, clean=True):
        """Creates a new YAML validator for the given schema and yaml file

        The schema file should validate against JSON Schema Draft 4
        http://json-schema.org/latest/json-schema-core.html

        The YAML file should validate against the schema file given
        """
        self.loaded   = False
        self.data     = []
        self.doclines = []
        self._clean   = clean

        self.ymlfile = ymlfile

        if ymlfile is not None:
            self.load()

    def load(self, ymlfile=None):
        """Load and process the YAML file"""
        if ymlfile is not None:
            self.ymlfile = ymlfile

        try:
            # If yaml should be 'cleaned' of document references
            if self._clean:
                self.data = self.process(self.ymlfile)
            else:
                with open(self.ymlfile, 'rb') as stream:
                    # NOTE(review): yaml.load_all without an explicit
                    # Loader is unsafe on untrusted input.
                    for data in yaml.load_all(stream):
                        self.data.append(data)

            self.loaded = True
        except ScannerError as e:
            msg = "YAML formatting error - '" + self.ymlfile + ": '" + str(e) + "'"
            raise util.YAMLError(msg)

    def process(self, ymlfile):
        """Cleans out all document tags from the YAML file to make it
        JSON-friendly to work with the JSON Schema.

        Records the line number of each document start in self.doclines
        (plus one trailing entry marking the end of the last document)
        and returns the cleaned text.
        """
        try:
            # Need a list of line numbers where the documents reside.
            # Used for finding/displaying errors.
            self.doclines = []
            linenum = None
            lines = []

            with open(ymlfile, 'r') as txt:
                for linenum, line in enumerate(txt):
                    # If we find a document, remove the tag
                    if _DOC_PATTERN.match(line):
                        line = _DOC_PATTERN.sub(r"---", line).lower()
                        self.doclines.append(linenum)
                    elif _SEQ_PATTERN.match(line):
                        # Replace the sequence start with key string
                        line = _SEQ_PATTERN.sub(r"\1\2 \3: line " + str(linenum), line).lower()

                    lines.append(line)

            if linenum is None:
                msg = "Empty YAML file: " + ymlfile
                raise util.YAMLError(msg)

            # Append one more document to docline for the end
            self.doclines.append(linenum + 1)

            # Joined once instead of quadratic string concatenation.
            return ''.join(lines)

        except IOError as e:
            msg = "Could not process YAML file '" + ymlfile + "': '" + str(e) + "'"
            raise IOError(msg)


class SchemaProcessor(object):
    """Loads a JSON Schema file for use in YAML validation."""

    __slots__ = ['_schemafile', 'data', '_proc_schema', 'loaded']

    def __init__(self, schemafile=None):
        """Creates a new YAML validator for the given schema and yaml file

        The schema file should validate against JSON Schema Draft 4
        http://json-schema.org/latest/json-schema-core.html

        The YAML file should validate against the schema file given
        """
        self.data   = {}
        self.loaded = False

        self.schemafile = schemafile

    @property
    def schemafile(self):
        return self._schemafile

    @schemafile.setter
    def schemafile(self, schemafile):
        self._schemafile = schemafile

        if schemafile is not None:
            self.load()

    def load(self, schemafile=None):
        """Load and process the schema file"""
        if schemafile is not None:
            self._schemafile = schemafile

        try:
            # Context manager fixes the original's leaked file handle
            # (json.load(open(...)) never closed the descriptor).
            with open(self._schemafile) as stream:
                self.data = json.load(stream)
        except (IOError, ValueError) as e:
            msg = "Could not load schema file '" + self._schemafile + "': '" + str(e) + "'"
            raise jsonschema.SchemaError(msg)

        self.loaded = True
class ErrorHandler(object):
    """ErrorHandler class

    Leverages the jsonschema.exceptions.ValidationError API in order to extract
    useful information from the error to provide more detail on the root cause
    of the error.

    Refer to http://python-jsonschema.readthedocs.io/en/latest/errors/ for more
    information on JSON Schema errors.
    """

    # NOTE(review): 'doclines' and 'messages' are declared in __slots__
    # but never assigned in __init__; any code reading self.doclines
    # (see the validators' except-paths) raises AttributeError -- confirm.
    __slots__ = ['error', 'doclines', 'ymlfile', 'schemafile', 'messages']

    def __init__(self, error=None, ymlfile=None, schemafile=None):
        # The validation error, source YAML file, and schema file this
        # handler reports on; all optional at construction time.
        self.error = error
        self.ymlfile = ymlfile
        self.schemafile = schemafile

    def process(self, docnum, doclines, error, messages=None):
        # Route *error* (a jsonschema ValidationError) for YAML document
        # *docnum* to pretty(), bounding the report to that document's
        # line range.  doclines lists the starting line number of each
        # document; formatted strings are appended to *messages*.
        # TODO: Process the various types of errors
        start = doclines[docnum]+1
        if error.message.endswith("is not of type u'object'"):
            msg = "Invalid root object in YAML. Check format."
            messages.append(msg)
        elif len(doclines) > docnum+1:
            # Bound the report by the start of the following document.
            end = doclines[docnum+1]+1
            self.pretty(start, end, error, messages)
        else:
            # Only one value for doclines since only 1 doc
            self.pretty(1, start, error, messages)

    def pretty(self, start, end, e, messages=None):
        """Pretties up the output error message so it is readable
        and designates where the error came from"""

        log.debug("Displaying document from lines '%i' to '%i'", start, end)

        # A compound error carries sub-errors in e.context; otherwise
        # report on the error itself.
        errorlist = []
        if len(e.context) > 0:
            errorlist = e.context
        else:
            errorlist.append(e)

        for error in errorlist:
            validator = error.validator

            if validator == "required":
                # Handle required fields
                msg = error.message
                messages.append("Between lines %d - %d. %s" % (start, end, msg))
            elif validator == "additionalProperties":
                # Handle additional properties not allowed; truncate very
                # long messages to 256 characters.
                if len(error.message) > 256:
                    msg = error.message[:253] + "..."
                else:
                    msg = error.message
                messages.append("Between lines %d - %d. %s" % (start, end, msg))
            elif len(error.relative_path) > 0:
                # Handle other cases where we can loop through the lines

                # get the JSON path to traverse through the file
                jsonpath = error.relative_path
                array_index = 0

                current_start = start
                foundline = 0
                found = False

                # Sliding window of source lines shown around the error.
                context = collections.deque(maxlen=20)
                # Marker appended to the offending line in the output.
                tag = " <<<<<<<<< Expects: %s <<<<<<<<<\n"""
                for cnt, path in enumerate(error.relative_path):

                    # Need to set the key we are looking, and then check the array count
                    # if it is an array, we have some interesting checks to do
                    if int(cnt) % 2 == 0:
                        # we know we have some array count.
                        # array_index keeps track of the array count we are looking for or number
                        # of matches we need to skip over before we get to the one we care about

                        # check if previous array_index > 0. if so, then we know we need to use
                        # that one to track down the specific instance of this nested key.
                        # later on, we utilize this array_index to loop through
                        array_index = jsonpath[cnt]

                        match_count = 0
                        continue
                    elif int(cnt) % 2 == 1:
                        # we know we have some key name
                        # current_key keeps track of the key we are looking for in the JSON Path
                        current_key = jsonpath[cnt]

                    for linenum in range(current_start, end):
                        line = linecache.getline(self.ymlfile, linenum)

                        # Check if line contains the error
                        if ":" in line:
                            l = line.split(':')
                            key = l[0]
                            value = ':'.join(l[1:])

                            # TODO:
                            # Handle maxItems TBD
                            # Handle minItems TBD
                            # Handle in-order (bytes) TBD
                            # Handle uniqueness TBD

                            # Handle cases where key in yml file is hexadecimal
                            try:
                                key = int(key.strip(), 16)
                            except ValueError:
                                key = key.strip()

                            if str(key) == current_key:
                                # check if we are at our match_count and end of the path
                                if match_count == array_index:
                                    # check if we are at end of the jsonpath
                                    if cnt == len(jsonpath)-1:
                                        # we are at the end of path so let's stop here
                                        if error.validator == "type":
                                            if value.strip() == str(error.instance):
                                                errormsg = "Value '%s' should be of type '%s'" % (error.instance, str(error.validator_value))
                                                line = line.replace("\n", (tag % errormsg))
                                                foundline = linenum
                                                found = True
                                            elif value.strip() == "" and error.instance is None:
                                                errormsg = "Missing value for %s." % key
                                                line = line.replace("\n", (tag % errormsg))
                                                foundline = linenum
                                                found = True
                                    elif not found:
                                        # otherwise change the start to the current line
                                        current_start = linenum
                                        break

                                match_count += 1

                        # for the context queue, we want to get the error to appear in
                        # the middle of the error output. to do so, we will only append
                        # to the queue in 2 cases:
                        #
                        # 1. before we find the error (found == False). we can
                        # just keep pushing on the queue until we find it in the YAML.
                        # 2. once we find the error (found == True), we just want to push
                        # onto the queue until the the line is in the middle
                        if not found or (found and context.maxlen > (linenum-foundline)*2):
                            context.append(line)
                        elif found and context.maxlen <= (linenum-foundline)*2:
                            break

                    # Loop through the queue and generate a readable msg output
                    out = ""
                    for line in context:
                        out += line

                    if foundline:
                        msg = "Error found on line %d in %s:\n\n%s" % (foundline, self.ymlfile, out)
                        messages.append(msg)

                        # reset the line it was found on and the context
                        foundline = 0
                        context.clear()

                linecache.clearcache()
            else:
                messages.append(error.message)
class Validator(object):
    """Base class providing JSON-Schema validation of YAML files."""

    __slots__ = ['_ymlfile', '_schemafile', '_ymlproc', '_schemaproc', 'ehandler']

    def __init__(self, ymlfile, schemafile):
        """Creates a new YAML validator for the given schema and yaml file

        The schema file should validate against JSON Schema Draft 4
        http://json-schema.org/latest/json-schema-core.html

        The YAML file should validate against the schema file given
        """
        self._ymlfile = ymlfile
        self._schemafile = schemafile

        # Get the error handler ready, just in case
        self.ehandler = ErrorHandler(ymlfile=self._ymlfile, schemafile=self._schemafile)

    def validate(self, messages=None):
        """Provides schema_val validation for objects that do not override
        in domain-specific validator"""
        valid = self.schema_val(messages)
        return valid

    def schema_val(self, messages=None):
        "Perform validation with processed YAML and Schema"
        self._ymlproc = YAMLProcessor(self._ymlfile)
        self._schemaproc = SchemaProcessor(self._schemafile)
        valid = True

        log.debug("BEGIN: Schema-based validation for YAML '%s' with schema '%s'",
                  self._ymlfile, self._schemafile)

        # Make sure the yml and schema have been loaded
        if self._ymlproc.loaded and self._schemaproc.loaded:
            # The validator is loop-invariant; build it once instead of
            # once per document as the original did.
            v = jsonschema.Draft4Validator(self._schemaproc.data)

            # Load all of the yaml documents. Could be more than one in the same YAML file.
            for docnum, data in enumerate(yaml.load_all(self._ymlproc.data)):
                # Since YAML allows integer keys but JSON does not, we need to first
                # dump the data as a JSON string to encode all of the potential integers
                # as strings, and then read it back out into the YAML format. Kind of
                # a clunky workaround but it works as expected.
                data = yaml.load(json.dumps(data))

                # Loop through the errors (if any) and set valid = False if any are found
                # Display the error message
                for error in sorted(v.iter_errors(data)):
                    msg = "Schema-based validation failed for YAML file '" + self._ymlfile + "'"
                    self.ehandler.process(docnum, self._ymlproc.doclines, error, messages)
                    valid = False

                if not valid:
                    # NOTE(review): when an earlier document set valid=False,
                    # msg here is the one bound during that document's error
                    # loop (the text is identical for every document).
                    log.error(msg)

        elif not self._ymlproc.loaded:
            raise util.YAMLError("YAML must be loaded in order to validate.")
        elif not self._schemaproc.loaded:
            raise jsonschema.SchemaError("Schema must be loaded in order to validate.")

        log.debug("END: Schema-based validation complete for '%s'", self._ymlfile)
        return valid


class CmdValidator (Validator):
    """Schema- and content-based validator for the Command Dictionary."""

    def __init__(self, ymlfile=None, schema=None):
        super(CmdValidator, self).__init__(ymlfile, schema)

    def validate(self, ymldata=None, messages=None):
        """Validates the Command Dictionary definitions"""
        schema_val = self.schema_val(messages)
        content_val = self.content_val(messages=messages)

        return schema_val and content_val

    def content_val(self, ymldata=None, messages=None):
        """Validates the Command Dictionary to ensure the contents for each of the fields
        meets specific criteria regarding the expected types, byte ranges, etc."""

        # Reload the YAML without document cleaning
        self._ymlproc = YAMLProcessor(self._ymlfile, False)

        log.debug("BEGIN: Content-based validation of Command dictionary")
        if ymldata is not None:
            cmddict = ymldata
        elif ymldata is None and self._ymlproc.loaded:
            cmddict = self._ymlproc.data
        elif not self._ymlproc.loaded:
            raise util.YAMLError("YAML failed to load.")

        try:
            # instantiate the document number.  this will increment in order to
            # track the line numbers and section where validation fails
            docnum = 0

            # boolean to hold argument validity
            argsvalid = True

            # list of rules to validate against
            rules = []

            ### set the command rules
            #
            # set uniqueness rule for command names
            rules.append(UniquenessRule('name', "Duplicate command name: %s", messages))

            # set uniqueness rule for opcodes
            rules.append(UniquenessRule('opcode', "Duplicate opcode: %s", messages))
            #
            ###
            for cmdcnt, cmddefn in enumerate(cmddict[0]):
                # check the command rules
                for rule in rules:
                    rule.check(cmddefn)

                # list of argument rules to validate against
                argrules = []

                ### set rules for command arguments
                #
                # set uniqueness rule for argument names
                argrules.append(UniquenessRule('name', "Duplicate argument name: " + cmddefn.name + ".%s", messages))

                # set type rule for arg.type
                argrules.append(TypeRule('type', "Invalid argument type for argument: " + cmddefn.name + ".%s", messages))

                # set argument size rule for arg.type.nbytes
                argrules.append(TypeSizeRule('nbytes', "Invalid argument size for argument: " + cmddefn.name + ".%s", messages))

                # set argument enumerations rule to check no enumerations contain un-quoted YAML special variables
                argrules.append(EnumRule('enum', "Invalid enum value for argument: " + cmddefn.name + ".%s", messages))

                # set byte order rule to ensure proper ordering of arguments
                argrules.append(ByteOrderRule('bytes', "Invalid byte order for argument: " + cmddefn.name + ".%s", messages))
                #
                ###

                argdefns = cmddefn.argdefns
                for arg in argdefns:
                    # check argument rules
                    for rule in argrules:
                        rule.check(arg)

                # check if argument rule failed, if so set the validity to False
                if not all(r.valid is True for r in argrules):
                    argsvalid = False

            log.debug("END: Content-based validation complete for '%s'", self._ymlfile)

            # check validity of all command rules and argument validity
            return all(rule.valid is True for rule in rules) and argsvalid

        except util.YAMLValidationError as e:
            # Display the error message
            if messages is not None:
                if len(e.message) < 128:
                    msg = "Validation Failed for YAML file '" + self._ymlfile + "': '" + str(e.message) + "'"
                else:
                    msg = "Validation Failed for YAML file '" + self._ymlfile + "'"

                log.error(msg)
                # NOTE(review): ErrorHandler never assigns 'doclines', so this
                # raises AttributeError if this path is ever reached -- confirm.
                self.ehandler.process(docnum, self.ehandler.doclines, e, messages)
            return False
class TlmValidator (Validator):
    """Schema- and content-based validator for the Telemetry Dictionary."""

    def __init__(self, ymlfile=None, schema=None):
        super(TlmValidator, self).__init__(ymlfile, schema)

    def validate(self, ymldata=None, messages=None):
        """Validates the Telemetry Dictionary definitions"""
        schema_val = self.schema_val(messages)

        # Fixed defects: the original referenced content_val before
        # assignment whenever messages was non-empty, and crashed on
        # len(None) when called with the default messages=None.
        content_val = True
        if not messages:
            # Content validation only runs when schema validation
            # produced no messages.
            content_val = self.content_val(ymldata, messages)

        return schema_val and content_val

    def content_val(self, ymldata=None, messages=None):
        """Validates the Telemetry Dictionary to ensure the contents for each of the fields
        meets specific criteria regarding the expected types, byte ranges, etc."""

        log.debug("BEGIN: Content-based validation of Telemetry dictionary")
        if ymldata is not None:
            tlmdict = ymldata
        else:
            tlmdict = tlm.TlmDict(self._ymlfile)

        try:
            # instantiate the document number.  this will increment in order to
            # track the line numbers and section where validation fails
            docnum = 0

            # boolean to hold field validity
            fldsvalid = True

            # list of rules to validate against
            rules = []

            ### set the packet rules
            #
            # set uniqueness rule for packet names
            rules.append(UniquenessRule('name', "Duplicate packet name: %s", messages))
            #
            ###

            # Loop through the keys and check each PacketDefinition
            for key in tlmdict.keys():
                pktdefn = tlmdict[key]
                # check the telemetry packet rules
                for rule in rules:
                    rule.check(pktdefn)

                # list of field rules to validate against
                fldrules = []

                ### set rules for telemetry fields
                #
                # set uniqueness rule for field name
                fldrules.append(UniquenessRule('name', "Duplicate field name: " + pktdefn.name + ".%s", messages))

                # set type rule for field.type
                fldrules.append(TypeRule('type', "Invalid field type for field: " + pktdefn.name + ".%s", messages))

                # set field size rule for field.type.nbytes
                fldrules.append(TypeSizeRule('nbytes', "Invalid field size for field: " + pktdefn.name + ".%s", messages))

                # set field enumerations rule to check no enumerations contain un-quoted YAML special variables
                fldrules.append(EnumRule('enum', "Invalid enum value for field: " + pktdefn.name + ".%s", messages))
                #
                ###

                flddefns = pktdefn.fields
                for fld in flddefns:
                    # check field rules
                    for rule in fldrules:
                        rule.check(fld)

                # check if field rule failed, if so set the validity to False
                if not all(r.valid is True for r in fldrules):
                    fldsvalid = False

            log.debug("END: Content-based validation complete for '%s'", self._ymlfile)

            # check validity of all packet rules and field validity
            return all(rule.valid is True for rule in rules) and fldsvalid

        except util.YAMLValidationError as e:
            # Display the error message
            if messages is not None:
                if len(e.message) < 128:
                    msg = "Validation Failed for YAML file '" + self._ymlfile + "': '" + str(e.message) + "'"
                else:
                    msg = "Validation Failed for YAML file '" + self._ymlfile + "'"

                log.error(msg)
                # Fixed defect: the original passed only 3 arguments to
                # ErrorHandler.process(docnum, doclines, error, messages);
                # now consistent with CmdValidator.content_val.
                # NOTE(review): ErrorHandler never assigns 'doclines', so
                # this still raises AttributeError if reached -- confirm.
                self.ehandler.process(docnum, self.ehandler.doclines, e, messages)
            return False
class ValidationRule(object):
    """Base class for content-validation rules.

    A rule checks one attribute (*attr*) of a definition object and
    appends a formatted error string (*msg*) to *messages* on failure.
    """

    def __init__(self, attr, msg=None, messages=None):
        # Fixed defect: the original default messages=[] was a mutable
        # default argument silently shared by every rule instance; a
        # fresh list is now created per rule when none is supplied.
        self.attr = attr
        self.valid = True
        self.msg = msg
        self.messages = [] if messages is None else messages


class UniquenessRule(ValidationRule):
    """Checks the uniqueness of an attribute across YAML documents"""

    def __init__(self, attr, msg, messages=None):
        """Takes in an attribute name, error message, and list of error
        messages to append to
        """
        super(UniquenessRule, self).__init__(attr, msg, messages)
        self.val_list = []

    def check(self, defn):
        """Performs the uniqueness check against the value list
        maintained in this rule object
        """
        val = getattr(defn, self.attr)
        if val is not None and val in self.val_list:
            self.messages.append(self.msg % str(val))
            # TODO self.messages.append("TBD location message")
            self.valid = False
        elif val is not None:
            self.val_list.append(val)
            log.debug(self.val_list)


class TypeRule(ValidationRule):
    """Checks the object's type is an allowable type"""

    def __init__(self, attr, msg, messages=None):
        """Takes in an attribute name, error message, and list of error
        messages to append to
        """
        super(TypeRule, self).__init__(attr, msg, messages)

    def check(self, defn):
        """Performs isinstance check for the definition's data type.

        Assumes the defn has 'type' and 'name' attributes
        """
        allowedTypes = dtype.PrimitiveType, dtype.ArrayType
        if not isinstance(defn.type, allowedTypes):
            self.messages.append(self.msg % str(defn.name))
            # TODO self.messages.append("TBD location message")
            self.valid = False


class TypeSizeRule(ValidationRule):
    """Checks the object size matches the designated type"""

    def __init__(self, attr, msg, messages=None):
        """Takes in an attribute name, error message, and list of error
        messages to append to
        """
        super(TypeSizeRule, self).__init__(attr, msg, messages)

    def check(self, defn, msg=None):
        """Uses the byte range in the object definition to determine
        the number of bytes and compares to the size defined in the type.

        Assumes the defn has 'type' and 'name' attributes, and a slice() method
        """
        if isinstance(defn.type, dtype.PrimitiveType):
            # Check the nbytes designated in the YAML match the PDT
            nbytes = defn.type.nbytes
            defnbytes = defn.slice().stop - defn.slice().start
            if nbytes != defnbytes:
                self.messages.append(self.msg % defn.name)
                self.messages.append("Definition size of (" + str(defnbytes) +
                                     " bytes) does not match size of data" +
                                     " type " + str(defn.type.name) + " (" +
                                     str(nbytes) + " byte(s))")
                # TODO self.messages.append("TBD location message")
                self.valid = False


class EnumRule(ValidationRule):
    """Checks all enumerated values do not contain boolean keys.

    The YAML standard has a set of allowable boolean strings that are
    interpreted as boolean True/False unless explicitly quoted in the YAML
    file.  The YAML boolean strings include (TRUE/FALSE/ON/OFF/YES/NO).
    """

    def __init__(self, attr, msg, messages=None):
        """Takes in an attribute name, error message, and list of error
        messages to append to
        """
        super(EnumRule, self).__init__(attr, msg, messages)

    def check(self, defn, msg=None):
        # Check enum does not contain boolean keys
        enum = defn.enum
        if enum is not None:
            for key in enum:
                val = enum.get(key)
                if type(key) is bool or type(val) is bool:
                    self.messages.append(self.msg % str(defn.name))
                    self.messages.append("Must enclose all YAML boolean " +
                                         "strings (TRUE/FALSE/ON/OFF/YES/NO) " +
                                         "with quotes.")
                    # TODO self.messages.append("TBD location message")
                    self.valid = False


class ByteOrderRule(ValidationRule):
    """Checks the byte ordering based on the previously set stop byte/bit"""

    def __init__(self, attr, msg, messages=None):
        """Takes in an attribute name, error message, and list of error
        messages to append to
        """
        super(ByteOrderRule, self).__init__(attr, msg, messages)
        self.prevstop = 0

    def check(self, defn, msg=None):
        """Uses the definition's slice() method to determine its start/stop
        range and verifies it begins where the previous definition ended.
        """
        if (defn.slice().start != self.prevstop):
            self.messages.append(self.msg % str(defn.name))
            # TODO self.messages.append("TBD location message")
            self.valid = False

        self.prevstop = defn.slice().stop
This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.') +args = vars(parser.parse_args()) + +un = args['user'] +if not un: + un = raw_input('Username: ') + +pw = args['pass'] +if not pw: + pw = getpass.getpass('Password: ') + +url = API_HOSTNAME + 'repos/bliss/bliss-core/issues?state=all&per_page=100&sort=updated' +if args['start_time']: + url += '&since={}'.format(args['start_time']) + +r = requests.get(url, auth=(un, pw)) +raw_issues = r.json() + +while True: + if 'next' not in r.links: + break + + r = requests.get(r.links['next']['url'], auth=(un, pw)) + raw_issues += r.json() + +issues = [] +for issue in raw_issues: + # If the issue is a pull request, skip it + if 'pull_request' in issue: + continue + # If the issue isn't closed, skip it + elif issue['closed_at'] == 'null' or issue['state'] != 'closed': + continue + # If the issue was closed after the end time parameter, skip it + elif args['end_time'] < datetime.datetime.strptime(issue['closed_at'], '%Y-%m-%dT%H:%M:%SZ'): + continue + # If the issue has a label of `wontfix` or `duplicate`, skip it + elif issue['labels'] and len([l for l in issue['labels'] if l['name'] in ['resolution-wontfix', 'resolution-duplicate']]) != 0: + continue + # If the issue has no milestone, skip it + elif not issue['milestone']: + continue + + issues.append(issue) + +for issue in issues: + print 'Issue #{} - {}'.format(issue['number'], issue['title']) diff --git a/build/pypi/make-pypi.sh b/build/pypi/make-pypi.sh new file mode 100755 index 00000000..419fb096 --- /dev/null +++ b/build/pypi/make-pypi.sh @@ -0,0 +1,16 @@ +#!/bin/sh + +mkdir -p simple + +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.1.0#egg=bliss-core[tests,docs]" +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.2.0#egg=bliss-core[tests,docs]" +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.3.0#egg=bliss-core[tests,docs]" +# pip download -d simple 
"git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.4.0#egg=bliss-core[tests,docs]" +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.5.0#egg=bliss-core[tests,docs]" +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.6.0#egg=bliss-core[tests,docs]" +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.7.0#egg=bliss-core[tests,docs]" +# pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.8.0#egg=bliss-core[tests,docs]" +#pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.9.0#egg=bliss-core[tests,docs]" +pip download -d simple "git+ssh://git@github.jpl.nasa.gov/bliss/bliss-core.git@0.10.0#egg=bliss-core[tests,docs]" + +./simple.py simple diff --git a/build/pypi/simple.py b/build/pypi/simple.py new file mode 100755 index 00000000..2f1d9091 --- /dev/null +++ b/build/pypi/simple.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python + + +import os +import re +import sys + + +isdir = os.path.isdir +isfile = os.path.isfile +join = os.path.join + + +def endswith (s, suffixes): + if type(suffixes) is str: + suffixes = [ suffixes ] + + for suffix in suffixes: + if s.endswith(suffix): + return True + + return False + + +def index (dirname, names): + with open( join(dirname, 'index.html'), 'w' ) as output: + output.write('\n') + output.write('\n') + + for name in names: + output.write(' %s
\n' % (name, name)) + + output.write('\n') + + +def ispkg (filename): + extensions = '.tar.gz', '.whl', '.zip' + return isfile(filename) and endswith(filename, extensions) + + +def normalize (name): + return re.sub(r'[-_.]+', '-', name).lower() + + +def organize (dirname): + for name in os.listdir(dirname): + pathname = join(dirname, name) + if ispkg(pathname): + pkgpath = join(dirname, pkgname(name)) + system('mkdir -p %s' % pkgpath) + system('mv %s %s' % (pathname, pkgpath)) + + +def pkgname (name): + parts = [ ] + + for part in name.split('-'): + if part[0].isdigit(): + break + else: + parts.append(part) + + return normalize( '-'.join(parts) ) + + +def system (cmd): + print cmd + os.system(cmd) + + +if __name__ == '__main__': + if len(sys.argv) != 2: + print ' usage: simple.py directory' + print + print ' Creates a PEP 503 -- Simple Repository API compliant PyPI' + print ' repository in the given directory containing Python packages' + print ' downloaded with "pip download". For example:' + print + print ' $ mkdir simple' + print ' $ pip download -d simple -r requirements.txt' + print ' $ ./simply.py simple' + print + sys.exit(2) + + root = sys.argv[1] + organize(root) + + packages = [ s for s in os.listdir(root) if isdir( join(root, s) ) ] + packages.sort() + index(root, packages) + + for pkg in packages: + path = join(root, pkg) + files = [ s for s in os.listdir(path) if ispkg( join(path, s)) ] + files.sort() + index(path, files) + + print 'Done. Indexed %d packages in "%s".' % (len(packages), root) diff --git a/build/update_docs_release.sh b/build/update_docs_release.sh new file mode 100755 index 00000000..4450ba64 --- /dev/null +++ b/build/update_docs_release.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +cd .. +git checkout master +git pull +sphinx-apidoc --separate --force --no-toc -o doc/source bliss bliss/test +python setup.py build_sphinx +git checkout gh-pages +\cp doc/build/html/*.html . +\cp doc/build/html/*.js . +\cp doc/build/html/objects.inv . 
+\cp -r doc/build/html/_static . +\cp -r doc/build/html/_images . +git add *.html *.js _static _images + +echo +echo "*** Documentation update complete ***" +echo +echo "Please review staged files, commit, and push" +echo "the changes (git push origin gh-pages)" +echo +echo "When finished run 'git checkout master'" diff --git a/data/config/bsc.yaml b/data/config/bsc.yaml new file mode 100644 index 00000000..b75824f4 --- /dev/null +++ b/data/config/bsc.yaml @@ -0,0 +1,41 @@ +# Example bsc configuration data. This should be customized for your +# use case before use. + +capture_manager: + root_log_directory: /tmp + + manager_server: + host: localhost + port: 8080 + +handlers: + - name: test1 + conn_type: udp + address: ['', 8500] + + # A Handler's 'path' attribute allows for specification of + # handler-specific sub-folders to meet a desired nesting structure in + # the root log directory. + path: additional_dir/test/%j + + # A Handlers 'file_name_pattern' attribute allows for custom file name + # specification. The final handler log file path is passed through + # strftime and format string substitution with the handler passed + # as the kwargs for substitution. If nothing is present for the + # 'file_name_pattern' a default of -.pcap is used. 
+ file_name_pattern: '%Y-%m-%d-randomUDPtestData-{name}.pcap' + rotate_log: True + + - name: test2 + conn_type: udp + address: ['', 8500] + rotate_log: True + + - name: test3 + conn_type: udp + address: ['', 8125] + + # Example logger for monitoring raw Ethernet frames + # - name: ethernet_test + # type: ethernet + # address: [etho0, 0x55aa] diff --git a/data/config/ccsds_header.yaml b/data/config/ccsds_header.yaml new file mode 100644 index 00000000..76e32522 --- /dev/null +++ b/data/config/ccsds_header.yaml @@ -0,0 +1,239 @@ +- !Packet + name: CCSDS_HEADER + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header (always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: 'Not Present' + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + bytes: [0, 1] + type: MSB_U16 + mask: 0x07FF + - !Field + name: sequence_flags + desc: | + When sending commands, the sequence flags must be marked as + unsegmented data. All other PL packets may be per source/destination + ICDs. + bytes: 2 + type: U8 + mask: 0xC0 + enum: + 0: 'Continuation Segment' + 1: 'First Segment' + 2: 'Last Segment' + 3: 'Unsegmented' + - !Field + name: sequence_count + desc: | + Sequential count which numbers each packet on a Logical Data Path, + i.e. a separate counter is maintained for each source-destination + pair. + bytes: [2, 3] + mask: 0x3FFF + type: MSB_U16 + - !Field + name: packet_length + desc: | + Sequential count which expresses the length of the remainder of the + packet including checkword if present. 
The value is the number of + bytes (octets) following the field minus 1. + bytes: [4, 5] + type: MSB_U16 + - !Field + name: time_coarse + desc: | + The Time Code is made up of a preamble field (P-field) and a time + field (T-field). For ISS, as allowed by the CCSDS Recommendation, + the P-field is implicitly conveyed (it is not present with the + T-field). ISS time is referenced to GPS Time (midnight 5-6 January + 1980). The T-field consists of 4 octets of coarse time and 1 octet of + fine time. These octets are a set of binary counters, cascaded with + the adjacent counters. The value represents the elapsed time since + midnight 5-6 January 1980. The least significant bit (LSB) of the + least significant octet of coarse time is equal to 1 second. + bytes: [6, 9] + type: TIME32 + - !Field + name: time_fine + desc: | + The LSB of the fine time octet is equal to 2-8 second, or about 4 ms. + bytes: 10 + type: TIME8 + - !Field + name: time_id + desc: | + The time field is always present. This field indicates its use. + Commands with Time ID = 00 indicate real-time commands (RTC) which are + not authenticated, e.g., commands originating from the Orbiter. Cmds + with ID = 01 are RTC commands for which authentication is required, + e.g., commands to the USOS from the SSCC. Commands with ID = 10 are + stored in the C&C MDMs time-tagged command queue for later execution. + For Data Packets, the field is always 01 and the time fields contain + the time at the beginning of the processing frame when the packet was + generated. + bytes: 11 + type: U8 + mask: 0xC0 + enum: + 0: 'Not Used' + 1: 'Data Gen' + 2: 'Time Tagged Cmd' + 3: 'Invalid' + - !Field + name: checkword_indicator + desc: | + This field indicates if a checkword (add without carry checksum) is + contained in the CCSDS packet. When present, the checkword is the last + 16-bit word of the packet. All commands must contain a checkword. Data + packets do not contain a checkword. 
+ bytes: 11 + type: U8 + mask: 0x20 + enum: + 0: 'Not Present' + 1: 'Present' + - !Field + name: zoe + desc: | + Used in a telemetry data packet to indicate that the telemetry packet + is from the onboard ZOE recording. + bytes: 11 + type: U8 + mask: 0x10 + enum: + 0: 'Onboard recording' + 1: 'All Other Cases' + - !Field + name: packet_type + bytes: 11 + type: U8 + mask: 0x0F + enum: + 1: 'Data Dump' + 4: 'Telemetry Status' + 6: 'Payload Private Science' + 7: 'Ancillary Data' + 8: 'Essential Command' + 9: 'System Command' + 10: 'RT/Payload Command' + 11: 'Data Load' + # - !Field + # name: spare + # bytes: 12 + # mask: 0x80 + - !Field + name: element_id + bytes: 12 + type: U8 + mask: 0x78 + enum: + 0: 'NASA 0' + 1: 'NASA 1' + 2: 'ESA/APM' + 3: 'NASDA' + 4: 'RSA' + 5: 'CSA' + 6: 'ESAATV' + 7: 'ASI' + 8: 'ESA/ERA' + 9: 'Reserved' + 10: 'SPP' + 11: 'HTV' + # 12: 'Spare 0' + # 13: 'Spare 1' + # 14: 'Spare 2' + # 15: 'Spare 3' + - !Field + name: data_packet + desc: | + Identifies this field as a data packet. This field distinguishes + between command and data packets and is used to ensure unique + definition of the 32- bit USOS Packet ID field. + bytes: 12 + mask: 0x04 + type: U8 + - !Field + name: version_id + desc: | + Configuration Management. Used to identify multiple versions + of a format. + bytes: [12, 13] + mask: 0x03C0 + type: MSB_U16 + - !Field + name: format_id + desc: Identifies the packet. 
+ bytes: 13 + mask: 0x3F + type: U8 + enum: + 0: Reserved + 1: 'Essential Telemetry' + 2: 'Housekeeping Tlm-1' + 3: 'Housekeeping Tlm-2' + 4: 'PCS DDT' + 5: 'CCS S-Band Command Response' + 6: 'Contingency Telemetry to SMCC' + 7: 'Normal Data Dump' + 8: 'Extended data Dump' + 9: 'MSS Ancillary Data' + 10: 'Reserved' + 11: 'Broadcast Ancillary Data' + 12: 'PCS Status' + 13: 'NCS to OIU Telemetry and ECOMM telemetry' + 14: 'CCS to OIU Telemetry - Direct' + 15: 'SM to USOS Status Data' + 16: 'Normal File Dump' + 17: 'Extended File Dump' + 18: 'NCS to FGB Telemetry' + 19: 'EMU Suit Data' + 20: 'ZOE Normal Dump (S- Band)' + 21: 'ZOE Extended Dump (S- Band)' + 22: 'EMU S-Band TLM Packet' + 23: 'VTC1 to CCS Status Packet' + 24: 'VTC2 to CCS Status Packet' + 25: 'MMC to CCS Status Packet' + 26: 'CCS to OIU Telemetry via UHF' + # 27-63 reserved + # - !Field + # name: spare + # bytes: [14, 15] + # mask: 0x3F + # type: U8 + - !Field + name: frame_id + desc: | + Identifies the processing frame count of the frame in which the data + packet was built. Not applicable for extended dump and data dump + packets. + bytes: 15 + mask: 0x7F + type: U8 diff --git a/data/config/cmd.yaml b/data/config/cmd.yaml new file mode 100644 index 00000000..04d68c0c --- /dev/null +++ b/data/config/cmd.yaml @@ -0,0 +1,51 @@ +- !Command + name: NO_OP + opcode: 0x0001 + subsystem: CORE + title: NO_OP + desc: | + Standard NO_OP command. + +- !Command + name: SEQ_START + opcode: 0x002 + subsystem: CMD + title: Start Sequence + desc: | + This command starts a specified command sequence. + + arguments: + - !Argument + name: sequence_id + desc: Sequence ID + units: none + type: MSB_U16 + bytes: [0,1] + +- !Command + name: SEQ_ENABLE_DISABLE + opcode: 0x0003 + subsystem: CMD + title: Enable/Disable Sequence + desc: | + This command enables or disabled the specified sequence. If a + sequence to be disabled is currently executing, it will be + interrupted. 
+ + arguments: + - !Argument + name: sequence_id + desc: Sequence ID + units: none + type: MSB_U16 + bytes: [0,1] + + - !Argument + name: enable + desc: Enable + units: none + type: U8 + bytes: 2 + enum: + 0: DISABLED + 1: ENABLED diff --git a/data/config/config.yaml b/data/config/config.yaml new file mode 100644 index 00000000..d3d04748 --- /dev/null +++ b/data/config/config.yaml @@ -0,0 +1,52 @@ +default: + command: + history: + filename: ../../cmdhist.pcap + sequence: + directory: ../../seq/ + cmddict: + filename: cmd.yaml + + evrdict: + filename: evr.yaml + + tlmdict: + filename: tlm.yaml + + limits: + filename: limits/limits.yaml + + table: + filename: table.yaml + + bsc: + filename: bsc.yaml + + logging: + name: bliss + hostname: bliss.jpl.nasa.gov + + phase: 'dev' + + data: + '1553': + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/downlink/1553 + bad: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/downlink/bad + lehx: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/downlink/lehx + planning: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/planning + sdos: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/sdos + uplink: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/uplink + ats: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/ats + + gui: + port: 8080 + telemetry: + - stream: + name: OCO3_1553_EHS + port: 3076 diff --git a/data/config/evr.yaml b/data/config/evr.yaml new file mode 100644 index 00000000..c702959e --- /dev/null +++ b/data/config/evr.yaml @@ -0,0 +1,23 @@ +- !EVR + name: NO_ERROR + code: 0x0001 + desc: No error + message: "No error" + +- !EVR + name: EVR_1 + code: 0x0002 + desc: EVR 1 + message: "The first evr" + +- !EVR + name: EVR_2 + code: 0x0003 + desc: EVR 2 + message: "The second evr" + +- !EVR + name: EVR_3 + code: 0x0004 + desc: EVR 3 + message: "The third evr %s" diff --git a/data/config/limits/limits.yaml b/data/config/limits/limits.yaml new file mode 100644 index 00000000..1199f237 --- /dev/null +++ b/data/config/limits/limits.yaml @@ 
-0,0 +1,21 @@ +# Min/Max Range Example - Simple case for telemetry value range +- !Limit + source: 1553_HS_Packet.Voltage_A + desc: Voltage A + units: Volts + lower: + error: 5.0 + warn: 10.0 + upper: + error: 45.0 + warn: 40.0 + +# Error enumerations example - throw error if value is in enum list +- !Limit + source: Ethernet_HS_Packet.product_type + desc: Ethernet Product Type field + value: + error: MEM_DUMP + warn: + - TABLE_FOO + - TABLE_BAR diff --git a/data/config/table.yaml b/data/config/table.yaml new file mode 100644 index 00000000..f21664d8 --- /dev/null +++ b/data/config/table.yaml @@ -0,0 +1,64 @@ +# Dictionary describing the FSW table formats that are +# uploaded into FSW table memory. Uses OrderedDict to maintain +# the order, and contains the name of each column, description, +# datatype, units, and the maximum length for that column. + +- !FSWTable + name: response + delimiter: "," + uptype: 1 + size: 8224 + header: + - !FSWColumn + name: HEADER_COLUMN_ONE + desc: The first column in our header + format: "%x" + units: none + type: MSB_U16 + bytes: [0,1] + + - !FSWColumn + name: HEADER_COLUMN_TWO + desc: The second column in our header + format: "%u" + units: none + type: U8 + bytes: 2 + + - !FSWColumn + name: HEADER_COLUMN_THREE + desc: The third column in our header + format: "%u" + units: none + type: U8 + bytes: 3 + + columns: + - !FSWColumn + name: COLUMN_ONE + desc: First FSW Table Column + format: "%u" + units: none + type: MSB_U16 + bytes: [0,1] + + - !FSWColumn + name: COLUMN_TWO + desc: Second FSW Table Column + format: "%u" + units: none + type: MSB_U16 + bytes: [2,3] + + - !FSWColumn + name: COLUMN_THREE + desc: Third FSW Table Column + format: "%u" + units: none + type: U8 + bytes: 4 + enum: + 0: TEST_ENUM_0 + 1: TEST_ENUM_1 + 2: TEST_ENUM_2 + 3: TEST_ENUM_3 diff --git a/data/config/tlm.yaml b/data/config/tlm.yaml new file mode 100644 index 00000000..9ac4825c --- /dev/null +++ b/data/config/tlm.yaml @@ -0,0 +1,88 @@ +- !Packet + name: 
1553_HS_Packet + desc: Ethernet 1553 packet used to monitor telemetry in real-time + functions: + CurrA_Fx(dn): (dn - 2) / 1234.0 + + fields: + - !Field + name: Voltage_A + desc: Voltage A as a 14-bit DN. Conversion to engineering units is TBD. + units: Volts + type: MSB_U16 + - !Field + name: Voltage_B + desc: Voltage B as a 14-bit DN. Conversion to engineering units is TBD. + units: Volts + type: MSB_U16 + - !Field + name: Voltage_C + desc: Voltage C as a 14-bit DN. Conversion to engineering units is TBD. + units: Volts + type: MSB_U16 + - !Field + name: Voltage_D + desc: Voltage D as a 14-bit DN. Conversion to engineering units is TBD. + units: Volts + type: MSB_U16 + aliases: + icd: Voltage_D_Alias + - !Field + name: Current_A + type: MSB_U16 + dntoeu: + equation: CurrA_Fx(raw.Current_A) + units: amperes + +- !Packet + name: Ethernet_HS_Packet + desc: Ethernet Health and Status Packet + fields: + - !Field + name: sync_word + type: MSB_U32 + value: 0x01234567 + - !Field + name: time + type: TIME64 + desc: Time when data product created (seconds since GPS/ISS epoch) + - !Field + name: product_type + type: U8 + enum: + 0: TABLE_FOO + 1: TABLE_BAR + 2: MEM_DUMP + 3: HEALTH_AND_STATUS + when: product_type == 3 + - !Field + name: product_length + type: MSB_U32 + desc: Product length (including this header) + mask: 0x00FFFFFF + - !Field + name: VoltageSampleTime + desc: Time measurements were taken + type: TIME64 + - !Field + name: Voltage_A + desc: Voltage A as a 14-bit DN. Conversion to engineering units is TBD. + type: MSB_U16 + - !Field + name: Voltage_B + desc: Voltage B as a 14-bit DN. Conversion to engineering units is TBD. + type: MSB_U16 + - !Field + name: Voltage_C + desc: Voltage C as a 14-bit DN. Conversion to engineering units is TBD. + type: MSB_U16 + - !Field + name: Voltage_D + desc: Voltage D as a 14-bit DN. Conversion to engineering units is TBD. 
+ type: MSB_U16 + - !Field + name: footer + type: MSB_U32 + value: 0x89ABCDEF + +- !include ccsds_header.yaml diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 00000000..d51aca3e --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,223 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) + $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " epub3 to make an epub3" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX 
files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +.PHONY: clean +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: html +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: dirhtml +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +.PHONY: singlehtml +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +.PHONY: pickle +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +.PHONY: json +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +.PHONY: htmlhelp +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." 
+ +.PHONY: qthelp +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/BLISS-Core.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/BLISS-Core.qhc" + +.PHONY: applehelp +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +.PHONY: devhelp +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/BLISS-Core" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/BLISS-Core" + @echo "# devhelp" + +.PHONY: epub +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +.PHONY: epub3 +epub3: + $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 + @echo + @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." + +.PHONY: latex +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +.PHONY: latexpdf +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 
+ +.PHONY: latexpdfja +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: text +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +.PHONY: man +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +.PHONY: texinfo +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +.PHONY: info +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +.PHONY: gettext +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +.PHONY: changes +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +.PHONY: linkcheck +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +.PHONY: doctest +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." 
+ +.PHONY: coverage +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +.PHONY: xml +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +.PHONY: pseudoxml +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/doc/make.bat b/doc/make.bat new file mode 100644 index 00000000..a31a28be --- /dev/null +++ b/doc/make.bat @@ -0,0 +1,272 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source +set I18NSPHINXOPTS=%SPHINXOPTS% source +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. epub3 to make an epub3 + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. 
xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + echo. coverage to run coverage check of the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +REM Check if sphinx-build is available and fallback to Python version if any +%SPHINXBUILD% 1>NUL 2>NUL +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +:sphinx_ok + + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. 
+ goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\BLISS-Core.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\BLISS-Core.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "epub3" ( + %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %~dp0 + echo. 
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "coverage" ( + %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage + if errorlevel 1 exit /b 1 + echo. + echo.Testing of coverage in the sources finished, look at the ^ +results in %BUILDDIR%/coverage/python.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. 
+ goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/doc/source/_static/ccsds_prim_header.png b/doc/source/_static/ccsds_prim_header.png new file mode 100644 index 0000000000000000000000000000000000000000..a7383a185acb94425ae9a2d32ba1e985003705c7 GIT binary patch literal 6947 zcmY*;bx;(5vpBcz|>rh;+kI0tW&D2h#140!K)9 zNj<;6-}}w)y?=IhW@mPGX6v)NG1{6+#Dp}2I5;@OD#~x);o#sx|H<+Ac>m}mGE?#& z3)NQFQ@Fps|0n+a`?sm7NmNvHV`HPcyPKAlc6xeRPEKxcaBzHlJR%|@Ffh=-z<`B? zMNLg@VPV0}&hF&oWOa4*;NYORxVWdMCnY7Nrluw*C#S8gO<7qvF)`7~%1TB?#?8$w zG&EFCPmhm}&)nSn>({Tbv9Y|oyZ``znVFfClyr}%<_ZUg#!Tg{te!7oFNX+PQ2ycr z2j`4sE%*0vJ9t9V{r!_AYW2B74U~!&YH@J~G3mq-b{Nx*gZF^<_*MWdlM^0d{4SI+ z?s{_evZSc-5ZcavrFE?JA zod)BxM{B>-f2LPX(2B-&EAJ$wX84?^X!l~M79GqL)nC5dk`3i%$LW*V>)pxQ#*M`d z(bMmNRE9Nf=xa~Qy!&1tC8!zuJ9w`W=3q6ZNZ>1*r0&qyLcD^n9{Pe6iXmL<5^y~GGFWTMNuqTigj2Ipk{ zM1Wa*?tu{bM1&6LbYv+)?4ot3-u%fHChDS%Z&0~L`YAG}edJ{57cM%+d7pHBhEU1~ z99RF%fzY0}<>*aimZCiK8QF~3z!=zB9{<*JObOjPsnM#tPD$k2L~yp>z#(qWneAjhN(X3kc8#|g7U?2fx#0Y>h7^wW55xT5@dv? zFp!IQnL8fH1nk(v$gri%_JaO@)li~;^ATYW{%29W2ax^Ua=MwUV?{5*Eck~1MkRNE zXNF15uP1?qLZ>g*W~TE!Oe{jqf5kTKFrv;6PiNH}L-0hm2}Lh+Ikstz+8I$o9rvD7 zIgmiL?9E#?>`vcY>ZVRvMi~fV_xN)sl!1^emm`Um@0JlT7?SGV#IR12$0ii#6htGo zXpEGOFPLlG8Lram`Egk__BQRy8%z6s;SZ@!FUk_gaH2z85+jdHo(H!WO%{5QwvQ7* zWpH~cwyLEy>lWCEu-{k;-*_=`1QfA3V8y73#^uDi%>W&I-~JG0^;;1KQ@7CfUBmjm zP9yb%!dK%(E&L%j269hiOtVN9KFl{Ulds`)7jSM<^vn#W-?5;1!8IQ5-SXH9ym0OD z-H8@k>&#GuS+j%difDuS{X?Tvp)OOs`G>}Oak+Noij*4K!nm=p+e~(p{p?;A{@f*3E~eS?Ju}~UAd*Z%wZ0vMpxA-CyKc`LsMNlb`0VHu z+tox^*>&?r$&&|4niK#q{Rz>c#_8zEcG`=5z&E#qA-nVdJc)9D0&0HyEY_-~U#Cc! 
zy7o?LJF^;Gl}+PWXs8W{)So;3IR6M3FY~jx%{A=VBxagF6R(1ftQM9SObdOYU-Ls9 zN-ftK+iAT1p_7%_k78NNCXwR>L-Tiz5sZ>O^+ zGb5|g)M7HV6NF;7 z8RWT5_vrzKp~JdbQAu9KD&>25E#@5^R!*ug zZRRXNy=@KTo$NK>+O73z*|?^JcCGMPGj0L?4N(Wg@H~mjhK2O{A=WZ2{5J$Cpu&PXwqO`I~N} z7yFd1j_=h6j+ zTwL;xEEwD_`(zvVgaJq;?7FI)U#ji(wpi}i*4ho-Wiv6!~q)+ z3dPdwXj*nYKm-Te2SP8$W16tVX_lzf_-&pEYQ-`zHT>QW56a6@o7Yr&MW;DDqwXsWn3Au%Qz8Aw>7J8ePX%jL=m^*1R&N}WIGU?(Qkhz zh}GY-0Azw7yQ+HRN@mXu#{BhKB=QtwWfA_RN;Ocp&7=szOX1;C3IlaEAv}uPd=EEE zYylSSkEk5>^m!84am9b+NUikfBTnEaz4iSO)5v|+B{QWTYoX@YSM3_F1r+5kNgjx* zdw9e-J%wKXu;=jZB`aT{oNqH`O>_U?(0akoy;BlEq)Lj}CPWu}a$bTG_*>5t$HgmG z_O<87w^G;ChC*5|wezl8MS9UB9UdJAtUvtS8$0c=LuJYu7fw9zlBgYA`74MmIdXYIeOBjl!vnD3 zx@Uvhk#Ai{m%~gTtaYF%XrPM|x|I0K#@8iU801FvN1xq5XU<%ZEHq%r!8|g`vz-u8 zmh%v!bBf##VYleccu59TgwilK3)O|*nJdp?n;U zy*hCiVYIm^xg^iZ6vaa;sE4ORa$se%aoUZ26lf*!s=lSPQTVqps_L=lQtAn~YowES zGuZFHv!}CiUm-}@uf;B7$(G2FuU!+aw<>Z<-l@9+$N+t>91>d_iK-Q2v!6{l$Ve9* z07t;Jx<%s@IW(en<5sEkh&kR|VkYTJOeMnsD5-CHGJ3^Ft3!FYppX&t-1h8eMb2RO zZPbynW$i$hQ*QE`5)x%9Wf~lRga}kgvc2-SQK<8dzd|EN5nno@@1cqAZ4h3do9EOu~DeM01#z)>Mp;P++CA@ znfH(yQlqD&BeeS5^4Au2nJUfl(hvQ`{v=yl~g#$Rk-wbj=zqz=sIyarG3djUSnYi0SE-Vq>wp zQRZZp`UNVcgo4AA#$W2K_CjDbyA!g;n$IRI$=GpWh5;+gV-&e=_D<~29Y_dC;nc*M z5MJZE_12S1^(1~Nrsd1zRbSr^?cEq*WLNg5&LLln+Vh^sz35;;Y*40*8>aynXLd6| z2tHNJpXx`yNsDTeEEG{ZlUAE=d!1K%a_d1M1kXtgs~qfmJma?nYni2FoLa;*;m{;& zo|Lpb6EE~`;j=W${z+$r*gXM7=ZzrmTYo$61N>Hv6h3aP)ehJ-+kiK9OF88afWe$6 zea{54FlFI4@lYA(p{|iKd~Axv$K!13Z)+|@Im%tmWi%-(;Lfgg!Nk$l{H|It;j^UU zi-ky0ctDNFHOgzvt;GH6u>T7V7 zb!4&Qx*>0317U=;mb(QEZf`5@1SF$7$@wGqbRCJ$+p4$wd#Jgr4;A5BYem11a?5ek z_WR&KUFi<9aa2v4=Q}4uc{IyQ4>Wj%rZjtnhmZC@pa+a5llgwJ+n-Y}@n2h%j54ud zq?T`w7xSFFQ=$IdDSQR9<1ybmU_W)QSZni*LEDsWHTU~k{PLj_93RRal3kh9o!z}4 zweh|PyRkukkEW^2&C*xD8&*$|r=#QPB7#OW3JK>f)KBi6n6dyh53*A^yc-ron5HvE z;r43EhN&41l7g>?-#Dk7TBgotB5m@rS{PUz)7}!~z5{=yuG_QJb$St2Xb&Gpc*eM6 zzoJDl@qI2Qk?0-}h(iNZ{WZ7X zfx$a{Mo)#Ai9Cv8M_EpvvL4%#vkywnsG<{7xVhq^T1wJGAHbx7#!JPh*(oOeQAu9) 
zPqy_dauG5^AoIUWH84nE5ieZkb4P}ItVpAL5TOWa%@Y=2b*84ZvV;5qmy2vs!a;klz*|@{nJjiF9a5Lq_b6RN}KVZI_=DWuga0O z==#+1=6vhI>VF?W9h>SC`7D^XYcU_{Gdl84n!)A|6iZ^C_Pyqq5S;On0-ZP;F;S)E4^m z?$tc$2<4|6I_Ne{nK?TSU(~gZq2<7gn)Ln`l^p(aG8#Er=HagyJ3w|2-wg}d>|oi& zOdBF(Bm<=0`}=w^Y8(04GkI!OV$=gYHEceb+!0_|e*PHy(0M_p)_<^#spL+(&1n5v z5UxQ?xKXs*@DRLHFLGeSxJj=}HAU9!aVr6FN@BTSjrhjc5T^>32zuT`vOVbSdBaf z`}L>Ep7sGn$s4x_q+eRJ)UQ*8o=!CZ^EN5T!2=#M*)601-_FAh39!AN(dRFInN%>t zh}XvS;Xi5xWIT*bcCn;ud`$_z|E;(@M*qM{2pcwbW&xlJ*SQE+LCO5;%QWy+#hqPh zVm+iNmCBwO4hoAxIREZzRJ}L{J)sRJZQ3^EpP8o7Mb0=93vD?0G1v$Y?-l1$fS0{R z7E${Gh;M;Y_X*5_5ROOXh z5Qqy6{V5!?^x#C$cs0ewYF(B-t3o#c80ycK{c|>__nMVjhO$gji_0^^`c9zba0ruc z)jN@>uq`VCt`wdp9d9#_X|JM0k=xVMdjSUIeU^y;-!>q}_OH+)3^DFDvptxh<_wBt zJNtP^vv|n;7~@ah_nvluK5t;J-7A!j*2a;f?NCwuxWwC9VvS%10F?wQx~xeop!7?8 z@9hJ$3A8Z#QQ%!762+4MR~iV!(>lAYa6Z3NMk^XV!)k07PPD1IErI?Z+*QT6D0>>9 zb3{Tk*in1w~Dp;YRUtF{@qywDVs;S1M@Vx+x!=8m~Usaml1#Zx;Ka<|+r8Czj)8 zdi2NwhGO4dPgXRxJJm#PQq^Q$q~w^MF#XE|A8TYn2p197#QMK`CM&iNT@9$EXn#l% z;NCFmX(bPxPoAICYM}mX46OH0M5HkAelQy}h?5>3?*DO#CoVuT^j25L1Pl(dZpMAAB7TsA(U{Y?fe@jNoyqyXR*81K7Xjt zDjHZW1xqh&Fpd_v)t$zj;Yi>nlYA>+6f~w1HORr z4)+K7zVucx91G~&FeDrs@Bj(KD1@MN`yQcfKDCLz0G&~>v={KGQZ)^=@ePy&NfGU_ z?W}~0iUo;7KT#Xf-}j3Vlx*sQTmjd4YtisW{XNl}cE9)ylGu^A8`d(e)ULymp@|Z0 zz*k$Jeo5|s9|U{d((9WVAM8`4C3F4?zE6#@7{TOl)8vrA*cK48`GXd@`MT(hKiz*$ zeh^f!CX%6PyTt(P&aeI22&kxaJ*=f_{i(%|9{AfxJ;eZ;p<-$59)9rK;K*>giC`b8R zv8_Fr$~{T{C*x0`ZYy0u-C%Z=<4?*5{GgUq(}6YYue?|1M^BDR=K2_8uD|U`r``TV z@T_$RRMQ2w|KYykmnu8$BY<5ekX?8l{XYilvX9xhh8wd)`xjp%^rVH=& zE7Y>te1)x;ynT!|qtAIQT&zFFX>1{NC2}?8+l$G4HJGEMUcd8uNqUeL{r;PyR`AP{ z`OAeDOZ}q!@tf~IDo@P&)UUpt0S|l+H@mhF4~cS@LNTCpD)QnR%p;9Zq29yj^W&af zYrk$WM0=*-T%8w{U$?Z634x@^w6lM3F=QbS30fLDXoxS;e^XtUe^w_!_Tp}bA9}$R zgm6sGVSE3spZNz3lPV-~Ph)n)$CYHbEl-q=R9a*1#c6={dN@Mm_Go#WYF_vlV$E9a6ez?e4 zN3sR1dcnL%uLnMQ&|k?*tcJg&4p?Gp6xymb(lKUtJbd|hNMkW8VJw9I;;%4^k4<+#{4vF5H~|F?MBNrwbCbPLrOf2nDFOUl4N>w kt7Ng`P2Qn<+bc$16x~NH$qxblUR^jU3Yu@L 2) + + + # + # Send a command + inst.cmd.send('BLISS_NO_OP') 
+ + + # + # The packet buffer may be accessed directly for the current + # packet. The current packet may also be accessed via subscript + # zero. For example, the following are equivalent: + # + # inst.tlm.BLISS_EHS.CmdCmdsRcvd == inst.tlm.BLISS_EHS[0].CmdCmdsRcvd + # + # Older packets are accessed using increasing subscripts, e.g. + # the penultimate received packet is accessed via: + # + # inst.tlm.BLISS_EHS[1].CmdCmdsRcvd + # + # Here we'll wait until telemetry tells us that it received our + # command or we'll timeout (and raise an Exception) if we wait + # 5 seconds and nothing happens. + if wait('inst.tlm.BLISS_EHS.CmdCmdsRcvd == inst.tlm.BLISS_EHS[1].CmdCmdsRcvd + 1', timeout=5): + + log.info('Command received') + else: + log.info('Timeout') + diff --git a/doc/source/bliss.core.api.rst b/doc/source/bliss.core.api.rst new file mode 100644 index 00000000..e42e0d9b --- /dev/null +++ b/doc/source/bliss.core.api.rst @@ -0,0 +1,7 @@ +bliss.core.api module +===================== + +.. automodule:: bliss.core.api + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_bsc.rst b/doc/source/bliss.core.bin.bliss_bsc.rst new file mode 100644 index 00000000..c32398f0 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_bsc.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_bsc module +=============================== + +.. automodule:: bliss.core.bin.bliss_bsc + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_bsc_create_handler.rst b/doc/source/bliss.core.bin.bliss_bsc_create_handler.rst new file mode 100644 index 00000000..ada04d37 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_bsc_create_handler.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_bsc_create_handler module +============================================== + +.. 
automodule:: bliss.core.bin.bliss_bsc_create_handler + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_bsc_stop_handler.rst b/doc/source/bliss.core.bin.bliss_bsc_stop_handler.rst new file mode 100644 index 00000000..7b1bd5b0 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_bsc_stop_handler.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_bsc_stop_handler module +============================================ + +.. automodule:: bliss.core.bin.bliss_bsc_stop_handler + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_cmd_send.rst b/doc/source/bliss.core.bin.bliss_cmd_send.rst new file mode 100644 index 00000000..e7c6661d --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_cmd_send.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_cmd_send module +==================================== + +.. automodule:: bliss.core.bin.bliss_cmd_send + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_create_dirs.rst b/doc/source/bliss.core.bin.bliss_create_dirs.rst new file mode 100644 index 00000000..3dab9361 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_create_dirs.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_create_dirs module +======================================= + +.. automodule:: bliss.core.bin.bliss_create_dirs + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_dict_writer.rst b/doc/source/bliss.core.bin.bliss_dict_writer.rst new file mode 100644 index 00000000..e596a2ff --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_dict_writer.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_dict_writer module +======================================= + +.. 
automodule:: bliss.core.bin.bliss_dict_writer + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_orbits.rst b/doc/source/bliss.core.bin.bliss_orbits.rst new file mode 100644 index 00000000..b9538383 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_orbits.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_orbits module +================================== + +.. automodule:: bliss.core.bin.bliss_orbits + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_pcap.rst b/doc/source/bliss.core.bin.bliss_pcap.rst new file mode 100644 index 00000000..09a98247 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_pcap.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_pcap module +================================ + +.. automodule:: bliss.core.bin.bliss_pcap + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_pcap_segment.rst b/doc/source/bliss.core.bin.bliss_pcap_segment.rst new file mode 100644 index 00000000..7d70f010 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_pcap_segment.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_pcap_segment module +======================================== + +.. automodule:: bliss.core.bin.bliss_pcap_segment + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_seq_decode.rst b/doc/source/bliss.core.bin.bliss_seq_decode.rst new file mode 100644 index 00000000..526d74e9 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_seq_decode.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_seq_decode module +====================================== + +.. 
automodule:: bliss.core.bin.bliss_seq_decode + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_seq_encode.rst b/doc/source/bliss.core.bin.bliss_seq_encode.rst new file mode 100644 index 00000000..31a18abd --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_seq_encode.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_seq_encode module +====================================== + +.. automodule:: bliss.core.bin.bliss_seq_encode + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_seq_print.rst b/doc/source/bliss.core.bin.bliss_seq_print.rst new file mode 100644 index 00000000..6938bace --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_seq_print.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_seq_print module +===================================== + +.. automodule:: bliss.core.bin.bliss_seq_print + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_seq_send.rst b/doc/source/bliss.core.bin.bliss_seq_send.rst new file mode 100644 index 00000000..fdfef8ae --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_seq_send.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_seq_send module +==================================== + +.. automodule:: bliss.core.bin.bliss_seq_send + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_table_decode.rst b/doc/source/bliss.core.bin.bliss_table_decode.rst new file mode 100644 index 00000000..c9db3cbf --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_table_decode.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_table_decode module +======================================== + +.. 
automodule:: bliss.core.bin.bliss_table_decode + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_table_encode.rst b/doc/source/bliss.core.bin.bliss_table_encode.rst new file mode 100644 index 00000000..7affa7c8 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_table_encode.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_table_encode module +======================================== + +.. automodule:: bliss.core.bin.bliss_table_encode + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_tlm_db_insert.rst b/doc/source/bliss.core.bin.bliss_tlm_db_insert.rst new file mode 100644 index 00000000..04efe02e --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_tlm_db_insert.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_tlm_db_insert module +========================================= + +.. automodule:: bliss.core.bin.bliss_tlm_db_insert + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_tlm_send.rst b/doc/source/bliss.core.bin.bliss_tlm_send.rst new file mode 100644 index 00000000..5270c947 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_tlm_send.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_tlm_send module +==================================== + +.. automodule:: bliss.core.bin.bliss_tlm_send + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.bliss_yaml_validate.rst b/doc/source/bliss.core.bin.bliss_yaml_validate.rst new file mode 100644 index 00000000..0e072051 --- /dev/null +++ b/doc/source/bliss.core.bin.bliss_yaml_validate.rst @@ -0,0 +1,7 @@ +bliss.core.bin.bliss_yaml_validate module +========================================= + +.. 
automodule:: bliss.core.bin.bliss_yaml_validate + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bin.rst b/doc/source/bliss.core.bin.rst new file mode 100644 index 00000000..fecf61f4 --- /dev/null +++ b/doc/source/bliss.core.bin.rst @@ -0,0 +1,33 @@ +bliss.core.bin package +====================== + +Submodules +---------- + +.. toctree:: + + bliss.core.bin.bliss_bsc + bliss.core.bin.bliss_bsc_create_handler + bliss.core.bin.bliss_bsc_stop_handler + bliss.core.bin.bliss_cmd_send + bliss.core.bin.bliss_create_dirs + bliss.core.bin.bliss_dict_writer + bliss.core.bin.bliss_pcap + bliss.core.bin.bliss_pcap_segment + bliss.core.bin.bliss_seq_decode + bliss.core.bin.bliss_seq_encode + bliss.core.bin.bliss_seq_print + bliss.core.bin.bliss_seq_send + bliss.core.bin.bliss_table_decode + bliss.core.bin.bliss_table_encode + bliss.core.bin.bliss_tlm_db_insert + bliss.core.bin.bliss_tlm_send + bliss.core.bin.bliss_yaml_validate + +Module contents +--------------- + +.. automodule:: bliss.core.bin + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.bsc.rst b/doc/source/bliss.core.bsc.rst new file mode 100644 index 00000000..b23c7c80 --- /dev/null +++ b/doc/source/bliss.core.bsc.rst @@ -0,0 +1,7 @@ +bliss.core.bsc module +===================== + +.. automodule:: bliss.core.bsc + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.ccsds.rst b/doc/source/bliss.core.ccsds.rst new file mode 100644 index 00000000..2db46574 --- /dev/null +++ b/doc/source/bliss.core.ccsds.rst @@ -0,0 +1,7 @@ +bliss.core.ccsds module +======================= + +.. automodule:: bliss.core.ccsds + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.cfg.rst b/doc/source/bliss.core.cfg.rst new file mode 100644 index 00000000..a16ccb50 --- /dev/null +++ b/doc/source/bliss.core.cfg.rst @@ -0,0 +1,7 @@ +bliss.core.cfg module +===================== + +.. 
automodule:: bliss.core.cfg + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.cmd.rst b/doc/source/bliss.core.cmd.rst new file mode 100644 index 00000000..9dbbc98c --- /dev/null +++ b/doc/source/bliss.core.cmd.rst @@ -0,0 +1,7 @@ +bliss.core.cmd module +===================== + +.. automodule:: bliss.core.cmd + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.coord.rst b/doc/source/bliss.core.coord.rst new file mode 100644 index 00000000..ddde3584 --- /dev/null +++ b/doc/source/bliss.core.coord.rst @@ -0,0 +1,7 @@ +bliss.core.coord module +======================= + +.. automodule:: bliss.core.coord + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.db.rst b/doc/source/bliss.core.db.rst new file mode 100644 index 00000000..a0b19728 --- /dev/null +++ b/doc/source/bliss.core.db.rst @@ -0,0 +1,7 @@ +bliss.core.db module +==================== + +.. automodule:: bliss.core.db + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.dmc.rst b/doc/source/bliss.core.dmc.rst new file mode 100644 index 00000000..a7124627 --- /dev/null +++ b/doc/source/bliss.core.dmc.rst @@ -0,0 +1,7 @@ +bliss.core.dmc module +===================== + +.. automodule:: bliss.core.dmc + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.dtype.rst b/doc/source/bliss.core.dtype.rst new file mode 100644 index 00000000..e377ed65 --- /dev/null +++ b/doc/source/bliss.core.dtype.rst @@ -0,0 +1,7 @@ +bliss.core.dtype module +======================= + +.. automodule:: bliss.core.dtype + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.evr.rst b/doc/source/bliss.core.evr.rst new file mode 100644 index 00000000..b02960b3 --- /dev/null +++ b/doc/source/bliss.core.evr.rst @@ -0,0 +1,7 @@ +bliss.core.evr module +===================== + +.. 
automodule:: bliss.core.evr + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.gds.rst b/doc/source/bliss.core.gds.rst new file mode 100644 index 00000000..27e47f82 --- /dev/null +++ b/doc/source/bliss.core.gds.rst @@ -0,0 +1,7 @@ +bliss.core.gds module +===================== + +.. automodule:: bliss.core.gds + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.geom.rst b/doc/source/bliss.core.geom.rst new file mode 100644 index 00000000..98778c84 --- /dev/null +++ b/doc/source/bliss.core.geom.rst @@ -0,0 +1,7 @@ +bliss.core.geom module +====================== + +.. automodule:: bliss.core.geom + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.json.rst b/doc/source/bliss.core.json.rst new file mode 100644 index 00000000..65515bc1 --- /dev/null +++ b/doc/source/bliss.core.json.rst @@ -0,0 +1,7 @@ +bliss.core.json module +====================== + +.. automodule:: bliss.core.json + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.limit.rst b/doc/source/bliss.core.limit.rst new file mode 100644 index 00000000..1394b6d8 --- /dev/null +++ b/doc/source/bliss.core.limit.rst @@ -0,0 +1,7 @@ +bliss.core.limit module +======================= + +.. automodule:: bliss.core.limit + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.limits.rst b/doc/source/bliss.core.limits.rst new file mode 100644 index 00000000..b30069da --- /dev/null +++ b/doc/source/bliss.core.limits.rst @@ -0,0 +1,7 @@ +bliss.core.limits module +======================== + +.. automodule:: bliss.core.limits + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.log.rst b/doc/source/bliss.core.log.rst new file mode 100644 index 00000000..7542e5df --- /dev/null +++ b/doc/source/bliss.core.log.rst @@ -0,0 +1,7 @@ +bliss.core.log module +===================== + +.. 
automodule:: bliss.core.log + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.pcap.rst b/doc/source/bliss.core.pcap.rst new file mode 100644 index 00000000..07dec1d4 --- /dev/null +++ b/doc/source/bliss.core.pcap.rst @@ -0,0 +1,7 @@ +bliss.core.pcap module +====================== + +.. automodule:: bliss.core.pcap + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.rst b/doc/source/bliss.core.rst new file mode 100644 index 00000000..c0cc6803 --- /dev/null +++ b/doc/source/bliss.core.rst @@ -0,0 +1,45 @@ +bliss.core package +================== + +Subpackages +----------- + +.. toctree:: + + bliss.core.bin + bliss.core.test + +Submodules +---------- + +.. toctree:: + + bliss.core.api + bliss.core.bsc + bliss.core.ccsds + bliss.core.cfg + bliss.core.cmd + bliss.core.coord + bliss.core.db + bliss.core.dmc + bliss.core.dtype + bliss.core.evr + bliss.core.gds + bliss.core.geom + bliss.core.json + bliss.core.limits + bliss.core.log + bliss.core.pcap + bliss.core.seq + bliss.core.table + bliss.core.tlm + bliss.core.util + bliss.core.val + +Module contents +--------------- + +.. automodule:: bliss.core + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.seq.rst b/doc/source/bliss.core.seq.rst new file mode 100644 index 00000000..b84a3a99 --- /dev/null +++ b/doc/source/bliss.core.seq.rst @@ -0,0 +1,7 @@ +bliss.core.seq module +===================== + +.. automodule:: bliss.core.seq + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.table.rst b/doc/source/bliss.core.table.rst new file mode 100644 index 00000000..226a1ccf --- /dev/null +++ b/doc/source/bliss.core.table.rst @@ -0,0 +1,7 @@ +bliss.core.table module +======================= + +.. 
automodule:: bliss.core.table + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.rst b/doc/source/bliss.core.test.rst new file mode 100644 index 00000000..704cb5b2 --- /dev/null +++ b/doc/source/bliss.core.test.rst @@ -0,0 +1,31 @@ +bliss.core.test package +======================= + +Submodules +---------- + +.. toctree:: + + bliss.core.test.test_bsc + bliss.core.test.test_ccsds + bliss.core.test.test_cfg + bliss.core.test.test_cmd + bliss.core.test.test_coord + bliss.core.test.test_dmc + bliss.core.test.test_dtype + bliss.core.test.test_evr + bliss.core.test.test_limits + bliss.core.test.test_log + bliss.core.test.test_pcap + bliss.core.test.test_table + bliss.core.test.test_tlm + bliss.core.test.test_util + bliss.core.test.test_val + +Module contents +--------------- + +.. automodule:: bliss.core.test + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_bsc.rst b/doc/source/bliss.core.test.test_bsc.rst new file mode 100644 index 00000000..96d2ea98 --- /dev/null +++ b/doc/source/bliss.core.test.test_bsc.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_bsc module +=============================== + +.. automodule:: bliss.core.test.test_bsc + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_ccsds.rst b/doc/source/bliss.core.test.test_ccsds.rst new file mode 100644 index 00000000..bfed1e69 --- /dev/null +++ b/doc/source/bliss.core.test.test_ccsds.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_ccsds module +================================= + +.. automodule:: bliss.core.test.test_ccsds + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_cfg.rst b/doc/source/bliss.core.test.test_cfg.rst new file mode 100644 index 00000000..3da5c035 --- /dev/null +++ b/doc/source/bliss.core.test.test_cfg.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_cfg module +=============================== + +.. 
automodule:: bliss.core.test.test_cfg + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_cmd.rst b/doc/source/bliss.core.test.test_cmd.rst new file mode 100644 index 00000000..7a4f5db4 --- /dev/null +++ b/doc/source/bliss.core.test.test_cmd.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_cmd module +=============================== + +.. automodule:: bliss.core.test.test_cmd + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_coord.rst b/doc/source/bliss.core.test.test_coord.rst new file mode 100644 index 00000000..cab8a4ae --- /dev/null +++ b/doc/source/bliss.core.test.test_coord.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_coord module +================================= + +.. automodule:: bliss.core.test.test_coord + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_dmc.rst b/doc/source/bliss.core.test.test_dmc.rst new file mode 100644 index 00000000..fb8a035e --- /dev/null +++ b/doc/source/bliss.core.test.test_dmc.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_dmc module +=============================== + +.. automodule:: bliss.core.test.test_dmc + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_dtype.rst b/doc/source/bliss.core.test.test_dtype.rst new file mode 100644 index 00000000..8a91c268 --- /dev/null +++ b/doc/source/bliss.core.test.test_dtype.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_dtype module +================================= + +.. automodule:: bliss.core.test.test_dtype + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_evr.rst b/doc/source/bliss.core.test.test_evr.rst new file mode 100644 index 00000000..7678bb15 --- /dev/null +++ b/doc/source/bliss.core.test.test_evr.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_evr module +=============================== + +.. 
automodule:: bliss.core.test.test_evr + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_limit.rst b/doc/source/bliss.core.test.test_limit.rst new file mode 100644 index 00000000..40d3be9f --- /dev/null +++ b/doc/source/bliss.core.test.test_limit.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_limit module +================================= + +.. automodule:: bliss.core.test.test_limit + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_limits.rst b/doc/source/bliss.core.test.test_limits.rst new file mode 100644 index 00000000..73402d3e --- /dev/null +++ b/doc/source/bliss.core.test.test_limits.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_limits module +================================== + +.. automodule:: bliss.core.test.test_limits + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_log.rst b/doc/source/bliss.core.test.test_log.rst new file mode 100644 index 00000000..bd23324b --- /dev/null +++ b/doc/source/bliss.core.test.test_log.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_log module +=============================== + +.. automodule:: bliss.core.test.test_log + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_pcap.rst b/doc/source/bliss.core.test.test_pcap.rst new file mode 100644 index 00000000..dade7a56 --- /dev/null +++ b/doc/source/bliss.core.test.test_pcap.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_pcap module +================================ + +.. automodule:: bliss.core.test.test_pcap + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_table.rst b/doc/source/bliss.core.test.test_table.rst new file mode 100644 index 00000000..1b078339 --- /dev/null +++ b/doc/source/bliss.core.test.test_table.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_table module +================================= + +.. 
automodule:: bliss.core.test.test_table + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_tlm.rst b/doc/source/bliss.core.test.test_tlm.rst new file mode 100644 index 00000000..f9f016b7 --- /dev/null +++ b/doc/source/bliss.core.test.test_tlm.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_tlm module +=============================== + +.. automodule:: bliss.core.test.test_tlm + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_util.rst b/doc/source/bliss.core.test.test_util.rst new file mode 100644 index 00000000..8d42370a --- /dev/null +++ b/doc/source/bliss.core.test.test_util.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_util module +================================ + +.. automodule:: bliss.core.test.test_util + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.test.test_val.rst b/doc/source/bliss.core.test.test_val.rst new file mode 100644 index 00000000..ce76a8d8 --- /dev/null +++ b/doc/source/bliss.core.test.test_val.rst @@ -0,0 +1,7 @@ +bliss.core.test.test_val module +=============================== + +.. automodule:: bliss.core.test.test_val + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.tlm.rst b/doc/source/bliss.core.tlm.rst new file mode 100644 index 00000000..f5fb2ed6 --- /dev/null +++ b/doc/source/bliss.core.tlm.rst @@ -0,0 +1,7 @@ +bliss.core.tlm module +===================== + +.. automodule:: bliss.core.tlm + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.util.rst b/doc/source/bliss.core.util.rst new file mode 100644 index 00000000..9ca97516 --- /dev/null +++ b/doc/source/bliss.core.util.rst @@ -0,0 +1,7 @@ +bliss.core.util module +====================== + +.. 
automodule:: bliss.core.util + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.core.val.rst b/doc/source/bliss.core.val.rst new file mode 100644 index 00000000..994aace1 --- /dev/null +++ b/doc/source/bliss.core.val.rst @@ -0,0 +1,7 @@ +bliss.core.val module +===================== + +.. automodule:: bliss.core.val + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bliss.rst b/doc/source/bliss.rst new file mode 100644 index 00000000..ed45a8df --- /dev/null +++ b/doc/source/bliss.rst @@ -0,0 +1,17 @@ +bliss package +============= + +Subpackages +----------- + +.. toctree:: + + bliss.core + +Module contents +--------------- + +.. automodule:: bliss + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/source/bsc_intro.rst b/doc/source/bsc_intro.rst new file mode 100644 index 00000000..6e161778 --- /dev/null +++ b/doc/source/bsc_intro.rst @@ -0,0 +1,307 @@ +Binary Stream Capture Introduction +================================== + +The Binary Stream Capture (BSC) module provides tools for monitoring and capturing networking traffic and persisting it into PCap files. BSC can monitor UDP, TCP, and raw Ethernet traffic and is configurable via YAML. BSC also provides a RESTful interface for the manipulation and instantiation of data handlers. + +To initialize BSC, run the ``bliss-bsc`` utility script:: + + $ bliss-bsc + +If you want to customize BSC before you start it you can do so via a YAML file. By default this config file will be called ``bsc.yaml``. There are two main components of the configuration: The first is the ``capture_manager`` configuration. This sets high level configuration for the system that manages logging tasks as well as the webserver that handles the REST endpoints. The second is ``handler`` configuration which specifies tasks that will log specific connections. + +---- + +Capture Manager Configuration +----------------------------- + +The capture manager defaults are fairly sane. 
However, you will most likely want to adjust the location where log files are saved at the minimum. + +.. code-block:: yaml + + capture_manager: + root_log_directory: /tmp + + manager_server: + host: localhost + port: 8080 + +root_log_directory: + Specifies the path that should be treated as the root directory for writing log files. Each of the handlers can nest their log data under this directory in customized folders, but all BSC log files will be children of this root folder. + +The ``manager_server`` settings are used to control where the RESTful endpoint webserver runs. In general, you'll only need to adjust the ``port`` setting to deal with potential clashes with other services that you may be running. + +---- + +Handler Configuration +--------------------- + +The handler configuration section allows you to set up one or more handlers to be run when BSC is initialized. + +.. code-block:: yaml + + - name: test1 + conn_type: udp + address: ['', 8500] + path: additional_dir/test/%j + file_name_pattern: %Y-%m-%d-randomUDPtestData-{name}.pcap + rotate_log: True + rotate_log_index: day + rotate_log_delta: 1 + +You only need to provide a few of the available configuration options and BSC will handle the remaining options. + +name: + A unique name for the capture handler. + +conn_type: + The type of connection that the handler should make to the specified address. This can be one of **udp**, **tcp**, or **ethernet** for reading raw ethernet frames. + +address: + The address to which the handler should attempt to connect and monitor. The value for *conn_type* affects the format that you'll specify here. For a **upd** handler the address will be of the form ``['', , ]``. For an **ethernet** handler the address will be of the form ``[, /start + + Create a new handler called *name*. + + **Handler Attributes**: + + See the `Handler Configuration`_ section for details on what can be included here. 
Note that the *address* field is split into two components (loc and port) for the REST service. The below options are required for proper functionality! + + port: + The port/protocol for the connection. + + conn_type: + The type of connection the handler will make. One of *udp*, *ethernet*, or *tcp*. + + **Example Post Data**: + + .. code-block:: javascript + + { + 'loc': '', + 'port': 8125, + 'conn_type': 'udp' + } + + **Example Request**: + + .. code-block:: bash + + curl --form "port=8125" --form "conn_type=udp" http://localhost:8080/mytesthandler/start + +.. http:delete:: //stop + + Stop all handlers that match a given *name*. + + **Example Request**: + + .. code-block:: bash + + curl -X DELETE http://localhost:8080/mytesthandler/stop + + .. warning:: + + There isn't a requirement that handlers have unique names. As such, if multiple handlers have the same name they will all be terminated! + +.. http:get:: //config + + Returns a configuration dictionary for handlers with a given *name*. + + **Example Request**: + + .. code-block:: bash + + curl http://localhost:8080/mytesthandler/config + + **Example Response**: + + .. code-block:: javascript + + [ + { + conn_type: "udp", + handler: { + pre_write_transforms: [], + file_name_pattern: "%Y-%m-%d-randomUDPtestData-{name}.pcap", + rotate_log: true, + name: "mytesthandler", + log_dir: "/tmp/additional_dir/test/%j" + }, + log_file_path: "/tmp/additional_dir/test/211/2016-07-29-randomUDPtestData-test1.pcap", + address: ["", 8500] + } + ] + + .. note:: + + There isn't a requirement that handlers have unique names. As such, if multiple handlers have the same name you will receive multiple handlers' configuration dictionaries. + +.. http:post:: //rotate + + Trigger log rotation for a given handler name. + + **Example Request**: + + .. code-block:: bash + + curl -X POST http://localhost:8080/mytesthandler/rotate + + .. 
warning:: + + Note that if the file name pattern provided isn't sufficient for a rotation to occur with a new unique file name you will not see a log rotation. Be sure to timestamp your files in such a way to ensure that this isn't the case! The default file name pattern includes year, month, day, hours, minutes, and seconds to make sure this works as expected. + +---- + +Convenience Scripts +------------------- + +Create Handler +^^^^^^^^^^^^^^ + +The **bliss-bsc-create-handler** bin script provides a wrapper around the BSC REST endpoint for creating a log handler. It requires a name for the new handler, a hostname/interface name, port/protocol number, and the connection type (one of 'udp', 'tcp', or 'ethernet'). + +**Example:** + +.. code-block:: bash + + bliss-bsc-create-handler new_handler '' 8123 udp + +Stop Handler +^^^^^^^^^^^^ + +The **bliss-bsc-stop-handler** bin script provides a wrapper around the BSC REST endpoint for stopping a log handler. It requires the handler's name that you wish to stop. + +**Example:** + +.. code-block:: bash + + bliss-bsc-stop-handler new_handler diff --git a/doc/source/c_and_dh_intro.rst b/doc/source/c_and_dh_intro.rst new file mode 100644 index 00000000..db63b15a --- /dev/null +++ b/doc/source/c_and_dh_intro.rst @@ -0,0 +1,33 @@ +Introduction to Command & Data Handling Tables and Products +=========================================================== + +The `bliss.core.table` module provides interfaces for encoding and decoding flight software tables to and from text and binary. + +The flight software table formats are defined in YAML and usually stored in a configuration file called **table.yaml**. Tables are constructed from **header** definitions and **column** definitions. + +.. 
code-block:: yaml + + --- !FSWTable + name: OurTable + delimiter: "," + # other config + + header: + - !FSWColumn + name: FIRST_HEADER_COLUMN + # other header column config + + - !FSWColumn + name: SECOND_HEADER_COLUMN + # other header column config + + columns: + - !FSWColumn + name: FIRST_COLUMN + # other column config + + - !FSWColumn + name: SECOND_COLUMN + # other column config + +There are a number of helper scripts for encoding/decoding flight software tables and for uploading tables. Checkout the :doc:`Command Line Intro ` page for additional information on what utilities are available. Each utility script provides information on its interfaces via **help** documentation. diff --git a/doc/source/command_intro.rst b/doc/source/command_intro.rst new file mode 100644 index 00000000..5912ec9d --- /dev/null +++ b/doc/source/command_intro.rst @@ -0,0 +1,223 @@ +Command Dictionary Introduction +=============================== + +AIT provides support for YAML-based configuration of commands with enough detail to provide verification of information (E.g., units) and encoding/decoding. The commands are constrained by the ISS 1553B command word design (64 total words with 11 reserved). + +.. code-block:: yaml + + # An example command for setting the operation + # mode of an instrument. + --- !Command + name: CORE_SET_OP_MODE + opcode: 0x0001 + subsystem: CORE + desc: | + This command sets the operational mode. + + arguments: + - !Argument + name: mode + desc: Mode + units: none + type: U8 + bytes: 0 + enum: + 0: SAFE + 1: IDLE + 2: SCANNING + 3: SCIENCE + +All the valid parameters and attributes that you can have in your command dictionary configuration file is controlled by the command dictionary schema file. You can view the full schema file in the AIT Core repo at **bliss/core/data/cmd_schema.json**. A snippet of a schema is below. 
You can see that it allows for quite a bit of control over the command dictionary including nested object verification, individual attribute type checks, and required fields. + +.. code-block:: javascript + + { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Command Dictionary Schema", + "description": "Command Dictionary Schema", + "type": "array", + "items": { + "required": ["command", "name", "opcode"], + "additionalProperties": false, + "properties": { + "command": { + "type": "string" + }, + "name": { + "type": "string" + }, + "opcode": { + "type": "integer" + }, + "subsystem": { + "type": "string" + }, + "title": { + "type": "string" + }, + "desc": { + "type": "string" + }, + "arguments": { + ... Nested Argument and Fixed Field Schemas snipped + } + } + } + } + +AIT also provides a command line utility for verifying that your command dictionary configuration is valid given that you have a defined schema file. If you pass the ``--cmd`` or ``-c`` flag to ``bliss-yaml-validate`` it will check this for you. + +.. code-block:: bash + + $ bliss-yaml-validate --cmd + 2016-07-27T09:36:21.408 | INFO | Validation: SUCCESS: ... + +AIT provides command encoding/decoding via :class:`bliss.core.cmd.CmdDict`. + + >>> cmddict = bliss.core.cmd.getDefaultDict() + >>> type(cmddict) + + +You can create and encode a command directly from the command dictionary. 
+ + >>> noop = cmddict.create('NO_OP') + >>> type(noop) + + >>> noop + NO_OP + >>> bin_noop = noop.encode() + >>> bin_noop + bytearray(b'\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') + +Given a binary blob, you can also decode into a command. + + >>> decoded_cmd = cmddict.decode(bin_noop) + >>> type(decoded_cmd) + + >>> decoded_cmd + NO_OP + + +---- + +!Command +-------- + +The AIT command constructor is the parent construct for all AIT command definitions. It encapsulates optional arguments and contains metadata critical to the command. + +name: + A **string** denoting the name of this command + +opcode: + The number assigned to this opcode. This is usually given in hexadecimal. + +subsystem (optional): + A **string** denoting the subsystem associated with this command. + +title (optional): + A **string** denoting the title of this command + +desc (optional): + A **string** for providing a description of the command. + +arguments (optional): + A **list** of *!Argument* or *!Fixed* objects + +---- + +!Argument +--------- + +The argument constructor allows for a number of parameter to specify options for a command. By default an argument needs to include a name, data type, and byte information. + +name: + A **string** denoting the name of this argument + +type: + A **string** specifying the data type of the argument. You can see all the valid primitive types that will be accepted here by looking at ``bliss.core.dtype.PrimitiveTypes``. + +bytes: + Specifies which byte(s) in the command filled by this argument. 
This can be specified as a single integer or as a list of integers (in the case of a range of bytes). + +desc (optional): + A **string** for providing a description of the argument. + +units (optional): + A **string** denoting the argument's units. + +range (optional): + A **list** of 2 items specifying the range of acceptable values for the argument. + +enum (optional): + A **dict** of key, value pairs listing the enumeration of valid values for the argument. The **key** matches with the value in the command. The **value** is a **string** describing what the value in the enumeration represents. + +---- + +!Fixed +------ + +The fixed constructor allows you to define constant values in your command. + +type: + A **string** specifying the data type of the argument. You can see all the valid primitive types that will be accepted here by looking at ``bliss.core.dtype.PrimitiveTypes``. + +bytes: + Specifies which byte(s) in the command are filled by this constant. This can be specified as a single integer or as a list of integers (in the case of a range of bytes). + +name (optional): + A **string** denoting the name of this constant. + +desc (optional): + A **string** for providing a description of the constant. + +units (optional): + A **string** denoting the constant's units. + +value (optional): + A number specifying the value for this constant. + + +---- + +Example Command Definition +-------------------------- + +Below is an example of what you might have defined for a command. It uses most of the options mentioned above. + +.. code-block:: yaml + + --- !Command + name: EXAMPLE_RESET_SYSTEM + opcode: 0x1337 + subsystem: ExampleSubSystem + title: Example Reset System + desc: | + Reset the processor and initiate boot process. 
+ arguments: + - !Fixed + type: LSB_U16 + bytes: [0, 1] + value: 0x3010 + + - !Argument + name: reset_type + desc: | + Reset type + PROM_REBOOT: Nominal reboot + DIAG_RAM_REBOOT: Diagnostic reboot + units: none + type: LSB_U16 + bytes: [2, 3] + enum: + 0x0000: PROM_REBOOT + 0x0001: DIAG_RAM_REBOOT + + - !Fixed + type: LSB_U16 + bytes: [4, 5] + value: 0x0000 + + - !Fixed + type: LSB_U16 + bytes: [6, 7] + value: 0x0000 diff --git a/doc/source/command_line.rst b/doc/source/command_line.rst new file mode 100644 index 00000000..b2902c62 --- /dev/null +++ b/doc/source/command_line.rst @@ -0,0 +1,121 @@ +Command Line Utilities +====================== + +AIT provides a number of command line utilities for performing common operations and component initialization. Below is a breakdown of these utilities with a brief explanation of how they work and why you might use them. + +---- + +Component Initialization Utilities +---------------------------------- + +The following commands are used to start up AIT services or components. + +bliss-bsc +^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_bsc.py + :start-after: ''' + :end-before: ''' + +---- + +Sequence Utilities +------------------ + +Utilities for the manipulation of command sequences. + +bliss-seq-encode +^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_seq_encode.py + :start-after: ''' + :end-before: ''' + +bliss-seq-decode +^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_seq_decode.py + :start-after: ''' + :end-before: ''' + +bliss-seq-print +^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_seq_print.py + :start-after: ''' + :end-before: ''' + +____ + +Telemetry Utilities +___________________ + +bliss-tlm-send +^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_tlm_send.py + :start-after: ''' + :end-before: ''' + +____ + +Command Utilities +_________________ + +bliss-cmd-send +^^^^^^^^^^^^^^ +.. 
literalinclude:: ../../bliss/core/bin/bliss_cmd_send.py + :start-after: ''' + :end-before: ''' + +____ + +BSC Utilities +_____________ + +bliss-bsc-create-handler +^^^^^^^^^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_bsc_create_handler.py + :start-after: ''' + :end-before: ''' + +bliss-bsc-stop-handler +^^^^^^^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_bsc_stop_handler.py + :start-after: ''' + :end-before: ''' + +____ + +Command Table Utilities +_______________________ + +bliss-table-decode +^^^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_table_decode.py + :start-after: ''' + :end-before: ''' + +bliss-table-encode +^^^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_table_encode.py + :start-after: ''' + :end-before: ''' + +____ + +Miscellaneous Utilities +_______________________ + +bliss-create-dirs +^^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_create_dirs.py + :start-after: ''' + :end-before: ''' + +bliss-orbits +^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_orbits.py + :start-after: ''' + :end-before: ''' + +bliss-yaml-validate +^^^^^^^^^^^^^^^^^^^ +.. literalinclude:: ../../bliss/core/bin/bliss_yaml_validate.py + :start-after: ''' + :end-before: ''' diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 00000000..0469f0c2 --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# +# BLISS-Core documentation build configuration file, created by +# sphinx-quickstart on Wed Apr 6 12:19:38 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os + +import sphinx_rtd_theme + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('../../bin/')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinxcontrib.httpdomain' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'AIT-Core' +copyright = u'2017, Jet Propulsion Laboratory' +author = u'AIT-Core Development Team' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'0.29.0' +# The full version, including alpha/beta/rc tags. +release = u'0.29.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# The name for this set of Sphinx documents. +# " v documentation" by default. 
+#html_title = u'BLISS-Core v0.1' + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +#html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'BLISS-Coredoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'BLISS-Core.tex', u'BLISS-Core Documentation', + u'BLISS-Core Development Team', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. 
+#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'bliss-core', u'BLISS-Core Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'BLISS-Core', u'BLISS-Core Documentation', + author, 'BLISS-Core', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Sort members by type +autodoc_member_order = 'groupwise' + +# Ensure that the __init__ method gets documented. 
+def skip(app, what, name, obj, skip, options): + if name == "__init__": + return False + return skip + +def setup(app): + app.connect("autodoc-skip-member", skip) diff --git a/doc/source/configuration_intro.rst b/doc/source/configuration_intro.rst new file mode 100644 index 00000000..256968c8 --- /dev/null +++ b/doc/source/configuration_intro.rst @@ -0,0 +1,167 @@ +Introduction to AIT Configuration +================================= + +AIT uses a number of `YAML `_ (YAML Ain't Markup Language) and JSON files for project configuration. + +You must ensure that the **BLISS_CONFIG** environment variable points to your **config.yaml** file in order for AIT to properly configure your project. Given the default AIT project structure you would have the following setup. This assumes you've set **BLISS_ROOT** to the project's root directory:: + + export BLISS_CONFIG=$BLISS_ROOT/data/config/config.yaml + +What is YAML? +------------- + +YAML is a data serialization language with a heavy focus on maintaining human-readability. The `YAML Getting Started `_ provides an overview of the structures supported. + +config.yaml +----------- + +AIT uses **config.yaml** to load configuration data for the command (cmddict), telemetry (tlmdict), Event Verification Record (evrdict), Binary Stream Capture (bsc), and Logging (logging) components. + +* **cmddict** - defines the location of the Command Dictionary YAML file +* **evrdict** - defines the location of the Event Verification Record (EVR) Dictionary YAML file +* **tlmdict** - defines the location of the Telemetry Dictionary YAML file +* **bsc** - defines the location of the Binary Stream Capture (BSC) YAML configuration file. +* **logging** - defines the name to be associated with the Logger component (defaults to 'bliss') and the host to push the output syslog information (defaults to 'localhost') +* **data** - specifies all of the data paths associated with the GDS that can further be referenced by AIT or mission-specific tools. 
The paths specified can use path variables to allow for value substitution based upon date, hostname, platform, or any other configurable variable. See the *bliss-create-dirs* tool and *Path Expansion and Variables* section below for more details. + +The filename paths should be considered relative to the location of **config.yaml**. If you have **hostname** specific configuration you can add another block of data. The **default** block is the fall back if a match cannot be found. Below is an example **config.yaml** file that defines the default configuration files for AIT. + +AIT loads **config.yaml** on import. Here is an example **config.yaml**: + +.. code-block:: none + + default: + command: + history: + filename: ../cmdhist.pcap + sequence: + directory: ../seq + cmddict: + filename: cmd.yaml + + evrdict: + filename: evr.yaml + + tlmdict: + filename: tlm.yaml + + bsc: + filename: bsc.yaml + + logging: + name: bliss + hostname: bliss.jpl.nasa.gov + + phase: 'dev' + + data: + '1553': + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/downlink/1553 + bad: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/downlink/bad + lehx: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/downlink/lehx + planning: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/planning + sdos: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/sdos + uplink: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/uplink + ats: + path: /gds/${phase}/data/${hostname}/%Y/%Y-%j/ats + + gui: + port: 8080 + telemetry: + - stream: + name: OCO3_1553_EHS + port: 3076 + html: + directory: ../gui/ + + +If you want to look at the contents of **config.yaml** programmatically you can access it with: + + >>> bliss.config + BlissConfig(...) + +You can read more about each component's configuration and configuration-schema files in the component-specific pages. 
+ +Path Expansion and Variables +---------------------------- + +File and directory paths included in **config.yaml** can be specified with varying degrees of explicitness in order to allow for the most flexibility. Any file or directory path specified with a key of 'directory', 'file', 'filename', 'path', or 'pathname' will resolve according to the details below. + +Absolute Path Expansion +^^^^^^^^^^^^^^^^^^^^^^^ + +In the case where an absolute path is not specified for a 'file', 'filename', 'path', or 'pathname', the following are handled: + +* path does not begin with '/' (relative path) - the path or filename given is assumed from the BLISS_CONFIG directory. +* path begins with '~' (User HOME directory) - the current user's home directory is used + +Variable Substitution +^^^^^^^^^^^^^^^^^^^^^ + +Variables can also be specified within the path in order to allow for more explicit configuration. The following rules apply: + +* Variables are extracted from the keys specified in the **config.yaml**. +* Any variables you would like to access must be at the base-level of the default, host, or platform: +.. code-block:: none + + default: + phase: 'dev' + mission: 'oco3' + instrument: 'pma' + +* The variable values must be a string or list of strings: +.. code-block:: none + + default: + phase: 'dev' + mission: 'oco3' + instrument: ['pma', 'icc', 'ecc'] + +* Variables can be specified in a path using the following syntax +.. code-block:: none + + `${variable}` + + # For example + path: /${phase}/${mission}/${instrument} + +* There are currently 2 default variables whose values are automatically generated, and they can be accessed without specifying them in **config.yaml**. 
+ + * ${year} - current year + * ${doy} - current day of year + * ${hostname} - hostname of machine where AIT is running + * ${platform} - platform of machine where AIT is running + +Example +^^^^^^^ + +If we have the following specified in **config.yaml**:: + + default: + phase: 'dev' + mission: 'oco3' + data: + data1: + path: /${phase}/${hostname}/%Y-%j/data1 + data2: + path: /${phase}/${hostname}/%Y-%j/data2 + +If the machine hostname = 'oco3-gds1', and today is day 300 in 2016, we can programmatically access these paths: + + >>> for k, v in bliss.config._datapaths.items(): + >>> print "%s - %s" % (k ,v) + data1 - /dev/oco3-gds1/2016-300/data1 + data2 - /dev/oco3-gds1/2016-300/data2 + +See **bliss-create-dir** software for more details on path substitution and how it can be leveraged. + +YAML Idiosyncrasies +------------------- + +While YAML is generally very user-friendly, every tool has its rough edges. The AIT team has done its best to help you avoid these where possible. However, it may still be worth investigating potential roadblocks as you use YAML more. There is an excellent resource that the developers at SaltStack have put together on `YAML idosyncrasies `_ that is worth reading. It should help you avoid any potential problems in your YAML configuration. + diff --git a/doc/source/contribute.rst b/doc/source/contribute.rst new file mode 100644 index 00000000..5fac01ea --- /dev/null +++ b/doc/source/contribute.rst @@ -0,0 +1,102 @@ +Contributor Guides +================== + +Installation +------------ + +Before you install **bliss-core** you should install `virtualenv `_ to properly isolate your development environment. It is also recommended that you install `virtualenvwrapper `_ for convenience. The following instructions will assume that you have installed both already. + +Installation is largely the same if you wish to contribute or make changes to the code compared to simply using the toolkit. 
The only real change is installing **bliss-core** as a "develop" mode package so we can make changes and test them without needing to reinstall the changed files. + +.. code-block:: bash + + $ pip install -e .[docs,tests] + +Project Workflow Overview +------------------------- + +AIT use a feature-branch / pull request approach to organizing contributions to the toolkit. All code is reviewed prior to integration into the toolkit. + +Track changes via tickets +^^^^^^^^^^^^^^^^^^^^^^^^^ + +All changes need to be made against one or more tickets for tracking purposes. AIT uses Github Issues along with Zenhub to track issue in the project. All tickets should have (outside of rare edge-cases): + +- A concise title +- An in-depth description of the problem / request. If reporting a bug, the description should include information on how to reproduce the bug. Also include the version of the code where you're seeing the bug. + +If you're going to begin work on a ticket make sure to progress the ticket through the various **Pipeline** steps as appropriate as well as assigning yourself as an **Assignee**. If you lack sufficient permissions to do so you can post on the ticket asking for the above to be done for you. + +Commit Messages +^^^^^^^^^^^^^^^ + +AIT projects take a fairly standard approach to commit message formatting. You can checkout `Tim Pope's blog `_ for a good starting point to figuring out how to format your commit messages. All commit messages should reference a ticket in their title / summary line. + +.. code-block:: none + + Issue #248 - Show an example commit message title + +This makes sure that tickets are updated on Github with references to commits that are related to them. + +Pull Requests and Feature Branches +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All changes should be isolated to a feature branch that links to a ticket. The standard across AIT projects is to use **issue-###** for branch names where **###** is the issue number found on Github. 
+ +The title of a pull request should include a reference to the ticket being fixed as mentioned for commit messages. The description of a pull request should provide an in-depth explanation of the changes present. Note, if you wrote good commit messages this step should be easy! + +Any tickets that are resolved by the pull request should be referenced with Github's syntax for closing out tickets. Assuming the above ticket we would have the following in a pull request description: + +.. code-block:: none + + Resolve #248 + +Documentation +------------- + +AIT uses Sphinx to build its documentation. You can build the documentation +with: + +.. code-block:: bash + + $ python setup.py build_sphinx + +To view the documentation, open **doc/build/html/index.html** in a web browser. + +If you need to update the auto-generated documentation you can run the +following command to rebuild all of the **bliss** package documentation: + +.. code-block:: bash + + $ sphinx-apidoc --separate --force --no-toc -o doc/source bliss bliss/test + +Please make sure to update the docs if changes in a ticket result in the +documentation being out of date. + +Unit Tests +---------- + +AIT uses the `Nose `_ unit +test framework. To run the tests in **python/bliss/test**: + +.. code-block:: bash + + $ python setup.py nosetests + +Please be sure to check that all tests pass before creating a pull request for a ticket. All new functionality or changes to existing functionality should include one or more (probably more) tests covering those changes. + +Coding Style +------------ + +AIT makes a best-effort attempt at sticking with PEP-8 conventions. + +Mailing Lists +------------- + +The AIT mailings lists are a good way to get in contact with people working on the project. If you need help with something on the project feel free to send an email to the AIT team at **bliss.support@jpl.nasa.gov**. + +Slack Channels +-------------- + +AIT has three channels on the JPL team Slack. 
Generic AIT conversations happen on **#bliss**, development conversations happen on **#bliss-development**, and user support conversations happen on **#bliss-support**. + diff --git a/doc/source/dev_index.rst b/doc/source/dev_index.rst new file mode 100644 index 00000000..768bbcaa --- /dev/null +++ b/doc/source/dev_index.rst @@ -0,0 +1,14 @@ +Welcome to BLISS-Core's developer documentation! +================================================ + +.. toctree:: + :maxdepth: 2 + + developer_info + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/source/developer_info.rst b/doc/source/developer_info.rst new file mode 100644 index 00000000..b1d3e238 --- /dev/null +++ b/doc/source/developer_info.rst @@ -0,0 +1,119 @@ +Developer Documentation +======================= + +Release Process +--------------- + +Prepare Repo for Release +^^^^^^^^^^^^^^^^^^^^^^^^ + +1. *Determine the version number for the release. **bliss-core** uses standard semantic versioning (Major.Minor.Patch). + +* Major bumps are for large, non-backwards compatible changes +* Minor bumps are for backwards compatible changes +* Patch bumps are for incremental bug fixes, small releases, and end-of-sprint releases. + +2. *Update the project documentation to use the correct version names.* The following files needs to be updated with the correct version names: + +* `doc/source/conf.py `_ - contains a **version** and **release** option. Both of these should be updated to point to the version number for this release. + +.. code-block:: python + + # The short X.Y version. + version = u'0.29.0' + # The full version, including alpha/beta/rc tags. + release = u'0.29.0 + +* `setup.py `_ - The setup object and bottom of script also contains the **version**. + +.. code-block:: python + + setup( + name = 'bliss-core', + version = '0.29.0' + . + . + ) + +3. Commit and push these changes. + +.. 
code-block:: bash + + git add doc/source/conf.py setup.py + git commit -m "Prep for release" + git push origin master + +See a past release `commit `_ for an example. + +Generate Release Notes +^^^^^^^^^^^^^^^^^^^^^^ + +You will need a list of included tickets to put the in tag annotation when tagging the release. There is a helper script in /build that will generate this for you. Note that you can include a start and end time to help narrow down the notes to include since the last release made. + +.. code-block:: bash + + cd build + ./generate_changelog.py --start-time YYYY-MM-DDTHH:MM:SSZ + +Tag the Release +^^^^^^^^^^^^^^^ + +Via the Github Releases page, draft a new release. Place the above version number as the tag version. The release title should be **BLISS v**. Copy the change log into the release description box. If the release is not production ready be sure to check the pre-release box to note that. When finished, publish the release. + +Push Latest Docs to Github Pages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You will need to push the latest documentation to Github pages for the release. There is a script that helps you with the majority of this. + +.. code-block:: bash + + cd build + ./update_docs_release.sh + git status # Check that everything looks correct + git commit -m "Update docs for " + git push origin gh-pages + git checkout master + +Notify Relevant Parties of Release +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Send an email to the **bliss.announce@jpl.nasa.gov** mailing list. An example template is included below: + +.. code-block:: none + + Subject: + [RELEASE] BLISS Core v has been released + + Body: + Hello! + + BLISS Core v has been released and is ready for use. + + You can view the change logs and download the release at + https://github.jpl.nasa.gov/bliss/bliss-core/releases/tag/ + + View the BLISS Installation page for information on updating + to the latest version. 
+ https://github.jpl.nasa.gov/pages/bliss/bliss-core/installation.html#upgrading-an-installation + + Thank you! + BLISS Development Team + +Push Release Artifacts to OCO3-TB PyPi +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +1. SSH into OCO3-TB: + +2. Run **make-pypi.sh** + +.. code-block:: bash + + # Navigate to pypi repo + cd /usr/local/vhosts/oco3-tb/htdocs/pypi + + # Run make-pypi.sh. + ./make-pypi.sh -c 0.29.0 + +3. Check https://bliss.jpl.nasa.gov/pypi/simple/ to ensure that the release has been added. + +NOTE: Currently requires pip 9.0.1 in order to utilize `pip download`. diff --git a/doc/source/evr_intro.rst b/doc/source/evr_intro.rst new file mode 100644 index 00000000..3ce3c2c5 --- /dev/null +++ b/doc/source/evr_intro.rst @@ -0,0 +1,64 @@ +EVRs Introduction +================= + +AIT provides support for YAML-based configuration of Event Verification Records (EVRs) within the system. Below is an example of a simple set of EVRs defined for use in the toolkit. + +.. code-block:: yaml + + - !EVR + name: NO_ERROR + code: 0x0001 + desc: No error + message: "No error" + + - !EVR + name: EVR_1 + code: 0x0002 + desc: EVR 1 + message: "The first evr" + + - !EVR + name: EVR_2 + code: 0x0003 + desc: EVR 2 + message: "The second evr" + + - !EVR + name: EVR_3 + code: 0x0004 + desc: EVR 3 + message: "The third evr %s" + +Message Formatting +------------------ + +AIT EVRs allow you to include common format strings in the **message** attribute so that EVR data can be decoded and included in displays. You can use the :meth:`bliss.core.evr.EVRDefn.format_message` method for this. 
+ +>>> import bliss.core.evr +>>> evr = bliss.core.evr.getDefaultDict()[3] +>>> evr.message +'The third evr %s' + +We'll need a :func:`bytearray` of data to decode: + +>>> data = bytearray([0x69, 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x72, 0x65, 0x61, 0x74, 0x65, 0x73, 0x74, 0x21, 0x00]) + +We can now decode that data and include it in our message: + +>>> evr.format_message(data) +'The third evr is the greatest!' + +!EVR +---- + +name: + The EVR's name + +code: + The code that specifies this EVR + +desc (optional): + A human readable description of what the EVR represents + +message (optional): + A human readable description of what the EVR represents. The message attribute can contain **printf** strings. The :class:`bliss.core.evr.EVRDefn` class provides an interface for unpacking data into it's message attribute. diff --git a/doc/source/index.rst b/doc/source/index.rst new file mode 100644 index 00000000..16397085 --- /dev/null +++ b/doc/source/index.rst @@ -0,0 +1,37 @@ +Welcome to the AMMOS Instrument Toolkit (AIT) documentation! +============================================================ + +The AMMOS Instrument Toolkit (Formerly the Bespoke Links to Instruments for Surface and Space (BLISS)) is a Python-based software suite developed to handle Ground Data System (GDS), Electronic Ground Support Equipment (EGSE), commanding, telemetry uplink/downlink, and sequencing for JPL International Space Station and CubeSat Missions. It is a generalization and expansion of tools developed for the following JPL ISS projects: + +* `Orbiting Carbon Observatory 3 (OCO-3) `_ +* `Vehicle Cabin Atmosphere Monitor (VCAM) `_ +* `ECOsystem Spaceborne Thermal Radiometer Experiment on Space Station (ECOSTRESS) `_ + +Visit the :doc:`Installation and Environment Configuration ` guide for installation information. Visit the :doc:`API Documentation ` page to view in-depth interface documentation. + +.. note:: The AMMOS Instrument Toolkit was formerly known as "BLISS". 
You will see references to this, especially in the :doc:`API Documentation ` and code snippets, since the internal software structure still maintains that provenance. + +.. toctree:: + :maxdepth: 2 + + installation + project_setup + API Documentation + command_line + configuration_intro + Command Dictionary Introduction + Telemetry Dictionary Introduction + API Module Introduction + EVR Introduction + limits_intro + Command & Data Handling Tables + bsc_intro + contribute + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/doc/source/installation.rst b/doc/source/installation.rst new file mode 100644 index 00000000..db6b29a3 --- /dev/null +++ b/doc/source/installation.rst @@ -0,0 +1,148 @@ +Installation and Environment Configuration +========================================== + +The following guide will show you how to install and configure AIT Core. For information on how to configure a new project to use AIT, check out the `New Project Setup `_ page. + +Installation +------------ + +Before you install AIT Core you should install `virtualenv `_ to properly isolate your development environment. It is also recommended that you install `virtualenvwrapper `_ for convenience. The following instructions will assume that you have installed both already and created an environment. + +You can install AIT Core from a checkout of the code or from the BLISS PyPi server. Having a checkout of the code can be handy if you want to view the source or make changes. Installing from PyPi keeps your system clutter free since you don’t have a copy of the code base around. Either choice will work fine! + +From Code Checkout +^^^^^^^^^^^^^^^^^^ + +Clone the repository from JPL Github: + +.. code-block:: bash + + $ git clone https://github.jpl.nasa.gov/bliss/bliss-core.git + $ cd bliss-core + +Find the latest tagged version of the code and check it out: + +.. 
code-block:: bash + + $ git tag + $ git checkout + + +Install the **bliss.core** package and its dependencies: + +.. code-block:: bash + + $ pip install . + +From BLISS PyPi +^^^^^^^^^^^^^^^ + +If you have access to the JPL network you can install AIT Core directly from the BLISS PyPi server. + +.. code-block:: bash + + $ pip install bliss-core --extra-index-url https://bliss.jpl.nasa.gov/pypi/simple/ + + +Optional Binary Stream Capture Components +----------------------------------------- + +AIT's Binary Stream Capture (BSC) module is used to capture data over Ethernet (Not supported on OS X), TCP, and +UDP connections. BSC supports the use of the `rawsocket `_ +library so you can limit raw socket access on machines to specific users. **Rawsocket** +is not needed for BSC to function, however if you need this additional functionality +you will have to manually install the dependency with: + +.. code-block:: bash + + $ pip install rawsocket + +Environment Configuration +------------------------- + +AIT uses two environment variables for configuration. + +**BLISS_ROOT** is used for project wide pathing. If you don't set this AIT will attempt to do a good job of it for you. If you want to be safe you should set it to the project root where you checked out the code. + +**BLISS_CONFIG** is used for locating the project's YAML configuration file. This environment variable should contain a full path to a valid **config.yaml** file. If you don't set this AIT will fail to initialize properly and will notify you that your configuration is invalid. If you wanted to set this to some example configuration that comes packaged with AIT you could set this to: + +.. code-block:: bash + + //data/config/config.yaml + +We recommend that you set this in your **postactivate** file from **virtualenvwrapper**. This will ensure that each time you activate the virtual environment that your **BLISS_CONFIG** environment variable is set properly. 
By default, this file is located at **~/.virtualenvs/postactivate**.
+ +.. code-block:: bash + + $ git checkout master + $ git pull + $ git tag + $ git checkout + $ pip install . --upgrade + +Installed from PyPi +^^^^^^^^^^^^^^^^^^^ + +Run the following to upgrade to the latest AIT Core versions. + +.. code-block:: bash + + $ pip install bliss-core --extra-index-url https://bliss.jpl.nasa.gov/pypi/simple/ --upgrade diff --git a/doc/source/limits_intro.rst b/doc/source/limits_intro.rst new file mode 100644 index 00000000..55e1becf --- /dev/null +++ b/doc/source/limits_intro.rst @@ -0,0 +1,73 @@ +Limits Introduction +=================== + +The :class:`bliss.core.limits` module provides support for specifying acceptable value ranges for telemetry fields. + +Consider the below example telemetry packet and fields for which we'll specify limit values. + +.. code-block:: YAML + + - !Packet + name: 1553_HS_Packet + desc: Ethernet 1553 packet used to monitor telemetry in real-time + functions: CurrA_Fx(dn): (dn - 2) / 1234.0 + + fields: + - !Field + name: Voltage_A + desc: Voltage A as a 14-bit DN. Conversion to engineering units is TBD. + units: Volts + type: MSB_U16 + + - !Field + name: product_type + type: U8 + enum: + 0: TABLE_FOO + 1: TABLE_BAR + 2: MEM_DUMP + 3: HEALTH_AND_STATUS + +Specifying Limits +----------------- + +Limit values can be specified for fields with a value range and for fields with enumerated values. By default, limits are specified in **limits.yaml**. You can see the path specified in **config.yaml** under the **limits.filename** parameter. + +.. code-block:: YAML + + limits: + filename: limits/limits.yaml + +Value-Range Limits +^^^^^^^^^^^^^^^^^^ + +For the **1553_HS_PACKET.Voltage_A** field we'll specify min/max value ranges for our limits. You'll see in the example below that we're specifying upper and lower bounds with error and warning values for each. You can customize the limits as necessary by specify a subset of these values. 
For instance, you could specify just a lower warning bound if that is all you were concerned about. + +.. code-block:: YAML + + - !Limit + source: 1553_HS_Packet.Voltage_A + desc: Voltage A + units: Volts + lower: + error: 5.0 + warn: 10.0 + upper: + error: 45.0 + warn: 40.0 + +Enum Limits +^^^^^^^^^^^ + +For fields with enumerated values, such as the **1553_HS_PACKET.product_type** field, we specify warning and error limits for one or more of the field's enumerated values. Here we're specifying an error limit when the field has the **MEM_DUMP** value and a warning limit when the field value is either **TABLE_FOO** or **TABLE_BAR**. + +.. code-block:: YAML + + - !Limit + source: 1553_HS_PACKET.product_type + desc: Ethernet Product Type field + value: + error: MEM_DUMP + warn: + - TABLE_FOO + - TABLE_BAR diff --git a/doc/source/project_setup.rst b/doc/source/project_setup.rst new file mode 100644 index 00000000..c5956b5a --- /dev/null +++ b/doc/source/project_setup.rst @@ -0,0 +1,32 @@ +Setting up a New Project with AIT +================================= + +The following documentation will teach you how to setup a new project to build off of the AMMOS Instrument Toolkit. This guide assumes that the project you'll be developing is a Python-based project. + +Add AIT Core as a Dependency +------------------------------ + +You'll need to add AIT Core to either your **requirements.txt** file or your **setup.py** file. + +If you use a requirements file for specifying dependencies: + +.. code-block:: bash + + --extra-index-url https://bliss.jpl.nasa.gov/pypi/simple/ + bliss-core==1.0.0 + +If you use **setup.py** for specifying dependencies: + +.. code-block:: bash + + install_requires = [ + bliss-core==1.0.0 + ], + dependency_links = [ + 'https://bliss.jpl.nasa.gov/pypi/simple/bliss-core/' + ] + +Set AIT Config Values +--------------------- + +AIT provides a large number of configuration parameters for customizing and configuring the toolkit. 
AIT ships with an example **config.yaml** skeleton located at **/PROJECT_ROOT/data/config/config.yaml** that you can use as a baseline configuration file. You should read the :doc:`Configuration Introduction ` and the component specific configuration documents such as the :doc:`Telemetry `, :doc:`Commanding `, and :doc:`EVR ` pages for additional information and update the files to meet your project's specifications. diff --git a/doc/source/telemetry_intro.rst b/doc/source/telemetry_intro.rst new file mode 100644 index 00000000..cd6333a5 --- /dev/null +++ b/doc/source/telemetry_intro.rst @@ -0,0 +1,363 @@ +Telemetry Dictionary Introduction +================================= + +AIT provides support for YAML-based configuration of telemetry data within the system. AIT uses a YAML based configuration file to define Packets and their constituent Fields. + +.. code-block:: yaml + + - !Packet + name: CCSDS_HEADER + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + +All the valid parameters and attributes that can be present in the telemetry dictionary definition are defined in the telemetry schema file. By default this is called *tlm_schema.json* and is co-located with *config.yaml*. AIT also provides a command line utility for verifying that your telemetry dictionary configuration is valid given that you have a defined schema file. If you pass the ``--tlm`` or ``-t`` flag to ``bliss-yaml-validate`` it will check this for you. + +.. code-block:: bash + + $ bliss-yaml-validate --tlm + 016-07-27T09:36:21.408 | INFO | Validation: SUCCESS: ... 
+ +AIT provides telemetry dictionary processing via :class:`bliss.core.tlm.TlmDict` which gives a mapping of Packet names and :class:`bliss.core.tlm.PacketDefinition` instances. + + >>> import bliss.core.tlm + >>> tlmdict = bliss.core.tlm.getDefaultDict() + >>> type(tlmdict) + + >>> tlmdict.keys() + ['Ethernet_HS_Packet', 'CCSDS_HEADER', '1553_HS_Packet'] + >>> type(tlmdict['CCSDS_HEADER']) + + +We can look at a specific field via a :class:`bliss.core.tlm.PacketDefinition`. For instance, we can look at the **version** field from the CCSDS packet defined in `Example Telemetry Packet Definition` + + >>> ccsds_pkt = tlmdict['CCSDS_HEADER'] + >>> ccsds_pkt.fieldmap['version'] + FieldDefinition(bytes=0, desc='CCSDS Version', dntoeu=None, enum=None, expr=None, mask=224, name='version', shift=5, _type=PrimitiveType('U8'), units=None, when=None) + + +Decoding binary into a :class:`bliss.core.tlm.Packet` allows us to easily decode downlink data and check values. Let's look at an example CCSDS Primary Packet Header: + +.. code-block:: none + + version: 000 # Set to '000' per the CCSDS spec + packet type: 0 # Set per the CCSDS spec + secondary header flag: 1 + apid: 01011100111 + sequence flag: 01 # Indicate this is 'first' segment of a sequence + sequence count: 00000000000000 # Since it's the first segment, the count is 0 + packet length: 0000010010101111 # '1200' byte packet encoded as 1199 per the CCSDS spec + +We'll create a packet from this binary using the CCSDS Primary Packet Header :class:`bliss.core.tlm.PacketDefinition` that we were using earlier. + + >>> type(ccsds_pkt) + + >>> data = bytearray(b'\x0A\xE7\x40\x00\x04\xAF') + >>> pkt = bliss.core.tlm.Packet(ccsds_pkt, data=data) + +With the :class:`bliss.core.tlm.Packet` object we can check each of those values above. 
+ + >>> pkt.version + 0 + >>> pkt.packet_type + 'Core' + >>> pkt.secondary_header_flag + 'Present' + >>> pkt.apid + 743 + >>> pkt.sequence_flags + 'First Segment' + >>> pkt.sequence_count + 0 + >>> pkt.packet_length + 1199 + +---- + +!Packet +------- + +The AIT packet constructor is the parent construct for all AIT telemetry packet definitions. It encapsulates high level metadata related to the packet along with all the fields that create the packets structure. + +fields: + A **list** of *!Field* objects that define the structure of the packet. + +name (optional): + A **string** denoting the name of this telemetry packet. + +desc (optional): + A **string** for providing a description of the packet. + +constants (optional): + A **dict** mapping constant names to values to be referenced in the packet and its fields. + + .. code-block:: yaml + + constants: + A: 371.81 + B: -4.850e-2 + C: 1.086e-5 + D: -1.239e-9 + +functions (optional): + A **dict** mapping function names to their expressions. Function bodies can reference constants and history values. + + .. code-block:: yaml + + functions: + R(dn): RL + (dn - history.RT0) * (RH - RL) / (history.RT1 - history.RT0) + T(dn): A + (B * R(dn)) + (C * R(dn)**2) + (D * R(dn)**3) + + Functions can then be referenced throughout the telemetry definitions. For instance, the following shows the ``T(n)`` function being used as part of a field's ``dntoeu`` attribute: + + .. code-block:: yaml + + - !Field + name: RT2 + bytes: '@prev' + desc: Noise source coupler (external) (Assembly Drawing 10335041) + dntoeu: + equation: T(raw.RT2) + units: Kelvin + when: (history.RT1 - history.RT0) > 3000 + type: LSB_U16 + when: HKMux1 == 18 + +history (optional): + A **list** of *!Field* names for which previous values should be stored. The previous value of a !Field can be reference via ``history.fieldName``. + + .. 
code-block:: yaml + + history: + - VX0 + - VX1 + - VX2 + +---- + +!Field +------ + +name: + A **string** denoting the name of this field in the packet. + +type: + A **string** specifying the data type for the section of the packet in which this field is located. You can see all the valid primitive types that will be accepted here by looking at ``bliss.dtype.PrimitiveTypes``. Arrays of types are also supported, e.g. ``MSB_U16[32]``. You can see examples of how *type* is used in the `Example Telemetry Packet Definition`_ section. + +desc (optional): + A **string** for providing a description of the field. + +mask (optional): + An integer (usually specified in hexadecimal) specifying the relevant bits of the field's *type* that represents the field's value. + +bytes (optional): + Specifies which byte(s) in the packet make up this field. This can be specified as a single integer or as a list of integers (in the case of a range of bytes). This is a helpful attribute if a field is comprised of a number of bits that do not easily divide into primitive data types. See the *Application Processes Indentifer* in the `Example Telemetry Packet Definition`_ section. If the current **!Field** is a mask of the previous **!Field**'s bytes you can specify that with **'@prev'**. + +enum (optional): + A **dict** of key, value pairs listing the enumeration of values for the field. The **key** matches with the value in the field. The **value** is a **string** describing what the value in the enumeration represents. + +dntoeu (optional): + Specify the equation and units for Data Number to Engineering Unit conversion for the **!Field**. + + .. code-block:: yaml + + dntoeu: + equation: -4.652 * raw.VX2 / history.VX0 + units: volts + when: history.VX0 > 2000 + +when (optional): + An expression defining when a !Field's value is valid. + + .. 
code-block:: yaml + + when: HKMux1 == 0 + +---- + +!include +-------- + +The `include` YAML constructor can be used to pull in a YAML definition from another file. +This can be used to consolidate dictionaries that become too long to manage over time, or to +reuse definitions across various packets. The value for the include can be: + +An absolute path: + +.. code-block:: yaml + + !include /path/to/my.yaml + +or a relative path to the file with the include specified. For instance, if we have a yaml `/path/to/tlm.yaml` with the following: + +.. code-block:: yaml + + !include my.yaml + +will include `/path/to/my.yaml`. + +The included file can include either 1+ Packet definitions or 1+ Field definitions. For instance, both of the following examples are valid. + +**Packet !include example** + +* header.yaml + + .. code-block:: yaml + + - !Packet + name: my_header + fields: + - !Field + name: header_field_1 + desc: header field 1 + type: U8 + - !Field + name: header_field_2 + desc: header field 2 + type: U8 + +* tlm.yaml + + .. code-block:: yaml + + - !Packet + name: my_packet + fields: + - !Field + name: pkt_field_1 + desc: pkt field 1 + type: U8 + - !Field + name: pkt_field_2 + desc: pkt field 2 + type: U8 + + - !include header.yaml + +**Field !include example** + +* packet_fields.yaml + + .. code-block:: yaml + + - !Field + name: pkt_field_1 + desc: pkt field 1 + type: U8 + - !Field + name: pkt_field_2 + desc: pkt field 2 + type: U8 + +* tlm.yaml + + .. code-block:: yaml + + - !Packet + name: my_packet + fields: + - !include packet_fields.yaml + - !Field + name: pkt_field_3 + desc: pkt field 3 + type: U8 + - !Field + name: pkt_field_4 + desc: pkt field 4 + type: U8 + +---- + +Example Telemetry Packet Definition +----------------------------------- + +The example telemetry dictionary snippet below provides the definition for a CCSDS Packet Primary Header. + +.. image:: _static/ccsds_prim_header.png + +.. 
code-block:: yaml + + - !Packet + name: CCSDS_HEADER + fields: + - !Field + name: version + desc: Indicates CCSDS Version-1 (does not change) + bytes: 0 + type: U8 + mask: 0xE0 + - !Field + name: type + desc: | + Distinguishes between core and payload packet types to extend the + APID space to 4032 + bytes: 0 + type: U8 + mask: 0x10 + enum: + 0: 'Core' + 1: 'Payload' + - !Field + name: secondary_header_flag + desc: | + Indicates whether, or not, a Secondary Header follows the primary + header (always set to 1) + bytes: 0 + type: U8 + mask: 0x08 + enum: + 0: 'Not Present' + 1: 'Present' + - !Field + name: apid + desc: | + Used in conjunction with Type to define the Logical Data Path + bytes: [0, 1] + type: MSB_U16 + mask: 0x07FF + - !Field + name: sequence_flags + desc: | + When sending commands, the sequence flags must be marked as + unsegmented data. All other PL packets may be per source/destination + ICDs. + bytes: 2 + type: U8 + mask: 0xC0 + enum: + 0: 'Continuation Segment' + 1: 'First Segment' + 2: 'Last Segment' + 3: 'Unsegmented' + - !Field + name: sequence_count + desc: | + Sequential count which numbers each packet on a Logical Data Path, + i.e. a separate counter is maintained for each source-destination + pair. + bytes: [2, 3] + mask: 0x3FFF + type: MSB_U16 + - !Field + name: packet_length + desc: | + Sequential count which expresses the length of the remainder of the + packet including checkword if present. The value is the number of + bytes (octets) following the field minus 1. 
+ bytes: [4, 5] + type: MSB_U16 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..5b79deb7 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,16 @@ +[pep8] +ignore = E111,E303,E211,E302,E201,E202 + +[nosetests] +verbosity=2 +with-coverage=1 +with-doctest=1 +with-xunit=1 +cover-inclusive=1 +cover-package=bliss +cover-erase=1 + +[build_sphinx] +source-dir = doc/source +build-dir = doc/build +all_files = 1 diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..61f7ffdb --- /dev/null +++ b/setup.py @@ -0,0 +1,66 @@ +# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT) +# Bespoke Link to Instruments and Small Satellites (BLISS) +# +# Copyright 2017, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any +# commercial use must be negotiated with the Office of Technology Transfer +# at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting +# this software, the user agrees to comply with all applicable U.S. export +# laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such +# information to foreign countries or providing access to foreign persons. 
+ +from setuptools import setup, find_packages + +import os + +setup( + name = 'bliss-core', + version = '0.29.0', + packages = find_packages(exclude=['tests']), + author = 'BLISS-Core Development Team', + author_email = 'bliss@jpl.nasa.gov', + + namespace_packages = ['bliss'], + include_package_data = True, + + package_data = { + 'bliss.core': ['data/*.json'] + }, + + install_requires = [ + 'bottle==0.12.9', + 'jsonschema==2.5.1', + 'pyyaml==3.11', + 'requests==2.9.1', + 'gevent==1.1.2', + 'gevent-websocket==0.9.5', + ], + + extras_require = { + 'docs': [ + 'Sphinx==1.4', + 'sphinx_rtd_theme', + 'sphinxcontrib-httpdomain' + ], + 'tests': [ + 'nose', + 'coverage', + 'mock', + 'pylint' + ], + }, + + entry_points = { + 'console_scripts': [ + '{}=bliss.core.bin.{}:main'.format( + f.split('.')[0].replace('_', '-'), + f.split('.')[0]) + for f in os.listdir('./bliss/core/bin') + if f.endswith('.py') and + f != '__init__.py' + ] + } +) diff --git a/src/doc/dict/cmd/Makefile b/src/doc/dict/cmd/Makefile new file mode 100644 index 00000000..2abe787a --- /dev/null +++ b/src/doc/dict/cmd/Makefile @@ -0,0 +1,39 @@ +PDFLATEX=/usr/local/texlive/2015/bin/x86_64-darwin/pdflatex + +LOWER_MISSION = $(shell echo $(BLISS_MISSION) | tr A-Z a-z) +CMDDICT = ../../../config/$(LOWER_MISSION)/cmd.yaml + +SOURCES = \ + cmddict.tex \ + cmddict-01-defs.tex \ + cmddict-02-preamble.tex \ + cmddict-03-title.tex \ + cmddict-04-signatures.tex \ + cmddict-05-changelog.tex \ + cmddict-06-intro.tex \ + cmddict-07-cmddefs.tex \ + cmddict-08-acronyms.tex \ + +YYYYMMDD=$(shell date "+%Y-%m-%d") +TEMPNAME=cmddict.pdf +FULLNAME=$(BLISS_MISSION)_CMD_Dictionary_$(YYYYMMDD).pdf + +.PHONY: all copy + +all: $(TEMPNAME) copy + +copy: $(TEMPNAME) + /bin/cp $(TEMPNAME) $(FULLNAME) + +cmddict-07-cmddefs.tex: ./bliss-cmddefs-tex $(CMDDICT) + ./bliss-cmddefs-tex + +$(TEMPNAME): $(SOURCES) + $(PDFLATEX) $< + $(PDFLATEX) $< + +clean: + /bin/rm -f *~ *.aux *.log *.out *.toc + +distclean: clean + /bin/rm -f *.pdf diff 
--git a/src/doc/dict/cmd/bliss-cmddefs-tex b/src/doc/dict/cmd/bliss-cmddefs-tex new file mode 100755 index 00000000..6841bae0 --- /dev/null +++ b/src/doc/dict/cmd/bliss-cmddefs-tex @@ -0,0 +1,135 @@ +#!/usr/bin/env python + +""" +BLISS Command Defintions to TeX + +Outputs BLISS Command Definitions in TeX format. +""" + +""" +Authors: Ben Bornstein + +Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. +U.S. Government Sponsorship acknowledged. +""" + + +import bliss + + +def argvalues (arg): + """Returns either a value range or enumeration for the given ArgDefn.""" + result = '' + + if arg.enum: + options = [ ] + names = arg.enum.keys() + names.sort(key=lambda n: arg.enum[n]) + + for name in names: + value = arg.enum[name] + options.append('\\argenum{%d}{%s}' % (value, name)) + result = ' \\newline '.join(options) + + elif arg.type: + if arg.type.float: + result = '[%g, %g]' % (arg.type.min, arg.type.max) + else: + result = '[%s, %s]' % (str(arg.type.min), str(arg.type.max)) + + return result + + +def argtype (arg): + """Returns a string indicating the type of the given ArgDefn.""" + result = '' + if arg.type: + result = arg.type.name + + return result + + +def sanitize (s): + """Returns a santized string with special TeX characters escaped.""" + s = s.replace('&', '\&') + s = s.replace('<', '\\textless') + s = s.replace('>', '\\textgreater') + return s + + +def varargs (argdefns): + """Returns a list of ArgDefns with Fixed (constant) arguments removed.""" + result = [ ] + if argdefns is not None: + result = filter(lambda arg: not arg.fixed, argdefns) + return result + + +def writeSection (output, title, commands): + """Outputs a section of the command dictionary.""" + output.write('\\section{%s}\n\n' % title) + + for cmd in commands: + writeCommand(output, cmd) + output.write('\n') + + +def writeCommand (output, cmd): + """Outputs a subsection for the given command.""" + output.write('\\subsection{%s}\n' % cmd.title) + output.write('\n') + 
output.write('%s\n' % sanitize(cmd.desc).strip()) + output.write('\n') + + args = varargs(cmd.argdefns) + + output.write('\\begin{cmdusage}{%s}\n' % cmd.name) + for arg in varargs(args): + output.write(' \\cmdarg{%s} \:\n' % arg.name) + output.write('\\end{cmdusage}\n') + output.write('\n') + + if len(args) > 0: + output.write('\\begin{argdesc}\n') + for arg in args: + values = arg.name, sanitize(arg.desc) + output.write(' \\cmdarg{%s} & %s\\\\\n' % values) + output.write('\\end{argdesc}\n') + output.write('\n') + + output.write('\\begin{argdetails}\n') + for arg in args: + values = arg.name, argtype(arg), argvalues(arg), arg.units + output.write(' \\cmdarg{%s} & %s & %s & %s\\\\\n' % values) + output.write(' \\hline\n') + output.write('\\end{argdetails}\n') + output.write('\n') + + output.write('\\clearpage\n') + output.write('\n') + output.write('% ' + ('-' * 65) + '\n') + + +titles = { + 'CMD' : 'Commands and Sequences', + 'CORE' : 'C\&DH Core', + 'CRYO' : 'Cryocooler', + 'ETHERNET': 'Ethernet', + 'FP' : 'Fault Protection', + 'FPIE' : 'Focal Plane Interface Electronics', + 'MOTOR' : 'Motor Control', + 'TEMPCTL' : 'Temperature Control' +} + +filename = 'cmddict-07-cmddefs.tex' +output = open(filename, 'wt') + +for section in sorted( titles.keys() ): + commands = [ ] + for cmd in bliss.cmd.getDefaultCmdDict().values(): + if cmd.subsystem == section: + commands.append(cmd) + commands.sort(key=lambda c: c.name) + writeSection(output, titles[section], commands) + +output.close() diff --git a/src/doc/dict/cmd/cmddict-01-defs.tex b/src/doc/dict/cmd/cmddict-01-defs.tex new file mode 100644 index 00000000..48182656 --- /dev/null +++ b/src/doc/dict/cmd/cmddict-01-defs.tex @@ -0,0 +1,96 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. 
ALL RIGHTS RESERVED. +%======================================================================= + +\newcommand{\jpldoc}{D-94639} + +\newcommand{\mission}{ECOSTRESS} +\newcommand{\missionfull}{ECOsystem Spaceborne Thermal Radiometer Experiment +on Space Station} + +\newcommand{\docname}{Command Dictionary} +\newcommand{\docdate}{\today} + +\newcommand{\revision}{Initial Release} + +\newcommand{\prepareAname}{Alan Mazer} +\newcommand{\prepareArole}{\mission \, Flight Software Lead} + +\newcommand{\prepareBname}{Ben Bornstein} +\newcommand{\prepareBrole}{\mission \, Mission System Manager} + +\newcommand{\approveAname}{Renaud Goullioud} +\newcommand{\approveArole}{\mission \, Project System Engineer} + +\newcommand{\approveBname}{Helenann Kwong-Fu} +\newcommand{\approveBrole}{\mission \, Mission Assurance Manager} + +\newcommand{\projectlibloc}{https://bravo-lib.jpl.nasa.gov/docushare/dsweb/View/Library-509} + +\newcommand{\nasajplbottom}{\begingroup +\par\vspace*{\fill} +National Aeronautics and\\ +Space Administration\\ + +% JPL logo +\begin{figure}[h] + \includegraphics[width=0.3\textwidth]{figures/jpl-logo} +\end{figure} + +4800 Oak Grove Drive\\ +Pasadena, California 91109-8099\\ +\endgroup} + +\newcommand{\cmdname}[1]{\texttt{#1}} +\newcommand{\cmdarg}[1]{\texttt{#1}} +\newcommand{\argenum}[2]{\texttt{#1: #2}} + +\newenvironment{cmdusage}[1] +{ + \subsubsection*{Usage} + \hangindent=0.7cm \cmdname{#1} \enspace +} +{ +} + +\newenvironment{argdesc} +{ + \subsubsection*{Where} + \vspace{-0.5cm} + \renewcommand{\arraystretch}{1.5} + \table[h] + \center + \tabularx{\textwidth}{lX} +} +{ + \endtabularx + \endcenter + \vspace{-1cm} + \endtable +} + +\newenvironment{argdetails} +{ + \subsubsection*{Argument Details} + \renewcommand{\arraystretch}{1.5} + \table[h] + \center + \tabularx{\textwidth}{|l|l|X|l|} + \hline + \textbf{Name} & + \textbf{Type} & + \textbf{Range or Values} & + \textbf{Units}\\ + \hline +} +{ + \endtabularx + \endcenter + \vspace{-1cm} + \endtable +} 
diff --git a/src/doc/dict/cmd/cmddict-02-preamble.tex b/src/doc/dict/cmd/cmddict-02-preamble.tex new file mode 100644 index 00000000..fdb58809 --- /dev/null +++ b/src/doc/dict/cmd/cmddict-02-preamble.tex @@ -0,0 +1,183 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +% Acronym list +\usepackage{acronym} + +% Bibliography +\usepackage[round]{natbib} + +% Math and other symbols +\usepackage{array} +\usepackage{amssymb} +\usepackage{amsmath} +\usepackage{commath} +\usepackage{multirow} +\usepackage{tabularx} +\usepackage{textcomp} + +% Bytefield For packet diagrams +\usepackage{bytefield} + +% Inconsolata for fixed-width fonts +\usepackage{inconsolata} + +% Use of color names in text +\usepackage[usenames,dvipsnames]{xcolor} + +% Author comments +\usepackage{xspace} +\newcommand{\todo}[1]{{\noindent\textit{\color{red} Todo: #1}}\xspace} +\newcommand{\note}[1]{{\noindent\textit{\color{blue} Note: #1}}\xspace} + +% Adjust page layout +\usepackage{layout} +\usepackage[margin=1.2in]{geometry} + +% Create headers and footers +\usepackage{fancyhdr} +\pagestyle{fancy} + +% Header: Document number and document type (short) +\lhead[\mission \, \jpldoc, \, \revision]{\mission \, \jpldoc, \, \revision} +\rhead[\docdate]{\docdate} + +% Footer: JPL Export Compliance, Page Number +\cfoot{\vspace{4ex} + \footnotesize{The technical data in this document is controlled + under the U.S. 
Export Regulations, release to foreign persons may + require an export authorization.}} +\rfoot[\thepage]{} + +% Fonts +\usepackage{alltt} +% \usepackage{cmbright} +\usepackage{mathpazo} +\usepackage[T1]{fontenc} + +% Change spacing in itemization +\usepackage{paralist} +\let\itemize\compactitem +\let\enditemize\endcompactitem +\let\enumerate\compactenum +\let\endenumerate\endcompactenum +\let\description\compactdesc +\let\enddescription\endcompactdesc +\pltopsep=\medskipamount +\plitemsep=2pt +\plparsep=1pt + +% Definition of labels in itemizations +\renewcommand{\labelitemi}{$\circ$} +\renewcommand{\labelitemii}{$\mathbf{\cdot}$} +\renewcommand{\labelitemiii}{$-$} + +% Figures +%\usepackage[font={scriptsize,sl}]{subfig} +\usepackage{epsfig} +\usepackage{graphicx} +\usepackage{caption} +\usepackage{subcaption} + +% Define placeholder figure, with variable height +% Use as: \placeholder{height} +\newcommand{\placeholder}[1]{ + \setlength{\fboxsep}{#1} + \framebox[\textwidth]{\textcolor{red}{[FIGURE PLACEHOLDER]}} +} + +% Adjust caption layout +\captionsetup{font={sl,small},labelfont={bf},margin=0pt} + +% Use sections, not chapters +\renewcommand{\thesection}{\arabic{section}} +\renewcommand{\theequation}{\arabic{equation}} +\renewcommand{\thefigure}{\arabic{figure}} +\renewcommand{\thetable}{\arabic{table}} + +% Set the number of levels of sections that get numbers: +% Setting this to 4 will number subsubsections +\setcounter{secnumdepth}{4} + +% Format table of contents (see specification of custom layout for table of +% contents below) +\usepackage[titles]{tocloft} + +% Make sections bold in table of contents +\renewcommand{\cftsecfont}{\bfseries} +\renewcommand{\cftsecpagefont}{\bfseries} + +% Modify the dot fill spacing in table of contents +\renewcommand{\cftdotsep}{0} + +% Bold dots for sections +%\renewcommand{\cftsecleader}{\bfseries\cftdotfill{\cftsecdotsep}} + +% Alternatively, no dots for sections in table of contents 
+\renewcommand{\cftsecdotsep}{\cftnodots} + +% No dots for subsections in table of contents +%\renewcommand{\cftsubsecdotsep}{\cftnodots} + +% Larger spacing between sections in table of contents +\setlength{\cftbeforesecskip}{2ex} + +% Set depth of table of contents +% Setting this to 3 will include subsubsections +\setcounter{tocdepth}{3} + +\usepackage{underscore} + +% Formatting of hyperlinks +% This must be defined as the last package to avoid conflicts with other +% packages! +\usepackage{hyperref} +\usepackage{url} +\hypersetup{ + colorlinks, + citecolor=RoyalBlue, + filecolor=RoyalBlue, + linkcolor=RoyalBlue, + urlcolor=RoyalBlue, +} + +% Set autoref to print 'section' and not 'subsection' +% This must be defined after all packages are called! +\let\subsectionautorefname\sectionautorefname +\let\subsubsectionautorefname\sectionautorefname + +%======================================================================= + +% Custom table of contents +% Condense table of contents and figure/table lists onto one page + +\makeatletter + +% Redefine table of contents +\renewcommand{\contentsname}{Table of Contents} +\renewcommand{\tableofcontents}{% + \null\hfill\textbf{\Large\contentsname}\hfill + \vspace{2ex} + \@mkboth{\MakeUppercase\contentsname}{\MakeUppercase\contentsname} + \@starttoc{toc}\vspace{12ex} +} + +% Redefine list of figures +% FIXME: Title not centered +\renewcommand{\listfigurename}{ + \hfill\textbf{\Large{List of Figures}}\hfill +} + +% Redefine list of tables +% FIXME: Title not centered +\renewcommand{\listtablename}{ + \hfill\textbf{\Large{List of Tables}}\hfill +} + +\makeatother diff --git a/src/doc/dict/cmd/cmddict-03-title.tex b/src/doc/dict/cmd/cmddict-03-title.tex new file mode 100644 index 00000000..3dd538f8 --- /dev/null +++ b/src/doc/dict/cmd/cmddict-03-title.tex @@ -0,0 +1,41 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell 
(23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\thispagestyle{empty} + +\raggedright + +% JPL document number +{\Large\bfseries JPL \jpldoc}\\[\baselineskip] + +% Full Mission Name +{\LARGE\bfseries \missionfull $\,$ (\mission)}\\[2\baselineskip] + +% Document Title +{\LARGE\bfseries \mission $\,$ \docname}\\[2\baselineskip] + +% Revision and Date +\revision\\ +\docdate\\[3\baselineskip] + +% Author +\prepareAname\\ +\prepareArole\\[2\baselineskip] + +\prepareBname\\ +\prepareBrole\\[6\baselineskip] + +Paper copies of this document may not be current and should not be +relied on for official purposes. The current version is in the +\mission \, Project Library:\\ +\href{\projectlibloc}{\projectlibloc}\\[1\baselineskip] +\textit{NOTE: Access is limited to \mission \, project personnel.}\\[2\baselineskip] + +\nasajplbottom +\hrulefill diff --git a/src/doc/dict/cmd/cmddict-04-signatures.tex b/src/doc/dict/cmd/cmddict-04-signatures.tex new file mode 100644 index 00000000..0e673144 --- /dev/null +++ b/src/doc/dict/cmd/cmddict-04-signatures.tex @@ -0,0 +1,40 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. 
+%======================================================================= + +% \thispagestyle{empty} + +\section*{ + \vspace{-5ex}\center{\mission \, \docname} +} + +\begin{table}[htp] + \begin{center} + \begin{tabular}{p{8.5cm} p{2cm} p{4cm}} + % + \textbf{Prepared by:} & & \\[5ex] + \hrule & & \hrule \\[-2ex] + \prepareAname & & Date \\ + \prepareArole & & \\[5ex] + % + \hrule & & \hrule \\[-2ex] + \prepareBname & & Date \\ + \prepareBrole & & \\[5ex] + \textbf{Approved by:} & & \\[5ex] + \hrule & & \hrule \\[-2ex] + \approveAname & & Date \\ + \approveArole & & \\[5ex] + % + \hrule & & \hrule \\[-2ex] + \approveBname & & Date \\ + \approveBrole & & \\[5ex] + \end{tabular} + \end{center} +\end{table} + +\nasajplbottom diff --git a/src/doc/dict/cmd/cmddict-05-changelog.tex b/src/doc/dict/cmd/cmddict-05-changelog.tex new file mode 100644 index 00000000..412ad58e --- /dev/null +++ b/src/doc/dict/cmd/cmddict-05-changelog.tex @@ -0,0 +1,23 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\section*{\center{Document Change Log}} + +\renewcommand{\arraystretch}{1.5} +\begin{table}[htp] + \begin{center} + \begin{tabular}{|p{2cm}|p{2.5cm}|p{6cm}|p{4cm}|} + \hline + \textbf{Revision} & \textbf{Date} & \textbf{Sections Changed} & \textbf{Author(s)} \\ + \hline\hline + Initial & 22-Feb-2016 & All & A. Mazer, B. 
Bornstein \\ + \hline + \end{tabular} + \end{center} +\end{table} diff --git a/src/doc/dict/cmd/cmddict-06-intro.tex b/src/doc/dict/cmd/cmddict-06-intro.tex new file mode 100644 index 00000000..e69de29b diff --git a/src/doc/dict/cmd/cmddict-08-acronyms.tex b/src/doc/dict/cmd/cmddict-08-acronyms.tex new file mode 100644 index 00000000..14256a79 --- /dev/null +++ b/src/doc/dict/cmd/cmddict-08-acronyms.tex @@ -0,0 +1,24 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\section{Appendix} +\label{app:acron} + +\subsection{List of Acronyms and Abbreviations} + +% Define longest acronym in [] to determine spacing to corresponding full names +\begin{acronym}[ECOSTRESS] + \acro{ECOSTRESS} {ECOsystem Spaceborne Thermal Radiometer Experiment on + Space Station} + \acro{CMD} {Command} + \acro{CRYO} {Cryocooler} + \acro{FP} {Fault Protection} + \acro{FPIE} {Focal Plane Interface Electronics} + \acro{TEMPCTL} {Temperature Control} +\end{acronym} diff --git a/src/doc/dict/cmd/cmddict.tex b/src/doc/dict/cmd/cmddict.tex new file mode 100644 index 00000000..3a8a8762 --- /dev/null +++ b/src/doc/dict/cmd/cmddict.tex @@ -0,0 +1,43 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. 
+%======================================================================= + +\documentclass[11pt, letterpaper, twoside]{report} + +\input{cmddict-01-defs} +\input{cmddict-02-preamble} + +\begin{document} + +\include{cmddict-03-title} +\include{cmddict-04-signatures} + +\pagenumbering{roman} + +\include{cmddict-05-changelog} + +\tableofcontents +\clearpage + +\pagenumbering{arabic} +\setcounter{page}{1} + +\include{cmddict-06-intro} +\include{cmddict-07-cmddefs} + +% Change section numbers to letters for the appendices +\renewcommand\thesection{\Alph{section}} +\renewcommand\thesubsection{\thesection.\arabic{subsection}} +\renewcommand\thesubsubsection{\thesection.\arabic{subsection}. +\arabic{subsubsection}} + +% Make sure first section in the appendix starts with A +\setcounter{section}{0} +\include{cmddict-08-acronyms} + +\end{document} diff --git a/src/doc/dict/cmd/figures/jpl-logo.pdf b/src/doc/dict/cmd/figures/jpl-logo.pdf new file mode 100644 index 0000000000000000000000000000000000000000..373ad5ad76f311c6f74007d69177596545e3ff46 GIT binary patch literal 8814 zcmaiaWmKC@7cEwxxNCs`#S0|CrMSDb#R{Z21b25Q6n78qQYhBqPK#@?;_k(vm%i`! 
z-Otut_gPtaCVS4DIkWf4uRRQEQZnoy4lWFap3Sq({o?c7v7SB*ZU7Kq_rVH7SQx;m zWNK^fYyseTf;0e}GL}$hQ^%(_6yj_uWom3^VhRuu!EkbRG=V2mdaFkhT zz^D{^yUg`&b=%_%3B#5T_y)CaS#;HDtt-*j*njsn(gaB6CZqAIdRsL6x98WJl?#!a z{`>o!-`dyI=s z8?^dCFlV^SY!tQMdZB8&x?}rhan2=);Gj!U4Bf}!_ub9xxwgSMxcAz_KHKTT(lS|J zM}NZl;V@1!#g-CjhJTfDB;nEid8XlAkG^Z@R5i`@g5hwo=2h{kAhpU8Uy+JkL7R94 zvgFm(xmb2A(QyFZ%ed6j54xo_c^00p?)H7y$8JU9%!BVN1r$*ID2i(4t)-nYz`?)o zAMkQ!oxa5Oyl(qqIJ~=vt#CZ+a5zfL$Yc_hW+c&`eRs!G$>Uk;DC>HD8a_YkBrAM^ zGjEzhSUY&Ci^G5iU&Ssr;(jYK`>SC@b5i-@dNZG#YI^5*oA=9B7V`BhhP5F--X&rv zNvdeD!3&Y18X+9F_qd%-ONLCXfad3BTicCa86UgL_;}Njkq0;Rkq=ZJ2w1?rJ8~@} zqC5xWikYNX+g zGEw*S^2XKqaW;S^p%v_R)iK-fe2PV`73IBGbU^$}puHE-<+lEygny?BmnKng>T?bx zy2ytHm10bDqAVxL!h=mlE_2B{Kr)?5q6QhQ!QdFEUXeEcg%B6tNq327uLLZ}-77bf zE`ymrz4n^A_p5qieqY`uNh{WQrm%S=kbx+@t7*g1OIj=wy%(L0wwThM4$r?I3hbMi ztDy*0U!NoM{k|g*LiUl+InVYPgMY$4Pl6qRs90fEP|eH_3A3{1PGZDVhRzu}cA!Vl zyd7Pt#d{jtNO~s6oCN-WgAW%1YF^#I<`hpNPdL8A=@CS8DV!wGp2K8lLjpn=6A|?%Xo=b zJ-k1IiN$&GoveTKwEsH8<-MR!1lwc^-3whWEH*D@eNl<`kkB}w`zVxWDRWfkjW zgxY|G$pq`r8dGxSO1d^vNXmGhUYOJ5E|Kcx=}Zn@d>^dOH?3yJkB>~qq#7chUSLpk zyzylo=~-uCn0mI-o5L4EA}HA~Dr4t)$Ev%ZK)Im2UL$p&)>f6az}G5_T7!qqSTc14 zN?r}7543N+I^#;%-(NQ=L+JP3Y zCz+YmQFN7L3Ilg=ST5_Z)|I*O1rh{137`4;SKwPi0>wY&ZAx=0E~i`M8>X5)!nW*~ zv6k7@Fg0J=l4CHjR>~N|DMBc|_tH)O`RKjW$MY#v8K3e}|RAvFcM=v8#;n@eG2M?=U#}PPFEQJS z%BczrWai@<$_RPsQa{xgnZPgh)ej69Rtc8{G1x0UW{9n#$$}r6H98A#b6Y8#LLKMD zaw^r!4o^b)`s4J<6bJB3Bv8)HV1kr@L3fSuc3KVIrmKxWKg}AaNijz416S`JXCfCCJ#}iy zofkpM=Bp7%--a298P=McViv=}lbSQk-=!aVhNitUvsW1x--a17SO2(5GBpU?^6(AU zvR#k{3RUz-4T_{pJEd;YRDZRrer8&TJ^2~m=o?Nn`Qz)yqKZb+#A)7_{6EjHZab!4 z%^f;*r$twimp~x2@gbWOCE~5r7Kr`dymOUiVDztrJx#Z^i+;?fD>kIOjM+-Ua@Ni; z|6r-Ld*rY_*YK7xO!+h6@X9(0B=4HqTVGIn747r!k|2|&IecNT)gxL0d&|>wui35a zn(1AJe5uT?j)iDL-B_+(lWS$tAY|eYsXO>fTysU}4-6#@2mg}W&xp+@6IlpmK%1uO zsa`T#Mg_N@St_QD$P84S)!Q=TrCvmoWST8Ef$xAM=;i`Sim)G3fX&|YZu94J{8`Vd ztX_CAO$FfCJ-(oEL;4mvBX%Z^QlW@TfKu5K&Ssg6b-`HKvq%Q>0nL+4udZu4dlFS< zPu{P5I}OT5a|pT4LBKFaa%d2wwe 
zF<&-8n-}M9`z&e_ila>gLuRFGdyzhL`3GVXD5T>KTOaas+<~lSMsdn>K6}dhzU~j6G`0>Z-CXa;O8o;Hj9! z4>QGm)B~Malbzt=l4EPF5Sf|2A2hPG!4kro1tGO&3LOS%jZ@BqAzI7J_sBm)iVVv~ zmKU+aX+1dy#7Q&XNjYRP+TTe;&;dFRHNko2M?S8CnzRE$`9-uwpJACEk1`f0tu~qS zEhI%aG5HG=i0#KmdjY=*`x>aN@}*>kHJo8HMGfE^>lPak3(tcBg}~oYzRqkTUvn4E z14m|YI_H>Z3r;Yb%ysTO36*c0G;FQ*zI>A2Q~+q1RK4OJAgOw7ZK^#0-!1x*x{iCx zYsdU9Mzle14t>Ba7qMOs<T!0Z}Ry zB};G4?=pshL-Fw)NgH!3e=(%w=Q zyS}#+VmxOZNR36ujV*3DB#^ho`Bu)SOJBX*bk1scW;T3_-nBU1>k$U79zrv$(KXE+ zy=`SjvywH_nD&Z~)XdwMmW6*Wht*34NLV*|B#F|H>y4D>P6o#-T>r8sCA4IdhSGot ztTV^%NTKULvPU&)xY*o5DKch0_UV(CGM=#-4UYXJJ@3*$MUTGwiNSD5+8dnuSxYnJf?6}PL-kC|YE*U|)C{FOFMP3B-=eoCX6sQzZ*15;7i>~7B8;(XXsvC4v zaz6@l(vcGb&hUa%s)N#;2$sl8Ww{K0Da^g><6e^T*7GsHvyA@wFr~}(r60bpLOjBL z5YqULMAqdr5vOQh{c~xO-TN^9J}-2k6#r{EjaVqL$MXNUeVqV6T*dxEIbpJ$EeG_Gh+ia@3yjujr@cobWtzS%bE zg~7HgEic?l=5P_NFO&#h5zro_5_~);6h+hnJVzPnnX@;bjvshDQm}VJK7K!lwSQvA zWnG1mws_&oJz=~Rw7B-AL+^$SPWaPUOq9`qRf3$Hh@^vLl;(Qmg7mxJJYozT!_SoH zM2eNlm6PFObs;(LpfINUK)XClHZkYd(yrnjvG&TRy}VxeV_R`jG3ueY8Uu5Z?5l3< zxQUXf&tYBq@J-40p0hz8Pv}Ip3G~8^LN=oT=nJNJ)tek^+g|Z_PA=?Qj9crKNudP? 
zNbcyrg97ji=K7)>X>aC%dx1@i8g7JDThCU#&f>(yGwMjo@z`>zunh*?;YRnls$=k) zz@H6cMv35Blo!!{=)zhElRdsMb%0TK@4VlEKC;L;Xcf;0Dn9um5H19g-DJpwk!>9) z3pw3KXokrDLic!AIFpZeCmAm_P9XSlu%EhCORG&irqwYwfl~r>wUybGPD*7TSOf43 zB@A3~9Hm7ksc2_Dl?p~wlVev`o2+!?*4o9yW-euS{eDG3sH@5Wsv?6C&J$X-B!aBq zJ0jkFdEVCVghSGCoz=BLB+R2|OI-s7!{Yh+MEt>qr`mO^I{>B3(wT^n!OIM+d?$Le zbPM|sWTKwkAWRQ<692a$gL=ipmy(;Hy(3a(d6@_aB*SE*j7)B__YsF7eqIAfR`G+a zD)NZMENZ3&r8E|BHFVXx3h}jQGc%W7L(~afOT@xVW1<^87yJ5j7(=0;755_f`-GBI zjU%irxITm*yVYFloK*<1P^A5Uu`N)L!6(vqjYuv44f88i?UwwGz?ZlHYgc2hQ~uS1 zAIvHwoL`zgTX`emYj+xq*Han~H;6O_2kDNzAS&ovkF{5;NA6a)?^x$(z)ufD8hze` zz+}?v?-86@u_Kym^3X$3A4D>0TETUlE=OzCQ|tQe(Vf;1+u{BIO`Km8%7L| zi-+PDHJZ;x`oGxp3aDv?!SA%pH!@}Kd#466k;-MOa~_;D#$@Dm;Q z%VTfxtuE)jqV(|6<=KTe`h@+(hS-bs;UYkWM&s78vbAZ8{G?5BM89ySUyJ#P-;o6u zXj)@Fc9ekDnL;zxHm*cq2k{$a zjt#FIceP6b-1$0i7%?rR7aaSQsL6#pI_*-mCR%QA6X>pkQO|^51XGw~ns*!oIVK5~ zQj>E)LrD5R+BsiyE%#UmyLDx_^e0nsYP!O`VILMeuBSh=sU?@~aMlPEsEG5^g6~B~ zyfw%P${aQhW@oW*4y5SL(iQJ$9`@U}qq8KfZt*P7)9kHN zwOy=mOm9j@QD`XPCW&h7UL4~fWzFg5M7@V%j?cEx=IN-3ZU)P68$Yz1g}1PniIJ13 z%r!j7XV|HN(h^bk%3Q11wT^9oy39b7Bc}7%ycW;3sKxMvQV%@78vLAF2Wrwhjb7!B zKC0iuog|3CKv&l5JN_%f=tRvqE{ewzrqQc6BGX(rg7nir6SpO#eo@1=ul&{LIflL8 zbzHfhL8uQ4EJ2xE!<7gzCe3ev@f_vebLp<^E0+D1n@6$BXvv?OHjLWG(8!is7m?mY z7~c?!deqnl)NGQ#Z+Yd4T`51P-fxN+4_CT)|$a4W(@kT7sqxk^%Nf^>BgQEW> zw92RMUG}JEK_EGCuMQ@5CPZEQ3>?8`KRoHCm}Zs4Q7sZqzPzo`0obGW2i1uM%Z)S| zIbnOJi1x5hxCKKq@#AeiDoN?0$;r$YL&JP=FV#bPA>d~efY-V*uqGz%R~+{sm4VV` zD`AP3AIrVhV0$=IBl}GDGp`z3y)gW4nukAUc0j>l|0-LpgM1b$Zm=! 
zA2(wqF|sWtF*;z2Og6=?cV(@tb(gZ%Av{Tshv$=7{TrM+A1S)b6a+;b0IeKR16FPQL}Cg&YK-(Mfa}r+_u3yjyGn3_UO;@qBL1er~pU(l)nX^ zTQs=)lOdvPaGgj97t&#phgnat*)SSTFrKk(3=?dT*h)KQta(dTKLZ&a5_N$h5kNjP z$MY2t6_5VUF+-%T(X^K_ywcmcby9ir-5FMapJfuBuaJh8q!)7z?&7pzpW7(5A_L)8 z7G}9)Yp;2fY9SeGu!m=H10#k-1E3^nfOOIiEJdZ%TPa@dnFAzTXa&{Wc9;Sa7ATvw zr~;gPVL?s@EUzZec7@k7U7s^AJ5MS>WBf|1L(@A@Q_Xu4Us0k z-`MPz);WSkF9z-!*$9H}d1wbH72!KkVwDb;KAxs!RlAKJIv=6sL{uMXB@aKPQ^Nu| z@-6gRXNJ~xq72*Zmxb~W2}HeD&7lPnmpi&q&?RmP%4ueUfo~-wW^#&U4k?hK9+O6P_u>sdk)AUTW8GE0<-=MT8GLI`Gsc~6Gf@;` z7vCm+T8Y*QM?FC-63JmUI96YB%}G>Oi z5U&vz!x*bzvBYhW2eh*a#;sHLghLZ~t2)+s=MjeLamYaX+4s-lmQzfSG}%E9$&O?q zJOR@(b%Bd}XwQtjwI9hjB?_caNn6PoaR-PT zY#@q?K`A9&MepBu2{Q&;WQEe3>t90gOouC2*D7ifK;8@(0~I4b!@b*$Ib0J+t=b{0Zjw z-DNb53iCDbuCFBCHL~s+6CXFN?`gXthxT6rc`6R*8y##oTIK0-g&vz zC|hJ}SyA_*Jq{sfN_b;zTdS%hS<}4Q+J&}o*J;%vjy3sE3Da<=v^!+7QcMaQeODm4 zm?0RREWJJ_NS?-5>f`IBphEj}9ABJbd-0y)ve8KT7xG)~br157xJRw;gmW-4W3>0y zo5kiU2yQ8I_3n-Qgq|09Z7Q}h4{62=L(ivQ8kgGD`1p6VrDU)Vs>Q{>(u%*Jf<*I< zRx}iYzmKKfunbag$}faPXT@gW9^_}G$4e@@w8|eu)Hv>wWg9Uv3y@F<2WN_Wx?$l{ zj=wVVjc2i2^I$~v{aoU`V-`O+#5n#?SRq@Vy_eMcVVnvrm;LBnCI!P5_4wTBBo!_g zjuy=MVe&A|11&gIZeTF!z2q;Ia4;)t z^8_~m9J(kIi22Ne7VvZ^%@0xn@rY&q|$<-AFLa-8KHWtD3y)BjSF!PkQ&e_iZQ%Q z%W8HRBW3+u0%P$RYV}RI-mwG>Icq`SwVJRZMo&}b-EpD9A^ytI`qP!3ff>VsPdJ!7 zOVrBjmqSw@CY4RpM}9QZw~Jk`Mi63E@C7s;#P0w2T5iqKMW{|s9}P-yb4=+)F_3d_ zjT3b>vW4A#OPY=4X1a-EyT3zrP}9ZiY)uIPsBMLIGW;;ETi1k6^#A|5K zN$$b%xQ>^@duN*Zw*dAA6K!~Ae_+?P+G$K}l@DP?|6&)WG|L^3GMPoK=vvmBxk(A{ zXHoRcu^YmD3eF<&C81dO(uWi>E{SfMWSRXiS~V2O$jVd-_%$*EBK#6`8dI|>sQ4qw z>%-h@3?T#Ph^$94l-{cBN@qY|RX9p`8dk>h>zQ60symJD-tVtGb6V`5?aJ-Ad(7-j z1_QIXH80gvBIeTNhMxE5{+@^mU!T7`85NqLCJZT+(e5#SCDz$PTN;kXV8|q19`Ra@ zqrsJR#Nzd1*P(z$EWdMXVMYYuTxiaW+{WrqvQ?3;ZQ(KdNIJr-wKL+q0*Z0qxj~B%AXZFt+{BP#4 zmWRD5fKv@({@1sosjV}B>u;l2!_>*n#nIT*3Bda|AZcgo{50{_ZO$ W=cg|Ci+g@<5I+V3gS4s)#{U5@1rmt> literal 0 HcmV?d00001 diff --git a/src/doc/dict/tlm/Makefile b/src/doc/dict/tlm/Makefile new file mode 100644 index 00000000..75797e52 --- 
/dev/null +++ b/src/doc/dict/tlm/Makefile @@ -0,0 +1,39 @@ +PDFLATEX=/usr/local/texlive/2015/bin/x86_64-darwin/pdflatex + +LOWER_MISSION = $(shell echo $(BLISS_MISSION) | tr A-Z a-z) +CMDDICT = ../../../config/$(LOWER_MISSION)/cmd.yaml + +SOURCES = \ + tlmdict.tex \ + tlmdict-01-defs.tex \ + tlmdict-02-preamble.tex \ + tlmdict-03-title.tex \ + tlmdict-04-signatures.tex \ + tlmdict-05-changelog.tex \ + tlmdict-06-intro.tex \ + tlmdict-07-tlmdefs.tex \ + tlmdict-08-acronyms.tex \ + +YYYYMMDD=$(shell date "+%Y-%m-%d") +TEMPNAME=tlmdict.pdf +FULLNAME=$(BLISS_MISSION)_TLM_Dictionary_$(YYYYMMDD).pdf + +.PHONY: all copy + +all: $(TEMPNAME) copy + +copy: $(TEMPNAME) + /bin/cp $(TEMPNAME) $(FULLNAME) + +tlmdict-07-tlmdefs.tex: ./bliss-tlmdefs-tex $(TLMDICT) + ./bliss-tlmdefs-tex + +$(TEMPNAME): $(SOURCES) + $(PDFLATEX) $< + $(PDFLATEX) $< + +clean: + /bin/rm -f *~ *.aux *.log *.out *.toc + +distclean: clean + /bin/rm -f *.pdf diff --git a/src/doc/dict/tlm/bliss-tlmdefs-tex b/src/doc/dict/tlm/bliss-tlmdefs-tex new file mode 100755 index 00000000..08d36155 --- /dev/null +++ b/src/doc/dict/tlm/bliss-tlmdefs-tex @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +""" +BLISS Telemetry Defintions to TeX + +Outputs BLISS Telemetry Definitions in TeX format. +""" + +""" +Authors: Ben Bornstein + +Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. +U.S. Government Sponsorship acknowledged. 
+""" + + +import bliss + + +filename = 'tlmdict-07-tlmdefs.tex' +output = open(filename, 'wt') +d = bliss.tlm.getDefaultDict() + +n = 0 +output.write('\\begin{tlmdetails}\n') + +for defn in d['CCSDS_Packet'].flddefns[18:]: + if n == 28: + output.write('\\end{tlmdetails}\n') + output.write('\\begin{tlmdetails}\n') + n = 0 + + if type(defn.bytes) is list: + start = defn.bytes[0] + else: + start = defn.bytes + values = (start, defn.name, defn.desc, defn.type.name) + output.write('%d & \\tlmarg{%s} & %s & %s\\\\\n' % values) + output.write(' \\hline\n') + + n = n + 1 + +output.write('\\end{tlmdetails}\n') +output.write('\n') +output.close() diff --git a/src/doc/dict/tlm/figures/jpl-logo.pdf b/src/doc/dict/tlm/figures/jpl-logo.pdf new file mode 100644 index 0000000000000000000000000000000000000000..373ad5ad76f311c6f74007d69177596545e3ff46 GIT binary patch literal 8814 zcmaiaWmKC@7cEwxxNCs`#S0|CrMSDb#R{Z21b25Q6n78qQYhBqPK#@?;_k(vm%i`! z-Otut_gPtaCVS4DIkWf4uRRQEQZnoy4lWFap3Sq({o?c7v7SB*ZU7Kq_rVH7SQx;m zWNK^fYyseTf;0e}GL}$hQ^%(_6yj_uWom3^VhRuu!EkbRG=V2mdaFkhT zz^D{^yUg`&b=%_%3B#5T_y)CaS#;HDtt-*j*njsn(gaB6CZqAIdRsL6x98WJl?#!a z{`>o!-`dyI=s z8?^dCFlV^SY!tQMdZB8&x?}rhan2=);Gj!U4Bf}!_ub9xxwgSMxcAz_KHKTT(lS|J zM}NZl;V@1!#g-CjhJTfDB;nEid8XlAkG^Z@R5i`@g5hwo=2h{kAhpU8Uy+JkL7R94 zvgFm(xmb2A(QyFZ%ed6j54xo_c^00p?)H7y$8JU9%!BVN1r$*ID2i(4t)-nYz`?)o zAMkQ!oxa5Oyl(qqIJ~=vt#CZ+a5zfL$Yc_hW+c&`eRs!G$>Uk;DC>HD8a_YkBrAM^ zGjEzhSUY&Ci^G5iU&Ssr;(jYK`>SC@b5i-@dNZG#YI^5*oA=9B7V`BhhP5F--X&rv zNvdeD!3&Y18X+9F_qd%-ONLCXfad3BTicCa86UgL_;}Njkq0;Rkq=ZJ2w1?rJ8~@} zqC5xWikYNX+g zGEw*S^2XKqaW;S^p%v_R)iK-fe2PV`73IBGbU^$}puHE-<+lEygny?BmnKng>T?bx zy2ytHm10bDqAVxL!h=mlE_2B{Kr)?5q6QhQ!QdFEUXeEcg%B6tNq327uLLZ}-77bf zE`ymrz4n^A_p5qieqY`uNh{WQrm%S=kbx+@t7*g1OIj=wy%(L0wwThM4$r?I3hbMi ztDy*0U!NoM{k|g*LiUl+InVYPgMY$4Pl6qRs90fEP|eH_3A3{1PGZDVhRzu}cA!Vl zyd7Pt#d{jtNO~s6oCN-WgAW%1YF^#I<`hpNPdL8A=@CS8DV!wGp2K8lLjpn=6A|?%Xo=b zJ-k1IiN$&GoveTKwEsH8<-MR!1lwc^-3whWEH*D@eNl<`kkB}w`zVxWDRWfkjW 
zgxY|G$pq`r8dGxSO1d^vNXmGhUYOJ5E|Kcx=}Zn@d>^dOH?3yJkB>~qq#7chUSLpk zyzylo=~-uCn0mI-o5L4EA}HA~Dr4t)$Ev%ZK)Im2UL$p&)>f6az}G5_T7!qqSTc14 zN?r}7543N+I^#;%-(NQ=L+JP3Y zCz+YmQFN7L3Ilg=ST5_Z)|I*O1rh{137`4;SKwPi0>wY&ZAx=0E~i`M8>X5)!nW*~ zv6k7@Fg0J=l4CHjR>~N|DMBc|_tH)O`RKjW$MY#v8K3e}|RAvFcM=v8#;n@eG2M?=U#}PPFEQJS z%BczrWai@<$_RPsQa{xgnZPgh)ej69Rtc8{G1x0UW{9n#$$}r6H98A#b6Y8#LLKMD zaw^r!4o^b)`s4J<6bJB3Bv8)HV1kr@L3fSuc3KVIrmKxWKg}AaNijz416S`JXCfCCJ#}iy zofkpM=Bp7%--a298P=McViv=}lbSQk-=!aVhNitUvsW1x--a17SO2(5GBpU?^6(AU zvR#k{3RUz-4T_{pJEd;YRDZRrer8&TJ^2~m=o?Nn`Qz)yqKZb+#A)7_{6EjHZab!4 z%^f;*r$twimp~x2@gbWOCE~5r7Kr`dymOUiVDztrJx#Z^i+;?fD>kIOjM+-Ua@Ni; z|6r-Ld*rY_*YK7xO!+h6@X9(0B=4HqTVGIn747r!k|2|&IecNT)gxL0d&|>wui35a zn(1AJe5uT?j)iDL-B_+(lWS$tAY|eYsXO>fTysU}4-6#@2mg}W&xp+@6IlpmK%1uO zsa`T#Mg_N@St_QD$P84S)!Q=TrCvmoWST8Ef$xAM=;i`Sim)G3fX&|YZu94J{8`Vd ztX_CAO$FfCJ-(oEL;4mvBX%Z^QlW@TfKu5K&Ssg6b-`HKvq%Q>0nL+4udZu4dlFS< zPu{P5I}OT5a|pT4LBKFaa%d2wwe zF<&-8n-}M9`z&e_ila>gLuRFGdyzhL`3GVXD5T>KTOaas+<~lSMsdn>K6}dhzU~j6G`0>Z-CXa;O8o;Hj9! 
z4>QGm)B~Malbzt=l4EPF5Sf|2A2hPG!4kro1tGO&3LOS%jZ@BqAzI7J_sBm)iVVv~ zmKU+aX+1dy#7Q&XNjYRP+TTe;&;dFRHNko2M?S8CnzRE$`9-uwpJACEk1`f0tu~qS zEhI%aG5HG=i0#KmdjY=*`x>aN@}*>kHJo8HMGfE^>lPak3(tcBg}~oYzRqkTUvn4E z14m|YI_H>Z3r;Yb%ysTO36*c0G;FQ*zI>A2Q~+q1RK4OJAgOw7ZK^#0-!1x*x{iCx zYsdU9Mzle14t>Ba7qMOs<T!0Z}Ry zB};G4?=pshL-Fw)NgH!3e=(%w=Q zyS}#+VmxOZNR36ujV*3DB#^ho`Bu)SOJBX*bk1scW;T3_-nBU1>k$U79zrv$(KXE+ zy=`SjvywH_nD&Z~)XdwMmW6*Wht*34NLV*|B#F|H>y4D>P6o#-T>r8sCA4IdhSGot ztTV^%NTKULvPU&)xY*o5DKch0_UV(CGM=#-4UYXJJ@3*$MUTGwiNSD5+8dnuSxYnJf?6}PL-kC|YE*U|)C{FOFMP3B-=eoCX6sQzZ*15;7i>~7B8;(XXsvC4v zaz6@l(vcGb&hUa%s)N#;2$sl8Ww{K0Da^g><6e^T*7GsHvyA@wFr~}(r60bpLOjBL z5YqULMAqdr5vOQh{c~xO-TN^9J}-2k6#r{EjaVqL$MXNUeVqV6T*dxEIbpJ$EeG_Gh+ia@3yjujr@cobWtzS%bE zg~7HgEic?l=5P_NFO&#h5zro_5_~);6h+hnJVzPnnX@;bjvshDQm}VJK7K!lwSQvA zWnG1mws_&oJz=~Rw7B-AL+^$SPWaPUOq9`qRf3$Hh@^vLl;(Qmg7mxJJYozT!_SoH zM2eNlm6PFObs;(LpfINUK)XClHZkYd(yrnjvG&TRy}VxeV_R`jG3ueY8Uu5Z?5l3< zxQUXf&tYBq@J-40p0hz8Pv}Ip3G~8^LN=oT=nJNJ)tek^+g|Z_PA=?Qj9crKNudP? 
zNbcyrg97ji=K7)>X>aC%dx1@i8g7JDThCU#&f>(yGwMjo@z`>zunh*?;YRnls$=k) zz@H6cMv35Blo!!{=)zhElRdsMb%0TK@4VlEKC;L;Xcf;0Dn9um5H19g-DJpwk!>9) z3pw3KXokrDLic!AIFpZeCmAm_P9XSlu%EhCORG&irqwYwfl~r>wUybGPD*7TSOf43 zB@A3~9Hm7ksc2_Dl?p~wlVev`o2+!?*4o9yW-euS{eDG3sH@5Wsv?6C&J$X-B!aBq zJ0jkFdEVCVghSGCoz=BLB+R2|OI-s7!{Yh+MEt>qr`mO^I{>B3(wT^n!OIM+d?$Le zbPM|sWTKwkAWRQ<692a$gL=ipmy(;Hy(3a(d6@_aB*SE*j7)B__YsF7eqIAfR`G+a zD)NZMENZ3&r8E|BHFVXx3h}jQGc%W7L(~afOT@xVW1<^87yJ5j7(=0;755_f`-GBI zjU%irxITm*yVYFloK*<1P^A5Uu`N)L!6(vqjYuv44f88i?UwwGz?ZlHYgc2hQ~uS1 zAIvHwoL`zgTX`emYj+xq*Han~H;6O_2kDNzAS&ovkF{5;NA6a)?^x$(z)ufD8hze` zz+}?v?-86@u_Kym^3X$3A4D>0TETUlE=OzCQ|tQe(Vf;1+u{BIO`Km8%7L| zi-+PDHJZ;x`oGxp3aDv?!SA%pH!@}Kd#466k;-MOa~_;D#$@Dm;Q z%VTfxtuE)jqV(|6<=KTe`h@+(hS-bs;UYkWM&s78vbAZ8{G?5BM89ySUyJ#P-;o6u zXj)@Fc9ekDnL;zxHm*cq2k{$a zjt#FIceP6b-1$0i7%?rR7aaSQsL6#pI_*-mCR%QA6X>pkQO|^51XGw~ns*!oIVK5~ zQj>E)LrD5R+BsiyE%#UmyLDx_^e0nsYP!O`VILMeuBSh=sU?@~aMlPEsEG5^g6~B~ zyfw%P${aQhW@oW*4y5SL(iQJ$9`@U}qq8KfZt*P7)9kHN zwOy=mOm9j@QD`XPCW&h7UL4~fWzFg5M7@V%j?cEx=IN-3ZU)P68$Yz1g}1PniIJ13 z%r!j7XV|HN(h^bk%3Q11wT^9oy39b7Bc}7%ycW;3sKxMvQV%@78vLAF2Wrwhjb7!B zKC0iuog|3CKv&l5JN_%f=tRvqE{ewzrqQc6BGX(rg7nir6SpO#eo@1=ul&{LIflL8 zbzHfhL8uQ4EJ2xE!<7gzCe3ev@f_vebLp<^E0+D1n@6$BXvv?OHjLWG(8!is7m?mY z7~c?!deqnl)NGQ#Z+Yd4T`51P-fxN+4_CT)|$a4W(@kT7sqxk^%Nf^>BgQEW> zw92RMUG}JEK_EGCuMQ@5CPZEQ3>?8`KRoHCm}Zs4Q7sZqzPzo`0obGW2i1uM%Z)S| zIbnOJi1x5hxCKKq@#AeiDoN?0$;r$YL&JP=FV#bPA>d~efY-V*uqGz%R~+{sm4VV` zD`AP3AIrVhV0$=IBl}GDGp`z3y)gW4nukAUc0j>l|0-LpgM1b$Zm=! 
zA2(wqF|sWtF*;z2Og6=?cV(@tb(gZ%Av{Tshv$=7{TrM+A1S)b6a+;b0IeKR16FPQL}Cg&YK-(Mfa}r+_u3yjyGn3_UO;@qBL1er~pU(l)nX^ zTQs=)lOdvPaGgj97t&#phgnat*)SSTFrKk(3=?dT*h)KQta(dTKLZ&a5_N$h5kNjP z$MY2t6_5VUF+-%T(X^K_ywcmcby9ir-5FMapJfuBuaJh8q!)7z?&7pzpW7(5A_L)8 z7G}9)Yp;2fY9SeGu!m=H10#k-1E3^nfOOIiEJdZ%TPa@dnFAzTXa&{Wc9;Sa7ATvw zr~;gPVL?s@EUzZec7@k7U7s^AJ5MS>WBf|1L(@A@Q_Xu4Us0k z-`MPz);WSkF9z-!*$9H}d1wbH72!KkVwDb;KAxs!RlAKJIv=6sL{uMXB@aKPQ^Nu| z@-6gRXNJ~xq72*Zmxb~W2}HeD&7lPnmpi&q&?RmP%4ueUfo~-wW^#&U4k?hK9+O6P_u>sdk)AUTW8GE0<-=MT8GLI`Gsc~6Gf@;` z7vCm+T8Y*QM?FC-63JmUI96YB%}G>Oi z5U&vz!x*bzvBYhW2eh*a#;sHLghLZ~t2)+s=MjeLamYaX+4s-lmQzfSG}%E9$&O?q zJOR@(b%Bd}XwQtjwI9hjB?_caNn6PoaR-PT zY#@q?K`A9&MepBu2{Q&;WQEe3>t90gOouC2*D7ifK;8@(0~I4b!@b*$Ib0J+t=b{0Zjw z-DNb53iCDbuCFBCHL~s+6CXFN?`gXthxT6rc`6R*8y##oTIK0-g&vz zC|hJ}SyA_*Jq{sfN_b;zTdS%hS<}4Q+J&}o*J;%vjy3sE3Da<=v^!+7QcMaQeODm4 zm?0RREWJJ_NS?-5>f`IBphEj}9ABJbd-0y)ve8KT7xG)~br157xJRw;gmW-4W3>0y zo5kiU2yQ8I_3n-Qgq|09Z7Q}h4{62=L(ivQ8kgGD`1p6VrDU)Vs>Q{>(u%*Jf<*I< zRx}iYzmKKfunbag$}faPXT@gW9^_}G$4e@@w8|eu)Hv>wWg9Uv3y@F<2WN_Wx?$l{ zj=wVVjc2i2^I$~v{aoU`V-`O+#5n#?SRq@Vy_eMcVVnvrm;LBnCI!P5_4wTBBo!_g zjuy=MVe&A|11&gIZeTF!z2q;Ia4;)t z^8_~m9J(kIi22Ne7VvZ^%@0xn@rY&q|$<-AFLa-8KHWtD3y)BjSF!PkQ&e_iZQ%Q z%W8HRBW3+u0%P$RYV}RI-mwG>Icq`SwVJRZMo&}b-EpD9A^ytI`qP!3ff>VsPdJ!7 zOVrBjmqSw@CY4RpM}9QZw~Jk`Mi63E@C7s;#P0w2T5iqKMW{|s9}P-yb4=+)F_3d_ zjT3b>vW4A#OPY=4X1a-EyT3zrP}9ZiY)uIPsBMLIGW;;ETi1k6^#A|5K zN$$b%xQ>^@duN*Zw*dAA6K!~Ae_+?P+G$K}l@DP?|6&)WG|L^3GMPoK=vvmBxk(A{ zXHoRcu^YmD3eF<&C81dO(uWi>E{SfMWSRXiS~V2O$jVd-_%$*EBK#6`8dI|>sQ4qw z>%-h@3?T#Ph^$94l-{cBN@qY|RX9p`8dk>h>zQ60symJD-tVtGb6V`5?aJ-Ad(7-j z1_QIXH80gvBIeTNhMxE5{+@^mU!T7`85NqLCJZT+(e5#SCDz$PTN;kXV8|q19`Ra@ zqrsJR#Nzd1*P(z$EWdMXVMYYuTxiaW+{WrqvQ?3;ZQ(KdNIJr-wKL+q0*Z0qxj~B%AXZFt+{BP#4 zmWRD5fKv@({@1sosjV}B>u;l2!_>*n#nIT*3Bda|AZcgo{50{_ZO$ W=cg|Ci+g@<5I+V3gS4s)#{U5@1rmt> literal 0 HcmV?d00001 diff --git a/src/doc/dict/tlm/tlmdict-01-defs.tex b/src/doc/dict/tlm/tlmdict-01-defs.tex new file mode 100644 index 
00000000..c980b28a --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-01-defs.tex @@ -0,0 +1,95 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\newcommand{\jpldoc}{D-94642} + +\newcommand{\mission}{ECOSTRESS} +\newcommand{\missionfull}{ECOsystem Spaceborne Thermal Radiometer Experiment +on Space Station} + +\newcommand{\docname}{Telemetry Dictionary} +\newcommand{\docdate}{\today} + +\newcommand{\revision}{Initial Release} + +\newcommand{\prepareAname}{Alan Mazer} +\newcommand{\prepareArole}{\mission \, Flight Software Lead} + +\newcommand{\prepareBname}{Ben Bornstein} +\newcommand{\prepareBrole}{\mission \, Mission System Manager} + +\newcommand{\approveAname}{Renaud Goullioud} +\newcommand{\approveArole}{\mission \, Project System Engineer} + +\newcommand{\approveBname}{Helenann Kwong-Fu} +\newcommand{\approveBrole}{\mission \, Mission Assurance Manager} + +\newcommand{\projectlibloc}{https://bravo-lib.jpl.nasa.gov/docushare/dsweb/View/Library-509} + +\newcommand{\nasajplbottom}{\begingroup +\par\vspace*{\fill} +National Aeronautics and\\ +Space Administration\\ + +% JPL logo +\begin{figure}[h] + \includegraphics[width=0.3\textwidth]{figures/jpl-logo} +\end{figure} + +4800 Oak Grove Drive\\ +Pasadena, California 91109-8099\\ +\endgroup} + +\newcommand{\tlmname}[1]{\texttt{#1}} +\newcommand{\tlmarg}[1]{\texttt{#1}} +\newcommand{\argenum}[2]{\texttt{#1: #2}} + +\newenvironment{tlmusage}[1] +{ + \subsubsection*{Usage} + \hangindent=0.7cm \tlmname{#1} \enspace +} +{ +} + +\newenvironment{argdesc} +{ + \subsubsection*{Where} + \vspace{-0.5cm} + \renewcommand{\arraystretch}{1.5} + \table[h] + \center + \tabularx{\textwidth}{lX} +} +{ + 
\endtabularx + \endcenter + \vspace{-1cm} + \endtable +} + +\newenvironment{tlmdetails} +{ + \renewcommand{\arraystretch}{1.5} + \table[htp] + \center + \tabularx{\textwidth}{|l|l|X|l|} + \hline + \textbf{Byte} & + \textbf{Field} & + \textbf{Description} & + \textbf{Type}\\ + \hline +} +{ + \endtabularx + \endcenter + \vspace{-1cm} + \endtable +} diff --git a/src/doc/dict/tlm/tlmdict-02-preamble.tex b/src/doc/dict/tlm/tlmdict-02-preamble.tex new file mode 100644 index 00000000..bc0e0063 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-02-preamble.tex @@ -0,0 +1,184 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +% Acronym list +\usepackage{acronym} + +% Bibliography +\usepackage[round]{natbib} + +% Math and other symbols +\usepackage{array} +\usepackage{amssymb} +\usepackage{amsmath} +\usepackage{commath} +\usepackage{multirow} +\usepackage{tabularx} +\usepackage{textcomp} + +% Bytefield For packet diagrams +\usepackage{bytefield} +\usepackage{longtable} + +% Inconsolata for fixed-width fonts +\usepackage{inconsolata} + +% Use of color names in text +\usepackage[usenames,dvipsnames]{xcolor} + +% Author comments +\usepackage{xspace} +\newcommand{\todo}[1]{{\noindent\textit{\color{red} Todo: #1}}\xspace} +\newcommand{\note}[1]{{\noindent\textit{\color{blue} Note: #1}}\xspace} + +% Adjust page layout +\usepackage{layout} +\usepackage[margin=1.2in]{geometry} + +% Create headers and footers +\usepackage{fancyhdr} +\pagestyle{fancy} + +% Header: Document number and document type (short) +\lhead[\mission \, \jpldoc, \, \revision]{\mission \, \jpldoc, \, \revision} +\rhead[\docdate]{\docdate} + +% Footer: JPL Export Compliance, Page Number 
+\cfoot{\vspace{4ex} + \footnotesize{The technical data in this document is controlled + under the U.S. Export Regulations, release to foreign persons may + require an export authorization.}} +\rfoot[\thepage]{} + +% Fonts +\usepackage{alltt} +% \usepackage{cmbright} +\usepackage{mathpazo} +\usepackage[T1]{fontenc} + +% Change spacing in itemization +\usepackage{paralist} +\let\itemize\compactitem +\let\enditemize\endcompactitem +\let\enumerate\compactenum +\let\endenumerate\endcompactenum +\let\description\compactdesc +\let\enddescription\endcompactdesc +\pltopsep=\medskipamount +\plitemsep=2pt +\plparsep=1pt + +% Definition of labels in itemizations +\renewcommand{\labelitemi}{$\circ$} +\renewcommand{\labelitemii}{$\mathbf{\cdot}$} +\renewcommand{\labelitemiii}{$-$} + +% Figures +%\usepackage[font={scriptsize,sl}]{subfig} +\usepackage{epsfig} +\usepackage{graphicx} +\usepackage{caption} +\usepackage{subcaption} + +% Define placeholder figure, with variable height +% Use as: \placeholder{height} +\newcommand{\placeholder}[1]{ + \setlength{\fboxsep}{#1} + \framebox[\textwidth]{\textcolor{red}{[FIGURE PLACEHOLDER]}} +} + +% Adjust caption layout +\captionsetup{font={sl,small},labelfont={bf},margin=0pt} + +% Use sections, not chapters +\renewcommand{\thesection}{\arabic{section}} +\renewcommand{\theequation}{\arabic{equation}} +\renewcommand{\thefigure}{\arabic{figure}} +\renewcommand{\thetable}{\arabic{table}} + +% Set the number of levels of sections that get numbers: +% Setting this to 4 will number subsubsections +\setcounter{secnumdepth}{4} + +% Format table of contents (see specification of custom layout for table of +% contents below) +\usepackage[titles]{tocloft} + +% Make sections bold in table of contents +\renewcommand{\cftsecfont}{\bfseries} +\renewcommand{\cftsecpagefont}{\bfseries} + +% Modify the dot fill spacing in table of contents +\renewcommand{\cftdotsep}{0} + +% Bold dots for sections 
+%\renewcommand{\cftsecleader}{\bfseries\cftdotfill{\cftsecdotsep}} + +% Alternatively, no dots for sections in table of contents +\renewcommand{\cftsecdotsep}{\cftnodots} + +% No dots for subsections in table of contents +%\renewcommand{\cftsubsecdotsep}{\cftnodots} + +% Larger spacing between sections in table of contents +\setlength{\cftbeforesecskip}{2ex} + +% Set depth of table of contents +% Setting this to 3 will include subsubsections +\setcounter{tocdepth}{3} + +\usepackage{underscore} + +% Formatting of hyperlinks +% This must be defined as the last package to avoid conflicts with other +% packages! +\usepackage{hyperref} +\usepackage{url} +\hypersetup{ + colorlinks, + citecolor=RoyalBlue, + filecolor=RoyalBlue, + linkcolor=RoyalBlue, + urlcolor=RoyalBlue, +} + +% Set autoref to print 'section' and not 'subsection' +% This must be defined after all packages are called! +\let\subsectionautorefname\sectionautorefname +\let\subsubsectionautorefname\sectionautorefname + +%======================================================================= + +% Custom table of contents +% Condense table of contents and figure/table lists onto one page + +\makeatletter + +% Redefine table of contents +\renewcommand{\contentsname}{Table of Contents} +\renewcommand{\tableofcontents}{% + \null\hfill\textbf{\Large\contentsname}\hfill + \vspace{2ex} + \@mkboth{\MakeUppercase\contentsname}{\MakeUppercase\contentsname} + \@starttoc{toc}\vspace{12ex} +} + +% Redefine list of figures +% FIXME: Title not centered +\renewcommand{\listfigurename}{ + \hfill\textbf{\Large{List of Figures}}\hfill +} + +% Redefine list of tables +% FIXME: Title not centered +\renewcommand{\listtablename}{ + \hfill\textbf{\Large{List of Tables}}\hfill +} + +\makeatother diff --git a/src/doc/dict/tlm/tlmdict-03-title.tex b/src/doc/dict/tlm/tlmdict-03-title.tex new file mode 100644 index 00000000..3dd538f8 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-03-title.tex @@ -0,0 +1,41 @@ 
+%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\thispagestyle{empty} + +\raggedright + +% JPL document number +{\Large\bfseries JPL \jpldoc}\\[\baselineskip] + +% Full Mission Name +{\LARGE\bfseries \missionfull $\,$ (\mission)}\\[2\baselineskip] + +% Document Title +{\LARGE\bfseries \mission $\,$ \docname}\\[2\baselineskip] + +% Revision and Date +\revision\\ +\docdate\\[3\baselineskip] + +% Author +\prepareAname\\ +\prepareArole\\[2\baselineskip] + +\prepareBname\\ +\prepareBrole\\[6\baselineskip] + +Paper copies of this document may not be current and should not be +relied on for official purposes. The current version is in the +\mission \, Project Library:\\ +\href{\projectlibloc}{\projectlibloc}\\[1\baselineskip] +\textit{NOTE: Access is limited to \mission \, project personnel.}\\[2\baselineskip] + +\nasajplbottom +\hrulefill diff --git a/src/doc/dict/tlm/tlmdict-04-signatures.tex b/src/doc/dict/tlm/tlmdict-04-signatures.tex new file mode 100644 index 00000000..0e673144 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-04-signatures.tex @@ -0,0 +1,40 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. 
+%======================================================================= + +% \thispagestyle{empty} + +\section*{ + \vspace{-5ex}\center{\mission \, \docname} +} + +\begin{table}[htp] + \begin{center} + \begin{tabular}{p{8.5cm} p{2cm} p{4cm}} + % + \textbf{Prepared by:} & & \\[5ex] + \hrule & & \hrule \\[-2ex] + \prepareAname & & Date \\ + \prepareArole & & \\[5ex] + % + \hrule & & \hrule \\[-2ex] + \prepareBname & & Date \\ + \prepareBrole & & \\[5ex] + \textbf{Approved by:} & & \\[5ex] + \hrule & & \hrule \\[-2ex] + \approveAname & & Date \\ + \approveArole & & \\[5ex] + % + \hrule & & \hrule \\[-2ex] + \approveBname & & Date \\ + \approveBrole & & \\[5ex] + \end{tabular} + \end{center} +\end{table} + +\nasajplbottom diff --git a/src/doc/dict/tlm/tlmdict-05-changelog.tex b/src/doc/dict/tlm/tlmdict-05-changelog.tex new file mode 100644 index 00000000..85f97ce0 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-05-changelog.tex @@ -0,0 +1,23 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\section*{\center{Document Change Log}} + +\renewcommand{\arraystretch}{1.5} +\begin{table}[htp] + \begin{center} + \begin{tabular}{|p{2cm}|p{2.5cm}|p{6cm}|p{4cm}|} + \hline + \textbf{Revision} & \textbf{Date} & \textbf{Sections Changed} & \textbf{Author(s)} \\ + \hline\hline + Initial & 23-Feb-2016 & All & A. Mazer, B. 
Bornstein \\ + \hline + \end{tabular} + \end{center} +\end{table} diff --git a/src/doc/dict/tlm/tlmdict-06-intro.tex b/src/doc/dict/tlm/tlmdict-06-intro.tex new file mode 100644 index 00000000..7b9823e4 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-06-intro.tex @@ -0,0 +1,7 @@ +\section{ECOSTRESS 1553 H\&S Telemetry} + +The ECOSTRESS 1553 Health and Status telemetry is as follows. The ISS +required CCSDS primary and secondary headers (not shown) are +documented in the International Standard Payload Rack to International +Space Station, Software Interface Control Document Part 1 (ISS SSP +52050 Revision L). diff --git a/src/doc/dict/tlm/tlmdict-07-tlmdefs.tex b/src/doc/dict/tlm/tlmdict-07-tlmdefs.tex new file mode 100644 index 00000000..3fe0092e --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-07-tlmdefs.tex @@ -0,0 +1,1069 @@ +\begin{tlmdetails} +16 & \tlmarg{SubsetID} & Subset ID word. Value for OCO-3 should be 191. + & MSB_U16\\ + \hline +18 & \tlmarg{RequestID} & Servie Request ID Word + & MSB_U16\\ + \hline +20 & \tlmarg{RequestData} & Service Request Data Word + & MSB_U16\\ + \hline +22 & \tlmarg{ECW} & Caution and Warning word + & MSB_U16\\ + \hline +24 & \tlmarg{HSCycleCount} & TBD + & MSB_U16\\ + \hline +26 & \tlmarg{PayloadMessage} & TBD + & MSB_U16\\ + \hline +28 & \tlmarg{SampleTime} & None & TIME64\\ + \hline +36 & \tlmarg{Voltage01} & None & MSB_U16\\ + \hline +38 & \tlmarg{Voltage02} & None & MSB_U16\\ + \hline +40 & \tlmarg{Voltage03} & None & MSB_U16\\ + \hline +42 & \tlmarg{Voltage04} & None & MSB_U16\\ + \hline +44 & \tlmarg{Voltage05} & None & MSB_U16\\ + \hline +46 & \tlmarg{Voltage06} & None & MSB_U16\\ + \hline +48 & \tlmarg{Voltage07} & None & MSB_U16\\ + \hline +50 & \tlmarg{Voltage08} & None & MSB_U16\\ + \hline +52 & \tlmarg{Voltage09} & None & MSB_U16\\ + \hline +54 & \tlmarg{Voltage10} & None & MSB_U16\\ + \hline +56 & \tlmarg{Voltage11} & None & MSB_U16\\ + \hline +58 & \tlmarg{Voltage12} & None & MSB_U16\\ + \hline +60 & \tlmarg{Voltage13} 
& None & MSB_U16\\ + \hline +62 & \tlmarg{Voltage14} & None & MSB_U16\\ + \hline +64 & \tlmarg{Voltage15} & None & MSB_U16\\ + \hline +66 & \tlmarg{Voltage16} & None & MSB_U16\\ + \hline +68 & \tlmarg{Current01} & None & MSB_U16\\ + \hline +70 & \tlmarg{Current02} & None & MSB_U16\\ + \hline +72 & \tlmarg{Current03} & None & MSB_U16\\ + \hline +74 & \tlmarg{Current04} & None & MSB_U16\\ + \hline +76 & \tlmarg{Current05} & None & MSB_U16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +78 & \tlmarg{Current06} & None & MSB_U16\\ + \hline +80 & \tlmarg{Current07} & None & MSB_U16\\ + \hline +82 & \tlmarg{Current08} & None & MSB_U16\\ + \hline +84 & \tlmarg{Current09} & None & MSB_U16\\ + \hline +86 & \tlmarg{Current10} & None & MSB_U16\\ + \hline +88 & \tlmarg{Current11} & None & MSB_U16\\ + \hline +90 & \tlmarg{Current12} & None & MSB_U16\\ + \hline +92 & \tlmarg{Current13} & None & MSB_U16\\ + \hline +94 & \tlmarg{Current14} & None & MSB_U16\\ + \hline +96 & \tlmarg{Current15} & None & MSB_U16\\ + \hline +98 & \tlmarg{Current16} & None & MSB_U16\\ + \hline +100 & \tlmarg{Temp01} & None & MSB_U16\\ + \hline +102 & \tlmarg{Temp02} & None & MSB_U16\\ + \hline +104 & \tlmarg{Temp03} & None & MSB_U16\\ + \hline +106 & \tlmarg{Temp04} & None & MSB_U16\\ + \hline +108 & \tlmarg{Temp05} & None & MSB_U16\\ + \hline +110 & \tlmarg{Temp06} & None & MSB_U16\\ + \hline +112 & \tlmarg{Temp07} & None & MSB_U16\\ + \hline +114 & \tlmarg{Temp08} & None & MSB_U16\\ + \hline +116 & \tlmarg{Temp09} & None & MSB_U16\\ + \hline +118 & \tlmarg{Temp10} & None & MSB_U16\\ + \hline +120 & \tlmarg{Temp11} & None & MSB_U16\\ + \hline +122 & \tlmarg{Temp12} & None & MSB_U16\\ + \hline +124 & \tlmarg{Temp13} & None & MSB_U16\\ + \hline +126 & \tlmarg{Temp14} & None & MSB_U16\\ + \hline +128 & \tlmarg{Temp15} & None & MSB_U16\\ + \hline +130 & \tlmarg{Temp16} & None & MSB_U16\\ + \hline +132 & \tlmarg{Temp17} & None & MSB_U16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +134 & 
\tlmarg{Temp18} & None & MSB_U16\\ + \hline +136 & \tlmarg{Temp19} & None & MSB_U16\\ + \hline +138 & \tlmarg{Temp20} & None & MSB_U16\\ + \hline +140 & \tlmarg{Temp21} & None & MSB_U16\\ + \hline +142 & \tlmarg{Temp22} & None & MSB_U16\\ + \hline +144 & \tlmarg{Temp23} & None & MSB_U16\\ + \hline +146 & \tlmarg{Temp24} & None & MSB_U16\\ + \hline +148 & \tlmarg{Temp25} & None & MSB_U16\\ + \hline +150 & \tlmarg{Temp26} & None & MSB_U16\\ + \hline +152 & \tlmarg{Temp27} & None & MSB_U16\\ + \hline +154 & \tlmarg{Temp28} & None & MSB_U16\\ + \hline +156 & \tlmarg{Temp29} & None & MSB_U16\\ + \hline +158 & \tlmarg{Temp30} & None & MSB_U16\\ + \hline +160 & \tlmarg{Temp31} & None & MSB_U16\\ + \hline +162 & \tlmarg{Temp32} & None & MSB_U16\\ + \hline +164 & \tlmarg{Temp33} & None & MSB_U16\\ + \hline +166 & \tlmarg{Temp34} & None & MSB_U16\\ + \hline +168 & \tlmarg{Temp35} & None & MSB_U16\\ + \hline +170 & \tlmarg{Temp36} & None & MSB_U16\\ + \hline +172 & \tlmarg{Temp37} & None & MSB_U16\\ + \hline +174 & \tlmarg{Temp38} & None & MSB_U16\\ + \hline +176 & \tlmarg{Temp39} & None & MSB_U16\\ + \hline +178 & \tlmarg{Temp40} & None & MSB_U16\\ + \hline +180 & \tlmarg{Temp41} & None & MSB_U16\\ + \hline +182 & \tlmarg{Temp42} & None & MSB_U16\\ + \hline +184 & \tlmarg{Temp43} & None & MSB_U16\\ + \hline +186 & \tlmarg{Temp44} & None & MSB_U16\\ + \hline +188 & \tlmarg{Temp45} & None & MSB_U16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +190 & \tlmarg{Temp46} & None & MSB_U16\\ + \hline +192 & \tlmarg{Temp47} & None & MSB_U16\\ + \hline +194 & \tlmarg{Temp48} & None & MSB_U16\\ + \hline +196 & \tlmarg{Temp49} & None & MSB_U16\\ + \hline +198 & \tlmarg{Temp50} & None & MSB_U16\\ + \hline +200 & \tlmarg{Temp51} & None & MSB_U16\\ + \hline +202 & \tlmarg{Temp52} & None & MSB_U16\\ + \hline +204 & \tlmarg{Temp53} & None & MSB_U16\\ + \hline +206 & \tlmarg{Temp54} & None & MSB_U16\\ + \hline +208 & \tlmarg{Temp55} & None & MSB_U16\\ + \hline +210 & \tlmarg{Temp56} & None 
& MSB_U16\\ + \hline +212 & \tlmarg{Temp57} & None & MSB_U16\\ + \hline +214 & \tlmarg{Temp58} & None & MSB_U16\\ + \hline +216 & \tlmarg{Temp59} & None & MSB_U16\\ + \hline +218 & \tlmarg{Temp60} & None & MSB_U16\\ + \hline +220 & \tlmarg{Temp61} & None & MSB_U16\\ + \hline +222 & \tlmarg{Temp62} & None & MSB_U16\\ + \hline +224 & \tlmarg{Temp63} & None & MSB_U16\\ + \hline +226 & \tlmarg{Temp64} & None & MSB_U16\\ + \hline +228 & \tlmarg{CmdsRcvd} & None & MSB_U16\\ + \hline +230 & \tlmarg{CmdsFailed} & None & MSB_U16\\ + \hline +232 & \tlmarg{CmdsExec} & None & MSB_U16\\ + \hline +234 & \tlmarg{CurrSeqID} & None & MSB_U16\\ + \hline +236 & \tlmarg{SeqCmdOffset} & None & MSB_U16\\ + \hline +238 & \tlmarg{CmdHist0Time} & None & TIME64\\ + \hline +246 & \tlmarg{CmdHist0Opcode} & None & CMD16\\ + \hline +248 & \tlmarg{CmdHist1Time} & None & TIME64\\ + \hline +256 & \tlmarg{CmdHist1Opcode} & None & CMD16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +258 & \tlmarg{CmdHist2Time} & None & TIME64\\ + \hline +266 & \tlmarg{CmdHist2Opcode} & None & CMD16\\ + \hline +268 & \tlmarg{CmdHist3Time} & None & TIME64\\ + \hline +276 & \tlmarg{CmdHist3Opcode} & None & CMD16\\ + \hline +278 & \tlmarg{CmdHist4Time} & None & TIME64\\ + \hline +286 & \tlmarg{CmdHist4Opcode} & None & CMD16\\ + \hline +288 & \tlmarg{CmdHist5Time} & None & TIME64\\ + \hline +296 & \tlmarg{CmdHist5Opcode} & None & CMD16\\ + \hline +298 & \tlmarg{CmdHist6Time} & None & TIME64\\ + \hline +306 & \tlmarg{CmdHist6Opcode} & None & CMD16\\ + \hline +308 & \tlmarg{CmdHist7Time} & None & TIME64\\ + \hline +316 & \tlmarg{CmdHist7Opcode} & None & CMD16\\ + \hline +318 & \tlmarg{CmdHist8Time} & None & TIME64\\ + \hline +326 & \tlmarg{CmdHist8Opcode} & None & CMD16\\ + \hline +328 & \tlmarg{CmdHist9Time} & None & TIME64\\ + \hline +336 & \tlmarg{CmdHist9Opcode} & None & CMD16\\ + \hline +338 & \tlmarg{CmdHist10Time} & None & TIME64\\ + \hline +346 & \tlmarg{CmdHist10Opcode} & None & CMD16\\ + \hline +348 & 
\tlmarg{CmdHist11Time} & None & TIME64\\ + \hline +356 & \tlmarg{CmdHist11Opcode} & None & CMD16\\ + \hline +358 & \tlmarg{CmdHist12Time} & None & TIME64\\ + \hline +366 & \tlmarg{CmdHist12Opcode} & None & CMD16\\ + \hline +368 & \tlmarg{CmdHist13Time} & None & TIME64\\ + \hline +376 & \tlmarg{CmdHist13Opcode} & None & CMD16\\ + \hline +378 & \tlmarg{CmdHist14Time} & None & TIME64\\ + \hline +386 & \tlmarg{CmdHist14Opcode} & None & CMD16\\ + \hline +388 & \tlmarg{CmdHist15Time} & None & TIME64\\ + \hline +396 & \tlmarg{CmdHist15Opcode} & None & CMD16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +398 & \tlmarg{CmdHist16Time} & None & TIME64\\ + \hline +406 & \tlmarg{CmdHist16Opcode} & None & CMD16\\ + \hline +408 & \tlmarg{CmdHist17Time} & None & TIME64\\ + \hline +416 & \tlmarg{CmdHist17Opcode} & None & CMD16\\ + \hline +418 & \tlmarg{CmdHist18Time} & None & TIME64\\ + \hline +426 & \tlmarg{CmdHist18Opcode} & None & CMD16\\ + \hline +428 & \tlmarg{CmdHist19Time} & None & TIME64\\ + \hline +436 & \tlmarg{CmdHist19Opcode} & None & CMD16\\ + \hline +438 & \tlmarg{CmdNumSequenceTableProducts} & None & MSB_U32\\ + \hline +442 & \tlmarg{CmdNumScheduleTableProducts} & None & MSB_U32\\ + \hline +446 & \tlmarg{CmdNumLowRateBadataProducts} & None & MSB_U32\\ + \hline +450 & \tlmarg{CmdNumHighRateBadataProducts} & None & MSB_U32\\ + \hline +454 & \tlmarg{MemBytesAlloc} & None & MSB_U32\\ + \hline +458 & \tlmarg{MeanCPUPct} & None & MSB_U16\\ + \hline +460 & \tlmarg{PeakCPUPct} & None & MSB_U16\\ + \hline +462 & \tlmarg{PeakCPUSlot} & None & U8\\ + \hline +463 & \tlmarg{X1553Bytes} & None & MSB_U32\\ + \hline +467 & \tlmarg{EthernetBytes} & None & MSB_U32\\ + \hline +471 & \tlmarg{StkUsage} & None & MSB_U32\\ + \hline +475 & \tlmarg{NumDumpProducts} & None & MSB_U32\\ + \hline +479 & \tlmarg{NumLogProducts} & None & MSB_U32\\ + \hline +483 & \tlmarg{Cryo1PowerUpCycles} & None & MSB_U32\\ + \hline +487 & \tlmarg{Cryo1TempAmb} & None & MSB_F32\\ + \hline +491 & 
\tlmarg{Cryo1VSensor} & None & MSB_F32\\ + \hline +495 & \tlmarg{Cryo1VAC} & None & MSB_F32\\ + \hline +499 & \tlmarg{Cryo1VDC} & None & MSB_F32\\ + \hline +503 & \tlmarg{Cryo1CurrentDC} & None & MSB_F32\\ + \hline +507 & \tlmarg{Cryo1SetPoint} & None & MSB_F32\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +511 & \tlmarg{Cryo1PGain} & None & MSB_F32\\ + \hline +515 & \tlmarg{Cryo1IGain} & None & MSB_F32\\ + \hline +519 & \tlmarg{Cryo1ReadyWindow} & None & MSB_U32\\ + \hline +523 & \tlmarg{Cryo1RemoteReady} & None & MSB_U32\\ + \hline +527 & \tlmarg{Cryo1OutputVNomTemp} & None & MSB_F32\\ + \hline +531 & \tlmarg{Cryo1FreqOfACOutput} & None & MSB_F32\\ + \hline +535 & \tlmarg{Cryo1GainLowAmb} & None & MSB_F32\\ + \hline +539 & \tlmarg{Cryo1GainHighAmb} & None & MSB_F32\\ + \hline +543 & \tlmarg{Cryo1TempNominal} & None & MSB_F32\\ + \hline +547 & \tlmarg{Cryo1SlowStartFact} & None & MSB_F32\\ + \hline +551 & \tlmarg{Cryo1SlowStartT1} & None & MSB_F32\\ + \hline +555 & \tlmarg{Cryo1SlowStartT2} & None & MSB_F32\\ + \hline +559 & \tlmarg{Cryo1SlowStartT3} & None & MSB_F32\\ + \hline +563 & \tlmarg{Cryo1VTempSensor1} & None & MSB_U32\\ + \hline +567 & \tlmarg{Cryo1VTempSensor2} & None & MSB_U32\\ + \hline +571 & \tlmarg{Cryo1Cmd1} & None & MSB_U32\\ + \hline +575 & \tlmarg{Cryo1Cmd2} & None & MSB_U32\\ + \hline +579 & \tlmarg{Cryo1Cmd3} & None & MSB_U32\\ + \hline +583 & \tlmarg{Cryo1CmdResponse1} & None & MSB_U32\\ + \hline +587 & \tlmarg{Cryo1CmdResponse2} & None & MSB_U32\\ + \hline +591 & \tlmarg{Cryo1CmdResponse3} & None & MSB_U32\\ + \hline +595 & \tlmarg{Cryo2PowerUpCycles} & None & MSB_U32\\ + \hline +599 & \tlmarg{Cryo2TempAmb} & None & MSB_F32\\ + \hline +603 & \tlmarg{Cryo2VSensor} & None & MSB_F32\\ + \hline +607 & \tlmarg{Cryo2VAC} & None & MSB_F32\\ + \hline +611 & \tlmarg{Cryo2VDC} & None & MSB_F32\\ + \hline +615 & \tlmarg{Cryo2CurrentDC} & None & MSB_F32\\ + \hline +619 & \tlmarg{Cryo2SetPoint} & None & MSB_F32\\ + \hline +\end{tlmdetails} 
+\begin{tlmdetails} +623 & \tlmarg{Cryo2PGain} & None & MSB_F32\\ + \hline +627 & \tlmarg{Cryo2IGain} & None & MSB_F32\\ + \hline +631 & \tlmarg{Cryo2ReadyWindow} & None & MSB_U32\\ + \hline +635 & \tlmarg{Cryo2RemoteReady} & None & MSB_U32\\ + \hline +639 & \tlmarg{Cryo2OutputVNomTemp} & None & MSB_F32\\ + \hline +643 & \tlmarg{Cryo2FreqOfACOutput} & None & MSB_F32\\ + \hline +647 & \tlmarg{Cryo2GainLowAmb} & None & MSB_F32\\ + \hline +651 & \tlmarg{Cryo2GainHighAmb} & None & MSB_F32\\ + \hline +655 & \tlmarg{Cryo2TempNominal} & None & MSB_F32\\ + \hline +659 & \tlmarg{Cryo2SlowStartFact} & None & MSB_F32\\ + \hline +663 & \tlmarg{Cryo2SlowStartT1} & None & MSB_F32\\ + \hline +667 & \tlmarg{Cryo2SlowStartT2} & None & MSB_F32\\ + \hline +671 & \tlmarg{Cryo2SlowStartT3} & None & MSB_F32\\ + \hline +675 & \tlmarg{Cryo2VTempSensor1} & None & MSB_U32\\ + \hline +679 & \tlmarg{Cryo2VTempSensor2} & None & MSB_U32\\ + \hline +683 & \tlmarg{Cryo2Cmd1} & None & MSB_U32\\ + \hline +687 & \tlmarg{Cryo2Cmd2} & None & MSB_U32\\ + \hline +691 & \tlmarg{Cryo2Cmd3} & None & MSB_U32\\ + \hline +695 & \tlmarg{Cryo2CmdResponse1} & None & MSB_U32\\ + \hline +699 & \tlmarg{Cryo2CmdResponse2} & None & MSB_U32\\ + \hline +703 & \tlmarg{Cryo2CmdResponse3} & None & MSB_U32\\ + \hline +707 & \tlmarg{Cryo3PowerUpCycles} & None & MSB_U32\\ + \hline +711 & \tlmarg{Cryo3TempAmb} & None & MSB_F32\\ + \hline +715 & \tlmarg{Cryo3VSensor} & None & MSB_F32\\ + \hline +719 & \tlmarg{Cryo3VAC} & None & MSB_F32\\ + \hline +723 & \tlmarg{Cryo3VDC} & None & MSB_F32\\ + \hline +727 & \tlmarg{Cryo3CurrentDC} & None & MSB_F32\\ + \hline +731 & \tlmarg{Cryo3SetPoint} & None & MSB_F32\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +735 & \tlmarg{Cryo3PGain} & None & MSB_F32\\ + \hline +739 & \tlmarg{Cryo3IGain} & None & MSB_F32\\ + \hline +743 & \tlmarg{Cryo3ReadyWindow} & None & MSB_U32\\ + \hline +747 & \tlmarg{Cryo3RemoteReady} & None & MSB_U32\\ + \hline +751 & \tlmarg{Cryo3OutputVNomTemp} & None & 
MSB_F32\\ + \hline +755 & \tlmarg{Cryo3FreqOfACOutput} & None & MSB_F32\\ + \hline +759 & \tlmarg{Cryo3GainLowAmb} & None & MSB_F32\\ + \hline +763 & \tlmarg{Cryo3GainHighAmb} & None & MSB_F32\\ + \hline +767 & \tlmarg{Cryo3TempNominal} & None & MSB_F32\\ + \hline +771 & \tlmarg{Cryo3SlowStartFact} & None & MSB_F32\\ + \hline +775 & \tlmarg{Cryo3SlowStartT1} & None & MSB_F32\\ + \hline +779 & \tlmarg{Cryo3SlowStartT2} & None & MSB_F32\\ + \hline +783 & \tlmarg{Cryo3SlowStartT3} & None & MSB_F32\\ + \hline +787 & \tlmarg{Cryo3VTempSensor1} & None & MSB_U32\\ + \hline +791 & \tlmarg{Cryo3VTempSensor2} & None & MSB_U32\\ + \hline +795 & \tlmarg{Cryo3Cmd1} & None & MSB_U32\\ + \hline +799 & \tlmarg{Cryo3Cmd2} & None & MSB_U32\\ + \hline +803 & \tlmarg{Cryo3Cmd3} & None & MSB_U32\\ + \hline +807 & \tlmarg{Cryo3CmdResponse1} & None & MSB_U32\\ + \hline +811 & \tlmarg{Cryo3CmdResponse2} & None & MSB_U32\\ + \hline +815 & \tlmarg{Cryo3CmdResponse3} & None & MSB_U32\\ + \hline +819 & \tlmarg{EVRHist0Time} & None & TIME64\\ + \hline +827 & \tlmarg{EVRHist0Code} & None & EVR16\\ + \hline +831 & \tlmarg{EVRHist0DataLen} & None & MSB_U32\\ + \hline +835 & \tlmarg{EVRHist0Data0} & None & U8\\ + \hline +836 & \tlmarg{EVRHist0Data1} & None & U8\\ + \hline +837 & \tlmarg{EVRHist0Data2} & None & U8\\ + \hline +838 & \tlmarg{EVRHist0Data3} & None & U8\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +839 & \tlmarg{EVRHist0Data4} & None & U8\\ + \hline +840 & \tlmarg{EVRHist0Data5} & None & U8\\ + \hline +841 & \tlmarg{EVRHist0Data6} & None & U8\\ + \hline +842 & \tlmarg{EVRHist0Data7} & None & U8\\ + \hline +843 & \tlmarg{EVRHist0Data8} & None & U8\\ + \hline +844 & \tlmarg{EVRHist0Data9} & None & U8\\ + \hline +845 & \tlmarg{EVRHist1Time} & None & TIME64\\ + \hline +853 & \tlmarg{EVRHist1Code} & None & EVR16\\ + \hline +857 & \tlmarg{EVRHist1DataLen} & None & MSB_U32\\ + \hline +861 & \tlmarg{EVRHist1Data0} & None & U8\\ + \hline +862 & \tlmarg{EVRHist1Data1} & None & U8\\ + \hline 
+863 & \tlmarg{EVRHist1Data2} & None & U8\\ + \hline +864 & \tlmarg{EVRHist1Data3} & None & U8\\ + \hline +865 & \tlmarg{EVRHist1Data4} & None & U8\\ + \hline +866 & \tlmarg{EVRHist1Data5} & None & U8\\ + \hline +867 & \tlmarg{EVRHist1Data6} & None & U8\\ + \hline +868 & \tlmarg{EVRHist1Data7} & None & U8\\ + \hline +869 & \tlmarg{EVRHist1Data8} & None & U8\\ + \hline +870 & \tlmarg{EVRHist1Data9} & None & U8\\ + \hline +871 & \tlmarg{EVRHist2Time} & None & TIME64\\ + \hline +879 & \tlmarg{EVRHist2Code} & None & EVR16\\ + \hline +883 & \tlmarg{EVRHist2DataLen} & None & MSB_U32\\ + \hline +887 & \tlmarg{EVRHist2Data0} & None & U8\\ + \hline +888 & \tlmarg{EVRHist2Data1} & None & U8\\ + \hline +889 & \tlmarg{EVRHist2Data2} & None & U8\\ + \hline +890 & \tlmarg{EVRHist2Data3} & None & U8\\ + \hline +891 & \tlmarg{EVRHist2Data4} & None & U8\\ + \hline +892 & \tlmarg{EVRHist2Data5} & None & U8\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +893 & \tlmarg{EVRHist2Data6} & None & U8\\ + \hline +894 & \tlmarg{EVRHist2Data7} & None & U8\\ + \hline +895 & \tlmarg{EVRHist2Data8} & None & U8\\ + \hline +896 & \tlmarg{EVRHist2Data9} & None & U8\\ + \hline +897 & \tlmarg{EVRHist3Time} & None & TIME64\\ + \hline +905 & \tlmarg{EVRHist3Code} & None & EVR16\\ + \hline +909 & \tlmarg{EVRHist3DataLen} & None & MSB_U32\\ + \hline +913 & \tlmarg{EVRHist3Data0} & None & U8\\ + \hline +914 & \tlmarg{EVRHist3Data1} & None & U8\\ + \hline +915 & \tlmarg{EVRHist3Data2} & None & U8\\ + \hline +916 & \tlmarg{EVRHist3Data3} & None & U8\\ + \hline +917 & \tlmarg{EVRHist3Data4} & None & U8\\ + \hline +918 & \tlmarg{EVRHist3Data5} & None & U8\\ + \hline +919 & \tlmarg{EVRHist3Data6} & None & U8\\ + \hline +920 & \tlmarg{EVRHist3Data7} & None & U8\\ + \hline +921 & \tlmarg{EVRHist3Data8} & None & U8\\ + \hline +922 & \tlmarg{EVRHist3Data9} & None & U8\\ + \hline +923 & \tlmarg{EVRHist4Time} & None & TIME64\\ + \hline +931 & \tlmarg{EVRHist4Code} & None & EVR16\\ + \hline +935 & 
\tlmarg{EVRHist4DataLen} & None & MSB_U32\\ + \hline +939 & \tlmarg{EVRHist4Data0} & None & U8\\ + \hline +940 & \tlmarg{EVRHist4Data1} & None & U8\\ + \hline +941 & \tlmarg{EVRHist4Data2} & None & U8\\ + \hline +942 & \tlmarg{EVRHist4Data3} & None & U8\\ + \hline +943 & \tlmarg{EVRHist4Data4} & None & U8\\ + \hline +944 & \tlmarg{EVRHist4Data5} & None & U8\\ + \hline +945 & \tlmarg{EVRHist4Data6} & None & U8\\ + \hline +946 & \tlmarg{EVRHist4Data7} & None & U8\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +947 & \tlmarg{EVRHist4Data8} & None & U8\\ + \hline +948 & \tlmarg{EVRHist4Data9} & None & U8\\ + \hline +949 & \tlmarg{EVRHist5Time} & None & TIME64\\ + \hline +957 & \tlmarg{EVRHist5Code} & None & EVR16\\ + \hline +961 & \tlmarg{EVRHist5DataLen} & None & MSB_U32\\ + \hline +965 & \tlmarg{EVRHist5Data0} & None & U8\\ + \hline +966 & \tlmarg{EVRHist5Data1} & None & U8\\ + \hline +967 & \tlmarg{EVRHist5Data2} & None & U8\\ + \hline +968 & \tlmarg{EVRHist5Data3} & None & U8\\ + \hline +969 & \tlmarg{EVRHist5Data4} & None & U8\\ + \hline +970 & \tlmarg{EVRHist5Data5} & None & U8\\ + \hline +971 & \tlmarg{EVRHist5Data6} & None & U8\\ + \hline +972 & \tlmarg{EVRHist5Data7} & None & U8\\ + \hline +973 & \tlmarg{EVRHist5Data8} & None & U8\\ + \hline +974 & \tlmarg{EVRHist5Data9} & None & U8\\ + \hline +975 & \tlmarg{EVRHist6Time} & None & TIME64\\ + \hline +983 & \tlmarg{EVRHist6Code} & None & EVR16\\ + \hline +987 & \tlmarg{EVRHist6DataLen} & None & MSB_U32\\ + \hline +991 & \tlmarg{EVRHist6Data0} & None & U8\\ + \hline +992 & \tlmarg{EVRHist6Data1} & None & U8\\ + \hline +993 & \tlmarg{EVRHist6Data2} & None & U8\\ + \hline +994 & \tlmarg{EVRHist6Data3} & None & U8\\ + \hline +995 & \tlmarg{EVRHist6Data4} & None & U8\\ + \hline +996 & \tlmarg{EVRHist6Data5} & None & U8\\ + \hline +997 & \tlmarg{EVRHist6Data6} & None & U8\\ + \hline +998 & \tlmarg{EVRHist6Data7} & None & U8\\ + \hline +999 & \tlmarg{EVRHist6Data8} & None & U8\\ + \hline +1000 & 
\tlmarg{EVRHist6Data9} & None & U8\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +1001 & \tlmarg{EVRHist7Time} & None & TIME64\\ + \hline +1009 & \tlmarg{EVRHist7Code} & None & EVR16\\ + \hline +1013 & \tlmarg{EVRHist7DataLen} & None & MSB_U32\\ + \hline +1017 & \tlmarg{EVRHist7Data0} & None & U8\\ + \hline +1018 & \tlmarg{EVRHist7Data1} & None & U8\\ + \hline +1019 & \tlmarg{EVRHist7Data2} & None & U8\\ + \hline +1020 & \tlmarg{EVRHist7Data3} & None & U8\\ + \hline +1021 & \tlmarg{EVRHist7Data4} & None & U8\\ + \hline +1022 & \tlmarg{EVRHist7Data5} & None & U8\\ + \hline +1023 & \tlmarg{EVRHist7Data6} & None & U8\\ + \hline +1024 & \tlmarg{EVRHist7Data7} & None & U8\\ + \hline +1025 & \tlmarg{EVRHist7Data8} & None & U8\\ + \hline +1026 & \tlmarg{EVRHist7Data9} & None & U8\\ + \hline +1027 & \tlmarg{EVRHist8Time} & None & TIME64\\ + \hline +1035 & \tlmarg{EVRHist8Code} & None & EVR16\\ + \hline +1039 & \tlmarg{EVRHist8DataLen} & None & MSB_U32\\ + \hline +1043 & \tlmarg{EVRHist8Data0} & None & U8\\ + \hline +1044 & \tlmarg{EVRHist8Data1} & None & U8\\ + \hline +1045 & \tlmarg{EVRHist8Data2} & None & U8\\ + \hline +1046 & \tlmarg{EVRHist8Data3} & None & U8\\ + \hline +1047 & \tlmarg{EVRHist8Data4} & None & U8\\ + \hline +1048 & \tlmarg{EVRHist8Data5} & None & U8\\ + \hline +1049 & \tlmarg{EVRHist8Data6} & None & U8\\ + \hline +1050 & \tlmarg{EVRHist8Data7} & None & U8\\ + \hline +1051 & \tlmarg{EVRHist8Data8} & None & U8\\ + \hline +1052 & \tlmarg{EVRHist8Data9} & None & U8\\ + \hline +1053 & \tlmarg{EVRHist9Time} & None & TIME64\\ + \hline +1061 & \tlmarg{EVRHist9Code} & None & EVR16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +1065 & \tlmarg{EVRHist9DataLen} & None & MSB_U32\\ + \hline +1069 & \tlmarg{EVRHist9Data0} & None & U8\\ + \hline +1070 & \tlmarg{EVRHist9Data1} & None & U8\\ + \hline +1071 & \tlmarg{EVRHist9Data2} & None & U8\\ + \hline +1072 & \tlmarg{EVRHist9Data3} & None & U8\\ + \hline +1073 & \tlmarg{EVRHist9Data4} & None & U8\\ + \hline +1074 
& \tlmarg{EVRHist9Data5} & None & U8\\ + \hline +1075 & \tlmarg{EVRHist9Data6} & None & U8\\ + \hline +1076 & \tlmarg{EVRHist9Data7} & None & U8\\ + \hline +1077 & \tlmarg{EVRHist9Data8} & None & U8\\ + \hline +1078 & \tlmarg{EVRHist9Data9} & None & U8\\ + \hline +1079 & \tlmarg{X1553ErrorsFormatIn} & None & MSB_U16\\ + \hline +1081 & \tlmarg{X1553ErrorsLoopTest} & None & MSB_U16\\ + \hline +1083 & \tlmarg{X1553ErrorsDataRollover} & None & MSB_U16\\ + \hline +1085 & \tlmarg{X1553ErrorsIllegalCmd} & None & MSB_U16\\ + \hline +1087 & \tlmarg{X1553ErrorsWordCount} & None & MSB_U16\\ + \hline +1089 & \tlmarg{X1553ErrorsDataSync} & None & MSB_U16\\ + \hline +1091 & \tlmarg{X1553ErrorsInvalidWord} & None & MSB_U16\\ + \hline +1093 & \tlmarg{X1553ErrorsCmdWord} & None & MSB_U16\\ + \hline +1095 & \tlmarg{X1553ErrorsRAMParity} & None & MSB_U16\\ + \hline +1097 & \tlmarg{X1553ErrorsXmitTimeout} & None & MSB_U16\\ + \hline +1099 & \tlmarg{X1553ErrorsStkRollover} & None & MSB_U16\\ + \hline +1101 & \tlmarg{X1553ErrorsHandshake} & None & MSB_U16\\ + \hline +1103 & \tlmarg{X1553ErrorsAddrParity} & None & MSB_U16\\ + \hline +1105 & \tlmarg{X1553ErrorsFormatOut} & None & MSB_U16\\ + \hline +1107 & \tlmarg{X1553ErrorsHAndSSync} & None & MSB_U16\\ + \hline +1109 & \tlmarg{FPEnabled000MSB} & None & MSB_U32\\ + \hline +1113 & \tlmarg{FPEnabled032MSB} & None & MSB_U32\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +1117 & \tlmarg{FPEnabled064MSB} & None & MSB_U32\\ + \hline +1121 & \tlmarg{FPEnabled096MSB} & None & MSB_U32\\ + \hline +1125 & \tlmarg{FPTriggered000MSB} & None & MSB_U32\\ + \hline +1129 & \tlmarg{FPTriggered032MSB} & None & MSB_U32\\ + \hline +1133 & \tlmarg{FPTriggered064MSB} & None & MSB_U32\\ + \hline +1137 & \tlmarg{FPTriggered096MSB} & None & MSB_U32\\ + \hline +1141 & \tlmarg{FaultHist0Time} & None & TIME64\\ + \hline +1149 & \tlmarg{FaultHist0ID} & None & MSB_U16\\ + \hline +1151 & \tlmarg{FaultHist1Time} & None & TIME64\\ + \hline +1159 & \tlmarg{FaultHist1ID} 
& None & MSB_U16\\ + \hline +1161 & \tlmarg{FaultHist2Time} & None & TIME64\\ + \hline +1169 & \tlmarg{FaultHist2ID} & None & MSB_U16\\ + \hline +1171 & \tlmarg{FaultHist3Time} & None & TIME64\\ + \hline +1179 & \tlmarg{FaultHist3ID} & None & MSB_U16\\ + \hline +1181 & \tlmarg{FaultHist4Time} & None & TIME64\\ + \hline +1189 & \tlmarg{FaultHist4ID} & None & MSB_U16\\ + \hline +1191 & \tlmarg{FaultHist5Time} & None & TIME64\\ + \hline +1199 & \tlmarg{FaultHist5ID} & None & MSB_U16\\ + \hline +1201 & \tlmarg{FaultHist6Time} & None & TIME64\\ + \hline +1209 & \tlmarg{FaultHist6ID} & None & MSB_U16\\ + \hline +1211 & \tlmarg{FaultHist7Time} & None & TIME64\\ + \hline +1219 & \tlmarg{FaultHist7ID} & None & MSB_U16\\ + \hline +1221 & \tlmarg{FaultHist8Time} & None & TIME64\\ + \hline +1229 & \tlmarg{FaultHist8ID} & None & MSB_U16\\ + \hline +1231 & \tlmarg{FaultHist9Time} & None & TIME64\\ + \hline +1239 & \tlmarg{FaultHist9ID} & None & MSB_U16\\ + \hline +1241 & \tlmarg{FaultHist10Time} & None & TIME64\\ + \hline +1249 & \tlmarg{FaultHist10ID} & None & MSB_U16\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +1251 & \tlmarg{FaultHist11Time} & None & TIME64\\ + \hline +1259 & \tlmarg{FaultHist11ID} & None & MSB_U16\\ + \hline +1261 & \tlmarg{FaultHist12Time} & None & TIME64\\ + \hline +1269 & \tlmarg{FaultHist12ID} & None & MSB_U16\\ + \hline +1271 & \tlmarg{FaultHist13Time} & None & TIME64\\ + \hline +1279 & \tlmarg{FaultHist13ID} & None & MSB_U16\\ + \hline +1281 & \tlmarg{FaultHist14Time} & None & TIME64\\ + \hline +1289 & \tlmarg{FaultHist14ID} & None & MSB_U16\\ + \hline +1291 & \tlmarg{FaultHist15Time} & None & TIME64\\ + \hline +1299 & \tlmarg{FaultHist15ID} & None & MSB_U16\\ + \hline +1301 & \tlmarg{FaultHist16Time} & None & TIME64\\ + \hline +1309 & \tlmarg{FaultHist16ID} & None & MSB_U16\\ + \hline +1311 & \tlmarg{FaultHist17Time} & None & TIME64\\ + \hline +1319 & \tlmarg{FaultHist17ID} & None & MSB_U16\\ + \hline +1321 & \tlmarg{FaultHist18Time} & None & 
TIME64\\ + \hline +1329 & \tlmarg{FaultHist18ID} & None & MSB_U16\\ + \hline +1331 & \tlmarg{FaultHist19Time} & None & TIME64\\ + \hline +1339 & \tlmarg{FaultHist19ID} & None & MSB_U16\\ + \hline +1341 & \tlmarg{FPNumCRCTableProducts} & None & MSB_U32\\ + \hline +1345 & \tlmarg{FPNumFaultResponseTableProducts} & None & MSB_U32\\ + \hline +1349 & \tlmarg{FPIELastCmd0} & None & MSB_U32\\ + \hline +1353 & \tlmarg{FPIELastCmd1} & None & MSB_U32\\ + \hline +1357 & \tlmarg{FPIELastResponse0} & None & MSB_U32\\ + \hline +1361 & \tlmarg{FPIELastResponse1} & None & MSB_U32\\ + \hline +1365 & \tlmarg{MotorLastCmd0} & None & MSB_U32\\ + \hline +1369 & \tlmarg{MotorLastCmd1} & None & MSB_U32\\ + \hline +1373 & \tlmarg{MotorLastResponse0} & None & MSB_U32\\ + \hline +1377 & \tlmarg{MotorLastResponse1} & None & MSB_U32\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +1381 & \tlmarg{MotorPos1} & None & MSB_U32\\ + \hline +1385 & \tlmarg{MotorPos1Time} & None & TIME64\\ + \hline +1393 & \tlmarg{MotorPos2} & None & MSB_U32\\ + \hline +1397 & \tlmarg{MotorPos2Time} & None & TIME64\\ + \hline +1405 & \tlmarg{MotorPos3} & None & MSB_U32\\ + \hline +1409 & \tlmarg{MotorPos3Time} & None & TIME64\\ + \hline +1417 & \tlmarg{MotorPos4} & None & MSB_U32\\ + \hline +1421 & \tlmarg{MotorPos4Time} & None & TIME64\\ + \hline +1429 & \tlmarg{MotorPos5} & None & MSB_U32\\ + \hline +1433 & \tlmarg{MotorPos5Time} & None & TIME64\\ + \hline +1441 & \tlmarg{MotorMoving} & None & U8\\ + \hline +1442 & \tlmarg{MotorMode} & None & MSB_U32\\ + \hline +1446 & \tlmarg{PCECircuit1} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit2} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit3} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit4} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit5} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit6} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit7} & None & U8\\ + \hline +1446 & \tlmarg{PCECircuit8} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit9} & None & U8\\ + 
\hline +1447 & \tlmarg{PCECircuit10} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit11} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit12} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit13} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit14} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit15} & None & U8\\ + \hline +1447 & \tlmarg{PCECircuit16} & None & U8\\ + \hline +\end{tlmdetails} +\begin{tlmdetails} +1448 & \tlmarg{TempCtl0Mode} & None & U8\\ + \hline +1449 & \tlmarg{TempCtl0Sensor} & None & U8\\ + \hline +1450 & \tlmarg{TempCtl0Circuit} & None & U8\\ + \hline +1451 & \tlmarg{TempCtl0Target} & None & MSB_U16\\ + \hline +1453 & \tlmarg{TempCtl0Sample} & None & MSB_U16\\ + \hline +1455 & \tlmarg{TempCtl0HtrOn} & None & U8\\ + \hline +1456 & \tlmarg{TempCtl1Mode} & None & U8\\ + \hline +1457 & \tlmarg{TempCtl1Sensor} & None & U8\\ + \hline +1458 & \tlmarg{TempCtl1Circuit} & None & U8\\ + \hline +1459 & \tlmarg{TempCtl1Target} & None & MSB_U16\\ + \hline +1461 & \tlmarg{TempCtl1Sample} & None & MSB_U16\\ + \hline +1463 & \tlmarg{TempCtl1HtrOn} & None & U8\\ + \hline +1464 & \tlmarg{TempCtl2Mode} & None & U8\\ + \hline +1465 & \tlmarg{TempCtl2Sensor} & None & U8\\ + \hline +1466 & \tlmarg{TempCtl2Circuit} & None & U8\\ + \hline +1467 & \tlmarg{TempCtl2Target} & None & MSB_U16\\ + \hline +1469 & \tlmarg{TempCtl2Sample} & None & MSB_U16\\ + \hline +1471 & \tlmarg{TempCtl2HtrOn} & None & U8\\ + \hline +1472 & \tlmarg{TempCtl3Mode} & None & U8\\ + \hline +1473 & \tlmarg{TempCtl3Sensor} & None & U8\\ + \hline +1474 & \tlmarg{TempCtl3Circuit} & None & U8\\ + \hline +1475 & \tlmarg{TempCtl3Target} & None & MSB_U16\\ + \hline +1477 & \tlmarg{TempCtl3Sample} & None & MSB_U16\\ + \hline +1479 & \tlmarg{TempCtl3HtrOn} & None & U8\\ + \hline +1480 & \tlmarg{TempCtl4Mode} & None & U8\\ + \hline +1481 & \tlmarg{TempCtl4Sensor} & None & U8\\ + \hline +1482 & \tlmarg{TempCtl4Circuit} & None & U8\\ + \hline +1483 & \tlmarg{TempCtl4Target} & None & MSB_U16\\ + \hline 
+\end{tlmdetails} +\begin{tlmdetails} +1485 & \tlmarg{TempCtl4Sample} & None & MSB_U16\\ + \hline +1487 & \tlmarg{TempCtl4HtrOn} & None & U8\\ + \hline +1488 & \tlmarg{TempCtl5Mode} & None & U8\\ + \hline +1489 & \tlmarg{TempCtl5Sensor} & None & U8\\ + \hline +1490 & \tlmarg{TempCtl5Circuit} & None & U8\\ + \hline +1491 & \tlmarg{TempCtl5Target} & None & MSB_U16\\ + \hline +1493 & \tlmarg{TempCtl5Sample} & None & MSB_U16\\ + \hline +1495 & \tlmarg{TempCtl5HtrOn} & None & U8\\ + \hline +\end{tlmdetails} + diff --git a/src/doc/dict/tlm/tlmdict-08-acronyms.tex b/src/doc/dict/tlm/tlmdict-08-acronyms.tex new file mode 100644 index 00000000..14256a79 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict-08-acronyms.tex @@ -0,0 +1,24 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\section{Appendix} +\label{app:acron} + +\subsection{List of Acronyms and Abbreviations} + +% Define longest acronym in [] to determine spacing to corresponding full names +\begin{acronym}[ECOSTRESS] + \acro{ECOSTRESS} {ECOsystem Spaceborne Thermal Radiometer Experiment on + Space Station} + \acro{CMD} {Command} + \acro{CRYO} {Cryocooler} + \acro{FP} {Fault Protection} + \acro{FPIE} {Focal Plane Interface Electronics} + \acro{TEMPCTL} {Temperature Control} +\end{acronym} diff --git a/src/doc/dict/tlm/tlmdict.tex b/src/doc/dict/tlm/tlmdict.tex new file mode 100644 index 00000000..6afe4859 --- /dev/null +++ b/src/doc/dict/tlm/tlmdict.tex @@ -0,0 +1,43 @@ +%======================================================================= +% JPL Project Document LaTeX Template +% +% Template Author: Laura Alisic Jewell (23-Nov-2015) +% +% Copyright: 2015 California Institute of Technology. 
United States +% Government sponsorship acknowledged. ALL RIGHTS RESERVED. +%======================================================================= + +\documentclass[11pt, letterpaper, twoside]{report} + +\input{tlmdict-01-defs} +\input{tlmdict-02-preamble} + +\begin{document} + +\include{tlmdict-03-title} +\include{tlmdict-04-signatures} + +\pagenumbering{roman} + +\include{tlmdict-05-changelog} + +\tableofcontents +\clearpage + +\pagenumbering{arabic} +\setcounter{page}{1} + +\include{tlmdict-06-intro} +\include{tlmdict-07-tlmdefs} + +% Change section numbers to letters for the appendices +\renewcommand\thesection{\Alph{section}} +\renewcommand\thesubsection{\thesection.\arabic{subsection}} +\renewcommand\thesubsubsection{\thesection.\arabic{subsection}. +\arabic{subsubsection}} + +% Make sure first section in the appendix starts with A +\setcounter{section}{0} +\include{tlmdict-08-acronyms} + +\end{document} diff --git a/test/test_cli.sh b/test/test_cli.sh new file mode 100755 index 00000000..014e969f --- /dev/null +++ b/test/test_cli.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +set -e + +test_help() { + _script=${1//_/-} + $_script --help > /dev/null +} + +test_bliss_cmd_send() { + bliss-cmd-send NO_OP > /dev/null +} + +if [ -z $BLISS_ROOT ]; then + BLISS_ROOT=../ +fi + +# loop through all scripts in the bin directory +for script in `find ${BLISS_ROOT}/bliss/core/bin -type f -name "*.py"`; do + if [[ "$script" == *__init__* ]]; then + continue + fi + + # Build the function name from the script filepath + scr_name=$(basename $script) + fx_name=${scr_name%".py"} + + if [ "$(type -t test_$fx_name)" != "function" ]; then + echo "Test for $scr_name does not exist. Checking --help flag." + echo $fx_name + test_help $fx_name + else + test_$fx_name + fi + +done + +# +# main end +#### + +