Add SBOM Convert Command (#2798)
* SBOM convert command

Signed-off-by: Charles Hu <[email protected]>

* Fix tests

Signed-off-by: Charles Hu <[email protected]>

* SBOM update

Signed-off-by: Charles Hu <[email protected]>

* Example SPDX to CycloneDX added

Signed-off-by: Charles Hu <[email protected]>

* Testing changes; README blurb

Signed-off-by: Charles Hu <[email protected]>

* Blurb update

Signed-off-by: Charles Hu <[email protected]>

* Linting

Signed-off-by: Charles Hu <[email protected]>

* Input check update

Signed-off-by: Charles Hu <[email protected]>

---------

Signed-off-by: Charles Hu <[email protected]>
Co-authored-by: Amndeep Singh Mann <[email protected]>
charleshu-8 and Amndeep7 authored Aug 22, 2024
1 parent a6f0a50 commit f406b49
Showing 25 changed files with 471,126 additions and 1 deletion.
30 changes: 30 additions & 0 deletions README.md
@@ -46,6 +46,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
* [AWS Config to HDF](#aws-config-to-hdf)
* [Burp Suite to HDF](#burp-suite-to-hdf)
* [CKL to POA&M](#ckl-to-poam)
* [CycloneDX SBOM to HDF](#cyclonedx-sbom-to-hdf)
* [DBProtect to HDF](#dbprotect-to-hdf)
* [Fortify to HDF](#fortify-to-hdf)
* [gosec to HDF](#gosec-to-hdf)
@@ -568,6 +569,35 @@ convert ckl2POAM Translate DISA Checklist CKL file(s) to POA&M file
EXAMPLES
$ saf convert ckl2POAM -i checklist_file.ckl -o output-folder -d abcdefg -s 2
```

[top](#convert-other-formats-to-hdf)
#### CycloneDX SBOM to HDF

Note: Currently, only the CycloneDX SBOM, VEX, and HBOM formats are officially supported by the CycloneDX SBOM convert command (formats such as SaaSBOM are NOT supported and will result in errors). To convert other, non-CycloneDX SBOM formats, first convert your SBOM data file into the CycloneDX SBOM format with [their provided utility](https://github.com/CycloneDX/cyclonedx-cli), and then convert the resulting CycloneDX SBOM file to OHDF with the `saf convert cyclonedx_sbom2hdf` command.

EX) To convert the SPDX SBOM format to the CycloneDX SBOM format using the [CycloneDX CLI](https://github.com/CycloneDX/cyclonedx-cli), you can run the following:

```
cyclonedx-cli convert --input-file spdx-sbom.json --output-file cyclonedx-sbom.json --input-format spdxjson --output-format json
```

Then convert the resulting CycloneDX SBOM file to OHDF.
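For example, carrying the file names over from the `cyclonedx-cli` command above (the output name is arbitrary):

```
saf convert cyclonedx_sbom2hdf -i cyclonedx-sbom.json -o cyclonedx-sbom-hdf.json
```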

```
convert cyclonedx_sbom2hdf   Translate a CycloneDX SBOM report into an HDF results set

USAGE
  $ saf convert cyclonedx_sbom2hdf -i <cyclonedx_sbom-json> -o <hdf-scan-results-json> [-h] [-w]

FLAGS
  -h, --help                            Show CLI help.
  -i, --input=<cyclonedx_sbom-json>     (required) Input CycloneDX SBOM File
  -o, --output=<hdf-scan-results-json>  (required) Output HDF JSON File
  -w, --with-raw                        Include raw input file in HDF JSON file

EXAMPLES
  $ saf convert cyclonedx_sbom2hdf -i cyclonedx_sbom.json -o output-hdf-name.json
```
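The `-w`/`--with-raw` flag (defined in the command implementation below) embeds the raw input file in the generated HDF JSON, for example:

```
$ saf convert cyclonedx_sbom2hdf -i cyclonedx_sbom.json -o output-hdf-name.json -w
```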

[top](#convert-other-formats-to-hdf)
#### DBProtect to HDF
```
30 changes: 30 additions & 0 deletions src/commands/convert/cyclonedx_sbom2hdf.ts
@@ -0,0 +1,30 @@
import {Command, Flags} from '@oclif/core'
import fs from 'fs'
import {CycloneDXSBOMResults as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'

export default class CycloneDXSBOM2HDF extends Command {
  static usage = 'convert cyclonedx_sbom2hdf -i <cyclonedx_sbom-json> -o <hdf-scan-results-json> [-h] [-w]'

  static description = 'Translate a CycloneDX SBOM report into an HDF results set'

  static examples = ['saf convert cyclonedx_sbom2hdf -i cyclonedx_sbom.json -o output-hdf-name.json']

  static flags = {
    help: Flags.help({char: 'h'}),
    input: Flags.string({char: 'i', required: true, description: 'Input CycloneDX SBOM file'}),
    output: Flags.string({char: 'o', required: true, description: 'Output HDF JSON file'}),
    'with-raw': Flags.boolean({char: 'w', required: false, description: 'Include raw input file in HDF JSON file'}),
  }

  async run() {
    const {flags} = await this.parse(CycloneDXSBOM2HDF)

    // Check for correct input type
    const data = fs.readFileSync(flags.input, 'utf8')
    checkInput({data, filename: flags.input}, 'cyclonedx_sbom', 'CycloneDX SBOM output file')

    const converter = new Mapper(data, flags['with-raw'])
    fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
  }
}
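For library consumers, the mapper added in this commit can also be driven directly; a minimal sketch based on the `run()` method above (file names are placeholders, and the second constructor argument mirrors the `-w`/`--with-raw` flag):

```typescript
import fs from 'fs'
import {CycloneDXSBOMResults} from '@mitre/hdf-converters'

// Read a CycloneDX SBOM export and map it to an HDF results set
const data = fs.readFileSync('cyclonedx_sbom.json', 'utf8')
const converter = new CycloneDXSBOMResults(data, false)

// Serialize the HDF output the same way the CLI command does
fs.writeFileSync('output-hdf-name.json', JSON.stringify(converter.toHdf(), null, 2))
```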
9 changes: 8 additions & 1 deletion src/commands/convert/index.ts
@@ -1,4 +1,4 @@
-import {ASFFResults, ChecklistResults, BurpSuiteMapper, ConveyorResults, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, MsftSecureScoreMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TrufflehogResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
+import {ASFFResults, ChecklistResults, BurpSuiteMapper, ConveyorResults, CycloneDXSBOMResults, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, MsftSecureScoreMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TrufflehogResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
import fs from 'fs'
import _ from 'lodash'
import {checkSuffix, convertFullPathToFilename} from '../../utils/global'
@@ -51,6 +51,7 @@ export default class Convert extends Command {
      case 'nikto':
      case 'prisma':
      case 'sarif':
      case 'cyclonedx_sbom':
      case 'scoutsuite':
      case 'snyk':
      case 'trufflehog':
@@ -127,6 +128,12 @@
        break
      }

      case 'cyclonedx_sbom': {
        converter = new CycloneDXSBOMResults(fs.readFileSync(flags.input, 'utf8'))
        fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
        break
      }

      case 'fortify': {
        converter = new FortifyMapper(fs.readFileSync(flags.input, 'utf8'))
        fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
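Because `cyclonedx_sbom` is now wired into both the fingerprinting switch and the converter dispatch above, the generic convert entry point should detect CycloneDX SBOM input automatically; a sketch, assuming the generic command's usual `-i`/`-o` flags (as used via `flags.input`/`flags.output` in this diff):

```
$ saf convert -i cyclonedx_sbom.json -o output-hdf-name.json
```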
127 changes: 127 additions & 0 deletions test/commands/convert/cyclonedx_sbom2hdf.test.ts
@@ -0,0 +1,127 @@
import {expect, test} from '@oclif/test'
import tmp from 'tmp'
import path from 'path'
import fs from 'fs'
import {omitHDFChangingFields} from '../utils'

describe('Test sbom', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/dropwizard-no-vulns.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - dropwizard no vulns', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-dropwizard-no-vulns-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/dropwizard-vex.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - dropwizard vex', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-dropwizard-vex-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/dropwizard-vulns.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - dropwizard w/ vulns', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-dropwizard-vulns-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/generated-saf-sbom.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - saf', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-saf-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/vex.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - vex', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-vex-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/spdx-to-cyclonedx.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - spdx converted cyclonedx', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-converted-spdx-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/syft-scan-alpine-container.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`])
    .it('hdf-converter output test - syft-generated alpine container', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-syft-alpine-container-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
})

describe('Test sbom using withraw flag', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/dropwizard-no-vulns.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - dropwizard no vulns', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-dropwizard-no-vulns-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/dropwizard-vex.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - dropwizard vex', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-dropwizard-vex-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/dropwizard-vulns.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - dropwizard w/ vulns', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-dropwizard-vulns-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/generated-saf-sbom.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - saf', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-saf-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/vex.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - vex', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-vex-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/spdx-to-cyclonedx.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - spdx converted cyclonedx', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-converted-spdx-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert cyclonedx_sbom2hdf', '-i', path.resolve('./test/sample_data/cyclonedx_sbom/sample_input_report/syft-scan-alpine-container.json'), '-o', `${tmpobj.name}/cyclonedx_sbom.json`, '-w'])
    .it('hdf-converter withraw output test - syft-generated alpine container', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/cyclonedx_sbom.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/cyclonedx_sbom/sbom-syft-alpine-container-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
})