Skip to content

Commit

Permalink
initial commit for saf cli int grype (#2887)
Browse files Browse the repository at this point in the history
* initial commit for saf cli int grype

* lint and readme

* small QOL changes
  • Loading branch information
andytang99 committed Sep 16, 2024
1 parent 793aa41 commit c5bce30
Show file tree
Hide file tree
Showing 13 changed files with 559,954 additions and 1 deletion.
18 changes: 18 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
### Convert Other Formats to HDF

* [Convert To HDF](#convert-to-hdf)
* [Anchore Grype to HDF](#anchore-grype-to-hdf)
* [ASFF to HDF](#asff-to-hdf)
* [AWS Config to HDF](#aws-config-to-hdf)
* [Burp Suite to HDF](#burp-suite-to-hdf)
Expand Down Expand Up @@ -280,6 +281,23 @@ Want to Recommend or Help Develop a Converter? See [the wiki](https://github.com

### Convert From HDF

[top](#convert-other-formats-to-hdf)
#### Anchore Grype to HDF
```
convert anchoregrype2hdf Translate an Anchore Grype output file into an HDF results set
USAGE
$ saf convert anchoregrype2hdf -i <anchoregrype-json> -o <hdf-scan-results-json>
FLAGS
-h, --help Show CLI help.
-i, --input=<anchoregrype-json> (required) Input Anchore Grype file
-o, --output=<hdf-scan-results-json> (required) Output HDF JSON File
EXAMPLES
$ saf convert anchoregrype2hdf -i anchoregrype.json -o output-hdf-name.json
```


#### HDF to ASFF

Expand Down
27 changes: 27 additions & 0 deletions src/commands/convert/anchoregrype2hdf.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import {Command, Flags} from '@oclif/core'
import fs from 'fs'
import {AnchoreGrypeMapper as Mapper} from '@mitre/hdf-converters'
import {checkSuffix} from '../../utils/global'

/**
 * `saf convert anchoregrype2hdf` — converts an Anchore Grype JSON scan report
 * into a Heimdall Data Format (HDF) results file.
 *
 * Reads the input file from disk, feeds it through the
 * `AnchoreGrypeMapper` from @mitre/hdf-converters, and writes the resulting
 * HDF JSON (pretty-printed, 2-space indent) to the output path.
 */
export default class AnchoreGrype2HDF extends Command {
  static usage = 'convert anchoregrype2hdf -i <anchoregrype-json> -o <hdf-scan-results-json>'

  static description = 'Translate an Anchore Grype output file into an HDF results set'

  static examples = ['saf convert anchoregrype2hdf -i anchoregrype.json -o output-hdf-name.json']

  static flags = {
    help: Flags.help({char: 'h'}),
    input: Flags.string({char: 'i', required: true, description: 'Input Anchore Grype file'}),
    // Description matches the README help text for this command
    output: Flags.string({char: 'o', required: true, description: 'Output HDF JSON File'}),
    // Previously had no description, which rendered as a blank line in CLI help
    'with-raw': Flags.boolean({char: 'w', required: false, description: 'Include raw input file in HDF JSON file'}),
  }

  async run() {
    const {flags} = await this.parse(AnchoreGrype2HDF)
    const input = fs.readFileSync(flags.input, 'utf8')

    // Second argument controls whether the raw Grype report is embedded in the HDF output
    const converter = new Mapper(input, flags['with-raw'])
    // checkSuffix ensures the output filename ends with `.json`
    fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
  }
}
9 changes: 8 additions & 1 deletion src/commands/convert/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import {ASFFResults, ChecklistResults, BurpSuiteMapper, ConveyorResults, CycloneDXSBOMResults, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, MsftSecureScoreMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TrufflehogResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
import {AnchoreGrypeMapper, ASFFResults, ChecklistResults, BurpSuiteMapper, ConveyorResults, CycloneDXSBOMResults, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, MsftSecureScoreMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TrufflehogResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
import fs from 'fs'
import _ from 'lodash'
import {checkSuffix, convertFullPathToFilename} from '../../utils/global'
Expand Down Expand Up @@ -39,6 +39,7 @@ export default class Convert extends Command {
return Zap2HDF.flags
}

case 'anchoregrype':
case 'burp':
case 'conveyor':
case 'checklist':
Expand Down Expand Up @@ -71,6 +72,12 @@ export default class Convert extends Command {
const {flags} = await this.parse(Convert)
let converter
switch (Convert.detectedType) {
case 'anchoregrype': {
converter = new AnchoreGrypeMapper(fs.readFileSync(flags.input, 'utf8'))
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
break
}

case 'asff': {
let securityhub = _.get(flags, 'securityhub') as string[]
if (securityhub) {
Expand Down
188 changes: 188 additions & 0 deletions test/commands/convert/anchoregrype2hdf.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,188 @@
import { expect, test } from "@oclif/test";
import tmp from "tmp";
import path from "path";
import fs from "fs";
import { omitHDFChangingFields } from "../utils";

// Verifies that the anchoregrype2hdf command converts a standard Anchore Grype
// report into HDF matching the checked-in expected output.
describe('Test anchore grype', () => {
  const tmpDir = tmp.dirSync({unsafeCleanup: true})
  const inputFile = path.resolve('./test/sample_data/anchoregrype/sample_input_report/anchore_grype.json')
  const outputFile = `${tmpDir.name}/anchore-grype-hdf.json`

  test
    .stdout()
    .command(['convert anchoregrype2hdf', '-i', inputFile, '-o', outputFile])
    .it('hdf-converter output test', () => {
      const actualHdf = JSON.parse(fs.readFileSync(outputFile, 'utf8'))
      const expectedHdf = JSON.parse(
        fs.readFileSync(path.resolve('./test/sample_data/anchoregrype/anchore-grype-hdf.json'), 'utf8'),
      )
      // Run-varying fields (timestamps, tool versions) are stripped before comparison
      expect(omitHDFChangingFields(actualHdf)).to.eql(omitHDFChangingFields(expectedHdf))
    })
})

// Same conversion as the standard test, but with `-w` so the raw Grype report
// is embedded in the HDF output; compared against its own expected fixture.
describe('Test anchore grype withraw flag', () => {
  const tmpDir = tmp.dirSync({unsafeCleanup: true})
  const inputFile = path.resolve('./test/sample_data/anchoregrype/sample_input_report/anchore_grype.json')
  const outputFile = `${tmpDir.name}/anchore-grype-withraw.json`

  test
    .stdout()
    .command(['convert anchoregrype2hdf', '-i', inputFile, '-o', outputFile, '-w'])
    .it('hdf-converter withraw output test', () => {
      const actualHdf = JSON.parse(fs.readFileSync(outputFile, 'utf8'))
      const expectedHdf = JSON.parse(
        fs.readFileSync(path.resolve('./test/sample_data/anchoregrype/anchore-grype-withraw.json'), 'utf8'),
      )
      // Run-varying fields (timestamps, tool versions) are stripped before comparison
      expect(omitHDFChangingFields(actualHdf)).to.eql(omitHDFChangingFields(expectedHdf))
    })
})

// Verifies conversion of the Amazon Linux sample Grype report.
describe('Test amazon anchore grype', () => {
  const tmpDir = tmp.dirSync({unsafeCleanup: true})
  const inputFile = path.resolve('./test/sample_data/anchoregrype/sample_input_report/amazon.json')
  const outputFile = `${tmpDir.name}/amazon-grype-hdf.json`

  test
    .stdout()
    .command(['convert anchoregrype2hdf', '-i', inputFile, '-o', outputFile])
    .it('hdf-converter output test', () => {
      const actualHdf = JSON.parse(fs.readFileSync(outputFile, 'utf8'))
      const expectedHdf = JSON.parse(
        fs.readFileSync(path.resolve('./test/sample_data/anchoregrype/amazon-grype-hdf.json'), 'utf8'),
      )
      // Run-varying fields (timestamps, tool versions) are stripped before comparison
      expect(omitHDFChangingFields(actualHdf)).to.eql(omitHDFChangingFields(expectedHdf))
    })
})

// Amazon Linux sample with `-w` (raw report embedded in the HDF output).
describe('Test amazon anchore grype withraw flag', () => {
  const tmpDir = tmp.dirSync({unsafeCleanup: true})
  const inputFile = path.resolve('./test/sample_data/anchoregrype/sample_input_report/amazon.json')
  const outputFile = `${tmpDir.name}/amazon-grype-withraw.json`

  test
    .stdout()
    .command(['convert anchoregrype2hdf', '-i', inputFile, '-o', outputFile, '-w'])
    .it('hdf-converter withraw output test', () => {
      const actualHdf = JSON.parse(fs.readFileSync(outputFile, 'utf8'))
      const expectedHdf = JSON.parse(
        fs.readFileSync(path.resolve('./test/sample_data/anchoregrype/amazon-grype-withraw.json'), 'utf8'),
      )
      // Run-varying fields (timestamps, tool versions) are stripped before comparison
      expect(omitHDFChangingFields(actualHdf)).to.eql(omitHDFChangingFields(expectedHdf))
    })
})

// Verifies conversion of the TensorFlow sample Grype report.
describe('Test tensorflow anchore grype', () => {
  const tmpDir = tmp.dirSync({unsafeCleanup: true})
  const inputFile = path.resolve('./test/sample_data/anchoregrype/sample_input_report/tensorflow.json')
  const outputFile = `${tmpDir.name}/tensorflow-grype-hdf.json`

  test
    .stdout()
    .command(['convert anchoregrype2hdf', '-i', inputFile, '-o', outputFile])
    .it('hdf-converter output test', () => {
      const actualHdf = JSON.parse(fs.readFileSync(outputFile, 'utf8'))
      const expectedHdf = JSON.parse(
        fs.readFileSync(path.resolve('./test/sample_data/anchoregrype/tensorflow-grype-hdf.json'), 'utf8'),
      )
      // Run-varying fields (timestamps, tool versions) are stripped before comparison
      expect(omitHDFChangingFields(actualHdf)).to.eql(omitHDFChangingFields(expectedHdf))
    })
})

// TensorFlow sample with `-w` (raw report embedded in the HDF output).
describe('Test tensorflow anchore grype withraw flag', () => {
  const tmpDir = tmp.dirSync({unsafeCleanup: true})
  const inputFile = path.resolve('./test/sample_data/anchoregrype/sample_input_report/tensorflow.json')
  const outputFile = `${tmpDir.name}/tensorflow-grype-withraw.json`

  test
    .stdout()
    .command(['convert anchoregrype2hdf', '-i', inputFile, '-o', outputFile, '-w'])
    .it('hdf-converter withraw output test', () => {
      const actualHdf = JSON.parse(fs.readFileSync(outputFile, 'utf8'))
      const expectedHdf = JSON.parse(
        fs.readFileSync(path.resolve('./test/sample_data/anchoregrype/tensorflow-grype-withraw.json'), 'utf8'),
      )
      // Run-varying fields (timestamps, tool versions) are stripped before comparison
      expect(omitHDFChangingFields(actualHdf)).to.eql(omitHDFChangingFields(expectedHdf))
    })
})
Loading

0 comments on commit c5bce30

Please sign in to comment.