Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
8fb223b
Initial converter
charleshu-8 Aug 17, 2023
d2d277e
Merge branch 'main' into htmlConverter
charleshu-8 Aug 17, 2023
ebcc222
Merge branch 'main' into htmlConverter
charleshu-8 Aug 23, 2023
87a520f
TW import commit
charleshu-8 Aug 24, 2023
b973771
Merge branch 'main' into htmlConverter
charleshu-8 Aug 24, 2023
0763bfa
Format correct input for html conversion
charleshu-8 Aug 24, 2023
2c789bd
Merge branch 'main' into htmlConverter
charleshu-8 Aug 25, 2023
8781bb8
Linting
charleshu-8 Aug 25, 2023
95087da
Merge branch 'main' into htmlConverter
Amndeep7 Dec 20, 2023
6941fc4
Merge branch 'main' into htmlConverter
em-c-rod Apr 1, 2024
699544d
Merge branch 'main' into htmlConverter
kemley76 Jun 7, 2024
ba03e25
fix hdf2html conversion
kemley76 Jun 10, 2024
171600f
added tests for hdf2html
kemley76 Jun 11, 2024
cebb918
update README
kemley76 Jun 11, 2024
aee3cc3
Merge branch 'main' into htmlConverter
kemley76 Jun 11, 2024
93314d9
update test after hdf2html updates
kemley76 Jul 17, 2024
3f9993d
Merge branch 'main' into htmlConverter
kemley76 Jul 26, 2024
40c3c8a
update hdf2html tests to ignore autogenerated tailwind css
kemley76 Jul 26, 2024
8f43bbc
only remove versions when running hdf2html tests
kemley76 Jul 26, 2024
da84e42
merge
Amndeep7 Apr 10, 2026
5312ea7
rework test to match the new style
Amndeep7 Apr 10, 2026
cacfa56
lint
Amndeep7 Apr 10, 2026
0b4549e
async needs to be there
Amndeep7 Apr 10, 2026
372ef8a
there's some weird performance issue going on with the mapper that i …
Amndeep7 Apr 10, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ For detailed information about development, testing, and contributing to the SA
* [HDF to Checklist](#hdf-to-checklist)
* [HDF to CSV](#hdf-to-csv)
* [HDF to Condensed JSON](#hdf-to-condensed-json)
* [HDF to HTML](#hdf-to-html)

### Convert Other Formats to HDF

Expand Down Expand Up @@ -538,6 +539,38 @@ convert hdf2condensed Condensed format used by some community members
$ saf convert hdf2condensed -i rhel7-results.json -o rhel7-condensed.json
```
[top](#convert-hdf-to-other-formats)
#### HDF to HTML
```
convert hdf2html Translate an HDF file into a Heimdall Report HTML file

USAGE
$ saf convert hdf2html -i <hdf-scan-results-json>... -o <output-html> [-t <output-type>] [-h]

FLAGS
-h, --help
Show CLI help.

-i, --input=<value>...
(required) Input HDF JSON file

-o, --output=<value>
(required) Output HTML file

-t, --type=<option>
[default: Administrator] The report type to generate
Report types differ with the information they include
Executive: Profile Info + Statuses + Compliance Level
Manager: Executive + Test Results and Details
Administrator: Manager + Test Code
<options: executive|manager|administrator>

DESCRIPTION
Translate an HDF file into a Heimdall Report HTML file

EXAMPLES
$ saf convert hdf2html -i hdf_input.json -o report.html -t manager
```
[top](#convert-hdf-to-other-formats)

---
### Convert To HDF
Expand Down
48 changes: 48 additions & 0 deletions src/commands/convert/hdf2html.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import { Command, Flags } from '@oclif/core';
import fs from 'fs';
import path from 'path';
import { FromHDFToHTMLMapper as Mapper } from '@mitre/hdf-converters';
import _ from 'lodash';

// All selectable export types for an HTML export.
// A const object + derived union type is preferred over `enum` in modern
// TypeScript: no bidirectional-mapping surprises, and it stays compatible
// with `isolatedModules`. Both the runtime values (FileExportTypes.X) and
// the type (`as FileExportTypes`) keep working exactly as before.
const FileExportTypes = {
  Executive: 'Executive',
  Manager: 'Manager',
  Administrator: 'Administrator',
} as const;
type FileExportTypes = typeof FileExportTypes[keyof typeof FileExportTypes];

export default class HDF2HTML extends Command {
static usage = 'convert hdf2html -i <hdf-scan-results-json>... -o <output-html> [-t <output-type>] [-h]';

Check warning on line 15 in src/commands/convert/hdf2html.ts

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Make this public static property readonly.

See more on https://sonarcloud.io/project/issues?id=mitre_saf&issues=AZ13FcVtsDAO84CcXPJR&open=AZ13FcVtsDAO84CcXPJR&pullRequest=1689

static description = 'Translate an HDF file into a Heimdall Report HTML file';

static examples = ['saf convert hdf2html -i hdf_input.json -o report.html -t manager'];

static flags = {

Check warning on line 21 in src/commands/convert/hdf2html.ts

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Make this public static property readonly.

See more on https://sonarcloud.io/project/issues?id=mitre_saf&issues=AZ13FcVtsDAO84CcXPJU&open=AZ13FcVtsDAO84CcXPJU&pullRequest=1689
help: Flags.help({ char: 'h' }),
input: Flags.string({ char: 'i', required: true, multiple: true, description: 'Input HDF JSON file' }),
output: Flags.string({ char: 'o', required: true, description: 'Output HTML file' }),
type: Flags.string({ char: 't', default: FileExportTypes.Administrator,
description: 'The report type to generate\nReport types differ with the information they include\nExecutive: Profile Info + Statuses + Compliance Level\nManager: Executive + Test Results and Details\nAdministrator: Manager + Test Code',
options: ['executive', 'manager', 'administrator'] }),
};

async run() {
const { flags } = await this.parse(HDF2HTML);

const files = [];

let i = 0;
for (const file of flags.input) {
// Create unique fileID for html reference
const fileID = `${i++}`;

const data = fs.readFileSync(file, 'utf8');
const fileName = path.basename(file);
files.push({ data, fileName, fileID });
}

const converter = await new Mapper(files, _.startCase(flags.type) as FileExportTypes).toHTML();
fs.writeFileSync(flags.output, converter);
}
}
2 changes: 1 addition & 1 deletion src/commands/convert/splunk2hdf.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Flags } from '@oclif/core';
import { SplunkMapper } from '@mitre/hdf-converters/lib/src/splunk-mapper';
import { SplunkMapper } from '@mitre/hdf-converters';
import { table } from 'table';
import _ from 'lodash';
import fs from 'fs';
Expand Down
65 changes: 65 additions & 0 deletions test/commands/convert/hdf2html.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import { runCommand } from '@oclif/test';
import fs from 'fs';

Check warning on line 2 in test/commands/convert/hdf2html.test.ts

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Prefer `node:fs` over `fs`.

See more on https://sonarcloud.io/project/issues?id=mitre_saf&issues=AZ13H7IHzyWvgqmtUJHb&open=AZ13H7IHzyWvgqmtUJHb&pullRequest=1689
import path from 'path';

Check warning on line 3 in test/commands/convert/hdf2html.test.ts

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Prefer `node:path` over `path`.

See more on https://sonarcloud.io/project/issues?id=mitre_saf&issues=AZ13FcVjsDAO84CcXPJN&open=AZ13FcVjsDAO84CcXPJN&pullRequest=1689
import tmp from 'tmp';
import { describe, expect, it } from 'vitest';
import { omitHtmlChangingfields as omitHtmlChangingFields } from '../utils';

// describe('Test hdf2html triple_overlay_profile_example with default (administrator) report type', () => {
// const tmpobj = tmp.dirSync({ unsafeCleanup: true });
//
// it('hdf-converter output test', async () => {
// await runCommand<{ name: string }>([
// 'convert hdf2html',
// '-i', path.resolve('./test/sample_data/HDF/input/triple_overlay_profile_example.json'),
// '-o', `${tmpobj.name}/triple_overlay_profile_example.html`,
// ]);
// const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/triple_overlay_profile_example.html`, 'utf8'));
// const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/triple_overlay_profile_example.html'), 'utf8'));
// expect(converted).to.eql(sample);
// });
// });

// describe('Test hdf2html with manager report type and two input files', () => {
// const tmpobj = tmp.dirSync({ unsafeCleanup: true });
//
// it('hdf-converter output test', async () => {
// await runCommand<{ name: string }>([
// 'convert hdf2html',
// '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'), path.resolve('./test/sample_data/HDF/input/vSphere8_report.json'),
// '-o', `${tmpobj.name}/combined_output.html`, '-t', 'manager',
// ]);
// const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/combined_output.html`, 'utf8'));
// const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/combined_output.html'), 'utf8'));
// expect(converted).to.eql(sample);
// });
// });

// describe('Test hdf2html with executive report type', () => {
// const tmpobj = tmp.dirSync({ unsafeCleanup: true });
//
// it('hdf-converter output test', async () => {
// await runCommand<{ name: string }>([
// 'convert hdf2html',
// '-i', path.resolve('./test/sample_data/HDF/input/red_hat_good.json'),
// '-o', `${tmpobj.name}/red_hat_good.html`, '-t', 'executive']);
// const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/red_hat_good.html`, 'utf8'));
// const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/red_hat_good.html'), 'utf8'));
// expect(converted).to.eql(sample);
// });
// });

describe('Test hdf2html sonarqube with default (administrator) report type', () => {
const tmpobj = tmp.dirSync({ unsafeCleanup: true });

it('hdf-converter output test', async () => {
await runCommand<{ name: string }>([
'convert hdf2html',
'-i', path.resolve('./test/sample_data/sonarqube/sonarqube-hdf.json'),
'-o', `${tmpobj.name}/sonarqube-hdf.html`,
]);
const converted = omitHtmlChangingFields(fs.readFileSync(`${tmpobj.name}/sonarqube-hdf.html`, 'utf8'));
const sample = omitHtmlChangingFields(fs.readFileSync(path.resolve('./test/sample_data/html/sonarqube-hdf.html'), 'utf8'));

Check failure on line 62 in test/commands/convert/hdf2html.test.ts

View workflow job for this annotation

GitHub Actions / build (macos)

test/commands/convert/hdf2html.test.ts > Test hdf2html sonarqube with default (administrator) report type > hdf-converter output test

Error: ENOENT: no such file or directory, open '/Users/runner/work/saf/saf/test/sample_data/html/sonarqube-hdf.html' ❯ test/commands/convert/hdf2html.test.ts:62:46 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { errno: -2, code: 'ENOENT', syscall: 'open', path: '/Users/runner/work/saf/saf/test/sample_data/html/sonarqube-hdf.html' }
expect(converted).to.eql(sample);
});
});
5 changes: 5 additions & 0 deletions test/commands/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,8 @@ export function omitChecklistChangingFields(input: string) {
return input.replaceAll(/[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}/gm, '')
.replace(/<!--Heimdall Version :: \S+-->/, '');
}

/**
 * Strip auto-generated version banners (Tailwind CSS / TW Elements) from
 * rendered HTML so tests compare stably across dependency upgrades.
 *
 * Uses global regex flags so EVERY occurrence is replaced — hdf2html can
 * combine multiple input files into one report, so a banner may appear more
 * than once. This also matches the sibling omitChecklistChangingFields,
 * which already replaces globally.
 */
export function omitHtmlChangingfields(input: string) {
  return input.replace(/tailwindcss .+ \|/g, 'TAILWIND VERSION')
    .replace(/TW Elements \d+\.\d+\.\d+/g, 'TW ELEMENTS VERSION');
}
Loading
Loading