// REPLACE ALL REFERENCES TO "_generic_hasher" WITH THE DESIRED ALGORITHM IN THE ALGORITHMS DIRECTORY.

import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';

import { generateHashesForDocument, validateDocument, type CidDocument, type Config } from '../src/index';
import { SUPPORTED_VALIDATORS } from '../src/validation/Validation';
import { makeHasher } from './example_algorithm/_generic_hasher';

const __dirname = dirname(fileURLToPath(import.meta.url));
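// ES modules do not expose __dirname, so it is reconstructed here from import.meta.url.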

/*
  Construct a config object instructing the algorithm HOW to process the data being passed
  in. It contains rules relating to the source schema, target schemas, validation rules, and
  the algorithm specification. Normally this will be read from a file using loadConfig().

  See ../docs/configuration-files.md for more detail on the relevant config fields.
*/
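// Reading the config from a file could look roughly like the commented-out call below; the
// exact loadConfig signature and the file name are assumptions for illustration only:
//   const config = loadConfig(join(__dirname, 'config.json'));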
const config: Config.Options = {
  meta: {
    region: "UNKNOWN", // this must match the shortCode of the algorithm being used
    version: "",
    signature: ""
  },
  // the schema information for the source data
  source: {
    columns: [
      { name: "ID", alias: "id" },
      { name: "Column 2", alias: "col2" },
      { name: "Column 3", alias: "col3" },
    ]
  },
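  // note: in this example the sections below (validations, algorithm.columns) refer to
  // columns by their alias ("id"), not by their original name ("ID")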
  // [OPTIONAL] validation rules per column: see ../docs/validators.md
  validations: {
    id: [
      { op: SUPPORTED_VALIDATORS.FIELD_TYPE, value: 'string' },
      { op: SUPPORTED_VALIDATORS.MAX_FIELD_LENGTH, value: 11 }
      // { op: SUPPORTED_VALIDATORS.LINKED_FIELD, target: "col2" }
    ]
  },
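  // algorithm configuration: hashing strategy, salt, and column grouping; the exact meaning
  // of the process / reference / static groups is defined by the chosen algorithm
  // implementation (see ../docs/configuration-files.md)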
  algorithm: {
    hash: { strategy: "SHA256" },
    salt: { source: "STRING", value: "testSalt" },
    columns: {
      process: [],
      reference: [],
      static: ["id"]
    },
  },
  // schema for main output file, skipping for brevity
  destination: { columns: [] },
  // schema for mapping output file, skipping for brevity
  destination_map: { columns: [] },
  // schema for error files, skipping for brevity
  destination_errors: { columns: [] }
};

/*
  Construct a `document` containing the data to process.
*/
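// note: in this example the row keys match the column aliases defined in config.source.columns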
const doc: CidDocument = {
  name: "Input Data",
  data: [
    { "id": "43294300000", "col2": "bar0", "col3": "baz0" },
    { "id": "38591500000", "col2": "bar1", "col3": "baz1" },
    { "id": "17386300000", "col2": "bar2", "col3": "baz2" },
  ]
};

function main() {
  // validate the input data against all configured validation rules
  const validationResult = validateDocument(config, doc, false);
  if (!validationResult.ok) {
    console.dir(validationResult.results, { depth: 5 });
    throw new Error("Data contains validation errors, check input");
  }

  // initialise the selected algorithm
  const hasher = makeHasher(config.algorithm);
  // run the algorithm against the input data
  const result = generateHashesForDocument(hasher, doc);

  // print the results, save the results, up to you.
  console.dir(result, { depth: 5 });
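
  // To persist the output instead of only printing it, one option (an assumption, not part of
  // this example) is to add `import { writeFileSync } from 'node:fs'` at the top of the file
  // and write the result out as JSON:
  //   writeFileSync(join(__dirname, 'output.json'), JSON.stringify(result, null, 2));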
}

main();