import type { DependencyGraph, ResolvedFile } from "../../../types/solidity.js";
import type { NewTaskActionFunction } from "../../../types/tasks.js";

import { resolveFromRoot } from "@ignored/hardhat-vnext-utils/path";
import chalk from "chalk";
import toposort from "toposort";

import { getHardhatVersion } from "../../utils/package.js";
import { buildDependencyGraph } from "../solidity/build-system/dependency-graph-building.js";
import { isNpmRootPath } from "../solidity/build-system/root-paths-utils.js";

// Matches every SPDX license declaration. The first capture group is the license identifier.
const SPDX_LICENSES_REGEX =
  /^(?:\/\/|\/\*)\s*SPDX-License-Identifier:\s*([a-zA-Z\d+.-]+).*/gm;
// Matches every pragma abicoder directive. The first capture group is the directive itself.
const PRAGMA_DIRECTIVES_REGEX =
  /^(?: |\t)*(pragma\s*abicoder\s*v(1|2)|pragma\s*experimental\s*ABIEncoderV2)\s*;/gim;

export interface FlattenActionArguments {
  files: string[];
}

export interface FlattenActionResult {
  flattened: string;
  metadata: FlattenMetadata | null;
}

export interface FlattenMetadata {
  filesWithoutLicenses: string[];
  pragmaDirective: string;
  filesWithoutPragmaDirectives: string[];
  filesWithDifferentPragmaDirectives: string[];
}

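/**
 * Flattens the requested Solidity files (or every root file when none are
 * given) into a single source string, commenting out the original SPDX
 * license identifiers and pragma abicoder directives, and prints the result.
 */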
const flattenAction: NewTaskActionFunction<FlattenActionArguments> = async (
  { files },
  { solidity, config },
): Promise<FlattenActionResult> => {
  // Resolve files from arguments or default to all root files
  const rootPaths =
    files.length === 0
      ? await solidity.getRootFilePaths()
      : files.map((file) => {
          if (isNpmRootPath(file)) {
            return file;
          }

          return resolveFromRoot(process.cwd(), file);
        });

  // Build the dependency graph
  const { dependencyGraph } = await buildDependencyGraph(
    rootPaths.toSorted(), // We sort them to have a deterministic order
    config.paths.root,
    config.solidity.remappings,
  );

  let flattened = "";

  // Return an empty string when no files are resolved
  if (Array.from(dependencyGraph.getAllFiles()).length === 0) {
    return { flattened, metadata: null };
  }

  // Write a comment with the Hardhat version used to flatten
  const hardhatVersion = await getHardhatVersion();
  flattened += `// Sources flattened with hardhat v${hardhatVersion} https://hardhat.org`;

  const sortedFiles = getSortedFiles(dependencyGraph);

  const [licenses, filesWithoutLicenses] = getLicensesInfo(sortedFiles);

  const [
    pragmaDirective,
    filesWithoutPragmaDirectives,
    filesWithDifferentPragmaDirectives,
  ] = getPragmaAbicoderDirectiveInfo(sortedFiles);

  // Write the combined license header and the pragma abicoder directive with the highest importance
  flattened += getLicensesHeader(licenses);
  flattened += getPragmaAbicoderDirectiveHeader(pragmaDirective);

  for (const file of sortedFiles) {
    let normalizedText = getTextWithoutImports(file);
    normalizedText = commentLicenses(normalizedText);
    normalizedText = commentPragmaAbicoderDirectives(normalizedText);

    // Write each file without its imports, with licenses and pragma abicoder directives commented out
    flattened += `\n\n// File ${file.sourceName}\n`;
    flattened += `\n${normalizedText}\n`;
  }

  // Print the flattened file
  console.log(flattened);

  if (filesWithoutLicenses.length > 0) {
    console.warn(
      chalk.yellow(
        `\nThe following file(s) do NOT specify SPDX licenses: ${filesWithoutLicenses.join(
          ", ",
        )}`,
      ),
    );
  }

  if (pragmaDirective !== "" && filesWithoutPragmaDirectives.length > 0) {
    console.warn(
      chalk.yellow(
        `\nPragma abicoder directives are defined in some files, but they are not defined in the following ones: ${filesWithoutPragmaDirectives.join(
          ", ",
        )}`,
      ),
    );
  }

  if (filesWithDifferentPragmaDirectives.length > 0) {
    console.warn(
      chalk.yellow(
        `\nThe flattened file is using the pragma abicoder directive '${pragmaDirective}' but these files have a different pragma abicoder directive: ${filesWithDifferentPragmaDirectives.join(
          ", ",
        )}`,
      ),
    );
  }

  return {
    flattened,
    metadata: {
      filesWithoutLicenses,
      pragmaDirective,
      filesWithoutPragmaDirectives,
      filesWithDifferentPragmaDirectives,
    },
  };
};

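/**
 * Collects every SPDX license found across the sorted files.
 * Returns a tuple: [sorted unique licenses, sorted names of files without a license].
 */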
function getLicensesInfo(sortedFiles: ResolvedFile[]): [string[], string[]] {
  const licenses: Set<string> = new Set();
  const filesWithoutLicenses: Set<string> = new Set();

  for (const file of sortedFiles) {
    const matches = [...file.content.text.matchAll(SPDX_LICENSES_REGEX)];

    if (matches.length === 0) {
      filesWithoutLicenses.add(file.sourceName);
      continue;
    }

    for (const groups of matches) {
      licenses.add(groups[1]);
    }
  }

  // Sort alphabetically
  return [Array.from(licenses).sort(), Array.from(filesWithoutLicenses).sort()];
}

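/**
 * Determines the most important pragma abicoder directive across all files.
 * Returns a tuple: [directive to use in the flattened file, sorted names of
 * files without any directive, sorted names of files whose own most important
 * directive differs from the chosen one].
 */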
function getPragmaAbicoderDirectiveInfo(
  sortedFiles: ResolvedFile[],
): [string, string[], string[]] {
  let directive = "";
  const directivesByImportance = [
    "pragma abicoder v1",
    "pragma experimental ABIEncoderV2",
    "pragma abicoder v2",
  ];
  const filesWithoutPragmaDirectives: Set<string> = new Set();
  const filesWithMostImportantDirective: Array<[string, string]> = []; // Every array element has the structure: [ fileName, fileMostImportantDirective ]

  for (const file of sortedFiles) {
    const matches = [...file.content.text.matchAll(PRAGMA_DIRECTIVES_REGEX)];

    if (matches.length === 0) {
      filesWithoutPragmaDirectives.add(file.sourceName);
      continue;
    }

    let fileMostImportantDirective = "";
    for (const groups of matches) {
      const normalizedPragma = removeUnnecessarySpaces(groups[1]);

      // Update the most important pragma directive among all the files
      if (
        directivesByImportance.indexOf(normalizedPragma) >
        directivesByImportance.indexOf(directive)
      ) {
        directive = normalizedPragma;
      }

      // Update the most important pragma directive for the current file
      if (
        directivesByImportance.indexOf(normalizedPragma) >
        directivesByImportance.indexOf(fileMostImportantDirective)
      ) {
        fileMostImportantDirective = normalizedPragma;
      }
    }

    // Record the most important directive for the current file
    filesWithMostImportantDirective.push([
      file.sourceName,
      fileMostImportantDirective,
    ]);
  }

  // Collect the files whose pragma directive differs from the main one that
  // will be used in the flattened file
  const filesWithDifferentPragmaDirectives = filesWithMostImportantDirective
    .filter(([, fileDirective]) => fileDirective !== directive)
    .map(([fileName]) => fileName);

  // Sort alphabetically
  return [
    directive,
    Array.from(filesWithoutPragmaDirectives).sort(),
    filesWithDifferentPragmaDirectives.sort(),
  ];
}

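/**
 * Returns the resolved files in topological order, so every dependency
 * appears before the files that import it.
 */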
function getSortedFiles(dependencyGraph: DependencyGraph): ResolvedFile[] {
  const sortingGraph: Array<[string, string]> = [];
  const visited = new Set<string>();

  const walk = (files: Iterable<ResolvedFile>) => {
    for (const file of files) {
      if (visited.has(file.sourceName)) continue;

      visited.add(file.sourceName);

      // Sort dependencies in alphabetical order for deterministic results
      const dependencies = Array.from(
        dependencyGraph.getDependencies(file),
      ).sort((f1, f2) => f1.sourceName.localeCompare(f2.sourceName));

      for (const dependency of dependencies) {
        sortingGraph.push([dependency.sourceName, file.sourceName]);
      }

      walk(dependencies);
    }
  };

  // Sort roots in alphabetical order for deterministic results
  const roots = Array.from(dependencyGraph.getRoots().values()).sort((f1, f2) =>
    f1.sourceName.localeCompare(f2.sourceName),
  );

  walk(roots);

  // Get all nodes so the graph includes files with no dependencies
  const allSourceNames = Array.from(dependencyGraph.getAllFiles()).map(
    (f) => f.sourceName,
  );

  // Get source names sorted in topological order
  const sortedSourceNames = toposort.array(allSourceNames, sortingGraph);

  const sortedFiles = sortedSourceNames.map((sourceName) =>
    dependencyGraph.getFileBySourceName(sourceName),
  );

  return sortedFiles.filter((f) => f !== undefined);
}

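// Collapses runs of whitespace into single spaces and trims the ends.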
function removeUnnecessarySpaces(str: string): string {
  return str.replace(/\s+/g, " ").trim();
}

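// Builds the combined SPDX header for the flattened file, joining licenses with AND.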
function getLicensesHeader(licenses: string[]): string {
  return licenses.length <= 0
    ? ""
    : `\n\n// SPDX-License-Identifier: ${licenses.join(" AND ")}`;
}

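// Builds the pragma abicoder directive header for the flattened file, if any directive was found.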
function getPragmaAbicoderDirectiveHeader(pragmaDirective: string): string {
  return pragmaDirective === "" ? "" : `\n\n${pragmaDirective};`;
}

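// Returns the file's source text with all import statements removed.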
function getTextWithoutImports(resolvedFile: ResolvedFile) {
  const IMPORT_SOLIDITY_REGEX = /^\s*import(\s+)[\s\S]*?;\s*$/gm;

  return resolvedFile.content.text.replace(IMPORT_SOLIDITY_REGEX, "").trim();
}

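// Replaces every SPDX license declaration with a comment preserving the original license.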
function commentLicenses(file: string): string {
  return file.replaceAll(
    SPDX_LICENSES_REGEX,
    (...groups) => `// Original license: SPDX_License_Identifier: ${groups[1]}`,
  );
}

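// Replaces every pragma abicoder directive with a comment preserving the original directive.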
function commentPragmaAbicoderDirectives(file: string): string {
  return file.replaceAll(PRAGMA_DIRECTIVES_REGEX, (...groups) => {
    return `// Original pragma directive: ${removeUnnecessarySpaces(
      groups[1],
    )}`;
  });
}

export default flattenAction;