Commit af65888

made benchmarks more resilient to OS filesystem caching issues
1 parent f02cf1e · commit af65888

8 files changed: +25 −20 lines

benchmark/benchmark.js (+1 −1)

@@ -19,7 +19,7 @@ const display = (result) => {
 (async () => {
 	process.on('unhandledRejection', (err) => { throw err; });
 	const ctx = JSON.parse(process.argv[2]);
-	const db = await require('./drivers').get(ctx.driver)(ctx.filename, ctx.pragma);
+	const db = await require('./drivers').get(ctx.driver)('../temp/benchmark.db', ctx.pragma);
 	const fn = require(`./types/${ctx.type}`)[ctx.driver](db, ctx);
 	if (typeof fn === 'function') setImmediate(sync, fn);
 	else setImmediate(async, await fn);

benchmark/index.js (+13 −7)

@@ -24,6 +24,14 @@ const filterBySearchTerms = (searchTerms) => (trial) => {
 	return searchTerms.every(arg => terms.includes(arg));
 };
 
+const sortTrials = (a, b) => {
+	const aRo = require(`./types/${a.type}`).readonly;
+	const bRo = require(`./types/${b.type}`).readonly;
+	if (typeof aRo !== 'boolean') throw new TypeError(`Missing readonly export in benchmark type ${a.type}`);
+	if (typeof bRo !== 'boolean') throw new TypeError(`Missing readonly export in benchmark type ${b.type}`);
+	return bRo - aRo;
+};
+
 const displayTrialName = (trial) => {
 	if (trial.description) return console.log(clc.magenta(`--- ${trial.description} ---`));
 	const name = `${trial.type} ${trial.table} (${trial.columns.join(', ')})`;
@@ -33,8 +41,7 @@ const displayTrialName = (trial) => {
 
 const createContext = (trial, driver) => {
 	const { data: _unused, ...tableInfo } = tables.get(trial.table);
-	const ctx = { ...trial, ...tableInfo, driver, filename: `../temp/${iteration++}.db` };
-	return JSON.stringify(ctx);
+	return JSON.stringify({ ...trial, ...tableInfo, driver });
 };
 
 const erase = () => {
@@ -43,20 +50,19 @@ const erase = () => {
 
 // Determine which trials should be executed.
 process.chdir(__dirname);
-const trials = getTrials(process.argv.slice(2));
+const trials = getTrials(process.argv.slice(2)).sort(sortTrials);
 if (!trials.length) {
 	console.log(clc.yellow('No matching benchmarks found!'));
 	process.exit();
 }
 
-// Create the temporary databases needed to run the benchmark trials.
+// Create the temporary database needed to run the benchmark trials.
 console.log('Generating tables...');
-const drivers = require('./drivers');
-const tables = require('./seed')(drivers.size * trials.length);
+const tables = require('./seed')();
 process.stdout.write(erase());
 
 // Execute each trial for each available driver.
-let iteration = 0;
+const drivers = require('./drivers');
 const nameLength = [...drivers.keys()].reduce((m, d) => Math.max(m, d.length), 0);
 for (const trial of trials) {
 	displayTrialName(trial);

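As an aside on `sortTrials` above: it relies on JavaScript coercing booleans to numbers, so `bRo - aRo` sorts read-only trials ahead of write trials, presumably so the write benchmarks cannot churn the shared database file (and the OS page cache) before the read benchmarks are measured. A minimal standalone sketch of the comparator (the trial objects here are invented for illustration; the real code loads `readonly` from each type module):

const trials = [
	{ type: 'insert', readonly: false },
	{ type: 'select', readonly: true },
	{ type: 'transaction', readonly: false },
	{ type: 'select-all', readonly: true },
];
trials.sort((a, b) => b.readonly - a.readonly); // true (1) sorts before false (0)
console.log(trials.map(t => t.type)); // stable sort in modern Node
// => [ 'select', 'select-all', 'insert', 'transaction' ]
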
benchmark/seed.js (+4 −5)

@@ -26,17 +26,17 @@ const tables = new Map([
 ]);
 
 /*
-	This function creates a number of pre-populated databases that are deleted
-	when the process exits.
+	This function creates a pre-populated database that is deleted when the
+	process exits.
 */
 
-module.exports = (numberOfDatabases = 2) => {
+module.exports = () => {
 	const tempDir = path.join(__dirname, '..', 'temp');
 	process.on('exit', () => fs.removeSync(tempDir));
 	fs.removeSync(tempDir);
 	fs.ensureDirSync(tempDir);
 
-	const db = require('../.')(path.join(tempDir, '0.db'));
+	const db = require('../.')(path.join(tempDir, 'benchmark.db'));
 	db.pragma('journal_mode = OFF');
 	db.pragma('synchronous = OFF');
 
@@ -48,6 +48,5 @@ module.exports = (numberOfDatabases = 2) => {
 	}
 
 	db.close();
-	for (let i = 1; i < numberOfDatabases; ++i) fs.copySync(db.name, path.join(tempDir, `${i}.db`));
 	return tables;
 };

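Since seed.js now builds one shared database at a fixed path, here is a minimal, hypothetical sketch of that seeding pattern with better-sqlite3 (the table name, columns, and row count are invented; the real script seeds the tables defined in its `tables` map and cleans up on exit):

'use strict';
// Hypothetical stand-alone sketch -- not the actual benchmark/seed.js.
const fs = require('fs');
const path = require('path');
const Database = require('better-sqlite3');

const tempDir = path.join(__dirname, 'temp');
fs.mkdirSync(tempDir, { recursive: true });

const db = new Database(path.join(tempDir, 'benchmark.db'));
db.pragma('journal_mode = OFF'); // durability is irrelevant for a throwaway file
db.pragma('synchronous = OFF');
db.exec('CREATE TABLE example (a TEXT, b INTEGER)');

const insert = db.prepare('INSERT INTO example (a, b) VALUES (?, ?)');
const seed = db.transaction((count) => {
	for (let i = 0; i < count; i += 1) insert.run(`row ${i}`, i);
});
seed(1000);
db.close();
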
benchmark/types/insert.js (+1 −1)

@@ -1,5 +1,5 @@
 'use strict';
-// Inserting rows individually (`.run()`)
+exports.readonly = false; // Inserting rows individually (`.run()`)
 
 exports['better-sqlite3'] = (db, { table, columns }) => {
 	const stmt = db.prepare(`INSERT INTO ${table} (${columns.join(', ')}) VALUES (${columns.map(x => '@' + x).join(', ')})`);

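The `readonly` flag added to each type module in this commit is exactly what `sortTrials` in benchmark/index.js reads. A hypothetical new benchmark type (not part of this commit) would follow the same shape, exporting the flag plus one function per driver:

'use strict';
exports.readonly = true; // Hypothetical example: counting rows (`.get()`)

exports['better-sqlite3'] = (db, { table }) => {
	const stmt = db.prepare(`SELECT COUNT(*) AS n FROM ${table}`);
	return () => stmt.get();
};
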
benchmark/types/select-all.js (+1 −1)

@@ -1,5 +1,5 @@
 'use strict';
-// Reading 100 rows into an array (`.all()`)
+exports.readonly = true; // Reading 100 rows into an array (`.all()`)
 
 exports['better-sqlite3'] = (db, { table, columns, count }) => {
 	const stmt = db.prepare(`SELECT ${columns.join(', ')} FROM ${table} WHERE rowid >= ? LIMIT 100`);

benchmark/types/select-iterate.js (+1 −1)

@@ -1,5 +1,5 @@
 'use strict';
-// Iterating over 100 rows (`.iterate()`)
+exports.readonly = true; // Iterating over 100 rows (`.iterate()`)
 
 exports['better-sqlite3'] = (db, { table, columns, count }) => {
 	const stmt = db.prepare(`SELECT ${columns.join(', ')} FROM ${table} WHERE rowid >= ? LIMIT 100`);

benchmark/types/select.js (+1 −1)

@@ -1,5 +1,5 @@
 'use strict';
-// Reading rows individually (`.get()`)
+exports.readonly = true; // Reading rows individually (`.get()`)
 
 exports['better-sqlite3'] = (db, { table, columns, count }) => {
 	const stmt = db.prepare(`SELECT ${columns.join(', ')} FROM ${table} WHERE rowid = ?`);

benchmark/types/transaction.js (+3 −3)

@@ -1,5 +1,5 @@
 'use strict';
-// Inserting 100 rows in a single transaction
+exports.readonly = false; // Inserting 100 rows in a single transaction
 
 exports['better-sqlite3'] = (db, { table, columns }) => {
 	const stmt = db.prepare(`INSERT INTO ${table} (${columns.join(', ')}) VALUES (${columns.map(x => '@' + x).join(', ')})`);
@@ -10,7 +10,7 @@ exports['better-sqlite3'] = (db, { table, columns }) => {
 	return () => trx(row);
 };
 
-exports['node-sqlite3'] = async (db, { table, columns, driver, filename, pragma }) => {
+exports['node-sqlite3'] = async (db, { table, columns, driver, pragma }) => {
 	const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${columns.map(x => '@' + x).join(', ')})`;
 	const row = Object.assign({}, ...Object.entries(await db.get(`SELECT * FROM ${table} LIMIT 1`))
 		.filter(([k]) => columns.includes(k))
@@ -22,7 +22,7 @@ exports['node-sqlite3'] = async (db, { table, columns, driver, filename, pragma
 	connection for each transaction.
 	(http://github.com/mapbox/node-sqlite3/issues/304#issuecomment-45242331)
 	*/
-	return () => open(filename, pragma).then(async (db) => {
+	return () => open('../temp/benchmark.db', pragma).then(async (db) => {
 		try {
 			await db.run('BEGIN');
 			try {

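The comment quoted in the last hunk (only partially visible here) explains why the node-sqlite3 benchmark opens a fresh connection for each transaction. A hypothetical illustration of that per-transaction-connection pattern with the raw sqlite3 callback API follows; it is not the benchmark's actual `open` helper, which lives in benchmark/drivers and wraps the database methods in promises:

const sqlite3 = require('sqlite3');

// Open a new connection, run all statements inside one transaction, then close.
// (Error handling for the individual statements is omitted for brevity.)
const runTransaction = (filename, statements) => new Promise((resolve, reject) => {
	const db = new sqlite3.Database(filename, (err) => {
		if (err) return reject(err);
		db.serialize(() => {
			db.run('BEGIN');
			for (const [sql, params] of statements) db.run(sql, params);
			db.run('COMMIT', (commitErr) => {
				db.close(() => (commitErr ? reject(commitErr) : resolve()));
			});
		});
	});
});

// Usage sketch (table and values are made up):
// runTransaction('../temp/benchmark.db', [['INSERT INTO example (a, b) VALUES (?, ?)', ['x', 1]]]);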