Import sql file in node.js and execute against PostgreSQL
You can simply separate consecutive queries with a semicolon in the string passed to client.query
That works:
var pg = require('pg');

// Connect and run two statements, separated by a semicolon, in a single
// client.query call. The original snippet ignored the connect error and
// called done() synchronously, releasing the client before the query had
// finished and swallowing any query error — done() must run in the
// query callback instead.
pg.connect('postgres://test:test@localhost/test', function (err, client, done) {
  if (err) {
    console.log('error: ', err);
    return;
  }
  client.query('CREATE TABLE test (test VARCHAR(255)); INSERT INTO test VALUES(\'test\') ', function (err) {
    done(); // release the client only after the query completed
    if (err) {
      console.log('error: ', err);
    }
  });
});
Consequently, this works too:
var pg = require('pg');var fs = require('fs');var sql = fs.readFileSync('init_database.sql').toString();pg.connect('postgres://test:test@localhost/test', function(err, client, done){ if(err){ console.log('error: ', err); process.exit(1); } client.query(sql, function(err, result){ done(); if(err){ console.log('error: ', err); process.exit(1); } process.exit(0); });});
I've written the following function which works for my case. It would have been much simpler if it weren't for:
- Using `batch` to manage concurrency
- Having the tricky PostgreSQL COPY case to consider
Code snippet:
// Reads a SQL file, splits it into individual statements, and schedules each
// one on `batch` (an external task queue, defined elsewhere in this file) for
// sequential execution against the shared `client` connection.
//
// NOTE: splitting on ';' assumes no semicolon appears anywhere except as a
// statement terminator (no ';' inside string literals or file names).
function processSQLFile(fileName) {
  var queries = fs.readFileSync(fileName).toString()
    .replace(/(\r\n|\n|\r)/gm, " ")  // remove newlines
    .replace(/\s+/g, ' ')            // collapse excess white space
    .split(";")                      // split into individual statements
    .map(Function.prototype.call, String.prototype.trim)
    .filter(function (el) { return el.length !== 0; }); // drop empty entries

  // Execute each SQL query sequentially via the batch queue.
  queries.forEach(function (query) {
    batch.push(function (done) {
      if (query.indexOf("COPY") === 0) {
        // COPY needs special treatment: rewrite it as COPY ... FROM STDIN
        // and stream the CSV file's contents into the connection.
        // No /g flag: a fresh, stateless regex avoids the stateful
        // lastIndex pitfall of reused global regexes.
        var matches = /COPY\ (.*)\ FROM\ (.*)\ DELIMITERS/mi.exec(query);
        if (!matches) {
          // The original crashed with an opaque TypeError here when the
          // COPY statement did not match the expected shape.
          console.log('error: ', new Error('Unsupported COPY statement: ' + query));
          done();
          return;
        }
        var table = matches[1];
        var csvName = matches[2];
        var copyString = "COPY " + table + " FROM STDIN DELIMITERS ',' CSV HEADER";
        var stream = client.copyFrom(copyString);
        stream.on('close', function () {
          done();
        });
        var csvFile = __dirname + '/' + csvName;
        var str = fs.readFileSync(csvFile);
        stream.write(str);
        stream.end();
      } else {
        // node-postgres callbacks receive (err, result). The original
        // `function(result)` actually captured the ERROR in `result` and
        // silently swallowed every query failure.
        client.query(query, function (err) {
          if (err) {
            console.log('error: ', err);
          }
          done();
        });
      }
    });
  });
}
Beware that this would fail if you used semicolons anywhere except to terminate SQL statements.
The @databases/pg
client supports running SQL files out of the box:
const createPool = require('@databases/pg');
const { sql } = require('@databases/pg');

// Run every statement in my-file.sql, then release the pool. A failure is
// reported on stderr and reflected in the process exit code.
const db = createPool();

db.query(sql.file('my-file.sql'))
  .catch((ex) => {
    console.error(ex);
    process.exitCode = 1;
  })
  .then(() => db.dispose());
It also supports having multiple statements in a single call to db.query
:
const createPool = require('@databases/pg');
const { sql } = require('@databases/pg');

const db = createPool();

// Both statements run in sequence in one db.query call; the promise resolves
// with the result of the last statement. The original snippet had an extra
// closing parenthesis after db.query(...), which is a SyntaxError as written.
db.query(sql` INSERT INTO users (name) VALUES (${'Forbes'}); SELECT * FROM users;`)
  .then((results) => console.log(results))
  .catch((ex) => {
    console.error(ex);
    process.exitCode = 1;
  })
  .then(() => db.dispose());
In this example, each statement is run in sequence, and the result of the last statement is returned.