On Fri, May 19, 2017 at 2:04 PM, Eric Hill <Eric.Hill@jmp.com> wrote:
> I am pleased to report that with Merlin's suggestion of using the pg-large-object middleware, I have a test case now
showing that I can write a 25MB buffer from Node.js to Postgres in roughly 700 milliseconds. Here is the JavaScript
code, which is nearly verbatim from the example in the pg-large-object doc:
>
> packages.testLargeObjects = function(callback) {
> var pgp = require('pg-promise')();
> var LargeObjectManager = require('pg-large-object').LargeObjectManager;
> var PassThrough = require('stream').PassThrough;
>
> var bufSize = 1024 * 1024 * 25;
> var buf = new Buffer(bufSize);
> buf.fill("pgrocks");
>
> var connInfo = {
> host: 'localhost',
> port: 5432,
> database: 'mydb',
> user: 'postgres',
> password: 'secret'
> };
>
> var db = pgp(connInfo);
>
> db.tx(function(tx) {
> const lObjMgr = new LargeObjectManager({pgPromise: tx});
> const bufferSize = 16384;
>
> return lObjMgr.createAndWritableStreamAsync(bufferSize)
> .then( ([oid, stream]) => {
> let bufferStream = new PassThrough();
> bufferStream.end(buf);
> bufferStream.pipe(stream);
> return new Promise(function(resolve, reject) {
> stream.on('finish', resolve);
> stream.on('error', reject);
> });
> });
> })
> .then(function() {
> callback();
> pgp.end();
> })
> .catch(function(err) {
> callback(err);
> pgp.end();
> });
> };
>
> Thanks very much!
well done sir! that's probably as fast as you're going to get in node,
at least without a large investment at the driver level.
merlin