Implement SQL database into endpoint.

master
sigonasr2 4 years ago
parent ce5e82e437
commit 1de6f1e21f
  1. 102   app.js
  2. 30    db.placeholder.js
  3. 15    node_modules/.bin/mime
  4. 1     node_modules/.bin/mime
  5. 1     node_modules/.bin/semver
  6. 2     node_modules/accepts/package.json
  7. 2     node_modules/array-flatten/package.json
  8. 2     node_modules/body-parser/package.json
  9. 7     node_modules/buffer-writer/.travis.yml
  10. 19   node_modules/buffer-writer/LICENSE
  11. 48   node_modules/buffer-writer/README.md
  12. 129  node_modules/buffer-writer/index.js
  13. 57   node_modules/buffer-writer/package.json
  14. 1    node_modules/buffer-writer/test/mocha.opts
  15. 218  node_modules/buffer-writer/test/writer-tests.js
  16. 2    node_modules/bytes/package.json
  17. 2    node_modules/content-disposition/package.json
  18. 2    node_modules/content-type/package.json
  19. 2    node_modules/cookie-signature/package.json
  20. 2    node_modules/cookie/package.json
  21. 2    node_modules/debug/package.json
  22. 2    node_modules/depd/package.json
  23. 2    node_modules/destroy/package.json
  24. 2    node_modules/ee-first/package.json
  25. 2    node_modules/encodeurl/package.json
  26. 2    node_modules/escape-html/package.json
  27. 2    node_modules/etag/package.json
  28. 2    node_modules/express/package.json
  29. 2    node_modules/finalhandler/package.json
  30. 2    node_modules/forwarded/package.json
  31. 2    node_modules/fresh/package.json
  32. 2    node_modules/http-errors/package.json
  33. 2    node_modules/iconv-lite/package.json
  34. 2    node_modules/inherits/package.json
  35. 2    node_modules/ipaddr.js/package.json
  36. 2    node_modules/media-typer/package.json
  37. 2    node_modules/merge-descriptors/package.json
  38. 2    node_modules/methods/package.json
  39. 2    node_modules/mime-db/package.json
  40. 2    node_modules/mime-types/package.json
  41. 0    node_modules/mime/cli.js
  42. 2    node_modules/mime/package.json
  43. 0    node_modules/mime/src/build.js
  44. 2    node_modules/ms/package.json
  45. 2    node_modules/negotiator/package.json
  46. 2    node_modules/on-finished/package.json
  47. 8    node_modules/packet-reader/.travis.yml
  48. 87   node_modules/packet-reader/README.md
  49. 65   node_modules/packet-reader/index.js
  50. 52   node_modules/packet-reader/package.json
  51. 148  node_modules/packet-reader/test/index.js
  52. 2    node_modules/parseurl/package.json
  53. 2    node_modules/path-to-regexp/package.json
  54. 21   node_modules/pg-connection-string/LICENSE
  55. 72   node_modules/pg-connection-string/README.md
  56. 14   node_modules/pg-connection-string/index.d.ts
  57. 89   node_modules/pg-connection-string/index.js
  58. 68   node_modules/pg-connection-string/package.json
  59. 13   node_modules/pg-int8/LICENSE
  60. 16   node_modules/pg-int8/README.md
  61. 100  node_modules/pg-int8/index.js
  62. 52   node_modules/pg-int8/package.json
  63. 21   node_modules/pg-pool/LICENSE
  64. 376  node_modules/pg-pool/README.md
  65. 403  node_modules/pg-pool/index.js
  66. 67   node_modules/pg-pool/package.json
  67. 42   node_modules/pg-pool/test/bring-your-own-promise.js
  68. 29   node_modules/pg-pool/test/connection-strings.js
  69. 229  node_modules/pg-pool/test/connection-timeout.js
  70. 40   node_modules/pg-pool/test/ending.js
  71. 260  node_modules/pg-pool/test/error-handling.js
  72. 86   node_modules/pg-pool/test/events.js
  73. 87   node_modules/pg-pool/test/idle-timeout.js
  74. 226  node_modules/pg-pool/test/index.js
  75. 20   node_modules/pg-pool/test/logging.js
  76. 98   node_modules/pg-pool/test/max-uses.js
  77. 54   node_modules/pg-pool/test/releasing-clients.js
  78. 10   node_modules/pg-pool/test/setup.js
  79. 58   node_modules/pg-pool/test/sizing.js
  80. 19   node_modules/pg-pool/test/submittable.js
  81. 0    node_modules/pg-pool/test/timeout.js
  82. 25   node_modules/pg-pool/test/verify.js
  83. 21   node_modules/pg-protocol/LICENSE
  84. 14   node_modules/pg-protocol/dist/BufferReader.d.ts
  85. 48   node_modules/pg-protocol/dist/BufferReader.js
  86. 1    node_modules/pg-protocol/dist/BufferReader.js.map
  87. 20   node_modules/pg-protocol/dist/BufferWriter.d.ts
  88. 109  node_modules/pg-protocol/dist/BufferWriter.js
  89. 1    node_modules/pg-protocol/dist/BufferWriter.js.map
  90. 1    node_modules/pg-protocol/dist/b.d.ts
  91. 25   node_modules/pg-protocol/dist/b.js
  92. 1    node_modules/pg-protocol/dist/b.js.map
  93. 14   node_modules/pg-protocol/dist/buffer-reader.d.ts
  94. 49   node_modules/pg-protocol/dist/buffer-reader.js
  95. 1    node_modules/pg-protocol/dist/buffer-reader.js.map
  96. 16   node_modules/pg-protocol/dist/buffer-writer.d.ts
  97. 80   node_modules/pg-protocol/dist/buffer-writer.js
  98. 1    node_modules/pg-protocol/dist/buffer-writer.js.map
  99. 22   node_modules/pg-protocol/dist/connection.d.ts
  100. 311 node_modules/pg-protocol/dist/connection.js
Some files were not shown because too many files have changed in this diff.

102  app.js

@@ -2,66 +2,92 @@ const express = require('express')
 var fs = require('fs')
 const app = express()
 const bodyParser = require('body-parser')
-var students = []
+const db = require('./db').db;
+//var students = []
+/*students:
+username
+email
+grades*/
 app.use(bodyParser.urlencoded({extended: true}));
 app.use(bodyParser.json());
 //app.get('/', (req, res) => res.send(users))
+function handleError(err) {
+    if (err) {
+        throw err;
+    }
+}
 app.get('/students', (req, res) => {
     if (req.query.search) {
-        res.send(students.filter((student)=>student.username===req.query.search));
+        //res.send(students.filter((student)=>student.username===req.query.search));
+        db.query("select * from student where username=$1",[req.query.search],(err,data)=>{
+            handleError(err);
+            res.status(200).json(data.rows);
+        })
     } else {
-        res.send(students);
+        db.query("select * from student", (err,data)=>{
+            handleError(err);
+            res.status(200).json(data.rows);
+        })
     }
 })
 app.get('/students/:studentId', (req, res) => {
-    var student = students[req.params.studentId-1];
-    if (student) {
-        res.send(student);
-    } else {
-        res.status(400).send("Student with id "+req.params.studentId+" does not exist!");
-    }
+    db.query("select * from student where id=$1", [req.params.studentId], (err,data)=>{
+        handleError(err);
+        if (data.rows.length>0) {
+            res.status(200).send(data.rows)
+        } else {
+            res.status(400).send("Student with id "+req.params.studentId+" does not exist!");
+        }
+    })
 })
 app.get('/grades/:studentId', (req, res) => {
-    var student = students[req.params.studentId-1];
-    if (student) {
-        var grades = student.grades;
-        res.send(grades);
-    } else {
-        res.status(400).send("Student with id "+req.params.studentId+" does not exist!");
-    }
+    db.query("select student.id, grade.grade from student inner join grade on student.id=grade.studentid where student.id=$1", [req.params.studentId], (err,data)=>{
+        handleError(err);
+        if (data.rows.length>0) {
+            res.status(200).send(data.rows)
+        } else {
+            res.status(400).send("Student with id "+req.params.studentId+" does not have any grades!");
+        }
+    })
 })
 app.post('/grades', (req, res) => {
-    var student = students[req.body.studentId-1];
-    if (student) {
-        if (req.body.grade) {
-            var grades = student.grades;
-            student.grades.push(req.body.grade)
-            res.status(200).send("OK!");
-        } else {
-            res.status(400).send("grade was not provided!");
-        }
-    } else {
-        res.status(400).send("studentId "+req.body.studentId+" does not exist!");
-    }
+    if (req.body && req.body.studentId && req.body.grade) {
+        /*Verify user w/ID exists*/db.query("select * from student where id=$1", [req.body.studentId])
+        .then(data=>data.rows.length>0)
+        .then((exists)=>{
+            if (exists) {
+                return db.query("insert into grade(studentid,grade) values($1,$2) returning *", [req.body.studentId,req.body.grade])
+            } else {
+                res.status(400).send("Student with id "+req.body.studentId+" does not exist!");
+            }
+        })
+        .then(data=>{
+            res.status(200).send("OK! Updated "+data.rows.length+" rows.");
+        })
+    } else {
+        res.status(400).send("Missing studentId / grade!");
+    }
 })
 app.post('/register', (req, res) => {
-    if (req.body.username) {
-        if (req.body.email) {
-            var newUser = req.body;
-            newUser.id = students.length;
-            newUser.grades = [];
-            students.push(newUser);
-            res.status(200).send("OK!");
-        } else {
-            res.status(400).send("email was not provided!");
-        }
-    } else {
-        res.status(400).send("username was not provided!");
-    }
+    if (req.body && req.body.username && req.body.email) {
+        /*Verify user has unique username.*/db.query("select * from student where username=$1",[req.body.username])
+        .then(data=>{
+            if (data.rows.length>0) {
+                res.status(400).send("Student with username "+req.body.username+" already exists! Must be unique.");
+            } else {
+                return db.query("insert into student(username,email) values($1,$2) returning *",[req.body.username,req.body.email]);
+            }
+        })
+        .then(data=>{res.status(200).send("OK! Updated "+data.rows.length+" rows.")});
+    } else {
+        res.status(400).send("Missing username / email!");
+    }
 })
-const port = 3000
+const port = 3004
 app.listen(port, () => console.log(`My API that refuses to rest and is wonderful is listening at http://localhost:${port}`))
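
Not part of the commit: a minimal smoke test for the endpoints above, assuming the server is running locally on the new port 3004 and db.js points at a reachable PostgreSQL instance. The username, email, studentId, and grade values are made up for illustration.

```js
// Hypothetical smoke test (not in the repository); uses only Node's built-in http module.
const http = require('http')

function post(path, body) {
  const data = JSON.stringify(body)
  const req = http.request({
    host: 'localhost', port: 3004, path: path, method: 'POST',
    headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(data) }
  }, res => res.pipe(process.stdout))
  req.end(data)
}

post('/register', { username: 'alice', email: 'alice@example.com' }) // insert a student
post('/grades', { studentId: 1, grade: 95 })                         // add a grade, assuming student 1 exists
http.get('http://localhost:3004/students?search=alice', res => res.pipe(process.stdout)) // look a student up
```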

@ -0,0 +1,30 @@
/*
Define database settings in here! Then rename this file to db.js to run the server!
*/
/*PostgreSQL structures:
Required Tables:
students=> \d student
id | integer | not null default nextval('student_id_seq'::regclass)
username | character varying(255) |
email | character varying(255) |
students=> \d grade
studentid | integer |
grade | integer |
*/
const Pool = require('pg').Pool;
const db = new Pool({
user: '',
host: 'localhost',
database: 'students',
password: '',
port: 5432
})
module.exports = {db};
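
Not part of the commit: a minimal one-off setup sketch for the two tables described in the comment above, assuming the `students` database already exists and this file has been renamed to db.js.

```js
// Hypothetical setup script (not in the repository); creates the tables the endpoints expect.
const db = require('./db').db;

db.query(`create table if not exists student(
    id serial primary key,
    username varchar(255),
    email varchar(255))`)
  .then(() => db.query(`create table if not exists grade(
    studentid integer,
    grade integer)`))
  .then(() => console.log('tables ready'))
  .catch(err => console.error(err))
  .then(() => db.end());
```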

15  node_modules/.bin/mime (generated, vendored)

@ -1,15 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../mime/cli.js" "$@"
ret=$?
else
node "$basedir/../mime/cli.js" "$@"
ret=$?
fi
exit $ret

1  node_modules/.bin/mime (generated, vendored)

@ -0,0 +1 @@
../mime/cli.js

1  node_modules/.bin/semver (generated, vendored)

@ -0,0 +1 @@
../semver/bin/semver

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
   "_shasum": "531bc726517a3b2b41f850021c6cc15eaab507cd",
   "_spec": "accepts@~1.3.7",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/jshttp/accepts/issues"
   },

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
   "_shasum": "9a5f699051b1e7073328f2a008968b64ea2955d2",
   "_spec": "array-flatten@1.1.1",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Blake Embrey",
     "email": "hello@blakeembrey.com",

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz",
   "_shasum": "96b2709e57c9c4e09a6fd66a8fd979844f69f08a",
   "_spec": "body-parser@1.19.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/expressjs/body-parser/issues"
   },

@ -0,0 +1,7 @@
language: node_js
node_js:
- 4
- 6
- 8
- 10
- 11

@ -0,0 +1,19 @@
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@ -0,0 +1,48 @@
# buffer-writer
[![Build Status](https://secure.travis-ci.org/brianc/node-buffer-writer.png?branch=master)](http://travis-ci.org/brianc/node-buffer-writer)
Fast & efficient buffer writer used to keep memory usage low by internally recycling a single large buffer.
Used as the binary protocol writer in [node-postgres](https://github.com/brianc/node-postgres)
Since postgres requires big endian encoding, this only writes big endian numbers for now, but can & probably will easily be extended to write little endian as well.
I'll admit this has a few postgres specific things I might need to take out in the future, such as `addHeader`
## api
`var writer = new (require('buffer-writer'))();`
### writer.addInt32(num)
Writes a 4-byte big endian binary encoded number to the end of the buffer.
### writer.addInt16(num)
Writes a 2-byte big endian binary encoded number to the end of the buffer.
### writer.addCString(string)
Writes a string to the buffer `utf8` encoded and adds a null character (`\0`) at the end.
### var buffer = writer.addHeader(char)
Writes the 5 byte PostgreSQL required header to the beginning of the buffer. (1 byte for character, 1 BE Int32 for length of the buffer)
### var buffer = writer.join()
Collects all data in the writer and joins it into a single, new buffer.
### var buffer = writer.flush(char)
Writes the 5 byte postgres required message header, collects all data in the writer and joins it into a single, new buffer, and then resets the writer.
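A short sketch of the calls documented above (not part of the vendored README); the bytes in the final comment follow from the 5-byte header plus the two written fields.
```js
var Writer = require('buffer-writer');
var writer = new Writer();

writer.addInt16(42);        // 2-byte big endian integer
writer.addCString('hello'); // utf8 bytes plus a trailing \0

// prepend the code byte and BE Int32 length, return the packet, and reset the writer
var packet = writer.flush(0x70);
// packet: <Buffer 70 00 00 00 0c 00 2a 68 65 6c 6c 6f 00>
```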
## thoughts
This is kind of node-postgres specific. If you're interested in using this for a more general purpose thing, lemme know.
I would love to work with you on getting this more reusable for your needs.
## license
MIT

129  node_modules/buffer-writer/index.js (generated, vendored)

@ -0,0 +1,129 @@
//binary data writer tuned for creating
//postgres message packets as effeciently as possible by reusing the
//same buffer to avoid memcpy and limit memory allocations
var Writer = module.exports = function (size) {
this.size = size || 1024;
this.buffer = Buffer.alloc(this.size + 5);
this.offset = 5;
this.headerPosition = 0;
};
//resizes internal buffer if not enough size left
Writer.prototype._ensure = function (size) {
var remaining = this.buffer.length - this.offset;
if (remaining < size) {
var oldBuffer = this.buffer;
// exponential growth factor of around ~ 1.5
// https://stackoverflow.com/questions/2269063/buffer-growth-strategy
var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size;
this.buffer = Buffer.alloc(newSize);
oldBuffer.copy(this.buffer);
}
};
Writer.prototype.addInt32 = function (num) {
this._ensure(4);
this.buffer[this.offset++] = (num >>> 24 & 0xFF);
this.buffer[this.offset++] = (num >>> 16 & 0xFF);
this.buffer[this.offset++] = (num >>> 8 & 0xFF);
this.buffer[this.offset++] = (num >>> 0 & 0xFF);
return this;
};
Writer.prototype.addInt16 = function (num) {
this._ensure(2);
this.buffer[this.offset++] = (num >>> 8 & 0xFF);
this.buffer[this.offset++] = (num >>> 0 & 0xFF);
return this;
};
//for versions of node requiring 'length' as 3rd argument to buffer.write
var writeString = function (buffer, string, offset, len) {
buffer.write(string, offset, len);
};
//overwrite function for older versions of node
if (Buffer.prototype.write.length === 3) {
writeString = function (buffer, string, offset, len) {
buffer.write(string, offset);
};
}
Writer.prototype.addCString = function (string) {
//just write a 0 for empty or null strings
if (!string) {
this._ensure(1);
} else {
var len = Buffer.byteLength(string);
this._ensure(len + 1); //+1 for null terminator
writeString(this.buffer, string, this.offset, len);
this.offset += len;
}
this.buffer[this.offset++] = 0; // null terminator
return this;
};
Writer.prototype.addChar = function (c) {
this._ensure(1);
writeString(this.buffer, c, this.offset, 1);
this.offset++;
return this;
};
Writer.prototype.addString = function (string) {
string = string || "";
var len = Buffer.byteLength(string);
this._ensure(len);
this.buffer.write(string, this.offset);
this.offset += len;
return this;
};
Writer.prototype.getByteLength = function () {
return this.offset - 5;
};
Writer.prototype.add = function (otherBuffer) {
this._ensure(otherBuffer.length);
otherBuffer.copy(this.buffer, this.offset);
this.offset += otherBuffer.length;
return this;
};
Writer.prototype.clear = function () {
this.offset = 5;
this.headerPosition = 0;
this.lastEnd = 0;
};
//appends a header block to all the written data since the last
//subsequent header or to the beginning if there is only one data block
Writer.prototype.addHeader = function (code, last) {
var origOffset = this.offset;
this.offset = this.headerPosition;
this.buffer[this.offset++] = code;
//length is everything in this packet minus the code
this.addInt32(origOffset - (this.headerPosition + 1));
//set next header position
this.headerPosition = origOffset;
//make space for next header
this.offset = origOffset;
if (!last) {
this._ensure(5);
this.offset += 5;
}
};
Writer.prototype.join = function (code) {
if (code) {
this.addHeader(code, true);
}
return this.buffer.slice(code ? 0 : 5, this.offset);
};
Writer.prototype.flush = function (code) {
var result = this.join(code);
this.clear();
return result;
};

@ -0,0 +1,57 @@
{
"_from": "buffer-writer@2.0.0",
"_id": "buffer-writer@2.0.0",
"_inBundle": false,
"_integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==",
"_location": "/buffer-writer",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "buffer-writer@2.0.0",
"name": "buffer-writer",
"escapedName": "buffer-writer",
"rawSpec": "2.0.0",
"saveSpec": null,
"fetchSpec": "2.0.0"
},
"_requiredBy": [
"/pg"
],
"_resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
"_shasum": "ce7eb81a38f7829db09c873f2fbb792c0c98ec04",
"_spec": "buffer-writer@2.0.0",
"_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/pg",
"author": {
"name": "Brian M. Carlson"
},
"bugs": {
"url": "https://github.com/brianc/node-buffer-writer/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "a fast, efficient buffer writer",
"devDependencies": {
"mocha": "5.2.0"
},
"engines": {
"node": ">=4"
},
"homepage": "https://github.com/brianc/node-buffer-writer#readme",
"keywords": [
"buffer",
"writer",
"builder"
],
"license": "MIT",
"main": "index.js",
"name": "buffer-writer",
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-buffer-writer.git"
},
"scripts": {
"test": "mocha --throw-deprecation"
},
"version": "2.0.0"
}

@ -0,0 +1 @@
--ui tdd

@ -0,0 +1,218 @@
var Writer = require(__dirname + "/../");
var assert = require('assert');
var util = require('util');
assert.equalBuffers = function (actual, expected) {
var spit = function (actual, expected) {
console.log("");
console.log("actual " + util.inspect(actual));
console.log("expect " + util.inspect(expected));
console.log("");
};
if (actual.length != expected.length) {
spit(actual, expected);
assert.strictEqual(actual.length, expected.length);
}
for (var i = 0; i < actual.length; i++) {
if (actual[i] != expected[i]) {
spit(actual, expected);
}
assert.strictEqual(actual[i], expected[i]);
}
};
suite('adding int32', function () {
var testAddingInt32 = function (int, expectedBuffer) {
test('writes ' + int, function () {
var subject = new Writer();
var result = subject.addInt32(int).join();
assert.equalBuffers(result, expectedBuffer);
});
};
testAddingInt32(0, [0, 0, 0, 0]);
testAddingInt32(1, [0, 0, 0, 1]);
testAddingInt32(256, [0, 0, 1, 0]);
test('writes largest int32', function () {
//todo need to find largest int32 when I have internet access
return false;
});
test('writing multiple int32s', function () {
var subject = new Writer();
var result = subject.addInt32(1).addInt32(10).addInt32(0).join();
assert.equalBuffers(result, [0, 0, 0, 1, 0, 0, 0, 0x0a, 0, 0, 0, 0]);
});
suite('having to resize the buffer', function () {
test('after resize correct result returned', function () {
var subject = new Writer(10);
subject.addInt32(1).addInt32(1).addInt32(1);
assert.equalBuffers(subject.join(), [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1]);
});
});
});
suite('int16', function () {
test('writes 0', function () {
var subject = new Writer();
var result = subject.addInt16(0).join();
assert.equalBuffers(result, [0, 0]);
});
test('writes 400', function () {
var subject = new Writer();
var result = subject.addInt16(400).join();
assert.equalBuffers(result, [1, 0x90]);
});
test('writes many', function () {
var subject = new Writer();
var result = subject.addInt16(0).addInt16(1).addInt16(2).join();
assert.equalBuffers(result, [0, 0, 0, 1, 0, 2]);
});
test('resizes if internal buffer fills up', function () {
var subject = new Writer(3);
var result = subject.addInt16(2).addInt16(3).join();
assert.equalBuffers(result, [0, 2, 0, 3]);
});
});
suite('cString', function () {
test('writes empty cstring', function () {
var subject = new Writer();
var result = subject.addCString().join();
assert.equalBuffers(result, [0]);
});
test('writes two empty cstrings', function () {
var subject = new Writer();
var result = subject.addCString("").addCString("").join();
assert.equalBuffers(result, [0, 0]);
});
test('writes non-empty cstring', function () {
var subject = new Writer();
var result = subject.addCString("!!!").join();
assert.equalBuffers(result, [33, 33, 33, 0]);
});
test('resizes if reached end', function () {
var subject = new Writer(3);
var result = subject.addCString("!!!").join();
assert.equalBuffers(result, [33, 33, 33, 0]);
});
test('writes multiple cstrings', function () {
var subject = new Writer();
var result = subject.addCString("!").addCString("!").join();
assert.equalBuffers(result, [33, 0, 33, 0]);
});
});
test('writes char', function () {
var subject = new Writer(2);
var result = subject.addChar('a').addChar('b').addChar('c').join();
assert.equalBuffers(result, [0x61, 0x62, 0x63]);
});
test('gets correct byte length', function () {
var subject = new Writer(5);
assert.strictEqual(subject.getByteLength(), 0);
subject.addInt32(0);
assert.strictEqual(subject.getByteLength(), 4);
subject.addCString("!");
assert.strictEqual(subject.getByteLength(), 6);
});
test('can add arbitrary buffer to the end', function () {
var subject = new Writer(4);
subject.addCString("!!!")
var result = subject.add(Buffer.from("@@@")).join();
assert.equalBuffers(result, [33, 33, 33, 0, 0x40, 0x40, 0x40]);
});
suite('can write normal string', function () {
var subject = new Writer(4);
var result = subject.addString("!").join();
assert.equalBuffers(result, [33]);
test('can write cString too', function () {
var result = subject.addCString("!").join();
assert.equalBuffers(result, [33, 33, 0]);
});
test('can resize', function () {
var result = subject.addString("!!").join();
assert.equalBuffers(result, [33, 33, 0, 33, 33]);
});
});
suite('clearing', function () {
var subject = new Writer();
subject.addCString("@!!#!#");
subject.addInt32(10401);
test('clears', function () {
subject.clear();
assert.equalBuffers(subject.join(), []);
});
test('writing more', function () {
var joinedResult = subject.addCString("!").addInt32(9).addInt16(2).join();
assert.equalBuffers(joinedResult, [33, 0, 0, 0, 0, 9, 0, 2]);
});
test('returns result', function () {
var flushedResult = subject.flush();
assert.equalBuffers(flushedResult, [33, 0, 0, 0, 0, 9, 0, 2])
});
test('clears the writer', function () {
assert.equalBuffers(subject.join(), [])
assert.equalBuffers(subject.flush(), [])
});
});
test("resizing to much larger", function () {
var subject = new Writer(2);
var string = "!!!!!!!!";
var result = subject.addCString(string).flush();
assert.equalBuffers(result, [33, 33, 33, 33, 33, 33, 33, 33, 0]);
});
suite("flush", function () {
test('added as a hex code to a full writer', function () {
var subject = new Writer(2);
var result = subject.addCString("!").flush(0x50);
assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]);
});
test('added as a hex code to a non-full writer', function () {
var subject = new Writer(10).addCString("!");
var joinedResult = subject.join(0x50);
var result = subject.flush(0x50);
assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]);
});
test('added as a hex code to a buffer which requires resizing', function () {
var result = new Writer(2).addCString("!!!!!!!!").flush(0x50);
assert.equalBuffers(result, [0x50, 0, 0, 0, 0x0D, 33, 33, 33, 33, 33, 33, 33, 33, 0]);
});
});
suite("header", function () {
test('adding two packets with headers', function () {
var subject = new Writer(10).addCString("!");
subject.addHeader(0x50);
subject.addCString("!!");
subject.addHeader(0x40);
subject.addCString("!");
var result = subject.flush(0x10);
assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0, 0x40, 0, 0, 0, 7, 33, 33, 0, 0x10, 0, 0, 0, 6, 33, 0]);
});
});

2  node_modules/bytes/package.json (generated, vendored)

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz",
   "_shasum": "f6cf7933a360e0588fa9fde85651cdc7f805d1f6",
   "_spec": "bytes@3.1.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\body-parser",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/body-parser",
   "author": {
     "name": "TJ Holowaychuk",
     "email": "tj@vision-media.ca",

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz",
   "_shasum": "e130caf7e7279087c5616c2007d0485698984fbd",
   "_spec": "content-disposition@0.5.3",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Douglas Christopher Wilson",
     "email": "doug@somethingdoug.com"

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
   "_shasum": "e138cc75e040c727b1966fe5e5f8c9aee256fe3b",
   "_spec": "content-type@~1.0.4",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Douglas Christopher Wilson",
     "email": "doug@somethingdoug.com"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
   "_shasum": "e303a882b342cc3ee8ca513a79999734dab3ae2c",
   "_spec": "cookie-signature@1.0.6",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "TJ Holowaychuk",
     "email": "tj@learnboost.com"

2  node_modules/cookie/package.json (generated, vendored)

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz",
   "_shasum": "beb437e7022b3b6d49019d088665303ebe9c14ba",
   "_spec": "cookie@0.4.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Roman Shtylman",
     "email": "shtylman@gmail.com"

2  node_modules/debug/package.json (generated, vendored)

@@ -24,7 +24,7 @@
   "_resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
   "_shasum": "5d128515df134ff327e90a4c93f4e077a536341f",
   "_spec": "debug@2.6.9",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "TJ Holowaychuk",
     "email": "tj@vision-media.ca"

2  node_modules/depd/package.json (generated, vendored)

@@ -24,7 +24,7 @@
   "_resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
   "_shasum": "9bcd52e14c097763e749b274c4346ed2e560b5a9",
   "_spec": "depd@~1.1.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Douglas Christopher Wilson",
     "email": "doug@somethingdoug.com"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz",
   "_shasum": "978857442c44749e4206613e37946205826abd80",
   "_spec": "destroy@~1.0.4",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\send",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/send",
   "author": {
     "name": "Jonathan Ong",
     "email": "me@jongleberry.com",

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
   "_shasum": "590c61156b0ae2f4f0255732a158b266bc56b21d",
   "_spec": "ee-first@1.1.1",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\on-finished",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/on-finished",
   "author": {
     "name": "Jonathan Ong",
     "email": "me@jongleberry.com",

@@ -24,7 +24,7 @@
   "_resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
   "_shasum": "ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59",
   "_spec": "encodeurl@~1.0.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/pillarjs/encodeurl/issues"
   },

@@ -24,7 +24,7 @@
   "_resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
   "_shasum": "0258eae4d3d0c0974de1c169188ef0051d1d1988",
   "_spec": "escape-html@~1.0.3",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/component/escape-html/issues"
   },

2  node_modules/etag/package.json (generated, vendored)

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
   "_shasum": "41ae2eeb65efa62268aebfea83ac7d79299b0887",
   "_spec": "etag@~1.8.1",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/jshttp/etag/issues"
   },

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
   "_shasum": "4491fc38605cf51f8629d39c2b5d026f98a4c134",
   "_spec": "express",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI",
   "author": {
     "name": "TJ Holowaychuk",
     "email": "tj@vision-media.ca"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
   "_shasum": "b7e7d000ffd11938d0fdb053506f6ebabe9f587d",
   "_spec": "finalhandler@~1.1.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Douglas Christopher Wilson",
     "email": "doug@somethingdoug.com"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz",
   "_shasum": "98c23dab1175657b8c0573e8ceccd91b0ff18c84",
   "_spec": "forwarded@~0.1.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\proxy-addr",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/proxy-addr",
   "bugs": {
     "url": "https://github.com/jshttp/forwarded/issues"
   },

2  node_modules/fresh/package.json (generated, vendored)

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
   "_shasum": "3d8cadd90d976569fa835ab1f8e4b23a105605a7",
   "_spec": "fresh@0.5.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "TJ Holowaychuk",
     "email": "tj@vision-media.ca",

@@ -23,7 +23,7 @@
   "_resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz",
   "_shasum": "4f5029cf13239f31036e5b2e55292bcfbcc85c8f",
   "_spec": "http-errors@1.7.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\body-parser",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/body-parser",
   "author": {
     "name": "Jonathan Ong",
     "email": "me@jongleberry.com",

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
   "_shasum": "2022b4b25fbddc21d2f524974a474aafe733908b",
   "_spec": "iconv-lite@0.4.24",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\body-parser",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/body-parser",
   "author": {
     "name": "Alexander Shtuchkin",
     "email": "ashtuchkin@gmail.com"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
   "_shasum": "633c2c83e3da42a502f52466022480f4208261de",
   "_spec": "inherits@2.0.3",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\http-errors",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/http-errors",
   "browser": "./inherits_browser.js",
   "bugs": {
     "url": "https://github.com/isaacs/inherits/issues"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
   "_shasum": "bff38543eeb8984825079ff3a2a8e6cbd46781b3",
   "_spec": "ipaddr.js@1.9.1",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\proxy-addr",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/proxy-addr",
   "author": {
     "name": "whitequark",
     "email": "whitequark@whitequark.org"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
   "_shasum": "8710d7af0aa626f8fffa1ce00168545263255748",
   "_spec": "media-typer@0.3.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\type-is",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/type-is",
   "author": {
     "name": "Douglas Christopher Wilson",
     "email": "doug@somethingdoug.com"

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
   "_shasum": "b00aaa556dd8b44568150ec9d1b953f3f90cbb61",
   "_spec": "merge-descriptors@1.0.1",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "author": {
     "name": "Jonathan Ong",
     "email": "me@jongleberry.com",

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
   "_shasum": "5529a4d67654134edcc5266656835b0f851afcee",
   "_spec": "methods@~1.1.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "browser": {
     "http": false
   },

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz",
   "_shasum": "fa11c5eb0aca1334b4233cb4d52f10c5a6272f92",
   "_spec": "mime-db@1.44.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\mime-types",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/mime-types",
   "bugs": {
     "url": "https://github.com/jshttp/mime-db/issues"
   },

@@ -22,7 +22,7 @@
   "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz",
   "_shasum": "47949f98e279ea53119f5722e0f34e529bec009f",
   "_spec": "mime-types@~2.1.24",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\accepts",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/accepts",
   "bugs": {
     "url": "https://github.com/jshttp/mime-types/issues"
   },

2  node_modules/mime/package.json (generated, vendored)

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
   "_shasum": "32cd9e5c64553bd58d19a568af452acff04981b1",
   "_spec": "mime@1.6.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\send",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/send",
   "author": {
     "name": "Robert Kieffer",
     "email": "robert@broofa.com",

2  node_modules/ms/package.json (generated, vendored)

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
   "_shasum": "5608aeadfc00be6c2901df5f9861788de0d597c8",
   "_spec": "ms@2.0.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\debug",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/debug",
   "bugs": {
     "url": "https://github.com/zeit/ms/issues"
   },

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz",
   "_shasum": "feacf7ccf525a77ae9634436a64883ffeca346fb",
   "_spec": "negotiator@0.6.2",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\accepts",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/accepts",
   "bugs": {
     "url": "https://github.com/jshttp/negotiator/issues"
   },

@@ -24,7 +24,7 @@
   "_resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
   "_shasum": "20f1336481b083cd75337992a16971aa2d906947",
   "_spec": "on-finished@~2.3.0",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/jshttp/on-finished/issues"
   },

@ -0,0 +1,8 @@
language: node_js
node_js: "10"
matrix:
include:
- node_js: "4"
- node_js: "6"
- node_js: "8"

@ -0,0 +1,87 @@
node-packet-reader
==================
Handy little well tested module for reading length-prefixed binary packets.
Since buffers come off a socket in randomly sized chunks you can't expect them to cleanly
break on packet boundaries. This module allows you to push buffers in and read
full packets out the other side, so you can get to parsing right away and not have
to manage concatenating partial buffers and searching through them for packets.
## install
` $ npm install packet-reader `
## example
```js
var Reader = require('packet-reader')
var reader = new Reader()
//assuming you have a socket emitting `data` events
socket.on('data', function(buffer) {
reader.addChunk(buffer)
var packet = reader.read()
while(packet) {
  //do something with fully parsed packet
  packet = reader.read()
}
})
```
here's a more full featured example:
let's assume our "packet" for our protocol is 32-bit Big Endian length-prefixed strings
so a "hello world" packet would look something like [length, string]
`[0, 0, 0, 0x0B, h, e, l, l, o, ' ', w, o, r, l, d]`
```js
var net = require('net')
var Transform = require('stream').Transform
var Reader = require('packet-reader')
var reader = new Reader()
var parser = new Transform()
parser._transform = function(chunk, encoding, cb) {
reader.addChunk(chunk)
var packet = reader.read()
while(packet) {
this.push(packet.toString('utf8'))
packet = reader.read()
}
cb()
}
var server = net.createServer(function(socket) {
socket.pipe(parser).pipe(process.stdout)
})
```
There are a few config options for setting optional pre-length padding byte. Read the tests for details.
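For instance, the postgres-style configuration exercised in the tests uses a 1-byte header code and a length field that counts itself, hence a padding of -4 (a sketch, not part of the vendored README):
```js
var Reader = require('packet-reader')
var reader = new Reader({ headerSize: 1, lengthPadding: -4 })

reader.addChunk(Buffer.from([1, 0, 0, 0, 8, 0, 0, 2, 0]))
var body = reader.read()
console.log(reader.header) // 1, the code byte
console.log(body)          // <Buffer 00 00 02 00>, the 4 payload bytes
```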
## License
MIT
Copyright 2015 Brian M. Carlson
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,65 @@
var assert = require('assert')
var Reader = module.exports = function(options) {
//TODO - remove for version 1.0
if(typeof options == 'number') {
options = { headerSize: options }
}
options = options || {}
this.offset = 0
this.lastChunk = false
this.chunk = null
this.chunkLength = 0
this.headerSize = options.headerSize || 0
this.lengthPadding = options.lengthPadding || 0
this.header = null
assert(this.headerSize < 2, 'pre-length header of more than 1 byte length not currently supported')
}
Reader.prototype.addChunk = function(chunk) {
if (!this.chunk || this.offset === this.chunkLength) {
this.chunk = chunk
this.chunkLength = chunk.length
this.offset = 0
return
}
var newChunkLength = chunk.length
var newLength = this.chunkLength + newChunkLength
if (newLength > this.chunk.length) {
var newBufferLength = this.chunk.length * 2
while (newLength >= newBufferLength) {
newBufferLength *= 2
}
var newBuffer = Buffer.alloc(newBufferLength)
this.chunk.copy(newBuffer)
this.chunk = newBuffer
}
chunk.copy(this.chunk, this.chunkLength)
this.chunkLength = newLength
}
Reader.prototype.read = function() {
if(this.chunkLength < (this.headerSize + 4 + this.offset)) {
return false
}
if(this.headerSize) {
this.header = this.chunk[this.offset]
}
//read length of next item
var length = this.chunk.readUInt32BE(this.offset + this.headerSize) + this.lengthPadding
//next item spans more chunks than we have
var remaining = this.chunkLength - (this.offset + 4 + this.headerSize)
if(length > remaining) {
return false
}
this.offset += (this.headerSize + 4)
var result = this.chunk.slice(this.offset, this.offset + length)
this.offset += length
return result
}

@ -0,0 +1,52 @@
{
"_from": "packet-reader@1.0.0",
"_id": "packet-reader@1.0.0",
"_inBundle": false,
"_integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==",
"_location": "/packet-reader",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "packet-reader@1.0.0",
"name": "packet-reader",
"escapedName": "packet-reader",
"rawSpec": "1.0.0",
"saveSpec": null,
"fetchSpec": "1.0.0"
},
"_requiredBy": [
"/pg"
],
"_resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
"_shasum": "9238e5480dedabacfe1fe3f2771063f164157d74",
"_spec": "packet-reader@1.0.0",
"_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/pg",
"author": {
"name": "Brian M. Carlson"
},
"bugs": {
"url": "https://github.com/brianc/node-packet-reader/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Read binary packets...",
"devDependencies": {
"mocha": "~1.21.5"
},
"directories": {
"test": "test"
},
"homepage": "https://github.com/brianc/node-packet-reader",
"license": "MIT",
"main": "index.js",
"name": "packet-reader",
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-packet-reader.git"
},
"scripts": {
"test": "mocha"
},
"version": "1.0.0"
}

@ -0,0 +1,148 @@
var assert = require('assert')
var Reader = require('../')
describe('packet-reader', function() {
beforeEach(function() {
this.reader = new Reader(1)
})
it('reads perfect 1 length buffer', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1]))
var result = this.reader.read()
assert.equal(result.length, 1)
assert.equal(result[0], 1)
assert.strictEqual(false, this.reader.read())
})
it('reads perfect longer buffer', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 4, 1, 2, 3, 4]))
var result = this.reader.read()
assert.equal(result.length, 4)
assert.strictEqual(false, this.reader.read())
})
it('reads two parts', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1]))
var result = this.reader.read()
assert.strictEqual(false, result)
this.reader.addChunk(Buffer.from([2]))
var result = this.reader.read()
assert.equal(result.length, 1, 'should return 1 length buffer')
assert.equal(result[0], 2)
assert.strictEqual(this.reader.read(), false)
})
it('reads multi-part', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 16]))
assert.equal(false, this.reader.read())
this.reader.addChunk(Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]))
assert.equal(false, this.reader.read())
this.reader.addChunk(Buffer.from([9, 10, 11, 12, 13, 14, 15, 16]))
var result = this.reader.read()
assert.equal(result.length, 16)
})
it('resets internal buffer at end of packet', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 16]))
this.reader.addChunk(Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]))
this.reader.addChunk(Buffer.from([9, 10, 11, 12, 13, 14, 15, 16]))
var result = this.reader.read()
assert.equal(result.length, 16)
var newChunk = Buffer.from([0, 0, 0, 0, 16])
this.reader.addChunk(newChunk)
assert.equal(this.reader.offset, 0, 'should have been reset to 0.')
assert.strictEqual(this.reader.chunk, newChunk)
})
it('reads multiple messages from single chunk', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 2, 1, 2]))
var result = this.reader.read()
assert.equal(result.length, 1, 'should have 1 length buffer')
assert.equal(result[0], 1)
var result = this.reader.read()
assert.equal(result.length, 2, 'should have 2 length buffer but was ' + result.length)
assert.equal(result[0], 1)
assert.equal(result[1], 2)
assert.strictEqual(false, this.reader.read())
})
it('reads 1 and a split', function() {
this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1, 0, 0]))//, 0, 0, 2, 1, 2]))
var result = this.reader.read()
assert.equal(result.length, 1, 'should have 1 length buffer')
assert.equal(result[0], 1)
var result = this.reader.read()
assert.strictEqual(result, false)
this.reader.addChunk(Buffer.from([0, 0, 2, 1, 2]))
var result = this.reader.read()
assert.equal(result.length, 2, 'should have 2 length buffer but was ' + result.length)
assert.equal(result[0], 1)
assert.equal(result[1], 2)
assert.strictEqual(false, this.reader.read())
})
})
describe('variable length header', function() {
beforeEach(function() {
this.reader = new Reader()
})
it('reads double message buffers', function() {
this.reader.addChunk(Buffer.from([
0, 0, 0, 1, 1,
0, 0, 0, 2, 1, 2]))
var result = this.reader.read()
assert(result)
assert.equal(result.length, 1)
assert.equal(result[0], 1)
result = this.reader.read()
assert(result)
assert.equal(result.length, 2)
assert.equal(result[0], 1)
assert.equal(result[1], 2)
assert.strictEqual(this.reader.read(), false)
})
})
describe('1 length code', function() {
beforeEach(function() {
this.reader = new Reader(1)
})
it('reads code', function() {
this.reader.addChunk(Buffer.from([9, 0, 0, 0, 1, 1]))
var result = this.reader.read()
assert(result)
assert.equal(this.reader.header, 9)
assert.equal(result.length, 1)
assert.equal(result[0], 1)
})
it('is set on uncompleted read', function() {
assert.equal(this.reader.header, null)
this.reader.addChunk(Buffer.from([2, 0, 0, 0, 1]))
assert.strictEqual(this.reader.read(), false)
assert.equal(this.reader.header, 2)
})
})
describe('postgres style packet', function() {
beforeEach(function() {
this.reader = new Reader({
headerSize: 1,
lengthPadding: -4
})
})
it('reads with padded length', function() {
this.reader.addChunk(Buffer.from([1, 0, 0, 0, 8, 0, 0, 2, 0]))
var result = this.reader.read()
assert(result)
assert.equal(result.length, 4)
assert.equal(result[0], 0)
assert.equal(result[1], 0)
assert.equal(result[2], 2)
assert.equal(result[3], 0)
})
})

@@ -23,7 +23,7 @@
   "_resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
   "_shasum": "9da19e7bee8d12dff0513ed5b76957793bc2e8d4",
   "_spec": "parseurl@~1.3.3",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/pillarjs/parseurl/issues"
   },

@@ -21,7 +21,7 @@
   "_resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
   "_shasum": "df604178005f522f15eb4490e7247a1bfaa67f8c",
   "_spec": "path-to-regexp@0.1.7",
-  "_where": "C:\\Users\\sigon\\MyWonderfulTirelessAPI\\node_modules\\express",
+  "_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/express",
   "bugs": {
     "url": "https://github.com/component/path-to-regexp/issues"
   },

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Iced Development
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,72 @@
pg-connection-string
====================
[![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/)
[![Build Status](https://travis-ci.org/iceddev/pg-connection-string.svg?branch=master)](https://travis-ci.org/iceddev/pg-connection-string)
[![Coverage Status](https://coveralls.io/repos/github/iceddev/pg-connection-string/badge.svg?branch=master)](https://coveralls.io/github/iceddev/pg-connection-string?branch=master)
Functions for dealing with a PostgreSQL connection string
`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
MIT License
## Usage
```js
var parse = require('pg-connection-string').parse;
var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
```
The resulting config contains a subset of the following properties:
* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename
* `port` - port on which to connect
* `user` - User with which to authenticate to the server
* `password` - Corresponding password
* `database` - Database name within the server
* `client_encoding` - string encoding the client will use
* `ssl`, either a boolean or an object with properties
* `cert`
* `key`
* `ca`
* any other query parameters (for example, `application_name`) are preserved intact.
## Connection Strings
The short summary of acceptable URLs is:
* `socket:<path>?<query>` - UNIX domain socket
* `postgres://<user>:<password>@<host>:<port>/<database>?<query>` - TCP connection
But see below for more details.
### UNIX Domain Sockets
When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`.
This form can be shortened to just a path: `/var/run/pgsql`.
When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`.
Query parameters follow a `?` character, including the following special query parameters:
* `db=<database>` - sets the database name (urlencoded)
* `encoding=<encoding>` - sets the `client_encoding` property
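For example, parsing a socket URL with both special query parameters (a minimal sketch; the socket path, database name and encoding are placeholder values):
```js
var parse = require('pg-connection-string').parse

var config = parse('socket:/var/run/pgsql?db=mydb&encoding=utf8')
// config.host            === '/var/run/pgsql'
// config.database        === 'mydb'
// config.client_encoding === 'utf8'
```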
### TCP Connections
TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted).
If username and password are included, they should be urlencoded.
The database name, however, should *not* be urlencoded.
Query parameters follow a `?` character, including the following special query parameters:
* `host=<host>` - sets `host` property, overriding the URL's host
* `encoding=<encoding>` - sets the `client_encoding` property
* `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
* `sslcert=<filename>` - reads data from the given file and includes the result as `ssl.cert`
* `sslkey=<filename>` - reads data from the given file and includes the result as `ssl.key`
* `sslrootcert=<filename>` - reads data from the given file and includes the result as `ssl.ca`
A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty.
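Putting the TCP rules together (a minimal sketch; the hostname, credentials and database below are placeholders):
```js
var parse = require('pg-connection-string').parse

var config = parse('postgres://someuser:somepassword@somehost:5432/somedatabase?ssl=true&application_name=myapp')
// config.host             === 'somehost'
// config.port             === '5432'
// config.user             === 'someuser'
// config.password         === 'somepassword'
// config.database         === 'somedatabase'
// config.ssl              === true
// config.application_name === 'myapp' (other query parameters are passed through unchanged)
```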

@ -0,0 +1,14 @@
export function parse(connectionString: string): ConnectionOptions
export interface ConnectionOptions {
host: string | null
password?: string
user?: string
port?: string | null
database: string | null | undefined
client_encoding?: string
ssl?: boolean | string
application_name?: string
fallback_application_name?: string
}

@ -0,0 +1,89 @@
'use strict'
var url = require('url')
var fs = require('fs')
//Parse method copied from https://github.com/brianc/node-postgres
//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
//MIT License
//parses a connection string
function parse(str) {
//unix socket
if (str.charAt(0) === '/') {
var config = str.split(' ')
return { host: config[0], database: config[1] }
}
// url parse expects spaces encoded as %20
var result = url.parse(
/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str) ? encodeURI(str).replace(/\%25(\d\d)/g, '%$1') : str,
true
)
var config = result.query
for (var k in config) {
if (Array.isArray(config[k])) {
config[k] = config[k][config[k].length - 1]
}
}
var auth = (result.auth || ':').split(':')
config.user = auth[0]
config.password = auth.splice(1).join(':')
config.port = result.port
if (result.protocol == 'socket:') {
config.host = decodeURI(result.pathname)
config.database = result.query.db
config.client_encoding = result.query.encoding
return config
}
if (!config.host) {
// Only set the host if there is no equivalent query param.
config.host = result.hostname
}
// If the host is missing it might be a URL-encoded path to a socket.
var pathname = result.pathname
if (!config.host && pathname && /^%2f/i.test(pathname)) {
var pathnameSplit = pathname.split('/')
config.host = decodeURIComponent(pathnameSplit[0])
pathname = pathnameSplit.splice(1).join('/')
}
// result.pathname is not always guaranteed to have a '/' prefix (e.g. relative urls)
// only strip the slash if it is present.
if (pathname && pathname.charAt(0) === '/') {
pathname = pathname.slice(1) || null
}
config.database = pathname && decodeURI(pathname)
if (config.ssl === 'true' || config.ssl === '1') {
config.ssl = true
}
if (config.ssl === '0') {
config.ssl = false
}
if (config.sslcert || config.sslkey || config.sslrootcert) {
config.ssl = {}
}
if (config.sslcert) {
config.ssl.cert = fs.readFileSync(config.sslcert).toString()
}
if (config.sslkey) {
config.ssl.key = fs.readFileSync(config.sslkey).toString()
}
if (config.sslrootcert) {
config.ssl.ca = fs.readFileSync(config.sslrootcert).toString()
}
return config
}
module.exports = parse
parse.parse = parse

@ -0,0 +1,68 @@
{
"_from": "pg-connection-string@^2.2.3",
"_id": "pg-connection-string@2.2.3",
"_inBundle": false,
"_integrity": "sha512-I/KCSQGmOrZx6sMHXkOs2MjddrYcqpza3Dtsy0AjIgBr/bZiPJRK9WhABXN1Uy1UDazRbi9gZEzO2sAhL5EqiQ==",
"_location": "/pg-connection-string",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "pg-connection-string@^2.2.3",
"name": "pg-connection-string",
"escapedName": "pg-connection-string",
"rawSpec": "^2.2.3",
"saveSpec": null,
"fetchSpec": "^2.2.3"
},
"_requiredBy": [
"/pg"
],
"_resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.2.3.tgz",
"_shasum": "48e1158ec37eaa82e98dbcb7307103ec303fe0e7",
"_spec": "pg-connection-string@^2.2.3",
"_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/pg",
"author": {
"name": "Blaine Bublitz",
"email": "blaine@iceddev.com",
"url": "http://iceddev.com/"
},
"bugs": {
"url": "https://github.com/iceddev/pg-connection-string/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Functions for dealing with a PostgresSQL connection string",
"devDependencies": {
"chai": "^4.1.1",
"coveralls": "^3.0.4",
"istanbul": "^0.4.5",
"mocha": "^7.1.2"
},
"files": [
"index.js",
"index.d.ts"
],
"gitHead": "f3136a7d5d5498280924b3e06f47f8ce80dbe4e6",
"homepage": "https://github.com/iceddev/pg-connection-string",
"keywords": [
"pg",
"connection",
"string",
"parse"
],
"license": "MIT",
"main": "./index.js",
"name": "pg-connection-string",
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git"
},
"scripts": {
"check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100",
"coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls",
"test": "istanbul cover _mocha && npm run check-coverage"
},
"types": "./index.d.ts",
"version": "2.2.3"
}

node_modules/pg-int8/LICENSE

@ -0,0 +1,13 @@
Copyright © 2017, Charmander <~@charmander.me>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

node_modules/pg-int8/README.md

@ -0,0 +1,16 @@
[![Build status][ci image]][ci]
64-bit big-endian signed integer-to-string conversion designed for [pg][].
```js
const readInt8 = require('pg-int8');
readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7]))
// '283686952306183'
```
[pg]: https://github.com/brianc/node-postgres
[ci]: https://travis-ci.org/charmander/pg-int8
[ci image]: https://api.travis-ci.org/charmander/pg-int8.svg

node_modules/pg-int8/index.js

@ -0,0 +1,100 @@
'use strict';
// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer
var BASE = 1000000;
function readInt8(buffer) {
var high = buffer.readInt32BE(0);
var low = buffer.readUInt32BE(4);
var sign = '';
if (high < 0) {
high = ~high + (low === 0);
low = (~low + 1) >>> 0;
sign = '-';
}
var result = '';
var carry;
var t;
var digits;
var pad;
var l;
var i;
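// The loop below is unrolled: each block performs one step of long division of
// the 64-bit value (high * 2^32 + low) by BASE, prepends the 6-digit remainder
// (zero-padded) to the result, and returns as soon as the remaining quotient is
// zero. Three full steps plus the final step cover every signed 64-bit value.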
{
carry = high % BASE;
high = high / BASE >>> 0;
t = 0x100000000 * carry + low;
low = t / BASE >>> 0;
digits = '' + (t - BASE * low);
if (low === 0 && high === 0) {
return sign + digits + result;
}
pad = '';
l = 6 - digits.length;
for (i = 0; i < l; i++) {
pad += '0';
}
result = pad + digits + result;
}
{
carry = high % BASE;
high = high / BASE >>> 0;
t = 0x100000000 * carry + low;
low = t / BASE >>> 0;
digits = '' + (t - BASE * low);
if (low === 0 && high === 0) {
return sign + digits + result;
}
pad = '';
l = 6 - digits.length;
for (i = 0; i < l; i++) {
pad += '0';
}
result = pad + digits + result;
}
{
carry = high % BASE;
high = high / BASE >>> 0;
t = 0x100000000 * carry + low;
low = t / BASE >>> 0;
digits = '' + (t - BASE * low);
if (low === 0 && high === 0) {
return sign + digits + result;
}
pad = '';
l = 6 - digits.length;
for (i = 0; i < l; i++) {
pad += '0';
}
result = pad + digits + result;
}
{
carry = high % BASE;
t = 0x100000000 * carry + low;
digits = '' + t % BASE;
return sign + digits + result;
}
}
module.exports = readInt8;

node_modules/pg-int8/package.json

@ -0,0 +1,52 @@
{
"_from": "pg-int8@1.0.1",
"_id": "pg-int8@1.0.1",
"_inBundle": false,
"_integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"_location": "/pg-int8",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "pg-int8@1.0.1",
"name": "pg-int8",
"escapedName": "pg-int8",
"rawSpec": "1.0.1",
"saveSpec": null,
"fetchSpec": "1.0.1"
},
"_requiredBy": [
"/pg-types"
],
"_resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"_shasum": "943bd463bf5b71b4170115f80f8efc9a0c0eb78c",
"_spec": "pg-int8@1.0.1",
"_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/pg-types",
"bugs": {
"url": "https://github.com/charmander/pg-int8/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "64-bit big-endian signed integer-to-string conversion",
"devDependencies": {
"@charmander/eslint-config-base": "1.0.2",
"tap": "10.7.3"
},
"engines": {
"node": ">=4.0.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/charmander/pg-int8#readme",
"license": "ISC",
"name": "pg-int8",
"repository": {
"type": "git",
"url": "git+https://github.com/charmander/pg-int8.git"
},
"scripts": {
"test": "tap test"
},
"version": "1.0.1"
}

node_modules/pg-pool/LICENSE

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Brian M. Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/pg-pool/README.md

@ -0,0 +1,376 @@
# pg-pool
[![Build Status](https://travis-ci.org/brianc/node-pg-pool.svg?branch=master)](https://travis-ci.org/brianc/node-pg-pool)
A connection pool for node-postgres
## install
```sh
npm i pg-pool pg
```
## use
### create
to use pg-pool you must first create an instance of a pool
```js
var Pool = require('pg-pool')
// by default the pool uses the same
// configuration as whatever `pg` version you have installed
var pool = new Pool()
// you can pass properties to the pool
// these properties are passed unchanged to both the node-postgres Client constructor
// and the node-pool (https://github.com/coopernurse/node-pool) constructor
// allowing you to fully configure the behavior of both
var pool2 = new Pool({
database: 'postgres',
user: 'brianc',
password: 'secret!',
port: 5432,
ssl: true,
max: 20, // set pool max size to 20
idleTimeoutMillis: 1000, // close idle clients after 1 second
connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established
maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
})
//you can supply a custom client constructor
//if you want to use the native postgres client
var NativeClient = require('pg').native.Client
var nativePool = new Pool({ Client: NativeClient })
//you can even pool pg-native clients directly
var PgNativeClient = require('pg-native')
var pgNativePool = new Pool({ Client: PgNativeClient })
```
##### Note:
The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL.
```js
const Pool = require('pg-pool');
const url = require('url')
const params = url.parse(process.env.DATABASE_URL);
const auth = params.auth.split(':');
const config = {
user: auth[0],
password: auth[1],
host: params.hostname,
port: params.port,
database: params.pathname.split('/')[1],
ssl: true
};
const pool = new Pool(config);
/*
Transforms 'postgres://DBuser:secret@DBHost:#####/myDB' into
config = {
user: 'DBuser',
password: 'secret',
host: 'DBHost',
port: '#####',
database: 'myDB',
ssl: true
}
*/
```
### acquire clients with a promise
pg-pool supports a fully promise-based api for acquiring clients
```js
var pool = new Pool()
pool.connect().then(client => {
client.query('select $1::text as name', ['pg-pool']).then(res => {
client.release()
console.log('hello from', res.rows[0].name)
})
.catch(e => {
client.release()
console.error('query error', e.message, e.stack)
})
})
```
### plays nice with async/await
this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await:
```js
// with async/await
(async () => {
var pool = new Pool()
var client = await pool.connect()
try {
var result = await client.query('select $1::text as name', ['brianc'])
console.log('hello from', result.rows[0])
} finally {
client.release()
}
})().catch(e => console.error(e.message, e.stack))
// with co
co(function * () {
var client = yield pool.connect()
try {
var result = yield client.query('select $1::text as name', ['brianc'])
console.log('hello from', result.rows[0])
} finally {
client.release()
}
}).catch(e => console.error(e.message, e.stack))
```
### your new favorite helper method
because it's so common to just run a query and return the client to the pool afterward, pg-pool has this built-in:
```js
var pool = new Pool()
var time = await pool.query('SELECT NOW()')
var name = await pool.query('select $1::text as name', ['brianc'])
console.log(name.rows[0].name, 'says hello at', time.rows[0].name)
```
you can also use a callback here if you'd like:
```js
var pool = new Pool()
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
console.log(res.rows[0].name) // brianc
})
```
__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you
have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor)
you should almost always just use `pool.query`. It's easy, it does the right thing :tm:, and won't ever forget to return
clients back to the pool after the query is done.
### drop-in backwards compatible
pg-pool still supports, and always will support, the traditional callback API for acquiring a client. This is the exact API node-postgres has shipped with for years:
```js
var pool = new Pool()
pool.connect((err, client, done) => {
if (err) return done(err)
client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => {
done()
if (err) {
return console.error('query error', e.message, e.stack)
}
console.log('hello from', res.rows[0].name)
})
})
```
### shut it down
When you are finished with the pool, if all the clients are idle the pool will close them after `config.idleTimeoutMillis` and your app
will shut down gracefully. If you don't want to wait for the timeout you can end the pool as follows:
```js
var pool = new Pool()
var client = await pool.connect()
console.log(await client.query('select now()'))
client.release()
await pool.end()
```
### a note on instances
The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example:
```js
// assume this is a file in your program at ./your-app/lib/db.js
// correct usage: create the pool and let it live
// 'globally' here, controlling access to it through exported methods
var pool = new pg.Pool()
// this is the right way to export the query method
module.exports.query = (text, values) => {
console.log('query:', text, values)
return pool.query(text, values)
}
// this would be the WRONG way to export the connect method
module.exports.connect = () => {
// notice how we would be creating a pool instance here
// every time we called 'connect' to get a new client?
// that's a bad thing & results in creating an unbounded
// number of pools & therefore connections
var aPool = new pg.Pool()
return aPool.connect()
}
```
### events
Every instance of a `Pool` is an event emitter. These instances emit the following events:
#### error
Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients.
Example:
```js
const Pool = require('pg-pool')
const pool = new Pool()
// attach an error handler to the pool for when a connected, idle client
// receives an error by being disconnected, etc
pool.on('error', function(error, client) {
// handle this in the same way you would treat process.on('uncaughtException')
// it is supplied the error as well as the idle client which received the error
})
```
#### connect
Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend.
Example:
```js
const Pool = require('pg-pool')
const pool = new Pool()
var count = 0
pool.on('connect', client => {
client.count = count++
})
pool
.connect()
.then(client => {
return client
.query('SELECT $1::int AS "clientCount"', [client.count])
.then(res => console.log(res.rows[0].clientCount)) // outputs 0
.then(() => client)
})
.then(client => client.release())
```
#### acquire
Fired whenever a client is acquired from the pool
Example:
This allows you to count the number of clients which have ever been acquired from the pool.
```js
var Pool = require('pg-pool')
var pool = new Pool()
var acquireCount = 0
pool.on('acquire', function (client) {
acquireCount++
})
var connectCount = 0
pool.on('connect', function () {
connectCount++
})
for (var i = 0; i < 200; i++) {
pool.query('SELECT NOW()')
}
setTimeout(function () {
console.log('connect count:', connectCount) // output: connect count: 10
console.log('acquire count:', acquireCount) // output: acquire count: 200
}, 100)
```
### environment variables
pg-pool & node-postgres support some of the same environment variables as `psql` supports. The most common are:
```
PGDATABASE=my_db
PGUSER=username
PGPASSWORD="my awesome password"
PGPORT=5432
PGSSLMODE=require
```
Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node; I can vary the environment variables locally and in production; and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.
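As a minimal sketch of what this buys you (assuming the variables above are exported in your environment), an unconfigured pool picks them up automatically:
```js
const Pool = require('pg-pool')

// no config object needed - host, port, user, password, database and ssl mode
// all come from the PG* environment variables shown above
const pool = new Pool()

pool
  .query('SELECT current_database() AS db')
  .then((res) => {
    console.log('connected to', res.rows[0].db)
    return pool.end()
  })
  .catch((err) => console.error(err.message))
```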
## bring your own promise
In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this:
```js
// first run `npm install promise-polyfill --save`
if (typeof Promise == 'undefined') {
global.Promise = require('promise-polyfill')
}
```
You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level:
```js
var bluebirdPool = new Pool({
Promise: require('bluebird')
})
```
__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own."
## maxUses and read-replica autoscaling (e.g. AWS Aurora)
The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.
The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing.
Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 25. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections.
If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas.
This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance.
You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value:
```
maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
```
In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window:
```
maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
7200 = 1800 * 1000 / 10 / 25
```
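The same back-of-the-envelope calculation as a throwaway script (the numbers are the illustrative assumptions from above, not recommendations):
```js
const rebalanceWindowSeconds = 1800 // 30 minute rebalancing window
const totalRequestsPerSecond = 1000 // peak load across the fleet
const numAppInstances = 10
const poolSize = 25

const maxUses = Math.round((rebalanceWindowSeconds * totalRequestsPerSecond) / numAppInstances / poolSize)
console.log(maxUses) // 7200

// and then feed it into the pool configuration
const Pool = require('pg-pool')
const pool = new Pool({ max: poolSize, maxUses: maxUses })
```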
## tests
To run tests clone the repo, `npm i` in the working dir, and then run `npm test`
## contributions
I love contributions. Please make sure they have tests, and submit a PR. If you're not sure if the issue is worth it or will be accepted, it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres related stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.
## license
The MIT License (MIT)
Copyright (c) 2016 Brian M. Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

node_modules/pg-pool/index.js

@ -0,0 +1,403 @@
'use strict'
const EventEmitter = require('events').EventEmitter
const NOOP = function () {}
const removeWhere = (list, predicate) => {
const i = list.findIndex(predicate)
return i === -1 ? undefined : list.splice(i, 1)[0]
}
class IdleItem {
constructor(client, idleListener, timeoutId) {
this.client = client
this.idleListener = idleListener
this.timeoutId = timeoutId
}
}
class PendingItem {
constructor(callback) {
this.callback = callback
}
}
function throwOnDoubleRelease() {
throw new Error('Release called on client which has already been released to the pool.')
}
function promisify(Promise, callback) {
if (callback) {
return { callback: callback, result: undefined }
}
let rej
let res
const cb = function (err, client) {
err ? rej(err) : res(client)
}
const result = new Promise(function (resolve, reject) {
res = resolve
rej = reject
})
return { callback: cb, result: result }
}
function makeIdleListener(pool, client) {
return function idleListener(err) {
err.client = client
client.removeListener('error', idleListener)
client.on('error', () => {
pool.log('additional client error after disconnection due to error', err)
})
pool._remove(client)
// TODO - document that once the pool emits an error
// the client has already been closed & purged and is unusable
pool.emit('error', err, client)
}
}
class Pool extends EventEmitter {
constructor(options, Client) {
super()
this.options = Object.assign({}, options)
if (options != null && 'password' in options) {
// "hiding" the password so it doesn't show up in stack traces
// or if the client is console.logged
Object.defineProperty(this.options, 'password', {
configurable: true,
enumerable: false,
writable: true,
value: options.password,
})
}
this.options.max = this.options.max || this.options.poolSize || 10
this.options.maxUses = this.options.maxUses || Infinity
this.log = this.options.log || function () {}
this.Client = this.options.Client || Client || require('pg').Client
this.Promise = this.options.Promise || global.Promise
if (typeof this.options.idleTimeoutMillis === 'undefined') {
this.options.idleTimeoutMillis = 10000
}
this._clients = []
this._idle = []
this._pendingQueue = []
this._endCallback = undefined
this.ending = false
this.ended = false
}
_isFull() {
return this._clients.length >= this.options.max
}
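// dispatch the next pending checkout request: while the pool is ending this
// drains idle clients and fires the end callback once none remain; otherwise it
// hands an idle client to the oldest waiter, or creates a new client if the
// pool still has room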
_pulseQueue() {
this.log('pulse queue')
if (this.ended) {
this.log('pulse queue ended')
return
}
if (this.ending) {
this.log('pulse queue on ending')
if (this._idle.length) {
this._idle.slice().map((item) => {
this._remove(item.client)
})
}
if (!this._clients.length) {
this.ended = true
this._endCallback()
}
return
}
// if we don't have any waiting, do nothing
if (!this._pendingQueue.length) {
this.log('no queued requests')
return
}
// if we don't have any idle clients and we have no more room do nothing
if (!this._idle.length && this._isFull()) {
return
}
const pendingItem = this._pendingQueue.shift()
if (this._idle.length) {
const idleItem = this._idle.pop()
clearTimeout(idleItem.timeoutId)
const client = idleItem.client
const idleListener = idleItem.idleListener
return this._acquireClient(client, pendingItem, idleListener, false)
}
if (!this._isFull()) {
return this.newClient(pendingItem)
}
throw new Error('unexpected condition')
}
_remove(client) {
const removed = removeWhere(this._idle, (item) => item.client === client)
if (removed !== undefined) {
clearTimeout(removed.timeoutId)
}
this._clients = this._clients.filter((c) => c !== client)
client.end()
this.emit('remove', client)
}
connect(cb) {
if (this.ending) {
const err = new Error('Cannot use a pool after calling end on the pool')
return cb ? cb(err) : this.Promise.reject(err)
}
const response = promisify(this.Promise, cb)
const result = response.result
// if we don't have to connect a new client, don't do so
if (this._clients.length >= this.options.max || this._idle.length) {
// if we have idle clients schedule a pulse immediately
if (this._idle.length) {
process.nextTick(() => this._pulseQueue())
}
if (!this.options.connectionTimeoutMillis) {
this._pendingQueue.push(new PendingItem(response.callback))
return result
}
const queueCallback = (err, res, done) => {
clearTimeout(tid)
response.callback(err, res, done)
}
const pendingItem = new PendingItem(queueCallback)
// set connection timeout on checking out an existing client
const tid = setTimeout(() => {
// remove the callback from pending waiters because
// we're going to call it with a timeout error
removeWhere(this._pendingQueue, (i) => i.callback === queueCallback)
pendingItem.timedOut = true
response.callback(new Error('timeout exceeded when trying to connect'))
}, this.options.connectionTimeoutMillis)
this._pendingQueue.push(pendingItem)
return result
}
this.newClient(new PendingItem(response.callback))
return result
}
newClient(pendingItem) {
const client = new this.Client(this.options)
this._clients.push(client)
const idleListener = makeIdleListener(this, client)
this.log('checking client timeout')
// connection timeout logic
let tid
let timeoutHit = false
if (this.options.connectionTimeoutMillis) {
tid = setTimeout(() => {
this.log('ending client due to timeout')
timeoutHit = true
// force kill the node driver, and let libpq do its teardown
client.connection ? client.connection.stream.destroy() : client.end()
}, this.options.connectionTimeoutMillis)
}
this.log('connecting new client')
client.connect((err) => {
if (tid) {
clearTimeout(tid)
}
client.on('error', idleListener)
if (err) {
this.log('client failed to connect', err)
// remove the dead client from our list of clients
this._clients = this._clients.filter((c) => c !== client)
if (timeoutHit) {
err.message = 'Connection terminated due to connection timeout'
}
// this client won’t be released, so move on immediately
this._pulseQueue()
if (!pendingItem.timedOut) {
pendingItem.callback(err, undefined, NOOP)
}
} else {
this.log('new client connected')
return this._acquireClient(client, pendingItem, idleListener, true)
}
})
}
// acquire a client for a pending work item
_acquireClient(client, pendingItem, idleListener, isNew) {
if (isNew) {
this.emit('connect', client)
}
this.emit('acquire', client)
client.release = this._releaseOnce(client, idleListener)
client.removeListener('error', idleListener)
if (!pendingItem.timedOut) {
if (isNew && this.options.verify) {
this.options.verify(client, (err) => {
if (err) {
client.release(err)
return pendingItem.callback(err, undefined, NOOP)
}
pendingItem.callback(undefined, client, client.release)
})
} else {
pendingItem.callback(undefined, client, client.release)
}
} else {
if (isNew && this.options.verify) {
this.options.verify(client, client.release)
} else {
client.release()
}
}
}
// returns a function that wraps _release and throws if called more than once
_releaseOnce(client, idleListener) {
let released = false
return (err) => {
if (released) {
throwOnDoubleRelease()
}
released = true
this._release(client, idleListener, err)
}
}
// release a client back to the pool, include an error
// to remove it from the pool
_release(client, idleListener, err) {
client.on('error', idleListener)
client._poolUseCount = (client._poolUseCount || 0) + 1
// TODO(bmc): expose a proper, public interface _queryable and _ending
if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) {
if (client._poolUseCount >= this.options.maxUses) {
this.log('remove expended client')
}
this._remove(client)
this._pulseQueue()
return
}
// idle timeout
let tid
if (this.options.idleTimeoutMillis) {
tid = setTimeout(() => {
this.log('remove idle client')
this._remove(client)
}, this.options.idleTimeoutMillis)
}
this._idle.push(new IdleItem(client, idleListener, tid))
this._pulseQueue()
}
query(text, values, cb) {
// guard clause against passing a function as the first parameter
if (typeof text === 'function') {
const response = promisify(this.Promise, text)
setImmediate(function () {
return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported'))
})
return response.result
}
// allow plain text query without values
if (typeof values === 'function') {
cb = values
values = undefined
}
const response = promisify(this.Promise, cb)
cb = response.callback
this.connect((err, client) => {
if (err) {
return cb(err)
}
let clientReleased = false
const onError = (err) => {
if (clientReleased) {
return
}
clientReleased = true
client.release(err)
cb(err)
}
client.once('error', onError)
this.log('dispatching query')
client.query(text, values, (err, res) => {
this.log('query dispatched')
client.removeListener('error', onError)
if (clientReleased) {
return
}
clientReleased = true
client.release(err)
if (err) {
return cb(err)
} else {
return cb(undefined, res)
}
})
})
return response.result
}
end(cb) {
this.log('ending')
if (this.ending) {
const err = new Error('Called end on pool more than once')
return cb ? cb(err) : this.Promise.reject(err)
}
this.ending = true
const promised = promisify(this.Promise, cb)
this._endCallback = promised.callback
this._pulseQueue()
return promised.result
}
get waitingCount() {
return this._pendingQueue.length
}
get idleCount() {
return this._idle.length
}
get totalCount() {
return this._clients.length
}
}
module.exports = Pool

node_modules/pg-pool/package.json

@ -0,0 +1,67 @@
{
"_from": "pg-pool@^3.2.1",
"_id": "pg-pool@3.2.1",
"_inBundle": false,
"_integrity": "sha512-BQDPWUeKenVrMMDN9opfns/kZo4lxmSWhIqo+cSAF7+lfi9ZclQbr9vfnlNaPr8wYF3UYjm5X0yPAhbcgqNOdA==",
"_location": "/pg-pool",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "pg-pool@^3.2.1",
"name": "pg-pool",
"escapedName": "pg-pool",
"rawSpec": "^3.2.1",
"saveSpec": null,
"fetchSpec": "^3.2.1"
},
"_requiredBy": [
"/pg"
],
"_resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.1.tgz",
"_shasum": "5f4afc0f58063659aeefa952d36af49fa28b30e0",
"_spec": "pg-pool@^3.2.1",
"_where": "/var/www/html/MyWonderfulTirelessAPI/node_modules/pg",
"author": {
"name": "Brian M. Carlson"
},
"bugs": {
"url": "https://github.com/brianc/node-pg-pool/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Connection pool for node-postgres",
"devDependencies": {
"bluebird": "3.4.1",
"co": "4.6.0",
"expect.js": "0.3.1",
"lodash": "^4.17.11",
"mocha": "^7.1.2",
"pg-cursor": "^1.3.0"
},
"directories": {
"test": "test"
},
"gitHead": "f3136a7d5d5498280924b3e06f47f8ce80dbe4e6",
"homepage": "https://github.com/brianc/node-pg-pool#readme",
"keywords": [
"pg",
"postgres",
"pool",
"database"
],
"license": "MIT",
"main": "index.js",
"name": "pg-pool",
"peerDependencies": {
"pg": ">=8.0"
},
"repository": {
"type": "git",
"url": "git://github.com/brianc/node-postgres.git"
},
"scripts": {
"test": " node_modules/.bin/mocha"
},
"version": "3.2.1"
}

@ -0,0 +1,42 @@
'use strict'
const co = require('co')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const BluebirdPromise = require('bluebird')
const Pool = require('../')
const checkType = (promise) => {
expect(promise).to.be.a(BluebirdPromise)
return promise.catch((e) => undefined)
}
describe('Bring your own promise', function () {
it(
'uses supplied promise for operations',
co.wrap(function* () {
const pool = new Pool({ Promise: BluebirdPromise })
const client1 = yield checkType(pool.connect())
client1.release()
yield checkType(pool.query('SELECT NOW()'))
const client2 = yield checkType(pool.connect())
// TODO - make sure pg supports BYOP as well
client2.release()
yield checkType(pool.end())
})
)
it(
'uses promises in errors',
co.wrap(function* () {
const pool = new Pool({ Promise: BluebirdPromise, port: 48484 })
yield checkType(pool.connect())
yield checkType(pool.end())
yield checkType(pool.connect())
yield checkType(pool.query())
yield checkType(pool.end())
})
)
})

@ -0,0 +1,29 @@
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('Connection strings', function () {
it('pool delegates connectionString property to client', function (done) {
const connectionString = 'postgres://foo:bar@baz:1234/xur'
const pool = new Pool({
// use a fake client so we can check we're passed the connectionString
Client: function (args) {
expect(args.connectionString).to.equal(connectionString)
return {
connect: function (cb) {
cb(new Error('testing'))
},
on: function () {},
}
},
connectionString: connectionString,
})
pool.connect(function (err, client) {
expect(err).to.not.be(undefined)
done()
})
})
})

@ -0,0 +1,229 @@
'use strict'
const net = require('net')
const co = require('co')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const before = require('mocha').before
const after = require('mocha').after
const Pool = require('../')
describe('connection timeout', () => {
const connectionFailure = new Error('Temporary connection failure')
before((done) => {
this.server = net.createServer((socket) => {
socket.on('data', () => {
// discard any buffered data or the server won't terminate
})
})
this.server.listen(() => {
this.port = this.server.address().port
done()
})
})
after((done) => {
this.server.close(done)
})
it('should callback with an error if timeout is passed', (done) => {
const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' })
pool.connect((err, client, release) => {
expect(err).to.be.an(Error)
expect(err.message).to.contain('timeout')
expect(client).to.equal(undefined)
expect(pool.idleCount).to.equal(0)
done()
})
})
it('should reject promise with an error if timeout is passed', (done) => {
const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' })
pool.connect().catch((err) => {
expect(err).to.be.an(Error)
expect(err.message).to.contain('timeout')
expect(pool.idleCount).to.equal(0)
done()
})
})
it(
'should handle multiple timeouts',
co.wrap(
function* () {
const errors = []
const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' })
for (var i = 0; i < 15; i++) {
try {
yield pool.connect()
} catch (e) {
errors.push(e)
}
}
expect(errors).to.have.length(15)
}.bind(this)
)
)
it('should timeout on checkout of used connection', (done) => {
const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 })
pool.connect((err, client, release) => {
expect(err).to.be(undefined)
expect(client).to.not.be(undefined)
pool.connect((err, client) => {
expect(err).to.be.an(Error)
expect(client).to.be(undefined)
release()
pool.end(done)
})
})
})
it('should not break further pending checkouts on a timeout', (done) => {
const pool = new Pool({ connectionTimeoutMillis: 200, max: 1 })
pool.connect((err, client, releaseOuter) => {
expect(err).to.be(undefined)
pool.connect((err, client) => {
expect(err).to.be.an(Error)
expect(client).to.be(undefined)
releaseOuter()
})
setTimeout(() => {
pool.connect((err, client, releaseInner) => {
expect(err).to.be(undefined)
expect(client).to.not.be(undefined)
releaseInner()
pool.end(done)
})
}, 100)
})
})
it('should timeout on query if all clients are busy', (done) => {
const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 })
pool.connect((err, client, release) => {
expect(err).to.be(undefined)
expect(client).to.not.be(undefined)
pool.query('select now()', (err, result) => {
expect(err).to.be.an(Error)
expect(result).to.be(undefined)
release()
pool.end(done)
})
})
})
it('should recover from timeout errors', (done) => {
const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 })
pool.connect((err, client, release) => {
expect(err).to.be(undefined)
expect(client).to.not.be(undefined)
pool.query('select now()', (err, result) => {
expect(err).to.be.an(Error)
expect(result).to.be(undefined)
release()
pool.query('select $1::text as name', ['brianc'], (err, res) => {
expect(err).to.be(undefined)
expect(res.rows).to.have.length(1)
pool.end(done)
})
})
})
})
it('continues processing after a connection failure', (done) => {
const Client = require('pg').Client
const orgConnect = Client.prototype.connect
let called = false
Client.prototype.connect = function (cb) {
// Simulate a failure on first call
if (!called) {
called = true
return setTimeout(() => {
cb(connectionFailure)
}, 100)
}
// And pass-through the second call
orgConnect.call(this, cb)
}
const pool = new Pool({
Client: Client,
connectionTimeoutMillis: 1000,
max: 1,
})
pool.connect((err, client, release) => {
expect(err).to.be(connectionFailure)
pool.query('select $1::text as name', ['brianc'], (err, res) => {
expect(err).to.be(undefined)
expect(res.rows).to.have.length(1)
pool.end(done)
})
})
})
it('releases newly connected clients if the queued already timed out', (done) => {
const Client = require('pg').Client
const orgConnect = Client.prototype.connect
let connection = 0
Client.prototype.connect = function (cb) {
// Simulate a failure on first call
if (connection === 0) {
connection++
return setTimeout(() => {
cb(connectionFailure)
}, 300)
}
// And second connect taking > connection timeout
if (connection === 1) {
connection++
return setTimeout(() => {
orgConnect.call(this, cb)
}, 1000)
}
orgConnect.call(this, cb)
}
const pool = new Pool({
Client: Client,
connectionTimeoutMillis: 1000,
max: 1,
})
// Direct connect
pool.connect((err, client, release) => {
expect(err).to.be(connectionFailure)
})
// Queued
let called = 0
pool.connect((err, client, release) => {
// Verify the callback is only called once
expect(called++).to.be(0)
expect(err).to.be.an(Error)
pool.query('select $1::text as name', ['brianc'], (err, res) => {
expect(err).to.be(undefined)
expect(res.rows).to.have.length(1)
pool.end(done)
})
})
})
})

@ -0,0 +1,40 @@
'use strict'
const co = require('co')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('pool ending', () => {
it('ends without being used', (done) => {
const pool = new Pool()
pool.end(done)
})
it('ends with a promise', () => {
return new Pool().end()
})
it(
'ends with clients',
co.wrap(function* () {
const pool = new Pool()
const res = yield pool.query('SELECT $1::text as name', ['brianc'])
expect(res.rows[0].name).to.equal('brianc')
return pool.end()
})
)
it(
'allows client to finish',
co.wrap(function* () {
const pool = new Pool()
const query = pool.query('SELECT $1::text as name', ['brianc'])
yield pool.end()
const res = yield query
expect(res.rows[0].name).to.equal('brianc')
})
)
})

@ -0,0 +1,260 @@
'use strict'
const net = require('net')
const co = require('co')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('pool error handling', function () {
it('Should complete these queries without dying', function (done) {
const pool = new Pool()
let errors = 0
let shouldGet = 0
function runErrorQuery() {
shouldGet++
return new Promise(function (resolve, reject) {
pool
.query("SELECT 'asd'+1 ")
.then(function (res) {
reject(res) // this should always error
})
.catch(function (err) {
errors++
resolve(err)
})
})
}
const ps = []
for (let i = 0; i < 5; i++) {
ps.push(runErrorQuery())
}
Promise.all(ps).then(function () {
expect(shouldGet).to.eql(errors)
pool.end(done)
})
})
describe('calling release more than once', () => {
it(
'should throw each time',
co.wrap(function* () {
const pool = new Pool()
const client = yield pool.connect()
client.release()
expect(() => client.release()).to.throwError()
expect(() => client.release()).to.throwError()
return yield pool.end()
})
)
it('should throw each time with callbacks', function (done) {
const pool = new Pool()
pool.connect(function (err, client, clientDone) {
expect(err).not.to.be.an(Error)
clientDone()
expect(() => clientDone()).to.throwError()
expect(() => clientDone()).to.throwError()
pool.end(done)
})
})
})
describe('calling connect after end', () => {
it('should return an error', function* () {
const pool = new Pool()
const res = yield pool.query('SELECT $1::text as name', ['hi'])
expect(res.rows[0].name).to.equal('hi')
const wait = pool.end()
pool.query('select now()')
yield wait
expect(() => pool.query('select now()')).to.reject()
})
})
describe('using an ended pool', () => {
it('rejects all additional promises', (done) => {
const pool = new Pool()
const promises = []
pool.end().then(() => {
const squash = (promise) => promise.catch((e) => 'okay!')
promises.push(squash(pool.connect()))
promises.push(squash(pool.query('SELECT NOW()')))
promises.push(squash(pool.end()))
Promise.all(promises).then((res) => {
expect(res).to.eql(['okay!', 'okay!', 'okay!'])
done()
})
})
})
it('returns an error on all additional callbacks', (done) => {
const pool = new Pool()
pool.end(() => {
pool.query('SELECT *', (err) => {
expect(err).to.be.an(Error)
pool.connect((err) => {
expect(err).to.be.an(Error)
pool.end((err) => {
expect(err).to.be.an(Error)
done()
})
})
})
})
})
})
describe('error from idle client', () => {
it(
'removes client from pool',
co.wrap(function* () {
const pool = new Pool()
const client = yield pool.connect()
expect(pool.totalCount).to.equal(1)
expect(pool.waitingCount).to.equal(0)
expect(pool.idleCount).to.equal(0)
client.release()
yield new Promise((resolve, reject) => {
process.nextTick(() => {
let poolError
pool.once('error', (err) => {
poolError = err
})
let clientError
client.once('error', (err) => {
clientError = err
})
client.emit('error', new Error('expected'))
expect(clientError.message).to.equal('expected')
expect(poolError.message).to.equal('expected')
expect(pool.idleCount).to.equal(0)
expect(pool.totalCount).to.equal(0)
pool.end().then(resolve, reject)
})
})
})
)
})
describe('error from in-use client', () => {
it(
'keeps the client in the pool',
co.wrap(function* () {
const pool = new Pool()
const client = yield pool.connect()
expect(pool.totalCount).to.equal(1)
expect(pool.waitingCount).to.equal(0)
expect(pool.idleCount).to.equal(0)
yield new Promise((resolve, reject) => {
process.nextTick(() => {
let poolError
pool.once('error', (err) => {
poolError = err
})
let clientError
client.once('error', (err) => {
clientError = err
})
client.emit('error', new Error('expected'))
expect(clientError.message).to.equal('expected')
expect(poolError).not.to.be.ok()
expect(pool.idleCount).to.equal(0)
expect(pool.totalCount).to.equal(1)
client.release()
pool.end().then(resolve, reject)
})
})
})
)
})
describe('passing a function to pool.query', () => {
it('calls back with error', (done) => {
const pool = new Pool()
console.log('passing fn to query')
pool.query((err) => {
expect(err).to.be.an(Error)
pool.end(done)
})
})
})
describe('pool with lots of errors', () => {
it(
'continues to work and provide new clients',
co.wrap(function* () {
const pool = new Pool({ max: 1 })
const errors = []
for (var i = 0; i < 20; i++) {
try {
yield pool.query('invalid sql')
} catch (err) {
errors.push(err)
}
}
expect(errors).to.have.length(20)
expect(pool.idleCount).to.equal(0)
expect(pool.query).to.be.a(Function)
const res = yield pool.query('SELECT $1::text as name', ['brianc'])
expect(res.rows).to.have.length(1)
expect(res.rows[0].name).to.equal('brianc')
return pool.end()
})
)
})
it('should continue with queued items after a connection failure', (done) => {
const closeServer = net
.createServer((socket) => {
socket.destroy()
})
.unref()
closeServer.listen(() => {
const pool = new Pool({ max: 1, port: closeServer.address().port, host: 'localhost' })
pool.connect((err) => {
expect(err).to.be.an(Error)
if (err.code) {
expect(err.code).to.be('ECONNRESET')
}
})
pool.connect((err) => {
expect(err).to.be.an(Error)
if (err.code) {
expect(err.code).to.be('ECONNRESET')
}
closeServer.close(() => {
pool.end(done)
})
})
})
})
it('handles post-checkout client failures in pool.query', (done) => {
const pool = new Pool({ max: 1 })
pool.on('error', () => {
// We double close the connection in this test; prevent the exception caused by that
})
pool.query('SELECT pg_sleep(5)', [], (err) => {
expect(err).to.be.an(Error)
done()
})
setTimeout(() => {
pool._clients[0].end()
}, 1000)
})
})

@ -0,0 +1,86 @@
'use strict'
const expect = require('expect.js')
const EventEmitter = require('events').EventEmitter
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('events', function () {
it('emits connect before callback', function (done) {
const pool = new Pool()
let emittedClient = false
pool.on('connect', function (client) {
emittedClient = client
})
pool.connect(function (err, client, release) {
if (err) return done(err)
release()
pool.end()
expect(client).to.be(emittedClient)
done()
})
})
it('emits "connect" only with a successful connection', function () {
const pool = new Pool({
// This client will always fail to connect
Client: mockClient({
connect: function (cb) {
process.nextTick(() => {
cb(new Error('bad news'))
})
},
}),
})
pool.on('connect', function () {
throw new Error('should never get here')
})
return pool.connect().catch((e) => expect(e.message).to.equal('bad news'))
})
it('emits acquire every time a client is acquired', function (done) {
const pool = new Pool()
let acquireCount = 0
pool.on('acquire', function (client) {
expect(client).to.be.ok()
acquireCount++
})
for (let i = 0; i < 10; i++) {
pool.connect(function (err, client, release) {
if (err) return done(err)
release()
})
pool.query('SELECT now()')
}
setTimeout(function () {
expect(acquireCount).to.be(20)
pool.end(done)
}, 100)
})
it('emits error and client if an idle client in the pool hits an error', function (done) {
const pool = new Pool()
pool.connect(function (err, client) {
expect(err).to.equal(undefined)
client.release()
setImmediate(function () {
client.emit('error', new Error('problem'))
})
pool.once('error', function (err, errClient) {
expect(err.message).to.equal('problem')
expect(errClient).to.equal(client)
done()
})
})
})
})
function mockClient(methods) {
return function () {
const client = new EventEmitter()
Object.assign(client, methods)
return client
}
}

@ -0,0 +1,87 @@
'use strict'
const co = require('co')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
const wait = (time) => new Promise((resolve) => setTimeout(resolve, time))
describe('idle timeout', () => {
it('should timeout and remove the client', (done) => {
const pool = new Pool({ idleTimeoutMillis: 10 })
pool.query('SELECT NOW()')
pool.on('remove', () => {
expect(pool.idleCount).to.equal(0)
expect(pool.totalCount).to.equal(0)
done()
})
})
it(
'times out and removes clients when others are also removed',
co.wrap(function* () {
const pool = new Pool({ idleTimeoutMillis: 10 })
const clientA = yield pool.connect()
const clientB = yield pool.connect()
clientA.release()
clientB.release(new Error())
const removal = new Promise((resolve) => {
pool.on('remove', () => {
expect(pool.idleCount).to.equal(0)
expect(pool.totalCount).to.equal(0)
resolve()
})
})
const timeout = wait(100).then(() => Promise.reject(new Error('Idle timeout failed to occur')))
try {
yield Promise.race([removal, timeout])
} finally {
pool.end()
}
})
)
it(
'can remove idle clients and recreate them',
co.wrap(function* () {
const pool = new Pool({ idleTimeoutMillis: 1 })
const results = []
for (var i = 0; i < 20; i++) {
let query = pool.query('SELECT NOW()')
expect(pool.idleCount).to.equal(0)
expect(pool.totalCount).to.equal(1)
results.push(yield query)
yield wait(2)
expect(pool.idleCount).to.equal(0)
expect(pool.totalCount).to.equal(0)
}
expect(results).to.have.length(20)
})
)
it(
'does not time out clients which are used',
co.wrap(function* () {
const pool = new Pool({ idleTimeoutMillis: 1 })
const results = []
for (var i = 0; i < 20; i++) {
let client = yield pool.connect()
expect(pool.totalCount).to.equal(1)
expect(pool.idleCount).to.equal(0)
yield wait(10)
results.push(yield client.query('SELECT NOW()'))
client.release()
expect(pool.idleCount).to.equal(1)
expect(pool.totalCount).to.equal(1)
}
expect(results).to.have.length(20)
return pool.end()
})
)
})

node_modules/pg-pool/test/index.js

@ -0,0 +1,226 @@
'use strict'
const expect = require('expect.js')
const _ = require('lodash')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('pool', function () {
describe('with callbacks', function () {
it('works totally unconfigured', function (done) {
const pool = new Pool()
pool.connect(function (err, client, release) {
if (err) return done(err)
client.query('SELECT NOW()', function (err, res) {
release()
if (err) return done(err)
expect(res.rows).to.have.length(1)
pool.end(done)
})
})
})
it('passes props to clients', function (done) {
const pool = new Pool({ binary: true })
pool.connect(function (err, client, release) {
release()
if (err) return done(err)
expect(client.binary).to.eql(true)
pool.end(done)
})
})
it('can run a query with a callback without parameters', function (done) {
const pool = new Pool()
pool.query('SELECT 1 as num', function (err, res) {
expect(res.rows[0]).to.eql({ num: 1 })
pool.end(function () {
done(err)
})
})
})
it('can run a query with a callback', function (done) {
const pool = new Pool()
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
expect(res.rows[0]).to.eql({ name: 'brianc' })
pool.end(function () {
done(err)
})
})
})
it('passes connection errors to callback', function (done) {
const pool = new Pool({ port: 53922 })
pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
expect(res).to.be(undefined)
expect(err).to.be.an(Error)
// a connection error should not pollute the pool with a dead client
expect(pool.totalCount).to.equal(0)
pool.end(function (err) {
done(err)
})
})
})
it('does not pass client to error callback', function (done) {
const pool = new Pool({ port: 58242 })
pool.connect(function (err, client, release) {
expect(err).to.be.an(Error)
expect(client).to.be(undefined)
expect(release).to.be.a(Function)
pool.end(done)
})
})
it('removes client if it errors in background', function (done) {
const pool = new Pool()
pool.connect(function (err, client, release) {
release()
if (err) return done(err)
client.testString = 'foo'
setTimeout(function () {
client.emit('error', new Error('on purpose'))
}, 10)
})
pool.on('error', function (err) {
expect(err.message).to.be('on purpose')
expect(err.client).to.not.be(undefined)
expect(err.client.testString).to.be('foo')
err.client.connection.stream.on('end', function () {
pool.end(done)
})
})
})
it('should not change given options', function (done) {
const options = { max: 10 }
const pool = new Pool(options)
pool.connect(function (err, client, release) {
release()
if (err) return done(err)
expect(options).to.eql({ max: 10 })
pool.end(done)
})
})
it('does not create promises when connecting', function (done) {
const pool = new Pool()
const returnValue = pool.connect(function (err, client, release) {
release()
if (err) return done(err)
pool.end(done)
})
expect(returnValue).to.be(undefined)
})
it('does not create promises when querying', function (done) {
const pool = new Pool()
const returnValue = pool.query('SELECT 1 as num', function (err) {
pool.end(function () {
done(err)
})
})
expect(returnValue).to.be(undefined)
})
it('does not create promises when ending', function (done) {
const pool = new Pool()
const returnValue = pool.end(done)
expect(returnValue).to.be(undefined)
})
it('never calls callback synchronously', function (done) {
const pool = new Pool()
pool.connect((err, client) => {
if (err) throw err
client.release()
setImmediate(() => {
let called = false
pool.connect((err, client) => {
if (err) throw err
called = true
client.release()
setImmediate(() => {
pool.end(done)
})
})
expect(called).to.equal(false)
})
})
})
})
describe('with promises', function () {
it('connects, queries, and disconnects', function () {
const pool = new Pool()
return pool.connect().then(function (client) {
return client.query('select $1::text as name', ['hi']).then(function (res) {
expect(res.rows).to.eql([{ name: 'hi' }])
client.release()
return pool.end()
})
})
})
it('executes a query directly', () => {
const pool = new Pool()
return pool.query('SELECT $1::text as name', ['hi']).then((res) => {
expect(res.rows).to.have.length(1)
expect(res.rows[0].name).to.equal('hi')
return pool.end()
})
})
it('properly pools clients', function () {
const pool = new Pool({ poolSize: 9 })
const promises = _.times(30, function () {
return pool.connect().then(function (client) {
return client.query('select $1::text as name', ['hi']).then(function (res) {
client.release()
return res
})
})
})
return Promise.all(promises).then(function (res) {
expect(res).to.have.length(30)
expect(pool.totalCount).to.be(9)
return pool.end()
})
})
it('supports just running queries', function () {
const pool = new Pool({ poolSize: 9 })
const text = 'select $1::text as name'
const values = ['hi']
const query = { text: text, values: values }
const promises = _.times(30, () => pool.query(query))
return Promise.all(promises).then(function (queries) {
expect(queries).to.have.length(30)
return pool.end()
})
})
it('recovers from query errors', function () {
const pool = new Pool()
const errors = []
const promises = _.times(30, () => {
return pool.query('SELECT asldkfjasldkf').catch(function (e) {
errors.push(e)
})
})
return Promise.all(promises).then(() => {
expect(errors).to.have.length(30)
expect(pool.totalCount).to.equal(0)
expect(pool.idleCount).to.equal(0)
return pool.query('SELECT $1::text as name', ['hi']).then(function (res) {
expect(res.rows).to.eql([{ name: 'hi' }])
return pool.end()
})
})
})
})
})
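
A sketch of the two checkout styles the tests above cover, callbacks first and then promises, under the same assumptions (pg's Pool, reachable server):

const { Pool } = require('pg')
const pool = new Pool()

// Callback style: the callback receives (err, client, release).
pool.connect((err, client, release) => {
  if (err) return console.error('connect failed', err)
  client.query('SELECT 1 AS num', (err, res) => {
    release() // always hand the client back, even on error
    if (err) return console.error(err)
    console.log(res.rows[0].num) // 1

    // Promise style: the same flow, then shut the pool down.
    pool
      .connect()
      .then((client2) =>
        client2
          .query('SELECT $1::text AS name', ['brianc'])
          .then((r) => console.log(r.rows[0].name)) // 'brianc'
          .finally(() => client2.release())
      )
      .then(() => pool.end())
  })
})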

@ -0,0 +1,20 @@
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('logging', function () {
it('logs to supplied log function if given', function () {
const messages = []
const log = function (msg) {
messages.push(msg)
}
const pool = new Pool({ log: log })
return pool.query('SELECT NOW()').then(function () {
expect(messages.length).to.be.greaterThan(0)
return pool.end()
})
})
})
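
The log option above accepts any function; a one-line configuration sketch under the same assumptions:

const { Pool } = require('pg')

// Internal pool events are passed to the supplied log function.
const pool = new Pool({ log: (msg) => console.log('[pool]', msg) })

pool.query('SELECT NOW()').then(() => pool.end())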

@ -0,0 +1,98 @@
const expect = require('expect.js')
const co = require('co')
const _ = require('lodash')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('maxUses', () => {
it(
'can create a single client and use it once',
co.wrap(function* () {
const pool = new Pool({ maxUses: 2 })
expect(pool.waitingCount).to.equal(0)
const client = yield pool.connect()
const res = yield client.query('SELECT $1::text as name', ['hi'])
expect(res.rows[0].name).to.equal('hi')
client.release()
pool.end()
})
)
it(
'getting a connection a second time returns the same connection and releasing it also closes it',
co.wrap(function* () {
const pool = new Pool({ maxUses: 2 })
expect(pool.waitingCount).to.equal(0)
const client = yield pool.connect()
client.release()
const client2 = yield pool.connect()
expect(client).to.equal(client2)
expect(client2._ending).to.equal(false)
client2.release()
expect(client2._ending).to.equal(true)
return yield pool.end()
})
)
it(
'getting a connection a third time returns a new connection',
co.wrap(function* () {
const pool = new Pool({ maxUses: 2 })
expect(pool.waitingCount).to.equal(0)
const client = yield pool.connect()
client.release()
const client2 = yield pool.connect()
expect(client).to.equal(client2)
client2.release()
const client3 = yield pool.connect()
expect(client3).not.to.equal(client2)
client3.release()
return yield pool.end()
})
)
it(
'getting a connection from a pending request gets a fresh client when the released candidate is expended',
co.wrap(function* () {
const pool = new Pool({ max: 1, maxUses: 2 })
expect(pool.waitingCount).to.equal(0)
const client1 = yield pool.connect()
pool.connect().then((client2) => {
expect(client2).to.equal(client1)
expect(pool.waitingCount).to.equal(1)
// Releasing the client this time should also expend it since maxUses is 2, causing client3 to be a fresh client
client2.release()
})
const client3Promise = pool.connect().then((client3) => {
// client3 should be a fresh client since client2's release caused the first client to be expended
expect(pool.waitingCount).to.equal(0)
expect(client3).not.to.equal(client1)
return client3.release()
})
// There should be two pending requests since we have 3 connect requests but a max size of 1
expect(pool.waitingCount).to.equal(2)
// Releasing the client should not yet expend it since maxUses is 2
client1.release()
yield client3Promise
return yield pool.end()
})
)
it(
'logs when removing an expended client',
co.wrap(function* () {
const messages = []
const log = function (msg) {
messages.push(msg)
}
const pool = new Pool({ maxUses: 1, log })
const client = yield pool.connect()
client.release()
expect(messages).to.contain('remove expended client')
return yield pool.end()
})
)
})
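
A sketch of the maxUses recycling these tests describe, under the same assumptions:

const { Pool } = require('pg')

// With maxUses: 2 a physical connection is destroyed after its second
// release, and the next checkout receives a brand-new client.
const pool = new Pool({ max: 1, maxUses: 2 })

async function main() {
  const a = await pool.connect()
  a.release()                    // first use
  const b = await pool.connect() // same underlying client as `a`
  b.release()                    // second use: the client is expended and removed
  const c = await pool.connect() // freshly created client
  c.release()
  await pool.end()
}

main().catch(console.error)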

@ -0,0 +1,54 @@
const Pool = require('../')
const expect = require('expect.js')
const net = require('net')
describe('releasing clients', () => {
it('removes a client which cannot be queried', async () => {
// make a pool w/ only 1 client
const pool = new Pool({ max: 1 })
expect(pool.totalCount).to.eql(0)
const client = await pool.connect()
expect(pool.totalCount).to.eql(1)
expect(pool.idleCount).to.eql(0)
// reach into the client and sever its connection
client.connection.end()
// wait for the client to error out
const err = await new Promise((resolve) => client.once('error', resolve))
expect(err).to.be.ok()
expect(pool.totalCount).to.eql(1)
expect(pool.idleCount).to.eql(0)
// try to return it to the pool - this removes it because its broken
client.release()
expect(pool.totalCount).to.eql(0)
expect(pool.idleCount).to.eql(0)
// make sure pool still works
const { rows } = await pool.query('SELECT NOW()')
expect(rows).to.have.length(1)
await pool.end()
})
it('removes a client which is ending', async () => {
// make a pool w/ only 1 client
const pool = new Pool({ max: 1 })
expect(pool.totalCount).to.eql(0)
const client = await pool.connect()
expect(pool.totalCount).to.eql(1)
expect(pool.idleCount).to.eql(0)
// end the client gracefully (but you shouldn't do this with pooled clients)
client.end()
// try to return it to the pool
client.release()
expect(pool.totalCount).to.eql(0)
expect(pool.idleCount).to.eql(0)
// make sure pool still works
const { rows } = await pool.query('SELECT NOW()')
expect(rows).to.have.length(1)
await pool.end()
})
})
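
Related sketch: passing an error to release() tells the pool to destroy the client rather than return it to the idle set, which is useful when the caller knows the connection is poisoned (same assumptions as above):

const { Pool } = require('pg')
const pool = new Pool({ max: 1 })

async function main() {
  const client = await pool.connect()
  try {
    await client.query('SELECT NOW()')
  } finally {
    // A truthy argument marks the client as unusable and removes it.
    client.release(new Error('discard this connection'))
  }
  console.log(pool.totalCount) // 0: the discarded client is gone
  const { rows } = await pool.query('SELECT 1 AS ok') // a replacement is created
  console.log(rows[0].ok) // 1
  await pool.end()
}

main().catch(console.error)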

@ -0,0 +1,10 @@
const crash = (reason) => {
process.on(reason, (err) => {
console.error(reason, err.stack)
process.exit(-1)
})
}
crash('unhandledRejection')
crash('uncaughtException')
crash('warning')

@ -0,0 +1,58 @@
const expect = require('expect.js')
const co = require('co')
const _ = require('lodash')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('pool size of 1', () => {
it(
'can create a single client and use it once',
co.wrap(function* () {
const pool = new Pool({ max: 1 })
expect(pool.waitingCount).to.equal(0)
const client = yield pool.connect()
const res = yield client.query('SELECT $1::text as name', ['hi'])
expect(res.rows[0].name).to.equal('hi')
client.release()
pool.end()
})
)
it(
'can create a single client and use it multiple times',
co.wrap(function* () {
const pool = new Pool({ max: 1 })
expect(pool.waitingCount).to.equal(0)
const client = yield pool.connect()
const wait = pool.connect()
expect(pool.waitingCount).to.equal(1)
client.release()
const client2 = yield wait
expect(client).to.equal(client2)
client2.release()
return yield pool.end()
})
)
it(
'can only send 1 query at a time',
co.wrap(function* () {
const pool = new Pool({ max: 1 })
// the query text column name changed in PostgreSQL 9.2
const versionResult = yield pool.query('SHOW server_version_num')
const version = parseInt(versionResult.rows[0].server_version_num, 10)
const queryColumn = version < 90200 ? 'current_query' : 'query'
const queryText = 'SELECT COUNT(*) as counts FROM pg_stat_activity WHERE ' + queryColumn + ' = $1'
const queries = _.times(20, () => pool.query(queryText, [queryText]))
const results = yield Promise.all(queries)
const counts = results.map((res) => parseInt(res.rows[0].counts, 10))
expect(counts).to.eql(_.times(20, (i) => 1))
return yield pool.end()
})
)
})
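
A sketch of the checkout queue these max: 1 tests rely on; waitingCount reports requests parked behind the single client:

const { Pool } = require('pg')
const pool = new Pool({ max: 1 })

async function main() {
  const first = await pool.connect()
  const queued = pool.connect()  // parked: the only client is checked out
  console.log(pool.waitingCount) // 1
  first.release()                // hands the client to the queued request
  const second = await queued
  console.log(second === first)  // true: the same client object is reused
  second.release()
  await pool.end()
}

main().catch(console.error)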

@ -0,0 +1,19 @@
'use strict'
const Cursor = require('pg-cursor')
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('submittable', () => {
it('is returned from the query method', (done) => {
const pool = new Pool()
const cursor = pool.query(new Cursor('SELECT * from generate_series(0, 1000)'))
cursor.read((err, rows) => {
expect(err).to.be(undefined)
expect(!!rows).to.be.ok()
cursor.close(done)
})
})
})
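
pool.query() passes "submittable" objects such as a pg-cursor Cursor straight through instead of wrapping them in a promise; a sketch assuming pg-cursor is installed alongside pg:

const { Pool } = require('pg')
const Cursor = require('pg-cursor')

const pool = new Pool()
const cursor = pool.query(new Cursor('SELECT * FROM generate_series(0, 1000)'))

// Read rows incrementally instead of buffering the whole result set.
cursor.read(100, (err, rows) => {
  if (err) throw err
  console.log(rows.length) // up to 100 rows per read
  cursor.close(() => pool.end())
})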

@ -0,0 +1,25 @@
'use strict'
const expect = require('expect.js')
const describe = require('mocha').describe
const it = require('mocha').it
const Pool = require('../')
describe('verify', () => {
it('verifies a client with a callback', (done) => {
const pool = new Pool({
verify: (client, cb) => {
client.release()
cb(new Error('nope'))
},
})
pool.connect((err, client) => {
expect(err).to.be.an(Error)
expect(err.message).to.be('nope')
pool.end()
done()
})
})
})
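
Based on the test above, a hedged sketch of using the verify hook as a connection health check; the SELECT 1 probe is illustrative, and the only behaviour the test guarantees is that an error passed to the callback surfaces in connect():

const { Pool } = require('pg')

const pool = new Pool({
  // The pool hands each newly connected client to verify(client, callback);
  // calling back with an error fails the checkout.
  verify: (client, callback) => {
    client.query('SELECT 1', (err) => callback(err))
  },
})

pool.connect((err, client, release) => {
  if (err) return console.error('verification failed:', err.message)
  release()
  pool.end()
})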

21
node_modules/pg-protocol/LICENSE generated vendored

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2010 - 2020 Brian Carlson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,14 @@
/// <reference types="node" />
export declare class BufferReader {
private offset;
private buffer;
private encoding;
constructor(offset?: number);
setBuffer(offset: number, buffer: Buffer): void;
int16(): number;
byte(): number;
int32(): number;
string(length: number): string;
cstring(): string;
bytes(length: number): Buffer;
}

@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const emptyBuffer = Buffer.allocUnsafe(0);
class BufferReader {
constructor(offset = 0) {
this.offset = offset;
this.buffer = emptyBuffer;
// TODO(bmc): support non-utf8 encoding
this.encoding = 'utf-8';
}
setBuffer(offset, buffer) {
this.offset = offset;
this.buffer = buffer;
}
int16() {
const result = this.buffer.readInt16BE(this.offset);
this.offset += 2;
return result;
}
byte() {
const result = this.buffer[this.offset];
this.offset++;
return result;
}
int32() {
const result = this.buffer.readInt32BE(this.offset);
this.offset += 4;
return result;
}
string(length) {
const result = this.buffer.toString(this.encoding, this.offset, this.offset + length);
this.offset += length;
return result;
}
cstring() {
var start = this.offset;
var end = this.buffer.indexOf(0, start);
this.offset = end + 1;
return this.buffer.toString(this.encoding, start, end);
}
bytes(length) {
const result = this.buffer.slice(this.offset, this.offset + length);
this.offset += length;
return result;
}
}
exports.BufferReader = BufferReader;
//# sourceMappingURL=BufferReader.js.map

@ -0,0 +1 @@
{"version":3,"file":"BufferReader.js","sourceRoot":"","sources":["../src/BufferReader.ts"],"names":[],"mappings":";;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;AAE1C,MAAa,YAAY;IAIvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAH9B,WAAM,GAAW,WAAW,CAAC;QACrC,uCAAuC;QAC/B,aAAQ,GAAW,OAAO,CAAC;IAEnC,CAAC;IACM,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IACM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QACjB,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,EAAE,CAAC;QACd,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QACjB,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC;QACtF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC;QACtB,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,OAAO;QACZ,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC;QACxB,IAAI,GAAG,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,GAAG,GAAG,GAAG,CAAC,CAAC;QACtB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,CAAC,CAAC;IACzD,CAAC;IACM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC;QACpE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC;QACtB,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAzCD,oCAyCC"}

@ -0,0 +1,20 @@
/// <reference types="node" />
export declare class Writer {
private buffer;
private offset;
private headerPosition;
private readonly encoding;
constructor(size?: number);
private _ensure;
addInt32(num: number): Writer;
addInt16(num: number): Writer;
addCString(string: string): Writer;
addChar(c: string): Writer;
addString(string?: string): Writer;
getByteLength(): number;
add(otherBuffer: Buffer): Writer;
clear(): void;
addHeader(code: number, last?: boolean): void;
join(code?: number): Buffer;
flush(code?: number): Buffer;
}

@ -0,0 +1,109 @@
"use strict";
//binary data writer tuned for creating
//postgres message packets as efficiently as possible by reusing the
//same buffer to avoid memcpy and limit memory allocations
Object.defineProperty(exports, "__esModule", { value: true });
class Writer {
constructor(size = 1024) {
this.offset = 5;
this.headerPosition = 0;
this.encoding = 'utf-8';
this.buffer = Buffer.alloc(size + 5);
}
_ensure(size) {
var remaining = this.buffer.length - this.offset;
if (remaining < size) {
var oldBuffer = this.buffer;
// exponential growth factor of around ~ 1.5
// https://stackoverflow.com/questions/2269063/buffer-growth-strategy
var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size;
this.buffer = Buffer.alloc(newSize);
oldBuffer.copy(this.buffer);
}
}
addInt32(num) {
this._ensure(4);
this.buffer[this.offset++] = (num >>> 24 & 0xFF);
this.buffer[this.offset++] = (num >>> 16 & 0xFF);
this.buffer[this.offset++] = (num >>> 8 & 0xFF);
this.buffer[this.offset++] = (num >>> 0 & 0xFF);
return this;
}
addInt16(num) {
this._ensure(2);
this.buffer[this.offset++] = (num >>> 8 & 0xFF);
this.buffer[this.offset++] = (num >>> 0 & 0xFF);
return this;
}
addCString(string) {
//just write a 0 for empty or null strings
if (!string) {
this._ensure(1);
}
else {
var len = Buffer.byteLength(string);
this._ensure(len + 1); //+1 for null terminator
this.buffer.write(string, this.offset, this.encoding);
this.offset += len;
}
this.buffer[this.offset++] = 0; // null terminator
return this;
}
// note: this assumes the character is 1 byte - used for writing protocol char codes
addChar(c) {
this._ensure(1);
this.buffer.write(c, this.offset);
this.offset++;
return this;
}
addString(string = "") {
var len = Buffer.byteLength(string);
this._ensure(len);
this.buffer.write(string, this.offset);
this.offset += len;
return this;
}
getByteLength() {
return this.offset - 5;
}
add(otherBuffer) {
this._ensure(otherBuffer.length);
otherBuffer.copy(this.buffer, this.offset);
this.offset += otherBuffer.length;
return this;
}
clear() {
this.offset = 5;
this.headerPosition = 0;
}
//prepends a header block to all data written since the previous header,
//or to the beginning of the buffer if this is the first data block
addHeader(code, last = false) {
var origOffset = this.offset;
this.offset = this.headerPosition;
this.buffer[this.offset++] = code;
//length is everything in this packet minus the code
this.addInt32(origOffset - (this.headerPosition + 1));
//set next header position
this.headerPosition = origOffset;
//make space for next header
this.offset = origOffset;
if (!last) {
this._ensure(5);
this.offset += 5;
}
}
join(code) {
if (code) {
this.addHeader(code, true);
}
return this.buffer.slice(code ? 0 : 5, this.offset);
}
flush(code) {
var result = this.join(code);
this.clear();
return result;
}
}
exports.Writer = Writer;
//# sourceMappingURL=BufferWriter.js.map
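
A standalone illustration of the growth rule in _ensure above: when the buffer is too small it grows to roughly 1.5 times its current length plus the bytes still needed, so repeated small writes amortise to few reallocations.

// The same arithmetic as _ensure, shown by itself (illustrative only).
function grownSize(oldLength, needed) {
  return oldLength + (oldLength >> 1) + needed
}

console.log(grownSize(1029, 4))    // 1547: small writes leave generous slack
console.log(grownSize(1029, 4096)) // 5639: a large write always fits at once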

@ -0,0 +1 @@
{"version":3,"file":"BufferWriter.js","sourceRoot":"","sources":["../src/BufferWriter.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,oEAAoE;AACpE,0DAA0D;;AAE1D,MAAa,MAAM;IAKjB,YAAY,OAAe,IAAI;QAHvB,WAAM,GAAW,CAAC,CAAC;QACnB,mBAAc,GAAW,CAAC,CAAC;QAClB,aAAQ,GAAG,OAAO,CAAC;QAElC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,CAAA;IACtC,CAAC;IAEO,OAAO,CAAC,IAAY;QAC1B,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QACjD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC;YAC5B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC;YAChE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACpC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAChB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,GAAG,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,GAAG,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAChB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,0CAA0C;QAC1C,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;SACjB;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;YACpC,IAAI,CAAC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,wBAAwB;YAC/C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;YACrD,IAAI,CAAC,MAAM,IAAI,GAAG,CAAC;SACpB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,kBAAkB;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,+EAA+E;IACxE,OAAO,CAAC,CAAS;QACtB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAClC,IAAI,CAAC,MAAM,EAAE,CAAC;QACd,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QACpC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAClB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAC;QACnB,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,aAAa;QAClB,OAAO,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;IACzB,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACjC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAC;QAClC,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,KAAK;QACV,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;QAChB,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;IAC1B,CAAC;IAED,+DAA+D;IAC/D,uEAAuE;IAChE,SAAS,CAAC,IAAY,EAAE,OAAgB,KAAK;QAClD,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC;QAC7B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,cAAc,CAAC;QAClC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,CAAC;QAClC,oDAAoD;QACpD,IAAI,CAAC,QAAQ,CAAC,UAAU,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;QACtD,0BAA0B;QAC1B,IAAI,CAAC,cAAc,GAAG,UAAU,CAAC;QACjC,4BAA4B;QAC5B,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;QACzB,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAChB,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;SAClB;IACH,CAAC;IAEM,IAAI,CAAC,IAAa;QACvB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;SAC5B;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAA
M,CAAC,CAAC;IACtD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC7B,IAAI,CAAC,KAAK,EAAE,CAAC;QACb,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAlHD,wBAkHC"}

@ -0,0 +1 @@
export {};

@ -0,0 +1,25 @@
"use strict";
// file for microbenchmarking
Object.defineProperty(exports, "__esModule", { value: true });
const buffer_writer_1 = require("./buffer-writer");
const buffer_reader_1 = require("./buffer-reader");
const LOOPS = 1000;
let count = 0;
let start = Date.now();
const writer = new buffer_writer_1.Writer();
const reader = new buffer_reader_1.BufferReader();
const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]);
const run = () => {
if (count > LOOPS) {
console.log(Date.now() - start);
return;
}
count++;
for (let i = 0; i < LOOPS; i++) {
reader.setBuffer(0, buffer);
reader.cstring();
}
setImmediate(run);
};
run();
//# sourceMappingURL=b.js.map

@ -0,0 +1 @@
{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAAwC;AAExC,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,IAAI,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;AACtB,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QAC/B,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"}

@ -0,0 +1,14 @@
/// <reference types="node" />
export declare class BufferReader {
private offset;
private buffer;
private encoding;
constructor(offset?: number);
setBuffer(offset: number, buffer: Buffer): void;
int16(): number;
byte(): number;
int32(): number;
string(length: number): string;
cstring(): string;
bytes(length: number): Buffer;
}

@ -0,0 +1,49 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const emptyBuffer = Buffer.allocUnsafe(0);
class BufferReader {
constructor(offset = 0) {
this.offset = offset;
this.buffer = emptyBuffer;
// TODO(bmc): support non-utf8 encoding?
this.encoding = 'utf-8';
}
setBuffer(offset, buffer) {
this.offset = offset;
this.buffer = buffer;
}
int16() {
const result = this.buffer.readInt16BE(this.offset);
this.offset += 2;
return result;
}
byte() {
const result = this.buffer[this.offset];
this.offset++;
return result;
}
int32() {
const result = this.buffer.readInt32BE(this.offset);
this.offset += 4;
return result;
}
string(length) {
const result = this.buffer.toString(this.encoding, this.offset, this.offset + length);
this.offset += length;
return result;
}
cstring() {
const start = this.offset;
let end = start;
while (this.buffer[end++] !== 0) { }
this.offset = end;
return this.buffer.toString(this.encoding, start, end - 1);
}
bytes(length) {
const result = this.buffer.slice(this.offset, this.offset + length);
this.offset += length;
return result;
}
}
exports.BufferReader = BufferReader;
//# sourceMappingURL=buffer-reader.js.map
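
What cstring() above does, restated as a tiny standalone snippet: scan forward to the next NUL byte, decode everything before it, and leave the offset just past the terminator.

const buf = Buffer.from('user\u0000postgres\u0000', 'utf8')
let offset = 0

function cstring() {
  const end = buf.indexOf(0, offset)          // position of the NUL terminator
  const result = buf.toString('utf-8', offset, end)
  offset = end + 1                            // skip past the terminator
  return result
}

console.log(cstring()) // 'user'
console.log(cstring()) // 'postgres'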

@ -0,0 +1 @@
{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAEzC,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,WAAW,CAAA;QAEpC,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlDD,oCAkDC"}

@ -0,0 +1,16 @@
/// <reference types="node" />
export declare class Writer {
private size;
private buffer;
private offset;
private headerPosition;
constructor(size?: number);
private ensure;
addInt32(num: number): Writer;
addInt16(num: number): Writer;
addCString(string: string): Writer;
addString(string?: string): Writer;
add(otherBuffer: Buffer): Writer;
private join;
flush(code?: number): Buffer;
}

@ -0,0 +1,80 @@
"use strict";
//binary data writer tuned for encoding binary specific to the postgres binary protocol
Object.defineProperty(exports, "__esModule", { value: true });
class Writer {
constructor(size = 256) {
this.size = size;
this.offset = 5;
this.headerPosition = 0;
this.buffer = Buffer.alloc(size);
}
ensure(size) {
var remaining = this.buffer.length - this.offset;
if (remaining < size) {
var oldBuffer = this.buffer;
// exponential growth factor of around ~ 1.5
// https://stackoverflow.com/questions/2269063/buffer-growth-strategy
var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size;
this.buffer = Buffer.alloc(newSize);
oldBuffer.copy(this.buffer);
}
}
addInt32(num) {
this.ensure(4);
this.buffer[this.offset++] = (num >>> 24) & 0xff;
this.buffer[this.offset++] = (num >>> 16) & 0xff;
this.buffer[this.offset++] = (num >>> 8) & 0xff;
this.buffer[this.offset++] = (num >>> 0) & 0xff;
return this;
}
addInt16(num) {
this.ensure(2);
this.buffer[this.offset++] = (num >>> 8) & 0xff;
this.buffer[this.offset++] = (num >>> 0) & 0xff;
return this;
}
addCString(string) {
if (!string) {
this.ensure(1);
}
else {
var len = Buffer.byteLength(string);
this.ensure(len + 1); // +1 for null terminator
this.buffer.write(string, this.offset, 'utf-8');
this.offset += len;
}
this.buffer[this.offset++] = 0; // null terminator
return this;
}
addString(string = '') {
var len = Buffer.byteLength(string);
this.ensure(len);
this.buffer.write(string, this.offset);
this.offset += len;
return this;
}
add(otherBuffer) {
this.ensure(otherBuffer.length);
otherBuffer.copy(this.buffer, this.offset);
this.offset += otherBuffer.length;
return this;
}
join(code) {
if (code) {
this.buffer[this.headerPosition] = code;
//length is everything in this packet minus the code
const length = this.offset - (this.headerPosition + 1);
this.buffer.writeInt32BE(length, this.headerPosition + 1);
}
return this.buffer.slice(code ? 0 : 5, this.offset);
}
flush(code) {
var result = this.join(code);
this.offset = 5;
this.headerPosition = 0;
this.buffer = Buffer.allocUnsafe(this.size);
return result;
}
}
exports.Writer = Writer;
//# sourceMappingURL=buffer-writer.js.map

@ -0,0 +1 @@
{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAClC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAChD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC3B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YAC/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACnC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACnC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"}

@ -0,0 +1,22 @@
/**
* Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* README.md file in the root directory of this source tree.
*/
/// <reference types="node" />
declare var net: any;
declare var EventEmitter: any;
declare var util: any;
declare var Writer: any;
declare const parse: any;
declare var warnDeprecation: any;
declare var TEXT_MODE: number;
declare class Connection extends EventEmitter {
constructor(config: any);
}
declare var emptyBuffer: Buffer;
declare const flushBuffer: Buffer;
declare const syncBuffer: Buffer;
declare const END_BUFFER: Buffer;

@ -0,0 +1,311 @@
"use strict";
/**
* Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com)
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* README.md file in the root directory of this source tree.
*/
var net = require('net');
var EventEmitter = require('events').EventEmitter;
var util = require('util');
var Writer = require('buffer-writer');
// eslint-disable-next-line
const { parse } = require('pg-packet-stream');
var warnDeprecation = require('./compat/warn-deprecation');
var TEXT_MODE = 0;
class Connection extends EventEmitter {
constructor(config) {
super();
config = config || {};
this.stream = config.stream || new net.Socket();
this.stream.setNoDelay(true);
this._keepAlive = config.keepAlive;
this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis;
this.lastBuffer = false;
this.lastOffset = 0;
this.buffer = null;
this.offset = null;
this.encoding = config.encoding || 'utf8';
this.parsedStatements = {};
this.writer = new Writer();
this.ssl = config.ssl || false;
this._ending = false;
this._mode = TEXT_MODE;
this._emitMessage = false;
var self = this;
this.on('newListener', function (eventName) {
if (eventName === 'message') {
self._emitMessage = true;
}
});
}
}
Connection.prototype.connect = function (port, host) {
var self = this;
if (this.stream.readyState === 'closed') {
this.stream.connect(port, host);
}
else if (this.stream.readyState === 'open') {
this.emit('connect');
}
this.stream.on('connect', function () {
if (self._keepAlive) {
self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis);
}
self.emit('connect');
});
const reportStreamError = function (error) {
// errors about disconnections should be ignored during disconnect
if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) {
return;
}
self.emit('error', error);
};
this.stream.on('error', reportStreamError);
this.stream.on('close', function () {
self.emit('end');
});
if (!this.ssl) {
return this.attachListeners(this.stream);
}
this.stream.once('data', function (buffer) {
var responseCode = buffer.toString('utf8');
switch (responseCode) {
case 'N': // Server does not support SSL connections
return self.emit('error', new Error('The server does not support SSL connections'));
case 'S': // Server supports SSL connections, continue with a secure connection
break;
default:
// Any other response byte, including 'E' (ErrorResponse) indicating a server error
return self.emit('error', new Error('There was an error establishing an SSL connection'));
}
var tls = require('tls');
const options = Object.assign({
socket: self.stream,
}, self.ssl);
if (net.isIP(host) === 0) {
options.servername = host;
}
self.stream = tls.connect(options);
self.attachListeners(self.stream);
self.stream.on('error', reportStreamError);
self.emit('sslconnect');
});
};
Connection.prototype.attachListeners = function (stream) {
// TODO(bmc): support binary
const mode = this._mode === TEXT_MODE ? 'text' : 'binary';
parse(this.stream, (msg) => {
var eventName = msg.name === 'error' ? 'errorMessage' : msg.name;
this.emit(eventName, msg);
});
this.stream.on('end', () => this.emit('end'));
};
Connection.prototype.requestSsl = function () {
var bodyBuffer = this.writer
.addInt16(0x04d2)
.addInt16(0x162f)
.flush();
var length = bodyBuffer.length + 4;
var buffer = new Writer()
.addInt32(length)
.add(bodyBuffer)
.join();
this.stream.write(buffer);
};
Connection.prototype.startup = function (config) {
var writer = this.writer.addInt16(3).addInt16(0);
Object.keys(config).forEach(function (key) {
var val = config[key];
writer.addCString(key).addCString(val);
});
writer.addCString('client_encoding').addCString("'utf-8'");
var bodyBuffer = writer.addCString('').flush();
// this message is sent without a code
var length = bodyBuffer.length + 4;
var buffer = new Writer()
.addInt32(length)
.add(bodyBuffer)
.join();
this.stream.write(buffer);
};
Connection.prototype.cancel = function (processID, secretKey) {
var bodyBuffer = this.writer
.addInt16(1234)
.addInt16(5678)
.addInt32(processID)
.addInt32(secretKey)
.flush();
var length = bodyBuffer.length + 4;
var buffer = new Writer()
.addInt32(length)
.add(bodyBuffer)
.join();
this.stream.write(buffer);
};
Connection.prototype.password = function (password) {
// 0x70 = 'p'
this._send(0x70, this.writer.addCString(password));
};
Connection.prototype.sendSASLInitialResponseMessage = function (mechanism, initialResponse) {
// 0x70 = 'p'
this.writer
.addCString(mechanism)
.addInt32(Buffer.byteLength(initialResponse))
.addString(initialResponse);
this._send(0x70);
};
Connection.prototype.sendSCRAMClientFinalMessage = function (additionalData) {
// 0x70 = 'p'
this.writer.addString(additionalData);
this._send(0x70);
};
Connection.prototype._send = function (code, more) {
if (!this.stream.writable) {
return false;
}
return this.stream.write(this.writer.flush(code));
};
Connection.prototype.query = function (text) {
// 0x51 = Q
this.stream.write(this.writer.addCString(text).flush(0x51));
};
// send parse message
Connection.prototype.parse = function (query) {
// expect something like this:
// { name: 'queryName',
// text: 'select * from blah',
// types: ['int8', 'bool'] }
// normalize missing query names to allow for null
query.name = query.name || '';
if (query.name.length > 63) {
/* eslint-disable no-console */
console.error('Warning! Postgres only supports 63 characters for query names.');
console.error('You supplied %s (%s)', query.name, query.name.length);
console.error('This can cause conflicts and silent errors executing queries');
/* eslint-enable no-console */
}
// normalize null type array
query.types = query.types || [];
var len = query.types.length;
var buffer = this.writer
.addCString(query.name) // name of query
.addCString(query.text) // actual query text
.addInt16(len);
for (var i = 0; i < len; i++) {
buffer.addInt32(query.types[i]);
}
var code = 0x50;
this._send(code);
this.flush();
};
// send bind message
// "more" === true to buffer the message until flush() is called
Connection.prototype.bind = function (config) {
// normalize config
config = config || {};
config.portal = config.portal || '';
config.statement = config.statement || '';
config.binary = config.binary || false;
var values = config.values || [];
var len = values.length;
var useBinary = false;
for (var j = 0; j < len; j++) {
useBinary = useBinary || values[j] instanceof Buffer;
}
var buffer = this.writer.addCString(config.portal).addCString(config.statement);
if (!useBinary) {
buffer.addInt16(0);
}
else {
buffer.addInt16(len);
for (j = 0; j < len; j++) {
buffer.addInt16(values[j] instanceof Buffer);
}
}
buffer.addInt16(len);
for (var i = 0; i < len; i++) {
var val = values[i];
if (val === null || typeof val === 'undefined') {
buffer.addInt32(-1);
}
else if (val instanceof Buffer) {
buffer.addInt32(val.length);
buffer.add(val);
}
else {
buffer.addInt32(Buffer.byteLength(val));
buffer.addString(val);
}
}
if (config.binary) {
buffer.addInt16(1); // format codes to use binary
buffer.addInt16(1);
}
else {
buffer.addInt16(0); // format codes to use text
}
// 0x42 = 'B'
this._send(0x42);
this.flush();
};
// send execute message
// "more" === true to buffer the message until flush() is called
Connection.prototype.execute = function (config) {
config = config || {};
config.portal = config.portal || '';
config.rows = config.rows || '';
this.writer.addCString(config.portal).addInt32(config.rows);
// 0x45 = 'E'
this._send(0x45);
this.flush();
};
var emptyBuffer = Buffer.alloc(0);
const flushBuffer = Buffer.from([0x48, 0x00, 0x00, 0x00, 0x04]);
Connection.prototype.flush = function () {
if (this.stream.writable) {
this.stream.write(flushBuffer);
}
};
const syncBuffer = Buffer.from([0x53, 0x00, 0x00, 0x00, 0x04]);
Connection.prototype.sync = function () {
this._ending = true;
// clear out any pending data in the writer
this.writer.clear();
if (this.stream.writable) {
this.stream.write(syncBuffer);
this.stream.write(flushBuffer);
}
};
const END_BUFFER = Buffer.from([0x58, 0x00, 0x00, 0x00, 0x04]);
Connection.prototype.end = function () {
// 0x58 = 'X'
this.writer.clear();
this._ending = true;
return this.stream.write(END_BUFFER, () => {
this.stream.end();
});
};
Connection.prototype.close = function (msg) {
this.writer.addCString(msg.type + (msg.name || ''));
this._send(0x43);
};
Connection.prototype.describe = function (msg) {
this.writer.addCString(msg.type + (msg.name || ''));
this._send(0x44);
this.flush();
};
Connection.prototype.sendCopyFromChunk = function (chunk) {
this.stream.write(this.writer.add(chunk).flush(0x64));
};
Connection.prototype.endCopyFrom = function () {
this.stream.write(this.writer.add(emptyBuffer).flush(0x63));
};
Connection.prototype.sendCopyFail = function (msg) {
// this.stream.write(this.writer.add(emptyBuffer).flush(0x66));
this.writer.addCString(msg);
this._send(0x66);
};
module.exports = Connection;
//# sourceMappingURL=connection.js.map
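
For orientation, the byte layout produced by the simple-query path above (writer.addCString(text).flush(0x51)): one message-type byte 'Q', a 4-byte big-endian length that counts itself plus the payload but not the type byte, then the NUL-terminated query text. A hand-rolled sketch:

const text = 'SELECT 1'
const payload = Buffer.concat([Buffer.from(text, 'utf8'), Buffer.from([0])])
const packet = Buffer.alloc(1 + 4 + payload.length)

packet[0] = 0x51                           // 'Q' = simple Query message
packet.writeInt32BE(4 + payload.length, 1) // length field includes itself
payload.copy(packet, 5)

console.log(packet)
// <Buffer 51 00 00 00 0d 53 45 4c 45 43 54 20 31 00>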

Some files were not shown because too many files have changed in this diff.
