diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..1923d41
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,8 @@
+root = true
+
+[*]
+indent_style = space
+indent_size = 2
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
diff --git a/.eslintignore b/.eslintignore
new file mode 100644
index 0000000..4ebc8ae
--- /dev/null
+++ b/.eslintignore
@@ -0,0 +1 @@
+coverage
diff --git a/.gitignore b/.gitignore
index 5148e52..daa5a20 100644
--- a/.gitignore
+++ b/.gitignore
@@ -35,3 +35,9 @@ jspm_packages
# Optional REPL history
.node_repl_history
+
+# goodparts symlink
+.eslintrc.js
+
+# environment variables
+*.env
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..3f91ac9
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,12 @@
+language: node_js
+node_js:
+ - "4"
+ - "6"
+services:
+ - postgresql
+before_script:
+ - psql -c 'create database testdb;' -U postgres
+before_install:
+ - pip install --user codecov
+after_success:
+ - codecov --file coverage/lcov.info --disable search
diff --git a/README.md b/README.md
index 5ddf854..a6b7346 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,192 @@
# joi-postgresql
A little experiment in defining models in Joi and creating PostgreSQL Tables
+
+[![Build Status](https://travis-ci.org/dwyl/joi-postgresql.svg?branch=master)](https://travis-ci.org/dwyl/joi-postgresql)
+[![codecov](https://codecov.io/gh/dwyl/joi-postgresql/branch/master/graph/badge.svg)](https://codecov.io/gh/dwyl/joi-postgresql)
+[![Code Climate](https://codeclimate.com/github/dwyl/joi-postgresql/badges/gpa.svg)](https://codeclimate.com/github/dwyl/joi-postgresql)
+[![dependencies Status](https://david-dm.org/dwyl/joi-postgresql/status.svg)](https://david-dm.org/dwyl/joi-postgresql)
+[![devDependencies Status](https://david-dm.org/dwyl/joi-postgresql/dev-status.svg)](https://david-dm.org/dwyl/joi-postgresql?type=dev)
+[![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/dwyl/joi-postgresql/issues)
+
+## abase-db
+
+### What?
+abase-db is a [hapi](https://github.com/hapijs/hapi) plugin that provides an easy way to set up postgres database tables and perform CRUD operations by declaring a schema object which is heavily influenced by [joi](https://github.com/hapijs/joi).
+
+It can be used alone but is most powerful when used as part of [abase](https://github.com/dwyl/abase) or with your select few abase plugins.
+
+Note if you are totally new to Hapi.js see: https://github.com/dwyl/learn-hapi
+And/or if you are new to postgres check out: https://github.com/dwyl/learn-postgresql
+
+### Why?
+
+From a joi schema we should be able to infer many things about the fields in a database. `abase-db` provides the mapping between a config (inspired by joi schema) to commands that can create tables with the correct fields.
+
+We also want a "plug and play" access and easy to use handlers to perform CRUD operations and `abase-db` offers this without having to worry about any postgres querying.
+
+For more understanding of *why* see the parent module [abase](https://github.com/dwyl/abase) as this provides just the db part.
+
+> #### Why PostgreSQL?
+
+> While there is a lot of hype surrounding NoSQL Databases like MongoDB & Redis, we found we were having to write a lot of code to do useful queries. And while de-normalising data might "make sense" for "scalability" in theory, what we found in practice is that even with 100 Billion Records (way more users than 99.99% of companies/startups!) a well-managed PostgreSQL cluster copes very well.
+
+> Make up your own mind: https://www.postgresql.org/about
+If you're still curious or worried about scaling PostgreSQL, see: https://www.citusdata.com. Want to model the network of people as a graph? https://github.com/cayleygraph/cayley
+
+### How?
+
+1. Install `npm install abase-db --save`
+2. Write a schema for your tables like so:
+```js
+ var schema = {
+ tableName: 'users',
+ fields: {
+ name: { type: 'string' }
+ }
+ }
+```
+3. Run a database remotely or locally (see [here](https://github.com/dwyl/learn-postgresql) for how) and acquire db url or connection object.
+4. Create options object of the form:
+```js
+ var options = {
+ dbConnection: process.env.DATABASE_URL,
+ schema: dbSchema
+ };
+```
+5. Plugin
+```js
+server.register([
+ { register: require('abase-db'), options: options }
+], function () {
+ server.route(routes);
+ server.start();
+});
+```
+6. Play
+```js
+handler: function (request, reply) {
+ return request.abase.db.insert(
+ { tableName: 'users', fields: request.payload },
+ function () { return reply('OK') }
+ );
+}
+```
+7. Play without hapi. See API section below.
+
+### API
+
+#### Plugin: `require('abase-db')`
+
+##### Registration
+When registered with Hapi takes options of the form:
+```
+ { dbConnection, schema }
+```
+###### dbConnection
+Either provide a database url and we'll do the rest or an object that can be used to configure a pooled connection with [node-pg](https://github.com/brianc/node-postgres#client-pooling).
+###### Schema
+
+The schema is in line with the requirements made by [abase](https://github.com/dwyl/abase) and as stated before is inspired by joi and will try to provide a one to one mapping.
+
+The schema must be an object (or an array of objects for multiple tables) of the form: `{ tableName, fields }`.
+
+`fields` is of the form `{ [fieldName]: { type, rest: optional } }`
+
+Table and field names must be valid postgres table and column names. (non empty, alphanumeric, no leading number, less than 64)
+
+Each field must be given a type prop. Data/joi types we support:
+
+| Joi type (type prop for field)| Postgres type | Notes |
+|---|---|---|
+| `date` | `DATE` or `TIMESTAMP` | set `timestamp: true` for latter |
+| `number` | `DOUBLE PRECISION` or `BIGINT` | set `integer: true` for latter |
+| `string` | `VARCHAR(80 or max)` | `80` default, set `max: 123` as you like for more/less |
+| `boolean` | `BOOLEAN` | |
+| `id` | VARCHAR(36) | **warning** if using this type do not add this field to your insert, we will generate an id on each insertion (Generated with [aguid](https://github.com/dwyl/aguid)) |
+
+More information can be inferred from `lib/config_validator.js`
+
+Each field can also take more properties most of which will be used by other abase modules and have no effect but the ones we care about right now are.
+
+| Property | Notes |
+|---|---|
+| `unique` | set to `true` if you want column unique |
+| `primaryKey` | set to `true` if you want this field to act as your primary key (note only one field allowed!) |
+| `max`, `timestamp`, `integer` | see types table above for relevance |
+
+##### Under the hood
+
+###### Table Set Up
+With given database and schema, on initialisation of plugin, we will create all necessary tables if they don't already exist.
+
+This will only therefore happen if starting server for the first time, or if a new table is added to the schema.
+
+**Unfortunately** if you want to modify a table's schema you will have to drop the whole table to have the database reconfigured on start up. We look to find a nice process for this in the future if you want to update your tables with new columns.
+
+###### Request decoration
+
+Each request will have the db handlers `insert`, `select`, `update`, `delete`. They all have clients attached and ready to go.
+
+They can be accessed like so: `request.abase.db.insert`.
+
+They are all of the form `function(options, callback = optional)` and return promises if no callback given.
+
+The `options` object must contain `tableName`, i.e. the table you want to operate on. Below are more details for properties of options.
+
+| Property | Used in | Notes |
+| --- | --- | --- |
+| `fields` | `insert`, `update` | Object with field names and values corresponding to the schema provided |
+| `select` | `select` | array of keys which want to be retrieved, if not present defaults to all columns |
+| `where` | `select`, `update`, `delete` | object with field names and values that must match by equality (would like inequality in future) |
+
+###### Server decoration
+
+The hapi server will be given a method `endAbaseDb` of the form `function (callback)` which can be called to close the pool connection.
+
+##### use
+
+#### validate: `require('abase-db').validate`
+
+Helper that you can use to check your schema outside of hapi. Takes in a schema object and will throw if it fails.
+
+#### createClient: `require('abase-db').createClient`
+
+Helper of the form `function(dbConnection)` to create a single node-pg client that is configured in the same way as how you provide your dbConnection above.
+
+#### handlers: `require('abase-db').handlers`
+
+Object with methods `insert`, `select`, `update`, `delete`, `init`, `flush`.
+
+They all have form `function(client, schema, options, cb)` so you will have to bind your own client.
+
+Crud operation documented above.
+
+##### init
+Used at plugin registration takes same schema and doesn't use options arg.
+
+##### flush
+Used to drop tables easily. If given options arg will delete on a table by table basis but if left out will delete all tables in schema.
+options takes the form `{tableName}`.
+
+### Examples and tests
+
+#### setup
+
+For examples and tests you will need a `config.env` file at the root of your project with a test database url like so:
+```
+TEST_DATABASE_URL=psql://localhost:5432/testdb
+```
+
+Note: this database must be running and before tests are run the tables may be removed from the database so don't keep anything important there.
+
+#### Simple example
+
+To see a simple example in action type `npm run example` into your command line.
+
+### Questions and Suggestions
+
+We hope you find this module useful!
+
+If you need something cleared up, have any requests or want to offer any improvements then please create an issue or better yet a PR!
+
+Note: We are aware that not all postgres features may be supported yet. This module will need a few iterations so please suggest missing features to be implemented as you use it and we can hopefully work together to solve it.
diff --git a/example/index.js b/example/index.js
new file mode 100644
index 0000000..64a534d
--- /dev/null
+++ b/example/index.js
@@ -0,0 +1,34 @@
+'use strict';
+
+var env = require('env2')('config.env'); // eslint-disable-line
+
+var Hapi = require('hapi');
+var hoek = require('hoek');
+var AbaseDb = require('../lib/');
+var routes = require('./routes.js');
+var dbSchema = require('./schema.js');
+
+var server = new Hapi.Server();
+
+var abaseDbOptions = {
+ dbConnection: process.env.TEST_DATABASE_URL,
+ schema: dbSchema
+};
+
+server.connection({ port: 8000 });
+
+server.register([
+ { register: AbaseDb, options: abaseDbOptions }
+], function (err) {
+ hoek.assert(!err, err);
+
+ server.route(routes);
+
+ server.start(function (error) {
+ hoek.assert(!error, error);
+
+ console.log('Visit: http://localhost:' + server.info.port + '/'); // eslint-disable-line
+ });
+});
+
+module.exports = server;
diff --git a/example/routes.js b/example/routes.js
new file mode 100644
index 0000000..8f43651
--- /dev/null
+++ b/example/routes.js
@@ -0,0 +1,72 @@
+'use strict';
+
+var newPost = '
'
+;
+
+function existingPost (post) {
+ var id = post.id;
+ var title = post.title;
+ var body = post.body;
+
+ return ''
+ + '
'
+ ;
+}
+
+module.exports = [{
+ method: 'GET',
+ path: '/',
+ handler: function (request, reply) {
+ return request.abase.db.select({ tableName: 'posts' }, function (_, data) {
+ var sortedRows = data.rows.sort(function (a, b) {
+ return a.id > b.id;
+ });
+
+ return reply(newPost + sortedRows.map(existingPost).join('
'));
+ });
+ }
+}, {
+ method: 'POST',
+ path: '/new',
+ handler: function (request, reply) {
+ var id = Date.now();
+ var fields = Object.assign({ id: id }, request.payload);
+
+ return request.abase.db.insert(
+ { tableName: 'posts', fields: fields },
+ function () { return reply.redirect('/') }
+ );
+ }
+}, {
+ method: 'GET',
+ path: '/delete/{id}',
+ handler: function (request, reply) {
+ var id = request.params.id;
+
+ return request.abase.db.delete(
+ { tableName: 'posts', where: { id: id } },
+ function () { return reply.redirect('/') }
+ );
+ }
+}, {
+ method: 'POST',
+ path: '/update/{id}',
+ handler: function (request, reply) {
+ var id = request.params.id;
+
+ return request.abase.db.update(
+ { tableName: 'posts', where: { id: id }, fields: request.payload },
+ function () { return reply.redirect('/') }
+ );
+ }
+}];
diff --git a/example/schema.js b/example/schema.js
new file mode 100644
index 0000000..68ac0dc
--- /dev/null
+++ b/example/schema.js
@@ -0,0 +1,10 @@
+'use strict';
+
+module.exports = {
+ tableName: 'posts',
+ fields: {
+ title: { type: 'string' },
+ body: { type: 'string' },
+ id: { type: 'number', integer: true }
+ }
+};
diff --git a/example_schema.js b/example_schema.js
new file mode 100644
index 0000000..f557b94
--- /dev/null
+++ b/example_schema.js
@@ -0,0 +1,19 @@
+'use strict';
+
+module.exports = {
+ tableName: 'user_data',
+ fields: {
+ email: {
+ type: 'string',
+ email: true
+ },
+ username: {
+ type: 'string',
+ min: 3,
+ max: 20,
+ unique: true
+ },
+ dob: { type: 'date' },
+ id: { type: 'id' }
+ }
+};
diff --git a/lib/config_validator.js b/lib/config_validator.js
new file mode 100644
index 0000000..8bf4812
--- /dev/null
+++ b/lib/config_validator.js
@@ -0,0 +1,34 @@
+'use strict';
+
+var Joi = require('joi');
+
+var mapObj = require('./create_table_map.js').mapObj;
+
+// non empty, alphanumeric, no leading number, less than 64
+var dbNameRegEx = /^[A-Za-z_]\w{0,62}$/;
+var fieldTypes = Object.keys(mapObj);
+
+var typeSchema = Joi.any()
+ .valid(fieldTypes)
+ .required()
+;
+var fieldSchema = Joi.object()
+ .keys({ type: typeSchema })
+ .unknown()
+;
+var tableSchema = Joi.object().keys({
+ tableName: Joi.string()
+ .regex(dbNameRegEx)
+ .required(),
+ fields: Joi.object()
+ .pattern(dbNameRegEx, fieldSchema)
+ .required()
+});
+
+var configSchema = [tableSchema, Joi.array().items(tableSchema)];
+
+module.exports = function (config) {
+ return Joi.assert(config, configSchema);
+};
+
+module.exports.dbNameRegEx = dbNameRegEx;
diff --git a/lib/create_table_map.js b/lib/create_table_map.js
new file mode 100644
index 0000000..85e0d72
--- /dev/null
+++ b/lib/create_table_map.js
@@ -0,0 +1,38 @@
+'use strict';
+
+var mapObj = {
+ id: function () {
+ return 'VARCHAR(36)';
+ },
+ number: function (opts) {
+ return opts.integer ? 'BIGINT' : 'DOUBLE PRECISION';
+ },
+ string: function (opts) {
+ var length = opts.max || 80;
+
+ return 'VARCHAR(' + length + ')';
+ },
+ boolean: function () {
+ return 'BOOLEAN';
+ },
+ date: function (opts) {
+ return opts.timestamp ? 'TIMESTAMP' : 'DATE';
+ }
+};
+
+function mapper (name, type, options, tableName) {
+ var opts = options || {};
+ var constraints = '';
+
+ if (opts.primaryKey) {
+ constraints += ' CONSTRAINT ' + tableName + '_pk PRIMARY KEY';
+ }
+ if (opts.unique) {
+ constraints += ' CONSTRAINT ' + tableName + '_' + name + '_unique UNIQUE';
+ }
+
+ return name + ' ' + mapObj[type](opts) + constraints;
+}
+
+module.exports = mapper;
+module.exports.mapObj = mapObj;
diff --git a/lib/db_handlers.js b/lib/db_handlers.js
new file mode 100644
index 0000000..006df70
--- /dev/null
+++ b/lib/db_handlers.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var sqlGen = require('./sql_gen.js');
+
+var methods = {};
+
+function multipleQuery (client, queries, cb) {
+ function nextQuery () {
+ var last = queries.length === 1;
+
+ return client.query(queries.pop(), !last ? nextQuery : cb);
+ }
+
+ return nextQuery();
+}
+
+methods.init = function (client, config, _, cb) {
+ var tables = [].concat(config);
+ var queries = tables.map(sqlGen.init);
+
+ return multipleQuery(client, queries, cb);
+};
+
+methods.flush = function (client, config, options, cb) {
+ var tables = [].concat(options || config);
+ var queries = tables.map(sqlGen.dropTable);
+
+ return multipleQuery(client, queries, cb);
+};
+
+['select', 'update', 'delete', 'insert'].forEach(function (method) {
+ methods[method] = function (client, config, options, cb) {
+ var args = sqlGen[method](config, options).concat([cb]);
+
+ return client.query.apply(client, args);
+ };
+});
+
+module.exports = methods;
diff --git a/lib/index.js b/lib/index.js
new file mode 100644
index 0000000..71a876a
--- /dev/null
+++ b/lib/index.js
@@ -0,0 +1,51 @@
+/*
+ * Abase DB plugin
+ *
+ * Accepts path the schema defining the user model in the plugin options,
+ * or relies on the schema attached to the server settings object.
+ *
+ * Provides database helper functions to do schema-compatible CRUD operations.
+ * Attaches these methods to the request object at the pre-handler lifecycle
+ * point.
+ */
+'use strict';
+
+var pg = require('pg');
+
+var parseOptions = require('./parse_options.js');
+var instantiateDb = require('./instantiate_db.js');
+var configValidator = require('./config_validator.js');
+var handlers = require('./db_handlers.js');
+
+exports.register = function (server, options, next) {
+ var schema = parseOptions.schema(options, server);
+ var connection = parseOptions.dbConfig(options);
+ var pool = new pg.Pool(connection);
+
+ configValidator(schema);
+
+ return instantiateDb(pool, schema, function (dbErr, db) {
+ server.ext('onPreHandler', function (request, reply) {
+ request.abase = { db: db };
+ reply.continue();
+ });
+
+ server.decorate('server', 'endAbaseDb', function (cb) {
+ pool.end(cb);
+ });
+
+ return next(dbErr);
+ });
+};
+
+exports.register.attributes = { name: 'abase-db' };
+
+exports.handlers = handlers;
+
+exports.validate = configValidator;
+
+exports.createClient = function (dbConnection) {
+ var connection = parseOptions.dbConfig({ dbConnection: dbConnection });
+
+ return new pg.Client(connection);
+};
diff --git a/lib/instantiate_db.js b/lib/instantiate_db.js
new file mode 100644
index 0000000..9244197
--- /dev/null
+++ b/lib/instantiate_db.js
@@ -0,0 +1,43 @@
+'use strict';
+
+var handlers = require('./db_handlers.js');
+
+var exposedHandlers = ['select', 'update', 'delete', 'insert'];
+
+function bindPoolClient (schema, handler, pool) {
+ return function (options, cb) {
+ return pool.connect()
+ .then(function (client) {
+ return handlers[handler](client, schema, options)
+ .then(function (result) {
+ client.release();
+
+ return cb ? cb(null, result) : result;
+ })
+ .catch(function (err) {
+ client.release();
+
+ return cb ? cb(err) : null;
+ })
+ ;
+ })
+ .catch(function (err) {
+ return cb ? cb(err) : null;
+ })
+ ;
+ };
+}
+
+function bindHandlers (pool, schema) {
+ return exposedHandlers.reduce(function (acc, handler) {
+ acc[handler] = bindPoolClient(schema, handler, pool);
+
+ return acc;
+ }, {});
+}
+
+module.exports = function (pool, schema, callback) {
+ return bindPoolClient(schema, 'init', pool)(null, function (err) {
+ return callback(err, bindHandlers(pool, schema));
+ });
+};
diff --git a/lib/parse_options.js b/lib/parse_options.js
new file mode 100644
index 0000000..1b71f6e
--- /dev/null
+++ b/lib/parse_options.js
@@ -0,0 +1,30 @@
+'use strict';
+
+var url = require('url');
+
+exports.schema = function (options, server) {
+ return server.app.abase
+ || options.schema
+ || require(options.schemaPath) // eslint-disable-line
+ ;
+};
+
+
+exports.dbConfig = function (options) {
+ var parsed;
+ var dbConnection = options.dbConnection;
+
+ if (typeof dbConnection === 'string') {
+ parsed = url.parse(dbConnection);
+
+ return {
+ host: parsed.hostname,
+ port: parsed.port,
+ database: parsed.pathname.split('/')[1],
+ user: (parsed.auth || '').split(':')[0],
+ password: (parsed.auth || '').split(':')[1]
+ };
+ }
+
+ return dbConnection;
+};
diff --git a/lib/sql_gen.js b/lib/sql_gen.js
new file mode 100644
index 0000000..b3d3d3a
--- /dev/null
+++ b/lib/sql_gen.js
@@ -0,0 +1,142 @@
+'use strict';
+
+var aguid = require('aguid');
+
+var mapper = require('./create_table_map.js');
+var utils = require('./utils.js');
+
+
+function paramStr (columns, opts) {
+ var offset = (opts && opts.offset) || 0;
+ var assign = (opts && opts.assign) || false;
+
+ return columns.map(function (k, i) {
+ var suff = '$' + (1 + i + (offset || 0));
+ var pref = assign ? k + '=' : '';
+
+ return pref + suff;
+ });
+}
+
+
+function processWhere (where, query, values) {
+ var keys = Object.keys(where);
+ var conds = paramStr(keys, { offset: values.length, assign: true });
+ var vals = utils.values(where, keys);
+
+ return {
+ query: query.concat('WHERE').concat(conds.join(' AND ')),
+ values: values.concat(vals)
+ };
+}
+
+
+exports.init = function init (config) {
+ var tableName = config.tableName;
+ var fields = config.fields;
+
+ var columns = Object.keys(fields).map(function (key) {
+ var type = fields[key].type;
+ var opts = utils.except(['type'], fields[key]);
+
+ return mapper(key, type, opts, tableName);
+ });
+
+ return ['CREATE TABLE IF NOT EXISTS "' + tableName + '"']
+ .concat('(' + columns.join(', ') + ')')
+ .join(' ')
+ .trim();
+};
+
+
+exports.select = function select (_, options) {
+ var columns = options.select || ['*'];
+ var values = [];
+ var query = ['SELECT']
+ .concat(columns.join(', '))
+ .concat('FROM')
+ .concat('"' + options.tableName + '"');
+ var result;
+
+ if (options.where) {
+ result = processWhere(options.where, query, values);
+ query = result.query;
+ values = result.values;
+ }
+
+ query = query.join(' ').trim();
+
+ return [query, values];
+};
+
+
+exports.insert = function insert (config, options) {
+ var fields = options.fields || {};
+ var tableConfig = []
+ .concat(config)
+ .filter(function (table) {
+ return table.tableName === options.tableName;
+ })[0]
+ ;
+ var idFields = Object.keys(tableConfig.fields).filter(function (field) {
+ return tableConfig.fields[field].type === 'id';
+ });
+ var ids = idFields.map(function () {
+ return aguid();
+ });
+ var normalColumns = Object.keys(fields);
+ var values = utils.values(fields, normalColumns).concat(ids);
+ var columns = normalColumns.concat(idFields);
+ var params = paramStr(columns);
+ var query = ['INSERT INTO "' + options.tableName + '"']
+ .concat('(' + columns.join(', ') + ')')
+ .concat('VALUES')
+ .concat('(' + params.join(', ') + ')')
+ .join(' ')
+ .trim()
+ + ' RETURNING ' + '(' + idFields.join(', ') + ')'
+ ;
+
+ return [query, values];
+};
+
+
+exports.update = function update (_, options) {
+ var fields = options.fields || {};
+ var columns = Object.keys(fields);
+ var conditions = paramStr(columns, { assign: true });
+ var values = utils.values(fields, columns);
+
+ var query = ['UPDATE "' + options.tableName + '"']
+ .concat('SET')
+ .concat(conditions.join(', '));
+ var result;
+
+ if (options.where) {
+ result = processWhere(options.where, query, values);
+ query = result.query;
+ values = result.values;
+ }
+
+ query = query.join(' ').trim();
+
+ return [query, values];
+};
+
+
+exports.delete = function del (_, options) {
+ var query = ['DELETE FROM "' + options.tableName + '"'];
+ var values = [];
+ var result = processWhere(options.where, query, values);
+
+ query = result.query;
+ values = result.values;
+
+ query = query.join(' ').trim();
+
+ return [query, values];
+};
+
+exports.dropTable = function dropTable (options) {
+ return 'DROP TABLE "' + options.tableName + '";';
+};
diff --git a/lib/utils.js b/lib/utils.js
new file mode 100644
index 0000000..d432370
--- /dev/null
+++ b/lib/utils.js
@@ -0,0 +1,26 @@
+'use strict';
+
+exports.values = function (obj, keys) {
+ return (keys || Object.keys(obj))
+ .map(function (k) { return obj[k] });
+};
+
+
+function except (fields, obj) {
+ var o = {};
+
+ Object.keys(obj).forEach(function (k) {
+ if (fields.indexOf(k) === -1) {
+ o[k] = obj[k];
+ }
+ });
+
+ return o;
+}
+
+
+exports.except = except;
+
+exports.shallowCopy = function (obj) {
+ return except([], obj);
+};
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..7a4bb86
--- /dev/null
+++ b/package.json
@@ -0,0 +1,38 @@
+{
+ "name": "abase-db",
+ "version": "0.2.3",
+ "description": "A little experiment in defining models in Joi and creating PostgreSQL Tables",
+ "main": "lib/",
+ "devDependencies": {
+ "goodparts": "^1.1.0",
+ "istanbul": "^0.4.5",
+ "pre-commit": "^1.1.3",
+ "tape": "^4.6.2",
+ "hapi": "^15.1.1"
+ },
+ "dependencies": {
+ "aguid": "^1.0.4",
+ "env2": "^2.1.1",
+ "hoek": "^4.1.0",
+ "joi": "^9.0.4",
+ "pg": "^6.1.0"
+ },
+ "scripts": {
+ "test": "tape './test/**/*.test.js'",
+ "lint": "node_modules/.bin/goodparts .",
+ "lint:fix": "node_modules/.bin/goodparts . --fix",
+ "cover": "node_modules/.bin/istanbul cover node_modules/.bin/tape './test/*.test.js'",
+ "check-coverage": "node_modules/.bin/istanbul check-coverage --statements 100 --functions 100 --lines 100 --branches 100",
+ "example": "node example/"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/dwyl/joi-postgresql.git"
+ },
+ "author": "@eliascodes && @jrans",
+ "license": "ISC",
+ "bugs": {
+ "url": "https://github.com/dwyl/joi-postgresql/issues"
+ },
+ "homepage": "https://github.com/dwyl/joi-postgresql#readme"
+}
diff --git a/test/config_validator.test.js b/test/config_validator.test.js
new file mode 100644
index 0000000..616b391
--- /dev/null
+++ b/test/config_validator.test.js
@@ -0,0 +1,88 @@
+'use strict';
+
+var test = require('tape');
+
+var validate = require('../lib/config_validator.js');
+var dbNameRegEx = validate.dbNameRegEx;
+
+function validator (config) {
+ return function () {
+ validate(config);
+ };
+}
+
+test('config validator', function (t) {
+ t.throws(
+ validator({ fields: {} }),
+ 'error if no tableName property'
+ );
+ t.throws(
+ validator({ tableName: 'test' }), // eslint-disable-line
+ 'error if no fields property'
+ );
+ t.throws(
+ validator({
+ tableName: '2test',
+ fields: {}
+ }),
+    'error if table name doesn\'t pass db name regex'
+ );
+ t.throws(
+ validator({
+ tableName: 'test',
+ fields: { '2field': { type: 'string' } }
+ }),
+ 'error if field name doesn\'t pass db name regex'
+ );
+ t.doesNotThrow(
+ validator({
+ tableName: 'test',
+ fields: { email: { type: 'string', unknown: 'allowed' } }
+ }),
+ 'no error when extra options unknown'
+ );
+
+ t.end();
+});
+
+test('config validator, multiple tables', function (t) {
+ t.doesNotThrow(
+ validator([{
+ tableName: 'test',
+ fields: { email: { type: 'string' } }
+ }, {
+ tableName: 'test_2',
+ fields: { email: { type: 'string' } }
+ }]),
+ 'handles multiple tables'
+ );
+
+ t.end();
+});
+
+test('dbNameRegEx', function (t) {
+ t.ok(
+ dbNameRegEx.exec('_a1pha_Numer1c'),
+ 'alpha numeric keys allowed only'
+ );
+ t.notOk(
+ dbNameRegEx.exec(''),
+ 'alpha numeric keys allowed only'
+ );
+ t.notOk(
+ dbNameRegEx.exec('no£way'),
+ 'no other characters allowed'
+ );
+ t.notOk(
+ dbNameRegEx.exec('3Numer1c'),
+ 'must only start with a _ or letter'
+ );
+ t.notOk(
+ dbNameRegEx.exec(
+ '_morethan63characters_morethan63characters_morethan63characters_'
+ ),
+ '63 character limit for field names'
+ );
+
+ t.end();
+});
diff --git a/test/create_table_map.test.js b/test/create_table_map.test.js
new file mode 100644
index 0000000..bd283bb
--- /dev/null
+++ b/test/create_table_map.test.js
@@ -0,0 +1,94 @@
+'use strict';
+
+var test = require('tape');
+
+var mapper = require('../lib/create_table_map.js');
+
+var mapObj = mapper.mapObj;
+
+test('Boolean type', function (t) {
+ t.equal(
+ mapObj.boolean({}),
+ 'BOOLEAN',
+ 'boolean type: default'
+ );
+ t.end();
+});
+
+test('Date type', function (t) {
+ t.equal(
+ mapObj.date({}),
+ 'DATE',
+ 'date type: default'
+ );
+ t.equal(
+ mapObj.date({ timestamp: true }),
+ 'TIMESTAMP',
+ 'date type: timestamp'
+ );
+ t.end();
+});
+
+test('Number type', function (t) {
+ t.equal(
+ mapObj.number({}),
+ 'DOUBLE PRECISION',
+ 'number type: default'
+ );
+ t.equal(
+ mapObj.number({ integer: true }),
+ 'BIGINT',
+ 'number type: integer'
+ );
+ t.end();
+});
+
+test('String type', function (t) {
+ t.equal(
+ mapObj.string({}),
+ 'VARCHAR(80)',
+ 'string type: default'
+ );
+ t.equal(
+ mapObj.string({ max: 12 }),
+ 'VARCHAR(12)',
+ 'string type: specifies length'
+ );
+ t.end();
+});
+
+test('Create Table Mapper Function', function (t) {
+ t.equal(
+ mapper('field', 'string', { max: 140 }),
+ 'field VARCHAR(140)',
+ 'name added to sql query and options passed through'
+ );
+ t.end();
+});
+
+test('Create Table Mapper Function w/ no options', function (t) {
+ t.equal(
+ mapper('field', 'string'),
+ 'field VARCHAR(80)',
+ 'name added to sql query and default options used'
+ );
+ t.end();
+});
+
+test('Create Table Mapper Function w/ unique option', function (t) {
+ t.equal(
+ mapper('email', 'string', { unique: true }, 'test_table'),
+ 'email VARCHAR(80) CONSTRAINT test_table_email_unique UNIQUE',
+ 'constraint added to column'
+ );
+ t.end();
+});
+
+test('Create Table Mapper Function w/ primaryKey option', function (t) {
+ t.equal(
+ mapper('email', 'string', { primaryKey: true }, 'test_table'),
+ 'email VARCHAR(80) CONSTRAINT test_table_pk PRIMARY KEY',
+ 'pk constraint added to column'
+ );
+ t.end();
+});
diff --git a/test/db_handlers.test.js b/test/db_handlers.test.js
new file mode 100644
index 0000000..e66aaa2
--- /dev/null
+++ b/test/db_handlers.test.js
@@ -0,0 +1,184 @@
+'use strict';
+
+var test = require('tape');
+
+var dbConn = require('./test_pg_client.js');
+var db = require('../lib/db_handlers.js');
+var schema = require('../example_schema.js');
+
+var multipleSchema = [{
+ tableName: 'table_1', // eslint-disable-line
+ fields: { field: { type: 'string', email: true } }
+}, {
+ tableName: 'table_2', // eslint-disable-line
+ fields: { field: { type: 'string', email: true } }
+}];
+
+var testInsert = {
+ email: 'test@gmail.com',
+ dob: '2001-09-27',
+ username: 'test'
+};
+var testTab = schema.tableName;
+
+var client = dbConn.client;
+
+test('init test client', function (t) {
+ client.connect(function () {
+ client.query('DROP TABLE IF EXISTS ' + schema.tableName);
+ client.query('DROP TABLE IF EXISTS table_1');
+ client.query('DROP TABLE IF EXISTS table_2', t.end);
+ });
+});
+
+test('db.init', function (t) {
+ db.init(client, schema)
+ .then(function () { return client.query('SELECT * from user_data') })
+ .then(function (res) {
+ t.ok(
+ res.fields
+ .map(function (field) { return field.name })
+ .indexOf('dob') > -1
+ , 'table created with a correct field'
+ );
+ t.end();
+ })
+ ;
+});
+
+test('db.init multiple tables', function (t) {
+ function checkFieldExist (res) {
+ t.ok(
+ res.fields
+ .map(function (field) { return field.name })
+ .indexOf('field') > -1
+ , 'table created with a correct field'
+ );
+ }
+ db.init(client, multipleSchema)
+ .then(function () { return client.query('SELECT * from table_1') })
+ .then(checkFieldExist)
+ .then(function () { return client.query('SELECT * from table_2') })
+ .then(checkFieldExist)
+ .then(t.end)
+ ;
+});
+
+
+test('db.insert & default select w custom where', function (t) {
+ db.insert(client, schema, { fields: testInsert, tableName: testTab })
+ .then(function (res) {
+ t.ok(res.rows[0].id, 'id returned in response');
+
+ return db.select(client, schema, {
+ where: { dob: '2001-09-27' },
+ tableName: testTab
+ });
+ })
+ .then(function (res) {
+ t.equal(
+ res.rows[0].email,
+ testInsert.email,
+ 'email correct'
+ );
+ t.equal(
+ res.rows[0].username,
+ testInsert.username,
+ 'username correct'
+ );
+ t.equal(
+ res.rows[0].dob.getFullYear(),
+ new Date(testInsert.dob).getFullYear(),
+ 'get same date back, though now a date object'
+ );
+ t.end();
+ })
+ .catch(function (err) {
+ t.fail(err);
+ t.end();
+ })
+ ;
+});
+
+test('db.insert x 2 same username error', function (t) {
+ t.plan(1);
+ db.insert(client, schema, { fields: testInsert, tableName: testTab })
+ .then(function () {
+ return db.insert(client, schema, {
+ fields: testInsert,
+ tableName: testTab
+ });
+ })
+ .then(function () {
+      t.fail('shouldn\'t allow second insert if unique key given');
+ })
+ .catch(function () {
+ t.pass('shouldn\'t allow second insert if unique key given');
+ })
+ ;
+});
+
+test('db.update w where & custom select w default where', function (t) {
+ t.plan(1);
+ db.update(client, schema, {
+ tableName: testTab,
+ fields: { username: 'bob' },
+ where: { email: 'test@gmail.com' }
+ }).then(function () {
+ return db.select(client, schema, {
+ tableName: testTab,
+ select: ['email', 'username']
+ });
+ })
+ .then(function (res) {
+ t.deepEqual(
+ res.rows[0],
+ {
+ email: 'test@gmail.com',
+ username: 'bob'
+ },
+ 'username updated'
+ );
+ })
+ .catch(t.fail);
+});
+
+test('db.delete w db.select', function (t) {
+ t.plan(1);
+ db.delete(client, schema, { tableName: testTab, where: { username: 'bob' } })
+ .then(function () {
+ return db.select(client, schema, { tableName: testTab });
+ })
+ .then(function (res) { t.equal(res.rows.length, 0, 'nothing left in db') })
+ .catch(t.fail)
+ ;
+});
+
+test('db.flush all via config', function (t) {
+ t.plan(1);
+ db.init(client, schema, null)
+ .then(function () { return db.flush(client, schema) })
+ .then(function () { return client.query('SELECT * FROM ' + testTab + ';') })
+    .catch(function (err) { return t.ok(err, 'selecting flushed table errors') })
+ ;
+});
+
+test('db.flush all via options', function (t) {
+ t.plan(2);
+ db.init(client, multipleSchema, null)
+ .then(function () {
+ return db.flush(client, null, { tableName: 'table_2' });
+ })
+ .then(function () { return client.query('SELECT * FROM table_1;') })
+ .then(function (res) {
+      t.ok(res, 'table_1 remains');
+
+ return client.query('SELECT * FROM table_2;');
+ })
+    .catch(function (err) { return t.ok(err, 'selecting flushed table errors') })
+ ;
+});
+
+test('close test DB connections', function (t) {
+ client.end(t.end);
+});
diff --git a/test/index.test.js b/test/index.test.js
new file mode 100644
index 0000000..c6be034
--- /dev/null
+++ b/test/index.test.js
@@ -0,0 +1,78 @@
+'use strict';
+
+var test = require('tape');
+var Hapi = require('hapi');
+var path = require('path');
+
+var plugin = require('../lib/index.js');
+
+test('Can register DB plugin with `schemaPath` option', function (t) {
+ var server = new Hapi.Server();
+
+ server.connection();
+ server.register({
+ register: plugin,
+ options: {
+ schemaPath: path.resolve(__dirname, '..', 'example_schema.js'),
+ dbConnection: process.env.TEST_DATABASE_URL
+ }
+ }, function (err) {
+ if (err) {
+ t.fail(err);
+ }
+
+ server.route({
+ method: 'GET',
+ path: '/',
+ handler: function (request, reply) {
+ t.equal(typeof request.abase.db.insert, 'function', 'bound insert');
+ t.equal(typeof request.abase.db.select, 'function', 'bound select');
+ t.equal(typeof request.abase.db.update, 'function', 'bound update');
+ t.equal(typeof request.abase.db.delete, 'function', 'bound delete');
+
+ request.abase.db.select({})
+ .then(function (result) {
+ t.equal(result.rows.length, 0, 'nothing in the DB');
+ reply('');
+ })
+ .catch(function () {
+ reply('');
+ });
+ }
+ });
+
+ server.inject({ method: 'GET', url: '/' }, function (response) {
+ t.equal(response.statusCode, 200, '200 OK Code');
+ t.equal(response.payload, '', 'Empty (normal) response');
+ server.endAbaseDb(t.end);
+ });
+ });
+});
+
+test('db handlers exposed', function (t) {
+ var handlers = Object.keys(plugin.handlers);
+ var wanted = ['insert', 'select', 'delete', 'update', 'init', 'flush'];
+
+ t.ok(
+ wanted.reduce(function (truth, handler) {
+ return truth && handlers.indexOf(handler) > -1;
+ }, true),
+ 'all handlers found: ' + wanted.join(', ')
+ );
+ t.end();
+});
+
+test('validate exposed', function (t) {
+ t.ok(
+ typeof plugin.validate === 'function',
+ 'validate function given'
+ );
+ t.end();
+});
+
+test('createClient helper', function (t) {
+ var testClient = plugin.createClient(process.env.TEST_DATABASE_URL);
+
+ t.ok(testClient.connection, 'client object returned');
+ t.end();
+});
diff --git a/test/istantiate_db.test.js b/test/istantiate_db.test.js
new file mode 100644
index 0000000..a27c541
--- /dev/null
+++ b/test/istantiate_db.test.js
@@ -0,0 +1,103 @@
+'use strict';
+
+var test = require('tape');
+
+var dbConn = require('./test_pg_client.js');
+var instantiateDb = require('../lib/instantiate_db.js');
+var schema = require('../example_schema.js');
+
+var pool = dbConn.pool;
+
+var testInsert = {
+ email: 'test@gmail.com',
+ dob: '2001-09-27',
+ username: 'test'
+};
+var testTab = schema.tableName;
+
+test('instantiateDb gives obj w/ methods bound to pg.Pool to cb', function (t) {
+ instantiateDb(pool, schema, function (err, db) {
+ if (err) {
+ t.fail(err, 'should work ok');
+ }
+ t.equal(typeof db.insert, 'function', '.insert method exists');
+ t.equal(typeof db.update, 'function', '.update method exists');
+ t.equal(typeof db.select, 'function', '.select method exists');
+ t.equal(typeof db.delete, 'function', '.delete method exists');
+ t.end();
+ });
+});
+
+test('db bound .insert adds to DB :: promise interface', function (t) {
+ instantiateDb(pool, schema, function (_, db) {
+ db.insert({ fields: testInsert, tableName: testTab })
+ .then(function () {
+ return db.select({
+ tableName: testTab,
+ where: { email: testInsert.email }
+ });
+ })
+ .then(function (result) {
+ t.equal(result.rows[0].id.length, 36, 'guid generated');
+ t.equal(result.rows[0].email, testInsert.email, 'Email matches');
+ t.end();
+ })
+ .catch(t.fail)
+ ;
+ });
+});
+
+test('db bound .delete removes line from DB :: cb interface', function (t) {
+ instantiateDb(pool, schema, function (_, db) {
+ db.delete({ tableName: testTab, where: testInsert }, function (deleteErr) {
+ if (deleteErr) {
+ t.fail(deleteErr);
+ }
+
+ db.select({ tableName: testTab }, function (selectErr, result) {
+ if (selectErr) {
+ t.fail(selectErr);
+ }
+
+ t.equal(result.rows.length, 0, 'Nothing left in DB');
+ t.end();
+ });
+ });
+ });
+});
+
+test('invalid args error for handler given to cb', function (t) {
+ instantiateDb(pool, schema, function (_, db) {
+ db.delete(
+ { tableName: testTab, where: 'Should not be a string' },
+ function (handlerError) {
+ t.ok(handlerError, 'callback given error from handler being abused');
+
+ t.end();
+ }
+ );
+ });
+});
+
+test('pool error', function (t) {
+ t.plan(2);
+
+ instantiateDb(pool, schema, function (_, db) {
+ pool.end(function () {
+ db.delete(
+ { tableName: testTab, where: testInsert },
+ function (poolError) {
+ t.ok(poolError, 'callback given error from failed pool connection');
+ }
+ );
+ db.delete({ tableName: testTab, where: testInsert }).then(function (res) {
+ t.notOk(res, 'no returned result if have caught error and no cb');
+ });
+ });
+ });
+});
+
+// keep at bottom
+test('close test DB connections', function (t) {
+ pool.end(t.end);
+});
diff --git a/test/parse_options.test.js b/test/parse_options.test.js
new file mode 100644
index 0000000..9c5af54
--- /dev/null
+++ b/test/parse_options.test.js
@@ -0,0 +1,61 @@
+'use strict';
+
+var test = require('tape');
+var path = require('path');
+
+var parseOptions = require('../lib/parse_options.js');
+
+
+test('parseOptions.schema', function (t) {
+ t.deepEqual(
+ parseOptions.schema(
+ {
+ schema: { object: 'schema' },
+ schemaPath: path.resolve(__dirname, '..', 'example_schema.js')
+ },
+ { app: { abase: { server: 'schema' } } }
+ ),
+ { server: 'schema' },
+ 'server trumps schema object'
+ );
+ t.deepEqual(
+ parseOptions.schema(
+ {
+ schema: { table: 'schema' },
+ schemaPath: path.resolve(__dirname, '..', 'example_schema.js')
+ },
+ { app: {} }
+ ),
+ { table: 'schema' },
+ 'schema trumps schemaPath'
+ );
+ t.deepEqual(
+ parseOptions.schema(
+ { schemaPath: path.resolve(__dirname, '..', 'example_schema.js') },
+ { app: {} }
+ ),
+ require('../example_schema.js'), // eslint-disable-line
+ 'schemaPath used when no schema given'
+ );
+ t.end();
+});
+
+test('parseOptions.dbConfig', function (t) {
+ t.deepEqual(
+ parseOptions.dbConfig({ dbConnection: { parsed: 'object' } }),
+ { parsed: 'object' },
+ 'does nothing if config already complete'
+ );
+ t.deepEqual(
+ parseOptions.dbConfig({ dbConnection: 'psql://localhost:5432/testdb' }),
+ {
+ database: 'testdb',
+ host: 'localhost',
+ password: undefined, // eslint-disable-line
+ port: '5432',
+ user: ''
+ },
+ 'parses db url and handles no user and pass'
+ );
+ t.end();
+});
diff --git a/test/sql_gen.test.js b/test/sql_gen.test.js
new file mode 100644
index 0000000..4bc4f62
--- /dev/null
+++ b/test/sql_gen.test.js
@@ -0,0 +1,202 @@
+'use strict';
+
+var tape = require('tape');
+
+var sqlGen = require('../lib/sql_gen.js');
+var schema = require('../example_schema.js');
+
+tape('::init should throw on empty or invalid input', function (t) {
+ t.throws(function () {
+ sqlGen.init();
+ });
+ t.end();
+});
+
+tape('::init - generate SQL to create a table if none exists', function (t) {
+ var query = sqlGen.init(schema);
+
+ t.equal(
+ query,
+ 'CREATE TABLE IF NOT EXISTS "user_data" ('
+ + 'email VARCHAR(80), '
+ + 'username VARCHAR(20) CONSTRAINT user_data_username_unique UNIQUE, '
+ + 'dob DATE, '
+ + 'id VARCHAR(36)'
+ + ')',
+ 'Create table query generation from config object'
+ );
+ t.end();
+});
+
+tape('::select - generate SQL to select columns from a table', function (t) {
+ var query = sqlGen.select(null, {
+ tableName: schema.tableName,
+ select: ['email', 'dob']
+ });
+
+ t.equal(
+ query[0],
+ 'SELECT email, dob FROM "user_data"',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(query[1], [], 'Generate values for parameterised query');
+ t.end();
+});
+
+tape('::select - gen. SQL to select cols from table w/ where', function (t) {
+ var query = sqlGen.select(null, {
+ tableName: schema.tableName,
+ select: ['email', 'dob'],
+ where: { foo: 'bar' }
+ });
+
+ t.equal(
+ query[0],
+ 'SELECT email, dob FROM "user_data" WHERE foo=$1',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(query[1], ['bar'], 'Generate values for parameterised query');
+ t.end();
+});
+
+tape('::insert - generate SQL to insert a column into a table', function (t) {
+ var query = sqlGen.insert(
+ { tableName: schema.tableName, fields: {} },
+ { tableName: schema.tableName, fields: { email: 'me@poop.com' } }
+ );
+
+ t.equal(
+ query[0],
+ 'INSERT INTO "user_data" (email) VALUES ($1) RETURNING ()',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(
+ query[1],
+ ['me@poop.com'],
+ 'Generate values for parameterised query'
+ );
+ t.end();
+});
+
+tape('::insert - generate SQL to insert blank col into table', function (t) {
+ var query = sqlGen.insert(
+ { tableName: schema.tableName, fields: {} },
+ { tableName: schema.tableName }
+ );
+
+ t.equal(
+ query[0],
+ 'INSERT INTO "user_data" () VALUES () RETURNING ()',
+ 'Generate query for blank line'
+ );
+ t.deepEqual(
+ query[1],
+ [],
+ 'Generate empty array'
+ );
+ t.end();
+});
+
+tape('::update - generate SQL to update a column in a table', function (t) {
+ var query = sqlGen.update(null, {
+ tableName: schema.tableName,
+ fields: { email: 'me@poop.com' }
+ });
+
+ t.equal(
+ query[0],
+ 'UPDATE "user_data" SET email=$1',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(
+ query[1],
+ ['me@poop.com'],
+ 'Generate values for parameterised query'
+ );
+ t.end();
+});
+
+tape('::update - generate SQL to update no fields of column', function (t) {
+ var query = sqlGen.update(null, { tableName: schema.tableName });
+
+ t.equal(
+ query[0],
+ 'UPDATE "user_data" SET',
+ 'Generate query for blank line'
+ );
+ t.deepEqual(
+ query[1],
+ [],
+ 'Generate empty array'
+ );
+ t.end();
+});
+
+tape('::update - gen. SQL to update a col in table w/ where', function (t) {
+ var query = sqlGen.update(null, {
+ tableName: schema.tableName,
+ fields: { email: 'me@poop.com' },
+ where: { foo: 'bar' }
+ });
+
+ t.equal(
+ query[0],
+ 'UPDATE "user_data" SET email=$1 WHERE foo=$2',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(
+ query[1],
+ ['me@poop.com', 'bar'],
+ 'Generate values for parameterised query'
+ );
+ t.end();
+});
+
+tape('::delete should generate SQL to delete a row from a table', function (t) {
+ var query = sqlGen.delete(null, {
+ tableName: schema.tableName,
+ where: { username: 'bob' }
+ });
+
+ t.equal(
+ query[0],
+ 'DELETE FROM "user_data" WHERE username=$1',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(
+ query[1],
+ ['bob'],
+ 'Generate values for parameterised query'
+ );
+ t.end();
+});
+
+tape('::delete should gen SQL to delete row w/ multiple where', function (t) {
+ var query = sqlGen.delete(null, {
+ tableName: schema.tableName,
+ where: { username: 'bob', dob: '20/04/1988' }
+ });
+
+ t.equal(
+ query[0],
+ 'DELETE FROM "user_data" WHERE username=$1 AND dob=$2',
+ 'Generate parameterised query'
+ );
+ t.deepEqual(
+ query[1],
+ ['bob', '20/04/1988'],
+ 'Generate values for parameterised query'
+ );
+ t.end();
+});
+
+tape('::dropTable should gen SQL to drop table', function (t) {
+ var query = sqlGen.dropTable({ tableName: schema.tableName });
+
+ t.equal(
+ query,
+ 'DROP TABLE "user_data";',
+ 'Generate parameterised query'
+ );
+ t.end();
+});
diff --git a/test/test_pg_client.js b/test/test_pg_client.js
new file mode 100644
index 0000000..e0b4ab0
--- /dev/null
+++ b/test/test_pg_client.js
@@ -0,0 +1,26 @@
+'use strict';
+
+var url = require('url');
+var pg = require('pg');
+
+var parsed, connection;
+
+require('env2')('config.env');
+
+if (!process.env.TEST_DATABASE_URL) {
+ throw new Error('TEST_DATABASE_URL must be defined');
+}
+
+parsed = url.parse(process.env.TEST_DATABASE_URL);
+connection = {
+ host: parsed.hostname,
+ port: parsed.port,
+ database: parsed.pathname.split('/')[1],
+ user: (parsed.auth || '').split(':')[0],
+ password: (parsed.auth || '').split(':')[1]
+};
+
+module.exports = {
+ client: new pg.Client(connection),
+ pool: new pg.Pool(connection)
+};
diff --git a/test/utils.test.js b/test/utils.test.js
new file mode 100644
index 0000000..2cf2614
--- /dev/null
+++ b/test/utils.test.js
@@ -0,0 +1,51 @@
+'use strict';
+
+var test = require('tape');
+var _ = require('../lib/utils.js');
+
+var o = {
+ a: 1,
+ b: 2
+};
+
+test('::values w/ default keys value', function (t) {
+ var result = _.values(o);
+
+ t.ok(result.indexOf(o.a) > -1, 'Key "a"\'s value found');
+ t.ok(result.indexOf(o.b) > -1, 'Key "b"\'s value found');
+ t.end();
+});
+
+test('::values w/ chosen order', function (t) {
+ t.deepEqual(
+ _.values(o, ['b', 'a']),
+ [o.b, o.a],
+ '"b" given back first, "a" second'
+ );
+
+ t.end();
+});
+
+test('::except', function (t) {
+ t.deepEqual(
+ _.except(['b'], o),
+ { a: 1 },
+ 'Only "a" prop left'
+ );
+
+ t.end();
+});
+
+test('::shallowCopy', function (t) {
+ var n = {
+ a: o,
+ b: 'c'
+ };
+ var copy = _.shallowCopy(n);
+
+ t.deepEqual(copy, n, 'deep equal');
+ t.notEqual(copy, n, 'Not same object');
+ t.equal(copy.a, o, 'Only shallowly copied');
+
+ t.end();
+});