From c04cdb0813cc161bd8b5ff0197a3a18b31f5e170 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Tue, 1 Apr 2014 20:30:49 +0200 Subject: [PATCH 001/152] Improve mapping of nested arrays. --- lib/mapping-generator.js | 18 ++++++++++++------ test/boost-field-test.js | 2 +- test/mapping-generator-test.js | 33 +++++++++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 7 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 1c456f4b..eb667b02 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -5,7 +5,6 @@ Generator.prototype.generateMapping = function(schema, cb){ var cleanTree = getCleanTree(schema.tree, schema.paths, ''); delete cleanTree[schema.get('versionKey')]; var mapping = getMapping(cleanTree, ''); - cb(null, { properties: mapping }); }; @@ -113,16 +112,23 @@ function getCleanTree(tree, paths, prefix) { // Field has some kind of type if (type) { - // If it is an nestec schema + // If it is a nested array if (value[0]) { - //A nested schema can be just a blank object with no defined paths - if(value[0].tree && value[0].paths){ - cleanTree[field] = getCleanTree(value[0].tree, value[0].paths, ''); + // A nested array can contain complex objects + if (paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { + cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); } // Check for single type arrays (which elasticsearch will treat as the core type i.e. [String] = string) else if ( paths[field].caster && paths[field].caster.instance ) { - cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; + // Even for simple types the value can be an object if there is other attributes than type + if(typeof value[0] === 'object'){ + cleanTree[field] = value[0]; + } else { + cleanTree[field] = {}; + } + cleanTree[field].type = paths[field].caster.instance.toLowerCase(); } + //A nested schema can be just a blank object with no defined paths else{ cleanTree[field] = { type:'object' diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 60f10fdf..e9b94108 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -30,7 +30,7 @@ describe('Add Boost Option Per Field', function(){ it('should create a mapping with boost field added', function(done){ BlogPost.createMapping(function(err, mapping){ esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ - var props = mapping.blogpost.properties; + var props = mapping.blogposts.mappings.blogpost.properties; props.title.type.should.eql('string'); props.title.boost.should.eql(2.0); done(); diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js index 4773ffcc..4562cfc2 100644 --- a/test/mapping-generator-test.js +++ b/test/mapping-generator-test.js @@ -75,6 +75,7 @@ describe('MappingGenerator', function(){ done(); }); }); + it('recognizes an multi_field and maps it as one', function(done){ generator.generateMapping(new Schema({ test: { @@ -137,6 +138,38 @@ describe('MappingGenerator', function(){ done(); }); }); + it('recognizes a nested array with a simple type and maps it as a simple attribute', function(done){ + generator.generateMapping(new Schema({ + contacts: [String] + }), function(err, mapping){ + mapping.properties.contacts.type.should.eql('string'); + done(); + }); + }); + it('recognizes a nested array with a simple type and additional attributes and maps it as a simple attribute', function(done){ + generator.generateMapping(new Schema({ + contacts: [{ type: String, 
es_index: 'not_analyzed' }] + }), function(err, mapping){ + mapping.properties.contacts.type.should.eql('string'); + mapping.properties.contacts.index.should.eql('not_analyzed'); + done(); + }); + }); + it('recognizes a nested array with a complex object and maps it', function(done){ + generator.generateMapping(new Schema({ + name: String, + contacts: [{ + email: {type: String, es_index: 'not_analyzed' }, + telephone: String + }] + }), function(err, mapping){ + mapping.properties.name.type.should.eql('string'); + mapping.properties.contacts.properties.email.type.should.eql('string'); + mapping.properties.contacts.properties.email.index.should.eql('not_analyzed'); + mapping.properties.contacts.properties.telephone.type.should.eql('string'); + done(); + }); + }); it('excludes a virtual property from mapping', function(done){ var PersonSchema = new Schema({ first_name: {type: String}, From 2a6e07b64bccad895e130aa6e0666f60305a6d96 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Thu, 24 Apr 2014 21:08:10 +0200 Subject: [PATCH 002/152] added possibility to specify any river options --- lib/mongoosastic.js | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index e9298999..f3205f9b 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -173,21 +173,22 @@ module.exports = function elasticSearchPlugin(schema, options){ var model = this; setIndexNameIfUnset(model.modelName); if (!this.db.name) throw "ERROR: "+ model.modelName +".river() call before mongoose.connect" - esClient.putRiver( - 'mongodb', - indexName, - { - type: 'mongodb', - mongodb: { - db: this.db.name, - collection: indexName, - gridfs: (useRiver && useRiver.gridfs) ? useRiver.gridfs : false - }, - index: { - name: indexName, - type: typeName - } - }, cb ); + + // the river definition can come from the options of mongoosasic, + // but some attributes will be overwritten anyway + // see https://github.com/richardwilly98/elasticsearch-river-mongodb/wiki + var riverDefinition = useRiver.definition || {}; + + riverDefinition.type = 'mongodb'; + riverDefinition.mongodb = riverDefinition.mongodb || {}; + riverDefinition.mongodb.db = this.db.name; + riverDefinition.mongodb.collection = indexName; + riverDefinition.gridfs = (useRiver && useRiver.gridfs) ? useRiver.gridfs : false; + riverDefinition.index = riverDefinition.index || {}; + riverDefinition.index.name = indexName; + riverDefinition.index.type = typeName; + + esClient.putRiver('mongodb', indexName, riverDefinition, cb); } } }; From b11adb9ec88504966b293fb8e9872f0921de4f65 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Fri, 25 Apr 2014 21:21:34 +0200 Subject: [PATCH 003/152] support changing indexName with river without loosing the correct collection name --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f3205f9b..0430d804 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -182,7 +182,7 @@ module.exports = function elasticSearchPlugin(schema, options){ riverDefinition.type = 'mongodb'; riverDefinition.mongodb = riverDefinition.mongodb || {}; riverDefinition.mongodb.db = this.db.name; - riverDefinition.mongodb.collection = indexName; + riverDefinition.mongodb.collection = this.modelName.toLowerCase() + 's'; riverDefinition.gridfs = (useRiver && useRiver.gridfs) ? 
useRiver.gridfs : false; riverDefinition.index = riverDefinition.index || {}; riverDefinition.index.name = indexName; From 95368d0653902c783f29743b95d33d532aac655b Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Fri, 9 May 2014 20:41:32 +0200 Subject: [PATCH 004/152] make serialize more robust for deep models with empty parts --- lib/serialize.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/serialize.js b/lib/serialize.js index 4e763005..381aeaab 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -5,9 +5,11 @@ function serialize(model, mapping) { var serializedForm = {}; for (var field in mapping.properties) { - var val = serialize(model[field], mapping.properties[field]); - if (val !== undefined) { - serializedForm[field] = val; + if (model) { + var val = serialize(model[field], mapping.properties[field]); + if (val !== undefined) { + serializedForm[field] = val; + } } } From c7db5a723d0d1e02ebb86663f80e12e55ee97b3e Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Fri, 9 May 2014 22:13:36 +0200 Subject: [PATCH 005/152] make serialize compatible with nested arrays and their mappings --- lib/serialize.js | 31 ++++++++++++++++++------------- test/serialize-test.js | 30 +++++++++++++++++++++++------- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/lib/serialize.js b/lib/serialize.js index 381aeaab..55b108e8 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -1,20 +1,25 @@ module.exports = serialize; -function serialize(model, mapping) { - if (mapping.properties) { - var serializedForm = {}; - - for (var field in mapping.properties) { - if (model) { - var val = serialize(model[field], mapping.properties[field]); - if (val !== undefined) { - serializedForm[field] = val; - } - } +function _serializeObject(object, mapping) { + var serialized = {}; + for (var field in mapping.properties) { + var val = serialize(object[field], mapping.properties[field]); + if (val !== undefined) { + serialized[field] = val; } + } + return serialized; +} - return serializedForm; - +function serialize(model, mapping) { + if (mapping.properties && model) { + if (Array.isArray(model)) { + return model.map(function(object) { + return _serializeObject(object, mapping); + }); + } else { + return _serializeObject(model, mapping); + } } else if (typeof value === 'object' && value !== null) { var name = value.constructor.name; if (name === 'ObjectID') { diff --git a/test/serialize-test.js b/test/serialize-test.js index ed0da45a..f824ab17 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -14,7 +14,8 @@ var PersonSchema22 = new Schema({ , last: String }, dob: Date, - bowlingBall: {type:Schema.ObjectId, ref:'BowlingBall'} + bowlingBall: {type:Schema.ObjectId, ref:'BowlingBall'}, + games: [{score: Number, date: Date}] }); var Person = mongoose.model('Person22', PersonSchema22); @@ -26,24 +27,39 @@ generator.generateMapping(PersonSchema22, function(err, tmp) { mapping = tmp; }); -describe('serialize', function(){ +describe.only('serialize', function(){ var dude = new Person({ - name: {first:'Jeffery', last:'Lebowski'}, + name: {first:'Jeffrey', last:'Lebowski'}, dob: new Date(Date.parse('05/17/1962')), - bowlingBall: new BowlingBall() + bowlingBall: new BowlingBall(), + games: [{score: 80, date: new Date(Date.parse('05/17/1962'))}, {score: 80, date: new Date(Date.parse('06/17/1962'))}] }); + + // another person with missing parts to test robustness + var millionnaire = new Person({ + name: {first:'Jeffrey', last:'Lebowski'}, + }); + + it('should 
serialize a document with missing bits', function(){ + var serialized = serialize(millionnaire, mapping); + serialized.should.have.property('games', []); + }); + describe('with no indexed fields', function(){ var serialized = serialize(dude, mapping); it('should serialize model fields', function(){ - serialized.name.first.should.eql('Jeffery'); + serialized.name.first.should.eql('Jeffrey'); serialized.name.last.should.eql('Lebowski'); }); it('should serialize object ids as strings', function(){ serialized.bowlingBall.should.eql(dude.bowlingBall); }); - it('should serialize dates in ISO 8601 format', function(){ - serialized.dob.should.eql(dude.dob) + serialized.dob.should.eql(dude.dob); + }); + it('should serialize nested arrays', function(){ + serialized.games.should.have.lengthOf(2); + serialized.games[0].should.have.property('score', 80); }); }); From 57b5ece119fba1b285933d605d34cb5da8cf3ff3 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Mon, 9 Jun 2014 21:58:18 +0200 Subject: [PATCH 006/152] first support of bulk indexing mode --- lib/mongoosastic.js | 54 +++++++++++++++++++++++++++---- test/bulk-test.js | 73 ++++++++++++++++++++++++++++++++++++++++++ test/serialize-test.js | 2 +- 3 files changed, 122 insertions(+), 7 deletions(-) create mode 100644 test/bulk-test.js diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 0430d804..3c8b4bd3 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,7 +13,8 @@ module.exports = function elasticSearchPlugin(schema, options){ , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 , esClient = new elastical.Client(host, options) - , useRiver = options && options.useRiver; + , useRiver = options && options.useRiver + , bulk = options && options.bulk; if (useRiver) setUpRiver(schema); @@ -44,8 +45,12 @@ module.exports = function elasticSearchPlugin(schema, options){ } var model = this; setIndexNameIfUnset(model.constructor.modelName); - esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); - } + if(bulk) { + bulkIndex(index || indexName, type || typeName, this); + } else { + esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); + } + }; /** * Unset elastic search index @@ -53,7 +58,11 @@ module.exports = function elasticSearchPlugin(schema, options){ schema.methods.unIndex = function(){ var model = this; setIndexNameIfUnset(model.constructor.modelName); - deleteByMongoId(esClient, model, indexName, typeName, 3); + if(bulk) { + bulkDelete(index || indexName, type || typeName, this); + } else { + deleteByMongoId(esClient, model, indexName, typeName, 3); + } } /** * Synchronize an existing collection @@ -127,6 +136,35 @@ module.exports = function elasticSearchPlugin(schema, options){ }); }; + var bulkBuffer = []; + + function bulkDelete(indexName, typeName, model) { + bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}); + } + + function bulkIndex(indexName, typeName, model) { + bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}); + } + + var bulkTimeout; + + function bulkAdd(instruction) { + bulkBuffer.push(instruction); + clearTimeout(bulkTimeout); + if(bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush(); + } else { + bulkTimeout = setTimeout(function(){ + schema.statics.flush(); + }, bulk.delay || 1000); + } + } + + schema.statics.flush = function(){ + 
esClient.bulk(bulkBuffer); + bulkBuffer = []; + }; + function setIndexNameIfUnset(model){ var modelName = model.toLowerCase(); if(!indexName){ @@ -146,7 +184,11 @@ module.exports = function elasticSearchPlugin(schema, options){ schema.post('remove', function(){ var model = this; setIndexNameIfUnset(model.constructor.modelName); - deleteByMongoId(esClient, model, indexName, typeName, 3); + if(bulk) { + bulkDelete(indexName, typeName, this); + } else { + deleteByMongoId(esClient, model, indexName, typeName, 3); + } }); /** @@ -257,4 +299,4 @@ function deleteByMongoId(client, model,indexName, typeName, tries){ model.emit('es-removed', err, res); } }); -} +} \ No newline at end of file diff --git a/test/bulk-test.js b/test/bulk-test.js new file mode 100644 index 00000000..658f3883 --- /dev/null +++ b/test/bulk-test.js @@ -0,0 +1,73 @@ +var mongoose = require('mongoose'), + elastical = require('elastical'), + esClient = new(require('elastical').Client)(), + should = require('should'), + config = require('./config'), + Schema = mongoose.Schema, + ObjectId = Schema.ObjectId, + async = require('async'), + mongoosastic = require('../lib/mongoosastic'); + +var BookSchema = new Schema({ + title: String +}); +BookSchema.plugin(mongoosastic, { + bulk: { + size: 10, + delay: 100 + } +}); + +var Book = mongoose.model('Book2', BookSchema); + +describe('Bulk mode', function() { + var books = null; + + before(function(done) { + config.deleteIndexIfExists(['book2s'], function() { + mongoose.connect(config.mongoUrl, function() { + var client = mongoose.connections[0].db; + client.collection('book2s', function(err, _books) { + books = _books; + Book.remove(done); + }); + }); + }); + }); + before(function(done) { + async.forEach(bookTitles(), function(title, cb) { + new Book({ + title: title + }).save(cb); + }, function() { + setTimeout(done, 1200); + }); + }); + before(function(done) { + Book.findOne({ + title: 'American Gods' + }, function(err, book) { + book.remove(function() { + setTimeout(done, 1200); + }); + }); + }); + it('should index all objects and support deletions too', function(done) { + Book.search({}, function(err, results) { + results.should.have.property('hits').with.property('total', 52); + done(); + }); + }); +}); + +function bookTitles() { + var books = [ + 'American Gods', + 'Gods of the Old World', + 'American Gothic' + ]; + for (var i = 0; i < 50; i++) { + books.push('ABABABA' + i); + } + return books; +} \ No newline at end of file diff --git a/test/serialize-test.js b/test/serialize-test.js index f824ab17..a482b205 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -27,7 +27,7 @@ generator.generateMapping(PersonSchema22, function(err, tmp) { mapping = tmp; }); -describe.only('serialize', function(){ +describe('serialize', function(){ var dude = new Person({ name: {first:'Jeffrey', last:'Lebowski'}, dob: new Date(Date.parse('05/17/1962')), From 122f0ed16ce205b5bf480554cf38213c0f49ed3b Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Mon, 9 Jun 2014 22:20:50 +0200 Subject: [PATCH 007/152] forgotten callback --- lib/mongoosastic.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 3c8b4bd3..dd5f2c6e 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -47,6 +47,7 @@ module.exports = function elasticSearchPlugin(schema, options){ setIndexNameIfUnset(model.constructor.modelName); if(bulk) { bulkIndex(index || indexName, type || typeName, this); + cb(); } else { esClient.index(index || indexName, type || typeName, 
serialize(model, mapping), {id:model._id.toString()}, cb); } From a3a2a2adaa69a36b4bc421c5648fa6f39b43488b Mon Sep 17 00:00:00 2001 From: taterbase Date: Fri, 24 Oct 2014 12:23:28 -0600 Subject: [PATCH 008/152] Update repo info in package.json --- .travis.yml | 4 ---- package.json | 4 ++-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 690e2dd2..d2b4c927 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,7 +8,3 @@ node_js: services: - mongodb - elasticsearch - -notifications: - email: - - james.r.carr@gmail.com diff --git a/package.json b/package.json index c90295c1..06a36277 100644 --- a/package.json +++ b/package.json @@ -2,11 +2,11 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "0.6.0", + "version": "0.6.1", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", - "url": "git://github.com/jamescarr/mongoosastic" + "url": "git://github.com/mongoosastic/mongoosastic" }, "main":"lib/mongoosastic.js", "dependencies": { From 75efeee03918c179d30066305a83c48b2bc10794 Mon Sep 17 00:00:00 2001 From: taterbase Date: Fri, 24 Oct 2014 12:23:44 -0600 Subject: [PATCH 009/152] Update badge --- readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme.md b/readme.md index 22fb8b1a..d5bd3cf9 100644 --- a/readme.md +++ b/readme.md @@ -1,6 +1,6 @@ # Mongoosastic [![Build -Status](https://secure.travis-ci.org/jamescarr/mongoosastic.png?branch=master)](http://travis-ci.org/jamescarr/mongoosastic) +Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept running into cases where I needed full text search capabilities in my From 4ad5c37f53c65e87fc2115836e6e574ba032d74e Mon Sep 17 00:00:00 2001 From: George Shank Date: Fri, 24 Oct 2014 12:36:34 -0600 Subject: [PATCH 010/152] Update readme.md More dynamic version info --- readme.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/readme.md b/readme.md index d5bd3cf9..061d4cca 100644 --- a/readme.md +++ b/readme.md @@ -1,6 +1,7 @@ # Mongoosastic [![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) +[![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept running into cases where I needed full text search capabilities in my @@ -9,8 +10,6 @@ full text search, I also needed the ability to filter ranges of data points in the searches and even highlight matches. For these reasons, elastic search was a perfect fit and hence this project. -## Current Version -The current version is ``0.6.0`` ## Installation From 822ee1e21057a9f5c718a780b5ef4890ce62ac2f Mon Sep 17 00:00:00 2001 From: nlko Date: Mon, 17 Feb 2014 16:36:23 +0100 Subject: [PATCH 011/152] Keep geo_* types in the mapping Prior, only geo_point were kept in the mapping. 
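For illustration, a hedged sketch of the kind of field this change now maps: any sub-key whose name starts with geo_ is recognized, not only geo_point. The field and option names below mirror the geo-shape test schema added later in this series; the surrounding mongoose setup is assumed.

```javascript
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

// Sketch: the generator now keeps the sub-object of any key starting with
// "geo_" (not only geo_point) as the field's mapping definition.
var GeoSchema = new Schema({
  frame: {
    coordinates: [],
    type: {type: String},
    geo_shape: {
      type: String,
      es_type: 'geo_shape',
      es_tree: 'quadtree',
      es_precision: '1km'
    }
  }
});
```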
--- lib/mapping-generator.js | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 1bbf1df4..9cb3f05c 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -146,10 +146,19 @@ function getCleanTree(tree, paths, prefix) { // It has no type for some reason } else { - // Because it is an geo_point object!! - if (typeof value === 'object' && value.geo_point) { - cleanTree[field] = value.geo_point; - continue; + // Because it is an geo_* object!! + if (typeof value === 'object') + { + var key; + var geoFound = false; + for (key in value) { + if (value.hasOwnProperty(key) && /^geo_/.test(key)) { + cleanTree[field] = value[key]; + geoFound = true; + //break; + } + } + if(geoFound) continue } // If it's a virtual type, don't map it From 9f8fa2f067e7b91cac796268dbabacdce5a3fc6c Mon Sep 17 00:00:00 2001 From: nlko Date: Mon, 17 Feb 2014 18:41:31 +0100 Subject: [PATCH 012/152] Correct boost test field (support ES 0.9 and 1.0). In my tests, the mapping format returned by the getMapping function is not the same between 0.90.11 and 1.0 --- test/boost-field-test.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/boost-field-test.js b/test/boost-field-test.js index e9b94108..740d63e4 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -30,7 +30,11 @@ describe('Add Boost Option Per Field', function(){ it('should create a mapping with boost field added', function(done){ BlogPost.createMapping(function(err, mapping){ esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ - var props = mapping.blogposts.mappings.blogpost.properties; + /* elasticsearch 1.0 & 0.9 support */ + var props = + mapping.blogpost != undefined ? + mapping.blogpost.properties: /* ES 0.9.11 */ + mapping.blogposts.mappings.blogpost.properties; /* ES 1.0.0 */ props.title.type.should.eql('string'); props.title.boost.should.eql(2.0); done(); From 02b01384801621c2e8e8b5f87844799ea4e10e02 Mon Sep 17 00:00:00 2001 From: nlko Date: Thu, 20 Feb 2014 10:15:44 +0100 Subject: [PATCH 013/152] Add test for undefined object field in the path prior of its use --- lib/mapping-generator.js | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 9cb3f05c..48c21c95 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -117,21 +117,20 @@ function getCleanTree(tree, paths, prefix) { } // Field has some kind of type if (type) { - // If it is an nestec schema + // If it is an nested schema if (value[0]) { //A nested schema can be just a blank object with no defined paths if(value[0].tree && value[0].paths){ cleanTree[field] = getCleanTree(value[0].tree, value[0].paths, ''); + } else if ( paths[field] && paths[field].caster && paths[field].caster.instance ) { + cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; } // Check for single type arrays (which elasticsearch will treat as the core type i.e. 
[String] = string) else if (!paths[field] && prefix) { if(paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; } - } else if( paths[field].caster && paths[field].caster.instance ) { - cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; - } - else{ + } else { cleanTree[field] = { type:'object' }; From 2214a6f337d9dd8b718256915bca1b1598c24056 Mon Sep 17 00:00:00 2001 From: nlko Date: Thu, 20 Feb 2014 15:55:08 +0100 Subject: [PATCH 014/152] Added testfor geo_shape and updated manual --- readme.md | 78 ++++++++++++++++++++ test/geo-test.js | 180 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 258 insertions(+) create mode 100644 test/geo-test.js diff --git a/readme.md b/readme.md index d5bd3cf9..4f8a7289 100644 --- a/readme.md +++ b/readme.md @@ -242,6 +242,17 @@ var ExampleSchema = new Schema({ lon: { type: Number } } + geo_shape: { + coordinates : [], + type: {type: String}, + geo_shape: { + type:String, + es_type: "geo_shape", + es_tree: "quadtree", + es_precision: "1km" + } + } + // Special feature : specify a cast method to pre-process the field before indexing it someFieldToCast : { type: String, @@ -249,6 +260,7 @@ var ExampleSchema = new Schema({ return value + ' something added'; } } + }); // Used as nested schema above. @@ -258,6 +270,56 @@ var SubSchema = new Schema({ }); ``` +## Geo mapping +Prior to index any geo mapped data (or calling the synchronize), +the mapping must be manualy created with the createMapping (see above). + +Notice that the name of the field containing the ES geo data must start by +'geo_' to be recognize as such. + +# Indexing a geo point + +```javascript + var geo = new GeoModel({ + … + geo_with_lat_lon: { lat: 1, lon: 2} + … + }); +``` + +# Indexing a geo shape + +```javascript + var geo = new GeoModel({ + … + geo_shape:{ + type:'envelope', + coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ + } + … + }); +``` + +Mapping, indexing and searching example for geo shape can be found in test/geo-test.js + +For example, one can retrieve the list of document where the shape contain a specific +point (or polygon...) + +```javascript + var geoQuery = { + "query": {"match_all": {}}, + "filter": {"geo_shape": { + "geo_shape": { + "shape": { + "type": "point", + "coordinates": [3,1] + }, + "relation": "intersects" + } + }} + } +``` + ### Advanced Queries The full query DSL of elasticsearch is exposed through the search method. For example, if you wanted to find all people between ages 21 @@ -358,6 +420,7 @@ The index method takes 3 arguments: Note that indexing a model does not mean it will be persisted to mongodb. Use save for that. +<<<<<<< HEAD ### Truncating an index The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. @@ -366,6 +429,21 @@ The static method truncate will deleted all documents from the associated index. ```javascript GarbageModel.truncate(function(err){...}); +======= +### Saving a document +The indexing takes place after saving inside the mongodb and is a defered process. +One can check the end of the indexion catching es-indexed event. 
+ +```javascript +doc.save(function(err){ + if (err) throw err; + /* Document indexation on going */ + doc.on('es-indexed', function(err, res){ + if (err) throw err; + /* Document is indexed */ + }); + }); +>>>>>>> Added testfor geo_shape and updated manual ``` ### Model.plugin(mongoosastic, options) diff --git a/test/geo-test.js b/test/geo-test.js new file mode 100644 index 00000000..c798b6df --- /dev/null +++ b/test/geo-test.js @@ -0,0 +1,180 @@ +var mongoose = require('mongoose') + , elastical = require('elastical') + , esClient = new(require('elastical').Client) + , should = require('should') + , config = require('./config') + , Schema = mongoose.Schema + , ObjectId = Schema.ObjectId + , mongoosastic = require('../lib/mongoosastic'); + + +var GeoSchema; + + +var GeoModel; + +describe('GeoTest', function(){ + before(function(done){ + mongoose.connect(config.mongoUrl, function(){ + config.deleteIndexIfExists(['geodocs'], function(){ + + GeoSchema = new Schema({ + myId: Number, + frame: { + coordinates : [], + type: {type: String}, + geo_shape: { + type:String, + es_type: "geo_shape", + es_tree: "quadtree", + es_precision: "1km" + } + } + }); + + GeoSchema.plugin(mongoosastic); + GeoModel = mongoose.model('geodoc', GeoSchema); + + GeoModel.createMapping(function(err, mapping){ + GeoModel.remove(function(){ + + esClient.getMapping('geodocs', 'geodoc', function(err, mapping){ + mapping.geodoc.properties.frame.type.should.eql('geo_shape'); + done(); + }); + }); + }); + + }); + }); + }); + + it('should be able to create and store geo coordinates', function(done){ + + var geo = new GeoModel({ + myId : 1, + frame:{ + type:'envelope', + coordinates: [[3,4],[1,2]] + } + }); + + geo2 = new GeoModel({ + myId : 2, + frame:{ + type:'envelope', + coordinates: [[2,3],[4,0]] + } + }); + + + var saveAndWait = function (doc,cb) { + doc.save(function(err) { + if (err) cb(err); + else doc.on('es-indexed', cb ); + }); + }; + + saveAndWait(geo,function(err){ + if (err) throw err; + saveAndWait(geo2,function(err){ + if (err) throw err; + // Mongodb request + GeoModel.find({},function(err, res){ + if (err) throw err; + res.length.should.eql(2); + res[0].frame.type.should.eql('envelope'); + res[0].frame.coordinates[0].should.eql([3,4]); + res[0].frame.coordinates[1].should.eql([1,2]); + done(); + })})})}) + + var getDocOrderedQuery = {"query": {"match_all": {}},"sort":{"myId":{"order":"asc"}}}; + + it('should be able to find geo coordinates in the indexes', function(done){ + setTimeout(function(){ + // ES request + GeoModel.search(getDocOrderedQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + done(); + }); + }, 1100); + }); + + it('should be able to resync geo coordinates from the database', function(done){ + config.deleteIndexIfExists(['geodocs'], function(){ + GeoModel.createMapping(function(err, mapping){ + var stream = GeoModel.synchronize() + , count = 0; + + stream.on('data', function(err, doc){ + count++; + }); + + stream.on('close', function(){ + count.should.eql(2); + + setTimeout(function(){ + GeoModel.search(getDocOrderedQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + done(); + }); + }, 1000); + }); + }); + }); + }); + + + + it('should be able to search points inside 
frames', function(done){ + var geoQuery = { + "query": {"match_all": {}}, + "filter": {"geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] + }, + "relation": "intersects" + } + }} + } + + setTimeout(function(){ + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(1); + res.hits.hits[0]._source.myId.should.eql(2); + geoQuery.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(1); + res.hits.hits[0]._source.myId.should.eql(1); + + geoQuery.filter.geo_shape.frame.shape.coordinates = [3,2]; + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(2); + + geoQuery.filter.geo_shape.frame.shape.coordinates = [0,0]; + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(0); + done(); + }); + }); + }); + + }); + }, 1000); + }); + + +}); From d0e2be27d171742688e5e12406a403b6259c38ab Mon Sep 17 00:00:00 2001 From: nlko Date: Thu, 20 Feb 2014 16:00:29 +0100 Subject: [PATCH 015/152] Add ES 1.0 support for geo shape tests --- test/geo-test.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/geo-test.js b/test/geo-test.js index c798b6df..a07d1234 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -39,7 +39,10 @@ describe('GeoTest', function(){ GeoModel.remove(function(){ esClient.getMapping('geodocs', 'geodoc', function(err, mapping){ - mapping.geodoc.properties.frame.type.should.eql('geo_shape'); + (mapping.geodoc != undefined ? + mapping.geodoc: /* ES 0.9.11 */ + mapping.geodocs.mappings.geodoc /* ES 1.0.0 */ + ).properties.frame.type.should.eql('geo_shape'); done(); }); }); From 82fb1b2e51eda7ec0e58b2cd29180f752b8d83dc Mon Sep 17 00:00:00 2001 From: nlko Date: Tue, 25 Feb 2014 10:32:51 +0100 Subject: [PATCH 016/152] Correct enveloppe test Enveloppe corners were in wrong order resulting in a bad test. 
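As background for the corrected test below: Elasticsearch expects envelope coordinates as the upper left corner first, then the lower right corner, i.e. [[minLon, maxLat], [maxLon, minLat]]. A small sketch with the corrected values from the test (this object feeds the frame field of the test's GeoModel):

```javascript
// Envelope corners for the geo_shape "frame" field used in the test:
// upper left first ([minLon, maxLat]), then lower right ([maxLon, minLat]).
var frame = {
  type: 'envelope',
  coordinates: [[1, 4], [3, 2]]  // previously [[3, 4], [1, 2]], corners swapped
};
```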
--- test/geo-test.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/geo-test.js b/test/geo-test.js index a07d1234..f530c98d 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -58,7 +58,7 @@ describe('GeoTest', function(){ myId : 1, frame:{ type:'envelope', - coordinates: [[3,4],[1,2]] + coordinates: [[1,4],[3,2]] } }); @@ -87,8 +87,8 @@ describe('GeoTest', function(){ if (err) throw err; res.length.should.eql(2); res[0].frame.type.should.eql('envelope'); - res[0].frame.coordinates[0].should.eql([3,4]); - res[0].frame.coordinates[1].should.eql([1,2]); + res[0].frame.coordinates[0].should.eql([1,4]); + res[0].frame.coordinates[1].should.eql([3,2]); done(); })})})}) @@ -101,7 +101,7 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); - res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1,4],[3,2]]); done(); }); }, 1100); @@ -125,7 +125,7 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); - res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1,4],[3,2]]); done(); }); }, 1000); @@ -166,7 +166,7 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(2); - geoQuery.filter.geo_shape.frame.shape.coordinates = [0,0]; + geoQuery.filter.geo_shape.frame.shape.coordinates = [0,3]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(0); From cd84784549f18ce02cea9aeffd7c01f36bc3ceff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?H=C3=BCseyin=20BABAL?= Date: Mon, 17 Mar 2014 21:35:02 +0200 Subject: [PATCH 017/152] Get first level of hits field Provided fix for etting first level `hits` field of search results when used hydrate --- lib/mongoosastic.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f0c64751..32f5f6e3 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -152,7 +152,7 @@ module.exports = function elasticSearchPlugin(schema, options){ cb(err); }else{ if (alwaysHydrate || options.hydrate) { - hydrate(results, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); + hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); }else{ cb(null, res); } @@ -251,7 +251,8 @@ function createMappingIfNotPresent(client, indexName, typeName, schema, settings }); } -function hydrate(results, model, options, cb){ +function hydrate(res, model, options, cb){ + var results = res.hits; var resultsMap = {} var ids = results.hits.map(function(a, i){ resultsMap[a._id] = i @@ -276,7 +277,8 @@ function hydrate(results, model, options, cb){ hits[i] = doc }) results.hits = hits; - cb(null, results); + res.hits = results; + cb(null, res); } }); } From a6f6c3e69fe41ea43b1e4bea0040c63a7c1ec772 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 11:43:26 -0600 Subject: [PATCH 018/152] Update hydrated tests to conform to api --- test/index-test.js | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/test/index-test.js b/test/index-test.js index 711bb5f3..f0366e21 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -223,10 +223,10 @@ describe('indexing', function(){ it('when gathering search results while 
respecting default hydrate options', function(done){ Person.search({query:'James'}, function(err, res) { - res.hits[0].address.should.eql('Exampleville, MO'); - res.hits[0].name.should.eql('James Carr'); - res.hits[0].should.not.have.property('phone'); - res.hits[0].should.not.be.an.instanceof(Person); + res.hits.hits[0].address.should.eql('Exampleville, MO'); + res.hits.hits[0].name.should.eql('James Carr'); + res.hits.hits[0].should.not.have.property('phone'); + res.hits.hits[0].should.not.be.an.instanceof(Person); done(); }); }); @@ -258,9 +258,9 @@ describe('indexing', function(){ it('should hydrate returned documents if desired', function(done){ Talk.search({query:'cool'}, {hydrate:true}, function(err, res) { - res.total.should.eql(1) + res.hits.total.should.eql(1) - var talk = res.hits[0] + var talk = res.hits.hits[0] talk.should.have.property('title') talk.should.have.property('year'); talk.should.have.property('abstract') @@ -283,14 +283,14 @@ describe('indexing', function(){ it('should only return indexed fields and have indexed sub-objects', function(done){ Person.search({query:'Bob'}, function(err, res) { - res.hits[0].address.should.eql('Exampleville, MO'); - res.hits[0].name.should.eql('Bob Carr'); - res.hits[0].should.have.property('life'); - res.hits[0].life.born.should.eql(1950); - res.hits[0].life.should.not.have.property('died'); - res.hits[0].life.should.not.have.property('other'); - res.hits[0].should.not.have.property('phone'); - res.hits[0].should.not.be.an.instanceof(Person); + res.hits.hits[0].address.should.eql('Exampleville, MO'); + res.hits.hits[0].name.should.eql('Bob Carr'); + res.hits.hits[0].should.have.property('life'); + res.hits.hits[0].life.born.should.eql(1950); + res.hits.hits[0].life.should.not.have.property('died'); + res.hits.hits[0].life.should.not.have.property('other'); + res.hits.hits[0].should.not.have.property('phone'); + res.hits.hits[0].should.not.be.an.instanceof(Person); done(); }); }); @@ -298,9 +298,9 @@ describe('indexing', function(){ it('should allow extra query options when hydrating', function(done){ Talk.search({query:'cool'}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { - res.total.should.eql(1) + res.hits.total.should.eql(1) - var talk = res.hits[0] + var talk = res.hits.hits[0] talk.should.have.property('title') talk.should.have.property('year'); talk.should.have.property('abstract') From 241dfde0241b51117c4a80ce60091e0e93db2012 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 11:48:29 -0600 Subject: [PATCH 019/152] remove river code --- lib/mongoosastic.js | 36 +----------------------------------- 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f0c64751..8d3fa7c9 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,12 +13,8 @@ module.exports = function elasticSearchPlugin(schema, options){ , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? 
options.port : 9200 , esClient = new elastical.Client(host, options) - , useRiver = options && options.useRiver; - if (useRiver) - setUpRiver(schema); - else - setUpMiddlewareHooks(schema); + setUpMiddlewareHooks(schema); /** * ElasticSearch Client @@ -200,36 +196,6 @@ module.exports = function elasticSearchPlugin(schema, options){ }); } - /* - * Experimental MongoDB River functionality - * NOTICE: Only tested with: - * MongoDB V2.4.1 - * Elasticsearch V0.20.6 - * elasticsearch-river-mongodb V1.6.5 - * - https://github.com/richardwilly98/elasticsearch-river-mongodb/ - */ - function setUpRiver(schema) { - schema.statics.river = function(cb) { - var model = this; - setIndexNameIfUnset(model.modelName); - if (!this.db.name) throw "ERROR: "+ model.modelName +".river() call before mongoose.connect" - esClient.putRiver( - 'mongodb', - indexName, - { - type: 'mongodb', - mongodb: { - db: this.db.name, - collection: indexName, - gridfs: (useRiver && useRiver.gridfs) ? useRiver.gridfs : false - }, - index: { - name: indexName, - type: typeName - } - }, cb ); - } - } }; From 9a79e38b8d9e7ed6fea7e94e63ebb2e2643dd905 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 12:48:10 -0600 Subject: [PATCH 020/152] use containEql instead of include --- test/search-features-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/search-features-test.js b/test/search-features-test.js index 0810cbae..764a2336 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -53,7 +53,7 @@ describe('Query DSL', function(){ }, function(err, res){ res.hits.total.should.eql(2); res.hits.hits.forEach(function(bond){ - ['Legal', 'Construction'].should.include(bond._source.name); + ['Legal', 'Construction'].should.containEql(bond._source.name); }); done(); }); From a5711a0c29a11c7164082f111f46d8950b7a8158 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 16:43:41 -0600 Subject: [PATCH 021/152] big api changes, big version bump --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 06a36277..6e8d8500 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "0.6.1", + "version": "1.0.0", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", From b5e5cc0c5467d50868b060697473a13d7438171d Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 17:40:46 -0600 Subject: [PATCH 022/152] Add documentation about bulk api --- package.json | 2 +- readme.md | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 6e8d8500..a1c706f7 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "1.0.0", + "version": "1.0.1", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", diff --git a/readme.md b/readme.md index 2a19f3fa..daca1ad3 100644 --- a/readme.md +++ b/readme.md @@ -130,6 +130,21 @@ var stream = Book.synchronize({author: 'Arthur C. Clarke'}) One caveat... synchronization is kinda slow for now. Use with care. 
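For reference, a minimal sketch of consuming the stream returned by synchronize. The event names ('data', 'close' and 'error') are the ones emitted by the plugin, and the Book model is the one from the example above.

```javascript
var stream = Book.synchronize({author: 'Arthur C. Clarke'})
  , count = 0;

stream.on('data', function(err, doc){
  count++;  // fired once per document that has been re-indexed
});
stream.on('close', function(){
  console.log('indexed ' + count + ' documents');
});
stream.on('error', function(err){
  console.error(err);
});
```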
+### Bulk Indexing + +You can also specify `bulk` options with mongoose which will utilize elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. + +Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. + +```javascript +BookSchema.plugin(mongoosastic, { + bulk: { + size: 10, // preferred number of docs to bulk index + delay: 100 //milliseconds to wait for enough docs to meet size constraint + } +}); +``` + ### Per Field Options Schemas can be configured to have special options per field. These match with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by elasticsearch with the only difference being they are all prefixed by "es_". From 178df7b74a19842054f658f334be40aee3686a81 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 17:54:34 -0600 Subject: [PATCH 023/152] Document geo_shape --- package.json | 2 +- readme.md | 27 +++++++++++++-------------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/package.json b/package.json index a1c706f7..994c90cd 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "1.0.1", + "version": "1.0.2", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", diff --git a/readme.md b/readme.md index daca1ad3..40a54ae1 100644 --- a/readme.md +++ b/readme.md @@ -284,14 +284,14 @@ var SubSchema = new Schema({ }); ``` -## Geo mapping +### Geo mapping Prior to index any geo mapped data (or calling the synchronize), the mapping must be manualy created with the createMapping (see above). Notice that the name of the field containing the ES geo data must start by 'geo_' to be recognize as such. -# Indexing a geo point +#### Indexing a geo point ```javascript var geo = new GeoModel({ @@ -301,7 +301,7 @@ Notice that the name of the field containing the ES geo data must start by }); ``` -# Indexing a geo shape +#### Indexing a geo shape ```javascript var geo = new GeoModel({ @@ -434,16 +434,6 @@ The index method takes 3 arguments: Note that indexing a model does not mean it will be persisted to mongodb. Use save for that. -<<<<<<< HEAD -### Truncating an index - -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. - -#### Usage - -```javascript -GarbageModel.truncate(function(err){...}); -======= ### Saving a document The indexing takes place after saving inside the mongodb and is a defered process. One can check the end of the indexion catching es-indexed event. @@ -457,7 +447,16 @@ doc.save(function(err){ /* Document is indexed */ }); }); ->>>>>>> Added testfor geo_shape and updated manual +``` + +### Truncating an index + +The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. 
+ +#### Usage + +```javascript +GarbageModel.truncate(function(err){...}); ``` ### Model.plugin(mongoosastic, options) From b48f7a4a6b3fee603a94097f6205a6df074f64ef Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 13:35:28 -0600 Subject: [PATCH 024/152] remove semicolons from mongoosastic.js --- lib/mongoosastic.js | 235 +++++++++++++++++++++++--------------------- package.json | 26 +++-- 2 files changed, 137 insertions(+), 124 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 013cc461..41157651 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,9 +1,10 @@ -var elastical = require('elastical') - , generator = new(require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events'); +var elasticsearch = require('elasticsearch') + , elastical = require('elastical') + , generator = new(require('./mapping-generator')) + , serialize = require('./serialize') + , events = require('events') -module.exports = function elasticSearchPlugin(schema, options){ +module.exports = function Mongoosastic(schema, options){ var mapping = getMapping(schema) , indexName = options && options.index , typeName = options && options.type @@ -12,15 +13,15 @@ module.exports = function elasticSearchPlugin(schema, options){ , _mapping = null , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 - , esClient = new elastical.Client(host, options) - , bulk = options && options.bulk; + , esClient = new elasticsearch.Client({host: {host: host, port: port}}) + , bulk = options && options.bulk - setUpMiddlewareHooks(schema); + setUpMiddlewareHooks(schema) /** * ElasticSearch Client */ - schema.statics.esClient = esClient; + schema.statics.esClient = esClient /** * Create the mapping. 
Takes an optionnal settings parameter and a callback that will be called once @@ -30,13 +31,16 @@ module.exports = function elasticSearchPlugin(schema, options){ * @param callback Function */ schema.statics.createMapping = function(settings, cb) { - if (!cb) { - cb = settings; - settings = undefined; + if (arguments.length === 0) { + throw new Error("Must provide a callback to createMapping function") + } else if(arguments.length === 1) { + cb = arguments[0] + settings = undefined } - setIndexNameIfUnset(this.modelName); - createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb); - }; + + setIndexNameIfUnset(this.modelName) + createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb) + } /** * @param indexName String (optional) @@ -45,32 +49,35 @@ module.exports = function elasticSearchPlugin(schema, options){ */ schema.methods.index = function(index, type, cb){ if(cb == null && typeof index == 'function'){ - cb = index; - index = null; + cb = index + index = null }else if (cb == null && typeof type == 'function'){ - cb = type; + cb = type type = null } - var model = this; - setIndexNameIfUnset(model.constructor.modelName); + var model = this + setIndexNameIfUnset(model.constructor.modelName) if(bulk) { - bulkIndex(index || indexName, type || typeName, this); - cb(); + bulkIndex(index || indexName, type || typeName, this) + cb() } else { - esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); + //esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) + esClient.index({ + index: + }, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) } - }; + } /** * Unset elastic search index */ schema.methods.unIndex = function(){ - var model = this; - setIndexNameIfUnset(model.constructor.modelName); + var model = this + setIndexNameIfUnset(model.constructor.modelName) if(bulk) { - bulkDelete(index || indexName, type || typeName, this); + bulkDelete(index || indexName, type || typeName, this) } else { - deleteByMongoId(esClient, model, indexName, typeName, 3); + deleteByMongoId(esClient, model, indexName, typeName, 3) } } @@ -86,8 +93,8 @@ module.exports = function elasticSearchPlugin(schema, options){ } } }, function(err, res) { - cb(err); - }); + cb(err) + }) } /** @@ -102,41 +109,41 @@ module.exports = function elasticSearchPlugin(schema, options){ , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - ; + - setIndexNameIfUnset(model.modelName); - var stream = model.find(query).stream(); + setIndexNameIfUnset(model.modelName) + var stream = model.find(query).stream() stream.on('data', function(doc){ - counter++; + counter++ doc.save(function(err){ if (err) { - em.emit('error', err); - return; + em.emit('error', err) + return } doc.on('es-indexed', function(err, doc){ - counter--; + counter-- if(err){ - em.emit('error', err); + em.emit('error', err) }else{ - em.emit('data', null, doc); + em.emit('data', null, doc) } if (readyToClose && counter === 0) close() - }); - }); - }); + }) + }) + }) stream.on('close', function(a, b){ - readyToClose = true; - closeValues = [a, b]; + readyToClose = true + closeValues = [a, b] if (counter === 0) close() - }); + }) stream.on('error', function(err){ - em.emit('error', err); - }); - return em; - }; + em.emit('error', err) + }) + return em + } /** * ElasticSearch search function * @@ -145,70 +152,70 @@ module.exports = function 
elasticSearchPlugin(schema, options){ * @param callback - callback called with search results */ schema.statics.search = function(query, options, cb){ - var model = this; - setIndexNameIfUnset(model.modelName); + var model = this + setIndexNameIfUnset(model.modelName) if(typeof options != 'object'){ - cb = options; - options = {}; + cb = options + options = {} } - query.index = indexName; + query.index = indexName esClient.search(query, function(err, results, res){ if(err){ - cb(err); + cb(err) }else{ if (alwaysHydrate || options.hydrate) { - hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); + hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) }else{ - cb(null, res); + cb(null, res) } } - }); - }; + }) + } - var bulkBuffer = []; + var bulkBuffer = [] function bulkDelete(indexName, typeName, model) { - bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}); + bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}) } function bulkIndex(indexName, typeName, model) { - bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}); + bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}) } - var bulkTimeout; + var bulkTimeout function bulkAdd(instruction) { - bulkBuffer.push(instruction); - clearTimeout(bulkTimeout); + bulkBuffer.push(instruction) + clearTimeout(bulkTimeout) if(bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush(); + schema.statics.flush() } else { bulkTimeout = setTimeout(function(){ - schema.statics.flush(); - }, bulk.delay || 1000); + schema.statics.flush() + }, bulk.delay || 1000) } } schema.statics.flush = function(){ - esClient.bulk(bulkBuffer); - bulkBuffer = []; - }; + esClient.bulk(bulkBuffer) + bulkBuffer = [] + } schema.statics.refresh = function(cb){ - var model = this; - setIndexNameIfUnset(model.modelName); + var model = this + setIndexNameIfUnset(model.modelName) - esClient.refresh(indexName, cb); - }; + esClient.refresh(indexName, cb) + } function setIndexNameIfUnset(model){ - var modelName = model.toLowerCase(); + var modelName = model.toLowerCase() if(!indexName){ - indexName = modelName + "s"; + indexName = modelName + "s" } if(!typeName){ - typeName = modelName; + typeName = modelName } } @@ -219,84 +226,84 @@ module.exports = function elasticSearchPlugin(schema, options){ */ function setUpMiddlewareHooks(schema) { schema.post('remove', function(){ - var model = this; - setIndexNameIfUnset(model.constructor.modelName); + var model = this + setIndexNameIfUnset(model.constructor.modelName) if(bulk) { - bulkDelete(indexName, typeName, this); + bulkDelete(indexName, typeName, this) } else { - deleteByMongoId(esClient, model, indexName, typeName, 3); + deleteByMongoId(esClient, model, indexName, typeName, 3) } - }); + }) /** * Save in elastic search on save. 
*/ schema.post('save', function(){ - var model = this; + var model = this model.index(function(err, res){ - model.emit('es-indexed', err, res); - }); - }); + model.emit('es-indexed', err, res) + }) + }) } -}; +} function createMappingIfNotPresent(client, indexName, typeName, schema, settings, cb) { generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {}; - completeMapping[typeName] = mapping; + var completeMapping = {} + completeMapping[typeName] = mapping client.indexExists(indexName, function(err, exists) { if (exists) { - client.putMapping(indexName, typeName, completeMapping, cb); + client.putMapping(indexName, typeName, completeMapping, cb) } else { client.createIndex(indexName, { settings: settings, mappings: completeMapping - }, cb); + }, cb) } - }); - }); + }) + }) } function hydrate(res, model, options, cb){ - var results = res.hits; + var results = res.hits var resultsMap = {} var ids = results.hits.map(function(a, i){ resultsMap[a._id] = i - return a._id; - }); - var query = model.find({_id:{$in:ids}}); + return a._id + }) + var query = model.find({_id:{$in:ids}}) // Build Mongoose query based on hydrate options // Example: {lean: true, sort: '-name', select: 'address name'} Object.keys(options).forEach(function(option){ - query[option](options[option]); - }); + query[option](options[option]) + }) query.exec(function(err, docs){ if(err){ - return cb(err); + return cb(err) }else{ - var hits = []; + var hits = [] docs.forEach(function(doc) { var i = resultsMap[doc._id] hits[i] = doc }) - results.hits = hits; - res.hits = results; - cb(null, res); + results.hits = hits + res.hits = results + cb(null, res) } - }); + }) } function getMapping(schema){ - var retMapping = {}; + var retMapping = {} generator.generateMapping(schema, function(err, mapping){ - retMapping = mapping; - }); - return retMapping; + retMapping = mapping + }) + return retMapping } function deleteByMongoId(client, model,indexName, typeName, tries){ client.delete(indexName, typeName, model._id.toString(), function(err, res){ @@ -305,11 +312,11 @@ function deleteByMongoId(client, model,indexName, typeName, tries){ if(tries <= 0){ // future issue.. what do we do!? 
}else{ - deleteByMongoId(client, model, indexName, typeName, --tries); + deleteByMongoId(client, model, indexName, typeName, --tries) } - }, 500); + }, 500) }else{ - model.emit('es-removed', err, res); + model.emit('es-removed', err, res) } - }); + }) } diff --git a/package.json b/package.json index 994c90cd..479113fb 100644 --- a/package.json +++ b/package.json @@ -3,28 +3,34 @@ "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", "version": "1.0.2", - "tags":["mongodb", "elastic search", "mongoose", "full text search"], + "tags": [ + "mongodb", + "elastic search", + "mongoose", + "full text search" + ], "repository": { "type": "git", "url": "git://github.com/mongoosastic/mongoosastic" }, - "main":"lib/mongoosastic.js", + "main": "lib/mongoosastic.js", "dependencies": { - "elastical":"0.0.12" + "elastical": "0.0.13", + "elasticsearch": "^2.4.3" }, "peerDependencies": { - "mongoose":"3.8.x" + "mongoose": "3.8.x" }, "devDependencies": { - "mocha":"*" - , "should":"*" - , "async":"*" - , "mongoose":"3.8.x" + "mocha": "*", + "should": "*", + "async": "*", + "mongoose": "3.8.x" }, "engines": { "node": ">= 0.8.0" }, - "scripts":{ - "test":"mocha -R spec -t 20000 -b" + "scripts": { + "test": "mocha -R spec -t 20000 -b" } } From 080f7f6fb95709bce44d8a971ca95d0525f4eea7 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 15:56:06 -0600 Subject: [PATCH 025/152] first pass at integrating elasticsearch driver --- lib/mongoosastic.js | 290 +++++++++++++++++--------- package.json | 3 +- test/alternative-index-method-test.js | 8 +- test/bulk-test.js | 4 +- 4 files changed, 194 insertions(+), 111 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 41157651..05b159e8 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -3,6 +3,7 @@ var elasticsearch = require('elasticsearch') , generator = new(require('./mapping-generator')) , serialize = require('./serialize') , events = require('events') + , nop = require('nop') module.exports = function Mongoosastic(schema, options){ var mapping = getMapping(schema) @@ -15,6 +16,8 @@ module.exports = function Mongoosastic(schema, options){ , port = options && options.port ? options.port : 9200 , esClient = new elasticsearch.Client({host: {host: host, port: port}}) , bulk = options && options.bulk + , bulkBuffer = [] + , bulkTimeout setUpMiddlewareHooks(schema) @@ -27,100 +30,130 @@ module.exports = function Mongoosastic(schema, options){ * Create the mapping. 
Takes an optionnal settings parameter and a callback that will be called once * the mapping is created - * @param settings String (optional) + * @param settings Object (optional) * @param callback Function */ schema.statics.createMapping = function(settings, cb) { - if (arguments.length === 0) { - throw new Error("Must provide a callback to createMapping function") - } else if(arguments.length === 1) { - cb = arguments[0] + if(arguments.length < 2) { + cb = arguments[0] || nop settings = undefined } setIndexNameIfUnset(this.modelName) - createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb) + + createMappingIfNotPresent({ + client: esClient, + indexName: indexName, + typeName: typeName, + schema: schema, + settings: settings + }, cb) } /** - * @param indexName String (optional) - * @param typeName String (optional) + * @param options Object (optional) * @param callback Function */ - schema.methods.index = function(index, type, cb){ - if(cb == null && typeof index == 'function'){ - cb = index - index = null - }else if (cb == null && typeof type == 'function'){ - cb = type - type = null + schema.methods.index = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} } - var model = this - setIndexNameIfUnset(model.constructor.modelName) + + setIndexNameIfUnset(this.constructor.modelName) + + var index = options.index || indexName + , type = options.type || typeName + if(bulk) { - bulkIndex(index || indexName, type || typeName, this) + bulkIndex(index, type, this) cb() } else { - //esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) esClient.index({ - index: - }, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) + index: index, + type: type, + id: this._id.toString(), + body: serialize(this, mapping) + }, cb) } } /** * Unset elastic search index + * @param options - (optional) options for unIndex + * @param callback - callback when unIndex is complete */ - schema.methods.unIndex = function(){ - var model = this - setIndexNameIfUnset(model.constructor.modelName) - if(bulk) { - bulkDelete(index || indexName, type || typeName, this) - } else { - deleteByMongoId(esClient, model, indexName, typeName, 3) + schema.methods.unIndex = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} } + + setIndexNameIfUnset(this.constructor.modelName) + + options.index = options.index || indexName + options.type = options.type || typeName + options.model = this + options.client = esClient + options.tries = 3 + + if(bulk) + bulkDelete(options, cb) + else + deleteByMongoId(options, cb) } /** * Delete all documents from a type/index + * @param options - (optional) specify index/type * @param callback - callback when truncation is complete */ - schema.statics.esTruncate = function(cb) { - esClient.delete(indexName, typeName, '', { - query: { + schema.statics.esTruncate = function(options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + var index = options.index || indexName + , type = options.type || typeName + + esClient.deleteByQuery({ + index: index, + type: type, + body: { query: { - "match_all": {} + query: { + "match_all": {} + } } } - }, function(err, res) { - cb(err) - }) + }, cb) } /** * Synchronize an existing collection * - * @param callback - callback when synchronization is complete + * @param query - query for documents you want to synchronize */ schema.statics.synchronize = 
function(query){ - var model = this - , em = new events.EventEmitter() + var em = new events.EventEmitter() , readyToClose , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} + query = query || {} - setIndexNameIfUnset(model.modelName) - var stream = model.find(query).stream() + setIndexNameIfUnset(this.modelName) + + var stream = this.find(query).stream() stream.on('data', function(doc){ counter++ doc.save(function(err){ - if (err) { - em.emit('error', err) - return - } + if (err) + return em.emit('error', err) + doc.on('es-indexed', function(err, doc){ counter-- if(err){ @@ -133,15 +166,18 @@ module.exports = function Mongoosastic(schema, options){ }) }) }) + stream.on('close', function(a, b){ readyToClose = true closeValues = [a, b] if (counter === 0) close() }) + stream.on('error', function(err){ em.emit('error', err) }) + return em } /** @@ -152,30 +188,31 @@ module.exports = function Mongoosastic(schema, options){ * @param callback - callback called with search results */ schema.statics.search = function(query, options, cb){ + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + var model = this + setIndexNameIfUnset(model.modelName) - if(typeof options != 'object'){ - cb = options - options = {} - } - query.index = indexName - esClient.search(query, function(err, results, res){ + query.index = options.index || indexName + query.type = options.type || typeName + + esClient.search(query, function(err, res){ if(err){ cb(err) - }else{ - if (alwaysHydrate || options.hydrate) { + } else { + if (alwaysHydrate || options.hydrate) hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) - }else{ + else cb(null, res) - } } }) } - var bulkBuffer = [] - - function bulkDelete(indexName, typeName, model) { + function bulkDelete(indexName, typeName, model, cb) { bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}) } @@ -183,11 +220,10 @@ module.exports = function Mongoosastic(schema, options){ bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}) } - var bulkTimeout - function bulkAdd(instruction) { bulkBuffer.push(instruction) clearTimeout(bulkTimeout) + if(bulkBuffer.length >= (bulk.size || 1000)) { schema.statics.flush() } else { @@ -197,16 +233,27 @@ module.exports = function Mongoosastic(schema, options){ } } - schema.statics.flush = function(){ - esClient.bulk(bulkBuffer) + schema.statics.flush = function(cb){ + cb = cb || function(err) { if (err) console.log(err) } + + esClient.bulk({ + body: bulkBuffer + }, function(err) { + cb(err) + }) bulkBuffer = [] } - schema.statics.refresh = function(cb){ - var model = this - setIndexNameIfUnset(model.modelName) + schema.statics.refresh = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } - esClient.refresh(indexName, cb) + setIndexNameIfUnset(this.modelName) + esClient.indices.refresh({ + index: options.index || indexName + }, cb) } function setIndexNameIfUnset(model){ @@ -225,43 +272,68 @@ module.exports = function Mongoosastic(schema, options){ * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function(){ - var model = this - setIndexNameIfUnset(model.constructor.modelName) + schema.post('remove', function(done){ + setIndexNameIfUnset(this.constructor.modelName) + + var options = { + index: indexName, + type: typeName, + tries: 3, + model: this, + client: esClient + } + if(bulk) { - 
bulkDelete(indexName, typeName, this) + bulkDelete(options, done) } else { - deleteByMongoId(esClient, model, indexName, typeName, 3) + deleteByMongoId(options, done) } }) /** * Save in elastic search on save. */ - schema.post('save', function(){ - var model = this - model.index(function(err, res){ - model.emit('es-indexed', err, res) + schema.post('save', function(done){ + this.index(function(err, res){ + this.emit('es-indexed', err, res) + done(err) }) }) } } +function createMappingIfNotPresent(options, cb) { + var client = options.client + , indexName = options.indexName + , typeName = options.typeName + , schema = options.schema + , settings = options.settings - -function createMappingIfNotPresent(client, indexName, typeName, schema, settings, cb) { generator.generateMapping(schema, function(err, mapping) { var completeMapping = {} completeMapping[typeName] = mapping - client.indexExists(indexName, function(err, exists) { + client.indices.exists({index: indexName}, function(err, exists) { + if (err) + return cb(err) + if (exists) { - client.putMapping(indexName, typeName, completeMapping, cb) - } else { - client.createIndex(indexName, { - settings: settings, - mappings: completeMapping + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping }, cb) + } else { + client.indices.create({index: indexName}, function(err) { + if (err) + return cb(err) + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + }) } }) }) @@ -269,12 +341,12 @@ function createMappingIfNotPresent(client, indexName, typeName, schema, settings function hydrate(res, model, options, cb){ var results = res.hits - var resultsMap = {} - var ids = results.hits.map(function(a, i){ - resultsMap[a._id] = i - return a._id - }) - var query = model.find({_id:{$in:ids}}) + , resultsMap = {} + , ids = results.hits.map(function(a, i){ + resultsMap[a._id] = i + return a._id + }) + , query = model.find({_id:{$in:ids}}) // Build Mongoose query based on hydrate options // Example: {lean: true, sort: '-name', select: 'address name'} @@ -283,9 +355,9 @@ function hydrate(res, model, options, cb){ }) query.exec(function(err, docs){ - if(err){ + if(err) { return cb(err) - }else{ + } else { var hits = [] docs.forEach(function(doc) { @@ -298,6 +370,7 @@ function hydrate(res, model, options, cb){ } }) } + function getMapping(schema){ var retMapping = {} generator.generateMapping(schema, function(err, mapping){ @@ -305,18 +378,27 @@ function getMapping(schema){ }) return retMapping } -function deleteByMongoId(client, model,indexName, typeName, tries){ - client.delete(indexName, typeName, model._id.toString(), function(err, res){ - if(err && err.message.indexOf('404') > -1){ - setTimeout(function(){ - if(tries <= 0){ - // future issue.. what do we do!? 
- }else{ - deleteByMongoId(client, model, indexName, typeName, --tries) - } - }, 500) - }else{ - model.emit('es-removed', err, res) - } - }) + +function deleteByMongoId(options, cb){ + var index = options.index + , type = options.type + , client = options.esClient + , model = options.model + , tries = options.tries + + client.delete(index, type, model._id.toString(), function(err, res){ + if(err && err.message.indexOf('404') > -1){ + setTimeout(function(){ + if(tries <= 0) { + return cb(err) + } else { + options.tries = --tries + deleteByMongoId(options, cb) + } + }, 500) + }else{ + model.emit('es-removed', err, res) + cb(err) + } + }) } diff --git a/package.json b/package.json index 479113fb..688ebc46 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,8 @@ "main": "lib/mongoosastic.js", "dependencies": { "elastical": "0.0.13", - "elasticsearch": "^2.4.3" + "elasticsearch": "^2.4.3", + "nop": "^1.0.0" }, "peerDependencies": { "mongoose": "3.8.x" diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 1d43eb01..34e2b8c5 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -43,10 +43,10 @@ describe('Index Method', function(){ it('should be able to index to alternative index', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know taebo!'; - doc.index('public_tweets', function(){ + doc.index({index: 'public_tweets'}, function(){ setTimeout(function(){ - esClient.search({index: 'public_tweets', query:'know'}, function(err, results, res){ - res.hits.hits[0]._source.message.should.eql('I know taebo!'); + esClient.search({index: 'public_tweets', query:'know'}, function(err, res){ + res.hits[0]._source.message.should.eql('I know taebo!'); done(); }); }, config.indexingTimeout); @@ -56,7 +56,7 @@ describe('Index Method', function(){ it('should be able to index to alternative index and type', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know taebo!'; - doc.index('public_tweets', 'utterings', function(){ + doc.index({index: 'public_tweets', type: 'utterings'}, function(){ setTimeout(function(){ esClient.search({index: 'public_tweets', type: 'utterings', query:'know'}, function(err, results, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); diff --git a/test/bulk-test.js b/test/bulk-test.js index 658f3883..9e06a098 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -20,7 +20,7 @@ BookSchema.plugin(mongoosastic, { var Book = mongoose.model('Book2', BookSchema); -describe('Bulk mode', function() { +describe.only('Bulk mode', function() { var books = null; before(function(done) { @@ -70,4 +70,4 @@ function bookTitles() { books.push('ABABABA' + i); } return books; -} \ No newline at end of file +} From 7af9fffefdd397ce853990d2a323b2dc35514dc7 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 17:10:01 -0600 Subject: [PATCH 026/152] Close to fixing geo test --- lib/mongoosastic.js | 42 ++++++++++++++++++++++++++++++------------ test/bulk-test.js | 2 +- test/geo-test.js | 25 ++++++++++++++----------- 3 files changed, 45 insertions(+), 24 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 05b159e8..207da221 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -66,7 +66,11 @@ module.exports = function Mongoosastic(schema, options){ , type = options.type || typeName if(bulk) { - bulkIndex(index, type, this) + bulkIndex({ + index: index, + type: type, + 
model: this + }) cb() } else { esClient.index({ @@ -212,12 +216,25 @@ module.exports = function Mongoosastic(schema, options){ }) } - function bulkDelete(indexName, typeName, model, cb) { - bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}) + function bulkDelete(options) { + bulkAdd({ + delete: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) } - function bulkIndex(indexName, typeName, model) { - bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}) + function bulkIndex(options) { + bulkAdd({ + index: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + bulkAdd({doc: options.model}) } function bulkAdd(instruction) { @@ -272,7 +289,7 @@ module.exports = function Mongoosastic(schema, options){ * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function(done){ + schema.post('remove', function(){ setIndexNameIfUnset(this.constructor.modelName) var options = { @@ -284,19 +301,20 @@ module.exports = function Mongoosastic(schema, options){ } if(bulk) { - bulkDelete(options, done) + bulkDelete(options, nop) } else { - deleteByMongoId(options, done) + deleteByMongoId(options, nop) } }) /** * Save in elastic search on save. */ - schema.post('save', function(done){ - this.index(function(err, res){ - this.emit('es-indexed', err, res) - done(err) + schema.post('save', function(){ + var model = this + + model.index(function(err, res){ + model.emit('es-indexed', err, res) }) }) } diff --git a/test/bulk-test.js b/test/bulk-test.js index 9e06a098..88d36d79 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -20,7 +20,7 @@ BookSchema.plugin(mongoosastic, { var Book = mongoose.model('Book2', BookSchema); -describe.only('Bulk mode', function() { +describe('Bulk mode', function() { var books = null; before(function(done) { diff --git a/test/geo-test.js b/test/geo-test.js index f530c98d..052bc71d 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -13,7 +13,7 @@ var GeoSchema; var GeoModel; -describe('GeoTest', function(){ +describe.only('GeoTest', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ config.deleteIndexIfExists(['geodocs'], function(){ @@ -92,7 +92,7 @@ describe('GeoTest', function(){ done(); })})})}) - var getDocOrderedQuery = {"query": {"match_all": {}},"sort":{"myId":{"order":"asc"}}}; + var getDocOrderedQuery = {"query": {"match_all": {}},"sort":"myId:asc"}; it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ @@ -138,16 +138,19 @@ describe('GeoTest', function(){ it('should be able to search points inside frames', function(done){ var geoQuery = { - "query": {"match_all": {}}, - "filter": {"geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] - }, - "relation": "intersects" + filtered: { + "query": {"match_all": {}}, + "filter": { + "geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] + } + } + } } - }} + } } setTimeout(function(){ From c78cf0145b0d886cefd46d2c40cf976d45e2d55e Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 18:00:51 -0600 Subject: [PATCH 027/152] had to scale back abstraction on search --- lib/mongoosastic.js | 4 ++-- test/geo-test.js | 41 ++++++++++++++++++++++++----------------- 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/lib/mongoosastic.js 
b/lib/mongoosastic.js index 207da221..c5e6a2d1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -201,8 +201,8 @@ module.exports = function Mongoosastic(schema, options){ setIndexNameIfUnset(model.modelName) - query.index = options.index || indexName - query.type = options.type || typeName + query.index = options.index || query.index || indexName + query.type = options.type || query.type || typeName esClient.search(query, function(err, res){ if(err){ diff --git a/test/geo-test.js b/test/geo-test.js index 052bc71d..f0b1dbc8 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -13,7 +13,7 @@ var GeoSchema; var GeoModel; -describe.only('GeoTest', function(){ +describe('GeoTest', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ config.deleteIndexIfExists(['geodocs'], function(){ @@ -92,7 +92,12 @@ describe.only('GeoTest', function(){ done(); })})})}) - var getDocOrderedQuery = {"query": {"match_all": {}},"sort":"myId:asc"}; + var getDocOrderedQuery = { + "query": { + "match_all": {} + }, + "sort":"myId:asc" + }; it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ @@ -138,14 +143,16 @@ describe.only('GeoTest', function(){ it('should be able to search points inside frames', function(done){ var geoQuery = { - filtered: { - "query": {"match_all": {}}, - "filter": { - "geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] + query: { + filtered: { + "query": {"match_all": {}}, + "filter": { + "geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] + } } } } @@ -154,23 +161,23 @@ describe.only('GeoTest', function(){ } setTimeout(function(){ - GeoModel.search(geoQuery,function(err, res){ + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(2); - geoQuery.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; - GeoModel.search(geoQuery,function(err, res){ + geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(1); - geoQuery.filter.geo_shape.frame.shape.coordinates = [3,2]; - GeoModel.search(geoQuery,function(err, res){ + geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(2); - geoQuery.filter.geo_shape.frame.shape.coordinates = [0,3]; - GeoModel.search(geoQuery,function(err, res){ + geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(0); done(); From 6af3339c5c8a4e49440dc563df3c3ca4f3436f74 Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 29 Oct 2014 12:26:52 -0600 Subject: [PATCH 028/152] All tests passing --- lib/mongoosastic.js | 27 ++++++++++----- test/alternative-index-method-test.js | 23 ++++++------ test/geo-test.js | 27 ++++++++------- test/index-test.js | 50 ++++++++++++++++++++------- test/synchronize-test.js | 2 +- test/truncate-test.js | 8 +++-- 6 files changed, 89 insertions(+), 48 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index c5e6a2d1..f43c2eb8 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -126,9 +126,7 @@ module.exports = function Mongoosastic(schema, options){ type: type, body: { query: { - 
query: { - "match_all": {} - } + match_all: {} } } }, cb) @@ -198,13 +196,20 @@ module.exports = function Mongoosastic(schema, options){ } var model = this + , esQuery = { + body: query, + index: options.index || indexName, + type: options.type || typeName + } - setIndexNameIfUnset(model.modelName) + Object.keys(options).forEach(function(opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) - query.index = options.index || query.index || indexName - query.type = options.type || query.type || typeName + setIndexNameIfUnset(model.modelName) - esClient.search(query, function(err, res){ + esClient.search(esQuery, function(err, res){ if(err){ cb(err) } else { @@ -400,11 +405,15 @@ function getMapping(schema){ function deleteByMongoId(options, cb){ var index = options.index , type = options.type - , client = options.esClient + , client = options.client , model = options.model , tries = options.tries - client.delete(index, type, model._id.toString(), function(err, res){ + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function(err, res){ if(err && err.message.indexOf('404') > -1){ setTimeout(function(){ if(tries <= 0) { diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 34e2b8c5..bea6a820 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -1,10 +1,8 @@ var mongoose = require('mongoose') - , elastical = require('elastical') , should = require('should') , config = require('./config') , Schema = mongoose.Schema , ObjectId = Schema.ObjectId - , esClient = new(require('elastical').Client) , mongoosastic = require('../lib/mongoosastic') , Tweet = require('./models/tweet'); @@ -12,11 +10,13 @@ describe('Index Method', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ config.deleteIndexIfExists(['tweets', 'public_tweets'], function(){ - config.createModelAndEnsureIndex(Tweet, { - user: 'jamescarr' - , message: "I know kung-fu!" - , post_date: new Date() - }, done); + Tweet.remove(function() { + config.createModelAndEnsureIndex(Tweet, { + user: 'jamescarr' + , message: "I know kung-fu!" 
+ , post_date: new Date() + }, done); + }) }); }); }); @@ -27,12 +27,13 @@ describe('Index Method', function(){ done(); }); }); + it('should be able to index it directly without saving', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know nodejitsu!'; doc.index(function(){ setTimeout(function(){ - Tweet.search({query:'know'}, function(err, res){ + Tweet.search({query: {query_string: {query: 'know'}}}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); @@ -45,8 +46,8 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets'}, function(){ setTimeout(function(){ - esClient.search({index: 'public_tweets', query:'know'}, function(err, res){ - res.hits[0]._source.message.should.eql('I know taebo!'); + Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets'}, function(err, res){ + res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); }, config.indexingTimeout); @@ -58,7 +59,7 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets', type: 'utterings'}, function(){ setTimeout(function(){ - esClient.search({index: 'public_tweets', type: 'utterings', query:'know'}, function(err, results, res){ + Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); diff --git a/test/geo-test.js b/test/geo-test.js index f0b1dbc8..5706cc4b 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -91,18 +91,15 @@ describe('GeoTest', function(){ res[0].frame.coordinates[1].should.eql([3,2]); done(); })})})}) - - var getDocOrderedQuery = { - "query": { - "match_all": {} - }, - "sort":"myId:asc" - }; it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ // ES request - GeoModel.search(getDocOrderedQuery,function(err, res){ + GeoModel.search({ + query: { + match_all: {} + } + }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); @@ -126,7 +123,11 @@ describe('GeoTest', function(){ count.should.eql(2); setTimeout(function(){ - GeoModel.search(getDocOrderedQuery,function(err, res){ + GeoModel.search({ + query: { + match_all: {} + } + }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); @@ -161,23 +162,23 @@ describe('GeoTest', function(){ } setTimeout(function(){ - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(2); geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(1); geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(2); geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ 
if (err) throw err; res.hits.total.should.eql(0); done(); diff --git a/test/index-test.js b/test/index-test.js index f0366e21..dd898fde 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -107,19 +107,33 @@ describe('indexing', function(){ }); it('should be able to execute a simple query', function(done){ - Tweet.search({query:'Riak'}, function(err, results) { + Tweet.search({ + query: { + query_string: { + query: 'Riak' + } + } + }, function(err, results) { results.hits.total.should.eql(1) results.hits.hits[0]._source.message.should.eql('I like Riak better') done(); }); }); + it('should be able to execute a simple query', function(done){ - Tweet.search({query:'jamescarr'}, function(err, results) { + Tweet.search({ + query: { + query_string: { + query: 'jamescarr' + } + } + }, function(err, results) { results.hits.total.should.eql(1) results.hits.hits[0]._source.message.should.eql('I like Riak better') done() }); }); + it('should report errors', function(done){ Tweet.search({queriez:'jamescarr'}, function(err, results) { err.message.should.match(/SearchPhaseExecutionException/); @@ -140,7 +154,13 @@ describe('indexing', function(){ it('should remove from index when model is removed', function(done){ tweet.remove(function(){ setTimeout(function(){ - Tweet.search({query:'shouldnt'}, function(err, res){ + Tweet.search({ + query: { + query_string: { + query: 'shouldnt' + } + } + }, function(err, res){ res.hits.total.should.eql(0); done(); }); @@ -150,7 +170,13 @@ describe('indexing', function(){ it('should remove only index', function(done){ tweet.on('es-removed', function(err, res){ setTimeout(function(){ - Tweet.search({query:'shouldnt'}, function(err, res){ + Tweet.search({ + query: { + query_string: { + query: 'shouldnt' + } + } + }, function(err, res){ res.hits.total.should.eql(0); done(); }); @@ -197,14 +223,14 @@ describe('indexing', function(){ }); it('should only find models of type Tweet', function(done){ - Tweet.search({query:'Dude'}, function(err, res){ + Tweet.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.user.should.eql('Dude'); done(); }); }); it('should only find models of type Talk', function(done){ - Talk.search({query:'Dude'}, function(err, res){ + Talk.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.title.should.eql('Dude'); done(); @@ -222,7 +248,7 @@ describe('indexing', function(){ }); it('when gathering search results while respecting default hydrate options', function(done){ - Person.search({query:'James'}, function(err, res) { + Person.search({query: {query_string: {query: 'James'}}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('James Carr'); res.hits.hits[0].should.not.have.property('phone'); @@ -243,7 +269,7 @@ describe('indexing', function(){ }); it('should only return indexed fields', function(done){ - Talk.search({query:'cool'}, function(err, res) { + Talk.search({query: {query_string: {query: 'cool'}}}, function(err, res) { res.hits.total.should.eql(1); var talk = res.hits.hits[0]._source; @@ -257,7 +283,7 @@ describe('indexing', function(){ }); it('should hydrate returned documents if desired', function(done){ - Talk.search({query:'cool'}, {hydrate:true}, function(err, res) { + Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -282,7 
+308,7 @@ describe('indexing', function(){ }); it('should only return indexed fields and have indexed sub-objects', function(done){ - Person.search({query:'Bob'}, function(err, res) { + Person.search({query: {query_string: {query: 'Bob'}}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('Bob Carr'); res.hits.hits[0].should.have.property('life'); @@ -297,7 +323,7 @@ describe('indexing', function(){ }); it('should allow extra query options when hydrating', function(done){ - Talk.search({query:'cool'}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { + Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -336,7 +362,7 @@ describe('indexing', function(){ }); var Bum = mongoose.model('bum', BumSchema); config.createModelAndEnsureIndex(Bum, {name:'Roger Wilson'}, function(){ - Bum.search({query:'Wilson'}, function(err, results){ + Bum.search({query: {query_string: {query: 'Wilson'}}}, function(err, results){ results.hits.total.should.eql(1); done(); }); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index f7b42237..d3d32096 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -47,7 +47,7 @@ describe('Synchronize', function(){ stream.on('close', function(){ count.should.eql(53); setTimeout(function(){ - Book.search({query:'American'}, function(err, results){ + Book.search({query: {query_string: {query: 'American'}}}, function(err, results){ results.hits.total.should.eql(2); done(); }); diff --git a/test/truncate-test.js b/test/truncate-test.js index cafcfc20..d1409e48 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -44,7 +44,11 @@ describe('Truncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { Dummy.search({ - query: 'Text1' + query: { + query_string: { + query: 'Text1' + } + } }, function(err, results) { results.hits.total.should.eql(0); done(err); @@ -52,4 +56,4 @@ describe('Truncate', function() { }); }); }); -}); \ No newline at end of file +}); From 755fd8ec6924c17ef24572add67bb32efb57ae67 Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 29 Oct 2014 12:45:48 -0600 Subject: [PATCH 029/152] remove elastical dependency --- lib/mongoosastic.js | 1 - package.json | 1 - test/boost-field-test.js | 8 +++++--- test/bulk-test.js | 2 -- test/config.js | 10 +++++++--- test/geo-test.js | 8 +++++--- test/index-test.js | 26 +++++++++++++++++--------- test/search-features-test.js | 2 -- test/synchronize-test.js | 2 -- test/truncate-test.js | 2 -- 10 files changed, 34 insertions(+), 28 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f43c2eb8..2b8a45c5 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,5 +1,4 @@ var elasticsearch = require('elasticsearch') - , elastical = require('elastical') , generator = new(require('./mapping-generator')) , serialize = require('./serialize') , events = require('events') diff --git a/package.json b/package.json index 688ebc46..44b8cfd9 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,6 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elastical": "0.0.13", "elasticsearch": "^2.4.3", "nop": "^1.0.0" }, diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 5346330b..26b8290c 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -1,6 +1,5 @@ var mongoose = 
require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) + , esClient = new(require('elasticsearch').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema @@ -29,7 +28,10 @@ describe('Add Boost Option Per Field', function(){ it('should create a mapping with boost field added', function(done){ BlogPost.createMapping(function(err, mapping){ - esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ + esClient.indices.getMapping({ + index: 'blogposts', + type: 'blogpost' + }, function(err, mapping){ /* elasticsearch 1.0 & 0.9 support */ var props = mapping.blogpost != undefined ? diff --git a/test/bulk-test.js b/test/bulk-test.js index 88d36d79..f7dc243d 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -1,6 +1,4 @@ var mongoose = require('mongoose'), - elastical = require('elastical'), - esClient = new(require('elastical').Client)(), should = require('should'), config = require('./config'), Schema = mongoose.Schema, diff --git a/test/config.js b/test/config.js index 2fad571a..9943b81a 100644 --- a/test/config.js +++ b/test/config.js @@ -1,4 +1,4 @@ -var esClient = new(require('elastical').Client) +var esClient = new(require('elasticsearch').Client) , async = require('async'); const INDEXING_TIMEOUT = 1100; @@ -8,9 +8,13 @@ module.exports = { , indexingTimeout: INDEXING_TIMEOUT , deleteIndexIfExists: function(indexes, done){ async.forEach(indexes, function(index, cb){ - esClient.indexExists(index, function(err, exists){ + esClient.indices.exists({ + index: index + }, function(err, exists){ if(exists){ - esClient.deleteIndex(index, cb); + esClient.indices.delete({ + index: index + }, cb); }else{ cb(); } diff --git a/test/geo-test.js b/test/geo-test.js index 5706cc4b..7b09f26d 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -1,6 +1,5 @@ var mongoose = require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) + , esClient = new(require('elasticsearch').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema @@ -38,7 +37,10 @@ describe('GeoTest', function(){ GeoModel.createMapping(function(err, mapping){ GeoModel.remove(function(){ - esClient.getMapping('geodocs', 'geodoc', function(err, mapping){ + esClient.indices.getMapping({ + index: 'geodocs', + type: 'geodoc' + }, function(err, mapping){ (mapping.geodoc != undefined ? 
mapping.geodoc: /* ES 0.9.11 */ mapping.geodocs.mappings.geodoc /* ES 1.0.0 */ diff --git a/test/index-test.js b/test/index-test.js index dd898fde..9c786f74 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -1,10 +1,9 @@ var mongoose = require('mongoose') - , elastical = require('elastical') , should = require('should') , config = require('./config') , Schema = mongoose.Schema , ObjectId = Schema.ObjectId - , esClient = new(require('elastical').Client) + , esClient = new(require('elasticsearch').Client) , mongoosastic = require('../lib/mongoosastic') , Tweet = require('./models/tweet'); @@ -99,8 +98,12 @@ describe('indexing', function(){ }); it("should use the model's id as ES id", function(done){ Tweet.findOne({message:"I like Riak better"}, function(err, doc){ - esClient.get('tweets', doc._id.toString(), function(err, res){ - res.message.should.eql(doc.message); + esClient.get({ + index: 'tweets', + type: 'tweet', + id: doc._id.toString() + }, function(err, res){ + res._source.message.should.eql(doc.message); done() }); }); @@ -342,13 +345,18 @@ describe('indexing', function(){ describe('Existing Index', function(){ before(function(done){ config.deleteIndexIfExists(['ms_sample'], function(){ - esClient.createIndex('ms_sample', {mappings:{ - bum:{ - properties: { - name: {type:'string'} + esClient.indices.create({ + index: 'ms_sample', + body: { + mappings:{ + bum:{ + properties: { + name: {type:'string'} + } + } } } - }}, done); + }, done); }); }); diff --git a/test/search-features-test.js b/test/search-features-test.js index 6954b07f..93ad2baa 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -1,5 +1,4 @@ var mongoose = require('mongoose') - , elastical = require('elastical') , should = require('should') , config = require('./config') , Schema = mongoose.Schema @@ -7,7 +6,6 @@ var mongoose = require('mongoose') , async = require('async') , mongoosastic = require('../lib/mongoosastic'); -var esClient = new elastical.Client(); var BondSchema = new Schema({ name: String , type: {type:String, default:'Other Bond'} diff --git a/test/synchronize-test.js b/test/synchronize-test.js index d3d32096..0a9f8251 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -1,6 +1,4 @@ var mongoose = require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema diff --git a/test/truncate-test.js b/test/truncate-test.js index d1409e48..93cd106d 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -1,6 +1,4 @@ var mongoose = require('mongoose'), - elastical = require('elastical'), - esClient = new(require('elastical').Client), should = require('should'), config = require('./config'), Schema = mongoose.Schema, From 441617138d3da3d8009fde46d6f4c77fe65d0c6a Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 29 Oct 2014 12:56:43 -0600 Subject: [PATCH 030/152] Break out docs --- CONTRIBUTING.md | 53 ++++++++++++++++++++++++++++++++++++++ LICENSE.md | 9 +++++++ readme.md | 67 ------------------------------------------------- 3 files changed, 62 insertions(+), 67 deletions(-) create mode 100644 CONTRIBUTING.md create mode 100644 LICENSE.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..7d8e095c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,53 @@ +# Contributing +Pull requests are always welcome as long as an accompanying test case is +associated. 
+ +This project is configured to use [git +flow](https://github.com/nvie/gitflow/) and the following conventions +are used: + +* ``develop`` - represents current active development and can possibly be + unstable. +* ``master`` - pristine copy of repository, represents the currently + stable release found in the npm index. +* ``feature/**`` - represents a new feature being worked on + +If you wish to contribute, the only requirement is to: + +- branch a new feature branch from develop (if you're working on an + issue, prefix it with the issue number) +- make the changes, with accompanying test cases +- issue a pull request against develop branch + +Although I use git flow and prefix feature branches with "feature/" I +don't require this for pull requests... all I care is that the feature +branch name makes sense. + +Pulls requests against master or pull requests branched from master will +be rejected. + +## Examples +Someone picks up issue #39 on selective indexing. + +Good branch names: +* 39-selective-indexing +* feature/39-selective-indexing + +Someone submits a new feature that allows shard configuration: + +Good branch names: +* feature/shard-configuration +* shard-configuration +* or file an issue, then create a feature branch + +Feel free to ping me if you need help! :) + +## Running Tests +In order to run the tests you will need: + +* An elasticsearch server running on port 9200 +* A mongodb server +* [mocha](http://visionmedia.github.com/mocha/) + +With those installed, running ''npm test'' will run the tests with the +preferred timeout (which is extended for integration tests. diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 00000000..61673abd --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,9 @@ +[The MIT License](https://tldrlegal.com/l/mit) + +Copyright (c) 2012 James R. Carr + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/readme.md b/readme.md index 40a54ae1..a3bc3af0 100644 --- a/readme.md +++ b/readme.md @@ -495,70 +495,3 @@ SupervisorSchema.plugin(mongoosastic, {index: 'employees', type:'manager'}); var Supervisor = mongoose.model('supervisor', SupervisorSchema); ``` - -## Contributing -Pull requests are always welcome as long as an accompanying test case is -associated. - -This project is configured to use [git -flow](https://github.com/nvie/gitflow/) and the following conventions -are used: - -* ``develop`` - represents current active development and can possibly be - unstable. -* ``master`` - pristine copy of repository, represents the currently - stable release found in the npm index. 
-* ``feature/**`` - represents a new feature being worked on - -If you wish to contribute, the only requirement is to: - -- branch a new feature branch from develop (if you're working on an - issue, prefix it with the issue number) -- make the changes, with accompanying test cases -- issue a pull request against develop branch - -Although I use git flow and prefix feature branches with "feature/" I -don't require this for pull requests... all I care is that the feature -branch name makes sense. - -Pulls requests against master or pull requests branched from master will -be rejected. - -#### Examples -Someone picks up issue #39 on selective indexing. - -Good branch names: -* 39-selective-indexing -* feature/39-selective-indexing - -Someone submits a new feature that allows shard configuration: - -Good branch names: -* feature/shard-configuration -* shard-configuration -* or file an issue, then create a feature branch - -Feel free to ping me if you need help! :) - -### Running Tests -In order to run the tests you will need: - -* An elasticsearch server running on port 9200 -* A mongodb server -* [mocha](http://visionmedia.github.com/mocha/) - -With those installed, running ''npm test'' will run the tests with the -preferred timeout (which is extended for integration tests. - - -## License -[The MIT License](https://tldrlegal.com/l/mit) - -Copyright (c) 2012 James R. Carr - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- From 1c58fbe9d3d9fde6f541a92e84c1baa307934dc9 Mon Sep 17 00:00:00 2001 From: George Shank Date: Wed, 29 Oct 2014 12:57:20 -0600 Subject: [PATCH 031/152] uppercase README --- readme.md => README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename readme.md => README.md (100%) diff --git a/readme.md b/README.md similarity index 100% rename from readme.md rename to README.md From 953a3b354a259fcc400c037e3e17a9003966e86c Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 11:32:23 -0600 Subject: [PATCH 032/152] refactor bulk api --- lib/mongoosastic.js | 41 ++++++++++++++++++++++++++++++----------- 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 2b8a45c5..cf5fe927 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -70,7 +70,7 @@ module.exports = function Mongoosastic(schema, options){ type: type, model: this }) - cb() + setImmediate(cb) } else { esClient.index({ index: index, @@ -142,7 +142,13 @@ module.exports = function Mongoosastic(schema, options){ , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - + + //Set indexing to be bulk when synchronizing to make synchronizing faster + bulk = { + delay: 1000, + size: 1000 + } + query = query || {} setIndexNameIfUnset(this.modelName) @@ -162,17 +168,18 @@ module.exports = function Mongoosastic(schema, options){ }else{ em.emit('data', null, doc) } - if (readyToClose && counter === 0) - close() }) }) }) stream.on('close', function(a, b){ - readyToClose = true closeValues = [a, b] - if (counter === 0) - close() + var closeInterval = setInterval(function() { + if (counter === 0 && bulkBuffer.length === 0) { + clearInterval(closeInterval) + close() + } + }, 1000) }) stream.on('error', function(err){ @@ -220,7 +227,7 @@ module.exports = function Mongoosastic(schema, options){ }) } - function bulkDelete(options) { + function bulkDelete(options, cb) { bulkAdd({ delete: { _index: options.index || indexName, @@ -228,6 +235,7 @@ module.exports = function Mongoosastic(schema, options){ _id: options.model._id.toString() } }) + cb() } function bulkIndex(options) { @@ -238,18 +246,29 @@ module.exports = function Mongoosastic(schema, options){ _id: options.model._id.toString() } }) - bulkAdd({doc: options.model}) + bulkAdd(options.model) + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout) + bulkTimeout = undefined } function bulkAdd(instruction) { bulkBuffer.push(instruction) - clearTimeout(bulkTimeout) + + //Return because we need the doc being indexed + //Before we start inserting + if (instruction.index && instruction.index._index) + return if(bulkBuffer.length >= (bulk.size || 1000)) { schema.statics.flush() - } else { + clearBulkTimeout() + } else if (bulkTimeout === undefined){ bulkTimeout = setTimeout(function(){ schema.statics.flush() + clearBulkTimeout() }, bulk.delay || 1000) } } From 7c84d8ee8b0b041c01c7ab329be8c58b30611a87 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 11:34:39 -0600 Subject: [PATCH 033/152] don't stop bulk options with synchronize --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index cf5fe927..afd8d4d4 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -144,7 +144,7 @@ module.exports = function Mongoosastic(schema, options){ , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} //Set indexing to be bulk when synchronizing to make synchronizing 
faster - bulk = { + bulk = bulk || { delay: 1000, size: 1000 } From 0ac8a968cefcdd2b4ac93169e4fc10aeb7ac9907 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:23:13 -0600 Subject: [PATCH 034/152] refresh readme --- README.md | 374 ++++++++++++++++++++++++++---------------------------- 1 file changed, 181 insertions(+), 193 deletions(-) diff --git a/README.md b/README.md index a3bc3af0..9ffb6624 100644 --- a/README.md +++ b/README.md @@ -3,28 +3,51 @@ Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) [![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) -A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept -running into cases where I needed full text search capabilities in my -mongodb based models only to discover mongodb has none. In addition to -full text search, I also needed the ability to filter ranges of data -points in the searches and even highlight matches. For these reasons, -elastic search was a perfect fit and hence this project. - - +Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatically index your models into [elasticsearch](http://www.elasticsearch.org/). + +- [Installation](#installation) +- [Setup](#setup) +- [Indexing](#indexing) + - [Saving a document](#saving-a-document) + - [Indexing nested models](#indexing-nested-models) + - [Indexing an existing collection](#indexing-an-existing-collection) + - [Bulk indexing](#bulk-indexing) + - [Indexing on demand](#indexing-on-demand) + - [Truncating an index](#truncating-an-index) +- [Mapping](#mapping) + - [Geo mapping](#geo-mapping) + - [Indexing a geo point](#indexing-a-geo-point) + - [Indexing a geo shape](#indexing-a-geo-shape) + - [Creating mappings on-demand](#creating-mappings-on-demand) +- [Queries](#queries) + - [Hydration](#hydration) ## Installation ```bash -npm install mongoosastic - +npm install -S mongoosastic ``` -Or add it to your package.json +## Setup + +### Model.plugin(mongoosastic, options) -## Usage +Options are: -To make a model indexed into elastic search simply add the plugin. +* `index` - the index in elastic search to use. Defaults to the + pluralization of the model name. +* `type` - the type this model represents in elastic search. Defaults + to the model name. +* `host` - the host elastic search is running on +* `port` - the port elastic search is running on +* `auth` - the authentication needed to reach elastic search server. In the standard format of 'username:password' +* `protocol` - the protocol the elastic search server uses. Defaults to http +* `hydrate` - whether or not to lookup results in mongodb before +* `hydrateOptions` - options to pass into hydrate function +* `bulk` - size and delay options for bulk indexing +To have a model indexed into elastic search simply add the plugin. + ```javascript var mongoose = require('mongoose') , mongoosastic = require('mongoosastic') @@ -64,7 +87,40 @@ User.plugin(mongoosastic) In this case only the name field will be indexed for searching. -####Indexing Nested Models +Now, by adding the plugin, the model will have a new method called +`search` which can be used to make simple to complex searches. 
The `search` +method accepts [standard elasticsearch query DSL](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-queries.html) + +```javascript +User.search({ + query_string: { + query: "john" + } +}, function(err, results) { + // results here +}); + +``` + +## Indexing + +### Saving a document +The indexing takes place after saving inside the mongodb and is a defered process. +One can check the end of the indexion catching es-indexed event. + +```javascript +doc.save(function(err){ + if (err) throw err; + /* Document indexation on going */ + doc.on('es-indexed', function(err, res){ + if (err) throw err; + /* Document is indexed */ + }); + }); +``` + + +###Indexing Nested Models In order to index nested models you can refer following example. ```javascript @@ -85,16 +141,6 @@ var User = new Schema({ User.plugin(mongoosastic) ``` -Finally, adding the plugin will add a new method to the model called -search which can be used to make simple to complex searches. - -```javascript - -User.search({query:"john"}, function(err, results) { - // results here -}); - -``` ### Indexing An Existing Collection Already have a mongodb collection that you'd like to index using this @@ -128,8 +174,6 @@ You can also synchronize a subset of documents based on a query! var stream = Book.synchronize({author: 'Arthur C. Clarke'}) ``` -One caveat... synchronization is kinda slow for now. Use with care. - ### Bulk Indexing You can also specify `bulk` options with mongoose which will utilize elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. @@ -145,7 +189,38 @@ BookSchema.plugin(mongoosastic, { }); ``` -### Per Field Options +### Indexing On Demand +You can do on-demand indexes using the `index` function + +```javascript +Dude.findOne({name:'Jeffery Lebowski', function(err, dude){ + dude.awesome = true; + dude.index(function(err, res){ + console.log("egads! I've been indexed!"); + }); +}); +``` + +The index method takes 2 arguments: + +* `options` (optional) - {index, type} - the index and type to publish to. Defaults to the standard index and type. + the model was setup with. +* `callback` - callback function to be invoked when model has been + indexed. + +Note that indexing a model does not mean it will be persisted to +mongodb. Use save for that. + +### Truncating an index + +The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. + +```javascript +GarbageModel.truncate(function(err){...}); +``` + +## Mapping + Schemas can be configured to have special options per field. These match with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by elasticsearch with the only difference being they are all prefixed by "es_". @@ -165,46 +240,7 @@ This example uses a few other mapping fields... such as null_value and type (which overrides whatever value the schema type is, useful if you want stronger typing such as float). -#### Creating Mappings for These Features -The way this can be mapped in elastic search is by creating a mapping -for the index the model belongs to. Currently to the best of my -knowledge mappings are create once when creating an index and can only -be modified by destroying the index. 
The optionnal first parameter is -the settings option for the index (for defining analysers for example or whatever is [there](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-update-settings.html). - -As such, creating the mapping is a one time operation and can be done as -follows (using the BookSchema as an example): - -```javascript -var BookSchema = new Schema({ - title: {type:String, es_boost:2.0} - , author: {type:String, es_null_value:"Unknown Author"} - , publicationDate: {type:Date, es_type:'date'} - -BookSchema.plugin(mongoosastic); -var Book = mongoose.model('Book', BookSchema); -Book.createMapping({ - "analysis" : { - "analyzer":{ - "content":{ - "type":"custom", - "tokenizer":"whitespace" - } - } - } -},function(err, mapping){ - // do neat things here -}); - -``` -This feature is still a work in progress. As of this writing you'll have -to manage whether or not you need to create the mapping, mongoosastic -will make no assumptions and simply attempt to create the mapping. If -the mapping already exists, an Exception detailing such will be -populated in the `err` argument. - -#### Mapping options -There are various types that can be defined in elasticsearch. Check out http://www.elasticsearch.org/guide/reference/mapping/ for more information. Here are examples to the currently possible definitions in mongoosastic: +There are various mapping options that can be defined in elasticsearch. Check out [http://www.elasticsearch.org/guide/reference/mapping/](http://www.elasticsearch.org/guide/reference/mapping/) for more information. Here are examples to the currently possible definitions in mongoosastic: ```javascript var ExampleSchema = new Schema({ @@ -294,24 +330,24 @@ Notice that the name of the field containing the ES geo data must start by #### Indexing a geo point ```javascript - var geo = new GeoModel({ - … - geo_with_lat_lon: { lat: 1, lon: 2} - … - }); +var geo = new GeoModel({ + /* … */ + geo_with_lat_lon: { lat: 1, lon: 2} + /* … */ +}); ``` #### Indexing a geo shape ```javascript - var geo = new GeoModel({ - … - geo_shape:{ - type:'envelope', - coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ - } - … - }); +var geo = new GeoModel({ + … + geo_shape:{ + type:'envelope', + coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ + } + … +}); ``` Mapping, indexing and searching example for geo shape can be found in test/geo-test.js @@ -320,33 +356,68 @@ For example, one can retrieve the list of document where the shape contain a spe point (or polygon...) ```javascript - var geoQuery = { - "query": {"match_all": {}}, - "filter": {"geo_shape": { - "geo_shape": { - "shape": { - "type": "point", - "coordinates": [3,1] - }, - "relation": "intersects" +var geoQuery = { + "match_all": {} + } + +var geoFilter = { + geo_shape: { + geo_shape": { + shape: { + type: "point", + coordinates: [3,1] + } } - }} + } } + +GeoModel.search(geoQuery, {filter: geoFilter}, function(err, res) { /* ... 
*/ }) ``` -### Advanced Queries +### Creating Mappings On Demand +Creating the mapping is a one time operation and can be done as +follows (using the BookSchema as an example): + +```javascript +var BookSchema = new Schema({ + title: {type:String, es_boost:2.0} + , author: {type:String, es_null_value:"Unknown Author"} + , publicationDate: {type:Date, es_type:'date'} + +BookSchema.plugin(mongoosastic); +var Book = mongoose.model('Book', BookSchema); +Book.createMapping({ + "analysis" : { + "analyzer":{ + "content":{ + "type":"custom", + "tokenizer":"whitespace" + } + } + } +},function(err, mapping){ + // do neat things here +}); + +``` +This feature is still a work in progress. As of this writing you'll have +to manage whether or not you need to create the mapping, mongoosastic +will make no assumptions and simply attempt to create the mapping. If +the mapping already exists, an Exception detailing such will be +populated in the `err` argument. + + +## Queries The full query DSL of elasticsearch is exposed through the search method. For example, if you wanted to find all people between ages 21 and 30: ```javascript Person.search({ - query:{ - range: { - age:{ - from:21 - , to: 30 - } + range: { + age:{ + from:21 + , to: 30 } } }, function(err, people){ @@ -354,9 +425,19 @@ Person.search({ }); ``` - See the elasticsearch [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/) docs for more information. +You can also specify query options like [sorts](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort) + +```javascript +Person.search({/* ... */}, {sort: "age:asc"}, function(err, people){ + //sorted results +}); +``` + +Options for queries must adhere to the [javascript elasticsearch driver specs](http://www.elasticsearch.org/guide/en/elasticsearch/client/javascript-api/current/api-reference.html#api-search). + + ### Hydration By default objects returned from performing a search will be the objects as is in elastic search. This is useful in cases where only what was @@ -368,7 +449,7 @@ provide {hydrate:true} as the second argument to a search call. ```javascript -User.search({query:"john"}, {hydrate:true}, function(err, results) { +User.search({query_string: {query: "john"}}, {hydrate:true}, function(err, results) { // results here }); @@ -379,7 +460,7 @@ how to query for the mongoose object. ```javascript -User.search({query:"john"}, {hydrate:true, hydrateOptions: {select: 'name age'}}, function(err, results) { +User.search({query_string: {query: "john"}}, {hydrate:true, hydrateOptions: {select: 'name age'}}, function(err, results) { // results here }); @@ -402,96 +483,3 @@ var User = new Schema({ User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) ``` - - -### Indexing On Demand -While developing mongoose I came across a scenario where we needed to be -able to save models (and search them) but a single action would -"publish" those models to be searched from a public site. To address -this I create a new method: `index`. - -#### Usage -Usage is as simple as calling index on an existing model. - -```javascript -Dude.findOne({name:'Jeffery Lebowski', function(err, dude){ - dude.awesome = true; - dude.index(function(err, res){ - console.log("egads! I've been indexed!"); - }); -}); -``` - -The index method takes 3 arguments: - -* `index` (optional) - the index to publish to. Defaults to the index - the model was setup with. -* `type` (optional) - the type to publish as. 
Defaults to the type the - model was setup with. -* `callback` - callback function to be invoked when model has been - indexed. - -Note that indexing a model does not mean it will be persisted to -mongodb. Use save for that. - -### Saving a document -The indexing takes place after saving inside the mongodb and is a defered process. -One can check the end of the indexion catching es-indexed event. - -```javascript -doc.save(function(err){ - if (err) throw err; - /* Document indexation on going */ - doc.on('es-indexed', function(err, res){ - if (err) throw err; - /* Document is indexed */ - }); - }); -``` - -### Truncating an index - -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. - -#### Usage - -```javascript -GarbageModel.truncate(function(err){...}); -``` - -### Model.plugin(mongoosastic, options) - -Options are: - -* `index` - the index in elastic search to use. Defaults to the - pluralization of the model name. -* `type` - the type this model represents in elastic search. Defaults - to the model name. -* `host` - the host elastic search is running on -* `port` - the port elastic search is running on -* `auth` - the authentication needed to reach elastic search server. In the standard format of 'username:password' -* `protocol` - the protocol the elastic search server uses. Defaults to http -* `hydrate` - whether or not to lookup results in mongodb before - returning results from a search. Defaults to false. -* `curlDebug` - elastical debugging. Defaults to false. - -Here are all other avaible options invloved in connection to elastic search server: -https://ramv.github.io/node-elastical/docs/classes/Client.html - -Experimental Options: - -#### Specifying Different Index and Type -Perhaps you have an existing index and you want to specify the index and -type used to index your document? No problem!! 
- -```javascript -var SupervisorSchema = new Schema({ - name: String -, department: String -}); - -SupervisorSchema.plugin(mongoosastic, {index: 'employees', type:'manager'}); - -var Supervisor = mongoose.model('supervisor', SupervisorSchema); - -``` From 848fd23ee9d1de9beb3919e656b9da3ecdf7f062 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:23:19 -0600 Subject: [PATCH 035/152] Update query interface --- lib/mongoosastic.js | 2 +- test/alternative-index-method-test.js | 6 ++-- test/bulk-test.js | 18 ++++++------ test/geo-test.js | 32 +++++++++------------ test/index-test.js | 40 +++++++++++---------------- test/search-features-test.js | 10 +++---- test/synchronize-test.js | 2 +- test/truncate-test.js | 6 ++-- 8 files changed, 48 insertions(+), 68 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index afd8d4d4..627ce097 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -203,7 +203,7 @@ module.exports = function Mongoosastic(schema, options){ var model = this , esQuery = { - body: query, + body: {query: query}, index: options.index || indexName, type: options.type || typeName } diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index bea6a820..5c4e9895 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -33,7 +33,7 @@ describe('Index Method', function(){ doc.message = 'I know nodejitsu!'; doc.index(function(){ setTimeout(function(){ - Tweet.search({query: {query_string: {query: 'know'}}}, function(err, res){ + Tweet.search({query_string: {query: 'know'}}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); @@ -46,7 +46,7 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets'}, function(){ setTimeout(function(){ - Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets'}, function(err, res){ + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); @@ -59,7 +59,7 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets', type: 'utterings'}, function(){ setTimeout(function(){ - Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); diff --git a/test/bulk-test.js b/test/bulk-test.js index f7dc243d..d03be08e 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -37,24 +37,22 @@ describe('Bulk mode', function() { new Book({ title: title }).save(cb); - }, function() { - setTimeout(done, 1200); - }); + }, done) }); before(function(done) { Book.findOne({ title: 'American Gods' }, function(err, book) { - book.remove(function() { - setTimeout(done, 1200); - }); + book.remove(done) }); }); it('should index all objects and support deletions too', function(done) { - Book.search({}, function(err, results) { - results.should.have.property('hits').with.property('total', 52); - done(); - }); + setTimeout(function() { + Book.search({match_all: {}}, function(err, results) { + results.should.have.property('hits').with.property('total', 52); + done(); + }); + }, 1500) }); }); diff --git a/test/geo-test.js b/test/geo-test.js index 7b09f26d..567ff413 100644 
--- a/test/geo-test.js +++ b/test/geo-test.js @@ -98,9 +98,7 @@ describe('GeoTest', function(){ setTimeout(function(){ // ES request GeoModel.search({ - query: { - match_all: {} - } + match_all: {} }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); @@ -126,9 +124,7 @@ describe('GeoTest', function(){ setTimeout(function(){ GeoModel.search({ - query: { - match_all: {} - } + match_all: {} }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); @@ -146,16 +142,14 @@ describe('GeoTest', function(){ it('should be able to search points inside frames', function(done){ var geoQuery = { - query: { - filtered: { - "query": {"match_all": {}}, - "filter": { - "geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] - } + filtered: { + "query": {"match_all": {}}, + "filter": { + "geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] } } } @@ -168,18 +162,18 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(2); - geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(1); - geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(2); - geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(0); diff --git a/test/index-test.js b/test/index-test.js index 9c786f74..fd9c6308 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -111,10 +111,8 @@ describe('indexing', function(){ it('should be able to execute a simple query', function(done){ Tweet.search({ - query: { - query_string: { - query: 'Riak' - } + query_string: { + query: 'Riak' } }, function(err, results) { results.hits.total.should.eql(1) @@ -125,10 +123,8 @@ describe('indexing', function(){ it('should be able to execute a simple query', function(done){ Tweet.search({ - query: { - query_string: { - query: 'jamescarr' - } + query_string: { + query: 'jamescarr' } }, function(err, results) { results.hits.total.should.eql(1) @@ -158,10 +154,8 @@ describe('indexing', function(){ tweet.remove(function(){ setTimeout(function(){ Tweet.search({ - query: { - query_string: { - query: 'shouldnt' - } + query_string: { + query: 'shouldnt' } }, function(err, res){ res.hits.total.should.eql(0); @@ -174,10 +168,8 @@ describe('indexing', function(){ tweet.on('es-removed', function(err, res){ setTimeout(function(){ Tweet.search({ - query: { - query_string: { - query: 'shouldnt' - } + query_string: { + query: 'shouldnt' } }, function(err, res){ res.hits.total.should.eql(0); @@ -226,14 +218,14 @@ describe('indexing', function(){ }); it('should only find models of type Tweet', function(done){ - Tweet.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ + Tweet.search({query_string: {query: 'Dude'}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.user.should.eql('Dude'); done(); }); }); it('should only find models of type Talk', 
function(done){ - Talk.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ + Talk.search({query_string: {query: 'Dude'}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.title.should.eql('Dude'); done(); @@ -251,7 +243,7 @@ describe('indexing', function(){ }); it('when gathering search results while respecting default hydrate options', function(done){ - Person.search({query: {query_string: {query: 'James'}}}, function(err, res) { + Person.search({query_string: {query: 'James'}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('James Carr'); res.hits.hits[0].should.not.have.property('phone'); @@ -272,7 +264,7 @@ describe('indexing', function(){ }); it('should only return indexed fields', function(done){ - Talk.search({query: {query_string: {query: 'cool'}}}, function(err, res) { + Talk.search({query_string: {query: 'cool'}}, function(err, res) { res.hits.total.should.eql(1); var talk = res.hits.hits[0]._source; @@ -286,7 +278,7 @@ describe('indexing', function(){ }); it('should hydrate returned documents if desired', function(done){ - Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true}, function(err, res) { + Talk.search({query_string: {query: 'cool'}}, {hydrate:true}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -311,7 +303,7 @@ describe('indexing', function(){ }); it('should only return indexed fields and have indexed sub-objects', function(done){ - Person.search({query: {query_string: {query: 'Bob'}}}, function(err, res) { + Person.search({query_string: {query: 'Bob'}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('Bob Carr'); res.hits.hits[0].should.have.property('life'); @@ -326,7 +318,7 @@ describe('indexing', function(){ }); it('should allow extra query options when hydrating', function(done){ - Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { + Talk.search({query_string: {query: 'cool'}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -370,7 +362,7 @@ describe('indexing', function(){ }); var Bum = mongoose.model('bum', BumSchema); config.createModelAndEnsureIndex(Bum, {name:'Roger Wilson'}, function(){ - Bum.search({query: {query_string: {query: 'Wilson'}}}, function(err, results){ + Bum.search({query_string: {query: 'Wilson'}}, function(err, results){ results.hits.total.should.eql(1); done(); }); diff --git a/test/search-features-test.js b/test/search-features-test.js index 93ad2baa..993de91e 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -40,12 +40,10 @@ describe('Query DSL', function(){ describe('range', function(){ it('should be able to find within range', function(done){ Bond.search({ - query:{ - range: { - price:{ - from:20000 - , to: 30000 - } + range: { + price:{ + from:20000 + , to: 30000 } } }, function(err, res){ diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 0a9f8251..45eeb25b 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -45,7 +45,7 @@ describe('Synchronize', function(){ stream.on('close', function(){ count.should.eql(53); setTimeout(function(){ - Book.search({query: {query_string: {query: 'American'}}}, function(err, results){ + Book.search({query_string: {query: 'American'}}, function(err, results){ 
results.hits.total.should.eql(2); done(); }); diff --git a/test/truncate-test.js b/test/truncate-test.js index 93cd106d..cc2dbc7f 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -42,10 +42,8 @@ describe('Truncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { Dummy.search({ - query: { - query_string: { - query: 'Text1' - } + query_string: { + query: 'Text1' } }, function(err, results) { results.hits.total.should.eql(0); From 59b1ef61f872ad8a80af55fb82c236792b7f0b47 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:25:10 -0600 Subject: [PATCH 036/152] Add gitter badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 9ffb6624..bae5993c 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ [![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) [![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatically index your models into [elasticsearch](http://www.elasticsearch.org/). From e9f1a87ce2be4235042f52f27938032b6fc2113e Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:27:17 -0600 Subject: [PATCH 037/152] formatting --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index bae5993c..628901f1 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatical - [Creating mappings on-demand](#creating-mappings-on-demand) - [Queries](#queries) - [Hydration](#hydration) + ## Installation ```bash From a465b9658afc8a35b863ea3bb3e891fc547b1228 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:28:31 -0600 Subject: [PATCH 038/152] significant version bump --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 44b8cfd9..483224e3 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "1.0.2", + "version": "2.0.0", "tags": [ "mongodb", "elastic search", From 33537341c29bf4e43a72355a49e399ea57455a2f Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 13:33:08 -0600 Subject: [PATCH 039/152] longer delay for bulk test --- test/bulk-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index d03be08e..d742b6f0 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -52,7 +52,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 1500) + }, 2000) }); }); From 516438f8c8dfca9f055d19f54bf94f9010bbadc3 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 13:51:15 -0600 Subject: [PATCH 040/152] updates for travis --- .travis.yml | 1 - test/bulk-test.js | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index d2b4c927..377e5bc7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,6 @@ language: node_js node_js: - 0.11 - 0.10 - - 0.8 services: - mongodb diff --git a/test/bulk-test.js b/test/bulk-test.js index d742b6f0..031fd3a3 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -52,7 +52,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 2000) + }, 3000) }); }); From fcc6a46aa152bf97737b34a55ad55d30ef5aa14f Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 13:58:59 -0600 Subject: [PATCH 041/152] Add changelog --- CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..6785832b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,12 @@ +## 2.0.0 (2014-10-10) + +Features: + +- Moved to [official elasticsearch driver](https://github.com/elasticsearch/elasticsearch-js) + - Caused `search` api to conform closer to official driver + - Added options to searching +- Refactored bulk api +- Refreshed README.md +- Added CHANGELOG.md +- Added CONTRIBUTING.md +- Added LICENSE.md From 3a6ad9b9b0ae8a3e708d94daea3f209451a31ee9 Mon Sep 17 00:00:00 2001 From: Sascha Schwabbauer Date: Sun, 2 Nov 2014 00:14:01 +0100 Subject: [PATCH 042/152] 'protocol' and 'auth' options are ignored This fixes an issue, where the 'protocol' and 'auth' options were ignored. --- lib/mongoosastic.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 627ce097..61ef1beb 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,13 +13,15 @@ module.exports = function Mongoosastic(schema, options){ , _mapping = null , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 - , esClient = new elasticsearch.Client({host: {host: host, port: port}}) + , protocol = options && options.protocol ? options.protocol : 'http' + , auth = options && options.auth ? 
options.auth : null + , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth}}) , bulk = options && options.bulk , bulkBuffer = [] , bulkTimeout setUpMiddlewareHooks(schema) - + /** * ElasticSearch Client */ From 8ef97d50f7e73da84b874f6beac647eac0be8037 Mon Sep 17 00:00:00 2001 From: taterbase Date: Sat, 1 Nov 2014 21:46:32 -0600 Subject: [PATCH 043/152] bump package.json --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 483224e3..16958c7d 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.0", + "version": "2.0.1", "tags": [ "mongodb", "elastic search", From 1618f4b62e9b6571fc9adbf5a4a7fc99f84cfb09 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 4 Nov 2014 17:20:16 -0700 Subject: [PATCH 044/152] Treat null query like undefined --- lib/mongoosastic.js | 3 +++ package.json | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 61ef1beb..ce33d458 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -203,6 +203,9 @@ module.exports = function Mongoosastic(schema, options){ options = {} } + if (query === null) + query = undefined + var model = this , esQuery = { body: {query: query}, diff --git a/package.json b/package.json index 16958c7d..9adce9bc 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.1", + "version": "2.0.2", "tags": [ "mongodb", "elastic search", From a1ee33ad334b333076a987837fdcc7e31e139685 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 4 Nov 2014 17:39:53 -0700 Subject: [PATCH 045/152] remove unstable node testing, broken for now --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 377e5bc7..07b5d92c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: node_js node_js: - - 0.11 - 0.10 services: From feb2060dfcceb7fcb2e5cdf7798328cb7a8a0a36 Mon Sep 17 00:00:00 2001 From: b96705008 Date: Sun, 9 Nov 2014 15:08:56 +0800 Subject: [PATCH 046/152] get rid of "continue" when encounter objectid --- lib/mapping-generator.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 73b68568..ed384126 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -52,9 +52,9 @@ function getMapping(cleanTree, prefix) { } // If it is a objectid make it a string. - if(value.type === 'objectid'){ + if (value.type === 'objectid') { mapping[field].type = 'string'; - continue; + // do not continue here so we can handle other es_ options } //If indexing a number, and no es_type specified, default to double From 622239d13c340d69a47dc6ebdc0d6e4c2caf6373 Mon Sep 17 00:00:00 2001 From: taterbase Date: Sun, 9 Nov 2014 16:30:19 -0700 Subject: [PATCH 047/152] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9adce9bc..2b3b0062 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.2", + "version": "2.0.3", "tags": [ "mongodb", "elastic search", From eee48de8cd657c3dbe492fe005247fcb1bc0cdcd Mon Sep 17 00:00:00 2001 From: Ignacio Lago Date: Mon, 10 Nov 2014 12:09:59 +0100 Subject: [PATCH 048/152] Serialize on bulk calls. Serialize: this = full model. --- lib/mongoosastic.js | 8 +++++++- lib/serialize.js | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index ce33d458..f92b887c 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -67,10 +67,16 @@ module.exports = function Mongoosastic(schema, options){ , type = options.type || typeName if(bulk) { + /** + * To serialize in bulk it needs the _id + */ + var serialModel = serialize(this, mapping); + serialModel._id = this._id; + bulkIndex({ index: index, type: type, - model: this + model: serialModel }) setImmediate(cb) } else { diff --git a/lib/serialize.js b/lib/serialize.js index e0987354..55300db6 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -3,7 +3,7 @@ module.exports = serialize; function _serializeObject(object, mapping) { var serialized = {}; for (var field in mapping.properties) { - var val = serialize(object[field], mapping.properties[field]); + var val = serialize.call(object, object[field], mapping.properties[field]); if (val !== undefined) { serialized[field] = val; } @@ -30,7 +30,7 @@ function serialize(model, mapping) { } else { if (mapping.cast && typeof(mapping.cast) !== 'function') throw new Error('es_cast must be a function'); - model = mapping.cast ? mapping.cast(model) : model; + model = mapping.cast ? mapping.cast.call(this, model) : model; if (typeof model === 'object' && model !== null) { var name = model.constructor.name; if (name === 'ObjectID') { From c065446b4ea0f3d03cdd634fc7bd63399f96a6bc Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 10 Nov 2014 11:06:36 -0700 Subject: [PATCH 049/152] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2b3b0062..cd940f0b 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.3", + "version": "2.0.4", "tags": [ "mongodb", "elastic search", From 93d00fb9260b694ee0163679c5baffa76e1e31c7 Mon Sep 17 00:00:00 2001 From: Nicolas McCurdy Date: Thu, 20 Nov 2014 19:39:26 -0500 Subject: [PATCH 050/152] In documentation files, rename "truncate" to "esTruncate" --- README.md | 4 ++-- test/truncate-test.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 628901f1..f86fdb04 100644 --- a/README.md +++ b/README.md @@ -215,10 +215,10 @@ mongodb. Use save for that. ### Truncating an index -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. +The static method `esTruncate` will delete all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. 
```javascript -GarbageModel.truncate(function(err){...}); +GarbageModel.esTruncate(function(err){...}); ``` ## Mapping diff --git a/test/truncate-test.js b/test/truncate-test.js index cc2dbc7f..da236202 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -38,7 +38,7 @@ describe('Truncate', function() { after(function(done) { Dummy.remove(done); }); - describe('truncate', function() { + describe('esTruncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { Dummy.search({ From d4636e3ba66b1c4054c5620543703ad705bec349 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 20 Nov 2014 19:29:40 -0700 Subject: [PATCH 051/152] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index cd940f0b..46ce90ef 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.4", + "version": "2.0.5", "tags": [ "mongodb", "elastic search", From 098d5c0ae3e4ee5604ab260a44b0bea46af22f4a Mon Sep 17 00:00:00 2001 From: Christophe Wagner Date: Wed, 10 Dec 2014 18:24:35 +0100 Subject: [PATCH 052/152] add settings when index is created --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f92b887c..ef7ae42d 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -376,7 +376,7 @@ function createMappingIfNotPresent(options, cb) { body: completeMapping }, cb) } else { - client.indices.create({index: indexName}, function(err) { + client.indices.create({index: indexName, body: settings}, function(err) { if (err) return cb(err) From a1c25990cca9717497c0e706d7ac64b5ed204819 Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 10 Dec 2014 18:06:40 -0700 Subject: [PATCH 053/152] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 46ce90ef..ed595417 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.5", + "version": "2.0.6", "tags": [ "mongodb", "elastic search", From 67ce1298f594321a259b176b76e5f2ed3d45e726 Mon Sep 17 00:00:00 2001 From: srfrnk Date: Fri, 19 Dec 2014 17:00:18 +0200 Subject: [PATCH 054/152] allow debugging the calls made by elasticsearch client. added ability to send {log:"trace"} in options to enable logging --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index ef7ae42d..22ab3cad 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -15,7 +15,7 @@ module.exports = function Mongoosastic(schema, options){ , port = options && options.port ? options.port : 9200 , protocol = options && options.protocol ? options.protocol : 'http' , auth = options && options.auth ? 
options.auth : null - , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth}}) + , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth},log:options.log}) , bulk = options && options.bulk , bulkBuffer = [] , bulkTimeout From d7a6fb9970e25f4be546ffc0ab6b565e3d6a0e02 Mon Sep 17 00:00:00 2001 From: Srfrnk Date: Sun, 1 Feb 2015 09:23:11 +0200 Subject: [PATCH 055/152] fixed commit --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 22ab3cad..ae2bc91e 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -15,7 +15,7 @@ module.exports = function Mongoosastic(schema, options){ , port = options && options.port ? options.port : 9200 , protocol = options && options.protocol ? options.protocol : 'http' , auth = options && options.auth ? options.auth : null - , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth},log:options.log}) + , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth},log:(options&&options.log)}) , bulk = options && options.bulk , bulkBuffer = [] , bulkTimeout From d3a906e3d7533222335b8d16bf7edb703a31f1a6 Mon Sep 17 00:00:00 2001 From: Srfrnk Date: Sun, 1 Feb 2015 09:29:40 +0200 Subject: [PATCH 056/152] wrong value used... now should be better. --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index ae2bc91e..356760f1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -15,7 +15,7 @@ module.exports = function Mongoosastic(schema, options){ , port = options && options.port ? options.port : 9200 , protocol = options && options.protocol ? options.protocol : 'http' , auth = options && options.auth ? 
options.auth : null - , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth},log:(options&&options.log)}) + , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth},log:(options?options.log:null)}) , bulk = options && options.bulk , bulkBuffer = [] , bulkTimeout From 36ca0e3308ca3c8bc8384fd6f2b692064d8e3f63 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 1 Mar 2015 16:34:17 +0100 Subject: [PATCH 057/152] small patch for nested array schemas --- lib/mapping-generator.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index ed384126..97665e84 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -119,7 +119,7 @@ function getCleanTree(tree, paths, prefix) { // If it is an nested schema if (value[0]) { // A nested array can contain complex objects - if (paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { + if (paths[field] && paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); } else if ( paths[field] && paths[field].caster && paths[field].caster.instance ) { // Even for simple types the value can be an object if there is other attributes than type From 13a856e458eeb38ba2f0dcfb3167f474bd491842 Mon Sep 17 00:00:00 2001 From: Gary Pearman Date: Mon, 16 Mar 2015 21:54:35 +0000 Subject: [PATCH 058/152] Fixes #49 --- lib/serialize.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/serialize.js b/lib/serialize.js index 55300db6..584b4ce5 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -27,6 +27,7 @@ function serialize(model, mapping) { } else if (name === 'Date') { return new Date(value).toJSON(); } + return model; } else { if (mapping.cast && typeof(mapping.cast) !== 'function') throw new Error('es_cast must be a function'); From 2048e3a84172c745dcada3a0442da5d855369ff3 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Tue, 17 Mar 2015 11:55:52 +0100 Subject: [PATCH 059/152] 2.0.7 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ed595417..af07f640 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.6", + "version": "2.0.7", "tags": [ "mongodb", "elastic search", From 6c2a3577006b00b5a526c3d88a9a9656216a15c5 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Tue, 17 Mar 2015 18:06:55 +0100 Subject: [PATCH 060/152] fixed timeout for bulk test --- test/bulk-test.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index 031fd3a3..200aaca4 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -47,12 +47,15 @@ describe('Bulk mode', function() { }); }); it('should index all objects and support deletions too', function(done) { + + // This timeout is important, as Elasticsearch is "near-realtime" and the index/deletion takes time that + // needs to be taken into account in these tests setTimeout(function() { Book.search({match_all: {}}, function(err, results) { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 3000) + }, 4000); }); }); From 4c9142a7a5c8b5aae8f1ada55fe8cd4e72e099b1 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Tue, 17 Mar 2015 18:07:04 +0100 Subject: [PATCH 061/152] 2.0.8 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index af07f640..bbfd9ac1 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.7", + "version": "2.0.8", "tags": [ "mongodb", "elastic search", From 4c0d7c39bbde875ababc93d94d0adda7a4e60cb0 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Tue, 17 Mar 2015 18:32:22 +0100 Subject: [PATCH 062/152] added timeout env variable for travis to wait on index deletion --- .travis.yml | 3 +++ test/bulk-test.js | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 07b5d92c..10f0d1ee 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,3 +6,6 @@ node_js: services: - mongodb - elasticsearch + +env: + - BULK_TEST_TIMEOUT=20000 diff --git a/test/bulk-test.js b/test/bulk-test.js index 200aaca4..a1c8204d 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -55,7 +55,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 4000); + }, process.env.BULK_TEST_TIMEOUT || 4000); }); }); From 2a88117d048f4c37a41534b8468832245cc1521d Mon Sep 17 00:00:00 2001 From: Gustavo Date: Tue, 17 Mar 2015 21:59:44 +0100 Subject: [PATCH 063/152] Update .travis.yml testing conf. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 10f0d1ee..83928022 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,4 +8,4 @@ services: - elasticsearch env: - - BULK_TEST_TIMEOUT=20000 + - BULK_TEST_TIMEOUT=19000 From a0abae1da3d109d696725a4ad4a9cc37b07353f0 Mon Sep 17 00:00:00 2001 From: guumaster Date: Thu, 19 Mar 2015 11:03:01 +0100 Subject: [PATCH 064/152] Increased timeout for travis environment --- .travis.yml | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 83928022..31a642e5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,4 +8,4 @@ services: - elasticsearch env: - - BULK_TEST_TIMEOUT=19000 + - BULK_TEST_TIMEOUT=30000 diff --git a/package.json b/package.json index bbfd9ac1..866b9cb3 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,6 @@ "node": ">= 0.8.0" }, "scripts": { - "test": "mocha -R spec -t 20000 -b" + "test": "mocha -R spec -t 60000 -b" } } From 44d1f58b19ec69ce72849332ecbb2ee44a7a5a5b Mon Sep 17 00:00:00 2001 From: guumaster Date: Thu, 19 Mar 2015 11:19:14 +0100 Subject: [PATCH 065/152] 2.0.9 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 866b9cb3..1e7933d4 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.8", + "version": "2.0.9", "tags": [ "mongodb", "elastic search", From 1a1bfd609f50a6aa4c69ae19a69f8bbe3a212240 Mon Sep 17 00:00:00 2001 From: guumaster Date: Thu, 19 Mar 2015 12:40:52 +0100 Subject: [PATCH 066/152] Dependecies updated --- package.json | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 1e7933d4..7840d3b5 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "version": "2.0.9", "tags": [ "mongodb", + "elasticsearch", "elastic search", "mongoose", "full text search" @@ -15,17 +16,17 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elasticsearch": "^2.4.3", + "elasticsearch": "~2.4.x", "nop": "^1.0.0" }, "peerDependencies": { - "mongoose": "3.8.x" + "mongoose": "~3.8.x" }, "devDependencies": { - "mocha": "*", - "should": "*", - "async": "*", - "mongoose": "3.8.x" + "async": "^0.9.x", + "mocha": "^2.2.x", + "mongoose": "~3.8.x", + "should": "^5.2.x" }, "engines": { "node": ">= 0.8.0" From 8fe19f3692a549eab8ef130baa572a62cf6d34c1 Mon Sep 17 00:00:00 2001 From: guumaster Date: Thu, 19 Mar 2015 13:34:39 +0100 Subject: [PATCH 067/152] updated elasticsearch dependency. 
changed bulk config --- package.json | 2 +- test/bulk-test.js | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 7840d3b5..19f3fab8 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elasticsearch": "~2.4.x", + "elasticsearch": "^3.1.x", "nop": "^1.0.0" }, "peerDependencies": { diff --git a/test/bulk-test.js b/test/bulk-test.js index a1c8204d..f0e91aa0 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -11,8 +11,8 @@ var BookSchema = new Schema({ }); BookSchema.plugin(mongoosastic, { bulk: { - size: 10, - delay: 100 + size: 100, + delay: 1000 } }); From 20355b45b97756019eb303fa1a4816351239df4d Mon Sep 17 00:00:00 2001 From: guumaster Date: Thu, 19 Mar 2015 14:36:08 +0100 Subject: [PATCH 068/152] 2.0.10 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 19f3fab8..38ccc435 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.9", + "version": "2.0.10", "tags": [ "mongodb", "elasticsearch", From b2396d5c02242fc0e549ec9b2430e5f8a167297e Mon Sep 17 00:00:00 2001 From: guumaster Date: Fri, 20 Mar 2015 22:49:06 +0100 Subject: [PATCH 069/152] added full CHANGELOG.md --- CHANGELOG.md | 212 ++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 200 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6785832b..265a59f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,200 @@ -## 2.0.0 (2014-10-10) - -Features: - -- Moved to [official elasticsearch driver](https://github.com/elasticsearch/elasticsearch-js) - - Caused `search` api to conform closer to official driver - - Added options to searching -- Refactored bulk api -- Refreshed README.md -- Added CHANGELOG.md -- Added CONTRIBUTING.md -- Added LICENSE.md +2.0.10 / 2015-03-19 +=================== + + * updated elasticsearch dependency. changed bulk config + * Dependecies updated + +2.0.9 / 2015-03-19 +================== + + * Increased timeout for travis environment + * Update .travis.yml + testing conf. + * added timeout env variable for travis to wait on index deletion + +2.0.8 / 2015-03-17 +================== + + * fixed timeout for bulk test + * Merge pull request [#40](https://github.com/mongoosastic/mongoosastic/issues/40) from srfrnk/patch-1 + Patch 1 - fixed + * Merge pull request [#47](https://github.com/mongoosastic/mongoosastic/issues/47) from guumaster/master + small patch for nested array schemas + * Merge pull request [#53](https://github.com/mongoosastic/mongoosastic/issues/53) from gazsp/master + Fixes [#49](https://github.com/mongoosastic/mongoosastic/issues/49) + * Fixes [#49](https://github.com/mongoosastic/mongoosastic/issues/49) + * small patch for nested array schemas + * wrong value used... now should be better. + * fixed commit + * allow debugging the calls made by elasticsearch client. 
+ added ability to send {log:"trace"} in options to enable logging + +2.0.6 / 2014-12-11 +================== + + * Merge pull request [#35](https://github.com/mongoosastic/mongoosastic/issues/35) from jitowix/master + add settings when index is created + * add settings when index is created + +2.0.5 / 2014-11-21 +================== + + * Merge pull request [#30](https://github.com/mongoosastic/mongoosastic/issues/30) from nicolasmccurdy/mention-estruncate + In documentation files, rename "truncate" to "esTruncate" + * In documentation files, rename "truncate" to "esTruncate" + +2.0.4 / 2014-11-10 +================== + + * Merge pull request [#27](https://github.com/mongoosastic/mongoosastic/issues/27) from ignlg/feature/serialize-cast-bulk + Serialize on bulk calls. Serialize: this = full model. + * Serialize on bulk calls. Serialize: this = full model. + +2.0.3 / 2014-11-10 +================== + + * Merge pull request [#26](https://github.com/mongoosastic/mongoosastic/issues/26) from b96705008/master + get rid of "continue" when encounter objectid (issue [#12](https://github.com/mongoosastic/mongoosastic/issues/12)) + * get rid of "continue" when encounter objectid + * remove unstable node testing, broken for now + +2.0.2 / 2014-11-05 +================== + + * Treat null query like undefined + +2.0.1 / 2014-11-02 +================== + + * Merge pull request [#23](https://github.com/mongoosastic/mongoosastic/issues/23) from sascha/master + 'protocol' and 'auth' options are ignored + * 'protocol' and 'auth' options are ignored + This fixes an issue, where the 'protocol' and 'auth' options were ignored. + * Merge pull request [#21](https://github.com/mongoosastic/mongoosastic/issues/21) from mongoosastic/feature/official-driver + Feature/official driver + * Add changelog + +2.0.0 / 2014-10-30 +================== + + * updates for travis + * longer delay for bulk test + * significant version bump + * formatting + * Add gitter badge + * Update query interface + * refresh readme + * don't stop bulk options with synchronize + * refactor bulk api + * uppercase README + * Break out docs + * remove elastical dependency + * All tests passing + * had to scale back abstraction on search + * Close to fixing geo test + * first pass at integrating elasticsearch driver + * remove semicolons from mongoosastic.js + +1.0.2 / 2014-10-28 +================== + + * Document geo_shape + +1.0.1 / 2014-10-28 +================== + + * Add documentation about bulk api + +1.0.0 / 2014-10-28 +================== + + * big api changes, big version bump + * Merge pull request [#17](https://github.com/mongoosastic/mongoosastic/issues/17) from mongoosastic/albanm/feature/bulk-and-array-indexing + Albanm/feature/bulk and array indexing + * resolve conflicts + * use containEql instead of include + * Merge pull request [#16](https://github.com/mongoosastic/mongoosastic/issues/16) from mongoosastic/remove-river-code + remove river code + * Merge pull request [#14](https://github.com/mongoosastic/mongoosastic/issues/14) from mongoosastic/nlko-geo_shape + Nlko geo shape + * Merge pull request [#15](https://github.com/mongoosastic/mongoosastic/issues/15) from mongoosastic/cubuzoa/feature/hydrate-hits + Cubuzoa/feature/hydrate hits + * remove river code + * Update hydrated tests to conform to api + * Get first level of hits field + Provided fix for etting first level `hits` field of search results when + used hydrate + * Correct enveloppe test + Enveloppe corners were in wrong order resulting in a bad test. 
+ * Add ES 1.0 support for geo shape tests + * Added testfor geo_shape and updated manual + * Add test for undefined object field in the path prior of its use + * Correct boost test field (support ES 0.9 and 1.0). + In my tests, the mapping format returned by the getMapping function is + not the same between 0.90.11 and 1.0 + * Keep geo_* types in the mapping + Prior, only geo_point were kept in the mapping. + * Update readme.md + More dynamic version info + +0.6.1 / 2014-10-24 +================== + + * Update badge + * Update repo info in package.json + +0.6.0 / 2014-10-14 +================== + + * remove outdated river info + * add more node versions to travis + * Merge pull request [#128](https://github.com/mongoosastic/mongoosastic/issues/128) from marsanla/patch-3 + Add elasticsearch client + * Merge pull request [#120](https://github.com/mongoosastic/mongoosastic/issues/120) from antoineverger/master + Add the esTruncate static method to remove all documents from an index + * Add elasticsearch client + Add elasticsearch client to avoid duplicate instances and call from model plugin. + * Documentation for the truncate static method + * Centralise the "warmup" timeout value in the config to make it easier to adjust it + * Add the feature to pre-process a field before indexing + * Add the esTruncate static method to remove all documents from an index + * bump semver + * Merge pull request [#119](https://github.com/mongoosastic/mongoosastic/issues/119) from antoineverger/master + Add the settings to create mapping. + +0.5.0 / 2014-09-23 +================== + + * Add the settings to create mapping. + First step to have a better configuration of the index settings. + * update readme to reflect version + +0.4.1 / 2014-08-28 +================== + + * Merge pull request [#116](https://github.com/mongoosastic/mongoosastic/issues/116) from sascha/feature/id-in-subdocuments + '_id' and/or 'id' properties in subdocuments + * Merge pull request [#115](https://github.com/mongoosastic/mongoosastic/issues/115) from danteata/master + fixed configuration setup example + * '_id' and 'id' in subdocuments + Added the possibility to have properties called '_id' or 'id' within subdocuments. + * fixed configuration setup example + * Update semver to reflect api change + * Merge pull request [#111](https://github.com/mongoosastic/mongoosastic/issues/111) from astro/refresh + expose index refresh + +0.4.0 / 2014-08-18 +================== + + * Update version, could be some breaking changes + +0.3.0 / 2014-08-15 +================== + + * Merge pull request [#113](https://github.com/mongoosastic/mongoosastic/issues/113) from aschmid/master + fixed issue where object properties where ignored by serialize + * fixed issue where object properties where ignored by serialize + * Merge pull request [#99](https://github.com/mongoosastic/mongoosastic/issues/99) from xren/master + Emit the error when doc.save() fails during synchronization + * Merge pull request [#100](https://github.com/mongoosastic/mongoosastic/issues/100) from clippPR/master + fixing this bug (hopefully) - https://github.com/jamescarr/mongoosastic/... 
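The patch that follows (070) adds a `highlight` option to `search`. As a rough usage sketch based only on that patch's own test file (`test/highlight-features-test.js`), which is where the `Text` model and its `quote` field are defined, a highlighted query could look like this:

```javascript
// Sketch only: the model mirrors test/highlight-features-test.js from the patch below.
var mongoose = require('mongoose');
var mongoosastic = require('mongoosastic');
var Schema = mongoose.Schema;

var TextSchema = new Schema({ title: String, quote: String });
TextSchema.plugin(mongoosastic);
var Text = mongoose.model('Text', TextSchema);

Text.search({
  match_phrase: { quote: 'Death' }
}, {
  highlight: {
    fields: { quote: {} }
  }
}, function (err, res) {
  if (err) throw err;
  res.hits.hits.forEach(function (hit) {
    // Raw hits expose the elasticsearch highlight fragments directly;
    // with {hydrate: true} the patch copies them onto each document as _highlight.
    console.log(hit.highlight.quote);
  });
});
```
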
From 2065eeaa6f21e7c3d1c95c723cec1bdfc42548a1 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 21 Mar 2015 09:38:13 +0100 Subject: [PATCH 070/152] Added highlight feature #51 --- lib/mongoosastic.js | 19 ++++- test/highlight-features-test.js | 127 ++++++++++++++++++++++++++++++++ 2 files changed, 142 insertions(+), 4 deletions(-) create mode 100644 test/highlight-features-test.js diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 356760f1..06af893d 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -209,15 +209,22 @@ module.exports = function Mongoosastic(schema, options){ options = {} } + options.hydrateOptions = options.hydrateOptions || defaultHydrateOptions || {}; + if (query === null) query = undefined var model = this , esQuery = { - body: {query: query}, + body: { + query: query + }, index: options.index || indexName, type: options.type || typeName } + if (options.highlight) { + esQuery.body.highlight = options.highlight; + } Object.keys(options).forEach(function(opt) { if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) @@ -231,7 +238,7 @@ module.exports = function Mongoosastic(schema, options){ cb(err) } else { if (alwaysHydrate || options.hydrate) - hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) + hydrate(res, model, options, cb) else cb(null, res) } @@ -399,11 +406,12 @@ function hydrate(res, model, options, cb){ return a._id }) , query = model.find({_id:{$in:ids}}) + hydrateOptions = options.hydrateOptions // Build Mongoose query based on hydrate options // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(options).forEach(function(option){ - query[option](options[option]) + Object.keys(hydrateOptions).forEach(function(option){ + query[option](hydrateOptions[option]) }) query.exec(function(err, docs){ @@ -414,6 +422,9 @@ function hydrate(res, model, options, cb){ docs.forEach(function(doc) { var i = resultsMap[doc._id] + if (options.highlight) { + doc._highlight = results.hits[i].highlight; + } hits[i] = doc }) results.hits = hits diff --git a/test/highlight-features-test.js b/test/highlight-features-test.js new file mode 100644 index 00000000..28ca387b --- /dev/null +++ b/test/highlight-features-test.js @@ -0,0 +1,127 @@ +var mongoose = require('mongoose') + , should = require('should') + , config = require('./config') + , Schema = mongoose.Schema + , ObjectId = Schema.ObjectId + , async = require('async') + , mongoosastic = require('../lib/mongoosastic'); + +var TextSchema = new Schema({ + title: String, + quote: String +}); + +TextSchema.plugin(mongoosastic); + +var Text = mongoose.model('Text', TextSchema); + +describe('Highlight search', function(){ + before(function(done){ + mongoose.connect(config.mongoUrl, function(){ + Text.remove(function(){ + config.deleteIndexIfExists(['texts'], function(){ + + // Quotes are from Terry Pratchett's Discworld books + var texts = [ + new Text({ + title: 'The colour of magic', + quote: 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' + }), + new Text({ + title: 'The Light Fantastic', + quote: 'The death of the warrior or the old man or the little child, this I understand, and I take ' + + 'away the pain and end the suffering. I do not understand this death-of-the-mind' + }), + new Text({ + title: 'Equal Rites', + quote: 'Time passed, which, basically, is its job' + }), + new Text({ + title: 'Mort', + quote: 'You don\'t see people at their best in this job, said Death.' 
+ }) + ]; + async.forEach(texts, save, function(){ + setTimeout(done, config.indexingTimeout); + }); + }); + }); + }); + }); + after(function(done){ + Text.remove(done); + }); + + var responses = [ + 'You don\'t see people at their best in this job, said Death.', + 'The death of the warrior or the old man or the little child, this I understand, and I take away the', + ' pain and end the suffering. I do not understand this death-of-the-mind', + 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' + ]; + + describe('Highlight without hydrating', function(){ + it('should return highlighted text on every hit result', function(done){ + + Text.search({ + match_phrase: { + quote: 'Death' + } + }, { + highlight: { + fields: { + quote: {} + } + } + },function(err, res){ + + res.hits.total.should.eql(3); + res.hits.hits.forEach(function(text){ + text.should.have.property('highlight'); + text.highlight.should.have.property('quote'); + text.highlight.quote.forEach(function(q){ + responses.should.containEql(q); + }); + }); + done(); + }); + }); + + }); + + describe('Highlight hydrated results', function(){ + it('should return highlighted text on every resulting document', function(done){ + + Text.search({ + match_phrase: { + quote: 'Death' + } + }, { + hydrate: true, + highlight: { + fields: { + quote: {} + } + } + },function(err, res){ + + res.hits.total.should.eql(3); + res.hits.hits.forEach(function(model){ + model.should.have.property('_highlight'); + model._highlight.should.have.property('quote'); + model._highlight.quote.forEach(function(q){ + responses.should.containEql(q); + }); + }); + done(); + }); + }); + + + + }); +}); + +function save(model, cb){ + model.save(); + model.on('es-indexed', cb); +} From cf86ed30d973a07aa0a627d1c6264f7ee3c4d353 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 21 Mar 2015 11:16:13 +0100 Subject: [PATCH 071/152] lots of smalls code cleaning --- lib/mapping-generator.js | 12 ++-- lib/mongoosastic.js | 14 ++--- test/alternative-index-method-test.js | 10 +-- test/boost-field-test.js | 2 +- test/bulk-test.js | 89 ++++++++++++--------------- test/config.js | 21 +++++++ test/geo-test.js | 53 +++++++--------- test/highlight-features-test.js | 10 +-- test/index-test.js | 31 +++++----- test/mapping-generator-test.js | 7 +-- test/search-features-test.js | 10 +-- test/serialize-test.js | 11 ++-- test/synchronize-test.js | 24 +++----- test/truncate-test.js | 3 +- 14 files changed, 135 insertions(+), 162 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 97665e84..4f9180c5 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -104,7 +104,7 @@ function getCleanTree(tree, paths, prefix) { } for (var field in tree){ - if (prefix === '' && (field === "id" || field === "_id")) { + if (prefix === '' && (field === 'id' || field === '_id')) { continue; } @@ -155,11 +155,11 @@ function getCleanTree(tree, paths, prefix) { var geoFound = false; for (key in value) { if (value.hasOwnProperty(key) && /^geo_/.test(key)) { - cleanTree[field] = value[key]; - geoFound = true; - //break; - } - } + cleanTree[field] = value[key]; + geoFound = true; + //break; + } + } if(geoFound) continue } diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 06af893d..7a90cd8f 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -10,7 +10,6 @@ module.exports = function Mongoosastic(schema, options){ , typeName = options && options.type , alwaysHydrate = options && options.hydrate , defaultHydrateOptions = 
options && options.hydrateOptions - , _mapping = null , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 , protocol = options && options.protocol ? options.protocol : 'http' @@ -146,7 +145,6 @@ module.exports = function Mongoosastic(schema, options){ */ schema.statics.synchronize = function(query){ var em = new events.EventEmitter() - , readyToClose , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} @@ -217,7 +215,7 @@ module.exports = function Mongoosastic(schema, options){ var model = this , esQuery = { body: { - query: query + query: query }, index: options.index || indexName, type: options.type || typeName @@ -248,8 +246,8 @@ module.exports = function Mongoosastic(schema, options){ function bulkDelete(options, cb) { bulkAdd({ delete: { - _index: options.index || indexName, - _type: options.type || typeName, + _index: options.index || indexName, + _type: options.type || typeName, _id: options.model._id.toString() } }) @@ -259,8 +257,8 @@ module.exports = function Mongoosastic(schema, options){ function bulkIndex(options) { bulkAdd({ index: { - _index: options.index || indexName, - _type: options.type || typeName, + _index: options.index || indexName, + _type: options.type || typeName, _id: options.model._id.toString() } }) @@ -317,7 +315,7 @@ module.exports = function Mongoosastic(schema, options){ function setIndexNameIfUnset(model){ var modelName = model.toLowerCase() if(!indexName){ - indexName = modelName + "s" + indexName = modelName + 's' } if(!typeName){ typeName = modelName diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 5c4e9895..3c3c92c7 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -1,10 +1,9 @@ var mongoose = require('mongoose') , should = require('should') , config = require('./config') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId , mongoosastic = require('../lib/mongoosastic') - , Tweet = require('./models/tweet'); + , Tweet = require('./models/tweet') + , Schema = mongoose.Schema; describe('Index Method', function(){ before(function(done){ @@ -13,7 +12,7 @@ describe('Index Method', function(){ Tweet.remove(function() { config.createModelAndEnsureIndex(Tweet, { user: 'jamescarr' - , message: "I know kung-fu!" + , message: 'I know kung-fu!' 
, post_date: new Date() }, done); }) @@ -41,6 +40,7 @@ describe('Index Method', function(){ }); }); }); + it('should be able to index to alternative index', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know taebo!'; @@ -54,6 +54,7 @@ describe('Index Method', function(){ }); }); }); + it('should be able to index to alternative index and type', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know taebo!'; @@ -67,4 +68,5 @@ describe('Index Method', function(){ }); }); }); + }); diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 26b8290c..e28c4b35 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -3,7 +3,6 @@ var mongoose = require('mongoose') , should = require('should') , config = require('./config') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId , mongoosastic = require('../lib/mongoosastic'); @@ -15,6 +14,7 @@ var TweetSchema = new Schema({ }); TweetSchema.plugin(mongoosastic); + var BlogPost = mongoose.model('BlogPost', TweetSchema); describe('Add Boost Option Per Field', function(){ diff --git a/test/bulk-test.js b/test/bulk-test.js index f0e91aa0..3bb9540a 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -1,52 +1,51 @@ var mongoose = require('mongoose'), - should = require('should'), - config = require('./config'), - Schema = mongoose.Schema, - ObjectId = Schema.ObjectId, - async = require('async'), - mongoosastic = require('../lib/mongoosastic'); + async = require('async'), + should = require('should'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); var BookSchema = new Schema({ - title: String + title: String }); BookSchema.plugin(mongoosastic, { - bulk: { - size: 100, - delay: 1000 - } + bulk: { + size: 100, + delay: 1000 + } }); var Book = mongoose.model('Book2', BookSchema); describe('Bulk mode', function() { - var books = null; + var books = null; - before(function(done) { - config.deleteIndexIfExists(['book2s'], function() { - mongoose.connect(config.mongoUrl, function() { - var client = mongoose.connections[0].db; - client.collection('book2s', function(err, _books) { - books = _books; - Book.remove(done); - }); - }); - }); - }); - before(function(done) { - async.forEach(bookTitles(), function(title, cb) { - new Book({ - title: title - }).save(cb); - }, done) - }); - before(function(done) { - Book.findOne({ - title: 'American Gods' - }, function(err, book) { - book.remove(done) - }); - }); - it('should index all objects and support deletions too', function(done) { + before(function(done) { + config.deleteIndexIfExists(['book2s'], function() { + mongoose.connect(config.mongoUrl, function() { + var client = mongoose.connections[0].db; + client.collection('book2s', function(err, _books) { + books = _books; + Book.remove(done); + }); + }); + }); + }); + before(function(done) { + async.forEach(config.bookTitlesArray(), function(title, cb) { + new Book({ + title: title + }).save(cb); + }, done) + }); + before(function(done) { + Book.findOne({ + title: 'American Gods' + }, function(err, book) { + book.remove(done) + }); + }); + it('should index all objects and support deletions too', function(done) { // This timeout is important, as Elasticsearch is "near-realtime" and the index/deletion takes time that // needs to be taken into account in these tests @@ -56,17 +55,5 @@ describe('Bulk mode', function() { done(); }); }, process.env.BULK_TEST_TIMEOUT || 4000); - }); + 
}); }); - -function bookTitles() { - var books = [ - 'American Gods', - 'Gods of the Old World', - 'American Gothic' - ]; - for (var i = 0; i < 50; i++) { - books.push('ABABABA' + i); - } - return books; -} diff --git a/test/config.js b/test/config.js index 9943b81a..4d959ca2 100644 --- a/test/config.js +++ b/test/config.js @@ -22,6 +22,8 @@ module.exports = { }, done); } , createModelAndEnsureIndex: createModelAndEnsureIndex + , saveAndWaitIndex: saveAndWaitIndex + , bookTitlesArray: bookTitlesArray }; function createModelAndEnsureIndex(model, obj, cb){ @@ -32,3 +34,22 @@ function createModelAndEnsureIndex(model, obj, cb){ }); }); } + +function saveAndWaitIndex(model, cb){ + model.save(function(err) { + if (err) cb(err); + else model.on('es-indexed', cb ); + }); +} + +function bookTitlesArray() { + var books = [ + 'American Gods', + 'Gods of the Old World', + 'American Gothic' + ]; + for (var i = 0; i < 50; i++) { + books.push('ABABABA' + i); + } + return books; +} diff --git a/test/geo-test.js b/test/geo-test.js index 567ff413..08e9b9bc 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -3,13 +3,9 @@ var mongoose = require('mongoose') , should = require('should') , config = require('./config') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId , mongoosastic = require('../lib/mongoosastic'); - var GeoSchema; - - var GeoModel; describe('GeoTest', function(){ @@ -24,9 +20,9 @@ describe('GeoTest', function(){ type: {type: String}, geo_shape: { type:String, - es_type: "geo_shape", - es_tree: "quadtree", - es_precision: "1km" + es_type: 'geo_shape', + es_tree: 'quadtree', + es_precision: '1km' } } }); @@ -72,34 +68,30 @@ describe('GeoTest', function(){ } }); - - var saveAndWait = function (doc,cb) { - doc.save(function(err) { - if (err) cb(err); - else doc.on('es-indexed', cb ); - }); - }; - - saveAndWait(geo,function(err){ + config.saveAndWaitIndex(geo, function(err){ if (err) throw err; - saveAndWait(geo2,function(err){ + config.saveAndWaitIndex(geo2, function(err){ if (err) throw err; // Mongodb request - GeoModel.find({},function(err, res){ + GeoModel.find({}, function(err, res) { if (err) throw err; res.length.should.eql(2); res[0].frame.type.should.eql('envelope'); res[0].frame.coordinates[0].should.eql([1,4]); res[0].frame.coordinates[1].should.eql([3,2]); done(); - })})})}) + }) + }) + }) + + }) it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ // ES request GeoModel.search({ match_all: {} - }, {sort: "myId:asc"}, function(err, res){ + }, {sort: 'myId:asc'}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); @@ -125,7 +117,7 @@ describe('GeoTest', function(){ setTimeout(function(){ GeoModel.search({ match_all: {} - }, {sort: "myId:asc"}, function(err, res){ + }, {sort: 'myId:asc'}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); @@ -138,18 +130,18 @@ describe('GeoTest', function(){ }); }); - - it('should be able to search points inside frames', function(done){ var geoQuery = { filtered: { - "query": {"match_all": {}}, - "filter": { - "geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] + query: { + match_all: {} + }, + filter: { + geo_shape: { + frame: { + shape: { + type: 'point', + coordinates: [3,1] } } } @@ -186,5 +178,4 @@ describe('GeoTest', function(){ }, 1000); }); - }); diff --git a/test/highlight-features-test.js 
b/test/highlight-features-test.js index 28ca387b..5eabeb3a 100644 --- a/test/highlight-features-test.js +++ b/test/highlight-features-test.js @@ -1,9 +1,8 @@ var mongoose = require('mongoose') + , async = require('async') , should = require('should') , config = require('./config') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , async = require('async') , mongoosastic = require('../lib/mongoosastic'); var TextSchema = new Schema({ @@ -41,7 +40,7 @@ describe('Highlight search', function(){ quote: 'You don\'t see people at their best in this job, said Death.' }) ]; - async.forEach(texts, save, function(){ + async.forEach(texts, config.saveAndWaitIndex, function(){ setTimeout(done, config.indexingTimeout); }); }); @@ -120,8 +119,3 @@ describe('Highlight search', function(){ }); }); - -function save(model, cb){ - model.save(); - model.on('es-indexed', cb); -} diff --git a/test/index-test.js b/test/index-test.js index fd9c6308..00f40ebe 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -1,9 +1,8 @@ var mongoose = require('mongoose') + , esClient = new(require('elasticsearch').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , esClient = new(require('elasticsearch').Client) , mongoosastic = require('../lib/mongoosastic') , Tweet = require('./models/tweet'); @@ -17,7 +16,7 @@ var TalkSchema = new Schema({ }); TalkSchema.plugin(mongoosastic) -var Talk = mongoose.model("Talk", TalkSchema); +var Talk = mongoose.model('Talk', TalkSchema); var PersonSchema = new Schema({ name: {type:String, es_indexed:true} @@ -35,7 +34,7 @@ PersonSchema.plugin(mongoosastic, { , hydrateOptions: {lean: true, sort: '-name', select: 'address name life'} }); -var Person = mongoose.model("Person", PersonSchema); +var Person = mongoose.model('Person', PersonSchema); // -- alright let's test this shiznit! describe('indexing', function(){ @@ -65,8 +64,8 @@ describe('indexing', function(){ Tweet.createMapping({analysis: { analyzer: { stem: { - tokenizer: "standard", - filter: ["standard", "lowercase", "stop", "porter_stem"] + tokenizer: 'standard', + filter: ['standard', 'lowercase', 'stop', 'porter_stem'] } } } @@ -92,12 +91,12 @@ describe('indexing', function(){ config.createModelAndEnsureIndex(Tweet, { user: 'jamescarr' , userId: 1 - , message: "I like Riak better" + , message: 'I like Riak better' , post_date: new Date() }, done); }); - it("should use the model's id as ES id", function(done){ - Tweet.findOne({message:"I like Riak better"}, function(err, doc){ + it('should use the model\'s id as ES id', function(done){ + Tweet.findOne({message:'I like Riak better'}, function(err, doc){ esClient.get({ index: 'tweets', type: 'tweet', @@ -199,13 +198,13 @@ describe('indexing', function(){ var talk = new Talk({ speaker: '' , year: 2013 - , title: "Dude" - , abstract: "" + , title: 'Dude' + , abstract: '' , bio: '' }); var tweet = new Tweet({ user: 'Dude' - , message: "Go see the big lebowski" + , message: 'Go see the big lebowski' , post_date: new Date() }); tweet.save(function(){ @@ -237,7 +236,7 @@ describe('indexing', function(){ before(function(done){ config.createModelAndEnsureIndex(Person, { name: 'James Carr' - , address: "Exampleville, MO" + , address: 'Exampleville, MO' , phone: '(555)555-5555' }, done); }); @@ -257,8 +256,8 @@ describe('indexing', function(){ config.createModelAndEnsureIndex(Talk,{ speaker: 'James Carr' , year: 2013 - , title: "Node.js Rocks" - , abstract: "I told you node.js was cool. Listen to me!" 
+ , title: 'Node.js Rocks' + , abstract: 'I told you node.js was cool. Listen to me!' , bio: 'One awesome dude.' }, done); }); @@ -296,7 +295,7 @@ describe('indexing', function(){ before(function(done){ config.createModelAndEnsureIndex(Person, { name: 'Bob Carr' - , address: "Exampleville, MO" + , address: 'Exampleville, MO' , phone: '(555)555-5555' , life: { born: 1950, other: 2000 } }, done); diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js index 61d07fc4..4ea5f4f3 100644 --- a/test/mapping-generator-test.js +++ b/test/mapping-generator-test.js @@ -1,8 +1,7 @@ -var Generator = require('../lib/mapping-generator') - , mongoose = require('mongoose') +var mongoose = require('mongoose') , should = require('should') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId + , Generator = require('../lib/mapping-generator') , generator = new Generator(); describe('MappingGenerator', function(){ @@ -243,7 +242,7 @@ describe('MappingGenerator', function(){ it('maps all fields when schema has no es_indexed flag', function(done) { generator.generateMapping(new Schema({ implicit_field_1: {type: String}, - implicit_field_2: {type: Number}, + implicit_field_2: {type: Number} }), function(err, mapping){ mapping.properties.should.have.property('implicit_field_1'); mapping.properties.should.have.property('implicit_field_2'); diff --git a/test/search-features-test.js b/test/search-features-test.js index 993de91e..788a0f31 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -1,9 +1,8 @@ var mongoose = require('mongoose') + , async = require('async') , should = require('should') , config = require('./config') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , async = require('async') , mongoosastic = require('../lib/mongoosastic'); var BondSchema = new Schema({ @@ -27,7 +26,7 @@ describe('Query DSL', function(){ , new Bond({name:'Construction', type:'B', price:20000}) , new Bond({name:'Legal', type:'C', price:30000}) ]; - async.forEach(bonds, save, function(){ + async.forEach(bonds, config.saveAndWaitIndex, function(){ setTimeout(done, config.indexingTimeout); }); }); @@ -56,8 +55,3 @@ describe('Query DSL', function(){ }); }); }); - -function save(model, cb){ - model.save(); - model.on('es-indexed', cb); -} diff --git a/test/serialize-test.js b/test/serialize-test.js index 710ede54..774473a2 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -1,13 +1,10 @@ -var should = require('should') +var mongoose = require('mongoose') + , should = require('should') , generator = new (require('../lib/mapping-generator')) , serialize = require('../lib/serialize') - , mongoose = require('mongoose') - , Schema = mongoose.Schema - , ObjectId = Schema.Types.ObjectId; + , Schema = mongoose.Schema; -var BowlingBall = mongoose.model('BowlingBall', new Schema({ - -})); +var BowlingBall = mongoose.model('BowlingBall', new Schema); var PersonSchema22 = new Schema({ name: { first: String diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 45eeb25b..5138d9e2 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -1,14 +1,14 @@ var mongoose = require('mongoose') , should = require('should') + , async = require('async') , config = require('./config') + , mongoosastic = require('../lib/mongoosastic') , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , async = require('async') - , mongoosastic = require('../lib/mongoosastic'); var BookSchema = new Schema({ title: String }); + BookSchema.plugin(mongoosastic); 
var Book = mongoose.model('Book', BookSchema); @@ -27,13 +27,15 @@ describe('Synchronize', function(){ }); }); }); + describe('existing collection', function(){ + before(function(done){ - async.forEach(bookTitles() - , function(title, cb){ + async.forEach(config.bookTitlesArray(), function(title, cb){ books.insert({title:title}, cb); }, done); }); + it('should index all existing objects', function(done){ var stream = Book.synchronize() , count = 0; @@ -52,16 +54,6 @@ describe('Synchronize', function(){ }, config.indexingTimeout); }); }); + }); }); -function bookTitles(){ - var books = [ - 'American Gods', - 'Gods of the Old World', - 'American Gothic' - ]; - for(var i = 0; i < 50; i++){ - books.push('ABABABA'+i); - } - return books; -} diff --git a/test/truncate-test.js b/test/truncate-test.js index da236202..98a89150 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -1,9 +1,8 @@ var mongoose = require('mongoose'), + async = require('async'), should = require('should'), config = require('./config'), Schema = mongoose.Schema, - ObjectId = Schema.ObjectId, - async = require('async'), mongoosastic = require('../lib/mongoosastic'); var DummySchema = new Schema({ From fd969d40546dc4758176b973b4d6aa96eec2f0fc Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 21 Mar 2015 12:26:55 +0100 Subject: [PATCH 072/152] updates to README --- README.md | 63 +++++++++++++++++++++++++++---------------------------- 1 file changed, 31 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index f86fdb04..ed2a773a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ # Mongoosastic -[![Build -Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) +[![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) [![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) @@ -9,19 +8,19 @@ Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatical - [Installation](#installation) - [Setup](#setup) - [Indexing](#indexing) - - [Saving a document](#saving-a-document) - - [Indexing nested models](#indexing-nested-models) - - [Indexing an existing collection](#indexing-an-existing-collection) - - [Bulk indexing](#bulk-indexing) - - [Indexing on demand](#indexing-on-demand) - - [Truncating an index](#truncating-an-index) + - [Saving a document](#saving-a-document) + - [Indexing nested models](#indexing-nested-models) + - [Indexing an existing collection](#indexing-an-existing-collection) + - [Bulk indexing](#bulk-indexing) + - [Indexing on demand](#indexing-on-demand) + - [Truncating an index](#truncating-an-index) - [Mapping](#mapping) - - [Geo mapping](#geo-mapping) - - [Indexing a geo point](#indexing-a-geo-point) - - [Indexing a geo shape](#indexing-a-geo-shape) - - [Creating mappings on-demand](#creating-mappings-on-demand) + - [Geo mapping](#geo-mapping) + - [Indexing a geo point](#indexing-a-geo-point) + - [Indexing a geo shape](#indexing-a-geo-shape) + - [Creating mappings on-demand](#creating-mappings-on-demand) - [Queries](#queries) - - [Hydration](#hydration) + - [Hydration](#hydration) ## Installation @@ -35,20 +34,20 @@ npm install -S mongoosastic Options are: -* `index` - the index in elastic search 
to use. Defaults to the +* `index` - the index in Elasticsearch to use. Defaults to the pluralization of the model name. -* `type` - the type this model represents in elastic search. Defaults +* `type` - the type this model represents in Elasticsearch. Defaults to the model name. -* `host` - the host elastic search is running on -* `port` - the port elastic search is running on -* `auth` - the authentication needed to reach elastic search server. In the standard format of 'username:password' -* `protocol` - the protocol the elastic search server uses. Defaults to http +* `host` - the host Elasticsearch is running on +* `port` - the port Elasticsearch is running on +* `auth` - the authentication needed to reach Elasticsearch server. In the standard format of 'username:password' +* `protocol` - the protocol the Elasticsearch server uses. Defaults to http * `hydrate` - whether or not to lookup results in mongodb before * `hydrateOptions` - options to pass into hydrate function * `bulk` - size and delay options for bulk indexing -To have a model indexed into elastic search simply add the plugin. +To have a model indexed into Elasticsearch simply add the plugin. ```javascript var mongoose = require('mongoose') @@ -67,12 +66,12 @@ User.plugin(mongoosastic) This will by default simply use the pluralization of the model name as the index while using the model name itself as the type. So if you create a new User object and save it, you can see it by navigating to -http://localhost:9200/users/user/_search (this assumes elasticsearch is +http://localhost:9200/users/user/_search (this assumes Elasticsearch is running locally on port 9200). -The default behavior is all fields get indexed into elasticsearch. This can be a little wasteful especially considering that +The default behavior is all fields get indexed into Elasticsearch. This can be a little wasteful especially considering that the document is now just being duplicated between mongodb and -elasticsearch so you should consider opting to index only certain fields by specifying ''es_indexed'' on the +Elasticsearch so you should consider opting to index only certain fields by specifying ''es_indexed'' on the fields you want to store: @@ -91,7 +90,7 @@ will be indexed for searching. Now, by adding the plugin, the model will have a new method called `search` which can be used to make simple to complex searches. The `search` -method accepts [standard elasticsearch query DSL](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-queries.html) +method accepts [standard Elasticsearch query DSL](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-queries.html) ```javascript User.search({ @@ -178,7 +177,7 @@ var stream = Book.synchronize({author: 'Arthur C. Clarke'}) ### Bulk Indexing -You can also specify `bulk` options with mongoose which will utilize elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. +You can also specify `bulk` options with mongoose which will utilize Elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. @@ -215,7 +214,7 @@ mongodb. Use save for that. ### Truncating an index -The static method `esTruncate` will delete all documents from the associated index. 
This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch.
+The static method `esTruncate` will delete all documents from the associated index. This method combined with synchronize can be useful in integration tests, for example when each test case needs a cleaned-up index in Elasticsearch.

```javascript
GarbageModel.esTruncate(function(err){...});
```
@@ -224,7 +223,7 @@ GarbageModel.esTruncate(function(err){...});

## Mapping

Schemas can be configured to have special options per field. These match
-with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by elasticsearch with the only difference being they are all prefixed by "es_".
+with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by Elasticsearch with the only difference being they are all prefixed by "es_".

So for example. If you wanted to index a book model and have the boost
for title set to 2.0 (giving it greater priority when searching) you'd
@@ -242,7 +241,7 @@ This example uses a few other mapping fields... such as null_value and
type (which overrides whatever value the schema type is, useful if you
want stronger typing such as float).

-There are various mapping options that can be defined in elasticsearch. Check out [http://www.elasticsearch.org/guide/reference/mapping/](http://www.elasticsearch.org/guide/reference/mapping/) for more information. Here are examples to the currently possible definitions in mongoosastic:
+There are various mapping options that can be defined in Elasticsearch. Check out [http://www.elasticsearch.org/guide/reference/mapping/](http://www.elasticsearch.org/guide/reference/mapping/) for more information. Here are examples of the currently possible definitions in mongoosastic:

```javascript
var ExampleSchema = new Schema({
@@ -410,7 +409,7 @@ populated in the `err` argument.

## Queries

-The full query DSL of elasticsearch is exposed through the search
+The full query DSL of Elasticsearch is exposed through the search
method. For example, if you wanted to find all people between ages 21
and 30:

```javascript
Person.search({
@@ -427,7 +426,7 @@ Person.search({
});
```

-See the elasticsearch [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/) docs for more information.
+See the Elasticsearch [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/) docs for more information.

You can also specify query options like
[sorts](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort)

@@ -442,7 +441,7 @@ Options for queries must adhere to the [javascript elasticsearch driver specs](h
### Hydration

By default objects returned from performing a search will be the objects
-as is in elastic search. This is useful in cases where only what was
+as is in Elasticsearch. This is useful in cases where only what was
indexed needs to be displayed (think a list of results) while the actual
mongoose object contains the full data when viewing one of the results.

@@ -468,7 +467,7 @@ User.search({query_string: {query: "john"}}, {hydrate:true, hydrateOptions: {sel
```

-Note using hydrate will be a degree slower as it will perform an elasticsearch
+Note using hydrate will be a degree slower as it will perform an Elasticsearch
From 5bfae810cda4d290b3444b010932d7c04445ad8e Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 21 Mar 2015 12:48:13 +0100 Subject: [PATCH 073/152] added multihost connection feature #56 --- CHANGELOG.md | 9 +++ README.md | 34 +++++++++--- lib/mongoosastic.js | 44 +++++++++++---- test/connection-test.js | 118 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 188 insertions(+), 17 deletions(-) create mode 100644 test/connection-test.js diff --git a/CHANGELOG.md b/CHANGELOG.md index 265a59f2..d991b868 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,12 @@ +2.1.0 / 2015-03-21 +=================== + + * added multihost connection feature [#56](https://github.com/mongoosastic/mongoosastic/issues/56) + * updates to README + * lots of smalls code cleaning + * Added highlight feature [#51](https://github.com/mongoosastic/mongoosastic/issues/51) + * added full CHANGELOG.md + 2.0.10 / 2015-03-19 =================== diff --git a/README.md b/README.md index ed2a773a..d196f4d5 100644 --- a/README.md +++ b/README.md @@ -34,10 +34,10 @@ npm install -S mongoosastic Options are: -* `index` - the index in Elasticsearch to use. Defaults to the - pluralization of the model name. -* `type` - the type this model represents in Elasticsearch. Defaults - to the model name. +* `index` - the index in Elasticsearch to use. Defaults to the pluralization of the model name. +* `type` - the type this model represents in Elasticsearch. Defaults to the model name. +* `esClient` - an existing Elasticsearch `Client` instance. +* `hosts` - an array hosts Elasticsearch is running on. * `host` - the host Elasticsearch is running on * `port` - the port Elasticsearch is running on * `auth` - the authentication needed to reach Elasticsearch server. In the standard format of 'username:password' @@ -71,7 +71,7 @@ running locally on port 9200). The default behavior is all fields get indexed into Elasticsearch. This can be a little wasteful especially considering that the document is now just being duplicated between mongodb and -Elasticsearch so you should consider opting to index only certain fields by specifying ''es_indexed'' on the +Elasticsearch so you should consider opting to index only certain fields by specifying `es_indexed` on the fields you want to store: @@ -85,8 +85,7 @@ var User = new Schema({ User.plugin(mongoosastic) ``` -In this case only the name field -will be indexed for searching. +In this case only the name field will be indexed for searching. Now, by adding the plugin, the model will have a new method called `search` which can be used to make simple to complex searches. The `search` @@ -103,6 +102,27 @@ User.search({ ``` +To connect to more than one host, you can use an array of hosts. 
+ +```javascript +MyModel.plugin(mongoosastic, { + hosts: [ + 'localhost:9200', + 'anotherhost:9200' + ] +}) +``` + +Also, you can re-use an existing Elasticsearch `Client` instance + +```javascript +var esClient = new elasticsearch.Client({host: 'localhost:9200'}); +MyModel.plugin(mongoosastic, { + esClient: esClient +}) +``` + + ## Indexing ### Saving a document diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 7a90cd8f..1b9a67e8 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -3,21 +3,25 @@ var elasticsearch = require('elasticsearch') , serialize = require('./serialize') , events = require('events') , nop = require('nop') + , util = require('util') module.exports = function Mongoosastic(schema, options){ - var mapping = getMapping(schema) + options = options || {} + + var bulkTimeout, bulkBuffer = [], esClient + , mapping = getMapping(schema) , indexName = options && options.index , typeName = options && options.type , alwaysHydrate = options && options.hydrate , defaultHydrateOptions = options && options.hydrateOptions - , host = options && options.host ? options.host : 'localhost' - , port = options && options.port ? options.port : 9200 - , protocol = options && options.protocol ? options.protocol : 'http' - , auth = options && options.auth ? options.auth : null - , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth},log:(options?options.log:null)}) , bulk = options && options.bulk - , bulkBuffer = [] - , bulkTimeout + + + if (options.esClient) { + esClient = options.esClient + } else { + esClient = createEsClient(options); + } setUpMiddlewareHooks(schema) @@ -89,7 +93,7 @@ module.exports = function Mongoosastic(schema, options){ } /** - * Unset elastic search index + * Unset elasticsearch index * @param options - (optional) options for unIndex * @param callback - callback when unIndex is complete */ @@ -347,7 +351,7 @@ module.exports = function Mongoosastic(schema, options){ }) /** - * Save in elastic search on save. + * Save in elasticsearch on save. */ schema.post('save', function(){ var model = this @@ -360,6 +364,26 @@ module.exports = function Mongoosastic(schema, options){ } +function createEsClient(options) { + + var esOptions = {} + + if (util.isArray(options.hosts) ) { + esOptions.host = options.hosts + } else { + esOptions.host = { + host: options && options.host ? options.host : 'localhost', + port: options && options.port ? options.port : 9200, + protocol: options && options.protocol ? options.protocol : 'http', + auth: options && options.auth ? 
options.auth : null + }; + } + + esOptions.log = (options?options.log:null) + + return new elasticsearch.Client(esOptions) +} + function createMappingIfNotPresent(options, cb) { var client = options.client , indexName = options.indexName diff --git a/test/connection-test.js b/test/connection-test.js new file mode 100644 index 00000000..1b4f98f9 --- /dev/null +++ b/test/connection-test.js @@ -0,0 +1,118 @@ +var mongoose = require('mongoose'), + async = require('async'), + should = require('should'), + elasticsearch = require('elasticsearch'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); + +var DummySchema = new Schema({ + text: String +}); +var Dummy = mongoose.model('Dummy1', DummySchema, 'dummys'); + + +describe('Elasticsearch Connection', function() { + + before(function(done) { + + mongoose.connect(config.mongoUrl, function() { + Dummy.remove(function() { + config.deleteIndexIfExists(['dummys'], function() { + var dummies = [ + new Dummy({ + text: 'Text1' + }), + new Dummy({ + text: 'Text2' + }) + ]; + async.forEach(dummies, function(item, cb) { + item.save(cb); + }, function() { + setTimeout(done, config.indexingTimeout); + }); + }); + }); + }); + }); + + after(function(done) { + Dummy.remove(done); + }); + + it('should be able to connect with default options', function(done) { + + DummySchema.plugin(mongoosastic); + var Dummy = mongoose.model('Dummy2', DummySchema, 'dummys'); + + tryDummySearch(Dummy, done); + + }); + + + it('should be able to connect with explicit options', function(done) { + + DummySchema.plugin(mongoosastic, { + host: 'localhost', + port: 9200 + }); + + var Dummy = mongoose.model('Dummy3', DummySchema, 'dummys'); + + tryDummySearch(Dummy, done); + + }); + + it('should be able to connect with an array of hosts', function(done) { + + DummySchema.plugin(mongoosastic, { + hosts: [ + 'localhost:9200', + 'localhost:9200' + ] + }); + var Dummy = mongoose.model('Dummy4', DummySchema, 'dummys'); + + tryDummySearch(Dummy, done); + + }); + + it('should be able to connect with an existing elasticsearch client', function(done) { + + var esClient = new elasticsearch.Client({host: 'localhost:9200'}); + + esClient.ping({ + requestTimeout: 1000 + }, function (err) { + if (err) { + return done(err); + } + + DummySchema.plugin(mongoosastic, { + esClient: esClient + }); + var Dummy = mongoose.model('Dummy5', DummySchema, 'dummys'); + + tryDummySearch(Dummy, done); + }); + + }); + +}); + +function tryDummySearch(model, cb) { + setTimeout(function(){ + model.search({ + query_string: { + query: 'Text1' + } + }, function(err, results) { + if(err) return cb(err) + + results.hits.total.should.eql(0); + cb(err); + }); + }, config.indexingTimeout); + +} From 5a6e983a5ed23759bbaa492781ac5cc60a2df6bb Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 21 Mar 2015 12:55:47 +0100 Subject: [PATCH 074/152] 2.1.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 38ccc435..7f22340c 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.10", + "version": "2.1.0", "tags": [ "mongodb", "elasticsearch", From 6656e28fa8dc23077332877a836715db3b7e7f4e Mon Sep 17 00:00:00 2001 From: Ro Ramtohul Date: Thu, 26 Mar 2015 15:41:43 +0000 Subject: [PATCH 075/152] Add esCount functionality --- lib/mongoosastic.js | 20 ++++++++++++++++ test/count-test.js | 56 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+) create mode 100644 test/count-test.js diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 1b9a67e8..8021ff4d 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -247,6 +247,26 @@ module.exports = function Mongoosastic(schema, options){ }) } + schema.statics.esCount = function (query, cb) { + var model = this; + setIndexNameIfUnset(model.modelName); + + if (cb == null && typeof query === 'function') { + cb = query; + query = null; + } + + var esQuery = { + body: { + query: query + }, + index: options.index || indexName, + type: options.type || typeName + } + + esClient.count(esQuery, cb); + } + function bulkDelete(options, cb) { bulkAdd({ delete: { diff --git a/test/count-test.js b/test/count-test.js new file mode 100644 index 00000000..15daa450 --- /dev/null +++ b/test/count-test.js @@ -0,0 +1,56 @@ +var mongoose = require('mongoose') + , async = require('async') + , esClient = new(require('elasticsearch').Client) + , should = require('should') + , config = require('./config') + , Schema = mongoose.Schema + , mongoosastic = require('../lib/mongoosastic'); + + +var CommentSchema = new Schema({ + user: String + , post_date: {type:Date, es_type:'date'} + , message: {type:String} + , title: {type:String, es_boost:2.0} +}); + +CommentSchema.plugin(mongoosastic); + +var Comment = mongoose.model('Comment', CommentSchema); + +describe('Count', function(){ + before(function(done){ + mongoose.connect(config.mongoUrl, function(){ + Comment.remove(function(){ + config.deleteIndexIfExists(['comments'], function() { + var comments = [ + new Comment({ + user: 'terry', + title: 'Ilikecars' + }), + new Comment({ + user: 'fred', + title: 'Ihatefish' + }) + ]; + async.forEach(comments, function(item, cb) { + item.save(cb); + }, function() { + setTimeout(done, config.indexingTimeout); + }); + }); + }); + }); + }); + + it('should count a type', function(done){ + Comment.esCount({ + term: { + user: 'terry' + } + }, function(err, results) { + results.count.should.eql(1); + done(err); + }); + }); +}); From 56d592f2b76028b9ca3322f067adae58c72b3209 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 28 Mar 2015 22:50:42 +0100 Subject: [PATCH 076/152] added gulp, jshint and jscs --- .jscsrc | 7 + .jshintrc | 17 + CHANGELOG.md | 6 + gulpfile.js | 30 ++ lib/mapping-generator.js | 59 ++- lib/mongoosastic.js | 502 +++++++++++++------------- lib/serialize.js | 46 +-- package.json | 8 +- test/alternative-index-method-test.js | 62 ++-- test/boost-field-test.js | 54 +-- test/bulk-test.js | 15 +- test/config.js | 71 ++-- test/connection-test.js | 16 +- test/geo-test.js | 206 ++++++----- test/highlight-features-test.js | 109 +++--- test/index-test.js | 370 ++++++++++--------- test/mapping-generator-test.js | 149 ++++---- test/models/tweet.js | 21 +- test/search-features-test.js | 61 ++-- test/serialize-test.js | 60 +-- test/synchronize-test.js | 49 +-- test/truncate-test.js | 10 +- 22 files changed, 1060 insertions(+), 868 deletions(-) create mode 100644 .jscsrc create 
mode 100644 .jshintrc create mode 100644 gulpfile.js diff --git a/.jscsrc b/.jscsrc new file mode 100644 index 00000000..6c00c569 --- /dev/null +++ b/.jscsrc @@ -0,0 +1,7 @@ +{ + "preset": "airbnb", + "requireMultipleVarDecl": null, + "disallowMultipleVarDecl": null, + "requireCamelCaseOrUpperCaseIdentifiers": "ignoreProperties", + "requirePaddingNewLinesAfterBlocks": null +} diff --git a/.jshintrc b/.jshintrc new file mode 100644 index 00000000..03d5208b --- /dev/null +++ b/.jshintrc @@ -0,0 +1,17 @@ +{ + "browser": false, + "node": true, + "camelcase": false, + "eqeqeq": true, + "indent": 2, + "latedef": false, + "newcap": true, + "quotmark": "single", + "strict": false, + "undef": true, + "unused": "vars", + "eqnull": true, + "forin": false, + "mocha": true, + "esnext": true +} diff --git a/CHANGELOG.md b/CHANGELOG.md index d991b868..cb58b568 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +2.1.1 / 2015-03-28 +=================== + + * added gulp, jshint and jscs + * example fixed and dependencies updated + 2.1.0 / 2015-03-21 =================== diff --git a/gulpfile.js b/gulpfile.js new file mode 100644 index 00000000..6b873911 --- /dev/null +++ b/gulpfile.js @@ -0,0 +1,30 @@ +var gulp = require('gulp'); +var mocha = require('gulp-mocha'); +var jshint = require('gulp-jshint'); +var jscs = require('gulp-jscs'); + +var SOURCE_FILES = ['*.js', './example/*.js', './lib/*.js', './test/**/*.js']; + +gulp.task('lint', function() { + return gulp.src(SOURCE_FILES) + .pipe(jshint('.jshintrc')) + .pipe(jshint.reporter('jshint-stylish')); +}); + +gulp.task('jscs', function() { + return gulp.src(SOURCE_FILES) + .pipe(jscs()); +}); + +gulp.task('mocha', function() { + return gulp.src(['test/*-test.js'], { read: false }) + .pipe(mocha({ + reporter: 'spec', + timeout: 60000, + globals: { + should: require('should') + } + })); +}); + +gulp.task('default', ['lint', 'jscs', 'mocha']); diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 4f9180c5..e0d2bfdf 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -1,17 +1,15 @@ -function Generator(){ +function Generator() { } -Generator.prototype.generateMapping = function(schema, cb){ +Generator.prototype.generateMapping = function(schema, cb) { var cleanTree = getCleanTree(schema.tree, schema.paths, ''); delete cleanTree[schema.get('versionKey')]; var mapping = getMapping(cleanTree, ''); - cb(null, { properties: mapping }); + cb(null, {properties: mapping}); }; module.exports = Generator; - - // // Generates the mapping // @@ -23,9 +21,9 @@ module.exports = Generator; // function getMapping(cleanTree, prefix) { var mapping = {}, - value = {}, - implicitFields = [], - hasEs_index = false; + value, + implicitFields = [], + hasEsIndex = false; if (prefix !== '') { prefix = prefix + '.'; @@ -37,13 +35,12 @@ function getMapping(cleanTree, prefix) { mapping[field].type = value.type; // Check if field was explicity indexed, if not keep track implicitly - if(value.es_indexed) { - hasEs_index = true; + if (value.es_indexed) { + hasEsIndex = true; } else if (value.type) { implicitFields.push(field); } - // If there is no type, then it's an object with subfields. if (!value.type) { mapping[field].type = 'object'; @@ -53,12 +50,12 @@ function getMapping(cleanTree, prefix) { // If it is a objectid make it a string. 
if (value.type === 'objectid') { - mapping[field].type = 'string'; // do not continue here so we can handle other es_ options + mapping[field].type = 'string'; } //If indexing a number, and no es_type specified, default to double - if (value.type === 'number' && value['es_type'] === undefined) { + if (value.type === 'number' && value.es_type === undefined) { mapping[field].type = 'double'; continue; } @@ -73,7 +70,7 @@ function getMapping(cleanTree, prefix) { } //If one of the fields was explicitly indexed, delete all implicit fields - if (hasEs_index) { + if (hasEsIndex) { implicitFields.forEach(function(field) { delete mapping[field]; }); @@ -82,7 +79,6 @@ function getMapping(cleanTree, prefix) { return mapping; } - // // Generates a clean tree // @@ -96,14 +92,14 @@ function getMapping(cleanTree, prefix) { function getCleanTree(tree, paths, prefix) { var cleanTree = {}, - type = '', - value = {}; + type = '', + value = {}; if (prefix !== '') { prefix = prefix + '.'; } - for (var field in tree){ + for (var field in tree) { if (prefix === '' && (field === 'id' || field === '_id')) { continue; } @@ -111,9 +107,10 @@ function getCleanTree(tree, paths, prefix) { type = getTypeFromPaths(paths, prefix + field); value = tree[field]; - if(value.es_indexed === false) { + if (value.es_indexed === false) { continue; } + // Field has some kind of type if (type) { // If it is an nested schema @@ -121,24 +118,25 @@ function getCleanTree(tree, paths, prefix) { // A nested array can contain complex objects if (paths[field] && paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); - } else if ( paths[field] && paths[field].caster && paths[field].caster.instance ) { + } else if (paths[field] && paths[field].caster && paths[field].caster.instance) { // Even for simple types the value can be an object if there is other attributes than type - if(typeof value[0] === 'object'){ + if (typeof value[0] === 'object') { cleanTree[field] = value[0]; } else { cleanTree[field] = {}; } + cleanTree[field].type = paths[field].caster.instance.toLowerCase(); } else if (!paths[field] && prefix) { - if(paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { + if (paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; } } else { cleanTree[field] = { - type:'object' + type: 'object' }; } - } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array){ + } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array) { cleanTree[field] = {}; cleanTree[field].type = type; } else { @@ -146,21 +144,22 @@ function getCleanTree(tree, paths, prefix) { cleanTree[field].type = type; } - // It has no type for some reason + // It has no type for some reason } else { // Because it is an geo_* object!! 
- if (typeof value === 'object') - { + if (typeof value === 'object') { var key; var geoFound = false; for (key in value) { if (value.hasOwnProperty(key) && /^geo_/.test(key)) { cleanTree[field] = value[key]; geoFound = true; - //break; } } - if(geoFound) continue + + if (geoFound) { + continue; + } } // If it's a virtual type, don't map it @@ -178,8 +177,6 @@ function getCleanTree(tree, paths, prefix) { return cleanTree; } - - // // Get type from the mongoose schema // diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 1b9a67e8..371c9485 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,49 +1,49 @@ -var elasticsearch = require('elasticsearch') - , generator = new(require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events') - , nop = require('nop') - , util = require('util') - -module.exports = function Mongoosastic(schema, options){ - options = options || {} - - var bulkTimeout, bulkBuffer = [], esClient - , mapping = getMapping(schema) - , indexName = options && options.index - , typeName = options && options.type - , alwaysHydrate = options && options.hydrate - , defaultHydrateOptions = options && options.hydrateOptions - , bulk = options && options.bulk - +var elasticsearch = require('elasticsearch'), + Generator = require('./mapping-generator'), + generator = new Generator(), + serialize = require('./serialize'), + events = require('events'), + nop = require('nop'), + util = require('util'); + +module.exports = function Mongoosastic(schema, options) { + options = options || {}; + + var bulkTimeout, bulkBuffer = [], esClient, + mapping = getMapping(schema), + indexName = options && options.index, + typeName = options && options.type, + alwaysHydrate = options && options.hydrate, + defaultHydrateOptions = options && options.hydrateOptions, + bulk = options && options.bulk; if (options.esClient) { - esClient = options.esClient + esClient = options.esClient; } else { esClient = createEsClient(options); } - setUpMiddlewareHooks(schema) + setUpMiddlewareHooks(schema); /** - * ElasticSearch Client - */ - schema.statics.esClient = esClient + * ElasticSearch Client + */ + schema.statics.esClient = esClient; /** - * Create the mapping. Takes an optionnal settings parameter and a callback that will be called once + * Create the mapping. 
Takes an optional settings parameter and a callback that will be called once * the mapping is created * @param settings Object (optional) - * @param callback Function + * @param cb Function */ schema.statics.createMapping = function(settings, cb) { - if(arguments.length < 2) { - cb = arguments[0] || nop - settings = undefined + if (arguments.length < 2) { + cb = arguments[0] || nop; + settings = undefined; } - setIndexNameIfUnset(this.modelName) + setIndexNameIfUnset(this.modelName); createMappingIfNotPresent({ client: esClient, @@ -51,25 +51,25 @@ module.exports = function Mongoosastic(schema, options){ typeName: typeName, schema: schema, settings: settings - }, cb) - } + }, cb); + }; /** * @param options Object (optional) - * @param callback Function + * @param cb Function */ - schema.methods.index = function(options, cb){ + schema.methods.index = function(options, cb) { if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} + cb = arguments[0] || nop; + options = {}; } - setIndexNameIfUnset(this.constructor.modelName) + setIndexNameIfUnset(this.constructor.modelName); - var index = options.index || indexName - , type = options.type || typeName + var index = options.index || indexName, + type = options.type || typeName; - if(bulk) { + if (bulk) { /** * To serialize in bulk it needs the _id */ @@ -80,56 +80,56 @@ module.exports = function Mongoosastic(schema, options){ index: index, type: type, model: serialModel - }) - setImmediate(cb) + }); + setImmediate(cb); } else { esClient.index({ index: index, type: type, id: this._id.toString(), body: serialize(this, mapping) - }, cb) + }, cb); } - } + }; /** - * Unset elasticsearch index - * @param options - (optional) options for unIndex - * @param callback - callback when unIndex is complete - */ - schema.methods.unIndex = function(options, cb){ + * Unset elasticsearch index + * @param options - (optional) options for unIndex + * @param cb - callback when unIndex is complete + */ + schema.methods.unIndex = function(options, cb) { if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} + cb = arguments[0] || nop; + options = {}; } - setIndexNameIfUnset(this.constructor.modelName) + setIndexNameIfUnset(this.constructor.modelName); - options.index = options.index || indexName - options.type = options.type || typeName - options.model = this - options.client = esClient - options.tries = 3 + options.index = options.index || indexName; + options.type = options.type || typeName; + options.model = this; + options.client = esClient; + options.tries = 3; - if(bulk) - bulkDelete(options, cb) + if (bulk) + bulkDelete(options, cb); else - deleteByMongoId(options, cb) - } + deleteByMongoId(options, cb); + }; /** * Delete all documents from a type/index * @param options - (optional) specify index/type - * @param callback - callback when truncation is complete + * @param cb - callback when truncation is complete */ schema.statics.esTruncate = function(options, cb) { if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} + cb = arguments[0] || nop; + options = {}; } - var index = options.index || indexName - , type = options.type || typeName + var index = options.index || indexName, + type = options.type || typeName; esClient.deleteByQuery({ index: index, @@ -139,113 +139,117 @@ module.exports = function Mongoosastic(schema, options){ match_all: {} } } - }, cb) - } + }, cb); + }; /** * Synchronize an existing collection * * @param query - query for documents you want to synchronize */ - schema.statics.synchronize = 
function(query){ - var em = new events.EventEmitter() - , closeValues = [] - , counter = 0 - , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} + schema.statics.synchronize = function(query) { + var em = new events.EventEmitter(), + closeValues = [], + counter = 0, + close = function() { + em.emit.apply(em, ['close'].concat(closeValues)); + }; //Set indexing to be bulk when synchronizing to make synchronizing faster bulk = bulk || { delay: 1000, size: 1000 - } + }; - query = query || {} + query = query || {}; - setIndexNameIfUnset(this.modelName) + setIndexNameIfUnset(this.modelName); - var stream = this.find(query).stream() + var stream = this.find(query).stream(); - stream.on('data', function(doc){ - counter++ - doc.save(function(err){ - if (err) - return em.emit('error', err) + stream.on('data', function(doc) { + counter++; + doc.save(function(err) { + if (err) { + return em.emit('error', err); + } - doc.on('es-indexed', function(err, doc){ - counter-- - if(err){ - em.emit('error', err) - }else{ - em.emit('data', null, doc) + doc.on('es-indexed', function(err, doc) { + counter--; + if (err) { + em.emit('error', err); + } else { + em.emit('data', null, doc); } - }) - }) - }) + }); + }); + }); - stream.on('close', function(a, b){ - closeValues = [a, b] + stream.on('close', function(a, b) { + closeValues = [a, b]; var closeInterval = setInterval(function() { if (counter === 0 && bulkBuffer.length === 0) { - clearInterval(closeInterval) - close() + clearInterval(closeInterval); + close(); } - }, 1000) - }) + }, 1000); + }); - stream.on('error', function(err){ - em.emit('error', err) - }) + stream.on('error', function(err) { + em.emit('error', err); + }); - return em - } + return em; + }; /** * ElasticSearch search function * * @param query - query object to perform search with * @param options - (optional) special search options, such as hydrate - * @param callback - callback called with search results + * @param cb - callback called with search results */ - schema.statics.search = function(query, options, cb){ + schema.statics.search = function(query, options, cb) { if (arguments.length === 2) { - cb = arguments[1] - options = {} + cb = arguments[1]; + options = {}; } options.hydrateOptions = options.hydrateOptions || defaultHydrateOptions || {}; if (query === null) - query = undefined + query = undefined; - var model = this - , esQuery = { + var _this = this, + esQuery = { body: { query: query }, index: options.index || indexName, - type: options.type || typeName - } - if (options.highlight) { - esQuery.body.highlight = options.highlight; - } + type: options.type || typeName + }; + if (options.highlight) { + esQuery.body.highlight = options.highlight; + } Object.keys(options).forEach(function(opt) { if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) - esQuery[opt] = options[opt] - }) + esQuery[opt] = options[opt]; + }); - setIndexNameIfUnset(model.modelName) + setIndexNameIfUnset(this.modelName); + + esClient.search(esQuery, function(err, res) { + if (err) { + return cb(err); + } - esClient.search(esQuery, function(err, res){ - if(err){ - cb(err) + if (alwaysHydrate || options.hydrate) { + hydrate(res, _this, options, cb); } else { - if (alwaysHydrate || options.hydrate) - hydrate(res, model, options, cb) - else - cb(null, res) + cb(null, res); } - }) - } + }); + }; function bulkDelete(options, cb) { bulkAdd({ @@ -254,8 +258,8 @@ module.exports = function Mongoosastic(schema, options){ _type: options.type || typeName, _id: options.model._id.toString() } - }) - 
cb() + }); + cb(); } function bulkIndex(options) { @@ -265,75 +269,78 @@ module.exports = function Mongoosastic(schema, options){ _type: options.type || typeName, _id: options.model._id.toString() } - }) - bulkAdd(options.model) + }); + bulkAdd(options.model); } function clearBulkTimeout() { - clearTimeout(bulkTimeout) - bulkTimeout = undefined + clearTimeout(bulkTimeout); + bulkTimeout = undefined; } function bulkAdd(instruction) { - bulkBuffer.push(instruction) + bulkBuffer.push(instruction); //Return because we need the doc being indexed //Before we start inserting if (instruction.index && instruction.index._index) - return - - if(bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush() - clearBulkTimeout() - } else if (bulkTimeout === undefined){ - bulkTimeout = setTimeout(function(){ - schema.statics.flush() - clearBulkTimeout() - }, bulk.delay || 1000) + return; + + if (bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush(); + clearBulkTimeout(); + } else if (bulkTimeout === undefined) { + bulkTimeout = setTimeout(function() { + schema.statics.flush(); + clearBulkTimeout(); + }, bulk.delay || 1000); } } - schema.statics.flush = function(cb){ - cb = cb || function(err) { if (err) console.log(err) } + schema.statics.flush = function(cb) { + cb = cb || function(err) { + if (err) { + console.log(err); + } + }; esClient.bulk({ body: bulkBuffer - }, function(err) { - cb(err) - }) - bulkBuffer = [] - } + }, cb); + + bulkBuffer = []; + }; - schema.statics.refresh = function(options, cb){ + schema.statics.refresh = function(options, cb) { if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} + cb = arguments[0] || nop; + options = {}; } - setIndexNameIfUnset(this.modelName) + setIndexNameIfUnset(this.modelName); esClient.indices.refresh({ index: options.index || indexName - }, cb) - } + }, cb); + }; - function setIndexNameIfUnset(model){ - var modelName = model.toLowerCase() - if(!indexName){ - indexName = modelName + 's' + function setIndexNameIfUnset(model) { + var modelName = model.toLowerCase(); + if (!indexName) { + indexName = modelName + 's'; } - if(!typeName){ - typeName = modelName + + if (!typeName) { + typeName = modelName; } } - /** * Use standard Mongoose Middleware hooks * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function(){ - setIndexNameIfUnset(this.constructor.modelName) + schema.post('remove', function() { + setIndexNameIfUnset(this.constructor.modelName); var options = { index: indexName, @@ -341,153 +348,160 @@ module.exports = function Mongoosastic(schema, options){ tries: 3, model: this, client: esClient - } + }; - if(bulk) { - bulkDelete(options, nop) + if (bulk) { + bulkDelete(options, nop); } else { - deleteByMongoId(options, nop) + deleteByMongoId(options, nop); } - }) + }); /** * Save in elasticsearch on save. */ - schema.post('save', function(){ - var model = this + schema.post('save', function() { + var _this = this; - model.index(function(err, res){ - model.emit('es-indexed', err, res) - }) - }) + _this.index(function(err, res) { + _this.emit('es-indexed', err, res); + }); + }); } -} +}; function createEsClient(options) { - var esOptions = {} + var esOptions = {}; - if (util.isArray(options.hosts) ) { - esOptions.host = options.hosts + if (util.isArray(options.hosts)) { + esOptions.host = options.hosts; } else { esOptions.host = { host: options && options.host ? options.host : 'localhost', port: options && options.port ? 
options.port : 9200, protocol: options && options.protocol ? options.protocol : 'http', - auth: options && options.auth ? options.auth : null + auth: options && options.auth ? options.auth : null, + keepAlive: false }; } - esOptions.log = (options?options.log:null) + esOptions.log = (options ? options.log : null); - return new elasticsearch.Client(esOptions) + return new elasticsearch.Client(esOptions); } function createMappingIfNotPresent(options, cb) { - var client = options.client - , indexName = options.indexName - , typeName = options.typeName - , schema = options.schema - , settings = options.settings + var client = options.client, + indexName = options.indexName, + typeName = options.typeName, + schema = options.schema, + settings = options.settings; generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {} - completeMapping[typeName] = mapping + var completeMapping = {}; + completeMapping[typeName] = mapping; client.indices.exists({index: indexName}, function(err, exists) { - if (err) - return cb(err) + if (err) { + return cb(err); + } if (exists) { client.indices.putMapping({ index: indexName, type: typeName, body: completeMapping - }, cb) + }, cb); + } else { client.indices.create({index: indexName, body: settings}, function(err) { if (err) - return cb(err) + return cb(err); client.indices.putMapping({ index: indexName, type: typeName, body: completeMapping - }, cb) - }) + }, cb); + }); } - }) - }) + }); + }); } -function hydrate(res, model, options, cb){ - var results = res.hits - , resultsMap = {} - , ids = results.hits.map(function(a, i){ - resultsMap[a._id] = i - return a._id - }) - , query = model.find({_id:{$in:ids}}) - hydrateOptions = options.hydrateOptions +function hydrate(res, model, options, cb) { + var results = res.hits, + resultsMap = {}, + ids = results.hits.map(function(a, i) { + resultsMap[a._id] = i; + return a._id; + }), + + query = model.find({_id: {$in: ids}}), + hydrateOptions = options.hydrateOptions; // Build Mongoose query based on hydrate options // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(hydrateOptions).forEach(function(option){ - query[option](hydrateOptions[option]) - }) + Object.keys(hydrateOptions).forEach(function(option) { + query[option](hydrateOptions[option]); + }); - query.exec(function(err, docs){ - if(err) { - return cb(err) + query.exec(function(err, docs) { + if (err) { + return cb(err); } else { - var hits = [] + var hits = []; docs.forEach(function(doc) { - var i = resultsMap[doc._id] + var i = resultsMap[doc._id]; if (options.highlight) { doc._highlight = results.hits[i].highlight; } - hits[i] = doc - }) - results.hits = hits - res.hits = results - cb(null, res) + + hits[i] = doc; + }); + + results.hits = hits; + res.hits = results; + cb(null, res); } - }) + }); } -function getMapping(schema){ - var retMapping = {} - generator.generateMapping(schema, function(err, mapping){ - retMapping = mapping - }) - return retMapping +function getMapping(schema) { + var retMapping = {}; + generator.generateMapping(schema, function(err, mapping) { + retMapping = mapping; + }); + + return retMapping; } -function deleteByMongoId(options, cb){ - var index = options.index - , type = options.type - , client = options.client - , model = options.model - , tries = options.tries +function deleteByMongoId(options, cb) { + var index = options.index, + type = options.type, + client = options.client, + model = options.model, + tries = options.tries; client.delete({ index: index, type: type, id: 
model._id.toString() - }, function(err, res){ - if(err && err.message.indexOf('404') > -1){ - setTimeout(function(){ - if(tries <= 0) { - return cb(err) + }, function(err, res) { + if (err && err.message.indexOf('404') > -1) { + setTimeout(function() { + if (tries <= 0) { + return cb(err); } else { - options.tries = --tries - deleteByMongoId(options, cb) + options.tries = --tries; + deleteByMongoId(options, cb); } - }, 500) - }else{ - model.emit('es-removed', err, res) - cb(err) + }, 500); + } else { + model.emit('es-removed', err, res); + cb(err); } - }) + }); } diff --git a/lib/serialize.js b/lib/serialize.js index 584b4ce5..9a2bca57 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -8,40 +8,42 @@ function _serializeObject(object, mapping) { serialized[field] = val; } } + return serialized; } function serialize(model, mapping) { + var name; + if (mapping.properties && model) { + if (Array.isArray(model)) { return model.map(function(object) { return _serializeObject(object, mapping); }); - } else { - return _serializeObject(model, mapping); } - } else if (typeof value === 'object' && value !== null) { - var name = value.constructor.name; + + return _serializeObject(model, mapping); + + } + + if (mapping.cast && typeof mapping.cast !== 'function') { + throw new Error('es_cast must be a function'); + } + + model = mapping.cast ? mapping.cast.call(this, model) : model; + if (typeof model === 'object' && model !== null) { + name = model.constructor.name; if (name === 'ObjectID') { - return value.toString(); - } else if (name === 'Date') { - return new Date(value).toJSON(); + return model.toString(); } - return model; - } else { - if (mapping.cast && typeof(mapping.cast) !== 'function') - throw new Error('es_cast must be a function'); - model = mapping.cast ? 
mapping.cast.call(this, model) : model; - if (typeof model === 'object' && model !== null) { - var name = model.constructor.name; - if (name === 'ObjectID') { - return model.toString(); - } else if (name === 'Date') { - return new Date(model).toJSON(); - } - return model; - } else { - return model; + + if (name === 'Date') { + return new Date(model).toJSON(); } + } + + return model; + } diff --git a/package.json b/package.json index 7f22340c..b601eb46 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,12 @@ }, "devDependencies": { "async": "^0.9.x", + "gulp": "^3.8.11", + "gulp-jscs": "^1.4.0", + "gulp-jshint": "^1.9.4", + "gulp-mocha": "^2.0.0", + "jscs": "^1.12.0", + "jshint-stylish": "^1.0.1", "mocha": "^2.2.x", "mongoose": "~3.8.x", "should": "^5.2.x" @@ -32,6 +38,6 @@ "node": ">= 0.8.0" }, "scripts": { - "test": "mocha -R spec -t 60000 -b" + "test": "gulp" } } diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 3c3c92c7..16d671ac 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -1,38 +1,35 @@ -var mongoose = require('mongoose') - , should = require('should') - , config = require('./config') - , mongoosastic = require('../lib/mongoosastic') - , Tweet = require('./models/tweet') - , Schema = mongoose.Schema; +var mongoose = require('mongoose'), + config = require('./config'), + Tweet = require('./models/tweet'); -describe('Index Method', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - config.deleteIndexIfExists(['tweets', 'public_tweets'], function(){ +describe('Index Method', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + config.deleteIndexIfExists(['tweets', 'public_tweets'], function() { Tweet.remove(function() { config.createModelAndEnsureIndex(Tweet, { - user: 'jamescarr' - , message: 'I know kung-fu!' 
- , post_date: new Date() + user: 'jamescarr', + message: 'I know kung-fu!', + post_date: new Date() }, done); - }) + }); }); }); }); - after(function(done){ - Tweet.remove(function(){ + after(function(done) { + Tweet.remove(function() { mongoose.disconnect(); done(); }); }); - it('should be able to index it directly without saving', function(done){ - Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ + it('should be able to index it directly without saving', function(done) { + Tweet.findOne({message: 'I know kung-fu!'}, function(err, doc) { doc.message = 'I know nodejitsu!'; - doc.index(function(){ - setTimeout(function(){ - Tweet.search({query_string: {query: 'know'}}, function(err, res){ + doc.index(function() { + setTimeout(function() { + Tweet.search({query_string: {query: 'know'}}, function(err, res) { res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); @@ -41,12 +38,12 @@ describe('Index Method', function(){ }); }); - it('should be able to index to alternative index', function(done){ - Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ + it('should be able to index to alternative index', function(done) { + Tweet.findOne({message: 'I know kung-fu!'}, function(err, doc) { doc.message = 'I know taebo!'; - doc.index({index: 'public_tweets'}, function(){ - setTimeout(function(){ - Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err, res){ + doc.index({index: 'public_tweets'}, function() { + setTimeout(function() { + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err, res) { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); @@ -55,12 +52,15 @@ describe('Index Method', function(){ }); }); - it('should be able to index to alternative index and type', function(done){ - Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ + it('should be able to index to alternative index and type', function(done) { + Tweet.findOne({message: 'I know kung-fu!'}, function(err, doc) { doc.message = 'I know taebo!'; - doc.index({index: 'public_tweets', type: 'utterings'}, function(){ - setTimeout(function(){ - Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ + doc.index({index: 'public_tweets', type: 'utterings'}, function() { + setTimeout(function() { + Tweet.search({query_string: {query: 'know'}}, { + index: 'public_tweets', + type: 'utterings' + }, function(err, res) { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); diff --git a/test/boost-field-test.js b/test/boost-field-test.js index e28c4b35..4bf5430c 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -1,42 +1,52 @@ -var mongoose = require('mongoose') - , esClient = new(require('elasticsearch').Client) - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , mongoosastic = require('../lib/mongoosastic'); - +var mongoose = require('mongoose'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client({ + deadTimeout: 0, + keepAlive: false + }), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); var TweetSchema = new Schema({ - user: String - , post_date: {type:Date, es_type:'date'} - , message: {type:String} - , title: {type:String, es_boost:2.0} + user: String, + post_date: {type: Date, es_type: 'date'}, + message: {type: String}, + title: {type: String, es_boost: 2.0} }); 
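// A minimal illustrative sketch, assuming the mapping-generator API exercised in
// test/mapping-generator-test.js later in this patch: the es_boost and es_type
// options declared on TweetSchema above should flow straight into the generated
// Elasticsearch mapping.
var MappingGenerator = require('../lib/mapping-generator');

new MappingGenerator().generateMapping(TweetSchema, function(err, mapping) {
  // Expected shape, per the boost assertions in the test below (an assumption, not captured output):
  // mapping.properties.title     -> { type: 'string', boost: 2.0 }
  // mapping.properties.post_date -> { type: 'date' }
});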
TweetSchema.plugin(mongoosastic); var BlogPost = mongoose.model('BlogPost', TweetSchema); -describe('Add Boost Option Per Field', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - BlogPost.remove(function(){ - config.deleteIndexIfExists(['blogposts'], done) +describe('Add Boost Option Per Field', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + BlogPost.remove(function() { + config.deleteIndexIfExists(['blogposts'], done); }); }); }); - it('should create a mapping with boost field added', function(done){ - BlogPost.createMapping(function(err, mapping){ + after(function(done) { + mongoose.disconnect(); + BlogPost.esClient.close(); + esClient.close(); + done(); + }); + + it('should create a mapping with boost field added', function(done) { + BlogPost.createMapping(function() { esClient.indices.getMapping({ index: 'blogposts', type: 'blogpost' - }, function(err, mapping){ + }, function(err, mapping) { /* elasticsearch 1.0 & 0.9 support */ - var props = mapping.blogpost != undefined ? - mapping.blogpost.properties : /* ES 0.9.11 */ - mapping.blogposts.mappings.blogpost.properties; /* ES 1.0.0 */ + var props = mapping.blogpost !== undefined ? + mapping.blogpost.properties : /* ES 0.9.11 */ + mapping.blogposts.mappings.blogpost.properties; + /* ES 1.0.0 */ props.title.type.should.eql('string'); props.title.boost.should.eql(2.0); diff --git a/test/bulk-test.js b/test/bulk-test.js index 3bb9540a..f5ce10e9 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -1,6 +1,5 @@ var mongoose = require('mongoose'), async = require('async'), - should = require('should'), config = require('./config'), Schema = mongoose.Schema, mongoosastic = require('../lib/mongoosastic'); @@ -31,20 +30,30 @@ describe('Bulk mode', function() { }); }); }); + before(function(done) { async.forEach(config.bookTitlesArray(), function(title, cb) { new Book({ title: title }).save(cb); - }, done) + }, done); }); + before(function(done) { Book.findOne({ title: 'American Gods' }, function(err, book) { - book.remove(done) + book.remove(done); }); }); + + after(function(done) { + mongoose.disconnect(); + Book.esClient.close(); + done(); + + }); + it('should index all objects and support deletions too', function(done) { // This timeout is important, as Elasticsearch is "near-realtime" and the index/deletion takes time that diff --git a/test/config.js b/test/config.js index 4d959ca2..945f689e 100644 --- a/test/config.js +++ b/test/config.js @@ -1,44 +1,57 @@ -var esClient = new(require('elasticsearch').Client) - , async = require('async'); +var elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client({ + host: 'localhost:9200', + deadTimeout: 0, + keepAlive: false + }), + async = require('async'); -const INDEXING_TIMEOUT = 1100; +const INDEXING_TIMEOUT = process.env.INDEXING_TIMEOUT || 1100; module.exports = { - mongoUrl: 'mongodb://localhost/es-test' - , indexingTimeout: INDEXING_TIMEOUT - , deleteIndexIfExists: function(indexes, done){ - async.forEach(indexes, function(index, cb){ - esClient.indices.exists({ - index: index - }, function(err, exists){ - if(exists){ - esClient.indices.delete({ - index: index - }, cb); - }else{ - cb(); - } - }); - }, done); - } - , createModelAndEnsureIndex: createModelAndEnsureIndex - , saveAndWaitIndex: saveAndWaitIndex - , bookTitlesArray: bookTitlesArray + mongoUrl: 'mongodb://localhost/es-test', + indexingTimeout: INDEXING_TIMEOUT, + deleteIndexIfExists: deleteIndexIfExists, + 
createModelAndEnsureIndex: createModelAndEnsureIndex, + saveAndWaitIndex: saveAndWaitIndex, + bookTitlesArray: bookTitlesArray, + getClient: function() { + return esClient; + }, + close: function() { + esClient.close(); + } }; -function createModelAndEnsureIndex(model, obj, cb){ - var dude = new model(obj); - dude.save(function(){ - dude.on('es-indexed', function(err, res){ +function deleteIndexIfExists(indexes, done) { + async.forEach(indexes, function(index, cb) { + esClient.indices.exists({ + index: index + }, function(err, exists) { + if (exists) { + esClient.indices.delete({ + index: index + }, cb); + } else { + cb(); + } + }); + }, done); +} + +function createModelAndEnsureIndex(Model, obj, cb) { + var dude = new Model(obj); + dude.save(function() { + dude.on('es-indexed', function(err, res) { setTimeout(cb, INDEXING_TIMEOUT); }); }); } -function saveAndWaitIndex(model, cb){ +function saveAndWaitIndex(model, cb) { model.save(function(err) { if (err) cb(err); - else model.on('es-indexed', cb ); + else model.on('es-indexed', cb); }); } diff --git a/test/connection-test.js b/test/connection-test.js index 1b4f98f9..0b1791be 100644 --- a/test/connection-test.js +++ b/test/connection-test.js @@ -1,6 +1,5 @@ var mongoose = require('mongoose'), async = require('async'), - should = require('should'), elasticsearch = require('elasticsearch'), config = require('./config'), Schema = mongoose.Schema, @@ -11,7 +10,6 @@ var DummySchema = new Schema({ }); var Dummy = mongoose.model('Dummy1', DummySchema, 'dummys'); - describe('Elasticsearch Connection', function() { before(function(done) { @@ -38,7 +36,9 @@ describe('Elasticsearch Connection', function() { }); after(function(done) { - Dummy.remove(done); + Dummy.remove(); + mongoose.disconnect(); + done(); }); it('should be able to connect with default options', function(done) { @@ -50,7 +50,6 @@ describe('Elasticsearch Connection', function() { }); - it('should be able to connect with explicit options', function(done) { DummySchema.plugin(mongoosastic, { @@ -84,7 +83,7 @@ describe('Elasticsearch Connection', function() { esClient.ping({ requestTimeout: 1000 - }, function (err) { + }, function(err) { if (err) { return done(err); } @@ -102,15 +101,18 @@ describe('Elasticsearch Connection', function() { }); function tryDummySearch(model, cb) { - setTimeout(function(){ + setTimeout(function() { model.search({ query_string: { query: 'Text1' } }, function(err, results) { - if(err) return cb(err) + if (err) { + return cb(err); + } results.hits.total.should.eql(0); + model.esClient.close(); cb(err); }); }, config.indexingTimeout); diff --git a/test/geo-test.js b/test/geo-test.js index 08e9b9bc..95896d58 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -1,44 +1,44 @@ -var mongoose = require('mongoose') - , esClient = new(require('elasticsearch').Client) - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , mongoosastic = require('../lib/mongoosastic'); +var mongoose = require('mongoose'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client(), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); var GeoSchema; var GeoModel; -describe('GeoTest', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - config.deleteIndexIfExists(['geodocs'], function(){ +describe('GeoTest', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + 
config.deleteIndexIfExists(['geodocs'], function() { GeoSchema = new Schema({ - myId: Number, - frame: { - coordinates : [], - type: {type: String}, - geo_shape: { - type:String, - es_type: 'geo_shape', - es_tree: 'quadtree', - es_precision: '1km' - } - } - }); + myId: Number, + frame: { + coordinates: [], + type: {type: String}, + geo_shape: { + type: String, + es_type: 'geo_shape', + es_tree: 'quadtree', + es_precision: '1km' + } + } + }); GeoSchema.plugin(mongoosastic); GeoModel = mongoose.model('geodoc', GeoSchema); - - GeoModel.createMapping(function(err, mapping){ - GeoModel.remove(function(){ - - esClient.indices.getMapping({ - index: 'geodocs', - type: 'geodoc' - }, function(err, mapping){ - (mapping.geodoc != undefined ? - mapping.geodoc: /* ES 0.9.11 */ + + GeoModel.createMapping(function(err, mapping) { + GeoModel.remove(function() { + + esClient.indices.getMapping({ + index: 'geodocs', + type: 'geodoc' + }, function(err, mapping) { + (mapping.geodoc !== undefined ? + mapping.geodoc : /* ES 0.9.11 */ mapping.geodocs.mappings.geodoc /* ES 1.0.0 */ ).properties.frame.type.should.eql('geo_shape'); done(); @@ -50,78 +50,91 @@ describe('GeoTest', function(){ }); }); - it('should be able to create and store geo coordinates', function(done){ + after(function(done) { + GeoModel.esClient.close(); + mongoose.disconnect(); + esClient.close(); + done(); + }); + + it('should be able to create and store geo coordinates', function(done) { var geo = new GeoModel({ - myId : 1, - frame:{ - type:'envelope', - coordinates: [[1,4],[3,2]] + myId: 1, + frame: { + type: 'envelope', + coordinates: [[1, 4], [3, 2]] } }); - geo2 = new GeoModel({ - myId : 2, - frame:{ - type:'envelope', - coordinates: [[2,3],[4,0]] + var geo2 = new GeoModel({ + myId: 2, + frame: { + type: 'envelope', + coordinates: [[2, 3], [4, 0]] } }); - config.saveAndWaitIndex(geo, function(err){ - if (err) throw err; - config.saveAndWaitIndex(geo2, function(err){ - if (err) throw err; + config.saveAndWaitIndex(geo, function(err) { + if (err) { + throw err; + } + + config.saveAndWaitIndex(geo2, function(err) { + if (err) { + throw err; + } + // Mongodb request GeoModel.find({}, function(err, res) { if (err) throw err; res.length.should.eql(2); res[0].frame.type.should.eql('envelope'); - res[0].frame.coordinates[0].should.eql([1,4]); - res[0].frame.coordinates[1].should.eql([3,2]); + res[0].frame.coordinates[0].should.eql([1, 4]); + res[0].frame.coordinates[1].should.eql([3, 2]); done(); - }) - }) - }) + }); + }); + }); - }) + }); - it('should be able to find geo coordinates in the indexes', function(done){ - setTimeout(function(){ - // ES request - GeoModel.search({ - match_all: {} - }, {sort: 'myId:asc'}, function(err, res){ - if (err) throw err; - res.hits.total.should.eql(2); - res.hits.hits[0]._source.frame.type.should.eql('envelope'); - res.hits.hits[0]._source.frame.coordinates.should.eql([[1,4],[3,2]]); - done(); - }); - }, 1100); + it('should be able to find geo coordinates in the indexes', function(done) { + setTimeout(function() { + // ES request + GeoModel.search({ + match_all: {} + }, {sort: 'myId:asc'}, function(err, res) { + if (err) throw err; + res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); + done(); + }); + }, 1100); }); - it('should be able to resync geo coordinates from the database', function(done){ - config.deleteIndexIfExists(['geodocs'], function(){ - GeoModel.createMapping(function(err, 
mapping){ - var stream = GeoModel.synchronize() - , count = 0; + it('should be able to resync geo coordinates from the database', function(done) { + config.deleteIndexIfExists(['geodocs'], function() { + GeoModel.createMapping(function(err, mapping) { + var stream = GeoModel.synchronize(), + count = 0; - stream.on('data', function(err, doc){ + stream.on('data', function(err, doc) { count++; }); - stream.on('close', function(){ + stream.on('close', function() { count.should.eql(2); - setTimeout(function(){ + setTimeout(function() { GeoModel.search({ match_all: {} - }, {sort: 'myId:asc'}, function(err, res){ - if (err) throw err; + }, {sort: 'myId:asc'}, function(err, res) { + if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); - res.hits.hits[0]._source.frame.coordinates.should.eql([[1,4],[3,2]]); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); done(); }); }, 1000); @@ -129,8 +142,8 @@ describe('GeoTest', function(){ }); }); }); - - it('should be able to search points inside frames', function(done){ + + it('should be able to search points inside frames', function(done) { var geoQuery = { filtered: { query: { @@ -141,41 +154,40 @@ describe('GeoTest', function(){ frame: { shape: { type: 'point', - coordinates: [3,1] + coordinates: [3, 1] } } } } } - } + }; - setTimeout(function(){ - GeoModel.search(geoQuery,function(err, res){ - if (err) throw err; + setTimeout(function() { + GeoModel.search(geoQuery, function(err, res) { + if (err) throw err; res.hits.total.should.eql(1); - res.hits.hits[0]._source.myId.should.eql(2); - geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; - GeoModel.search(geoQuery,function(err, res){ - if (err) throw err; + res.hits.hits[0]._source.myId.should.eql(2); + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [1.5, 2.5]; + GeoModel.search(geoQuery, function(err, res) { + if (err) throw err; res.hits.total.should.eql(1); - res.hits.hits[0]._source.myId.should.eql(1); + res.hits.hits[0]._source.myId.should.eql(1); - geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; - GeoModel.search(geoQuery,function(err, res){ - if (err) throw err; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [3, 2]; + GeoModel.search(geoQuery, function(err, res) { + if (err) throw err; res.hits.total.should.eql(2); - geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; - GeoModel.search(geoQuery,function(err, res){ - if (err) throw err; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [0, 3]; + GeoModel.search(geoQuery, function(err, res) { + if (err) throw err; res.hits.total.should.eql(0); done(); }); }); - }); - + }); }); - }, 1000); + }, 1000); }); }); diff --git a/test/highlight-features-test.js b/test/highlight-features-test.js index 5eabeb3a..e18c8fd1 100644 --- a/test/highlight-features-test.js +++ b/test/highlight-features-test.js @@ -1,54 +1,57 @@ -var mongoose = require('mongoose') - , async = require('async') - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , mongoosastic = require('../lib/mongoosastic'); +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); var TextSchema = new Schema({ - title: String, - quote: String + title: String, + quote: String }); TextSchema.plugin(mongoosastic); var Text = mongoose.model('Text', TextSchema); 
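// A minimal sketch of the highlight flow exercised by the tests below, assuming the
// search options shown there: pass an Elasticsearch `highlight` block alongside the
// query; without hydrate the fragments arrive on each hit's `highlight` property,
// and with `hydrate: true` they are copied onto each document as `_highlight`.
Text.search({match_phrase: {quote: 'Death'}}, {
  highlight: {fields: {quote: {}}}
}, function(err, res) {
  // res.hits.hits[0].highlight.quote -> array of highlighted fragments for that hit
});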
-describe('Highlight search', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - Text.remove(function(){ - config.deleteIndexIfExists(['texts'], function(){ +describe('Highlight search', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Text.remove(function() { + config.deleteIndexIfExists(['texts'], function() { // Quotes are from Terry Pratchett's Discworld books var texts = [ - new Text({ - title: 'The colour of magic', - quote: 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' - }), - new Text({ - title: 'The Light Fantastic', - quote: 'The death of the warrior or the old man or the little child, this I understand, and I take ' + - 'away the pain and end the suffering. I do not understand this death-of-the-mind' - }), - new Text({ - title: 'Equal Rites', - quote: 'Time passed, which, basically, is its job' - }), - new Text({ - title: 'Mort', - quote: 'You don\'t see people at their best in this job, said Death.' - }) + new Text({ + title: 'The colour of magic', + quote: 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' + }), + new Text({ + title: 'The Light Fantastic', + quote: 'The death of the warrior or the old man or the little child, this I understand, and I take ' + + 'away the pain and end the suffering. I do not understand this death-of-the-mind' + }), + new Text({ + title: 'Equal Rites', + quote: 'Time passed, which, basically, is its job' + }), + new Text({ + title: 'Mort', + quote: 'You don\'t see people at their best in this job, said Death.' + }) ]; - async.forEach(texts, config.saveAndWaitIndex, function(){ + async.forEach(texts, config.saveAndWaitIndex, function() { setTimeout(done, config.indexingTimeout); }); }); }); }); }); - after(function(done){ - Text.remove(done); + + after(function(done) { + Text.remove(); + Text.esClient.close(); + mongoose.disconnect(); + done(); }); var responses = [ @@ -58,42 +61,43 @@ describe('Highlight search', function(){ 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' ]; - describe('Highlight without hydrating', function(){ - it('should return highlighted text on every hit result', function(done){ + describe('Highlight without hydrating', function() { + it('should return highlighted text on every hit result', function(done) { Text.search({ - match_phrase: { - quote: 'Death' - } + match_phrase: { + quote: 'Death' + } }, { highlight: { fields: { quote: {} } } - },function(err, res){ + }, function(err, res) { res.hits.total.should.eql(3); - res.hits.hits.forEach(function(text){ + res.hits.hits.forEach(function(text) { text.should.have.property('highlight'); text.highlight.should.have.property('quote'); - text.highlight.quote.forEach(function(q){ - responses.should.containEql(q); + text.highlight.quote.forEach(function(q) { + responses.should.containEql(q); }); }); + done(); }); }); }); - describe('Highlight hydrated results', function(){ - it('should return highlighted text on every resulting document', function(done){ + describe('Highlight hydrated results', function() { + it('should return highlighted text on every resulting document', function(done) { Text.search({ - match_phrase: { - quote: 'Death' - } + match_phrase: { + quote: 'Death' + } }, { hydrate: true, highlight: { @@ -101,21 +105,20 @@ describe('Highlight search', function(){ quote: {} } } - },function(err, res){ + }, function(err, res) { res.hits.total.should.eql(3); - 
res.hits.hits.forEach(function(model){ + res.hits.hits.forEach(function(model) { model.should.have.property('_highlight'); model._highlight.should.have.property('quote'); - model._highlight.quote.forEach(function(q){ - responses.should.containEql(q); + model._highlight.quote.forEach(function(q) { + responses.should.containEql(q); }); }); + done(); }); }); - - }); }); diff --git a/test/index-test.js b/test/index-test.js index 00f40ebe..3911323f 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -1,230 +1,260 @@ -var mongoose = require('mongoose') - , esClient = new(require('elasticsearch').Client) - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , mongoosastic = require('../lib/mongoosastic') - , Tweet = require('./models/tweet'); +var mongoose = require('mongoose'), + should = require('should'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client(), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'), + Tweet = require('./models/tweet'); // -- Only index specific field var TalkSchema = new Schema({ - speaker: String - , year: {type: Number, es_indexed:true} - , title: {type:String, es_indexed:true} - , abstract: {type:String, es_indexed:true} - , bio: String + speaker: String, + year: {type: Number, es_indexed: true}, + title: {type: String, es_indexed: true}, + abstract: {type: String, es_indexed: true}, + bio: String }); -TalkSchema.plugin(mongoosastic) + +TalkSchema.plugin(mongoosastic); var Talk = mongoose.model('Talk', TalkSchema); var PersonSchema = new Schema({ - name: {type:String, es_indexed:true} - , phone: {type:String, es_indexed:true} - , address: String - , life: { - born: {type: Number, es_indexed:true} - , died: {type: Number, es_indexed:true} + name: {type: String, es_indexed: true}, + phone: {type: String, es_indexed: true}, + address: String, + life: { + born: {type: Number, es_indexed: true}, + died: {type: Number, es_indexed: true} } }); PersonSchema.plugin(mongoosastic, { - index:'people' -, type: 'dude' -, hydrate: true -, hydrateOptions: {lean: true, sort: '-name', select: 'address name life'} + index: 'people', + type: 'dude', + hydrate: true, + hydrateOptions: {lean: true, sort: '-name', select: 'address name life'} }); var Person = mongoose.model('Person', PersonSchema); +var BumSchema = new Schema({ + name: String +}); +BumSchema.plugin(mongoosastic, { + index: 'ms_sample', + type: 'bum' +}); +var Bum = mongoose.model('bum', BumSchema); + // -- alright let's test this shiznit! 
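// A minimal sketch of the hydrate defaults configured on PersonSchema above, as the
// 'Always hydrate' block below expects them to behave: hits come back as lean
// Mongoose documents, sorted by -name, restricted to the selected fields.
Person.search({query_string: {query: 'James'}}, function(err, res) {
  // res.hits.hits[0].name    -> 'James Carr'
  // res.hits.hits[0].address -> 'Exampleville, MO'
  // res.hits.hits[0].phone   -> undefined (not in hydrateOptions.select)
});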
-describe('indexing', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - Tweet.remove(function(){ - config.deleteIndexIfExists(['tweets', 'talks', 'people', 'public_tweets'], done) +describe('indexing', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Tweet.remove(function() { + config.deleteIndexIfExists(['tweets', 'talks', 'people', 'public_tweets'], done); }); }); }); - after(function(done){ - Tweet.remove(function(){ - mongoose.disconnect(); - done(); - }); + after(function(done) { + mongoose.disconnect(); + Talk.esClient.close(); + Person.esClient.close(); + Bum.esClient.close(); + esClient.close(); + config.deleteIndexIfExists(['tweets', 'talks', 'people'], done); + + //done(); }); - describe('Creating Index', function(){ - it('should create index if none exists', function(done){ - Tweet.createMapping(function(err, response){ + describe('Creating Index', function() { + it('should create index if none exists', function(done) { + Tweet.createMapping(function(err, response) { + should.exists(response); response.should.not.have.property('error'); done(); }); }); - it('should create index with settings if none exists', function(done){ - Tweet.createMapping({analysis: { - analyzer: { - stem: { - tokenizer: 'standard', - filter: ['standard', 'lowercase', 'stop', 'porter_stem'] + + it('should create index with settings if none exists', function(done) { + Tweet.createMapping({ + analysis: { + analyzer: { + stem: { + tokenizer: 'standard', + filter: ['standard', 'lowercase', 'stop', 'porter_stem'] + } } } - } - },function(err, response){ + }, function(err, response) { + should.exists(response); response.should.not.have.property('error'); done(); }); - }); - it('should update index if one already exists', function(done){ - Tweet.createMapping(function(err, response){ + }); + + it('should update index if one already exists', function(done) { + Tweet.createMapping(function(err, response) { response.should.not.have.property('error'); done(); }); }); - after(function(done){ - config.deleteIndexIfExists(['tweets', 'talks', 'people'], done) + after(function(done) { + config.deleteIndexIfExists(['tweets', 'talks', 'people'], done); }); + }); - describe('Default plugin', function(){ - before(function(done){ + describe('Default plugin', function() { + before(function(done) { config.createModelAndEnsureIndex(Tweet, { - user: 'jamescarr' - , userId: 1 - , message: 'I like Riak better' - , post_date: new Date() + user: 'jamescarr', + userId: 1, + message: 'I like Riak better', + post_date: new Date() }, done); }); - it('should use the model\'s id as ES id', function(done){ - Tweet.findOne({message:'I like Riak better'}, function(err, doc){ + + it('should use the model\'s id as ES id', function(done) { + Tweet.findOne({message: 'I like Riak better'}, function(err, doc) { esClient.get({ index: 'tweets', type: 'tweet', id: doc._id.toString() - }, function(err, res){ + }, function(err, res) { res._source.message.should.eql(doc.message); - done() + done(); }); }); }); - it('should be able to execute a simple query', function(done){ + it('should be able to execute a simple query', function(done) { Tweet.search({ query_string: { query: 'Riak' } }, function(err, results) { - results.hits.total.should.eql(1) - results.hits.hits[0]._source.message.should.eql('I like Riak better') + results.hits.total.should.eql(1); + results.hits.hits[0]._source.message.should.eql('I like Riak better'); done(); }); }); - it('should be able to execute a 
simple query', function(done){ + it('should be able to execute a simple query', function(done) { Tweet.search({ query_string: { query: 'jamescarr' } }, function(err, results) { - results.hits.total.should.eql(1) - results.hits.hits[0]._source.message.should.eql('I like Riak better') - done() + results.hits.total.should.eql(1); + results.hits.hits[0]._source.message.should.eql('I like Riak better'); + done(); }); }); - it('should report errors', function(done){ - Tweet.search({queriez:'jamescarr'}, function(err, results) { + it('should report errors', function(done) { + Tweet.search({queriez: 'jamescarr'}, function(err, results) { err.message.should.match(/SearchPhaseExecutionException/); - should.not.exist(results) - done() + should.not.exist(results); + done(); }); }); }); - describe('Removing', function(){ + + describe('Removing', function() { var tweet = null; - beforeEach(function(done){ + beforeEach(function(done) { tweet = new Tweet({ - user:'jamescarr' - , message: 'Saying something I shouldnt' + user: 'jamescarr', + message: 'Saying something I shouldnt' }); config.createModelAndEnsureIndex(Tweet, tweet, done); }); - it('should remove from index when model is removed', function(done){ - tweet.remove(function(){ - setTimeout(function(){ - Tweet.search({ - query_string: { - query: 'shouldnt' - } - }, function(err, res){ - res.hits.total.should.eql(0); - done(); - }); - }, config.indexingTimeout); + + it('should remove from index when model is removed', function(done) { + tweet.remove(function() { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'shouldnt' + } + }, function(err, res) { + res.hits.total.should.eql(0); + done(); + }); + }, config.indexingTimeout); }); }); - it('should remove only index', function(done){ - tweet.on('es-removed', function(err, res){ - setTimeout(function(){ + + it('should remove only index', function(done) { + tweet.on('es-removed', function(err, res) { + setTimeout(function() { Tweet.search({ - query_string: { - query: 'shouldnt' - } - }, function(err, res){ + query_string: { + query: 'shouldnt' + } + }, function(err, res) { res.hits.total.should.eql(0); done(); }); }, config.indexingTimeout); }); - tweet.unIndex() + + tweet.unIndex(); }); - it('should queue for later removal if not in index', function(done){ + it('should queue for later removal if not in index', function(done) { // behavior here is to try 3 times and then give up. 
var tweet = new Tweet({ - user:'jamescarr' - , message: 'ABBA' + user: 'jamescarr', + message: 'ABBA' }); - tweet.save(function(){ - tweet.remove(); + tweet.save(function() { + setTimeout(function() { + tweet.remove(); + tweet.on('es-removed', done); + }, 200); }); - tweet.on('es-removed', done); }); }); - describe('Isolated Models', function(){ - before(function(done){ + + describe('Isolated Models', function() { + before(function(done) { var talk = new Talk({ - speaker: '' - , year: 2013 - , title: 'Dude' - , abstract: '' - , bio: '' + speaker: '', + year: 2013, + title: 'Dude', + abstract: '', + bio: '' }); var tweet = new Tweet({ - user: 'Dude' - , message: 'Go see the big lebowski' - , post_date: new Date() + user: 'Dude', + message: 'Go see the big lebowski', + post_date: new Date() }); - tweet.save(function(){ - talk.save(function(){ - talk.on('es-indexed', function(err, res){ + tweet.save(function() { + talk.save(function() { + talk.on('es-indexed', function(err, res) { setTimeout(done, config.indexingTimeout); }); }); }); }); - it('should only find models of type Tweet', function(done){ - Tweet.search({query_string: {query: 'Dude'}}, function(err, res){ + it('should only find models of type Tweet', function(done) { + Tweet.search({query_string: {query: 'Dude'}}, function(err, res) { res.hits.total.should.eql(1); res.hits.hits[0]._source.user.should.eql('Dude'); done(); }); }); - it('should only find models of type Talk', function(done){ - Talk.search({query_string: {query: 'Dude'}}, function(err, res){ + + it('should only find models of type Talk', function(done) { + Talk.search({query_string: {query: 'Dude'}}, function(err, res) { res.hits.total.should.eql(1); res.hits.hits[0]._source.title.should.eql('Dude'); done(); @@ -232,16 +262,16 @@ describe('indexing', function(){ }); }); - describe('Always hydrate', function(){ - before(function(done){ + describe('Always hydrate', function() { + before(function(done) { config.createModelAndEnsureIndex(Person, { - name: 'James Carr' - , address: 'Exampleville, MO' - , phone: '(555)555-5555' + name: 'James Carr', + address: 'Exampleville, MO', + phone: '(555)555-5555' }, done); }); - it('when gathering search results while respecting default hydrate options', function(done){ + it('when gathering search results while respecting default hydrate options', function(done) { Person.search({query_string: {query: 'James'}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('James Carr'); @@ -251,18 +281,19 @@ describe('indexing', function(){ }); }); }); - describe('Subset of Fields', function(){ - before(function(done){ - config.createModelAndEnsureIndex(Talk,{ - speaker: 'James Carr' - , year: 2013 - , title: 'Node.js Rocks' - , abstract: 'I told you node.js was cool. Listen to me!' - , bio: 'One awesome dude.' + + describe('Subset of Fields', function() { + before(function(done) { + config.createModelAndEnsureIndex(Talk, { + speaker: 'James Carr', + year: 2013, + title: 'Node.js Rocks', + abstract: 'I told you node.js was cool. Listen to me!', + bio: 'One awesome dude.' 
}, done); }); - it('should only return indexed fields', function(done){ + it('should only return indexed fields', function(done) { Talk.search({query_string: {query: 'cool'}}, function(err, res) { res.hits.total.should.eql(1); @@ -276,32 +307,32 @@ describe('indexing', function(){ }); }); - it('should hydrate returned documents if desired', function(done){ - Talk.search({query_string: {query: 'cool'}}, {hydrate:true}, function(err, res) { - res.hits.total.should.eql(1) + it('should hydrate returned documents if desired', function(done) { + Talk.search({query_string: {query: 'cool'}}, {hydrate: true}, function(err, res) { + res.hits.total.should.eql(1); - var talk = res.hits.hits[0] - talk.should.have.property('title') + var talk = res.hits.hits[0]; + talk.should.have.property('title'); talk.should.have.property('year'); - talk.should.have.property('abstract') - talk.should.have.property('speaker') - talk.should.have.property('bio') + talk.should.have.property('abstract'); + talk.should.have.property('speaker'); + talk.should.have.property('bio'); talk.should.be.an.instanceof(Talk); done(); }); }); - describe('Sub-object Fields', function(){ - before(function(done){ + describe('Sub-object Fields', function() { + before(function(done) { config.createModelAndEnsureIndex(Person, { - name: 'Bob Carr' - , address: 'Exampleville, MO' - , phone: '(555)555-5555' - , life: { born: 1950, other: 2000 } + name: 'Bob Carr', + address: 'Exampleville, MO', + phone: '(555)555-5555', + life: {born: 1950, other: 2000} }, done); }); - it('should only return indexed fields and have indexed sub-objects', function(done){ + it('should only return indexed fields and have indexed sub-objects', function(done) { Person.search({query_string: {query: 'Bob'}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('Bob Carr'); @@ -316,16 +347,16 @@ describe('indexing', function(){ }); }); - it('should allow extra query options when hydrating', function(done){ - Talk.search({query_string: {query: 'cool'}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { - res.hits.total.should.eql(1) + it('should allow extra query options when hydrating', function(done) { + Talk.search({query_string: {query: 'cool'}}, {hydrate: true, hydrateOptions: {lean: true}}, function(err, res) { + res.hits.total.should.eql(1); - var talk = res.hits.hits[0] - talk.should.have.property('title') + var talk = res.hits.hits[0]; + talk.should.have.property('title'); talk.should.have.property('year'); - talk.should.have.property('abstract') - talk.should.have.property('speaker') - talk.should.have.property('bio') + talk.should.have.property('abstract'); + talk.should.have.property('speaker'); + talk.should.have.property('bio'); talk.should.not.be.an.instanceof(Talk); done(); }); @@ -333,16 +364,16 @@ describe('indexing', function(){ }); - describe('Existing Index', function(){ - before(function(done){ - config.deleteIndexIfExists(['ms_sample'], function(){ + describe('Existing Index', function() { + before(function(done) { + config.deleteIndexIfExists(['ms_sample'], function() { esClient.indices.create({ index: 'ms_sample', body: { - mappings:{ - bum:{ + mappings: { + bum: { properties: { - name: {type:'string'} + name: {type: 'string'} } } } @@ -351,17 +382,10 @@ describe('indexing', function(){ }); }); - it('should just work', function(done){ - var BumSchema = new Schema({ - name: String - }); - BumSchema.plugin(mongoosastic, { - index: 'ms_sample' - , type: 'bum' - }); - var Bum 
= mongoose.model('bum', BumSchema); - config.createModelAndEnsureIndex(Bum, {name:'Roger Wilson'}, function(){ - Bum.search({query_string: {query: 'Wilson'}}, function(err, results){ + it('should just work', function(done) { + + config.createModelAndEnsureIndex(Bum, {name: 'Roger Wilson'}, function() { + Bum.search({query_string: {query: 'Wilson'}}, function(err, results) { results.hits.total.should.eql(1); done(); }); @@ -370,5 +394,3 @@ describe('indexing', function(){ }); }); - - diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js index 4ea5f4f3..abc49198 100644 --- a/test/mapping-generator-test.js +++ b/test/mapping-generator-test.js @@ -1,108 +1,114 @@ -var mongoose = require('mongoose') - , should = require('should') - , Schema = mongoose.Schema - , Generator = require('../lib/mapping-generator') - , generator = new Generator(); +var mongoose = require('mongoose'), + should = require('should'), + Schema = mongoose.Schema, + Generator = require('../lib/mapping-generator'), + generator = new Generator(); -describe('MappingGenerator', function(){ +describe('MappingGenerator', function() { - describe('type mapping', function(){ - it('maps field with simple String type', function(done){ + describe('type mapping', function() { + it('maps field with simple String type', function(done) { generator.generateMapping(new Schema({ name: String - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.type.should.eql('string'); done(); }); }); - it('maps field with String type attribute', function(done){ + it('maps field with String type attribute', function(done) { generator.generateMapping(new Schema({ - name: {type:String} - }), function(err, mapping){ + name: {type: String} + }), function(err, mapping) { mapping.properties.name.type.should.eql('string'); done(); }); }); - it('converts Date type to date', function(done){ + + it('converts Date type to date', function(done) { generator.generateMapping(new Schema({ - graduationDate: {type:Date, es_format: 'YYYY-MM-dd'} - }), function(err, mapping){ + graduationDate: {type: Date, es_format: 'YYYY-MM-dd'} + }), function(err, mapping) { mapping.properties.graduationDate.type.should.eql('date'); done(); }); }); - it('removes _id field without prefix', function(done){ + + it('removes _id field without prefix', function(done) { generator.generateMapping(new Schema({ _id: {type: Schema.Types.ObjectId}, user: { _id: {type: Schema.Types.ObjectId}, name: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.should.not.have.property('_id'); done(); }); }); - it('does not remove _id field with prefix', function(done){ + + it('does not remove _id field with prefix', function(done) { generator.generateMapping(new Schema({ _id: {type: Schema.Types.ObjectId}, user: { _id: {type: Schema.Types.ObjectId}, name: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.user.properties.should.have.property('_id'); done(); }); }); - it('converts object id to string if not _id', function(done){ + + it('converts object id to string if not _id', function(done) { generator.generateMapping(new Schema({ - oid: {type:Schema.Types.ObjectId} - }), function(err, mapping){ + oid: {type: Schema.Types.ObjectId} + }), function(err, mapping) { mapping.properties.oid.type.should.eql('string'); done(); }); }); - it('recognizes an object and maps it as one', function(done){ + + it('recognizes an object and maps it as one', function(done) { 
generator.generateMapping(new Schema({ contact: { - email: {type: String}, - telephone: {type: String} + email: {type: String}, + telephone: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.contact.properties.email.type.should.eql('string'); mapping.properties.contact.properties.telephone.type.should.eql('string'); done(); }); }); - it('recognizes an object and handles explict es_indexed', function(done){ + + it('recognizes an object and handles explict es_indexed', function(done) { generator.generateMapping(new Schema({ name: {type: String, es_indexed: true}, contact: { - email: {type: String, es_indexed: true}, - telephone: {type: String} + email: {type: String, es_indexed: true}, + telephone: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.type.should.eql('string'); mapping.properties.contact.properties.email.type.should.eql('string'); mapping.properties.contact.properties.should.not.have.property('telephone'); done(); }); }); - - it('recognizes an multi_field and maps it as one', function(done){ + + it('recognizes an multi_field and maps it as one', function(done) { generator.generateMapping(new Schema({ test: { type: String, es_include_in_all: false, es_type: 'multi_field', es_fields: { - test: { type: 'string', index: 'analyzed' }, - untouched: { type: 'string', index: 'not_analyzed' } + test: {type: 'string', index: 'analyzed'}, + untouched: {type: 'string', index: 'not_analyzed'} } } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.test.type.should.eql('multi_field'); mapping.properties.test.fields.test.type.should.eql('string'); mapping.properties.test.fields.test.index.should.eql('analyzed'); @@ -111,18 +117,20 @@ describe('MappingGenerator', function(){ done(); }); }); - it('recognizes an geo_point and maps it as one', function(done){ + + it('recognizes an geo_point and maps it as one', function(done) { generator.generateMapping(new Schema({ geo: { type: String, es_type: 'geo_point' } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.geo.type.should.eql('geo_point'); done(); }); }); - it('recognizes an geo_point with independent lat lon fields and maps it as one', function(done){ + + it('recognizes an geo_point with independent lat lon fields and maps it as one', function(done) { generator.generateMapping(new Schema({ geo_with_lat_lon: { geo_point: { @@ -130,54 +138,58 @@ describe('MappingGenerator', function(){ es_type: 'geo_point', es_lat_lon: true }, - lat: { type: Number }, - lon: { type: Number } + lat: {type: Number}, + lon: {type: Number} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.geo_with_lat_lon.type.should.eql('geo_point'); mapping.properties.geo_with_lat_lon.lat_lon.should.eql(true); done(); }); }); - it('recognizes an nested schema and maps it', function(done){ + + it('recognizes an nested schema and maps it', function(done) { var NameSchema = new Schema({ first_name: {type: String}, last_name: {type: String} }); generator.generateMapping(new Schema({ name: [NameSchema] - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.type.should.eql('object'); mapping.properties.name.properties.first_name.type.should.eql('string'); mapping.properties.name.properties.last_name.type.should.eql('string'); done(); }); }); - it('recognizes a nested array with a simple type and maps it as a simple attribute', function(done){ + + it('recognizes a nested 
array with a simple type and maps it as a simple attribute', function(done) { generator.generateMapping(new Schema({ contacts: [String] - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.contacts.type.should.eql('string'); done(); }); }); - it('recognizes a nested array with a simple type and additional attributes and maps it as a simple attribute', function(done){ + + it('recognizes a nested array with a simple type and additional attributes and maps it as a simple attribute', function(done) { generator.generateMapping(new Schema({ - contacts: [{ type: String, es_index: 'not_analyzed' }] - }), function(err, mapping){ + contacts: [{type: String, es_index: 'not_analyzed'}] + }), function(err, mapping) { mapping.properties.contacts.type.should.eql('string'); mapping.properties.contacts.index.should.eql('not_analyzed'); done(); }); }); - it('recognizes a nested array with a complex object and maps it', function(done){ + + it('recognizes a nested array with a complex object and maps it', function(done) { generator.generateMapping(new Schema({ name: String, contacts: [{ - email: {type: String, es_index: 'not_analyzed' }, - telephone: String + email: {type: String, es_index: 'not_analyzed'}, + telephone: String }] - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.type.should.eql('string'); mapping.properties.contacts.properties.email.type.should.eql('string'); mapping.properties.contacts.properties.email.index.should.eql('not_analyzed'); @@ -185,20 +197,21 @@ describe('MappingGenerator', function(){ done(); }); }); - it('excludes a virtual property from mapping', function(done){ + + it('excludes a virtual property from mapping', function(done) { var PersonSchema = new Schema({ first_name: {type: String}, last_name: {type: String}, age: {type: Number} }); - PersonSchema.virtual('birthYear').set(function (year) { + PersonSchema.virtual('birthYear').set(function(year) { this.age = new Date().getFullYear() - year; - }) + }); generator.generateMapping(new Schema({ name: [PersonSchema] - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.properties.first_name.type.should.eql('string'); mapping.properties.name.properties.last_name.type.should.eql('string'); mapping.properties.name.properties.age.type.should.eql('double'); @@ -208,30 +221,32 @@ describe('MappingGenerator', function(){ }); }); - describe('elastic search fields', function(){ - it('type can be overridden', function(done){ + describe('elastic search fields', function() { + it('type can be overridden', function(done) { generator.generateMapping(new Schema({ - name: {type:String, es_type:'date'} - }), function(err, mapping){ + name: {type: String, es_type: 'date'} + }), function(err, mapping) { mapping.properties.name.type.should.eql('date'); done(); }); }); - it('adds the boost field', function(done){ + + it('adds the boost field', function(done) { generator.generateMapping(new Schema({ - name: {type:String, es_boost:2.2} - }), function(err, mapping){ + name: {type: String, es_boost: 2.2} + }), function(err, mapping) { mapping.properties.name.boost.should.eql(2.2); done(); }); }); - it('respects schemas with explicit es_indexes', function(done){ + + it('respects schemas with explicit es_indexes', function(done) { generator.generateMapping(new Schema({ implicit_field_1: {type: String}, explicit_field_1: {type: Number, es_indexed: true}, implicit_field_2: {type: Number}, explicit_field_2: {type: String, es_indexed: true} - }), function(err, 
mapping){ + }), function(err, mapping) { mapping.properties.should.have.property('explicit_field_1'); mapping.properties.should.have.property('explicit_field_2'); mapping.properties.should.not.have.property('implicit_field_1'); @@ -239,15 +254,17 @@ describe('MappingGenerator', function(){ done(); }); }); + it('maps all fields when schema has no es_indexed flag', function(done) { generator.generateMapping(new Schema({ implicit_field_1: {type: String}, implicit_field_2: {type: Number} - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.should.have.property('implicit_field_1'); mapping.properties.should.have.property('implicit_field_2'); done(); }); }); + }); }); diff --git a/test/models/tweet.js b/test/models/tweet.js index f991c3ad..952aa500 100644 --- a/test/models/tweet.js +++ b/test/models/tweet.js @@ -1,15 +1,20 @@ -var mongoose = require('mongoose') - , Schema = mongoose.Schema - , mongoosastic = require('../../lib/mongoosastic'); +var mongoose = require('mongoose'), + Schema = mongoose.Schema, + config = require('../config'), + mongoosastic = require('../../lib/mongoosastic'); // -- simplest indexing... index all fields var TweetSchema = new Schema({ - user: String - , userId: Number - , post_date: Date - , message: String + user: String, + userId: Number, + post_date: Date, + message: String }); -TweetSchema.plugin(mongoosastic) +TweetSchema.plugin(mongoosastic, { + index: 'tweets', + type: 'tweet', + esClient: config.getClient() +}); module.exports = mongoose.model('Tweet', TweetSchema); diff --git a/test/search-features-test.js b/test/search-features-test.js index 788a0f31..52c7aa9f 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -1,55 +1,60 @@ -var mongoose = require('mongoose') - , async = require('async') - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , mongoosastic = require('../lib/mongoosastic'); +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); var BondSchema = new Schema({ - name: String - , type: {type:String, default:'Other Bond'} - , price: Number + name: String, + type: {type: String, default: 'Other Bond'}, + price: Number }); BondSchema.plugin(mongoosastic); var Bond = mongoose.model('Bond', BondSchema); -describe('Query DSL', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - Bond.remove(function(){ - config.deleteIndexIfExists(['bonds'], function(){ +describe('Query DSL', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Bond.remove(function() { + config.deleteIndexIfExists(['bonds'], function() { var bonds = [ - new Bond({name:'Bail', type:'A', price:10000}) - , new Bond({name:'Commercial', type:'B', price:15000}) - , new Bond({name:'Construction', type:'B', price:20000}) - , new Bond({name:'Legal', type:'C', price:30000}) + new Bond({name: 'Bail', type: 'A', price: 10000}), + new Bond({name: 'Commercial', type: 'B', price: 15000}), + new Bond({name: 'Construction', type: 'B', price: 20000}), + new Bond({name: 'Legal', type: 'C', price: 30000}) ]; - async.forEach(bonds, config.saveAndWaitIndex, function(){ + async.forEach(bonds, config.saveAndWaitIndex, function() { setTimeout(done, config.indexingTimeout); }); }); }); }); }); - after(function(done){ - Bond.remove(done); + + after(function(done) { + Bond.remove(); + Bond.esClient.close(); + 
mongoose.disconnect(); + done(); }); - describe('range', function(){ - it('should be able to find within range', function(done){ + + describe('range', function() { + it('should be able to find within range', function(done) { Bond.search({ range: { - price:{ - from:20000 - , to: 30000 + price: { + from: 20000, + to: 30000 } } - }, function(err, res){ + }, function(err, res) { res.hits.total.should.eql(2); - res.hits.hits.forEach(function(bond){ + res.hits.hits.forEach(function(bond) { ['Legal', 'Construction'].should.containEql(bond._source.name); }); + done(); }); }); diff --git a/test/serialize-test.js b/test/serialize-test.js index 774473a2..23b41fe2 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -1,22 +1,22 @@ -var mongoose = require('mongoose') - , should = require('should') - , generator = new (require('../lib/mapping-generator')) - , serialize = require('../lib/serialize') - , Schema = mongoose.Schema; +var mongoose = require('mongoose'), + Generator = require('../lib/mapping-generator'), + generator = new Generator(), + serialize = require('../lib/serialize'), + Schema = mongoose.Schema; -var BowlingBall = mongoose.model('BowlingBall', new Schema); +var BowlingBall = mongoose.model('BowlingBall', new Schema()); var PersonSchema22 = new Schema({ name: { - first: String - , last: String + first: String, + last: String }, dob: Date, - bowlingBall: {type:Schema.ObjectId, ref:'BowlingBall'}, + bowlingBall: {type: Schema.ObjectId, ref: 'BowlingBall'}, games: [{score: Number, date: Date}], - somethingToCast : { + somethingToCast: { type: String, - es_cast: function(element){ - return element+' has been cast'; + es_cast: function(element) { + return element + ' has been cast'; } } }); @@ -30,49 +30,55 @@ generator.generateMapping(PersonSchema22, function(err, tmp) { mapping = tmp; }); -describe('serialize', function(){ +describe('serialize', function() { var dude = new Person({ - name: {first:'Jeffrey', last:'Lebowski'}, + name: {first: 'Jeffrey', last: 'Lebowski'}, dob: new Date(Date.parse('05/17/1962')), bowlingBall: new BowlingBall(), - games: [{score: 80, date: new Date(Date.parse('05/17/1962'))}, {score: 80, date: new Date(Date.parse('06/17/1962'))}], + games: [{score: 80, date: new Date(Date.parse('05/17/1962'))}, { + score: 80, + date: new Date(Date.parse('06/17/1962')) + }], somethingToCast: 'Something' }); // another person with missing parts to test robustness var millionnaire = new Person({ - name: {first:'Jeffrey', last:'Lebowski'}, + name: {first: 'Jeffrey', last: 'Lebowski'} }); - it('should serialize a document with missing bits', function(){ + it('should serialize a document with missing bits', function() { var serialized = serialize(millionnaire, mapping); serialized.should.have.property('games', []); }); - describe('with no indexed fields', function(){ + describe('with no indexed fields', function() { var serialized = serialize(dude, mapping); - it('should serialize model fields', function(){ + it('should serialize model fields', function() { serialized.name.first.should.eql('Jeffrey'); serialized.name.last.should.eql('Lebowski'); }); - it('should serialize object ids as strings', function(){ + + it('should serialize object ids as strings', function() { serialized.bowlingBall.should.not.eql(dude.bowlingBall); serialized.bowlingBall.should.be.type('string'); }); - it('should serialize dates in ISO 8601 format', function(){ - serialized.dob.should.eql(dude.dob.toJSON()) + + it('should serialize dates in ISO 8601 format', function() { + 
serialized.dob.should.eql(dude.dob.toJSON()); }); - it('should serialize nested arrays', function(){ + + it('should serialize nested arrays', function() { serialized.games.should.have.lengthOf(2); serialized.games[0].should.have.property('score', 80); }); - it('should cast and serialize field', function(){ - serialized.somethingToCast.should.eql('Something has been cast') - }); + it('should cast and serialize field', function() { + serialized.somethingToCast.should.eql('Something has been cast'); + }); }); - describe('indexed fields', function(){ + describe('indexed fields', function() { }); }); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 5138d9e2..1d53b9fe 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -1,9 +1,8 @@ -var mongoose = require('mongoose') - , should = require('should') - , async = require('async') - , config = require('./config') - , mongoosastic = require('../lib/mongoosastic') - , Schema = mongoose.Schema +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + mongoosastic = require('../lib/mongoosastic'), + Schema = mongoose.Schema; var BookSchema = new Schema({ title: String @@ -13,14 +12,14 @@ BookSchema.plugin(mongoosastic); var Book = mongoose.model('Book', BookSchema); -describe('Synchronize', function(){ +describe('Synchronize', function() { var books = null; - before(function(done){ - config.deleteIndexIfExists(['books'], function(){ - mongoose.connect(config.mongoUrl, function(){ + before(function(done) { + config.deleteIndexIfExists(['books'], function() { + mongoose.connect(config.mongoUrl, function() { var client = mongoose.connections[0].db; - client.collection('books', function(err, _books){ + client.collection('books', function(err, _books) { books = _books; Book.remove(done); }); @@ -28,26 +27,32 @@ describe('Synchronize', function(){ }); }); - describe('existing collection', function(){ + after(function(done) { + Book.esClient.close(); + mongoose.disconnect(); + done(); + }); + + describe('existing collection', function() { - before(function(done){ - async.forEach(config.bookTitlesArray(), function(title, cb){ - books.insert({title:title}, cb); + before(function(done) { + async.forEach(config.bookTitlesArray(), function(title, cb) { + books.insert({title: title}, cb); }, done); }); - it('should index all existing objects', function(done){ - var stream = Book.synchronize() - , count = 0; + it('should index all existing objects', function(done) { + var stream = Book.synchronize(), + count = 0; - stream.on('data', function(err, doc){ + stream.on('data', function(err, doc) { count++; }); - stream.on('close', function(){ + stream.on('close', function() { count.should.eql(53); - setTimeout(function(){ - Book.search({query_string: {query: 'American'}}, function(err, results){ + setTimeout(function() { + Book.search({query_string: {query: 'American'}}, function(err, results) { results.hits.total.should.eql(2); done(); }); diff --git a/test/truncate-test.js b/test/truncate-test.js index 98a89150..356aa91e 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -1,6 +1,5 @@ var mongoose = require('mongoose'), async = require('async'), - should = require('should'), config = require('./config'), Schema = mongoose.Schema, mongoosastic = require('../lib/mongoosastic'); @@ -23,7 +22,7 @@ describe('Truncate', function() { }), new Dummy({ text: 'Text2' - }), + }) ]; async.forEach(dummies, function(item, cb) { item.save(cb); @@ -34,9 +33,14 @@ describe('Truncate', 
function() { }); }); }); + after(function(done) { - Dummy.remove(done); + Dummy.remove(); + Dummy.esClient.close(); + mongoose.disconnect(); + done(); }); + describe('esTruncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { From 766dda5c4436f948a75feb1ee92ed88a1f909e41 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sat, 28 Mar 2015 22:32:51 +0100 Subject: [PATCH 077/152] example fixed and dependencies updated --- example/blog/app.js | 78 ++++++++++++++++------------------ example/blog/package.json | 17 +++++--- example/blog/routes/index.js | 8 ---- example/blog/views/layout.jade | 3 +- 4 files changed, 47 insertions(+), 59 deletions(-) delete mode 100644 example/blog/routes/index.js diff --git a/example/blog/app.js b/example/blog/app.js index 70981f09..1086fcd2 100644 --- a/example/blog/app.js +++ b/example/blog/app.js @@ -1,79 +1,73 @@ -var express = require('express') - , routes = require('./routes') - , mongoose = require('mongoose') - , mongoosastic = require('../../lib/mongoosastic') - , Schema = mongoose.Schema; +var express = require('express'), + bodyParser = require('body-parser'), + errorhandler = require('errorhandler'), + mongoose = require('mongoose'), + mongoosastic = require('../../lib/mongoosastic'), + Schema = mongoose.Schema; -var app = module.exports = express.createServer(); +var app = module.exports = express(); // Configuration +app.set('views', __dirname + '/views'); +app.set('view engine', 'jade'); +app.use(bodyParser()); +app.use(express.static(__dirname + '/public')); -app.configure(function(){ - app.set('views', __dirname + '/views'); - app.set('view engine', 'jade'); - app.use(express.bodyParser()); - app.use(express.methodOverride()); - app.use(app.router); - app.use(express.static(__dirname + '/public')); -}); - -app.configure('development', function(){ - app.use(express.errorHandler({ dumpExceptions: true, showStack: true })); -}); - -app.configure('production', function(){ - app.use(express.errorHandler()); -}); +app.use(errorhandler()); // Model -mongoose.connect('mongodb://localhost/silly-blog', function(err){ - console.log(err); +mongoose.connect('mongodb://localhost/silly-blog', function(err) { + if (err) { + console.error(err); + } console.log('connected.... 
unless you see an error the line before this!'); }); + var BlogPostSchema = new Schema({ - title:{type:String, es_boost:2.0} - , content: {type:String} + title: {type: String, es_boost: 2.0}, + content: {type: String} }); BlogPostSchema.plugin(mongoosastic); var BlogPost = mongoose.model('BlogPost', BlogPostSchema); -BlogPost.createMapping(function(err, mapping){ - if(err){ +BlogPost.createMapping(function(err, mapping) { + if (err) { console.log('error creating mapping (you can safely ignore this)'); console.log(err); - }else{ + } else { console.log('mapping created!'); console.log(mapping); } }); - // Routes -app.get('/', function(req, res){ - res.render('index', {title:'Mongoosastic Example'}); +app.get('/', function(req, res) { + res.render('index', {title: 'Mongoosastic Example'}); }); -app.post('/search', function(req, res){ - BlogPost.search({query:req.body.q}, function(err, results){ +app.post('/search', function(req, res) { + BlogPost.search({query_string: {query: req.body.q}}, function(err, results) { res.send(results); }); }); -app.get('/post', function(req, res){ - res.render('post', {title:'New Post'}); + +app.get('/post', function(req, res) { + res.render('post', {title: 'New Post'}); }); -app.post('/post', function(req, res){ - var post = new BlogPost(req.body) - post.save(function(){ + +app.post('/post', function(req, res) { + var post = new BlogPost(req.body); + post.save(function() { res.redirect('/'); - post.on('es-indexed', function(){ + post.on('es-indexed', function() { console.log('document indexed'); }); }); }); -app.listen(3000, function(){ - console.log("Express server listening on port %d in %s mode", app.address().port, app.settings.env); +app.listen(3000, function() { + console.log('Express server listening on port %d in %s mode', 3000, app.settings.env); }); diff --git a/example/blog/package.json b/example/blog/package.json index 84470bfe..404a8a0b 100644 --- a/example/blog/package.json +++ b/example/blog/package.json @@ -1,9 +1,12 @@ { - "name": "application-name" - , "version": "0.0.1" - , "private": true - , "dependencies": { - "express": "2.5.10" - , "jade": ">= 0.0.1" + "name": "blog-mongoosastic-demo", + "version": "1.0.0", + "private": true, + "dependencies": { + "mongoose": "~3.8.x", + "express": "^4.12.x", + "errorhandler": "^1.3.x", + "body-parser": "^1.12.x", + "jade": "^1.9.x" } -} \ No newline at end of file +} diff --git a/example/blog/routes/index.js b/example/blog/routes/index.js deleted file mode 100644 index fd69215b..00000000 --- a/example/blog/routes/index.js +++ /dev/null @@ -1,8 +0,0 @@ - -/* - * GET home page. - */ - -exports.index = function(req, res){ - res.render('index', { title: 'Express' }) -}; \ No newline at end of file diff --git a/example/blog/views/layout.jade b/example/blog/views/layout.jade index 1a369412..37cc98f7 100644 --- a/example/blog/views/layout.jade +++ b/example/blog/views/layout.jade @@ -1,6 +1,5 @@ -!!! html head title= title link(rel='stylesheet', href='/stylesheets/style.css') - body!= body \ No newline at end of file + body!= body From e9965e46b0b82db8816b8ad09b0fbfa1955bea66 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 00:02:31 +0100 Subject: [PATCH 078/152] 2.1.1 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b601eb46..35c04065 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.1.0", + "version": "2.1.1", "tags": [ "mongodb", "elasticsearch", From 9e229de2babc1232e5ee8713c3c2450dc1ca6070 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 00:02:46 +0100 Subject: [PATCH 079/152] 2.1.2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 35c04065..e47e11d9 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.1.1", + "version": "2.1.2", "tags": [ "mongodb", "elasticsearch", From 5e5549ce2b481cbd06e3db4e7a4294b4458a2470 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 11:04:09 +0200 Subject: [PATCH 080/152] added doc parameter on mongoose hooks --- lib/mongoosastic.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 371c9485..8689d8ff 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -339,14 +339,14 @@ module.exports = function Mongoosastic(schema, options) { * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function() { - setIndexNameIfUnset(this.constructor.modelName); + schema.post('remove', function(doc) { + setIndexNameIfUnset(doc.constructor.modelName); var options = { index: indexName, type: typeName, tries: 3, - model: this, + model: doc, client: esClient }; @@ -360,11 +360,11 @@ module.exports = function Mongoosastic(schema, options) { /** * Save in elasticsearch on save. */ - schema.post('save', function() { - var _this = this; - _this.index(function(err, res) { - _this.emit('es-indexed', err, res); + schema.post('save', function(doc) { + + doc.index(function(err, res) { + doc.emit('es-indexed', err, res); }); }); } From 4823b5157551050f84272ac74ccf8a927305af58 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 11:04:50 +0200 Subject: [PATCH 081/152] 2.1.3 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index e47e11d9..525ffc86 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.1.2", + "version": "2.1.3", "tags": [ "mongodb", "elasticsearch", From db9dade97f0145af47b2c670f022fe253b4e78ef Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 11:32:44 +0200 Subject: [PATCH 082/152] changelog updated --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb58b568..019a3b3c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +2.1.3 / 2015-03-29 +================== + + * added doc parameter on mongoose hooks + 2.1.1 / 2015-03-28 =================== From 1c74b7e7dec8fabfea5857884d01ae3e81ea4699 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 11:59:51 +0200 Subject: [PATCH 083/152] mongoose and elasticsearch version bump. 
--- README.md | 8 ++++++++ package.json | 6 +++--- test/serialize-test.js | 4 ++-- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index d196f4d5..04c36e1a 100644 --- a/README.md +++ b/README.md @@ -24,10 +24,18 @@ Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatical ## Installation +The latest version of this package will be as close as possible to the latest `elasticsearch` and `mongoose` packages. If you are working with latest mongoose package, install normally: + ```bash npm install -S mongoosastic ``` +If you are working with `mongoose@3.8.x` use `mongoosastic@2.x` and install a specific version: + +```bash +npm install -S mongoosastic@^2.x +``` + ## Setup ### Model.plugin(mongoosastic, options) diff --git a/package.json b/package.json index 525ffc86..b48cc1f0 100644 --- a/package.json +++ b/package.json @@ -16,11 +16,11 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elasticsearch": "^3.1.x", + "elasticsearch": "^4.0.x", "nop": "^1.0.0" }, "peerDependencies": { - "mongoose": "~3.8.x" + "mongoose": "^4.x" }, "devDependencies": { "async": "^0.9.x", @@ -31,7 +31,7 @@ "jscs": "^1.12.0", "jshint-stylish": "^1.0.1", "mocha": "^2.2.x", - "mongoose": "~3.8.x", + "mongoose": "^4.x", "should": "^5.2.x" }, "engines": { diff --git a/test/serialize-test.js b/test/serialize-test.js index 23b41fe2..0d6ea027 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -60,8 +60,8 @@ describe('serialize', function() { }); it('should serialize object ids as strings', function() { - serialized.bowlingBall.should.not.eql(dude.bowlingBall); - serialized.bowlingBall.should.be.type('string'); + serialized.bowlingBall.should.eql(dude.bowlingBall); + serialized.bowlingBall.should.be.type('object'); }); it('should serialize dates in ISO 8601 format', function() { From 99ff58b9f3b7aa5d0f403393da2b861fd5175fae Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 12:15:12 +0200 Subject: [PATCH 084/152] 3.0.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b48cc1f0..4d136360 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.1.3", + "version": "3.0.0", "tags": [ "mongodb", "elasticsearch", From 8fe08486aa3023c76217d7db2395fedad0552a9e Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 29 Mar 2015 12:17:33 +0200 Subject: [PATCH 085/152] updated changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 019a3b3c..e8b57575 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +3.0.0 / 2015-03-29 +================== + + * mongoose and elasticsearch version bump + 2.1.3 / 2015-03-29 ================== From 427901faf3310c2f336e8dce2ce16257f75441ab Mon Sep 17 00:00:00 2001 From: Ro Ramtohul Date: Mon, 30 Mar 2015 19:45:45 +0100 Subject: [PATCH 086/152] Bug fixes. Syntax fixes. 
--- esCount-patch.diff | 114 ++++++++++++++++++++++++++++++++++++++++++++ lib/mongoosastic.js | 9 ++-- test/config.js | 2 +- test/count-test.js | 53 +++++++++++--------- 4 files changed, 149 insertions(+), 29 deletions(-) create mode 100644 esCount-patch.diff diff --git a/esCount-patch.diff b/esCount-patch.diff new file mode 100644 index 00000000..aca726da --- /dev/null +++ b/esCount-patch.diff @@ -0,0 +1,114 @@ +Index: lib/mongoosastic.js +IDEA additional info: +Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP +<+>UTF-8 +=================================================================== +--- lib/mongoosastic.js (date 1427621708000) ++++ lib/mongoosastic.js (revision ) +@@ -251,9 +251,8 @@ + }); + }; + +- schema.statics.esCount = function (query, cb) { ++ schema.statics.esCount = function(query, cb) { +- var model = this; +- setIndexNameIfUnset(model.modelName); ++ setIndexNameIfUnset(this.modelName); + + if (cb == null && typeof query === 'function') { + cb = query; +@@ -266,10 +265,10 @@ + }, + index: options.index || indexName, + type: options.type || typeName +- } ++ }; + + esClient.count(esQuery, cb); +- } ++ }; + + function bulkDelete(options, cb) { + bulkAdd({ +Index: test/count-test.js +IDEA additional info: +Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP +<+>UTF-8 +=================================================================== +--- test/count-test.js (date 1427621708000) ++++ test/count-test.js (revision ) +@@ -1,27 +1,29 @@ +-var mongoose = require('mongoose') +- , async = require('async') +- , esClient = new(require('elasticsearch').Client) +- , should = require('should') +- , config = require('./config') +- , Schema = mongoose.Schema +- , mongoosastic = require('../lib/mongoosastic'); ++var mongoose = require('mongoose'), ++ async = require('async'), ++ config = require('./config'), ++ Schema = mongoose.Schema, ++ mongoosastic = require('../lib/mongoosastic'); + +- + var CommentSchema = new Schema({ +- user: String +- , post_date: {type:Date, es_type:'date'} +- , message: {type:String} +- , title: {type:String, es_boost:2.0} ++ user: String, ++ post_date: {type: Date, es_type: 'date'}, ++ message: {type: String}, ++ title: {type: String, es_boost: 2.0} + }); + +-CommentSchema.plugin(mongoosastic); ++CommentSchema.plugin(mongoosastic, { ++ bulk: { ++ size: 2, ++ delay: 100 ++ } ++}); + + var Comment = mongoose.model('Comment', CommentSchema); + +-describe('Count', function(){ ++describe.only('Count', function() { +- before(function(done){ ++ before(function(done) { +- mongoose.connect(config.mongoUrl, function(){ ++ mongoose.connect(config.mongoUrl, function() { +- Comment.remove(function(){ ++ Comment.remove(function() { + config.deleteIndexIfExists(['comments'], function() { + var comments = [ + new Comment({ +@@ -34,16 +36,21 @@ + }) + ]; + async.forEach(comments, function(item, cb) { +- item.save(cb); +- }, function() { +- setTimeout(done, config.indexingTimeout); +- }); +- }); ++ item.save(cb); ++ }, function() { ++ setTimeout(done, config.indexingTimeout); ++ }); ++ }); + }); + }); ++ }); ++ ++ after(function() { ++ mongoose.disconnect(); ++ Comment.esClient.close(); + }); + +- it('should count a type', function(done){ ++ it('should count a type', function(done) { + Comment.esCount({ + term: { + user: 'terry' \ No newline at end of file diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 2ff5ac38..db42d9f1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -251,9 +251,8 @@ module.exports = function Mongoosastic(schema, options) 
{ }); }; - schema.statics.esCount = function (query, cb) { - var model = this; - setIndexNameIfUnset(model.modelName); + schema.statics.esCount = function(query, cb) { + setIndexNameIfUnset(this.modelName); if (cb == null && typeof query === 'function') { cb = query; @@ -266,10 +265,10 @@ module.exports = function Mongoosastic(schema, options) { }, index: options.index || indexName, type: options.type || typeName - } + }; esClient.count(esQuery, cb); - } + }; function bulkDelete(options, cb) { bulkAdd({ diff --git a/test/config.js b/test/config.js index 945f689e..136994a7 100644 --- a/test/config.js +++ b/test/config.js @@ -6,7 +6,7 @@ var elasticsearch = require('elasticsearch'), }), async = require('async'); -const INDEXING_TIMEOUT = process.env.INDEXING_TIMEOUT || 1100; +const INDEXING_TIMEOUT = process.env.INDEXING_TIMEOUT || 2000; module.exports = { mongoUrl: 'mongodb://localhost/es-test', diff --git a/test/count-test.js b/test/count-test.js index 15daa450..f87dfbad 100644 --- a/test/count-test.js +++ b/test/count-test.js @@ -1,27 +1,29 @@ -var mongoose = require('mongoose') - , async = require('async') - , esClient = new(require('elasticsearch').Client) - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , mongoosastic = require('../lib/mongoosastic'); - +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); var CommentSchema = new Schema({ - user: String - , post_date: {type:Date, es_type:'date'} - , message: {type:String} - , title: {type:String, es_boost:2.0} + user: String, + post_date: {type:Date, es_type:'date'}, + message: {type:String}, + title: {type:String, es_boost:2.0} }); -CommentSchema.plugin(mongoosastic); +CommentSchema.plugin(mongoosastic, { + bulk: { + size: 2, + delay: 100 + } +}); var Comment = mongoose.model('Comment', CommentSchema); -describe('Count', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - Comment.remove(function(){ +describe.only('Count', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Comment.remove(function() { config.deleteIndexIfExists(['comments'], function() { var comments = [ new Comment({ @@ -34,16 +36,21 @@ describe('Count', function(){ }) ]; async.forEach(comments, function(item, cb) { - item.save(cb); - }, function() { - setTimeout(done, config.indexingTimeout); - }); - }); + item.save(cb); + }, function() { + setTimeout(done, config.indexingTimeout); + }); + }); }); }); }); - it('should count a type', function(done){ + after(function() { + mongoose.disconnect(); + Comment.esClient.close(); + }); + + it('should count a type', function(done) { Comment.esCount({ term: { user: 'terry' From fe3639819bbc7bb2c6143c670157912b10ad9330 Mon Sep 17 00:00:00 2001 From: Ro Ramtohul Date: Mon, 30 Mar 2015 19:59:50 +0100 Subject: [PATCH 087/152] Set manual timeout for count test --- test/count-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/count-test.js b/test/count-test.js index f87dfbad..4d4d5d13 100644 --- a/test/count-test.js +++ b/test/count-test.js @@ -38,7 +38,7 @@ describe.only('Count', function() { async.forEach(comments, function(item, cb) { item.save(cb); }, function() { - setTimeout(done, config.indexingTimeout); + setTimeout(done, 2000); }); }); }); From 8893c57d18a1a86aecf87d169b833fbd81c4ff83 Mon Sep 17 00:00:00 2001 From: Can Kutlu Kinay Date: Sat, 18 Apr 2015 16:53:09 
+0300 Subject: [PATCH 088/152] Enable document filtering while indexing Fixes #66 --- lib/mongoosastic.js | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 8689d8ff..ae02bf89 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -15,7 +15,8 @@ module.exports = function Mongoosastic(schema, options) { typeName = options && options.type, alwaysHydrate = options && options.hydrate, defaultHydrateOptions = options && options.hydrateOptions, - bulk = options && options.bulk; + bulk = options && options.bulk, + filter = options && options.filter; if (options.esClient) { esClient = options.esClient; @@ -59,6 +60,10 @@ module.exports = function Mongoosastic(schema, options) { * @param cb Function */ schema.methods.index = function(options, cb) { + if (filter && !filter(this)) { + return cb(); + } + if (arguments.length < 2) { cb = arguments[0] || nop; options = {}; From f9d1c29a48fb91fb304043eaab71211305608f1e Mon Sep 17 00:00:00 2001 From: Ro Ramtohul Date: Sat, 25 Apr 2015 13:00:02 +0100 Subject: [PATCH 089/152] Delete esCount-patch.diff --- esCount-patch.diff | 114 --------------------------------------------- 1 file changed, 114 deletions(-) delete mode 100644 esCount-patch.diff diff --git a/esCount-patch.diff b/esCount-patch.diff deleted file mode 100644 index aca726da..00000000 --- a/esCount-patch.diff +++ /dev/null @@ -1,114 +0,0 @@ -Index: lib/mongoosastic.js -IDEA additional info: -Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP -<+>UTF-8 -=================================================================== ---- lib/mongoosastic.js (date 1427621708000) -+++ lib/mongoosastic.js (revision ) -@@ -251,9 +251,8 @@ - }); - }; - -- schema.statics.esCount = function (query, cb) { -+ schema.statics.esCount = function(query, cb) { -- var model = this; -- setIndexNameIfUnset(model.modelName); -+ setIndexNameIfUnset(this.modelName); - - if (cb == null && typeof query === 'function') { - cb = query; -@@ -266,10 +265,10 @@ - }, - index: options.index || indexName, - type: options.type || typeName -- } -+ }; - - esClient.count(esQuery, cb); -- } -+ }; - - function bulkDelete(options, cb) { - bulkAdd({ -Index: test/count-test.js -IDEA additional info: -Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP -<+>UTF-8 -=================================================================== ---- test/count-test.js (date 1427621708000) -+++ test/count-test.js (revision ) -@@ -1,27 +1,29 @@ --var mongoose = require('mongoose') -- , async = require('async') -- , esClient = new(require('elasticsearch').Client) -- , should = require('should') -- , config = require('./config') -- , Schema = mongoose.Schema -- , mongoosastic = require('../lib/mongoosastic'); -+var mongoose = require('mongoose'), -+ async = require('async'), -+ config = require('./config'), -+ Schema = mongoose.Schema, -+ mongoosastic = require('../lib/mongoosastic'); - -- - var CommentSchema = new Schema({ -- user: String -- , post_date: {type:Date, es_type:'date'} -- , message: {type:String} -- , title: {type:String, es_boost:2.0} -+ user: String, -+ post_date: {type: Date, es_type: 'date'}, -+ message: {type: String}, -+ title: {type: String, es_boost: 2.0} - }); - --CommentSchema.plugin(mongoosastic); -+CommentSchema.plugin(mongoosastic, { -+ bulk: { -+ size: 2, -+ delay: 100 -+ } -+}); - - var Comment = mongoose.model('Comment', CommentSchema); - --describe('Count', function(){ -+describe.only('Count', function() { -- before(function(done){ -+ 
before(function(done) { -- mongoose.connect(config.mongoUrl, function(){ -+ mongoose.connect(config.mongoUrl, function() { -- Comment.remove(function(){ -+ Comment.remove(function() { - config.deleteIndexIfExists(['comments'], function() { - var comments = [ - new Comment({ -@@ -34,16 +36,21 @@ - }) - ]; - async.forEach(comments, function(item, cb) { -- item.save(cb); -- }, function() { -- setTimeout(done, config.indexingTimeout); -- }); -- }); -+ item.save(cb); -+ }, function() { -+ setTimeout(done, config.indexingTimeout); -+ }); -+ }); - }); - }); -+ }); -+ -+ after(function() { -+ mongoose.disconnect(); -+ Comment.esClient.close(); - }); - -- it('should count a type', function(done){ -+ it('should count a type', function(done) { - Comment.esCount({ - term: { - user: 'terry' \ No newline at end of file From 9b2cb44aea44dd9afc665fa268c242d84b076852 Mon Sep 17 00:00:00 2001 From: Ro Ramtohul Date: Sat, 25 Apr 2015 13:22:21 +0100 Subject: [PATCH 090/152] Remove .only from tests --- test/count-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/count-test.js b/test/count-test.js index 4d4d5d13..33ae11fd 100644 --- a/test/count-test.js +++ b/test/count-test.js @@ -20,7 +20,7 @@ CommentSchema.plugin(mongoosastic, { var Comment = mongoose.model('Comment', CommentSchema); -describe.only('Count', function() { +describe('Count', function() { before(function(done) { mongoose.connect(config.mongoUrl, function() { Comment.remove(function() { From 68db11375b36f4b5dc6fd54648ec518968cf79c7 Mon Sep 17 00:00:00 2001 From: Sukru BEZEN Date: Sun, 26 Apr 2015 19:34:31 +0300 Subject: [PATCH 091/152] Add tests to the filtering logic --- lib/mongoosastic.js | 13 ++++----- test/config.js | 6 +++++ test/filtering-test.js | 60 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 73 insertions(+), 6 deletions(-) create mode 100644 test/filtering-test.js diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index ae02bf89..7c6aadb0 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -60,15 +60,15 @@ module.exports = function Mongoosastic(schema, options) { * @param cb Function */ schema.methods.index = function(options, cb) { - if (filter && !filter(this)) { - return cb(); - } - if (arguments.length < 2) { cb = arguments[0] || nop; options = {}; } + if (filter && filter(this)) { + return cb(); + } + setIndexNameIfUnset(this.constructor.modelName); var index = options.index || indexName, @@ -367,9 +367,10 @@ module.exports = function Mongoosastic(schema, options) { */ schema.post('save', function(doc) { - doc.index(function(err, res) { - doc.emit('es-indexed', err, res); + if (!filter || !filter(doc)) { + doc.emit('es-indexed', err, res); + } }); }); } diff --git a/test/config.js b/test/config.js index 945f689e..246e3999 100644 --- a/test/config.js +++ b/test/config.js @@ -13,6 +13,7 @@ module.exports = { indexingTimeout: INDEXING_TIMEOUT, deleteIndexIfExists: deleteIndexIfExists, createModelAndEnsureIndex: createModelAndEnsureIndex, + createModelAndSave: createModelAndSave, saveAndWaitIndex: saveAndWaitIndex, bookTitlesArray: bookTitlesArray, getClient: function() { @@ -48,6 +49,11 @@ function createModelAndEnsureIndex(Model, obj, cb) { }); } +function createModelAndSave(Model, obj, cb) { + var dude = new Model(obj); + dude.save(cb); +} + function saveAndWaitIndex(model, cb) { model.save(function(err) { if (err) cb(err); diff --git a/test/filtering-test.js b/test/filtering-test.js new file mode 100644 index 00000000..0d729851 --- /dev/null +++ b/test/filtering-test.js @@ 
-0,0 +1,60 @@ +var mongoose = require('mongoose'), + should = require('should'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); + +// -- Only index specific field +var MovieSchema = new Schema({ + title: {type: String, required: true, default: '', es_indexed: true}, + genre: {type: String, required:true, default: '', enum: ['horror', 'action', 'adventure', 'other'], es_indexed: true} +}); + +MovieSchema.plugin(mongoosastic, { + filter: function(self) { + return self.genre === 'action'; + } +}); + +var Movie = mongoose.model('Movie', MovieSchema); + +describe('Filter mode', function() { + var movies = null; + this.timeout(5000); + + before(function(done) { + config.deleteIndexIfExists(['movies'], function() { + mongoose.connect(config.mongoUrl, function() { + var client = mongoose.connections[0].db; + client.collection('movies', function(err, _movies) { + movies = _movies; + Movie.remove(done); + }); + }); + }); + }); + + after(function(done) { + mongoose.disconnect(); + Movie.esClient.close(); + done(); + }); + + it('should index horror genre', function(done) { + config.createModelAndEnsureIndex(Movie, {title: 'LOTR', genre: 'horror'}, function() { + Movie.search({term: {genre: 'horror'}}, function(err, results) { + results.hits.total.should.eql(1); + done(); + }); + }); + }); + + it('should not index action genre', function(done) { + config.createModelAndSave(Movie, {title: 'Man in Black', genre: 'action'}, function() { + Movie.search({term: {genre: 'action'}}, function(err, results) { + results.hits.total.should.eql(0); + done(); + }); + }); + }); +}); From c4099b27ed37fab2344f9bce60d26249ed03c88e Mon Sep 17 00:00:00 2001 From: Francesco Nero Date: Wed, 29 Apr 2015 11:44:25 +0200 Subject: [PATCH 092/152] Fix #64 This fix addresses cases where esTruncate was invoked without calling a save or remove before it. This caused indexName to be missing when needed by esClient.deleteByQuery. --- lib/mongoosastic.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 8689d8ff..de913513 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -128,6 +128,8 @@ module.exports = function Mongoosastic(schema, options) { options = {}; } + setIndexNameIfUnset(this.modelName); + var index = options.index || indexName, type = options.type || typeName; From 621963287b61bf56d118291ab180ff706396af3d Mon Sep 17 00:00:00 2001 From: guumaster Date: Fri, 1 May 2015 12:13:11 +0200 Subject: [PATCH 093/152] updated CHANGELOG.md --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8b57575..1259f1f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +3.1.0 / 2015-05-01 +================== + + * added esCount feature [#58](https://github.com/mongoosastic/mongoosastic/issues/58) + * fixed esTruncate [#69](https://github.com/mongoosastic/mongoosastic/issues/69) + * added filtering index logic [#67](https://github.com/mongoosastic/mongoosastic/issues/67) + 3.0.0 / 2015-03-29 ================== From 7f885398830289e4f934ee5bc509d2ebfcea9112 Mon Sep 17 00:00:00 2001 From: guumaster Date: Fri, 1 May 2015 12:15:00 +0200 Subject: [PATCH 094/152] 3.1.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 4d136360..6706e43f 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.0.0", + "version": "3.1.0", "tags": [ "mongodb", "elasticsearch", From 92a97ba2909938f2ba49a7e9e075885bd6a2d723 Mon Sep 17 00:00:00 2001 From: Sukru BEZEN Date: Fri, 1 May 2015 16:06:04 +0300 Subject: [PATCH 095/152] Modify README to include filter example --- README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/README.md b/README.md index 04c36e1a..fdde4fd3 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,7 @@ Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatical - [Indexing nested models](#indexing-nested-models) - [Indexing an existing collection](#indexing-an-existing-collection) - [Bulk indexing](#bulk-indexing) + - [Filtered indexing](#filtered-indexing) - [Indexing on demand](#indexing-on-demand) - [Truncating an index](#truncating-an-index) - [Mapping](#mapping) @@ -53,6 +54,7 @@ Options are: * `hydrate` - whether or not to lookup results in mongodb before * `hydrateOptions` - options to pass into hydrate function * `bulk` - size and delay options for bulk indexing +* `filter` - the function used for filtered indexing To have a model indexed into Elasticsearch simply add the plugin. @@ -218,6 +220,28 @@ BookSchema.plugin(mongoosastic, { }); ``` +### Filtered Indexing + +You can specify a filter function to index a model to Elasticsearch based on some specific conditions. + +Filtering function must return True for conditions that will ignore indexing to Elasticsearch. + +```javascript +var MovieSchema = new Schema({ + title: {type: String}, + genre: {type: String, enum: ['horror', 'action', 'adventure', 'other']} +}); + +MovieSchema.plugin(mongoosastic, { + filter: function(doc) { + return doc.genre === 'action'; + } +}); +``` + +Instances of Movie model having 'action' as their genre will not be indexed to Elasticsearch. + + ### Indexing On Demand You can do on-demand indexes using the `index` function From 8ec17f15fb76c6b02e4b9e59d1478ca9f9ac7432 Mon Sep 17 00:00:00 2001 From: Francesco Nero Date: Fri, 1 May 2015 17:46:03 +0200 Subject: [PATCH 096/152] Fix #73 The setIndexNameIfUnset was called too late --- lib/mongoosastic.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index fdd28898..f5842cf2 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -226,6 +226,8 @@ module.exports = function Mongoosastic(schema, options) { if (query === null) query = undefined; + setIndexNameIfUnset(this.modelName); + var _this = this, esQuery = { body: { @@ -243,7 +245,6 @@ module.exports = function Mongoosastic(schema, options) { esQuery[opt] = options[opt]; }); - setIndexNameIfUnset(this.modelName); esClient.search(esQuery, function(err, res) { if (err) { From 62cabf23b574415088f4e41b72d6d7f1442be7b9 Mon Sep 17 00:00:00 2001 From: Francesco Nero Date: Sat, 2 May 2015 15:50:42 +0200 Subject: [PATCH 097/152] Fix connection-test The test tried to search on deleted indexes, which caused elastic to raise an exception. Since the objective was to simply check whether the connection was successful, we now search on the meta _all index, which is guaranteed to always exist. 
--- test/connection-test.js | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/test/connection-test.js b/test/connection-test.js index 0b1791be..03188673 100644 --- a/test/connection-test.js +++ b/test/connection-test.js @@ -103,18 +103,22 @@ describe('Elasticsearch Connection', function() { function tryDummySearch(model, cb) { setTimeout(function() { model.search({ - query_string: { - query: 'Text1' - } - }, function(err, results) { - if (err) { - return cb(err); - } - - results.hits.total.should.eql(0); - model.esClient.close(); - cb(err); - }); + simple_query_string: { + query: 'Text1' + } + }, + { + index: '_all' + }, + function(err, results) { + if (err) { + return cb(err); + } + + results.hits.total.should.eql(0); + model.esClient.close(); + cb(err); + }); }, config.indexingTimeout); } From 01682ef7e371fedc03190f02c1bd842e7c04c159 Mon Sep 17 00:00:00 2001 From: Francesco Nero Date: Sat, 2 May 2015 15:59:48 +0200 Subject: [PATCH 098/152] Remove blank line to comply with JSCS. --- lib/mongoosastic.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f5842cf2..34c1bb31 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -245,7 +245,6 @@ module.exports = function Mongoosastic(schema, options) { esQuery[opt] = options[opt]; }); - esClient.search(esQuery, function(err, res) { if (err) { return cb(err); From 04c00ff4c797e654209e785489e9935314c0b00f Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 15:06:44 +0200 Subject: [PATCH 099/152] 3.1.1 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6706e43f..dc79e928 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.1.0", + "version": "3.1.1", "tags": [ "mongodb", "elasticsearch", From 69b5e052d97789c0930f02a1a0831dccea2f76db Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 15:14:57 +0200 Subject: [PATCH 100/152] updated devDependencies --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index dc79e928..8b1dc2ee 100644 --- a/package.json +++ b/package.json @@ -23,16 +23,16 @@ "mongoose": "^4.x" }, "devDependencies": { - "async": "^0.9.x", + "async": "^1.0.0", "gulp": "^3.8.11", "gulp-jscs": "^1.4.0", "gulp-jshint": "^1.9.4", "gulp-mocha": "^2.0.0", "jscs": "^1.12.0", - "jshint-stylish": "^1.0.1", + "jshint-stylish": "^2.0.0", "mocha": "^2.2.x", "mongoose": "^4.x", - "should": "^5.2.x" + "should": "^6.0.3" }, "engines": { "node": ">= 0.8.0" From 08f2ef52a0f71ce67aa7b07cd3ed212b79758292 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 16:04:36 +0200 Subject: [PATCH 101/152] added complex sorting options. 
#78 --- lib/mongoosastic.js | 21 ++++++++++- test/search-features-test.js | 72 ++++++++++++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 34c1bb31..41a15f25 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -241,8 +241,19 @@ module.exports = function Mongoosastic(schema, options) { } Object.keys(options).forEach(function(opt) { - if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + if (!opt.match(/(hydrate|sort)/) && options.hasOwnProperty(opt)) { esQuery[opt] = options[opt]; + } + + if (options.sort) { + if (isString(options.sort) || isStringArray(options.sort)) { + esQuery.sort = options.sort; + } else { + esQuery.body.sort = options.sort; + } + + } + }); esClient.search(esQuery, function(err, res) { @@ -532,3 +543,11 @@ function deleteByMongoId(options, cb) { } }); } + +function isString(subject) { + return typeof subject === 'string'; +} + +function isStringArray(arr) { + return arr.filter && arr.length === (arr.filter(function(x){ return (typeof x === 'string'); })).length; +} diff --git a/test/search-features-test.js b/test/search-features-test.js index 52c7aa9f..f4bd42f5 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -59,4 +59,76 @@ describe('Query DSL', function() { }); }); }); + + describe('Sort', function() { + + var getNames = function(i) { return i._source.name; }; + var expectedDesc = ['Legal', 'Construction', 'Commercial', 'Bail']; + var expectedAsc = expectedDesc.concat([]).reverse(); // clone and reverse + + describe('Simple sort', function() { + + it('should be able to return all data, sorted by name ascending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: 'name:asc' + }, function(err, res) { + res.hits.total.should.eql(4); + expectedAsc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + + it('should be able to return all data, sorted by name descending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: ['name:desc'] + }, function(err, res) { + res.hits.total.should.eql(4); + expectedDesc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + }); + + describe('Complex sort', function() { + + it('should be able to return all data, sorted by name ascending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: { + name: { order: 'asc' } + } + }, function(err, res) { + res.hits.total.should.eql(4); + expectedAsc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + + it('should be able to return all data, sorted by name descending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: { + name: { order: 'desc' }, + type: { order: 'asc' } + } + }, function(err, res) { + res.hits.total.should.eql(4); + expectedDesc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + }); + + }); + }); From fd84d1d944cac4220d7cdc32540719ca8f490773 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 16:15:13 +0200 Subject: [PATCH 102/152] added .idea to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 9daa8247..0aa35d8a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ .DS_Store node_modules +.idea From 6badd91baa22ed355190bb3a881edd2ec7e7f8ff Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 16:15:35 +0200 Subject: [PATCH 103/152] linted --- lib/mongoosastic.js | 8 +-- test/filtering-test.js | 1 - test/search-features-test.js | 122 
+++++++++++++++++------------------ 3 files changed, 65 insertions(+), 66 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 41a15f25..a050ea10 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -247,9 +247,9 @@ module.exports = function Mongoosastic(schema, options) { if (options.sort) { if (isString(options.sort) || isStringArray(options.sort)) { - esQuery.sort = options.sort; + esQuery.sort = options.sort; } else { - esQuery.body.sort = options.sort; + esQuery.body.sort = options.sort; } } @@ -545,9 +545,9 @@ function deleteByMongoId(options, cb) { } function isString(subject) { - return typeof subject === 'string'; + return typeof subject === 'string'; } function isStringArray(arr) { - return arr.filter && arr.length === (arr.filter(function(x){ return (typeof x === 'string'); })).length; + return arr.filter && arr.length === (arr.filter(function(x) { return (typeof x === 'string'); })).length; } diff --git a/test/filtering-test.js b/test/filtering-test.js index 0d729851..6bc9a016 100644 --- a/test/filtering-test.js +++ b/test/filtering-test.js @@ -1,5 +1,4 @@ var mongoose = require('mongoose'), - should = require('should'), config = require('./config'), Schema = mongoose.Schema, mongoosastic = require('../lib/mongoosastic'); diff --git a/test/search-features-test.js b/test/search-features-test.js index f4bd42f5..cc7a89c1 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -62,73 +62,73 @@ describe('Query DSL', function() { describe('Sort', function() { - var getNames = function(i) { return i._source.name; }; - var expectedDesc = ['Legal', 'Construction', 'Commercial', 'Bail']; - var expectedAsc = expectedDesc.concat([]).reverse(); // clone and reverse - - describe('Simple sort', function() { - - it('should be able to return all data, sorted by name ascending', function(done) { - Bond.search({ - match_all: {} - }, { - sort: 'name:asc' - }, function(err, res) { - res.hits.total.should.eql(4); - expectedAsc.should.eql(res.hits.hits.map(getNames)); - - done(); - }); - }); + var getNames = function(i) { return i._source.name; }; + var expectedDesc = ['Legal', 'Construction', 'Commercial', 'Bail']; + var expectedAsc = expectedDesc.concat([]).reverse(); // clone and reverse + + describe('Simple sort', function() { + + it('should be able to return all data, sorted by name ascending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: 'name:asc' + }, function(err, res) { + res.hits.total.should.eql(4); + expectedAsc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); - it('should be able to return all data, sorted by name descending', function(done) { - Bond.search({ - match_all: {} - }, { - sort: ['name:desc'] - }, function(err, res) { - res.hits.total.should.eql(4); - expectedDesc.should.eql(res.hits.hits.map(getNames)); - - done(); - }); - }); + it('should be able to return all data, sorted by name descending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: ['name:desc'] + }, function(err, res) { + res.hits.total.should.eql(4); + expectedDesc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); }); + }); - describe('Complex sort', function() { - - it('should be able to return all data, sorted by name ascending', function(done) { - Bond.search({ - match_all: {} - }, { - sort: { - name: { order: 'asc' } - } - }, function(err, res) { - res.hits.total.should.eql(4); - expectedAsc.should.eql(res.hits.hits.map(getNames)); - - done(); - }); - }); + describe('Complex sort', function() { - 
it('should be able to return all data, sorted by name descending', function(done) { - Bond.search({ - match_all: {} - }, { - sort: { - name: { order: 'desc' }, - type: { order: 'asc' } - } - }, function(err, res) { - res.hits.total.should.eql(4); - expectedDesc.should.eql(res.hits.hits.map(getNames)); - - done(); - }); - }); + it('should be able to return all data, sorted by name ascending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: { + name: { order: 'asc' } + } + }, function(err, res) { + res.hits.total.should.eql(4); + expectedAsc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); }); + it('should be able to return all data, sorted by name descending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: { + name: { order: 'desc' }, + type: { order: 'asc' } + } + }, function(err, res) { + res.hits.total.should.eql(4); + expectedDesc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + }); + }); }); From bf7d35904f73ed5794faf36ab83841f9c7853bdf Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 16:22:01 +0200 Subject: [PATCH 104/152] 3.1.2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8b1dc2ee..dede4b3a 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.1.1", + "version": "3.1.2", "tags": [ "mongodb", "elasticsearch", From 86a6246183cf2fb22ef10c56073862d8da9e23a9 Mon Sep 17 00:00:00 2001 From: "gustavo.marin" Date: Mon, 25 May 2015 17:46:38 +0200 Subject: [PATCH 105/152] changelog updated --- CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1259f1f5..eb85e329 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +3.1.2 / 2015-05-25 +================== + + * added complex sorting object to `search()` options parameters [#79](https://github.com/mongoosastic/mongoosastic/issues/79) + * devDependecies updated + +3.1.1 / 2015-05-25 +================== + + * fixed index creation [#75](https://github.com/mongoosastic/mongoosastic/issues/75) + * added index filtering documentation [#72](https://github.com/mongoosastic/mongoosastic/issues/72) + 3.1.0 / 2015-05-01 ================== From a8ad4a7f32fd8885dd9e356a9d690d19dbc47e2b Mon Sep 17 00:00:00 2001 From: Jon Burgess Date: Thu, 18 Jun 2015 15:07:00 +0100 Subject: [PATCH 106/152] added pause/resume and batchSize to stream to improve performance --- lib/mongoosastic.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index a050ea10..1e737080 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -172,13 +172,15 @@ module.exports = function Mongoosastic(schema, options) { setIndexNameIfUnset(this.modelName); - var stream = this.find(query).stream(); + var stream = this.find(query).batchSize(50).stream(); stream.on('data', function(doc) { + stream.pause(); counter++; doc.save(function(err) { if (err) { - return em.emit('error', err); + em.emit('error', err); + stream.resume(); } doc.on('es-indexed', function(err, doc) { @@ -188,6 +190,7 @@ module.exports = function Mongoosastic(schema, options) { } else { em.emit('data', null, doc); } + stream.resume(); }); }); }); From 53e308f8832663c8729b3c9572577c7b92126a34 Mon Sep 17 00:00:00 2001 From: Jon Burgess Date: Thu, 18 Jun 2015 15:09:22 
+0100 Subject: [PATCH 107/152] added missing return --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 1e737080..30002f89 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -180,7 +180,7 @@ module.exports = function Mongoosastic(schema, options) { doc.save(function(err) { if (err) { em.emit('error', err); - stream.resume(); + return stream.resume(); } doc.on('es-indexed', function(err, doc) { From 6b09cc130585c63f3f7a8353d930b0d462c885ee Mon Sep 17 00:00:00 2001 From: Jon Burgess Date: Tue, 23 Jun 2015 15:08:31 +0100 Subject: [PATCH 108/152] added batch to bulk defaults, each item now defaults individually --- lib/mongoosastic.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 30002f89..8b9e4a5e 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -163,16 +163,16 @@ module.exports = function Mongoosastic(schema, options) { }; //Set indexing to be bulk when synchronizing to make synchronizing faster - bulk = bulk || { - delay: 1000, - size: 1000 - }; + bulk = bulk || {}; + bulk.delay = bulk.delay || 1000; + bulk.size = bulk.size || 1000; + bulk.batch = bulk.batch || 50; query = query || {}; setIndexNameIfUnset(this.modelName); - var stream = this.find(query).batchSize(50).stream(); + var stream = this.find(query).batchSize(bulk.batch).stream(); stream.on('data', function(doc) { stream.pause(); From 0b2e29516d6a8c3fe4fd212a89affb153d1391d3 Mon Sep 17 00:00:00 2001 From: Jon Burgess Date: Tue, 23 Jun 2015 15:52:59 +0100 Subject: [PATCH 109/152] added comment --- lib/mongoosastic.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 8b9e4a5e..da7dec8e 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -163,6 +163,7 @@ module.exports = function Mongoosastic(schema, options) { }; //Set indexing to be bulk when synchronizing to make synchronizing faster + //Set default values when not present bulk = bulk || {}; bulk.delay = bulk.delay || 1000; bulk.size = bulk.size || 1000; From bb9edc7b1344b30bb24b172819c89141284906db Mon Sep 17 00:00:00 2001 From: James Date: Tue, 30 Jun 2015 13:57:22 +0100 Subject: [PATCH 110/152] fix for single index not working after running synchronize --- lib/mongoosastic.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index a050ea10..3bce8bf1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -198,6 +198,7 @@ module.exports = function Mongoosastic(schema, options) { if (counter === 0 && bulkBuffer.length === 0) { clearInterval(closeInterval); close(); + bulk = false; } }, 1000); }); From 2de7e2b8b3f9ca9c58f24060891a45ac779c1586 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 11:59:26 +0200 Subject: [PATCH 111/152] 3.1.1 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6706e43f..dc79e928 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.1.0", + "version": "3.1.1", "tags": [ "mongodb", "elasticsearch", From b3ce63ae3ef00fece4b452c2035a1bc4bd6dc902 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:02:03 +0200 Subject: [PATCH 112/152] 3.1.3 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index dede4b3a..6c2de9b2 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.1.2", + "version": "3.1.3", "tags": [ "mongodb", "elasticsearch", From 0c755a496c698bdead0b334a58d60ef57dd775b2 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:07:16 +0200 Subject: [PATCH 113/152] upgrade travis to use new infrastructure --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 31a642e5..7ab89e81 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,5 @@ +sudo: false + language: node_js node_js: From 583738d6ce34d5426a80e7e173cdb9e71b0a2370 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:15:45 +0200 Subject: [PATCH 114/152] updated CHANGELOG --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index eb85e329..2fef8eee 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +3.1.3 / 2015-07-19 +================== + + * improves synchronize high memory usage [#84](https://github.com/mongoosastic/mongoosastic/issues/84) + 3.1.2 / 2015-05-25 ================== From 21480c76722003b0bf46ac070d3a1b808a1a9526 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:29:09 +0200 Subject: [PATCH 115/152] reverting travis config --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7ab89e81..c784931c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,9 +1,9 @@ -sudo: false language: node_js node_js: - 0.10 + - 0.12 services: - mongodb From 6cb434dfe7767f8d0336efa6507219482fd8a3c3 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:35:34 +0200 Subject: [PATCH 116/152] added iojs plattform to travis --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index c784931c..89362c12 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,8 +2,9 @@ language: node_js node_js: - - 0.10 - - 0.12 + - 0.10.x + - 0.12.x + - iojs services: - mongodb From 1b71a5c700826c55958bc1ac64cddfe51336c3bd Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:37:01 +0200 Subject: [PATCH 117/152] fixed .travis.yml --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 89362c12..2ae511b4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,8 +2,8 @@ language: node_js node_js: - - 0.10.x - - 0.12.x + - 0.10 + - 0.12 - iojs services: From 380a3567dbfb9bcc977a805786fa5287e12af108 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:49:14 +0200 Subject: [PATCH 118/152] updated package dependencies --- package.json | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 6c2de9b2..f56419e1 100644 --- a/package.json +++ b/package.json @@ -16,12 +16,10 @@ }, "main": "lib/mongoosastic.js", 
"dependencies": { - "elasticsearch": "^4.0.x", + "mongoose": "^4.x", + "elasticsearch": "^5.0.0", "nop": "^1.0.0" }, - "peerDependencies": { - "mongoose": "^4.x" - }, "devDependencies": { "async": "^1.0.0", "gulp": "^3.8.11", @@ -32,10 +30,10 @@ "jshint-stylish": "^2.0.0", "mocha": "^2.2.x", "mongoose": "^4.x", - "should": "^6.0.3" + "should": "^7.0.2" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 0.10.0" }, "scripts": { "test": "gulp" From 23a2cfc32c33f1627cc4fa7cb4445102e2aaf503 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 12:58:40 +0200 Subject: [PATCH 119/152] added timeouts to geo-tests --- test/geo-test.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/geo-test.js b/test/geo-test.js index 95896d58..b41a78bb 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -111,7 +111,7 @@ describe('GeoTest', function() { res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); done(); }); - }, 1100); + }, process.env.BULK_TEST_TIMEOUT || 1100); }); it('should be able to resync geo coordinates from the database', function(done) { @@ -137,7 +137,7 @@ describe('GeoTest', function() { res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); done(); }); - }, 1000); + }, process.env.BULK_TEST_TIMEOUT || 1100); }); }); }); @@ -187,7 +187,7 @@ describe('GeoTest', function() { }); }); }); - }, 1000); + }, process.env.BULK_TEST_TIMEOUT || 1000); }); }); From eae364919ef9b15712bff40e3905ecfa0b933af3 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:00:04 +0200 Subject: [PATCH 120/152] fixed travis config --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 2ae511b4..a95ae1ea 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +sudo: false language: node_js From f639189db7617a3c70a8c6301c08b2ee245736c7 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:23:08 +0200 Subject: [PATCH 121/152] updated package.json --- package.json | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index f56419e1..8760893c 100644 --- a/package.json +++ b/package.json @@ -16,20 +16,19 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "mongoose": "^4.x", "elasticsearch": "^5.0.0", + "mongoose": "^4.0.7", "nop": "^1.0.0" }, "devDependencies": { - "async": "^1.0.0", - "gulp": "^3.8.11", - "gulp-jscs": "^1.4.0", - "gulp-jshint": "^1.9.4", - "gulp-mocha": "^2.0.0", - "jscs": "^1.12.0", - "jshint-stylish": "^2.0.0", - "mocha": "^2.2.x", - "mongoose": "^4.x", + "async": "^1.3.0", + "gulp": "^3.9.0", + "gulp-jscs": "^1.6.0", + "gulp-jshint": "^1.11.2", + "gulp-mocha": "^2.1.3", + "jscs": "^1.13.1", + "jshint-stylish": "^2.0.1", + "mocha": "^2.2.5", "should": "^7.0.2" }, "engines": { @@ -37,5 +36,11 @@ }, "scripts": { "test": "gulp" - } + }, + "licence": [ + { + "name": "MIT", + "url": "http://opensource.org/licenses/MIT" + } + ] } From bb1e1d7d8ad62fec8df37b65e010f9686b58941b Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:24:21 +0200 Subject: [PATCH 122/152] increased mocha timeout. 
linted --- gulpfile.js | 2 +- test/geo-test.js | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gulpfile.js b/gulpfile.js index 6b873911..4b409218 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -20,7 +20,7 @@ gulp.task('mocha', function() { return gulp.src(['test/*-test.js'], { read: false }) .pipe(mocha({ reporter: 'spec', - timeout: 60000, + timeout: 600000, globals: { should: require('should') } diff --git a/test/geo-test.js b/test/geo-test.js index b41a78bb..48f1c512 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -137,7 +137,7 @@ describe('GeoTest', function() { res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); done(); }); - }, process.env.BULK_TEST_TIMEOUT || 1100); + }, process.env.BULK_TEST_TIMEOUT || 1100); }); }); }); @@ -187,7 +187,7 @@ describe('GeoTest', function() { }); }); }); - }, process.env.BULK_TEST_TIMEOUT || 1000); + }, process.env.BULK_TEST_TIMEOUT || 1000); }); }); From 3001e854cf60c0c172f3d18c9c4dc5962c5ca012 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:39:13 +0200 Subject: [PATCH 123/152] tweaking elasticsearch timeouts for travis build --- .travis.yml | 3 ++- test/alternative-index-method-test.js | 6 +++--- test/bulk-test.js | 2 +- test/config.js | 4 +++- test/connection-test.js | 4 ++-- test/count-test.js | 2 +- test/geo-test.js | 6 +++--- test/highlight-features-test.js | 2 +- test/index-test.js | 6 +++--- test/search-features-test.js | 2 +- test/synchronize-test.js | 2 +- test/truncate-test.js | 2 +- 12 files changed, 22 insertions(+), 19 deletions(-) diff --git a/.travis.yml b/.travis.yml index a95ae1ea..aace7a50 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,4 +12,5 @@ services: - elasticsearch env: - - BULK_TEST_TIMEOUT=30000 + - BULK_ACTION_TIMEOUT=30000 + - INDEXING_TIMEOUT=3000 diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 16d671ac..dc285d25 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -33,7 +33,7 @@ describe('Index Method', function() { res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); }); @@ -47,7 +47,7 @@ describe('Index Method', function() { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); }); @@ -64,7 +64,7 @@ describe('Index Method', function() { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); }); diff --git a/test/bulk-test.js b/test/bulk-test.js index f5ce10e9..36a1b077 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -63,6 +63,6 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, process.env.BULK_TEST_TIMEOUT || 4000); + }, config.BULK_ACTION_TIMEOUT); }); }); diff --git a/test/config.js b/test/config.js index f480299d..6bb488f6 100644 --- a/test/config.js +++ b/test/config.js @@ -7,10 +7,12 @@ var elasticsearch = require('elasticsearch'), async = require('async'); const INDEXING_TIMEOUT = process.env.INDEXING_TIMEOUT || 2000; +const BULK_ACTION_TIMEOUT = process.env.BULK_ACTION_TIMEOUT || 4000; module.exports = { mongoUrl: 'mongodb://localhost/es-test', - indexingTimeout: INDEXING_TIMEOUT, + INDEXING_TIMEOUT: INDEXING_TIMEOUT, + BULK_ACTION_TIMEOUT: BULK_ACTION_TIMEOUT, deleteIndexIfExists: deleteIndexIfExists, 
createModelAndEnsureIndex: createModelAndEnsureIndex, createModelAndSave: createModelAndSave, diff --git a/test/connection-test.js b/test/connection-test.js index 03188673..e9ef563b 100644 --- a/test/connection-test.js +++ b/test/connection-test.js @@ -28,7 +28,7 @@ describe('Elasticsearch Connection', function() { async.forEach(dummies, function(item, cb) { item.save(cb); }, function() { - setTimeout(done, config.indexingTimeout); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); @@ -119,6 +119,6 @@ function tryDummySearch(model, cb) { model.esClient.close(); cb(err); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); } diff --git a/test/count-test.js b/test/count-test.js index 33ae11fd..015e30ce 100644 --- a/test/count-test.js +++ b/test/count-test.js @@ -38,7 +38,7 @@ describe('Count', function() { async.forEach(comments, function(item, cb) { item.save(cb); }, function() { - setTimeout(done, 2000); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); diff --git a/test/geo-test.js b/test/geo-test.js index 48f1c512..f5c9ff1c 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -111,7 +111,7 @@ describe('GeoTest', function() { res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); done(); }); - }, process.env.BULK_TEST_TIMEOUT || 1100); + }, config.INDEXING_TIMEOUT); }); it('should be able to resync geo coordinates from the database', function(done) { @@ -137,7 +137,7 @@ describe('GeoTest', function() { res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); done(); }); - }, process.env.BULK_TEST_TIMEOUT || 1100); + }, config.INDEXING_TIMEOUT); }); }); }); @@ -187,7 +187,7 @@ describe('GeoTest', function() { }); }); }); - }, process.env.BULK_TEST_TIMEOUT || 1000); + }, config.INDEXING_TIMEOUT); }); }); diff --git a/test/highlight-features-test.js b/test/highlight-features-test.js index e18c8fd1..b7c48aa3 100644 --- a/test/highlight-features-test.js +++ b/test/highlight-features-test.js @@ -40,7 +40,7 @@ describe('Highlight search', function() { }) ]; async.forEach(texts, config.saveAndWaitIndex, function() { - setTimeout(done, config.indexingTimeout); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); diff --git a/test/index-test.js b/test/index-test.js index 3911323f..4df2a08b 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -184,7 +184,7 @@ describe('indexing', function() { res.hits.total.should.eql(0); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); @@ -199,7 +199,7 @@ describe('indexing', function() { res.hits.total.should.eql(0); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); tweet.unIndex(); @@ -239,7 +239,7 @@ describe('indexing', function() { tweet.save(function() { talk.save(function() { talk.on('es-indexed', function(err, res) { - setTimeout(done, config.indexingTimeout); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); diff --git a/test/search-features-test.js b/test/search-features-test.js index cc7a89c1..5227aaa0 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -26,7 +26,7 @@ describe('Query DSL', function() { new Bond({name: 'Legal', type: 'C', price: 30000}) ]; async.forEach(bonds, config.saveAndWaitIndex, function() { - setTimeout(done, config.indexingTimeout); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 1d53b9fe..8b8b6cbe 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -56,7 +56,7 @@ 
describe('Synchronize', function() { results.hits.total.should.eql(2); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); diff --git a/test/truncate-test.js b/test/truncate-test.js index 356aa91e..f5ce20cb 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -27,7 +27,7 @@ describe('Truncate', function() { async.forEach(dummies, function(item, cb) { item.save(cb); }, function() { - setTimeout(done, config.indexingTimeout); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); From 6c340c8d665ada00bc139990b8ce19990a4ca27b Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:42:00 +0200 Subject: [PATCH 124/152] fixed travis environment variables --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index aace7a50..9d619f1f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,5 +12,4 @@ services: - elasticsearch env: - - BULK_ACTION_TIMEOUT=30000 - - INDEXING_TIMEOUT=3000 + - BULK_ACTION_TIMEOUT=30000 INDEXING_TIMEOUT=3000 From 742a233ab7ba378807ecd9e6c810ef24b4df72bc Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:51:46 +0200 Subject: [PATCH 125/152] updated changelog --- CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2fef8eee..36484d50 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +3.1.4 / 2015-07-19 +================== + + * updated package.json dependencies and added "licence" field. + * added `iojs` and Node 0.12 to travis environments. + * upgraded to new travis infrastructure. + * minor tweaks to improve tests. + 3.1.3 / 2015-07-19 ================== From 473b7e020a7a565f865068fbc5ef571c46f49699 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 19 Jul 2015 13:52:06 +0200 Subject: [PATCH 126/152] 3.1.4 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8760893c..cb74c3ea 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R.
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.1.3", + "version": "3.1.4", "tags": [ "mongodb", "elasticsearch", From 03d86c3fb46e3742912041aca7a3a51b40337dc4 Mon Sep 17 00:00:00 2001 From: isayme Date: Mon, 7 Sep 2015 17:03:44 +0800 Subject: [PATCH 127/152] index/unindex when findOneAndUpdate/findOneAndRemove --- lib/mongoosastic.js | 58 ++++++++++++++++++++++++++------------------- test/index-test.js | 44 ++++++++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 25 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 1b58ddc8..9ca19128 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -376,40 +376,48 @@ module.exports = function Mongoosastic(schema, options) { } } + function postRemove(doc) { + setIndexNameIfUnset(doc.constructor.modelName); + + var options = { + index: indexName, + type: typeName, + tries: 3, + model: doc, + client: esClient + }; + + if (bulk) { + bulkDelete(options, nop); + } else { + deleteByMongoId(options, nop); + } + } + + function postSave(doc) { + doc.index(function(err, res) { + if (!filter || !filter(doc)) { + doc.emit('es-indexed', err, res); + } + }); + } + /** * Use standard Mongoose Middleware hooks * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function(doc) { - setIndexNameIfUnset(doc.constructor.modelName); - - var options = { - index: indexName, - type: typeName, - tries: 3, - model: doc, - client: esClient - }; - - if (bulk) { - bulkDelete(options, nop); - } else { - deleteByMongoId(options, nop); - } - }); + /** + * Remove in elasticsearch on remove + */ + schema.post('remove', postRemove); + schema.post('findOneAndRemove', postRemove); /** * Save in elasticsearch on save. 
*/ - - schema.post('save', function(doc) { - doc.index(function(err, res) { - if (!filter || !filter(doc)) { - doc.emit('es-indexed', err, res); - } - }); - }); + schema.post('save', postSave); + schema.post('findOneAndUpdate', postSave); } }; diff --git a/test/index-test.js b/test/index-test.js index 4df2a08b..de69a239 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -154,6 +154,28 @@ describe('indexing', function() { }); }); + it('should reindex when findOneAndUpdate', function(done) { + Tweet.findOneAndUpdate({ + message: 'I like Riak better' + }, { + message: 'I like Jack better' + }, { + new: true + }, function(err, doc) { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'Jack' + } + }, function(err, results) { + results.hits.total.should.eql(1); + results.hits.hits[0]._source.message.should.eql('I like Jack better'); + done(); + }); + }, config.INDEXING_TIMEOUT); + }); + }); + it('should report errors', function(done) { Tweet.search({queriez: 'jamescarr'}, function(err, results) { err.message.should.match(/SearchPhaseExecutionException/); @@ -220,6 +242,28 @@ describe('indexing', function() { }); }); + it('should remove from index when findOneAndRemove', function(done) { + var tweet = new Tweet({ + user: 'jamescarr', + message: 'findOneAndRemove' + }); + + config.createModelAndEnsureIndex(Tweet, tweet, function() { + Tweet.findByIdAndRemove(tweet._id, function() { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'findOneAndRemove' + } + }, function(err, res) { + res.hits.total.should.eql(0); + done(); + }); + }, config.INDEXING_TIMEOUT); + }); + }); + }); + }); describe('Isolated Models', function() { From a04d1bfcf44e86cc908f1a18c73cc10b2d8fc168 Mon Sep 17 00:00:00 2001 From: isayme Date: Mon, 7 Sep 2015 19:32:20 +0800 Subject: [PATCH 128/152] update dependency elasticsearch to ^8.0.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index cb74c3ea..95f1612e 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elasticsearch": "^5.0.0", + "elasticsearch": "^8.0.0", "mongoose": "^4.0.7", "nop": "^1.0.0" }, From 6056f4164c4606802d533ea1ab7fb4d1b0867360 Mon Sep 17 00:00:00 2001 From: guumaster Date: Mon, 7 Sep 2015 16:50:45 +0200 Subject: [PATCH 129/152] package.json updated. 
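With the middleware hooks above in place, atomic Mongoose operations keep the Elasticsearch index in step without explicit index()/unIndex() calls. A small sketch mirroring the tests, assuming a Tweet model registered with the plugin and an open mongoose connection; the tests pass {new: true} so that the post hook receives (and indexes) the updated document:

var mongoose = require('mongoose');
var Tweet = mongoose.model('Tweet');

Tweet.findOneAndUpdate(
  { message: 'I like Riak better' },
  { message: 'I like Jack better' },
  { new: true },
  function(err, doc) {
    if (err || !doc) { return console.error(err || 'no matching tweet'); }

    // The post 'findOneAndRemove' hook unindexes the document;
    // findByIdAndRemove is routed through the same middleware.
    Tweet.findByIdAndRemove(doc._id, function(removeErr) {
      if (removeErr) { console.error(removeErr); }
    });
  }
);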
and src linted --- .jscsrc | 1 + package.json | 14 +++++++------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.jscsrc b/.jscsrc index 6c00c569..ba63066e 100644 --- a/.jscsrc +++ b/.jscsrc @@ -2,6 +2,7 @@ "preset": "airbnb", "requireMultipleVarDecl": null, "disallowMultipleVarDecl": null, + "requireTrailingComma": null, "requireCamelCaseOrUpperCaseIdentifiers": "ignoreProperties", "requirePaddingNewLinesAfterBlocks": null } diff --git a/package.json b/package.json index 95f1612e..c3777c76 100644 --- a/package.json +++ b/package.json @@ -16,20 +16,20 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elasticsearch": "^8.0.0", - "mongoose": "^4.0.7", + "elasticsearch": "^8.0.1", + "mongoose": "^4.1.5", "nop": "^1.0.0" }, "devDependencies": { - "async": "^1.3.0", + "async": "^1.4.2", "gulp": "^3.9.0", - "gulp-jscs": "^1.6.0", + "gulp-jscs": "^2.0.0", "gulp-jshint": "^1.11.2", "gulp-mocha": "^2.1.3", - "jscs": "^1.13.1", + "jscs": "^2.1.1", "jshint-stylish": "^2.0.1", - "mocha": "^2.2.5", - "should": "^7.0.2" + "mocha": "^2.3.1", + "should": "^7.1.0" }, "engines": { "node": ">= 0.10.0" From 0e35bd74a6f4598791700b1d60df625dffbd2246 Mon Sep 17 00:00:00 2001 From: guumaster Date: Mon, 7 Sep 2015 17:33:12 +0200 Subject: [PATCH 130/152] added fuzzy search test --- test/search-features-test.js | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/test/search-features-test.js b/test/search-features-test.js index 5227aaa0..cce42c12 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -131,4 +131,27 @@ describe('Query DSL', function() { }); + describe('test', function() { + + it('should do a fuzzy query', function(done) { + var getNames = function(i) { return i._source.name; }; + + Bond.search({ + match: { + name: { + query: 'comersial', + fuzziness: 2 + } + } + }, function(err, res) { + + res.hits.total.should.eql(1); + ['Commercial'].should.eql(res.hits.hits.map(getNames)); + done(); + }); + + }); + + }); + }); From de3343b563a715af4aa19ab34c8ac5ba9b35d86a Mon Sep 17 00:00:00 2001 From: guumaster Date: Mon, 7 Sep 2015 17:38:11 +0200 Subject: [PATCH 131/152] 3.2.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c3777c76..8f86abef 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
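The fuzzy test above uses nothing mongoosastic-specific: whatever query DSL the Elasticsearch client accepts can be handed to search() as the first argument. A minimal sketch, assuming the same Bond model as in the test file:

var mongoose = require('mongoose');
var Bond = mongoose.model('Bond');

// A match query with fuzziness tolerates small misspellings such as 'comersial'.
Bond.search({
  match: {
    name: {
      query: 'comersial',
      fuzziness: 2
    }
  }
}, function(err, res) {
  if (err) { return console.error(err); }
  console.log(res.hits.total);
});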
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.1.4", + "version": "3.2.0", "tags": [ "mongodb", "elasticsearch", From 737ab548498948a084e5631f6922fcf274b934e8 Mon Sep 17 00:00:00 2001 From: Gustavo Date: Mon, 7 Sep 2015 17:56:36 +0200 Subject: [PATCH 132/152] small json typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fdde4fd3..26f98edc 100644 --- a/README.md +++ b/README.md @@ -415,7 +415,7 @@ var geoQuery = { var geoFilter = { geo_shape: { - geo_shape": { + geo_shape: { shape: { type: "point", coordinates: [3,1] From e19fbc3aa45dbc7246075dc27d73725ed4edc55d Mon Sep 17 00:00:00 2001 From: Jose Maza Date: Wed, 9 Sep 2015 16:06:59 -0600 Subject: [PATCH 133/152] added support for suggesters --- lib/mongoosastic.js | 4 ++ test/suggesters-test.js | 100 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 104 insertions(+) create mode 100644 test/suggesters-test.js diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 9ca19128..22e34fcd 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -245,6 +245,10 @@ module.exports = function Mongoosastic(schema, options) { esQuery.body.highlight = options.highlight; } + if (options.suggest) { + esQuery.body.suggest = options.suggest; + } + Object.keys(options).forEach(function(opt) { if (!opt.match(/(hydrate|sort)/) && options.hasOwnProperty(opt)) { esQuery[opt] = options[opt]; diff --git a/test/suggesters-test.js b/test/suggesters-test.js new file mode 100644 index 00000000..028f52ee --- /dev/null +++ b/test/suggesters-test.js @@ -0,0 +1,100 @@ +var mongoose = require('mongoose'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client({ + deadTimeout: 0, + keepAlive: false + }), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); + +var KittenSchema = new Schema({ + name: {type: String, es_type: 'completion', es_index_analyzer: 'simple', es_search_analyzer: 'simple', es_indexed: true}, + breed: {type: String } +}); + +KittenSchema.plugin(mongoosastic); + +var Kitten = mongoose.model('Kitten', KittenSchema); + +Kitten.createMapping(); + +describe('Suggesters', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Kitten.remove(function() { + config.deleteIndexIfExists(['kittens'], function() { + Kitten = mongoose.model('Kitten', KittenSchema); + Kitten.createMapping(); + + var kittens = [ + new Kitten({ + name: 'Cookie', + breed: 'Aegean' + }), + new Kitten({ + name: 'Chipmunk', + breed: 'Aegean' + }), + new Kitten({ + name: 'Twix', + breed: 'Persian' + }), + new Kitten({ + name: 'Cookies and Cream', + breed: 'Persian' + }) + ]; + async.forEach(kittens, config.saveAndWaitIndex, function() { + setTimeout(done, config.INDEXING_TIMEOUT); + }); + }); + }); + }); + }); + + after(function(done) { + Kitten.remove(); + Kitten.esClient.close(); + mongoose.disconnect(); + done(); + }); + + describe('Testing Suggest', function() { + it('should index property name with type completion', function(done) { + + Kitten = mongoose.model('Kitten', KittenSchema); + Kitten.createMapping(function() { + esClient.indices.getMapping({ + index: 'kittens', + type: 'kitten' + }, function(err, mapping) { + var props = mapping.kitten !== undefined ? 
/* elasticsearch 1.0 & 0.9 support */ + mapping.kitten.properties : /* ES 0.9.11 */ + mapping.kittens.mappings.kitten.properties; /* ES 1.0.0 */ + props.name.type.should.eql('completion'); + done(); + }); + }); + }); + it('should return suggestions after hits', function(done) { + Kitten.search({ + match_all: {} + }, { + suggest: { + kittensuggest: { + text: 'Cook', + completion: { + field: 'name' + } + } + } + }, function(err, res) { + res.should.have.property('suggest'); + res.suggest.kittensuggest[0].options.length.should.eql(2); + done(); + }); + }); + }); +}); From 412868d1a5b70e98ad044a6d9b54d050ce591c25 Mon Sep 17 00:00:00 2001 From: root Date: Thu, 10 Sep 2015 18:50:39 +0200 Subject: [PATCH 134/152] fix(postSave): fix findOneAndUpdate if document doesn't exist --- lib/mongoosastic.js | 12 +++++++----- test/index-test.js | 14 ++++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 9ca19128..7dbf8088 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -395,11 +395,13 @@ module.exports = function Mongoosastic(schema, options) { } function postSave(doc) { - doc.index(function(err, res) { - if (!filter || !filter(doc)) { - doc.emit('es-indexed', err, res); - } - }); + if (doc) { + doc.index(function(err, res) { + if (!filter || !filter(doc)) { + doc.emit('es-indexed', err, res); + } + }); + } } /** diff --git a/test/index-test.js b/test/index-test.js index de69a239..c4808b9f 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -176,6 +176,20 @@ describe('indexing', function() { }); }); + it('should be able to execute findOneAndUpdate if document doesn\'t exist', function(done) { + Tweet.findOneAndUpdate({ + message: 'Not existing document' + }, { + message: 'I like Jack better' + }, { + new: true + }, function(err, doc) { + should.not.exist(err); + should.not.exist(doc); + done(); + }); + }); + it('should report errors', function(done) { Tweet.search({queriez: 'jamescarr'}, function(err, results) { err.message.should.match(/SearchPhaseExecutionException/); From 25c8c55ca3b061ae74d5d5f9cabd34a0f829761e Mon Sep 17 00:00:00 2001 From: Jose Maza Date: Thu, 10 Sep 2015 10:56:22 -0600 Subject: [PATCH 135/152] fixed suggesters tests --- test/suggesters-test.js | 69 ++++++++++++++++++++--------------------- 1 file changed, 33 insertions(+), 36 deletions(-) diff --git a/test/suggesters-test.js b/test/suggesters-test.js index 028f52ee..007fef87 100644 --- a/test/suggesters-test.js +++ b/test/suggesters-test.js @@ -9,55 +9,52 @@ var mongoose = require('mongoose'), Schema = mongoose.Schema, mongoosastic = require('../lib/mongoosastic'); -var KittenSchema = new Schema({ - name: {type: String, es_type: 'completion', es_index_analyzer: 'simple', es_search_analyzer: 'simple', es_indexed: true}, - breed: {type: String } -}); - -KittenSchema.plugin(mongoosastic); - -var Kitten = mongoose.model('Kitten', KittenSchema); - -Kitten.createMapping(); +var KittenSchema; +var Kitten; describe('Suggesters', function() { before(function(done) { mongoose.connect(config.mongoUrl, function() { - Kitten.remove(function() { - config.deleteIndexIfExists(['kittens'], function() { + config.deleteIndexIfExists(['kittens'], function() { + KittenSchema = new Schema({ + name: {type: String, es_type: 'completion', es_index_analyzer: 'simple', es_search_analyzer: 'simple', es_indexed: true}, + breed: {type: String } + }); + KittenSchema.plugin(mongoosastic); Kitten = mongoose.model('Kitten', KittenSchema); - Kitten.createMapping(); - - var 
kittens = [ - new Kitten({ - name: 'Cookie', - breed: 'Aegean' - }), - new Kitten({ - name: 'Chipmunk', - breed: 'Aegean' - }), - new Kitten({ - name: 'Twix', - breed: 'Persian' - }), - new Kitten({ - name: 'Cookies and Cream', - breed: 'Persian' - }) - ]; - async.forEach(kittens, config.saveAndWaitIndex, function() { - setTimeout(done, config.INDEXING_TIMEOUT); + Kitten.createMapping({}, function(err, mapping) { + Kitten.remove(function() { + var kittens = [ + new Kitten({ + name: 'Cookie', + breed: 'Aegean' + }), + new Kitten({ + name: 'Chipmunk', + breed: 'Aegean' + }), + new Kitten({ + name: 'Twix', + breed: 'Persian' + }), + new Kitten({ + name: 'Cookies and Cream', + breed: 'Persian' + }) + ]; + async.forEach(kittens, config.saveAndWaitIndex, function() { + setTimeout(done, config.INDEXING_TIMEOUT); + }); + }); }); }); - }); }); }); after(function(done) { - Kitten.remove(); Kitten.esClient.close(); mongoose.disconnect(); + esClient.close(); done(); }); From 9999a5fa5f07064c1929031f892b747e39e04162 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 13 Sep 2015 11:48:07 +0200 Subject: [PATCH 136/152] updated README --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 26f98edc..7c5e8117 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,10 @@ # Mongoosastic [![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) -[![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) -[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![NPM version](https://img.shields.io/npm/v/mongoosastic.svg)](https://www.npmjs.com/package/mongoosastic) +[![Downloads](https://img.shields.io/npm/dm/mongoosastic.svg)](https://www.npmjs.com/package/mongoosastic) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) + +[![NPM](https://nodei.co/npm/mongoosastic.png)](https://nodei.co/npm/mongoosastic/) Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatically index your models into [elasticsearch](http://www.elasticsearch.org/). From 27f56cd509a511dcc63e4e48e10a39e1600f18ba Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 13 Sep 2015 11:48:11 +0200 Subject: [PATCH 137/152] 3.3.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8f86abef..d75b16c4 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.2.0", + "version": "3.3.0", "tags": [ "mongodb", "elasticsearch", From 9688c0c45176c31e6afb0ef3777c78714916b3ab Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 00:14:45 +0200 Subject: [PATCH 138/152] removed gulp dependency. 
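Putting the suggester support together: a field mapped with the completion type can be queried through the suggest option on search(). A rough sketch following the kitten test, assuming the mapping has been created and a few kittens have been indexed:

var mongoose = require('mongoose');
var mongoosastic = require('mongoosastic');
var Schema = mongoose.Schema;

var KittenSchema = new Schema({
  name: {
    type: String,
    es_type: 'completion',
    es_index_analyzer: 'simple',
    es_search_analyzer: 'simple',
    es_indexed: true
  },
  breed: { type: String }
});
KittenSchema.plugin(mongoosastic);
var Kitten = mongoose.model('Kitten', KittenSchema);

// Suggestions come back next to the hits, under res.suggest.<suggester name>.
Kitten.search({ match_all: {} }, {
  suggest: {
    kittensuggest: {
      text: 'Cook',
      completion: { field: 'name' }
    }
  }
}, function(err, res) {
  if (err) { return console.error(err); }
  res.suggest.kittensuggest[0].options.forEach(function(option) {
    console.log(option.text);
  });
});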
moved to ESLint instead of jshint/jscs --- .eslintrc | 15 + .jscsrc | 8 - .jshintrc | 17 - gulpfile.js | 30 -- lib/mapping-generator.js | 112 ++--- lib/mongoosastic.js | 610 +++++++++++++------------- lib/serialize.js | 44 +- package.json | 27 +- test/alternative-index-method-test.js | 6 +- test/boost-field-test.js | 4 +- test/bulk-test.js | 9 +- test/config.js | 48 +- test/connection-test.js | 67 +-- test/count-test.js | 14 +- test/filtering-test.js | 10 +- test/geo-test.js | 44 +- test/highlight-features-test.js | 25 +- test/index-test.js | 52 +-- test/search-features-test.js | 8 +- test/suggesters-test.js | 100 ++--- test/synchronize-test.js | 5 +- test/truncate-test.js | 6 +- 22 files changed, 627 insertions(+), 634 deletions(-) create mode 100644 .eslintrc delete mode 100644 .jscsrc delete mode 100644 .jshintrc delete mode 100644 gulpfile.js diff --git a/.eslintrc b/.eslintrc new file mode 100644 index 00000000..179bbe35 --- /dev/null +++ b/.eslintrc @@ -0,0 +1,15 @@ +{ + "env": { + "node": true, + "mocha": true + }, + "extends": "airbnb/base", + "rules": { + "func-names": 0, + "no-use-before-define": 1, + "one-var": 0, + "no-var": 0, + "comma-dangle": 0, + "padded-blocks": 0 + } +} diff --git a/.jscsrc b/.jscsrc deleted file mode 100644 index ba63066e..00000000 --- a/.jscsrc +++ /dev/null @@ -1,8 +0,0 @@ -{ - "preset": "airbnb", - "requireMultipleVarDecl": null, - "disallowMultipleVarDecl": null, - "requireTrailingComma": null, - "requireCamelCaseOrUpperCaseIdentifiers": "ignoreProperties", - "requirePaddingNewLinesAfterBlocks": null -} diff --git a/.jshintrc b/.jshintrc deleted file mode 100644 index 03d5208b..00000000 --- a/.jshintrc +++ /dev/null @@ -1,17 +0,0 @@ -{ - "browser": false, - "node": true, - "camelcase": false, - "eqeqeq": true, - "indent": 2, - "latedef": false, - "newcap": true, - "quotmark": "single", - "strict": false, - "undef": true, - "unused": "vars", - "eqnull": true, - "forin": false, - "mocha": true, - "esnext": true -} diff --git a/gulpfile.js b/gulpfile.js deleted file mode 100644 index 4b409218..00000000 --- a/gulpfile.js +++ /dev/null @@ -1,30 +0,0 @@ -var gulp = require('gulp'); -var mocha = require('gulp-mocha'); -var jshint = require('gulp-jshint'); -var jscs = require('gulp-jscs'); - -var SOURCE_FILES = ['*.js', './example/*.js', './lib/*.js', './test/**/*.js']; - -gulp.task('lint', function() { - return gulp.src(SOURCE_FILES) - .pipe(jshint('.jshintrc')) - .pipe(jshint.reporter('jshint-stylish')); -}); - -gulp.task('jscs', function() { - return gulp.src(SOURCE_FILES) - .pipe(jscs()); -}); - -gulp.task('mocha', function() { - return gulp.src(['test/*-test.js'], { read: false }) - .pipe(mocha({ - reporter: 'spec', - timeout: 600000, - globals: { - should: require('should') - } - })); -}); - -gulp.task('default', ['lint', 'jscs', 'mocha']); diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index e0d2bfdf..d634fbc3 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -1,14 +1,30 @@ -function Generator() { -} -Generator.prototype.generateMapping = function(schema, cb) { - var cleanTree = getCleanTree(schema.tree, schema.paths, ''); - delete cleanTree[schema.get('versionKey')]; - var mapping = getMapping(cleanTree, ''); - cb(null, {properties: mapping}); -}; +// +// Get type from the mongoose schema +// +// Returns the type, so in case none is set, it's the mongoose type. 
+// +// @param paths +// @param field +// @return the type or false +// +function getTypeFromPaths(paths, field) { + var type = false; -module.exports = Generator; + if (paths[field] && paths[field].options.type === Date) { + return 'date'; + } + + if (paths[field] && paths[field].options.type === Boolean) { + return 'boolean'; + } + + if (paths[field]) { + type = paths[field].instance ? paths[field].instance.toLowerCase() : 'object'; + } + + return type; +} // // Generates the mapping @@ -16,20 +32,20 @@ module.exports = Generator; // Can be called recursively. // // @param cleanTree -// @param prefix +// @param inPrefix // @return the mapping // -function getMapping(cleanTree, prefix) { +function getMapping(cleanTree, inPrefix) { var mapping = {}, - value, + value, field, prop, implicitFields = [], - hasEsIndex = false; - - if (prefix !== '') { - prefix = prefix + '.'; - } + hasEsIndex = false, + prefix = inPrefix !== '' ? inPrefix + '.' : inPrefix; - for (var field in cleanTree) { + for (field in cleanTree) { + if (!cleanTree.hasOwnProperty(field)) { + continue; + } value = cleanTree[field]; mapping[field] = {}; mapping[field].type = value.type; @@ -54,25 +70,25 @@ function getMapping(cleanTree, prefix) { mapping[field].type = 'string'; } - //If indexing a number, and no es_type specified, default to double + // If indexing a number, and no es_type specified, default to double if (value.type === 'number' && value.es_type === undefined) { mapping[field].type = 'double'; continue; } // Else, it has a type and we want to map that! - for (var prop in value) { + for (prop in value) { // Map to field if it's an Elasticsearch option - if (prop.indexOf('es_') === 0 && prop !== 'es_indexed') { + if (value.hasOwnProperty(prop) && prop.indexOf('es_') === 0 && prop !== 'es_indexed') { mapping[field][prop.replace(/^es_/, '')] = value[prop]; } } } - //If one of the fields was explicitly indexed, delete all implicit fields + // If one of the fields was explicitly indexed, delete all implicit fields if (hasEsIndex) { - implicitFields.forEach(function(field) { - delete mapping[field]; + implicitFields.forEach(function checkImplicit(implicitField) { + delete mapping[implicitField]; }); } @@ -89,17 +105,17 @@ function getMapping(cleanTree, prefix) { // @param prefix // @return the tree // -function getCleanTree(tree, paths, prefix) { +function getCleanTree(tree, paths, inPrefix) { var cleanTree = {}, type = '', - value = {}; + value = {}, + field, + key, + geoFound = false, + prefix = inPrefix !== '' ? inPrefix + '.' : inPrefix; - if (prefix !== '') { - prefix = prefix + '.'; - } - - for (var field in tree) { + for (field in tree) { if (prefix === '' && (field === 'id' || field === '_id')) { continue; } @@ -148,8 +164,6 @@ function getCleanTree(tree, paths, prefix) { } else { // Because it is an geo_* object!! if (typeof value === 'object') { - var key; - var geoFound = false; for (key in value) { if (value.hasOwnProperty(key) && /^geo_/.test(key)) { cleanTree[field] = value[key]; @@ -177,29 +191,15 @@ function getCleanTree(tree, paths, prefix) { return cleanTree; } -// -// Get type from the mongoose schema -// -// Returns the type, so in case none is set, it's the mongoose type. 
-// -// @param paths -// @param field -// @return the type or false -// -function getTypeFromPaths(paths, field) { - var type = false; - - if (paths[field] && paths[field].options.type === Date) { - return 'date'; - } +function Generator() { +} - if (paths[field] && paths[field].options.type === Boolean) { - return 'boolean'; - } +Generator.prototype.generateMapping = function generateMapping(schema, cb) { + var cleanTree = getCleanTree(schema.tree, schema.paths, ''), mapping; + delete cleanTree[schema.get('versionKey')]; + mapping = getMapping(cleanTree, ''); + cb(null, {properties: mapping}); +}; - if (paths[field]) { - type = paths[field].instance ? paths[field].instance.toLowerCase() : 'object'; - } +module.exports = Generator; - return type; -} diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 147945c9..36290bab 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -3,11 +3,152 @@ var elasticsearch = require('elasticsearch'), generator = new Generator(), serialize = require('./serialize'), events = require('events'), - nop = require('nop'), - util = require('util'); + util = require('util'), + nop = function nop() {}; -module.exports = function Mongoosastic(schema, options) { - options = options || {}; +function isString(subject) { + return typeof subject === 'string'; +} + +function isStringArray(arr) { + return arr.filter && arr.length === (arr.filter(function check(item) { return (typeof item === 'string'); })).length; +} + +function getMapping(schema) { + var retMapping = {}; + generator.generateMapping(schema, function mappingCb(err, mapping) { + retMapping = mapping; + }); + + return retMapping; +} + +function createEsClient(options) { + + var esOptions = {}; + + if (util.isArray(options.hosts)) { + esOptions.host = options.hosts; + } else { + esOptions.host = { + host: options && options.host ? options.host : 'localhost', + port: options && options.port ? options.port : 9200, + protocol: options && options.protocol ? options.protocol : 'http', + auth: options && options.auth ? options.auth : null, + keepAlive: false + }; + } + + esOptions.log = (options ? 
options.log : null); + + return new elasticsearch.Client(esOptions); +} + +function createMappingIfNotPresent(options, cb) { + var client = options.client, + indexName = options.indexName, + typeName = options.typeName, + schema = options.schema, + settings = options.settings; + + generator.generateMapping(schema, function mapper(ignoredErr, mapping) { + var completeMapping = {}; + completeMapping[typeName] = mapping; + client.indices.exists({ index: indexName }, function existsCb(err, exists) { + if (err) { + return cb(err); + } + + if (exists) { + return client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb); + + } + return client.indices.create({ index: indexName, body: settings }, function indexCb(indexErr) { + if (indexErr) { + return cb(indexErr); + } + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb); + }); + }); + }); +} + +function hydrate(res, model, options, cb) { + var results = res.hits, + resultsMap = {}, + ids = results.hits.map(function mapHits(result, idx) { + resultsMap[result._id] = idx; + return result._id; + }), + + query = model.find({_id: {$in: ids}}), + hydrateOptions = options.hydrateOptions; + + // Build Mongoose query based on hydrate options + // Example: {lean: true, sort: '-name', select: 'address name'} + Object.keys(hydrateOptions).forEach(function getOpts(option) { + query[option](hydrateOptions[option]); + }); + + query.exec(function queryCb(err, docs) { + var hits = []; + if (err) { + return cb(err); + } + + docs.forEach(function highlight(doc) { + var idx = resultsMap[doc._id]; + if (options.highlight) { + doc._highlight = results.hits[idx].highlight; + } + + hits[idx] = doc; + }); + + results.hits = hits; + res.hits = results; + cb(null, res); + }); +} + +function deleteByMongoId(options, cb) { + var index = options.index, + type = options.type, + client = options.client, + model = options.model, + tries = options.tries; + + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function deleteCb(err, res) { + if (err && err.message.indexOf('404') > -1) { + setTimeout(function delayedDelete() { + if (tries <= 0) { + return cb(err); + } + options.tries = --tries; + deleteByMongoId(options, cb); + }, 500); + } else { + model.emit('es-removed', err, res); + cb(err); + } + }); +} + +function Mongoosastic(schema, pluginOpts) { + var options = pluginOpts || {}; var bulkTimeout, bulkBuffer = [], esClient, mapping = getMapping(schema), @@ -24,7 +165,64 @@ module.exports = function Mongoosastic(schema, options) { esClient = createEsClient(options); } - setUpMiddlewareHooks(schema); + function setIndexNameIfUnset(model) { + var modelName = model.toLowerCase(); + if (!indexName) { + indexName = modelName + 's'; + } + + if (!typeName) { + typeName = modelName; + } + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout); + bulkTimeout = undefined; + } + + function bulkAdd(instruction) { + bulkBuffer.push(instruction); + + // Return because we need the doc being indexed + // Before we start inserting + if (instruction.index && instruction.index._index) { + return; + } + + if (bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush(); + clearBulkTimeout(); + } else if (bulkTimeout === undefined) { + bulkTimeout = setTimeout(function delayedBulkAdd() { + schema.statics.flush(); + clearBulkTimeout(); + }, bulk.delay || 1000); + } + } + + function bulkDelete(opts, cb) { + bulkAdd({ + delete: { + _index: opts.index || indexName, + 
_type: opts.type || typeName, + _id: opts.model._id.toString() + } + }); + cb(); + } + + function bulkIndex(opts) { + bulkAdd({ + index: { + _index: opts.index || indexName, + _type: opts.type || typeName, + _id: opts.model._id.toString() + } + }); + bulkAdd(opts.model); + } + /** * ElasticSearch Client @@ -38,9 +236,10 @@ module.exports = function Mongoosastic(schema, options) { * @param settings Object (optional) * @param cb Function */ - schema.statics.createMapping = function(settings, cb) { + schema.statics.createMapping = function createMapping(inSettings, inCb) { + var cb = inCb, settings = inSettings; if (arguments.length < 2) { - cb = arguments[0] || nop; + cb = inSettings || nop; settings = undefined; } @@ -59,9 +258,12 @@ module.exports = function Mongoosastic(schema, options) { * @param options Object (optional) * @param cb Function */ - schema.methods.index = function(options, cb) { + schema.methods.index = function schemaIndex(inOpts, inCb) { + var index, type, serialModel, + cb = inCb, opts = inOpts; + if (arguments.length < 2) { - cb = arguments[0] || nop; + cb = inOpts || nop; options = {}; } @@ -71,14 +273,14 @@ module.exports = function Mongoosastic(schema, options) { setIndexNameIfUnset(this.constructor.modelName); - var index = options.index || indexName, - type = options.type || typeName; + index = opts.index || indexName; + type = opts.type || typeName; if (bulk) { /** * To serialize in bulk it needs the _id */ - var serialModel = serialize(this, mapping); + serialModel = serialize(this, mapping); serialModel._id = this._id; bulkIndex({ @@ -102,24 +304,27 @@ module.exports = function Mongoosastic(schema, options) { * @param options - (optional) options for unIndex * @param cb - callback when unIndex is complete */ - schema.methods.unIndex = function(options, cb) { + schema.methods.unIndex = function unIndex(inOpts, inCb) { + var opts = inOpts, cb = inCb; + if (arguments.length < 2) { - cb = arguments[0] || nop; - options = {}; + cb = inOpts || nop; + opts = {}; } setIndexNameIfUnset(this.constructor.modelName); - options.index = options.index || indexName; - options.type = options.type || typeName; - options.model = this; - options.client = esClient; - options.tries = 3; + opts.index = opts.index || indexName; + opts.type = opts.type || typeName; + opts.model = this; + opts.client = esClient; + opts.tries = 3; - if (bulk) - bulkDelete(options, cb); - else - deleteByMongoId(options, cb); + if (bulk) { + bulkDelete(opts, cb); + } else { + deleteByMongoId(opts, cb); + } }; /** @@ -127,16 +332,19 @@ module.exports = function Mongoosastic(schema, options) { * @param options - (optional) specify index/type * @param cb - callback when truncation is complete */ - schema.statics.esTruncate = function(options, cb) { + schema.statics.esTruncate = function esTruncate(inOpts, inCb) { + var index, type, + opts = inOpts, cb = inCb; + if (arguments.length < 2) { - cb = arguments[0] || nop; - options = {}; + cb = inOpts || nop; + opts = {}; } setIndexNameIfUnset(this.modelName); - var index = options.index || indexName, - type = options.type || typeName; + index = opts.index || indexName; + type = opts.type || typeName; esClient.deleteByQuery({ index: index, @@ -154,51 +362,52 @@ module.exports = function Mongoosastic(schema, options) { * * @param query - query for documents you want to synchronize */ - schema.statics.synchronize = function(query) { + schema.statics.synchronize = function synchronize(inQuery) { var em = new events.EventEmitter(), closeValues = [], counter = 0, - 
close = function() { + stream, + query = inQuery || {}, + close = function close() { em.emit.apply(em, ['close'].concat(closeValues)); }; - //Set indexing to be bulk when synchronizing to make synchronizing faster - //Set default values when not present + // Set indexing to be bulk when synchronizing to make synchronizing faster + // Set default values when not present bulk = bulk || {}; bulk.delay = bulk.delay || 1000; bulk.size = bulk.size || 1000; bulk.batch = bulk.batch || 50; - query = query || {}; - setIndexNameIfUnset(this.modelName); - var stream = this.find(query).batchSize(bulk.batch).stream(); + stream = this.find(query).batchSize(bulk.batch).stream(); - stream.on('data', function(doc) { + stream.on('data', function onData(doc) { stream.pause(); counter++; - doc.save(function(err) { + doc.save(function onSave(err) { if (err) { em.emit('error', err); return stream.resume(); } - doc.on('es-indexed', function(err, doc) { + doc.on('es-indexed', function onIndex(indexErr, inDoc) { counter--; - if (err) { - em.emit('error', err); + if (indexErr) { + em.emit('error', indexErr); } else { - em.emit('data', null, doc); + em.emit('data', null, inDoc); } stream.resume(); }); }); }); - stream.on('close', function(a, b) { - closeValues = [a, b]; - var closeInterval = setInterval(function() { + stream.on('close', function onClose(pA, pB) { + var closeInterval; + closeValues = [pA, pB]; + closeInterval = setInterval(function checkInterval() { if (counter === 0 && bulkBuffer.length === 0) { clearInterval(closeInterval); close(); @@ -207,12 +416,13 @@ module.exports = function Mongoosastic(schema, options) { }, 1000); }); - stream.on('error', function(err) { + stream.on('error', function onError(err) { em.emit('error', err); }); return em; }; + /** * ElasticSearch search function * @@ -220,135 +430,90 @@ module.exports = function Mongoosastic(schema, options) { * @param options - (optional) special search options, such as hydrate * @param cb - callback called with search results */ - schema.statics.search = function(query, options, cb) { + schema.statics.search = function search(inQuery, inOpts, inCb) { + var _this = this, + cb = inCb, + opts = inOpts, + esQuery, + query = inQuery === null ? 
undefined : inQuery; + if (arguments.length === 2) { cb = arguments[1]; - options = {}; + opts = {}; } - options.hydrateOptions = options.hydrateOptions || defaultHydrateOptions || {}; - - if (query === null) - query = undefined; + opts.hydrateOptions = opts.hydrateOptions || defaultHydrateOptions || {}; setIndexNameIfUnset(this.modelName); - var _this = this, - esQuery = { - body: { - query: query - }, - index: options.index || indexName, - type: options.type || typeName - }; - if (options.highlight) { - esQuery.body.highlight = options.highlight; + esQuery = { + body: { + query: query + }, + index: opts.index || indexName, + type: opts.type || typeName + }; + if (opts.highlight) { + esQuery.body.highlight = opts.highlight; } - if (options.suggest) { - esQuery.body.suggest = options.suggest; + if (opts.suggest) { + esQuery.body.suggest = opts.suggest; } - Object.keys(options).forEach(function(opt) { - if (!opt.match(/(hydrate|sort)/) && options.hasOwnProperty(opt)) { - esQuery[opt] = options[opt]; + Object.keys(opts).forEach(function collectKeys(opt) { + if (!opt.match(/(hydrate|sort)/) && opts.hasOwnProperty(opt)) { + esQuery[opt] = opts[opt]; } - if (options.sort) { - if (isString(options.sort) || isStringArray(options.sort)) { - esQuery.sort = options.sort; + if (opts.sort) { + if (isString(opts.sort) || isStringArray(opts.sort)) { + esQuery.sort = opts.sort; } else { - esQuery.body.sort = options.sort; + esQuery.body.sort = opts.sort; } } }); - esClient.search(esQuery, function(err, res) { + esClient.search(esQuery, function searchCb(err, res) { if (err) { return cb(err); } - if (alwaysHydrate || options.hydrate) { - hydrate(res, _this, options, cb); + if (alwaysHydrate || opts.hydrate) { + hydrate(res, _this, opts, cb); } else { cb(null, res); } }); }; - schema.statics.esCount = function(query, cb) { + schema.statics.esCount = function esCount(inQuery, inCb) { + var cb = inCb, query = inQuery, esQuery; + setIndexNameIfUnset(this.modelName); - if (cb == null && typeof query === 'function') { + if (!cb && typeof query === 'function') { cb = query; query = null; } - var esQuery = { + esQuery = { body: { query: query }, - index: options.index || indexName, - type: options.type || typeName + index: indexName, + type: typeName }; esClient.count(esQuery, cb); }; - function bulkDelete(options, cb) { - bulkAdd({ - delete: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }); - cb(); - } - - function bulkIndex(options) { - bulkAdd({ - index: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }); - bulkAdd(options.model); - } - - function clearBulkTimeout() { - clearTimeout(bulkTimeout); - bulkTimeout = undefined; - } - - function bulkAdd(instruction) { - bulkBuffer.push(instruction); - - //Return because we need the doc being indexed - //Before we start inserting - if (instruction.index && instruction.index._index) - return; - - if (bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush(); - clearBulkTimeout(); - } else if (bulkTimeout === undefined) { - bulkTimeout = setTimeout(function() { - schema.statics.flush(); - clearBulkTimeout(); - }, bulk.delay || 1000); - } - } - schema.statics.flush = function(cb) { - cb = cb || function(err) { - if (err) { - console.log(err); - } - }; + schema.statics.flush = function flush(inCb) { + var cb = inCb || nop; esClient.bulk({ body: bulkBuffer @@ -357,33 +522,23 @@ module.exports = function Mongoosastic(schema, 
options) { bulkBuffer = []; }; - schema.statics.refresh = function(options, cb) { + schema.statics.refresh = function refresh(inOpts, inCb) { + var cb = inCb, + opts = inOpts; if (arguments.length < 2) { - cb = arguments[0] || nop; - options = {}; + cb = inOpts || nop; + opts = {}; } setIndexNameIfUnset(this.modelName); esClient.indices.refresh({ - index: options.index || indexName + index: opts.index || indexName }, cb); }; - function setIndexNameIfUnset(model) { - var modelName = model.toLowerCase(); - if (!indexName) { - indexName = modelName + 's'; - } - - if (!typeName) { - typeName = modelName; - } - } function postRemove(doc) { - setIndexNameIfUnset(doc.constructor.modelName); - - var options = { + var opts = { index: indexName, type: typeName, tries: 3, @@ -391,16 +546,18 @@ module.exports = function Mongoosastic(schema, options) { client: esClient }; + setIndexNameIfUnset(doc.constructor.modelName); + if (bulk) { - bulkDelete(options, nop); + bulkDelete(opts, nop); } else { - deleteByMongoId(options, nop); + deleteByMongoId(opts, nop); } } function postSave(doc) { if (doc) { - doc.index(function(err, res) { + doc.index(function onIndex(err, res) { if (!filter || !filter(doc)) { doc.emit('es-indexed', err, res); } @@ -412,161 +569,22 @@ module.exports = function Mongoosastic(schema, options) { * Use standard Mongoose Middleware hooks * to persist to Elasticsearch */ - function setUpMiddlewareHooks(schema) { + function setUpMiddlewareHooks(inSchema) { /** * Remove in elasticsearch on remove */ - schema.post('remove', postRemove); - schema.post('findOneAndRemove', postRemove); + inSchema.post('remove', postRemove); + inSchema.post('findOneAndRemove', postRemove); /** * Save in elasticsearch on save. */ - schema.post('save', postSave); - schema.post('findOneAndUpdate', postSave); + inSchema.post('save', postSave); + inSchema.post('findOneAndUpdate', postSave); } -}; - -function createEsClient(options) { - - var esOptions = {}; - - if (util.isArray(options.hosts)) { - esOptions.host = options.hosts; - } else { - esOptions.host = { - host: options && options.host ? options.host : 'localhost', - port: options && options.port ? options.port : 9200, - protocol: options && options.protocol ? options.protocol : 'http', - auth: options && options.auth ? options.auth : null, - keepAlive: false - }; - } - - esOptions.log = (options ? 
options.log : null); - - return new elasticsearch.Client(esOptions); -} - -function createMappingIfNotPresent(options, cb) { - var client = options.client, - indexName = options.indexName, - typeName = options.typeName, - schema = options.schema, - settings = options.settings; - - generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {}; - completeMapping[typeName] = mapping; - client.indices.exists({index: indexName}, function(err, exists) { - if (err) { - return cb(err); - } - - if (exists) { - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb); - - } else { - client.indices.create({index: indexName, body: settings}, function(err) { - if (err) - return cb(err); - - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb); - }); - } - }); - }); -} - -function hydrate(res, model, options, cb) { - var results = res.hits, - resultsMap = {}, - ids = results.hits.map(function(a, i) { - resultsMap[a._id] = i; - return a._id; - }), - - query = model.find({_id: {$in: ids}}), - hydrateOptions = options.hydrateOptions; - - // Build Mongoose query based on hydrate options - // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(hydrateOptions).forEach(function(option) { - query[option](hydrateOptions[option]); - }); - - query.exec(function(err, docs) { - if (err) { - return cb(err); - } else { - var hits = []; - - docs.forEach(function(doc) { - var i = resultsMap[doc._id]; - if (options.highlight) { - doc._highlight = results.hits[i].highlight; - } - - hits[i] = doc; - }); - - results.hits = hits; - res.hits = results; - cb(null, res); - } - }); -} - -function getMapping(schema) { - var retMapping = {}; - generator.generateMapping(schema, function(err, mapping) { - retMapping = mapping; - }); - - return retMapping; -} - -function deleteByMongoId(options, cb) { - var index = options.index, - type = options.type, - client = options.client, - model = options.model, - tries = options.tries; - - client.delete({ - index: index, - type: type, - id: model._id.toString() - }, function(err, res) { - if (err && err.message.indexOf('404') > -1) { - setTimeout(function() { - if (tries <= 0) { - return cb(err); - } else { - options.tries = --tries; - deleteByMongoId(options, cb); - } - }, 500); - } else { - model.emit('es-removed', err, res); - cb(err); - } - }); -} + setUpMiddlewareHooks(schema); -function isString(subject) { - return typeof subject === 'string'; } -function isStringArray(arr) { - return arr.filter && arr.length === (arr.filter(function(x) { return (typeof x === 'string'); })).length; -} +module.exports = Mongoosastic; diff --git a/lib/serialize.js b/lib/serialize.js index 9a2bca57..c5014905 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -1,24 +1,23 @@ -module.exports = serialize; - -function _serializeObject(object, mapping) { - var serialized = {}; - for (var field in mapping.properties) { - var val = serialize.call(object, object[field], mapping.properties[field]); - if (val !== undefined) { - serialized[field] = val; +module.exports = function serialize(model, mapping) { + var name, outModel; + + function _serializeObject(object, mappingData) { + var serialized = {}, field, val; + for (field in mappingData.properties) { + if (mappingData.properties.hasOwnProperty(field)) { + val = serialize.call(object, object[field], mappingData.properties[field]); + if (val !== undefined) { + serialized[field] = val; + } + } } + return serialized; } - return 
serialized; -} - -function serialize(model, mapping) { - var name; - if (mapping.properties && model) { if (Array.isArray(model)) { - return model.map(function(object) { + return model.map(function mapModel(object) { return _serializeObject(object, mapping); }); } @@ -31,19 +30,20 @@ function serialize(model, mapping) { throw new Error('es_cast must be a function'); } - model = mapping.cast ? mapping.cast.call(this, model) : model; - if (typeof model === 'object' && model !== null) { - name = model.constructor.name; + outModel = mapping.cast ? mapping.cast.call(this, model) : model; + if (typeof outModel === 'object' && outModel !== null) { + name = outModel.constructor.name; if (name === 'ObjectID') { - return model.toString(); + return outModel.toString(); } if (name === 'Date') { - return new Date(model).toJSON(); + return new Date(outModel).toJSON(); } } - return model; + return outModel; + +}; -} diff --git a/package.json b/package.json index d75b16c4..1cc141e7 100644 --- a/package.json +++ b/package.json @@ -16,31 +16,24 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elasticsearch": "^8.0.1", - "mongoose": "^4.1.5", - "nop": "^1.0.0" + "elasticsearch": "^8.2.0", + "mongoose": "^4.1.8" }, "devDependencies": { "async": "^1.4.2", - "gulp": "^3.9.0", - "gulp-jscs": "^2.0.0", - "gulp-jshint": "^1.11.2", - "gulp-mocha": "^2.1.3", - "jscs": "^2.1.1", - "jshint-stylish": "^2.0.1", - "mocha": "^2.3.1", + "babel-eslint": "^4.1.3", + "eslint": "^1.5.1", + "eslint-config-airbnb": "0.0.9", + "mocha": "^2.3.3", "should": "^7.1.0" }, "engines": { "node": ">= 0.10.0" }, "scripts": { - "test": "gulp" + "lint": "eslint lib", + "mocha": "mocha test/*-test.js -R spec -t 600000", + "test": "npm run lint && npm run mocha" }, - "licence": [ - { - "name": "MIT", - "url": "http://opensource.org/licenses/MIT" - } - ] + "license": "MIT" } diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index dc285d25..6f18c496 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -29,7 +29,7 @@ describe('Index Method', function() { doc.message = 'I know nodejitsu!'; doc.index(function() { setTimeout(function() { - Tweet.search({query_string: {query: 'know'}}, function(err, res) { + Tweet.search({query_string: {query: 'know'}}, function(err1, res) { res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); @@ -43,7 +43,7 @@ describe('Index Method', function() { doc.message = 'I know taebo!'; doc.index({index: 'public_tweets'}, function() { setTimeout(function() { - Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err, res) { + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err1, res) { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); @@ -60,7 +60,7 @@ describe('Index Method', function() { Tweet.search({query_string: {query: 'know'}}, { index: 'public_tweets', type: 'utterings' - }, function(err, res) { + }, function(err1, res) { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 4bf5430c..fe8a9c8b 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -6,6 +6,7 @@ var mongoose = require('mongoose'), }), config = require('./config'), Schema = mongoose.Schema, + BlogPost, mongoosastic = require('../lib/mongoosastic'); var TweetSchema = new Schema({ @@ -15,9 +16,10 @@ var TweetSchema = new Schema({ title: {type: 
String, es_boost: 2.0} }); + TweetSchema.plugin(mongoosastic); -var BlogPost = mongoose.model('BlogPost', TweetSchema); +BlogPost = mongoose.model('BlogPost', TweetSchema); describe('Add Boost Option Per Field', function() { before(function(done) { diff --git a/test/bulk-test.js b/test/bulk-test.js index 36a1b077..73e5d71c 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -2,11 +2,14 @@ var mongoose = require('mongoose'), async = require('async'), config = require('./config'), Schema = mongoose.Schema, + Book, mongoosastic = require('../lib/mongoosastic'); var BookSchema = new Schema({ title: String }); + + BookSchema.plugin(mongoosastic, { bulk: { size: 100, @@ -14,17 +17,15 @@ BookSchema.plugin(mongoosastic, { } }); -var Book = mongoose.model('Book2', BookSchema); +Book = mongoose.model('Book2', BookSchema); describe('Bulk mode', function() { - var books = null; before(function(done) { config.deleteIndexIfExists(['book2s'], function() { mongoose.connect(config.mongoUrl, function() { var client = mongoose.connections[0].db; - client.collection('book2s', function(err, _books) { - books = _books; + client.collection('book2s', function() { Book.remove(done); }); }); diff --git a/test/config.js b/test/config.js index 6bb488f6..ee675553 100644 --- a/test/config.js +++ b/test/config.js @@ -9,22 +9,6 @@ var elasticsearch = require('elasticsearch'), const INDEXING_TIMEOUT = process.env.INDEXING_TIMEOUT || 2000; const BULK_ACTION_TIMEOUT = process.env.BULK_ACTION_TIMEOUT || 4000; -module.exports = { - mongoUrl: 'mongodb://localhost/es-test', - INDEXING_TIMEOUT: INDEXING_TIMEOUT, - BULK_ACTION_TIMEOUT: BULK_ACTION_TIMEOUT, - deleteIndexIfExists: deleteIndexIfExists, - createModelAndEnsureIndex: createModelAndEnsureIndex, - createModelAndSave: createModelAndSave, - saveAndWaitIndex: saveAndWaitIndex, - bookTitlesArray: bookTitlesArray, - getClient: function() { - return esClient; - }, - close: function() { - esClient.close(); - } -}; function deleteIndexIfExists(indexes, done) { async.forEach(indexes, function(index, cb) { @@ -45,7 +29,7 @@ function deleteIndexIfExists(indexes, done) { function createModelAndEnsureIndex(Model, obj, cb) { var dude = new Model(obj); dude.save(function() { - dude.on('es-indexed', function(err, res) { + dude.on('es-indexed', function() { setTimeout(cb, INDEXING_TIMEOUT); }); }); @@ -65,12 +49,30 @@ function saveAndWaitIndex(model, cb) { function bookTitlesArray() { var books = [ - 'American Gods', - 'Gods of the Old World', - 'American Gothic' - ]; - for (var i = 0; i < 50; i++) { - books.push('ABABABA' + i); + 'American Gods', + 'Gods of the Old World', + 'American Gothic' + ], idx; + for (idx = 0; idx < 50; idx++) { + books.push('ABABABA' + idx); } return books; } + +module.exports = { + mongoUrl: 'mongodb://localhost/es-test', + INDEXING_TIMEOUT: INDEXING_TIMEOUT, + BULK_ACTION_TIMEOUT: BULK_ACTION_TIMEOUT, + deleteIndexIfExists: deleteIndexIfExists, + createModelAndEnsureIndex: createModelAndEnsureIndex, + createModelAndSave: createModelAndSave, + saveAndWaitIndex: saveAndWaitIndex, + bookTitlesArray: bookTitlesArray, + getClient: function() { + return esClient; + }, + close: function() { + esClient.close(); + } +}; + diff --git a/test/connection-test.js b/test/connection-test.js index e9ef563b..a2efa74a 100644 --- a/test/connection-test.js +++ b/test/connection-test.js @@ -8,8 +8,30 @@ var mongoose = require('mongoose'), var DummySchema = new Schema({ text: String }); + var Dummy = mongoose.model('Dummy1', DummySchema, 'dummys'); +function 
tryDummySearch(model, cb) { + setTimeout(function() { + model.search({ + simple_query_string: { + query: 'Text1' + } + }, { + index: '_all' + }, function(err, results) { + if (err) { + return cb(err); + } + + results.hits.total.should.eql(0); + model.esClient.close(); + cb(err); + }); + }, config.INDEXING_TIMEOUT); + +} + describe('Elasticsearch Connection', function() { before(function(done) { @@ -42,28 +64,31 @@ describe('Elasticsearch Connection', function() { }); it('should be able to connect with default options', function(done) { + var Dummy2; DummySchema.plugin(mongoosastic); - var Dummy = mongoose.model('Dummy2', DummySchema, 'dummys'); + Dummy2= mongoose.model('Dummy2', DummySchema, 'dummys'); - tryDummySearch(Dummy, done); + tryDummySearch(Dummy2, done); }); it('should be able to connect with explicit options', function(done) { + var Dummy3; DummySchema.plugin(mongoosastic, { host: 'localhost', port: 9200 }); - var Dummy = mongoose.model('Dummy3', DummySchema, 'dummys'); + Dummy3 = mongoose.model('Dummy3', DummySchema, 'dummys'); - tryDummySearch(Dummy, done); + tryDummySearch(Dummy3, done); }); it('should be able to connect with an array of hosts', function(done) { + var Dummy4; DummySchema.plugin(mongoosastic, { hosts: [ @@ -71,9 +96,10 @@ describe('Elasticsearch Connection', function() { 'localhost:9200' ] }); - var Dummy = mongoose.model('Dummy4', DummySchema, 'dummys'); - tryDummySearch(Dummy, done); + Dummy4 = mongoose.model('Dummy4', DummySchema, 'dummys'); + + tryDummySearch(Dummy4, done); }); @@ -84,6 +110,8 @@ describe('Elasticsearch Connection', function() { esClient.ping({ requestTimeout: 1000 }, function(err) { + var Dummy5; + if (err) { return done(err); } @@ -91,34 +119,13 @@ describe('Elasticsearch Connection', function() { DummySchema.plugin(mongoosastic, { esClient: esClient }); - var Dummy = mongoose.model('Dummy5', DummySchema, 'dummys'); - tryDummySearch(Dummy, done); + Dummy5 = mongoose.model('Dummy5', DummySchema, 'dummys'); + + tryDummySearch(Dummy5, done); }); }); }); -function tryDummySearch(model, cb) { - setTimeout(function() { - model.search({ - simple_query_string: { - query: 'Text1' - } - }, - { - index: '_all' - }, - function(err, results) { - if (err) { - return cb(err); - } - - results.hits.total.should.eql(0); - model.esClient.close(); - cb(err); - }); - }, config.INDEXING_TIMEOUT); - -} diff --git a/test/count-test.js b/test/count-test.js index 015e30ce..4ca983ff 100644 --- a/test/count-test.js +++ b/test/count-test.js @@ -2,15 +2,17 @@ var mongoose = require('mongoose'), async = require('async'), config = require('./config'), Schema = mongoose.Schema, + Comment, mongoosastic = require('../lib/mongoosastic'); var CommentSchema = new Schema({ user: String, - post_date: {type:Date, es_type:'date'}, - message: {type:String}, - title: {type:String, es_boost:2.0} + post_date: {type: Date, es_type: 'date'}, + message: {type: String}, + title: {type: String, es_boost: 2.0} }); + CommentSchema.plugin(mongoosastic, { bulk: { size: 2, @@ -18,7 +20,7 @@ CommentSchema.plugin(mongoosastic, { } }); -var Comment = mongoose.model('Comment', CommentSchema); +Comment = mongoose.model('Comment', CommentSchema); describe('Count', function() { before(function(done) { @@ -38,8 +40,8 @@ describe('Count', function() { async.forEach(comments, function(item, cb) { item.save(cb); }, function() { - setTimeout(done, config.INDEXING_TIMEOUT); - }); + setTimeout(done, config.INDEXING_TIMEOUT); + }); }); }); }); diff --git a/test/filtering-test.js b/test/filtering-test.js 
index 6bc9a016..ffd9f1e9 100644 --- a/test/filtering-test.js +++ b/test/filtering-test.js @@ -1,32 +1,32 @@ var mongoose = require('mongoose'), config = require('./config'), Schema = mongoose.Schema, + Movie, mongoosastic = require('../lib/mongoosastic'); // -- Only index specific field var MovieSchema = new Schema({ title: {type: String, required: true, default: '', es_indexed: true}, - genre: {type: String, required:true, default: '', enum: ['horror', 'action', 'adventure', 'other'], es_indexed: true} + genre: {type: String, required: true, default: '', enum: ['horror', 'action', 'adventure', 'other'], es_indexed: true} }); + MovieSchema.plugin(mongoosastic, { filter: function(self) { return self.genre === 'action'; } }); -var Movie = mongoose.model('Movie', MovieSchema); +Movie = mongoose.model('Movie', MovieSchema); describe('Filter mode', function() { - var movies = null; this.timeout(5000); before(function(done) { config.deleteIndexIfExists(['movies'], function() { mongoose.connect(config.mongoUrl, function() { var client = mongoose.connections[0].db; - client.collection('movies', function(err, _movies) { - movies = _movies; + client.collection('movies', function() { Movie.remove(done); }); }); diff --git a/test/geo-test.js b/test/geo-test.js index f5c9ff1c..1cbe380a 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -30,7 +30,7 @@ describe('GeoTest', function() { GeoSchema.plugin(mongoosastic); GeoModel = mongoose.model('geodoc', GeoSchema); - GeoModel.createMapping(function(err, mapping) { + GeoModel.createMapping(function() { GeoModel.remove(function() { esClient.indices.getMapping({ @@ -80,14 +80,14 @@ describe('GeoTest', function() { throw err; } - config.saveAndWaitIndex(geo2, function(err) { - if (err) { - throw err; + config.saveAndWaitIndex(geo2, function(err2) { + if (err2) { + throw err2; } // Mongodb request - GeoModel.find({}, function(err, res) { - if (err) throw err; + GeoModel.find({}, function(err3, res) { + if (err3) throw err3; res.length.should.eql(2); res[0].frame.type.should.eql('envelope'); res[0].frame.coordinates[0].should.eql([1, 4]); @@ -116,11 +116,11 @@ describe('GeoTest', function() { it('should be able to resync geo coordinates from the database', function(done) { config.deleteIndexIfExists(['geodocs'], function() { - GeoModel.createMapping(function(err, mapping) { + GeoModel.createMapping(function() { var stream = GeoModel.synchronize(), count = 0; - stream.on('data', function(err, doc) { + stream.on('data', function() { count++; }); @@ -163,25 +163,25 @@ describe('GeoTest', function() { }; setTimeout(function() { - GeoModel.search(geoQuery, function(err, res) { - if (err) throw err; - res.hits.total.should.eql(1); - res.hits.hits[0]._source.myId.should.eql(2); + GeoModel.search(geoQuery, function(err1, res1) { + if (err1) throw err1; + res1.hits.total.should.eql(1); + res1.hits.hits[0]._source.myId.should.eql(2); geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [1.5, 2.5]; - GeoModel.search(geoQuery, function(err, res) { - if (err) throw err; - res.hits.total.should.eql(1); - res.hits.hits[0]._source.myId.should.eql(1); + GeoModel.search(geoQuery, function(err2, res2) { + if (err2) throw err2; + res2.hits.total.should.eql(1); + res2.hits.hits[0]._source.myId.should.eql(1); geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [3, 2]; - GeoModel.search(geoQuery, function(err, res) { - if (err) throw err; - res.hits.total.should.eql(2); + GeoModel.search(geoQuery, function(err3, res3) { + if (err3) throw err3; + 
res3.hits.total.should.eql(2); geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [0, 3]; - GeoModel.search(geoQuery, function(err, res) { - if (err) throw err; - res.hits.total.should.eql(0); + GeoModel.search(geoQuery, function(err4, res4) { + if (err4) throw err4; + res4.hits.total.should.eql(0); done(); }); }); diff --git a/test/highlight-features-test.js b/test/highlight-features-test.js index b7c48aa3..3ca16963 100644 --- a/test/highlight-features-test.js +++ b/test/highlight-features-test.js @@ -2,6 +2,7 @@ var mongoose = require('mongoose'), async = require('async'), config = require('./config'), Schema = mongoose.Schema, + Text, mongoosastic = require('../lib/mongoosastic'); var TextSchema = new Schema({ @@ -11,9 +12,16 @@ var TextSchema = new Schema({ TextSchema.plugin(mongoosastic); -var Text = mongoose.model('Text', TextSchema); +Text = mongoose.model('Text', TextSchema); describe('Highlight search', function() { + var responses = [ + 'You don\'t see people at their best in this job, said Death.', + 'The death of the warrior or the old man or the little child, this I understand, and I take away the', + ' pain and end the suffering. I do not understand this death-of-the-mind', + 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' + ]; + before(function(done) { mongoose.connect(config.mongoUrl, function() { Text.remove(function() { @@ -54,13 +62,6 @@ describe('Highlight search', function() { done(); }); - var responses = [ - 'You don\'t see people at their best in this job, said Death.', - 'The death of the warrior or the old man or the little child, this I understand, and I take away the', - ' pain and end the suffering. I do not understand this death-of-the-mind', - 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' - ]; - describe('Highlight without hydrating', function() { it('should return highlighted text on every hit result', function(done) { @@ -80,8 +81,8 @@ describe('Highlight search', function() { res.hits.hits.forEach(function(text) { text.should.have.property('highlight'); text.highlight.should.have.property('quote'); - text.highlight.quote.forEach(function(q) { - responses.should.containEql(q); + text.highlight.quote.forEach(function(query) { + responses.should.containEql(query); }); }); @@ -111,8 +112,8 @@ describe('Highlight search', function() { res.hits.hits.forEach(function(model) { model.should.have.property('_highlight'); model._highlight.should.have.property('quote'); - model._highlight.quote.forEach(function(q) { - responses.should.containEql(q); + model._highlight.quote.forEach(function(query) { + responses.should.containEql(query); }); }); diff --git a/test/index-test.js b/test/index-test.js index c4808b9f..f0b2ecd7 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -4,6 +4,7 @@ var mongoose = require('mongoose'), esClient = new elasticsearch.Client(), config = require('./config'), Schema = mongoose.Schema, + Person, Talk, Bum, mongoosastic = require('../lib/mongoosastic'), Tweet = require('./models/tweet'); @@ -16,9 +17,9 @@ var TalkSchema = new Schema({ bio: String }); -TalkSchema.plugin(mongoosastic); - -var Talk = mongoose.model('Talk', TalkSchema); +var BumSchema = new Schema({ + name: String +}); var PersonSchema = new Schema({ name: {type: String, es_indexed: true}, @@ -29,6 +30,9 @@ var PersonSchema = new Schema({ died: {type: Number, es_indexed: true} } }); + +TalkSchema.plugin(mongoosastic); + PersonSchema.plugin(mongoosastic, { index: 'people', 
type: 'dude', @@ -36,16 +40,15 @@ PersonSchema.plugin(mongoosastic, { hydrateOptions: {lean: true, sort: '-name', select: 'address name life'} }); -var Person = mongoose.model('Person', PersonSchema); - -var BumSchema = new Schema({ - name: String -}); BumSchema.plugin(mongoosastic, { index: 'ms_sample', type: 'bum' }); -var Bum = mongoose.model('bum', BumSchema); + +Person = mongoose.model('Person', PersonSchema); +Talk = mongoose.model('Talk', TalkSchema); +Bum = mongoose.model('bum', BumSchema); + // -- alright let's test this shiznit! describe('indexing', function() { @@ -65,7 +68,6 @@ describe('indexing', function() { esClient.close(); config.deleteIndexIfExists(['tweets', 'talks', 'people'], done); - //done(); }); describe('Creating Index', function() { @@ -123,7 +125,7 @@ describe('indexing', function() { index: 'tweets', type: 'tweet', id: doc._id.toString() - }, function(err, res) { + }, function(_err, res) { res._source.message.should.eql(doc.message); done(); }); @@ -161,7 +163,7 @@ describe('indexing', function() { message: 'I like Jack better' }, { new: true - }, function(err, doc) { + }, function() { setTimeout(function() { Tweet.search({ query_string: { @@ -225,7 +227,7 @@ describe('indexing', function() { }); it('should remove only index', function(done) { - tweet.on('es-removed', function(err, res) { + tweet.on('es-removed', function() { setTimeout(function() { Tweet.search({ query_string: { @@ -243,21 +245,21 @@ describe('indexing', function() { it('should queue for later removal if not in index', function(done) { // behavior here is to try 3 times and then give up. - var tweet = new Tweet({ + var nTweet = new Tweet({ user: 'jamescarr', message: 'ABBA' }); - tweet.save(function() { + nTweet.save(function() { setTimeout(function() { - tweet.remove(); - tweet.on('es-removed', done); + nTweet.remove(); + nTweet.on('es-removed', done); }, 200); }); }); it('should remove from index when findOneAndRemove', function(done) { - var tweet = new Tweet({ + tweet = new Tweet({ user: 'jamescarr', message: 'findOneAndRemove' }); @@ -296,7 +298,7 @@ describe('indexing', function() { }); tweet.save(function() { talk.save(function() { - talk.on('es-indexed', function(err, res) { + talk.on('es-indexed', function() { setTimeout(done, config.INDEXING_TIMEOUT); }); }); @@ -353,9 +355,9 @@ describe('indexing', function() { it('should only return indexed fields', function(done) { Talk.search({query_string: {query: 'cool'}}, function(err, res) { - res.hits.total.should.eql(1); - var talk = res.hits.hits[0]._source; + + res.hits.total.should.eql(1); talk.should.have.property('title'); talk.should.have.property('year'); talk.should.have.property('abstract'); @@ -367,9 +369,9 @@ describe('indexing', function() { it('should hydrate returned documents if desired', function(done) { Talk.search({query_string: {query: 'cool'}}, {hydrate: true}, function(err, res) { - res.hits.total.should.eql(1); - var talk = res.hits.hits[0]; + + res.hits.total.should.eql(1); talk.should.have.property('title'); talk.should.have.property('year'); talk.should.have.property('abstract'); @@ -407,9 +409,9 @@ describe('indexing', function() { it('should allow extra query options when hydrating', function(done) { Talk.search({query_string: {query: 'cool'}}, {hydrate: true, hydrateOptions: {lean: true}}, function(err, res) { - res.hits.total.should.eql(1); - var talk = res.hits.hits[0]; + + res.hits.total.should.eql(1); talk.should.have.property('title'); talk.should.have.property('year'); talk.should.have.property('abstract'); 
diff --git a/test/search-features-test.js b/test/search-features-test.js index cce42c12..b05af409 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -2,6 +2,7 @@ var mongoose = require('mongoose'), async = require('async'), config = require('./config'), Schema = mongoose.Schema, + Bond, mongoosastic = require('../lib/mongoosastic'); var BondSchema = new Schema({ @@ -10,9 +11,10 @@ var BondSchema = new Schema({ price: Number }); + BondSchema.plugin(mongoosastic); -var Bond = mongoose.model('Bond', BondSchema); +Bond = mongoose.model('Bond', BondSchema); describe('Query DSL', function() { before(function(done) { @@ -62,7 +64,7 @@ describe('Query DSL', function() { describe('Sort', function() { - var getNames = function(i) { return i._source.name; }; + var getNames = function(res) { return res._source.name; }; var expectedDesc = ['Legal', 'Construction', 'Commercial', 'Bail']; var expectedAsc = expectedDesc.concat([]).reverse(); // clone and reverse @@ -134,7 +136,7 @@ describe('Query DSL', function() { describe('test', function() { it('should do a fuzzy query', function(done) { - var getNames = function(i) { return i._source.name; }; + var getNames = function(res) { return res._source.name; }; Bond.search({ match: { diff --git a/test/suggesters-test.js b/test/suggesters-test.js index 007fef87..96d9aa42 100644 --- a/test/suggesters-test.js +++ b/test/suggesters-test.js @@ -16,38 +16,38 @@ describe('Suggesters', function() { before(function(done) { mongoose.connect(config.mongoUrl, function() { config.deleteIndexIfExists(['kittens'], function() { - KittenSchema = new Schema({ - name: {type: String, es_type: 'completion', es_index_analyzer: 'simple', es_search_analyzer: 'simple', es_indexed: true}, - breed: {type: String } - }); - KittenSchema.plugin(mongoosastic); - Kitten = mongoose.model('Kitten', KittenSchema); - Kitten.createMapping({}, function(err, mapping) { - Kitten.remove(function() { - var kittens = [ - new Kitten({ - name: 'Cookie', - breed: 'Aegean' - }), - new Kitten({ - name: 'Chipmunk', - breed: 'Aegean' - }), - new Kitten({ - name: 'Twix', - breed: 'Persian' - }), - new Kitten({ - name: 'Cookies and Cream', - breed: 'Persian' - }) - ]; - async.forEach(kittens, config.saveAndWaitIndex, function() { - setTimeout(done, config.INDEXING_TIMEOUT); - }); + KittenSchema = new Schema({ + name: {type: String, es_type: 'completion', es_index_analyzer: 'simple', es_search_analyzer: 'simple', es_indexed: true}, + breed: {type: String } + }); + KittenSchema.plugin(mongoosastic); + Kitten = mongoose.model('Kitten', KittenSchema); + Kitten.createMapping({}, function() { + Kitten.remove(function() { + var kittens = [ + new Kitten({ + name: 'Cookie', + breed: 'Aegean' + }), + new Kitten({ + name: 'Chipmunk', + breed: 'Aegean' + }), + new Kitten({ + name: 'Twix', + breed: 'Persian' + }), + new Kitten({ + name: 'Cookies and Cream', + breed: 'Persian' + }) + ]; + async.forEach(kittens, config.saveAndWaitIndex, function() { + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); + }); }); }); @@ -63,35 +63,35 @@ describe('Suggesters', function() { Kitten = mongoose.model('Kitten', KittenSchema); Kitten.createMapping(function() { - esClient.indices.getMapping({ - index: 'kittens', - type: 'kitten' - }, function(err, mapping) { - var props = mapping.kitten !== undefined ? 
/* elasticsearch 1.0 & 0.9 support */ - mapping.kitten.properties : /* ES 0.9.11 */ - mapping.kittens.mappings.kitten.properties; /* ES 1.0.0 */ - props.name.type.should.eql('completion'); - done(); - }); + esClient.indices.getMapping({ + index: 'kittens', + type: 'kitten' + }, function(err, mapping) { + var props = mapping.kitten !== undefined ? /* elasticsearch 1.0 & 0.9 support */ + mapping.kitten.properties : /* ES 0.9.11 */ + mapping.kittens.mappings.kitten.properties; /* ES 1.0.0 */ + props.name.type.should.eql('completion'); + done(); }); + }); }); it('should return suggestions after hits', function(done) { Kitten.search({ match_all: {} }, { - suggest: { - kittensuggest: { - text: 'Cook', - completion: { - field: 'name' - } + suggest: { + kittensuggest: { + text: 'Cook', + completion: { + field: 'name' } } - }, function(err, res) { - res.should.have.property('suggest'); - res.suggest.kittensuggest[0].options.length.should.eql(2); - done(); - }); + } + }, function(err, res) { + res.should.have.property('suggest'); + res.suggest.kittensuggest[0].options.length.should.eql(2); + done(); + }); }); }); }); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 8b8b6cbe..d55f8a11 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -2,6 +2,7 @@ var mongoose = require('mongoose'), async = require('async'), config = require('./config'), mongoosastic = require('../lib/mongoosastic'), + Book, Schema = mongoose.Schema; var BookSchema = new Schema({ @@ -10,7 +11,7 @@ var BookSchema = new Schema({ BookSchema.plugin(mongoosastic); -var Book = mongoose.model('Book', BookSchema); +Book = mongoose.model('Book', BookSchema); describe('Synchronize', function() { var books = null; @@ -45,7 +46,7 @@ describe('Synchronize', function() { var stream = Book.synchronize(), count = 0; - stream.on('data', function(err, doc) { + stream.on('data', function() { count++; }); diff --git a/test/truncate-test.js b/test/truncate-test.js index f5ce20cb..77a6de83 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -2,14 +2,16 @@ var mongoose = require('mongoose'), async = require('async'), config = require('./config'), Schema = mongoose.Schema, + Dummy, mongoosastic = require('../lib/mongoosastic'); var DummySchema = new Schema({ text: String }); + DummySchema.plugin(mongoosastic); -var Dummy = mongoose.model('Dummy', DummySchema); +Dummy = mongoose.model('Dummy', DummySchema); describe('Truncate', function() { before(function(done) { @@ -43,7 +45,7 @@ describe('Truncate', function() { describe('esTruncate', function() { it('should be able to truncate all documents', function(done) { - Dummy.esTruncate(function(err) { + Dummy.esTruncate(function() { Dummy.search({ query_string: { query: 'Text1' From 64ae49a5d2ad114139cbe23190269175c273feca Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 00:23:36 +0200 Subject: [PATCH 139/152] updated travis config --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9d619f1f..284959a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,9 +3,9 @@ sudo: false language: node_js node_js: - - 0.10 - 0.12 - iojs + - stable services: - mongodb From 85d2dcdda32ec460244acaeb935cf99c05a76e4c Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 00:32:43 +0200 Subject: [PATCH 140/152] added AUTHORS file --- AUTHORS | 54 +++++++++++++++++++++++++++++++++++++++ package.json | 1 + scripts/update_authors.sh | 13 ++++++++++ 3 files changed, 68 insertions(+) create mode 
100644 AUTHORS create mode 100755 scripts/update_authors.sh diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..b97a0855 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,54 @@ +Alban Mouton +Andreas Schmid +antoineverger +Antoine Verger +Astro +b96705008 +Brady Brown +Can Kutlu Kinay +chapel +Christian Sturm +Christophe Wagner +danteata +Dan Williams +Eugeny Vlasenko +Francesco Nero +gabrielmancini +Gary Pearman +George +George Shank +Gustavo +gustavo.marin +guumaster +guumaster +Hüseyin BABAL +Hüseyin BABAL +Ignacio Lago +isayme +jamescarr +James Carr +James +James R. Carr +jetNull +John Resig +Jon Buckley +Jon Burgess +Jon Burgess +Jose Maza +Kyle Mathews +Marcos Sanz +Nadeesha Cabral +Nicolas McCurdy +nlko +Phillip Rosen +Robert Katzki +root +Ro Ramtohul +Sascha Schwabbauer +srfrnk +Srfrnk +Sukru BEZEN +taterbase +Warner Onstine +xizhao +xren diff --git a/package.json b/package.json index 1cc141e7..2fabd1a1 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ "node": ">= 0.10.0" }, "scripts": { + "authors": "./scripts/update_authors.sh", "lint": "eslint lib", "mocha": "mocha test/*-test.js -R spec -t 600000", "test": "npm run lint && npm run mocha" diff --git a/scripts/update_authors.sh b/scripts/update_authors.sh new file mode 100755 index 00000000..6456127e --- /dev/null +++ b/scripts/update_authors.sh @@ -0,0 +1,13 @@ +#!/bin/sh +git log --reverse --format='%aN <%aE>' | perl -we ' +BEGIN { +%seen = (), @authors = (); +} +while (<>) { +next if $seen{$_}; +$seen{$_} = push @authors, $_; +} +END { +print @authors; +} +' | sort | uniq > AUTHORS From 4de8ecc338f5597db498dd92ae9238c072b344ba Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 00:37:29 +0200 Subject: [PATCH 141/152] Release 3.3.1 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 1cc141e7..7856ba88 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.3.0", + "version": "3.3.1", "tags": [ "mongodb", "elasticsearch", From 5a8d2cc00bff7dafe91699f7651827b1033b326f Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 00:38:58 +0200 Subject: [PATCH 142/152] Release 3.3.2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 1ddf8cab..2662d72d 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "3.3.1", + "version": "3.3.2", "tags": [ "mongodb", "elasticsearch", From f24da6af9104ae27e25dde7e8eafe5bed1b6a195 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 00:46:14 +0200 Subject: [PATCH 143/152] updated CHANGELOG --- CHANGELOG.md | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36484d50..b0171be3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,35 @@ +3.3.2 / 2015-09-27 +================== + + * Merge pull request [#107](https://github.com/mongoosastic/mongoosastic/issues/107) + * added AUTHORS file + * Merge pull request [#106](https://github.com/mongoosastic/mongoosastic/issues/106) + * updated travis config + * removed gulp dependency. 
moved to ESLint instead of jshint/jscs + +3.3.0 / 2015-09-13 +================== + + * updated README + * Merge pull request [#100](https://github.com/mongoosastic/mongoosastic/issues/100) + * Merge pull request [#104](https://github.com/mongoosastic/mongoosastic/issues/104) + * fixed suggesters tests + * fix(postSave): fix findOneAndUpdate if document doesn't exist + * added support for suggesters + * small json typo + +3.2.0 / 2015-09-07 +================== + * 3.2.0 + * Merge pull request [#98](https://github.com/mongoosastic/mongoosastic/issues/98) + added fuzzy search test + * added fuzzy search test + * Merge pull request [#99](https://github.com/mongoosastic/mongoosastic/issues/99) + * package.json updated. and src linted + * Merge pull request [#97](https://github.com/mongoosastic/mongoosastic/issues/97) + * update dependency elasticsearch to ^8.0.0 + * index/unindex when findOneAndUpdate/findOneAndRemove + 3.1.4 / 2015-07-19 ================== From 52ba0214d788ca876bda6a1d4addb56d72ee286a Mon Sep 17 00:00:00 2001 From: Gustavo Date: Sun, 27 Sep 2015 10:19:55 +0200 Subject: [PATCH 144/152] Create .coveralls.yml --- .coveralls.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .coveralls.yml diff --git a/.coveralls.yml b/.coveralls.yml new file mode 100644 index 00000000..6852a862 --- /dev/null +++ b/.coveralls.yml @@ -0,0 +1,2 @@ +service_name: circleci +repo_token: WoDtU1K0sZH8dMr0uGEhxpUAWNsXxv3Aq From b05a2c9786c2255f3bbea1bc60f4ab1f28e581ab Mon Sep 17 00:00:00 2001 From: Gustavo Date: Sun, 27 Sep 2015 10:20:58 +0200 Subject: [PATCH 145/152] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7c5e8117..0038aa5b 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Mongoosastic [![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) [![NPM version](https://img.shields.io/npm/v/mongoosastic.svg)](https://www.npmjs.com/package/mongoosastic) +[![Coverage Status](https://coveralls.io/repos/mongoosastic/mongoosastic/badge.svg?branch=master&service=github)](https://coveralls.io/github/mongoosastic/mongoosastic?branch=master) [![Downloads](https://img.shields.io/npm/dm/mongoosastic.svg)](https://www.npmjs.com/package/mongoosastic) [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) From b353701d54e5ddadec69c67ac333d8b063a54526 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 11:00:43 +0200 Subject: [PATCH 146/152] added coverage script --- .gitignore | 1 + package.json | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 0aa35d8a..35b02a0f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .DS_Store node_modules .idea +coverage diff --git a/package.json b/package.json index 2662d72d..3ab9dd60 100644 --- a/package.json +++ b/package.json @@ -22,8 +22,10 @@ "devDependencies": { "async": "^1.4.2", "babel-eslint": "^4.1.3", + "coveralls": "^2.11.4", "eslint": "^1.5.1", "eslint-config-airbnb": "0.0.9", + "istanbul": "^0.3.21", "mocha": "^2.3.3", "should": "^7.1.0" }, @@ -34,7 +36,8 @@ "authors": "./scripts/update_authors.sh", "lint": "eslint lib", "mocha": "mocha test/*-test.js -R spec -t 600000", - "test": "npm run lint && npm run mocha" + "test": "npm run lint && npm run coverage", + "coverage": "istanbul cover ./node_modules/mocha/bin/_mocha --report lcovonly -- -R 
spec test/*-test.js -t 600000 && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js" }, "license": "MIT" } From 12f788c9907e37d2005e809622d0f28eb8a95f11 Mon Sep 17 00:00:00 2001 From: guumaster Date: Sun, 27 Sep 2015 11:10:52 +0200 Subject: [PATCH 147/152] fixed coveralls token --- .coveralls.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.coveralls.yml b/.coveralls.yml index 6852a862..904b7ff5 100644 --- a/.coveralls.yml +++ b/.coveralls.yml @@ -1,2 +1,2 @@ service_name: circleci -repo_token: WoDtU1K0sZH8dMr0uGEhxpUAWNsXxv3Aq +repo_token: 1pAN4hNbaNFBq8MpfChcOAL71DadtOTiU From 05e95df22257cf95743ce1c653f4136e4c5e5612 Mon Sep 17 00:00:00 2001 From: guumaster Date: Tue, 3 Nov 2015 21:58:47 +0100 Subject: [PATCH 148/152] updated yml config for travisci and coveralls --- .coveralls.yml | 2 +- .travis.yml | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.coveralls.yml b/.coveralls.yml index 904b7ff5..b3edf329 100644 --- a/.coveralls.yml +++ b/.coveralls.yml @@ -1,2 +1,2 @@ -service_name: circleci +service_name: travis-ci repo_token: 1pAN4hNbaNFBq8MpfChcOAL71DadtOTiU diff --git a/.travis.yml b/.travis.yml index 284959a8..483b0cbb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,8 +3,7 @@ sudo: false language: node_js node_js: - - 0.12 - - iojs + - 4.2 - stable services: From 7d7231cfba5298173cfb04060e7c038d3b652956 Mon Sep 17 00:00:00 2001 From: Jason More Date: Wed, 4 Nov 2015 08:38:23 -0600 Subject: [PATCH 149/152] added ability to define a nested type --- lib/mapping-generator.js | 60 ++++++++++++++++++++++------------ test/mapping-generator-test.js | 20 ++++++++++++ 2 files changed, 60 insertions(+), 20 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index d634fbc3..fb9d9297 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -58,10 +58,9 @@ function getMapping(cleanTree, inPrefix) { } // If there is no type, then it's an object with subfields. - if (!value.type) { + if (typeof value === 'object' && !value.type) { mapping[field].type = 'object'; mapping[field].properties = getMapping(value, prefix + field); - continue; } // If it is a objectid make it a string. @@ -83,6 +82,11 @@ function getMapping(cleanTree, inPrefix) { mapping[field][prop.replace(/^es_/, '')] = value[prop]; } } + + // if type is never mapped, delete mapping + if (mapping[field].type === undefined) { + delete mapping[field]; + } } // If one of the fields was explicitly indexed, delete all implicit fields @@ -111,6 +115,7 @@ function getCleanTree(tree, paths, inPrefix) { type = '', value = {}, field, + prop, key, geoFound = false, prefix = inPrefix !== '' ? inPrefix + '.' 
: inPrefix;
@@ -132,25 +137,16 @@ function getCleanTree(tree, paths, inPrefix) {
     // If it is an nested schema
     if (value[0]) {
       // A nested array can contain complex objects
-      if (paths[field] && paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) {
-        cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, '');
-      } else if (paths[field] && paths[field].caster && paths[field].caster.instance) {
-        // Even for simple types the value can be an object if there is other attributes than type
-        if (typeof value[0] === 'object') {
-          cleanTree[field] = value[0];
-        } else {
-          cleanTree[field] = {};
+      nestedSchema(paths, field, cleanTree, value, prefix);
+    } else if (value.type && Array.isArray(value.type)) {
+      // An object with a nested array
+      nestedSchema(paths, field, cleanTree, value, prefix);
+      // Merge top level es settings
+      for (prop in value) {
+        // Map to field if it's an Elasticsearch option
+        if (value.hasOwnProperty(prop) && prop.indexOf('es_') === 0 && prop !== 'es_indexed') {
+          cleanTree[field][prop] = value[prop];
         }
-
-        cleanTree[field].type = paths[field].caster.instance.toLowerCase();
-      } else if (!paths[field] && prefix) {
-        if (paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) {
-          cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()};
-        }
-      } else {
-        cleanTree[field] = {
-          type: 'object'
-        };
       }
     } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array) {
       cleanTree[field] = {};
@@ -191,6 +187,30 @@ function getCleanTree(tree, paths, inPrefix) {
   return cleanTree;
 }
 
+function nestedSchema(paths, field, cleanTree, value, prefix) {
+  // A nested array can contain complex objects
+  if (paths[field] && paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) {
+    cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, '');
+  } else if (paths[field] && paths[field].caster && paths[field].caster.instance) {
+    // Even for simple types the value can be an object if there is other attributes than type
+    if (typeof value[0] === 'object') {
+      cleanTree[field] = value[0];
+    } else {
+      cleanTree[field] = {};
+    }
+
+    cleanTree[field].type = paths[field].caster.instance.toLowerCase();
+  } else if (!paths[field] && prefix) {
+    if (paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) {
+      cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()};
+    }
+  } else {
+    cleanTree[field] = {
+      type: 'object'
+    };
+  }
+}
+
 function Generator() {
 }
 
diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js
index abc49198..7cf400b5 100644
--- a/test/mapping-generator-test.js
+++ b/test/mapping-generator-test.js
@@ -163,6 +163,26 @@ describe('MappingGenerator', function() {
     });
   });
 
+  it('recognizes an es_type of nested with es_fields and maps it', function(done) {
+    var NameSchema = new Schema({
+      first_name: {type: String, es_index: 'not_analyzed'},
+      last_name: {type: String, es_index: 'not_analyzed'}
+    });
+    generator.generateMapping(new Schema({
+      name: {type: [NameSchema], es_indexed: true, es_type: 'nested', es_include_in_parent: true}
+    }), function(err, mapping) {
+      mapping.properties.name.type.should.eql('nested');
+      mapping.properties.name.include_in_parent.should.eql(true);
+      mapping.properties.name.properties.first_name.type.should.eql('string');
+      mapping.properties.name.properties.first_name.index.should.eql('not_analyzed');
+      mapping.properties.name.properties.last_name.type.should.eql('string');
+      mapping.properties.name.properties.last_name.index.should.eql('not_analyzed');
+      should.not.exist(mapping.properties.name.properties.es_include_in_parent);
+      should.not.exist(mapping.properties.name.properties.es_type);
+      done();
+    });
+  });
+
   it('recognizes a nested array with a simple type and maps it as a simple attribute', function(done) {
     generator.generateMapping(new Schema({
       contacts: [String]

From 0757edd064b257cbd8d756270c1bd6fb9dcca669 Mon Sep 17 00:00:00 2001
From: guumaster
Date: Sat, 7 Nov 2015 17:35:10 +0100
Subject: [PATCH 150/152] 3.4.0

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 3ab9dd60..ee5a9766 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "author": "James R. Carr (http://blog.james-carr.org)",
   "name": "mongoosastic",
   "description": "A mongoose plugin that indexes models into elastic search",
-  "version": "3.3.2",
+  "version": "3.4.0",
   "tags": [
     "mongodb",
     "elasticsearch",

From fee70c6dcdc28f81200aac2e2d4ce16d4612c88b Mon Sep 17 00:00:00 2001
From: guumaster
Date: Sat, 7 Nov 2015 17:39:48 +0100
Subject: [PATCH 151/152] updated CHANGELOG.md

---
 CHANGELOG.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b0171be3..fe7a18da 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,14 @@
+3.4.0 / 2015-11-07
+==================
+  * Merge pull request [#120](https://github.com/mongoosastic/mongoosastic/issues/120) from JasonMore/master
+    Recognizes an es_type of nested with es_fields and maps it
+  * added ability to define a nested type
+  * Merge pull request [#122](https://github.com/mongoosastic/mongoosastic/issues/122) from guumaster/updated-travis-coveralls
+    updated yml config for travisci and coveralls
+  * added coverage script
+  * Update README.md
+  * Create .coveralls.yml
+
 3.3.2 / 2015-09-27
 ==================

From 3913eecdc4d5287439998eb9e5b7e716d924ad4b Mon Sep 17 00:00:00 2001
From: Simplicissimus Book Farm
Date: Tue, 10 Nov 2015 15:51:10 +0100
Subject: [PATCH 152/152] Added check for _id

Check if they are strings or ObjectIds
---
 lib/mongoosastic.js | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js
index 36290bab..ec907ff7 100644
--- a/lib/mongoosastic.js
+++ b/lib/mongoosastic.js
@@ -85,9 +85,13 @@ function createMappingIfNotPresent(options, cb) {
 function hydrate(res, model, options, cb) {
   var results = res.hits,
     resultsMap = {},
+    mongoose = require('mongoose'),
     ids = results.hits.map(function mapHits(result, idx) {
       resultsMap[result._id] = idx;
-      return result._id;
+      if (typeof result._id === 'string')
+        return mongoose.mongo.ObjectID(result._id);
+      else
+        return result._id;
     }),
 
     query = model.find({_id: {$in: ids}}),