diff --git a/.coveralls.yml b/.coveralls.yml new file mode 100644 index 00000000..b3edf329 --- /dev/null +++ b/.coveralls.yml @@ -0,0 +1,2 @@ +service_name: travis-ci +repo_token: 1pAN4hNbaNFBq8MpfChcOAL71DadtOTiU diff --git a/.eslintrc b/.eslintrc new file mode 100644 index 00000000..179bbe35 --- /dev/null +++ b/.eslintrc @@ -0,0 +1,15 @@ +{ + "env": { + "node": true, + "mocha": true + }, + "extends": "airbnb/base", + "rules": { + "func-names": 0, + "no-use-before-define": 1, + "one-var": 0, + "no-var": 0, + "comma-dangle": 0, + "padded-blocks": 0 + } +} diff --git a/.gitignore b/.gitignore index 9daa8247..35b02a0f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ .DS_Store node_modules +.idea +coverage diff --git a/.travis.yml b/.travis.yml index 690e2dd2..483b0cbb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,14 @@ +sudo: false + language: node_js node_js: - - 0.11 - - 0.10 - - 0.8 + - 4.2 + - stable services: - mongodb - elasticsearch -notifications: - email: - - james.r.carr@gmail.com +env: + - BULK_ACTION_TIMEOUT=30000 INDEXING_TIMEOUT=3000 diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..b97a0855 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,54 @@ +Alban Mouton +Andreas Schmid +antoineverger +Antoine Verger +Astro +b96705008 +Brady Brown +Can Kutlu Kinay +chapel +Christian Sturm +Christophe Wagner +danteata +Dan Williams +Eugeny Vlasenko +Francesco Nero +gabrielmancini +Gary Pearman +George +George Shank +Gustavo +gustavo.marin +guumaster +guumaster +Hüseyin BABAL +Hüseyin BABAL +Ignacio Lago +isayme +jamescarr +James Carr +James +James R. 
Carr +jetNull +John Resig +Jon Buckley +Jon Burgess +Jon Burgess +Jose Maza +Kyle Mathews +Marcos Sanz +Nadeesha Cabral +Nicolas McCurdy +nlko +Phillip Rosen +Robert Katzki +root +Ro Ramtohul +Sascha Schwabbauer +srfrnk +Srfrnk +Sukru BEZEN +taterbase +Warner Onstine +xizhao +xren diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..fe7a18da --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,300 @@ +3.4.0 / 2015-11-07 +================== + * Merge pull request [#120](https://github.com/mongoosastic/mongoosastic/issues/120) from JasonMore/master + Recognizes an es_type of nested with es_fields and maps it + * added ability to define a nested type + * Merge pull request [#122](https://github.com/mongoosastic/mongoosastic/issues/122) from guumaster/updated-travis-coveralls + updated yml config for travisci and coveralls + * added coverage script + * Update README.md + * Create .coveralls.yml + +3.3.2 / 2015-09-27 +================== + + * Merge pull request [#107](https://github.com/mongoosastic/mongoosastic/issues/107) + * added AUTHORS file + * Merge pull request [#106](https://github.com/mongoosastic/mongoosastic/issues/106) + * updated travis config + * removed gulp dependency. moved to ESLint instead of jshint/jscs + +3.3.0 / 2015-09-13 +================== + + * updated README + * Merge pull request [#100](https://github.com/mongoosastic/mongoosastic/issues/100) + * Merge pull request [#104](https://github.com/mongoosastic/mongoosastic/issues/104) + * fixed suggesters tests + * fix(postSave): fix findOneAndUpdate if document doesn't exist + * added support for suggesters + * small json typo + +3.2.0 / 2015-09-07 +================== + * 3.2.0 + * Merge pull request [#98](https://github.com/mongoosastic/mongoosastic/issues/98) + added fuzzy search test + * added fuzzy search test + * Merge pull request [#99](https://github.com/mongoosastic/mongoosastic/issues/99) + * package.json updated. 
and src linted + * Merge pull request [#97](https://github.com/mongoosastic/mongoosastic/issues/97) + * update dependency elasticsearch to ^8.0.0 + * index/unindex when findOneAndUpdate/findOneAndRemove + +3.1.4 / 2015-07-19 +================== + + * updated package.json dependencies and added "licence" field. + * added `iojs` and Node 0.12 to travis environments. + * upgraded to new travis infraestructure. + * minor tweaks to improve tests. + +3.1.3 / 2015-07-19 +================== + + * improves synchronize high memory usage [#84](https://github.com/mongoosastic/mongoosastic/issues/84) + +3.1.2 / 2015-05-25 +================== + + * added complex sorting object to `search()` options parameters [#79](https://github.com/mongoosastic/mongoosastic/issues/79) + * devDependecies updated + +3.1.1 / 2015-05-25 +================== + + * fixed index creation [#75](https://github.com/mongoosastic/mongoosastic/issues/75) + * added index filtering documentation [#72](https://github.com/mongoosastic/mongoosastic/issues/72) + +3.1.0 / 2015-05-01 +================== + + * added esCount feature [#58](https://github.com/mongoosastic/mongoosastic/issues/58) + * fixed esTruncate [#69](https://github.com/mongoosastic/mongoosastic/issues/69) + * added filtering index logic [#67](https://github.com/mongoosastic/mongoosastic/issues/67) + +3.0.0 / 2015-03-29 +================== + + * mongoose and elasticsearch version bump + +2.1.3 / 2015-03-29 +================== + + * added doc parameter on mongoose hooks + +2.1.1 / 2015-03-28 +=================== + + * added gulp, jshint and jscs + * example fixed and dependencies updated + +2.1.0 / 2015-03-21 +=================== + + * added multihost connection feature [#56](https://github.com/mongoosastic/mongoosastic/issues/56) + * updates to README + * lots of smalls code cleaning + * Added highlight feature [#51](https://github.com/mongoosastic/mongoosastic/issues/51) + * added full CHANGELOG.md + +2.0.10 / 2015-03-19 +=================== + + * 
updated elasticsearch dependency. changed bulk config + * Dependecies updated + +2.0.9 / 2015-03-19 +================== + + * Increased timeout for travis environment + * Update .travis.yml + testing conf. + * added timeout env variable for travis to wait on index deletion + +2.0.8 / 2015-03-17 +================== + + * fixed timeout for bulk test + * Merge pull request [#40](https://github.com/mongoosastic/mongoosastic/issues/40) from srfrnk/patch-1 + Patch 1 - fixed + * Merge pull request [#47](https://github.com/mongoosastic/mongoosastic/issues/47) from guumaster/master + small patch for nested array schemas + * Merge pull request [#53](https://github.com/mongoosastic/mongoosastic/issues/53) from gazsp/master + Fixes [#49](https://github.com/mongoosastic/mongoosastic/issues/49) + * Fixes [#49](https://github.com/mongoosastic/mongoosastic/issues/49) + * small patch for nested array schemas + * wrong value used... now should be better. + * fixed commit + * allow debugging the calls made by elasticsearch client. + added ability to send {log:"trace"} in options to enable logging + +2.0.6 / 2014-12-11 +================== + + * Merge pull request [#35](https://github.com/mongoosastic/mongoosastic/issues/35) from jitowix/master + add settings when index is created + * add settings when index is created + +2.0.5 / 2014-11-21 +================== + + * Merge pull request [#30](https://github.com/mongoosastic/mongoosastic/issues/30) from nicolasmccurdy/mention-estruncate + In documentation files, rename "truncate" to "esTruncate" + * In documentation files, rename "truncate" to "esTruncate" + +2.0.4 / 2014-11-10 +================== + + * Merge pull request [#27](https://github.com/mongoosastic/mongoosastic/issues/27) from ignlg/feature/serialize-cast-bulk + Serialize on bulk calls. Serialize: this = full model. + * Serialize on bulk calls. Serialize: this = full model. 
+ +2.0.3 / 2014-11-10 +================== + + * Merge pull request [#26](https://github.com/mongoosastic/mongoosastic/issues/26) from b96705008/master + get rid of "continue" when encounter objectid (issue [#12](https://github.com/mongoosastic/mongoosastic/issues/12)) + * get rid of "continue" when encounter objectid + * remove unstable node testing, broken for now + +2.0.2 / 2014-11-05 +================== + + * Treat null query like undefined + +2.0.1 / 2014-11-02 +================== + + * Merge pull request [#23](https://github.com/mongoosastic/mongoosastic/issues/23) from sascha/master + 'protocol' and 'auth' options are ignored + * 'protocol' and 'auth' options are ignored + This fixes an issue, where the 'protocol' and 'auth' options were ignored. + * Merge pull request [#21](https://github.com/mongoosastic/mongoosastic/issues/21) from mongoosastic/feature/official-driver + Feature/official driver + * Add changelog + +2.0.0 / 2014-10-30 +================== + + * updates for travis + * longer delay for bulk test + * significant version bump + * formatting + * Add gitter badge + * Update query interface + * refresh readme + * don't stop bulk options with synchronize + * refactor bulk api + * uppercase README + * Break out docs + * remove elastical dependency + * All tests passing + * had to scale back abstraction on search + * Close to fixing geo test + * first pass at integrating elasticsearch driver + * remove semicolons from mongoosastic.js + +1.0.2 / 2014-10-28 +================== + + * Document geo_shape + +1.0.1 / 2014-10-28 +================== + + * Add documentation about bulk api + +1.0.0 / 2014-10-28 +================== + + * big api changes, big version bump + * Merge pull request [#17](https://github.com/mongoosastic/mongoosastic/issues/17) from mongoosastic/albanm/feature/bulk-and-array-indexing + Albanm/feature/bulk and array indexing + * resolve conflicts + * use containEql instead of include + * Merge pull request 
[#16](https://github.com/mongoosastic/mongoosastic/issues/16) from mongoosastic/remove-river-code + remove river code + * Merge pull request [#14](https://github.com/mongoosastic/mongoosastic/issues/14) from mongoosastic/nlko-geo_shape + Nlko geo shape + * Merge pull request [#15](https://github.com/mongoosastic/mongoosastic/issues/15) from mongoosastic/cubuzoa/feature/hydrate-hits + Cubuzoa/feature/hydrate hits + * remove river code + * Update hydrated tests to conform to api + * Get first level of hits field + Provided fix for etting first level `hits` field of search results when + used hydrate + * Correct enveloppe test + Enveloppe corners were in wrong order resulting in a bad test. + * Add ES 1.0 support for geo shape tests + * Added testfor geo_shape and updated manual + * Add test for undefined object field in the path prior of its use + * Correct boost test field (support ES 0.9 and 1.0). + In my tests, the mapping format returned by the getMapping function is + not the same between 0.90.11 and 1.0 + * Keep geo_* types in the mapping + Prior, only geo_point were kept in the mapping. + * Update readme.md + More dynamic version info + +0.6.1 / 2014-10-24 +================== + + * Update badge + * Update repo info in package.json + +0.6.0 / 2014-10-14 +================== + + * remove outdated river info + * add more node versions to travis + * Merge pull request [#128](https://github.com/mongoosastic/mongoosastic/issues/128) from marsanla/patch-3 + Add elasticsearch client + * Merge pull request [#120](https://github.com/mongoosastic/mongoosastic/issues/120) from antoineverger/master + Add the esTruncate static method to remove all documents from an index + * Add elasticsearch client + Add elasticsearch client to avoid duplicate instances and call from model plugin. 
+ * Documentation for the truncate static method + * Centralise the "warmup" timeout value in the config to make it easier to adjust it + * Add the feature to pre-process a field before indexing + * Add the esTruncate static method to remove all documents from an index + * bump semver + * Merge pull request [#119](https://github.com/mongoosastic/mongoosastic/issues/119) from antoineverger/master + Add the settings to create mapping. + +0.5.0 / 2014-09-23 +================== + + * Add the settings to create mapping. + First step to have a better configuration of the index settings. + * update readme to reflect version + +0.4.1 / 2014-08-28 +================== + + * Merge pull request [#116](https://github.com/mongoosastic/mongoosastic/issues/116) from sascha/feature/id-in-subdocuments + '_id' and/or 'id' properties in subdocuments + * Merge pull request [#115](https://github.com/mongoosastic/mongoosastic/issues/115) from danteata/master + fixed configuration setup example + * '_id' and 'id' in subdocuments + Added the possibility to have properties called '_id' or 'id' within subdocuments. 
+ * fixed configuration setup example + * Update semver to reflect api change + * Merge pull request [#111](https://github.com/mongoosastic/mongoosastic/issues/111) from astro/refresh + expose index refresh + +0.4.0 / 2014-08-18 +================== + + * Update version, could be some breaking changes + +0.3.0 / 2014-08-15 +================== + + * Merge pull request [#113](https://github.com/mongoosastic/mongoosastic/issues/113) from aschmid/master + fixed issue where object properties where ignored by serialize + * fixed issue where object properties where ignored by serialize + * Merge pull request [#99](https://github.com/mongoosastic/mongoosastic/issues/99) from xren/master + Emit the error when doc.save() fails during synchronization + * Merge pull request [#100](https://github.com/mongoosastic/mongoosastic/issues/100) from clippPR/master + fixing this bug (hopefully) - https://github.com/jamescarr/mongoosastic/... diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..7d8e095c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,53 @@ +# Contributing +Pull requests are always welcome as long as an accompanying test case is +associated. + +This project is configured to use [git +flow](https://github.com/nvie/gitflow/) and the following conventions +are used: + +* ``develop`` - represents current active development and can possibly be + unstable. +* ``master`` - pristine copy of repository, represents the currently + stable release found in the npm index. +* ``feature/**`` - represents a new feature being worked on + +If you wish to contribute, the only requirement is to: + +- branch a new feature branch from develop (if you're working on an + issue, prefix it with the issue number) +- make the changes, with accompanying test cases +- issue a pull request against develop branch + +Although I use git flow and prefix feature branches with "feature/" I +don't require this for pull requests... 
all I care is that the feature +branch name makes sense. + +Pulls requests against master or pull requests branched from master will +be rejected. + +## Examples +Someone picks up issue #39 on selective indexing. + +Good branch names: +* 39-selective-indexing +* feature/39-selective-indexing + +Someone submits a new feature that allows shard configuration: + +Good branch names: +* feature/shard-configuration +* shard-configuration +* or file an issue, then create a feature branch + +Feel free to ping me if you need help! :) + +## Running Tests +In order to run the tests you will need: + +* An elasticsearch server running on port 9200 +* A mongodb server +* [mocha](http://visionmedia.github.com/mocha/) + +With those installed, running ''npm test'' will run the tests with the +preferred timeout (which is extended for integration tests. diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 00000000..61673abd --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,9 @@ +[The MIT License](https://tldrlegal.com/l/mit) + +Copyright (c) 2012 James R. Carr + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 00000000..0038aa5b --- /dev/null +++ b/README.md @@ -0,0 +1,542 @@ +# Mongoosastic +[![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) +[![NPM version](https://img.shields.io/npm/v/mongoosastic.svg)](https://www.npmjs.com/package/mongoosastic) +[![Coverage Status](https://coveralls.io/repos/mongoosastic/mongoosastic/badge.svg?branch=master&service=github)](https://coveralls.io/github/mongoosastic/mongoosastic?branch=master) +[![Downloads](https://img.shields.io/npm/dm/mongoosastic.svg)](https://www.npmjs.com/package/mongoosastic) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) + +[![NPM](https://nodei.co/npm/mongoosastic.png)](https://nodei.co/npm/mongoosastic/) + +Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatically index your models into [elasticsearch](http://www.elasticsearch.org/). 
+ +- [Installation](#installation) +- [Setup](#setup) +- [Indexing](#indexing) + - [Saving a document](#saving-a-document) + - [Indexing nested models](#indexing-nested-models) + - [Indexing an existing collection](#indexing-an-existing-collection) + - [Bulk indexing](#bulk-indexing) + - [Filtered indexing](#filtered-indexing) + - [Indexing on demand](#indexing-on-demand) + - [Truncating an index](#truncating-an-index) +- [Mapping](#mapping) + - [Geo mapping](#geo-mapping) + - [Indexing a geo point](#indexing-a-geo-point) + - [Indexing a geo shape](#indexing-a-geo-shape) + - [Creating mappings on-demand](#creating-mappings-on-demand) +- [Queries](#queries) + - [Hydration](#hydration) + +## Installation + +The latest version of this package will be as close as possible to the latest `elasticsearch` and `mongoose` packages. If you are working with latest mongoose package, install normally: + +```bash +npm install -S mongoosastic +``` + +If you are working with `mongoose@3.8.x` use `mongoosastic@2.x` and install a specific version: + +```bash +npm install -S mongoosastic@^2.x +``` + +## Setup + +### Model.plugin(mongoosastic, options) + +Options are: + +* `index` - the index in Elasticsearch to use. Defaults to the pluralization of the model name. +* `type` - the type this model represents in Elasticsearch. Defaults to the model name. +* `esClient` - an existing Elasticsearch `Client` instance. +* `hosts` - an array hosts Elasticsearch is running on. +* `host` - the host Elasticsearch is running on +* `port` - the port Elasticsearch is running on +* `auth` - the authentication needed to reach Elasticsearch server. In the standard format of 'username:password' +* `protocol` - the protocol the Elasticsearch server uses. 
Defaults to http +* `hydrate` - whether or not to lookup results in mongodb before +* `hydrateOptions` - options to pass into hydrate function +* `bulk` - size and delay options for bulk indexing +* `filter` - the function used for filtered indexing + + +To have a model indexed into Elasticsearch simply add the plugin. + +```javascript +var mongoose = require('mongoose') + , mongoosastic = require('mongoosastic') + , Schema = mongoose.Schema + +var User = new Schema({ + name: String + , email: String + , city: String +}) + +User.plugin(mongoosastic) +``` + +This will by default simply use the pluralization of the model name as the index +while using the model name itself as the type. So if you create a new +User object and save it, you can see it by navigating to +http://localhost:9200/users/user/_search (this assumes Elasticsearch is +running locally on port 9200). + +The default behavior is all fields get indexed into Elasticsearch. This can be a little wasteful especially considering that +the document is now just being duplicated between mongodb and +Elasticsearch so you should consider opting to index only certain fields by specifying `es_indexed` on the +fields you want to store: + + +```javascript +var User = new Schema({ + name: {type:String, es_indexed:true} + , email: String + , city: String +}) + +User.plugin(mongoosastic) +``` + +In this case only the name field will be indexed for searching. + +Now, by adding the plugin, the model will have a new method called +`search` which can be used to make simple to complex searches. The `search` +method accepts [standard Elasticsearch query DSL](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-queries.html) + +```javascript +User.search({ + query_string: { + query: "john" + } +}, function(err, results) { + // results here +}); + +``` + +To connect to more than one host, you can use an array of hosts. 
+ +```javascript +MyModel.plugin(mongoosastic, { + hosts: [ + 'localhost:9200', + 'anotherhost:9200' + ] +}) +``` + +Also, you can re-use an existing Elasticsearch `Client` instance + +```javascript +var esClient = new elasticsearch.Client({host: 'localhost:9200'}); +MyModel.plugin(mongoosastic, { + esClient: esClient +}) +``` + + +## Indexing + +### Saving a document +The indexing takes place after saving inside the mongodb and is a defered process. +One can check the end of the indexion catching es-indexed event. + +```javascript +doc.save(function(err){ + if (err) throw err; + /* Document indexation on going */ + doc.on('es-indexed', function(err, res){ + if (err) throw err; + /* Document is indexed */ + }); + }); +``` + + +###Indexing Nested Models +In order to index nested models you can refer following example. + +```javascript +var Comment = new Schema({ + title: String + , body: String + , author: String +}) + + +var User = new Schema({ + name: {type:String, es_indexed:true} + , email: String + , city: String + , comments: {type:[Comment], es_indexed:true} +}) + +User.plugin(mongoosastic) +``` + + +### Indexing An Existing Collection +Already have a mongodb collection that you'd like to index using this +plugin? No problem! Simply call the synchronize method on your model to +open a mongoose stream and start indexing documents individually. + +```javascript +var BookSchema = new Schema({ + title: String +}); +BookSchema.plugin(mongoosastic); + +var Book = mongoose.model('Book', BookSchema) + , stream = Book.synchronize() + , count = 0; + +stream.on('data', function(err, doc){ + count++; +}); +stream.on('close', function(){ + console.log('indexed ' + count + ' documents!'); +}); +stream.on('error', function(err){ + console.log(err); +}); +``` + +You can also synchronize a subset of documents based on a query! + +```javascript +var stream = Book.synchronize({author: 'Arthur C. 
Clarke'}) +``` + +### Bulk Indexing + +You can also specify `bulk` options with mongoose which will utilize Elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. + +Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. + +```javascript +BookSchema.plugin(mongoosastic, { + bulk: { + size: 10, // preferred number of docs to bulk index + delay: 100 //milliseconds to wait for enough docs to meet size constraint + } +}); +``` + +### Filtered Indexing + +You can specify a filter function to index a model to Elasticsearch based on some specific conditions. + +Filtering function must return True for conditions that will ignore indexing to Elasticsearch. + +```javascript +var MovieSchema = new Schema({ + title: {type: String}, + genre: {type: String, enum: ['horror', 'action', 'adventure', 'other']} +}); + +MovieSchema.plugin(mongoosastic, { + filter: function(doc) { + return doc.genre === 'action'; + } +}); +``` + +Instances of Movie model having 'action' as their genre will not be indexed to Elasticsearch. + + +### Indexing On Demand +You can do on-demand indexes using the `index` function + +```javascript +Dude.findOne({name:'Jeffery Lebowski', function(err, dude){ + dude.awesome = true; + dude.index(function(err, res){ + console.log("egads! I've been indexed!"); + }); +}); +``` + +The index method takes 2 arguments: + +* `options` (optional) - {index, type} - the index and type to publish to. Defaults to the standard index and type. + the model was setup with. +* `callback` - callback function to be invoked when model has been + indexed. + +Note that indexing a model does not mean it will be persisted to +mongodb. Use save for that. + +### Truncating an index + +The static method `esTruncate` will delete all documents from the associated index. 
This method combined with synchronize can be useful in case of integration tests
Here are examples to the currently possible definitions in mongoosastic: + +```javascript +var ExampleSchema = new Schema({ + // String (core type) + string: {type:String, es_boost:2.0}, + + // Number (core type) + number: {type:Number, es_type:'integer'}, + + // Date (core type) + date: {type:Date, es_type:'date'}, + + // Array type + array: {type:Array, es_type:'string'}, + + // Object type + object: { + field1: {type: String}, + field2: {type: String} + }, + + // Nested type + nested: [SubSchema], + + // Multi field type + multi_field: { + type: String, + es_type: 'multi_field', + es_fields: { + multi_field: { type: 'string', index: 'analyzed' }, + untouched: { type: 'string', index: 'not_analyzed' } + } + }, + + // Geo point type + geo: { + type: String, + es_type: 'geo_point' + }, + + // Geo point type with lat_lon fields + geo_with_lat_lon: { + geo_point: { + type: String, + es_type: 'geo_point', + es_lat_lon: true + }, + lat: { type: Number }, + lon: { type: Number } + } + + geo_shape: { + coordinates : [], + type: {type: String}, + geo_shape: { + type:String, + es_type: "geo_shape", + es_tree: "quadtree", + es_precision: "1km" + } + } + + // Special feature : specify a cast method to pre-process the field before indexing it + someFieldToCast : { + type: String, + es_cast: function(value){ + return value + ' something added'; + } + } + +}); + +// Used as nested schema above. +var SubSchema = new Schema({ + field1: {type: String}, + field2: {type: String} +}); +``` + +### Geo mapping +Prior to index any geo mapped data (or calling the synchronize), +the mapping must be manualy created with the createMapping (see above). + +Notice that the name of the field containing the ES geo data must start by +'geo_' to be recognize as such. 
+ +#### Indexing a geo point + +```javascript +var geo = new GeoModel({ + /* … */ + geo_with_lat_lon: { lat: 1, lon: 2} + /* … */ +}); +``` + +#### Indexing a geo shape + +```javascript +var geo = new GeoModel({ + … + geo_shape:{ + type:'envelope', + coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ + } + … +}); +``` + +Mapping, indexing and searching example for geo shape can be found in test/geo-test.js + +For example, one can retrieve the list of document where the shape contain a specific +point (or polygon...) + +```javascript +var geoQuery = { + "match_all": {} + } + +var geoFilter = { + geo_shape: { + geo_shape: { + shape: { + type: "point", + coordinates: [3,1] + } + } + } + } + +GeoModel.search(geoQuery, {filter: geoFilter}, function(err, res) { /* ... */ }) +``` + +### Creating Mappings On Demand +Creating the mapping is a one time operation and can be done as +follows (using the BookSchema as an example): + +```javascript +var BookSchema = new Schema({ + title: {type:String, es_boost:2.0} + , author: {type:String, es_null_value:"Unknown Author"} + , publicationDate: {type:Date, es_type:'date'} + +BookSchema.plugin(mongoosastic); +var Book = mongoose.model('Book', BookSchema); +Book.createMapping({ + "analysis" : { + "analyzer":{ + "content":{ + "type":"custom", + "tokenizer":"whitespace" + } + } + } +},function(err, mapping){ + // do neat things here +}); + +``` +This feature is still a work in progress. As of this writing you'll have +to manage whether or not you need to create the mapping, mongoosastic +will make no assumptions and simply attempt to create the mapping. If +the mapping already exists, an Exception detailing such will be +populated in the `err` argument. + + +## Queries +The full query DSL of Elasticsearch is exposed through the search +method. 
For example, if you wanted to find all people between ages 21 +and 30: + +```javascript +Person.search({ + range: { + age:{ + from:21 + , to: 30 + } + } +}, function(err, people){ + // all the people who fit the age group are here! +}); + +``` +See the Elasticsearch [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/) docs for more information. + +You can also specify query options like [sorts](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort) + +```javascript +Person.search({/* ... */}, {sort: "age:asc"}, function(err, people){ + //sorted results +}); +``` + +Options for queries must adhere to the [javascript elasticsearch driver specs](http://www.elasticsearch.org/guide/en/elasticsearch/client/javascript-api/current/api-reference.html#api-search). + + +### Hydration +By default objects returned from performing a search will be the objects +as is in Elasticsearch. This is useful in cases where only what was +indexed needs to be displayed (think a list of results) while the actual +mongoose object contains the full data when viewing one of the results. + +However, if you want the results to be actual mongoose objects you can +provide {hydrate:true} as the second argument to a search call. + +```javascript + +User.search({query_string: {query: "john"}}, {hydrate:true}, function(err, results) { + // results here +}); + +``` + +You can also pass in a `hydrateOptions` object with information on +how to query for the mongoose object. + +```javascript + +User.search({query_string: {query: "john"}}, {hydrate:true, hydrateOptions: {select: 'name age'}}, function(err, results) { + // results here +}); + +``` + +Note using hydrate will be a degree slower as it will perform an Elasticsearch +query and then do a query against mongodb for all the ids returned from +the search result. 
+ +You can also default this to always be the case by providing it as a +plugin option (as well as setting default hydrate options): + + +```javascript +var User = new Schema({ + name: {type:String, es_indexed:true} + , email: String + , city: String +}) + +User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) +``` diff --git a/example/blog/app.js b/example/blog/app.js index 70981f09..1086fcd2 100644 --- a/example/blog/app.js +++ b/example/blog/app.js @@ -1,79 +1,73 @@ -var express = require('express') - , routes = require('./routes') - , mongoose = require('mongoose') - , mongoosastic = require('../../lib/mongoosastic') - , Schema = mongoose.Schema; +var express = require('express'), + bodyParser = require('body-parser'), + errorhandler = require('errorhandler'), + mongoose = require('mongoose'), + mongoosastic = require('../../lib/mongoosastic'), + Schema = mongoose.Schema; -var app = module.exports = express.createServer(); +var app = module.exports = express(); // Configuration +app.set('views', __dirname + '/views'); +app.set('view engine', 'jade'); +app.use(bodyParser()); +app.use(express.static(__dirname + '/public')); -app.configure(function(){ - app.set('views', __dirname + '/views'); - app.set('view engine', 'jade'); - app.use(express.bodyParser()); - app.use(express.methodOverride()); - app.use(app.router); - app.use(express.static(__dirname + '/public')); -}); - -app.configure('development', function(){ - app.use(express.errorHandler({ dumpExceptions: true, showStack: true })); -}); - -app.configure('production', function(){ - app.use(express.errorHandler()); -}); +app.use(errorhandler()); // Model -mongoose.connect('mongodb://localhost/silly-blog', function(err){ - console.log(err); +mongoose.connect('mongodb://localhost/silly-blog', function(err) { + if (err) { + console.error(err); + } console.log('connected.... 
unless you see an error the line before this!'); }); + var BlogPostSchema = new Schema({ - title:{type:String, es_boost:2.0} - , content: {type:String} + title: {type: String, es_boost: 2.0}, + content: {type: String} }); BlogPostSchema.plugin(mongoosastic); var BlogPost = mongoose.model('BlogPost', BlogPostSchema); -BlogPost.createMapping(function(err, mapping){ - if(err){ +BlogPost.createMapping(function(err, mapping) { + if (err) { console.log('error creating mapping (you can safely ignore this)'); console.log(err); - }else{ + } else { console.log('mapping created!'); console.log(mapping); } }); - // Routes -app.get('/', function(req, res){ - res.render('index', {title:'Mongoosastic Example'}); +app.get('/', function(req, res) { + res.render('index', {title: 'Mongoosastic Example'}); }); -app.post('/search', function(req, res){ - BlogPost.search({query:req.body.q}, function(err, results){ +app.post('/search', function(req, res) { + BlogPost.search({query_string: {query: req.body.q}}, function(err, results) { res.send(results); }); }); -app.get('/post', function(req, res){ - res.render('post', {title:'New Post'}); + +app.get('/post', function(req, res) { + res.render('post', {title: 'New Post'}); }); -app.post('/post', function(req, res){ - var post = new BlogPost(req.body) - post.save(function(){ + +app.post('/post', function(req, res) { + var post = new BlogPost(req.body); + post.save(function() { res.redirect('/'); - post.on('es-indexed', function(){ + post.on('es-indexed', function() { console.log('document indexed'); }); }); }); -app.listen(3000, function(){ - console.log("Express server listening on port %d in %s mode", app.address().port, app.settings.env); +app.listen(3000, function() { + console.log('Express server listening on port %d in %s mode', 3000, app.settings.env); }); diff --git a/example/blog/package.json b/example/blog/package.json index 84470bfe..404a8a0b 100644 --- a/example/blog/package.json +++ b/example/blog/package.json @@ -1,9 +1,12 @@ 
{ - "name": "application-name" - , "version": "0.0.1" - , "private": true - , "dependencies": { - "express": "2.5.10" - , "jade": ">= 0.0.1" + "name": "blog-mongoosastic-demo", + "version": "1.0.0", + "private": true, + "dependencies": { + "mongoose": "~3.8.x", + "express": "^4.12.x", + "errorhandler": "^1.3.x", + "body-parser": "^1.12.x", + "jade": "^1.9.x" } -} \ No newline at end of file +} diff --git a/example/blog/routes/index.js b/example/blog/routes/index.js deleted file mode 100644 index fd69215b..00000000 --- a/example/blog/routes/index.js +++ /dev/null @@ -1,8 +0,0 @@ - -/* - * GET home page. - */ - -exports.index = function(req, res){ - res.render('index', { title: 'Express' }) -}; \ No newline at end of file diff --git a/example/blog/views/layout.jade b/example/blog/views/layout.jade index 1a369412..37cc98f7 100644 --- a/example/blog/views/layout.jade +++ b/example/blog/views/layout.jade @@ -1,6 +1,5 @@ -!!! html head title= title link(rel='stylesheet', href='/stylesheets/style.css') - body!= body \ No newline at end of file + body!= body diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 1bbf1df4..fb9d9297 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -1,17 +1,30 @@ -function Generator(){ -} -Generator.prototype.generateMapping = function(schema, cb){ - var cleanTree = getCleanTree(schema.tree, schema.paths, ''); - delete cleanTree[schema.get('versionKey')]; - var mapping = getMapping(cleanTree, ''); +// +// Get type from the mongoose schema +// +// Returns the type, so in case none is set, it's the mongoose type. 
+// +// @param paths +// @param field +// @return the type or false +// +function getTypeFromPaths(paths, field) { + var type = false; - cb(null, { properties: mapping }); -}; + if (paths[field] && paths[field].options.type === Date) { + return 'date'; + } -module.exports = Generator; + if (paths[field] && paths[field].options.type === Boolean) { + return 'boolean'; + } + if (paths[field]) { + type = paths[field].instance ? paths[field].instance.toLowerCase() : 'object'; + } + return type; +} // // Generates the mapping @@ -19,71 +32,73 @@ module.exports = Generator; // Can be called recursively. // // @param cleanTree -// @param prefix +// @param inPrefix // @return the mapping // -function getMapping(cleanTree, prefix) { +function getMapping(cleanTree, inPrefix) { var mapping = {}, - value = {}, - implicitFields = [], - hasEs_index = false; + value, field, prop, + implicitFields = [], + hasEsIndex = false, + prefix = inPrefix !== '' ? inPrefix + '.' : inPrefix; - if (prefix !== '') { - prefix = prefix + '.'; - } - - for (var field in cleanTree) { + for (field in cleanTree) { + if (!cleanTree.hasOwnProperty(field)) { + continue; + } value = cleanTree[field]; mapping[field] = {}; mapping[field].type = value.type; // Check if field was explicity indexed, if not keep track implicitly - if(value.es_indexed) { - hasEs_index = true; + if (value.es_indexed) { + hasEsIndex = true; } else if (value.type) { implicitFields.push(field); } - // If there is no type, then it's an object with subfields. - if (!value.type) { + if (typeof value === 'object' && !value.type) { mapping[field].type = 'object'; mapping[field].properties = getMapping(value, prefix + field); - continue; } // If it is a objectid make it a string. 
- if(value.type === 'objectid'){ + if (value.type === 'objectid') { + // do not continue here so we can handle other es_ options mapping[field].type = 'string'; - continue; } - //If indexing a number, and no es_type specified, default to double - if (value.type === 'number' && value['es_type'] === undefined) { + // If indexing a number, and no es_type specified, default to double + if (value.type === 'number' && value.es_type === undefined) { mapping[field].type = 'double'; continue; } // Else, it has a type and we want to map that! - for (var prop in value) { + for (prop in value) { // Map to field if it's an Elasticsearch option - if (prop.indexOf('es_') === 0 && prop !== 'es_indexed') { + if (value.hasOwnProperty(prop) && prop.indexOf('es_') === 0 && prop !== 'es_indexed') { mapping[field][prop.replace(/^es_/, '')] = value[prop]; } } - } - //If one of the fields was explicitly indexed, delete all implicit fields - if (hasEs_index) { - implicitFields.forEach(function(field) { + // if type is never mapped, delete mapping + if (mapping[field].type === undefined) { delete mapping[field]; + } + } + + // If one of the fields was explicitly indexed, delete all implicit fields + if (hasEsIndex) { + implicitFields.forEach(function checkImplicit(implicitField) { + delete mapping[implicitField]; }); } return mapping; } - // // Generates a clean tree // @@ -94,49 +109,46 @@ function getMapping(cleanTree, prefix) { // @param prefix // @return the tree // -function getCleanTree(tree, paths, prefix) { +function getCleanTree(tree, paths, inPrefix) { var cleanTree = {}, - type = '', - value = {}; - - if (prefix !== '') { - prefix = prefix + '.'; - } - - for (var field in tree){ - if (prefix === '' && (field === "id" || field === "_id")) { + type = '', + value = {}, + field, + prop, + key, + geoFound = false, + prefix = inPrefix !== '' ? inPrefix + '.' 
: inPrefix; + + for (field in tree) { + if (prefix === '' && (field === 'id' || field === '_id')) { continue; } type = getTypeFromPaths(paths, prefix + field); value = tree[field]; - if(value.es_indexed === false) { + if (value.es_indexed === false) { continue; } + // Field has some kind of type if (type) { - // If it is an nestec schema + // If it is an nested schema if (value[0]) { - //A nested schema can be just a blank object with no defined paths - if(value[0].tree && value[0].paths){ - cleanTree[field] = getCleanTree(value[0].tree, value[0].paths, ''); - } - // Check for single type arrays (which elasticsearch will treat as the core type i.e. [String] = string) - else if (!paths[field] && prefix) { - if(paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { - cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; + // A nested array can contain complex objects + nestedSchema(paths, field, cleanTree, value, prefix); + } else if (value.type && Array.isArray(value.type)) { + // An object with a nested array + nestedSchema(paths, field, cleanTree, value, prefix); + // Merge top level es settings + for (prop in value) { + // Map to field if it's an Elasticsearch option + if (value.hasOwnProperty(prop) && prop.indexOf('es_') === 0 && prop !== 'es_indexed') { + cleanTree[field][prop] = value[prop]; } - } else if( paths[field].caster && paths[field].caster.instance ) { - cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; - } - else{ - cleanTree[field] = { - type:'object' - }; } - } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array){ + } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array) { cleanTree[field] = {}; cleanTree[field].type = type; } else { @@ -144,12 +156,20 @@ function getCleanTree(tree, paths, prefix) { 
cleanTree[field].type = type; } - // It has no type for some reason + // It has no type for some reason } else { - // Because it is an geo_point object!! - if (typeof value === 'object' && value.geo_point) { - cleanTree[field] = value.geo_point; - continue; + // Because it is an geo_* object!! + if (typeof value === 'object') { + for (key in value) { + if (value.hasOwnProperty(key) && /^geo_/.test(key)) { + cleanTree[field] = value[key]; + geoFound = true; + } + } + + if (geoFound) { + continue; + } } // If it's a virtual type, don't map it @@ -167,31 +187,39 @@ function getCleanTree(tree, paths, prefix) { return cleanTree; } +function nestedSchema(paths, field, cleanTree, value, prefix) { + // A nested array can contain complex objects + if (paths[field] && paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { + cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); + } else if (paths[field] && paths[field].caster && paths[field].caster.instance) { + // Even for simple types the value can be an object if there is other attributes than type + if (typeof value[0] === 'object') { + cleanTree[field] = value[0]; + } else { + cleanTree[field] = {}; + } - -// -// Get type from the mongoose schema -// -// Returns the type, so in case none is set, it's the mongoose type. 
-// -// @param paths -// @param field -// @return the type or false -// -function getTypeFromPaths(paths, field) { - var type = false; - - if (paths[field] && paths[field].options.type === Date) { - return 'date'; + cleanTree[field].type = paths[field].caster.instance.toLowerCase(); + } else if (!paths[field] && prefix) { + if (paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { + cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; + } + } else { + cleanTree[field] = { + type: 'object' + }; } +} - if (paths[field] && paths[field].options.type === Boolean) { - return 'boolean'; - } +function Generator() { +} - if (paths[field]) { - type = paths[field].instance ? paths[field].instance.toLowerCase() : 'object'; - } +Generator.prototype.generateMapping = function generateMapping(schema, cb) { + var cleanTree = getCleanTree(schema.tree, schema.paths, ''), mapping; + delete cleanTree[schema.get('versionKey')]; + mapping = getMapping(cleanTree, ''); + cb(null, {properties: mapping}); +}; + +module.exports = Generator; - return type; -} diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f0c64751..ec907ff7 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,304 +1,594 @@ -var elastical = require('elastical') - , generator = new(require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events'); - -module.exports = function elasticSearchPlugin(schema, options){ - var mapping = getMapping(schema) - , indexName = options && options.index - , typeName = options && options.type - , alwaysHydrate = options && options.hydrate - , defaultHydrateOptions = options && options.hydrateOptions - , _mapping = null - , host = options && options.host ? options.host : 'localhost' - , port = options && options.port ? 
options.port : 9200 - , esClient = new elastical.Client(host, options) - , useRiver = options && options.useRiver; - - if (useRiver) - setUpRiver(schema); - else - setUpMiddlewareHooks(schema); - +var elasticsearch = require('elasticsearch'), + Generator = require('./mapping-generator'), + generator = new Generator(), + serialize = require('./serialize'), + events = require('events'), + util = require('util'), + nop = function nop() {}; + +function isString(subject) { + return typeof subject === 'string'; +} + +function isStringArray(arr) { + return arr.filter && arr.length === (arr.filter(function check(item) { return (typeof item === 'string'); })).length; +} + +function getMapping(schema) { + var retMapping = {}; + generator.generateMapping(schema, function mappingCb(err, mapping) { + retMapping = mapping; + }); + + return retMapping; +} + +function createEsClient(options) { + + var esOptions = {}; + + if (util.isArray(options.hosts)) { + esOptions.host = options.hosts; + } else { + esOptions.host = { + host: options && options.host ? options.host : 'localhost', + port: options && options.port ? options.port : 9200, + protocol: options && options.protocol ? options.protocol : 'http', + auth: options && options.auth ? options.auth : null, + keepAlive: false + }; + } + + esOptions.log = (options ? 
options.log : null); + + return new elasticsearch.Client(esOptions); +} + +function createMappingIfNotPresent(options, cb) { + var client = options.client, + indexName = options.indexName, + typeName = options.typeName, + schema = options.schema, + settings = options.settings; + + generator.generateMapping(schema, function mapper(ignoredErr, mapping) { + var completeMapping = {}; + completeMapping[typeName] = mapping; + client.indices.exists({ index: indexName }, function existsCb(err, exists) { + if (err) { + return cb(err); + } + + if (exists) { + return client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb); + + } + return client.indices.create({ index: indexName, body: settings }, function indexCb(indexErr) { + if (indexErr) { + return cb(indexErr); + } + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb); + }); + }); + }); +} + +function hydrate(res, model, options, cb) { + var results = res.hits, + resultsMap = {}, + mongoose = require('mongoose'), + ids = results.hits.map(function mapHits(result, idx) { + resultsMap[result._id] = idx; + if (typeof result._id === 'string') + return mongoose.mongo.ObjectID(result._id); + else + return result._id; + }), + + query = model.find({_id: {$in: ids}}), + hydrateOptions = options.hydrateOptions; + + // Build Mongoose query based on hydrate options + // Example: {lean: true, sort: '-name', select: 'address name'} + Object.keys(hydrateOptions).forEach(function getOpts(option) { + query[option](hydrateOptions[option]); + }); + + query.exec(function queryCb(err, docs) { + var hits = []; + if (err) { + return cb(err); + } + + docs.forEach(function highlight(doc) { + var idx = resultsMap[doc._id]; + if (options.highlight) { + doc._highlight = results.hits[idx].highlight; + } + + hits[idx] = doc; + }); + + results.hits = hits; + res.hits = results; + cb(null, res); + }); +} + +function deleteByMongoId(options, cb) { + var index = 
options.index, + type = options.type, + client = options.client, + model = options.model, + tries = options.tries; + + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function deleteCb(err, res) { + if (err && err.message.indexOf('404') > -1) { + setTimeout(function delayedDelete() { + if (tries <= 0) { + return cb(err); + } + options.tries = --tries; + deleteByMongoId(options, cb); + }, 500); + } else { + model.emit('es-removed', err, res); + cb(err); + } + }); +} + +function Mongoosastic(schema, pluginOpts) { + var options = pluginOpts || {}; + + var bulkTimeout, bulkBuffer = [], esClient, + mapping = getMapping(schema), + indexName = options && options.index, + typeName = options && options.type, + alwaysHydrate = options && options.hydrate, + defaultHydrateOptions = options && options.hydrateOptions, + bulk = options && options.bulk, + filter = options && options.filter; + + if (options.esClient) { + esClient = options.esClient; + } else { + esClient = createEsClient(options); + } + + function setIndexNameIfUnset(model) { + var modelName = model.toLowerCase(); + if (!indexName) { + indexName = modelName + 's'; + } + + if (!typeName) { + typeName = modelName; + } + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout); + bulkTimeout = undefined; + } + + function bulkAdd(instruction) { + bulkBuffer.push(instruction); + + // Return because we need the doc being indexed + // Before we start inserting + if (instruction.index && instruction.index._index) { + return; + } + + if (bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush(); + clearBulkTimeout(); + } else if (bulkTimeout === undefined) { + bulkTimeout = setTimeout(function delayedBulkAdd() { + schema.statics.flush(); + clearBulkTimeout(); + }, bulk.delay || 1000); + } + } + + function bulkDelete(opts, cb) { + bulkAdd({ + delete: { + _index: opts.index || indexName, + _type: opts.type || typeName, + _id: opts.model._id.toString() + } + }); + cb(); + } + + 
function bulkIndex(opts) { + bulkAdd({ + index: { + _index: opts.index || indexName, + _type: opts.type || typeName, + _id: opts.model._id.toString() + } + }); + bulkAdd(opts.model); + } + + /** - * ElasticSearch Client - */ + * ElasticSearch Client + */ schema.statics.esClient = esClient; /** - * Create the mapping. Takes an optionnal settings parameter and a callback that will be called once + * Create the mapping. Takes an optional settings parameter and a callback that will be called once * the mapping is created - * @param settings String (optional) - * @param callback Function + * @param settings Object (optional) + * @param cb Function */ - schema.statics.createMapping = function(settings, cb) { - if (!cb) { - cb = settings; + schema.statics.createMapping = function createMapping(inSettings, inCb) { + var cb = inCb, settings = inSettings; + if (arguments.length < 2) { + cb = inSettings || nop; settings = undefined; } + setIndexNameIfUnset(this.modelName); - createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb); + + createMappingIfNotPresent({ + client: esClient, + indexName: indexName, + typeName: typeName, + schema: schema, + settings: settings + }, cb); }; /** - * @param indexName String (optional) - * @param typeName String (optional) - * @param callback Function + * @param options Object (optional) + * @param cb Function */ - schema.methods.index = function(index, type, cb){ - if(cb == null && typeof index == 'function'){ - cb = index; - index = null; - }else if (cb == null && typeof type == 'function'){ - cb = type; - type = null + schema.methods.index = function schemaIndex(inOpts, inCb) { + var index, type, serialModel, + cb = inCb, opts = inOpts; + + if (arguments.length < 2) { + cb = inOpts || nop; + options = {}; } - var model = this; - setIndexNameIfUnset(model.constructor.modelName); - esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); - } + + if (filter && 
filter(this)) { + return cb(); + } + + setIndexNameIfUnset(this.constructor.modelName); + + index = opts.index || indexName; + type = opts.type || typeName; + + if (bulk) { + /** + * To serialize in bulk it needs the _id + */ + serialModel = serialize(this, mapping); + serialModel._id = this._id; + + bulkIndex({ + index: index, + type: type, + model: serialModel + }); + setImmediate(cb); + } else { + esClient.index({ + index: index, + type: type, + id: this._id.toString(), + body: serialize(this, mapping) + }, cb); + } + }; /** - * Unset elastic search index - */ - schema.methods.unIndex = function(){ - var model = this; - setIndexNameIfUnset(model.constructor.modelName); - deleteByMongoId(esClient, model, indexName, typeName, 3); - } + * Unset elasticsearch index + * @param options - (optional) options for unIndex + * @param cb - callback when unIndex is complete + */ + schema.methods.unIndex = function unIndex(inOpts, inCb) { + var opts = inOpts, cb = inCb; + + if (arguments.length < 2) { + cb = inOpts || nop; + opts = {}; + } + + setIndexNameIfUnset(this.constructor.modelName); + + opts.index = opts.index || indexName; + opts.type = opts.type || typeName; + opts.model = this; + opts.client = esClient; + opts.tries = 3; + + if (bulk) { + bulkDelete(opts, cb); + } else { + deleteByMongoId(opts, cb); + } + }; /** * Delete all documents from a type/index - * @param callback - callback when truncation is complete + * @param options - (optional) specify index/type + * @param cb - callback when truncation is complete */ - schema.statics.esTruncate = function(cb) { - esClient.delete(indexName, typeName, '', { - query: { + schema.statics.esTruncate = function esTruncate(inOpts, inCb) { + var index, type, + opts = inOpts, cb = inCb; + + if (arguments.length < 2) { + cb = inOpts || nop; + opts = {}; + } + + setIndexNameIfUnset(this.modelName); + + index = opts.index || indexName; + type = opts.type || typeName; + + esClient.deleteByQuery({ + index: index, + type: type, + 
body: { query: { - "match_all": {} + match_all: {} } } - }, function(err, res) { - cb(err); - }); - } + }, cb); + }; /** * Synchronize an existing collection * - * @param callback - callback when synchronization is complete + * @param query - query for documents you want to synchronize */ - schema.statics.synchronize = function(query){ - var model = this - , em = new events.EventEmitter() - , readyToClose - , closeValues = [] - , counter = 0 - , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - ; - - setIndexNameIfUnset(model.modelName); - var stream = model.find(query).stream(); - - stream.on('data', function(doc){ + schema.statics.synchronize = function synchronize(inQuery) { + var em = new events.EventEmitter(), + closeValues = [], + counter = 0, + stream, + query = inQuery || {}, + close = function close() { + em.emit.apply(em, ['close'].concat(closeValues)); + }; + + // Set indexing to be bulk when synchronizing to make synchronizing faster + // Set default values when not present + bulk = bulk || {}; + bulk.delay = bulk.delay || 1000; + bulk.size = bulk.size || 1000; + bulk.batch = bulk.batch || 50; + + setIndexNameIfUnset(this.modelName); + + stream = this.find(query).batchSize(bulk.batch).stream(); + + stream.on('data', function onData(doc) { + stream.pause(); counter++; - doc.save(function(err){ + doc.save(function onSave(err) { if (err) { em.emit('error', err); - return; + return stream.resume(); } - doc.on('es-indexed', function(err, doc){ + + doc.on('es-indexed', function onIndex(indexErr, inDoc) { counter--; - if(err){ - em.emit('error', err); - }else{ - em.emit('data', null, doc); + if (indexErr) { + em.emit('error', indexErr); + } else { + em.emit('data', null, inDoc); } - if (readyToClose && counter === 0) - close() + stream.resume(); }); }); }); - stream.on('close', function(a, b){ - readyToClose = true; - closeValues = [a, b]; - if (counter === 0) - close() + + stream.on('close', function onClose(pA, pB) { + var closeInterval; 
+ closeValues = [pA, pB]; + closeInterval = setInterval(function checkInterval() { + if (counter === 0 && bulkBuffer.length === 0) { + clearInterval(closeInterval); + close(); + bulk = false; + } + }, 1000); }); - stream.on('error', function(err){ + + stream.on('error', function onError(err) { em.emit('error', err); }); + return em; }; + /** * ElasticSearch search function * * @param query - query object to perform search with * @param options - (optional) special search options, such as hydrate - * @param callback - callback called with search results + * @param cb - callback called with search results */ - schema.statics.search = function(query, options, cb){ - var model = this; - setIndexNameIfUnset(model.modelName); + schema.statics.search = function search(inQuery, inOpts, inCb) { + var _this = this, + cb = inCb, + opts = inOpts, + esQuery, + query = inQuery === null ? undefined : inQuery; - if(typeof options != 'object'){ - cb = options; - options = {}; + if (arguments.length === 2) { + cb = arguments[1]; + opts = {}; } - query.index = indexName; - esClient.search(query, function(err, results, res){ - if(err){ - cb(err); - }else{ - if (alwaysHydrate || options.hydrate) { - hydrate(results, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); - }else{ - cb(null, res); + + opts.hydrateOptions = opts.hydrateOptions || defaultHydrateOptions || {}; + + setIndexNameIfUnset(this.modelName); + + esQuery = { + body: { + query: query + }, + index: opts.index || indexName, + type: opts.type || typeName + }; + if (opts.highlight) { + esQuery.body.highlight = opts.highlight; + } + + if (opts.suggest) { + esQuery.body.suggest = opts.suggest; + } + + Object.keys(opts).forEach(function collectKeys(opt) { + if (!opt.match(/(hydrate|sort)/) && opts.hasOwnProperty(opt)) { + esQuery[opt] = opts[opt]; + } + + if (opts.sort) { + if (isString(opts.sort) || isStringArray(opts.sort)) { + esQuery.sort = opts.sort; + } else { + esQuery.body.sort = opts.sort; } + + } + + 
}); + + esClient.search(esQuery, function searchCb(err, res) { + if (err) { + return cb(err); + } + + if (alwaysHydrate || opts.hydrate) { + hydrate(res, _this, opts, cb); + } else { + cb(null, res); } }); }; - schema.statics.refresh = function(cb){ - var model = this; - setIndexNameIfUnset(model.modelName); + schema.statics.esCount = function esCount(inQuery, inCb) { + var cb = inCb, query = inQuery, esQuery; + + setIndexNameIfUnset(this.modelName); - esClient.refresh(indexName, cb); + if (!cb && typeof query === 'function') { + cb = query; + query = null; + } + + esQuery = { + body: { + query: query + }, + index: indexName, + type: typeName + }; + + esClient.count(esQuery, cb); }; - function setIndexNameIfUnset(model){ - var modelName = model.toLowerCase(); - if(!indexName){ - indexName = modelName + "s"; + + schema.statics.flush = function flush(inCb) { + var cb = inCb || nop; + + esClient.bulk({ + body: bulkBuffer + }, cb); + + bulkBuffer = []; + }; + + schema.statics.refresh = function refresh(inOpts, inCb) { + var cb = inCb, + opts = inOpts; + if (arguments.length < 2) { + cb = inOpts || nop; + opts = {}; } - if(!typeName){ - typeName = modelName; + + setIndexNameIfUnset(this.modelName); + esClient.indices.refresh({ + index: opts.index || indexName + }, cb); + }; + + + function postRemove(doc) { + var opts = { + index: indexName, + type: typeName, + tries: 3, + model: doc, + client: esClient + }; + + setIndexNameIfUnset(doc.constructor.modelName); + + if (bulk) { + bulkDelete(opts, nop); + } else { + deleteByMongoId(opts, nop); } } + function postSave(doc) { + if (doc) { + doc.index(function onIndex(err, res) { + if (!filter || !filter(doc)) { + doc.emit('es-indexed', err, res); + } + }); + } + } /** * Use standard Mongoose Middleware hooks * to persist to Elasticsearch */ - function setUpMiddlewareHooks(schema) { - schema.post('remove', function(){ - var model = this; - setIndexNameIfUnset(model.constructor.modelName); - deleteByMongoId(esClient, model, 
indexName, typeName, 3); - }); - + function setUpMiddlewareHooks(inSchema) { /** - * Save in elastic search on save. + * Remove in elasticsearch on remove */ - schema.post('save', function(){ - var model = this; - model.index(function(err, res){ - model.emit('es-indexed', err, res); - }); - }); - } + inSchema.post('remove', postRemove); + inSchema.post('findOneAndRemove', postRemove); - /* - * Experimental MongoDB River functionality - * NOTICE: Only tested with: - * MongoDB V2.4.1 - * Elasticsearch V0.20.6 - * elasticsearch-river-mongodb V1.6.5 - * - https://github.com/richardwilly98/elasticsearch-river-mongodb/ - */ - function setUpRiver(schema) { - schema.statics.river = function(cb) { - var model = this; - setIndexNameIfUnset(model.modelName); - if (!this.db.name) throw "ERROR: "+ model.modelName +".river() call before mongoose.connect" - esClient.putRiver( - 'mongodb', - indexName, - { - type: 'mongodb', - mongodb: { - db: this.db.name, - collection: indexName, - gridfs: (useRiver && useRiver.gridfs) ? useRiver.gridfs : false - }, - index: { - name: indexName, - type: typeName - } - }, cb ); - } + /** + * Save in elasticsearch on save. 
+ */ + inSchema.post('save', postSave); + inSchema.post('findOneAndUpdate', postSave); } -}; - + setUpMiddlewareHooks(schema); -function createMappingIfNotPresent(client, indexName, typeName, schema, settings, cb) { - generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {}; - completeMapping[typeName] = mapping; - client.indexExists(indexName, function(err, exists) { - if (exists) { - client.putMapping(indexName, typeName, completeMapping, cb); - } else { - client.createIndex(indexName, { - settings: settings, - mappings: completeMapping - }, cb); - } - }); - }); } -function hydrate(results, model, options, cb){ - var resultsMap = {} - var ids = results.hits.map(function(a, i){ - resultsMap[a._id] = i - return a._id; - }); - var query = model.find({_id:{$in:ids}}); - - // Build Mongoose query based on hydrate options - // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(options).forEach(function(option){ - query[option](options[option]); - }); - - query.exec(function(err, docs){ - if(err){ - return cb(err); - }else{ - var hits = []; - - docs.forEach(function(doc) { - var i = resultsMap[doc._id] - hits[i] = doc - }) - results.hits = hits; - cb(null, results); - } - }); -} -function getMapping(schema){ - var retMapping = {}; - generator.generateMapping(schema, function(err, mapping){ - retMapping = mapping; - }); - return retMapping; -} -function deleteByMongoId(client, model,indexName, typeName, tries){ - client.delete(indexName, typeName, model._id.toString(), function(err, res){ - if(err && err.message.indexOf('404') > -1){ - setTimeout(function(){ - if(tries <= 0){ - // future issue.. what do we do!? 
- }else{ - deleteByMongoId(client, model, indexName, typeName, --tries); - } - }, 500); - }else{ - model.emit('es-removed', err, res); - } - }); -} +module.exports = Mongoosastic; diff --git a/lib/serialize.js b/lib/serialize.js index bef81e66..c5014905 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -1,33 +1,49 @@ -module.exports = serialize; +module.exports = function serialize(model, mapping) { + var name, outModel; -function serialize(model, mapping) { + function _serializeObject(object, mappingData) { + var serialized = {}, field, val; + for (field in mappingData.properties) { + if (mappingData.properties.hasOwnProperty(field)) { + val = serialize.call(object, object[field], mappingData.properties[field]); + if (val !== undefined) { + serialized[field] = val; + } + } + } + return serialized; + } - if (mapping.properties) { - var serializedForm = {}; + if (mapping.properties && model) { - for (var field in mapping.properties) { - var val = serialize(model[field], mapping.properties[field]); - if (val !== undefined) { - serializedForm[field] = val; - } + if (Array.isArray(model)) { + return model.map(function mapModel(object) { + return _serializeObject(object, mapping); + }); } - return serializedForm; - - } else { - if (mapping.cast && typeof(mapping.cast) !== 'function') - throw new Error('es_cast must be a function'); - model = mapping.cast ? mapping.cast(model) : model; - if (typeof model === 'object' && model !== null) { - var name = model.constructor.name; - if (name === 'ObjectID') { - return model.toString(); - } else if (name === 'Date') { - return new Date(model).toJSON(); - } - return model; - } else { - return model; + return _serializeObject(model, mapping); + + } + + if (mapping.cast && typeof mapping.cast !== 'function') { + throw new Error('es_cast must be a function'); + } + + outModel = mapping.cast ? 
mapping.cast.call(this, model) : model; + if (typeof outModel === 'object' && outModel !== null) { + name = outModel.constructor.name; + if (name === 'ObjectID') { + return outModel.toString(); + } + + if (name === 'Date') { + return new Date(outModel).toJSON(); } + } -} \ No newline at end of file + + return outModel; + +}; + diff --git a/package.json b/package.json index c90295c1..ee5a9766 100644 --- a/package.json +++ b/package.json @@ -2,29 +2,42 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "0.6.0", - "tags":["mongodb", "elastic search", "mongoose", "full text search"], + "version": "3.4.0", + "tags": [ + "mongodb", + "elasticsearch", + "elastic search", + "mongoose", + "full text search" + ], "repository": { "type": "git", - "url": "git://github.com/jamescarr/mongoosastic" + "url": "git://github.com/mongoosastic/mongoosastic" }, - "main":"lib/mongoosastic.js", + "main": "lib/mongoosastic.js", "dependencies": { - "elastical":"0.0.12" - }, - "peerDependencies": { - "mongoose":"3.8.x" + "elasticsearch": "^8.2.0", + "mongoose": "^4.1.8" }, "devDependencies": { - "mocha":"*" - , "should":"*" - , "async":"*" - , "mongoose":"3.8.x" + "async": "^1.4.2", + "babel-eslint": "^4.1.3", + "coveralls": "^2.11.4", + "eslint": "^1.5.1", + "eslint-config-airbnb": "0.0.9", + "istanbul": "^0.3.21", + "mocha": "^2.3.3", + "should": "^7.1.0" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 0.10.0" + }, + "scripts": { + "authors": "./scripts/update_authors.sh", + "lint": "eslint lib", + "mocha": "mocha test/*-test.js -R spec -t 600000", + "test": "npm run lint && npm run coverage", + "coverage": "istanbul cover ./node_modules/mocha/bin/_mocha --report lcovonly -- -R spec test/*-test.js -t 600000 && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js" }, - "scripts":{ - "test":"mocha -R spec -t 20000 -b" - } + "license": "MIT" } diff --git 
a/readme.md b/readme.md deleted file mode 100644 index 22fb8b1a..00000000 --- a/readme.md +++ /dev/null @@ -1,473 +0,0 @@ -# Mongoosastic -[![Build -Status](https://secure.travis-ci.org/jamescarr/mongoosastic.png?branch=master)](http://travis-ci.org/jamescarr/mongoosastic) - -A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept -running into cases where I needed full text search capabilities in my -mongodb based models only to discover mongodb has none. In addition to -full text search, I also needed the ability to filter ranges of data -points in the searches and even highlight matches. For these reasons, -elastic search was a perfect fit and hence this project. - -## Current Version -The current version is ``0.6.0`` - -## Installation - -```bash -npm install mongoosastic - -``` - -Or add it to your package.json - -## Usage - -To make a model indexed into elastic search simply add the plugin. - - -```javascript -var mongoose = require('mongoose') - , mongoosastic = require('mongoosastic') - , Schema = mongoose.Schema - -var User = new Schema({ - name: String - , email: String - , city: String -}) - -User.plugin(mongoosastic) -``` - -This will by default simply use the pluralization of the model name as the index -while using the model name itself as the type. So if you create a new -User object and save it, you can see it by navigating to -http://localhost:9200/users/user/_search (this assumes elasticsearch is -running locally on port 9200). - -The default behavior is all fields get indexed into elasticsearch. 
This can be a little wasteful especially considering that -the document is now just being duplicated between mongodb and -elasticsearch so you should consider opting to index only certain fields by specifying ''es_indexed'' on the -fields you want to store: - - -```javascript -var User = new Schema({ - name: {type:String, es_indexed:true} - , email: String - , city: String -}) - -User.plugin(mongoosastic) -``` - -In this case only the name field -will be indexed for searching. - -####Indexing Nested Models -In order to index nested models you can refer following example. - -```javascript -var Comment = new Schema({ - title: String - , body: String - , author: String -}) - - -var User = new Schema({ - name: {type:String, es_indexed:true} - , email: String - , city: String - , comments: {type:[Comment], es_indexed:true} -}) - -User.plugin(mongoosastic) -``` - -Finally, adding the plugin will add a new method to the model called -search which can be used to make simple to complex searches. - -```javascript - -User.search({query:"john"}, function(err, results) { - // results here -}); - -``` - -### Indexing An Existing Collection -Already have a mongodb collection that you'd like to index using this -plugin? No problem! Simply call the synchronize method on your model to -open a mongoose stream and start indexing documents individually. - -```javascript -var BookSchema = new Schema({ - title: String -}); -BookSchema.plugin(mongoosastic); - -var Book = mongoose.model('Book', BookSchema) - , stream = Book.synchronize() - , count = 0; - -stream.on('data', function(err, doc){ - count++; -}); -stream.on('close', function(){ - console.log('indexed ' + count + ' documents!'); -}); -stream.on('error', function(err){ - console.log(err); -}); -``` - -You can also synchronize a subset of documents based on a query! - -```javascript -var stream = Book.synchronize({author: 'Arthur C. Clarke'}) -``` - -One caveat... synchronization is kinda slow for now. Use with care. 
- -### Per Field Options -Schemas can be configured to have special options per field. These match -with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by elasticsearch with the only difference being they are all prefixed by "es_". - -So for example. If you wanted to index a book model and have the boost -for title set to 2.0 (giving it greater priority when searching) you'd -define it as follows: - -```javascript -var BookSchema = new Schema({ - title: {type:String, es_boost:2.0} - , author: {type:String, es_null_value:"Unknown Author"} - , publicationDate: {type:Date, es_type:'date'} -}); - -``` -This example uses a few other mapping fields... such as null_value and -type (which overrides whatever value the schema type is, useful if you -want stronger typing such as float). - -#### Creating Mappings for These Features -The way this can be mapped in elastic search is by creating a mapping -for the index the model belongs to. Currently to the best of my -knowledge mappings are create once when creating an index and can only -be modified by destroying the index. The optionnal first parameter is -the settings option for the index (for defining analysers for example or whatever is [there](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-update-settings.html). 
- -As such, creating the mapping is a one time operation and can be done as -follows (using the BookSchema as an example): - -```javascript -var BookSchema = new Schema({ - title: {type:String, es_boost:2.0} - , author: {type:String, es_null_value:"Unknown Author"} - , publicationDate: {type:Date, es_type:'date'} - -BookSchema.plugin(mongoosastic); -var Book = mongoose.model('Book', BookSchema); -Book.createMapping({ - "analysis" : { - "analyzer":{ - "content":{ - "type":"custom", - "tokenizer":"whitespace" - } - } - } -},function(err, mapping){ - // do neat things here -}); - -``` -This feature is still a work in progress. As of this writing you'll have -to manage whether or not you need to create the mapping, mongoosastic -will make no assumptions and simply attempt to create the mapping. If -the mapping already exists, an Exception detailing such will be -populated in the `err` argument. - -#### Mapping options -There are various types that can be defined in elasticsearch. Check out http://www.elasticsearch.org/guide/reference/mapping/ for more information. 
Here are examples to the currently possible definitions in mongoosastic: - -```javascript -var ExampleSchema = new Schema({ - // String (core type) - string: {type:String, es_boost:2.0}, - - // Number (core type) - number: {type:Number, es_type:'integer'}, - - // Date (core type) - date: {type:Date, es_type:'date'}, - - // Array type - array: {type:Array, es_type:'string'}, - - // Object type - object: { - field1: {type: String}, - field2: {type: String} - }, - - // Nested type - nested: [SubSchema], - - // Multi field type - multi_field: { - type: String, - es_type: 'multi_field', - es_fields: { - multi_field: { type: 'string', index: 'analyzed' }, - untouched: { type: 'string', index: 'not_analyzed' } - } - }, - - // Geo point type - geo: { - type: String, - es_type: 'geo_point' - }, - - // Geo point type with lat_lon fields - geo_with_lat_lon: { - geo_point: { - type: String, - es_type: 'geo_point', - es_lat_lon: true - }, - lat: { type: Number }, - lon: { type: Number } - } - - // Special feature : specify a cast method to pre-process the field before indexing it - someFieldToCast : { - type: String, - es_cast: function(value){ - return value + ' something added'; - } - } -}); - -// Used as nested schema above. -var SubSchema = new Schema({ - field1: {type: String}, - field2: {type: String} -}); -``` - -### Advanced Queries -The full query DSL of elasticsearch is exposed through the search -method. For example, if you wanted to find all people between ages 21 -and 30: - -```javascript -Person.search({ - query:{ - range: { - age:{ - from:21 - , to: 30 - } - } - } -}, function(err, people){ - // all the people who fit the age group are here! -}); - -``` - -See the elasticsearch [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/) docs for more information. - -### Hydration -By default objects returned from performing a search will be the objects -as is in elastic search. 
This is useful in cases where only what was -indexed needs to be displayed (think a list of results) while the actual -mongoose object contains the full data when viewing one of the results. - -However, if you want the results to be actual mongoose objects you can -provide {hydrate:true} as the second argument to a search call. - -```javascript - -User.search({query:"john"}, {hydrate:true}, function(err, results) { - // results here -}); - -``` - -You can also pass in a `hydrateOptions` object with information on -how to query for the mongoose object. - -```javascript - -User.search({query:"john"}, {hydrate:true, hydrateOptions: {select: 'name age'}}, function(err, results) { - // results here -}); - -``` - -Note using hydrate will be a degree slower as it will perform an elasticsearch -query and then do a query against mongodb for all the ids returned from -the search result. - -You can also default this to always be the case by providing it as a -plugin option (as well as setting default hydrate options): - - -```javascript -var User = new Schema({ - name: {type:String, es_indexed:true} - , email: String - , city: String -}) - -User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) -``` - - -### Indexing On Demand -While developing mongoose I came across a scenario where we needed to be -able to save models (and search them) but a single action would -"publish" those models to be searched from a public site. To address -this I create a new method: `index`. - -#### Usage -Usage is as simple as calling index on an existing model. - -```javascript -Dude.findOne({name:'Jeffery Lebowski', function(err, dude){ - dude.awesome = true; - dude.index(function(err, res){ - console.log("egads! I've been indexed!"); - }); -}); -``` - -The index method takes 3 arguments: - -* `index` (optional) - the index to publish to. Defaults to the index - the model was setup with. -* `type` (optional) - the type to publish as. 
Defaults to the type the - model was setup with. -* `callback` - callback function to be invoked when model has been - indexed. - -Note that indexing a model does not mean it will be persisted to -mongodb. Use save for that. - -### Truncating an index - -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. - -#### Usage - -```javascript -GarbageModel.truncate(function(err){...}); -``` - -### Model.plugin(mongoosastic, options) - -Options are: - -* `index` - the index in elastic search to use. Defaults to the - pluralization of the model name. -* `type` - the type this model represents in elastic search. Defaults - to the model name. -* `host` - the host elastic search is running on -* `port` - the port elastic search is running on -* `auth` - the authentication needed to reach elastic search server. In the standard format of 'username:password' -* `protocol` - the protocol the elastic search server uses. Defaults to http -* `hydrate` - whether or not to lookup results in mongodb before - returning results from a search. Defaults to false. -* `curlDebug` - elastical debugging. Defaults to false. - -Here are all other avaible options invloved in connection to elastic search server: -https://ramv.github.io/node-elastical/docs/classes/Client.html - -Experimental Options: - -#### Specifying Different Index and Type -Perhaps you have an existing index and you want to specify the index and -type used to index your document? No problem!! - -```javascript -var SupervisorSchema = new Schema({ - name: String -, department: String -}); - -SupervisorSchema.plugin(mongoosastic, {index: 'employees', type:'manager'}); - -var Supervisor = mongoose.model('supervisor', SupervisorSchema); - -``` - -## Contributing -Pull requests are always welcome as long as an accompanying test case is -associated. 
- -This project is configured to use [git -flow](https://github.com/nvie/gitflow/) and the following conventions -are used: - -* ``develop`` - represents current active development and can possibly be - unstable. -* ``master`` - pristine copy of repository, represents the currently - stable release found in the npm index. -* ``feature/**`` - represents a new feature being worked on - -If you wish to contribute, the only requirement is to: - -- branch a new feature branch from develop (if you're working on an - issue, prefix it with the issue number) -- make the changes, with accompanying test cases -- issue a pull request against develop branch - -Although I use git flow and prefix feature branches with "feature/" I -don't require this for pull requests... all I care is that the feature -branch name makes sense. - -Pulls requests against master or pull requests branched from master will -be rejected. - -#### Examples -Someone picks up issue #39 on selective indexing. - -Good branch names: -* 39-selective-indexing -* feature/39-selective-indexing - -Someone submits a new feature that allows shard configuration: - -Good branch names: -* feature/shard-configuration -* shard-configuration -* or file an issue, then create a feature branch - -Feel free to ping me if you need help! :) - -### Running Tests -In order to run the tests you will need: - -* An elasticsearch server running on port 9200 -* A mongodb server -* [mocha](http://visionmedia.github.com/mocha/) - -With those installed, running ''npm test'' will run the tests with the -preferred timeout (which is extended for integration tests. - - -## License -[The MIT License](https://tldrlegal.com/l/mit) - -Copyright (c) 2012 James R. 
Carr - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- diff --git a/scripts/update_authors.sh b/scripts/update_authors.sh new file mode 100755 index 00000000..6456127e --- /dev/null +++ b/scripts/update_authors.sh @@ -0,0 +1,13 @@ +#!/bin/sh +git log --reverse --format='%aN <%aE>' | perl -we ' +BEGIN { +%seen = (), @authors = (); +} +while (<>) { +next if $seen{$_}; +$seen{$_} = push @authors, $_; +} +END { +print @authors; +} +' | sort | uniq > AUTHORS diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 1d43eb01..6f18c496 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -1,69 +1,72 @@ -var mongoose = require('mongoose') - , elastical = require('elastical') - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , esClient = new(require('elastical').Client) - , mongoosastic = require('../lib/mongoosastic') - , Tweet = require('./models/tweet'); +var mongoose = require('mongoose'), + config = require('./config'), + Tweet = require('./models/tweet'); -describe('Index Method', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - config.deleteIndexIfExists(['tweets', 'public_tweets'], function(){ - config.createModelAndEnsureIndex(Tweet, { - user: 'jamescarr' - , message: "I know kung-fu!" 
- , post_date: new Date() - }, done); +describe('Index Method', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + config.deleteIndexIfExists(['tweets', 'public_tweets'], function() { + Tweet.remove(function() { + config.createModelAndEnsureIndex(Tweet, { + user: 'jamescarr', + message: 'I know kung-fu!', + post_date: new Date() + }, done); + }); }); }); }); - after(function(done){ - Tweet.remove(function(){ + after(function(done) { + Tweet.remove(function() { mongoose.disconnect(); done(); }); }); - it('should be able to index it directly without saving', function(done){ - Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ + + it('should be able to index it directly without saving', function(done) { + Tweet.findOne({message: 'I know kung-fu!'}, function(err, doc) { doc.message = 'I know nodejitsu!'; - doc.index(function(){ - setTimeout(function(){ - Tweet.search({query:'know'}, function(err, res){ + doc.index(function() { + setTimeout(function() { + Tweet.search({query_string: {query: 'know'}}, function(err1, res) { res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); }); - it('should be able to index to alternative index', function(done){ - Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ + + it('should be able to index to alternative index', function(done) { + Tweet.findOne({message: 'I know kung-fu!'}, function(err, doc) { doc.message = 'I know taebo!'; - doc.index('public_tweets', function(){ - setTimeout(function(){ - esClient.search({index: 'public_tweets', query:'know'}, function(err, results, res){ + doc.index({index: 'public_tweets'}, function() { + setTimeout(function() { + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err1, res) { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); 
}); }); }); - it('should be able to index to alternative index and type', function(done){ - Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ + + it('should be able to index to alternative index and type', function(done) { + Tweet.findOne({message: 'I know kung-fu!'}, function(err, doc) { doc.message = 'I know taebo!'; - doc.index('public_tweets', 'utterings', function(){ - setTimeout(function(){ - esClient.search({index: 'public_tweets', type: 'utterings', query:'know'}, function(err, results, res){ + doc.index({index: 'public_tweets', type: 'utterings'}, function() { + setTimeout(function() { + Tweet.search({query_string: {query: 'know'}}, { + index: 'public_tweets', + type: 'utterings' + }, function(err1, res) { res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); }); + }); diff --git a/test/boost-field-test.js b/test/boost-field-test.js index e9b94108..fe8a9c8b 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -1,36 +1,55 @@ -var mongoose = require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , mongoosastic = require('../lib/mongoosastic'); - +var mongoose = require('mongoose'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client({ + deadTimeout: 0, + keepAlive: false + }), + config = require('./config'), + Schema = mongoose.Schema, + BlogPost, + mongoosastic = require('../lib/mongoosastic'); var TweetSchema = new Schema({ - user: String - , post_date: {type:Date, es_type:'date'} - , message: {type:String} - , title: {type:String, es_boost:2.0} + user: String, + post_date: {type: Date, es_type: 'date'}, + message: {type: String}, + title: {type: String, es_boost: 2.0} }); + TweetSchema.plugin(mongoosastic); -var BlogPost = 
mongoose.model('BlogPost', TweetSchema); -describe('Add Boost Option Per Field', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - BlogPost.remove(function(){ - config.deleteIndexIfExists(['blogposts'], done) +BlogPost = mongoose.model('BlogPost', TweetSchema); + +describe('Add Boost Option Per Field', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + BlogPost.remove(function() { + config.deleteIndexIfExists(['blogposts'], done); }); }); }); - it('should create a mapping with boost field added', function(done){ - BlogPost.createMapping(function(err, mapping){ - esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ - var props = mapping.blogposts.mappings.blogpost.properties; + after(function(done) { + mongoose.disconnect(); + BlogPost.esClient.close(); + esClient.close(); + done(); + }); + + it('should create a mapping with boost field added', function(done) { + BlogPost.createMapping(function() { + esClient.indices.getMapping({ + index: 'blogposts', + type: 'blogpost' + }, function(err, mapping) { + + /* elasticsearch 1.0 & 0.9 support */ + var props = mapping.blogpost !== undefined ? 
+ mapping.blogpost.properties : /* ES 0.9.11 */ + mapping.blogposts.mappings.blogpost.properties; + /* ES 1.0.0 */ + props.title.type.should.eql('string'); props.title.boost.should.eql(2.0); done(); diff --git a/test/bulk-test.js b/test/bulk-test.js new file mode 100644 index 00000000..73e5d71c --- /dev/null +++ b/test/bulk-test.js @@ -0,0 +1,69 @@ +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + Book, + mongoosastic = require('../lib/mongoosastic'); + +var BookSchema = new Schema({ + title: String +}); + + +BookSchema.plugin(mongoosastic, { + bulk: { + size: 100, + delay: 1000 + } +}); + +Book = mongoose.model('Book2', BookSchema); + +describe('Bulk mode', function() { + + before(function(done) { + config.deleteIndexIfExists(['book2s'], function() { + mongoose.connect(config.mongoUrl, function() { + var client = mongoose.connections[0].db; + client.collection('book2s', function() { + Book.remove(done); + }); + }); + }); + }); + + before(function(done) { + async.forEach(config.bookTitlesArray(), function(title, cb) { + new Book({ + title: title + }).save(cb); + }, done); + }); + + before(function(done) { + Book.findOne({ + title: 'American Gods' + }, function(err, book) { + book.remove(done); + }); + }); + + after(function(done) { + mongoose.disconnect(); + Book.esClient.close(); + done(); + + }); + + it('should index all objects and support deletions too', function(done) { + + // This timeout is important, as Elasticsearch is "near-realtime" and the index/deletion takes time that + // needs to be taken into account in these tests + setTimeout(function() { + Book.search({match_all: {}}, function(err, results) { + results.should.have.property('hits').with.property('total', 52); + done(); + }); + }, config.BULK_ACTION_TIMEOUT); + }); +}); diff --git a/test/config.js b/test/config.js index 2fad571a..ee675553 100644 --- a/test/config.js +++ b/test/config.js @@ -1,30 +1,78 @@ -var esClient = 
new(require('elastical').Client) - , async = require('async'); +var elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client({ + host: 'localhost:9200', + deadTimeout: 0, + keepAlive: false + }), + async = require('async'); -const INDEXING_TIMEOUT = 1100; +const INDEXING_TIMEOUT = process.env.INDEXING_TIMEOUT || 2000; +const BULK_ACTION_TIMEOUT = process.env.BULK_ACTION_TIMEOUT || 4000; -module.exports = { - mongoUrl: 'mongodb://localhost/es-test' - , indexingTimeout: INDEXING_TIMEOUT - , deleteIndexIfExists: function(indexes, done){ - async.forEach(indexes, function(index, cb){ - esClient.indexExists(index, function(err, exists){ - if(exists){ - esClient.deleteIndex(index, cb); - }else{ - cb(); - } - }); - }, done); - } - , createModelAndEnsureIndex: createModelAndEnsureIndex -}; -function createModelAndEnsureIndex(model, obj, cb){ - var dude = new model(obj); - dude.save(function(){ - dude.on('es-indexed', function(err, res){ +function deleteIndexIfExists(indexes, done) { + async.forEach(indexes, function(index, cb) { + esClient.indices.exists({ + index: index + }, function(err, exists) { + if (exists) { + esClient.indices.delete({ + index: index + }, cb); + } else { + cb(); + } + }); + }, done); +} + +function createModelAndEnsureIndex(Model, obj, cb) { + var dude = new Model(obj); + dude.save(function() { + dude.on('es-indexed', function() { setTimeout(cb, INDEXING_TIMEOUT); }); }); } + +function createModelAndSave(Model, obj, cb) { + var dude = new Model(obj); + dude.save(cb); +} + +function saveAndWaitIndex(model, cb) { + model.save(function(err) { + if (err) cb(err); + else model.on('es-indexed', cb); + }); +} + +function bookTitlesArray() { + var books = [ + 'American Gods', + 'Gods of the Old World', + 'American Gothic' + ], idx; + for (idx = 0; idx < 50; idx++) { + books.push('ABABABA' + idx); + } + return books; +} + +module.exports = { + mongoUrl: 'mongodb://localhost/es-test', + INDEXING_TIMEOUT: INDEXING_TIMEOUT, + 
BULK_ACTION_TIMEOUT: BULK_ACTION_TIMEOUT, + deleteIndexIfExists: deleteIndexIfExists, + createModelAndEnsureIndex: createModelAndEnsureIndex, + createModelAndSave: createModelAndSave, + saveAndWaitIndex: saveAndWaitIndex, + bookTitlesArray: bookTitlesArray, + getClient: function() { + return esClient; + }, + close: function() { + esClient.close(); + } +}; + diff --git a/test/connection-test.js b/test/connection-test.js new file mode 100644 index 00000000..a2efa74a --- /dev/null +++ b/test/connection-test.js @@ -0,0 +1,131 @@ +var mongoose = require('mongoose'), + async = require('async'), + elasticsearch = require('elasticsearch'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); + +var DummySchema = new Schema({ + text: String +}); + +var Dummy = mongoose.model('Dummy1', DummySchema, 'dummys'); + +function tryDummySearch(model, cb) { + setTimeout(function() { + model.search({ + simple_query_string: { + query: 'Text1' + } + }, { + index: '_all' + }, function(err, results) { + if (err) { + return cb(err); + } + + results.hits.total.should.eql(0); + model.esClient.close(); + cb(err); + }); + }, config.INDEXING_TIMEOUT); + +} + +describe('Elasticsearch Connection', function() { + + before(function(done) { + + mongoose.connect(config.mongoUrl, function() { + Dummy.remove(function() { + config.deleteIndexIfExists(['dummys'], function() { + var dummies = [ + new Dummy({ + text: 'Text1' + }), + new Dummy({ + text: 'Text2' + }) + ]; + async.forEach(dummies, function(item, cb) { + item.save(cb); + }, function() { + setTimeout(done, config.INDEXING_TIMEOUT); + }); + }); + }); + }); + }); + + after(function(done) { + Dummy.remove(); + mongoose.disconnect(); + done(); + }); + + it('should be able to connect with default options', function(done) { + var Dummy2; + + DummySchema.plugin(mongoosastic); + Dummy2= mongoose.model('Dummy2', DummySchema, 'dummys'); + + tryDummySearch(Dummy2, done); + + }); + + it('should be 
able to connect with explicit options', function(done) { + var Dummy3; + + DummySchema.plugin(mongoosastic, { + host: 'localhost', + port: 9200 + }); + + Dummy3 = mongoose.model('Dummy3', DummySchema, 'dummys'); + + tryDummySearch(Dummy3, done); + + }); + + it('should be able to connect with an array of hosts', function(done) { + var Dummy4; + + DummySchema.plugin(mongoosastic, { + hosts: [ + 'localhost:9200', + 'localhost:9200' + ] + }); + + Dummy4 = mongoose.model('Dummy4', DummySchema, 'dummys'); + + tryDummySearch(Dummy4, done); + + }); + + it('should be able to connect with an existing elasticsearch client', function(done) { + + var esClient = new elasticsearch.Client({host: 'localhost:9200'}); + + esClient.ping({ + requestTimeout: 1000 + }, function(err) { + var Dummy5; + + if (err) { + return done(err); + } + + DummySchema.plugin(mongoosastic, { + esClient: esClient + }); + + Dummy5 = mongoose.model('Dummy5', DummySchema, 'dummys'); + + tryDummySearch(Dummy5, done); + }); + + }); + +}); + diff --git a/test/count-test.js b/test/count-test.js new file mode 100644 index 00000000..4ca983ff --- /dev/null +++ b/test/count-test.js @@ -0,0 +1,65 @@ +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + Comment, + mongoosastic = require('../lib/mongoosastic'); + +var CommentSchema = new Schema({ + user: String, + post_date: {type: Date, es_type: 'date'}, + message: {type: String}, + title: {type: String, es_boost: 2.0} +}); + + +CommentSchema.plugin(mongoosastic, { + bulk: { + size: 2, + delay: 100 + } +}); + +Comment = mongoose.model('Comment', CommentSchema); + +describe('Count', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Comment.remove(function() { + config.deleteIndexIfExists(['comments'], function() { + var comments = [ + new Comment({ + user: 'terry', + title: 'Ilikecars' + }), + new Comment({ + user: 'fred', + title: 'Ihatefish' + }) + ]; + 
async.forEach(comments, function(item, cb) { + item.save(cb); + }, function() { + setTimeout(done, config.INDEXING_TIMEOUT); + }); + }); + }); + }); + }); + + after(function() { + mongoose.disconnect(); + Comment.esClient.close(); + }); + + it('should count a type', function(done) { + Comment.esCount({ + term: { + user: 'terry' + } + }, function(err, results) { + results.count.should.eql(1); + done(err); + }); + }); +}); diff --git a/test/filtering-test.js b/test/filtering-test.js new file mode 100644 index 00000000..ffd9f1e9 --- /dev/null +++ b/test/filtering-test.js @@ -0,0 +1,59 @@ +var mongoose = require('mongoose'), + config = require('./config'), + Schema = mongoose.Schema, + Movie, + mongoosastic = require('../lib/mongoosastic'); + +// -- Only index specific field +var MovieSchema = new Schema({ + title: {type: String, required: true, default: '', es_indexed: true}, + genre: {type: String, required: true, default: '', enum: ['horror', 'action', 'adventure', 'other'], es_indexed: true} +}); + + +MovieSchema.plugin(mongoosastic, { + filter: function(self) { + return self.genre === 'action'; + } +}); + +Movie = mongoose.model('Movie', MovieSchema); + +describe('Filter mode', function() { + this.timeout(5000); + + before(function(done) { + config.deleteIndexIfExists(['movies'], function() { + mongoose.connect(config.mongoUrl, function() { + var client = mongoose.connections[0].db; + client.collection('movies', function() { + Movie.remove(done); + }); + }); + }); + }); + + after(function(done) { + mongoose.disconnect(); + Movie.esClient.close(); + done(); + }); + + it('should index horror genre', function(done) { + config.createModelAndEnsureIndex(Movie, {title: 'LOTR', genre: 'horror'}, function() { + Movie.search({term: {genre: 'horror'}}, function(err, results) { + results.hits.total.should.eql(1); + done(); + }); + }); + }); + + it('should not index action genre', function(done) { + config.createModelAndSave(Movie, {title: 'Man in Black', genre: 'action'}, 
function() { + Movie.search({term: {genre: 'action'}}, function(err, results) { + results.hits.total.should.eql(0); + done(); + }); + }); + }); +}); diff --git a/test/geo-test.js b/test/geo-test.js new file mode 100644 index 00000000..1cbe380a --- /dev/null +++ b/test/geo-test.js @@ -0,0 +1,193 @@ +var mongoose = require('mongoose'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client(), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); + +var GeoSchema; +var GeoModel; + +describe('GeoTest', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + config.deleteIndexIfExists(['geodocs'], function() { + + GeoSchema = new Schema({ + myId: Number, + frame: { + coordinates: [], + type: {type: String}, + geo_shape: { + type: String, + es_type: 'geo_shape', + es_tree: 'quadtree', + es_precision: '1km' + } + } + }); + + GeoSchema.plugin(mongoosastic); + GeoModel = mongoose.model('geodoc', GeoSchema); + + GeoModel.createMapping(function() { + GeoModel.remove(function() { + + esClient.indices.getMapping({ + index: 'geodocs', + type: 'geodoc' + }, function(err, mapping) { + (mapping.geodoc !== undefined ? 
+ mapping.geodoc : /* ES 0.9.11 */ + mapping.geodocs.mappings.geodoc /* ES 1.0.0 */ + ).properties.frame.type.should.eql('geo_shape'); + done(); + }); + }); + }); + + }); + }); + }); + + after(function(done) { + GeoModel.esClient.close(); + mongoose.disconnect(); + esClient.close(); + done(); + }); + + it('should be able to create and store geo coordinates', function(done) { + + var geo = new GeoModel({ + myId: 1, + frame: { + type: 'envelope', + coordinates: [[1, 4], [3, 2]] + } + }); + + var geo2 = new GeoModel({ + myId: 2, + frame: { + type: 'envelope', + coordinates: [[2, 3], [4, 0]] + } + }); + + config.saveAndWaitIndex(geo, function(err) { + if (err) { + throw err; + } + + config.saveAndWaitIndex(geo2, function(err2) { + if (err2) { + throw err2; + } + + // Mongodb request + GeoModel.find({}, function(err3, res) { + if (err3) throw err3; + res.length.should.eql(2); + res[0].frame.type.should.eql('envelope'); + res[0].frame.coordinates[0].should.eql([1, 4]); + res[0].frame.coordinates[1].should.eql([3, 2]); + done(); + }); + }); + }); + + }); + + it('should be able to find geo coordinates in the indexes', function(done) { + setTimeout(function() { + // ES request + GeoModel.search({ + match_all: {} + }, {sort: 'myId:asc'}, function(err, res) { + if (err) throw err; + res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); + done(); + }); + }, config.INDEXING_TIMEOUT); + }); + + it('should be able to resync geo coordinates from the database', function(done) { + config.deleteIndexIfExists(['geodocs'], function() { + GeoModel.createMapping(function() { + var stream = GeoModel.synchronize(), + count = 0; + + stream.on('data', function() { + count++; + }); + + stream.on('close', function() { + count.should.eql(2); + + setTimeout(function() { + GeoModel.search({ + match_all: {} + }, {sort: 'myId:asc'}, function(err, res) { + if (err) throw err; + 
res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1, 4], [3, 2]]); + done(); + }); + }, config.INDEXING_TIMEOUT); + }); + }); + }); + }); + + it('should be able to search points inside frames', function(done) { + var geoQuery = { + filtered: { + query: { + match_all: {} + }, + filter: { + geo_shape: { + frame: { + shape: { + type: 'point', + coordinates: [3, 1] + } + } + } + } + } + }; + + setTimeout(function() { + GeoModel.search(geoQuery, function(err1, res1) { + if (err1) throw err1; + res1.hits.total.should.eql(1); + res1.hits.hits[0]._source.myId.should.eql(2); + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [1.5, 2.5]; + GeoModel.search(geoQuery, function(err2, res2) { + if (err2) throw err2; + res2.hits.total.should.eql(1); + res2.hits.hits[0]._source.myId.should.eql(1); + + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [3, 2]; + GeoModel.search(geoQuery, function(err3, res3) { + if (err3) throw err3; + res3.hits.total.should.eql(2); + + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [0, 3]; + GeoModel.search(geoQuery, function(err4, res4) { + if (err4) throw err4; + res4.hits.total.should.eql(0); + done(); + }); + }); + }); + }); + }, config.INDEXING_TIMEOUT); + }); + +}); diff --git a/test/highlight-features-test.js b/test/highlight-features-test.js new file mode 100644 index 00000000..3ca16963 --- /dev/null +++ b/test/highlight-features-test.js @@ -0,0 +1,125 @@ +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + Text, + mongoosastic = require('../lib/mongoosastic'); + +var TextSchema = new Schema({ + title: String, + quote: String +}); + +TextSchema.plugin(mongoosastic); + +Text = mongoose.model('Text', TextSchema); + +describe('Highlight search', function() { + var responses = [ + 'You don\'t see people at their best in this job, said 
Death.', + 'The death of the warrior or the old man or the little child, this I understand, and I take away the', + ' pain and end the suffering. I do not understand this death-of-the-mind', + 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' + ]; + + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Text.remove(function() { + config.deleteIndexIfExists(['texts'], function() { + + // Quotes are from Terry Pratchett's Discworld books + var texts = [ + new Text({ + title: 'The colour of magic', + quote: 'The only reason for walking into the jaws of Death is so\'s you can steal his gold teeth' + }), + new Text({ + title: 'The Light Fantastic', + quote: 'The death of the warrior or the old man or the little child, this I understand, and I take ' + + 'away the pain and end the suffering. I do not understand this death-of-the-mind' + }), + new Text({ + title: 'Equal Rites', + quote: 'Time passed, which, basically, is its job' + }), + new Text({ + title: 'Mort', + quote: 'You don\'t see people at their best in this job, said Death.' 
+ }) + ]; + async.forEach(texts, config.saveAndWaitIndex, function() { + setTimeout(done, config.INDEXING_TIMEOUT); + }); + }); + }); + }); + }); + + after(function(done) { + Text.remove(); + Text.esClient.close(); + mongoose.disconnect(); + done(); + }); + + describe('Highlight without hydrating', function() { + it('should return highlighted text on every hit result', function(done) { + + Text.search({ + match_phrase: { + quote: 'Death' + } + }, { + highlight: { + fields: { + quote: {} + } + } + }, function(err, res) { + + res.hits.total.should.eql(3); + res.hits.hits.forEach(function(text) { + text.should.have.property('highlight'); + text.highlight.should.have.property('quote'); + text.highlight.quote.forEach(function(query) { + responses.should.containEql(query); + }); + }); + + done(); + }); + }); + + }); + + describe('Highlight hydrated results', function() { + it('should return highlighted text on every resulting document', function(done) { + + Text.search({ + match_phrase: { + quote: 'Death' + } + }, { + hydrate: true, + highlight: { + fields: { + quote: {} + } + } + }, function(err, res) { + + res.hits.total.should.eql(3); + res.hits.hits.forEach(function(model) { + model.should.have.property('_highlight'); + model._highlight.should.have.property('quote'); + model._highlight.quote.forEach(function(query) { + responses.should.containEql(query); + }); + }); + + done(); + }); + }); + + }); +}); diff --git a/test/index-test.js b/test/index-test.js index 711bb5f3..f0b2ecd7 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -1,210 +1,320 @@ -var mongoose = require('mongoose') - , elastical = require('elastical') - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , esClient = new(require('elastical').Client) - , mongoosastic = require('../lib/mongoosastic') - , Tweet = require('./models/tweet'); +var mongoose = require('mongoose'), + should = require('should'), + elasticsearch = 
require('elasticsearch'), + esClient = new elasticsearch.Client(), + config = require('./config'), + Schema = mongoose.Schema, + Person, Talk, Bum, + mongoosastic = require('../lib/mongoosastic'), + Tweet = require('./models/tweet'); // -- Only index specific field var TalkSchema = new Schema({ - speaker: String - , year: {type: Number, es_indexed:true} - , title: {type:String, es_indexed:true} - , abstract: {type:String, es_indexed:true} - , bio: String + speaker: String, + year: {type: Number, es_indexed: true}, + title: {type: String, es_indexed: true}, + abstract: {type: String, es_indexed: true}, + bio: String }); -TalkSchema.plugin(mongoosastic) -var Talk = mongoose.model("Talk", TalkSchema); +var BumSchema = new Schema({ + name: String +}); var PersonSchema = new Schema({ - name: {type:String, es_indexed:true} - , phone: {type:String, es_indexed:true} - , address: String - , life: { - born: {type: Number, es_indexed:true} - , died: {type: Number, es_indexed:true} + name: {type: String, es_indexed: true}, + phone: {type: String, es_indexed: true}, + address: String, + life: { + born: {type: Number, es_indexed: true}, + died: {type: Number, es_indexed: true} } }); + +TalkSchema.plugin(mongoosastic); + PersonSchema.plugin(mongoosastic, { - index:'people' -, type: 'dude' -, hydrate: true -, hydrateOptions: {lean: true, sort: '-name', select: 'address name life'} + index: 'people', + type: 'dude', + hydrate: true, + hydrateOptions: {lean: true, sort: '-name', select: 'address name life'} +}); + +BumSchema.plugin(mongoosastic, { + index: 'ms_sample', + type: 'bum' }); -var Person = mongoose.model("Person", PersonSchema); +Person = mongoose.model('Person', PersonSchema); +Talk = mongoose.model('Talk', TalkSchema); +Bum = mongoose.model('bum', BumSchema); + // -- alright let's test this shiznit! 
-describe('indexing', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - Tweet.remove(function(){ - config.deleteIndexIfExists(['tweets', 'talks', 'people', 'public_tweets'], done) +describe('indexing', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Tweet.remove(function() { + config.deleteIndexIfExists(['tweets', 'talks', 'people', 'public_tweets'], done); }); }); }); - after(function(done){ - Tweet.remove(function(){ - mongoose.disconnect(); - done(); - }); + after(function(done) { + mongoose.disconnect(); + Talk.esClient.close(); + Person.esClient.close(); + Bum.esClient.close(); + esClient.close(); + config.deleteIndexIfExists(['tweets', 'talks', 'people'], done); + }); - describe('Creating Index', function(){ - it('should create index if none exists', function(done){ - Tweet.createMapping(function(err, response){ + describe('Creating Index', function() { + it('should create index if none exists', function(done) { + Tweet.createMapping(function(err, response) { + should.exists(response); response.should.not.have.property('error'); done(); }); }); - it('should create index with settings if none exists', function(done){ - Tweet.createMapping({analysis: { - analyzer: { - stem: { - tokenizer: "standard", - filter: ["standard", "lowercase", "stop", "porter_stem"] + + it('should create index with settings if none exists', function(done) { + Tweet.createMapping({ + analysis: { + analyzer: { + stem: { + tokenizer: 'standard', + filter: ['standard', 'lowercase', 'stop', 'porter_stem'] + } } } - } - },function(err, response){ + }, function(err, response) { + should.exists(response); response.should.not.have.property('error'); done(); }); - }); - it('should update index if one already exists', function(done){ - Tweet.createMapping(function(err, response){ + }); + + it('should update index if one already exists', function(done) { + Tweet.createMapping(function(err, response) { 
response.should.not.have.property('error'); done(); }); }); - after(function(done){ - config.deleteIndexIfExists(['tweets', 'talks', 'people'], done) + after(function(done) { + config.deleteIndexIfExists(['tweets', 'talks', 'people'], done); }); + }); - describe('Default plugin', function(){ - before(function(done){ + describe('Default plugin', function() { + before(function(done) { config.createModelAndEnsureIndex(Tweet, { - user: 'jamescarr' - , userId: 1 - , message: "I like Riak better" - , post_date: new Date() + user: 'jamescarr', + userId: 1, + message: 'I like Riak better', + post_date: new Date() }, done); }); - it("should use the model's id as ES id", function(done){ - Tweet.findOne({message:"I like Riak better"}, function(err, doc){ - esClient.get('tweets', doc._id.toString(), function(err, res){ - res.message.should.eql(doc.message); - done() + + it('should use the model\'s id as ES id', function(done) { + Tweet.findOne({message: 'I like Riak better'}, function(err, doc) { + esClient.get({ + index: 'tweets', + type: 'tweet', + id: doc._id.toString() + }, function(_err, res) { + res._source.message.should.eql(doc.message); + done(); }); }); }); - it('should be able to execute a simple query', function(done){ - Tweet.search({query:'Riak'}, function(err, results) { - results.hits.total.should.eql(1) - results.hits.hits[0]._source.message.should.eql('I like Riak better') + it('should be able to execute a simple query', function(done) { + Tweet.search({ + query_string: { + query: 'Riak' + } + }, function(err, results) { + results.hits.total.should.eql(1); + results.hits.hits[0]._source.message.should.eql('I like Riak better'); done(); }); }); - it('should be able to execute a simple query', function(done){ - Tweet.search({query:'jamescarr'}, function(err, results) { - results.hits.total.should.eql(1) - results.hits.hits[0]._source.message.should.eql('I like Riak better') - done() + + it('should be able to execute a simple query', function(done) { + 
Tweet.search({ + query_string: { + query: 'jamescarr' + } + }, function(err, results) { + results.hits.total.should.eql(1); + results.hits.hits[0]._source.message.should.eql('I like Riak better'); + done(); }); }); - it('should report errors', function(done){ - Tweet.search({queriez:'jamescarr'}, function(err, results) { + + it('should reindex when findOneAndUpdate', function(done) { + Tweet.findOneAndUpdate({ + message: 'I like Riak better' + }, { + message: 'I like Jack better' + }, { + new: true + }, function() { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'Jack' + } + }, function(err, results) { + results.hits.total.should.eql(1); + results.hits.hits[0]._source.message.should.eql('I like Jack better'); + done(); + }); + }, config.INDEXING_TIMEOUT); + }); + }); + + it('should be able to execute findOneAndUpdate if document doesn\'t exist', function(done) { + Tweet.findOneAndUpdate({ + message: 'Not existing document' + }, { + message: 'I like Jack better' + }, { + new: true + }, function(err, doc) { + should.not.exist(err); + should.not.exist(doc); + done(); + }); + }); + + it('should report errors', function(done) { + Tweet.search({queriez: 'jamescarr'}, function(err, results) { err.message.should.match(/SearchPhaseExecutionException/); - should.not.exist(results) - done() + should.not.exist(results); + done(); }); }); }); - describe('Removing', function(){ + + describe('Removing', function() { var tweet = null; - beforeEach(function(done){ + beforeEach(function(done) { tweet = new Tweet({ - user:'jamescarr' - , message: 'Saying something I shouldnt' + user: 'jamescarr', + message: 'Saying something I shouldnt' }); config.createModelAndEnsureIndex(Tweet, tweet, done); }); - it('should remove from index when model is removed', function(done){ - tweet.remove(function(){ - setTimeout(function(){ - Tweet.search({query:'shouldnt'}, function(err, res){ - res.hits.total.should.eql(0); - done(); - }); - }, config.indexingTimeout); + + 
it('should remove from index when model is removed', function(done) { + tweet.remove(function() { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'shouldnt' + } + }, function(err, res) { + res.hits.total.should.eql(0); + done(); + }); + }, config.INDEXING_TIMEOUT); }); }); - it('should remove only index', function(done){ - tweet.on('es-removed', function(err, res){ - setTimeout(function(){ - Tweet.search({query:'shouldnt'}, function(err, res){ + + it('should remove only index', function(done) { + tweet.on('es-removed', function() { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'shouldnt' + } + }, function(err, res) { res.hits.total.should.eql(0); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); - tweet.unIndex() + + tweet.unIndex(); }); - it('should queue for later removal if not in index', function(done){ + it('should queue for later removal if not in index', function(done) { // behavior here is to try 3 times and then give up. 
- var tweet = new Tweet({ - user:'jamescarr' - , message: 'ABBA' + var nTweet = new Tweet({ + user: 'jamescarr', + message: 'ABBA' + }); + + nTweet.save(function() { + setTimeout(function() { + nTweet.remove(); + nTweet.on('es-removed', done); + }, 200); }); + }); - tweet.save(function(){ - tweet.remove(); + it('should remove from index when findOneAndRemove', function(done) { + tweet = new Tweet({ + user: 'jamescarr', + message: 'findOneAndRemove' + }); + + config.createModelAndEnsureIndex(Tweet, tweet, function() { + Tweet.findByIdAndRemove(tweet._id, function() { + setTimeout(function() { + Tweet.search({ + query_string: { + query: 'findOneAndRemove' + } + }, function(err, res) { + res.hits.total.should.eql(0); + done(); + }); + }, config.INDEXING_TIMEOUT); + }); }); - tweet.on('es-removed', done); }); }); - describe('Isolated Models', function(){ - before(function(done){ + + describe('Isolated Models', function() { + before(function(done) { var talk = new Talk({ - speaker: '' - , year: 2013 - , title: "Dude" - , abstract: "" - , bio: '' + speaker: '', + year: 2013, + title: 'Dude', + abstract: '', + bio: '' }); var tweet = new Tweet({ - user: 'Dude' - , message: "Go see the big lebowski" - , post_date: new Date() + user: 'Dude', + message: 'Go see the big lebowski', + post_date: new Date() }); - tweet.save(function(){ - talk.save(function(){ - talk.on('es-indexed', function(err, res){ - setTimeout(done, config.indexingTimeout); + tweet.save(function() { + talk.save(function() { + talk.on('es-indexed', function() { + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); }); - it('should only find models of type Tweet', function(done){ - Tweet.search({query:'Dude'}, function(err, res){ + it('should only find models of type Tweet', function(done) { + Tweet.search({query_string: {query: 'Dude'}}, function(err, res) { res.hits.total.should.eql(1); res.hits.hits[0]._source.user.should.eql('Dude'); done(); }); }); - it('should only find models of type Talk', 
function(done){ - Talk.search({query:'Dude'}, function(err, res){ + + it('should only find models of type Talk', function(done) { + Talk.search({query_string: {query: 'Dude'}}, function(err, res) { res.hits.total.should.eql(1); res.hits.hits[0]._source.title.should.eql('Dude'); done(); @@ -212,41 +322,42 @@ describe('indexing', function(){ }); }); - describe('Always hydrate', function(){ - before(function(done){ + describe('Always hydrate', function() { + before(function(done) { config.createModelAndEnsureIndex(Person, { - name: 'James Carr' - , address: "Exampleville, MO" - , phone: '(555)555-5555' + name: 'James Carr', + address: 'Exampleville, MO', + phone: '(555)555-5555' }, done); }); - it('when gathering search results while respecting default hydrate options', function(done){ - Person.search({query:'James'}, function(err, res) { - res.hits[0].address.should.eql('Exampleville, MO'); - res.hits[0].name.should.eql('James Carr'); - res.hits[0].should.not.have.property('phone'); - res.hits[0].should.not.be.an.instanceof(Person); + it('when gathering search results while respecting default hydrate options', function(done) { + Person.search({query_string: {query: 'James'}}, function(err, res) { + res.hits.hits[0].address.should.eql('Exampleville, MO'); + res.hits.hits[0].name.should.eql('James Carr'); + res.hits.hits[0].should.not.have.property('phone'); + res.hits.hits[0].should.not.be.an.instanceof(Person); done(); }); }); }); - describe('Subset of Fields', function(){ - before(function(done){ - config.createModelAndEnsureIndex(Talk,{ - speaker: 'James Carr' - , year: 2013 - , title: "Node.js Rocks" - , abstract: "I told you node.js was cool. Listen to me!" - , bio: 'One awesome dude.' + + describe('Subset of Fields', function() { + before(function(done) { + config.createModelAndEnsureIndex(Talk, { + speaker: 'James Carr', + year: 2013, + title: 'Node.js Rocks', + abstract: 'I told you node.js was cool. Listen to me!', + bio: 'One awesome dude.' 
}, done); }); - it('should only return indexed fields', function(done){ - Talk.search({query:'cool'}, function(err, res) { - res.hits.total.should.eql(1); - + it('should only return indexed fields', function(done) { + Talk.search({query_string: {query: 'cool'}}, function(err, res) { var talk = res.hits.hits[0]._source; + + res.hits.total.should.eql(1); talk.should.have.property('title'); talk.should.have.property('year'); talk.should.have.property('abstract'); @@ -256,56 +367,56 @@ describe('indexing', function(){ }); }); - it('should hydrate returned documents if desired', function(done){ - Talk.search({query:'cool'}, {hydrate:true}, function(err, res) { - res.total.should.eql(1) + it('should hydrate returned documents if desired', function(done) { + Talk.search({query_string: {query: 'cool'}}, {hydrate: true}, function(err, res) { + var talk = res.hits.hits[0]; - var talk = res.hits[0] - talk.should.have.property('title') + res.hits.total.should.eql(1); + talk.should.have.property('title'); talk.should.have.property('year'); - talk.should.have.property('abstract') - talk.should.have.property('speaker') - talk.should.have.property('bio') + talk.should.have.property('abstract'); + talk.should.have.property('speaker'); + talk.should.have.property('bio'); talk.should.be.an.instanceof(Talk); done(); }); }); - describe('Sub-object Fields', function(){ - before(function(done){ + describe('Sub-object Fields', function() { + before(function(done) { config.createModelAndEnsureIndex(Person, { - name: 'Bob Carr' - , address: "Exampleville, MO" - , phone: '(555)555-5555' - , life: { born: 1950, other: 2000 } + name: 'Bob Carr', + address: 'Exampleville, MO', + phone: '(555)555-5555', + life: {born: 1950, other: 2000} }, done); }); - it('should only return indexed fields and have indexed sub-objects', function(done){ - Person.search({query:'Bob'}, function(err, res) { - res.hits[0].address.should.eql('Exampleville, MO'); - res.hits[0].name.should.eql('Bob Carr'); - 
res.hits[0].should.have.property('life'); - res.hits[0].life.born.should.eql(1950); - res.hits[0].life.should.not.have.property('died'); - res.hits[0].life.should.not.have.property('other'); - res.hits[0].should.not.have.property('phone'); - res.hits[0].should.not.be.an.instanceof(Person); + it('should only return indexed fields and have indexed sub-objects', function(done) { + Person.search({query_string: {query: 'Bob'}}, function(err, res) { + res.hits.hits[0].address.should.eql('Exampleville, MO'); + res.hits.hits[0].name.should.eql('Bob Carr'); + res.hits.hits[0].should.have.property('life'); + res.hits.hits[0].life.born.should.eql(1950); + res.hits.hits[0].life.should.not.have.property('died'); + res.hits.hits[0].life.should.not.have.property('other'); + res.hits.hits[0].should.not.have.property('phone'); + res.hits.hits[0].should.not.be.an.instanceof(Person); done(); }); }); }); - it('should allow extra query options when hydrating', function(done){ - Talk.search({query:'cool'}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { - res.total.should.eql(1) + it('should allow extra query options when hydrating', function(done) { + Talk.search({query_string: {query: 'cool'}}, {hydrate: true, hydrateOptions: {lean: true}}, function(err, res) { + var talk = res.hits.hits[0]; - var talk = res.hits[0] - talk.should.have.property('title') + res.hits.total.should.eql(1); + talk.should.have.property('title'); talk.should.have.property('year'); - talk.should.have.property('abstract') - talk.should.have.property('speaker') - talk.should.have.property('bio') + talk.should.have.property('abstract'); + talk.should.have.property('speaker'); + talk.should.have.property('bio'); talk.should.not.be.an.instanceof(Talk); done(); }); @@ -313,30 +424,28 @@ describe('indexing', function(){ }); - describe('Existing Index', function(){ - before(function(done){ - config.deleteIndexIfExists(['ms_sample'], function(){ - esClient.createIndex('ms_sample', {mappings:{ - bum:{ 
- properties: { - name: {type:'string'} + describe('Existing Index', function() { + before(function(done) { + config.deleteIndexIfExists(['ms_sample'], function() { + esClient.indices.create({ + index: 'ms_sample', + body: { + mappings: { + bum: { + properties: { + name: {type: 'string'} + } + } } } - }}, done); + }, done); }); }); - it('should just work', function(done){ - var BumSchema = new Schema({ - name: String - }); - BumSchema.plugin(mongoosastic, { - index: 'ms_sample' - , type: 'bum' - }); - var Bum = mongoose.model('bum', BumSchema); - config.createModelAndEnsureIndex(Bum, {name:'Roger Wilson'}, function(){ - Bum.search({query:'Wilson'}, function(err, results){ + it('should just work', function(done) { + + config.createModelAndEnsureIndex(Bum, {name: 'Roger Wilson'}, function() { + Bum.search({query_string: {query: 'Wilson'}}, function(err, results) { results.hits.total.should.eql(1); done(); }); @@ -345,5 +454,3 @@ describe('indexing', function(){ }); }); - - diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js index 0d51207c..7cf400b5 100644 --- a/test/mapping-generator-test.js +++ b/test/mapping-generator-test.js @@ -1,108 +1,114 @@ -var Generator = require('../lib/mapping-generator') - , mongoose = require('mongoose') - , should = require('should') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , generator = new Generator(); +var mongoose = require('mongoose'), + should = require('should'), + Schema = mongoose.Schema, + Generator = require('../lib/mapping-generator'), + generator = new Generator(); -describe('MappingGenerator', function(){ +describe('MappingGenerator', function() { - describe('type mapping', function(){ - it('maps field with simple String type', function(done){ + describe('type mapping', function() { + it('maps field with simple String type', function(done) { generator.generateMapping(new Schema({ name: String - }), function(err, mapping){ + }), function(err, mapping) { 
mapping.properties.name.type.should.eql('string'); done(); }); }); - it('maps field with String type attribute', function(done){ + it('maps field with String type attribute', function(done) { generator.generateMapping(new Schema({ - name: {type:String} - }), function(err, mapping){ + name: {type: String} + }), function(err, mapping) { mapping.properties.name.type.should.eql('string'); done(); }); }); - it('converts Date type to date', function(done){ + + it('converts Date type to date', function(done) { generator.generateMapping(new Schema({ - graduationDate: {type:Date, es_format: 'YYYY-MM-dd'} - }), function(err, mapping){ + graduationDate: {type: Date, es_format: 'YYYY-MM-dd'} + }), function(err, mapping) { mapping.properties.graduationDate.type.should.eql('date'); done(); }); }); - it('removes _id field without prefix', function(done){ + + it('removes _id field without prefix', function(done) { generator.generateMapping(new Schema({ _id: {type: Schema.Types.ObjectId}, user: { _id: {type: Schema.Types.ObjectId}, name: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.should.not.have.property('_id'); done(); }); }); - it('does not remove _id field with prefix', function(done){ + + it('does not remove _id field with prefix', function(done) { generator.generateMapping(new Schema({ _id: {type: Schema.Types.ObjectId}, user: { _id: {type: Schema.Types.ObjectId}, name: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.user.properties.should.have.property('_id'); done(); }); }); - it('converts object id to string if not _id', function(done){ + + it('converts object id to string if not _id', function(done) { generator.generateMapping(new Schema({ - oid: {type:Schema.Types.ObjectId} - }), function(err, mapping){ + oid: {type: Schema.Types.ObjectId} + }), function(err, mapping) { mapping.properties.oid.type.should.eql('string'); done(); }); }); - it('recognizes an object and maps 
it as one', function(done){ + + it('recognizes an object and maps it as one', function(done) { generator.generateMapping(new Schema({ contact: { - email: {type: String}, - telephone: {type: String} + email: {type: String}, + telephone: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.contact.properties.email.type.should.eql('string'); mapping.properties.contact.properties.telephone.type.should.eql('string'); done(); }); }); - it('recognizes an object and handles explict es_indexed', function(done){ + + it('recognizes an object and handles explict es_indexed', function(done) { generator.generateMapping(new Schema({ name: {type: String, es_indexed: true}, contact: { - email: {type: String, es_indexed: true}, - telephone: {type: String} + email: {type: String, es_indexed: true}, + telephone: {type: String} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.type.should.eql('string'); mapping.properties.contact.properties.email.type.should.eql('string'); mapping.properties.contact.properties.should.not.have.property('telephone'); done(); }); }); - it('recognizes an multi_field and maps it as one', function(done){ + + it('recognizes an multi_field and maps it as one', function(done) { generator.generateMapping(new Schema({ test: { type: String, es_include_in_all: false, es_type: 'multi_field', es_fields: { - test: { type: 'string', index: 'analyzed' }, - untouched: { type: 'string', index: 'not_analyzed' } + test: {type: 'string', index: 'analyzed'}, + untouched: {type: 'string', index: 'not_analyzed'} } } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.test.type.should.eql('multi_field'); mapping.properties.test.fields.test.type.should.eql('string'); mapping.properties.test.fields.test.index.should.eql('analyzed'); @@ -111,18 +117,20 @@ describe('MappingGenerator', function(){ done(); }); }); - it('recognizes an geo_point and maps it as one', 
function(done){ + + it('recognizes an geo_point and maps it as one', function(done) { generator.generateMapping(new Schema({ geo: { type: String, es_type: 'geo_point' } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.geo.type.should.eql('geo_point'); done(); }); }); - it('recognizes an geo_point with independent lat lon fields and maps it as one', function(done){ + + it('recognizes an geo_point with independent lat lon fields and maps it as one', function(done) { generator.generateMapping(new Schema({ geo_with_lat_lon: { geo_point: { @@ -130,43 +138,100 @@ describe('MappingGenerator', function(){ es_type: 'geo_point', es_lat_lon: true }, - lat: { type: Number }, - lon: { type: Number } + lat: {type: Number}, + lon: {type: Number} } - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.geo_with_lat_lon.type.should.eql('geo_point'); mapping.properties.geo_with_lat_lon.lat_lon.should.eql(true); done(); }); }); - it('recognizes an nested schema and maps it', function(done){ + + it('recognizes an nested schema and maps it', function(done) { var NameSchema = new Schema({ first_name: {type: String}, last_name: {type: String} }); generator.generateMapping(new Schema({ name: [NameSchema] - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.type.should.eql('object'); mapping.properties.name.properties.first_name.type.should.eql('string'); mapping.properties.name.properties.last_name.type.should.eql('string'); done(); }); }); - it('excludes a virtual property from mapping', function(done){ + + it('recognizes an es_type of nested with es_fields and maps it', function(done) { + var NameSchema = new Schema({ + first_name: {type: String, es_index: 'not_analyzed'}, + last_name: {type: String, es_index: 'not_analyzed'} + }); + generator.generateMapping(new Schema({ + name: {type: [NameSchema], es_indexed: true, es_type: 'nested', es_include_in_parent: true} + }), function(err, mapping) { + 
mapping.properties.name.type.should.eql('nested'); + mapping.properties.name.include_in_parent.should.eql(true); + mapping.properties.name.properties.first_name.type.should.eql('string'); + mapping.properties.name.properties.first_name.index.should.eql('not_analyzed'); + mapping.properties.name.properties.last_name.type.should.eql('string'); + mapping.properties.name.properties.last_name.index.should.eql('not_analyzed'); + should.not.exist(mapping.properties.name.properties.es_include_in_parent); + should.not.exist(mapping.properties.name.properties.es_type); + done(); + }); + }); + + it('recognizes a nested array with a simple type and maps it as a simple attribute', function(done) { + generator.generateMapping(new Schema({ + contacts: [String] + }), function(err, mapping) { + mapping.properties.contacts.type.should.eql('string'); + done(); + }); + }); + + it('recognizes a nested array with a simple type and additional attributes and maps it as a simple attribute', function(done) { + generator.generateMapping(new Schema({ + contacts: [{type: String, es_index: 'not_analyzed'}] + }), function(err, mapping) { + mapping.properties.contacts.type.should.eql('string'); + mapping.properties.contacts.index.should.eql('not_analyzed'); + done(); + }); + }); + + it('recognizes a nested array with a complex object and maps it', function(done) { + generator.generateMapping(new Schema({ + name: String, + contacts: [{ + email: {type: String, es_index: 'not_analyzed'}, + telephone: String + }] + }), function(err, mapping) { + mapping.properties.name.type.should.eql('string'); + mapping.properties.contacts.properties.email.type.should.eql('string'); + mapping.properties.contacts.properties.email.index.should.eql('not_analyzed'); + mapping.properties.contacts.properties.telephone.type.should.eql('string'); + done(); + }); + }); + + it('excludes a virtual property from mapping', function(done) { var PersonSchema = new Schema({ first_name: {type: String}, last_name: {type: String}, 
age: {type: Number} }); - PersonSchema.virtual('birthYear').set(function (year) { + PersonSchema.virtual('birthYear').set(function(year) { this.age = new Date().getFullYear() - year; - }) + }); generator.generateMapping(new Schema({ name: [PersonSchema] - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.name.properties.first_name.type.should.eql('string'); mapping.properties.name.properties.last_name.type.should.eql('string'); mapping.properties.name.properties.age.type.should.eql('double'); @@ -176,30 +241,32 @@ describe('MappingGenerator', function(){ }); }); - describe('elastic search fields', function(){ - it('type can be overridden', function(done){ + describe('elastic search fields', function() { + it('type can be overridden', function(done) { generator.generateMapping(new Schema({ - name: {type:String, es_type:'date'} - }), function(err, mapping){ + name: {type: String, es_type: 'date'} + }), function(err, mapping) { mapping.properties.name.type.should.eql('date'); done(); }); }); - it('adds the boost field', function(done){ + + it('adds the boost field', function(done) { generator.generateMapping(new Schema({ - name: {type:String, es_boost:2.2} - }), function(err, mapping){ + name: {type: String, es_boost: 2.2} + }), function(err, mapping) { mapping.properties.name.boost.should.eql(2.2); done(); }); }); - it('respects schemas with explicit es_indexes', function(done){ + + it('respects schemas with explicit es_indexes', function(done) { generator.generateMapping(new Schema({ implicit_field_1: {type: String}, explicit_field_1: {type: Number, es_indexed: true}, implicit_field_2: {type: Number}, explicit_field_2: {type: String, es_indexed: true} - }), function(err, mapping){ + }), function(err, mapping) { mapping.properties.should.have.property('explicit_field_1'); mapping.properties.should.have.property('explicit_field_2'); mapping.properties.should.not.have.property('implicit_field_1'); @@ -207,15 +274,17 @@ 
describe('MappingGenerator', function(){ done(); }); }); + it('maps all fields when schema has no es_indexed flag', function(done) { generator.generateMapping(new Schema({ implicit_field_1: {type: String}, - implicit_field_2: {type: Number}, - }), function(err, mapping){ + implicit_field_2: {type: Number} + }), function(err, mapping) { mapping.properties.should.have.property('implicit_field_1'); mapping.properties.should.have.property('implicit_field_2'); done(); }); }); + }); }); diff --git a/test/models/tweet.js b/test/models/tweet.js index f991c3ad..952aa500 100644 --- a/test/models/tweet.js +++ b/test/models/tweet.js @@ -1,15 +1,20 @@ -var mongoose = require('mongoose') - , Schema = mongoose.Schema - , mongoosastic = require('../../lib/mongoosastic'); +var mongoose = require('mongoose'), + Schema = mongoose.Schema, + config = require('../config'), + mongoosastic = require('../../lib/mongoosastic'); // -- simplest indexing... index all fields var TweetSchema = new Schema({ - user: String - , userId: Number - , post_date: Date - , message: String + user: String, + userId: Number, + post_date: Date, + message: String }); -TweetSchema.plugin(mongoosastic) +TweetSchema.plugin(mongoosastic, { + index: 'tweets', + type: 'tweet', + esClient: config.getClient() +}); module.exports = mongoose.model('Tweet', TweetSchema); diff --git a/test/search-features-test.js b/test/search-features-test.js index 6954b07f..b05af409 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -1,67 +1,159 @@ -var mongoose = require('mongoose') - , elastical = require('elastical') - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , async = require('async') - , mongoosastic = require('../lib/mongoosastic'); - -var esClient = new elastical.Client(); +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + Bond, + mongoosastic = 
require('../lib/mongoosastic'); + var BondSchema = new Schema({ - name: String - , type: {type:String, default:'Other Bond'} - , price: Number + name: String, + type: {type: String, default: 'Other Bond'}, + price: Number }); + BondSchema.plugin(mongoosastic); -var Bond = mongoose.model('Bond', BondSchema); +Bond = mongoose.model('Bond', BondSchema); -describe('Query DSL', function(){ - before(function(done){ - mongoose.connect(config.mongoUrl, function(){ - Bond.remove(function(){ - config.deleteIndexIfExists(['bonds'], function(){ +describe('Query DSL', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + Bond.remove(function() { + config.deleteIndexIfExists(['bonds'], function() { var bonds = [ - new Bond({name:'Bail', type:'A', price:10000}) - , new Bond({name:'Commercial', type:'B', price:15000}) - , new Bond({name:'Construction', type:'B', price:20000}) - , new Bond({name:'Legal', type:'C', price:30000}) + new Bond({name: 'Bail', type: 'A', price: 10000}), + new Bond({name: 'Commercial', type: 'B', price: 15000}), + new Bond({name: 'Construction', type: 'B', price: 20000}), + new Bond({name: 'Legal', type: 'C', price: 30000}) ]; - async.forEach(bonds, save, function(){ - setTimeout(done, config.indexingTimeout); + async.forEach(bonds, config.saveAndWaitIndex, function() { + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); }); }); - after(function(done){ - Bond.remove(done); + + after(function(done) { + Bond.remove(); + Bond.esClient.close(); + mongoose.disconnect(); + done(); }); - describe('range', function(){ - it('should be able to find within range', function(done){ + + describe('range', function() { + it('should be able to find within range', function(done) { Bond.search({ - query:{ - range: { - price:{ - from:20000 - , to: 30000 - } + range: { + price: { + from: 20000, + to: 30000 } } - }, function(err, res){ + }, function(err, res) { res.hits.total.should.eql(2); - res.hits.hits.forEach(function(bond){ + 
res.hits.hits.forEach(function(bond) { ['Legal', 'Construction'].should.containEql(bond._source.name); }); + done(); }); }); }); -}); -function save(model, cb){ - model.save(); - model.on('es-indexed', cb); -} + describe('Sort', function() { + + var getNames = function(res) { return res._source.name; }; + var expectedDesc = ['Legal', 'Construction', 'Commercial', 'Bail']; + var expectedAsc = expectedDesc.concat([]).reverse(); // clone and reverse + + describe('Simple sort', function() { + + it('should be able to return all data, sorted by name ascending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: 'name:asc' + }, function(err, res) { + res.hits.total.should.eql(4); + expectedAsc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + + it('should be able to return all data, sorted by name descending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: ['name:desc'] + }, function(err, res) { + res.hits.total.should.eql(4); + expectedDesc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + }); + + describe('Complex sort', function() { + + it('should be able to return all data, sorted by name ascending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: { + name: { order: 'asc' } + } + }, function(err, res) { + res.hits.total.should.eql(4); + expectedAsc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + + it('should be able to return all data, sorted by name descending', function(done) { + Bond.search({ + match_all: {} + }, { + sort: { + name: { order: 'desc' }, + type: { order: 'asc' } + } + }, function(err, res) { + res.hits.total.should.eql(4); + expectedDesc.should.eql(res.hits.hits.map(getNames)); + + done(); + }); + }); + }); + + }); + + describe('test', function() { + + it('should do a fuzzy query', function(done) { + var getNames = function(res) { return res._source.name; }; + + Bond.search({ + match: { + name: { + query: 'comersial', + fuzziness: 2 + } + } + }, 
function(err, res) { + + res.hits.total.should.eql(1); + ['Commercial'].should.eql(res.hits.hits.map(getNames)); + done(); + }); + + }); + + }); + +}); diff --git a/test/serialize-test.js b/test/serialize-test.js index 5eb0c3e5..0d6ea027 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -1,24 +1,22 @@ -var should = require('should') - , generator = new (require('../lib/mapping-generator')) - , serialize = require('../lib/serialize') - , mongoose = require('mongoose') - , Schema = mongoose.Schema - , ObjectId = Schema.Types.ObjectId; +var mongoose = require('mongoose'), + Generator = require('../lib/mapping-generator'), + generator = new Generator(), + serialize = require('../lib/serialize'), + Schema = mongoose.Schema; -var BowlingBall = mongoose.model('BowlingBall', new Schema({ - -})); +var BowlingBall = mongoose.model('BowlingBall', new Schema()); var PersonSchema22 = new Schema({ name: { - first: String - , last: String + first: String, + last: String }, dob: Date, - bowlingBall: {type:Schema.ObjectId, ref:'BowlingBall'}, - somethingToCast : { + bowlingBall: {type: Schema.ObjectId, ref: 'BowlingBall'}, + games: [{score: Number, date: Date}], + somethingToCast: { type: String, - es_cast: function(element){ - return element+' has been cast'; + es_cast: function(element) { + return element + ' has been cast'; } } }); @@ -32,34 +30,55 @@ generator.generateMapping(PersonSchema22, function(err, tmp) { mapping = tmp; }); -describe('serialize', function(){ +describe('serialize', function() { var dude = new Person({ - name: {first:'Jeffery', last:'Lebowski'}, + name: {first: 'Jeffrey', last: 'Lebowski'}, dob: new Date(Date.parse('05/17/1962')), bowlingBall: new BowlingBall(), + games: [{score: 80, date: new Date(Date.parse('05/17/1962'))}, { + score: 80, + date: new Date(Date.parse('06/17/1962')) + }], somethingToCast: 'Something' }); - describe('with no indexed fields', function(){ + + // another person with missing parts to test robustness + var 
millionnaire = new Person({ + name: {first: 'Jeffrey', last: 'Lebowski'} + }); + + it('should serialize a document with missing bits', function() { + var serialized = serialize(millionnaire, mapping); + serialized.should.have.property('games', []); + }); + + describe('with no indexed fields', function() { var serialized = serialize(dude, mapping); - it('should serialize model fields', function(){ - serialized.name.first.should.eql('Jeffery'); + it('should serialize model fields', function() { + serialized.name.first.should.eql('Jeffrey'); serialized.name.last.should.eql('Lebowski'); }); - it('should serialize object ids as strings', function(){ - serialized.bowlingBall.should.not.eql(dude.bowlingBall); - serialized.bowlingBall.should.be.type('string'); + + it('should serialize object ids as strings', function() { + serialized.bowlingBall.should.eql(dude.bowlingBall); + serialized.bowlingBall.should.be.type('object'); }); - it('should serialize dates in ISO 8601 format', function(){ - serialized.dob.should.eql(dude.dob.toJSON()) + it('should serialize dates in ISO 8601 format', function() { + serialized.dob.should.eql(dude.dob.toJSON()); }); - it('should cast and serialize field', function(){ - serialized.somethingToCast.should.eql('Something has been cast') - }); + it('should serialize nested arrays', function() { + serialized.games.should.have.lengthOf(2); + serialized.games[0].should.have.property('score', 80); + }); + + it('should cast and serialize field', function() { + serialized.somethingToCast.should.eql('Something has been cast'); + }); }); - describe('indexed fields', function(){ + describe('indexed fields', function() { }); }); diff --git a/test/suggesters-test.js b/test/suggesters-test.js new file mode 100644 index 00000000..96d9aa42 --- /dev/null +++ b/test/suggesters-test.js @@ -0,0 +1,97 @@ +var mongoose = require('mongoose'), + elasticsearch = require('elasticsearch'), + esClient = new elasticsearch.Client({ + deadTimeout: 0, + keepAlive: false + 
}), + async = require('async'), + config = require('./config'), + Schema = mongoose.Schema, + mongoosastic = require('../lib/mongoosastic'); + +var KittenSchema; +var Kitten; + +describe('Suggesters', function() { + before(function(done) { + mongoose.connect(config.mongoUrl, function() { + config.deleteIndexIfExists(['kittens'], function() { + KittenSchema = new Schema({ + name: {type: String, es_type: 'completion', es_index_analyzer: 'simple', es_search_analyzer: 'simple', es_indexed: true}, + breed: {type: String } + }); + KittenSchema.plugin(mongoosastic); + Kitten = mongoose.model('Kitten', KittenSchema); + Kitten.createMapping({}, function() { + Kitten.remove(function() { + var kittens = [ + new Kitten({ + name: 'Cookie', + breed: 'Aegean' + }), + new Kitten({ + name: 'Chipmunk', + breed: 'Aegean' + }), + new Kitten({ + name: 'Twix', + breed: 'Persian' + }), + new Kitten({ + name: 'Cookies and Cream', + breed: 'Persian' + }) + ]; + async.forEach(kittens, config.saveAndWaitIndex, function() { + setTimeout(done, config.INDEXING_TIMEOUT); + }); + }); + }); + }); + }); + }); + + after(function(done) { + Kitten.esClient.close(); + mongoose.disconnect(); + esClient.close(); + done(); + }); + + describe('Testing Suggest', function() { + it('should index property name with type completion', function(done) { + + Kitten = mongoose.model('Kitten', KittenSchema); + Kitten.createMapping(function() { + esClient.indices.getMapping({ + index: 'kittens', + type: 'kitten' + }, function(err, mapping) { + var props = mapping.kitten !== undefined ? 
/* elasticsearch 1.0 & 0.9 support */ + mapping.kitten.properties : /* ES 0.9.11 */ + mapping.kittens.mappings.kitten.properties; /* ES 1.0.0 */ + props.name.type.should.eql('completion'); + done(); + }); + }); + }); + it('should return suggestions after hits', function(done) { + Kitten.search({ + match_all: {} + }, { + suggest: { + kittensuggest: { + text: 'Cook', + completion: { + field: 'name' + } + } + } + }, function(err, res) { + res.should.have.property('suggest'); + res.suggest.kittensuggest[0].options.length.should.eql(2); + done(); + }); + }); + }); +}); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index f7b42237..d55f8a11 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -1,69 +1,65 @@ -var mongoose = require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) - , should = require('should') - , config = require('./config') - , Schema = mongoose.Schema - , ObjectId = Schema.ObjectId - , async = require('async') - , mongoosastic = require('../lib/mongoosastic'); +var mongoose = require('mongoose'), + async = require('async'), + config = require('./config'), + mongoosastic = require('../lib/mongoosastic'), + Book, + Schema = mongoose.Schema; var BookSchema = new Schema({ title: String }); + BookSchema.plugin(mongoosastic); -var Book = mongoose.model('Book', BookSchema); +Book = mongoose.model('Book', BookSchema); -describe('Synchronize', function(){ +describe('Synchronize', function() { var books = null; - before(function(done){ - config.deleteIndexIfExists(['books'], function(){ - mongoose.connect(config.mongoUrl, function(){ + before(function(done) { + config.deleteIndexIfExists(['books'], function() { + mongoose.connect(config.mongoUrl, function() { var client = mongoose.connections[0].db; - client.collection('books', function(err, _books){ + client.collection('books', function(err, _books) { books = _books; Book.remove(done); }); }); }); }); - describe('existing 
collection', function(){ - before(function(done){ - async.forEach(bookTitles() - , function(title, cb){ - books.insert({title:title}, cb); + + after(function(done) { + Book.esClient.close(); + mongoose.disconnect(); + done(); + }); + + describe('existing collection', function() { + + before(function(done) { + async.forEach(config.bookTitlesArray(), function(title, cb) { + books.insert({title: title}, cb); }, done); }); - it('should index all existing objects', function(done){ - var stream = Book.synchronize() - , count = 0; - stream.on('data', function(err, doc){ + it('should index all existing objects', function(done) { + var stream = Book.synchronize(), + count = 0; + + stream.on('data', function() { count++; }); - stream.on('close', function(){ + stream.on('close', function() { count.should.eql(53); - setTimeout(function(){ - Book.search({query:'American'}, function(err, results){ + setTimeout(function() { + Book.search({query_string: {query: 'American'}}, function(err, results) { results.hits.total.should.eql(2); done(); }); - }, config.indexingTimeout); + }, config.INDEXING_TIMEOUT); }); }); + }); }); -function bookTitles(){ - var books = [ - 'American Gods', - 'Gods of the Old World', - 'American Gothic' - ]; - for(var i = 0; i < 50; i++){ - books.push('ABABABA'+i); - } - return books; -} diff --git a/test/truncate-test.js b/test/truncate-test.js index cafcfc20..77a6de83 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -1,19 +1,17 @@ var mongoose = require('mongoose'), - elastical = require('elastical'), - esClient = new(require('elastical').Client), - should = require('should'), + async = require('async'), config = require('./config'), Schema = mongoose.Schema, - ObjectId = Schema.ObjectId, - async = require('async'), + Dummy, mongoosastic = require('../lib/mongoosastic'); var DummySchema = new Schema({ text: String }); + DummySchema.plugin(mongoosastic); -var Dummy = mongoose.model('Dummy', DummySchema); +Dummy = mongoose.model('Dummy', 
DummySchema); describe('Truncate', function() { before(function(done) { @@ -26,25 +24,32 @@ describe('Truncate', function() { }), new Dummy({ text: 'Text2' - }), + }) ]; async.forEach(dummies, function(item, cb) { item.save(cb); }, function() { - setTimeout(done, config.indexingTimeout); + setTimeout(done, config.INDEXING_TIMEOUT); }); }); }); }); }); + after(function(done) { - Dummy.remove(done); + Dummy.remove(); + Dummy.esClient.close(); + mongoose.disconnect(); + done(); }); - describe('truncate', function() { + + describe('esTruncate', function() { it('should be able to truncate all documents', function(done) { - Dummy.esTruncate(function(err) { + Dummy.esTruncate(function() { Dummy.search({ - query: 'Text1' + query_string: { + query: 'Text1' + } }, function(err, results) { results.hits.total.should.eql(0); done(err); @@ -52,4 +57,4 @@ describe('Truncate', function() { }); }); }); -}); \ No newline at end of file +});