diff --git a/CMakeLists.txt b/CMakeLists.txt
index 7d1f8273..6f83c07f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,9 +1,26 @@
 cmake_minimum_required(VERSION 2.8)
 project(DBoW2)
+include(ExternalProject)
 
 option(BUILD_DBoW2 "Build DBoW2" ON)
 option(BUILD_Demo "Build demo application" ON)
 
+if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+  set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." FORCE)
+  set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release"
+    "MinSizeRel" "RelWithDebInfo")
+endif()
+
+if(MSVC)
+  if(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
+    string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+  else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4")
+  endif()
+elseif(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -pedantic")
+endif()
+
 set(HDRS
   include/DBoW2/BowVector.h include/DBoW2/FBrief.h
   include/DBoW2/FSurf64.h include/DBoW2/QueryResults.h
   include/DBoW2/TemplatedDatabase.h include/DBoW2/FORB.h
@@ -13,12 +30,41 @@ set(SRCS
   src/BowVector.cpp src/FBrief.cpp src/FSurf64.cpp src/FORB.cpp
   src/FeatureVector.cpp src/QueryResults.cpp src/ScoringObject.cpp)
 
+set(DEPENDENCY_DIR ${CMAKE_CURRENT_BINARY_DIR}/dependencies)
+set(DEPENDENCY_INSTALL_DIR ${DEPENDENCY_DIR}/install)
+
 find_package(OpenCV REQUIRED)
-find_package(DLib REQUIRED)
+include_directories(${OpenCV_INCLUDE_DIRS})
+
+find_package(Boost REQUIRED)
+include_directories(${Boost_INCLUDE_DIR})
+
+find_package(DLib QUIET
+  PATHS ${DEPENDENCY_INSTALL_DIR})
+if(${DLib_FOUND})
+  message("DLib library found, using it from the system")
+  include_directories(${DLib_INCLUDE_DIRS})
+  add_custom_target(Dependencies)
+else(${DLib_FOUND})
+  message("DLib library not found in the system, it will be downloaded on build")
+  option(DOWNLOAD_DLib_dependency "Download DLib dependency" ON)
+  if(${DOWNLOAD_DLib_dependency})
+    ExternalProject_Add(DLib
+      PREFIX ${DEPENDENCY_DIR}
+      GIT_REPOSITORY http://github.com/dorian3d/DLib
+      GIT_TAG v1.1-nonfree
+      INSTALL_DIR ${DEPENDENCY_INSTALL_DIR}
+      CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=)
+    add_custom_target(Dependencies ${CMAKE_COMMAND} ${CMAKE_SOURCE_DIR} DEPENDS DLib)
+  else()
+    message(SEND_ERROR "Please, activate DOWNLOAD_DLib_dependency option or download manually")
+  endif(${DOWNLOAD_DLib_dependency})
+endif(${DLib_FOUND})
 
 if(BUILD_DBoW2)
-  include_directories(include/DBoW2/ ${OpenCV_INCLUDE_DIRS} ${DLib_INCLUDE_DIRS})
   add_library(${PROJECT_NAME} SHARED ${SRCS})
+  include_directories(include/DBoW2/)
+  add_dependencies(${PROJECT_NAME} Dependencies)
   target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} ${DLib_LIBS})
 endif(BUILD_DBoW2)
@@ -39,3 +85,4 @@ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/DBoW2Config.cmake"
   DESTINATION ${CMAKE_INSTALL_PREFIX}/include/${PROJECT_NAME})
 install(FILES "${PROJECT_BINARY_DIR}/DBoW2Config.cmake"
   DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/cmake/DBoW2/)
+install(DIRECTORY ${DEPENDENCY_INSTALL_DIR}/ DESTINATION ${CMAKE_INSTALL_PREFIX} OPTIONAL)
diff --git a/README.md b/README.md
index 06cc0c25..5fbceec7 100644
--- a/README.md
+++ b/README.md
@@ -34,7 +34,7 @@ If you use this software in an academic work, please cite:
 
 ## Installation notes
 
-DBoW2 requires [DLib](https://github.com/dorian3d/DLib), which you can find in [my repository](https://github.com/dorian3d/DLib).
+DBoW2 requires [DLib](https://github.com/dorian3d/DLib), which is automatically installed if it cannot be found in the system. You can also find it in [my repository](https://github.com/dorian3d/DLib).
 
 DBoW2 requires OpenCV and the `Boost::dynamic_bitset` class in order to use the BRIEF version. You can install Boost by typing:
 
diff --git a/demo/demo.cpp b/demo/demo.cpp
index b26f8e2a..3e4c1474 100644
--- a/demo/demo.cpp
+++ b/demo/demo.cpp
@@ -12,16 +12,13 @@
 
 // DBoW2
 #include "DBoW2.h" // defines Surf64Vocabulary and Surf64Database
-#include "DUtils.h"
-#include "DUtilsCV.h" // defines macros CVXX
-#include "DVision.h"
+#include
+#include
 
 // OpenCV
-#include
-#include
-#if CV24
-#include
-#endif
+#include
+#include
+#include
 
 using namespace DBoW2;
 
@@ -76,7 +73,7 @@ void loadFeatures(vector > > &features)
   features.clear();
   features.reserve(NIMAGES);
 
-  cv::SURF surf(400, 4, 2, EXTENDED_SURF);
+  cv::Ptr surf = cv::xfeatures2d::SURF::create(400, 4, 2, EXTENDED_SURF);
 
   cout << "Extracting SURF features..." << endl;
   for(int i = 0; i < NIMAGES; ++i)
@@ -89,10 +86,10 @@ void loadFeatures(vector > > &features)
     vector keypoints;
     vector descriptors;
 
-    surf(image, mask, keypoints, descriptors);
+    surf->detectAndCompute(image, mask, keypoints, descriptors);
 
     features.push_back(vector >());
-    changeStructure(descriptors, features.back(), surf.descriptorSize());
+    changeStructure(descriptors, features.back(), surf->descriptorSize());
   }
 }
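(Aside, not part of the patch: the demo's new extraction path, reduced to a standalone sketch. It assumes an OpenCV 3.x build with the opencv_contrib xfeatures2d module; the SURF parameters 400, 4, 2, EXTENDED_SURF mirror the demo above, while the image path and return codes are illustrative placeholders.)

```cpp
#include <vector>

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/xfeatures2d/nonfree.hpp>

int main()
{
  // Placeholder input image, loaded in grayscale as the demo does.
  cv::Mat image = cv::imread("images/image0.png", cv::IMREAD_GRAYSCALE);
  if(image.empty()) return 1;

  // Same parameters the demo passes: Hessian threshold 400, 4 octaves,
  // 2 layers per octave, extended (128-float) descriptors on/off.
  const bool EXTENDED_SURF = false;
  cv::Ptr<cv::xfeatures2d::SURF> surf =
    cv::xfeatures2d::SURF::create(400, 4, 2, EXTENDED_SURF);

  std::vector<cv::KeyPoint> keypoints;
  std::vector<float> descriptors;

  // detectAndCompute() replaces the old cv::SURF function-call operator;
  // an empty Mat means "no mask".
  surf->detectAndCompute(image, cv::Mat(), keypoints, descriptors);

  return keypoints.empty() ? 1 : 0;
}
```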
diff --git a/include/DBoW2/BowVector.h b/include/DBoW2/BowVector.h
index f559811d..670b635a 100644
--- a/include/DBoW2/BowVector.h
+++ b/include/DBoW2/BowVector.h
@@ -49,7 +49,7 @@ enum ScoringType
   CHI_SQUARE,
   KL,
   BHATTACHARYYA,
-  DOT_PRODUCT,
+  DOT_PRODUCT
 };
 
 /// Vector of words to represent images
diff --git a/include/DBoW2/DBoW2.h b/include/DBoW2/DBoW2.h
index 2ded3f3a..75c5766f 100644
--- a/include/DBoW2/DBoW2.h
+++ b/include/DBoW2/DBoW2.h
@@ -16,7 +16,7 @@
  * Written by Dorian Galvez-Lopez,
  * University of Zaragoza
  *
- * Check my website to obtain updates: http://webdiis.unizar.es/~dorian
+ * Check my website to obtain updates: http://doriangalvez.com
 *
  * \section requirements Requirements
  * This library requires the DUtils, DUtilsCV, DVision and OpenCV libraries,
diff --git a/include/DBoW2/FBrief.h b/include/DBoW2/FBrief.h
index 5231b544..891b0aa1 100644
--- a/include/DBoW2/FBrief.h
+++ b/include/DBoW2/FBrief.h
@@ -10,7 +10,7 @@
 #ifndef __D_T_F_BRIEF__
 #define __D_T_F_BRIEF__
 
-#include
+#include
 
 #include
 #include
diff --git a/include/DBoW2/FClass.h b/include/DBoW2/FClass.h
index 592e1bdf..ff792b8a 100644
--- a/include/DBoW2/FClass.h
+++ b/include/DBoW2/FClass.h
@@ -10,7 +10,7 @@
 #ifndef __D_T_FCLASS__
 #define __D_T_FCLASS__
 
-#include
+#include
 
 #include
 #include
diff --git a/include/DBoW2/FORB.h b/include/DBoW2/FORB.h
index bc0ba80f..19e25443 100644
--- a/include/DBoW2/FORB.h
+++ b/include/DBoW2/FORB.h
@@ -10,7 +10,7 @@
 #ifndef __D_T_F_ORB__
 #define __D_T_F_ORB__
 
-#include
+#include
 
 #include
 #include
diff --git a/include/DBoW2/FSurf64.h b/include/DBoW2/FSurf64.h
index 36d0a8b4..a47d2c48 100644
--- a/include/DBoW2/FSurf64.h
+++ b/include/DBoW2/FSurf64.h
@@ -10,7 +10,7 @@
 #ifndef __D_T_F_SURF_64__
 #define __D_T_F_SURF_64__
 
-#include
+#include
 
 #include
 #include
diff --git a/include/DBoW2/TemplatedDatabase.h b/include/DBoW2/TemplatedDatabase.h
index 867165ad..96cbe8e6 100644
--- a/include/DBoW2/TemplatedDatabase.h
+++ b/include/DBoW2/TemplatedDatabase.h
@@ -25,8 +25,6 @@
 
 #include
 
-using namespace std;
-
 namespace DBoW2 {
 
 // For query functions
@@ -131,7 +129,7 @@ class TemplatedDatabase
    * @param fvec if given, the vector of nodes and feature indexes is returned
    * @return id of new entry
    */
-  EntryId add(const vector &features,
+  EntryId add(const std::vector &features,
     BowVector *bowvec = NULL, FeatureVector *fvec = NULL);
 
   /**
@@ -175,7 +173,7 @@ class TemplatedDatabase
    * @param max_id only entries with id <= max_id are returned in ret.
    *   < 0 means all
    */
-  void query(const vector &features, QueryResults &ret,
+  void query(const std::vector &features, QueryResults &ret,
     int max_results = 1, int max_id = -1) const;
 
   /**
@@ -201,13 +199,13 @@ class TemplatedDatabase
    * Stores the database in a file
    * @param filename
    */
-  void save(const string &filename) const;
+  void save(const std::string &filename) const;
 
   /**
    * Loads the database from a file
    * @param filename
   */
-  void load(const string &filename);
+  void load(const std::string &filename);
 
   /**
    * Stores the database in the given file storage structure
@@ -326,7 +324,7 @@ class TemplatedDatabase
 
 template
 TemplatedDatabase::TemplatedDatabase
   (bool use_di, int di_levels)
-  : m_voc(NULL), m_use_di(use_di), m_dilevels(di_levels)
+  : m_voc(NULL), m_use_di(use_di), m_dilevels(di_levels), m_nentries(0)
 {
 }
@@ -402,7 +400,7 @@ TemplatedDatabase& TemplatedDatabase::operator=
 
 template
 EntryId TemplatedDatabase::add(
-  const vector &features,
+  const std::vector &features,
   BowVector *bowvec, FeatureVector *fvec)
 {
   BowVector aux;
@@ -440,7 +438,7 @@ EntryId TemplatedDatabase::add(const BowVector &v,
   EntryId entry_id = m_nentries++;
 
   BowVector::const_iterator vit;
-  vector::const_iterator iit;
+  std::vector::const_iterator iit;
 
   if(m_use_di)
   {
@@ -569,7 +567,7 @@ inline int TemplatedDatabase::getDirectIndexLevels() const
 
 template
 void TemplatedDatabase::query(
-  const vector &features,
+  const std::vector &features,
   QueryResults &ret, int max_results, int max_id) const
 {
   BowVector vec;
@@ -623,8 +621,8 @@ void TemplatedDatabase::queryL1(const BowVector &vec,
   BowVector::const_iterator vit;
   typename IFRow::const_iterator rit;
 
-  map pairs;
-  map::iterator pit;
+  std::map pairs;
+  std::map::iterator pit;
 
   for(vit = vec.begin(); vit != vec.end(); ++vit)
   {
@@ -652,7 +650,7 @@ void TemplatedDatabase::queryL1(const BowVector &vec,
       else
       {
         pairs.insert(pit,
-          map::value_type(entry_id, value));
+          std::map::value_type(entry_id, value));
       }
 
     }
@@ -669,7 +667,7 @@ void TemplatedDatabase::queryL1(const BowVector &vec,
   // resulting "scores" are now in [-2 best .. 0 worst]
 
   // sort vector in ascending order of score
-  sort(ret.begin(), ret.end());
+  std::sort(ret.begin(), ret.end());
   // (ret is inverted now --the lower the better--)
 
   // cut vector
@@ -695,8 +693,8 @@ void TemplatedDatabase::queryL2(const BowVector &vec,
   BowVector::const_iterator vit;
   typename IFRow::const_iterator rit;
 
-  map pairs;
-  map::iterator pit;
+  std::map pairs;
+  std::map::iterator pit;
 
   //map counters;
   //map::iterator cit;
@@ -729,7 +727,7 @@ void TemplatedDatabase::queryL2(const BowVector &vec,
       else
       {
         pairs.insert(pit,
-          map::value_type(entry_id, value));
+          std::map::value_type(entry_id, value));
 
         //counters.insert(cit,
         //  map::value_type(entry_id, 1));
       }
@@ -750,7 +748,7 @@ void TemplatedDatabase::queryL2(const BowVector &vec,
   // resulting "scores" are now in [-1 best .. 0 worst]
 
   // sort vector in ascending order of score
-  sort(ret.begin(), ret.end());
+  std::sort(ret.begin(), ret.end());
   // (ret is inverted now --the lower the better--)
 
   // cut vector
@@ -783,11 +781,11 @@ void TemplatedDatabase::queryChiSquare(const BowVector &vec,
   BowVector::const_iterator vit;
   typename IFRow::const_iterator rit;
 
-  map > pairs;
-  map >::iterator pit;
+  std::map > pairs;
+  std::map >::iterator pit;
 
-  map > sums; // < sum vi, sum wi >
-  map >::iterator sit;
+  std::map > sums; // < sum vi, sum wi >
+  std::map >::iterator sit;
 
   // In the current implementation, we suppose vec is not normalized
@@ -830,14 +828,14 @@ void TemplatedDatabase::queryChiSquare(const BowVector &vec,
       else
       {
         pairs.insert(pit,
-          map >::value_type(entry_id,
-            make_pair(value, 1) ));
+          std::map >::value_type(entry_id,
+            std::make_pair(value, 1) ));
 
         //expected.insert(eit,
         //  map::value_type(entry_id, dvalue));
 
         sums.insert(sit,
-          map >::value_type(entry_id,
-            make_pair(qvalue, dvalue) ));
+          std::map >::value_type(entry_id,
+            std::make_pair(qvalue, dvalue) ));
       }
 
     }
@@ -866,7 +864,7 @@ void TemplatedDatabase::queryChiSquare(const BowVector &vec,
   // we have to add +2 to the scores to obtain the chi square score
 
   // sort vector in ascending order of score
-  sort(ret.begin(), ret.end());
+  std::sort(ret.begin(), ret.end());
   // (ret is inverted now --the lower the better--)
 
   // cut vector
@@ -894,8 +892,8 @@ void TemplatedDatabase::queryKL(const BowVector &vec,
   BowVector::const_iterator vit;
   typename IFRow::const_iterator rit;
 
-  map pairs;
-  map::iterator pit;
+  std::map pairs;
+  std::map::iterator pit;
 
   for(vit = vec.begin(); vit != vec.end(); ++vit)
   {
@@ -924,7 +922,7 @@ void TemplatedDatabase::queryKL(const BowVector &vec,
      else
      {
        pairs.insert(pit,
-          map::value_type(entry_id, value));
+          std::map::value_type(entry_id, value));
      }
 
    }
@@ -966,7 +964,7 @@ void TemplatedDatabase::queryKL(const BowVector &vec,
 
   // sort vector in ascending order
   // (scores are inverted now --the lower the better--)
-  sort(ret.begin(), ret.end());
+  std::sort(ret.begin(), ret.end());
 
   // cut vector
   if(max_results > 0 && (int)ret.size() > max_results)
@@ -988,8 +986,8 @@ void TemplatedDatabase::queryBhattacharyya(
   //map pairs;
   //map::iterator pit;
 
-  map > pairs; // >
-  map >::iterator pit;
+  std::map > pairs; // >
+  std::map >::iterator pit;
 
   for(vit = vec.begin(); vit != vec.end(); ++vit)
   {
@@ -1018,8 +1016,8 @@ void TemplatedDatabase::queryBhattacharyya(
       else
      {
        pairs.insert(pit,
-          map >::value_type(entry_id,
-            make_pair(value, 1)));
+          std::map >::value_type(entry_id,
+            std::make_pair(value, 1)));
      }
 
    }
@@ -1041,7 +1039,7 @@ void TemplatedDatabase::queryBhattacharyya(
   // scores are already in [0..1]
 
   // sort vector in descending order
-  sort(ret.begin(), ret.end(), Result::gt);
+  std::sort(ret.begin(), ret.end(), Result::gt);
 
   // cut vector
   if(max_results > 0 && (int)ret.size() > max_results)
@@ -1058,8 +1056,8 @@ void TemplatedDatabase::queryDotProduct(
   BowVector::const_iterator vit;
   typename IFRow::const_iterator rit;
 
-  map pairs;
-  map::iterator pit;
+  std::map pairs;
+  std::map::iterator pit;
 
   for(vit = vec.begin(); vit != vec.end(); ++vit)
   {
@@ -1091,7 +1089,7 @@ void TemplatedDatabase::queryDotProduct(
      else
      {
        pairs.insert(pit,
-          map::value_type(entry_id, value));
+          std::map::value_type(entry_id, value));
      }
 
    }
@@ -1108,7 +1106,7 @@ void TemplatedDatabase::queryDotProduct(
   // scores are the greater the better
 
   // sort vector in descending order
-  sort(ret.begin(), ret.end(), Result::gt);
+  std::sort(ret.begin(), ret.end(), Result::gt);
 
   // cut vector
   if(max_results > 0 && (int)ret.size() > max_results)
@@ -1130,10 +1128,10 @@ const FeatureVector& TemplatedDatabase::retrieveFeatures
 // --------------------------------------------------------------------------
 
 template
-void TemplatedDatabase::save(const string &filename) const
+void TemplatedDatabase::save(const std::string &filename) const
 {
   cv::FileStorage fs(filename.c_str(), cv::FileStorage::WRITE);
-  if(!fs.isOpened()) throw string("Could not open file ") + filename;
+  if(!fs.isOpened()) throw std::string("Could not open file ") + filename;
 
   save(fs);
 }
@@ -1215,7 +1213,7 @@ void TemplatedDatabase::save(cv::FileStorage &fs,
     for(drit = dit->begin(); drit != dit->end(); ++drit)
     {
       NodeId nid = drit->first;
-      const vector& features = drit->second;
+      const std::vector& features = drit->second;
 
       // save info of last_nid
       fs << "{";
@@ -1223,7 +1221,7 @@ void TemplatedDatabase::save(cv::FileStorage &fs,
       // msvc++ 2010 with opencv 2.3.1 does not allow FileStorage::operator<<
       // with vectors of unsigned int
       fs << "features" << "["
-        << *(const vector*)(&features) << "]";
+        << *(const std::vector*)(&features) << "]";
 
       fs << "}";
     }
@@ -1238,10 +1236,10 @@ void TemplatedDatabase::save(cv::FileStorage &fs,
 // --------------------------------------------------------------------------
 
 template
-void TemplatedDatabase::load(const string &filename)
+void TemplatedDatabase::load(const std::string &filename)
 {
   cv::FileStorage fs(filename.c_str(), cv::FileStorage::READ);
-  if(!fs.isOpened()) throw string("Could not open file ") + filename;
+  if(!fs.isOpened()) throw std::string("Could not open file ") + filename;
 
   load(fs);
 }
@@ -1299,13 +1297,13 @@ void TemplatedDatabase::load(const cv::FileStorage &fs,
       NodeId nid = (int)fe[i]["nodeId"];
 
       dit = m_dfile[eid].insert(m_dfile[eid].end(),
-        make_pair(nid, vector() ));
+        make_pair(nid, std::vector() ));
 
       // this failed to compile with some opencv versions (2.3.1)
       //fe[i]["features"] >> dit->second;
 
       // this was ok until OpenCV 2.4.1
-      //vector aux;
+      //std::vector aux;
       //fe[i]["features"] >> aux; // OpenCV < 2.4.1
       //dit->second.resize(aux.size());
       //std::copy(aux.begin(), aux.end(), dit->second.begin());
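(Aside, not part of the patch: with `using namespace std;` removed from TemplatedDatabase.h, client code has to qualify the standard-library types itself. A minimal usage sketch under that assumption, following the demo's SURF feature layout; `Surf64Vocabulary` and `Surf64Database` are the typedefs DBoW2.h provides, and the database file name is a placeholder.)

```cpp
#include <cstddef>
#include <vector>

#include "DBoW2.h" // defines Surf64Vocabulary and Surf64Database

// features[image][keypoint] is one SURF descriptor (a std::vector<float>),
// the same layout the demo builds in loadFeatures().
void buildDatabase(const Surf64Vocabulary &voc,
  const std::vector<std::vector<std::vector<float> > > &features)
{
  // false, 0: do not keep a direct index.
  Surf64Database db(voc, false, 0);

  for(std::size_t i = 0; i < features.size(); ++i)
    db.add(features[i]);

  // Query every image against the database, keeping the best 4 results.
  DBoW2::QueryResults ret;
  for(std::size_t i = 0; i < features.size(); ++i)
    db.query(features[i], ret, 4);

  // save()/load() go through cv::FileStorage and throw a std::string on failure.
  db.save("small_db.yml.gz");
}
```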
diff --git a/include/DBoW2/TemplatedVocabulary.h b/include/DBoW2/TemplatedVocabulary.h
index 4d7a237e..53a0e303 100644
--- a/include/DBoW2/TemplatedVocabulary.h
+++ b/include/DBoW2/TemplatedVocabulary.h
@@ -17,7 +17,7 @@
 #include
 #include
 #include
-#include
+#include
 
 #include "FeatureVector.h"
 #include "BowVector.h"
@@ -25,8 +25,6 @@
 
 #include
 
-using namespace std;
-
 namespace DBoW2 {
 
 /// @param TDescriptor class of descriptor
@@ -281,7 +279,7 @@ class TemplatedVocabulary
     /// Weight if the node is a word
     WordValue weight;
     /// Children
-    vector children;
+    std::vector children;
     /// Parent node (undefined in case of root)
     NodeId parent;
     /// Node descriptor
@@ -321,8 +319,8 @@ class TemplatedVocabulary
    * @param features (out) pointers to the training features
    */
   void getFeatures(
-    const vector > &training_features,
-    vector &features) const;
+    const std::vector > &training_features,
+    std::vector &features) const;
 
   /**
    * Returns the word id associated to a feature
@@ -349,7 +347,7 @@ class TemplatedVocabulary
    * @param descriptors descriptors to run the kmeans on
    * @param current_level current level in the tree
   */
-  void HKmeansStep(NodeId parent_id, const vector &descriptors,
+  void HKmeansStep(NodeId parent_id, const std::vector &descriptors,
     int current_level);
 
   /**
@@ -357,8 +355,8 @@ class TemplatedVocabulary
    * @note In this class, kmeans++ is used, but this function should be
    *   overriden by inherited classes.
    */
-  virtual void initiateClusters(const vector &descriptors,
-    vector &clusters) const;
+  virtual void initiateClusters(const std::vector &descriptors,
+    std::vector &clusters) const;
 
   /**
    * Creates k clusters from the given descriptor sets by running the
@@ -366,8 +364,8 @@ class TemplatedVocabulary
    * @param descriptors
    * @param clusters resulting clusters
   */
-  void initiateClustersKMpp(const vector &descriptors,
-    vector &clusters) const;
+  void initiateClustersKMpp(const std::vector &descriptors,
+    std::vector &clusters) const;
 
   /**
    * Create the words of the vocabulary once the tree has been built
@@ -380,7 +378,7 @@ class TemplatedVocabulary
    * created (by calling HKmeansStep and createWords)
    * @param features
    */
-  void setNodeWeights(const vector > &features);
+  void setNodeWeights(const std::vector > &features);
 
 protected:
 
@@ -548,7 +546,7 @@ void TemplatedVocabulary::create(
   m_nodes.reserve(expected_nodes); // avoid allocations when creating the tree
 
-  vector features;
+  std::vector features;
   getFeatures(training_features, features);
 
@@ -599,13 +597,13 @@ void TemplatedVocabulary::create(
 
 template
 void TemplatedVocabulary::getFeatures(
-  const vector > &training_features,
-  vector &features) const
+  const std::vector > &training_features,
+  std::vector &features) const
 {
   features.resize(0);
 
-  typename vector >::const_iterator vvit;
-  typename vector::const_iterator vit;
+  typename std::vector >::const_iterator vvit;
+  typename std::vector::const_iterator vit;
   for(vvit = training_features.begin(); vvit != training_features.end(); ++vvit)
   {
     features.reserve(features.size() + vvit->size());
@@ -620,13 +618,13 @@ void TemplatedVocabulary::getFeatures(
 
 template
 void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
-  const vector &descriptors, int current_level)
+  const std::vector &descriptors, int current_level)
 {
   if(descriptors.empty()) return;
 
   // features associated to each cluster
-  vector clusters;
-  vector > groups; // groups[i] = [j1, j2, ...]
+  std::vector clusters;
+  std::vector > groups; // groups[i] = [j1, j2, ...]
     // j1, j2, ... indices of descriptors associated to cluster i
 
   clusters.reserve(m_k);
@@ -656,7 +654,7 @@ void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
     bool goon = true;
 
     // to check if clusters move after iterations
-    vector last_association, current_association;
+    std::vector last_association, current_association;
 
     while(goon)
     {
@@ -673,7 +671,7 @@ void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
 
        for(unsigned int c = 0; c < clusters.size(); ++c)
        {
-          vector cluster_descriptors;
+          std::vector cluster_descriptors;
          cluster_descriptors.reserve(groups[c].size());
 
          /*
@@ -686,7 +684,7 @@ void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
          }
          */
 
-          vector::const_iterator vit;
+          std::vector::const_iterator vit;
          for(vit = groups[c].begin(); vit != groups[c].end(); ++vit)
          {
            cluster_descriptors.push_back(descriptors[*vit]);
@@ -702,12 +700,12 @@ void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
 
      // calculate distances to cluster centers
      groups.clear();
-      groups.resize(clusters.size(), vector());
+      groups.resize(clusters.size(), std::vector());
      current_association.resize(descriptors.size());
 
      //assoc.clear();
 
-      typename vector::const_iterator fit;
+      typename std::vector::const_iterator fit;
      //unsigned int d = 0;
      for(fit = descriptors.begin(); fit != descriptors.end(); ++fit)//, ++d)
      {
@@ -776,15 +774,15 @@ void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
   if(current_level < m_L)
   {
     // iterate again with the resulting clusters
-    const vector &children_ids = m_nodes[parent_id].children;
+    const std::vector &children_ids = m_nodes[parent_id].children;
     for(unsigned int i = 0; i < clusters.size(); ++i)
     {
       NodeId id = children_ids[i];
 
-      vector child_features;
+      std::vector child_features;
       child_features.reserve(groups[i].size());
 
-      vector::const_iterator vit;
+      std::vector::const_iterator vit;
       for(vit = groups[i].begin(); vit != groups[i].end(); ++vit)
       {
         child_features.push_back(descriptors[*vit]);
@@ -802,7 +800,8 @@ void TemplatedVocabulary::HKmeansStep(NodeId parent_id,
 
 template
 void TemplatedVocabulary::initiateClusters
-  (const vector &descriptors, vector &clusters) const
+  (const std::vector &descriptors,
+  std::vector &clusters) const
 {
   initiateClustersKMpp(descriptors, clusters);
 }
@@ -811,7 +810,8 @@ void TemplatedVocabulary::initiateClusters
 
 template
 void TemplatedVocabulary::initiateClustersKMpp(
-  const vector &pfeatures, vector &clusters) const
+  const std::vector &pfeatures,
+  std::vector &clusters) const
 {
   // Implements kmeans++ seeding algorithm
   // Algorithm:
@@ -828,7 +828,7 @@ void TemplatedVocabulary::initiateClustersKMpp(
   clusters.resize(0);
   clusters.reserve(m_k);
 
-  vector min_dists(pfeatures.size(), std::numeric_limits::max());
+  std::vector min_dists(pfeatures.size(), std::numeric_limits::max());
 
   // 1.
@@ -838,8 +838,8 @@ void TemplatedVocabulary::initiateClustersKMpp(
   clusters.push_back(*pfeatures[ifeature]);
 
   // compute the initial distances
-  typename vector::const_iterator fit;
-  vector::iterator dit;
+  typename std::vector::const_iterator fit;
+  std::vector::iterator dit;
   dit = min_dists.begin();
   for(fit = pfeatures.begin(); fit != pfeatures.end(); ++fit, ++dit)
   {
@@ -903,7 +903,7 @@ void TemplatedVocabulary::createWords()
   {
     m_words.reserve( (int)pow((double)m_k, (double)m_L) );
 
-    typename vector::iterator nit;
+    typename std::vector::iterator nit;
 
     nit = m_nodes.begin(); // ignore root
     for(++nit; nit != m_nodes.end(); ++nit)
@@ -921,7 +921,7 @@ void TemplatedVocabulary::createWords()
 
 template
 void TemplatedVocabulary::setNodeWeights
-  (const vector > &training_features)
+  (const std::vector > &training_features)
 {
   const unsigned int NWords = m_words.size();
   const unsigned int NDocs = training_features.size();
@@ -939,11 +939,11 @@ void TemplatedVocabulary::setNodeWeights
     // Note: this actually calculates the idf part of the tf-idf score.
     // The complete tf-idf score is calculated in ::transform
 
-    vector Ni(NWords, 0);
-    vector counted(NWords, false);
+    std::vector Ni(NWords, 0);
+    std::vector counted(NWords, false);
 
-    typename vector >::const_iterator mit;
-    typename vector::const_iterator fit;
+    typename std::vector >::const_iterator mit;
+    typename std::vector::const_iterator fit;
 
     for(mit = training_features.begin(); mit != training_features.end(); ++mit)
     {
@@ -1057,7 +1057,7 @@ void TemplatedVocabulary::transform(
   LNorm norm;
   bool must = m_scoring_object->mustNormalize(norm);
 
-  typename vector::const_iterator fit;
+  typename std::vector::const_iterator fit;
 
   if(m_weighting == TF || m_weighting == TF_IDF)
   {
@@ -1120,7 +1120,7 @@ void TemplatedVocabulary::transform(
   LNorm norm;
   bool must = m_scoring_object->mustNormalize(norm);
 
-  typename vector::const_iterator fit;
+  typename std::vector::const_iterator fit;
 
   if(m_weighting == TF || m_weighting == TF_IDF)
   {
@@ -1199,8 +1199,8 @@ void TemplatedVocabulary::transform(const TDescriptor &feature,
   WordId &word_id, WordValue &weight, NodeId *nid, int levelsup) const
 {
   // propagate the feature down the tree
-  vector nodes;
-  typename vector::const_iterator nit;
+  std::vector nodes;
+  typename std::vector::const_iterator nit;
 
   // level at which the node must be stored in nid, if given
   const int nid_level = m_L - levelsup;
@@ -1269,7 +1269,7 @@ void TemplatedVocabulary::getWordsFromNode
   {
     words.reserve(m_k); // ^1, ^2, ...
 
-    vector parents;
+    std::vector parents;
     parents.push_back(nid);
 
     while(!parents.empty())
@@ -1277,8 +1277,8 @@ void TemplatedVocabulary::getWordsFromNode
       NodeId parentid = parents.back();
       parents.pop_back();
 
-      const vector &child_ids = m_nodes[parentid].children;
-      vector::const_iterator cit;
+      const std::vector &child_ids = m_nodes[parentid].children;
+      std::vector::const_iterator cit;
 
       for(cit = child_ids.begin(); cit != child_ids.end(); ++cit)
      {
@@ -1300,7 +1300,7 @@ template
 int TemplatedVocabulary::stopWords(double minWeight)
 {
   int c = 0;
-  typename vector::iterator wit;
+  typename std::vector::iterator wit;
   for(wit = m_words.begin(); wit != m_words.end(); ++wit)
   {
     if((*wit)->weight < minWeight)
@@ -1318,7 +1318,7 @@ template
 void TemplatedVocabulary::save(const std::string &filename) const
 {
   cv::FileStorage fs(filename.c_str(), cv::FileStorage::WRITE);
-  if(!fs.isOpened()) throw string("Could not open file ") + filename;
+  if(!fs.isOpened()) throw std::string("Could not open file ") + filename;
 
   save(fs);
 }
@@ -1329,7 +1329,7 @@ template
 void TemplatedVocabulary::load(const std::string &filename)
 {
   cv::FileStorage fs(filename.c_str(), cv::FileStorage::READ);
-  if(!fs.isOpened()) throw string("Could not open file ") + filename;
+  if(!fs.isOpened()) throw std::string("Could not open file ") + filename;
 
   this->load(fs);
 }
@@ -1377,8 +1377,8 @@ void TemplatedVocabulary::save(cv::FileStorage &f,
 
   // tree
   f << "nodes" << "[";
-  vector parents, children;
-  vector::const_iterator pit;
+  std::vector parents, children;
+  std::vector::const_iterator pit;
 
   parents.push_back(0); // root
 
@@ -1415,7 +1415,7 @@ void TemplatedVocabulary::save(cv::FileStorage &f,
   // words
   f << "words" << "[";
 
-  typename vector::const_iterator wit;
+  typename std::vector::const_iterator wit;
   for(wit = m_words.begin(); wit != m_words.end(); wit++)
   {
     WordId id = wit - m_words.begin();
@@ -1460,7 +1460,7 @@ void TemplatedVocabulary::load(const cv::FileStorage &fs,
     NodeId nid = (int)fn[i]["nodeId"];
     NodeId pid = (int)fn[i]["parentId"];
     WordValue weight = (WordValue)fn[i]["weight"];
-    string d = (string)fn[i]["descriptor"];
+    std::string d = (std::string)fn[i]["descriptor"];
 
     m_nodes[nid].id = nid;
     m_nodes[nid].parent = pid;
diff --git a/src/FORB.cpp b/src/FORB.cpp
index a1a280e9..07e85c15 100644
--- a/src/FORB.cpp
+++ b/src/FORB.cpp
@@ -13,8 +13,8 @@
 #include
 #include
 
-#include "DUtils.h"
-#include "DVision.h"
+#include
+#include
 #include "FORB.h"
 
 using namespace std;
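(Aside, not part of the patch: the vocabulary side follows the same pattern once the headers stop importing namespace std. A sketch of building and saving a small SURF vocabulary, using the branching factor, depth, weighting and scoring values the demo uses; the output file name is a placeholder.)

```cpp
#include <vector>

#include "DBoW2.h" // defines Surf64Vocabulary

void buildVocabulary(
  const std::vector<std::vector<std::vector<float> > > &features)
{
  // At most 9^3 words: branching factor 9, depth 3, tf-idf weights, L1 scoring.
  const int k = 9;
  const int L = 3;
  Surf64Vocabulary voc(k, L, DBoW2::TF_IDF, DBoW2::L1_NORM);

  voc.create(features);

  // Vocabularies, like databases, are stored through cv::FileStorage.
  voc.save("small_voc.yml.gz");
}
```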