diff --git a/dindexer.sql b/dindexer.sql
index 0dea56d..c0bce49 100644
--- a/dindexer.sql
+++ b/dindexer.sql
@@ -4,7 +4,7 @@
 -- Dumped from database version 9.4.5
 -- Dumped by pg_dump version 9.4.5
 
--- Started on 2015-11-30 18:28:52 GMT
+-- Started on 2015-12-01 16:35:15 GMT
 
 SET statement_timeout = 0;
 SET lock_timeout = 0;
@@ -68,6 +68,8 @@ CREATE TABLE files (
     size bigint,
     hash character(48) NOT NULL,
     is_hash_valid boolean DEFAULT true NOT NULL,
+    access_time timestamp with time zone,
+    modify_time timestamp with time zone,
     CONSTRAINT chk_files_dirsize_zero CHECK (((is_directory = false) OR (size = 0)))
 );
 
@@ -242,7 +244,7 @@ GRANT ALL ON SCHEMA public TO postgres;
 GRANT ALL ON SCHEMA public TO PUBLIC;
 
 
--- Completed on 2015-11-30 18:28:54 GMT
+-- Completed on 2015-12-01 16:35:18 GMT
 
 --
 -- PostgreSQL database dump complete
diff --git a/src/dbbackend.cpp b/src/dbbackend.cpp
index 538aaef..160008f 100644
--- a/src/dbbackend.cpp
+++ b/src/dbbackend.cpp
@@ -23,6 +23,8 @@
 #include
 #include
 #include
+#include <ctime>
+#include <memory>
 
 namespace din {
 	namespace {
@@ -36,6 +38,12 @@ namespace din {
 				<< ");";
 			return oss.str();
 		}
+
+		boost::string_ref time_to_str (const std::time_t parTime, char* parBuff, std::size_t parLength) {
+			const auto gtm = std::gmtime(&parTime);
+			const auto len = std::strftime(parBuff, parLength, "%F %T%z", gtm);
+			return boost::string_ref(parBuff, len);
+		}
 	} //unnamed namespace
 
 	bool read_from_db (FileRecordData& parItem, SetRecordDataFull& parSet, const DinDBSettings& parDB, std::string&& parHash) {
@@ -90,6 +98,9 @@ namespace din {
 			return;
 		}
 
+		const std::size_t strtime_buff_size = 512;
+		std::unique_ptr<char[]> strtime_buff(new char[strtime_buff_size]);
+
 		pq::Connection conn(std::string(parDB.username), std::string(parDB.password), std::string(parDB.dbname), std::string(parDB.address), parDB.port);
 		conn.connect();
 
@@ -98,7 +109,10 @@ namespace din {
 		//TODO: use COPY instead of INSERT INTO
 		for (std::size_t z = 0; z < parData.size(); z += g_batch_size) {
 			std::ostringstream query;
-			query << "INSERT INTO \"files\" (path, hash, level, group_id, is_directory, is_symlink, size) VALUES ";
+			query << "INSERT INTO \"files\" " <<
+				"(path, hash, level, group_id, is_directory, is_symlink, size, " <<
+				"access_time, modify_time) VALUES "
+			;
 			const char* comma = "";
 			for (auto i = z; i < std::min(z + g_batch_size, parData.size()); ++i) {
@@ -108,8 +122,10 @@
 					<< itm.level << ','
 					<< "currval('\"sets_id_seq\"')" << ','
 					<< (itm.is_directory ? "true" : "false") << ','
-					<< (itm.is_symlink ? "true" : "false") << ',' << itm.size << ')'
-				;
+					<< (itm.is_symlink ? "true" : "false") << ',' << itm.size
+					<< ',' << '\'' << time_to_str(itm.atime, strtime_buff.get(), strtime_buff_size) << '\''
+					<< ',' << '\'' << time_to_str(itm.mtime, strtime_buff.get(), strtime_buff_size) << '\''
+					<< ')';
 				comma = ",";
 			}
 			query << ';';
diff --git a/src/dbbackend.hpp b/src/dbbackend.hpp
index 42d22b8..c60ffcc 100644
--- a/src/dbbackend.hpp
+++ b/src/dbbackend.hpp
@@ -22,6 +22,7 @@
 #include
 #include
 #include
+#include <ctime>
 
 namespace din {
 	struct DinDBSettings;
@@ -29,6 +30,8 @@ namespace din {
 	struct FileRecordData {
 		std::string path;
 		std::string hash;
+		std::time_t atime;
+		std::time_t mtime;
 		uint16_t level;
 		uint64_t size;
 		bool is_directory;
diff --git a/src/indexer.cpp b/src/indexer.cpp
index a983154..4ee5de4 100644
--- a/src/indexer.cpp
+++ b/src/indexer.cpp
@@ -44,12 +44,15 @@ namespace din {
 	typedef TigerHash HashType;
 
 	struct FileEntry {
-		FileEntry ( const char* parPath, int parLevel, bool parIsDir, bool parIsSymLink) :
+		FileEntry ( const char* parPath, const fastf::FileStats& parSt ) :
 			path(parPath),
 			hash {},
-			level(static_cast<uint16_t>(parLevel)),
-			is_dir(parIsDir),
-			is_symlink(parIsSymLink)
+			access_time(parSt.atime),
+			modify_time(parSt.mtime),
+			//file_size(0),
+			level(static_cast<uint16_t>(parSt.level)),
+			is_dir(parSt.is_dir),
+			is_symlink(parSt.is_symlink)
 		{
 		}
 
@@ -62,6 +65,8 @@ namespace din {
 
 		std::string path;
 		HashType hash;
+		std::time_t access_time;
+		std::time_t modify_time;
 		uint64_t file_size;
 		uint16_t level;
 		bool is_dir;
@@ -300,6 +305,8 @@ namespace din {
 			data.push_back(FileRecordData {
 				make_relative_path(base_path, PathName(itm.path)).path(),
 				tiger_to_string(itm.hash),
+				itm.access_time,
+				itm.modify_time,
 				itm.level,
 				itm.file_size,
 				itm.is_dir,