Add support for verifying hashes in torrents with multiple files
Directory names are not supported yet, so this only works if all of the torrent's files exist, are of the correct size, and are located directly in the working directory.
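For context, BitTorrent v1 computes each piece hash over the concatenation of all payload files in info/files order, so a piece can begin in one file and end in the next, and a verifier has to carry a partially filled piece buffer across file boundaries. A minimal standalone sketch of that splitting step, using only the standard library (the function name and the returned piece list are illustrative, not part of this codebase):

#include <cstddef>
#include <fstream>
#include <string>
#include <utility>
#include <vector>

// Split the concatenated contents of file_names into piece_length-sized
// chunks; the last chunk may be shorter. Each returned entry would then be
// SHA-1 hashed and compared against the torrent's stored piece hashes.
std::vector<std::vector<char>> split_into_pieces(
		const std::vector<std::string>& file_names, std::size_t piece_length) {
	std::vector<std::vector<char>> pieces;
	std::vector<char> piece;
	piece.reserve(piece_length);
	for (const auto& name : file_names) {
		std::ifstream in(name, std::ios::in|std::ios::binary);
		char byte;
		while (in.get(byte)) {
			piece.push_back(byte);
			if (piece.size() == piece_length) {	// piece full: emit it, start the next one
				pieces.push_back(std::move(piece));
				piece.clear();
				piece.reserve(piece_length);
			}
		}
	}
	if (not piece.empty())	// trailing partial piece of the last file
		pieces.push_back(std::move(piece));
	return pieces;
}

Byte-at-a-time reads keep the sketch short; the actual commit reads whole pieces at once and reuses a single buffer instead.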
parent ca049e5f6f
commit 903095c23f

1 changed file with 35 additions and 7 deletions
src/main.cpp
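In a multi-file torrent the info dictionary stores the payload as a list of { length, path } entries, where path is itself a list of path components; the diff below queries only the first component (path/[[0]]), which is why files inside subdirectories are not found yet. A made-up example of the decoded layout (all values illustrative):

info
  name: "example"                (in multi-file mode, the top-level directory name)
  piece length: 262144
  pieces: <concatenated 20-byte SHA-1 digests, one per piece>
  files:
    0: length = 1048576, path = ["a.bin"]
    1: length = 4096,    path = ["sub", "b.bin"]      (would need directory support)

With this layout the query /info/files/[[0]]/path/[[0]] used below resolves to "a.bin", while entry 1's real location sub/b.bin is never reconstructed.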
@@ -47,17 +47,17 @@ int main(int argc, const char* argv[]) {
 	auto hashes = duck::collect_hashes(values);
 	std::cout << "Got " << hashes.size() << " hashes\n";
 
-	const auto file_count = duck::find_int("/info/files/[[size]]", values);
+	const auto file_count = static_cast<std::int_fast32_t>(duck::find_int("/info/files/[[size]]", values));
 	if (file_count)
 		std::cout << "Input has " << file_count << " file entries\n";
 	else
 		std::cout << "Input seems to contain a single file only\n";
 
+	const auto piece_length = static_cast<std::size_t>(duck::find_int("/info/piece length", values));
 	if (0 == file_count) {
 		std::string file_name = std::string{duck::find_string("/info/name", values)};
 		std::cout << "Found file name \"" << file_name << "\"\n";
 
-		const auto piece_length = duck::find_int("/info/piece length", values);
 		std::ifstream istream(file_name, std::ios::in|std::ios::binary);
 
 		if (istream.is_open()) {
@@ -68,10 +68,9 @@ int main(int argc, const char* argv[]) {
 			while (istream.read(reinterpret_cast<char*>(buff.data()), piece_length).gcount() > 0) {
 				SHA1::BufferSource buff_source(buff.data(), istream.gcount() * CHAR_BIT);
 				SHA1::MessageDigest calculated_hash = SHA1::computeFromSource(buff_source);
-				SHA1::MessageDigest stored_hash = to_hash160_digest(hashes[hash_index]);
+				SHA1::MessageDigest stored_hash = to_hash160_digest(hashes[hash_index++]);
 				std::cout << stored_hash.toHexString() << " " <<
 					calculated_hash.toHexString() << " " << file_name << '\n';
-				++hash_index;
 				if (stored_hash == calculated_hash)
 					++match_count;
 			}
@@ -84,9 +83,38 @@ int main(int argc, const char* argv[]) {
 	}
 	else {
 		std::string search("/info/files/[[");
-		for (std::int_fast32_t z = 0; z < static_cast<std::int_fast32_t>(file_count); ++z) {
-			const auto path = duck::find_string(search + std::to_string(z) + "]]/path/[[0]]", values);
-			std::cout << "path index " << z << '\t' << path << '\n';
+		std::size_t hash_index = 0;
+		std::size_t match_count = 0;
+		std::size_t read_size = 0;
+		std::vector<std::uint8_t> buff;
+		for (std::int_fast32_t z = 0; z < file_count; ++z) {
+			auto file_name = std::string{duck::find_string(search + std::to_string(z) + "]]/path/[[0]]", values)};
+			std::cout << "path index " << z << '\t' << file_name << '\n';
+
+			std::ifstream istream(file_name, std::ios::in|std::ios::binary);
+			if (istream.is_open()) {
+				buff.resize(piece_length);
+				while (istream.read(reinterpret_cast<char*>(buff.data() + read_size), piece_length - read_size).gcount() > 0) {
+					read_size += istream.gcount();
+					if (piece_length == read_size or file_count == z + 1) {
+						SHA1::BufferSource buff_source(buff.data(), read_size * CHAR_BIT);
+						SHA1::MessageDigest calculated_hash = SHA1::computeFromSource(buff_source);
+						SHA1::MessageDigest stored_hash = to_hash160_digest(hashes[hash_index++]);
+						std::cout << stored_hash.toHexString() << " " <<
+							calculated_hash.toHexString() << " " << file_name << '\n';
+						if (stored_hash == calculated_hash)
+							++match_count;
+						read_size = 0;
+					}
+				}
+			}
+			else {
+				std::cout << "Unable to open " << file_name << ", bailing out of hash verification\n";
+				break;
+			}
+		}
+		if (hash_index == hashes.size()) {
+			std::cout << "Hash verified " << match_count << '/' << hashes.size() << '\n';
 		}
 	}
 }
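A note on the design of the new loop: read_size is deliberately left alone when a file ends mid-piece, so the next file's istream.read continues filling buff at buff.data() + read_size; that carry-over is what lets a single piece span a file boundary, and the file_count == z + 1 condition flushes the final, usually shorter, piece of the last file. The following self-contained sketch replays just that carry-over arithmetic with made-up file sizes:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

int main() {
	// Hypothetical inputs: a 4-byte piece length over files of 6 and 5 bytes.
	// The 11-byte stream splits into pieces of 4, 4 and 3 bytes, and the
	// second piece straddles the boundary between the two files.
	const std::size_t piece_length = 4;
	const std::vector<std::size_t> file_sizes{6, 5};

	std::size_t read_size = 0;	// bytes accumulated toward the current piece
	std::size_t piece_count = 0;
	for (std::size_t z = 0; z < file_sizes.size(); ++z) {
		std::size_t remaining = file_sizes[z];
		while (remaining > 0) {
			// Mirrors istream.read(buff.data() + read_size, piece_length - read_size)
			const std::size_t taken = std::min(piece_length - read_size, remaining);
			read_size += taken;
			remaining -= taken;
			if (piece_length == read_size) {	// a full piece: hash it, start over
				std::cout << "piece " << piece_count++ << ": " << read_size << " bytes\n";
				read_size = 0;
			}
		}
	}
	if (read_size > 0)	// trailing partial piece of the last file
		std::cout << "piece " << piece_count++ << ": " << read_size << " bytes\n";
	std::cout << "total pieces: " << piece_count << '\n';
}

Run as-is it reports three pieces of 4, 4 and 3 bytes; the commit's real loop does the flush inside the while via the file_count == z + 1 check rather than after it.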