cpfm/src/datasources/blockchair.cpp
evilny0 a41b13f3b2 Moved data sources out of wallet class.
Added Blockchair support for BTC/BCH.
Added support for multiple operations per ETH tx, introducing ERC-20 support.
Separated DB data into "raw" and "processed". The goal is to be able to wipe processed data (so it can be reprocessed with updated rules) without having to re-analyze the input files.
Updated SQL schema to match changes.
2021-02-21 22:59:40 +01:00

/*
* Copyright (c) 2021, evilny0
*
* This file is part of cpfm.
*
* cpfm is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* cpfm is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with cpfm. If not, see <http://www.gnu.org/licenses/>.
*
*/
#include "datasources/blockchair.h"
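// Parses a Blockchair timestamp of the form "YYYY-MM-DD HH:MM:SS" (interpreted as UTC)
// into a Time value.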
Time BlockchainDataSourceTypeBTC_Blockchair::getTimeFromString(string s)
{
    std::istringstream in{s};
    date::sys_time<std::chrono::seconds> tp;
    in >> date::parse("%F %T", tp);

    Time t;
    t.setFromUnixTime(std::chrono::system_clock::to_time_t(tp));
    return t;
}
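// Builds the transaction details list for the given addresses.
// Data is read from the local cache; anything missing is fetched from the Blockchair API first.
// Input/output amounts are converted from satoshis to whole coins.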
list<BlockchainTxDetailsTypeBTC> BlockchainDataSourceTypeBTC_Blockchair::getTxDetailsListForAddresses(list<string> addresses)
{
linfo << "Blockchair : analyzing address list";
list<BlockchainTxDetailsTypeBTC> l;
if (!bfs::exists(getCacheFilenameForAddresses(addresses)))
{
saveBlockchainAddressesDataToCacheFile(addresses);
}
if (!bfs::exists(getCacheFilenameForAddresses(addresses)))
{
lerr << "Blockchair : cache file could not be found. Addresses data was not retrieved.";
return l;
}
ifstream f;
f.open(getCacheFilenameForAddresses(addresses));
json::value jvalue = json::value::parse(f);
f.close();
for (int i=0;i<jvalue["data"]["transactions"].size();i++)
{
string txHash = jvalue["data"]["transactions"][i].as_string();
linfo << "Blockchair : found tx " << txHash << ". Starting analysis.";
if (!bfs::exists(getCacheFilenameForTx(txHash)))
{
saveBlockchainTxDataToCacheFile(txHash);
}
if (!bfs::exists(getCacheFilenameForTx(txHash)))
{
lerr << "Blockchair : cache file for tx " << txHash << " could not be found. Tx data was not retrieved.";
}
ifstream fTx;
fTx.open(getCacheFilenameForTx(txHash));
json::value jvalueTx = json::value::parse(fTx);
fTx.close();
BlockchainTxDetailsTypeBTC tx;
tx.hash = txHash;
tx.time = getTimeFromString (jvalueTx["data"][txHash]["transaction"]["time"].as_string());
for (int j=0;j<jvalueTx["data"][txHash]["inputs"].size();j++)
{
string addr = jvalueTx["data"][txHash]["inputs"][j]["recipient"].as_string();
__int64 amount = jvalueTx["data"][txHash]["inputs"][j]["value"].as_integer();
Money m = amount;
Money mdecimals = m/100000000;
// The same address can be multiple time in the inputs, so we need to sum the amounts.
tx.inputs[addr] += mdecimals;
}
for (int j=0;j<jvalueTx["data"][txHash]["outputs"].size();j++)
{
string addr = jvalueTx["data"][txHash]["outputs"][j]["recipient"].as_string();
__int64 amount = jvalueTx["data"][txHash]["outputs"][j]["value"].as_integer();
Money m = amount;
Money mdecimals = m/100000000;
// The same address can only be once in the outputs.
tx.outputs[addr] = mdecimals;
}
l.push_back(tx);
linfo << "Blockchair : finished analysis for tx " << txHash << ".";
}
linfo << "Blockchair : finished analysis for address list.";
return l;
}
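// Cache file for an address list: the concatenated addresses are used as the file name.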
string BlockchainDataSourceTypeBTC_Blockchair::getCacheFilenameForAddresses(list<string> addresses)
{
    string s;
    for (const auto& a: addresses)
        s = s+a;

    string cacheFilename("data/cache/blockchair/"+m_blockchainName+"/addresses/" + s);
    return cacheFilename;
}
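// Cache file for a single transaction, keyed by its hash.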
string BlockchainDataSourceTypeBTC_Blockchair::getCacheFilenameForTx(string txHash)
{
    string cacheFilename("data/cache/blockchair/"+m_blockchainName+"/tx/" + txHash);
    return cacheFilename;
}
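// Queries the Blockchair dashboards/addresses endpoint for the given addresses and stores the
// JSON response in the address cache file. Nothing is written when the query fails, is rate
// limited, or returns an empty result.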
void BlockchainDataSourceTypeBTC_Blockchair::saveBlockchainAddressesDataToCacheFile(list<string> addresses)
{
    try
    {
        string s;
        for (const auto& a: addresses)
        {
            if (s.length())
                s = s+","+a;
            else
                s = a;
        }
        linfo << "Blockchair : querying API about addresses : " << s;

        string sRequestURL = "/"+m_blockchainName+"/dashboards/addresses/";
        sRequestURL += s;

        http_client apiclient("https://api.blockchair.com/");
        m_currentRequestCacheFilename = getCacheFilenameForAddresses(addresses);

        ltrace << "Blockchair : query : " << sRequestURL;
        apiclient.request(methods::GET,sRequestURL).then([](http_response response)
        {
            if (response.status_code() == status_codes::OK)
            {
                ldebug << "Blockchair : response OK.";
                return response.extract_json();
            }
            else if (response.status_code() == status_codes::TooManyRequests)
            {
                lwarn << "Blockchair : too many queries! We are being rate limited.";
                return pplx::task_from_result(json::value());
            }
            return pplx::task_from_result(json::value());
        })
        .then([this](pplx::task<json::value> previousTask)
        {
            if (previousTask.get() != json::value::null())
            {
                linfo << "Blockchair : saving query result to " << m_currentRequestCacheFilename;
                ofstream f;
                f.open(m_currentRequestCacheFilename);
                f << previousTask.get();
                f.close();
            }
            else
            {
                lerr << "Blockchair : query result is empty. Nothing will be saved to the cache file.";
            }
        })
        .wait();
    }
    catch(const http::http_exception& e)
    {
        lerr << "Blockchair : failed to query API : " << e.what();
    }
}
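// Queries the Blockchair dashboards/transaction endpoint for the given tx hash and stores the
// JSON response in the tx cache file. Nothing is written when the query fails, is rate limited,
// or returns an empty result.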
void BlockchainDataSourceTypeBTC_Blockchair::saveBlockchainTxDataToCacheFile(string txHash)
{
    try
    {
        linfo << "Blockchair : querying API about tx : " << txHash;

        string sRequestURL = "/"+m_blockchainName+"/dashboards/transaction/";
        sRequestURL += txHash;

        http_client apiclient("https://api.blockchair.com/");
        m_currentRequestCacheFilename = getCacheFilenameForTx(txHash);

        ltrace << "Blockchair : query : " << sRequestURL;
        apiclient.request(methods::GET,sRequestURL).then([](http_response response)
        {
            if (response.status_code() == status_codes::OK)
            {
                ldebug << "Blockchair : response OK.";
                return response.extract_json();
            }
            else if (response.status_code() == status_codes::TooManyRequests)
            {
                lwarn << "Blockchair : too many queries! We are being rate limited.";
                return pplx::task_from_result(json::value());
            }
            return pplx::task_from_result(json::value());
        })
        .then([this](pplx::task<json::value> previousTask)
        {
            if (previousTask.get() != json::value::null())
            {
                linfo << "Blockchair : saving query result to " << m_currentRequestCacheFilename;
                ofstream f;
                f.open(m_currentRequestCacheFilename);
                f << previousTask.get();
                f.close();
            }
            else
            {
                lerr << "Blockchair : query result is empty. Nothing will be saved to the cache file.";
            }
        })
        .wait();
    }
    catch(const http::http_exception& e)
    {
        lerr << "Blockchair : failed to query API about " << txHash << " : " << e.what();
    }
}