mraksoll4 opened 8 months ago
Example:
static bool CheckBlockHeader(const CBlockHeader& block, BlockValidationState& state, const Consensus::Params& consensusParams, bool fCheckPOW = true)
{
    // Check Proof of Work, if required
    bool powResult1 = fCheckPOW ? CheckProofOfWork(block.GetHash(), block.nBits, consensusParams) : true;
    bool powResult2 = fCheckPOW ? CheckProofOfWork(block.GetPoWHash(), block.nBits, consensusParams) : true;

    // If the Proof of Work check failed for either algorithm, return an error
    if (!powResult1 || !powResult2) {
        return state.Invalid(BlockValidationResult::BLOCK_INVALID_HEADER, "high-hash", "proof of work failed for one or both algorithms");
    }

    return true;
}
You are requiring the miner to solve both for each block, which is different from what my article describes. My article could work if each PoW had its own timestamp and difficulty in the header, but that is probably not what you want. If you use only one timestamp and one difficulty in each header, then there are two options I can think of: 1) let the output of one PoW be the input to the next PoW, so validation confirms nonce -> pow1 -> pow2 -> hash (< target); 2) assume the relative efficiency of each PoW will always be the same, so that you just have a conversion factor for the difficulty of one of them based on the difficulty of the other.
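A minimal sketch of option 1, under the assumption of two hypothetical helpers Pow1Hash() and Pow2Hash() standing in for the two algorithms (these names are not from the actual codebase); the single nBits then only has to cover the end of the chain:

// Sketch of option 1: the output of the first PoW is the input of the second,
// so a miner cannot grind only the cheap algorithm. Pow1Hash/Pow2Hash are
// hypothetical helpers standing in for e.g. SHA256d and Yespower.
static bool CheckChainedProofOfWork(const CBlockHeader& block, const Consensus::Params& consensusParams)
{
    // nonce -> pow1: hash the serialized header with the first algorithm
    const uint256 pow1 = Pow1Hash(block);
    // pow1 -> pow2: feed that digest into the second algorithm
    const uint256 pow2 = Pow2Hash(pow1);
    // pow2 -> hash: the final digest must meet the single target encoded in nBits
    return CheckProofOfWork(pow2, block.nBits, consensusParams);
}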
If one of the two algorithms is SHA256, for example, that makes it possible for a light app to verify with SHA256 only.
I was thinking about a separate nBits for each algo:
class CBlockHeader
{
public:
    // header
    int32_t nVersion;
    uint256 hashPrevBlock;
    uint256 hashMerkleRoot;
    uint32_t nTime;
    uint32_t nBits;
    uint32_t nBits2;
    uint32_t nNonce;

    CBlockHeader()
    {
        SetNull();
    }

    SERIALIZE_METHODS(CBlockHeader, obj) { READWRITE(obj.nVersion, obj.hashPrevBlock, obj.hashMerkleRoot, obj.nTime, obj.nBits, obj.nBits2, obj.nNonce); }

    void SetNull()
    {
        nVersion = 0;
        hashPrevBlock.SetNull();
        hashMerkleRoot.SetNull();
        nTime = 0;
        nBits = 0;
        nBits2 = 0;
        nNonce = 0;
    }
But there is a huge problem: once we add nBits2, it also affects the final hash when two different algorithms are used for one block, for example if I want to use different proof-of-work limits for the algorithms. And if we use one difficulty for both Yespower and SHA256, as an example, I think it would be possible to cheat by using only SHA256.
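As a sketch of what the separate-nBits variant could look like, assuming GetHash() is the SHA256 commitment, GetPoWHash() is the Yespower hash, and each algorithm gets its own target (the function name and the "high-pow-hash" reject string are made up for illustration):

static bool CheckBlockHeaderDual(const CBlockHeader& block, BlockValidationState& state, const Consensus::Params& consensusParams, bool fCheckPOW = true)
{
    if (!fCheckPOW) return true;

    // SHA256 hash checked against its own target (nBits)
    if (!CheckProofOfWork(block.GetHash(), block.nBits, consensusParams)) {
        return state.Invalid(BlockValidationResult::BLOCK_INVALID_HEADER, "high-hash", "sha256 proof of work failed");
    }

    // Yespower hash checked against its own target (nBits2)
    if (!CheckProofOfWork(block.GetPoWHash(), block.nBits2, consensusParams)) {
        return state.Invalid(BlockValidationResult::BLOCK_INVALID_HEADER, "high-pow-hash", "yespower proof of work failed");
    }

    return true;
}

Giving the algorithms different proof-of-work limits would additionally require CheckProofOfWork to take a per-algorithm powLimit instead of the single consensusParams.powLimit.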
A simple example of how I think it could be done:
blockstorage.cpp
bool BlockManager::ReadBlockFromDisk(CBlock& block, const FlatFilePos& pos) const
{
    block.SetNull();

    // Open history file to read
    CAutoFile filein{OpenBlockFile(pos, true)};
    if (filein.IsNull()) {
        return error("ReadBlockFromDisk: OpenBlockFile failed for %s", pos.ToString());
    }

    // Read block
    try {
        filein >> block;
    } catch (const std::exception& e) {
        return error("%s: Deserialize or I/O error - %s at %s", __func__, e.what(), pos.ToString());
    }

    // Check the header for both variants of Proof of Work
    bool powResult1 = CheckProofOfWork(block.GetHash(), block.nBits, GetConsensus());
    bool powResult2 = CheckProofOfWork(block.GetPoWHash(), block.nBits, GetConsensus());

    if (!(powResult1 && powResult2)) {
        return error("ReadBlockFromDisk: Proof of Work is not valid for both variants for the block header at %s", pos.ToString());
    }

    // Signet only: check block solution
    if (GetConsensus().signet_blocks && !CheckSignetBlockSolution(block, GetConsensus())) {
        return error("ReadBlockFromDisk: Errors in block solution at %s", pos.ToString());
    }

    return true;
}
validation.cpp
static bool CheckBlockHeader(const CBlockHeader& block, BlockValidationState& state, const Consensus::Params& consensusParams, bool fCheckPOW = true)
{
    // Check PoWs
    bool powResult1 = fCheckPOW ? CheckProofOfWork(block.GetHash(), block.nBits, consensusParams) : true;
    bool powResult2 = fCheckPOW ? CheckProofOfWork(block.GetPoWHash(), block.nBits, consensusParams) : true;

    // Checking that both PoWs are valid
    if (!powResult1 || !powResult2) {
        return state.Invalid(BlockValidationResult::BLOCK_INVALID_HEADER, "high-hash", "proof of work failed");
    }

    return true;
}
bool HasValidProofOfWork(const std::vector<CBlockHeader>& headers, const Consensus::Params& consensusParams)
{
    return std::all_of(headers.cbegin(), headers.cend(),
                       [&](const auto& header) {
                           bool check1 = CheckProofOfWork(header.GetHash(), header.nBits, consensusParams);
                           bool check2 = CheckProofOfWork(header.GetPoWHash(), header.nBits, consensusParams);
                           return check1 && check2;
                       });
}
rpc/mining.cpp
static bool GenerateBlock(ChainstateManager& chainman, CBlock& block, uint64_t& max_tries, std::shared_ptr<const CBlock>& block_out, bool process_new_block)
{
    block_out.reset();
    block.hashMerkleRoot = BlockMerkleRoot(block);

    while (max_tries > 0 && block.nNonce < std::numeric_limits<uint32_t>::max() &&
           !(CheckProofOfWork(block.GetHash(), block.nBits, chainman.GetConsensus()) &&
             CheckProofOfWork(block.GetPoWHash(), block.nBits, chainman.GetConsensus())) &&
           !ShutdownRequested()) {
        ++block.nNonce;
        --max_tries;
    }
    if (max_tries == 0 || ShutdownRequested()) {
        return false;
    }
    if (block.nNonce == std::numeric_limits<uint32_t>::max()) {
        return true;
    }

    block_out = std::make_shared<const CBlock>(block);

    if (!process_new_block) return true;

    if (!chainman.ProcessNewBlock(block_out, /*force_processing=*/true, /*min_pow_checked=*/true, nullptr)) {
        throw JSONRPCError(RPC_INTERNAL_ERROR, "ProcessNewBlock, block not accepted");
    }

    return true;
}
Here we can say the goal is to create an alternative, easy way of verification for light wallets using a light algorithm, but there is no clear idea of how to do it correctly, because SHA256, for example, in this case makes it possible to spam with "sha256" blocks.
The idea is to prevent SPV wallets from having to use a heavy algorithm like Yespower, which slows them down a lot. So the idea was to use two or more algorithms, one of which exists only for quick block checks, and perhaps to add extra security and prevent centralization. The example I gave above works, but I think such a solution has a potential vulnerability in that we are tying the difficulty of Yespower to the difficulty of SHA256.
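To make the light-wallet side concrete: the point would be that an SPV client only runs the cheap check and leaves the heavy one to full nodes. A minimal sketch, assuming the dual-hash header above (the function name LightCheckHeaders is made up):

// SPV-side sketch: verify only the cheap SHA256 commitment against nBits and
// the chain linkage; the heavy Yespower check is assumed to be enforced by
// full nodes. This is exactly where the "sha256 spam" concern comes from.
bool LightCheckHeaders(const std::vector<CBlockHeader>& headers, const Consensus::Params& consensusParams)
{
    for (size_t i = 0; i < headers.size(); ++i) {
        const CBlockHeader& header = headers[i];
        // cheap PoW check only
        if (!CheckProofOfWork(header.GetHash(), header.nBits, consensusParams)) return false;
        // headers must actually connect to each other
        if (i > 0 && header.hashPrevBlock != headers[i - 1].GetHash()) return false;
    }
    return true;
}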
So a solution to prevent spam and similar problems is simply to use, for example, SHA512 with some extra rounds, which makes it incompatible with the existing devices that could do that "spam" at launch.
/** A hasher class for your custom hash (SHA-512 > SHA-512 > SHA-256 > SHA-256). */
class CCustomHash {
private:
    CSHA512 sha512;
    CSHA256 sha256;

public:
    static const size_t OUTPUT_SIZE = CSHA256::OUTPUT_SIZE;

    void Finalize(Span<unsigned char> output) {
        assert(output.size() == OUTPUT_SIZE);
        unsigned char buf[CSHA512::OUTPUT_SIZE];
        sha512.Finalize(buf);
        sha512.Reset().Write(buf, CSHA512::OUTPUT_SIZE).Finalize(buf);
        sha256.Write(buf, CSHA512::OUTPUT_SIZE).Finalize(buf);
        sha256.Reset().Write(buf, CSHA256::OUTPUT_SIZE).Finalize(output.data());
    }

    CCustomHash& Write(Span<const unsigned char> input) {
        sha512.Write(input.data(), input.size());
        return *this;
    }

    CCustomHash& Reset() {
        sha512.Reset();
        sha256.Reset();
        return *this;
    }
};
/** Compute the 256-bit hash of an object. */
template<typename T>
inline uint256 CustomHash(const T& in1)
{
    uint256 result;
    CCustomHash().Write(MakeUCharSpan(in1)).Finalize(result);
    return result;
}
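A quick usage sketch: in the real patch the input would presumably be the serialized header (80 standard bytes plus 4 bytes of nBits2, i.e. 84 bytes) and the digest would then be compared against the target; the function below is a placeholder for illustration, not actual code:

#include <vector>

// Placeholder demonstration of CustomHash(); header_bytes stands in for the
// serialized 84-byte header (80 bytes of standard fields + 4 bytes of nBits2).
uint256 ExamplePoWHash()
{
    const std::vector<unsigned char> header_bytes(84, 0x00);
    // SHA-512 -> SHA-512 -> SHA-256 -> SHA-256 over the serialized header;
    // the result would then go into CheckProofOfWork(hash, nBits2, consensusParams).
    return CustomHash(header_bytes);
}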
For example, in the base PoW there is only one algo or set of algos.
The idea, as an example, is that we use Yespower and SHA256: the PoW is valid (block found) only if both the Yespower and SHA256 checks pass at the same time.
There is a point to this, since we can use SHA256 as a bridge for a simple implementation in existing software.
But now I have doubts about whether there will be problems with the SHA256 algorithm.