mirror of https://github.com/meshtastic/firmware.git
synced 2025-09-04 02:34:14 +00:00
Removed the overly complex CountingCoverageFilter; moved to an ephemeral in-memory node list
This commit is contained in:
parent e91651b7ff
commit 5c27934332

@@ -1,270 +0,0 @@
#pragma once

#include "CountingCoverageFilter.h"

CountingCoverageFilter::CountingCoverageFilter()
{
    clear();
    instantiationTime_ = getTime();
}

/**
 * Add an item (node) to this counting bloom filter.
 * Increments the counters for each hash position (up to the max for BITS_PER_COUNTER).
 */
void CountingCoverageFilter::add(NodeNum item)
{
    // We'll do BLOOM_HASH_FUNCTIONS hash functions. Typically BLOOM_HASH_FUNCTIONS=2 for simplicity.
    size_t indices[BLOOM_HASH_FUNCTIONS];
    computeHashIndices(item, indices);

    for (size_t i = 0; i < BLOOM_HASH_FUNCTIONS; i++) {
        incrementCounter(indices[i]);
    }
}

/**
 * Remove an item (node), decrementing counters at each hash position (if >0).
 */
void CountingCoverageFilter::remove(NodeNum item)
{
    size_t indices[BLOOM_HASH_FUNCTIONS];
    computeHashIndices(item, indices);

    for (size_t i = 0; i < BLOOM_HASH_FUNCTIONS; i++) {
        decrementCounter(indices[i]);
    }
}

/**
 * Check if an item "might" be in the set:
 * - If ALL counters at those BLOOM_HASH_FUNCTIONS positions are > 0,
 *   item is "possible" (false positive possible).
 * - If ANY position is zero, item is definitely not in the set.
 */
bool CountingCoverageFilter::check(NodeNum item) const
{
    size_t indices[BLOOM_HASH_FUNCTIONS];
    computeHashIndices(item, indices);

    for (size_t i = 0; i < BLOOM_HASH_FUNCTIONS; i++) {
        if (getCounterValue(indices[i]) == 0) {
            return false; // definitely not in
        }
    }
    return true; // might be in
}

/**
 * Approximate count of how many items are in the filter.
 * The naive approach is sum(counters)/BLOOM_HASH_FUNCTIONS. Collisions can inflate this, though.
 */
float CountingCoverageFilter::approximateCount() const
{
    uint64_t sum = 0;
    for (size_t i = 0; i < NUM_UNKNOWN_NODE_COUNTERS; i++) {
        sum += getCounterValue(i);
    }
    // We do K increments per item, so a naive estimate is sum/BLOOM_HASH_FUNCTIONS
    return static_cast<float>(sum) / static_cast<float>(BLOOM_HASH_FUNCTIONS);
}

/**
 * Merge (union) this filter with another filter of the same params.
 * We'll take the max of each counter.
 * (Alternatively you could add, but max is safer for a union.)
 */
void CountingCoverageFilter::merge(const CountingCoverageFilter &other)
{
    for (size_t i = 0; i < NUM_UNKNOWN_NODE_COUNTERS; i++) {
        uint8_t mine = getCounterValue(i);
        uint8_t theirs = other.getCounterValue(i);
        uint8_t mergedVal = (mine > theirs) ? mine : theirs;
        setCounterValue(i, mergedVal);
    }
}

/**
 * Clear out all counters to zero.
 */
void CountingCoverageFilter::clear()
{
    storage_.fill(0);
}

/**
 * Compare a standard Bloom (bit-based, e.g., 16 bytes => 128 bits) to see how many bits
 * are newly set that we do not have a nonzero counter for.
 * This is purely an approximate approach for "new coverage" bits.
 */
int CountingCoverageFilter::approximateNewCoverageCount(const CoverageFilter &incoming) const
{
    if (isStale())
        return 0;

    // 1) Retrieve the bits from the incoming coverage filter
    const auto &bits = incoming.getBits();   // this is a std::array<uint8_t, BLOOM_FILTER_SIZE_BYTES>
    size_t coverageByteCount = bits.size();  // typically 16 bytes => 128 bits

    size_t maxBitsToCheck = coverageByteCount * 8;
    if (maxBitsToCheck > NUM_UNKNOWN_NODE_COUNTERS) {
        maxBitsToCheck = NUM_UNKNOWN_NODE_COUNTERS;
    }

    int newCoverageBits = 0;
    for (size_t bitIndex = 0; bitIndex < maxBitsToCheck; bitIndex++) {
        size_t byteIndex = bitIndex / 8;
        uint8_t bitMask = 1 << (bitIndex % 8);

        // Was this bit set in the incoming coverage filter?
        bool coverageBitSet = (bits[byteIndex] & bitMask) != 0;
        if (!coverageBitSet) {
            continue;
        }

        // If our local counter at bitIndex is 0 => "new coverage" bit
        if (getCounterValue(bitIndex) == 0) {
            newCoverageBits++;
        }
    }
    return newCoverageBits;
}

float CountingCoverageFilter::approximateCoverageRatio(const CoverageFilter &incoming) const
{
    if (isStale())
        return 0.0f;

    // 1) How many "new coverage" bits do we see?
    int newBits = approximateNewCoverageCount(incoming);

    // 2) How many items do we hold, approx?
    float myApproxCount = approximateCount();
    if (myApproxCount < 0.00001f) {
        // Avoid division by zero; or you can return 0 or 1 as suits your logic.
        return 0.0f;
    }

    // newBits is a bit count, approximateCount() is an item count.
    // This is a rough ratio. Decide if you want them in the same domain.
    // We'll treat "newBits" ~ "new items," so ratio = newBits / myApproxCount
    return static_cast<float>(newBits) / myApproxCount;
}

uint8_t CountingCoverageFilter::getCounterValue(size_t idx) const
{
    assert(idx < NUM_UNKNOWN_NODE_COUNTERS);
    if (BITS_PER_UNKNOWN_NODE_COUNTER == 8) {
        // Easiest case: 1 byte per counter
        return storage_[idx];
    } else if (BITS_PER_UNKNOWN_NODE_COUNTER == 4) {
        // 2 counters per byte
        size_t byteIndex = idx / 2;     // each byte holds 2 counters
        bool second = (idx % 2) == 1;   // 0 => lower nibble, 1 => upper nibble
        uint8_t rawByte = storage_[byteIndex];
        if (!second) {
            // lower 4 bits
            return (rawByte & 0x0F);
        } else {
            // upper 4 bits
            return (rawByte >> 4) & 0x0F;
        }
    } else {
        // If you want to handle other bit widths (2, 3, 16, etc.), you'd do more logic here.
        static_assert(BITS_PER_UNKNOWN_NODE_COUNTER == 4 || BITS_PER_UNKNOWN_NODE_COUNTER == 8,
                      "Only 4-bit or 8-bit counters allowed.");
        return 0;
    }
}

/**
 * Set the counter at position idx to val (clamped to max representable).
 */
void CountingCoverageFilter::setCounterValue(size_t idx, uint8_t val)
{
    assert(idx < NUM_UNKNOWN_NODE_COUNTERS);
    // clamp val
    uint8_t maxVal = (1 << BITS_PER_UNKNOWN_NODE_COUNTER) - 1; // e.g. 15 for 4 bits, 255 for 8 bits
    if (val > maxVal)
        val = maxVal;

    if (BITS_PER_UNKNOWN_NODE_COUNTER == 8) {
        storage_[idx] = val;
    } else if (BITS_PER_UNKNOWN_NODE_COUNTER == 4) {
        size_t byteIndex = idx / 2;
        bool second = (idx % 2) == 1;
        uint8_t rawByte = storage_[byteIndex];

        if (!second) {
            // Lower nibble
            // clear lower nibble, then set
            rawByte = (rawByte & 0xF0) | (val & 0x0F);
        } else {
            // Upper nibble
            // clear upper nibble, then set
            rawByte = (rawByte & 0x0F) | ((val & 0x0F) << 4);
        }
        storage_[byteIndex] = rawByte;
    }
}

bool CountingCoverageFilter::isStale() const
{
    // How long has it been since this filter was created?
    uint32_t now = getTime();
    uint32_t age = now - instantiationTime_;
    return age > STALE_COVERAGE_SECONDS;
}

/**
 * Increment the counter at idx by 1 (clamped to max).
 */
void CountingCoverageFilter::incrementCounter(size_t idx)
{
    // read current
    uint8_t currVal = getCounterValue(idx);
    // increment
    uint8_t nextVal = currVal + 1; // might overflow if at max
    setCounterValue(idx, nextVal);
}

/**
 * Decrement the counter at idx by 1 (if >0).
 */
void CountingCoverageFilter::decrementCounter(size_t idx)
{
    // read current
    uint8_t currVal = getCounterValue(idx);
    if (currVal > 0) {
        setCounterValue(idx, currVal - 1);
    }
    // else do nothing (can't go negative)
}

void CountingCoverageFilter::computeHashIndices(NodeNum value, size_t outIndices[BLOOM_HASH_FUNCTIONS]) const
{
    // We can use two or more seeds for separate hashes. Here we do two seeds as an example.
    // If BLOOM_HASH_FUNCTIONS > 2, you'd do more seeds or vary the combined approach.
    static const uint64_t seed1 = 0xDEADBEEF;
    static const uint64_t seed2 = 0xBADC0FFE;

    outIndices[0] = hashGeneric(value, seed1);
    if (BLOOM_HASH_FUNCTIONS >= 2) {
        outIndices[1] = hashGeneric(value, seed2);
    }
    // If BLOOM_HASH_FUNCTIONS were greater than 2, we'd have to update similarly for outIndices[2],
    // outIndices[3], etc. with new seeds
}

size_t CountingCoverageFilter::hashGeneric(NodeNum value, uint64_t seed) const
{
    // Just a simplistic combine of "value" and "seed" then do std::hash<uint64_t>.
    uint64_t combined = value ^ (seed + (value << 6) + (value >> 2));

    std::hash<uint64_t> hasher;
    uint64_t hashOut = hasher(combined);

    // Then map to [0..(NUM_UNKNOWN_NODE_COUNTERS-1)]
    // because each "slot" is an index from 0..NUM_UNKNOWN_NODE_COUNTERS-1
    return static_cast<size_t>(hashOut % NUM_UNKNOWN_NODE_COUNTERS);
}

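A note on the data structure being deleted above: it is a counting Bloom filter, i.e. a Bloom filter whose single bits are widened to small counters so that items can be removed as well as added. The sketch below shows the core add/remove/check mechanics in isolation; it mirrors the deleted code's two-seed, std::hash-based mixing, but it is illustrative only and not part of the firmware.

// Minimal counting-Bloom-filter sketch (illustrative only, not firmware code).
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <functional>

constexpr size_t kSlots = 64;  // plays the role of NUM_UNKNOWN_NODE_COUNTERS
constexpr size_t kHashes = 2;  // plays the role of BLOOM_HASH_FUNCTIONS

// Same spirit as hashGeneric(): mix the item with a seed, then reduce modulo the slot count.
static size_t slotFor(uint32_t item, uint64_t seed)
{
    uint64_t combined = item ^ (seed + (uint64_t(item) << 6) + (item >> 2));
    return std::hash<uint64_t>{}(combined) % kSlots;
}

int main()
{
    std::array<uint8_t, kSlots> counters{}; // one byte per counter for simplicity
    const uint64_t seeds[kHashes] = {0xDEADBEEF, 0xBADC0FFE};

    auto add = [&](uint32_t item) {
        for (uint64_t s : seeds)
            if (counters[slotFor(item, s)] < 255) // saturate instead of wrapping
                counters[slotFor(item, s)]++;
    };
    auto remove = [&](uint32_t item) {
        for (uint64_t s : seeds)
            if (counters[slotFor(item, s)] > 0) // never go below zero
                counters[slotFor(item, s)]--;
    };
    auto check = [&](uint32_t item) {
        for (uint64_t s : seeds)
            if (counters[slotFor(item, s)] == 0)
                return false; // definitely not present
        return true;          // possibly present (false positives happen)
    };

    add(0x1234);
    printf("after add:    %d\n", check(0x1234) ? 1 : 0); // 1
    remove(0x1234);
    printf("after remove: %d\n", check(0x1234) ? 1 : 0); // 0, unless a colliding item remains
    return 0;
}

As with any Bloom filter, check() can report false positives when unrelated items collide on every hash position, which is also why approximateCount() in the deleted class is only an estimate.
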
@@ -1,123 +0,0 @@
#include "CoverageFilter.h"
#include "MeshTypes.h"

#include <RTC.h>
#include <algorithm>
#include <array>
#include <cassert>
#include <cstring>
#include <functional>
#include <stdint.h>

/**
 * A generic Counting Coverage (bloom) filter, which can be parameterized by:
 * - NUM_UNKNOWN_NODE_COUNTERS (how many counter "slots")
 * - BITS_PER_UNKNOWN_NODE_COUNTER (4 bits, 8 bits, etc.)
 * - BLOOM_HASH_FUNCTIONS (number of hash functions, typically 2 or more)
 *
 */

// We have NUM_UNKNOWN_NODE_COUNTERS total "slots," and each slot is BITS_PER_UNKNOWN_NODE_COUNTER wide.
// For BITS_PER_UNKNOWN_NODE_COUNTER=4, each slot can hold 0..15.
// We store these slots in a byte array sized for the total number of bits.
// 1) Calculate how many total bits we need:
#define STORAGE_BITS (NUM_UNKNOWN_NODE_COUNTERS * BITS_PER_UNKNOWN_NODE_COUNTER)

// 2) Convert that to bytes (rounding up)
#define STORAGE_BYTES ((STORAGE_BITS + 7) / 8) // integer ceiling division

class CountingCoverageFilter
{
  public:
    CountingCoverageFilter();

    /**
     * Add an item (node) to this counting bloom filter.
     * Increments the counters for each hash position (up to the max for BITS_PER_COUNTER).
     */
    void add(NodeNum item);

    /**
     * Remove an item (node), decrementing counters at each hash position (if >0).
     */
    void remove(NodeNum item);

    /**
     * Check if an item "might" be in the set:
     * - If ALL counters at those BLOOM_HASH_FUNCTIONS positions are > 0,
     *   item is "possible" (false positive possible).
     * - If ANY position is zero, item is definitely not in the set.
     */
    bool check(NodeNum item) const;

    /**
     * Approximate count of how many items are in the filter.
     * The naive approach is sum(counters)/BLOOM_HASH_FUNCTIONS. Collisions can inflate this, though.
     */
    float approximateCount() const;

    /**
     * Merge (union) this filter with another filter of the same params.
     * We'll take the max of each counter.
     * (Alternatively you could add, but max is safer for a union.)
     */
    void merge(const CountingCoverageFilter &other);

    /**
     * Clear out all counters to zero.
     */
    void clear();

    /**
     * Compare a standard Bloom (bit-based, e.g., 16 bytes => 128 bits) to see how many bits
     * are newly set that we do not have a nonzero counter for.
     * This is purely an approximate approach for "new coverage" bits.
     */
    int approximateNewCoverageCount(const CoverageFilter &incoming) const;

    /**
     * Compare a standard Bloom (bit-based, e.g., 16 bytes => 128 bits) to see how many bits
     * are newly set that we do not have a nonzero counter for, vs. total approx. input
     * This is purely an approximate approach for "new coverage" ratio.
     */
    float approximateCoverageRatio(const CoverageFilter &incoming) const;

  private:
    uint32_t instantiationTime_;

    /**
     * The storage array, sized for all counters combined.
     * e.g. for NUM_UNKNOWN_NODE_COUNTERS=64, BITS_PER_UNKNOWN_NODE_COUNTER=4 => 64*4=256 bits => 256/8=32 bytes.
     */
    std::array<uint8_t, STORAGE_BYTES> storage_;

    /**
     * Retrieve the integer value of the counter at position idx
     * (0 <= idx < NUM_UNKNOWN_NODE_COUNTERS).
     */
    uint8_t getCounterValue(size_t idx) const;

    /**
     * Set the counter at position idx to val (clamped to max representable).
     */
    void setCounterValue(size_t idx, uint8_t val);

    /**
     * Returns true if this instance is stale (based on instantiation time).
     */
    bool isStale() const;

    /**
     * Increment the counter at idx by 1 (clamped to max).
     */
    void incrementCounter(size_t idx);

    /**
     * Decrement the counter at idx by 1 (if >0).
     */
    void decrementCounter(size_t idx);

    void computeHashIndices(NodeNum value, size_t outIndices[BLOOM_HASH_FUNCTIONS]) const;

    size_t hashGeneric(NodeNum value, uint64_t seed) const;
};

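A worked sizing example for the macros above, using the values cited in the storage_ comment (NUM_UNKNOWN_NODE_COUNTERS = 64, BITS_PER_UNKNOWN_NODE_COUNTER = 4): STORAGE_BITS = 256 and STORAGE_BYTES = 32. The nibble packing that getCounterValue()/setCounterValue() perform can be sketched on its own as follows (illustrative only, not firmware code):

// Nibble-packing sketch (illustrative only): two 4-bit counters share each byte.
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr size_t kCounters = 64;                               // 64 counters * 4 bits = 256 bits
static std::array<uint8_t, (kCounters * 4 + 7) / 8> storage{}; // = 32 bytes

static uint8_t getCounter(size_t idx)
{
    uint8_t b = storage[idx / 2];
    return (idx % 2) ? (b >> 4) & 0x0F : (b & 0x0F); // odd index -> upper nibble
}

static void setCounter(size_t idx, uint8_t val)
{
    val &= 0x0F; // clamp to 0..15
    uint8_t &b = storage[idx / 2];
    b = (idx % 2) ? ((b & 0x0F) | (val << 4)) : ((b & 0xF0) | val);
}

int main()
{
    setCounter(5, 9);
    setCounter(4, 3);                                // shares a byte with counter 5
    printf("%d %d\n", getCounter(4), getCounter(5)); // prints: 3 9
    return 0;
}

Packing two counters per byte halves the RAM cost at the price of a lower saturation point: each counter clamps at 15 instead of 255.
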
@@ -152,7 +152,7 @@ void FloodingRouter::storeCoverageFilterInPacket(const CoverageFilter &filter, m
 void FloodingRouter::mergeMyCoverage(CoverageFilter &coverage)
 {
     // Retrieve recent direct neighbors within the time window
-    std::vector<NodeNum> recentNeighbors = nodeDB->getDistinctRecentDirectNeighborIds(RECENCY_THRESHOLD_MINUTES * 60);
+    std::vector<NodeNum> recentNeighbors = nodeDB->getCoveredNodes(RECENCY_THRESHOLD_MINUTES * 60);
     for (auto &nodeId : recentNeighbors) {
         coverage.add(nodeId);
     }
@@ -177,7 +177,7 @@ float FloodingRouter::calculateForwardProbability(const CoverageFilter &incoming
     }

     // Retrieve recent direct neighbors within the time window
-    std::vector<NodeNum> recentNeighbors = nodeDB->getDistinctRecentDirectNeighborIds(RECENCY_THRESHOLD_MINUTES * 60);
+    std::vector<NodeNum> recentNeighbors = nodeDB->getCoveredNodes(RECENCY_THRESHOLD_MINUTES * 60);

     if (recentNeighbors.empty()) {
         // No neighbors to add coverage for
@@ -207,20 +207,14 @@ float FloodingRouter::calculateForwardProbability(const CoverageFilter &incoming
         coverageRatio = static_cast<float>(uncovered) / static_cast<float>(neighbors);
     }

-    // Compare our unknown node coverage filter to our updated coverage filter
-    // We use the updated coverage filter because we don't want to double count nodes
-    // that have already made it into the main in memory nodedb storage mechanism
-    float unknownNodeCoverageRatio = nodeDB->getUnknownCoverage().approximateCoverageRatio(updated);
-
     // unknownNodeCoverageRatio is inherently iffy so don't scale up its contribution to the probability of rebroadcast
     // This essentially makes the forward probability non-zero for nodes that have a set of "unknown" neighbors
-    float forwardProb = BASE_FORWARD_PROB + (coverageRatio * COVERAGE_SCALE_FACTOR) + unknownNodeCoverageRatio;
+    float forwardProb = BASE_FORWARD_PROB + (coverageRatio * COVERAGE_SCALE_FACTOR);

     // Clamp probability between 0 and 1
     forwardProb = std::min(std::max(forwardProb, 0.0f), 1.0f);

-    LOG_DEBUG("CoverageRatio=%.2f, UnknownNodeCoverageRatio=%.2f, ForwardProb=%.2f (Uncovered=%d, Total=%zu)", coverageRatio,
-              unknownNodeCoverageRatio, forwardProb, uncovered, neighbors);
+    LOG_DEBUG("CoverageRatio=%.2f, ForwardProb=%.2f (Uncovered=%d, Total=%zu)", coverageRatio, forwardProb, uncovered, neighbors);

     return forwardProb;
 }

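A small worked example of the simplified forwarding formula above. BASE_FORWARD_PROB and COVERAGE_SCALE_FACTOR are placeholder values chosen only for illustration; the firmware defines its own constants:

// Worked example of the simplified forward-probability formula (illustrative only;
// the two constants below are assumptions, not the firmware's real values).
#include <algorithm>
#include <cstdio>

int main()
{
    const float BASE_FORWARD_PROB = 0.25f;     // assumed placeholder
    const float COVERAGE_SCALE_FACTOR = 0.75f; // assumed placeholder

    int uncovered = 3; // recent neighbors the incoming packet has not covered yet
    int neighbors = 4; // total recent direct neighbors we know about

    float coverageRatio = static_cast<float>(uncovered) / static_cast<float>(neighbors); // 0.75
    float forwardProb = BASE_FORWARD_PROB + (coverageRatio * COVERAGE_SCALE_FACTOR);     // 0.8125
    forwardProb = std::min(std::max(forwardProb, 0.0f), 1.0f);                           // clamp to [0, 1]

    printf("coverageRatio=%.2f forwardProb=%.4f\n", coverageRatio, forwardProb);
    return 0;
}

With 3 of 4 recent neighbors still uncovered, the node forwards with probability of roughly 0.81 under these placeholder constants; if every neighbor were already covered, coverageRatio would be 0 and the probability would fall back to the base value.
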
@@ -819,6 +819,31 @@ void NodeDB::clearLocalPosition()
     setLocalPosition(meshtastic_Position_init_default);
 }

+bool NodeDB::isValidCandidateForCoverage(const meshtastic_NodeInfoLite &node)
+{
+    // 1) Exclude self
+    if (node.num == getNodeNum()) {
+        return false;
+    }
+    // 2) Exclude ignored
+    if (node.is_ignored) {
+        return false;
+    }
+    // 3) Exclude nodes that aren't direct neighbors
+    if (!node.has_hops_away || node.hops_away != 0) {
+        return false;
+    }
+    // 4) Exclude MQTT-based nodes if desired
+    if (node.via_mqtt) {
+        return false;
+    }
+    // 5) Must have last_heard
+    if (node.last_heard == 0) {
+        return false;
+    }
+    return true; // If we pass all checks, it's valid
+}
+
 /**
  * @brief Retrieves a list of distinct recent direct neighbor NodeNums.
  *
@@ -832,66 +857,71 @@ void NodeDB::clearLocalPosition()
  * @param timeWindowSecs The time window in seconds to consider a node as "recently heard."
  * @return std::vector<NodeNum> A vector containing the NodeNums of recent direct neighbors.
  */
-std::vector<NodeNum> NodeDB::getDistinctRecentDirectNeighborIds(uint32_t timeWindowSecs)
+std::vector<NodeNum> NodeDB::getCoveredNodes(uint32_t timeWindowSecs)
 {
     uint32_t now = getTime();
-    NodeNum localNode = getNodeNum();

-    // Temporary vector to hold neighbors with their SNR for sorting
-    std::vector<std::pair<NodeNum, float>> neighborsWithSnr;
-    neighborsWithSnr.reserve(MAX_NEIGHBORS_PER_HOP); // Reserve space to avoid multiple reallocations
+    // We'll collect (nodeNum, last_heard, snr) for both main DB + ephemeral
+    struct NodeCandidate {
+        NodeNum num;
+        uint32_t lastHeard;
+        float snr;
+    };
+
+    std::vector<NodeCandidate> allCandidates;
+    allCandidates.reserve(numMeshNodes + ephemeralNodes.size());

+    // 1) Collect from main node vector
     for (size_t i = 0; i < numMeshNodes; ++i) {
-        const meshtastic_NodeInfoLite &node = meshNodes->at(i);
+        const auto &node = meshNodes->at(i);

-        // Skip our own node entry
-        if (node.num == localNode) {
+        if (!isValidCandidateForCoverage(node)) {
             continue;
         }

-        // Skip ignored nodes
-        if (node.is_ignored) {
-            continue;
-        }
-
-        // Check if this node is a direct neighbor (hops_away == 0)
-        if (!node.has_hops_away || node.hops_away != 0) {
-            continue;
-        }
-
-        // Skip nodes heard via MQTT
-        if (node.via_mqtt) {
-            continue;
-        }
-
-        // Check if the node was heard recently within the time window
-        if (node.last_heard > 0 && (now - node.last_heard <= timeWindowSecs)) {
-            neighborsWithSnr.emplace_back(node.num, node.snr);
+        uint32_t age = now - node.last_heard;
+        if (age <= timeWindowSecs) {
+            allCandidates.push_back(NodeCandidate{node.num, node.last_heard, node.snr});
         }
     }

-    LOG_DEBUG("Found %zu candidates before limiting.", neighborsWithSnr.size());
+    // 2) Collect from ephemeral node vector
+    for (const auto &node : ephemeralNodes) {
+        if (!isValidCandidateForCoverage(node)) {
+            continue;
+        }
+
-    // If the number of candidates exceeds MAX_NEIGHBORS_PER_HOP, select the top N based on SNR
-    if (neighborsWithSnr.size() > MAX_NEIGHBORS_PER_HOP) {
-        // Use nth_element to partially sort the vector, bringing the top N SNRs to the front
-        std::nth_element(neighborsWithSnr.begin(), neighborsWithSnr.begin() + MAX_NEIGHBORS_PER_HOP, neighborsWithSnr.end(),
-                         [](const std::pair<NodeNum, float> &a, const std::pair<NodeNum, float> &b) {
-                             return a.second > b.second; // Sort in descending order of SNR
-                         });
-
-        // Resize to keep only the top N neighbors
-        neighborsWithSnr.resize(MAX_NEIGHBORS_PER_HOP);
+        uint32_t age = now - node.last_heard;
+        if (age <= timeWindowSecs) {
+            allCandidates.push_back(NodeCandidate{node.num, node.last_heard, node.snr});
+        }
     }

-    // Extract NodeNums from the sorted and limited list
+    // We want the most recent, and highest SNR neighbors to determine
+    // the most likely coverage this node will offer on its hop
+    // In this case recency is more important than SNR because we need a fresh picture of coverage
+    std::sort(allCandidates.begin(), allCandidates.end(), [](const NodeCandidate &a, const NodeCandidate &b) {
+        // 1) Descending by lastHeard
+        if (a.lastHeard != b.lastHeard) {
+            return a.lastHeard > b.lastHeard;
+        }
+        // 2) If tie, descending by snr
+        return a.snr > b.snr;
+    });
+
+    // 4) Reduce to MAX_NEIGHBORS_PER_HOP
+    if (allCandidates.size() > MAX_NEIGHBORS_PER_HOP) {
+        allCandidates.resize(MAX_NEIGHBORS_PER_HOP);
+    }
+
+    // 5) Extract just the node ids for return
     std::vector<NodeNum> recentNeighbors;
-    recentNeighbors.reserve(neighborsWithSnr.size());
-    for (const auto &pair : neighborsWithSnr) {
-        recentNeighbors.push_back(pair.first);
+    recentNeighbors.reserve(allCandidates.size());
+    for (auto &cand : allCandidates) {
+        recentNeighbors.push_back(cand.num);
     }

     LOG_DEBUG("Returning %zu recent direct neighbors within %u seconds.", recentNeighbors.size(), timeWindowSecs);
     return recentNeighbors;
 }

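A design note on the selection step in getCoveredNodes() above: the new code sorts every candidate and then truncates to MAX_NEIGHBORS_PER_HOP. If the candidate list ever becomes large, the same ordering of the kept entries can be produced while ordering only that prefix, for example with std::partial_sort. A sketch under that assumption (illustrative only, with generic types rather than the firmware's):

// Top-N selection by recency, then SNR, using std::partial_sort (illustrative only).
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct Candidate {
    uint32_t num;
    uint32_t lastHeard;
    float snr;
};

// Keep only the maxN most recently heard candidates (ties broken by stronger SNR).
static void limitToTopN(std::vector<Candidate> &candidates, size_t maxN)
{
    auto byRecencyThenSnr = [](const Candidate &a, const Candidate &b) {
        if (a.lastHeard != b.lastHeard)
            return a.lastHeard > b.lastHeard; // most recently heard first
        return a.snr > b.snr;                 // tie-break on stronger SNR
    };

    if (candidates.size() > maxN) {
        // Orders only the first maxN elements; the unsorted tail is then dropped.
        std::partial_sort(candidates.begin(), candidates.begin() + maxN, candidates.end(), byRecencyThenSnr);
        candidates.resize(maxN);
    } else {
        std::sort(candidates.begin(), candidates.end(), byRecencyThenSnr);
    }
}

int main()
{
    std::vector<Candidate> c = {{1, 100, 2.0f}, {2, 300, -1.0f}, {3, 300, 5.0f}, {4, 200, 0.0f}};
    limitToTopN(c, 2);
    for (const auto &x : c)
        printf("node=%u lastHeard=%u snr=%.1f\n", x.num, x.lastHeard, x.snr); // node 3, then node 2
    return 0;
}

For the small neighbor counts typical here the full std::sort is perfectly adequate; the partial sort only matters if the candidate set grows.
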
@@ -914,6 +944,9 @@ uint32_t NodeDB::secondsSinceLastNodeHeard()
 void NodeDB::cleanupMeshDB()
 {
     int newPos = 0, removed = 0;
+    std::vector<meshtastic_NodeInfoLite> newlyEphemeral;
+    newlyEphemeral.reserve(numMeshNodes);
+
     for (int i = 0; i < numMeshNodes; i++) {
         if (meshNodes->at(i).has_user) {
             if (meshNodes->at(i).last_heard > maxLastHeard_) {
@@ -926,20 +959,18 @@ void NodeDB::cleanupMeshDB()
             }
             meshNodes->at(newPos++) = meshNodes->at(i);
         } else {
-            // Check if this unknown node is a direct neighbor (hops_away == 0)
-            if (meshNodes->at(i).has_hops_away && meshNodes->at(i).hops_away == 0) {
-                // ADD for unknown coverage:
-                // If this node doesn't have user data, we consider it "unknown"
-                // and add it to the unknownCoverage_ filter:
-                unknownCoverage_.add(meshNodes->at(i).num);
-            }
-
             removed++;
+
+            // If this node doesn't have user data, we consider it "unknown" and ephemeral
+            newlyEphemeral.push_back(meshNodes->at(i));
         }
     }
     numMeshNodes -= removed;
     std::fill(devicestate.node_db_lite.begin() + numMeshNodes, devicestate.node_db_lite.begin() + numMeshNodes + removed,
               meshtastic_NodeInfoLite());

+    ephemeralNodes = std::move(newlyEphemeral);
+
     LOG_DEBUG("cleanupMeshDB purged %d entries", removed);
 }

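The cleanupMeshDB() change above drops the counting-filter bookkeeping entirely: entries without user data are compacted out of the persisted node DB and parked in the in-RAM ephemeralNodes vector instead. The compaction-and-move pattern in isolation looks like this (illustrative only, with generic types rather than the firmware's):

// Compact-and-move sketch (illustrative only): keep "known" entries in the persisted
// list, move everything else into a separate in-memory list, in the same shape as cleanupMeshDB.
#include <cstddef>
#include <cstdio>
#include <vector>

struct Entry {
    int id;
    bool hasUser;
};

int main()
{
    std::vector<Entry> db = {{1, true}, {2, false}, {3, true}, {4, false}};
    std::vector<Entry> ephemeral;
    ephemeral.reserve(db.size());

    size_t newPos = 0;
    for (size_t i = 0; i < db.size(); i++) {
        if (db[i].hasUser) {
            db[newPos++] = db[i]; // compact known entries toward the front
        } else {
            ephemeral.push_back(db[i]); // unknown entries now live only in RAM
        }
    }
    db.resize(newPos); // drop the tail that was moved out

    printf("kept=%zu ephemeral=%zu\n", db.size(), ephemeral.size()); // kept=2 ephemeral=2
    return 0;
}
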
@@ -6,7 +6,6 @@
 #include <assert.h>
 #include <vector>

-#include "CountingCoverageFilter.h"
 #include "MeshTypes.h"
 #include "NodeStatus.h"
 #include "configuration.h"
@@ -65,6 +64,7 @@ class NodeDB

   public:
     std::vector<meshtastic_NodeInfoLite> *meshNodes;
+    std::vector<meshtastic_NodeInfoLite> ephemeralNodes;
     bool updateGUI = false; // we think the gui should definitely be redrawn, screen will clear this once handled
     meshtastic_NodeInfoLite *updateGUIforNode = NULL; // if currently showing this node, we think you should update the GUI
     Observable<const meshtastic::NodeStatus *> newStatus;
@@ -175,16 +175,13 @@ class NodeDB
      * @param timeWindowSecs The time window in seconds to consider a node as "recently heard."
      * @return std::vector<NodeNum> A vector containing the NodeNums of recent direct neighbors.
      */
-    std::vector<NodeNum> getDistinctRecentDirectNeighborIds(uint32_t timeWindowSecs);
+    std::vector<NodeNum> getCoveredNodes(uint32_t timeWindowSecs);

     uint32_t secondsSinceLastNodeHeard();

-    const CountingCoverageFilter &getUnknownCoverage() const { return unknownCoverage_; }
-
   private:
     uint32_t lastNodeDbSave = 0; // when we last saved our db to flash
     uint32_t maxLastHeard_ = 0;  // the most recent last_heard value we've seen
-    CountingCoverageFilter unknownCoverage_;

     /// Find a node in our DB, create an empty NodeInfoLite if missing
     meshtastic_NodeInfoLite *getOrCreateMeshNode(NodeNum n);
@@ -213,6 +210,8 @@ class NodeDB

     bool saveChannelsToDisk();
     bool saveDeviceStateToDisk();
+
+    bool isValidCandidateForCoverage(const meshtastic_NodeInfoLite &node);
 };

 extern NodeDB *nodeDB;