Continue fetching past messages when searching

commit 76347f1c6f (parent 56a4e97296)
6 changed files with 94 additions and 54 deletions
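In short: the Cache read paths below move onto read-only transactions wrapped in try/catch so a missing database yields a default value instead of throwing, EventStore now emits fetchedMore() only after dataChanged(), TimelineModel re-exposes that signal, and TimelineFilter gains a fetchAgain() slot plus a cached row count so that an active thread or content filter keeps paginating older messages until it actually gains visible rows (or the source runs out of history). A "Reply attack" log-message typo is corrected along the way.
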
@@ -1013,20 +1013,22 @@ Cache::getOlmSessions(const std::string &curve25519)
 {
     using namespace mtx::crypto;
 
-    auto txn = lmdb::txn::begin(env_);
-    auto db = getOlmSessionsDb(txn, curve25519);
+    try {
+        auto txn = ro_txn(env_);
+        auto db = getOlmSessionsDb(txn, curve25519);
 
         std::string_view session_id, unused;
         std::vector<std::string> res;
 
         auto cursor = lmdb::cursor::open(txn, db);
         while (cursor.get(session_id, unused, MDB_NEXT))
             res.emplace_back(session_id);
         cursor.close();
 
-    txn.commit();
-
-    return res;
+        return res;
+    } catch (...) {
+        return {};
+    }
 }
 
 void

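The same pattern repeats across the Cache hunks that follow: take a read-only LMDB transaction via the ro_txn() helper, do the lookup inside a try block, and fall back to a default value on failure. A minimal stand-alone sketch of that pattern, assuming ro_txn() is essentially a transaction begun with MDB_RDONLY; the helper below is illustrative and not part of the commit:

#include <lmdb++.h>
#include <string>
#include <string_view>
#include <vector>

// Illustrative helper (not from the commit): enumerate all keys of a dbi using
// a read-only transaction, swallowing errors such as the db not existing yet.
std::vector<std::string>
readAllKeys(lmdb::env &env, lmdb::dbi &db)
{
    try {
        // Roughly what ro_txn(env_) is assumed to do: begin a read-only txn.
        auto txn = lmdb::txn::begin(env, nullptr, MDB_RDONLY);

        std::vector<std::string> keys;
        std::string_view key, value;
        auto cursor = lmdb::cursor::open(txn, db);
        while (cursor.get(key, value, MDB_NEXT))
            keys.emplace_back(key);
        cursor.close();

        return keys; // a read-only transaction needs no commit
    } catch (...) {
        return {};
    }
}
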
@@ -2173,18 +2175,22 @@ Cache::roomIds()
 std::string
 Cache::previousBatchToken(const std::string &room_id)
 {
-    auto txn = lmdb::txn::begin(env_, nullptr);
-    auto orderDb = getEventOrderDb(txn, room_id);
+    auto txn = ro_txn(env_);
+    try {
+        auto orderDb = getEventOrderDb(txn, room_id);
 
-    auto cursor = lmdb::cursor::open(txn, orderDb);
-    std::string_view indexVal, val;
-    if (!cursor.get(indexVal, val, MDB_FIRST)) {
-        return "";
-    }
+        auto cursor = lmdb::cursor::open(txn, orderDb);
+        std::string_view indexVal, val;
+        if (!cursor.get(indexVal, val, MDB_FIRST)) {
+            return "";
+        }
 
-    auto j = nlohmann::json::parse(val);
+        auto j = nlohmann::json::parse(val);
 
-    return j.value("prev_batch", "");
+        return j.value("prev_batch", "");
+    } catch (...) {
+        return "";
+    }
 }
 
 Cache::Messages

@@ -3206,10 +3212,10 @@ Cache::pendingEvents(const std::string &room_id)
 std::optional<mtx::events::collections::TimelineEvent>
 Cache::firstPendingMessage(const std::string &room_id)
 {
-    auto txn = lmdb::txn::begin(env_);
+    auto txn = ro_txn(env_);
     auto pending = getPendingMessagesDb(txn, room_id);
 
-    {
+    try {
         auto pendingCursor = lmdb::cursor::open(txn, pending);
         std::string_view tsIgnored, pendingTxn;
         while (pendingCursor.get(tsIgnored, pendingTxn, MDB_NEXT)) {

@@ -3225,7 +3231,6 @@ Cache::firstPendingMessage(const std::string &room_id)
                 from_json(nlohmann::json::parse(event), te);
 
                 pendingCursor.close();
-                txn.commit();
                 return te;
             } catch (std::exception &e) {
                 nhlog::db()->error("Failed to parse message from cache {}", e.what());

@@ -3233,10 +3238,8 @@ Cache::firstPendingMessage(const std::string &room_id)
                 continue;
             }
         }
-    }
-
-    txn.commit();
+    } catch (const lmdb::error &e) {
+    }
 
     return std::nullopt;
 }

@@ -3998,33 +4001,36 @@ Cache::hasEnoughPowerLevel(const std::vector<mtx::events::EventType> &eventTypes
     using namespace mtx::events;
     using namespace mtx::events::state;
 
-    auto txn = lmdb::txn::begin(env_);
-    auto db = getStatesDb(txn, room_id);
+    auto txn = ro_txn(env_);
+    try {
+        auto db = getStatesDb(txn, room_id);
 
         int64_t min_event_level = std::numeric_limits<int64_t>::max();
         int64_t user_level = std::numeric_limits<int64_t>::min();
 
         std::string_view event;
         bool res = db.get(txn, to_string(EventType::RoomPowerLevels), event);
 
         if (res) {
             try {
                 StateEvent<PowerLevels> msg =
                   nlohmann::json::parse(std::string_view(event.data(), event.size()))
                     .get<StateEvent<PowerLevels>>();
 
                 user_level = msg.content.user_level(user_id);
 
                 for (const auto &ty : eventTypes)
-                    min_event_level = std::min(min_event_level, msg.content.state_level(to_string(ty)));
+                    min_event_level =
+                      std::min(min_event_level, msg.content.state_level(to_string(ty)));
             } catch (const nlohmann::json::exception &e) {
                 nhlog::db()->warn("failed to parse m.room.power_levels event: {}", e.what());
             }
         }
 
-    txn.commit();
-
-    return user_level >= min_event_level;
+        return user_level >= min_event_level;
+    } catch (...) {
+        return false;
+    }
 }
 
 std::vector<std::string>

@@ -80,8 +80,8 @@ EventStore::EventStore(std::string room_id, QObject *)
                 emit beginInsertRows(toExternalIdx(newFirst), toExternalIdx(this->first - 1));
                 this->first = newFirst;
                 emit endInsertRows();
-                emit fetchedMore();
                 emit dataChanged(toExternalIdx(oldFirst), toExternalIdx(oldFirst));
+                emit fetchedMore();
             } else {
                 auto range = cache::client()->getTimelineRange(room_id_);

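Emitting fetchedMore() only after dataChanged() presumably matters for the filter change further down: by the time TimelineFilter::fetchAgain() runs in response to the signal, the freshly paginated rows are already visible to the proxy model, so comparing rowCount() against the cached count actually reflects the new batch.
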
@@ -725,10 +725,11 @@ EventStore::decryptEvent(const IdIndex &idx,
     case olm::DecryptionErrorCode::ParsingFailed:
         break;
     case olm::DecryptionErrorCode::ReplayAttack:
-        nhlog::crypto()->critical("Reply attack while decryptiong event {} in room {} from {}!",
-                                  e.event_id,
-                                  room_id_,
-                                  e.sender);
+        nhlog::crypto()->critical(
+          "Replay attack while decryptiong event {} in room {} from {}!",
+          e.event_id,
+          room_id_,
+          e.sender);
         break;
     case olm::DecryptionErrorCode::NoError:
         // unreachable

@@ -19,8 +19,10 @@ TimelineFilter::setThreadId(const QString &t)
     if (this->threadId != t) {
         this->threadId = t;
         invalidateFilter();
+
+        fetchMore({});
+        emit threadIdChanged();
     }
-    emit threadIdChanged();
 }
 
 void

@@ -30,21 +32,45 @@ TimelineFilter::setContentFilter(const QString &c)
     if (this->contentFilter != c) {
         this->contentFilter = c;
         invalidateFilter();
+
+        fetchMore({});
+        emit contentFilterChanged();
+    }
+}
+
+void
+TimelineFilter::fetchAgain()
+{
+    if (threadId.isEmpty() && contentFilter.isEmpty())
+        return;
+
+    if (auto s = source()) {
+        if (rowCount() == cachedCount && s->canFetchMore(QModelIndex()))
+            s->fetchMore(QModelIndex());
+        else
+            cachedCount = rowCount();
     }
-    emit contentFilterChanged();
 }
 
 void
 TimelineFilter::setSource(TimelineModel *s)
 {
     if (auto orig = this->source(); orig != s) {
-        if (orig)
+        cachedCount = 0;
+
+        if (orig) {
             disconnect(orig,
                        &TimelineModel::currentIndexChanged,
                        this,
                        &TimelineFilter::currentIndexChanged);
+            disconnect(orig, &TimelineModel::fetchedMore, this, &TimelineFilter::fetchAgain);
+        }
 
         this->setSourceModel(s);
 
         connect(s, &TimelineModel::currentIndexChanged, this, &TimelineFilter::currentIndexChanged);
+        connect(s, &TimelineModel::fetchedMore, this, &TimelineFilter::fetchAgain);
 
         emit sourceChanged();
         invalidateFilter();
     }
 }

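This is the heart of the change: every time the source model finishes a pagination request, the filter checks whether any new rows became visible under the current filter; if not, and the source still has history, it immediately asks for the next batch. A compact sketch of that loop with hypothetical class names (the real pieces are TimelineModel/EventStore and TimelineFilter above):

#include <QAbstractListModel>
#include <QSortFilterProxyModel>
#include <QStringList>

// Hypothetical stand-in for TimelineModel: paginates older events in batches
// and announces each completed batch with fetchedMore().
class FakeTimeline : public QAbstractListModel
{
    Q_OBJECT
public:
    int rowCount(const QModelIndex &parent = QModelIndex()) const override
    {
        return parent.isValid() ? 0 : events_.size();
    }
    QVariant data(const QModelIndex &idx, int role) const override
    {
        return role == Qt::DisplayRole ? QVariant(events_.at(idx.row())) : QVariant();
    }
    bool canFetchMore(const QModelIndex &) const override { return batches_ < 5; }
    void fetchMore(const QModelIndex &) override
    {
        // Pretend a batch of ten older events arrived from the cache or server.
        beginInsertRows(QModelIndex(), events_.size(), events_.size() + 9);
        for (int i = 0; i < 10; ++i)
            events_ << QStringLiteral("event %1.%2").arg(batches_).arg(i);
        endInsertRows();
        ++batches_;
        emit fetchedMore(); // plays the role of TimelineModel::fetchedMore
    }
signals:
    void fetchedMore();

private:
    QStringList events_;
    int batches_ = 0;
};

// Hypothetical stand-in for TimelineFilter: keeps requesting more history
// while the active filter has not gained any visible rows.
class SearchFilter : public QSortFilterProxyModel
{
    Q_OBJECT
public:
    explicit SearchFilter(FakeTimeline *src, QObject *parent = nullptr)
      : QSortFilterProxyModel(parent)
    {
        setSourceModel(src);
        connect(src, &FakeTimeline::fetchedMore, this, &SearchFilter::fetchAgain);
    }

private slots:
    void fetchAgain()
    {
        // Same idea as TimelineFilter::fetchAgain(): if the last batch added no
        // visible rows, the filter is still starving, so paginate again.
        if (rowCount() == cachedCount_ && sourceModel()->canFetchMore(QModelIndex()))
            sourceModel()->fetchMore(QModelIndex());
        else
            cachedCount_ = rowCount(); // progress was made; wait for the next trigger
    }

private:
    int cachedCount_ = 0;
};
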
@@ -45,9 +45,13 @@ signals:
     void sourceChanged();
     void currentIndexChanged();
 
+private slots:
+    void fetchAgain();
+
 protected:
     bool filterAcceptsRow(int source_row, const QModelIndex &source_parent) const override;
 
 private:
     QString threadId, contentFilter;
+    int cachedCount = 0;
 };

@@ -449,6 +449,7 @@ TimelineModel::TimelineModel(TimelineViewManager *manager, QString room_id, QObj
     connect(&events, &EventStore::fetchedMore, this, [this]() {
         setPaginationInProgress(false);
         updateLastMessage();
+        emit fetchedMore();
     });
     connect(&events, &EventStore::fetchedMore, this, &TimelineModel::checkAfterFetch);
     connect(&events,

@@ -465,6 +465,8 @@ signals:
 
     void scrollTargetChanged();
 
+    void fetchedMore();
+
 private:
     template<typename T>
     void sendEncryptedMessage(mtx::events::RoomEvent<T> msg, mtx::events::EventType eventType);