Continue fetching past messages when searching

Nicolas Werner 2022-11-03 23:26:59 +01:00
parent 56a4e97296
commit 76347f1c6f
No known key found for this signature in database
GPG key ID: C8D75E610773F2D9
6 changed files with 94 additions and 54 deletions


@@ -1013,7 +1013,8 @@ Cache::getOlmSessions(const std::string &curve25519)
{
using namespace mtx::crypto;
auto txn = lmdb::txn::begin(env_);
try {
auto txn = ro_txn(env_);
auto db = getOlmSessionsDb(txn, curve25519);
std::string_view session_id, unused;
@@ -1024,9 +1025,10 @@ Cache::getOlmSessions(const std::string &curve25519)
res.emplace_back(session_id);
cursor.close();
txn.commit();
return res;
} catch (...) {
return {};
}
}
void
@@ -2173,7 +2175,8 @@ Cache::roomIds()
std::string
Cache::previousBatchToken(const std::string &room_id)
{
auto txn = lmdb::txn::begin(env_, nullptr);
auto txn = ro_txn(env_);
try {
auto orderDb = getEventOrderDb(txn, room_id);
auto cursor = lmdb::cursor::open(txn, orderDb);
@@ -2185,6 +2188,9 @@ Cache::previousBatchToken(const std::string &room_id)
auto j = nlohmann::json::parse(val);
return j.value("prev_batch", "");
} catch (...) {
return "";
}
}
Cache::Messages
@@ -3206,10 +3212,10 @@ Cache::pendingEvents(const std::string &room_id)
std::optional<mtx::events::collections::TimelineEvent>
Cache::firstPendingMessage(const std::string &room_id)
{
auto txn = lmdb::txn::begin(env_);
auto txn = ro_txn(env_);
auto pending = getPendingMessagesDb(txn, room_id);
{
try {
auto pendingCursor = lmdb::cursor::open(txn, pending);
std::string_view tsIgnored, pendingTxn;
while (pendingCursor.get(tsIgnored, pendingTxn, MDB_NEXT)) {
@@ -3225,7 +3231,6 @@ Cache::firstPendingMessage(const std::string &room_id)
from_json(nlohmann::json::parse(event), te);
pendingCursor.close();
txn.commit();
return te;
} catch (std::exception &e) {
nhlog::db()->error("Failed to parse message from cache {}", e.what());
@@ -3233,10 +3238,8 @@ Cache::firstPendingMessage(const std::string &room_id)
continue;
}
}
} catch (const lmdb::error &e) {
}
txn.commit();
return std::nullopt;
}
@@ -3998,7 +4001,8 @@ Cache::hasEnoughPowerLevel(const std::vector<mtx::events::EventType> &eventTypes
using namespace mtx::events;
using namespace mtx::events::state;
auto txn = lmdb::txn::begin(env_);
auto txn = ro_txn(env_);
try {
auto db = getStatesDb(txn, room_id);
int64_t min_event_level = std::numeric_limits<int64_t>::max();
@@ -4016,15 +4020,17 @@ Cache::hasEnoughPowerLevel(const std::vector<mtx::events::EventType> &eventTypes
user_level = msg.content.user_level(user_id);
for (const auto &ty : eventTypes)
min_event_level = std::min(min_event_level, msg.content.state_level(to_string(ty)));
min_event_level =
std::min(min_event_level, msg.content.state_level(to_string(ty)));
} catch (const nlohmann::json::exception &e) {
nhlog::db()->warn("failed to parse m.room.power_levels event: {}", e.what());
}
}
txn.commit();
return user_level >= min_event_level;
} catch (...) {
return false;
}
}
std::vector<std::string>
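
The Cache hunks above all make the same change: read-only lookups such as getOlmSessions(), previousBatchToken(), firstPendingMessage() and hasEnoughPowerLevel() stop opening a read-write transaction via lmdb::txn::begin(env_) and instead use ro_txn(env_) inside a try/catch, so a failed lookup returns an empty result instead of throwing, and the lookup no longer competes for LMDB's single write lock. The ro_txn helper itself is not part of this diff; the snippet below is only a sketch of what such a helper can look like with the lmdbxx wrapper already used here (the real helper may cache and renew a thread-local read-only transaction), and begin_ro_txn is a hypothetical name.

    // Sketch only: a read-only LMDB transaction in the spirit of ro_txn(env_).
    // MDB_RDONLY transactions see a consistent snapshot and never take the
    // write lock, so pure lookups like previousBatchToken() cannot stall writers.
    #include <lmdb++.h>

    inline lmdb::txn begin_ro_txn(lmdb::env &env)
    {
        return lmdb::txn::begin(env, /*parent=*/nullptr, MDB_RDONLY);
    }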


@@ -80,8 +80,8 @@ EventStore::EventStore(std::string room_id, QObject *)
emit beginInsertRows(toExternalIdx(newFirst), toExternalIdx(this->first - 1));
this->first = newFirst;
emit endInsertRows();
emit fetchedMore();
emit dataChanged(toExternalIdx(oldFirst), toExternalIdx(oldFirst));
emit fetchedMore();
} else {
auto range = cache::client()->getTimelineRange(room_id_);
@@ -725,7 +725,8 @@ EventStore::decryptEvent(const IdIndex &idx,
case olm::DecryptionErrorCode::ParsingFailed:
break;
case olm::DecryptionErrorCode::ReplayAttack:
nhlog::crypto()->critical("Reply attack while decryptiong event {} in room {} from {}!",
nhlog::crypto()->critical(
"Replay attack while decryptiong event {} in room {} from {}!",
e.event_id,
room_id_,
e.sender);
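
Two small EventStore changes: the log message now correctly says "Replay attack", and in the pagination path fetchedMore() is emitted after dataChanged() instead of before it, presumably so that every notification belonging to a finished fetch has been delivered before anything attached to fetchedMore() reacts and possibly starts the next fetch. Annotated copy of the reordered lines (comments added, not in the diff):

    // New emission order after older events were inserted: announce the data
    // update for the previously-oldest row first, then signal that the fetch
    // finished. With same-thread (direct) connections this means slots attached
    // to fetchedMore(), such as the new TimelineFilter::fetchAgain() below,
    // only run once the row notifications for this batch have gone out.
    emit dataChanged(toExternalIdx(oldFirst), toExternalIdx(oldFirst));
    emit fetchedMore();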


@@ -19,8 +19,10 @@ TimelineFilter::setThreadId(const QString &t)
if (this->threadId != t) {
this->threadId = t;
invalidateFilter();
}
fetchMore({});
emit threadIdChanged();
}
}
void
@@ -30,21 +32,45 @@ TimelineFilter::setContentFilter(const QString &c)
if (this->contentFilter != c) {
this->contentFilter = c;
invalidateFilter();
}
fetchMore({});
emit contentFilterChanged();
}
}
void
TimelineFilter::fetchAgain()
{
if (threadId.isEmpty() && contentFilter.isEmpty())
return;
if (auto s = source()) {
if (rowCount() == cachedCount && s->canFetchMore(QModelIndex()))
s->fetchMore(QModelIndex());
else
cachedCount = rowCount();
}
}
void
TimelineFilter::setSource(TimelineModel *s)
{
if (auto orig = this->source(); orig != s) {
if (orig)
cachedCount = 0;
if (orig) {
disconnect(orig,
&TimelineModel::currentIndexChanged,
this,
&TimelineFilter::currentIndexChanged);
disconnect(orig, &TimelineModel::fetchedMore, this, &TimelineFilter::fetchAgain);
}
this->setSourceModel(s);
connect(s, &TimelineModel::currentIndexChanged, this, &TimelineFilter::currentIndexChanged);
connect(s, &TimelineModel::fetchedMore, this, &TimelineFilter::fetchAgain);
emit sourceChanged();
invalidateFilter();
}
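
This is the core of the commit: with a thread or content filter active, fetching one page of history can add rows to the source model that are all filtered out, so the visible timeline never grows. setThreadId() and setContentFilter() now kick off a fetchMore({}) right after invalidating the filter, setSource() resets cachedCount and connects TimelineModel::fetchedMore to the new fetchAgain() slot, and fetchAgain() keeps paginating until the filtered row count actually changes or the source has nothing more to fetch. Below is the same fetchAgain() logic as in the hunk above, with explanatory comments added:

    void
    TimelineFilter::fetchAgain()
    {
        // Only keep auto-paginating while some filter is active.
        if (threadId.isEmpty() && contentFilter.isEmpty())
            return;

        if (auto s = source()) {
            if (rowCount() == cachedCount && s->canFetchMore(QModelIndex()))
                s->fetchMore(QModelIndex()); // nothing new became visible, go further back
            else
                cachedCount = rowCount();    // progress was made, remember the new visible size
        }
    }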


@@ -45,9 +45,13 @@ signals:
void sourceChanged();
void currentIndexChanged();
private slots:
void fetchAgain();
protected:
bool filterAcceptsRow(int source_row, const QModelIndex &source_parent) const override;
private:
QString threadId, contentFilter;
int cachedCount = 0;
};


@@ -449,6 +449,7 @@ TimelineModel::TimelineModel(TimelineViewManager *manager, QString room_id, QObj
connect(&events, &EventStore::fetchedMore, this, [this]() {
setPaginationInProgress(false);
updateLastMessage();
emit fetchedMore();
});
connect(&events, &EventStore::fetchedMore, this, &TimelineModel::checkAfterFetch);
connect(&events,


@@ -465,6 +465,8 @@ signals:
void scrollTargetChanged();
void fetchedMore();
private:
template<typename T>
void sendEncryptedMessage(mtx::events::RoomEvent<T> msg, mtx::events::EventType eventType);
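
Taken together, the TimelineModel hunks just declare a fetchedMore() signal and forward the store's signal through it, so the filter proxy can react. The resulting chain is EventStore::fetchedMore -> TimelineModel::fetchedMore -> TimelineFilter::fetchAgain. Excerpt of that wiring from the hunks above, with comments added:

    // In TimelineModel's constructor: forward the store's signal once the
    // pagination state and last-message preview have been updated.
    connect(&events, &EventStore::fetchedMore, this, [this]() {
        setPaginationInProgress(false);
        updateLastMessage();
        emit fetchedMore();
    });

    // In TimelineFilter::setSource(): keep fetching while the active filter
    // hides everything that was just loaded.
    connect(s, &TimelineModel::fetchedMore, this, &TimelineFilter::fetchAgain);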