// Matrix Construct
//
// Copyright (C) Matrix Construct Developers, Authors & Contributors
// Copyright (C) 2016-2018 Jason Volk <jason@zemos.net>
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice is present in all copies. The
// full license for this software is available in the LICENSE file.

namespace ircd::m::sync
{
	static bool _room_timeline_append(data &, json::stack::array &, const m::event::idx &, const m::event &);
	static event::id::buf _room_timeline_polylog_events(data &, const m::room &, bool &, bool &);
	static bool room_timeline_polylog(data &);

	static bool _room_timeline_linear_command(data &);
	static bool room_timeline_linear(data &);

	extern conf::item<size_t> limit_default;
	extern conf::item<size_t> limit_initial_default;
	extern item room_timeline;
}

ircd::mapi::header
IRCD_MODULE
{
	"Client Sync :Room Timeline"
};
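
// This module generates the timeline portion of a Matrix /sync response for
// each room: the "events" array plus the "limited" flag and "prev_batch"
// token inside rooms.<membership>.<room_id>.timeline. An abbreviated,
// hypothetical response fragment (values are illustrative only):
//
//   "rooms": {
//     "join": {
//       "!example:host.tld": {
//         "timeline": {
//           "events":     [ ... ],
//           "limited":    false,
//           "prev_batch": "$oldest_event_id"
//         }
//       }
//     }
//   }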

decltype(ircd::m::sync::room_timeline)
ircd::m::sync::room_timeline
{
	"rooms.timeline",
	room_timeline_polylog,
	room_timeline_linear
};
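
// Maximum number of timeline events emitted per room in one sync. The
// initial-sync value is kept small so a phased initial sync can respond
// quickly; clients can backfill older events later (e.g. via /messages
// using the prev_batch token).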

decltype(ircd::m::sync::limit_default)
ircd::m::sync::limit_default
{
	{ "name",    "ircd.client.sync.rooms.timeline.limit.default" },
	{ "default", 10L                                             },
};

decltype(ircd::m::sync::limit_initial_default)
ircd::m::sync::limit_initial_default
{
	{ "name",    "ircd.client.sync.rooms.timeline.limit_initial.default" },
	{ "default", 1L                                                      },
};
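
// Linear (incremental) handler: called for a single event being synced to
// the user; returns false when nothing is emitted for this event.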
bool
ircd::m::sync::room_timeline_linear(data &data)
{
	if(!data.event_idx)
		return false;

	if(!data.room)
		return false;

	if(!data.membership && *data.room != data.user_room)
		return false;

	assert(data.event);
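
	// Events of type ircd.cmd* in the user's user-room (sent by the server
	// or by the user themselves) are treated as command traffic and routed
	// to the dedicated branch below.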
	const bool command
	{
		*data.room == data.user_room &&
		startswith(json::get<"type"_>(*data.event), "ircd.cmd") &&
		(json::get<"sender"_>(*data.event) == me() ||
		 json::get<"sender"_>(*data.event) == data.user.user_id)
	};

	json::stack::object rooms
	{
		*data.out, "rooms"
	};

	if(command)
		return _room_timeline_linear_command(data);
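
	// Viewport check: events whose depth is more than viewport_size below
	// the room's current depth are not emitted through incremental sync.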
	const ssize_t &viewport_size
	{
		room::events::viewport_size
	};

	if(likely(viewport_size >= 0))
		if(json::get<"depth"_>(*data.event) + viewport_size < data.room_depth)
			return false;

	json::stack::object membership_
	{
		*data.out, data.membership
	};

	json::stack::object room_
	{
		*data.out, data.room->room_id
	};

	json::stack::object timeline
	{
		*data.out, "timeline"
	};
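
	// Determine whether this event is the syncing user's own join, and if so
	// whether it is really a rejoin (previous membership state was already
	// "join", e.g. a displayname or avatar change).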
	const bool is_own_membership
	{
		json::get<"type"_>(*data.event) == "m.room.member"
		&& json::get<"state_key"_>(*data.event) == data.user.user_id
	};

	const bool is_own_join
	{
		is_own_membership
		&& data.membership == "join"
	};

	const auto last_membership_state_idx
	{
		is_own_join?
			m::room::state::prev(data.event_idx):
			0UL
	};

	const bool is_own_rejoin
	{
		last_membership_state_idx?
			m::membership(last_membership_state_idx, "join"):
			false
	};

	// Branch to backfill the user's timeline before their own join event to
	// the room. This simply reuses the polylog handler as if they were
	// initial-syncing the room. This branch is not taken for rejoins (i.e.
	// displayname and avatar changes).
	if(is_own_join && !is_own_rejoin)
	{
		const scope_restore range_first
		{
			data.range.first, last_membership_state_idx
		};

		bool ret{false}, limited{false};
		const auto prev_batch
		{
			_room_timeline_polylog_events(data, *data.room, limited, ret)
		};

		return ret;
	}

	json::stack::array array
	{
		*data.out, "events"
	};

	return _room_timeline_append(data, array, data.event_idx, *data.event);
}
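
// Presents an ircd.cmd* event as an m.room.message in the timeline of the
// room named by the event's content, under the "join" membership section.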
bool
ircd::m::sync::_room_timeline_linear_command(data &data)
{
	const m::room &room
	{
		unquote(json::get<"content"_>(*data.event).get("room_id"))
	};

	const scope_restore _room
	{
		data.room, &room
	};

	const scope_restore _membership
	{
		data.membership, "join"_sv
	};

	json::stack::object membership_
	{
		*data.out, data.membership
	};

	json::stack::object room_
	{
		*data.out, data.room->room_id
	};

	json::stack::object timeline
	{
		*data.out, "timeline"
	};

	json::stack::array array
	{
		*data.out, "events"
	};
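
	// Rewrite a copy of the event as an m.room.message in the target room so
	// the client renders the command output like an ordinary message.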
	m::event event{*data.event};
	json::get<"type"_>(event) = "m.room.message";
	json::get<"room_id"_>(event) = room.room_id;
	const scope_restore _event
	{
		data.event, &event
	};

	return _room_timeline_append(data, array, data.event_idx, event);
}
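
// Polylog (range) handler: emits this room's timeline object over the sync
// range, i.e. the "events" array plus the "limited" flag and, when available,
// a "prev_batch" token.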
bool
ircd::m::sync::room_timeline_polylog(data &data)
{
	if(!apropos(data, data.room_head))
		return false;

	// events
	assert(data.room);
	bool limited{false}, ret{false};
	m::event::id::buf prev
	{
		_room_timeline_polylog_events(data, *data.room, limited, ret)
	};

	// limited
	json::stack::member
	{
		*data.out, "limited", json::value{limited}
	};

	// prev_batch
	if(likely(prev))
		json::stack::member
		{
			*data.out, "prev_batch", string_view{prev}
		};

	return ret;
}
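
// Emits the "events" array for the room within the sync range. Returns the
// event_id of the oldest event reached during iteration, which the caller
// publishes as prev_batch. `limited` is set when more events were available
// than the limit; `ret` is set when at least one event was actually appended.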
ircd::m::event::id::buf
ircd::m::sync::_room_timeline_polylog_events(data &data,
                                             const m::room &room,
                                             bool &limited,
                                             bool &ret)
{
	json::stack::array array
	{
		*data.out, "events"
	};

	// The iterator seeks to the newest event, but the client wants the
	// oldest event first, so we seek down first and then iterate back up.
	// Due to an issue with rocksdb's prefix-iteration this iterator becomes
	// toxic as soon as it becomes invalid. As a result we have to copy the
	// event_idx on the way down in case of renewing the iterator for the
	// way back. This is not a big deal but rocksdb should fix their shit.
	m::event::id::buf event_id;
	m::event::idx event_idx {0};
	m::room::events it
	{
		room
	};
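
	// Choose the batch size: a phased initial sync (range starting at zero)
	// uses the smaller initial limit, otherwise the standard default applies.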
	const ssize_t limit
	{
		data.phased && data.range.first == 0?
			ssize_t(limit_initial_default):  // phased + initial=true
			ssize_t(limit_default)
	};
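
	// Descend from the newest event toward the start of the sync range,
	// counting and prefetching the events to return; event_idx is copied at
	// each step in case the iterator must be re-seeked for the way back up.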
	ssize_t i(0), prefetched(0);
	for(; it && i <= limit; --it)
	{
		event_idx = it.event_idx();
		if(!i && event_idx >= data.range.second)
			continue;

		if(event_idx < data.range.first)
			break;

		if(limit > 1)
			prefetched += m::prefetch(event_idx);

		++i;
	}
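
	// Ascend back up, appending events oldest-first. The iterator is
	// re-seeked from the copied event_idx if the descent invalidated it;
	// `limited` is set when more events were counted than the limit allows.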
	limited = i > limit;
	if(i > 1 && !it)
		it.seek(event_idx);

	if(i > 1 && it)
		--i, ++it;

	if(i > 0 && it)
		for(++it; i > 0 && it; --i, ++it)
		{
			const m::event &event
			{
				*it
			};

			ret |= _room_timeline_append(data, array, it.event_idx(), event);
		}

	return m::event_id(std::nothrow, event_idx);
}
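
// Serialize one event into the "events" array. The opts pass the sync
// context through to m::event::append so it can decorate the event for this
// user and client (e.g. the unsigned transaction_id echoed to the sender).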
bool
ircd::m::sync::_room_timeline_append(data &data,
                                     json::stack::array &events,
                                     const m::event::idx &event_idx,
                                     const m::event &event)
{
	m::event::append::opts opts;
	opts.event_idx = &event_idx;
	opts.client_txnid = &data.client_txnid;
	opts.user_id = &data.user.user_id;
	opts.user_room = &data.user_room;
	opts.room_depth = &data.room_depth;
	return m::event::append(events, event, opts);
}