// Matrix Construct
//
// Copyright (C) Matrix Construct Developers, Authors & Contributors
// Copyright (C) 2016-2018 Jason Volk <jason@zemos.net>
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice is present in all copies. The
// full license for this software is available in the LICENSE file.

namespace ircd::m
{
	static void append_v1(json::stack::array &, const event::id &);
	static void append_v3(json::stack::array &, const event::id &);
}

ircd::m::room::head::generate::generate(const mutable_buffer &buf,
                                        const m::room::head &head,
                                        const opts &opts)
{
	if(empty(buf))
		return;

	json::stack out{buf};
	{
		json::stack::array array
		{
			out
		};

		const generate g
		{
			array, head, opts
		};

		this->depth = g.depth;
	}

	this->array = out.completed();
}
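
// Hypothetical usage sketch (not from this file; the opts type path and the
// head construction are assumptions, while the option and member names all
// appear in the code above):
//
//   const m::room::head head{room};
//   m::room::head::generate::opts opts;
//   opts.limit = 20;              // cap the number of references
//   opts.need_top_head = true;    // always include the room's top event
//
//   char buf[8192];
//   const m::room::head::generate prev{buf, head, opts};
//   // prev.array -> serialized JSON array of prev_events references
//   // prev.depth -> [0] lowest / [1] highest depth among the references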

ircd::m::room::head::generate::generate(json::stack::array &out,
                                        const m::room::head &head,
                                        const opts &opts)
try
{
	if(!head.room)
		return;

	// Query the room version unless hinted in the opts
	char versionbuf[32];
	const auto version
	{
		opts.version?:
			m::version(versionbuf, head.room, std::nothrow)
	};

	// The output format depends on the room version; we select an output
	// function for the format here so we can abstractly call append().
	const auto &append
	{
		version == "1" || version == "2"?
			append_v1:
			append_v3
	};

	// When the top_head option is given we query for that here.
	std::tuple<m::id::event::buf, int64_t, m::event::idx> top_head;
	if(opts.need_top_head)
		top_head = m::top(std::nothrow, head.room.room_id);

	// Iterate the room head; this starts with the oldest events.
	bool need_top_head{opts.need_top_head};
	bool need_my_head{opts.need_my_head};
	ssize_t limit(opts.limit);
	head.for_each([&](const event::idx &event_idx, const event::id &event_id)
	{
		// Determine the depth for metrics.
		const int64_t depth
		{
			event_id == std::get<0>(top_head)?
				std::get<int64_t>(top_head):
				m::get<int64_t>(std::nothrow, event_idx, "depth", -1L)
		};

		if(unlikely(depth < 0))
		{
			log::derror
			{
				log, "Missing depth for %s idx:%lu in room head of %s",
				string_view{event_id},
				event_idx,
				string_view{head.room.room_id},
			};

			return true;
		}

		// When using the need_my_head option, if we hit a head which
		// originated from this server we mark that it is no longer needed.
		if(need_my_head && event::my(event_idx))
			need_my_head = false;

		// If we hit the top_head during the loop we can mark that satisfied.
		if(need_top_head && event_id == std::get<0>(top_head))
			need_top_head = false;

		// Two reference slots are reserved to fulfill these features; the
		// loop will iterate without appending anything else.
		const ssize_t remain
		{
			limit - need_my_head - need_top_head
		};

		// Skip/continue the loop if all that remains are reserved slots.
		if(remain <= 0)
			return true;

		// Add this head reference to the output.
		append(out, event_id);

		// Track whether this depth is the lowest or highest of the set.
		this->depth[0] = std::min(depth, this->depth[0]);
		this->depth[1] = std::max(depth, this->depth[1]);

		// Continue the loop until we're out of slots.
		return --limit > 0;
	});

	// If the iteration did not provide us with the top_head and the opts
	// require it, we add that here.
	if(need_top_head && std::get<0>(top_head))
	{
		assert(limit > 0);
		append(out, std::get<0>(top_head));
		this->depth[1] = std::get<1>(top_head);
		need_top_head = false;
		--limit;
		if(need_my_head && event::my(std::get<2>(top_head)))
			need_my_head = false;
	}

	// If the iteration did not provide us with any heads from this origin
	// and the opts require it, we find and add that here. This branch is
	// also taken if no heads whatsoever have been found.
	if(need_my_head || size_t(limit) == opts.limit)
		for(m::room::events it{head.room}; it; --it)
		{
			if(need_my_head && !event::my(it.event_idx()))
				continue;

			const auto event_id
			{
				m::event_id(std::nothrow, it.event_idx())
			};

			if(unlikely(!event_id))
				continue;

			assert(limit > 0);
			append(out, event_id);
			const int64_t &depth(it.depth());
			this->depth[0] = std::min(depth, this->depth[0]);
			this->depth[1] = std::max(depth, this->depth[1]);
			need_my_head = false;
			--limit;
			break;
		}

	assert(limit >= 0);
	if(unlikely(opts.limit && size_t(limit) == opts.limit))
		throw error
		{
			"Failed to find any events at the room head"
		};
}
catch(const std::exception &e)
{
	log::error
	{
		log, "%s prev_events generator :%s",
		string_view{head.room},
		e.what(),
	};

	throw;
}
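
// Write a single prev_events reference in the format selected above by the
// room version: versions 1 and 2 reference a prior event with an
// [event_id, object] pair (the object being a minimal {"": ""} stub here),
// while version 3 and later reference it with the bare event_id string.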
void
ircd::m::append_v1(json::stack::array &out,
                   const event::id &event_id)
{
	json::stack::array prev
	{
		out
	};

	// [0]
	assert(event_id);
	prev.append(event_id);

	// [1]
	json::stack::object nilly
	{
		prev
	};

	json::stack::member
	{
		nilly, "", ""
	};
}

void
ircd::m::append_v3(json::stack::array &out,
                   const event::id &event_id)
{
	assert(event_id);
	out.append(event_id);
}

size_t
ircd::m::room::head::count()
const
{
	size_t ret(0);
	for_each([&ret]
	(const event::idx &event_idx, const event::id &event_id) noexcept
	{
		++ret;
		return true;
	});

	return ret;
}

bool
ircd::m::room::head::has(const event::id &event_id)
const
{
	bool ret{false};
	for_each([&ret, &event_id]
	(const event::idx &event_idx, const event::id &event_id_) noexcept
	{
		ret = event_id_ == event_id;
		return !ret; // for_each protocol: false to break
	});

	return ret;
}
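
// Iterate the room's current head references (its forward extremities).
// Inferred from the code below: each entry of the dbs::room_head column is
// keyed on the room_id and the event_id of a head event, and its value is
// that event's local index, so both are passed to the closure without
// loading the event itself.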
bool
ircd::m::room::head::for_each(const closure &closure)
const
{
	auto it
	{
		dbs::room_head.begin(room.room_id)
	};

	for(; it; ++it)
	{
		const event::id &event_id
		{
			dbs::room_head_key(it->first)
		};

		const event::idx &event_idx
		{
			byte_view<event::idx>{it->second}
		};

		if(!closure(event_idx, event_id))
			return false;
	}

	return true;
}

//
// special tools
//

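// Replace all existing head references for the room with a single reference
// to the room's most recent event.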
size_t
ircd::m::room::head::reset(const head &head)
{
	size_t ret{0};
	const auto &room{head.room};
	m::room::events it{room};
	if(!it)
		return ret;

	// Replacement will be the single new head
	const m::event::fetch replacement
	{
		it.event_idx()
	};

	db::txn txn
	{
		*m::dbs::events
	};

	// Iterate all of the existing heads with a delete operation
	m::dbs::opts opts;
	opts.op = db::op::DELETE;
	opts.appendix.reset();
	opts.appendix.set(dbs::appendix::ROOM_HEAD);
	head.for_each([&room, &opts, &txn, &ret]
	(const m::event::idx &event_idx, const m::event::id &event_id)
	{
		const m::event::fetch event
		{
			std::nothrow, event_idx
		};

		if(!event.valid)
		{
			log::derror
			{
				m::log, "Invalid event '%s' idx %lu in head for %s",
				string_view{event_id},
				event_idx,
				string_view{room.room_id}
			};

			return true;
		}

		opts.event_idx = event_idx;
		m::dbs::write(txn, event, opts);
		++ret;
		return true;
	});

	// Finally add the replacement to the txn
	opts.op = db::op::SET;
	opts.event_idx = it.event_idx();
	m::dbs::write(txn, replacement, opts);

	// Commit txn
	txn();
	return ret;
}
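
// Rebuild the room's head entries by iterating every event in the room from
// the beginning and re-writing the ROOM_HEAD appendix for each; returns the
// number of events written.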
size_t
ircd::m::room::head::rebuild(const head &head)
{
	static const m::event::fetch::opts fopts
	{
		db::gopts
		{
			.cache = false,
		},
	};

	size_t ret{0};
	m::room::events it
	{
		head.room, 0UL, &fopts
	};

	if(!it)
		return ret;

	db::txn txn
	{
		*m::dbs::events
	};

	m::dbs::opts opts
	{
		.op = db::op::SET,
	};

	m::event::fetch event;
	for(; it; ++it)
	{
		if(!seek(std::nothrow, event, it.event_idx()))
			continue;

		opts.event_idx = it.event_idx();
		opts.appendix.reset();
		opts.appendix.set(dbs::appendix::ROOM_HEAD);
		m::dbs::write(txn, event, opts);
		++ret;
	}

	txn();
	return ret;
}
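
// Apply a single SET or DELETE of the given event's entry in the room head.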
void
ircd::m::room::head::modify(const m::event::id &event_id,
                            const db::op &op,
                            const bool &refs)
{
	const m::event::fetch event
	{
		event_id
	};

	db::txn txn
	{
		*m::dbs::events
	};

	// Write the given operation for this event's head entry
	m::dbs::opts opts;
	opts.op = op;
	opts.event_idx = event.event_idx;
	opts.appendix.reset();
	opts.appendix.set(dbs::appendix::ROOM_HEAD);
	m::dbs::write(txn, event, opts);

	// Commit txn
	txn();
}