mirror of
https://github.com/matrix-construct/construct
synced 2024-11-16 15:00:51 +01:00
modules/media/upload: File media in a file room.
This commit is contained in:
parent
d35a0190f7
commit
8ced86e503
1 changed file with 44 additions and 64 deletions
|
@ -37,89 +37,69 @@ post__upload(client &client,
|
||||||
request.head.content_type
|
request.head.content_type
|
||||||
};
|
};
|
||||||
|
|
||||||
const auto filename
|
const auto &server
|
||||||
|
{
|
||||||
|
my_host()
|
||||||
|
};
|
||||||
|
|
||||||
|
const auto &filename
|
||||||
{
|
{
|
||||||
request.query["filename"]
|
request.query["filename"]
|
||||||
};
|
};
|
||||||
|
|
||||||
char pathbuf[32];
|
char randbuf[32];
|
||||||
const auto path
|
const auto randstr
|
||||||
{
|
{
|
||||||
rand::string(rand::dict::alpha, pathbuf)
|
rand::string(rand::dict::alpha, randbuf)
|
||||||
};
|
};
|
||||||
|
|
||||||
sha256 hash;
|
const m::room::id::buf room_id
|
||||||
size_t offset{0};
|
|
||||||
while(offset < size(request.content))
|
|
||||||
{
|
{
|
||||||
const string_view pending
|
file_room_id(server, randstr)
|
||||||
{
|
|
||||||
data(request.content) + offset, size(request.content) - offset
|
|
||||||
};
|
|
||||||
|
|
||||||
const auto appended
|
|
||||||
{
|
|
||||||
fs::append(path, pending, offset)
|
|
||||||
};
|
|
||||||
|
|
||||||
hash.update(appended);
|
|
||||||
offset += size(appended);
|
|
||||||
}
|
|
||||||
assert(offset == client.content_consumed);
|
|
||||||
|
|
||||||
char buffer[4_KiB];
|
|
||||||
while(client.content_consumed < request.head.content_length)
|
|
||||||
{
|
|
||||||
const size_t remain
|
|
||||||
{
|
|
||||||
request.head.content_length - client.content_consumed
|
|
||||||
};
|
|
||||||
|
|
||||||
const mutable_buffer buf
|
|
||||||
{
|
|
||||||
buffer, std::min(remain, sizeof(buf))
|
|
||||||
};
|
|
||||||
|
|
||||||
const string_view read
|
|
||||||
{
|
|
||||||
data(buf), read_few(*client.sock, buf)
|
|
||||||
};
|
|
||||||
|
|
||||||
client.content_consumed += size(read); do
|
|
||||||
{
|
|
||||||
const auto appended
|
|
||||||
{
|
|
||||||
fs::append(path, read, offset)
|
|
||||||
};
|
|
||||||
|
|
||||||
hash.update(appended);
|
|
||||||
offset += size(appended);
|
|
||||||
}
|
|
||||||
while(offset < client.content_consumed);
|
|
||||||
assert(offset == client.content_consumed);
|
|
||||||
}
|
|
||||||
assert(offset == request.head.content_length);
|
|
||||||
|
|
||||||
char hashbuf[32];
|
|
||||||
hash.digest(hashbuf);
|
|
||||||
|
|
||||||
char b58buf[64];
|
|
||||||
const auto new_path
|
|
||||||
{
|
|
||||||
b58encode(b58buf, hashbuf)
|
|
||||||
};
|
};
|
||||||
|
|
||||||
fs::rename(path, new_path);
|
m::vm::opts::commit vmopts;
|
||||||
|
vmopts.history = false;
|
||||||
|
const m::room room
|
||||||
|
{
|
||||||
|
room_id, &vmopts
|
||||||
|
};
|
||||||
|
|
||||||
|
create(room, request.user_id, "file");
|
||||||
|
|
||||||
|
const unique_buffer<mutable_buffer> buf
|
||||||
|
{
|
||||||
|
request.head.content_length
|
||||||
|
};
|
||||||
|
|
||||||
|
copy(buf, request.content);
|
||||||
|
client.content_consumed += read_all(*client.sock, buf);
|
||||||
|
assert(client.content_consumed == request.head.content_length);
|
||||||
|
|
||||||
|
const size_t written
|
||||||
|
{
|
||||||
|
write_file(room, buf, content_type)
|
||||||
|
};
|
||||||
|
|
||||||
char uribuf[256];
|
char uribuf[256];
|
||||||
const string_view content_uri
|
const string_view content_uri
|
||||||
{
|
{
|
||||||
fmt::sprintf
|
fmt::sprintf
|
||||||
{
|
{
|
||||||
uribuf, "mxc://%s/%s", my_host(), new_path
|
uribuf, "mxc://%s/%s", server, randstr
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
log::debug
|
||||||
|
{
|
||||||
|
"%s uploaded %zu bytes uri: `%s' file_room: %s :%s",
|
||||||
|
request.user_id,
|
||||||
|
request.head.content_length,
|
||||||
|
content_uri,
|
||||||
|
string_view{room.room_id},
|
||||||
|
filename
|
||||||
|
};
|
||||||
|
|
||||||
return resource::response
|
return resource::response
|
||||||
{
|
{
|
||||||
client, http::CREATED, json::members
|
client, http::CREATED, json::members
|
||||||
|
|
Loading…
Reference in a new issue