sha256_media integration

Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2024-06-23 20:48:40 +00:00
parent efbdced535
commit 3480074f61
8 changed files with 299 additions and 282 deletions


@@ -300,8 +300,13 @@ pub struct Config {
#[serde(default)]
pub allow_guests_auto_join_rooms: bool,
#[serde(default = "true_fn")]
pub media_startup_check: bool,
#[serde(default = "true_fn")]
pub media_compat_file_link: bool,
#[serde(default = "Vec::new")]
pub prevent_media_downloads_from: Vec<OwnedServerName>,
#[serde(default = "Vec::new")]
pub forbidden_remote_server_names: Vec<OwnedServerName>,
#[serde(default = "Vec::new")]
@@ -758,6 +763,8 @@ impl fmt::Display for Config {
"RocksDB Compaction Idle IOPriority",
&self.rocksdb_compaction_ioprio_idle.to_string(),
),
("Media integrity checks on startup", &self.media_startup_check.to_string()),
("Media compatibility filesystem links", &self.media_compat_file_link.to_string()),
("Prevent Media Downloads From", {
let mut lst = vec![];
for domain in &self.prevent_media_downloads_from {
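The two new options carry serde defaults, so existing configuration files keep working and both features are enabled unless explicitly turned off. A minimal, self-contained sketch of that default pattern, assuming `true_fn` is the crate's usual `fn true_fn() -> bool { true }` helper and using the `toml` crate purely for illustration (the struct name is hypothetical):

use serde::Deserialize;

// Hypothetical stand-in for the crate's serde default helper.
fn true_fn() -> bool { true }

#[derive(Debug, Deserialize)]
struct MediaOptions {
    // Run the media directory integrity check on startup (defaults to true).
    #[serde(default = "true_fn")]
    media_startup_check: bool,
    // Maintain base64-named symlinks next to sha256-named files (defaults to true).
    #[serde(default = "true_fn")]
    media_compat_file_link: bool,
}

fn main() {
    // An empty config exercises the defaults: both flags come back enabled.
    let opts: MediaOptions = toml::from_str("").unwrap();
    assert!(opts.media_startup_check && opts.media_compat_file_link);

    // Opting out only requires setting the key.
    let opts: MediaOptions = toml::from_str("media_compat_file_link = false").unwrap();
    assert!(!opts.media_compat_file_link);
}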


@@ -129,9 +129,6 @@ sentry_telemetry = [
"conduit-core/sentry_telemetry",
"conduit-router/sentry_telemetry",
]
sha256_media = [
"conduit-service/sha256_media",
]
systemd = [
"conduit-router/systemd",
]


@@ -35,9 +35,6 @@ release_max_log_level = [
"log/max_level_trace",
"log/release_max_level_info",
]
sha256_media = [
"dep:sha2",
]
[dependencies]
async-trait.workspace = true
@@ -66,7 +63,6 @@ rustyline-async.optional = true
serde_json.workspace = true
serde.workspace = true
serde_yaml.workspace = true
sha2.optional = true
sha2.workspace = true
termimad.workspace = true
termimad.optional = true


@@ -1,9 +1,12 @@
use std::{
collections::{HashMap, HashSet},
ffi::{OsStr, OsString},
fs::{self},
io::Write,
mem::size_of,
path::PathBuf,
sync::Arc,
time::Instant,
};
use conduit::{debug, debug_info, debug_warn, error, info, utils, warn, Config, Error, Result};
@@ -18,10 +21,12 @@ use ruma::{
use crate::services;
/// The current schema version.
/// * If the database is opened at lesser version we apply migrations up to this
/// version.
/// * If the database is opened at greater version we reject with error.
const DATABASE_VERSION: u64 = 13 + cfg!(feature = "sha256_media") as u64;
/// - If database is opened at greater version we reject with error. The
/// software must be updated for backward-incompatible changes.
/// - If database is opened at lesser version we apply migrations up to this.
/// Note that named-feature migrations may also be performed when opening at
/// equal or lesser version. These are expected to be backward-compatible.
const DATABASE_VERSION: u64 = 13;
pub(crate) async fn migrations(db: &KeyValueDatabase, config: &Config) -> Result<()> {
// Matrix resource ownership is based on the server name; changing it
@@ -119,9 +124,10 @@ async fn migrate(db: &KeyValueDatabase, config: &Config) -> Result<()> {
db_lt_13(db, config).await?;
}
#[cfg(feature = "sha256_media")]
if services().globals.database_version()? < 14 {
feat_sha256_media(db, config).await?;
if db.global.get(b"feat_sha256_media")?.is_none() {
migrate_sha256_media(db, config).await?;
} else if config.media_startup_check {
checkup_sha256_media(db, config).await?;
}
if db
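Unlike the old path, which bumped the schema version to 14 behind the feature flag, the gate above keys the migration on a `feat_sha256_media` marker in the global tree, so it runs exactly once and stays backward-compatible; later startups only run the optional integrity check. A minimal sketch of that run-once pattern, with an in-memory map standing in for the database (names here are illustrative, not the crate's API):

use std::collections::HashMap;

fn main() {
    // Stand-in for the database's `global` tree.
    let mut global: HashMap<Vec<u8>, Vec<u8>> = HashMap::new();
    let media_startup_check = true;

    for startup in 1..=2 {
        if !global.contains_key(b"feat_sha256_media".as_slice()) {
            // First start after upgrading: run the one-time migration,
            // then set the marker so it never runs again.
            println!("startup {startup}: migrating legacy media file names");
            global.insert(b"feat_sha256_media".to_vec(), Vec::new());
        } else if media_startup_check {
            // Every later start: only the optional integrity check runs.
            println!("startup {startup}: checking media directory integrity");
        }
    }
}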
@@ -250,7 +256,7 @@ async fn db_lt_3(db: &KeyValueDatabase, _config: &Config) -> Result<()> {
}
#[allow(deprecated)]
let path = services().globals.get_media_file(&key);
let path = services().media.get_media_file(&key);
let mut file = fs::File::create(path)?;
file.write_all(&content)?;
db.mediaid_file.insert(&key, &[])?;
@@ -688,29 +694,110 @@ async fn db_lt_13(_db: &KeyValueDatabase, config: &Config) -> Result<()> {
Ok(())
}
#[cfg(feature = "sha256_media")]
async fn feat_sha256_media(db: &KeyValueDatabase, _config: &Config) -> Result<()> {
use std::path::PathBuf;
warn!("Mgrating legacy base64 file names to sha256 file names");
/// Migrates a media directory from legacy base64 file names to sha2 file names.
/// All errors are fatal. Upon success the database is keyed to not perform this
/// again.
async fn migrate_sha256_media(db: &KeyValueDatabase, _config: &Config) -> Result<()> {
warn!("Migrating legacy base64 file names to sha256 file names");
// Move old media files to new names
let mut changes = Vec::<(PathBuf, PathBuf)>::new();
for (key, _) in db.mediaid_file.iter() {
let old = services().globals.get_media_file(&key);
let new = services().globals.get_media_file_new(&key);
let old = services().media.get_media_file_b64(&key);
let new = services().media.get_media_file_sha256(&key);
debug!(?key, ?old, ?new, num = changes.len(), "change");
changes.push((old, new));
debug!(?old, ?new, num = changes.len(), "change");
}
// move the file to the new location
for (old_path, path) in changes {
if old_path.exists() {
tokio::fs::rename(&old_path, &path).await?;
tokio::fs::symlink(&path, &old_path).await?;
}
}
services().globals.bump_database_version(14)?;
info!("Migration: 13 -> 14 finished");
// Apply fix from when sha256_media was backward-incompat and bumped the schema
// version from 13 to 14. For users satisfying these conditions we can go back.
if services().globals.database_version()? == 14 && DATABASE_VERSION == 13 {
services().globals.bump_database_version(13)?;
}
db.global.insert(b"feat_sha256_media", &[])?;
info!("Finished applying sha256_media");
Ok(())
}
/// Check is run on startup for prior-migrated media directories. This handles:
/// - Going back and forth to non-sha256 legacy binaries (e.g. upstream).
/// - Deletion of artifacts in the media directory which will then fall out of
/// sync with the database.
async fn checkup_sha256_media(db: &KeyValueDatabase, config: &Config) -> Result<()> {
use crate::media::encode_key;
debug!("Checking integrity of media directory");
let media = &services().media;
let timer = Instant::now();
let dir = media.get_media_dir();
let files: HashSet<OsString> = fs::read_dir(dir)?
.filter_map(|ent| ent.map_or(None, |ent| Some(ent.path().into_os_string())))
.collect();
for key in media.db.get_all_media_keys() {
let new_path = media.get_media_file_sha256(&key).into_os_string();
let old_path = media.get_media_file_b64(&key).into_os_string();
if let Err(e) = handle_media_check(db, config, &files, &key, &new_path, &old_path).await {
error!(
media_id = ?encode_key(&key), ?new_path, ?old_path,
"Failed to resolve media check failure: {e}"
);
}
}
debug_info!(
elapsed = ?timer.elapsed(),
"Finished checking media directory"
);
Ok(())
}
async fn handle_media_check(
db: &KeyValueDatabase, config: &Config, files: &HashSet<OsString>, key: &[u8], new_path: &OsStr, old_path: &OsStr,
) -> Result<()> {
use crate::media::encode_key;
let old_exists = files.contains(old_path);
let new_exists = files.contains(new_path);
if !old_exists && !new_exists {
error!(
media_id = ?encode_key(key), ?new_path, ?old_path,
"Media is missing at all paths. Removing from database..."
);
db.mediaid_file.remove(key)?;
db.mediaid_user.remove(key)?;
}
if config.media_compat_file_link && !old_exists && new_exists {
debug_warn!(
media_id = ?encode_key(key), ?new_path, ?old_path,
"Media found but missing legacy link. Fixing..."
);
tokio::fs::symlink(&new_path, &old_path).await?;
}
if config.media_compat_file_link && !new_exists && old_exists {
debug_warn!(
media_id = ?encode_key(key), ?new_path, ?old_path,
"Legacy media found without sha256 migration. Fixing..."
);
tokio::fs::rename(&old_path, &new_path).await?;
tokio::fs::symlink(&new_path, &old_path).await?;
}
Ok(())
}
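Both the one-time migration and the startup repair paths reduce to the same move: rename the base64-named file to its sha256 name, then leave a symlink at the old name so legacy binaries (and the compat option above) can still resolve it. A standalone sketch of that move, assuming a Unix filesystem (tokio::fs::symlink is Unix-only); the paths and helper name are hypothetical:

use std::{io, path::Path};

// Move a media file to its sha256-named location and leave a base64-named
// symlink behind, mirroring the rename + symlink steps above.
async fn move_with_legacy_link(old: &Path, new: &Path) -> io::Result<()> {
    if old.exists() {
        tokio::fs::rename(old, new).await?;
        tokio::fs::symlink(new, old).await?;
    }
    Ok(())
}

#[tokio::main]
async fn main() -> io::Result<()> {
    let dir = Path::new("/tmp/media");
    move_with_legacy_link(&dir.join("legacy_b64_name"), &dir.join("sha256_name")).await
}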


@@ -7,13 +7,10 @@ pub(super) mod updates;
use std::{
collections::{BTreeMap, HashMap},
fs,
path::PathBuf,
sync::Arc,
time::Instant,
};
use base64::{engine::general_purpose, Engine as _};
use conduit::utils;
use data::Data;
use hickory_resolver::TokioAsyncResolver;
@@ -128,8 +125,6 @@ impl Service {
.expect("@conduit:server_name is valid"),
};
fs::create_dir_all(s.get_media_folder())?;
if !s
.supported_room_versions()
.contains(&s.config.default_room_version)
@@ -314,39 +309,6 @@ impl Service {
pub fn bump_database_version(&self, new_version: u64) -> Result<()> { self.db.bump_database_version(new_version) }
pub fn get_media_folder(&self) -> PathBuf {
let mut r = PathBuf::new();
r.push(self.config.database_path.clone());
r.push("media");
r
}
/// new SHA256 file name media function, requires "sha256_media" feature
/// flag enabled and database migrated uses SHA256 hash of the base64 key as
/// the file name
#[cfg(feature = "sha256_media")]
pub fn get_media_file_new(&self, key: &[u8]) -> PathBuf {
let mut r = PathBuf::new();
r.push(self.config.database_path.clone());
r.push("media");
// Using the hash of the base64 key as the filename
// This is to prevent the total length of the path from exceeding the maximum
// length in most filesystems
r.push(general_purpose::URL_SAFE_NO_PAD.encode(<sha2::Sha256 as sha2::Digest>::digest(key)));
r
}
/// old base64 file name media function
/// This is the old version of `get_media_file` that uses the full base64
/// key as the filename.
pub fn get_media_file(&self, key: &[u8]) -> PathBuf {
let mut r = PathBuf::new();
r.push(self.config.database_path.clone());
r.push("media");
r.push(general_purpose::URL_SAFE_NO_PAD.encode(key));
r
}
pub fn well_known_client(&self) -> &Option<Url> { &self.config.well_known.client }
pub fn well_known_server(&self) -> &Option<OwnedServerName> { &self.config.well_known.server }


@@ -1,18 +1,22 @@
mod data;
use std::{collections::HashMap, io::Cursor, sync::Arc, time::SystemTime};
mod tests;
use std::{collections::HashMap, io::Cursor, path::PathBuf, sync::Arc, time::SystemTime};
use base64::{engine::general_purpose, Engine as _};
use conduit::{debug, debug_error, error, utils, Error, Result, Server};
use data::Data;
use database::KeyValueDatabase;
use image::imageops::FilterType;
use ruma::{OwnedMxcUri, OwnedUserId};
use serde::Serialize;
use tokio::{
fs::{self, File},
fs,
io::{AsyncReadExt, AsyncWriteExt, BufReader},
sync::{Mutex, RwLock},
};
use tracing::{debug, error};
use crate::{services, utils, Error, Result};
use crate::services;
#[derive(Debug)]
pub struct FileMeta {
@@ -39,11 +43,20 @@ pub struct UrlPreviewData {
}
pub struct Service {
server: Arc<Server>,
pub(super) db: Arc<dyn Data>,
pub url_preview_mutex: RwLock<HashMap<String, Arc<Mutex<()>>>>,
}
impl Service {
pub fn build(server: &Arc<Server>, db: &Arc<KeyValueDatabase>) -> Self {
Self {
server: server.clone(),
db: db.clone(),
url_preview_mutex: RwLock::new(HashMap::new()),
}
}
/// Uploads a file.
pub async fn create(
&self, sender_user: Option<OwnedUserId>, mxc: String, content_disposition: Option<&str>,
@@ -58,21 +71,8 @@ impl Service {
.create_file_metadata(None, mxc, 0, 0, content_disposition, content_type)?
};
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&key);
};
let mut f = File::create(path).await?;
//TODO: Dangling metadata in database if creation fails
let mut f = self.create_media_file(&key).await?;
f.write_all(file).await?;
Ok(())
@@ -82,24 +82,7 @@ impl Service {
pub async fn delete(&self, mxc: String) -> Result<()> {
if let Ok(keys) = self.db.search_mxc_metadata_prefix(mxc.clone()) {
for key in keys {
let file_path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
file_path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
file_path = services().globals.get_media_file(&key);
};
debug!("Got local file path: {:?}", file_path);
debug!("Deleting local file {:?} from filesystem, original MXC: {}", file_path, mxc);
fs::remove_file(file_path).await?;
self.remove_media_file(&key).await?;
debug!("Deleting MXC {mxc} from database");
self.db.delete_file_mxc(mxc.clone())?;
@@ -128,21 +111,8 @@ impl Service {
.create_file_metadata(None, mxc, width, height, content_disposition, content_type)?
};
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&key);
};
let mut f = File::create(path).await?;
//TODO: Dangling metadata in database if creation fails
let mut f = self.create_media_file(&key).await?;
f.write_all(file).await?;
Ok(())
@@ -151,22 +121,9 @@ impl Service {
/// Downloads a file.
pub async fn get(&self, mxc: String) -> Result<Option<FileMeta>> {
if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, 0, 0) {
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&key);
};
let mut file = Vec::new();
BufReader::new(File::open(path).await?)
let path = self.get_media_file(&key);
BufReader::new(fs::File::open(path).await?)
.read_to_end(&mut file)
.await?;
@@ -233,24 +190,11 @@ impl Service {
continue;
}
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&key);
};
debug!("MXC path: {:?}", path);
let path = self.get_media_file(&key);
debug!("MXC path: {path:?}");
let file_metadata = fs::metadata(path.clone()).await?;
debug!("File metadata: {:?}", file_metadata);
debug!("File metadata: {file_metadata:?}");
let file_created_at = match file_metadata.created() {
Ok(value) => value,
@@ -328,22 +272,9 @@ impl Service {
if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), width, height) {
// Using saved thumbnail
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&key);
};
let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?;
let path = self.get_media_file(&key);
fs::File::open(path).await?.read_to_end(&mut file).await?;
Ok(Some(FileMeta {
content_disposition,
@@ -352,22 +283,9 @@ impl Service {
}))
} else if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), 0, 0) {
// Generate a thumbnail
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&key);
};
let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?;
let path = self.get_media_file(&key);
fs::File::open(path).await?.read_to_end(&mut file).await?;
if let Ok(image) = image::load_from_memory(&file) {
let original_width = image.width();
@@ -433,21 +351,7 @@ impl Service {
content_type.as_deref(),
)?;
let path;
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(feature = "sha256_media")]
{
path = services().globals.get_media_file_new(&thumbnail_key);
};
#[allow(clippy::unnecessary_operation)] // error[E0658]: attributes on expressions are experimental
#[cfg(not(feature = "sha256_media"))]
{
path = services().globals.get_media_file(&thumbnail_key);
};
let mut f = File::create(path).await?;
let mut f = self.create_media_file(&thumbnail_key).await?;
f.write_all(&thumbnail_bytes).await?;
Ok(Some(FileMeta {
@@ -483,99 +387,81 @@ impl Service {
.expect("valid system time");
self.db.set_url_preview(url, data, now)
}
pub async fn create_media_dir(&self) -> Result<()> {
let dir = self.get_media_dir();
Ok(fs::create_dir_all(dir).await?)
}
#[cfg(test)]
mod tests {
#[cfg(feature = "sha256_media")]
#[tokio::test]
async fn long_file_names_works() {
use std::path::PathBuf;
async fn remove_media_file(&self, key: &[u8]) -> Result<()> {
let path = self.get_media_file(key);
let legacy = self.get_media_file_b64(key);
debug!(?key, ?path, ?legacy, "Removing media file");
use base64::{engine::general_purpose, Engine as _};
use super::*;
struct MockedKVDatabase;
impl Data for MockedKVDatabase {
fn create_file_metadata(
&self, _sender_user: Option<&str>, mxc: String, width: u32, height: u32,
content_disposition: Option<&str>, content_type: Option<&str>,
) -> Result<Vec<u8>> {
// copied from src/database/key_value/media.rs
let mut key = mxc.as_bytes().to_vec();
key.push(0xFF);
key.extend_from_slice(&width.to_be_bytes());
key.extend_from_slice(&height.to_be_bytes());
key.push(0xFF);
key.extend_from_slice(
content_disposition
.as_ref()
.map(|f| f.as_bytes())
.unwrap_or_default(),
);
key.push(0xFF);
key.extend_from_slice(
content_type
.as_ref()
.map(|c| c.as_bytes())
.unwrap_or_default(),
);
Ok(key)
let file_rm = fs::remove_file(&path);
let legacy_rm = fs::remove_file(&legacy);
let (file_rm, legacy_rm) = tokio::join!(file_rm, legacy_rm);
if let Err(e) = legacy_rm {
if self.server.config.media_compat_file_link {
debug_error!(?key, ?legacy, "Failed to remove legacy media symlink: {e}");
}
}
fn delete_file_mxc(&self, _mxc: String) -> Result<()> { todo!() }
fn search_mxc_metadata_prefix(&self, _mxc: String) -> Result<Vec<Vec<u8>>> { todo!() }
fn get_all_media_keys(&self) -> Vec<Vec<u8>> { todo!() }
fn search_file_metadata(
&self, _mxc: String, _width: u32, _height: u32,
) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
todo!()
Ok(file_rm?)
}
fn remove_url_preview(&self, _url: &str) -> Result<()> { todo!() }
async fn create_media_file(&self, key: &[u8]) -> Result<fs::File> {
let path = self.get_media_file(key);
debug!(?key, ?path, "Creating media file");
fn set_url_preview(
&self, _url: &str, _data: &UrlPreviewData, _timestamp: std::time::Duration,
) -> Result<()> {
todo!()
}
fn get_url_preview(&self, _url: &str) -> Option<UrlPreviewData> { todo!() }
}
let db: Arc<MockedKVDatabase> = Arc::new(MockedKVDatabase);
let media = Service {
db,
url_preview_mutex: RwLock::new(HashMap::new()),
};
let mxc = "mxc://example.com/ascERGshawAWawugaAcauga".to_owned();
let width = 100;
let height = 100;
let content_disposition = "attachment; filename=\"this is a very long file name with spaces and special \
characters like äöüß and even emoji like 🦀.png\"";
let content_type = "image/png";
let key = media
.db
.create_file_metadata(None, mxc, width, height, Some(content_disposition), Some(content_type))
.unwrap();
let mut r = PathBuf::from("/tmp/media");
// r.push(base64::encode_config(key, base64::URL_SAFE_NO_PAD));
// use the sha256 hash of the key as the file name instead of the key itself
// this is because the base64 encoded key can be longer than 255 characters.
r.push(general_purpose::URL_SAFE_NO_PAD.encode(<sha2::Sha256 as sha2::Digest>::digest(key)));
// Check that the file path is not longer than 255 characters
// (255 is the maximum length of a file path on most file systems)
assert!(
r.to_str().unwrap().len() <= 255,
"File path is too long: {}",
r.to_str().unwrap().len()
let file = fs::File::create(&path).await?;
if self.server.config.media_compat_file_link {
let legacy = self.get_media_file_b64(key);
if let Err(e) = fs::symlink(&path, &legacy).await {
debug_error!(
key = ?encode_key(key), ?path, ?legacy,
"Failed to create legacy media symlink: {e}"
);
}
}
Ok(file)
}
#[inline]
pub fn get_media_file(&self, key: &[u8]) -> PathBuf { self.get_media_file_sha256(key) }
/// new SHA256 file name media function. requires database migrated. uses
/// SHA256 hash of the base64 key as the file name
pub fn get_media_file_sha256(&self, key: &[u8]) -> PathBuf {
let mut r = self.get_media_dir();
// Using the hash of the base64 key as the filename
// This is to prevent the total length of the path from exceeding the maximum
// length in most filesystems
let digest = <sha2::Sha256 as sha2::Digest>::digest(key);
let encoded = encode_key(&digest);
r.push(encoded);
r
}
/// old base64 file name media function
/// This is the old version of `get_media_file` that uses the full base64
/// key as the filename.
pub fn get_media_file_b64(&self, key: &[u8]) -> PathBuf {
let mut r = self.get_media_dir();
let encoded = encode_key(key);
r.push(encoded);
r
}
pub fn get_media_dir(&self) -> PathBuf {
let mut r = PathBuf::new();
r.push(self.server.config.database_path.clone());
r.push("media");
r
}
}
#[inline]
#[must_use]
pub fn encode_key(key: &[u8]) -> String { general_purpose::URL_SAFE_NO_PAD.encode(key) }
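The renaming scheme exists because the raw database key (MXC URI, dimensions, content disposition, content type) can exceed the 255-byte file-name limit of most filesystems once base64-encoded, whereas a SHA-256 digest always encodes to 43 characters. A minimal sketch of the naming used by get_media_file_sha256, with the same base64url-no-pad alphabet as encode_key (the helper name is illustrative):

use base64::{engine::general_purpose, Engine as _};
use sha2::{Digest, Sha256};

// Hash first, then encode: the on-disk name length is constant no matter how
// long the media key grows.
fn sha256_file_name(key: &[u8]) -> String {
    general_purpose::URL_SAFE_NO_PAD.encode(Sha256::digest(key))
}

fn main() {
    let short_key = b"mxc://example.com/abc".as_slice();
    let long_key = vec![0xFF_u8; 1024]; // e.g. a very long content disposition
    // A 32-byte digest is always 43 base64url characters without padding.
    assert_eq!(sha256_file_name(short_key).len(), 43);
    assert_eq!(sha256_file_name(&long_key).len(), 43);
    println!("{}", sha256_file_name(short_key));
}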


@@ -0,0 +1,84 @@
#![cfg(test)]
#[tokio::test]
async fn long_file_names_works() {
use std::path::PathBuf;
use base64::{engine::general_purpose, Engine as _};
use super::*;
struct MockedKVDatabase;
impl Data for MockedKVDatabase {
fn create_file_metadata(
&self, _sender_user: Option<&str>, mxc: String, width: u32, height: u32, content_disposition: Option<&str>,
content_type: Option<&str>,
) -> Result<Vec<u8>> {
// copied from src/database/key_value/media.rs
let mut key = mxc.as_bytes().to_vec();
key.push(0xFF);
key.extend_from_slice(&width.to_be_bytes());
key.extend_from_slice(&height.to_be_bytes());
key.push(0xFF);
key.extend_from_slice(
content_disposition
.as_ref()
.map(|f| f.as_bytes())
.unwrap_or_default(),
);
key.push(0xFF);
key.extend_from_slice(
content_type
.as_ref()
.map(|c| c.as_bytes())
.unwrap_or_default(),
);
Ok(key)
}
fn delete_file_mxc(&self, _mxc: String) -> Result<()> { todo!() }
fn search_mxc_metadata_prefix(&self, _mxc: String) -> Result<Vec<Vec<u8>>> { todo!() }
fn get_all_media_keys(&self) -> Vec<Vec<u8>> { todo!() }
fn search_file_metadata(
&self, _mxc: String, _width: u32, _height: u32,
) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
todo!()
}
fn remove_url_preview(&self, _url: &str) -> Result<()> { todo!() }
fn set_url_preview(&self, _url: &str, _data: &UrlPreviewData, _timestamp: std::time::Duration) -> Result<()> {
todo!()
}
fn get_url_preview(&self, _url: &str) -> Option<UrlPreviewData> { todo!() }
}
let db: Arc<MockedKVDatabase> = Arc::new(MockedKVDatabase);
let mxc = "mxc://example.com/ascERGshawAWawugaAcauga".to_owned();
let width = 100;
let height = 100;
let content_disposition = "attachment; filename=\"this is a very long file name with spaces and special \
characters like äöüß and even emoji like 🦀.png\"";
let content_type = "image/png";
let key = db
.create_file_metadata(None, mxc, width, height, Some(content_disposition), Some(content_type))
.unwrap();
let mut r = PathBuf::from("/tmp/media");
// r.push(base64::encode_config(key, base64::URL_SAFE_NO_PAD));
// use the sha256 hash of the key as the file name instead of the key itself
// this is because the base64 encoded key can be longer than 255 characters.
r.push(general_purpose::URL_SAFE_NO_PAD.encode(<sha2::Sha256 as sha2::Digest>::digest(key)));
// Check that the file path is not longer than 255 characters
// (255 is the maximum length of a file path on most file systems)
assert!(
r.to_str().unwrap().len() <= 255,
"File path is too long: {}",
r.to_str().unwrap().len()
);
}


@@ -136,10 +136,7 @@ impl Services {
key_backups: key_backups::Service {
db: db.clone(),
},
media: media::Service {
db: db.clone(),
url_preview_mutex: RwLock::new(HashMap::new()),
},
media: media::Service::build(&server, &db),
sending: sending::Service::build(db.clone(), config),
globals: globals::Service::load(db.clone(), config)?,
server,
@@ -277,6 +274,7 @@ bad_signature_ratelimiter: {bad_signature_ratelimiter}
pub async fn start(&self) -> Result<()> {
debug_info!("Starting services");
self.media.create_media_dir().await?;
globals::migrations::migrations(&self.db, &self.globals.config).await?;
globals::emerg_access::init_emergency_access();