[pull] main from LemmyNet:main by pull[bot] · Pull Request #488 · Mu-L/lemmy · GitHub

[pull] main from LemmyNet:main #488

Merged: 1 commit, Jun 18, 2025
264 changes: 264 additions & 0 deletions crates/db_schema_file/src/schema_setup.rs
@@ -315,13 +315,60 @@ fn convert_err(e: Box<dyn std::error::Error + Send + Sync>) -> anyhow::Error {
}

#[cfg(test)]
#[allow(clippy::indexing_slicing, clippy::unwrap_used)]
mod tests {
use super::{
Branch::{EarlyReturn, ReplaceableSchemaNotRebuilt, ReplaceableSchemaRebuilt},
*,
};
use diesel_ltree::Ltree;
use lemmy_utils::{error::LemmyResult, settings::SETTINGS};
use serial_test::serial;
// The number of migrations that should be run to set up some test data.
// Currently, this includes migrations until
// 2020-04-07-135912_add_user_community_apub_constraints, since some mandatory apub
// fields need to be added.

const INITIAL_MIGRATIONS_COUNT: u64 = 40;

// Test data IDs
const TEST_USER_ID_1: i32 = 101;
const USER1_NAME: &str = "test_user_1";
const USER1_ACTOR_ID: &str = "test_user_1@fedi.example";
const USER1_PREFERRED_NAME: &str = "preferred_1";
const USER1_EMAIL: &str = "email1@example.com";
const USER1_PASSWORD: &str = "test_password_1";
const USER1_PUBLIC_KEY: &str = "test_public_key_1";

const TEST_USER_ID_2: i32 = 102;
const USER2_NAME: &str = "test_user_2";
const USER2_ACTOR_ID: &str = "test_user_2@fedi.example";
const USER2_PREFERRED_NAME: &str = "preferred2";
const USER2_EMAIL: &str = "email2@example.com";
const USER2_PASSWORD: &str = "test_password_2";
const USER2_PUBLIC_KEY: &str = "test_public_key_2";

const TEST_COMMUNITY_ID_1: i32 = 101;
const COMMUNITY_NAME: &str = "test_community_1";
const COMMUNITY_TITLE: &str = "Test Community 1";
const COMMUNITY_DESCRIPTION: &str = "This is a test community.";
const CATEGORY_ID: i32 = 4; // Should be a valid category "Movies"
const COMMUNITY_ACTOR_ID: &str = "https://fedi.example/community/12345";
const COMMUNITY_PUBLIC_KEY: &str = "test_public_key_community_1";

const TEST_POST_ID_1: i32 = 101;
const POST_NAME: &str = "Post Title";
const POST_URL: &str = "https://fedi.example/post/12345";
const POST_BODY: &str = "Post Body.";
const POST_AP_ID: &str = "https://fedi.example/post/12345";

const TEST_COMMENT_ID_1: i32 = 101;
const COMMENT1_CONTENT: &str = "Comment";
const COMMENT1_AP_ID: &str = "https://fedi.example/comment/12345";

const TEST_COMMENT_ID_2: i32 = 102;
const COMMENT2_CONTENT: &str = "Reply";
const COMMENT2_AP_ID: &str = "https://fedi.example/comment/12346";

#[test]
#[serial]
@@ -333,9 +380,21 @@ mod tests {
// Start with consistent state by dropping everything
conn.batch_execute("DROP OWNED BY CURRENT_USER;")?;

// Run initial migrations to prepare basic tables
assert_eq!(
run(o.run().limit(INITIAL_MIGRATIONS_COUNT))?,
ReplaceableSchemaNotRebuilt
);

// Insert the test data
insert_test_data(&mut conn)?;

// Run all migrations, and make sure that changes can be correctly reverted
assert_eq!(run(o.run().enable_diff_check())?, ReplaceableSchemaRebuilt);

// Check that the test data inserted earlier is still intact after running all migrations
check_test_data(&mut conn)?;

// Check for early return
assert_eq!(run(o.run())?, EarlyReturn);

@@ -362,4 +421,209 @@

Ok(())
}

fn insert_test_data(conn: &mut PgConnection) -> LemmyResult<()> {
// Users
conn.batch_execute(&format!(
"INSERT INTO user_ (id, name, actor_id, preferred_username, password_encrypted, email, public_key) \
VALUES ({}, '{}', '{}', '{}', '{}', '{}', '{}')",
TEST_USER_ID_1,
USER1_NAME,
USER1_ACTOR_ID,
USER1_PREFERRED_NAME,
USER1_PASSWORD,
USER1_EMAIL,
USER1_PUBLIC_KEY
))?;

conn.batch_execute(&format!(
"INSERT INTO user_ (id, name, actor_id, preferred_username, password_encrypted, email, public_key) \
VALUES ({}, '{}', '{}', '{}', '{}', '{}', '{}')",
TEST_USER_ID_2,
USER2_NAME,
USER2_ACTOR_ID,
USER2_PREFERRED_NAME,
USER2_PASSWORD,
USER2_EMAIL,
USER2_PUBLIC_KEY
))?;

// Community
conn.batch_execute(&format!(
"INSERT INTO community (id, actor_id, public_key, name, title, description, category_id, creator_id) \
VALUES ({}, '{}', '{}', '{}', '{}', '{}', {}, {})",
TEST_COMMUNITY_ID_1,
COMMUNITY_ACTOR_ID,
COMMUNITY_PUBLIC_KEY,
COMMUNITY_NAME,
COMMUNITY_TITLE,
COMMUNITY_DESCRIPTION,
CATEGORY_ID,
TEST_USER_ID_1
))?;

conn.batch_execute(&format!(
"INSERT INTO community_moderator (community_id, user_id) \
VALUES ({}, {})",
TEST_COMMUNITY_ID_1, TEST_USER_ID_1
))?;

// Post
conn.batch_execute(&format!(
"INSERT INTO post (id, name, url, body, creator_id, community_id, ap_id) \
VALUES ({}, '{}', '{}', '{}', {}, {}, '{}')",
TEST_POST_ID_1,
POST_NAME,
POST_URL,
POST_BODY,
TEST_USER_ID_1,
TEST_COMMUNITY_ID_1,
POST_AP_ID
))?;

// Comment
conn.batch_execute(&format!(
"INSERT INTO comment (id, creator_id, post_id, parent_id, content, ap_id) \
VALUES ({}, {}, {}, NULL, '{}', '{}')",
TEST_COMMENT_ID_1, TEST_USER_ID_2, TEST_POST_ID_1, COMMENT1_CONTENT, COMMENT1_AP_ID
))?;

conn.batch_execute(&format!(
"INSERT INTO comment (id, creator_id, post_id, parent_id, content, ap_id) \
VALUES ({}, {}, {}, {}, '{}', '{}')",
TEST_COMMENT_ID_2,
TEST_USER_ID_1,
TEST_POST_ID_1,
TEST_COMMENT_ID_1,
COMMENT2_CONTENT,
COMMENT2_AP_ID
))?;

conn.batch_execute(&format!(
"INSERT INTO comment_like (user_id, comment_id, post_id, score) \
VALUES ({}, {}, {}, {})",
TEST_USER_ID_1, TEST_COMMENT_ID_1, TEST_POST_ID_1, 1
))?;

Ok(())
}

fn check_test_data(conn: &mut PgConnection) -> LemmyResult<()> {
use crate::schema::{comment, comment_reply, community, person, post};

// Check users
let users: Vec<(i32, String, Option<String>, String, String)> = person::table
.select((
person::id,
person::name,
person::display_name,
person::ap_id,
person::public_key,
))
.order_by(person::id)
.load(conn)
.map_err(|e| anyhow!("Failed to read users: {}", e))?;

assert_eq!(users.len(), 2);
assert_eq!(users[0].0, TEST_USER_ID_1);
assert_eq!(users[0].1, USER1_NAME);
assert_eq!(users[0].2.clone().unwrap(), USER1_PREFERRED_NAME);
assert_eq!(users[0].3, USER1_ACTOR_ID);
assert_eq!(users[0].4, USER1_PUBLIC_KEY);

assert_eq!(users[1].0, TEST_USER_ID_2);
assert_eq!(users[1].1, USER2_NAME);
assert_eq!(users[1].2.clone().unwrap(), USER2_PREFERRED_NAME);
assert_eq!(users[1].3, USER2_ACTOR_ID);
assert_eq!(users[1].4, USER2_PUBLIC_KEY);

// Check communities
let communities: Vec<(i32, String, String, String)> = community::table
.select((
community::id,
community::name,
community::ap_id,
community::public_key,
))
.load(conn)
.map_err(|e| anyhow!("Failed to read communities: {}", e))?;

assert_eq!(communities.len(), 1);
assert_eq!(communities[0].0, TEST_COMMUNITY_ID_1);
assert_eq!(communities[0].1, COMMUNITY_NAME);
assert_eq!(communities[0].2, COMMUNITY_ACTOR_ID);
assert_eq!(communities[0].3, COMMUNITY_PUBLIC_KEY);

let posts: Vec<(i32, String, String, Option<String>, i32, i32)> = post::table
.select((
post::id,
post::name,
post::ap_id,
post::body,
post::community_id,
post::creator_id,
))
.load(conn)
.map_err(|e| anyhow!("Failed to read posts: {}", e))?;

assert_eq!(posts.len(), 1);
assert_eq!(posts[0].0, TEST_POST_ID_1);
assert_eq!(posts[0].1, POST_NAME);
assert_eq!(posts[0].2, POST_AP_ID);
assert_eq!(posts[0].3.clone().unwrap(), POST_BODY);
assert_eq!(posts[0].4, TEST_COMMUNITY_ID_1);
assert_eq!(posts[0].5, TEST_USER_ID_1);

let comments: Vec<(i32, String, String, i32, i32, Ltree, i64)> = comment::table
.select((
comment::id,
comment::content,
comment::ap_id,
comment::post_id,
comment::creator_id,
comment::path,
comment::upvotes,
))
.order_by(comment::id)
.load(conn)
.map_err(|e| anyhow!("Failed to read comments: {}", e))?;

assert_eq!(comments.len(), 2);
assert_eq!(comments[0].0, TEST_COMMENT_ID_1);
assert_eq!(comments[0].1, COMMENT1_CONTENT);
assert_eq!(comments[0].2, COMMENT1_AP_ID);
assert_eq!(comments[0].3, TEST_POST_ID_1);
assert_eq!(comments[0].4, TEST_USER_ID_2);
assert_eq!(
comments[0].5,
Ltree(format!("0.{}", TEST_COMMENT_ID_1).to_string())
);
assert_eq!(comments[0].6, 1); // One upvote

assert_eq!(comments[1].0, TEST_COMMENT_ID_2);
assert_eq!(comments[1].1, COMMENT2_CONTENT);
assert_eq!(comments[1].2, COMMENT2_AP_ID);
assert_eq!(comments[1].3, TEST_POST_ID_1);
assert_eq!(comments[1].4, TEST_USER_ID_1);
assert_eq!(
comments[1].5,
Ltree(format!("0.{}.{}", TEST_COMMENT_ID_1, TEST_COMMENT_ID_2).to_string())
);
assert_eq!(comments[1].6, 0); // Zero upvotes

// Check comment replies
let replies: Vec<(i32, i32)> = comment_reply::table
.select((comment_reply::comment_id, comment_reply::recipient_id))
.order_by(comment_reply::comment_id)
.load(conn)
.map_err(|e| anyhow!("Failed to read comment replies: {}", e))?;

assert_eq!(replies.len(), 2);
assert_eq!(replies[0].0, TEST_COMMENT_ID_1);
assert_eq!(replies[0].1, TEST_USER_ID_1);
assert_eq!(replies[1].0, TEST_COMMENT_ID_2);
assert_eq!(replies[1].1, TEST_USER_ID_2);

Ok(())
}
}
@@ -1,5 +1,5 @@
ALTER TABLE comment_like
ADD COLUMN post_id int REFERENCES post ON UPDATE CASCADE ON DELETE CASCADE NOT NULL;
ADD COLUMN post_id int REFERENCES post ON UPDATE CASCADE ON DELETE CASCADE;

UPDATE
comment_like
25 changes: 20 additions & 5 deletions migrations/2025-03-25-221304_remove_post_instance_id/down.sql
@@ -1,9 +1,9 @@
ALTER TABLE post
ADD COLUMN name_new character varying(200) NOT NULL,
ADD COLUMN name_new character varying(200),
ADD COLUMN url_new character varying(2000),
ADD COLUMN body_new text,
ADD COLUMN creator_id_new integer NOT NULL,
ADD COLUMN community_id_new integer NOT NULL,
ADD COLUMN creator_id_new integer,
ADD COLUMN community_id_new integer,
ADD COLUMN removed_new boolean DEFAULT FALSE NOT NULL,
ADD COLUMN locked_new boolean DEFAULT FALSE NOT NULL,
ADD COLUMN published_new timestamp with time zone DEFAULT now() NOT NULL,
@@ -13,7 +13,7 @@ ALTER TABLE post
ADD COLUMN embed_title_new text,
ADD COLUMN embed_description_new text,
ADD COLUMN thumbnail_url_new text,
ADD COLUMN ap_id_new character varying(255) NOT NULL,
ADD COLUMN ap_id_new character varying(255),
ADD COLUMN local_new boolean DEFAULT TRUE NOT NULL,
ADD COLUMN embed_video_url_new text,
ADD COLUMN language_id_new integer DEFAULT 0 NOT NULL,
@@ -32,7 +32,7 @@ ALTER TABLE post
ADD COLUMN hot_rank_active_new double precision DEFAULT 0.0001 NOT NULL,
ADD COLUMN controversy_rank_new double precision DEFAULT 0 NOT NULL,
-- Old column here
ADD COLUMN instance_id integer NOT NULL,
ADD COLUMN instance_id integer,
ADD COLUMN scaled_rank_new double precision DEFAULT 0.0001 NOT NULL,
ADD COLUMN report_count_new smallint DEFAULT 0 NOT NULL,
ADD COLUMN unresolved_report_count_new smallint DEFAULT 0 NOT NULL,
@@ -325,3 +325,18 @@ ALTER TABLE ONLY post
ALTER TABLE ONLY post
ADD CONSTRAINT post_instance_id_fkey FOREIGN KEY (instance_id) REFERENCES instance (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED;

ALTER TABLE post
ALTER COLUMN name SET NOT NULL;

ALTER TABLE post
ALTER COLUMN creator_id SET NOT NULL;

ALTER TABLE post
ALTER COLUMN community_id SET NOT NULL;

ALTER TABLE post
ALTER COLUMN ap_id SET NOT NULL;

ALTER TABLE post
ALTER COLUMN instance_id SET NOT NULL;
