Setup framework to do integration testing (#50)
.github/workflows/tests.yaml (vendored, 10 changed lines)

@@ -17,6 +17,7 @@ jobs:
           profile: minimal
           toolchain: stable
           override: true
+      - uses: Swatinem/rust-cache@v1
       - uses: actions-rs/cargo@v1
         with:
           command: check
@@ -31,9 +32,12 @@ jobs:
           profile: minimal
           toolchain: stable
           override: true
+      - uses: Swatinem/rust-cache@v1
+      - run: cd compressor_integration_tests && docker-compose up -d
       - uses: actions-rs/cargo@v1
         with:
           command: test
+          args: --workspace

   fmt:
     name: Rustfmt
@@ -45,7 +49,8 @@ jobs:
           profile: minimal
           toolchain: stable
           override: true
-      - run: rustup component add rustfmt
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v1
       - uses: actions-rs/cargo@v1
         with:
           command: fmt
@@ -61,7 +66,8 @@ jobs:
           profile: minimal
           toolchain: stable
           override: true
-      - run: rustup component add clippy
+          components: clippy
+      - uses: Swatinem/rust-cache@v1
       - uses: actions-rs/cargo@v1
         with:
           command: clippy
.gitignore (vendored, 2 changed lines)

@@ -2,5 +2,5 @@
 **/*.rs.bk
 *.data
 *.old
-out.sql
+**.sql
 *.csv
Cargo.lock (generated, 36 changed lines)

@@ -99,6 +99,20 @@ dependencies = [
  "vec_map",
 ]

+[[package]]
+name = "compressor_integration_tests"
+version = "0.1.0"
+dependencies = [
+ "openssl",
+ "postgres",
+ "postgres-openssl",
+ "rand",
+ "serial_test",
+ "state-map",
+ "string_cache",
+ "synapse_compress_state",
+]
+
 [[package]]
 name = "console"
 version = "0.14.1"
@@ -909,6 +923,28 @@ version = "1.0.126"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03"

+[[package]]
+name = "serial_test"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0bccbcf40c8938196944a3da0e133e031a33f4d6b72db3bda3cc556e361905d"
+dependencies = [
+ "lazy_static",
+ "parking_lot",
+ "serial_test_derive",
+]
+
+[[package]]
+name = "serial_test_derive"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2acd6defeddb41eb60bb468f8825d0cfd0c2a76bc03bfd235b6a1dc4f6a1ad5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "sha2"
 version = "0.9.5"
Cargo.toml

@@ -1,3 +1,6 @@
+[workspace]
+members = ["compressor_integration_tests"]
+
 [package]
 authors = ["Erik Johnston"]
 description = "A tool to compress some state in a Synapse instance's database"
compressor_integration_tests/Cargo.toml (new file, 18 lines)

@@ -0,0 +1,18 @@
+[package]
+name = "compressor_integration_tests"
+version = "0.1.0"
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+string_cache = "0.8.0"
+serial_test = "0.5.1"
+openssl = "0.10.32"
+postgres = "0.19.0"
+postgres-openssl = "0.5.0"
+rand = "0.8.0"
+synapse_compress_state = { path = "../" }
+
+[dependencies.state-map]
+git = "https://github.com/matrix-org/rust-matrix-state-map"
compressor_integration_tests/database_setup.sh (new executable file, 28 lines)

@@ -0,0 +1,28 @@
+#!/bin/sh
+
+#N.B. the database setup comes from:
+#https://github.com/matrix-org/synapse/blob/develop/synapse/storage/schema/state/full_schemas/54/full.sql
+
+# Setup the required tables for testing
+psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<SQLCODE
+
+CREATE TABLE state_groups (
+    id BIGINT PRIMARY KEY,
+    room_id TEXT NOT NULL,
+    event_id TEXT NOT NULL
+);
+
+CREATE TABLE state_groups_state (
+    state_group BIGINT NOT NULL,
+    room_id TEXT NOT NULL,
+    type TEXT NOT NULL,
+    state_key TEXT NOT NULL,
+    event_id TEXT NOT NULL
+);
+
+CREATE TABLE state_group_edges (
+    state_group BIGINT NOT NULL,
+    prev_state_group BIGINT NOT NULL
+);
+
+SQLCODE
compressor_integration_tests/docker-compose.yaml (new file, 20 lines)

@@ -0,0 +1,20 @@
+version: '3'
+services:
+  postgres:
+    image: "postgres:latest"
+
+    ports:
+      # N.B. format is [port on machine]:[port to expose from container]
+      - 5432:5432
+
+    environment:
+      POSTGRES_USER: synapse_user
+      POSTGRES_PASSWORD: synapse_pass
+      POSTGRES_DB: synapse
+      PGDATA: /tmp/data
+
+    volumes:
+      - ./database_setup.sh:/docker-entrypoint-initdb.d/1_database_setup.sh
+
+    tmpfs:
+      /tmp/data
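The test job in .github/workflows/tests.yaml brings this Postgres container up with docker-compose before running the workspace tests. A minimal local equivalent, assuming Docker Compose and a stable Rust toolchain are installed and starting from the repository root, would be roughly:

    # start the throwaway Postgres instance defined in docker-compose.yaml
    cd compressor_integration_tests && docker-compose up -d

    # run unit and integration tests for the whole workspace, as CI does
    cd .. && cargo test --workspace

    # stop and remove the container afterwards
    cd compressor_integration_tests && docker-compose down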
compressor_integration_tests/src/lib.rs (new file, 107 lines)

@@ -0,0 +1,107 @@
+use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
+use postgres::Client;
+use postgres_openssl::MakeTlsConnector;
+use rand::{distributions::Alphanumeric, thread_rng, Rng};
+use std::{borrow::Cow, collections::BTreeMap, fmt};
+
+use synapse_compress_state::StateGroupEntry;
+
+pub mod map_builder;
+
+pub static DB_URL: &str = "postgresql://synapse_user:synapse_pass@localhost/synapse";
+
+/// Adds the contents of a state group map to the testing database
+pub fn add_contents_to_database(room_id: &str, state_group_map: &BTreeMap<i64, StateGroupEntry>) {
+    // connect to the database
+    let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
+    builder.set_verify(SslVerifyMode::NONE);
+    let connector = MakeTlsConnector::new(builder.build());
+
+    let mut client = Client::connect(DB_URL, connector).unwrap();
+
+    // build up the query
+    let mut sql = "".to_string();
+
+    for (sg, entry) in state_group_map {
+        // create the entry for state_groups
+        sql.push_str(&format!(
+            "INSERT INTO state_groups (id, room_id, event_id) VALUES ({},{},{});\n",
+            sg,
+            PGEscape(room_id),
+            PGEscape("left_blank")
+        ));
+
+        // create the entry in state_group_edges IF exists
+        if let Some(prev_sg) = entry.prev_state_group {
+            sql.push_str(&format!(
+                "INSERT INTO state_group_edges (state_group, prev_state_group) VALUES ({}, {});\n",
+                sg, prev_sg
+            ));
+        }
+
+        // write entry for each row in delta
+        if !entry.state_map.is_empty() {
+            sql.push_str("INSERT INTO state_groups_state (state_group, room_id, type, state_key, event_id) VALUES");
+
+            let mut first = true;
+            for ((t, s), e) in entry.state_map.iter() {
+                if first {
+                    sql.push_str(" ");
+                    first = false;
+                } else {
+                    sql.push_str(" ,");
+                }
+                sql.push_str(&format!(
+                    "({}, {}, {}, {}, {})",
+                    sg,
+                    PGEscape(room_id),
+                    PGEscape(t),
+                    PGEscape(s),
+                    PGEscape(e)
+                ));
+            }
+            sql.push_str(";\n");
+        }
+    }
+
+    client.batch_execute(&sql).unwrap();
+}
+
+// Clears the contents of the testing database
+pub fn empty_database() {
+    // connect to the database
+    let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
+    builder.set_verify(SslVerifyMode::NONE);
+    let connector = MakeTlsConnector::new(builder.build());
+
+    let mut client = Client::connect(DB_URL, connector).unwrap();
+
+    // delete all the contents from all three tables
+    let sql = r"
+        DELETE FROM state_groups;
+        DELETE FROM state_group_edges;
+        DELETE FROM state_groups_state;
+    ";
+
+    client.batch_execute(sql).unwrap();
+}
+
+/// Safely escape the strings in sql queries
+struct PGEscape<'a>(pub &'a str);
+
+impl<'a> fmt::Display for PGEscape<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut delim = Cow::from("$$");
+        while self.0.contains(&delim as &str) {
+            let s: String = thread_rng()
+                .sample_iter(&Alphanumeric)
+                .take(10)
+                .map(char::from)
+                .collect();
+
+            delim = format!("${}$", s).into();
+        }
+
+        write!(f, "{}{}{}", delim, self.0, delim)
+    }
+}
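The PGEscape helper at the end of this file wraps a value in PostgreSQL dollar-quoting rather than backslash-escaping it, switching to a random delimiter whenever the default $$ already occurs in the value. As a rough illustration with hypothetical inputs (the random suffix below is made up, not what any particular run would produce):

    PGEscape("!room:example.com")   formats as   $$!room:example.com$$
    PGEscape("has $$ inside")       formats as   $x7GpQ2aBc9$has $$ inside$x7GpQ2aBc9$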
compressor_integration_tests/src/map_builder.rs (new file, 36 lines)

@@ -0,0 +1,36 @@
+use std::collections::BTreeMap;
+
+use state_map::StateMap;
+use synapse_compress_state::StateGroupEntry;
+
+/// Generates a long chain of state groups, each with state deltas
+///
+/// If called with start=0, end=13 this would build the following:
+///
+/// 0-1-2-3-4-5-6-7-8-9-10-11-12-13
+///
+/// Where each group i has state:
+///     ('node','is', i)
+///     ('group', j, 'seen') - for all j less than i
+pub fn line_with_state(start: i64, end: i64) -> BTreeMap<i64, StateGroupEntry> {
+    let mut initial: BTreeMap<i64, StateGroupEntry> = BTreeMap::new();
+    let mut prev = None;
+
+    for i in start..=end {
+        let mut entry = StateGroupEntry {
+            in_range: true,
+            prev_state_group: prev,
+            state_map: StateMap::new(),
+        };
+        entry
+            .state_map
+            .insert("group", &i.to_string(), "seen".into());
+        entry.state_map.insert("node", "is", i.to_string().into());
+
+        initial.insert(i, entry);
+
+        prev = Some(i)
+    }
+
+    initial
+}
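Combined with add_contents_to_database from lib.rs, each group in a chain like this ends up as rows in the three tables created by database_setup.sh. For example, group 1 of line_with_state(0, 13), inserted under room id "room1", would produce SQL along these lines (a sketch only; exact whitespace and the order of the two state rows follow the format strings and map iteration above):

    INSERT INTO state_groups (id, room_id, event_id) VALUES (1,$$room1$$,$$left_blank$$);
    INSERT INTO state_group_edges (state_group, prev_state_group) VALUES (1, 0);
    INSERT INTO state_groups_state (state_group, room_id, type, state_key, event_id) VALUES (1, $$room1$$, $$group$$, $$1$$, $$seen$$) ,(1, $$room1$$, $$node$$, $$is$$, $$1$$);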
@@ -0,0 +1,55 @@
+use compressor_integration_tests::{
+    add_contents_to_database, empty_database, map_builder::line_with_state, DB_URL,
+};
+use serial_test::serial;
+use synapse_compress_state::{run, Config};
+
+// Remember to add #[serial(db)] before any test that accesses the database.
+// Only one test with this annotation can run at once - preventing
+// concurrency bugs.
+//
+// You will probably also want to use common::empty_database() at the start
+// of each test as well (since their order of execution is not guaranteed)
+
+#[test]
+#[serial(db)]
+fn run_succeeds_without_crashing() {
+    // This starts with the following structure
+    //
+    // 0-1-2-3-4-5-6-7-8-9-10-11-12-13
+    //
+    // Each group i has state:
+    //     ('node','is', i)
+    //     ('group', j, 'seen') - for all j less than i
+    let initial = line_with_state(0, 13);
+
+    empty_database();
+    add_contents_to_database("room1", &initial);
+
+    let db_url = DB_URL.to_string();
+    let room_id = "room1".to_string();
+    let output_file = Some("./tests/tmp/run_succeeds_without_crashing.sql".to_string());
+    let min_state_group = None;
+    let groups_to_compress = None;
+    let min_saved_rows = None;
+    let max_state_group = None;
+    let level_sizes = "3,3".to_string();
+    let transactions = true;
+    let graphs = false;
+
+    let config = Config::new(
+        db_url.clone(),
+        room_id.clone(),
+        output_file,
+        min_state_group,
+        groups_to_compress,
+        min_saved_rows,
+        max_state_group,
+        level_sizes,
+        transactions,
+        graphs,
+    )
+    .unwrap();
+
+    run(config);
+}
compressor_integration_tests/tests/tmp/README.md (new file, 1 line)

@@ -0,0 +1 @@
+This folder is where sql files generated by the integration tests are saved
src/lib.rs

@@ -44,9 +44,9 @@ use database::PGEscape;
 /// delta from that previous group (or the full state if no previous group)
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct StateGroupEntry {
-    in_range: bool,
-    prev_state_group: Option<i64>,
-    state_map: StateMap<Atom>,
+    pub in_range: bool,
+    pub prev_state_group: Option<i64>,
+    pub state_map: StateMap<Atom>,
 }

 /// Helper struct for parsing the `level_sizes` argument.