Setup framework to do integration testing (#50)
compressor_integration_tests/Cargo.toml (Normal file, 18 lines added)
@@ -0,0 +1,18 @@
[package]
name = "compressor_integration_tests"
version = "0.1.0"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
string_cache = "0.8.0"
serial_test = "0.5.1"
openssl = "0.10.32"
postgres = "0.19.0"
postgres-openssl = "0.5.0"
rand = "0.8.0"
synapse_compress_state = { path = "../" }

[dependencies.state-map]
git = "https://github.com/matrix-org/rust-matrix-state-map"
compressor_integration_tests/database_setup.sh (Executable file, 28 lines added)
@@ -0,0 +1,28 @@
#!/bin/sh

# N.B. the database setup comes from:
# https://github.com/matrix-org/synapse/blob/develop/synapse/storage/schema/state/full_schemas/54/full.sql

# Set up the required tables for testing
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<SQLCODE

CREATE TABLE state_groups (
    id BIGINT PRIMARY KEY,
    room_id TEXT NOT NULL,
    event_id TEXT NOT NULL
);

CREATE TABLE state_groups_state (
    state_group BIGINT NOT NULL,
    room_id TEXT NOT NULL,
    type TEXT NOT NULL,
    state_key TEXT NOT NULL,
    event_id TEXT NOT NULL
);

CREATE TABLE state_group_edges (
    state_group BIGINT NOT NULL,
    prev_state_group BIGINT NOT NULL
);

SQLCODE
compressor_integration_tests/docker-compose.yaml (Normal file, 20 lines added)
@@ -0,0 +1,20 @@
version: '3'
services:
  postgres:
    image: "postgres:latest"

    ports:
      # N.B. format is [port on machine]:[port to expose from container]
      - 5432:5432

    environment:
      POSTGRES_USER: synapse_user
      POSTGRES_PASSWORD: synapse_pass
      POSTGRES_DB: synapse
      PGDATA: /tmp/data

    volumes:
      - ./database_setup.sh:/docker-entrypoint-initdb.d/1_database_setup.sh

    tmpfs:
      /tmp/data
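Presumably the intended workflow is to start this throwaway Postgres instance with `docker-compose up -d` from the `compressor_integration_tests` directory (the official postgres image runs `database_setup.sh` from `/docker-entrypoint-initdb.d` on first start, creating the three state tables), and then run `cargo test` in the same directory; the `DB_URL` constant in `src/lib.rs` below matches the credentials and port exposed here.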
compressor_integration_tests/src/lib.rs (Normal file, 107 lines added)
@@ -0,0 +1,107 @@
use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
use postgres::Client;
use postgres_openssl::MakeTlsConnector;
use rand::{distributions::Alphanumeric, thread_rng, Rng};
use std::{borrow::Cow, collections::BTreeMap, fmt};

use synapse_compress_state::StateGroupEntry;

pub mod map_builder;

pub static DB_URL: &str = "postgresql://synapse_user:synapse_pass@localhost/synapse";

/// Adds the contents of a state group map to the testing database
pub fn add_contents_to_database(room_id: &str, state_group_map: &BTreeMap<i64, StateGroupEntry>) {
    // connect to the database
    let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
    builder.set_verify(SslVerifyMode::NONE);
    let connector = MakeTlsConnector::new(builder.build());

    let mut client = Client::connect(DB_URL, connector).unwrap();

    // build up the query
    let mut sql = "".to_string();

    for (sg, entry) in state_group_map {
        // create the entry for state_groups
        sql.push_str(&format!(
            "INSERT INTO state_groups (id, room_id, event_id) VALUES ({},{},{});\n",
            sg,
            PGEscape(room_id),
            PGEscape("left_blank")
        ));

        // create the entry in state_group_edges if a previous state group exists
        if let Some(prev_sg) = entry.prev_state_group {
            sql.push_str(&format!(
                "INSERT INTO state_group_edges (state_group, prev_state_group) VALUES ({}, {});\n",
                sg, prev_sg
            ));
        }

        // write an entry for each row in the delta
        if !entry.state_map.is_empty() {
            sql.push_str("INSERT INTO state_groups_state (state_group, room_id, type, state_key, event_id) VALUES");

            let mut first = true;
            for ((t, s), e) in entry.state_map.iter() {
                if first {
                    sql.push_str(" ");
                    first = false;
                } else {
                    sql.push_str(" ,");
                }
                sql.push_str(&format!(
                    "({}, {}, {}, {}, {})",
                    sg,
                    PGEscape(room_id),
                    PGEscape(t),
                    PGEscape(s),
                    PGEscape(e)
                ));
            }
            sql.push_str(";\n");
        }
    }

    client.batch_execute(&sql).unwrap();
}

/// Clears the contents of the testing database
pub fn empty_database() {
    // connect to the database
    let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
    builder.set_verify(SslVerifyMode::NONE);
    let connector = MakeTlsConnector::new(builder.build());

    let mut client = Client::connect(DB_URL, connector).unwrap();

    // delete all the contents from all three tables
    let sql = r"
        DELETE FROM state_groups;
        DELETE FROM state_group_edges;
        DELETE FROM state_groups_state;
    ";

    client.batch_execute(sql).unwrap();
}

/// Safely escape the strings in sql queries
struct PGEscape<'a>(pub &'a str);

impl<'a> fmt::Display for PGEscape<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut delim = Cow::from("$$");
        while self.0.contains(&delim as &str) {
            let s: String = thread_rng()
                .sample_iter(&Alphanumeric)
                .take(10)
                .map(char::from)
                .collect();

            delim = format!("${}$", s).into();
        }

        write!(f, "{}{}{}", delim, self.0, delim)
    }
}
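As a rough illustration of the dollar-quoting that `PGEscape` performs, a hypothetical in-module test (a sketch, not part of this commit; the module name is made up and `PGEscape` is private to this file) might look like:

#[cfg(test)]
mod pg_escape_demo {
    use super::PGEscape;

    // Plain strings are wrapped in the default $$ ... $$ delimiters.
    #[test]
    fn dollar_quotes_plain_strings() {
        assert_eq!(PGEscape("!room:example.com").to_string(), "$$!room:example.com$$");
    }

    // If the input already contains "$$", a random $xxxxxxxxxx$ delimiter is
    // chosen instead, so the value is still quoted safely.
    #[test]
    fn switches_delimiter_when_input_contains_dollar_quotes() {
        let escaped = PGEscape("evil$$payload").to_string();
        assert!(escaped.starts_with('$') && escaped.ends_with('$'));
        assert!(escaped.contains("evil$$payload"));
        assert!(!escaped.starts_with("$$"));
    }
}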
compressor_integration_tests/src/map_builder.rs (Normal file, 36 lines added)
@@ -0,0 +1,36 @@
use std::collections::BTreeMap;

use state_map::StateMap;
use synapse_compress_state::StateGroupEntry;

/// Generates a long chain of state groups, each with a state delta
///
/// If called with start=0, end=13 this would build the following:
///
/// 0-1-2-3-4-5-6-7-8-9-10-11-12-13
///
/// Where each group i has state:
///     ('node', 'is', i)
///     ('group', j, 'seen') - for all j up to and including i
pub fn line_with_state(start: i64, end: i64) -> BTreeMap<i64, StateGroupEntry> {
    let mut initial: BTreeMap<i64, StateGroupEntry> = BTreeMap::new();
    let mut prev = None;

    for i in start..=end {
        let mut entry = StateGroupEntry {
            in_range: true,
            prev_state_group: prev,
            state_map: StateMap::new(),
        };
        entry
            .state_map
            .insert("group", &i.to_string(), "seen".into());
        entry.state_map.insert("node", "is", i.to_string().into());

        initial.insert(i, entry);

        prev = Some(i)
    }

    initial
}
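As a quick sanity check on the shape this builder produces, a hypothetical test (a sketch added to this module, not part of this commit; it relies on StateMap::iter() as already used in src/lib.rs) could assert:

#[test]
fn line_with_state_builds_a_delta_chain() {
    // A chain of four groups, 0-1-2-3.
    let chain = line_with_state(0, 3);

    assert_eq!(chain.len(), 4);
    assert_eq!(chain[&0].prev_state_group, None);    // the chain starts at group 0
    assert_eq!(chain[&3].prev_state_group, Some(2)); // each group points at its predecessor

    // Each group's own delta holds exactly two rows, ('node', 'is', i) and
    // ('group', i, 'seen'); the rest of its state comes from following the
    // prev_state_group edges.
    assert_eq!(chain[&3].state_map.iter().count(), 2);
}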
@@ -0,0 +1,55 @@
use compressor_integration_tests::{
    add_contents_to_database, empty_database, map_builder::line_with_state, DB_URL,
};
use serial_test::serial;
use synapse_compress_state::{run, Config};

// Remember to add #[serial(db)] before any test that accesses the database.
// Only one test with this annotation can run at once, preventing
// concurrency bugs.
//
// You will probably also want to call empty_database() at the start
// of each test (since their order of execution is not guaranteed)

#[test]
#[serial(db)]
fn run_succeeds_without_crashing() {
    // This starts with the following structure
    //
    // 0-1-2-3-4-5-6-7-8-9-10-11-12-13
    //
    // Each group i has state:
    //     ('node', 'is', i)
    //     ('group', j, 'seen') - for all j up to and including i
    let initial = line_with_state(0, 13);

    empty_database();
    add_contents_to_database("room1", &initial);

    let db_url = DB_URL.to_string();
    let room_id = "room1".to_string();
    let output_file = Some("./tests/tmp/run_succeeds_without_crashing.sql".to_string());
    let min_state_group = None;
    let groups_to_compress = None;
    let min_saved_rows = None;
    let max_state_group = None;
    let level_sizes = "3,3".to_string();
    let transactions = true;
    let graphs = false;

    let config = Config::new(
        db_url.clone(),
        room_id.clone(),
        output_file,
        min_state_group,
        groups_to_compress,
        min_saved_rows,
        max_state_group,
        level_sizes,
        transactions,
        graphs,
    )
    .unwrap();

    run(config);
}
compressor_integration_tests/tests/tmp/README.md (Normal file, 1 line added)
@@ -0,0 +1 @@
This folder is where SQL files generated by the integration tests are saved.