Commit 2709fe0

Committed Jun 11, 2024
tests: Add sanity integration test
The test creates an index, indexes a small HDFS logs file, searches it, and then drops the index. Using `testcontainers-rs`, the test runs a Postgres instance inside a container.
1 parent 0da2292 commit 2709fe0
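
As a rough sketch of the container setup described in the message above (the actual helper lives in tests/common/mod.rs below; the credentials and database name mirror the connection URL used there):

use testcontainers::runners::AsyncRunner;
use testcontainers_modules::postgres::Postgres;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Start a throwaway Postgres container; it is removed when `container` is dropped.
    let container = Postgres::default().start().await?;
    // Port 5432 inside the container is mapped to a random free host port.
    let port = container.get_host_port_ipv4(5432).await?;
    let url = format!("postgres://postgres:postgres@127.0.0.1:{port}/postgres");
    println!("connect with {url}");
    Ok(())
}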

File tree

6 files changed: +562 −23 lines

Cargo.lock

+420 −20 (generated lockfile; diff not rendered)

Cargo.toml

+3
@@ -34,3 +34,6 @@ uuid = { version = "1.8.0", features = ["v7"] }
 
 [dev-dependencies]
 async-tempfile = "0.5.0"
+ctor = "0.2.8"
+testcontainers = "0.17.0"
+testcontainers-modules = { version = "0.5.0", features = ["postgres"] }
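
For context on the three new dev-dependencies: `testcontainers` and `testcontainers-modules` provide the disposable Postgres container the tests run against, while `ctor` runs one-time setup before any test executes (tests/config.rs below uses it to install color-eyre and logging). A minimal, illustrative sketch of the `ctor` pattern:

use std::sync::atomic::{AtomicBool, Ordering};

use ctor::ctor;

static SETUP_RAN: AtomicBool = AtomicBool::new(false);

#[ctor]
fn init() {
    // Runs when the test binary is loaded, before any #[test] function.
    SETUP_RAN.store(true, Ordering::SeqCst);
}

#[test]
fn setup_runs_before_tests() {
    assert!(SETUP_RAN.load(Ordering::SeqCst));
}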

src/config/mod.rs

+11 −3
@@ -6,7 +6,7 @@ pub mod number;
 pub mod static_object;
 pub mod text;
 
-use std::{ops::Deref, path::Path, vec::IntoIter};
+use std::{ops::Deref, path::Path, str::FromStr, vec::IntoIter};
 
 use color_eyre::eyre::Result;
 use serde::{Deserialize, Serialize};
@@ -232,7 +232,7 @@ pub struct IndexSchema {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub(crate) struct IndexConfig {
+pub struct IndexConfig {
     pub name: String,
     pub path: String,
 
@@ -246,6 +246,14 @@ pub(crate) struct IndexConfig {
 impl IndexConfig {
     pub async fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {
         let config_str = read_to_string(path).await?;
-        Ok(serde_yaml::from_str(&config_str)?)
+        Self::from_str(&config_str)
+    }
+}
+
+impl FromStr for IndexConfig {
+    type Err = color_eyre::Report;
+
+    fn from_str(s: &str) -> Result<Self> {
+        Ok(serde_yaml::from_str(s)?)
     }
 }
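
The new `FromStr` impl (together with making `IndexConfig` public) lets callers parse a config from an in-memory YAML string rather than only from a file path, which is what the test below needs for `include_str!`. A small usage sketch, assuming the crate is consumed as the `toshokan` library as in the tests:

use std::str::FromStr;

use toshokan::config::IndexConfig;

fn main() -> color_eyre::Result<()> {
    let yaml = std::fs::read_to_string("example_config.yaml")?;
    // Parse directly from the string...
    let config = IndexConfig::from_str(&yaml)?;
    // ...or equivalently via `str::parse`, which `FromStr` enables.
    let same: IndexConfig = yaml.parse()?;
    assert_eq!(config.name, same.name);
    Ok(())
}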

tests/common/mod.rs

+55
use std::{
    fs::canonicalize,
    path::{Path, PathBuf},
};

use color_eyre::Result;
use sqlx::{migrate::Migrator, postgres::PgPoolOptions, PgPool};
use testcontainers::{runners::AsyncRunner, ContainerAsync};
use testcontainers_modules::postgres::Postgres as PostgresContainer;

static MIGRATOR: Migrator = sqlx::migrate!();

const MAX_DB_CONNECTIONS: u32 = 100;

pub struct Postgres {
    /// Keep container alive (container is deleted on drop).
    _container: ContainerAsync<PostgresContainer>,

    /// The underlying sqlx connection to the postgres inside the container.
    pub pool: PgPool,
}

async fn open_db_pool(url: &str) -> Result<PgPool> {
    Ok(PgPoolOptions::new()
        .max_connections(MAX_DB_CONNECTIONS)
        .connect(url)
        .await?)
}

pub async fn run_postgres() -> Result<Postgres> {
    let container = PostgresContainer::default().start().await?;
    let pool = open_db_pool(&format!(
        "postgres://postgres:postgres@127.0.0.1:{}/postgres",
        container.get_host_port_ipv4(5432).await?
    ))
    .await?;

    MIGRATOR.run(&pool).await?;

    Ok(Postgres {
        _container: container,
        pool,
    })
}

pub fn get_test_file_path(test_file: &str) -> PathBuf {
    canonicalize(&Path::new(file!()))
        .unwrap()
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .join("test_files")
        .join(test_file)
}

tests/config.rs

+71
mod common;

use std::str::FromStr;

use clap::Parser;
use color_eyre::Result;
use ctor::ctor;
use pretty_env_logger::formatted_timed_builder;
use tokio::sync::mpsc;
use toshokan::{
    args::{DropArgs, IndexArgs, SearchArgs},
    commands::{
        create::run_create_from_config, drop::run_drop, index::run_index,
        search::run_search_with_callback,
    },
    config::IndexConfig,
};

use crate::common::{get_test_file_path, run_postgres};

#[ctor]
fn init() {
    color_eyre::install().unwrap();

    let mut log_builder = formatted_timed_builder();
    log_builder.parse_filters("toshokan=trace,opendal::services=info");
    log_builder.try_init().unwrap();
}

#[tokio::test]
async fn test_example_config() -> Result<()> {
    let postgres = run_postgres().await?;
    let config = IndexConfig::from_str(include_str!("../example_config.yaml"))?;

    // Create the index described by the example config.
    run_create_from_config(&config, &postgres.pool).await?;

    // Index the small HDFS logs sample file.
    run_index(
        IndexArgs::parse_from([
            "",
            &config.name,
            &get_test_file_path("hdfs-logs-multitenants-2.json").to_string_lossy(),
        ]),
        &postgres.pool,
    )
    .await?;

    // Search and collect the single matching document through the callback.
    let (tx, mut rx) = mpsc::channel(1);
    run_search_with_callback(
        SearchArgs::parse_from([
            "",
            &config.name,
            "tenant_id:>50 AND severity_text:INFO",
            "--limit",
            "1",
        ]),
        &postgres.pool,
        Box::new(move |doc| {
            tx.try_send(doc).unwrap();
        }),
    )
    .await?;

    assert_eq!(
        rx.recv().await.unwrap(),
        r#"{"attributes":{"class":"org.apache.hadoop.hdfs.server.datanode.DataNode"},"body":"PacketResponder: BP-108841162-10.10.34.11-1440074360971:blk_1074072698_331874, type=HAS_DOWNSTREAM_IN_PIPELINE terminating","resource":{"service":"datanode/01"},"severity_text":"INFO","tenant_id":58,"timestamp":"2016-04-13T06:46:53Z"}"#
    );

    // Drop the index to clean up.
    run_drop(DropArgs::parse_from(["", &config.name]), &postgres.pool).await?;

    Ok(())
}
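
Note on running this: `testcontainers` needs a local Docker-compatible daemon to be available, and the test can be run on its own with `cargo test --test config`.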
tests/test_files/hdfs-logs-multitenants-2.json

+2

{"timestamp":1460530013,"severity_text":"INFO","body":"PacketResponder: BP-108841162-10.10.34.11-1440074360971:blk_1074072698_331874, type=HAS_DOWNSTREAM_IN_PIPELINE terminating","resource":{"service":"datanode/01"},"attributes":{"class":"org.apache.hadoop.hdfs.server.datanode.DataNode"},"tenant_id":58}
{"timestamp":1460530014,"severity_text":"INFO","body":"Receiving BP-108841162-10.10.34.11-1440074360971:blk_1074072706_331882 src: /10.10.34.33:42666 dest: /10.10.34.11:50010","resource":{"service":"datanode/01"},"attributes":{"class":"org.apache.hadoop.hdfs.server.datanode.DataNode"},"tenant_id":46}

0 commit comments