Analytics tests #177

Merged · 32 commits · May 27, 2024
10 changes: 9 additions & 1 deletion .github/workflows/connect-test-local.yml
@@ -33,7 +33,11 @@ jobs:
run: |
docker-compose down
rm -rf .infra/target
docker-compose up -d --no-deps --force-recreate --remove-orphans
rm -rf .infra/backups
rm -rf .infra/config
rm -rf .infra/logs
rm -rf .infra/ofelia_logs
echo ${{secrets.TESTING_LAPTOP_PASSWORD}} | sudo -S ./scripts/clean_start.sh
- name: Setup Grafana
working-directory: ./grafana
run: |
@@ -114,3 +118,7 @@ jobs:
run: |
docker-compose down
rm -rf .infra/target
rm -rf .infra/backups
rm -rf .infra/config
rm -rf .infra/logs
rm -rf .infra/ofelia_logs
7 changes: 6 additions & 1 deletion .gitignore
@@ -1,4 +1,9 @@
/target
/Cargo.lock
/.vscode
/grafana-client-gen/build
/grafana-client-gen/build
/infra/target
/infra/config
/infra/logs
/infra/backups
/infra/ofelia_logs
3 changes: 1 addition & 2 deletions database/migrations/0001_types.sql
@@ -35,8 +35,7 @@ CREATE TYPE geo_location AS (
CREATE TYPE event_type_enum AS ENUM (
'AppConnect',
'AppDisconnect',
'ClientConnectInit',
'ClientConnectResolve'
'ClientConnect',
'ClientDisconnect',
'SignMessage',
'SignTransaction',
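
Note that editing 0001_types.sql only changes what a freshly created database gets; an instance that already ran the old migration keeps the old enum labels. A hedged sketch of how such an instance could be brought in line (assuming Postgres 10+, and accepting that the now-unused 'ClientConnectResolve' label stays behind, since Postgres cannot drop an enum value directly):

```sql
-- Hypothetical follow-up migration for an already-migrated database;
-- the edited 0001_types.sql only applies to a fresh schema.
ALTER TYPE event_type_enum RENAME VALUE 'ClientConnectInit' TO 'ClientConnect';
-- 'ClientConnectResolve' cannot be removed without rebuilding the type,
-- so it would simply remain unused.
```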
11 changes: 8 additions & 3 deletions database/migrations/0014_domain_verifications.sql
@@ -1,9 +1,14 @@
CREATE TABLE domain_verifications(
domain_name TEXT PRIMARY KEY,
domain_name TEXT NOT NULL,
app_id TEXT NOT NULL,
code TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL,
finished_at TIMESTAMPTZ
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
finished_at TIMESTAMPTZ,
PRIMARY KEY (domain_name, app_id) -- One app can only verify a particular domain once
);

-- Safety measure to prevent verification blockade in case of malicious intent
CREATE UNIQUE INDEX idx_unique_verified_domains ON domain_verifications (domain_name)
WHERE finished_at IS NOT NULL;

CREATE INDEX domain_verifications_app_id_idx ON domain_verifications(app_id);
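
The composite primary key together with the partial unique index means any number of apps can hold a pending verification for the same domain, but only one row per domain can ever be marked finished. A small illustration of those semantics (hypothetical 'example.com' / app_a / app_b values, not taken from this PR):

```sql
-- Two apps may hold pending verifications for the same domain:
INSERT INTO domain_verifications (domain_name, app_id, code)
VALUES ('example.com', 'app_a', 'code_a'),
       ('example.com', 'app_b', 'code_b');

-- The first app finishes its verification:
UPDATE domain_verifications
SET finished_at = NOW()
WHERE domain_name = 'example.com' AND app_id = 'app_a';

-- Marking a second row for the same domain as finished now violates
-- idx_unique_verified_domains and is rejected:
UPDATE domain_verifications
SET finished_at = NOW()
WHERE domain_name = 'example.com' AND app_id = 'app_b';  -- unique violation
```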
17 changes: 17 additions & 0 deletions database/src/tables/domain_verifications/select.rs
@@ -36,4 +36,21 @@ impl Db {
.await
.map_err(|e| e.into());
}

pub async fn get_domain_verification_by_domain_name_and_app_id(
&self,
domain_name: &String,
app_id: &String,
) -> Result<Option<DomainVerification>, DbError> {
let query =
format!("SELECT * FROM {DOMAIN_VERIFICATIONS_TABLE_NAME} WHERE domain_name = $1 AND app_id = $2");
let typed_query = query_as::<_, DomainVerification>(&query);

return typed_query
.bind(&domain_name)
.bind(&app_id)
.fetch_optional(&self.connection_pool)
.await
.map_err(|e| e.into());
}
}
50 changes: 49 additions & 1 deletion database/src/tables/domain_verifications/update.rs
@@ -33,14 +33,16 @@
&self,
tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
domain_name: &String,
app_id: &String,
) -> Result<(), DbError> {
let query_body = format!(
"UPDATE {DOMAIN_VERIFICATIONS_TABLE_NAME} SET finished_at = $1 WHERE domain_name = $2"
"UPDATE {DOMAIN_VERIFICATIONS_TABLE_NAME} SET finished_at = $1 WHERE domain_name = $2 AND app_id = $3 AND finished_at IS NULL"
);

let query_result = query(&query_body)
.bind(&get_current_datetime())
.bind(&domain_name)
.bind(&app_id)
.execute(&mut **tx)
.await;

@@ -50,3 +52,49 @@
}
}
}

#[cfg(feature = "cloud_integration_tests")]
#[cfg(test)]
mod tests {

#[tokio::test]
async fn test_domain_verification() {
let db = super::Db::connect_to_the_pool().await;
db.truncate_all_tables().await.unwrap();

let domain_name = "valid_domain_name".to_string();
let first_app_id = "first_app_id".to_string();
let second_app_id = "second_app_id".to_string();

let code = "code".to_string();

// Start verification by first app
db.create_new_domain_verification_entry(&domain_name, &first_app_id, &code)
.await
.unwrap();

// Try to verify the same domain by the same app
db.create_new_domain_verification_entry(&domain_name, &first_app_id, &code)
.await
.unwrap_err();

// Try to start verification by the second app for the same domain
db.create_new_domain_verification_entry(&domain_name, &second_app_id, &code)
.await
.unwrap();

// Finish verification
let mut tx = db.connection_pool.begin().await.unwrap();
db.finish_domain_verification(&mut tx, &domain_name, &first_app_id)
.await
.unwrap();
tx.commit().await.unwrap();

// Try to finish verification of the same domain, should fail
let mut tx = db.connection_pool.begin().await.unwrap();
db.finish_domain_verification(&mut tx, &domain_name, &second_app_id)
.await
.unwrap_err();
tx.rollback().await.unwrap();
}
}
3 changes: 1 addition & 2 deletions database/src/tables/events/app_disconnect/table_struct.rs
@@ -1,8 +1,7 @@
use sqlx::{postgres::PgRow, FromRow, Row};

pub const EVENT_APP_DISCONNECT_TABLE_NAME: &str = "event_app_disconnect";
pub const EVENT_APP_DISCONNECT_KEYS: &str =
"event_id, session_id, device_metadata, lang, timezone, new_session";
pub const EVENT_APP_DISCONNECT_KEYS: &str = "event_id, session_id";

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct AppDisconnectEvent {
31 changes: 29 additions & 2 deletions infra/.env
100644 → 100755
@@ -1,5 +1,32 @@
ENV=DEV # PROD or DEV
PGDATA=/home/postgres/pgdata/data
ENV=DEV # Does nothing for now

# Database Configuration
# These two have to be the same
POSTGRES_USER=admin1234
PGUSER=admin1234
# -----------------------
POSTGRES_PASSWORD=password12345
POSTGRES_DB=connect_db
PG_DATA=/home/postgres/pgdata

# Images
TIMESCALEDB_IMAGE=timescale/timescaledb-ha:pg15-ts2.10
OFELIA_IMAGE=mcuadros/ofelia:988d988

# Volume Bindings
TIMESCALEDB_DATA=./target
TIMESCALEDB_BACKUPS=./backups
TIMESCALEDB_LOGS=./logs
TIMESCALEDB_PGBACKREST_CONFIG=./config
OFELIA_LOGS=./ofelia_logs
CUSTOM_ENTRYPOINT=./scripts/custom_entrypoint.sh

# Ofelia Configuration
OFELIA_SMTP_HOST=smtp.example.com
OFELIA_SMTP_PORT=587
# These two have to be the same
[email protected]
[email protected]
# -----------------------
OFELIA_SMTP_PASSWORD=examplepassword
[email protected]
64 changes: 58 additions & 6 deletions infra/docker-compose.yaml
100644 → 100755
@@ -1,14 +1,66 @@
services:
timescaledb:
image: timescale/timescaledb-ha:pg15
image: ${TIMESCALEDB_IMAGE}
ports:
- 5432:5432
volumes:
- ./target:/var/lib/postgresql/data
- ${TIMESCALEDB_DATA}:/home/postgres/pgdata
- ${TIMESCALEDB_BACKUPS}:/var/lib/pgbackrest
- ${TIMESCALEDB_PGBACKREST_CONFIG}:/home/postgres/pgdata/backup
- ${TIMESCALEDB_LOGS}:/var/log
- ${CUSTOM_ENTRYPOINT}:/usr/local/bin/custom_entrypoint.sh
entrypoint: ["/usr/local/bin/custom_entrypoint.sh"]
command: ["postgres"]
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
interval: 30s
timeout: 10s
retries: 3
restart: no
env_file:
- .env
environment:
- POSTGRES_USER
- POSTGRES_PASSWORD
- POSTGRES_DB
- TIMESCALEDB_TELEMETRY=off
ENV: ${ENV}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_DB}
PG_DATA: ${PG_DATA}
PGUSER: ${PGUSER}
labels:
ofelia.enabled: "true"
# Scheduled jobs to back up timescaledb; the commands can be replaced with scripts to log more data
# Perform full backup every day at 00:00
ofelia.job-exec.full-backup.user: "postgres"
ofelia.job-exec.full-backup.schedule: "0 0 * * *"
ofelia.job-exec.full-backup.command: "pgbackrest --stanza=db --type=full --log-level-stderr=info backup"
# Perform diff backup every 15 minutes (900 seconds)
ofelia.job-exec.diff-backup.schedule: "@every 900s"
ofelia.job-exec.diff-backup.user: "postgres"
ofelia.job-exec.diff-backup.command: "pgbackrest --stanza=db --type=diff --log-level-stderr=info backup"


# Service for running the scheduled backup jobs for timescaledb
# https://github.com/mcuadros/ofelia
ofelia:
image: ${OFELIA_IMAGE}
depends_on:
timescaledb:
condition: service_healthy
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
- ${OFELIA_LOGS}:/tmp/logs:rw
command: daemon --docker
env_file:
- .env
labels:
# Save logs locally and send email reports
ofelia.save-folder: "./tmp/logs"
ofelia.smtp-host: "${OFELIA_SMTP_HOST}"
ofelia.smtp-port: "${OFELIA_SMTP_PORT}"
ofelia.smtp-user: "${OFELIA_SMTP_USER}"
ofelia.smtp-password: "${OFELIA_SMTP_PASSWORD}"
ofelia.email-to: "${OFELIA_EMAIL_TO}"
ofelia.email-from: "${OFELIA_EMAIL_FROM}"


