Skip to content

Commit

Permalink
chai-api (#12)
Browse files Browse the repository at this point in the history
a simple REST api for grabbing db tables
  • Loading branch information
jhheider authored Oct 21, 2024
1 parent 0b365c7 commit 8c6ef06
Show file tree
Hide file tree
Showing 14 changed files with 421 additions and 0 deletions.
86 changes: 86 additions & 0 deletions .github/workflows/chai-api.ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# CI for the chai REST API (the `api/` crate): integration tests against a
# throwaway Postgres service container, then rustfmt, clippy, and a Docker
# image build (no push).
name: api.ci

on:
  push:
    branches: [main]
  pull_request:

env:
  CARGO_TERM_COLOR: always

# Cancel superseded in-flight runs for the same ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    name: test
    runs-on: ubuntu-latest

    services:
      # Disposable Postgres; credentials mirror the DATABASE_URL used below.
      postgres:
        image: postgres
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: s3cr3t
          POSTGRES_DB: chai
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Host port 5435 (not 5432) matches the repo's docker-compose setup.
          - 5435:5432

    steps:
      - uses: actions/checkout@v3
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client
      - name: Run tests
        run: cargo test --verbose
        working-directory: api
        env:
          DATABASE_URL: postgresql://postgres:s3cr3t@localhost:5435/chai

  fmt:
    name: Rustfmt
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # NOTE(review): the actions-rs org is archived/unmaintained — consider
      # migrating to dtolnay/rust-toolchain, which is a maintained replacement.
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
          components: rustfmt
      - run: cargo fmt --all -- --check
        working-directory: api

  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
          components: clippy
      # -D warnings: any clippy lint fails the build.
      - run: cargo clippy --all-targets --all-features -- -D warnings
        working-directory: api

  docker-build:
    name: Build Docker Image
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build
        uses: docker/build-push-action@v3
        with:
          context: ./api
          push: false # Set push to false to prevent pushing the image
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -227,3 +227,11 @@ psql "postgresql://postgres:s3cr3t@localhost:5435/chai" -c "SELECT count(id) FRO
```sh
psql "postgresql://postgres:s3cr3t@localhost:5435/chai" -c "SELECT * FROM load_history;"
```

### restart-api

Restarts the API container so it reloads its cached list of database tables.

```sh
docker-compose restart api
```
4 changes: 4 additions & 0 deletions api/.dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Keep build artifacts out of the Docker build context — the image builds
# from source in its own stages.
/target
# VCS metadata and docs add nothing to the image.
.git
.gitignore
README.md
4 changes: 4 additions & 0 deletions api/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Cargo build output.
/target
# rustfmt backup files.
**/*.rs.bk
# NOTE(review): Cargo.lock is usually committed for binaries; ignored here.
Cargo.lock
# Local environment (DATABASE_URL etc.) — never commit secrets.
.env
23 changes: 23 additions & 0 deletions api/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
[package]
name = "chai-api"
version = "0.1.0"
edition = "2021"
authors = ["Jacob Heider <[email protected]>"]
description = "A simple REST API for the CHAI database"
readme = "README.md"
license = "MIT"
repository = "https://github.com/teaxyz/chai-oss"

[dependencies]
# Web framework.
actix-web = "4.3"
# Loads .env into the process environment at startup.
# NOTE(review): the `dotenv` crate is unmaintained; `dotenvy` is the
# maintained drop-in fork — consider switching.
dotenv = "0.15"
# Async runtime required by actix-web and tokio-postgres.
tokio = { version = "1", features = ["full"] }
# Logging facade + env-configured backend (RUST_LOG).
log = "0.4"
env_logger = "0.10"
# JSON (de)serialization for request params and responses.
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = { version = "0.4", features = ["serde"] }
# Async Postgres driver; features map JSON and chrono column types.
tokio-postgres = { version = "0.7", features = [
    "with-serde_json-1",
    "with-chrono-0_4",
] }
20 changes: 20 additions & 0 deletions api/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Multi-stage build with cargo-chef: dependency compilation is cached in its
# own layer so source-only changes rebuild fast.
FROM lukemathwalker/cargo-chef:latest-rust-1.78.0 as chef
WORKDIR /app

# Stage 1: compute the dependency "recipe" from the manifests.
FROM chef as planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json

# Stage 2: build dependencies from the recipe (cacheable), then the app.
FROM chef as builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json
COPY . .
RUN cargo build --release

# Stage 3: slim runtime image — just the binary plus curl/TLS roots.
FROM debian:bookworm-slim as runtime
WORKDIR /app
RUN apt-get update && apt-get install -y curl openssl ca-certificates && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/chai-api /usr/local/bin
# Default targets the docker-compose `db` service; override for other envs.
ENV DATABASE_URL=postgresql://postgres:s3cr3t@db:5432/chai
EXPOSE 8080
CMD ["chai-api"]
14 changes: 14 additions & 0 deletions api/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# CHAI API

CHAI API is a REST API service for accessing the CHAI database, which contains package manager data.

## Features

- List all tables in the database
- Fetch paginated data from any table
- Heartbeat endpoint for health checks

## Requirements

- Rust 1.78 or later (matches the toolchain pinned in the Dockerfile)
- PostgreSQL database
7 changes: 7 additions & 0 deletions api/src/app_state.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
use std::sync::Arc;
use tokio_postgres::Client;

/// Shared, read-only application state handed to every actix-web handler.
pub struct AppState {
    /// Connected PostgreSQL client, shared across workers via `Arc`.
    pub client: Arc<Client>,
    /// Table names discovered at startup; used as the allow-list for
    /// the `/{table}` endpoint. Refreshed only by restarting the service.
    pub tables: Arc<Vec<String>>,
}
30 changes: 30 additions & 0 deletions api/src/db.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
use std::sync::Arc;
use tokio_postgres::{Client, NoTls};

/// Connects to PostgreSQL at `database_url` and returns a shared client handle.
///
/// # Panics
/// Panics if the initial connection fails — a missing database is treated as
/// fatal at startup.
pub async fn create_db_client(database_url: &str) -> Arc<Client> {
    let (client, connection) = tokio_postgres::connect(database_url, NoTls)
        .await
        .expect("Failed to connect to PostgreSQL");

    // The connection future must be polled for the client to make progress;
    // drive it on a background task and log (rather than crash on) errors.
    tokio::spawn(async move {
        if let Err(e) = connection.await {
            log::error!("Database connection error: {}", e);
        }
    });

    Arc::new(client)
}

pub async fn get_tables(client: &Arc<Client>) -> Vec<String> {
let rows = client
.query(
"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'",
&[],
)
.await
.expect("Failed to fetch tables");

rows.into_iter()
.map(|row| row.get::<_, String>("table_name"))
.collect()
}
90 changes: 90 additions & 0 deletions api/src/handlers.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
use actix_web::{get, web, HttpResponse, Responder};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};

use crate::app_state::AppState;
use crate::utils::{get_column_names, rows_to_json};

/// Query-string parameters for paginated table reads (`?page=…&limit=…`).
#[derive(Deserialize)]
struct PaginationParams {
    // 1-based page number; handler defaults to 1 and clamps to >= 1.
    page: Option<i64>,
    // Rows per page; handler defaults to 200 and clamps to 1..=1000.
    limit: Option<i64>,
}

/// JSON envelope for one page of rows from a single table.
#[derive(Serialize)]
struct PaginatedResponse {
    // Table the rows were read from.
    table: String,
    // Total row count in the table, not just this page.
    total_count: i64,
    // 1-based page number that was served.
    page: i64,
    // Page size used for this response.
    limit: i64,
    // ceil(total_count / limit).
    total_pages: i64,
    // Column names as reported by the driver for this result set.
    columns: Vec<String>,
    // One JSON object per row.
    data: Vec<Value>,
}

/// GET /tables — the JSON array of table names cached at startup.
#[get("/tables")]
pub async fn list_tables(data: web::Data<AppState>) -> impl Responder {
    HttpResponse::Ok().json(&*data.tables)
}

/// GET /heartbeat — liveness probe; always responds 200 with body "OK".
#[get("/heartbeat")]
pub async fn heartbeat() -> impl Responder {
    HttpResponse::Ok().body("OK")
}

/// GET /{table} — paginated rows from a single table, as JSON.
///
/// `page` defaults to 1 (min 1); `limit` defaults to 200 (clamped to
/// 1..=1000). Unknown tables yield 404; database failures yield 500 with a
/// generic message (details are logged, not leaked to the client).
#[get("/{table}")]
pub async fn get_table(
    path: web::Path<String>,
    query: web::Query<PaginationParams>,
    data: web::Data<AppState>,
) -> impl Responder {
    let table = path.into_inner();
    // The table name is interpolated into SQL below, so it MUST be checked
    // against the allow-list discovered at startup (AppState::tables).
    if !data.tables.contains(&table) {
        return HttpResponse::NotFound().json(json!({
            "error": format!("Table '{}' not found", table)
        }));
    }

    let page = query.page.unwrap_or(1).max(1);
    let limit = query.limit.unwrap_or(200).clamp(1, 1000);
    let offset = (page - 1) * limit;

    // Quote the identifier so mixed-case or reserved-word table names work;
    // embedded double quotes are doubled per SQL identifier rules. (The
    // unquoted form would be case-folded or rejected by Postgres.)
    let ident = format!("\"{}\"", table.replace('"', "\"\""));
    let count_query = format!("SELECT COUNT(*) FROM {}", ident);
    let data_query = format!("SELECT * FROM {} LIMIT $1 OFFSET $2", ident);

    // Count first; bail out early on failure instead of nesting matches.
    let count_row = match data.client.query_one(&count_query, &[]).await {
        Ok(row) => row,
        Err(e) => {
            log::error!("Database count query error: {}", e);
            return HttpResponse::InternalServerError().json(json!({
                "error": "An error occurred while counting rows in the database"
            }));
        }
    };
    let total_count: i64 = count_row.get(0);
    let total_pages = (total_count as f64 / limit as f64).ceil() as i64;

    match data.client.query(&data_query, &[&limit, &offset]).await {
        Ok(rows) => {
            let columns = get_column_names(&rows);
            let data = rows_to_json(&rows);
            HttpResponse::Ok().json(PaginatedResponse {
                table,
                total_count,
                page,
                limit,
                total_pages,
                columns,
                data,
            })
        }
        Err(e) => {
            log::error!("Database query error: {}", e);
            HttpResponse::InternalServerError().json(json!({
                "error": "An error occurred while querying the database"
            }))
        }
    }
}
13 changes: 13 additions & 0 deletions api/src/logging.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
use env_logger::Env;

/// Initializes the global `env_logger` backend, defaulting to `info`-level
/// output when `RUST_LOG` is not set in the environment.
pub fn setup_logger() {
    let filter_env = Env::default().default_filter_or("info");
    env_logger::init_from_env(filter_env);
}

/// Thin factory for the actix-web request-logging middleware.
pub struct Logger;

impl Logger {
    /// Builds the middleware with an access-log-style format: peer address,
    /// request line, status, response size, referer, user agent, duration.
    ///
    /// NOTE(review): this is an inherent method named `default`, not an impl
    /// of `std::default::Default` — it returns
    /// `actix_web::middleware::Logger`, not `Self`.
    pub fn default() -> actix_web::middleware::Logger {
        actix_web::middleware::Logger::new("%a '%r' %s %b '%{Referer}i' '%{User-Agent}i' %T")
    }
}
47 changes: 47 additions & 0 deletions api/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
mod app_state;
mod db;
mod handlers;
mod logging;
mod utils;

use actix_web::{web, App, HttpServer};
use dotenv::dotenv;
use std::env;
use std::sync::Arc;

use crate::app_state::AppState;
use crate::db::create_db_client;
use crate::handlers::{get_table, heartbeat, list_tables};
use crate::logging::setup_logger;

/// Entry point: load configuration, connect to Postgres, discover tables,
/// then serve the REST API until shutdown.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Load .env (if present) before reading configuration from the env.
    dotenv().ok();
    setup_logger();

    // DATABASE_URL is mandatory; HOST/PORT fall back to 0.0.0.0:8080.
    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    let host = env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
    let port = env::var("PORT").unwrap_or_else(|_| "8080".to_string());
    let bind_address = format!("{}:{}", host, port);

    // One shared client; the table list is fetched once here and becomes the
    // allow-list for /{table} (restart the service to refresh it).
    let client = create_db_client(&database_url).await;
    let tables = Arc::new(db::get_tables(&client).await);

    log::info!("Available tables: {:?}", tables);
    log::info!("Starting server at http://{}", bind_address);

    // The factory closure runs once per worker; each gets cheap Arc clones.
    HttpServer::new(move || {
        App::new()
            .wrap(logging::Logger::default())
            .app_data(web::Data::new(AppState {
                client: Arc::clone(&client),
                tables: Arc::clone(&tables),
            }))
            .service(list_tables)
            .service(heartbeat)
            // Registered last: /{table} matches any single path segment and
            // would shadow the literal routes above if registered first.
            .service(get_table)
    })
    .bind(&bind_address)?
    .run()
    .await
}
Loading

0 comments on commit 8c6ef06

Please sign in to comment.