[ECO-616] Add assorted order book PR tweaks #520

Merged 2 commits on Oct 7, 2023
src/docker/compose.dss.yaml (2 additions, 1 deletion)

@@ -37,7 +37,8 @@ services:
       - postgres
     environment:
       - PGWS_DB_URI=postgres://econia:econia@postgres/econia
-      - PGWS_JWT_SECRET=conjunctivodacryocystorhinostomy
+      # This has to be at least 32 characters long.
+      - PGWS_JWT_SECRET=econia_0000000000000000000000000
       - PGWS_CHECK_LISTENER_INTERVAL=1000
       - PGWS_LISTEN_CHANNEL=econiaws
     ports:
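The replacement secret is padded to exactly 32 characters because, as the added comment notes, the value has to be at least 32 characters long (HS256 secrets are expected to be at least 256 bits). As a minimal illustration that is not part of this PR, a startup check along these lines could reject a too-short `PGWS_JWT_SECRET` before it reaches postgres-websockets; the standalone `main` here is hypothetical:

```rust
use std::env;

/// Minimum secret length noted in the compose file comment above.
const MIN_JWT_SECRET_BYTES: usize = 32;

fn main() {
    // Hypothetical fail-fast check: read the same variable the compose file
    // passes to postgres-websockets and verify its length in bytes.
    let secret = env::var("PGWS_JWT_SECRET").unwrap_or_default();
    if secret.len() < MIN_JWT_SECRET_BYTES {
        eprintln!(
            "PGWS_JWT_SECRET is {} bytes; it has to be at least {} characters long",
            secret.len(),
            MIN_JWT_SECRET_BYTES
        );
        std::process::exit(1);
    }
    println!("PGWS_JWT_SECRET length OK");
}
```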
src/rust/aggregator/README.md (2 additions, 1 deletion)

@@ -49,4 +49,5 @@ For example, a 1-minute candle stick data flow would follow the following principles:

 # SQLX

-The aggregator uses SQLX. In order for the requests to be checked and the crate to be compiled when the database is offline, you have to run `cargo sqlx prepare --workspace` from the Rust root (`src/rust`) when updating or creating a request.
+The aggregator uses [SQLx](https://github.com/launchbadge/sqlx/blob/main/README.md).
+In order for the requests to be checked and the crate to be compiled when the database is offline, you have to run `cargo sqlx prepare --workspace` from the Rust root (`src/rust`) when updating or creating a request.
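For context on what `cargo sqlx prepare --workspace` caches, here is a minimal sketch of a compile-time-checked SQLx query; the `markets` table and the `count_markets` helper are illustrative assumptions, not taken from the Econia schema:

```rust
use sqlx::PgPool;

/// Hypothetical helper: counts rows in an illustrative `markets` table.
async fn count_markets(pool: &PgPool) -> Result<i64, sqlx::Error> {
    // query_scalar! is checked against the database schema at compile time;
    // COUNT(*) is inferred as nullable, hence the Option.
    let count = sqlx::query_scalar!("SELECT COUNT(*) FROM markets")
        .fetch_one(pool)
        .await?;
    Ok(count.unwrap_or(0))
}
```

Running `cargo sqlx prepare --workspace` against a live database stores the metadata for queries like this, which is what lets the macro still type-check when the database is offline.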
src/rust/aggregator/src/data/user_history.rs (28 additions, 5 deletions)

@@ -1,10 +1,13 @@
 use anyhow::anyhow;
-use bigdecimal::{BigDecimal, Zero, num_bigint::ToBigInt};
+use bigdecimal::{num_bigint::ToBigInt, BigDecimal, Zero};
 use chrono::{DateTime, Duration, Utc};
 use sqlx::{PgConnection, PgPool, Postgres, Transaction};

 use super::{Data, DataAggregationError, DataAggregationResult};

+/// Number of bits to shift when encoding transaction version.
+const SHIFT_TXN_VERSION: u8 = 64;
+
 #[derive(sqlx::Type, Debug)]
 #[sqlx(type_name = "order_status", rename_all = "lowercase")]
 pub enum OrderStatus {

@@ -137,8 +140,19 @@ impl Data for UserHistory {
         .await
         .map_err(|e| DataAggregationError::ProcessingError(anyhow!(e)))?;
         for x in &limit_events {
-            let txn = x.txn_version.to_bigint().ok_or(DataAggregationError::ProcessingError(anyhow!("txn_version not integer")))? << 64;
-            let event = x.event_idx.to_bigint().ok_or(DataAggregationError::ProcessingError(anyhow!("event_idx not integer")))?;
+            let txn = x
+                .txn_version
+                .to_bigint()
+                .ok_or(DataAggregationError::ProcessingError(anyhow!(
+                    "txn_version not integer"
+                )))?
+                << SHIFT_TXN_VERSION;
+            let event = x
+                .event_idx
+                .to_bigint()
+                .ok_or(DataAggregationError::ProcessingError(anyhow!(
+                    "event_idx not integer"
+                )))?;
             let txn_event: BigDecimal = BigDecimal::from(txn | event);
             sqlx::query!(
                 r#"

@@ -425,8 +439,17 @@ async fn aggregate_change<'a>(
         (record.order_type, record.remaining_size);
     // If its a limit order and needs reordering
     if matches!(order_type, OrderType::Limit) && &original_size < new_size {
-        let txn = txn_version.to_bigint().ok_or(DataAggregationError::ProcessingError(anyhow!("txn_version not integer")))? << 64;
-        let event = event_idx.to_bigint().ok_or(DataAggregationError::ProcessingError(anyhow!("event_idx not integer")))?;
+        let txn = txn_version
+            .to_bigint()
+            .ok_or(DataAggregationError::ProcessingError(anyhow!(
+                "txn_version not integer"
+            )))?
+            << SHIFT_TXN_VERSION;
+        let event = event_idx
+            .to_bigint()
+            .ok_or(DataAggregationError::ProcessingError(anyhow!(
+                "event_idx not integer"
+            )))?;
         let txn_event: BigDecimal = BigDecimal::from(txn & event);
         sqlx::query!(
             r#"
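The refactor above replaces the magic number 64 with `SHIFT_TXN_VERSION`: the transaction version is shifted into the high bits and combined with the event index (a bitwise OR in the first hunk), so the packed value sorts by version first and event index second. Below is a self-contained sketch of that packing, using `u128` instead of the `BigInt` values the aggregator reads from `NUMERIC` columns; the `pack` and `unpack` helpers are hypothetical, not part of the PR:

```rust
/// Same shift the aggregator uses for the high half of the packed value.
const SHIFT_TXN_VERSION: u8 = 64;

/// Pack a transaction version and event index into one 128-bit value.
fn pack(txn_version: u64, event_idx: u64) -> u128 {
    ((txn_version as u128) << SHIFT_TXN_VERSION) | event_idx as u128
}

/// Inverse of `pack`: recover the transaction version and event index.
fn unpack(txn_event: u128) -> (u64, u64) {
    (
        (txn_event >> SHIFT_TXN_VERSION) as u64, // high 64 bits
        txn_event as u64,                        // low 64 bits
    )
}

fn main() {
    let packed = pack(123_456_789, 42);
    assert_eq!(unpack(packed), (123_456_789, 42));
    // Packing preserves ordering: version first, then event index.
    assert!(pack(2, 0) > pack(1, u64::MAX));
    println!("packed value: {packed}");
}
```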
SQL definition of `api.jwt` (file path not shown in this view; 1 addition, 1 deletion)

@@ -25,5 +25,5 @@ CREATE EXTENSION pgjwt CASCADE;


 CREATE FUNCTION api.jwt (json) RETURNS TEXT AS $$
-SELECT sign((CONCAT(CONCAT('{"mode": "r","channels": ', $1->>'channels'::text),'}'))::json, 'conjunctivodacryocystorhinostomy')
+SELECT sign((CONCAT(CONCAT('{"mode": "r","channels": ', $1->>'channels'::text),'}'))::json, 'econia_0000000000000000000000000')
 $$ LANGUAGE SQL;
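The `api.jwt` function signs a read-only claims payload with the same literal that the compose file sets as `PGWS_JWT_SECRET`, which is why both files change in lockstep in this PR. Purely as an illustration, and not part of the change itself, an equivalent HS256 token could be produced in Rust with the `jsonwebtoken` crate; the channel name below is a placeholder:

```rust
use jsonwebtoken::{encode, EncodingKey, Header};
use serde_json::json;

fn main() -> Result<(), jsonwebtoken::errors::Error> {
    // Same placeholder secret as the compose file; real deployments should
    // substitute their own value of at least 32 characters.
    let secret = "econia_0000000000000000000000000";

    // Mirrors the claims the SQL function concatenates together:
    // read-only mode plus the requested channels.
    let claims = json!({ "mode": "r", "channels": ["some_channel"] });

    // HS256 is the jsonwebtoken default header, matching pgjwt's sign().
    let token = encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(secret.as_bytes()),
    )?;
    println!("{token}");
    Ok(())
}
```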