Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Switch to VDAF-13. #3504

Merged
merged 1 commit into from
Nov 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 46 additions & 12 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ cfg-if = "1.0.0"
chrono = { version = "0.4.38", default-features = false }
clap = { version = "4.5.21", features = ["cargo", "derive", "env"] }
console-subscriber = "0.4.1"
constcat = "0.5"
deadpool = "0.12.1"
deadpool-postgres = "0.14.0"
derivative = "2.2.0"
Expand Down Expand Up @@ -73,7 +74,9 @@ postgres-types = "0.2.8"
pretty_assertions = "1.4.1"
# Disable default features so that individual workspace crates can choose to
# re-enable them
prio = { version = "0.16.7", default-features = false, features = ["experimental"] }
# TODO(#3436): switch to a released version of libprio, once there is a released version implementing VDAF-13
# prio = { version = "0.16.7", default-features = false, features = ["experimental"] }
prio = { git = "https://github.com/divviup/libprio-rs", rev = "c85e537682a7932edc0c44c80049df0429d6fa4c", default-features = false, features = ["experimental"] }
prometheus = "0.13.4"
querystring = "1.1.0"
quickcheck = { version = "1.0.3", default-features = false }
Expand Down
8 changes: 5 additions & 3 deletions aggregator/src/aggregator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ use janus_core::{
retries::retry_http_request_notify,
time::{Clock, DurationExt, IntervalExt, TimeExt},
vdaf::{
new_prio3_sum_vec_field64_multiproof_hmacsha256_aes128,
new_prio3_sum_vec_field64_multiproof_hmacsha256_aes128, vdaf_application_context,
Prio3SumVecField64MultiproofHmacSha256Aes128, VdafInstance, VERIFY_KEY_LENGTH,
},
Runtime,
Expand Down Expand Up @@ -1978,12 +1978,13 @@ impl VdafOps {

move || {
let span = info_span!(parent: parent_span, "handle_aggregate_init_generic threadpool task");
let ctx = vdaf_application_context(task.id());

req
.prepare_inits()
.par_iter()
.enumerate()
.try_for_each_with((sender, span), |(sender, span), (ord, prepare_init)| {
.try_for_each(|(ord, prepare_init)| {
let _entered = span.enter();

// If decryption fails, then the aggregator MUST fail with error `hpke-decrypt-error`. (§4.4.2.2)
Expand Down Expand Up @@ -2182,14 +2183,15 @@ impl VdafOps {
trace_span!("VDAF preparation (helper initialization)").in_scope(|| {
vdaf.helper_initialized(
verify_key.as_bytes(),
&ctx,
&agg_param,
/* report ID is used as VDAF nonce */
prepare_init.report_share().metadata().id().as_ref(),
&public_share,
&input_share,
prepare_init.message(),
)
.and_then(|transition| transition.evaluate(&vdaf))
.and_then(|transition| transition.evaluate(&ctx, &vdaf))
.map_err(|error| {
handle_ping_pong_error(
task.id(),
Expand Down
1 change: 1 addition & 0 deletions aggregator/src/aggregator/aggregate_init_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@ where
) -> (ReportShare, VdafTranscript<VERIFY_KEY_SIZE, V>) {
let transcript = run_vdaf(
&self.vdaf,
self.task.id(),
self.task.vdaf_verify_key().unwrap().as_bytes(),
&self.aggregation_param,
report_metadata.id(),
Expand Down
11 changes: 6 additions & 5 deletions aggregator/src/aggregator/aggregation_job_continue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use janus_aggregator_core::{
},
task::AggregatorTask,
};
use janus_core::time::Clock;
use janus_core::{time::Clock, vdaf::vdaf_application_context};
use janus_messages::{
AggregationJobContinueReq, AggregationJobResp, PrepareResp, PrepareStepResult, ReportError,
Role,
Expand Down Expand Up @@ -159,17 +159,18 @@ impl VdafOps {

move || {
let span = info_span!(parent: parent_span, "step_aggregation_job threadpool task");
let ctx = vdaf_application_context(task.id());

prep_steps_and_ras.into_par_iter().try_for_each_with(
(sender, span),
|(sender, span), (prep_step, report_aggregation, prep_state)| {
prep_steps_and_ras.into_par_iter().try_for_each(
|(prep_step, report_aggregation, prep_state)| {
let _entered = span.enter();

let (report_aggregation_state, prepare_step_result, output_share) =
trace_span!("VDAF preparation (helper continuation)")
.in_scope(|| {
// Continue with the incoming message.
vdaf.helper_continued(
&ctx,
PingPongState::Continued(prep_state.clone()),
aggregation_job.aggregation_parameter(),
prep_step.message(),
Expand All @@ -181,7 +182,7 @@ impl VdafOps {
transition,
} => {
let (new_state, message) =
transition.evaluate(vdaf.as_ref())?;
transition.evaluate(&ctx, vdaf.as_ref())?;
let (report_aggregation_state, output_share) =
match new_state {
// Helper did not finish. Store the new
Expand Down
13 changes: 13 additions & 0 deletions aggregator/src/aggregator/aggregation_job_creator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -976,6 +976,7 @@ mod tests {
let leader_report_metadata = ReportMetadata::new(random(), report_time);
let leader_transcript = run_vdaf(
vdaf.as_ref(),
leader_task.id(),
leader_task.vdaf_verify_key().unwrap().as_bytes(),
&(),
leader_report_metadata.id(),
Expand Down Expand Up @@ -1166,6 +1167,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -1344,6 +1346,7 @@ mod tests {
let first_report_metadata = ReportMetadata::new(random(), report_time);
let first_transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
first_report_metadata.id(),
Expand All @@ -1360,6 +1363,7 @@ mod tests {
let second_report_metadata = ReportMetadata::new(random(), report_time);
let second_transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
second_report_metadata.id(),
Expand Down Expand Up @@ -1556,6 +1560,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -1740,6 +1745,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -1937,6 +1943,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -2098,6 +2105,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -2211,6 +2219,7 @@ mod tests {
let last_report_metadata = ReportMetadata::new(random(), report_time);
let last_transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
last_report_metadata.id(),
Expand Down Expand Up @@ -2358,6 +2367,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -2474,6 +2484,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down Expand Up @@ -2629,6 +2640,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time_1);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand All @@ -2649,6 +2661,7 @@ mod tests {
let report_metadata = ReportMetadata::new(random(), report_time_2);
let transcript = run_vdaf(
vdaf.as_ref(),
task.id(),
task.vdaf_verify_key().unwrap().as_bytes(),
&(),
report_metadata.id(),
Expand Down
Loading
Loading