Implement protocol-side updates.

FiveMovesAhead 2025-11-05 13:52:04 +00:00
parent 4c1e094535
commit b060d38b4e
9 changed files with 84 additions and 450 deletions

View File

@@ -1,6 +1,6 @@
pub use anyhow::Result;
use serde_json::{Map, Value};
use std::collections::{HashMap, HashSet};
use std::collections::HashMap;
use tig_structs::{config::*, core::*};
#[allow(async_fn_in_trait)]
@@ -12,21 +12,11 @@ pub trait Context {
evidence: String,
) -> Result<String>;
async fn get_benchmark_details(&self, benchmark_id: &String) -> Option<BenchmarkDetails>;
async fn get_benchmark_data(
&self,
benchmark_id: &String,
) -> Option<(
Option<HashSet<u64>>,
Option<HashSet<u64>>,
Option<HashSet<u64>>,
)>;
async fn add_benchmark_to_mempool(
&self,
benchmark_id: String,
details: BenchmarkDetails,
non_solution_nonces: Option<HashSet<u64>>,
solution_nonces: Option<HashSet<u64>>,
discarded_solution_nonces: Option<HashSet<u64>>,
solution_quality: Vec<i32>,
) -> Result<()>;
async fn get_binary_details(&self, code_id: &String) -> Option<BinaryDetails>;
async fn add_binary_to_mempool(&self, code_id: String, details: BinaryDetails) -> Result<()>;
@@ -97,7 +87,6 @@ pub struct AddBlockCache {
pub active_players_block_data: HashMap<String, PlayerBlockData>,
pub active_opow_block_data: HashMap<String, OPoWBlockData>,
pub active_challenges_block_data: HashMap<String, ChallengeBlockData>,
pub active_challenges_prev_block_data: HashMap<String, ChallengeBlockData>,
pub active_codes_state: HashMap<String, CodeState>,
pub active_codes_details: HashMap<String, CodeDetails>,
pub active_codes_block_data: HashMap<String, CodeBlockData>,
@@ -105,6 +94,5 @@ pub struct AddBlockCache {
pub active_advances_state: HashMap<String, AdvanceState>,
pub active_advances_details: HashMap<String, AdvanceDetails>,
pub active_advances_block_data: HashMap<String, AdvanceBlockData>,
pub active_solutions: Vec<(BenchmarkSettings, u64, u64, u64)>,
pub confirmed_num_solutions: HashMap<String, u64>,
pub active_benchmarks: Vec<(BenchmarkSettings, i32, u64)>,
}
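
Note on the interface change above: the three optional nonce sets (solution, discarded-solution, non-solution) are replaced by a single per-nonce solution_quality vector, and the cache's active_solutions becomes active_benchmarks carrying (settings, average quality, nonce count). A minimal standalone sketch of the reduction this implies; the empty-input guard is an assumption added here, since the committed code in submit_benchmark divides unguarded:

// Sketch only: reduce the submitted per-nonce quality vector to the single
// average stored in BenchmarkDetails. Integer division mirrors the commit;
// the empty-vector guard is an assumption, not part of the diff.
fn average_solution_quality(solution_quality: &[i32]) -> i32 {
    if solution_quality.is_empty() {
        return 0; // assumed default for an empty submission
    }
    solution_quality.iter().sum::<i32>() / solution_quality.len() as i32
}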

View File

@@ -1,11 +1,10 @@
use crate::context::*;
use anyhow::{anyhow, Result};
use logging_timer::time;
use rand::{rngs::StdRng, seq::IteratorRandom, Rng, SeedableRng};
use rand::{rngs::StdRng, Rng, SeedableRng};
use serde_json::{Map, Value};
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::*;
#[time]
pub async fn submit_precommit<T: Context>(
@@ -51,11 +50,17 @@ pub async fn submit_precommit<T: Context>(
return Err(anyhow!("Invalid algorithm '{}'", settings.algorithm_id));
}
// verify difficulty
let difficulty = &settings.difficulty;
// verify size
let challenge_config = &config.challenges[&settings.challenge_id];
if difficulty.len() != challenge_config.difficulty.parameter_names.len() {
return Err(anyhow!("Invalid difficulty '{:?}'", difficulty));
if settings.size < challenge_config.difficulty.size_range[0]
|| settings.size > challenge_config.difficulty.size_range[1]
{
return Err(anyhow!(
"Invalid size '{}'. Must be in range [{}, {}]",
settings.size,
challenge_config.difficulty.size_range[0],
challenge_config.difficulty.size_range[1]
));
}
if num_nonces < challenge_config.benchmarks.min_num_nonces {
@@ -66,28 +71,9 @@ pub async fn submit_precommit<T: Context>(
));
}
let challenge_data = ctx
.get_challenge_block_data(&settings.challenge_id, &settings.block_id)
.await
.unwrap();
let (lower_frontier, upper_frontier) = if challenge_data.scaling_factor > 1f64 {
(challenge_data.base_frontier, challenge_data.scaled_frontier)
} else {
(challenge_data.scaled_frontier, challenge_data.base_frontier)
};
if lower_frontier
.iter()
.any(|lower_point| pareto_compare(difficulty, lower_point) == ParetoCompare::BDominatesA)
|| upper_frontier.iter().any(|upper_point| {
pareto_compare(difficulty, upper_point) == ParetoCompare::ADominatesB
})
{
return Err(anyhow!("Invalid difficulty. Out of bounds"));
}
// verify player has sufficient balance
let submission_fee =
challenge_data.base_fee + challenge_data.per_nonce_fee * PreciseNumber::from(num_nonces);
let submission_fee = challenge_config.benchmarks.base_fee
+ challenge_config.benchmarks.per_nonce_fee * PreciseNumber::from(num_nonces);
if !ctx
.get_player_state(&player_id)
.await
@@ -117,9 +103,7 @@ pub async fn submit_benchmark<T: Context>(
player_id: String,
benchmark_id: String,
merkle_root: MerkleHash,
non_solution_nonces: Option<HashSet<u64>>,
solution_nonces: Option<HashSet<u64>>,
discarded_solution_nonces: Option<HashSet<u64>>,
solution_quality: Vec<i32>,
seed: u64,
) -> Result<()> {
// check benchmark is not duplicate
@@ -142,89 +126,36 @@ pub async fn submit_benchmark<T: Context>(
// check exactly 2 sets of nonces are provided
let precommit_details = ctx.get_precommit_details(&benchmark_id).await.unwrap();
let num_nonces = precommit_details.num_nonces as u64;
let mut nonces_sets = vec![
&solution_nonces,
&discarded_solution_nonces,
&non_solution_nonces,
];
nonces_sets.sort_by_key(|x| x.is_none());
if nonces_sets[1].is_none() || nonces_sets[2].is_some() {
return Err(anyhow!("Exactly 2 sets of nonces must be provided"));
}
let set_a = nonces_sets[0].as_ref().unwrap();
let set_b = nonces_sets[1].as_ref().unwrap();
if !set_a.is_disjoint(set_b) {
return Err(anyhow!("Nonces sets must be disjoint.",));
}
let set_c_size = num_nonces as usize - set_a.len() - set_b.len();
if set_a.len() > set_c_size || set_b.len() > set_c_size {
return Err(anyhow!("The 2 smaller sets of nonces must be submitted"));
}
if !set_a.iter().all(|n| *n < num_nonces) || !set_b.iter().all(|n| *n < num_nonces) {
return Err(anyhow!("Invalid nonces"));
}
// random sample nonces
let config = ctx.get_config().await;
let mut rng = StdRng::seed_from_u64(seed);
let benchmark_config = &config.challenges[&settings.challenge_id].benchmarks;
let max_samples = config.challenges[&settings.challenge_id]
.benchmarks
.max_samples;
let mut sampled_nonces = HashSet::new();
for set_x in [
&solution_nonces,
&discarded_solution_nonces,
&non_solution_nonces,
] {
let break_size = sampled_nonces.len() + benchmark_config.max_samples;
if let Some(set_x) = set_x {
if !set_x.is_empty() {
for _ in 0..25 {
if sampled_nonces.len() == break_size {
break;
}
sampled_nonces.insert(*set_x.iter().choose(&mut rng).unwrap());
}
}
} else {
// this set is at least 1/3 of the total nonces
for _ in 0..25 {
if sampled_nonces.len() == break_size {
break;
}
let nonce = rng.gen_range(0..num_nonces);
if !set_a.contains(&nonce) && !set_b.contains(&nonce) {
sampled_nonces.insert(nonce);
}
}
for _ in 0..25 {
if sampled_nonces.len() == max_samples {
break;
}
let nonce = rng.gen_range(0..precommit_details.num_nonces);
if sampled_nonces.contains(&nonce) {
continue;
}
sampled_nonces.insert(nonce);
}
let num_other_nonces = (set_a.len() + set_b.len()) as u64;
let num_solutions = if let Some(solution_nonces) = &solution_nonces {
solution_nonces.len() as u64
} else {
num_nonces - num_other_nonces
};
let num_discarded_solutions =
if let Some(discarded_solution_nonces) = &discarded_solution_nonces {
discarded_solution_nonces.len() as u64
} else {
num_nonces - num_other_nonces
};
let num_non_solutions = num_nonces - num_solutions - num_discarded_solutions;
let average_solution_quality =
solution_quality.iter().sum::<i32>() / (solution_quality.len() as i32);
ctx.add_benchmark_to_mempool(
benchmark_id,
BenchmarkDetails {
num_solutions,
num_discarded_solutions,
num_non_solutions,
average_solution_quality,
merkle_root,
sampled_nonces,
},
non_solution_nonces,
solution_nonces,
discarded_solution_nonces,
solution_quality,
)
.await?;
Ok(())
@@ -247,21 +178,6 @@ pub async fn submit_proof<T: Context>(
.get_benchmark_details(&benchmark_id)
.await
.ok_or_else(|| anyhow!("Benchmark needs to be submitted first."))?;
let (solution_nonces, discarded_solution_nonces, non_solution_nonces) =
ctx.get_benchmark_data(&benchmark_id).await.unwrap();
// expect that exactly 2 sets of nonces are provided
let mut nonces_sets = vec![
&solution_nonces,
&discarded_solution_nonces,
&non_solution_nonces,
];
nonces_sets.sort_by_key(|x| x.is_none());
let set_x = nonces_sets[0]
.as_ref()
.unwrap()
.union(nonces_sets[1].as_ref().unwrap())
.cloned()
.collect::<HashSet<u64>>();
// check player owns benchmark
let settings = ctx.get_precommit_settings(&benchmark_id).await.unwrap();
@@ -285,31 +201,6 @@ pub async fn submit_proof<T: Context>(
}
// verify merkle_proofs
let ChallengeBlockData {
mut hash_threshold,
average_solution_ratio,
..
} = ctx
.get_challenge_block_data(&settings.challenge_id, &settings.block_id)
.await
.ok_or_else(|| anyhow!("Block too old"))?;
// use reliability to adjust hash threshold
let solution_ratio = (benchmark_details.num_solutions
+ benchmark_details.num_discarded_solutions) as f64
/ num_nonces as f64;
let reliability = if average_solution_ratio == 0.0 {
1.0
} else if solution_ratio == 0.0 {
0.0
} else {
(solution_ratio / average_solution_ratio).min(1.0)
};
let denominator = 1000u64;
let numerator = (reliability * denominator as f64) as u64;
(U256::from(hash_threshold.clone().0) / U256::from(denominator) * U256::from(numerator))
.to_big_endian(&mut hash_threshold.0);
let mut verification_result = Ok(());
let max_branch_len = (64 - (num_nonces - 1).leading_zeros()) as usize;
for merkle_proof in merkle_proofs.iter() {
@@ -328,34 +219,6 @@ pub async fn submit_proof<T: Context>(
}
let output_meta_data = OutputMetaData::from(merkle_proof.leaf.clone());
let hash = MerkleHash::from(output_meta_data);
if hash.0 > hash_threshold.0 {
// if nonce is a solution, it must be below hash_threshold
if solution_nonces
.as_ref()
.is_some_and(|x| x.contains(&merkle_proof.leaf.nonce))
|| (solution_nonces.is_none() && !set_x.contains(&merkle_proof.leaf.nonce))
{
verification_result = Err(anyhow!(
"Invalid merkle hash for solution @ nonce {} does not meet threshold",
merkle_proof.leaf.nonce
));
break;
}
} else {
// if nonce is a discarded solution, it must be above hash_threshold
if discarded_solution_nonces
.as_ref()
.is_some_and(|x| x.contains(&merkle_proof.leaf.nonce))
|| (discarded_solution_nonces.is_none()
&& !set_x.contains(&merkle_proof.leaf.nonce))
{
verification_result = Err(anyhow!(
"Invalid merkle hash for discarded solution @ nonce {} meets threshold",
merkle_proof.leaf.nonce
));
break;
}
}
let result = merkle_proof
.branch
.calc_merkle_root(&hash, merkle_proof.leaf.nonce as usize);
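
With the nonce-set bookkeeping removed, verification sampling in submit_benchmark collapses to seeded draws over the full nonce range. A self-contained sketch of that loop (rand 0.8-style API; names are local to the sketch):

use rand::{rngs::StdRng, Rng, SeedableRng};
use std::collections::HashSet;

// Up to 25 draws, stopping early once max_samples distinct nonces are
// collected; duplicate draws are absorbed by the HashSet, matching the
// `contains`/`continue` check in the committed loop.
fn sample_nonces(seed: u64, num_nonces: u64, max_samples: usize) -> HashSet<u64> {
    let mut rng = StdRng::seed_from_u64(seed);
    let mut sampled = HashSet::new();
    for _ in 0..25 {
        if sampled.len() == max_samples {
            break;
        }
        sampled.insert(rng.gen_range(0..num_nonces));
    }
    sampled
}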

View File

@@ -1,17 +0,0 @@
use crate::context::*;
use logging_timer::time;
#[time]
pub(crate) async fn update(cache: &mut AddBlockCache) {
let AddBlockCache {
config,
active_challenges_block_data,
..
} = cache;
for (challenge_id, challenge_data) in active_challenges_block_data.iter_mut() {
let benchmarks_config = &config.challenges[challenge_id].benchmarks;
challenge_data.base_fee = benchmarks_config.min_base_fee;
challenge_data.per_nonce_fee = benchmarks_config.min_per_nonce_fee;
}
}
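
Deleting challenges::update removes the per-block fee adjustment entirely; submit_precommit now prices a submission from static config, as sketched below (plain u128 stands in for PreciseNumber so the sketch is self-contained; that substitution is an assumption):

// base_fee and per_nonce_fee now come from BenchmarksConfig rather than
// ChallengeBlockData, so the fee is constant for a given config.
fn submission_fee(base_fee: u128, per_nonce_fee: u128, num_nonces: u32) -> u128 {
    base_fee + per_nonce_fee * num_nonces as u128
}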

View File

@@ -1,6 +1,5 @@
pub mod algorithms;
pub mod benchmarks;
pub mod challenges;
pub mod opow;
pub mod players;
pub mod rewards;

View File

@@ -13,15 +13,13 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
block_details,
block_data,
active_challenges_block_data,
active_challenges_prev_block_data,
active_codes_state,
active_codes_details,
active_codes_block_data,
active_solutions,
active_benchmarks,
active_players_state,
active_players_block_data,
active_opow_block_data,
confirmed_num_solutions,
..
} = cache;
@@ -45,41 +43,31 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
let phase_in_period = config.opow.cutoff_phase_in_period;
let phase_in_end = phase_in_start + phase_in_period;
let mut num_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u64>>::new();
for (settings, num_solutions, _, _) in active_solutions.iter() {
*num_solutions_by_player_by_challenge
let mut num_nonces_by_player_by_challenge = HashMap::<String, HashMap<String, u64>>::new();
for (settings, _, num_nonces) in active_benchmarks.iter() {
*num_nonces_by_player_by_challenge
.entry(settings.player_id.clone())
.or_default()
.entry(settings.challenge_id.clone())
.or_default() += *num_solutions;
.or_default() += *num_nonces;
}
for (player_id, num_solutions_by_challenge) in num_solutions_by_player_by_challenge.iter() {
for (player_id, num_nonces_by_challenge) in num_nonces_by_player_by_challenge.iter() {
let opow_data = active_opow_block_data.get_mut(player_id).unwrap();
let min_num_solutions = active_challenge_ids
let min_num_nonces = active_challenge_ids
.iter()
.map(|id| {
num_solutions_by_challenge
.get(id)
.cloned()
.unwrap_or_default()
})
.map(|id| num_nonces_by_challenge.get(id).cloned().unwrap_or_default())
.min()
.unwrap_or_default();
let mut cutoff = (min_num_solutions as f64 * config.opow.cutoff_multiplier).ceil() as u64;
let mut cutoff = (min_num_nonces as f64 * config.opow.cutoff_multiplier).ceil() as u64;
if phase_in_challenge_ids.len() > 0 && phase_in_end > block_details.height {
let phase_in_min_num_solutions = active_challenge_ids
let phase_in_min_num_nonces = active_challenge_ids
.iter()
.filter(|&id| !phase_in_challenge_ids.contains(id))
.map(|id| {
num_solutions_by_challenge
.get(id)
.cloned()
.unwrap_or_default()
})
.map(|id| num_nonces_by_challenge.get(id).cloned().unwrap_or_default())
.min()
.unwrap_or_default();
let phase_in_cutoff =
(phase_in_min_num_solutions as f64 * config.opow.cutoff_multiplier).ceil() as u64;
(phase_in_min_num_nonces as f64 * config.opow.cutoff_multiplier).ceil() as u64;
let phase_in_weight =
(phase_in_end - block_details.height) as f64 / phase_in_period as f64;
cutoff = (phase_in_cutoff as f64 * phase_in_weight
@@ -88,45 +76,19 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
opow_data.cutoff = cutoff;
}
// update hash threshold
let denominator: u64 = 1_000_000_000_000_000;
for challenge_id in active_challenge_ids.iter() {
let difficulty_config = &config.challenges[challenge_id].difficulty;
let max_delta = U256::MAX / U256::from(denominator)
* U256::from(
(difficulty_config.hash_threshold_max_percent_delta * denominator as f64) as u64,
);
let prev_hash_threshold = active_challenges_prev_block_data
.get(challenge_id)
.map(|x| U256::from(x.hash_threshold.clone().0))
.unwrap_or(U256::MAX);
let current_solution_rate = *confirmed_num_solutions.get(challenge_id).unwrap_or(&0);
let target_threshold = if current_solution_rate == 0 {
U256::MAX
} else {
(prev_hash_threshold / U256::from(current_solution_rate))
.saturating_mul(U256::from(difficulty_config.target_solution_rate))
};
let diff = prev_hash_threshold.abs_diff(target_threshold);
let delta = (diff / U256::from(100)).min(max_delta);
let hash_threshold = if prev_hash_threshold > target_threshold {
prev_hash_threshold.saturating_sub(delta)
} else {
prev_hash_threshold.saturating_add(delta)
};
let challenge_data = active_challenges_block_data.get_mut(challenge_id).unwrap();
hash_threshold.to_big_endian(&mut challenge_data.hash_threshold.0);
}
// update qualifiers
let mut solutions_by_challenge =
HashMap::<String, Vec<(&BenchmarkSettings, &u64, &u64, &u64)>>::new();
for (settings, num_solutions, num_discarded_solutions, num_nonces) in active_solutions.iter() {
solutions_by_challenge
let mut benchmarks_by_challenge =
HashMap::<String, Vec<(&BenchmarkSettings, &i32, &u64, Point)>>::new();
for (settings, average_solution_quality, num_nonces) in active_benchmarks.iter() {
benchmarks_by_challenge
.entry(settings.challenge_id.clone())
.or_default()
.push((settings, num_solutions, num_discarded_solutions, num_nonces));
.push((
settings,
average_solution_quality,
num_nonces,
vec![settings.size as i32, average_solution_quality.clone()],
));
}
let max_qualifiers_by_player = active_opow_ids
@@ -140,17 +102,14 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
.collect::<HashMap<String, u64>>();
for challenge_id in active_challenge_ids.iter() {
if !solutions_by_challenge.contains_key(challenge_id) {
if !benchmarks_by_challenge.contains_key(challenge_id) {
continue;
}
let challenge_config = &config.challenges[challenge_id];
let solutions = solutions_by_challenge.get_mut(challenge_id).unwrap();
let points = solutions
let benchmarks = benchmarks_by_challenge.get_mut(challenge_id).unwrap();
let points = benchmarks
.iter()
.filter(|(_, &num_solutions, &num_discarded_solutions, _)| {
num_solutions > 0 || num_discarded_solutions > 0
})
.map(|(settings, _, _, _)| settings.difficulty.clone())
.map(|(_, _, _, difficulty)| difficulty.clone())
.collect::<Frontier>();
let mut frontier_indexes = HashMap::<Point, usize>::new();
for (frontier_index, frontier) in pareto_algorithm(&points, false).into_iter().enumerate() {
@@ -158,45 +117,35 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
frontier_indexes.insert(point, frontier_index);
}
}
let mut solutions_by_frontier_idx =
HashMap::<usize, Vec<(&BenchmarkSettings, &u64, &u64, &u64)>>::new();
for &x in solutions.iter() {
if !points.contains(&x.0.difficulty) {
continue;
}
solutions_by_frontier_idx
.entry(frontier_indexes[&x.0.difficulty])
let mut benchmarks_by_frontier_idx =
HashMap::<usize, Vec<&(&BenchmarkSettings, &i32, &u64, Point)>>::new();
for x in benchmarks.iter() {
benchmarks_by_frontier_idx
.entry(frontier_indexes[&x.3])
.or_default()
.push(x);
}
let challenge_data = active_challenges_block_data.get_mut(challenge_id).unwrap();
let mut player_code_solutions = HashMap::<String, HashMap<String, u64>>::new();
let mut player_solutions = HashMap::<String, u64>::new();
let mut player_discarded_solutions = HashMap::<String, u64>::new();
let mut player_code_nonces = HashMap::<String, HashMap<String, u64>>::new();
let mut player_nonces = HashMap::<String, u64>::new();
for frontier_idx in 0..solutions_by_frontier_idx.len() {
for (settings, &num_solutions, &num_discarded_solutions, &num_nonces) in
solutions_by_frontier_idx[&frontier_idx].iter()
for frontier_idx in 0..benchmarks_by_frontier_idx.len() {
for (settings, _, &num_nonces, difficulty) in
benchmarks_by_frontier_idx[&frontier_idx].iter()
{
let BenchmarkSettings {
player_id,
algorithm_id,
difficulty,
..
} = settings;
*player_code_solutions
*player_code_nonces
.entry(player_id.clone())
.or_default()
.entry(algorithm_id.clone())
.or_default() += num_solutions;
*player_solutions.entry(player_id.clone()).or_default() += num_solutions;
*player_discarded_solutions
.entry(player_id.clone())
.or_default() += num_discarded_solutions;
*player_nonces.entry(player_id.clone()).or_default() += num_nonces as u64;
.or_default() += num_nonces;
*player_nonces.entry(player_id.clone()).or_default() += num_nonces;
challenge_data
.qualifier_difficulties
@@ -204,46 +153,28 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
}
// check if we have enough qualifiers
let player_solution_ratio: HashMap<String, f64> = player_solutions
let player_qualifiers: HashMap<String, u64> = player_nonces
.keys()
.map(|player_id| {
(
player_id.clone(),
(player_solutions[player_id] + player_discarded_solutions[player_id])
as f64
/ player_nonces[player_id] as f64,
)
})
.collect();
let player_qualifiers: HashMap<String, u64> = player_solution_ratio
.keys()
.map(|player_id| {
(
player_id.clone(),
max_qualifiers_by_player[player_id].min(player_solutions[player_id]),
max_qualifiers_by_player[player_id].min(player_nonces[player_id]),
)
})
.collect();
let num_qualifiers = player_qualifiers.values().sum::<u64>();
if num_qualifiers >= challenge_config.difficulty.total_qualifiers_threshold
|| frontier_idx == solutions_by_frontier_idx.len() - 1
|| frontier_idx == benchmarks_by_frontier_idx.len() - 1
{
let mut sum_weighted_solution_ratio = 0.0;
for player_id in player_qualifiers.keys() {
let opow_data = active_opow_block_data.get_mut(player_id).unwrap();
opow_data
.num_qualifiers_by_challenge
.insert(challenge_id.clone(), player_qualifiers[player_id]);
opow_data
.solution_ratio_by_challenge
.insert(challenge_id.clone(), player_solution_ratio[player_id]);
sum_weighted_solution_ratio +=
player_solution_ratio[player_id] * player_qualifiers[player_id] as f64;
if player_qualifiers[player_id] > 0 {
for algorithm_id in player_code_solutions[player_id].keys() {
for algorithm_id in player_code_nonces[player_id].keys() {
if !active_code_ids.contains(algorithm_id) {
continue; // algorithm is banned
}
@@ -252,129 +183,19 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
code_data.num_qualifiers_by_player.insert(
player_id.clone(),
(player_qualifiers[player_id] as f64
* player_code_solutions[player_id][algorithm_id] as f64
/ player_solutions[player_id] as f64)
* player_code_nonces[player_id][algorithm_id] as f64
/ player_nonces[player_id] as f64)
.ceil() as u64,
);
}
}
}
challenge_data.num_qualifiers = num_qualifiers;
challenge_data.average_solution_ratio = if num_qualifiers == 0 {
0.0
} else {
sum_weighted_solution_ratio / num_qualifiers as f64
};
break;
}
}
}
// update frontiers
for challenge_id in active_challenge_ids.iter() {
let challenge_config = &config.challenges[challenge_id];
let challenge_data = active_challenges_block_data.get_mut(challenge_id).unwrap();
let min_frontier = &challenge_config.difficulty.min_frontier;
let min_difficulty = min_frontier.iter().fold(vec![i32::MAX; 2], |mut acc, x| {
acc[0] = acc[0].min(x[0]);
acc[1] = acc[1].min(x[1]);
acc
});
let max_frontier = &challenge_config.difficulty.max_frontier;
let max_difficulty = max_frontier.iter().fold(vec![i32::MIN; 2], |mut acc, x| {
acc[0] = acc[0].max(x[0]);
acc[1] = acc[1].max(x[1]);
acc
});
let points = challenge_data
.qualifier_difficulties
.iter()
.map(|d| d.iter().map(|x| -x).collect()) // mirror the points so easiest difficulties are first
.collect::<Frontier>();
let (base_frontier, scaling_factor, scaled_frontier) = if points.len() == 0 {
let base_frontier: Frontier = min_frontier.clone();
let scaling_factor = 1.0;
let scaled_frontier = base_frontier.clone();
(base_frontier, scaling_factor, scaled_frontier)
} else {
let mut base_frontier = pareto_algorithm(&points, true)
.pop()
.unwrap()
.into_iter()
.map(|d| d.into_iter().map(|x| -x).collect())
.collect::<Frontier>(); // mirror the points back;
base_frontier = extend_frontier(&base_frontier, &min_difficulty, &max_difficulty);
let mut scaling_factor = (challenge_data.num_qualifiers as f64
/ challenge_config.difficulty.total_qualifiers_threshold as f64)
.min(challenge_config.difficulty.max_scaling_factor);
if scaling_factor < 1.0 {
base_frontier = scale_frontier(
&base_frontier,
&min_difficulty,
&max_difficulty,
scaling_factor,
);
base_frontier = extend_frontier(&base_frontier, &min_difficulty, &max_difficulty);
scaling_factor =
(1.0 / scaling_factor).min(challenge_config.difficulty.max_scaling_factor);
}
// find set of points from base_frontier and min_frontier that dominate or equal each other
base_frontier = base_frontier
.iter()
.filter(|p1| {
min_frontier
.iter()
.all(|p2| pareto_compare(p1, p2) != ParetoCompare::BDominatesA)
})
.chain(min_frontier.iter().filter(|p1| {
base_frontier
.iter()
.all(|p2| pareto_compare(p1, p2) != ParetoCompare::BDominatesA)
}))
.filter(|p| p.iter().zip(min_difficulty.iter()).all(|(x1, x2)| x1 >= x2))
.cloned()
.collect::<HashSet<Point>>()
.into_iter()
.collect();
let mut scaled_frontier = scale_frontier(
&base_frontier,
&min_difficulty,
&max_difficulty,
scaling_factor,
);
scaled_frontier = extend_frontier(&scaled_frontier, &min_difficulty, &max_difficulty);
// find set of points from scaled_frontier and max_frontier that are dominated by or equal to each other
scaled_frontier = scaled_frontier
.iter()
.filter(|p1| {
max_frontier
.iter()
.all(|p2| pareto_compare(p1, p2) != ParetoCompare::ADominatesB)
})
.chain(max_frontier.iter().filter(|p1| {
scaled_frontier
.iter()
.all(|p2| pareto_compare(p1, p2) != ParetoCompare::ADominatesB)
}))
.filter(|p| p.iter().zip(max_difficulty.iter()).all(|(x1, x2)| x1 <= x2))
.cloned()
.collect::<HashSet<Point>>()
.into_iter()
.collect();
(base_frontier, scaling_factor, scaled_frontier)
};
challenge_data.base_frontier = base_frontier;
challenge_data.scaled_frontier = scaled_frontier;
challenge_data.scaling_factor = scaling_factor;
}
// update influence
if active_opow_ids.len() == 0 {
return;
@@ -445,7 +266,6 @@ pub(crate) async fn update(cache: &mut AddBlockCache) {
);
2
]);
let num_factors = PreciseNumber::from(factor_weights.len());
let mut weights = Vec::<PreciseNumber>::new();
for player_id in active_opow_ids.iter() {
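
The cutoff above now keys off per-challenge nonce counts instead of solution counts, and during a phase-in window the cutoff computed without the phasing-in challenges is blended with the full one. The closing term of that blend is truncated in the hunk, so the linear interpolation in this sketch is an assumption consistent with phase_in_weight:

// Sketch: caller guarantees phase_in_end > height and phase_in_period > 0,
// as the committed code does before entering this branch.
fn blended_cutoff(
    min_num_nonces: u64,          // min nonce count across all active challenges
    phase_in_min_num_nonces: u64, // min excluding phasing-in challenges
    cutoff_multiplier: f64,
    height: u32,
    phase_in_end: u32,
    phase_in_period: u32,
) -> u64 {
    let cutoff = (min_num_nonces as f64 * cutoff_multiplier).ceil() as u64;
    let phase_in_cutoff =
        (phase_in_min_num_nonces as f64 * cutoff_multiplier).ceil() as u64;
    let w = (phase_in_end - height) as f64 / phase_in_period as f64;
    (phase_in_cutoff as f64 * w + cutoff as f64 * (1.0 - w)).ceil() as u64
}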

View File

@@ -13,7 +13,6 @@ pub async fn add_block<T: Context>(ctx: &T) {
contracts::players::update(&mut cache).await;
contracts::opow::update(&mut cache).await;
contracts::algorithms::update(&mut cache).await;
contracts::challenges::update(&mut cache).await;
contracts::rewards::update(&mut cache).await;
ctx.commit_block_cache(cache).await;
}

View File

@@ -60,12 +60,11 @@ serializable_struct_with_getters! {
serializable_struct_with_getters! {
BenchmarksConfig {
min_num_nonces: u64,
min_num_solutions: u64,
submission_delay_multiplier: f64,
max_samples: usize,
lifespan_period: u32,
min_per_nonce_fee: PreciseNumber,
min_base_fee: PreciseNumber,
per_nonce_fee: PreciseNumber,
base_fee: PreciseNumber,
runtime_config: RuntimeConfig,
}
}
@@ -83,13 +82,8 @@ serializable_struct_with_getters! {
}
serializable_struct_with_getters! {
DifficultyConfig {
parameter_names: Vec<String>,
min_frontier: Frontier,
max_frontier: Frontier,
max_scaling_factor: f64,
size_range: [u32; 2],
total_qualifiers_threshold: u64,
target_solution_rate: u64,
hash_threshold_max_percent_delta: f64,
}
}
serializable_struct_with_getters! {
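
With DifficultyConfig pared down to size_range plus total_qualifiers_threshold, the precommit-side difficulty check earlier in this commit reduces to the bounds test sketched here (standalone types; a String error stands in for anyhow, which is an assumption for self-containment):

// Inclusive [min, max] bounds check, mirroring submit_precommit.
fn validate_size(size: u32, size_range: [u32; 2]) -> Result<(), String> {
    if size < size_range[0] || size > size_range[1] {
        return Err(format!(
            "Invalid size '{}'. Must be in range [{}, {}]",
            size, size_range[0], size_range[1]
        ));
    }
    Ok(())
}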

View File

@@ -10,9 +10,7 @@ serializable_struct_with_getters! {
id: String,
details: BenchmarkDetails,
state: BenchmarkState,
solution_nonces: Option<HashSet<u64>>,
discarded_solution_nonces: Option<HashSet<u64>>,
non_solution_nonces: Option<HashSet<u64>>,
solution_quality: Option<Vec<i32>>,
}
}
serializable_struct_with_getters! {
@@ -150,7 +148,7 @@ serializable_struct_with_getters! {
block_id: String,
challenge_id: String,
algorithm_id: String,
difficulty: Vec<i32>,
size: u32,
}
}
impl BenchmarkSettings {
@@ -160,9 +158,7 @@ impl BenchmarkSettings {
}
serializable_struct_with_getters! {
BenchmarkDetails {
num_solutions: u64,
num_discarded_solutions: u64,
num_non_solutions: u64,
average_solution_quality: i32,
merkle_root: MerkleHash,
sampled_nonces: HashSet<u64>,
}
@@ -293,13 +289,6 @@ serializable_struct_with_getters! {
ChallengeBlockData {
num_qualifiers: u64,
qualifier_difficulties: HashSet<Point>,
average_solution_ratio: f64,
base_frontier: Frontier,
scaled_frontier: Frontier,
scaling_factor: f64,
base_fee: PreciseNumber,
per_nonce_fee: PreciseNumber,
hash_threshold: MerkleHash,
}
}
@@ -376,7 +365,6 @@ serializable_struct_with_getters! {
delegators: HashSet<String>,
reward_share: PreciseNumber,
coinbase: HashMap<String, PreciseNumber>,
solution_ratio_by_challenge: HashMap<String, f64>,
imbalance: PreciseNumber,
influence: PreciseNumber,
reward: PreciseNumber,
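
Since BenchmarkSettings now carries a scalar size instead of a difficulty vector, the Pareto points ranked in opow::update are built from (size, average_solution_quality), per the earlier hunk. A one-line sketch of that mapping:

type Point = Vec<i32>;

// The 2-D point a benchmark contributes to the qualifier frontier.
fn qualifier_point(size: u32, average_solution_quality: i32) -> Point {
    vec![size as i32, average_solution_quality]
}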

View File

@@ -22,7 +22,7 @@ fn test_calc_seed() {
block_id: "some_block".to_string(),
challenge_id: "some_challenge".to_string(),
algorithm_id: "some_algorithm".to_string(),
difficulty: vec![1, 2, 3],
size: 50,
};
let rand_hash = "random_hash".to_string();