diff --git a/tig-protocol/Cargo.toml b/tig-protocol/Cargo.toml index 66cc90d..41325c0 100644 --- a/tig-protocol/Cargo.toml +++ b/tig-protocol/Cargo.toml @@ -10,6 +10,7 @@ edition.workspace = true [dependencies] anyhow = { version = "1.0.81" } logging_timer = "1.1.1" +hex = "0.4.3" rand = "0.8.4" serde = { version = "1.0.196", features = ["derive"] } serde_json = { version = "1.0.113" } diff --git a/tig-protocol/src/add_block.rs b/tig-protocol/src/add_block.rs index 10d409c..bad0d84 100644 --- a/tig-protocol/src/add_block.rs +++ b/tig-protocol/src/add_block.rs @@ -397,7 +397,7 @@ async fn setup_cache( #[time] async fn create_block(ctx: &T) -> (Block, AddBlockCache) { let latest_block = ctx - .get_block(BlockFilter::Latest, false) + .get_block(BlockFilter::LastConfirmed, false) .await .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) .expect("No latest block found"); @@ -679,684 +679,6 @@ async fn update_deposits(ctx: &T, block: &Block, cache: &mut AddBloc } } -#[time] -async fn update_cutoffs(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - let mut phase_in_challenge_ids: HashSet = - cache.active_challenges.keys().cloned().collect(); - for algorithm in cache.active_algorithms.values() { - if algorithm - .state() - .round_pushed - .is_some_and(|r| r + 1 <= block.details.round) - { - phase_in_challenge_ids.remove(&algorithm.details.challenge_id); - } - } - - let mut num_solutions_by_player_by_challenge = HashMap::>::new(); - for (settings, num_solutions) in cache.active_solutions.values() { - *num_solutions_by_player_by_challenge - .entry(settings.player_id.clone()) - .or_default() - .entry(settings.challenge_id.clone()) - .or_default() += *num_solutions; - } - - for (player_id, num_solutions_by_challenge) in num_solutions_by_player_by_challenge.iter() { - let data = cache - .active_players - .get_mut(player_id) - .unwrap() - .block_data - .as_mut() - .unwrap(); - let phase_in_start = (block.details.round - 1) * 
config.rounds.blocks_per_round; - let phase_in_period = config.qualifiers.cutoff_phase_in_period.unwrap(); - let phase_in_end = phase_in_start + phase_in_period; - let min_cutoff = config.qualifiers.min_cutoff.clone().unwrap(); - let min_num_solutions = cache - .active_challenges - .keys() - .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone()) - .min() - .unwrap(); - let mut cutoff = min_cutoff - .max((min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil() as u32); - if phase_in_challenge_ids.len() > 0 && phase_in_end > block.details.height { - let phase_in_min_num_solutions = cache - .active_challenges - .keys() - .filter(|&id| !phase_in_challenge_ids.contains(id)) - .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone()) - .min() - .unwrap(); - let phase_in_cutoff = min_cutoff.max( - (phase_in_min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil() - as u32, - ); - let phase_in_weight = - (phase_in_end - block.details.height) as f64 / phase_in_period as f64; - cutoff = (phase_in_cutoff as f64 * phase_in_weight - + cutoff as f64 * (1.0 - phase_in_weight)) as u32; - } - data.cutoff = Some(cutoff); - } -} - -#[time] -async fn update_solution_signature_thresholds(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - let confirmed_proof_ids = &block.data().confirmed_proof_ids; - let mut num_solutions_by_player_by_challenge = HashMap::>::new(); - let mut new_solutions_by_player_by_challenge = HashMap::>::new(); - for (benchmark_id, (settings, num_solutions)) in cache.active_solutions.iter() { - *num_solutions_by_player_by_challenge - .entry(settings.player_id.clone()) - .or_default() - .entry(settings.challenge_id.clone()) - .or_default() += *num_solutions; - if confirmed_proof_ids.contains(benchmark_id) { - *new_solutions_by_player_by_challenge - .entry(settings.player_id.clone()) - .or_default() - .entry(settings.challenge_id.clone()) - .or_default() += *num_solutions; - } - } - - let 
mut solutions_rate_by_challenge = HashMap::::new(); - for (player_id, new_solutions_by_challenge) in new_solutions_by_player_by_challenge.iter() { - let cutoff = *cache - .active_players - .get(player_id) - .unwrap() - .block_data() - .cutoff(); - for (challenge_id, new_solutions) in new_solutions_by_challenge.iter() { - let num_solutions = - num_solutions_by_player_by_challenge[player_id][challenge_id].clone(); - *solutions_rate_by_challenge - .entry(challenge_id.clone()) - .or_default() += - new_solutions.saturating_sub(num_solutions - cutoff.min(num_solutions)); - } - } - - for challenge in cache.active_challenges.values_mut() { - let max_threshold = u32::MAX as f64; - let current_threshold = match &cache.prev_challenges.get(&challenge.id).unwrap().block_data - { - Some(data) => *data.solution_signature_threshold() as f64, - None => max_threshold, - }; - let current_rate = *solutions_rate_by_challenge.get(&challenge.id).unwrap_or(&0) as f64; - - let equilibrium_rate = config.qualifiers.total_qualifiers_threshold as f64 - / config.benchmark_submissions.lifespan_period as f64; - let target_rate = config.solution_signature.equilibrium_rate_multiplier * equilibrium_rate; - let target_threshold = if current_rate == 0.0 { - max_threshold - } else { - (current_threshold * target_rate / current_rate).clamp(0.0, max_threshold) - }; - - let threshold_decay = config.solution_signature.threshold_decay.unwrap_or(0.99); - let block_data = challenge.block_data.as_mut().unwrap(); - block_data.solution_signature_threshold = Some( - (current_threshold * threshold_decay + target_threshold * (1.0 - threshold_decay)) - .clamp(0.0, max_threshold) as u32, - ); - } -} - -#[time] -async fn update_fees(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - let PrecommitSubmissionsConfig { - min_base_fee, - min_per_nonce_fee, - target_num_precommits, - max_fee_percentage_delta, - .. 
- } = config.precommit_submissions(); - let num_precommits_by_challenge = cache.mempool_precommits.iter().fold( - HashMap::::new(), - |mut map, precommit| { - *map.entry(precommit.settings.challenge_id.clone()) - .or_default() += 1; - map - }, - ); - let target_num_precommits = PreciseNumber::from(*target_num_precommits); - let max_fee_percent_delta = PreciseNumber::from_f64(*max_fee_percentage_delta); - let one = PreciseNumber::from(1); - let zero = PreciseNumber::from(0); - for challenge in cache.active_challenges.values_mut() { - let num_precommits = PreciseNumber::from( - num_precommits_by_challenge - .get(&challenge.id) - .unwrap_or(&0) - .clone(), - ); - let mut percent_delta = num_precommits / target_num_precommits; - if num_precommits >= target_num_precommits { - percent_delta = percent_delta - one; - } else { - percent_delta = one - percent_delta; - } - if percent_delta > max_fee_percent_delta { - percent_delta = max_fee_percent_delta; - } - let current_base_fee = - match &cache.prev_challenges.get(&challenge.id).unwrap().block_data { - Some(data) => data.base_fee.as_ref().unwrap_or(&zero), - None => &zero, - } - .clone(); - let mut base_fee = if num_precommits >= target_num_precommits { - current_base_fee * (one + percent_delta) - } else { - current_base_fee * (one - percent_delta) - }; - if base_fee < *min_base_fee { - base_fee = *min_base_fee; - } - let block_data = challenge.block_data.as_mut().unwrap(); - block_data.base_fee = Some(base_fee); - block_data.per_nonce_fee = Some(min_per_nonce_fee.clone()); - } -} - -fn find_smallest_range_dimension(points: &Frontier) -> usize { - (0..2) - .min_by_key(|&d| { - let (min, max) = points - .iter() - .map(|p| p[d]) - .fold((i32::MAX, i32::MIN), |(min, max), val| { - (min.min(val), max.max(val)) - }); - max - min - }) - .unwrap() -} - -fn pareto_algorithm(points: Frontier, only_one: bool) -> Vec { - if points.is_empty() { - return Vec::new(); - } - let dimension = find_smallest_range_dimension(&points); - let 
sort_dimension = 1 - dimension; - - let mut buckets: HashMap> = HashMap::new(); - for point in points { - buckets.entry(point[dimension]).or_default().push(point); - } - for (_, group) in buckets.iter_mut() { - // sort descending - group.sort_unstable_by(|a, b| b[sort_dimension].cmp(&a[sort_dimension])); - } - let mut result = Vec::new(); - while !buckets.is_empty() { - let points: HashSet = buckets.values().map(|group| group[0].clone()).collect(); - let frontier = points.pareto_frontier(); - for point in frontier.iter() { - let bucket = buckets.get_mut(&point[dimension]).unwrap(); - bucket.remove(0); - if bucket.is_empty() { - buckets.remove(&point[dimension]); - } - } - result.push(frontier); - if only_one { - break; - } - } - result -} - -#[time] -async fn update_qualifiers(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - let mut solutions_by_challenge = HashMap::>::new(); - for (settings, num_solutions) in cache.active_solutions.values() { - solutions_by_challenge - .entry(settings.challenge_id.clone()) - .or_default() - .push((settings, num_solutions)); - } - - let mut max_qualifiers_by_player = HashMap::::new(); - for challenge in cache.active_challenges.values_mut() { - let block_data = challenge.block_data.as_mut().unwrap(); - block_data.num_qualifiers = Some(0); - block_data.qualifier_difficulties = Some(HashSet::new()); - } - for algorithm in cache.active_algorithms.values_mut() { - let block_data = algorithm.block_data.as_mut().unwrap(); - block_data.num_qualifiers_by_player = Some(HashMap::new()); - } - for player in cache.active_players.values_mut() { - let block_data = player.block_data.as_mut().unwrap(); - max_qualifiers_by_player.insert(player.id.clone(), *block_data.cutoff()); - block_data.num_qualifiers_by_challenge = Some(HashMap::new()); - } - - for (challenge_id, challenge) in cache.active_challenges.iter_mut() { - if !solutions_by_challenge.contains_key(challenge_id) { - continue; - } - let solutions = 
solutions_by_challenge.get_mut(challenge_id).unwrap(); - let points = solutions - .iter() - .map(|(settings, _)| settings.difficulty.clone()) - .collect::(); - let mut frontier_indexes = HashMap::::new(); - for (frontier_index, frontier) in pareto_algorithm(points, false).into_iter().enumerate() { - for point in frontier { - frontier_indexes.insert(point, frontier_index); - } - } - solutions.sort_by(|(a_settings, _), (b_settings, _)| { - let a_index = frontier_indexes[&a_settings.difficulty]; - let b_index = frontier_indexes[&b_settings.difficulty]; - a_index.cmp(&b_index) - }); - - let mut max_qualifiers_by_player = max_qualifiers_by_player.clone(); - let mut curr_frontier_index = 0; - let challenge_data = challenge.block_data.as_mut().unwrap(); - for (settings, &num_solutions) in solutions.iter() { - let BenchmarkSettings { - player_id, - algorithm_id, - challenge_id, - difficulty, - .. - } = settings; - - if curr_frontier_index != frontier_indexes[difficulty] - && *challenge_data.num_qualifiers() > config.qualifiers.total_qualifiers_threshold - { - break; - } - let difficulty_parameters = &config.difficulty.parameters[challenge_id]; - let min_difficulty = difficulty_parameters.min_difficulty(); - let max_difficulty = difficulty_parameters.max_difficulty(); - if (0..difficulty.len()) - .into_iter() - .any(|i| difficulty[i] < min_difficulty[i] || difficulty[i] > max_difficulty[i]) - { - continue; - } - curr_frontier_index = frontier_indexes[difficulty]; - let player_data = cache - .active_players - .get_mut(player_id) - .unwrap() - .block_data - .as_mut() - .unwrap(); - let algorithm_data = cache - .active_algorithms - .get_mut(algorithm_id) - .unwrap() - .block_data - .as_mut() - .unwrap(); - - let max_qualifiers = max_qualifiers_by_player.get(player_id).unwrap().clone(); - let num_qualifiers = num_solutions.min(max_qualifiers); - max_qualifiers_by_player.insert(player_id.clone(), max_qualifiers - num_qualifiers); - - if num_qualifiers > 0 { - *player_data - 
.num_qualifiers_by_challenge - .as_mut() - .unwrap() - .entry(challenge_id.clone()) - .or_default() += num_qualifiers; - *algorithm_data - .num_qualifiers_by_player - .as_mut() - .unwrap() - .entry(player_id.clone()) - .or_default() += num_qualifiers; - *challenge_data.num_qualifiers.as_mut().unwrap() += num_qualifiers; - } - challenge_data - .qualifier_difficulties - .as_mut() - .unwrap() - .insert(difficulty.clone()); - } - } -} - -#[time] -async fn update_frontiers(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - for challenge in cache.active_challenges.values_mut() { - let block_data = challenge.block_data.as_mut().unwrap(); - - let difficulty_parameters = &config.difficulty.parameters[&challenge.id]; - let min_difficulty = difficulty_parameters.min_difficulty(); - let max_difficulty = difficulty_parameters.max_difficulty(); - - let points = block_data - .qualifier_difficulties() - .iter() - .map(|d| d.iter().map(|x| -x).collect()) // mirror the points so easiest difficulties are first - .collect::(); - let (base_frontier, scaling_factor, scaled_frontier) = if points.len() == 0 { - let base_frontier: Frontier = vec![min_difficulty.clone()].into_iter().collect(); - let scaling_factor = 0.0; - let scaled_frontier = base_frontier.clone(); - (base_frontier, scaling_factor, scaled_frontier) - } else { - let base_frontier = pareto_algorithm(points, true) - .pop() - .unwrap() - .into_iter() - .map(|d| d.into_iter().map(|x| -x).collect()) - .collect::() // mirror the points back; - .extend(&min_difficulty, &max_difficulty); - let scaling_factor = (*block_data.num_qualifiers() as f64 - / config.qualifiers.total_qualifiers_threshold as f64) - .min(config.difficulty.max_scaling_factor); - let scaled_frontier = base_frontier - .scale(&min_difficulty, &max_difficulty, scaling_factor) - .extend(&min_difficulty, &max_difficulty); - (base_frontier, scaling_factor, scaled_frontier) - }; - - block_data.base_frontier = Some(base_frontier); - 
block_data.scaled_frontier = Some(scaled_frontier); - block_data.scaling_factor = Some(scaling_factor); - } -} - -#[time] -async fn update_influence(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - let active_player_ids = &block.data().active_player_ids; - if active_player_ids.len() == 0 { - return; - } - - let mut num_qualifiers_by_challenge = HashMap::::new(); - for challenge in cache.active_challenges.values() { - num_qualifiers_by_challenge.insert( - challenge.id.clone(), - *challenge.block_data().num_qualifiers(), - ); - } - - let total_deposit = cache - .active_players - .values() - .map(|p| p.block_data().deposit.clone().unwrap()) - .sum::(); - - let zero = PreciseNumber::from(0); - let one = PreciseNumber::from(1); - let imbalance_multiplier = - PreciseNumber::from_f64(config.optimisable_proof_of_work.imbalance_multiplier); - let num_challenges = PreciseNumber::from(cache.active_challenges.len()); - - let mut weights = Vec::::new(); - for player_id in active_player_ids.iter() { - let data = cache - .active_players - .get_mut(player_id) - .unwrap() - .block_data - .as_mut() - .unwrap(); - - let mut percent_qualifiers = Vec::::new(); - for challenge_id in cache.active_challenges.keys() { - let num_qualifiers = num_qualifiers_by_challenge[challenge_id]; - let num_qualifiers_by_player = *data - .num_qualifiers_by_challenge() - .get(challenge_id) - .unwrap_or(&0); - - percent_qualifiers.push(if num_qualifiers_by_player == 0 { - PreciseNumber::from(0) - } else { - PreciseNumber::from(num_qualifiers_by_player) / PreciseNumber::from(num_qualifiers) - }); - } - let OptimisableProofOfWorkConfig { - avg_percent_qualifiers_multiplier, - enable_proof_of_deposit, - .. 
- } = &config.optimisable_proof_of_work; - if enable_proof_of_deposit.is_some_and(|x| x) { - let max_percent_rolling_deposit = - PreciseNumber::from_f64(avg_percent_qualifiers_multiplier.clone().unwrap()) - * percent_qualifiers.arithmetic_mean(); - let percent_rolling_deposit = if total_deposit == zero { - zero.clone() - } else { - data.deposit.clone().unwrap() / total_deposit - }; - let qualifying_percent_rolling_deposit = - if percent_rolling_deposit > max_percent_rolling_deposit { - max_percent_rolling_deposit.clone() - } else { - percent_rolling_deposit - }; - percent_qualifiers.push(qualifying_percent_rolling_deposit.clone()); - data.qualifying_percent_rolling_deposit = Some(qualifying_percent_rolling_deposit); - } - - let mean = percent_qualifiers.arithmetic_mean(); - let variance = percent_qualifiers.variance(); - let cv_sqr = if mean == zero { - zero.clone() - } else { - variance / (mean * mean) - }; - - let imbalance = cv_sqr / (num_challenges - one); - let imbalance_penalty = - one - PreciseNumber::approx_inv_exp(imbalance_multiplier * imbalance); - - weights.push(mean * (one - imbalance_penalty)); - - data.imbalance = Some(imbalance); - data.imbalance_penalty = Some(imbalance_penalty); - } - - let influences = weights.normalise(); - for (player_id, &influence) in active_player_ids.iter().zip(influences.iter()) { - let data = cache - .active_players - .get_mut(player_id) - .unwrap() - .block_data - .as_mut() - .unwrap(); - data.influence = Some(influence); - } -} - -#[time] -async fn update_adoption(cache: &mut AddBlockCache) { - let mut algorithms_by_challenge = HashMap::>::new(); - for algorithm in cache.active_algorithms.values_mut() { - algorithms_by_challenge - .entry(algorithm.details.challenge_id.clone()) - .or_default() - .push(algorithm); - } - - for challenge_id in cache.active_challenges.keys() { - let algorithms = algorithms_by_challenge.get_mut(challenge_id); - if algorithms.is_none() { - continue; - } - let algorithms = algorithms.unwrap(); 
- - let mut weights = Vec::::new(); - for algorithm in algorithms.iter() { - let mut weight = PreciseNumber::from(0); - for (player_id, &num_qualifiers) in - algorithm.block_data().num_qualifiers_by_player().iter() - { - let num_qualifiers = PreciseNumber::from(num_qualifiers); - let player_data = cache.active_players.get(player_id).unwrap().block_data(); - let influence = player_data.influence.unwrap(); - let player_num_qualifiers = PreciseNumber::from( - *player_data - .num_qualifiers_by_challenge - .as_ref() - .unwrap() - .get(challenge_id) - .unwrap(), - ); - - weight = weight + influence * num_qualifiers / player_num_qualifiers; - } - weights.push(weight); - } - - let adoption = weights.normalise(); - for (algorithm, adoption) in algorithms.iter_mut().zip(adoption) { - algorithm.block_data.as_mut().unwrap().adoption = Some(adoption); - } - } -} - -#[time] -async fn update_innovator_rewards(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - let adoption_threshold = - PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold); - let zero = PreciseNumber::from(0); - let mut eligible_algorithms_by_challenge = HashMap::>::new(); - for algorithm in cache.active_algorithms.values_mut() { - let is_merged = algorithm.state().round_merged.is_some(); - let is_banned = algorithm.state().banned.clone(); - let data = algorithm.block_data.as_mut().unwrap(); - data.reward = Some(zero.clone()); - - if !is_banned - && (*data.adoption() >= adoption_threshold || (is_merged && *data.adoption() > zero)) - { - eligible_algorithms_by_challenge - .entry(algorithm.details.challenge_id.clone()) - .or_default() - .push(algorithm); - } - } - if eligible_algorithms_by_challenge.len() == 0 { - return; - } - - let reward_pool_per_challenge = PreciseNumber::from_f64(get_block_reward(block)) - * PreciseNumber::from_f64(config.rewards.distribution.optimisations) - / PreciseNumber::from(eligible_algorithms_by_challenge.len()); - - let zero = 
PreciseNumber::from(0); - for algorithms in eligible_algorithms_by_challenge.values_mut() { - let mut total_adoption = zero.clone(); - for algorithm in algorithms.iter() { - total_adoption = total_adoption + algorithm.block_data().adoption(); - } - - for algorithm in algorithms.iter_mut() { - let data = algorithm.block_data.as_mut().unwrap(); - let adoption = *data.adoption(); - data.reward = Some(reward_pool_per_challenge * adoption / total_adoption); - } - } -} - -#[time] -async fn update_benchmarker_rewards(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - let reward_pool = PreciseNumber::from_f64(get_block_reward(block)) - * PreciseNumber::from_f64(config.rewards.distribution.benchmarkers); - - for player in cache.active_players.values_mut() { - let data = player.block_data.as_mut().unwrap(); - let influence = *data.influence(); - data.reward = Some(influence * reward_pool); - } -} - -#[time] -async fn update_merge_points(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - let adoption_threshold = - PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold); - for algorithm in cache.active_algorithms.values_mut() { - let is_merged = algorithm.state().round_merged.is_some(); - let data = algorithm.block_data.as_mut().unwrap(); - - // first block of the round - let prev_merge_points = if block.details.height % config.rounds.blocks_per_round == 0 { - 0 - } else { - match &cache.prev_algorithms.get(&algorithm.id).unwrap().block_data { - Some(data) => *data.merge_points(), - None => 0, - } - }; - data.merge_points = Some(if is_merged || *data.adoption() < adoption_threshold { - prev_merge_points - } else { - prev_merge_points + 1 - }); - } -} - -#[time] -async fn update_merges(block: &Block, cache: &mut AddBlockCache) { - let config = block.config(); - - // last block of the round - if (block.details.height + 1) % config.rounds.blocks_per_round != 0 { - return; - } - - let mut 
algorithm_to_merge_by_challenge = HashMap::::new(); - for algorithm in cache.active_algorithms.values_mut() { - let challenge_id = algorithm.details.challenge_id.clone(); - let data = algorithm.block_data(); - - if algorithm.state().round_merged.is_some() - || *data.merge_points() < config.algorithm_submissions.merge_points_threshold - { - continue; - } - if !algorithm_to_merge_by_challenge.contains_key(&challenge_id) - || algorithm_to_merge_by_challenge[&challenge_id] - .block_data() - .merge_points - < data.merge_points - { - algorithm_to_merge_by_challenge.insert(challenge_id, algorithm); - } - } - - let round_merged = block.details.round + 1; - for algorithm in algorithm_to_merge_by_challenge.values_mut() { - let state = algorithm.state.as_mut().unwrap(); - state.round_merged = Some(round_merged); - } -} - #[time] async fn commit_changes(ctx: &T, block: &Block, cache: &mut AddBlockCache) { for precommit in cache.mempool_precommits.drain(..) { diff --git a/tig-protocol/src/context.rs b/tig-protocol/src/context.rs index a43f7b8..e6ded50 100644 --- a/tig-protocol/src/context.rs +++ b/tig-protocol/src/context.rs @@ -14,7 +14,6 @@ pub enum SubmissionType { #[derive(Debug, Clone, PartialEq)] pub enum AlgorithmsFilter { - Id(String), Name(String), TxHash(String), Mempool, @@ -22,29 +21,25 @@ pub enum AlgorithmsFilter { } #[derive(Debug, Clone, PartialEq)] pub enum BenchmarksFilter { - Id(String), - Mempool { from_block_started: u32 }, - Confirmed { from_block_started: u32 }, + Mempool, + Confirmed, } #[derive(Debug, Clone, PartialEq)] pub enum BlockFilter { - Latest, + Current, + LastConfirmed, Height(u32), - Id(String), Round(u32), } #[derive(Debug, Clone, PartialEq)] pub enum ChallengesFilter { - Id(String), - Name(String), Mempool, Confirmed, } #[derive(Debug, Clone, PartialEq)] pub enum FraudsFilter { - BenchmarkId(String), - Mempool { from_block_started: u32 }, - Confirmed { from_block_started: u32 }, + Mempool, + Confirmed, } #[derive(Debug, Clone, PartialEq)] 
pub enum PlayersFilter { @@ -55,72 +50,69 @@ pub enum PlayersFilter { } #[derive(Debug, Clone, PartialEq)] pub enum PrecommitsFilter { - BenchmarkId(String), Settings(BenchmarkSettings), - Mempool { from_block_started: u32 }, - Confirmed { from_block_started: u32 }, + Mempool, + Confirmed, } #[derive(Debug, Clone, PartialEq)] pub enum ProofsFilter { - BenchmarkId(String), - Mempool { from_block_started: u32 }, - Confirmed { from_block_started: u32 }, + Mempool, + Confirmed, } #[derive(Debug, Clone, PartialEq)] pub enum TopUpsFilter { - Id(String), PlayerId(String), Mempool, Confirmed, } #[derive(Debug, Clone, PartialEq)] pub enum WasmsFilter { - AlgorithmId(String), Mempool, Confirmed, } #[allow(async_fn_in_trait)] pub trait Context { - async fn get_algorithms( + async fn get_algorithm_ids(&self, filter: AlgorithmsFilter) -> Vec; + async fn get_algorithm_state(&self, algorithm_id: &String) -> Option; + async fn get_benchmark_ids(&self, filter: BenchmarksFilter) -> Vec; + async fn get_benchmark_details(&self, benchmark_id: &String) -> Option; + async fn get_benchmark_state(&self, benchmark_id: &String) -> Option; + async fn confirm_benchmark( &self, - filter: AlgorithmsFilter, - block_data: Option, - include_data: bool, - ) -> ContextResult>; - async fn get_benchmarks( + benchmark_id: String, + details: BenchmarkDetails, + solution_nonces: HashSet, + ) -> ContextResult<()>; + async fn get_block_id(&self, filter: BlockFilter) -> Option; + async fn get_block_details(&self, block_id: &String) -> Option; + async fn get_challenge_ids(&self, filter: ChallengesFilter) -> Vec; + async fn get_challenge_state(&self, challenge_id: &String) -> Option; + async fn get_challenge_block_data( &self, - filter: BenchmarksFilter, - include_data: bool, - ) -> ContextResult>; - async fn get_block( + challenge_id: &String, + block_id: &String, + ) -> Option; + async fn get_config(&self) -> ProtocolConfig; + async fn get_fraud_ids(&self, filter: FraudsFilter) -> Vec; + async fn 
get_player_ids(&self, filter: PlayersFilter) -> Vec; + async fn get_player_state(&self, player_id: &String) -> Option; + async fn get_player_block_data( &self, - filter: BlockFilter, - include_data: bool, - ) -> ContextResult>; - async fn get_challenges( + player_id: &String, + block_id: &String, + ) -> Option; + async fn get_precommit_ids(&self, filter: PrecommitsFilter) -> Vec; + async fn get_precommit_settings(&self, benchmark_id: &String) -> Option; + async fn get_precommit_details(&self, benchmark_id: &String) -> Option; + async fn confirm_precommit( &self, - filter: ChallengesFilter, - block_data: Option, - ) -> ContextResult>; - async fn get_config(&self) -> ContextResult; - async fn get_frauds( - &self, - filter: FraudsFilter, - include_data: bool, - ) -> ContextResult>; - async fn get_players( - &self, - filter: PlayersFilter, - block_data: Option, - ) -> ContextResult>; - async fn get_precommits(&self, filter: PrecommitsFilter) -> ContextResult>; - async fn get_proofs( - &self, - filter: ProofsFilter, - include_data: bool, - ) -> ContextResult>; - async fn get_topups(&self, filter: TopUpsFilter) -> ContextResult>; - async fn get_wasms(&self, filter: WasmsFilter) -> ContextResult>; + settings: BenchmarkSettings, + details: PrecommitDetails, + ) -> ContextResult; + async fn get_proofs_ids(&self, filter: ProofsFilter) -> Vec; + async fn get_proof_state(&self, benchmark_id: &String) -> Option; + async fn get_topup_ids(&self, filter: TopUpsFilter) -> Vec; + async fn get_wasm_ids(&self, filter: WasmsFilter) -> Vec; async fn verify_solution( &self, settings: &BenchmarkSettings, diff --git a/tig-protocol/src/contracts/algorithms.rs b/tig-protocol/src/contracts/algorithms.rs new file mode 100644 index 0000000..f180c57 --- /dev/null +++ b/tig-protocol/src/contracts/algorithms.rs @@ -0,0 +1,209 @@ +use crate::{context::*, error::*}; +use logging_timer::time; +use std::collections::HashSet; +use tig_structs::core::*; +use tig_utils::*; + +#[time] +pub(crate) async fn 
submit_algorithm( + ctx: &T, + player_id: String, + algorithm_name: String, + challenge_id: String, + breakthrough_id: Option, + r#type: AlgorithmType, +) -> ProtocolResult { + let config = ctx.get_config().await; + let curr_block_id = ctx.get_block_id(BlockFilter::Current).await.unwrap(); + let curr_block_details = ctx.get_block_details(&curr_block_id).await.unwrap(); + if !ctx + .get_challenge_state(&challenge_id) + .await + .is_some_and(|s| s.round_active <= curr_block_details.round) + { + return Err(anyhow!("Invalid challenge '{}'", challenge_id)); + } + if let Some(breakthrough_id) = breakthrough_id { + if ctx.get_breakthrough_state(&breakthrough_id).await.is_none() { + return Err(anyhow!("Invalid breakthrough '{}'", breakthrough_id)); + } + } + + if !ctx + .get_player_state(&player_id) + .await + .is_some_and(|s| s.available_fee_balance >= config.algorithms.submission_fee) + { + return Err(anyhow!("Insufficient balance")); + } + + let algorithm_id = ctx + .confirm_algorithm(AlgorithmDetails { + name: algorithm_name, + challenge_id, + breakthrough_id, + r#type, + player_id, + fee_paid: config.algorithms.submission_fee, + }) + .await?; + Ok(algorithm_id) +} + +#[time] +pub(crate) async fn submit_binary( + ctx: &T, + player_id: String, + algorithm_id: String, + compile_success: bool, + download_url: Option, +) -> ProtocolResult { + Ok(algorithm_id) +} + +#[time] +pub(crate) async fn submit_breakthrough( + ctx: &T, + player_id: String, + breakthrough_name: String, +) -> ProtocolResult { + // check player_state has sufficient fee balance + // check name + // confirm breakthrough + + Ok(algorithm_id) +} + +/* + add_block.update_votes + update vote tallies for each breakthrough (only consider player_block_data.deposit_by_round where round > min_lock_period_to_vote) + + add_block.update_adoption + breakthrough adoption = sum(algorithm.adoption where algorithm.breakthrough_id == breakthrough.id) + + add_block.update_merge_points + if adoption < threshold or not merged: 
+ continue + if not merged: + add merge point + eligible to earn rewards (pro-rata with adoption) + need to update and track academic_fund_address.. + + add_block.update_merges + for each breakthrough where curr_round + 1 == breakthrough.round_pushed + vote_period_rounds + min_percent_yes_votes < sum(yes_votes) / sum(yes_votes + no_votes) + set breakthrough_state.round_active + + for each breakthrough where merge_points_threshold < merge_points + set breakthrough_state.round_merged.. +*/ + +#[time] +async fn update_adoption(cache: &mut AddBlockCache) { + let mut algorithms_by_challenge = HashMap::>::new(); + for algorithm in cache.active_algorithms.values_mut() { + algorithms_by_challenge + .entry(algorithm.details.challenge_id.clone()) + .or_default() + .push(algorithm); + } + + for challenge_id in cache.active_challenges.keys() { + let algorithms = algorithms_by_challenge.get_mut(challenge_id); + if algorithms.is_none() { + continue; + } + let algorithms = algorithms.unwrap(); + + let mut weights = Vec::::new(); + for algorithm in algorithms.iter() { + let mut weight = PreciseNumber::from(0); + for (player_id, &num_qualifiers) in + algorithm.block_data().num_qualifiers_by_player().iter() + { + let num_qualifiers = PreciseNumber::from(num_qualifiers); + let player_data = cache.active_players.get(player_id).unwrap().block_data(); + let influence = player_data.influence.unwrap(); + let player_num_qualifiers = PreciseNumber::from( + *player_data + .num_qualifiers_by_challenge + .as_ref() + .unwrap() + .get(challenge_id) + .unwrap(), + ); + + weight = weight + influence * num_qualifiers / player_num_qualifiers; + } + weights.push(weight); + } + + let adoption = weights.normalise(); + for (algorithm, adoption) in algorithms.iter_mut().zip(adoption) { + algorithm.block_data.as_mut().unwrap().adoption = Some(adoption); + } + } +} + +#[time] +async fn update_merge_points(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + let adoption_threshold 
= + PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold); + for algorithm in cache.active_algorithms.values_mut() { + let is_merged = algorithm.state().round_merged.is_some(); + let data = algorithm.block_data.as_mut().unwrap(); + + // first block of the round + let prev_merge_points = if block.details.height % config.rounds.blocks_per_round == 0 { + 0 + } else { + match &cache.prev_algorithms.get(&algorithm.id).unwrap().block_data { + Some(data) => *data.merge_points(), + None => 0, + } + }; + data.merge_points = Some(if is_merged || *data.adoption() < adoption_threshold { + prev_merge_points + } else { + prev_merge_points + 1 + }); + } +} + +#[time] +async fn update_merges(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + // last block of the round + if (block.details.height + 1) % config.rounds.blocks_per_round != 0 { + return; + } + + let mut algorithm_to_merge_by_challenge = HashMap::::new(); + for algorithm in cache.active_algorithms.values_mut() { + let challenge_id = algorithm.details.challenge_id.clone(); + let data = algorithm.block_data(); + + if algorithm.state().round_merged.is_some() + || *data.merge_points() < config.algorithm_submissions.merge_points_threshold + { + continue; + } + if !algorithm_to_merge_by_challenge.contains_key(&challenge_id) + || algorithm_to_merge_by_challenge[&challenge_id] + .block_data() + .merge_points + < data.merge_points + { + algorithm_to_merge_by_challenge.insert(challenge_id, algorithm); + } + } + + let round_merged = block.details.round + 1; + for algorithm in algorithm_to_merge_by_challenge.values_mut() { + let state = algorithm.state.as_mut().unwrap(); + state.round_merged = Some(round_merged); + } +} diff --git a/tig-protocol/src/contracts/benchmarks.rs b/tig-protocol/src/contracts/benchmarks.rs new file mode 100644 index 0000000..6f48a72 --- /dev/null +++ b/tig-protocol/src/contracts/benchmarks.rs @@ -0,0 +1,283 @@ +use crate::{context::*, error::*}; +use 
anyhow::{anyhow, Result}; +use logging_timer::time; +use rand::{seq::IteratorRandom, thread_rng, Rng}; +use std::collections::HashSet; +use tig_structs::core::*; +use tig_utils::*; + +#[time] +pub(crate) async fn submit_precommit( + ctx: &T, + player_id: String, + settings: BenchmarkSettings, + num_nonces: u32, +) -> Result { + if player_id != settings.player_id { + return Err(anyhow!("Invalid settings.player_id. Must be {}", player_id)); + } + + if num_nonces == 0 { + return Err(anyhow!("Invalid num_nonces. Must be greater than 0")); + } + + let config = ctx.get_config().await; + + let latest_block_id = ctx.get_block_id(BlockFilter::LastConfirmed).await.unwrap(); + if latest_block_id != settings.block_id { + return Err(anyhow!("Invalid block_id. Must be latest block")); + } + let latest_block_details = ctx.get_block_details(&latest_block_id).await.unwrap(); + + // verify challenge is active + if !ctx + .get_challenge_state(&settings.challenge_id) + .await + .is_some_and(|s| s.round_active <= latest_block_details.round) + { + return Err(anyhow!("Invalid challenge '{}'", settings.challenge_id)); + } + + // verify algorithm is active + if !ctx + .get_algorithm_state(&settings.algorithm_id) + .await + .is_some_and(|s| { + !s.banned + && s.round_active + .is_some_and(|r| r <= latest_block_details.round) + }) + { + return Err(anyhow!("Invalid algorithm '{}'", settings.algorithm_id)); + } + + // verify difficulty + let difficulty = &settings.difficulty; + let difficulty_parameters = &config.challenges.difficulty_parameters[&settings.challenge_id]; + if difficulty.len() != difficulty_parameters.len() + || difficulty + .iter() + .zip(difficulty_parameters.iter()) + .any(|(d, p)| *d < p.min_value || *d > p.max_value) + { + return Err(anyhow!("Invalid difficulty '{:?}'", difficulty)); + } + + let challenge_data = ctx + .get_challenge_block_data(&settings.challenge_id, &latest_block_id) + .await + .unwrap(); + let (lower_frontier, upper_frontier) = if 
challenge_data.scaling_factor > 1f64 { + (challenge_data.base_frontier, challenge_data.scaled_frontier) + } else { + (challenge_data.scaled_frontier, challenge_data.base_frontier) + }; + if lower_frontier + .iter() + .any(|lower_point| difficulty.pareto_compare(lower_point) == ParetoCompare::BDominatesA) + || upper_frontier + .iter() + .any(|upper_point| difficulty.pareto_compare(upper_point) == ParetoCompare::ADominatesB) + { + return Err(anyhow!("Invalid difficulty. Out of bounds")); + } + + // verify player has sufficient balance + let submission_fee = + challenge_data.base_fee + challenge_data.per_nonce_fee * PreciseNumber::from(num_nonces); + if !ctx + .get_player_state(&player_id) + .await + .is_some_and(|s| s.available_fee_balance >= submission_fee) + { + return Err(anyhow!("Insufficient balance")); + } + + let benchmark_id = ctx + .confirm_precommit( + settings, + PrecommitDetails { + block_started: latest_block_details.height, + num_nonces, + rand_hash: hex::encode(thread_rng().gen::<[u8; 16]>()), + fee_paid: submission_fee, + }, + ) + .await?; + Ok(benchmark_id) +} + +#[time] +pub(crate) async fn submit_benchmark( + ctx: &T, + player_id: String, + benchmark_id: String, + merkle_root: MerkleHash, + solution_nonces: HashSet, +) -> Result<()> { + // check benchmark is not duplicate + if ctx.get_benchmark_state(&benchmark_id).await.is_some() { + return Err(anyhow!("Duplicate benchmark: {}", benchmark_id)); + } + + // check player owns benchmark + let expected_player_id = ctx + .get_precommit_settings(&benchmark_id) + .await + .ok_or_else(|| anyhow!("No corresponding precommit: {}", benchmark_id))? + .player_id; + if player_id != expected_player_id { + return Err(anyhow!( + "Invalid submitting player: {}. 
Expected: {}", + player_id, + expected_player_id + )); + } + + // check solution nonces is valid + let num_nonces = ctx + .get_precommit_details(&benchmark_id) + .await + .unwrap() + .num_nonces as u64; + if !solution_nonces.iter().all(|n| *n < num_nonces) { + return Err(anyhow!("Invalid solution nonces")); + } + + // random sample nonces + let config = ctx.get_config().await; + let mut sampled_nonces = HashSet::new(); + let mut rng = thread_rng(); + let max_samples = config.benchmarks.max_samples; + if !solution_nonces.is_empty() { + for _ in 0..25 { + sampled_nonces.insert(*solution_nonces.iter().choose(&mut rng).unwrap()); + if sampled_nonces.len() == max_samples { + break; + } + } + } + let max_samples = sampled_nonces.len() + config.benchmarks.min_num_solutions as usize; + for _ in 0..25 { + sampled_nonces.insert(rng.gen_range(0..num_nonces)); + if sampled_nonces.len() == max_samples { + break; + } + } + + ctx.confirm_benchmark( + benchmark_id, + BenchmarkDetails { + num_solutions: solution_nonces.len() as u32, + merkle_root, + sampled_nonces, + }, + solution_nonces, + ) + .await?; + Ok(()) +} + +#[time] +pub(crate) async fn submit_proof( + ctx: &T, + player_id: String, + benchmark_id: String, + merkle_proofs: Vec, +) -> Result> { + // check proof is not duplicate + if ctx.get_proof_state(&benchmark_id).await.is_some() { + return Err(anyhow!("Duplicate proof: {}", benchmark_id)); + } + + // check benchmark is submitted + let benchmark_details = ctx + .get_benchmark_details(&benchmark_id) + .await + .ok_or_else(|| anyhow!("No corresponding benchmark: {}", benchmark_id))?; + + // check player owns benchmark + let settings = ctx.get_precommit_settings(&benchmark_id).await.unwrap(); + if player_id != settings.player_id { + return Err(anyhow!( + "Invalid submitting player: {}. 
Expected: {}", + player_id, + settings.player_id + )); + } + + // verify + let precommit_details = ctx.get_precommit_details(&benchmark_id).await.unwrap(); + let proof_nonces: HashSet = merkle_proofs.iter().map(|p| p.leaf.nonce).collect(); + let sampled_nonces = &benchmark_details.sampled_nonces; + if sampled_nonces != proof_nonces || sampled_nonces.len() != merkle_proofs.len() { + return Err(anyhow!( + "Invalid merkle proofs. Does not match sampled nonces" + )); + } + + // verify merkle_proofs + let mut verification_result = Ok(()); + let max_branch_len = (64 - (*precommit_details.num_nonces - 1).leading_zeros()) as usize; + for merkle_proof in merkle_proofs.iter() { + if merkle_proof.branch.0.len() > max_branch_len + || merkle_proof + .branch + .0 + .iter() + .any(|(d, _)| *d as usize > max_branch_len) + { + return Err(ProtocolError::InvalidMerkleProof { + nonce: merkle_proof.leaf.nonce.clone(), + }); + } + let output_meta_data = OutputMetaData::from(merkle_proof.leaf.clone()); + let hash = MerkleHash::from(output_meta_data); + let result = merkle_proof + .branch + .calc_merkle_root(&hash, merkle_proof.leaf.nonce as usize); + if !result + .is_ok_and(|actual_merkle_root| actual_merkle_root == benchmark_details.merkle_root) + { + verification_result = Err(ProtocolError::InvalidMerkleProof { + nonce: merkle_proof.leaf.nonce.clone(), + }); + } + } + + if verification_result.is_ok() { + for p in merkle_proofs.iter() { + if ctx + .verify_solution(&settings, p.leaf.nonce, &p.leaf.solution) + .await + .unwrap_or_else(|e| panic!("verify_solution error: {:?}", e)) + .is_err() + { + verification_result = Err(ProtocolError::InvalidSolution { + nonce: p.leaf.nonce, + }); + } + } + }; + ctx.confirm_proof(benchmark_id, merkle_proofs) + .await + .unwrap_or_else(|e| panic!("add_proof_to_mempool error: {:?}", e)); + if let Err(e) = verification_result { + submit_fraud(benchmark_id, e.to_string()) + .await + .unwrap_or_else(|e| panic!("add_fraud_to_mempool error: {:?}", e)); + 
return Ok(Err(e.to_string())); + } + Ok(Ok(())) +} + +#[time] +pub(crate) async fn submit_fraud( + ctx: &T, + player_id: String, + benchmark_id: String, + allegation: String, +) -> ProtocolResult> { +} + +// update active benchmarks diff --git a/tig-protocol/src/contracts/challenges.rs b/tig-protocol/src/contracts/challenges.rs new file mode 100644 index 0000000..cd29e92 --- /dev/null +++ b/tig-protocol/src/contracts/challenges.rs @@ -0,0 +1,124 @@ +#[time] +async fn update_solution_signature_thresholds(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + let confirmed_proof_ids = &block.data().confirmed_proof_ids; + let mut num_solutions_by_player_by_challenge = HashMap::>::new(); + let mut new_solutions_by_player_by_challenge = HashMap::>::new(); + for (benchmark_id, (settings, num_solutions)) in cache.active_solutions.iter() { + *num_solutions_by_player_by_challenge + .entry(settings.player_id.clone()) + .or_default() + .entry(settings.challenge_id.clone()) + .or_default() += *num_solutions; + if confirmed_proof_ids.contains(benchmark_id) { + *new_solutions_by_player_by_challenge + .entry(settings.player_id.clone()) + .or_default() + .entry(settings.challenge_id.clone()) + .or_default() += *num_solutions; + } + } + + let mut solutions_rate_by_challenge = HashMap::::new(); + for (player_id, new_solutions_by_challenge) in new_solutions_by_player_by_challenge.iter() { + let cutoff = *cache + .active_players + .get(player_id) + .unwrap() + .block_data() + .cutoff(); + for (challenge_id, new_solutions) in new_solutions_by_challenge.iter() { + let num_solutions = + num_solutions_by_player_by_challenge[player_id][challenge_id].clone(); + *solutions_rate_by_challenge + .entry(challenge_id.clone()) + .or_default() += + new_solutions.saturating_sub(num_solutions - cutoff.min(num_solutions)); + } + } + + for challenge in cache.active_challenges.values_mut() { + let max_threshold = u32::MAX as f64; + let current_threshold = match 
&cache.prev_challenges.get(&challenge.id).unwrap().block_data + { + Some(data) => *data.solution_signature_threshold() as f64, + None => max_threshold, + }; + let current_rate = *solutions_rate_by_challenge.get(&challenge.id).unwrap_or(&0) as f64; + + let equilibrium_rate = config.qualifiers.total_qualifiers_threshold as f64 + / config.benchmark_submissions.lifespan_period as f64; + let target_rate = config.solution_signature.equilibrium_rate_multiplier * equilibrium_rate; + let target_threshold = if current_rate == 0.0 { + max_threshold + } else { + (current_threshold * target_rate / current_rate).clamp(0.0, max_threshold) + }; + + let threshold_decay = config.solution_signature.threshold_decay.unwrap_or(0.99); + let block_data = challenge.block_data.as_mut().unwrap(); + block_data.solution_signature_threshold = Some( + (current_threshold * threshold_decay + target_threshold * (1.0 - threshold_decay)) + .clamp(0.0, max_threshold) as u32, + ); + } +} + +#[time] +async fn update_fees(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + let PrecommitSubmissionsConfig { + min_base_fee, + min_per_nonce_fee, + target_num_precommits, + max_fee_percentage_delta, + .. 
+ } = config.precommit_submissions(); + let num_precommits_by_challenge = cache.mempool_precommits.iter().fold( + HashMap::::new(), + |mut map, precommit| { + *map.entry(precommit.settings.challenge_id.clone()) + .or_default() += 1; + map + }, + ); + let target_num_precommits = PreciseNumber::from(*target_num_precommits); + let max_fee_percent_delta = PreciseNumber::from_f64(*max_fee_percentage_delta); + let one = PreciseNumber::from(1); + let zero = PreciseNumber::from(0); + for challenge in cache.active_challenges.values_mut() { + let num_precommits = PreciseNumber::from( + num_precommits_by_challenge + .get(&challenge.id) + .unwrap_or(&0) + .clone(), + ); + let mut percent_delta = num_precommits / target_num_precommits; + if num_precommits >= target_num_precommits { + percent_delta = percent_delta - one; + } else { + percent_delta = one - percent_delta; + } + if percent_delta > max_fee_percent_delta { + percent_delta = max_fee_percent_delta; + } + let current_base_fee = + match &cache.prev_challenges.get(&challenge.id).unwrap().block_data { + Some(data) => data.base_fee.as_ref().unwrap_or(&zero), + None => &zero, + } + .clone(); + let mut base_fee = if num_precommits >= target_num_precommits { + current_base_fee * (one + percent_delta) + } else { + current_base_fee * (one - percent_delta) + }; + if base_fee < *min_base_fee { + base_fee = *min_base_fee; + } + let block_data = challenge.block_data.as_mut().unwrap(); + block_data.base_fee = Some(base_fee); + block_data.per_nonce_fee = Some(min_per_nonce_fee.clone()); + } +} diff --git a/tig-protocol/src/contracts/mod.rs b/tig-protocol/src/contracts/mod.rs new file mode 100644 index 0000000..40fd1af --- /dev/null +++ b/tig-protocol/src/contracts/mod.rs @@ -0,0 +1,3 @@ +pub mod algorithms; +pub mod benchmarks; +pub mod players; diff --git a/tig-protocol/src/contracts/opow.rs b/tig-protocol/src/contracts/opow.rs new file mode 100644 index 0000000..3369aca --- /dev/null +++ b/tig-protocol/src/contracts/opow.rs @@ -0,0 
+1,382 @@ +#[time] +async fn update_cutoffs(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + let mut phase_in_challenge_ids: HashSet = + cache.active_challenges.keys().cloned().collect(); + for algorithm in cache.active_algorithms.values() { + if algorithm + .state() + .round_pushed + .is_some_and(|r| r + 1 <= block.details.round) + { + phase_in_challenge_ids.remove(&algorithm.details.challenge_id); + } + } + + let mut num_solutions_by_player_by_challenge = HashMap::>::new(); + for (settings, num_solutions) in cache.active_solutions.values() { + *num_solutions_by_player_by_challenge + .entry(settings.player_id.clone()) + .or_default() + .entry(settings.challenge_id.clone()) + .or_default() += *num_solutions; + } + + for (player_id, num_solutions_by_challenge) in num_solutions_by_player_by_challenge.iter() { + let data = cache + .active_players + .get_mut(player_id) + .unwrap() + .block_data + .as_mut() + .unwrap(); + let phase_in_start = (block.details.round - 1) * config.rounds.blocks_per_round; + let phase_in_period = config.qualifiers.cutoff_phase_in_period.unwrap(); + let phase_in_end = phase_in_start + phase_in_period; + let min_cutoff = config.qualifiers.min_cutoff.clone().unwrap(); + let min_num_solutions = cache + .active_challenges + .keys() + .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone()) + .min() + .unwrap(); + let mut cutoff = min_cutoff + .max((min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil() as u32); + if phase_in_challenge_ids.len() > 0 && phase_in_end > block.details.height { + let phase_in_min_num_solutions = cache + .active_challenges + .keys() + .filter(|&id| !phase_in_challenge_ids.contains(id)) + .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone()) + .min() + .unwrap(); + let phase_in_cutoff = min_cutoff.max( + (phase_in_min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil() + as u32, + ); + let phase_in_weight = + (phase_in_end - 
block.details.height) as f64 / phase_in_period as f64; + cutoff = (phase_in_cutoff as f64 * phase_in_weight + + cutoff as f64 * (1.0 - phase_in_weight)) as u32; + } + data.cutoff = Some(cutoff); + } +} + +fn find_smallest_range_dimension(points: &Frontier) -> usize { + (0..2) + .min_by_key(|&d| { + let (min, max) = points + .iter() + .map(|p| p[d]) + .fold((i32::MAX, i32::MIN), |(min, max), val| { + (min.min(val), max.max(val)) + }); + max - min + }) + .unwrap() +} + +fn pareto_algorithm(points: Frontier, only_one: bool) -> Vec { + if points.is_empty() { + return Vec::new(); + } + let dimension = find_smallest_range_dimension(&points); + let sort_dimension = 1 - dimension; + + let mut buckets: HashMap> = HashMap::new(); + for point in points { + buckets.entry(point[dimension]).or_default().push(point); + } + for (_, group) in buckets.iter_mut() { + // sort descending + group.sort_unstable_by(|a, b| b[sort_dimension].cmp(&a[sort_dimension])); + } + let mut result = Vec::new(); + while !buckets.is_empty() { + let points: HashSet = buckets.values().map(|group| group[0].clone()).collect(); + let frontier = points.pareto_frontier(); + for point in frontier.iter() { + let bucket = buckets.get_mut(&point[dimension]).unwrap(); + bucket.remove(0); + if bucket.is_empty() { + buckets.remove(&point[dimension]); + } + } + result.push(frontier); + if only_one { + break; + } + } + result +} + +#[time] +async fn update_qualifiers(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + let mut solutions_by_challenge = HashMap::>::new(); + for (settings, num_solutions) in cache.active_solutions.values() { + solutions_by_challenge + .entry(settings.challenge_id.clone()) + .or_default() + .push((settings, num_solutions)); + } + + let mut max_qualifiers_by_player = HashMap::::new(); + for challenge in cache.active_challenges.values_mut() { + let block_data = challenge.block_data.as_mut().unwrap(); + block_data.num_qualifiers = Some(0); + 
block_data.qualifier_difficulties = Some(HashSet::new()); + } + for algorithm in cache.active_algorithms.values_mut() { + let block_data = algorithm.block_data.as_mut().unwrap(); + block_data.num_qualifiers_by_player = Some(HashMap::new()); + } + for player in cache.active_players.values_mut() { + let block_data = player.block_data.as_mut().unwrap(); + max_qualifiers_by_player.insert(player.id.clone(), *block_data.cutoff()); + block_data.num_qualifiers_by_challenge = Some(HashMap::new()); + } + + for (challenge_id, challenge) in cache.active_challenges.iter_mut() { + if !solutions_by_challenge.contains_key(challenge_id) { + continue; + } + let solutions = solutions_by_challenge.get_mut(challenge_id).unwrap(); + let points = solutions + .iter() + .map(|(settings, _)| settings.difficulty.clone()) + .collect::(); + let mut frontier_indexes = HashMap::::new(); + for (frontier_index, frontier) in pareto_algorithm(points, false).into_iter().enumerate() { + for point in frontier { + frontier_indexes.insert(point, frontier_index); + } + } + solutions.sort_by(|(a_settings, _), (b_settings, _)| { + let a_index = frontier_indexes[&a_settings.difficulty]; + let b_index = frontier_indexes[&b_settings.difficulty]; + a_index.cmp(&b_index) + }); + + let mut max_qualifiers_by_player = max_qualifiers_by_player.clone(); + let mut curr_frontier_index = 0; + let challenge_data = challenge.block_data.as_mut().unwrap(); + for (settings, &num_solutions) in solutions.iter() { + let BenchmarkSettings { + player_id, + algorithm_id, + challenge_id, + difficulty, + .. 
+ } = settings; + + if curr_frontier_index != frontier_indexes[difficulty] + && *challenge_data.num_qualifiers() > config.qualifiers.total_qualifiers_threshold + { + break; + } + let difficulty_parameters = &config.difficulty.parameters[challenge_id]; + let min_difficulty = difficulty_parameters.min_difficulty(); + let max_difficulty = difficulty_parameters.max_difficulty(); + if (0..difficulty.len()) + .into_iter() + .any(|i| difficulty[i] < min_difficulty[i] || difficulty[i] > max_difficulty[i]) + { + continue; + } + curr_frontier_index = frontier_indexes[difficulty]; + let player_data = cache + .active_players + .get_mut(player_id) + .unwrap() + .block_data + .as_mut() + .unwrap(); + let algorithm_data = cache + .active_algorithms + .get_mut(algorithm_id) + .unwrap() + .block_data + .as_mut() + .unwrap(); + + let max_qualifiers = max_qualifiers_by_player.get(player_id).unwrap().clone(); + let num_qualifiers = num_solutions.min(max_qualifiers); + max_qualifiers_by_player.insert(player_id.clone(), max_qualifiers - num_qualifiers); + + if num_qualifiers > 0 { + *player_data + .num_qualifiers_by_challenge + .as_mut() + .unwrap() + .entry(challenge_id.clone()) + .or_default() += num_qualifiers; + *algorithm_data + .num_qualifiers_by_player + .as_mut() + .unwrap() + .entry(player_id.clone()) + .or_default() += num_qualifiers; + *challenge_data.num_qualifiers.as_mut().unwrap() += num_qualifiers; + } + challenge_data + .qualifier_difficulties + .as_mut() + .unwrap() + .insert(difficulty.clone()); + } + } +} + +#[time] +async fn update_frontiers(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + for challenge in cache.active_challenges.values_mut() { + let block_data = challenge.block_data.as_mut().unwrap(); + + let difficulty_parameters = &config.difficulty.parameters[&challenge.id]; + let min_difficulty = difficulty_parameters.min_difficulty(); + let max_difficulty = difficulty_parameters.max_difficulty(); + + let points = block_data + 
.qualifier_difficulties() + .iter() + .map(|d| d.iter().map(|x| -x).collect()) // mirror the points so easiest difficulties are first + .collect::(); + let (base_frontier, scaling_factor, scaled_frontier) = if points.len() == 0 { + let base_frontier: Frontier = vec![min_difficulty.clone()].into_iter().collect(); + let scaling_factor = 0.0; + let scaled_frontier = base_frontier.clone(); + (base_frontier, scaling_factor, scaled_frontier) + } else { + let base_frontier = pareto_algorithm(points, true) + .pop() + .unwrap() + .into_iter() + .map(|d| d.into_iter().map(|x| -x).collect()) + .collect::() // mirror the points back; + .extend(&min_difficulty, &max_difficulty); + let scaling_factor = (*block_data.num_qualifiers() as f64 + / config.qualifiers.total_qualifiers_threshold as f64) + .min(config.difficulty.max_scaling_factor); + let scaled_frontier = base_frontier + .scale(&min_difficulty, &max_difficulty, scaling_factor) + .extend(&min_difficulty, &max_difficulty); + (base_frontier, scaling_factor, scaled_frontier) + }; + + block_data.base_frontier = Some(base_frontier); + block_data.scaled_frontier = Some(scaled_frontier); + block_data.scaling_factor = Some(scaling_factor); + } +} + +#[time] +async fn update_influence(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + let active_player_ids = &block.data().active_player_ids; + if active_player_ids.len() == 0 { + return; + } + + let mut num_qualifiers_by_challenge = HashMap::::new(); + for challenge in cache.active_challenges.values() { + num_qualifiers_by_challenge.insert( + challenge.id.clone(), + *challenge.block_data().num_qualifiers(), + ); + } + + let total_deposit = cache + .active_players + .values() + .map(|p| p.block_data().deposit.clone().unwrap()) + .sum::(); + + let zero = PreciseNumber::from(0); + let one = PreciseNumber::from(1); + let imbalance_multiplier = + PreciseNumber::from_f64(config.optimisable_proof_of_work.imbalance_multiplier); + let num_challenges = 
PreciseNumber::from(cache.active_challenges.len()); + + let mut weights = Vec::::new(); + for player_id in active_player_ids.iter() { + let data = cache + .active_players + .get_mut(player_id) + .unwrap() + .block_data + .as_mut() + .unwrap(); + + let mut percent_qualifiers = Vec::::new(); + for challenge_id in cache.active_challenges.keys() { + let num_qualifiers = num_qualifiers_by_challenge[challenge_id]; + let num_qualifiers_by_player = *data + .num_qualifiers_by_challenge() + .get(challenge_id) + .unwrap_or(&0); + + percent_qualifiers.push(if num_qualifiers_by_player == 0 { + PreciseNumber::from(0) + } else { + PreciseNumber::from(num_qualifiers_by_player) / PreciseNumber::from(num_qualifiers) + }); + } + let OptimisableProofOfWorkConfig { + avg_percent_qualifiers_multiplier, + enable_proof_of_deposit, + .. + } = &config.optimisable_proof_of_work; + if enable_proof_of_deposit.is_some_and(|x| x) { + let max_percent_rolling_deposit = + PreciseNumber::from_f64(avg_percent_qualifiers_multiplier.clone().unwrap()) + * percent_qualifiers.arithmetic_mean(); + let percent_rolling_deposit = if total_deposit == zero { + zero.clone() + } else { + data.deposit.clone().unwrap() / total_deposit + }; + let qualifying_percent_rolling_deposit = + if percent_rolling_deposit > max_percent_rolling_deposit { + max_percent_rolling_deposit.clone() + } else { + percent_rolling_deposit + }; + percent_qualifiers.push(qualifying_percent_rolling_deposit.clone()); + data.qualifying_percent_rolling_deposit = Some(qualifying_percent_rolling_deposit); + } + + let mean = percent_qualifiers.arithmetic_mean(); + let variance = percent_qualifiers.variance(); + let cv_sqr = if mean == zero { + zero.clone() + } else { + variance / (mean * mean) + }; + + let imbalance = cv_sqr / (num_challenges - one); + let imbalance_penalty = + one - PreciseNumber::approx_inv_exp(imbalance_multiplier * imbalance); + + weights.push(mean * (one - imbalance_penalty)); + + data.imbalance = Some(imbalance); + 
data.imbalance_penalty = Some(imbalance_penalty); + } + + let influences = weights.normalise(); + for (player_id, &influence) in active_player_ids.iter().zip(influences.iter()) { + let data = cache + .active_players + .get_mut(player_id) + .unwrap() + .block_data + .as_mut() + .unwrap(); + data.influence = Some(influence); + } +} diff --git a/tig-protocol/src/contracts/players.rs b/tig-protocol/src/contracts/players.rs new file mode 100644 index 0000000..b191cb0 --- /dev/null +++ b/tig-protocol/src/contracts/players.rs @@ -0,0 +1,168 @@ +use crate::{context::*, error::*}; +use logging_timer::time; +use std::collections::HashSet; +use tig_structs::core::*; +use tig_utils::*; + +#[time] +pub(crate) async fn submit_topup( + ctx: &T, + player_id: String, + tx_hash: String, + event_log_idx: u32, + amount: PreciseNumber, + verify_event_log: bool, +) -> ProtocolResult<()> { + if verify_event_log { + let block = ctx + .get_block(BlockFilter::LastConfirmed, false) + .await + .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) + .expect("No latest block found"); + + if ctx + .get_topups(TopUpsFilter::Id(tx_hash.clone())) + .await + .unwrap_or_else(|e| panic!("get_topups error: {:?}", e)) + .first() + .is_some() + { + return Err(ProtocolError::DuplicateTransaction { + tx_hash: tx_hash.clone(), + }); + } + + let transaction = + ctx.get_transaction(&tx_hash) + .await + .map_err(|_| ProtocolError::InvalidTransaction { + tx_hash: tx_hash.clone(), + })?; + if player.id != transaction.sender { + return Err(ProtocolError::InvalidTransactionSender { + tx_hash: tx_hash.clone(), + expected_sender: player.id.clone(), + actual_sender: transaction.sender.clone(), + }); + } + let burn_address = block.config().erc20.burn_address.clone(); + if transaction.receiver != burn_address { + return Err(ProtocolError::InvalidTransactionReceiver { + tx_hash: tx_hash.clone(), + expected_receiver: burn_address, + actual_receiver: transaction.receiver.clone(), + }); + } + + let expected_amount = 
block.config().precommit_submissions().topup_amount.clone(); + if transaction.amount != expected_amount { + return Err(ProtocolError::InvalidTransactionAmount { + tx_hash: tx_hash.clone(), + expected_amount: jsonify(&expected_amount), + actual_amount: jsonify(&transaction.amount), + }); + } + }; + ctx.confirm_topup( + &tx_hash, + TopUpDetails { + player_id: player.id.clone(), + amount: topup_amount, + }, + ) + .await; + Ok(()) +} + +#[time] +pub(crate) async fn submit_deposit( + ctx: &T, + player_id: String, + tx_hash: String, + log_idx: u32, + amount: PreciseNumber, + start_timestamp: u64, + end_timestamp: u64, + verify_event_log: bool, +) -> ProtocolResult<()> { + if !skip_verification {}; + ctx.confirm_deposit( + &tx_hash, + TopUpDetails { + player_id: player.id.clone(), + amount: topup_amount, + }, + ) + .await; + Ok(()) +} + +#[time] +pub(crate) async fn submit_vote( + ctx: &T, + player_id: String, + breakthrough_id: String, + yes_vote: bool, +) -> ProtocolResult<()> { + let lastest_block_id = ctx.get_block_id(BlockFilter::LastConfirmed).await.unwrap(); + let breakthrough = ctx.get_breakthrough_state(&breakthrough_id).await.unwrap(); + // check breakthrough exists + // check breakthrough is voteable + // check player hasnt already voted + // check player has deposit + + let player_data = ctx + .get_player_block_data(&player_id, &lastest_block_id) + .await + .unwrap(); + + // confirm vote + Ok(()) +} + +#[time] +pub(crate) async fn submit_delegate( + ctx: &T, + player_id: String, + delegatee: String, +) -> ProtocolResult<()> { + // check any player_block_data.deposit_by_rounds is non-zero + // check block_confirmed of last delegate + period_between_redelegate < curr_block.height + // update player_state.delegatee + // confirm delegate + Ok(()) +} + +// update_deposits + +#[time] +async fn update_deposits(ctx: &T, block: &Block, cache: &mut AddBlockCache) { + let decay = match &block + .config() + .optimisable_proof_of_work + .rolling_deposit_decay + { + 
Some(decay) => PreciseNumber::from_f64(*decay), + None => return, // Proof of deposit not implemented for these blocks + }; + let eth_block_num = block.details.eth_block_num(); + let zero = PreciseNumber::from(0); + let one = PreciseNumber::from(1); + for player in cache.active_players.values_mut() { + let rolling_deposit = match &cache.prev_players.get(&player.id).unwrap().block_data { + Some(data) => data.rolling_deposit.clone(), + None => None, + } + .unwrap_or_else(|| zero.clone()); + + let data = player.block_data.as_mut().unwrap(); + let deposit = ctx + .get_player_deposit(eth_block_num, &player.id) + .await + .unwrap() + .unwrap_or_else(|| zero.clone()); + data.rolling_deposit = Some(decay * rolling_deposit + (one - decay) * deposit); + data.deposit = Some(deposit); + data.qualifying_percent_rolling_deposit = Some(zero.clone()); + } +} diff --git a/tig-protocol/src/contracts/rewards.rs b/tig-protocol/src/contracts/rewards.rs new file mode 100644 index 0000000..5ca3831 --- /dev/null +++ b/tig-protocol/src/contracts/rewards.rs @@ -0,0 +1,64 @@ +#[time] +async fn update_innovator_rewards(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + let adoption_threshold = + PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold); + let zero = PreciseNumber::from(0); + let mut eligible_algorithms_by_challenge = HashMap::>::new(); + for algorithm in cache.active_algorithms.values_mut() { + let is_merged = algorithm.state().round_merged.is_some(); + let is_banned = algorithm.state().banned.clone(); + let data = algorithm.block_data.as_mut().unwrap(); + data.reward = Some(zero.clone()); + + if !is_banned + && (*data.adoption() >= adoption_threshold || (is_merged && *data.adoption() > zero)) + { + eligible_algorithms_by_challenge + .entry(algorithm.details.challenge_id.clone()) + .or_default() + .push(algorithm); + } + } + if eligible_algorithms_by_challenge.len() == 0 { + return; + } + + let reward_pool_per_challenge = 
PreciseNumber::from_f64(get_block_reward(block)) + * PreciseNumber::from_f64(config.rewards.distribution.optimisations) + / PreciseNumber::from(eligible_algorithms_by_challenge.len()); + + let zero = PreciseNumber::from(0); + for algorithms in eligible_algorithms_by_challenge.values_mut() { + let mut total_adoption = zero.clone(); + for algorithm in algorithms.iter() { + total_adoption = total_adoption + algorithm.block_data().adoption(); + } + + for algorithm in algorithms.iter_mut() { + let data = algorithm.block_data.as_mut().unwrap(); + let adoption = *data.adoption(); + data.reward = Some(reward_pool_per_challenge * adoption / total_adoption); + } + } +} + +#[time] +async fn update_benchmarker_rewards(block: &Block, cache: &mut AddBlockCache) { + let config = block.config(); + + let reward_pool = PreciseNumber::from_f64(get_block_reward(block)) + * PreciseNumber::from_f64(config.rewards.distribution.benchmarkers); + + for player in cache.active_players.values_mut() { + let data = player.block_data.as_mut().unwrap(); + let influence = *data.influence(); + data.reward = Some(influence * reward_pool); + } +} + +/* +delegator rewards +breakthrough rewards +*/ diff --git a/tig-protocol/src/lib.rs b/tig-protocol/src/lib.rs index b20f9bb..e8d0cb2 100644 --- a/tig-protocol/src/lib.rs +++ b/tig-protocol/src/lib.rs @@ -1,12 +1,8 @@ mod add_block; pub mod context; +mod contracts; mod error; -mod submit_algorithm; -mod submit_benchmark; -mod submit_precommit; -mod submit_proof; -mod submit_topup; -mod verify_proof; +mod protocol; use context::*; pub use error::*; use std::collections::HashSet; @@ -27,7 +23,7 @@ impl<'a, T: Context> Protocol { details: AlgorithmDetails, code: String, ) -> ProtocolResult { - submit_algorithm::execute(&self.ctx, player, details, code).await + algorithms::execute(&self.ctx, player, details, code).await } pub async fn submit_precommit( @@ -46,7 +42,7 @@ impl<'a, T: Context> Protocol { merkle_root: MerkleHash, solution_nonces: HashSet, ) -> 
ProtocolResult<()> { - submit_benchmark::execute( + benchmarks::execute( &self.ctx, player, benchmark_id, @@ -66,7 +62,7 @@ impl<'a, T: Context> Protocol { } pub async fn submit_topup(&self, player: &Player, tx_hash: String) -> ProtocolResult<()> { - submit_topup::execute(&self.ctx, player, tx_hash).await + players::execute(&self.ctx, player, tx_hash).await } pub async fn verify_proof(&self, benchmark_id: &String) -> ProtocolResult> { diff --git a/tig-protocol/src/protocol.rs b/tig-protocol/src/protocol.rs new file mode 100644 index 0000000..cb03b52 --- /dev/null +++ b/tig-protocol/src/protocol.rs @@ -0,0 +1,24 @@ +use super::contracts::*; + +async fn add_block() { + // clone of player_state; internally sets round_merged, etc + let cache = ctx.build_block_cache().await; + + // filter active benchmarks + benchmarks::update + + // deposit calcs + players.update + + // calc influence + opow.update + + // calc adoption + algorithms.update + + // calc fees, solution signature + challenges.update + + // calc rewards + rewards.update +} diff --git a/tig-protocol/src/submit_algorithm.rs b/tig-protocol/src/submit_algorithm.rs deleted file mode 100644 index 3276ce2..0000000 --- a/tig-protocol/src/submit_algorithm.rs +++ /dev/null @@ -1,110 +0,0 @@ -use crate::{context::*, error::*}; -use logging_timer::time; -use std::collections::HashSet; -use tig_structs::core::*; -use tig_utils::*; - -#[time] -pub(crate) async fn execute( - ctx: &T, - player: &Player, - details: AlgorithmDetails, - code: String, -) -> ProtocolResult { - verify_challenge_exists(ctx, &details).await?; - verify_submission_fee(ctx, player, &details).await?; - let algorithm_id = ctx - .add_algorithm_to_mempool(details, code) - .await - .unwrap_or_else(|e| panic!("add_algorithm_to_mempool error: {:?}", e)); - Ok(algorithm_id) -} - -#[time] -async fn verify_challenge_exists( - ctx: &T, - details: &AlgorithmDetails, -) -> ProtocolResult<()> { - let latest_block = ctx - .get_block(BlockFilter::Latest, false) - 
.await - .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) - .expect("Expecting latest block to exist"); - if !ctx - .get_challenges(ChallengesFilter::Id(details.challenge_id.clone()), None) - .await - .unwrap_or_else(|e| panic!("get_challenges error: {:?}", e)) - .first() - .is_some_and(|c| { - c.state() - .round_active - .as_ref() - .is_some_and(|r| *r <= latest_block.details.round) - }) - { - return Err(ProtocolError::InvalidChallenge { - challenge_id: details.challenge_id.clone(), - }); - } - Ok(()) -} - -#[time] -async fn verify_submission_fee( - ctx: &T, - player: &Player, - details: &AlgorithmDetails, -) -> ProtocolResult<()> { - let block = ctx - .get_block(BlockFilter::Latest, false) - .await - .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) - .expect("No latest block found"); - - if ctx - .get_algorithms( - AlgorithmsFilter::TxHash(details.tx_hash.clone()), - None, - false, - ) - .await - .unwrap_or_else(|e| panic!("get_algorithms error: {:?}", e)) - .first() - .is_some() - { - return Err(ProtocolError::DuplicateTransaction { - tx_hash: details.tx_hash.clone(), - }); - } - - let transaction = ctx.get_transaction(&details.tx_hash).await.map_err(|_| { - ProtocolError::InvalidTransaction { - tx_hash: details.tx_hash.clone(), - } - })?; - if player.id != transaction.sender { - return Err(ProtocolError::InvalidTransactionSender { - tx_hash: details.tx_hash.clone(), - expected_sender: player.id.clone(), - actual_sender: transaction.sender.clone(), - }); - } - let burn_address = block.config().erc20.burn_address.clone(); - if transaction.receiver != burn_address { - return Err(ProtocolError::InvalidTransactionReceiver { - tx_hash: details.tx_hash.clone(), - expected_receiver: burn_address, - actual_receiver: transaction.receiver.clone(), - }); - } - - let expected_amount = block.config().algorithm_submissions.submission_fee; - if transaction.amount != expected_amount { - return Err(ProtocolError::InvalidTransactionAmount { - tx_hash: 
details.tx_hash.clone(), - expected_amount: jsonify(&expected_amount), - actual_amount: jsonify(&transaction.amount), - }); - } - Ok(()) -} diff --git a/tig-protocol/src/submit_benchmark.rs b/tig-protocol/src/submit_benchmark.rs deleted file mode 100644 index 730084a..0000000 --- a/tig-protocol/src/submit_benchmark.rs +++ /dev/null @@ -1,85 +0,0 @@ -use crate::{context::*, error::*}; -use logging_timer::time; -use std::collections::HashSet; -use tig_structs::core::*; - -#[time] -pub(crate) async fn execute( - ctx: &T, - player: &Player, - benchmark_id: &String, - merkle_root: MerkleHash, - solution_nonces: HashSet, -) -> ProtocolResult<()> { - verify_benchmark_not_already_submitted(ctx, benchmark_id).await?; - let precommit = get_precommit_by_id(ctx, benchmark_id).await?; - verify_benchmark_ownership(player, &precommit.settings)?; - verify_nonces(&precommit, &solution_nonces)?; - ctx.add_benchmark_to_mempool( - benchmark_id, - BenchmarkDetails { - num_solutions: solution_nonces.len() as u32, - merkle_root: Some(merkle_root), - }, - solution_nonces, - ) - .await - .unwrap_or_else(|e| panic!("add_benchmark_to_mempool error: {:?}", e)); - Ok(()) -} - -#[time] -async fn verify_benchmark_not_already_submitted( - ctx: &T, - benchmark_id: &String, -) -> ProtocolResult<()> { - if ctx - .get_benchmarks(BenchmarksFilter::Id(benchmark_id.clone()), false) - .await - .unwrap_or_else(|e| panic!("get_benchmarks error: {:?}", e)) - .first() - .is_some() - { - return Err(ProtocolError::DuplicateBenchmark { - benchmark_id: benchmark_id.to_string(), - }); - } - Ok(()) -} - -#[time] -async fn get_precommit_by_id( - ctx: &T, - benchmark_id: &String, -) -> ProtocolResult { - ctx.get_precommits(PrecommitsFilter::BenchmarkId(benchmark_id.clone())) - .await - .unwrap_or_else(|e| panic!("get_precommits error: {:?}", e)) - .pop() - .filter(|p| p.state.is_some()) - .ok_or_else(|| ProtocolError::InvalidPrecommit { - benchmark_id: benchmark_id.clone(), - }) -} - -#[time] -fn 
verify_benchmark_ownership(player: &Player, settings: &BenchmarkSettings) -> ProtocolResult<()> { - if player.id != settings.player_id { - return Err(ProtocolError::InvalidSubmittingPlayer { - actual_player_id: player.id.clone(), - expected_player_id: settings.player_id.clone(), - }); - } - Ok(()) -} - -#[time] -fn verify_nonces(precommit: &Precommit, solution_nonces: &HashSet) -> ProtocolResult<()> { - let num_nonces = *precommit.details.num_nonces.as_ref().unwrap() as u64; - for n in solution_nonces.iter() { - if *n >= num_nonces { - return Err(ProtocolError::InvalidBenchmarkNonce { nonce: *n }); - } - } - Ok(()) -} diff --git a/tig-protocol/src/submit_precommit.rs b/tig-protocol/src/submit_precommit.rs deleted file mode 100644 index f7ec497..0000000 --- a/tig-protocol/src/submit_precommit.rs +++ /dev/null @@ -1,231 +0,0 @@ -use crate::{context::*, error::*}; -use logging_timer::time; -use tig_structs::core::*; -use tig_utils::*; - -#[time] -pub(crate) async fn execute( - ctx: &T, - player: &Player, - settings: BenchmarkSettings, - num_nonces: u32, -) -> ProtocolResult { - verify_player_owns_benchmark(player, &settings)?; - verify_num_nonces(num_nonces)?; - let block = get_block_by_id(ctx, &settings.block_id).await?; - verify_sufficient_lifespan(ctx, &block).await?; - let challenge = get_challenge_by_id(ctx, &settings.challenge_id, &block).await?; - verify_algorithm(ctx, &settings.algorithm_id, &block).await?; - verify_benchmark_settings_are_unique(ctx, &settings).await?; - verify_benchmark_difficulty(&settings.difficulty, &challenge, &block)?; - let fee_paid = get_fee_paid(&player, num_nonces, &challenge)?; - let benchmark_id = ctx - .add_precommit_to_mempool( - settings, - PrecommitDetails { - block_started: block.details.height, - num_nonces: Some(num_nonces), - fee_paid: Some(fee_paid), - }, - ) - .await - .unwrap_or_else(|e| panic!("add_precommit_to_mempool error: {:?}", e)); - Ok(benchmark_id) -} - -#[time] -fn verify_player_owns_benchmark( - player: 
&Player, - settings: &BenchmarkSettings, -) -> ProtocolResult<()> { - if player.id != settings.player_id { - return Err(ProtocolError::InvalidSubmittingPlayer { - actual_player_id: player.id.clone(), - expected_player_id: settings.player_id.clone(), - }); - } - Ok(()) -} - -#[time] -fn verify_num_nonces(num_nonces: u32) -> ProtocolResult<()> { - if num_nonces == 0 { - return Err(ProtocolError::InvalidNumNonces { num_nonces }); - } - Ok(()) -} - -#[time] -async fn verify_sufficient_lifespan(ctx: &T, block: &Block) -> ProtocolResult<()> { - let latest_block = ctx - .get_block(BlockFilter::Latest, false) - .await - .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) - .expect("Expecting latest block to exist"); - let config = block.config(); - let submission_delay = latest_block.details.height - block.details.height + 1; - if (submission_delay as f64 * (config.benchmark_submissions.submission_delay_multiplier + 1.0)) - as u32 - >= config.benchmark_submissions.lifespan_period - { - return Err(ProtocolError::InsufficientLifespan); - } - Ok(()) -} - -#[time] -async fn get_challenge_by_id( - ctx: &T, - challenge_id: &String, - block: &Block, -) -> ProtocolResult { - if !block.data().active_challenge_ids.contains(challenge_id) { - return Err(ProtocolError::InvalidChallenge { - challenge_id: challenge_id.clone(), - }); - } - let challenge = ctx - .get_challenges( - ChallengesFilter::Id(challenge_id.clone()), - Some(BlockFilter::Id(block.id.clone())), - ) - .await - .unwrap_or_else(|e| panic!("get_challenges error: {:?}", e)) - .first() - .map(|x| x.to_owned()) - .ok_or_else(|| ProtocolError::InvalidChallenge { - challenge_id: challenge_id.clone(), - })?; - Ok(challenge) -} - -#[time] -async fn verify_algorithm( - ctx: &T, - algorithm_id: &String, - block: &Block, -) -> ProtocolResult<()> { - if !ctx - .get_algorithms(AlgorithmsFilter::Id(algorithm_id.clone()), None, false) - .await - .unwrap_or_else(|e| panic!("get_algorithms error: {:?}", e)) - .pop() - 
.is_some_and(|a| a.state.is_some_and(|s| !s.banned)) - { - return Err(ProtocolError::InvalidAlgorithm { - algorithm_id: algorithm_id.clone(), - }); - } - if !block.data().active_algorithm_ids.contains(algorithm_id) { - return Err(ProtocolError::InvalidAlgorithm { - algorithm_id: algorithm_id.clone(), - }); - } - Ok(()) -} - -#[time] -async fn get_block_by_id(ctx: &T, block_id: &String) -> ProtocolResult { - ctx.get_block(BlockFilter::Id(block_id.clone()), true) - .await - .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) - .ok_or_else(|| ProtocolError::InvalidBlock { - block_id: block_id.clone(), - }) -} - -#[time] -async fn verify_benchmark_settings_are_unique( - ctx: &T, - settings: &BenchmarkSettings, -) -> ProtocolResult<()> { - if ctx - .get_precommits(PrecommitsFilter::Settings(settings.clone())) - .await - .unwrap_or_else(|e| panic!("get_precommits error: {:?}", e)) - .first() - .is_some() - { - return Err(ProtocolError::DuplicateBenchmarkSettings { - settings: settings.clone(), - }); - } - - Ok(()) -} - -#[time] -fn verify_benchmark_difficulty( - difficulty: &Vec, - challenge: &Challenge, - block: &Block, -) -> ProtocolResult<()> { - let config = block.config(); - let difficulty_parameters = &config.difficulty.parameters[&challenge.id]; - - if difficulty.len() != difficulty_parameters.len() - || difficulty - .iter() - .zip(difficulty_parameters.iter()) - .any(|(d, p)| *d < p.min_value || *d > p.max_value) - { - return Err(ProtocolError::InvalidDifficulty { - difficulty: difficulty.clone(), - difficulty_parameters: difficulty_parameters.clone(), - }); - } - - let challenge_data = challenge.block_data(); - let (lower_frontier, upper_frontier) = if *challenge_data.scaling_factor() > 1f64 { - ( - challenge_data.base_frontier(), - challenge_data.scaled_frontier(), - ) - } else { - ( - challenge_data.scaled_frontier(), - challenge_data.base_frontier(), - ) - }; - match difficulty.within(lower_frontier, upper_frontier) { - PointCompareFrontiers::Above => { - 
return Err(ProtocolError::DifficultyAboveHardestFrontier { - difficulty: difficulty.clone(), - }); - } - PointCompareFrontiers::Below => { - return Err(ProtocolError::DifficultyBelowEasiestFrontier { - difficulty: difficulty.clone(), - }); - } - PointCompareFrontiers::Within => {} - } - - Ok(()) -} - -#[time] -fn get_fee_paid( - player: &Player, - num_nonces: u32, - challenge: &Challenge, -) -> ProtocolResult { - let num_nonces = PreciseNumber::from(num_nonces); - let fee_paid = challenge.block_data().base_fee().clone() - + challenge.block_data().per_nonce_fee().clone() * num_nonces; - if !player - .state - .as_ref() - .is_some_and(|s| *s.available_fee_balance.as_ref().unwrap() >= fee_paid) - { - return Err(ProtocolError::InsufficientFeeBalance { - fee_paid, - available_fee_balance: player - .state - .as_ref() - .map(|s| s.available_fee_balance().clone()) - .unwrap_or(PreciseNumber::from(0)), - }); - } - Ok(fee_paid) -} diff --git a/tig-protocol/src/submit_proof.rs b/tig-protocol/src/submit_proof.rs deleted file mode 100644 index d4750ce..0000000 --- a/tig-protocol/src/submit_proof.rs +++ /dev/null @@ -1,167 +0,0 @@ -use crate::{context::*, error::*}; -use logging_timer::time; -use std::collections::HashSet; -use tig_structs::core::*; -use tig_utils::MerkleHash; - -#[time] -pub(crate) async fn execute( - ctx: &T, - player: &Player, - benchmark_id: &String, - merkle_proofs: Vec, -) -> ProtocolResult> { - verify_proof_not_already_submitted(ctx, benchmark_id).await?; - let precommit = get_precommit_by_id(ctx, benchmark_id).await?; - verify_benchmark_ownership(player, &precommit.settings)?; - let benchmark = get_benchmark_by_id(ctx, benchmark_id).await?; - verify_sampled_nonces(&benchmark, &merkle_proofs)?; - let mut verification_result = verify_merkle_proofs(&precommit, &benchmark, &merkle_proofs); - if verification_result.is_ok() { - verification_result = verify_solutions_are_valid(ctx, &precommit, &merkle_proofs).await; - }; - ctx.add_proof_to_mempool(benchmark_id, 
merkle_proofs) - .await - .unwrap_or_else(|e| panic!("add_proof_to_mempool error: {:?}", e)); - if let Err(e) = verification_result { - ctx.add_fraud_to_mempool(benchmark_id, e.to_string()) - .await - .unwrap_or_else(|e| panic!("add_fraud_to_mempool error: {:?}", e)); - return Ok(Err(e.to_string())); - } - Ok(Ok(())) -} - -#[time] -async fn get_precommit_by_id( - ctx: &T, - benchmark_id: &String, -) -> ProtocolResult { - ctx.get_precommits(PrecommitsFilter::BenchmarkId(benchmark_id.clone())) - .await - .unwrap_or_else(|e| panic!("get_precommits error: {:?}", e)) - .pop() - .filter(|p| p.state.is_some()) - .ok_or_else(|| ProtocolError::InvalidPrecommit { - benchmark_id: benchmark_id.clone(), - }) -} - -#[time] -async fn get_benchmark_by_id( - ctx: &T, - benchmark_id: &String, -) -> ProtocolResult { - ctx.get_benchmarks(BenchmarksFilter::Id(benchmark_id.clone()), true) - .await - .unwrap_or_else(|e| panic!("add_benchmark_to_mempool error: {:?}", e)) - .pop() - .filter(|b| b.state.is_some()) - .ok_or_else(|| ProtocolError::InvalidBenchmark { - benchmark_id: benchmark_id.to_string(), - }) -} - -#[time] -async fn verify_proof_not_already_submitted( - ctx: &T, - benchmark_id: &String, -) -> ProtocolResult<()> { - if ctx - .get_proofs(ProofsFilter::BenchmarkId(benchmark_id.clone()), false) - .await - .unwrap_or_else(|e| panic!("get_proofs error: {:?}", e)) - .first() - .is_some() - { - return Err(ProtocolError::DuplicateProof { - benchmark_id: benchmark_id.to_string(), - }); - } - Ok(()) -} - -#[time] -fn verify_benchmark_ownership(player: &Player, settings: &BenchmarkSettings) -> ProtocolResult<()> { - let expected_player_id = settings.player_id.clone(); - if player.id != expected_player_id { - return Err(ProtocolError::InvalidSubmittingPlayer { - actual_player_id: player.id.to_string(), - expected_player_id, - }); - } - Ok(()) -} - -#[time] -fn verify_merkle_proofs( - precommit: &Precommit, - benchmark: &Benchmark, - merkle_proofs: &Vec, -) -> ProtocolResult<()> { - let 
max_branch_len = - (64 - (*precommit.details.num_nonces.as_ref().unwrap() - 1).leading_zeros()) as usize; - let expected_merkle_root = benchmark.details.merkle_root.clone().unwrap(); - for merkle_proof in merkle_proofs.iter() { - let branch = merkle_proof.branch.as_ref().unwrap(); - if branch.0.len() > max_branch_len - || branch.0.iter().any(|(d, _)| *d as usize > max_branch_len) - { - return Err(ProtocolError::InvalidMerkleProof { - nonce: merkle_proof.leaf.nonce.clone(), - }); - } - let output_meta_data = OutputMetaData::from(merkle_proof.leaf.clone()); - let hash = MerkleHash::from(output_meta_data); - let result = merkle_proof - .branch - .as_ref() - .unwrap() - .calc_merkle_root(&hash, merkle_proof.leaf.nonce as usize); - if !result.is_ok_and(|actual_merkle_root| actual_merkle_root == expected_merkle_root) { - return Err(ProtocolError::InvalidMerkleProof { - nonce: merkle_proof.leaf.nonce.clone(), - }); - } - } - Ok(()) -} - -#[time] -fn verify_sampled_nonces( - benchmark: &Benchmark, - merkle_proofs: &Vec, -) -> ProtocolResult<()> { - let sampled_nonces = benchmark.state().sampled_nonces().clone(); - let proof_nonces: HashSet = merkle_proofs.iter().map(|p| p.leaf.nonce).collect(); - - if sampled_nonces != proof_nonces || sampled_nonces.len() != merkle_proofs.len() { - return Err(ProtocolError::InvalidProofNonces { - submitted_nonces: merkle_proofs.iter().map(|p| p.leaf.nonce).collect(), - expected_nonces: sampled_nonces.into_iter().collect(), - }); - } - Ok(()) -} - -#[time] -async fn verify_solutions_are_valid( - ctx: &T, - precommit: &Precommit, - merkle_proofs: &Vec, -) -> ProtocolResult<()> { - for p in merkle_proofs.iter() { - if ctx - .verify_solution(&precommit.settings, p.leaf.nonce, &p.leaf.solution) - .await - .unwrap_or_else(|e| panic!("verify_solution error: {:?}", e)) - .is_err() - { - return Err(ProtocolError::InvalidSolution { - nonce: p.leaf.nonce, - }); - } - } - - Ok(()) -} diff --git a/tig-protocol/src/submit_topup.rs 
b/tig-protocol/src/submit_topup.rs deleted file mode 100644 index 7a94431..0000000 --- a/tig-protocol/src/submit_topup.rs +++ /dev/null @@ -1,81 +0,0 @@ -use crate::{context::*, error::*}; -use logging_timer::time; -use std::collections::HashSet; -use tig_structs::core::*; -use tig_utils::*; - -#[time] -pub(crate) async fn execute( - ctx: &T, - player: &Player, - tx_hash: String, -) -> ProtocolResult<()> { - let topup_amount = verify_topup_tx(ctx, player, &tx_hash).await?; - ctx.add_topup_to_mempool( - &tx_hash, - TopUpDetails { - player_id: player.id.clone(), - amount: topup_amount, - }, - ) - .await - .unwrap_or_else(|e| panic!("add_topup_to_mempool error: {:?}", e)); - Ok(()) -} - -#[time] -async fn verify_topup_tx( - ctx: &T, - player: &Player, - tx_hash: &String, -) -> ProtocolResult { - let block = ctx - .get_block(BlockFilter::Latest, false) - .await - .unwrap_or_else(|e| panic!("get_block error: {:?}", e)) - .expect("No latest block found"); - - if ctx - .get_topups(TopUpsFilter::Id(tx_hash.clone())) - .await - .unwrap_or_else(|e| panic!("get_topups error: {:?}", e)) - .first() - .is_some() - { - return Err(ProtocolError::DuplicateTransaction { - tx_hash: tx_hash.clone(), - }); - } - - let transaction = - ctx.get_transaction(&tx_hash) - .await - .map_err(|_| ProtocolError::InvalidTransaction { - tx_hash: tx_hash.clone(), - })?; - if player.id != transaction.sender { - return Err(ProtocolError::InvalidTransactionSender { - tx_hash: tx_hash.clone(), - expected_sender: player.id.clone(), - actual_sender: transaction.sender.clone(), - }); - } - let burn_address = block.config().erc20.burn_address.clone(); - if transaction.receiver != burn_address { - return Err(ProtocolError::InvalidTransactionReceiver { - tx_hash: tx_hash.clone(), - expected_receiver: burn_address, - actual_receiver: transaction.receiver.clone(), - }); - } - - let expected_amount = block.config().precommit_submissions().topup_amount.clone(); - if transaction.amount != expected_amount { - 
return Err(ProtocolError::InvalidTransactionAmount { - tx_hash: tx_hash.clone(), - expected_amount: jsonify(&expected_amount), - actual_amount: jsonify(&transaction.amount), - }); - } - Ok(expected_amount) -} diff --git a/tig-structs/src/config.rs b/tig-structs/src/config.rs index 1f2c895..ea267d7 100644 --- a/tig-structs/src/config.rs +++ b/tig-structs/src/config.rs @@ -6,17 +6,29 @@ use tig_utils::PreciseNumber; serializable_struct_with_getters! { ProtocolConfig { + algorithms: AlgorithmsConfig, + benchmarks: BenchmarksConfig, + breakthroughs: BreakthroughsConfig, + challenges: ChallengesConfig, + deposits: DepositsConfig, erc20: ERC20Config, - benchmark_submissions: BenchmarkSubmissionsConfig, - precommit_submissions: Option, - wasm_vm: WasmVMConfig, - solution_signature: SolutionSignatureConfig, - qualifiers: QualifiersConfig, - difficulty: DifficultyConfig, - optimisable_proof_of_work: OptimisableProofOfWorkConfig, + opow: OPoWConfig, rounds: RoundsConfig, - algorithm_submissions: AlgorithmSubmissionsConfig, rewards: RewardsConfig, + runtime: RuntimeConfig, + } +} + +serializable_struct_with_getters! { + BreakthroughsConfig { + academic_fund_address: String, + min_percent_yes_votes: f64, + vote_period_rounds: u32, + min_lock_period_to_vote: u32, + submission_fee: PreciseNumber, + adoption_threshold: f64, + merge_points_threshold: u32, + push_delay: u32, } } serializable_struct_with_getters! { @@ -24,46 +36,52 @@ serializable_struct_with_getters! { rpc_url: String, chain_id: String, token_address: String, - burn_address: String, } } serializable_struct_with_getters! { - BenchmarkSubmissionsConfig { + DepositsConfig { + lock_address: String, + min_lock_amount: PreciseNumber, + min_lock_period_secs: u64, + max_lock_period_rounds: u32, + lock_period_multiplier: f64, + max_reward_share: f64, + deposit_to_qualifier_ratio: f64, + period_between_redelegate: u32, + } +} +serializable_struct_with_getters! 
{ + BenchmarksConfig { min_num_solutions: u32, submission_delay_multiplier: f64, max_samples: usize, - lifespan_period: u32, - } -} -serializable_struct_with_getters! { - PrecommitSubmissionsConfig { + max_active_period_blocks: u32, min_per_nonce_fee: PreciseNumber, min_base_fee: PreciseNumber, max_fee_percentage_delta: f64, target_num_precommits: u32, - topup_amount: PreciseNumber, } } serializable_struct_with_getters! { - WasmVMConfig { + TopUpsConfig { + topup_address: String, + min_topup_amount: PreciseNumber, + } +} +serializable_struct_with_getters! { + RuntimeConfig { max_memory: u64, max_fuel: u64, } } serializable_struct_with_getters! { - SolutionSignatureConfig { + ChallengesConfig { max_percent_delta: Option, threshold_decay: Option, equilibrium_rate_multiplier: f64, percent_error_multiplier: Option, - } -} -serializable_struct_with_getters! { - QualifiersConfig { - cutoff_phase_in_period: Option, - cutoff_multiplier: f64, - total_qualifiers_threshold: u32, - min_cutoff: Option, + max_scaling_factor: f64, + difficulty_parameters: HashMap>, } } serializable_struct_with_getters! { @@ -86,17 +104,14 @@ impl MinMaxDifficulty for Vec { } } serializable_struct_with_getters! { - DifficultyConfig { - max_scaling_factor: f64, - parameters: HashMap>, - } -} -serializable_struct_with_getters! { - OptimisableProofOfWorkConfig { + OPoWConfig { imbalance_multiplier: f64, - avg_percent_qualifiers_multiplier: Option, enable_proof_of_deposit: Option, - rolling_deposit_decay: Option, + cutoff_phase_in_period: Option, + cutoff_multiplier: f64, + total_qualifiers_threshold: u32, + min_cutoff: Option, + deposit_to_cutoff_cap_ratio: f64, } } serializable_struct_with_getters! { @@ -105,7 +120,7 @@ serializable_struct_with_getters! { } } serializable_struct_with_getters! 
{ - AlgorithmSubmissionsConfig { + AlgorithmsConfig { submission_fee: PreciseNumber, adoption_threshold: f64, merge_points_threshold: u32, diff --git a/tig-structs/src/core.rs b/tig-structs/src/core.rs index da8ec82..bbd694a 100644 --- a/tig-structs/src/core.rs +++ b/tig-structs/src/core.rs @@ -9,19 +9,26 @@ serializable_struct_with_getters! { Algorithm { id: String, details: AlgorithmDetails, - state: Option, + state: AlgorithmState, block_data: Option, - code: Option, + round_earnings: PreciseNumber, } } serializable_struct_with_getters! { Benchmark { id: String, details: BenchmarkDetails, - state: Option, + state: BenchmarkState, solution_nonces: Option>, } } +serializable_struct_with_getters! { + Binary { + algorithm_id: String, + details: BinaryDetails, + state: BinaryState, + } +} serializable_struct_with_getters! { Block { id: String, @@ -30,20 +37,57 @@ serializable_struct_with_getters! { config: Option, } } +serializable_struct_with_getters! { + Breakthrough { + id: String, + details: BreakthroughDetails, + state: BreakthroughState, + block_data: Option, + } +} serializable_struct_with_getters! { Challenge { id: String, details: ChallengeDetails, - state: Option, + state: ChallengeState, block_data: Option, } } +serializable_struct_with_getters! { + Delegate { + id: String, + details: DelegateDetails, + state: DelegateState, + } +} +serializable_struct_with_getters! { + Deposit { + id: String, + details: DepositDetails, + state: DepositState, + } +} +serializable_struct_with_getters! { + Fraud { + benchmark_id: String, + state: FraudState, + allegation: Option, + } +} +serializable_struct_with_getters! { + OPoW { + player_id: String, + block_data: Option, + round_earnings: PreciseNumber, + } +} serializable_struct_with_getters! { Player { id: String, details: PlayerDetails, - state: Option, + state: PlayerState, block_data: Option, + round_earnings_by_type: HashMap, } } serializable_struct_with_getters! 
{ @@ -51,69 +95,78 @@ serializable_struct_with_getters! { benchmark_id: String, details: PrecommitDetails, settings: BenchmarkSettings, - state: Option, + state: PrecommitState, } } serializable_struct_with_getters! { MerkleProof { leaf: OutputData, - branch: Option, + branch: MerkleBranch, } } serializable_struct_with_getters! { Proof { benchmark_id: String, - state: Option, + details: ProofDetails, + state: ProofState, merkle_proofs: Option>, } } serializable_struct_with_getters! { - Fraud { - benchmark_id: String, - state: Option, - allegation: Option, + RewardShare { + id: String, + details: RewardShareDetails, + state: RewardShareState, } } serializable_struct_with_getters! { TopUp { id: String, details: TopUpDetails, - state: Option, + state: TopUpState, } } serializable_struct_with_getters! { - Wasm { - algorithm_id: String, - details: WasmDetails, - state: Option, + Vote { + id: String, + details: VoteDetails, + state: VoteState, } } // Algorithm child structs +#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum AlgorithmType { + Wasm, + Ptx, +} serializable_struct_with_getters! { AlgorithmDetails { name: String, player_id: String, challenge_id: String, - tx_hash: String, + breakthrough_id: Option, + r#type: AlgorithmType, + fee_paid: PreciseNumber, } } serializable_struct_with_getters! { AlgorithmState { - block_confirmed: Option, - round_submitted: Option, + block_confirmed: u32, + round_submitted: u32, round_pushed: Option, + round_active: Option, round_merged: Option, banned: bool, } } serializable_struct_with_getters! { AlgorithmBlockData { - num_qualifiers_by_player: Option>, - adoption: Option, - merge_points: Option, - reward: Option, - round_earnings: Option, + num_qualifiers_by_player: HashMap, + adoption: PreciseNumber, + merge_points: u32, + reward: PreciseNumber, } } @@ -135,13 +188,13 @@ impl BenchmarkSettings { serializable_struct_with_getters! 
{ BenchmarkDetails { num_solutions: u32, - merkle_root: Option, + merkle_root: MerkleHash, + sampled_nonces: HashSet, } } serializable_struct_with_getters! { BenchmarkState { - block_confirmed: Option, - sampled_nonces: Option>, + block_confirmed: u32, } } serializable_struct_with_getters! { @@ -173,42 +226,86 @@ impl From for MerkleHash { } } +// Binary child structs +serializable_struct_with_getters! { + BinaryDetails { + compile_success: bool, + download_url: Option, + } +} +serializable_struct_with_getters! { + BinaryState { + block_confirmed: u32, + } +} + // Block child structs +#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)] +#[serde(rename_all = "lowercase")] +pub enum TxType { + Algorithm, + Benchmark, + Binary, + Breakthrough, + Challenge, + Delegate, + Deposit, + Fraud, + Precommit, + Proof, + RewardShare, + Topup, + Vote, +} +#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)] +#[serde(rename_all = "lowercase")] +pub enum SupplyType { + Circulating, + Locked, + Burnt, +} serializable_struct_with_getters! { BlockDetails { prev_block_id: String, height: u32, round: u32, + num_confirmed: HashMap, + num_active: HashMap, eth_block_num: Option, - fees_paid: Option, - num_confirmed_challenges: Option, - num_confirmed_algorithms: Option, - num_confirmed_benchmarks: Option, - num_confirmed_precommits: Option, - num_confirmed_proofs: Option, - num_confirmed_frauds: Option, - num_confirmed_topups: Option, - num_confirmed_wasms: Option, - num_active_challenges: Option, - num_active_algorithms: Option, - num_active_benchmarks: Option, - num_active_players: Option, + supply: HashMap, // circulating, locked, burnt, + timestamp: u64, } } serializable_struct_with_getters! 
{ BlockData { - confirmed_challenge_ids: HashSet, - confirmed_algorithm_ids: HashSet, - confirmed_benchmark_ids: HashSet, - confirmed_precommit_ids: HashSet, - confirmed_proof_ids: HashSet, - confirmed_fraud_ids: HashSet, - confirmed_topup_ids: HashSet, - confirmed_wasm_ids: HashSet, - active_challenge_ids: HashSet, - active_algorithm_ids: HashSet, - active_benchmark_ids: HashSet, - active_player_ids: HashSet, + confirmed_ids: HashMap>, + active_ids: HashMap>, + } +} + +// Breakthrough child structs +serializable_struct_with_getters! { + BreakthroughDetails { + name: String, + player_id: String, + challenge_id: String, + } +} +serializable_struct_with_getters! { + BreakthroughState { + block_confirmed: u32, + round_submitted: u32, + round_pushed: Option, + round_active: Option, + round_merged: Option, + vote_tally: HashMap, + } +} +serializable_struct_with_getters! { + BreakthroughBlockData { + adoption: PreciseNumber, + merge_points: u32, + reward: PreciseNumber, } } @@ -220,20 +317,71 @@ serializable_struct_with_getters! { } serializable_struct_with_getters! { ChallengeState { - block_confirmed: Option, - round_active: Option, + block_confirmed: u32, + round_active: u32, } } serializable_struct_with_getters! { ChallengeBlockData { - solution_signature_threshold: Option, - num_qualifiers: Option, - qualifier_difficulties: Option>, - base_frontier: Option, - scaled_frontier: Option, - scaling_factor: Option, - base_fee: Option, - per_nonce_fee: Option, + solution_signature_threshold: u32, + num_qualifiers: u32, + qualifier_difficulties: HashSet, + base_frontier: Frontier, + scaled_frontier: Frontier, + scaling_factor: f64, + base_fee: PreciseNumber, + per_nonce_fee: PreciseNumber, + } +} + +// Delegate child structs +serializable_struct_with_getters! { + DelegateDetails { + player_id: String, + delegatee: String, + } +} +serializable_struct_with_getters! 
{ + DelegateState { + block_confirmed: u32, + } +} + +// Deposit child structs +serializable_struct_with_getters! { + DepositDetails { + player_id: String, + tx_hash: String, + log_idx: u32, + amount: PreciseNumber, + start_timestamp: u64, + end_timestamp: u64, + } +} +serializable_struct_with_getters! { + DepositState { + block_confirmed: u32, + } +} + +// Fraud child structs +serializable_struct_with_getters! { + FraudState { + block_confirmed: u32, + } +} + +// OPoW child structs +serializable_struct_with_getters! { + OPoWBlockData { + num_qualifiers_by_challenge: HashMap, + cutoff: u32, + associated_deposit: PreciseNumber, + delegators: HashSet, + deposit_share: PreciseNumber, + imbalance: PreciseNumber, + influence: PreciseNumber, + reward: PreciseNumber, } } @@ -246,22 +394,26 @@ serializable_struct_with_getters! { } serializable_struct_with_getters! { PlayerState { - total_fees_paid: Option, - available_fee_balance: Option, + total_fees_paid: PreciseNumber, + available_fee_balance: PreciseNumber, + delegatee: String, + votes: HashMap, + reward_share: PreciseNumber, } } +#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)] +#[serde(rename_all = "lowercase")] +pub enum RewardType { + Benchmarker, + Algorithm, + Breakthrough, + Delegator, +} serializable_struct_with_getters! { PlayerBlockData { - num_qualifiers_by_challenge: Option>, - cutoff: Option, - deposit: Option, - rolling_deposit: Option, - qualifying_percent_rolling_deposit: Option, - imbalance: Option, - imbalance_penalty: Option, - influence: Option, - reward: Option, - round_earnings: Option, + reward_by_type: HashMap, + deposit_by_rounds: HashMap, + weighted_deposit: PreciseNumber, } } @@ -269,22 +421,26 @@ serializable_struct_with_getters! { serializable_struct_with_getters! { PrecommitDetails { block_started: u32, - num_nonces: Option, - fee_paid: Option, + num_nonces: u32, + rand_hash: String, + fee_paid: PreciseNumber, } } serializable_struct_with_getters! 
{ PrecommitState { - block_confirmed: Option, - rand_hash: Option, + block_confirmed: u32, } } // Proof child structs +serializable_struct_with_getters! { + ProofDetails { + submission_delay: u32, + } +} serializable_struct_with_getters! { ProofState { - block_confirmed: Option, - submission_delay: Option, + block_confirmed: u32, } } pub type Solution = Map; @@ -302,10 +458,16 @@ impl OutputData { } } -// Fraud child structs +// RewardShare child structs serializable_struct_with_getters! { - FraudState { - block_confirmed: Option, + RewardShareDetails { + player_id: String, + share: PreciseNumber, + } +} +serializable_struct_with_getters! { + RewardShareState { + block_confirmed: u32, } } @@ -313,24 +475,27 @@ serializable_struct_with_getters! { serializable_struct_with_getters! { TopUpDetails { player_id: String, + tx_hash: String, + log_idx: u32, amount: PreciseNumber, } } serializable_struct_with_getters! { TopUpState { - block_confirmed: Option, + block_confirmed: u32, } } -// Wasm child structs +// Vote child structs serializable_struct_with_getters! { - WasmDetails { - compile_success: bool, - download_url: Option, + VoteDetails { + player_id: String, + breakthrough_id: String, + is_breakthrough: bool, } } serializable_struct_with_getters! { - WasmState { - block_confirmed: Option, + VoteState { + block_confirmed: u32, } }