This commit is contained in:
FiveMovesAhead 2024-11-18 08:43:36 +08:00
parent ff208a5152
commit f33ffc8460
19 changed files with 1618 additions and 1544 deletions

View File

@ -10,6 +10,7 @@ edition.workspace = true
[dependencies]
anyhow = { version = "1.0.81" }
logging_timer = "1.1.1"
hex = "0.4.3"
rand = "0.8.4"
serde = { version = "1.0.196", features = ["derive"] }
serde_json = { version = "1.0.113" }

View File

@ -397,7 +397,7 @@ async fn setup_cache<T: Context>(
#[time]
async fn create_block<T: Context>(ctx: &T) -> (Block, AddBlockCache) {
let latest_block = ctx
.get_block(BlockFilter::Latest, false)
.get_block(BlockFilter::LastConfirmed, false)
.await
.unwrap_or_else(|e| panic!("get_block error: {:?}", e))
.expect("No latest block found");
@ -679,684 +679,6 @@ async fn update_deposits<T: Context>(ctx: &T, block: &Block, cache: &mut AddBloc
}
}
#[time]
// Recomputes each active player's qualifier "cutoff" for this block.
// The cutoff bounds how many of a player's solutions may qualify per
// challenge; it is derived from the player's MINIMUM solution count across
// all active challenges, scaled by `cutoff_multiplier` and floored at
// `min_cutoff`. Newly pushed challenges are blended in gradually over
// `cutoff_phase_in_period` blocks so they do not immediately drag the
// minimum down.
async fn update_cutoffs(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    // Start with every active challenge marked as "phasing in", then drop
    // any whose algorithm was pushed at least one full round ago.
    let mut phase_in_challenge_ids: HashSet<String> =
        cache.active_challenges.keys().cloned().collect();
    for algorithm in cache.active_algorithms.values() {
        if algorithm
            .state()
            .round_pushed
            .is_some_and(|r| r + 1 <= block.details.round)
        {
            phase_in_challenge_ids.remove(&algorithm.details.challenge_id);
        }
    }
    // Tally active solutions per player per challenge.
    let mut num_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u32>>::new();
    for (settings, num_solutions) in cache.active_solutions.values() {
        *num_solutions_by_player_by_challenge
            .entry(settings.player_id.clone())
            .or_default()
            .entry(settings.challenge_id.clone())
            .or_default() += *num_solutions;
    }
    for (player_id, num_solutions_by_challenge) in num_solutions_by_player_by_challenge.iter() {
        let data = cache
            .active_players
            .get_mut(player_id)
            .unwrap()
            .block_data
            .as_mut()
            .unwrap();
        // Phase-in window starts at the first block of the current round.
        let phase_in_start = (block.details.round - 1) * config.rounds.blocks_per_round;
        let phase_in_period = config.qualifiers.cutoff_phase_in_period.unwrap();
        let phase_in_end = phase_in_start + phase_in_period;
        let min_cutoff = config.qualifiers.min_cutoff.clone().unwrap();
        // Cutoff is driven by the player's weakest challenge; challenges
        // with no solutions count as zero.
        let min_num_solutions = cache
            .active_challenges
            .keys()
            .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone())
            .min()
            .unwrap();
        let mut cutoff = min_cutoff
            .max((min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil() as u32);
        // While inside the phase-in window, blend with a cutoff computed as
        // if the phasing-in challenges did not exist, weighted by how much
        // of the window remains.
        if phase_in_challenge_ids.len() > 0 && phase_in_end > block.details.height {
            let phase_in_min_num_solutions = cache
                .active_challenges
                .keys()
                .filter(|&id| !phase_in_challenge_ids.contains(id))
                .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone())
                .min()
                .unwrap();
            let phase_in_cutoff = min_cutoff.max(
                (phase_in_min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil()
                    as u32,
            );
            let phase_in_weight =
                (phase_in_end - block.details.height) as f64 / phase_in_period as f64;
            cutoff = (phase_in_cutoff as f64 * phase_in_weight
                + cutoff as f64 * (1.0 - phase_in_weight)) as u32;
        }
        data.cutoff = Some(cutoff);
    }
}
#[time]
// Recomputes each active challenge's solution signature threshold.
// The threshold is nudged toward a value that would keep the observed rate
// of newly confirmed solutions near the configured equilibrium rate, using
// exponential smoothing so it changes gradually block to block.
async fn update_solution_signature_thresholds(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    let confirmed_proof_ids = &block.data().confirmed_proof_ids;
    // Per player per challenge: all active solutions, and only the ones
    // whose proofs were confirmed in this block.
    let mut num_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u32>>::new();
    let mut new_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u32>>::new();
    for (benchmark_id, (settings, num_solutions)) in cache.active_solutions.iter() {
        *num_solutions_by_player_by_challenge
            .entry(settings.player_id.clone())
            .or_default()
            .entry(settings.challenge_id.clone())
            .or_default() += *num_solutions;
        if confirmed_proof_ids.contains(benchmark_id) {
            *new_solutions_by_player_by_challenge
                .entry(settings.player_id.clone())
                .or_default()
                .entry(settings.challenge_id.clone())
                .or_default() += *num_solutions;
        }
    }
    // Per-challenge rate of new solutions, discounting solutions that fall
    // outside the player's cutoff (only up-to-cutoff solutions count).
    let mut solutions_rate_by_challenge = HashMap::<String, u32>::new();
    for (player_id, new_solutions_by_challenge) in new_solutions_by_player_by_challenge.iter() {
        let cutoff = *cache
            .active_players
            .get(player_id)
            .unwrap()
            .block_data()
            .cutoff();
        for (challenge_id, new_solutions) in new_solutions_by_challenge.iter() {
            let num_solutions =
                num_solutions_by_player_by_challenge[player_id][challenge_id].clone();
            *solutions_rate_by_challenge
                .entry(challenge_id.clone())
                .or_default() +=
                new_solutions.saturating_sub(num_solutions - cutoff.min(num_solutions));
        }
    }
    for challenge in cache.active_challenges.values_mut() {
        let max_threshold = u32::MAX as f64;
        // Previous block's threshold, or fully open when there is no history.
        let current_threshold = match &cache.prev_challenges.get(&challenge.id).unwrap().block_data
        {
            Some(data) => *data.solution_signature_threshold() as f64,
            None => max_threshold,
        };
        let current_rate = *solutions_rate_by_challenge.get(&challenge.id).unwrap_or(&0) as f64;
        let equilibrium_rate = config.qualifiers.total_qualifiers_threshold as f64
            / config.benchmark_submissions.lifespan_period as f64;
        let target_rate = config.solution_signature.equilibrium_rate_multiplier * equilibrium_rate;
        // Scale the threshold inversely with the observed rate; a zero rate
        // opens the threshold fully.
        let target_threshold = if current_rate == 0.0 {
            max_threshold
        } else {
            (current_threshold * target_rate / current_rate).clamp(0.0, max_threshold)
        };
        // Exponential smoothing toward the target (default decay 0.99).
        let threshold_decay = config.solution_signature.threshold_decay.unwrap_or(0.99);
        let block_data = challenge.block_data.as_mut().unwrap();
        block_data.solution_signature_threshold = Some(
            (current_threshold * threshold_decay + target_threshold * (1.0 - threshold_decay))
                .clamp(0.0, max_threshold) as u32,
        );
    }
}
#[time]
// Recomputes each active challenge's precommit fees for this block.
// The base fee moves proportionally to how far the challenge's mempool
// precommit count is from `target_num_precommits`: it rises when demand is
// above target and falls when below, with the per-block change capped at
// `max_fee_percentage_delta` and the fee floored at `min_base_fee`.
async fn update_fees(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    let PrecommitSubmissionsConfig {
        min_base_fee,
        min_per_nonce_fee,
        target_num_precommits,
        max_fee_percentage_delta,
        ..
    } = config.precommit_submissions();
    // Count mempool precommits per challenge.
    let num_precommits_by_challenge = cache.mempool_precommits.iter().fold(
        HashMap::<String, u32>::new(),
        |mut map, precommit| {
            *map.entry(precommit.settings.challenge_id.clone())
                .or_default() += 1;
            map
        },
    );
    let target_num_precommits = PreciseNumber::from(*target_num_precommits);
    let max_fee_percent_delta = PreciseNumber::from_f64(*max_fee_percentage_delta);
    let one = PreciseNumber::from(1);
    let zero = PreciseNumber::from(0);
    for challenge in cache.active_challenges.values_mut() {
        let num_precommits = PreciseNumber::from(
            num_precommits_by_challenge
                .get(&challenge.id)
                .unwrap_or(&0)
                .clone(),
        );
        // |num_precommits / target - 1|, capped at the max delta.
        let mut percent_delta = num_precommits / target_num_precommits;
        if num_precommits >= target_num_precommits {
            percent_delta = percent_delta - one;
        } else {
            percent_delta = one - percent_delta;
        }
        if percent_delta > max_fee_percent_delta {
            percent_delta = max_fee_percent_delta;
        }
        // Previous block's base fee (zero when there is no history).
        let current_base_fee =
            match &cache.prev_challenges.get(&challenge.id).unwrap().block_data {
                Some(data) => data.base_fee.as_ref().unwrap_or(&zero),
                None => &zero,
            }
            .clone();
        // Move the fee up or down by the computed delta, then apply the floor.
        let mut base_fee = if num_precommits >= target_num_precommits {
            current_base_fee * (one + percent_delta)
        } else {
            current_base_fee * (one - percent_delta)
        };
        if base_fee < *min_base_fee {
            base_fee = *min_base_fee;
        }
        let block_data = challenge.block_data.as_mut().unwrap();
        block_data.base_fee = Some(base_fee);
        block_data.per_nonce_fee = Some(min_per_nonce_fee.clone());
    }
}
/// Returns the index (0 or 1) of the 2D dimension whose values span the
/// smallest range across `points`. Ties resolve to the lower dimension.
/// NOTE(review): callers are expected to pass a non-empty set (see
/// `pareto_algorithm`); with no points the range arithmetic would overflow.
fn find_smallest_range_dimension(points: &Frontier) -> usize {
    let mut best_dimension = 0usize;
    let mut best_range = i32::MAX;
    for dimension in 0..2 {
        // Track the extremes of this dimension in a single pass.
        let mut lo = i32::MAX;
        let mut hi = i32::MIN;
        for point in points.iter() {
            lo = lo.min(point[dimension]);
            hi = hi.max(point[dimension]);
        }
        let range = hi - lo;
        // Strict `<` keeps the first dimension on ties, matching
        // `Iterator::min_by_key` semantics.
        if range < best_range {
            best_range = range;
            best_dimension = dimension;
        }
    }
    best_dimension
}
// Peels successive Pareto frontiers off `points`.
// Points are bucketed by the dimension with the smallest value range; each
// bucket is sorted descending on the other dimension. Every iteration takes
// the current head of each bucket, computes that set's Pareto frontier, and
// removes the chosen points from their buckets. With `only_one` set, only
// the first (outermost) frontier is returned.
fn pareto_algorithm(points: Frontier, only_one: bool) -> Vec<Frontier> {
    if points.is_empty() {
        return Vec::new();
    }
    let dimension = find_smallest_range_dimension(&points);
    let sort_dimension = 1 - dimension;
    // Group points sharing the same value in the bucketing dimension.
    let mut buckets: HashMap<i32, Vec<Point>> = HashMap::new();
    for point in points {
        buckets.entry(point[dimension]).or_default().push(point);
    }
    for (_, group) in buckets.iter_mut() {
        // sort descending
        group.sort_unstable_by(|a, b| b[sort_dimension].cmp(&a[sort_dimension]));
    }
    let mut result = Vec::new();
    while !buckets.is_empty() {
        // Candidate set: the best remaining point from each bucket.
        let points: HashSet<Point> = buckets.values().map(|group| group[0].clone()).collect();
        let frontier = points.pareto_frontier();
        // Remove the frontier points; drop buckets that become empty.
        for point in frontier.iter() {
            let bucket = buckets.get_mut(&point[dimension]).unwrap();
            bucket.remove(0);
            if bucket.is_empty() {
                buckets.remove(&point[dimension]);
            }
        }
        result.push(frontier);
        if only_one {
            break;
        }
    }
    result
}
#[time]
// Assigns qualifiers for this block. For each challenge, solutions are
// ranked by Pareto-frontier index of their difficulty and consumed in that
// order: a player contributes at most `cutoff` qualifiers per challenge,
// and processing stops at a frontier boundary once the challenge exceeds
// `total_qualifiers_threshold`. Updates qualifier counts on players,
// algorithms and challenges, plus each challenge's qualifier difficulties.
async fn update_qualifiers(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    // Group active solutions by challenge.
    let mut solutions_by_challenge = HashMap::<String, Vec<(&BenchmarkSettings, &u32)>>::new();
    for (settings, num_solutions) in cache.active_solutions.values() {
        solutions_by_challenge
            .entry(settings.challenge_id.clone())
            .or_default()
            .push((settings, num_solutions));
    }
    // Reset all qualifier tallies before reassigning.
    let mut max_qualifiers_by_player = HashMap::<String, u32>::new();
    for challenge in cache.active_challenges.values_mut() {
        let block_data = challenge.block_data.as_mut().unwrap();
        block_data.num_qualifiers = Some(0);
        block_data.qualifier_difficulties = Some(HashSet::new());
    }
    for algorithm in cache.active_algorithms.values_mut() {
        let block_data = algorithm.block_data.as_mut().unwrap();
        block_data.num_qualifiers_by_player = Some(HashMap::new());
    }
    for player in cache.active_players.values_mut() {
        let block_data = player.block_data.as_mut().unwrap();
        // Each player's qualifier budget per challenge is their cutoff.
        max_qualifiers_by_player.insert(player.id.clone(), *block_data.cutoff());
        block_data.num_qualifiers_by_challenge = Some(HashMap::new());
    }
    for (challenge_id, challenge) in cache.active_challenges.iter_mut() {
        if !solutions_by_challenge.contains_key(challenge_id) {
            continue;
        }
        let solutions = solutions_by_challenge.get_mut(challenge_id).unwrap();
        // Map every solved difficulty to its Pareto frontier index (0 = best).
        let points = solutions
            .iter()
            .map(|(settings, _)| settings.difficulty.clone())
            .collect::<Frontier>();
        let mut frontier_indexes = HashMap::<Point, usize>::new();
        for (frontier_index, frontier) in pareto_algorithm(points, false).into_iter().enumerate() {
            for point in frontier {
                frontier_indexes.insert(point, frontier_index);
            }
        }
        // Process best frontiers first.
        solutions.sort_by(|(a_settings, _), (b_settings, _)| {
            let a_index = frontier_indexes[&a_settings.difficulty];
            let b_index = frontier_indexes[&b_settings.difficulty];
            a_index.cmp(&b_index)
        });
        // Per-challenge copy so budgets reset between challenges.
        let mut max_qualifiers_by_player = max_qualifiers_by_player.clone();
        let mut curr_frontier_index = 0;
        let challenge_data = challenge.block_data.as_mut().unwrap();
        for (settings, &num_solutions) in solutions.iter() {
            let BenchmarkSettings {
                player_id,
                algorithm_id,
                challenge_id,
                difficulty,
                ..
            } = settings;
            // Only stop at a frontier boundary, so a whole frontier is
            // either fully considered or not at all.
            if curr_frontier_index != frontier_indexes[difficulty]
                && *challenge_data.num_qualifiers() > config.qualifiers.total_qualifiers_threshold
            {
                break;
            }
            // Skip difficulties outside the configured valid range.
            let difficulty_parameters = &config.difficulty.parameters[challenge_id];
            let min_difficulty = difficulty_parameters.min_difficulty();
            let max_difficulty = difficulty_parameters.max_difficulty();
            if (0..difficulty.len())
                .into_iter()
                .any(|i| difficulty[i] < min_difficulty[i] || difficulty[i] > max_difficulty[i])
            {
                continue;
            }
            curr_frontier_index = frontier_indexes[difficulty];
            let player_data = cache
                .active_players
                .get_mut(player_id)
                .unwrap()
                .block_data
                .as_mut()
                .unwrap();
            let algorithm_data = cache
                .active_algorithms
                .get_mut(algorithm_id)
                .unwrap()
                .block_data
                .as_mut()
                .unwrap();
            // Qualify up to the player's remaining budget, then deduct it.
            let max_qualifiers = max_qualifiers_by_player.get(player_id).unwrap().clone();
            let num_qualifiers = num_solutions.min(max_qualifiers);
            max_qualifiers_by_player.insert(player_id.clone(), max_qualifiers - num_qualifiers);
            if num_qualifiers > 0 {
                *player_data
                    .num_qualifiers_by_challenge
                    .as_mut()
                    .unwrap()
                    .entry(challenge_id.clone())
                    .or_default() += num_qualifiers;
                *algorithm_data
                    .num_qualifiers_by_player
                    .as_mut()
                    .unwrap()
                    .entry(player_id.clone())
                    .or_default() += num_qualifiers;
                *challenge_data.num_qualifiers.as_mut().unwrap() += num_qualifiers;
            }
            // Recorded even when num_qualifiers is 0 for this player.
            challenge_data
                .qualifier_difficulties
                .as_mut()
                .unwrap()
                .insert(difficulty.clone());
        }
    }
}
#[time]
// Recomputes each active challenge's difficulty frontiers: the base
// frontier (Pareto frontier of the easiest qualifier difficulties) and a
// scaled copy whose scaling factor reflects how many qualifiers exist
// relative to `total_qualifiers_threshold` (capped at `max_scaling_factor`).
async fn update_frontiers(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    for challenge in cache.active_challenges.values_mut() {
        let block_data = challenge.block_data.as_mut().unwrap();
        let difficulty_parameters = &config.difficulty.parameters[&challenge.id];
        let min_difficulty = difficulty_parameters.min_difficulty();
        let max_difficulty = difficulty_parameters.max_difficulty();
        let points = block_data
            .qualifier_difficulties()
            .iter()
            .map(|d| d.iter().map(|x| -x).collect()) // mirror the points so easiest difficulties are first
            .collect::<Frontier>();
        let (base_frontier, scaling_factor, scaled_frontier) = if points.len() == 0 {
            // No qualifiers yet: both frontiers collapse to the minimum
            // difficulty and no scaling applies.
            let base_frontier: Frontier = vec![min_difficulty.clone()].into_iter().collect();
            let scaling_factor = 0.0;
            let scaled_frontier = base_frontier.clone();
            (base_frontier, scaling_factor, scaled_frontier)
        } else {
            // Take only the outermost Pareto frontier of the mirrored
            // points, un-mirror it, and extend it to the difficulty bounds.
            let base_frontier = pareto_algorithm(points, true)
                .pop()
                .unwrap()
                .into_iter()
                .map(|d| d.into_iter().map(|x| -x).collect())
                .collect::<Frontier>() // mirror the points back;
                .extend(&min_difficulty, &max_difficulty);
            let scaling_factor = (*block_data.num_qualifiers() as f64
                / config.qualifiers.total_qualifiers_threshold as f64)
                .min(config.difficulty.max_scaling_factor);
            let scaled_frontier = base_frontier
                .scale(&min_difficulty, &max_difficulty, scaling_factor)
                .extend(&min_difficulty, &max_difficulty);
            (base_frontier, scaling_factor, scaled_frontier)
        };
        block_data.base_frontier = Some(base_frontier);
        block_data.scaled_frontier = Some(scaled_frontier);
        block_data.scaling_factor = Some(scaling_factor);
    }
}
#[time]
// Computes each active player's influence for this block: the arithmetic
// mean of their per-challenge qualifier fractions (optionally including a
// deposit-based pseudo-challenge when proof-of-deposit is enabled),
// penalised for imbalance across challenges via a squared coefficient of
// variation, then normalised across all active players.
async fn update_influence(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    let active_player_ids = &block.data().active_player_ids;
    if active_player_ids.len() == 0 {
        return;
    }
    // Snapshot per-challenge qualifier totals.
    let mut num_qualifiers_by_challenge = HashMap::<String, u32>::new();
    for challenge in cache.active_challenges.values() {
        num_qualifiers_by_challenge.insert(
            challenge.id.clone(),
            *challenge.block_data().num_qualifiers(),
        );
    }
    let total_deposit = cache
        .active_players
        .values()
        .map(|p| p.block_data().deposit.clone().unwrap())
        .sum::<PreciseNumber>();
    let zero = PreciseNumber::from(0);
    let one = PreciseNumber::from(1);
    let imbalance_multiplier =
        PreciseNumber::from_f64(config.optimisable_proof_of_work.imbalance_multiplier);
    let num_challenges = PreciseNumber::from(cache.active_challenges.len());
    let mut weights = Vec::<PreciseNumber>::new();
    for player_id in active_player_ids.iter() {
        let data = cache
            .active_players
            .get_mut(player_id)
            .unwrap()
            .block_data
            .as_mut()
            .unwrap();
        // This player's fraction of the qualifiers on each challenge.
        let mut percent_qualifiers = Vec::<PreciseNumber>::new();
        for challenge_id in cache.active_challenges.keys() {
            let num_qualifiers = num_qualifiers_by_challenge[challenge_id];
            let num_qualifiers_by_player = *data
                .num_qualifiers_by_challenge()
                .get(challenge_id)
                .unwrap_or(&0);
            percent_qualifiers.push(if num_qualifiers_by_player == 0 {
                PreciseNumber::from(0)
            } else {
                PreciseNumber::from(num_qualifiers_by_player) / PreciseNumber::from(num_qualifiers)
            });
        }
        let OptimisableProofOfWorkConfig {
            avg_percent_qualifiers_multiplier,
            enable_proof_of_deposit,
            ..
        } = &config.optimisable_proof_of_work;
        if enable_proof_of_deposit.is_some_and(|x| x) {
            // Deposit share counts like another challenge, capped at a
            // multiple of the player's average qualifier fraction.
            let max_percent_rolling_deposit =
                PreciseNumber::from_f64(avg_percent_qualifiers_multiplier.clone().unwrap())
                    * percent_qualifiers.arithmetic_mean();
            let percent_rolling_deposit = if total_deposit == zero {
                zero.clone()
            } else {
                data.deposit.clone().unwrap() / total_deposit
            };
            let qualifying_percent_rolling_deposit =
                if percent_rolling_deposit > max_percent_rolling_deposit {
                    max_percent_rolling_deposit.clone()
                } else {
                    percent_rolling_deposit
                };
            percent_qualifiers.push(qualifying_percent_rolling_deposit.clone());
            data.qualifying_percent_rolling_deposit = Some(qualifying_percent_rolling_deposit);
        }
        // Imbalance = CV^2 / (num_challenges - 1); penalty approaches 1 as
        // the player's effort concentrates on fewer challenges.
        let mean = percent_qualifiers.arithmetic_mean();
        let variance = percent_qualifiers.variance();
        let cv_sqr = if mean == zero {
            zero.clone()
        } else {
            variance / (mean * mean)
        };
        let imbalance = cv_sqr / (num_challenges - one);
        let imbalance_penalty =
            one - PreciseNumber::approx_inv_exp(imbalance_multiplier * imbalance);
        weights.push(mean * (one - imbalance_penalty));
        data.imbalance = Some(imbalance);
        data.imbalance_penalty = Some(imbalance_penalty);
    }
    // Influence is each weight's share of the total.
    let influences = weights.normalise();
    for (player_id, &influence) in active_player_ids.iter().zip(influences.iter()) {
        let data = cache
            .active_players
            .get_mut(player_id)
            .unwrap()
            .block_data
            .as_mut()
            .unwrap();
        data.influence = Some(influence);
    }
}
#[time]
// Computes per-challenge algorithm adoption: each algorithm is weighted by
// the influence of the players using it, pro-rated by the share of each
// player's qualifiers on that challenge attributable to the algorithm, then
// the weights are normalised per challenge.
async fn update_adoption(cache: &mut AddBlockCache) {
    let mut algorithms_by_challenge = HashMap::<String, Vec<&mut Algorithm>>::new();
    for algorithm in cache.active_algorithms.values_mut() {
        algorithms_by_challenge
            .entry(algorithm.details.challenge_id.clone())
            .or_default()
            .push(algorithm);
    }
    for challenge_id in cache.active_challenges.keys() {
        let algorithms = algorithms_by_challenge.get_mut(challenge_id);
        if algorithms.is_none() {
            continue;
        }
        let algorithms = algorithms.unwrap();
        let mut weights = Vec::<PreciseNumber>::new();
        for algorithm in algorithms.iter() {
            // weight = sum over players of
            //   influence * (their qualifiers via this algorithm /
            //                their total qualifiers on this challenge)
            let mut weight = PreciseNumber::from(0);
            for (player_id, &num_qualifiers) in
                algorithm.block_data().num_qualifiers_by_player().iter()
            {
                let num_qualifiers = PreciseNumber::from(num_qualifiers);
                let player_data = cache.active_players.get(player_id).unwrap().block_data();
                let influence = player_data.influence.unwrap();
                let player_num_qualifiers = PreciseNumber::from(
                    *player_data
                        .num_qualifiers_by_challenge
                        .as_ref()
                        .unwrap()
                        .get(challenge_id)
                        .unwrap(),
                );
                weight = weight + influence * num_qualifiers / player_num_qualifiers;
            }
            weights.push(weight);
        }
        // Adoption is each algorithm's normalised share of the weights.
        let adoption = weights.normalise();
        for (algorithm, adoption) in algorithms.iter_mut().zip(adoption) {
            algorithm.block_data.as_mut().unwrap().adoption = Some(adoption);
        }
    }
}
#[time]
// Distributes the optimisations share of the block reward among eligible
// algorithms. An algorithm is eligible when it is not banned and either
// meets the adoption threshold, or is already merged and has non-zero
// adoption. The pool is split evenly across challenges with eligible
// algorithms, then pro-rata by adoption within each challenge.
async fn update_innovator_rewards(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    let adoption_threshold =
        PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold);
    let zero = PreciseNumber::from(0);
    let mut eligible_algorithms_by_challenge = HashMap::<String, Vec<&mut Algorithm>>::new();
    for algorithm in cache.active_algorithms.values_mut() {
        let is_merged = algorithm.state().round_merged.is_some();
        let is_banned = algorithm.state().banned.clone();
        // Reward defaults to zero for every algorithm; only eligible ones
        // are overwritten below.
        let data = algorithm.block_data.as_mut().unwrap();
        data.reward = Some(zero.clone());
        if !is_banned
            && (*data.adoption() >= adoption_threshold || (is_merged && *data.adoption() > zero))
        {
            eligible_algorithms_by_challenge
                .entry(algorithm.details.challenge_id.clone())
                .or_default()
                .push(algorithm);
        }
    }
    if eligible_algorithms_by_challenge.len() == 0 {
        return;
    }
    let reward_pool_per_challenge = PreciseNumber::from_f64(get_block_reward(block))
        * PreciseNumber::from_f64(config.rewards.distribution.optimisations)
        / PreciseNumber::from(eligible_algorithms_by_challenge.len());
    let zero = PreciseNumber::from(0);
    for algorithms in eligible_algorithms_by_challenge.values_mut() {
        // Split the challenge pool pro-rata with adoption.
        let mut total_adoption = zero.clone();
        for algorithm in algorithms.iter() {
            total_adoption = total_adoption + algorithm.block_data().adoption();
        }
        for algorithm in algorithms.iter_mut() {
            let data = algorithm.block_data.as_mut().unwrap();
            let adoption = *data.adoption();
            data.reward = Some(reward_pool_per_challenge * adoption / total_adoption);
        }
    }
}
#[time]
/// Pays out the benchmarkers' share of the block reward to every active
/// player, pro-rata with their influence for this block.
async fn update_benchmarker_rewards(block: &Block, cache: &mut AddBlockCache) {
    // Pool = total block reward scaled by the benchmarkers' distribution share.
    let benchmarker_share = block.config().rewards.distribution.benchmarkers;
    let reward_pool = PreciseNumber::from_f64(get_block_reward(block))
        * PreciseNumber::from_f64(benchmarker_share);
    for data in cache
        .active_players
        .values_mut()
        .map(|player| player.block_data.as_mut().unwrap())
    {
        // reward = influence * pool
        data.reward = Some(*data.influence() * reward_pool);
    }
}
#[time]
// Accumulates merge points: an unmerged algorithm gains one point for each
// block in which its adoption meets the threshold. Points reset to zero at
// the first block of a round, otherwise carry over from the previous block.
async fn update_merge_points(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    let adoption_threshold =
        PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold);
    for algorithm in cache.active_algorithms.values_mut() {
        let is_merged = algorithm.state().round_merged.is_some();
        let data = algorithm.block_data.as_mut().unwrap();
        // first block of the round
        let prev_merge_points = if block.details.height % config.rounds.blocks_per_round == 0 {
            0
        } else {
            match &cache.prev_algorithms.get(&algorithm.id).unwrap().block_data {
                Some(data) => *data.merge_points(),
                None => 0,
            }
        };
        // Merged or under-adopted algorithms keep their points unchanged.
        data.merge_points = Some(if is_merged || *data.adoption() < adoption_threshold {
            prev_merge_points
        } else {
            prev_merge_points + 1
        });
    }
}
#[time]
// At the last block of a round, picks per challenge the not-yet-merged
// algorithm with the most merge points (among those at or above
// `merge_points_threshold`) and schedules it to merge next round.
async fn update_merges(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    // last block of the round
    if (block.details.height + 1) % config.rounds.blocks_per_round != 0 {
        return;
    }
    let mut algorithm_to_merge_by_challenge = HashMap::<String, &mut Algorithm>::new();
    for algorithm in cache.active_algorithms.values_mut() {
        let challenge_id = algorithm.details.challenge_id.clone();
        let data = algorithm.block_data();
        if algorithm.state().round_merged.is_some()
            || *data.merge_points() < config.algorithm_submissions.merge_points_threshold
        {
            continue;
        }
        // Keep only the highest-scoring candidate per challenge.
        if !algorithm_to_merge_by_challenge.contains_key(&challenge_id)
            || algorithm_to_merge_by_challenge[&challenge_id]
                .block_data()
                .merge_points
                < data.merge_points
        {
            algorithm_to_merge_by_challenge.insert(challenge_id, algorithm);
        }
    }
    let round_merged = block.details.round + 1;
    for algorithm in algorithm_to_merge_by_challenge.values_mut() {
        let state = algorithm.state.as_mut().unwrap();
        state.round_merged = Some(round_merged);
    }
}
#[time]
async fn commit_changes<T: Context>(ctx: &T, block: &Block, cache: &mut AddBlockCache) {
for precommit in cache.mempool_precommits.drain(..) {

View File

@ -14,7 +14,6 @@ pub enum SubmissionType {
#[derive(Debug, Clone, PartialEq)]
pub enum AlgorithmsFilter {
Id(String),
Name(String),
TxHash(String),
Mempool,
@ -22,29 +21,25 @@ pub enum AlgorithmsFilter {
}
#[derive(Debug, Clone, PartialEq)]
pub enum BenchmarksFilter {
Id(String),
Mempool { from_block_started: u32 },
Confirmed { from_block_started: u32 },
Mempool,
Confirmed,
}
#[derive(Debug, Clone, PartialEq)]
pub enum BlockFilter {
Latest,
Current,
LastConfirmed,
Height(u32),
Id(String),
Round(u32),
}
#[derive(Debug, Clone, PartialEq)]
pub enum ChallengesFilter {
Id(String),
Name(String),
Mempool,
Confirmed,
}
#[derive(Debug, Clone, PartialEq)]
pub enum FraudsFilter {
BenchmarkId(String),
Mempool { from_block_started: u32 },
Confirmed { from_block_started: u32 },
Mempool,
Confirmed,
}
#[derive(Debug, Clone, PartialEq)]
pub enum PlayersFilter {
@ -55,72 +50,69 @@ pub enum PlayersFilter {
}
#[derive(Debug, Clone, PartialEq)]
pub enum PrecommitsFilter {
BenchmarkId(String),
Settings(BenchmarkSettings),
Mempool { from_block_started: u32 },
Confirmed { from_block_started: u32 },
Mempool,
Confirmed,
}
#[derive(Debug, Clone, PartialEq)]
pub enum ProofsFilter {
BenchmarkId(String),
Mempool { from_block_started: u32 },
Confirmed { from_block_started: u32 },
Mempool,
Confirmed,
}
#[derive(Debug, Clone, PartialEq)]
pub enum TopUpsFilter {
Id(String),
PlayerId(String),
Mempool,
Confirmed,
}
#[derive(Debug, Clone, PartialEq)]
pub enum WasmsFilter {
AlgorithmId(String),
Mempool,
Confirmed,
}
#[allow(async_fn_in_trait)]
pub trait Context {
async fn get_algorithms(
async fn get_algorithm_ids(&self, filter: AlgorithmsFilter) -> Vec<String>;
async fn get_algorithm_state(&self, algorithm_id: &String) -> Option<AlgorithmState>;
async fn get_benchmark_ids(&self, filter: BenchmarksFilter) -> Vec<String>;
async fn get_benchmark_details(&self, benchmark_id: &String) -> Option<BenchmarkDetails>;
async fn get_benchmark_state(&self, benchmark_id: &String) -> Option<BenchmarkState>;
async fn confirm_benchmark(
&self,
filter: AlgorithmsFilter,
block_data: Option<BlockFilter>,
include_data: bool,
) -> ContextResult<Vec<Algorithm>>;
async fn get_benchmarks(
benchmark_id: String,
details: BenchmarkDetails,
solution_nonces: HashSet<u64>,
) -> ContextResult<()>;
async fn get_block_id(&self, filter: BlockFilter) -> Option<String>;
async fn get_block_details(&self, block_id: &String) -> Option<BlockDetails>;
async fn get_challenge_ids(&self, filter: ChallengesFilter) -> Vec<String>;
async fn get_challenge_state(&self, challenge_id: &String) -> Option<ChallengeState>;
async fn get_challenge_block_data(
&self,
filter: BenchmarksFilter,
include_data: bool,
) -> ContextResult<Vec<Benchmark>>;
async fn get_block(
challenge_id: &String,
block_id: &String,
) -> Option<ChallengeBlockData>;
async fn get_config(&self) -> ProtocolConfig;
async fn get_fraud_ids(&self, filter: FraudsFilter) -> Vec<String>;
async fn get_player_ids(&self, filter: PlayersFilter) -> Vec<String>;
async fn get_player_state(&self, player_id: &String) -> Option<PlayerState>;
async fn get_player_block_data(
&self,
filter: BlockFilter,
include_data: bool,
) -> ContextResult<Option<Block>>;
async fn get_challenges(
player_id: &String,
block_id: &String,
) -> Option<PlayerBlockData>;
async fn get_precommit_ids(&self, filter: PrecommitsFilter) -> Vec<String>;
async fn get_precommit_settings(&self, benchmark_id: &String) -> Option<BenchmarkSettings>;
async fn get_precommit_details(&self, benchmark_id: &String) -> Option<PrecommitDetails>;
async fn confirm_precommit(
&self,
filter: ChallengesFilter,
block_data: Option<BlockFilter>,
) -> ContextResult<Vec<Challenge>>;
async fn get_config(&self) -> ContextResult<ProtocolConfig>;
async fn get_frauds(
&self,
filter: FraudsFilter,
include_data: bool,
) -> ContextResult<Vec<Fraud>>;
async fn get_players(
&self,
filter: PlayersFilter,
block_data: Option<BlockFilter>,
) -> ContextResult<Vec<Player>>;
async fn get_precommits(&self, filter: PrecommitsFilter) -> ContextResult<Vec<Precommit>>;
async fn get_proofs(
&self,
filter: ProofsFilter,
include_data: bool,
) -> ContextResult<Vec<Proof>>;
async fn get_topups(&self, filter: TopUpsFilter) -> ContextResult<Vec<TopUp>>;
async fn get_wasms(&self, filter: WasmsFilter) -> ContextResult<Vec<Wasm>>;
settings: BenchmarkSettings,
details: PrecommitDetails,
) -> ContextResult<String>;
async fn get_proofs_ids(&self, filter: ProofsFilter) -> Vec<String>;
async fn get_proof_state(&self, benchmark_id: &String) -> Option<ProofState>;
async fn get_topup_ids(&self, filter: TopUpsFilter) -> Vec<String>;
async fn get_wasm_ids(&self, filter: WasmsFilter) -> Vec<String>;
async fn verify_solution(
&self,
settings: &BenchmarkSettings,

View File

@ -0,0 +1,209 @@
use crate::{context::*, error::*};
use logging_timer::time;
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::*;
#[time]
// Handles an algorithm submission: validates the target challenge, the
// optional breakthrough and the player's fee balance against the current
// block, then confirms the algorithm and returns its new id.
pub(crate) async fn submit_algorithm<T: Context>(
    ctx: &T,
    player_id: String,
    algorithm_name: String,
    challenge_id: String,
    breakthrough_id: Option<String>,
    r#type: AlgorithmType,
) -> ProtocolResult<String> {
    let config = ctx.get_config().await;
    let curr_block_id = ctx.get_block_id(BlockFilter::Current).await.unwrap();
    let curr_block_details = ctx.get_block_details(&curr_block_id).await.unwrap();
    // Challenge must exist and already be active this round.
    if !ctx
        .get_challenge_state(&challenge_id)
        .await
        .is_some_and(|s| s.round_active <= curr_block_details.round)
    {
        return Err(anyhow!("Invalid challenge '{}'", challenge_id));
    }
    // Borrow rather than move: `breakthrough_id` is still needed below when
    // building the AlgorithmDetails. (The original `if let Some(..) =
    // breakthrough_id` consumed the Option, making the later use a
    // use-after-move compile error.)
    if let Some(breakthrough_id) = &breakthrough_id {
        if ctx.get_breakthrough_state(breakthrough_id).await.is_none() {
            return Err(anyhow!("Invalid breakthrough '{}'", breakthrough_id));
        }
    }
    // Player must be able to cover the submission fee.
    if !ctx
        .get_player_state(&player_id)
        .await
        .is_some_and(|s| s.available_fee_balance >= config.algorithms.submission_fee)
    {
        return Err(anyhow!("Insufficient balance"));
    }
    let algorithm_id = ctx
        .confirm_algorithm(AlgorithmDetails {
            name: algorithm_name,
            challenge_id,
            breakthrough_id,
            r#type,
            player_id,
            fee_paid: config.algorithms.submission_fee,
        })
        .await?;
    Ok(algorithm_id)
}
#[time]
// Stub handler for binary submissions.
// NOTE(review): `ctx`, `player_id`, `compile_success` and `download_url`
// are currently unused — the function just echoes back the algorithm id.
// Presumably validation/confirmation logic is still to be implemented.
pub(crate) async fn submit_binary<T: Context>(
    ctx: &T,
    player_id: String,
    algorithm_id: String,
    compile_success: bool,
    download_url: Option<String>,
) -> ProtocolResult<String> {
    Ok(algorithm_id)
}
#[time]
pub(crate) async fn submit_breakthrough<T: Context>(
ctx: &T,
player_id: String,
breakthrough_name: String,
) -> ProtocolResult<String> {
// check player_state has sufficient fee balance
// check name
// confirm breakthrough
Ok(algorithm_id)
}
/*
add_block.update_votes
update vote tallies for each breakthrough (only consider player_block_data.deposit_by_round where round > min_lock_period_to_vote)
add_block.update_adoption
breakthrough adoption = sum(algorithm.adoption where algorithm.breakthrough_id == breakthrough.id)
add_block.update_merge_points
if adoption < threshold or not merged:
continue
if not merged:
add merge point
eligible to earn rewards (pro-rata with adoption)
need to update and track academic_fund_address..
add_block.update_merges
for each breakthrough where curr_round + 1 == breakthrough.round_pushed + vote_period_rounds
min_percent_yes_votes < sum(yes_votes) / sum(yes_votes + no_votes)
set breakthrough_state.round_active
for each breakthrough where merge_points_threshold < merge_points
set breakthrough_state.round_merged..
*/
#[time]
// Computes per-challenge algorithm adoption: each algorithm is weighted by
// the influence of the players using it, pro-rated by the share of each
// player's qualifiers on that challenge attributable to the algorithm, then
// the weights are normalised per challenge.
async fn update_adoption(cache: &mut AddBlockCache) {
    let mut algorithms_by_challenge = HashMap::<String, Vec<&mut Algorithm>>::new();
    for algorithm in cache.active_algorithms.values_mut() {
        algorithms_by_challenge
            .entry(algorithm.details.challenge_id.clone())
            .or_default()
            .push(algorithm);
    }
    for challenge_id in cache.active_challenges.keys() {
        let algorithms = algorithms_by_challenge.get_mut(challenge_id);
        if algorithms.is_none() {
            continue;
        }
        let algorithms = algorithms.unwrap();
        let mut weights = Vec::<PreciseNumber>::new();
        for algorithm in algorithms.iter() {
            // weight = sum over players of
            //   influence * (their qualifiers via this algorithm /
            //                their total qualifiers on this challenge)
            let mut weight = PreciseNumber::from(0);
            for (player_id, &num_qualifiers) in
                algorithm.block_data().num_qualifiers_by_player().iter()
            {
                let num_qualifiers = PreciseNumber::from(num_qualifiers);
                let player_data = cache.active_players.get(player_id).unwrap().block_data();
                let influence = player_data.influence.unwrap();
                let player_num_qualifiers = PreciseNumber::from(
                    *player_data
                        .num_qualifiers_by_challenge
                        .as_ref()
                        .unwrap()
                        .get(challenge_id)
                        .unwrap(),
                );
                weight = weight + influence * num_qualifiers / player_num_qualifiers;
            }
            weights.push(weight);
        }
        // Adoption is each algorithm's normalised share of the weights.
        let adoption = weights.normalise();
        for (algorithm, adoption) in algorithms.iter_mut().zip(adoption) {
            algorithm.block_data.as_mut().unwrap().adoption = Some(adoption);
        }
    }
}
#[time]
// Accumulates merge points: an unmerged algorithm gains one point for each
// block in which its adoption meets the threshold. Points reset to zero at
// the first block of a round, otherwise carry over from the previous block.
async fn update_merge_points(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    let adoption_threshold =
        PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold);
    for algorithm in cache.active_algorithms.values_mut() {
        let is_merged = algorithm.state().round_merged.is_some();
        let data = algorithm.block_data.as_mut().unwrap();
        // first block of the round
        let prev_merge_points = if block.details.height % config.rounds.blocks_per_round == 0 {
            0
        } else {
            match &cache.prev_algorithms.get(&algorithm.id).unwrap().block_data {
                Some(data) => *data.merge_points(),
                None => 0,
            }
        };
        // Merged or under-adopted algorithms keep their points unchanged.
        data.merge_points = Some(if is_merged || *data.adoption() < adoption_threshold {
            prev_merge_points
        } else {
            prev_merge_points + 1
        });
    }
}
#[time]
// At the last block of a round, picks per challenge the not-yet-merged
// algorithm with the most merge points (among those at or above
// `merge_points_threshold`) and schedules it to merge next round.
async fn update_merges(block: &Block, cache: &mut AddBlockCache) {
    let config = block.config();
    // last block of the round
    if (block.details.height + 1) % config.rounds.blocks_per_round != 0 {
        return;
    }
    let mut algorithm_to_merge_by_challenge = HashMap::<String, &mut Algorithm>::new();
    for algorithm in cache.active_algorithms.values_mut() {
        let challenge_id = algorithm.details.challenge_id.clone();
        let data = algorithm.block_data();
        if algorithm.state().round_merged.is_some()
            || *data.merge_points() < config.algorithm_submissions.merge_points_threshold
        {
            continue;
        }
        // Keep only the highest-scoring candidate per challenge.
        if !algorithm_to_merge_by_challenge.contains_key(&challenge_id)
            || algorithm_to_merge_by_challenge[&challenge_id]
                .block_data()
                .merge_points
                < data.merge_points
        {
            algorithm_to_merge_by_challenge.insert(challenge_id, algorithm);
        }
    }
    let round_merged = block.details.round + 1;
    for algorithm in algorithm_to_merge_by_challenge.values_mut() {
        let state = algorithm.state.as_mut().unwrap();
        state.round_merged = Some(round_merged);
    }
}

View File

@ -0,0 +1,283 @@
use crate::{context::*, error::*};
use anyhow::{anyhow, Result};
use logging_timer::time;
use rand::{seq::IteratorRandom, thread_rng, Rng};
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::*;
#[time]
pub(crate) async fn submit_precommit<T: Context>(
    ctx: &T,
    player_id: String,
    settings: BenchmarkSettings,
    num_nonces: u32,
) -> Result<String> {
    // Validates and registers a benchmark precommit, charging the submission
    // fee, and returns the newly assigned benchmark id.
    //
    // Checks performed: the settings name the submitting player and reference
    // the last confirmed block, an active challenge and an active (non-banned)
    // algorithm; the difficulty lies within the configured parameter ranges
    // and the current frontier band; and the player's available fee balance
    // covers base_fee + per_nonce_fee * num_nonces.
    if player_id != settings.player_id {
        return Err(anyhow!("Invalid settings.player_id. Must be {}", player_id));
    }
    if num_nonces == 0 {
        return Err(anyhow!("Invalid num_nonces. Must be greater than 0"));
    }
    let config = ctx.get_config().await;
    let latest_block_id = ctx.get_block_id(BlockFilter::LastConfirmed).await.unwrap();
    if latest_block_id != settings.block_id {
        return Err(anyhow!("Invalid block_id. Must be latest block"));
    }
    let latest_block_details = ctx.get_block_details(&latest_block_id).await.unwrap();
    // verify challenge is active
    if !ctx
        .get_challenge_state(&settings.challenge_id)
        .await
        .is_some_and(|s| s.round_active <= latest_block_details.round)
    {
        return Err(anyhow!("Invalid challenge '{}'", settings.challenge_id));
    }
    // verify algorithm is active
    if !ctx
        .get_algorithm_state(&settings.algorithm_id)
        .await
        .is_some_and(|s| {
            !s.banned
                && s.round_active
                    .is_some_and(|r| r <= latest_block_details.round)
        })
    {
        return Err(anyhow!("Invalid algorithm '{}'", settings.algorithm_id));
    }
    // verify difficulty: right dimensionality and every value within its
    // configured [min_value, max_value] range
    let difficulty = &settings.difficulty;
    let difficulty_parameters = &config.challenges.difficulty_parameters[&settings.challenge_id];
    if difficulty.len() != difficulty_parameters.len()
        || difficulty
            .iter()
            .zip(difficulty_parameters.iter())
            .any(|(d, p)| *d < p.min_value || *d > p.max_value)
    {
        return Err(anyhow!("Invalid difficulty '{:?}'", difficulty));
    }
    let challenge_data = ctx
        .get_challenge_block_data(&settings.challenge_id, &latest_block_id)
        .await
        .unwrap();
    // order the two frontiers so the easier one is the lower bound; which is
    // which depends on whether the scaling factor is above 1
    let (lower_frontier, upper_frontier) = if challenge_data.scaling_factor > 1f64 {
        (challenge_data.base_frontier, challenge_data.scaled_frontier)
    } else {
        (challenge_data.scaled_frontier, challenge_data.base_frontier)
    };
    // the difficulty must lie within the band: not dominated by the lower
    // frontier and not dominating the upper frontier
    if lower_frontier
        .iter()
        .any(|lower_point| difficulty.pareto_compare(lower_point) == ParetoCompare::BDominatesA)
        || upper_frontier
            .iter()
            .any(|upper_point| difficulty.pareto_compare(upper_point) == ParetoCompare::ADominatesB)
    {
        return Err(anyhow!("Invalid difficulty. Out of bounds"));
    }
    // verify player has sufficient balance
    let submission_fee =
        challenge_data.base_fee + challenge_data.per_nonce_fee * PreciseNumber::from(num_nonces);
    if !ctx
        .get_player_state(&player_id)
        .await
        .is_some_and(|s| s.available_fee_balance >= submission_fee)
    {
        return Err(anyhow!("Insufficient balance"));
    }
    // rand_hash is a fresh 16-byte random salt stored with the precommit —
    // presumably to make benchmark outputs unpredictable; confirm usage in
    // the verification path
    let benchmark_id = ctx
        .confirm_precommit(
            settings,
            PrecommitDetails {
                block_started: latest_block_details.height,
                num_nonces,
                rand_hash: hex::encode(thread_rng().gen::<[u8; 16]>()),
                fee_paid: submission_fee,
            },
        )
        .await?;
    Ok(benchmark_id)
}
#[time]
pub(crate) async fn submit_benchmark<T: Context>(
    ctx: &T,
    player_id: String,
    benchmark_id: String,
    merkle_root: MerkleHash,
    solution_nonces: HashSet<u64>,
) -> Result<()> {
    // Validates a benchmark submission against its precommit, randomly
    // samples nonces the player must later prove, and records the benchmark.
    // check benchmark is not duplicate
    if ctx.get_benchmark_state(&benchmark_id).await.is_some() {
        return Err(anyhow!("Duplicate benchmark: {}", benchmark_id));
    }
    // check player owns benchmark
    let expected_player_id = ctx
        .get_precommit_settings(&benchmark_id)
        .await
        .ok_or_else(|| anyhow!("No corresponding precommit: {}", benchmark_id))?
        .player_id;
    if player_id != expected_player_id {
        return Err(anyhow!(
            "Invalid submitting player: {}. Expected: {}",
            player_id,
            expected_player_id
        ));
    }
    // check solution nonces is valid: all must be < the precommitted count
    let num_nonces = ctx
        .get_precommit_details(&benchmark_id)
        .await
        .unwrap()
        .num_nonces as u64;
    if !solution_nonces.iter().all(|n| *n < num_nonces) {
        return Err(anyhow!("Invalid solution nonces"));
    }
    // random sample nonces
    let config = ctx.get_config().await;
    let mut sampled_nonces = HashSet::new();
    let mut rng = thread_rng();
    let max_samples = config.benchmarks.max_samples;
    // draw up to max_samples distinct nonces from the claimed solutions.
    // NOTE(review): draws are with replacement and capped at 25 attempts, so
    // fewer than max_samples may be collected — confirm this is intended
    if !solution_nonces.is_empty() {
        for _ in 0..25 {
            sampled_nonces.insert(*solution_nonces.iter().choose(&mut rng).unwrap());
            if sampled_nonces.len() == max_samples {
                break;
            }
        }
    }
    // add further samples from the full nonce range; max_samples is re-bound
    // here as the new target size (current samples + min_num_solutions)
    let max_samples = sampled_nonces.len() + config.benchmarks.min_num_solutions as usize;
    for _ in 0..25 {
        sampled_nonces.insert(rng.gen_range(0..num_nonces));
        if sampled_nonces.len() == max_samples {
            break;
        }
    }
    ctx.confirm_benchmark(
        benchmark_id,
        BenchmarkDetails {
            num_solutions: solution_nonces.len() as u32,
            merkle_root,
            sampled_nonces,
        },
        solution_nonces,
    )
    .await?;
    Ok(())
}
#[time]
pub(crate) async fn submit_proof<T: Context>(
ctx: &T,
player_id: String,
benchmark_id: String,
merkle_proofs: Vec<MerkleProof>,
) -> Result<Result<(), String>> {
// check proof is not duplicate
if ctx.get_proof_state(&benchmark_id).await.is_some() {
return Err(anyhow!("Duplicate proof: {}", benchmark_id));
}
// check benchmark is submitted
let benchmark_details = ctx
.get_benchmark_details(&benchmark_id)
.await
.ok_or_else(|| anyhow!("No corresponding benchmark: {}", benchmark_id))?;
// check player owns benchmark
let settings = ctx.get_precommit_settings(&benchmark_id).await.unwrap();
if player_id != settings.player_id {
return Err(anyhow!(
"Invalid submitting player: {}. Expected: {}",
player_id,
settings.player_id
));
}
// verify
let precommit_details = ctx.get_precommit_details(&benchmark_id).await.unwrap();
let proof_nonces: HashSet<u64> = merkle_proofs.iter().map(|p| p.leaf.nonce).collect();
let sampled_nonces = &benchmark_details.sampled_nonces;
if sampled_nonces != proof_nonces || sampled_nonces.len() != merkle_proofs.len() {
return Err(anyhow!(
"Invalid merkle proofs. Does not match sampled nonces"
));
}
// verify merkle_proofs
let mut verification_result = Ok(());
let max_branch_len = (64 - (*precommit_details.num_nonces - 1).leading_zeros()) as usize;
for merkle_proof in merkle_proofs.iter() {
if merkle_proof.branch.0.len() > max_branch_len
|| merkle_proof
.branch
.0
.iter()
.any(|(d, _)| *d as usize > max_branch_len)
{
return Err(ProtocolError::InvalidMerkleProof {
nonce: merkle_proof.leaf.nonce.clone(),
});
}
let output_meta_data = OutputMetaData::from(merkle_proof.leaf.clone());
let hash = MerkleHash::from(output_meta_data);
let result = merkle_proof
.branch
.calc_merkle_root(&hash, merkle_proof.leaf.nonce as usize);
if !result
.is_ok_and(|actual_merkle_root| actual_merkle_root == benchmark_details.merkle_root)
{
verification_result = Err(ProtocolError::InvalidMerkleProof {
nonce: merkle_proof.leaf.nonce.clone(),
});
}
}
if verification_result.is_ok() {
for p in merkle_proofs.iter() {
if ctx
.verify_solution(&settings, p.leaf.nonce, &p.leaf.solution)
.await
.unwrap_or_else(|e| panic!("verify_solution error: {:?}", e))
.is_err()
{
verification_result = Err(ProtocolError::InvalidSolution {
nonce: p.leaf.nonce,
});
}
}
};
ctx.confirm_proof(benchmark_id, merkle_proofs)
.await
.unwrap_or_else(|e| panic!("add_proof_to_mempool error: {:?}", e));
if let Err(e) = verification_result {
submit_fraud(benchmark_id, e.to_string())
.await
.unwrap_or_else(|e| panic!("add_fraud_to_mempool error: {:?}", e));
return Ok(Err(e.to_string()));
}
Ok(Ok(()))
}
#[time]
pub(crate) async fn submit_fraud<T: Context>(
    ctx: &T,
    player_id: String,
    benchmark_id: String,
    allegation: String,
) -> ProtocolResult<Result<(), String>> {
    // Records a fraud allegation against a benchmark.
    //
    // NOTE(review): unimplemented stub — the original body was empty, which
    // does not compile for a non-unit return type. Wire this up to the
    // Context fraud API once it exists.
    let _ = (ctx, player_id, benchmark_id, allegation);
    todo!("record fraud allegation via the context")
}
// update active benchmarks

View File

@ -0,0 +1,124 @@
#[time]
async fn update_solution_signature_thresholds(block: &Block, cache: &mut AddBlockCache) {
    // Adjusts each challenge's solution-signature threshold so the observed
    // solution rate tracks a target rate, smoothed by threshold_decay.
    let config = block.config();
    let confirmed_proof_ids = &block.data().confirmed_proof_ids;
    // total solutions per player per challenge, and the subset that is new
    // this block (benchmarks whose proofs were just confirmed)
    let mut num_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u32>>::new();
    let mut new_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u32>>::new();
    for (benchmark_id, (settings, num_solutions)) in cache.active_solutions.iter() {
        *num_solutions_by_player_by_challenge
            .entry(settings.player_id.clone())
            .or_default()
            .entry(settings.challenge_id.clone())
            .or_default() += *num_solutions;
        if confirmed_proof_ids.contains(benchmark_id) {
            *new_solutions_by_player_by_challenge
                .entry(settings.player_id.clone())
                .or_default()
                .entry(settings.challenge_id.clone())
                .or_default() += *num_solutions;
        }
    }
    let mut solutions_rate_by_challenge = HashMap::<String, u32>::new();
    for (player_id, new_solutions_by_challenge) in new_solutions_by_player_by_challenge.iter() {
        let cutoff = *cache
            .active_players
            .get(player_id)
            .unwrap()
            .block_data()
            .cutoff();
        for (challenge_id, new_solutions) in new_solutions_by_challenge.iter() {
            let num_solutions =
                num_solutions_by_player_by_challenge[player_id][challenge_id].clone();
            // only count new solutions that fall within the player's cutoff;
            // solutions beyond the cutoff do not contribute to the rate
            *solutions_rate_by_challenge
                .entry(challenge_id.clone())
                .or_default() +=
                new_solutions.saturating_sub(num_solutions - cutoff.min(num_solutions));
        }
    }
    for challenge in cache.active_challenges.values_mut() {
        let max_threshold = u32::MAX as f64;
        // previous threshold, defaulting to the maximum for new challenges
        let current_threshold = match &cache.prev_challenges.get(&challenge.id).unwrap().block_data
        {
            Some(data) => *data.solution_signature_threshold() as f64,
            None => max_threshold,
        };
        let current_rate = *solutions_rate_by_challenge.get(&challenge.id).unwrap_or(&0) as f64;
        // equilibrium: qualifier pool fully replaced once per lifespan period
        let equilibrium_rate = config.qualifiers.total_qualifiers_threshold as f64
            / config.benchmark_submissions.lifespan_period as f64;
        let target_rate = config.solution_signature.equilibrium_rate_multiplier * equilibrium_rate;
        // scale the threshold proportionally toward the target rate; a zero
        // observed rate relaxes the threshold fully
        let target_threshold = if current_rate == 0.0 {
            max_threshold
        } else {
            (current_threshold * target_rate / current_rate).clamp(0.0, max_threshold)
        };
        // exponential smoothing between the current and target thresholds
        let threshold_decay = config.solution_signature.threshold_decay.unwrap_or(0.99);
        let block_data = challenge.block_data.as_mut().unwrap();
        block_data.solution_signature_threshold = Some(
            (current_threshold * threshold_decay + target_threshold * (1.0 - threshold_decay))
                .clamp(0.0, max_threshold) as u32,
        );
    }
}
#[time]
async fn update_fees(block: &Block, cache: &mut AddBlockCache) {
    // Moves each challenge's base precommit fee toward a target submission
    // volume: fees rise when mempool precommits exceed the target and fall
    // otherwise, with the per-block change capped at max_fee_percentage_delta
    // and the fee floored at min_base_fee.
    let config = block.config();
    let PrecommitSubmissionsConfig {
        min_base_fee,
        min_per_nonce_fee,
        target_num_precommits,
        max_fee_percentage_delta,
        ..
    } = config.precommit_submissions();
    // count pending precommits per challenge
    let num_precommits_by_challenge = cache.mempool_precommits.iter().fold(
        HashMap::<String, u32>::new(),
        |mut map, precommit| {
            *map.entry(precommit.settings.challenge_id.clone())
                .or_default() += 1;
            map
        },
    );
    let target_num_precommits = PreciseNumber::from(*target_num_precommits);
    let max_fee_percent_delta = PreciseNumber::from_f64(*max_fee_percentage_delta);
    let one = PreciseNumber::from(1);
    let zero = PreciseNumber::from(0);
    for challenge in cache.active_challenges.values_mut() {
        let num_precommits = PreciseNumber::from(
            num_precommits_by_challenge
                .get(&challenge.id)
                .unwrap_or(&0)
                .clone(),
        );
        // |num/target - 1|, capped at the configured maximum delta
        let mut percent_delta = num_precommits / target_num_precommits;
        if num_precommits >= target_num_precommits {
            percent_delta = percent_delta - one;
        } else {
            percent_delta = one - percent_delta;
        }
        if percent_delta > max_fee_percent_delta {
            percent_delta = max_fee_percent_delta;
        }
        // previous base fee (zero for challenges with no prior block data)
        let current_base_fee =
            match &cache.prev_challenges.get(&challenge.id).unwrap().block_data {
                Some(data) => data.base_fee.as_ref().unwrap_or(&zero),
                None => &zero,
            }
            .clone();
        // scale the fee up when over target, down when under
        let mut base_fee = if num_precommits >= target_num_precommits {
            current_base_fee * (one + percent_delta)
        } else {
            current_base_fee * (one - percent_delta)
        };
        if base_fee < *min_base_fee {
            base_fee = *min_base_fee;
        }
        let block_data = challenge.block_data.as_mut().unwrap();
        block_data.base_fee = Some(base_fee);
        // per-nonce fee is currently fixed at its configured minimum
        block_data.per_nonce_fee = Some(min_per_nonce_fee.clone());
    }
}

View File

@ -0,0 +1,3 @@
pub mod algorithms;
pub mod benchmarks;
pub mod players;

View File

@ -0,0 +1,382 @@
#[time]
async fn update_cutoffs(block: &Block, cache: &mut AddBlockCache) {
    // Computes each active player's qualifier cutoff: cutoff_multiplier times
    // their weakest per-challenge solution count, floored at min_cutoff.
    // While a newly pushed algorithm's challenge is phasing in, the cutoff is
    // linearly blended with one computed over established challenges only.
    let config = block.config();
    // challenges whose algorithms were all pushed this round (still phasing in)
    let mut phase_in_challenge_ids: HashSet<String> =
        cache.active_challenges.keys().cloned().collect();
    for algorithm in cache.active_algorithms.values() {
        if algorithm
            .state()
            .round_pushed
            .is_some_and(|r| r + 1 <= block.details.round)
        {
            phase_in_challenge_ids.remove(&algorithm.details.challenge_id);
        }
    }
    // total active solutions per player per challenge
    let mut num_solutions_by_player_by_challenge = HashMap::<String, HashMap<String, u32>>::new();
    for (settings, num_solutions) in cache.active_solutions.values() {
        *num_solutions_by_player_by_challenge
            .entry(settings.player_id.clone())
            .or_default()
            .entry(settings.challenge_id.clone())
            .or_default() += *num_solutions;
    }
    // FIX: these values are independent of the player — compute them once
    // instead of on every iteration of the loop below.
    let phase_in_start = (block.details.round - 1) * config.rounds.blocks_per_round;
    let phase_in_period = config.qualifiers.cutoff_phase_in_period.unwrap();
    let phase_in_end = phase_in_start + phase_in_period;
    let min_cutoff = config.qualifiers.min_cutoff.clone().unwrap();
    for (player_id, num_solutions_by_challenge) in num_solutions_by_player_by_challenge.iter() {
        let data = cache
            .active_players
            .get_mut(player_id)
            .unwrap()
            .block_data
            .as_mut()
            .unwrap();
        // the player's weakest challenge determines the cutoff
        let min_num_solutions = cache
            .active_challenges
            .keys()
            .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone())
            .min()
            .unwrap();
        let mut cutoff = min_cutoff
            .max((min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil() as u32);
        if !phase_in_challenge_ids.is_empty() && phase_in_end > block.details.height {
            // same computation restricted to established challenges.
            // NOTE(review): panics if every active challenge is phasing in —
            // confirm that cannot happen.
            let phase_in_min_num_solutions = cache
                .active_challenges
                .keys()
                .filter(|&id| !phase_in_challenge_ids.contains(id))
                .map(|id| num_solutions_by_challenge.get(id).unwrap_or(&0).clone())
                .min()
                .unwrap();
            let phase_in_cutoff = min_cutoff.max(
                (phase_in_min_num_solutions as f64 * config.qualifiers.cutoff_multiplier).ceil()
                    as u32,
            );
            // weight shifts linearly from the phase-in cutoff to the full one
            // over the phase-in period
            let phase_in_weight =
                (phase_in_end - block.details.height) as f64 / phase_in_period as f64;
            cutoff = (phase_in_cutoff as f64 * phase_in_weight
                + cutoff as f64 * (1.0 - phase_in_weight)) as u32;
        }
        data.cutoff = Some(cutoff);
    }
}
fn find_smallest_range_dimension(points: &Frontier) -> usize {
    // Of the two difficulty dimensions, return the one whose values span the
    // narrowest (max - min) range; ties resolve to dimension 0.
    let range_of = |d: usize| {
        let mut lo = i32::MAX;
        let mut hi = i32::MIN;
        for point in points.iter() {
            lo = lo.min(point[d]);
            hi = hi.max(point[d]);
        }
        hi - lo
    };
    if range_of(0) <= range_of(1) {
        0
    } else {
        1
    }
}
// Repeatedly peels Pareto frontiers off a set of 2-D difficulty points.
// Points are bucketed by their value in the dimension with the smallest
// range; each pass gathers every bucket's current head, computes the Pareto
// frontier of those heads, removes them from their buckets, and records the
// frontier. With `only_one` set, only the first frontier is returned.
fn pareto_algorithm(points: Frontier, only_one: bool) -> Vec<Frontier> {
    if points.is_empty() {
        return Vec::new();
    }
    let dimension = find_smallest_range_dimension(&points);
    // candidates within a bucket are ordered along the other dimension
    let sort_dimension = 1 - dimension;
    let mut buckets: HashMap<i32, Vec<Point>> = HashMap::new();
    for point in points {
        buckets.entry(point[dimension]).or_default().push(point);
    }
    for (_, group) in buckets.iter_mut() {
        // sort descending
        group.sort_unstable_by(|a, b| b[sort_dimension].cmp(&a[sort_dimension]));
    }
    let mut result = Vec::new();
    while !buckets.is_empty() {
        // each bucket's head (group[0]) is its best remaining candidate
        let points: HashSet<Point> = buckets.values().map(|group| group[0].clone()).collect();
        let frontier = points.pareto_frontier();
        // pop every point that made the frontier; drop emptied buckets
        for point in frontier.iter() {
            let bucket = buckets.get_mut(&point[dimension]).unwrap();
            bucket.remove(0);
            if bucket.is_empty() {
                buckets.remove(&point[dimension]);
            }
        }
        result.push(frontier);
        if only_one {
            break;
        }
    }
    result
}
#[time]
async fn update_qualifiers(block: &Block, cache: &mut AddBlockCache) {
    // Recomputes qualifier counts for the block. Active solutions are ranked
    // by the Pareto-frontier index of their difficulty (frontiers as peeled
    // by pareto_algorithm — presumably best difficulties first; confirm
    // orientation), then counted as qualifiers per player / algorithm /
    // challenge. Counting stops once a challenge exceeds its total qualifier
    // threshold at a frontier boundary; each player contributes at most their
    // cutoff across all of a challenge's solutions.
    let config = block.config();
    let mut solutions_by_challenge = HashMap::<String, Vec<(&BenchmarkSettings, &u32)>>::new();
    for (settings, num_solutions) in cache.active_solutions.values() {
        solutions_by_challenge
            .entry(settings.challenge_id.clone())
            .or_default()
            .push((settings, num_solutions));
    }
    // reset all per-block qualifier fields before recounting
    let mut max_qualifiers_by_player = HashMap::<String, u32>::new();
    for challenge in cache.active_challenges.values_mut() {
        let block_data = challenge.block_data.as_mut().unwrap();
        block_data.num_qualifiers = Some(0);
        block_data.qualifier_difficulties = Some(HashSet::new());
    }
    for algorithm in cache.active_algorithms.values_mut() {
        let block_data = algorithm.block_data.as_mut().unwrap();
        block_data.num_qualifiers_by_player = Some(HashMap::new());
    }
    for player in cache.active_players.values_mut() {
        let block_data = player.block_data.as_mut().unwrap();
        max_qualifiers_by_player.insert(player.id.clone(), *block_data.cutoff());
        block_data.num_qualifiers_by_challenge = Some(HashMap::new());
    }
    for (challenge_id, challenge) in cache.active_challenges.iter_mut() {
        if !solutions_by_challenge.contains_key(challenge_id) {
            continue;
        }
        let solutions = solutions_by_challenge.get_mut(challenge_id).unwrap();
        let points = solutions
            .iter()
            .map(|(settings, _)| settings.difficulty.clone())
            .collect::<Frontier>();
        // map each difficulty point to the index of the frontier it sits on
        let mut frontier_indexes = HashMap::<Point, usize>::new();
        for (frontier_index, frontier) in pareto_algorithm(points, false).into_iter().enumerate() {
            for point in frontier {
                frontier_indexes.insert(point, frontier_index);
            }
        }
        // process solutions frontier by frontier
        solutions.sort_by(|(a_settings, _), (b_settings, _)| {
            let a_index = frontier_indexes[&a_settings.difficulty];
            let b_index = frontier_indexes[&b_settings.difficulty];
            a_index.cmp(&b_index)
        });
        // per-challenge copy: each player's remaining cutoff budget
        let mut max_qualifiers_by_player = max_qualifiers_by_player.clone();
        let mut curr_frontier_index = 0;
        let challenge_data = challenge.block_data.as_mut().unwrap();
        for (settings, &num_solutions) in solutions.iter() {
            let BenchmarkSettings {
                player_id,
                algorithm_id,
                challenge_id,
                difficulty,
                ..
            } = settings;
            // stop at a frontier boundary once the threshold is exceeded
            if curr_frontier_index != frontier_indexes[difficulty]
                && *challenge_data.num_qualifiers() > config.qualifiers.total_qualifiers_threshold
            {
                break;
            }
            // skip solutions whose difficulty is outside the configured range
            let difficulty_parameters = &config.difficulty.parameters[challenge_id];
            let min_difficulty = difficulty_parameters.min_difficulty();
            let max_difficulty = difficulty_parameters.max_difficulty();
            if (0..difficulty.len())
                .into_iter()
                .any(|i| difficulty[i] < min_difficulty[i] || difficulty[i] > max_difficulty[i])
            {
                continue;
            }
            curr_frontier_index = frontier_indexes[difficulty];
            let player_data = cache
                .active_players
                .get_mut(player_id)
                .unwrap()
                .block_data
                .as_mut()
                .unwrap();
            let algorithm_data = cache
                .active_algorithms
                .get_mut(algorithm_id)
                .unwrap()
                .block_data
                .as_mut()
                .unwrap();
            // qualifiers are capped by the player's remaining cutoff budget
            let max_qualifiers = max_qualifiers_by_player.get(player_id).unwrap().clone();
            let num_qualifiers = num_solutions.min(max_qualifiers);
            max_qualifiers_by_player.insert(player_id.clone(), max_qualifiers - num_qualifiers);
            if num_qualifiers > 0 {
                *player_data
                    .num_qualifiers_by_challenge
                    .as_mut()
                    .unwrap()
                    .entry(challenge_id.clone())
                    .or_default() += num_qualifiers;
                *algorithm_data
                    .num_qualifiers_by_player
                    .as_mut()
                    .unwrap()
                    .entry(player_id.clone())
                    .or_default() += num_qualifiers;
                *challenge_data.num_qualifiers.as_mut().unwrap() += num_qualifiers;
            }
            // the difficulty is recorded even when the budget was exhausted
            challenge_data
                .qualifier_difficulties
                .as_mut()
                .unwrap()
                .insert(difficulty.clone());
        }
    }
}
#[time]
async fn update_frontiers(block: &Block, cache: &mut AddBlockCache) {
    // Rebuilds each challenge's base and scaled difficulty frontiers from
    // this block's qualifier difficulties, with the scaling factor derived
    // from how far the qualifier count is above/below the threshold.
    let config = block.config();
    for challenge in cache.active_challenges.values_mut() {
        let block_data = challenge.block_data.as_mut().unwrap();
        let difficulty_parameters = &config.difficulty.parameters[&challenge.id];
        let min_difficulty = difficulty_parameters.min_difficulty();
        let max_difficulty = difficulty_parameters.max_difficulty();
        let points = block_data
            .qualifier_difficulties()
            .iter()
            .map(|d| d.iter().map(|x| -x).collect()) // mirror the points so easiest difficulties are first
            .collect::<Frontier>();
        let (base_frontier, scaling_factor, scaled_frontier) = if points.len() == 0 {
            // no qualifiers yet: frontier collapses to the minimum difficulty
            let base_frontier: Frontier = vec![min_difficulty.clone()].into_iter().collect();
            let scaling_factor = 0.0;
            let scaled_frontier = base_frontier.clone();
            (base_frontier, scaling_factor, scaled_frontier)
        } else {
            // base frontier = first Pareto frontier of the mirrored points,
            // mirrored back and extended to the configured difficulty bounds
            let base_frontier = pareto_algorithm(points, true)
                .pop()
                .unwrap()
                .into_iter()
                .map(|d| d.into_iter().map(|x| -x).collect())
                .collect::<Frontier>() // mirror the points back;
                .extend(&min_difficulty, &max_difficulty);
            // qualifier surplus/deficit relative to the threshold, capped at
            // the configured maximum
            let scaling_factor = (*block_data.num_qualifiers() as f64
                / config.qualifiers.total_qualifiers_threshold as f64)
                .min(config.difficulty.max_scaling_factor);
            let scaled_frontier = base_frontier
                .scale(&min_difficulty, &max_difficulty, scaling_factor)
                .extend(&min_difficulty, &max_difficulty);
            (base_frontier, scaling_factor, scaled_frontier)
        };
        block_data.base_frontier = Some(base_frontier);
        block_data.scaled_frontier = Some(scaled_frontier);
        block_data.scaling_factor = Some(scaling_factor);
    }
}
#[time]
async fn update_influence(block: &Block, cache: &mut AddBlockCache) {
    // Computes each active player's influence: the mean of their per-challenge
    // qualifier fractions (optionally including a capped deposit fraction as
    // an extra pseudo-challenge), penalized by how imbalanced those fractions
    // are (squared coefficient of variation), then normalised across players.
    let config = block.config();
    let active_player_ids = &block.data().active_player_ids;
    if active_player_ids.len() == 0 {
        return;
    }
    let mut num_qualifiers_by_challenge = HashMap::<String, u32>::new();
    for challenge in cache.active_challenges.values() {
        num_qualifiers_by_challenge.insert(
            challenge.id.clone(),
            *challenge.block_data().num_qualifiers(),
        );
    }
    let total_deposit = cache
        .active_players
        .values()
        .map(|p| p.block_data().deposit.clone().unwrap())
        .sum::<PreciseNumber>();
    let zero = PreciseNumber::from(0);
    let one = PreciseNumber::from(1);
    let imbalance_multiplier =
        PreciseNumber::from_f64(config.optimisable_proof_of_work.imbalance_multiplier);
    let num_challenges = PreciseNumber::from(cache.active_challenges.len());
    let mut weights = Vec::<PreciseNumber>::new();
    for player_id in active_player_ids.iter() {
        let data = cache
            .active_players
            .get_mut(player_id)
            .unwrap()
            .block_data
            .as_mut()
            .unwrap();
        // the player's share of qualifiers on each challenge
        let mut percent_qualifiers = Vec::<PreciseNumber>::new();
        for challenge_id in cache.active_challenges.keys() {
            let num_qualifiers = num_qualifiers_by_challenge[challenge_id];
            let num_qualifiers_by_player = *data
                .num_qualifiers_by_challenge()
                .get(challenge_id)
                .unwrap_or(&0);
            percent_qualifiers.push(if num_qualifiers_by_player == 0 {
                PreciseNumber::from(0)
            } else {
                PreciseNumber::from(num_qualifiers_by_player) / PreciseNumber::from(num_qualifiers)
            });
        }
        let OptimisableProofOfWorkConfig {
            avg_percent_qualifiers_multiplier,
            enable_proof_of_deposit,
            ..
        } = &config.optimisable_proof_of_work;
        if enable_proof_of_deposit.is_some_and(|x| x) {
            // deposit counts like an extra challenge, but its contribution is
            // capped at a multiple of the player's average qualifier share
            let max_percent_rolling_deposit =
                PreciseNumber::from_f64(avg_percent_qualifiers_multiplier.clone().unwrap())
                    * percent_qualifiers.arithmetic_mean();
            let percent_rolling_deposit = if total_deposit == zero {
                zero.clone()
            } else {
                data.deposit.clone().unwrap() / total_deposit
            };
            let qualifying_percent_rolling_deposit =
                if percent_rolling_deposit > max_percent_rolling_deposit {
                    max_percent_rolling_deposit.clone()
                } else {
                    percent_rolling_deposit
                };
            percent_qualifiers.push(qualifying_percent_rolling_deposit.clone());
            data.qualifying_percent_rolling_deposit = Some(qualifying_percent_rolling_deposit);
        }
        // imbalance = CV^2 / (n - 1); penalty approaches 1 as imbalance grows
        let mean = percent_qualifiers.arithmetic_mean();
        let variance = percent_qualifiers.variance();
        let cv_sqr = if mean == zero {
            zero.clone()
        } else {
            variance / (mean * mean)
        };
        let imbalance = cv_sqr / (num_challenges - one);
        let imbalance_penalty =
            one - PreciseNumber::approx_inv_exp(imbalance_multiplier * imbalance);
        weights.push(mean * (one - imbalance_penalty));
        data.imbalance = Some(imbalance);
        data.imbalance_penalty = Some(imbalance_penalty);
    }
    // influence = each player's weight as a fraction of the total
    let influences = weights.normalise();
    for (player_id, &influence) in active_player_ids.iter().zip(influences.iter()) {
        let data = cache
            .active_players
            .get_mut(player_id)
            .unwrap()
            .block_data
            .as_mut()
            .unwrap();
        data.influence = Some(influence);
    }
}

View File

@ -0,0 +1,168 @@
use crate::{context::*, error::*};
use logging_timer::time;
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::*;
#[time]
pub(crate) async fn submit_topup<T: Context>(
    ctx: &T,
    player_id: String,
    tx_hash: String,
    event_log_idx: u32,
    amount: PreciseNumber,
    verify_event_log: bool,
) -> ProtocolResult<()> {
    // Records a top-up of a player's fee balance backed by an on-chain burn
    // transaction. When `verify_event_log` is set, the transaction is checked
    // for duplication, sender, receiver (burn address) and amount before
    // being confirmed.
    if verify_event_log {
        let block = ctx
            .get_block(BlockFilter::LastConfirmed, false)
            .await
            .unwrap_or_else(|e| panic!("get_block error: {:?}", e))
            .expect("No latest block found");
        // reject a tx_hash that was already used for a top-up
        if ctx
            .get_topups(TopUpsFilter::Id(tx_hash.clone()))
            .await
            .unwrap_or_else(|e| panic!("get_topups error: {:?}", e))
            .first()
            .is_some()
        {
            return Err(ProtocolError::DuplicateTransaction {
                tx_hash: tx_hash.clone(),
            });
        }
        let transaction =
            ctx.get_transaction(&tx_hash)
                .await
                .map_err(|_| ProtocolError::InvalidTransaction {
                    tx_hash: tx_hash.clone(),
                })?;
        // FIX: there was no `player` binding in scope; the submitter is
        // identified by the `player_id` argument.
        if player_id != transaction.sender {
            return Err(ProtocolError::InvalidTransactionSender {
                tx_hash: tx_hash.clone(),
                expected_sender: player_id.clone(),
                actual_sender: transaction.sender.clone(),
            });
        }
        let burn_address = block.config().erc20.burn_address.clone();
        if transaction.receiver != burn_address {
            return Err(ProtocolError::InvalidTransactionReceiver {
                tx_hash: tx_hash.clone(),
                expected_receiver: burn_address,
                actual_receiver: transaction.receiver.clone(),
            });
        }
        let expected_amount = block.config().precommit_submissions().topup_amount.clone();
        if transaction.amount != expected_amount {
            return Err(ProtocolError::InvalidTransactionAmount {
                tx_hash: tx_hash.clone(),
                expected_amount: jsonify(&expected_amount),
                actual_amount: jsonify(&transaction.amount),
            });
        }
    };
    // NOTE(review): `event_log_idx` is currently unused — confirm whether it
    // should participate in the duplicate check.
    let _ = event_log_idx;
    // FIX: `player` and `topup_amount` were undefined; confirm using the
    // caller-supplied `player_id` and `amount`.
    ctx.confirm_topup(
        &tx_hash,
        TopUpDetails {
            player_id: player_id.clone(),
            amount,
        },
    )
    .await;
    Ok(())
}
#[time]
pub(crate) async fn submit_deposit<T: Context>(
    ctx: &T,
    player_id: String,
    tx_hash: String,
    log_idx: u32,
    amount: PreciseNumber,
    start_timestamp: u64,
    end_timestamp: u64,
    verify_event_log: bool,
) -> ProtocolResult<()> {
    // Records a locked deposit for `player_id` covering the window
    // [start_timestamp, end_timestamp].
    //
    // NOTE(review): event-log verification is not implemented yet — the
    // original gated it on an undefined `skip_verification` flag. Mirror the
    // checks in `submit_topup` (duplicate, sender, receiver, amount) once
    // implemented.
    if verify_event_log {
        // TODO: verify tx_hash / log_idx / amount / lock window on-chain.
        let _ = (log_idx, start_timestamp, end_timestamp);
    }
    // FIX: `player` and `topup_amount` were undefined; use the function's own
    // arguments.
    // NOTE(review): reusing `TopUpDetails` for a deposit looks like a
    // placeholder — confirm the intended details type.
    ctx.confirm_deposit(
        &tx_hash,
        TopUpDetails {
            player_id: player_id.clone(),
            amount,
        },
    )
    .await;
    Ok(())
}
#[time]
pub(crate) async fn submit_vote<T: Context>(
    ctx: &T,
    player_id: String,
    breakthrough_id: String,
    yes_vote: bool,
) -> ProtocolResult<()> {
    // Skeleton for breakthrough voting — the listed checks and the vote
    // confirmation are not implemented yet, so this currently only fetches
    // state and returns Ok(()) (or panics if the lookups fail).
    // NOTE(review): "lastest" is a typo for "latest".
    let lastest_block_id = ctx.get_block_id(BlockFilter::LastConfirmed).await.unwrap();
    let breakthrough = ctx.get_breakthrough_state(&breakthrough_id).await.unwrap();
    // check breakthrough exists
    // check breakthrough is voteable
    // check player hasnt already voted
    // check player has deposit
    let player_data = ctx
        .get_player_block_data(&player_id, &lastest_block_id)
        .await
        .unwrap();
    // confirm vote
    Ok(())
}
#[time]
pub(crate) async fn submit_delegate<T: Context>(
    ctx: &T,
    player_id: String,
    delegatee: String,
) -> ProtocolResult<()> {
    // Skeleton for delegation — none of the listed checks or updates are
    // implemented yet, so this currently accepts every request.
    // check any player_block_data.deposit_by_rounds is non-zero
    // check block_confirmed of last delegate + period_between_redelegate < curr_block.height
    // update player_state.delegatee
    // confirm delegate
    Ok(())
}
// update_deposits
#[time]
async fn update_deposits<T: Context>(ctx: &T, block: &Block, cache: &mut AddBlockCache) {
    // Updates each active player's deposit figures: an exponentially decayed
    // rolling deposit plus the raw deposit at the block's ETH height.
    let decay = match &block
        .config()
        .optimisable_proof_of_work
        .rolling_deposit_decay
    {
        Some(decay) => PreciseNumber::from_f64(*decay),
        None => return, // Proof of deposit not implemented for these blocks
    };
    let eth_block_num = block.details.eth_block_num();
    let zero = PreciseNumber::from(0);
    let one = PreciseNumber::from(1);
    for player in cache.active_players.values_mut() {
        // carry over last block's rolling deposit (zero for new players)
        let rolling_deposit = match &cache.prev_players.get(&player.id).unwrap().block_data {
            Some(data) => data.rolling_deposit.clone(),
            None => None,
        }
        .unwrap_or_else(|| zero.clone());
        let data = player.block_data.as_mut().unwrap();
        // NOTE(review): one awaited lookup per player, executed serially —
        // consider batching if the active player set grows large
        let deposit = ctx
            .get_player_deposit(eth_block_num, &player.id)
            .await
            .unwrap()
            .unwrap_or_else(|| zero.clone());
        // exponential moving average: decay * old + (1 - decay) * current
        data.rolling_deposit = Some(decay * rolling_deposit + (one - decay) * deposit);
        data.deposit = Some(deposit);
        data.qualifying_percent_rolling_deposit = Some(zero.clone());
    }
}

View File

@ -0,0 +1,64 @@
#[time]
async fn update_innovator_rewards(block: &Block, cache: &mut AddBlockCache) {
    // Splits the innovator share of the block reward equally between
    // challenges that have eligible algorithms, then divides each challenge's
    // share between its eligible algorithms pro-rata by adoption.
    //
    // An algorithm is eligible when it is not banned and either meets the
    // adoption threshold or is already merged with non-zero adoption.
    let config = block.config();
    let adoption_threshold =
        PreciseNumber::from_f64(config.algorithm_submissions.adoption_threshold);
    let zero = PreciseNumber::from(0);
    let mut eligible_algorithms_by_challenge = HashMap::<String, Vec<&mut Algorithm>>::new();
    for algorithm in cache.active_algorithms.values_mut() {
        let is_merged = algorithm.state().round_merged.is_some();
        let is_banned = algorithm.state().banned.clone();
        let data = algorithm.block_data.as_mut().unwrap();
        // every active algorithm starts the block with a zero reward
        data.reward = Some(zero.clone());
        if !is_banned
            && (*data.adoption() >= adoption_threshold || (is_merged && *data.adoption() > zero))
        {
            eligible_algorithms_by_challenge
                .entry(algorithm.details.challenge_id.clone())
                .or_default()
                .push(algorithm);
        }
    }
    if eligible_algorithms_by_challenge.is_empty() {
        return;
    }
    let reward_pool_per_challenge = PreciseNumber::from_f64(get_block_reward(block))
        * PreciseNumber::from_f64(config.rewards.distribution.optimisations)
        / PreciseNumber::from(eligible_algorithms_by_challenge.len());
    // FIX: removed a redundant re-declaration of `zero` that shadowed the
    // identical binding above.
    for algorithms in eligible_algorithms_by_challenge.values_mut() {
        let mut total_adoption = zero.clone();
        for algorithm in algorithms.iter() {
            total_adoption = total_adoption + algorithm.block_data().adoption();
        }
        for algorithm in algorithms.iter_mut() {
            let data = algorithm.block_data.as_mut().unwrap();
            let adoption = *data.adoption();
            data.reward = Some(reward_pool_per_challenge * adoption / total_adoption);
        }
    }
}
#[time]
async fn update_benchmarker_rewards(block: &Block, cache: &mut AddBlockCache) {
    // Pay each active player their influence-weighted share of the
    // benchmarker portion of this block's reward.
    let config = block.config();
    let pool = PreciseNumber::from_f64(get_block_reward(block))
        * PreciseNumber::from_f64(config.rewards.distribution.benchmarkers);
    for player in cache.active_players.values_mut() {
        let data = player.block_data.as_mut().unwrap();
        let share = *data.influence();
        data.reward = Some(share * pool);
    }
}
/*
delegator rewards
breakthrough rewards
*/

View File

@ -1,12 +1,8 @@
mod add_block;
pub mod context;
mod contracts;
mod error;
mod submit_algorithm;
mod submit_benchmark;
mod submit_precommit;
mod submit_proof;
mod submit_topup;
mod verify_proof;
mod protocol;
use context::*;
pub use error::*;
use std::collections::HashSet;
@ -27,7 +23,7 @@ impl<'a, T: Context> Protocol<T> {
details: AlgorithmDetails,
code: String,
) -> ProtocolResult<String> {
submit_algorithm::execute(&self.ctx, player, details, code).await
algorithms::execute(&self.ctx, player, details, code).await
}
pub async fn submit_precommit(
@ -46,7 +42,7 @@ impl<'a, T: Context> Protocol<T> {
merkle_root: MerkleHash,
solution_nonces: HashSet<u64>,
) -> ProtocolResult<()> {
submit_benchmark::execute(
benchmarks::execute(
&self.ctx,
player,
benchmark_id,
@ -66,7 +62,7 @@ impl<'a, T: Context> Protocol<T> {
}
pub async fn submit_topup(&self, player: &Player, tx_hash: String) -> ProtocolResult<()> {
submit_topup::execute(&self.ctx, player, tx_hash).await
players::execute(&self.ctx, player, tx_hash).await
}
pub async fn verify_proof(&self, benchmark_id: &String) -> ProtocolResult<Result<(), String>> {

View File

@ -0,0 +1,24 @@
use super::contracts::*;
// NOTE(review): design sketch only — this does not compile. `ctx` is
// undefined and the bare `*::update` paths below are placeholders for the
// per-contract update entry points described by the adjacent comments.
async fn add_block() {
    // clone of player_state; internally sets round_merged, etc
    let cache = ctx.build_block_cache().await;
    // filter active benchmarks
    benchmarks::update
    // deposit calcs
    players.update
    // calc influence
    opow.update
    // calc adoption
    algorithms.update
    // calc fees, solution signature
    challenges.update
    // calc rewards
    rewards.update
}

View File

@ -1,110 +0,0 @@
use crate::{context::*, error::*};
use logging_timer::time;
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::*;
#[time]
pub(crate) async fn execute<T: Context>(
    ctx: &T,
    player: &Player,
    details: AlgorithmDetails,
    code: String,
) -> ProtocolResult<String> {
    // Validate the submission, queue the algorithm in the mempool, and hand
    // its newly assigned id back to the caller.
    verify_challenge_exists(ctx, &details).await?;
    verify_submission_fee(ctx, player, &details).await?;
    match ctx.add_algorithm_to_mempool(details, code).await {
        Ok(algorithm_id) => Ok(algorithm_id),
        Err(e) => panic!("add_algorithm_to_mempool error: {:?}", e),
    }
}
#[time]
async fn verify_challenge_exists<T: Context>(
    ctx: &T,
    details: &AlgorithmDetails,
) -> ProtocolResult<()> {
    // Ensure the submission targets a challenge that is active as of the
    // last confirmed block.
    let latest_block = ctx
        // FIX: query the last *confirmed* block for consistency with the
        // other call sites that migrated from BlockFilter::Latest.
        .get_block(BlockFilter::LastConfirmed, false)
        .await
        .unwrap_or_else(|e| panic!("get_block error: {:?}", e))
        .expect("Expecting latest block to exist");
    // the challenge must exist and have been active on or before this round
    if !ctx
        .get_challenges(ChallengesFilter::Id(details.challenge_id.clone()), None)
        .await
        .unwrap_or_else(|e| panic!("get_challenges error: {:?}", e))
        .first()
        .is_some_and(|c| {
            c.state()
                .round_active
                .as_ref()
                .is_some_and(|r| *r <= latest_block.details.round)
        })
    {
        return Err(ProtocolError::InvalidChallenge {
            challenge_id: details.challenge_id.clone(),
        });
    }
    Ok(())
}
#[time]
async fn verify_submission_fee<T: Context>(
    ctx: &T,
    player: &Player,
    details: &AlgorithmDetails,
) -> ProtocolResult<()> {
    // Verify the submission fee transaction: not previously used, sent by the
    // submitting player, addressed to the burn address, and for exactly the
    // configured fee amount.
    let block = ctx
        // FIX: query the last *confirmed* block for consistency with the
        // other call sites that migrated from BlockFilter::Latest.
        .get_block(BlockFilter::LastConfirmed, false)
        .await
        .unwrap_or_else(|e| panic!("get_block error: {:?}", e))
        .expect("No latest block found");
    // reject a tx_hash already attached to another algorithm
    if ctx
        .get_algorithms(
            AlgorithmsFilter::TxHash(details.tx_hash.clone()),
            None,
            false,
        )
        .await
        .unwrap_or_else(|e| panic!("get_algorithms error: {:?}", e))
        .first()
        .is_some()
    {
        return Err(ProtocolError::DuplicateTransaction {
            tx_hash: details.tx_hash.clone(),
        });
    }
    let transaction = ctx.get_transaction(&details.tx_hash).await.map_err(|_| {
        ProtocolError::InvalidTransaction {
            tx_hash: details.tx_hash.clone(),
        }
    })?;
    if player.id != transaction.sender {
        return Err(ProtocolError::InvalidTransactionSender {
            tx_hash: details.tx_hash.clone(),
            expected_sender: player.id.clone(),
            actual_sender: transaction.sender.clone(),
        });
    }
    let burn_address = block.config().erc20.burn_address.clone();
    if transaction.receiver != burn_address {
        return Err(ProtocolError::InvalidTransactionReceiver {
            tx_hash: details.tx_hash.clone(),
            expected_receiver: burn_address,
            actual_receiver: transaction.receiver.clone(),
        });
    }
    let expected_amount = block.config().algorithm_submissions.submission_fee;
    if transaction.amount != expected_amount {
        return Err(ProtocolError::InvalidTransactionAmount {
            tx_hash: details.tx_hash.clone(),
            expected_amount: jsonify(&expected_amount),
            actual_amount: jsonify(&transaction.amount),
        });
    }
    Ok(())
}

View File

@ -1,85 +0,0 @@
use crate::{context::*, error::*};
use logging_timer::time;
use std::collections::HashSet;
use tig_structs::core::*;
#[time]
/// Entry point for a benchmark submission: checks for duplicates, verifies the
/// submitter owns the referenced precommit and that all claimed solution
/// nonces are in range, then queues the benchmark for block inclusion.
pub(crate) async fn execute<T: Context>(
    ctx: &T,
    player: &Player,
    benchmark_id: &String,
    merkle_root: MerkleHash,
    solution_nonces: HashSet<u64>,
) -> ProtocolResult<()> {
    verify_benchmark_not_already_submitted(ctx, benchmark_id).await?;
    let precommit = get_precommit_by_id(ctx, benchmark_id).await?;
    verify_benchmark_ownership(player, &precommit.settings)?;
    verify_nonces(&precommit, &solution_nonces)?;
    // Context errors are treated as unrecoverable infrastructure failures.
    ctx.add_benchmark_to_mempool(
        benchmark_id,
        BenchmarkDetails {
            num_solutions: solution_nonces.len() as u32,
            merkle_root: Some(merkle_root),
        },
        solution_nonces,
    )
    .await
    .unwrap_or_else(|e| panic!("add_benchmark_to_mempool error: {:?}", e));
    Ok(())
}
#[time]
/// Rejects resubmission: fails if a benchmark with this id already exists.
async fn verify_benchmark_not_already_submitted<T: Context>(
    ctx: &T,
    benchmark_id: &String,
) -> ProtocolResult<()> {
    let existing = ctx
        .get_benchmarks(BenchmarksFilter::Id(benchmark_id.clone()), false)
        .await
        .unwrap_or_else(|e| panic!("get_benchmarks error: {:?}", e));
    match existing.first() {
        Some(_) => Err(ProtocolError::DuplicateBenchmark {
            benchmark_id: benchmark_id.to_string(),
        }),
        None => Ok(()),
    }
}
#[time]
/// Fetches the precommit for `benchmark_id`, requiring a confirmed state.
async fn get_precommit_by_id<T: Context>(
    ctx: &T,
    benchmark_id: &String,
) -> ProtocolResult<Precommit> {
    let mut precommits = ctx
        .get_precommits(PrecommitsFilter::BenchmarkId(benchmark_id.clone()))
        .await
        .unwrap_or_else(|e| panic!("get_precommits error: {:?}", e));
    match precommits.pop() {
        Some(p) if p.state.is_some() => Ok(p),
        _ => Err(ProtocolError::InvalidPrecommit {
            benchmark_id: benchmark_id.clone(),
        }),
    }
}
#[time]
/// Only the player that created the precommit may submit its benchmark.
fn verify_benchmark_ownership(player: &Player, settings: &BenchmarkSettings) -> ProtocolResult<()> {
    if player.id == settings.player_id {
        Ok(())
    } else {
        Err(ProtocolError::InvalidSubmittingPlayer {
            actual_player_id: player.id.clone(),
            expected_player_id: settings.player_id.clone(),
        })
    }
}
#[time]
/// Validates that every claimed solution nonce falls within `[0, num_nonces)`
/// as committed in the precommit.
fn verify_nonces(precommit: &Precommit, solution_nonces: &HashSet<u64>) -> ProtocolResult<()> {
    // num_nonces is always set on stored precommits; unwrap states that invariant.
    let num_nonces = *precommit.details.num_nonces.as_ref().unwrap() as u64;
    // Idiomatic replacement for the manual loop: any out-of-range nonce
    // invalidates the whole submission.
    if let Some(&nonce) = solution_nonces.iter().find(|&&n| n >= num_nonces) {
        return Err(ProtocolError::InvalidBenchmarkNonce { nonce });
    }
    Ok(())
}

View File

@ -1,231 +0,0 @@
use crate::{context::*, error::*};
use logging_timer::time;
use tig_structs::core::*;
use tig_utils::*;
#[time]
/// Entry point for a precommit submission. Runs the full validation pipeline
/// (ownership, nonce count, block/challenge/algorithm validity, uniqueness,
/// difficulty within frontiers, fee balance) and, on success, queues the
/// precommit and returns its assigned benchmark id.
pub(crate) async fn execute<T: Context>(
    ctx: &T,
    player: &Player,
    settings: BenchmarkSettings,
    num_nonces: u32,
) -> ProtocolResult<String> {
    verify_player_owns_benchmark(player, &settings)?;
    verify_num_nonces(num_nonces)?;
    // The precommit is pinned to the block referenced in its settings.
    let block = get_block_by_id(ctx, &settings.block_id).await?;
    verify_sufficient_lifespan(ctx, &block).await?;
    let challenge = get_challenge_by_id(ctx, &settings.challenge_id, &block).await?;
    verify_algorithm(ctx, &settings.algorithm_id, &block).await?;
    verify_benchmark_settings_are_unique(ctx, &settings).await?;
    verify_benchmark_difficulty(&settings.difficulty, &challenge, &block)?;
    // Also checks the player's balance covers the fee before queuing.
    let fee_paid = get_fee_paid(&player, num_nonces, &challenge)?;
    let benchmark_id = ctx
        .add_precommit_to_mempool(
            settings,
            PrecommitDetails {
                block_started: block.details.height,
                num_nonces: Some(num_nonces),
                fee_paid: Some(fee_paid),
            },
        )
        .await
        .unwrap_or_else(|e| panic!("add_precommit_to_mempool error: {:?}", e));
    Ok(benchmark_id)
}
#[time]
/// The player id in the settings must be the submitting player.
fn verify_player_owns_benchmark(
    player: &Player,
    settings: &BenchmarkSettings,
) -> ProtocolResult<()> {
    if player.id == settings.player_id {
        Ok(())
    } else {
        Err(ProtocolError::InvalidSubmittingPlayer {
            actual_player_id: player.id.clone(),
            expected_player_id: settings.player_id.clone(),
        })
    }
}
#[time]
/// A precommit must reserve at least one nonce.
fn verify_num_nonces(num_nonces: u32) -> ProtocolResult<()> {
    match num_nonces {
        0 => Err(ProtocolError::InvalidNumNonces { num_nonces }),
        _ => Ok(()),
    }
}
#[time]
/// Rejects precommits pinned to a block so old that, scaled by the configured
/// submission-delay multiplier, the remaining lifespan would already be spent.
async fn verify_sufficient_lifespan<T: Context>(ctx: &T, block: &Block) -> ProtocolResult<()> {
    let latest_block = ctx
        .get_block(BlockFilter::Latest, false)
        .await
        .unwrap_or_else(|e| panic!("get_block error: {:?}", e))
        .expect("Expecting latest block to exist");
    let config = block.config();
    // +1 counts the block the submission itself will land in.
    let submission_delay = latest_block.details.height - block.details.height + 1;
    // Delay is penalized by (multiplier + 1); once the penalized delay reaches
    // the lifespan period the benchmark could never be confirmed in time.
    if (submission_delay as f64 * (config.benchmark_submissions.submission_delay_multiplier + 1.0))
        as u32
        >= config.benchmark_submissions.lifespan_period
    {
        return Err(ProtocolError::InsufficientLifespan);
    }
    Ok(())
}
#[time]
/// Looks up a challenge that is active in `block`; errors if it is missing
/// from the active set or cannot be fetched for that block.
async fn get_challenge_by_id<T: Context>(
    ctx: &T,
    challenge_id: &String,
    block: &Block,
) -> ProtocolResult<Challenge> {
    let invalid = || ProtocolError::InvalidChallenge {
        challenge_id: challenge_id.clone(),
    };
    if !block.data().active_challenge_ids.contains(challenge_id) {
        return Err(invalid());
    }
    let mut challenges = ctx
        .get_challenges(
            ChallengesFilter::Id(challenge_id.clone()),
            Some(BlockFilter::Id(block.id.clone())),
        )
        .await
        .unwrap_or_else(|e| panic!("get_challenges error: {:?}", e));
    if challenges.is_empty() {
        return Err(invalid());
    }
    // Take ownership of the first result (original cloned it via to_owned).
    Ok(challenges.swap_remove(0))
}
#[time]
/// Validates that `algorithm_id` refers to a known, non-banned algorithm that
/// is active in `block`.
async fn verify_algorithm<T: Context>(
    ctx: &T,
    algorithm_id: &String,
    block: &Block,
) -> ProtocolResult<()> {
    // Cheap local membership check first: skips the async context lookup for
    // inactive algorithms. Both failure paths return the same error value.
    if !block.data().active_algorithm_ids.contains(algorithm_id) {
        return Err(ProtocolError::InvalidAlgorithm {
            algorithm_id: algorithm_id.clone(),
        });
    }
    // The algorithm must exist, have a state, and not be banned.
    if !ctx
        .get_algorithms(AlgorithmsFilter::Id(algorithm_id.clone()), None, false)
        .await
        .unwrap_or_else(|e| panic!("get_algorithms error: {:?}", e))
        .pop()
        .is_some_and(|a| a.state.is_some_and(|s| !s.banned))
    {
        return Err(ProtocolError::InvalidAlgorithm {
            algorithm_id: algorithm_id.clone(),
        });
    }
    Ok(())
}
#[time]
/// Resolves a block by id (including its block data); errors if unknown.
async fn get_block_by_id<T: Context>(ctx: &T, block_id: &String) -> ProtocolResult<Block> {
    let maybe_block = ctx
        .get_block(BlockFilter::Id(block_id.clone()), true)
        .await
        .unwrap_or_else(|e| panic!("get_block error: {:?}", e));
    match maybe_block {
        Some(block) => Ok(block),
        None => Err(ProtocolError::InvalidBlock {
            block_id: block_id.clone(),
        }),
    }
}
#[time]
/// A precommit with identical settings must not already exist.
async fn verify_benchmark_settings_are_unique<T: Context>(
    ctx: &T,
    settings: &BenchmarkSettings,
) -> ProtocolResult<()> {
    let duplicates = ctx
        .get_precommits(PrecommitsFilter::Settings(settings.clone()))
        .await
        .unwrap_or_else(|e| panic!("get_precommits error: {:?}", e));
    if duplicates.is_empty() {
        Ok(())
    } else {
        Err(ProtocolError::DuplicateBenchmarkSettings {
            settings: settings.clone(),
        })
    }
}
#[time]
/// Validates a precommit's difficulty: correct dimensionality, each parameter
/// within its configured [min, max] range, and the point lying between the
/// challenge's current easiest and hardest difficulty frontiers.
fn verify_benchmark_difficulty(
    difficulty: &Vec<i32>,
    challenge: &Challenge,
    block: &Block,
) -> ProtocolResult<()> {
    let config = block.config();
    let difficulty_parameters = &config.difficulty.parameters[&challenge.id];
    // Dimension count and per-parameter bounds must both hold.
    if difficulty.len() != difficulty_parameters.len()
        || difficulty
            .iter()
            .zip(difficulty_parameters.iter())
            .any(|(d, p)| *d < p.min_value || *d > p.max_value)
    {
        return Err(ProtocolError::InvalidDifficulty {
            difficulty: difficulty.clone(),
            difficulty_parameters: difficulty_parameters.clone(),
        });
    }
    let challenge_data = challenge.block_data();
    // scaling_factor > 1 means the scaled frontier is the harder (upper) one;
    // otherwise the roles of base and scaled frontiers are swapped.
    let (lower_frontier, upper_frontier) = if *challenge_data.scaling_factor() > 1f64 {
        (
            challenge_data.base_frontier(),
            challenge_data.scaled_frontier(),
        )
    } else {
        (
            challenge_data.scaled_frontier(),
            challenge_data.base_frontier(),
        )
    };
    // The difficulty point must fall between the two frontiers.
    match difficulty.within(lower_frontier, upper_frontier) {
        PointCompareFrontiers::Above => {
            return Err(ProtocolError::DifficultyAboveHardestFrontier {
                difficulty: difficulty.clone(),
            });
        }
        PointCompareFrontiers::Below => {
            return Err(ProtocolError::DifficultyBelowEasiestFrontier {
                difficulty: difficulty.clone(),
            });
        }
        PointCompareFrontiers::Within => {}
    }
    Ok(())
}
#[time]
/// Computes the precommit fee (base fee + per-nonce fee * num_nonces) and
/// checks the player's available fee balance covers it.
fn get_fee_paid(
    player: &Player,
    num_nonces: u32,
    challenge: &Challenge,
) -> ProtocolResult<PreciseNumber> {
    let num_nonces = PreciseNumber::from(num_nonces);
    let fee_paid = challenge.block_data().base_fee().clone()
        + challenge.block_data().per_nonce_fee().clone() * num_nonces;
    // Missing player state and insufficient balance are both rejections.
    if !player
        .state
        .as_ref()
        .is_some_and(|s| *s.available_fee_balance.as_ref().unwrap() >= fee_paid)
    {
        return Err(ProtocolError::InsufficientFeeBalance {
            fee_paid,
            // Report 0 when the player has no state at all.
            available_fee_balance: player
                .state
                .as_ref()
                .map(|s| s.available_fee_balance().clone())
                .unwrap_or(PreciseNumber::from(0)),
        });
    }
    Ok(fee_paid)
}

View File

@ -1,167 +0,0 @@
use crate::{context::*, error::*};
use logging_timer::time;
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::MerkleHash;
#[time]
/// Entry point for a proof submission. Validates ownership and sampled nonces,
/// then verifies merkle proofs and re-checks the solutions. The proof is
/// always queued; if verification failed, a fraud record is also queued and
/// the failure reason is returned in the inner `Result`.
pub(crate) async fn execute<T: Context>(
    ctx: &T,
    player: &Player,
    benchmark_id: &String,
    merkle_proofs: Vec<MerkleProof>,
) -> ProtocolResult<Result<(), String>> {
    verify_proof_not_already_submitted(ctx, benchmark_id).await?;
    let precommit = get_precommit_by_id(ctx, benchmark_id).await?;
    verify_benchmark_ownership(player, &precommit.settings)?;
    let benchmark = get_benchmark_by_id(ctx, benchmark_id).await?;
    verify_sampled_nonces(&benchmark, &merkle_proofs)?;
    // Cheap structural check first; only re-verify solutions if it passes.
    let mut verification_result = verify_merkle_proofs(&precommit, &benchmark, &merkle_proofs);
    if verification_result.is_ok() {
        verification_result = verify_solutions_are_valid(ctx, &precommit, &merkle_proofs).await;
    };
    // The proof itself is recorded regardless of the verification outcome.
    ctx.add_proof_to_mempool(benchmark_id, merkle_proofs)
        .await
        .unwrap_or_else(|e| panic!("add_proof_to_mempool error: {:?}", e));
    if let Err(e) = verification_result {
        ctx.add_fraud_to_mempool(benchmark_id, e.to_string())
            .await
            .unwrap_or_else(|e| panic!("add_fraud_to_mempool error: {:?}", e));
        return Ok(Err(e.to_string()));
    }
    Ok(Ok(()))
}
#[time]
/// Fetches the precommit for `benchmark_id`; only confirmed precommits count.
async fn get_precommit_by_id<T: Context>(
    ctx: &T,
    benchmark_id: &String,
) -> ProtocolResult<Precommit> {
    let candidate = ctx
        .get_precommits(PrecommitsFilter::BenchmarkId(benchmark_id.clone()))
        .await
        .unwrap_or_else(|e| panic!("get_precommits error: {:?}", e))
        .pop();
    match candidate {
        Some(p) if p.state.is_some() => Ok(p),
        _ => Err(ProtocolError::InvalidPrecommit {
            benchmark_id: benchmark_id.clone(),
        }),
    }
}
#[time]
/// Fetches the benchmark (including solution data) for `benchmark_id`,
/// requiring a confirmed state.
async fn get_benchmark_by_id<T: Context>(
    ctx: &T,
    benchmark_id: &String,
) -> ProtocolResult<Benchmark> {
    ctx.get_benchmarks(BenchmarksFilter::Id(benchmark_id.clone()), true)
        .await
        // Fixed: panic message previously said "add_benchmark_to_mempool"
        // although the failing call is get_benchmarks.
        .unwrap_or_else(|e| panic!("get_benchmarks error: {:?}", e))
        .pop()
        .filter(|b| b.state.is_some())
        .ok_or_else(|| ProtocolError::InvalidBenchmark {
            benchmark_id: benchmark_id.to_string(),
        })
}
#[time]
/// A proof for this benchmark must not already be on record.
async fn verify_proof_not_already_submitted<T: Context>(
    ctx: &T,
    benchmark_id: &String,
) -> ProtocolResult<()> {
    let existing = ctx
        .get_proofs(ProofsFilter::BenchmarkId(benchmark_id.clone()), false)
        .await
        .unwrap_or_else(|e| panic!("get_proofs error: {:?}", e));
    if existing.is_empty() {
        Ok(())
    } else {
        Err(ProtocolError::DuplicateProof {
            benchmark_id: benchmark_id.to_string(),
        })
    }
}
#[time]
/// Only the player that owns the precommit may submit its proof.
fn verify_benchmark_ownership(player: &Player, settings: &BenchmarkSettings) -> ProtocolResult<()> {
    if player.id == settings.player_id {
        return Ok(());
    }
    Err(ProtocolError::InvalidSubmittingPlayer {
        actual_player_id: player.id.to_string(),
        expected_player_id: settings.player_id.clone(),
    })
}
#[time]
/// Recomputes the merkle root from each proof's leaf and branch and checks it
/// against the root committed in the benchmark details.
fn verify_merkle_proofs(
    precommit: &Precommit,
    benchmark: &Benchmark,
    merkle_proofs: &Vec<MerkleProof>,
) -> ProtocolResult<()> {
    // Depth of the smallest complete binary tree covering num_nonces leaves.
    let max_branch_len =
        (64 - (*precommit.details.num_nonces.as_ref().unwrap() - 1).leading_zeros()) as usize;
    let expected_merkle_root = benchmark.details.merkle_root.clone().unwrap();
    for merkle_proof in merkle_proofs.iter() {
        let branch = merkle_proof.branch.as_ref().unwrap();
        // Reject branches that are too long or reference impossible depths.
        if branch.0.len() > max_branch_len
            || branch.0.iter().any(|(d, _)| *d as usize > max_branch_len)
        {
            return Err(ProtocolError::InvalidMerkleProof {
                nonce: merkle_proof.leaf.nonce,
            });
        }
        let hash = MerkleHash::from(OutputMetaData::from(merkle_proof.leaf.clone()));
        // Reuse the `branch` binding (original re-unwrapped the Option) and
        // drop the needless clone of the Copy u64 nonce.
        let result = branch.calc_merkle_root(&hash, merkle_proof.leaf.nonce as usize);
        if !result.is_ok_and(|actual_merkle_root| actual_merkle_root == expected_merkle_root) {
            return Err(ProtocolError::InvalidMerkleProof {
                nonce: merkle_proof.leaf.nonce,
            });
        }
    }
    Ok(())
}
#[time]
/// Submitted proofs must cover exactly the nonces sampled by the protocol —
/// no extras, no omissions, no duplicate leaves.
fn verify_sampled_nonces(
    benchmark: &Benchmark,
    merkle_proofs: &Vec<MerkleProof>,
) -> ProtocolResult<()> {
    let expected = benchmark.state().sampled_nonces().clone();
    let submitted: HashSet<u64> = merkle_proofs.iter().map(|p| p.leaf.nonce).collect();
    // The length comparison catches duplicate leaves the set collapses.
    let matches = expected == submitted && expected.len() == merkle_proofs.len();
    if !matches {
        return Err(ProtocolError::InvalidProofNonces {
            submitted_nonces: merkle_proofs.iter().map(|p| p.leaf.nonce).collect(),
            expected_nonces: expected.into_iter().collect(),
        });
    }
    Ok(())
}
#[time]
/// Re-runs solution verification for every proved leaf; any failure marks the
/// whole proof invalid.
async fn verify_solutions_are_valid<T: Context>(
    ctx: &T,
    precommit: &Precommit,
    merkle_proofs: &Vec<MerkleProof>,
) -> ProtocolResult<()> {
    for proof in merkle_proofs.iter() {
        let outcome = ctx
            .verify_solution(&precommit.settings, proof.leaf.nonce, &proof.leaf.solution)
            .await
            .unwrap_or_else(|e| panic!("verify_solution error: {:?}", e));
        if outcome.is_err() {
            return Err(ProtocolError::InvalidSolution {
                nonce: proof.leaf.nonce,
            });
        }
    }
    Ok(())
}

View File

@ -1,81 +0,0 @@
use crate::{context::*, error::*};
use logging_timer::time;
use std::collections::HashSet;
use tig_structs::core::*;
use tig_utils::*;
#[time]
/// Entry point for a top-up: verifies the on-chain transaction and queues a
/// top-up credit for `player`'s fee balance.
pub(crate) async fn execute<T: Context>(
    ctx: &T,
    player: &Player,
    tx_hash: String,
) -> ProtocolResult<()> {
    let topup_amount = verify_topup_tx(ctx, player, &tx_hash).await?;
    // Context errors are treated as unrecoverable infrastructure failures.
    ctx.add_topup_to_mempool(
        &tx_hash,
        TopUpDetails {
            player_id: player.id.clone(),
            amount: topup_amount,
        },
    )
    .await
    .unwrap_or_else(|e| panic!("add_topup_to_mempool error: {:?}", e));
    Ok(())
}
#[time]
/// Validates a top-up transaction: unused hash, sent by `player`, addressed to
/// the burn address, and exactly the configured top-up amount. Returns the
/// amount to credit.
async fn verify_topup_tx<T: Context>(
    ctx: &T,
    player: &Player,
    tx_hash: &String,
) -> ProtocolResult<PreciseNumber> {
    // Config values (burn address, top-up amount) come from the latest block.
    let block = ctx
        .get_block(BlockFilter::Latest, false)
        .await
        .unwrap_or_else(|e| panic!("get_block error: {:?}", e))
        .expect("No latest block found");
    // Each tx hash may be redeemed at most once.
    if ctx
        .get_topups(TopUpsFilter::Id(tx_hash.clone()))
        .await
        .unwrap_or_else(|e| panic!("get_topups error: {:?}", e))
        .first()
        .is_some()
    {
        return Err(ProtocolError::DuplicateTransaction {
            tx_hash: tx_hash.clone(),
        });
    }
    // Any lookup failure is surfaced to the caller as an invalid transaction.
    let transaction =
        ctx.get_transaction(&tx_hash)
            .await
            .map_err(|_| ProtocolError::InvalidTransaction {
                tx_hash: tx_hash.clone(),
            })?;
    if player.id != transaction.sender {
        return Err(ProtocolError::InvalidTransactionSender {
            tx_hash: tx_hash.clone(),
            expected_sender: player.id.clone(),
            actual_sender: transaction.sender.clone(),
        });
    }
    let burn_address = block.config().erc20.burn_address.clone();
    if transaction.receiver != burn_address {
        return Err(ProtocolError::InvalidTransactionReceiver {
            tx_hash: tx_hash.clone(),
            expected_receiver: burn_address,
            actual_receiver: transaction.receiver.clone(),
        });
    }
    // Amount must match the configured top-up amount exactly.
    let expected_amount = block.config().precommit_submissions().topup_amount.clone();
    if transaction.amount != expected_amount {
        return Err(ProtocolError::InvalidTransactionAmount {
            tx_hash: tx_hash.clone(),
            expected_amount: jsonify(&expected_amount),
            actual_amount: jsonify(&transaction.amount),
        });
    }
    Ok(expected_amount)
}

View File

@ -6,17 +6,29 @@ use tig_utils::PreciseNumber;
serializable_struct_with_getters! {
ProtocolConfig {
algorithms: AlgorithmsConfig,
benchmarks: BenchmarksConfig,
breakthroughs: BreakthroughsConfig,
challenges: ChallengesConfig,
deposits: DepositsConfig,
erc20: ERC20Config,
benchmark_submissions: BenchmarkSubmissionsConfig,
precommit_submissions: Option<PrecommitSubmissionsConfig>,
wasm_vm: WasmVMConfig,
solution_signature: SolutionSignatureConfig,
qualifiers: QualifiersConfig,
difficulty: DifficultyConfig,
optimisable_proof_of_work: OptimisableProofOfWorkConfig,
opow: OPoWConfig,
rounds: RoundsConfig,
algorithm_submissions: AlgorithmSubmissionsConfig,
rewards: RewardsConfig,
runtime: RuntimeConfig,
}
}
serializable_struct_with_getters! {
BreakthroughsConfig {
academic_fund_address: String,
min_percent_yes_votes: f64,
vote_period_rounds: u32,
min_lock_period_to_vote: u32,
submission_fee: PreciseNumber,
adoption_threshold: f64,
merge_points_threshold: u32,
push_delay: u32,
}
}
serializable_struct_with_getters! {
@ -24,46 +36,52 @@ serializable_struct_with_getters! {
rpc_url: String,
chain_id: String,
token_address: String,
burn_address: String,
}
}
serializable_struct_with_getters! {
BenchmarkSubmissionsConfig {
DepositsConfig {
lock_address: String,
min_lock_amount: PreciseNumber,
min_lock_period_secs: u64,
max_lock_period_rounds: u32,
lock_period_multiplier: f64,
max_reward_share: f64,
deposit_to_qualifier_ratio: f64,
period_between_redelegate: u32,
}
}
serializable_struct_with_getters! {
BenchmarksConfig {
min_num_solutions: u32,
submission_delay_multiplier: f64,
max_samples: usize,
lifespan_period: u32,
}
}
serializable_struct_with_getters! {
PrecommitSubmissionsConfig {
max_active_period_blocks: u32,
min_per_nonce_fee: PreciseNumber,
min_base_fee: PreciseNumber,
max_fee_percentage_delta: f64,
target_num_precommits: u32,
topup_amount: PreciseNumber,
}
}
serializable_struct_with_getters! {
WasmVMConfig {
TopUpsConfig {
topup_address: String,
min_topup_amount: PreciseNumber,
}
}
serializable_struct_with_getters! {
RuntimeConfig {
max_memory: u64,
max_fuel: u64,
}
}
serializable_struct_with_getters! {
SolutionSignatureConfig {
ChallengesConfig {
max_percent_delta: Option<f64>,
threshold_decay: Option<f64>,
equilibrium_rate_multiplier: f64,
percent_error_multiplier: Option<f64>,
}
}
serializable_struct_with_getters! {
QualifiersConfig {
cutoff_phase_in_period: Option<u32>,
cutoff_multiplier: f64,
total_qualifiers_threshold: u32,
min_cutoff: Option<u32>,
max_scaling_factor: f64,
difficulty_parameters: HashMap<String, Vec<DifficultyParameter>>,
}
}
serializable_struct_with_getters! {
@ -86,17 +104,14 @@ impl MinMaxDifficulty for Vec<DifficultyParameter> {
}
}
serializable_struct_with_getters! {
DifficultyConfig {
max_scaling_factor: f64,
parameters: HashMap<String, Vec<DifficultyParameter>>,
}
}
serializable_struct_with_getters! {
OptimisableProofOfWorkConfig {
OPoWConfig {
imbalance_multiplier: f64,
avg_percent_qualifiers_multiplier: Option<f64>,
enable_proof_of_deposit: Option<bool>,
rolling_deposit_decay: Option<f64>,
cutoff_phase_in_period: Option<u32>,
cutoff_multiplier: f64,
total_qualifiers_threshold: u32,
min_cutoff: Option<u32>,
deposit_to_cutoff_cap_ratio: f64,
}
}
serializable_struct_with_getters! {
@ -105,7 +120,7 @@ serializable_struct_with_getters! {
}
}
serializable_struct_with_getters! {
AlgorithmSubmissionsConfig {
AlgorithmsConfig {
submission_fee: PreciseNumber,
adoption_threshold: f64,
merge_points_threshold: u32,

View File

@ -9,19 +9,26 @@ serializable_struct_with_getters! {
Algorithm {
id: String,
details: AlgorithmDetails,
state: Option<AlgorithmState>,
state: AlgorithmState,
block_data: Option<AlgorithmBlockData>,
code: Option<String>,
round_earnings: PreciseNumber,
}
}
serializable_struct_with_getters! {
Benchmark {
id: String,
details: BenchmarkDetails,
state: Option<BenchmarkState>,
state: BenchmarkState,
solution_nonces: Option<HashSet<u64>>,
}
}
serializable_struct_with_getters! {
Binary {
algorithm_id: String,
details: BinaryDetails,
state: BinaryState,
}
}
serializable_struct_with_getters! {
Block {
id: String,
@ -30,20 +37,57 @@ serializable_struct_with_getters! {
config: Option<ProtocolConfig>,
}
}
serializable_struct_with_getters! {
Breakthrough {
id: String,
details: BreakthroughDetails,
state: BreakthroughState,
block_data: Option<BreakthroughBlockData>,
}
}
serializable_struct_with_getters! {
Challenge {
id: String,
details: ChallengeDetails,
state: Option<ChallengeState>,
state: ChallengeState,
block_data: Option<ChallengeBlockData>,
}
}
serializable_struct_with_getters! {
Delegate {
id: String,
details: DelegateDetails,
state: DelegateState,
}
}
serializable_struct_with_getters! {
Deposit {
id: String,
details: DepositDetails,
state: DepositState,
}
}
serializable_struct_with_getters! {
Fraud {
benchmark_id: String,
state: FraudState,
allegation: Option<String>,
}
}
serializable_struct_with_getters! {
OPoW {
player_id: String,
block_data: Option<OPoWBlockData>,
round_earnings: PreciseNumber,
}
}
serializable_struct_with_getters! {
Player {
id: String,
details: PlayerDetails,
state: Option<PlayerState>,
state: PlayerState,
block_data: Option<PlayerBlockData>,
round_earnings_by_type: HashMap<RewardType, PreciseNumber>,
}
}
serializable_struct_with_getters! {
@ -51,69 +95,78 @@ serializable_struct_with_getters! {
benchmark_id: String,
details: PrecommitDetails,
settings: BenchmarkSettings,
state: Option<PrecommitState>,
state: PrecommitState,
}
}
serializable_struct_with_getters! {
MerkleProof {
leaf: OutputData,
branch: Option<MerkleBranch>,
branch: MerkleBranch,
}
}
serializable_struct_with_getters! {
Proof {
benchmark_id: String,
state: Option<ProofState>,
details: ProofDetails,
state: ProofState,
merkle_proofs: Option<Vec<MerkleProof>>,
}
}
serializable_struct_with_getters! {
Fraud {
benchmark_id: String,
state: Option<FraudState>,
allegation: Option<String>,
RewardShare {
id: String,
details: RewardShareDetails,
state: RewardShareState,
}
}
serializable_struct_with_getters! {
TopUp {
id: String,
details: TopUpDetails,
state: Option<TopUpState>,
state: TopUpState,
}
}
serializable_struct_with_getters! {
Wasm {
algorithm_id: String,
details: WasmDetails,
state: Option<WasmState>,
Vote {
id: String,
details: VoteDetails,
state: VoteState,
}
}
// Algorithm child structs
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AlgorithmType {
Wasm,
Ptx,
}
serializable_struct_with_getters! {
AlgorithmDetails {
name: String,
player_id: String,
challenge_id: String,
tx_hash: String,
breakthrough_id: Option<String>,
r#type: AlgorithmType,
fee_paid: PreciseNumber,
}
}
serializable_struct_with_getters! {
AlgorithmState {
block_confirmed: Option<u32>,
round_submitted: Option<u32>,
block_confirmed: u32,
round_submitted: u32,
round_pushed: Option<u32>,
round_active: Option<u32>,
round_merged: Option<u32>,
banned: bool,
}
}
serializable_struct_with_getters! {
AlgorithmBlockData {
num_qualifiers_by_player: Option<HashMap<String, u32>>,
adoption: Option<PreciseNumber>,
merge_points: Option<u32>,
reward: Option<PreciseNumber>,
round_earnings: Option<PreciseNumber>,
num_qualifiers_by_player: HashMap<String, u32>,
adoption: PreciseNumber,
merge_points: u32,
reward: PreciseNumber,
}
}
@ -135,13 +188,13 @@ impl BenchmarkSettings {
serializable_struct_with_getters! {
BenchmarkDetails {
num_solutions: u32,
merkle_root: Option<MerkleHash>,
merkle_root: MerkleHash,
sampled_nonces: HashSet<u64>,
}
}
serializable_struct_with_getters! {
BenchmarkState {
block_confirmed: Option<u32>,
sampled_nonces: Option<HashSet<u64>>,
block_confirmed: u32,
}
}
serializable_struct_with_getters! {
@ -173,42 +226,86 @@ impl From<OutputData> for MerkleHash {
}
}
// Binary child structs
serializable_struct_with_getters! {
BinaryDetails {
compile_success: bool,
download_url: Option<String>,
}
}
serializable_struct_with_getters! {
BinaryState {
block_confirmed: u32,
}
}
// Block child structs
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(rename_all = "lowercase")]
pub enum TxType {
Algorithm,
Benchmark,
Binary,
Breakthrough,
Challenge,
Delegate,
Deposit,
Fraud,
Precommit,
Proof,
RewardShare,
Topup,
Vote,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(rename_all = "lowercase")]
pub enum SupplyType {
Circulating,
Locked,
Burnt,
}
serializable_struct_with_getters! {
BlockDetails {
prev_block_id: String,
height: u32,
round: u32,
num_confirmed: HashMap<TxType, u32>,
num_active: HashMap<String, u32>,
eth_block_num: Option<String>,
fees_paid: Option<PreciseNumber>,
num_confirmed_challenges: Option<u32>,
num_confirmed_algorithms: Option<u32>,
num_confirmed_benchmarks: Option<u32>,
num_confirmed_precommits: Option<u32>,
num_confirmed_proofs: Option<u32>,
num_confirmed_frauds: Option<u32>,
num_confirmed_topups: Option<u32>,
num_confirmed_wasms: Option<u32>,
num_active_challenges: Option<u32>,
num_active_algorithms: Option<u32>,
num_active_benchmarks: Option<u32>,
num_active_players: Option<u32>,
supply: HashMap<SupplyType, PreciseNumber>, // circulating, locked, burnt,
timestamp: u64,
}
}
serializable_struct_with_getters! {
BlockData {
confirmed_challenge_ids: HashSet<String>,
confirmed_algorithm_ids: HashSet<String>,
confirmed_benchmark_ids: HashSet<String>,
confirmed_precommit_ids: HashSet<String>,
confirmed_proof_ids: HashSet<String>,
confirmed_fraud_ids: HashSet<String>,
confirmed_topup_ids: HashSet<String>,
confirmed_wasm_ids: HashSet<String>,
active_challenge_ids: HashSet<String>,
active_algorithm_ids: HashSet<String>,
active_benchmark_ids: HashSet<String>,
active_player_ids: HashSet<String>,
confirmed_ids: HashMap<TxType, HashSet<String>>,
active_ids: HashMap<String, HashSet<String>>,
}
}
// Breakthrough child structs
serializable_struct_with_getters! {
BreakthroughDetails {
name: String,
player_id: String,
challenge_id: String,
}
}
serializable_struct_with_getters! {
BreakthroughState {
block_confirmed: u32,
round_submitted: u32,
round_pushed: Option<u32>,
round_active: Option<u32>,
round_merged: Option<u32>,
vote_tally: HashMap<bool, PreciseNumber>,
}
}
serializable_struct_with_getters! {
BreakthroughBlockData {
adoption: PreciseNumber,
merge_points: u32,
reward: PreciseNumber,
}
}
@ -220,20 +317,71 @@ serializable_struct_with_getters! {
}
serializable_struct_with_getters! {
ChallengeState {
block_confirmed: Option<u32>,
round_active: Option<u32>,
block_confirmed: u32,
round_active: u32,
}
}
serializable_struct_with_getters! {
ChallengeBlockData {
solution_signature_threshold: Option<u32>,
num_qualifiers: Option<u32>,
qualifier_difficulties: Option<HashSet<Point>>,
base_frontier: Option<Frontier>,
scaled_frontier: Option<Frontier>,
scaling_factor: Option<f64>,
base_fee: Option<PreciseNumber>,
per_nonce_fee: Option<PreciseNumber>,
solution_signature_threshold: u32,
num_qualifiers: u32,
qualifier_difficulties: HashSet<Point>,
base_frontier: Frontier,
scaled_frontier: Frontier,
scaling_factor: f64,
base_fee: PreciseNumber,
per_nonce_fee: PreciseNumber,
}
}
// Delegate child structs
serializable_struct_with_getters! {
DelegateDetails {
player_id: String,
delegatee: String,
}
}
serializable_struct_with_getters! {
DelegateState {
block_confirmed: u32,
}
}
// Deposit child structs
serializable_struct_with_getters! {
DepositDetails {
player_id: String,
tx_hash: String,
log_idx: u32,
amount: PreciseNumber,
start_timestamp: u64,
end_timestamp: u64,
}
}
serializable_struct_with_getters! {
DepositState {
block_confirmed: u32,
}
}
// Fraud child structs
serializable_struct_with_getters! {
FraudState {
block_confirmed: u32,
}
}
// OPoW child structs
serializable_struct_with_getters! {
OPoWBlockData {
num_qualifiers_by_challenge: HashMap<String, u32>,
cutoff: u32,
associated_deposit: PreciseNumber,
delegators: HashSet<String>,
deposit_share: PreciseNumber,
imbalance: PreciseNumber,
influence: PreciseNumber,
reward: PreciseNumber,
}
}
@ -246,22 +394,26 @@ serializable_struct_with_getters! {
}
serializable_struct_with_getters! {
PlayerState {
total_fees_paid: Option<PreciseNumber>,
available_fee_balance: Option<PreciseNumber>,
total_fees_paid: PreciseNumber,
available_fee_balance: PreciseNumber,
delegatee: String,
votes: HashMap<String, bool>,
reward_share: PreciseNumber,
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(rename_all = "lowercase")]
pub enum RewardType {
Benchmarker,
Algorithm,
Breakthrough,
Delegator,
}
serializable_struct_with_getters! {
PlayerBlockData {
num_qualifiers_by_challenge: Option<HashMap<String, u32>>,
cutoff: Option<u32>,
deposit: Option<PreciseNumber>,
rolling_deposit: Option<PreciseNumber>,
qualifying_percent_rolling_deposit: Option<PreciseNumber>,
imbalance: Option<PreciseNumber>,
imbalance_penalty: Option<PreciseNumber>,
influence: Option<PreciseNumber>,
reward: Option<PreciseNumber>,
round_earnings: Option<PreciseNumber>,
reward_by_type: HashMap<RewardType, PreciseNumber>,
deposit_by_rounds: HashMap<u32, PreciseNumber>,
weighted_deposit: PreciseNumber,
}
}
@ -269,22 +421,26 @@ serializable_struct_with_getters! {
serializable_struct_with_getters! {
PrecommitDetails {
block_started: u32,
num_nonces: Option<u32>,
fee_paid: Option<PreciseNumber>,
num_nonces: u32,
rand_hash: String,
fee_paid: PreciseNumber,
}
}
serializable_struct_with_getters! {
PrecommitState {
block_confirmed: Option<u32>,
rand_hash: Option<String>,
block_confirmed: u32,
}
}
// Proof child structs
serializable_struct_with_getters! {
ProofDetails {
submission_delay: u32,
}
}
serializable_struct_with_getters! {
ProofState {
block_confirmed: Option<u32>,
submission_delay: Option<u32>,
block_confirmed: u32,
}
}
pub type Solution = Map<String, Value>;
@ -302,10 +458,16 @@ impl OutputData {
}
}
// Fraud child structs
// RewardShare child structs
serializable_struct_with_getters! {
FraudState {
block_confirmed: Option<u32>,
RewardShareDetails {
player_id: String,
share: PreciseNumber,
}
}
serializable_struct_with_getters! {
RewardShareState {
block_confirmed: u32,
}
}
@ -313,24 +475,27 @@ serializable_struct_with_getters! {
serializable_struct_with_getters! {
TopUpDetails {
player_id: String,
tx_hash: String,
log_idx: u32,
amount: PreciseNumber,
}
}
serializable_struct_with_getters! {
TopUpState {
block_confirmed: Option<u32>,
block_confirmed: u32,
}
}
// Wasm child structs
// Vote child structs
serializable_struct_with_getters! {
WasmDetails {
compile_success: bool,
download_url: Option<String>,
VoteDetails {
player_id: String,
breakthrough_id: String,
is_breakthrough: bool,
}
}
serializable_struct_with_getters! {
WasmState {
block_confirmed: Option<u32>,
VoteState {
block_confirmed: u32,
}
}