Deprecate tig-worker.

FiveMovesAhead 2025-05-20 11:09:46 +01:00
parent 9c1d871c8a
commit 65cca6cce2
7 changed files with 8 additions and 440 deletions

View File

@@ -6,8 +6,8 @@ members = [
"tig-protocol",
"tig-runtime",
"tig-structs",
"tig-utils", "tig-verifier",
"tig-worker",
"tig-utils",
"tig-verifier",
]
exclude = []
resolver = "2"

View File

@@ -47,19 +47,15 @@ WORKDIR /tmp/tig-monorepo
RUN if command -v nvcc > /dev/null 2>&1; then \
cargo build -r -p tig-runtime --features cuda && \
-cargo build -r -p tig-verifier --features cuda && \
-cargo build -r -p tig-worker --features cuda; \
+cargo build -r -p tig-verifier --features cuda; \
else \
cargo build -r -p tig-runtime && \
cargo build -r -p tig-verifier && \
cargo build -r -p tig-worker; \
cargo build -r -p tig-verifier; \
fi && \
mv target/release/tig-runtime /usr/local/bin/ && \
mv target/release/tig-verifier /usr/local/bin/ && \
-mv target/release/tig-worker /usr/local/bin/ && \
chmod +x /usr/local/bin/tig-runtime && \
chmod +x /usr/local/bin/tig-verifier && \
-chmod +x /usr/local/bin/tig-worker && \
rm -rf tig-monorepo
COPY tig-binary/scripts /usr/local/bin/

View File

@@ -9,12 +9,10 @@ ENV DEBIAN_FRONTEND=noninteractive
COPY --from=dev /usr/local/bin/tig-runtime /usr/local/bin/tig-runtime
COPY --from=dev /usr/local/bin/tig-verifier /usr/local/bin/tig-verifier
-COPY --from=dev /usr/local/bin/tig-worker /usr/local/bin/tig-worker
COPY --from=dev /usr/local/lib/rust /usr/local/lib/rust
RUN chmod +x /usr/local/bin/tig-runtime && \
chmod +x /usr/local/bin/tig-verifier && \
-chmod +x /usr/local/bin/tig-worker && \
echo "export LD_LIBRARY_PATH=\"${LD_LIBRARY_PATH}:/usr/local/lib/rust\"" >> /etc/bash.bashrc
RUN apt update && apt install -y python3 python3-pip

View File

@@ -41,6 +41,10 @@ See last section on how to find your player_id & api_key.
2. Delete the database: `rm -rf db_data`
3. Start your master
+## Optimising your Master Config
+See [docs.tig.foundation](https://docs.tig.foundation/benchmarking/benchmarker-config)
# Connecting Slaves
1. Run the appropriate [runtime docker image](https://github.com/tig-foundation/tig-monorepo/pkgs/container/tig-monorepo%2Fruntime) for your slave. Available flavours are:
@@ -65,10 +69,6 @@ See last section on how to find your player_id & api_key.
* To use a different port, use the option `--port <MASTER_PORT>`
* To see all options, use `--help`
-# Optimising your Config
-See [docs.tig.foundation](https://docs.tig.foundation/benchmarking/benchmarker-config)
# Finding your API Key
## Mainnet

View File

@@ -1,25 +0,0 @@
[package]
name = "tig-worker"
version = "0.1.0"
readme = "README.md"
license = "https://github.com/tig-foundation/tig-monorepo/tree/main/docs/agreements/end_user_license_agreement.pdf"
authors.workspace = true
repository.workspace = true
edition.workspace = true
[dependencies]
anyhow = "1.0.81"
clap = { version = "4.5.4" }
cudarc = { version = "0.16.2", features = [
"cuda-version-from-build-system",
], optional = true }
futures = "0.3"
serde = { version = "1.0.196", features = ["derive"] }
serde_json = { version = "1.0.113", features = ["preserve_order"] }
tempfile = "3.19.1"
tig-structs = { path = "../tig-structs" }
tig-utils = { path = "../tig-utils" }
tokio = { version = "1.0", features = ["full"] }
[features]
cuda = ["cudarc"]

View File

@@ -1,130 +0,0 @@
# tig-worker
A Rust crate for executing a batch of instances using [`tig-runtime`](../tig-runtime/README.md), aggregating the outputs, and calculating the Merkle root.
# Getting Started
`tig-worker` executes a number of `tig-runtime` processes concurrently. Each `tig-runtime` loads an algorithm shared object (compiled from `tig-binary`), which expects a specific version of the Rust standard libraries to be available on `LD_LIBRARY_PATH`.
Users who don't intend to customise `tig-worker` are recommended to download the pre-compiled version available in [TIG's runtime docker images](https://github.com/tig-foundation/tig-monorepo/pkgs/container/tig-monorepo%2Fruntime).
**Example:**
```
docker run -it ghcr.io/tig-foundation/tig-monorepo/runtime:0.0.1-aarch64
# tig-worker is already on PATH
```
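For orientation, below is a minimal Rust sketch (not part of the crate) of the per-nonce call described above. The positional argument order mirrors the deleted `src/main.rs` shown later in this commit; the function name, settings string and `.so` path are illustrative, and the real worker additionally passes `--output`, `--compress` and a fuel limit.
```
// Hedged sketch: spawn one tig-runtime process for a single nonce.
// Assumes tig-runtime is on PATH; a zero exit status is treated as
// "this nonce produced a solution", as in the worker's main loop.
use std::process::Command;

fn run_one_nonce(settings_json: &str, rand_hash: &str, nonce: u64) -> std::io::Result<bool> {
    let status = Command::new("tig-runtime")
        .arg(settings_json)
        .arg(rand_hash)
        .arg(nonce.to_string())
        .arg("./tig-algorithms/lib/satisfiability/aarch64/better_sat.so")
        .status()?;
    Ok(status.success())
}

fn main() -> std::io::Result<()> {
    let settings = r#"{"challenge_id":"c001","difficulty":[5000,415],"algorithm_id":"","player_id":"","block_id":""}"#;
    let found = run_one_nonce(settings, "rand_hash", 1337)?;
    println!("solution found: {found}");
    Ok(())
}
```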
## Compiling (using dev docker image)
The required Rust environment for development is available via [TIG's development docker images](https://github.com/tig-foundation/tig-monorepo/pkgs/container/tig-monorepo%2Fdev).
**Example:**
```
docker run -it -v $(pwd):/app ghcr.io/tig-foundation/tig-monorepo/dev:0.0.1-aarch64
# cargo build -p tig-worker --release
```
## Compiling (local setup)
Users who intend to customise `tig-worker` need to install a specific version of rust:
1. Install rust version `nightly-2025-02-10`
```
ARCH=$(uname -m)
RUST_TARGET=$(if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then
echo "aarch64-unknown-linux-gnu";
else
echo "x86_64-unknown-linux-gnu";
fi)
rustup install nightly-2025-02-10
rustup default nightly-2025-02-10
rustup component add rust-src
rustup target add $RUST_TARGET
RUST_LIBDIR=$(rustc --print target-libdir --target=$RUST_TARGET)
ln -s $RUST_LIBDIR /usr/local/lib/rust
echo "export LD_LIBRARY_PATH=\"${LD_LIBRARY_PATH}:/usr/local/lib/rust\"" >> ~/.bashrc
```
2. Compile `tig-worker`
```
# for cuda version, add --features cuda
cargo build -p tig-worker --release --target $RUST_TARGET
```
# Usage
```
Usage: tig-worker [OPTIONS] <RUNTIME> <SETTINGS> <RAND_HASH> <START_NONCE> <NUM_NONCES> <BATCH_SIZE> <BINARY>
Arguments:
<RUNTIME> Path to tig-runtime executable
<SETTINGS> Settings json string or path to json file
<RAND_HASH> A string used in seed generation
<START_NONCE> Starting nonce
<NUM_NONCES> Number of nonces to compute
<BATCH_SIZE> Batch size for Merkle tree
<BINARY> Path to a shared object (*.so) file
Options:
--ptx [<PTX>] Path to a CUDA ptx file
--fuel [<FUEL>] Optional maximum fuel parameter for runtime [default: 2000000000]
--workers [<WORKERS>] Number of worker threads [default: 1]
--output [<OUTPUT_FOLDER>] If set, the data for each nonce will be saved as '<nonce>.json' in this folder
-h, --help Print help
```
**Example:**
```
SETTINGS='{"challenge_id":"c001","difficulty":[5000,415],"algorithm_id":"","player_id":"","block_id":""}'
RANDHASH='rand_hash'
NONCE=1337
FUEL=987654321123456789
SO_PATH=./tig-algorithms/lib/satisfiability/aarch64/better_sat.so
# compute a single nonce (start_nonce=$NONCE, num_nonces=1, batch_size=1)
tig-worker tig-runtime $SETTINGS $RANDHASH $NONCE 1 1 $SO_PATH --fuel $FUEL
```
## Compute Solution
Given settings, a nonce and the WASM for an algorithm, `tig-worker` computes the solution data (runtime_signature, fuel_consumed, solution). This sub-command does not verify whether the solution is valid.
* If the algorithm results in an error, `tig-worker` will terminate with exit code 1 and print error to stderr.
* If the algorithm returns a solution, `tig-worker` will terminate with exit code 0 and print the solution data to stdout.
```
Usage: tig-worker compute_solution [OPTIONS] <SETTINGS> <RAND_HASH> <NONCE> <WASM>
Arguments:
<SETTINGS> Settings json string or path to json file
<RAND_HASH> A string used in seed generation
<NONCE> Nonce value
<WASM> Path to a wasm file
Options:
--fuel [<FUEL>] Optional maximum fuel parameter for WASM VM [default: 1000000000]
--mem [<MEM>] Optional maximum memory parameter for WASM VM [default: 1000000000]
-h, --help Print help
```
**Example:**
```
SETTINGS='{"challenge_id":"c001","difficulty":[5000,415],"algorithm_id":"","player_id":"","block_id":""}'
START=0
NUM_NONCES=8
BATCH_SIZE=8
SO_PATH=./tig-algorithms/lib/satisfiability/aarch64/better_sat.so
RAND_HASH=random_string
tig-worker \
  tig-runtime $SETTINGS $RAND_HASH $START $NUM_NONCES $BATCH_SIZE $SO_PATH --workers 8
```
**Example Output:**
```
{"merkle_root":"ab3f7ea08a2b991217bd9b08299b063bc77a0239af3a826d3b0ea91ca3384f98","solution_nonces":[6,3,4,5,7,2,1,0]}
```
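For reference, here is a hedged sketch of how the `merkle_root` above is derived from the per-nonce outputs, using the `tig_utils`/`tig_structs` calls exactly as they appear in the deleted `src/main.rs` below; the helper name is hypothetical, and the `MerkleHash` return type of `calc_merkle_root` is assumed from that code.
```
// Hedged sketch of the aggregation step, mirroring src/main.rs below:
// each nonce's OutputData is hashed into a Merkle leaf, the leaves form
// a tree of batch_size leaves (a power of two), and the root is reported.
use anyhow::Result;
use tig_structs::core::OutputData;
use tig_utils::{MerkleHash, MerkleTree};

fn merkle_root_for_batch(outputs: Vec<OutputData>, batch_size: usize) -> Result<MerkleHash> {
    let hashes: Vec<MerkleHash> = outputs.into_iter().map(MerkleHash::from).collect();
    let tree = MerkleTree::new(hashes, batch_size)?;
    Ok(tree.calc_merkle_root())
}
```
Requiring `batch_size` to be a power of two (and at least the number of nonces) keeps the tree shape fixed for a batch, which is presumably what lets per-nonce Merkle proofs be checked against the reported root.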
# License
[End User License Agreement](../docs/agreements/end_user_license_agreement.pdf)

View File

@@ -1,271 +0,0 @@
use anyhow::{anyhow, Result};
use clap::{arg, Command};
use futures::stream::{self, StreamExt};
use serde_json::json;
use std::{fs, path::PathBuf};
use tempfile::NamedTempFile;
use tig_structs::core::*;
use tig_utils::{compress_obj, decompress_obj, dejsonify, jsonify, MerkleHash, MerkleTree};
use tokio::runtime::Runtime;
fn cli() -> Command {
Command::new("tig-worker")
.about("Computes batch of nonces and generates Merkle proofs")
.arg_required_else_help(true)
.arg(arg!(<RUNTIME> "Path to tig-runtime executable").value_parser(clap::value_parser!(PathBuf)))
.arg(
arg!(<SETTINGS> "Settings json string or path to json file")
.value_parser(clap::value_parser!(String)),
)
.arg(
arg!(<RAND_HASH> "A string used in seed generation")
.value_parser(clap::value_parser!(String)),
)
.arg(arg!(<START_NONCE> "Starting nonce").value_parser(clap::value_parser!(u64)))
.arg(
arg!(<NUM_NONCES> "Number of nonces to compute")
.value_parser(clap::value_parser!(u64)),
)
.arg(
arg!(<BATCH_SIZE> "Batch size for Merkle tree")
.value_parser(clap::value_parser!(u64)),
)
.arg(
arg!(<BINARY> "Path to a shared object (*.so) file")
.value_parser(clap::value_parser!(PathBuf)),
)
.arg(
arg!(--ptx [PTX] "Path to a CUDA ptx file")
.value_parser(clap::value_parser!(PathBuf)),
)
.arg(
arg!(--fuel [FUEL] "Optional maximum fuel parameter for runtime")
.default_value("2000000000")
.value_parser(clap::value_parser!(u64)),
)
.arg(
arg!(--workers [WORKERS] "Number of worker threads")
.default_value("1")
.value_parser(clap::value_parser!(usize)),
)
.arg(
arg!(--output [OUTPUT_FOLDER] "If set, the data for each nonce will be saved as '<nonce>.json' in this folder")
.value_parser(clap::value_parser!(PathBuf)),
)
}
fn main() {
let matches = cli().get_matches();
if let Err(e) = compute_batch(
matches.get_one::<PathBuf>("RUNTIME").unwrap().clone(),
matches.get_one::<String>("SETTINGS").unwrap().clone(),
matches.get_one::<String>("RAND_HASH").unwrap().clone(),
*matches.get_one::<u64>("START_NONCE").unwrap(),
*matches.get_one::<u64>("NUM_NONCES").unwrap(),
*matches.get_one::<u64>("BATCH_SIZE").unwrap(),
matches.get_one::<PathBuf>("BINARY").unwrap().clone(),
matches.get_one::<PathBuf>("ptx").cloned(),
*matches.get_one::<u64>("fuel").unwrap(),
*matches.get_one::<usize>("workers").unwrap(),
matches.get_one::<PathBuf>("output").cloned(),
) {
eprintln!("Error: {}", e);
std::process::exit(1);
}
}
fn compute_batch(
runtime_path: PathBuf,
settings: String,
rand_hash: String,
start_nonce: u64,
num_nonces: u64,
batch_size: u64,
binary_path: PathBuf,
ptx_path: Option<PathBuf>,
max_fuel: u64,
num_workers: usize,
output_folder: Option<PathBuf>,
) -> Result<()> {
if num_nonces == 0 || batch_size < num_nonces {
return Err(anyhow!(
"Invalid number of nonces. Must be non-zero and less than batch size"
));
}
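// The per-nonce hashes computed below are assembled into a Merkle tree of
// batch_size leaves, which is why the batch size must be a power of two
// and at least num_nonces.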
if batch_size == 0 || (batch_size & (batch_size - 1)) != 0 {
return Err(anyhow!("Batch size must be a power of 2"));
}
if let Some(path) = &output_folder {
fs::create_dir_all(path)?;
}
let settings = load_settings(&settings);
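// Challenges c004 and c005 require a CUDA-enabled build and a PTX module;
// for any other challenge a PTX file must not be supplied.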
match settings.challenge_id.as_str() {
"c004" | "c005" => {
#[cfg(not(feature = "cuda"))]
panic!("tig-worker was not compiled with '--features cuda'");
#[cfg(feature = "cuda")]
if ptx_path.is_none() {
return Err(anyhow!(
"PTX file is required for challenge {}",
settings.challenge_id
));
}
}
_ => {
if ptx_path.is_some() {
return Err(anyhow!(
"PTX file is not required for challenge {}",
settings.challenge_id
));
}
}
}
let settings = jsonify(&settings);
let runtime = Runtime::new()?;
runtime.block_on(async {
let mut hashes = vec![MerkleHash::null(); num_nonces as usize];
let mut solution_nonces = Vec::new();
// Create a stream of nonces and process them concurrently
let results = stream::iter(start_nonce..(start_nonce + num_nonces))
.map(|nonce| {
let runtime_path = runtime_path.clone();
let settings = settings.clone();
let rand_hash = rand_hash.clone();
let binary_path = binary_path.clone();
let output_folder = output_folder.clone();
#[cfg(feature = "cuda")]
let ptx_path = ptx_path.clone();
#[cfg(feature = "cuda")]
let num_gpus = cudarc::runtime::result::device::get_count().unwrap() as u64;
tokio::spawn(async move {
let temp_file = NamedTempFile::new()?;
let mut cmd = std::process::Command::new(runtime_path);
cmd.arg(settings)
.arg(rand_hash)
.arg(nonce.to_string())
.arg(binary_path)
.arg("--output")
.arg(temp_file.path())
.arg("--compress");
#[cfg(feature = "cuda")]
if let Some(ptx_path) = ptx_path {
cmd.arg("--ptx")
.arg(ptx_path)
.arg("--gpu")
.arg((nonce % num_gpus).to_string());
}
let output = cmd.output().unwrap();
let exit_code = output.status.code();
let is_solution = output.status.success();
if exit_code == Some(87) {
// out of fuel
// let mut runtime_signature = 0;
// let stdout = String::from_utf8_lossy(&output.stdout);
// let mut lines = stdout.lines().rev();
// while let Some(line) = lines.next() {
// if line.starts_with("Runtime signature: ") {
// if let Some(sig) = line.strip_prefix("Runtime signature: ") {
// if let Ok(sig) = sig.trim().parse::<u64>() {
// runtime_signature = sig;
// break;
// }
// }
// }
// }
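// Out-of-fuel nonces (exit code 87) still contribute a Merkle leaf: an empty
// solution with fuel_consumed set just above the allowed maximum.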
let output_data = OutputData {
nonce,
solution: Solution::new(),
fuel_consumed: max_fuel + 1,
runtime_signature: 0,
#[cfg(target_arch = "x86_64")]
cpu_arch: CPUArchitecture::AMD64,
#[cfg(target_arch = "aarch64")]
cpu_arch: CPUArchitecture::AARCH64,
};
let hash = MerkleHash::from(output_data.clone());
Ok::<(u64, MerkleHash, bool, Option<OutputData>), anyhow::Error>((
nonce,
hash,
is_solution,
output_folder.is_some().then(|| output_data),
))
} else if is_solution || exit_code == Some(86) || exit_code == Some(85) {
let bytes = fs::read(temp_file.path())?;
let output_data: OutputData = decompress_obj(&bytes)?;
let hash = MerkleHash::from(output_data.clone());
Ok::<(u64, MerkleHash, bool, Option<OutputData>), anyhow::Error>((
nonce,
hash,
is_solution,
output_folder.is_some().then(|| output_data),
))
} else {
Err(anyhow!(
"Failed to compute nonce {}: {:?}",
nonce,
output.status
))
}
})
})
.buffer_unordered(num_workers)
.collect::<Vec<_>>()
.await;
let mut dump = Vec::new();
for result in results {
let (nonce, hash, is_solution, output_data) = result??;
if is_solution {
solution_nonces.push(nonce);
}
if let Some(output_data) = output_data {
dump.push(output_data);
}
*hashes.get_mut((nonce - start_nonce) as usize).unwrap() = hash;
}
if let Some(path) = output_folder {
dump.sort_by_key(|data| data.nonce);
fs::write(&path.join("data.zlib"), compress_obj(&dump))?;
fs::write(&path.join("hashes.zlib"), compress_obj(&hashes))?;
}
let tree = MerkleTree::new(hashes, batch_size as usize)?;
let merkle_root = tree.calc_merkle_root();
let result = json!({
"merkle_root": merkle_root,
"solution_nonces": solution_nonces,
});
println!("{}", jsonify(&result));
Ok(())
})
}
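/// Accepts either a path ending in ".json" or an inline JSON string, parses it
/// into BenchmarkSettings, and exits with an error message on failure.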
fn load_settings(settings: &str) -> BenchmarkSettings {
let settings = if settings.ends_with(".json") {
fs::read_to_string(settings).unwrap_or_else(|_| {
eprintln!("Failed to read settings file: {}", settings);
std::process::exit(1);
})
} else {
settings.to_string()
};
dejsonify::<BenchmarkSettings>(&settings).unwrap_or_else(|_| {
eprintln!("Failed to parse settings");
std::process::exit(1);
})
}