Expose low-memory indexing via --jsonrpc-import flag

(instead of --skip-bulk-import)
Roman Zeyde 2018-08-07 20:04:08 +03:00
parent e09816db65
commit 200ef16ed0
2 changed files with 5 additions and 5 deletions


@@ -38,7 +38,7 @@ fn run_server(config: &Config) -> Result<()> {
     // Perform initial indexing from local blk*.dat block files.
     let store = DBStore::open(&config.db_path);
     let index = Index::load(&store, &daemon, &metrics, config.index_batch_size)?;
-    let store = if config.skip_bulk_import {
+    let store = if config.jsonrpc_import {
         index.update(&store, &signal)?; // slower: uses JSONRPC for fetching blocks
         full_compaction(store)
     } else {
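For context, the branch above picks between two initial-indexing paths: with --jsonrpc-import the blocks are fetched over bitcoind's JSONRPC (slower, but suitable for a remote full node or a low-memory system), otherwise the blk*.dat files are imported directly. The following is a minimal, self-contained sketch of that gating pattern; Store, Index, Config, and the two helper functions are simplified stand-ins for illustration, not the real types or APIs in this codebase.

// Simplified stand-ins for the store/index types used above (illustrative only).
struct Store;
struct Index;

struct Config {
    jsonrpc_import: bool, // true => fetch blocks via JSONRPC instead of reading blk*.dat
}

// Placeholder for the slower JSONRPC-based import path (index.update above).
fn update_via_jsonrpc(store: Store, _index: &Index) -> Store {
    store
}

// Placeholder for the faster bulk path that reads blk*.dat files directly.
fn bulk_index_blk_files(store: Store, _index: &Index) -> Store {
    store
}

fn initial_index(config: &Config, index: &Index, store: Store) -> Store {
    if config.jsonrpc_import {
        // Low-memory / remote-node friendly path.
        update_via_jsonrpc(store, index)
    } else {
        bulk_index_blk_files(store, index)
    }
}

fn main() {
    let config = Config { jsonrpc_import: true };
    let _store = initial_index(&config, &Index, Store);
}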


@@ -22,7 +22,7 @@ pub struct Config {
     pub cookie: Option<String>,        // for bitcoind JSONRPC authentication ("USER:PASSWORD")
     pub electrum_rpc_addr: SocketAddr, // for serving Electrum clients
     pub monitoring_addr: SocketAddr,   // for Prometheus monitoring
-    pub skip_bulk_import: bool,        // slower initial indexing, for low-memory systems
+    pub jsonrpc_import: bool,          // slower initial indexing, for low-memory systems
     pub index_batch_size: usize,       // number of blocks to index in parallel
     pub bulk_index_threads: usize,     // number of threads to use for bulk indexing
 }
@@ -85,8 +85,8 @@ impl Config {
                    .takes_value(true),
            )
            .arg(
-               Arg::with_name("skip_bulk_import")
-                   .long("skip-bulk-import")
+               Arg::with_name("jsonrpc_import")
+                   .long("jsonrpc-import")
                    .help("Use JSONRPC instead of directly importing blk*.dat files. Useful for remote full node or low memory system"),
            )
            .arg(
@@ -181,7 +181,7 @@ impl Config {
             cookie,
             electrum_rpc_addr,
             monitoring_addr,
-            skip_bulk_import: m.is_present("skip_bulk_import"),
+            jsonrpc_import: m.is_present("jsonrpc_import"),
             index_batch_size: value_t_or_exit!(m, "index_batch_size", usize),
             bulk_index_threads,
         };
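Taken together, the config changes rename the clap argument and the Config field in lock-step, so the flag is now passed on the command line as --jsonrpc-import rather than --skip-bulk-import. Below is a minimal, self-contained sketch of that wiring, assuming clap 2.x (matching the Arg::with_name / is_present style in the diff); the application name is a placeholder and the real parser defines many more arguments.

extern crate clap;

use clap::{App, Arg};

struct Config {
    jsonrpc_import: bool, // slower initial indexing, for low-memory systems
}

fn main() {
    // Placeholder app name; only the renamed flag is shown here.
    let m = App::new("indexer-example")
        .arg(
            Arg::with_name("jsonrpc_import")
                .long("jsonrpc-import")
                .help("Use JSONRPC instead of directly importing blk*.dat files"),
        )
        .get_matches();

    let config = Config {
        jsonrpc_import: m.is_present("jsonrpc_import"),
    };
    println!("jsonrpc_import = {}", config.jsonrpc_import);
}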