@@ -439,6 +477,7 @@ class BackfillerWorker(object):
@argh.arg('--static-nodes',help='Nodes to always backfill from. Comma separated if multiple. By default empty.')
@argh.arg('--backdoor-port',help='Port for gevent.backdoor access. By default disabled.')
@argh.arg('--start',help='If a datetime, only backfill hours after that datetime. If a number, backfill hours more recent than that number of hours ago. If None (default), all hours are backfilled.')
@argh.arg('--delete-before',help='Delete hours older than this number of hours ago. If 0 (default) do not delete any hours.')
@argh.arg('--run-once',help='If True, backfill only once. By default False.')
@argh.arg('--node-file',help="Name of file listing nodes to backfill from. One node per line in the form NAME URI, with whitespace-only lines or lines starting with '#' ignored. If None (default) do not get nodes from a file.")
@argh.arg('--node-database',help='Postgres connection string for database to fetch a list of nodes from. Either a space-separated list of key=value pairs, or a URI like: postgresql://USER:PASSWORD@HOST/DBNAME?KEY=VALUE . If None (default) do not get nodes from a database.')
@@ -446,9 +485,9 @@ class BackfillerWorker(object):
@argh.arg('--download-concurrency',help='Max number of concurrent segment downloads from a single node. Increasing this number may increase throughput but too high a value can overload the server or cause timeouts.')
@argh.arg('--recent-cutoff',help='Minimum age for a segment before we will backfill it, to prevent us backfilling segments we could have just downloaded ourselves instead. Expressed as number of seconds.')
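
For reference, a minimal sketch of parsing a node file in the format described by --node-file above (one NAME URI pair per line, blank lines and '#' comments ignored). The function name and return type are assumptions for illustration, not the patch's actual implementation:

# Illustrative sketch only; not the code added by this patch.
def parse_node_file(path):
    nodes = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#'):
                continue  # skip whitespace-only lines and comment lines
            # split on the first run of whitespace: "NAME URI"
            name, uri = line.split(None, 1)
            nodes[name] = uri.strip()
    return nodes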