Auto-save after every 100 results are calculated.

- Saving is still done on shutdown regardless.
- If the program is not shut down cleanly, at most 100 results will
  be lost and need to be recalculated.
This commit is contained in:
Andrew Hamilton 2018-08-14 16:17:37 +10:00
parent 4157e5b04c
commit f9645c12bd
2 changed files with 17 additions and 6 deletions

View file

@@ -638,9 +638,9 @@ class Screen:
         for index in range(worker_count):
             worker_ = worker.Worker(self._is_paused, is_being_tested)
             workers.append(worker_)
-            future = worker_.job_runner(
-                self._summary, self._log, self._summary._jobs_added_event,
-                self._appearance_changed_event)
+            future = worker_.job_runner(self, self._summary, self._log,
+                                        self._summary._jobs_added_event,
+                                        self._appearance_changed_event)
             worker_.future = asyncio.ensure_future(future,
                                                    loop=self._main_loop)
         self.workers = workers
@@ -853,6 +853,12 @@ class Screen:
         subprocess.Popen(["xdg-open", path], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)

+    def save(self):
+        worker.Worker.unsaved_jobs_total = 0
+        pickle_path = os.path.join(tools.CACHE_PATH, "summary.pickle")
+        open_compressed = functools.partial(gzip.open, compresslevel=1)
+        tools.dump_pickle_safe(self, pickle_path, open=open_compressed)
+
     def _select_entry_at_position(self, x, y, view_width, view_height):
         border_width = 1
         if x < border_width or y < border_width or x > view_width or \
@@ -1057,8 +1063,7 @@ def main(root_path, loop, worker_count=None, editor_command=None, theme=None,
             log.log_message("Program stopped.")
     finally:
         notifier.stop()
-        open_compressed = functools.partial(gzip.open, compresslevel=1)
-        tools.dump_pickle_safe(screen, pickle_path, open=open_compressed)
+        screen.save()

 @contextlib.contextmanager

View file

@@ -12,6 +12,8 @@ import vigil.tools as tools

 class Worker:

+    unsaved_jobs_total = 0
+
     def __init__(self, is_already_paused, is_being_tested):
         self.is_already_paused = is_already_paused
         self.is_being_tested = is_being_tested
@@ -34,7 +36,7 @@ class Worker:
         data = await self.process.stdout.readline()
         return tools.Status(int(data))

-    async def job_runner(self, summary, log, jobs_added_event,
+    async def job_runner(self, screen, summary, log, jobs_added_event,
                          appearance_changed_event):
         await self.create_process()
         while True:
@@ -50,6 +52,10 @@ class Worker:
                     os.kill(os.getpid(), signal.SIGINT)
                     break
                 await self.result.run(log, appearance_changed_event, self)
+                self.unsaved_jobs_total += 1
+                if self.unsaved_jobs_total == 100:
+                    log.log_message("Auto-saving...")
+                    screen.save()
             summary.completed_total += 1
             jobs_added_event.clear()