diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index ae9dfeed1c5..2069ba78193 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -607,6 +607,15 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
                 self.write_doc(docname, doctree)
 
     def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None:
+        # warm up caches/compile templates using the first document
+        with TimeIt(f'{__file__}: {inspect.currentframe().f_code.co_name}: write 1st doc', self.runtime, logger):
+            firstname, docnames = docnames[0], docnames[1:]
+            self.app.phase = BuildPhase.RESOLVING
+            doctree = self.env.get_and_resolve_doctree(firstname, self)
+            self.app.phase = BuildPhase.WRITING
+            self.write_doc_serialized(firstname, doctree)
+            self.write_doc(firstname, doctree)
+
         def write_process(worker_id: int, docname: str) -> tuple[list[str], dict[str, str]]:
             # print(f'{getpid()}: {docname}')
             # print(f'{worker_id}: {docname}')
@@ -615,7 +624,7 @@ def write_process(worker_id: int, docname: str) -> tuple[list[str], dict[str, st
 
             self.app.phase = BuildPhase.WRITING
             doctree = self.env.get_and_resolve_doctree(docname, self)
-            # self.write_doc_serialized(docname, doctree)
+            self.write_doc_serialized(docname, doctree)
             self.write_doc(docname, doctree)
             # get warnings to log them in the main process, removing empty values
             # (in case the last ends on \n)
@@ -630,15 +639,6 @@ def write_process(worker_id: int, docname: str) -> tuple[list[str], dict[str, st
             # new_warnings.append(warn_clean)
             return self.app._warnings_parallel, self.images
 
-        # warm up caches/compile templates using the first document
-        with TimeIt(f'{__file__}: {inspect.currentframe().f_code.co_name}: write 1st doc', self.runtime, logger):
-            firstname, docnames = docnames[0], docnames[1:]
-            self.app.phase = BuildPhase.RESOLVING
-            doctree = self.env.get_and_resolve_doctree(firstname, self)
-            self.app.phase = BuildPhase.WRITING
-            self.write_doc_serialized(firstname, doctree)
-            self.write_doc(firstname, doctree)
-
         # tasks = ParallelTasks(nproc)
         # chunks = make_chunks(docnames, nproc)
 
@@ -652,15 +652,15 @@ def on_chunk_done(args: list[tuple[str, NoneType]], result: NoneType) -> None:
 
         self.app.phase = BuildPhase.RESOLVING
         if docnames:
-            measure_1 = f'{__file__}: {inspect.currentframe().f_code.co_name}: get_doctree_write'
-            measure_2 = f'{__file__}: {inspect.currentframe().f_code.co_name}: write_doc_serialized'
-            for docname in docnames:
-                with TimeIt(measure_1, self.runtime, logger, False):
-                    doctree = self.env.get_doctree_write(docname)
-                with TimeIt(measure_2, self.runtime, logger, False):
-                    self.write_doc_serialized(docname, doctree)
-            logger.info(f'{measure_1}: {self.runtime[measure_1]}')
-            logger.info(f'{measure_2}: {self.runtime[measure_2]}')
+            # measure_1 = f'{__file__}: {inspect.currentframe().f_code.co_name}: get_doctree_write'
+            # measure_2 = f'{__file__}: {inspect.currentframe().f_code.co_name}: write_doc_serialized'
+            # for docname in docnames:
+            #     with TimeIt(measure_1, self.runtime, logger, False):
+            #         doctree = self.env.get_doctree_write(docname)
+            #     with TimeIt(measure_2, self.runtime, logger, False):
+            #         self.write_doc_serialized(docname, doctree)
+            # logger.info(f'{measure_1}: {self.runtime[measure_1]}')
+            # logger.info(f'{measure_2}: {self.runtime[measure_2]}')
 
         with WorkerPool(n_jobs=nproc, start_method='fork', use_dill=False, pass_worker_id=True, enable_insights=True) as pool:
             # args = []