diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 495cf9f694d..1ce2390653a 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -653,48 +653,50 @@ def on_chunk_done(args: list[tuple[str, NoneType]], result: NoneType) -> None:
             next(progress)
 
         self.app.phase = BuildPhase.RESOLVING
-        measure_1 = f'{__file__}: {inspect.currentframe().f_code.co_name}: get_doctree_write'
-        measure_2 = f'{__file__}: {inspect.currentframe().f_code.co_name}: write_doc_serialized'
-        for docname in docnames:
-            with TimeIt(measure_1, self.runtime, logger, False):
-                doctree = self.env.get_doctree_write(docname)
-            with TimeIt(measure_2, self.runtime, logger, False):
-                self.write_doc_serialized(docname, doctree)
-        logger.info(f'{measure_1}: {self.runtime[measure_1]}')
-        logger.info(f'{measure_2}: {self.runtime[measure_2]}')
-
-        with WorkerPool(n_jobs=nproc, start_method='fork', use_dill=False, pass_worker_id=True, enable_insights=True) as pool:
-            # args = []
-            # for docname in docnames:
-            #     doctree = self.env.get_and_resolve_doctree(docname, self)
-            #     self.write_doc_serialized(docname, doctree)
-            #     args.append([(docname, doctree)])
-            # import pickle
-
-            # from dill import dumps, loads
-            # doctree_dilled = dumps(args[0][0][1], protocol=pickle.HIGHEST_PROTOCOL)
-            # doctree_readback = loads(doctree_dilled)
-            with TimeIt(f'{__file__}: {inspect.currentframe().f_code.co_name}: WorkerPool', self.runtime, logger):
-                results = pool.map(write_process, docnames, worker_lifespan=5, progress_bar=True)
-                for result in results:
-                    warnings, images = result
-                    for warning in warnings:
-                        logger.warning(warning)
-                    self.images.update(images)
-            insights = pool.get_insights()
-            import json
-            print(json.dumps(insights, indent=2))
-            # print(insights)
-        # for chunk in chunks:
-        #     arg = []
-        #     for docname in chunk:
-        #         doctree = self.env.get_and_resolve_doctree(docname, self)
-        #         self.write_doc_serialized(docname, doctree)
-        #         arg.append((docname, doctree))
-        #     tasks.add_task(write_process, arg, on_chunk_done)
-
-        # # make sure all threads have finished
-        # tasks.join()
+        if docnames:
+            measure_1 = f'{__file__}: {inspect.currentframe().f_code.co_name}: get_doctree_write'
+            measure_2 = f'{__file__}: {inspect.currentframe().f_code.co_name}: write_doc_serialized'
+            for docname in docnames:
+                with TimeIt(measure_1, self.runtime, logger, False):
+                    doctree = self.env.get_doctree_write(docname)
+                with TimeIt(measure_2, self.runtime, logger, False):
+                    self.write_doc_serialized(docname, doctree)
+            logger.info(f'{measure_1}: {self.runtime[measure_1]}')
+            logger.info(f'{measure_2}: {self.runtime[measure_2]}')
+
+            with WorkerPool(n_jobs=nproc, start_method='fork', use_dill=False, pass_worker_id=True, enable_insights=True) as pool:
+                # args = []
+                # for docname in docnames:
+                #     doctree = self.env.get_and_resolve_doctree(docname, self)
+                #     self.write_doc_serialized(docname, doctree)
+                #     args.append([(docname, doctree)])
+                # import pickle
+
+                # from dill import dumps, loads
+                # doctree_dilled = dumps(args[0][0][1], protocol=pickle.HIGHEST_PROTOCOL)
+                # doctree_readback = loads(doctree_dilled)
+                with TimeIt(f'{__file__}: {inspect.currentframe().f_code.co_name}: WorkerPool', self.runtime, logger):
+                    results = pool.map(write_process, docnames, worker_lifespan=5, progress_bar=True)
+                    for result in results:
+                        warnings, images = result
+                        for warning in warnings:
+                            logger.warning(warning)
+                        self.images.update(images)
+                insights = pool.get_insights()
+                import json
+                print(json.dumps(insights, indent=2))
+                # print(insights)
+            # for chunk in chunks:
+            #     arg = []
+            #     for docname in chunk:
+            #         doctree = self.env.get_and_resolve_doctree(docname, self)
+            #         self.write_doc_serialized(docname, doctree)
+            #         arg.append((docname, doctree))
+            #     tasks.add_task(write_process, arg, on_chunk_done)
+
+            # # make sure all threads have finished
+            # tasks.join()
 
+        import json
         print(json.dumps(self.runtime, indent=2))
         logger.info('')
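
Note: the hunk calls a TimeIt helper that is defined outside this hunk. A minimal sketch of what the call sites imply is below: a context manager that accumulates wall-clock time per key into self.runtime, with a fourth parameter that suppresses per-call logging (so the per-docname loop stays quiet and totals are read back afterwards via self.runtime[measure_1]). The signature and the flag name are assumptions inferred from the call sites, not the actual implementation.

import time

class TimeIt:
    """Sketch of the timing context manager assumed by the hunk above,
    matching call sites like TimeIt(name, self.runtime, logger, False)."""

    def __init__(self, name, runtime, logger, log=True):
        self.name = name
        self.runtime = runtime
        self.logger = logger
        self.log = log  # assumed name: False suppresses per-call logging

    def __enter__(self):
        self._start = time.perf_counter()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        elapsed = time.perf_counter() - self._start
        # Accumulate, so repeated measurements (one per docname) sum up
        # under one key and can be reported once after the loop.
        self.runtime[self.name] = self.runtime.get(self.name, 0.0) + elapsed
        if self.log:
            self.logger.info('%s: %s', self.name, elapsed)
        return False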
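
For reference, a self-contained sketch of the mpire features the hunk relies on: pass_worker_id=True prepends the worker id to each task's arguments, enable_insights=True makes pool.get_insights() return per-worker timing statistics, and worker_lifespan=5 restarts each worker process after five tasks. The task function here is a stand-in for write_process, not Sphinx code.

from mpire import WorkerPool

def task(worker_id, docname):
    # With pass_worker_id=True, mpire passes the worker id as the first
    # argument; each item of the iterable follows.
    return worker_id, docname.upper()

if __name__ == '__main__':
    # start_method='fork' (as in the hunk) is Unix-only; the default is
    # kept here so the sketch also runs on other platforms.
    with WorkerPool(n_jobs=2, pass_worker_id=True, enable_insights=True) as pool:
        results = pool.map(task, ['index', 'usage'],
                           worker_lifespan=5, progress_bar=True)
        insights = pool.get_insights()
    print(results)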